diff -Nru tilemill-0.10.1~saucy9/VERSION tilemill-0.10.1~saucy10/VERSION
--- tilemill-0.10.1~saucy9/VERSION 2013-10-25 01:46:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/VERSION 2013-10-25 23:14:57.000000000 +0000
@@ -1,2 +1,2 @@
-v0.10.1-163-g3e27b93
-0.10.1.163
+v0.10.1-177-gc33d633
+0.10.1.177
diff -Nru tilemill-0.10.1~saucy9/debian/changelog tilemill-0.10.1~saucy10/debian/changelog
--- tilemill-0.10.1~saucy9/debian/changelog 2013-10-25 20:48:20.000000000 +0000
+++ tilemill-0.10.1~saucy10/debian/changelog 2013-10-25 23:25:30.000000000 +0000
@@ -1,4 +1,4 @@
-tilemill (0.10.1~saucy9) saucy; urgency=medium
+tilemill (0.10.1~saucy10) saucy; urgency=medium
 
   * tilemill
 
 -- Dane Springmeyer  Thursday, 24 Oct 2013 23:39:00 -0400
diff -Nru tilemill-0.10.1~saucy9/node_modules/JSV/package.json tilemill-0.10.1~saucy10/node_modules/JSV/package.json
--- tilemill-0.10.1~saucy9/node_modules/JSV/package.json 2013-10-25 01:44:46.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/JSV/package.json 2013-10-25 23:13:17.000000000 +0000
@@ -42,9 +42,5 @@
     "url": "git://github.com/garycourt/JSV.git"
   },
   "_id": "JSV@4.0.2",
-  "dist": {
-    "shasum": "26d57abfa36f6ca480277c3b9c5630d58b5bc483"
-  },
-  "_from": "JSV@4.0.x",
-  "_resolved": "https://registry.npmjs.org/JSV/-/JSV-4.0.2.tgz"
+  "_from": "JSV@4.0.x"
 }
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/.npmignore tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/.npmignore
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/.npmignore 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/.npmignore 2013-03-27 12:32:27.000000000 +0000
@@ -1,2 +1,2 @@
 *.dirty
-node_modules
\ No newline at end of file
+node_modules
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/Makefile tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/Makefile
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/Makefile 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/Makefile 2013-03-27 18:11:53.000000000 +0000
@@ -1,5 +1,5 @@
 test:
-	@find test/{simple,system}/test-*.js | xargs -n 1 -t node
+	npm test
 
 benchmark-v8:
 	@find benchmark/v8/*.js | xargs -n 1 -t node
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/README.md tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/README.md
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/README.md 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/README.md 2013-03-27 20:26:26.000000000 +0000
@@ -6,7 +6,9 @@
 
 ## Installation
 
-    npm install dirty
+```bash
+npm install dirty
+```
 
 ## Why dirty?
 
@@ -21,26 +23,27 @@
 
 ## Tutorial
 
-    require('../test/common');
-    var db = require('dirty')('user.db');
+```javascript
+  var dirty = require('dirty');
+  var db = dirty('user.db');
+
+  db.on('load', function() {
+    db.set('john', {eyes: 'blue'});
+    console.log('Added john, he has %s eyes.', db.get('john').eyes);
 
-    db.on('load', function() {
-      db.set('john', {eyes: 'blue'});
-      console.log('Added john, he has %s eyes.', db.get('john').eyes);
-
-      db.set('bob', {eyes: 'brown'}, function() {
-        console.log('User bob is now saved on disk.')
-      });
-
-      db.forEach(function(key, val) {
-        console.log('Found key: %s, val: %j', key, val);
-      });
+    db.set('bob', {eyes: 'brown'}, function() {
+      console.log('User bob is now saved on disk.')
     });
 
-    db.on('drain', function() {
-      console.log('All records are saved on disk now.');
+    db.forEach(function(key, val) {
+      console.log('Found key: %s, val: %j', key, val);
     });
+  });
 
+  db.on('drain', function() {
+    console.log('All records are saved on disk now.');
+  });
+```
 
 Output:
 
     Added john, he has blue eyes.
     Found key: john, val: {"eyes":"blue"}
     Found key: bob, val: {"eyes":"brown"}
     User bob is now saved on disk.
     All records are saved on disk now.
 
@@ -58,11 +61,12 @@
 
 The constructor can be invoked in multiple ways:
 
-    require('dirty')('my.db');
-    require('dirty').Dirty('my.db');
-    new (require('dirty'))('my.db');
-    new (require('dirty').Dirty)('my.db');
-
+```javascript
+require('dirty')('my.db');
+require('dirty').Dirty('my.db');
+new (require('dirty'))('my.db');
+new (require('dirty').Dirty)('my.db');
+```
 ### dirty.path
 
 The path of the dirty database.
@@ -103,6 +107,17 @@
 
 Emitted whenever all records have been written to disk.
 
+## Tests
+
+Dirty utilizes the [Mocha](http://visionmedia.github.com/mocha/) test framework.
+
+```bash
+git clone https://github.com/felixge/node-dirty
+cd node-dirty
+npm install
+npm test
+```
+
 ## License
 
 node-dirty is licensed under the MIT license.
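A minimal sketch of the `forEach` early-abort behavior documented in the README above (returning `false` stops iteration); it reuses the tutorial's `user.db` records for illustration and is not part of the packaged diff:

```javascript
var db = require('dirty')('user.db');

db.on('load', function() {
  var firstBrownEyed = null;
  db.forEach(function(key, val) {
    if (val && val.eyes === 'brown') {
      firstBrownEyed = key;
      return false; // stop iterating as soon as we have a match
    }
  });
  console.log('First brown-eyed user: %s', firstBrownEyed);
});
```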
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/for-each.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/for-each.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/for-each.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/for-each.js 2013-03-27 20:43:36.000000000 +0000
@@ -1,23 +1,22 @@
-require('../../test/common');
+var config = require('../../test/config');
 var COUNT = 1e6,
-    dirty = require('dirty')(),
+    dirty = require(config.LIB_DIRTY)(),
     util = require('util');
 
 for (var i = 0; i < COUNT; i++) {
   dirty.set(i, i);
 }
 
-var start = +new Date, i = 0;
+var start = Date.now(), i = 0;
 dirty.forEach(function(key, doc) {
   if (!key && key !== 0) {
     throw new Error('implementation fail');
   }
 });
 
-var ms = +new Date - start,
+var ms = Date.now() - start,
     mhz = ((COUNT / (ms / 1000)) / 1e6).toFixed(2),
     million = COUNT / 1e6;
 
 // Can't use console.log() since since I also test this in ancient node versions
 util.log(mhz+' Mhz ('+million+' million in '+ms+' ms)');
-
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/get.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/get.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/get.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/get.js 2013-03-27 20:37:09.000000000 +0000
@@ -1,20 +1,20 @@
-require('../../test/common');
+var config = require('../../test/config');
 var COUNT = 1e6,
-    dirty = require('dirty')(),
+    dirty = require(config.LIB_DIRTY)(),
     util = require('util');
 
 for (var i = 0; i < COUNT; i++) {
   dirty.set(i, i);
 }
 
-var start = +new Date;
+var start = Date.now();
 for (var i = 0; i < COUNT; i++) {
   if (dirty.get(i) !== i) {
     throw new Error('implementation fail');
   }
 }
 
-var ms = +new Date - start,
+var ms = Date.now() - start,
     mhz = ((COUNT / (ms / 1000)) / 1e6).toFixed(2),
     million = COUNT / 1e6;
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/load.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/load.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/load.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/load.js 2013-03-27 20:37:22.000000000 +0000
@@ -1,7 +1,7 @@
-require('../../test/common');
+var config = require('../../test/config');
 var COUNT = 1e4,
-    DB_FILE = __dirname+'/../../test/tmp/benchmark-set-drain.dirty',
-    dirty = require('dirty')(DB_FILE),
+    DB_FILE = config.TMP_PATH + '/benchmark-set-drain.dirty',
+    dirty = require(config.LIB_DIRTY)(DB_FILE),
     util = require('util'),
     loaded = false;
 
@@ -10,9 +10,9 @@
 }
 
 dirty.on('drain', function() {
-  var start = +new Date;
+  var start = Date.now();
   require('dirty')(DB_FILE).on('load', function(length) {
-    var ms = +new Date - start,
+    var ms = Date.now() - start,
         mhz = ((COUNT / (ms / 1000)) / 1e3).toFixed(2),
         million = COUNT / 1e6;
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain-256-bytes-per-doc.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain-256-bytes-per-doc.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain-256-bytes-per-doc.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain-256-bytes-per-doc.js 2013-03-27 20:37:29.000000000 +0000
@@ -1,16 +1,16 @@
-require('../../test/common');
+var config = require('../../test/config');
 var COUNT = 1e5,
-    dirty = require('dirty')(__dirname+'/../../test/tmp/benchmark-set-drain.dirty'),
+    dirty = require(config.LIB_DIRTY)(config.TMP_PATH + '/benchmark-set-drain.dirty'),
     util = require('util'),
     drained = false;
 
-var start = +new Date;
+var start = Date.now();
 for (var i = 0; i < COUNT; i++) {
   dirty.set(i, 'This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string has 256 bytes. This string');
 }
 
 dirty.on('drain', function() {
-  var ms = +new Date - start,
+  var ms = Date.now() - start,
       mhz = ((COUNT / (ms / 1000)) / 1e3).toFixed(2),
       million = COUNT / 1e6;
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set-drain.js 2013-03-27 20:37:34.000000000 +0000
@@ -1,16 +1,16 @@
-require('../../test/common');
+var config = require('../../test/config');
 var COUNT = 1e4,
-    dirty = require('dirty')(__dirname+'/../../test/tmp/benchmark-set-drain.dirty'),
+    dirty = require(config.LIB_DIRTY)(config.TMP_PATH + '/benchmark-set-drain.dirty'),
     util = require('util'),
     drained = false;
 
-var start = +new Date;
+var start = Date.now();
 for (var i = 0; i < COUNT; i++) {
   dirty.set(i, i);
 }
 
 dirty.on('drain', function() {
-  var ms = +new Date - start,
+  var ms = Date.now() - start,
       mhz = ((COUNT / (ms / 1000)) / 1e3).toFixed(2),
       million = COUNT / 1e6;
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/benchmark/dirty/set.js 2013-03-27 20:43:27.000000000 +0000
@@ -1,18 +1,16 @@
-require('../../test/common');
+var config = require('../../test/config');
 var COUNT = 1e6,
-    dirty = require('dirty')(__dirname+'/../../test/tmp/benchmark-set.dirty'),
+    dirty = require(config.LIB_DIRTY)(config.TMP_PATH + '/benchmark-set.dirty'),
     util = require('util');
 
-var start = +new Date;
+var start = Date.now();
 for (var i = 0; i < COUNT; i++) {
   dirty.set(i, i);
 }
 
-var ms = +new Date - start,
+var ms = Date.now() - start,
     mhz = ((COUNT / (ms / 1000)) / 1e6).toFixed(2),
     million = COUNT / 1e6;
 
 // Can't use console.log() since since I also test this in ancient node versions
 util.log(mhz+' Mhz ('+million+' million in '+ms+' ms)');
-process.exit(0);
-
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/lib/dirty/dirty.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/lib/dirty/dirty.js
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/lib/dirty/dirty.js 2012-02-05 22:26:34.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/lib/dirty/dirty.js 2013-03-27 18:11:02.000000000 +0000
@@ -4,6 +4,10 @@
     util = require('util'),
     EventEmitter = require('events').EventEmitter;
 
+
+/**
+* Constructor function
+*/
 var Dirty = exports.Dirty = function(path) {
   if (!(this instanceof Dirty)) return new Dirty(path);
 
@@ -25,12 +29,18 @@
 Dirty.Dirty = Dirty;
 module.exports = Dirty;
 
+
+/**
+* set() stores a JSON object in the database at key
+* cb is fired when the data is persisted.
+* In memory, this is immediate- on disk, it will take some time.
+*/
 Dirty.prototype.set = function(key, val, cb) {
   if (val === undefined) {
-    this._keys.splice(this._keys.indexOf(key), 1)
+    this._keys.splice(this._keys.indexOf(key), 1);
     delete this._docs[key];
   } else {
-    if (this._keys.indexOf(key) === -1) {
+    if (this._keys.indexOf(key) === -1) {
       this._keys.push(key);
     }
     this._docs[key] = val;
@@ -45,18 +55,32 @@
   this._maybeFlush();
 };
 
+/**
+* Get the value stored at a key in the database
+* This is synchronous since a cache is maintained in-memory
+*/
 Dirty.prototype.get = function(key) {
   return this._docs[key];
 };
 
+/**
+* Get total number of stored keys
+*/
 Dirty.prototype.size = function() {
   return this._keys.length;
 };
 
+/**
+* Remove a key and the value stored there
+*/
 Dirty.prototype.rm = function(key, cb) {
   this.set(key, undefined, cb);
 };
 
+
+/**
+* Iterate over keys, applying match function
+*/
 Dirty.prototype.forEach = function(fn) {
   for (var i = 0; i < this._keys.length; i++) {
@@ -68,6 +92,11 @@
 };
 
+
+
+
+
+// Called when a dirty connection is instantiated
 Dirty.prototype._load = function() {
   var self = this,
       buffer = '',
       length = 0;
 
@@ -100,10 +129,12 @@
     arr.forEach(function(rowStr) {
       if (!rowStr) {
         self.emit('error', new Error('Empty lines never appear in a healthy database'));
-        return
+        return;
       }
+
+      var row;
       try {
-        var row = JSON.parse(rowStr);
+        row = JSON.parse(rowStr);
         if (!('key' in row)) {
           throw new Error();
         }
@@ -146,13 +177,13 @@
 
 Dirty.prototype._writeDrain = function() {
   this.flushing = false;
-  
+
   if (!this._queue.length) {
     this.emit('drain');
   } else {
     this._maybeFlush();
   }
-}
+};
 
 Dirty.prototype._maybeFlush = function() {
   if (this.flushing || !this._queue.length) {
@@ -193,6 +224,8 @@
     }
 
     (function(cbs) {
+      var isDrained;
+
      if (!self.path) {
        process.nextTick(function() {
          callbacks(null, cbs);
@@ -201,7 +234,11 @@
        return;
      }
 
-      self._writeStream.write(bundleStr, function(err) {
+      isDrained = self._writeStream.write(bundleStr, function(err) {
+        if (isDrained) {
+          self._writeDrain();
+        }
+
        if (!cbs.length && err) {
          self.emit('error', err);
          return;
@@ -209,6 +246,7 @@
 
        callbacks(err, cbs);
      });
+
    })(cbs);
 
    bundleStr = '';
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/package.json tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/package.json
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/dirty/package.json 2012-10-11 02:23:13.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/dirty/package.json 2013-10-25 23:13:14.000000000 +0000
@@ -1,16 +1,25 @@
 {
   "name": "dirty",
   "description": "A tiny & fast key value store with append-only disk log. Ideal for apps with < 1 million records.",
-  "version": "0.9.6",
+  "version": "0.9.7",
   "dependencies": {},
   "main": "./lib/dirty",
   "devDependencies": {
-    "gently": ">=0.8.0"
+    "mocha": "~1.8.2",
+    "rimraf": "~2.1.4"
+  },
+  "scripts": {
+    "test": "./node_modules/mocha/bin/mocha test/test-*.js -R list"
   },
   "engines": {
     "node": "*"
   },
-  "readme": "# node-dirty\n\n## Purpose\n\nA tiny & fast key value store with append-only disk log. Ideal for apps with < 1 million records.\n\n## Installation\n\n npm install dirty\n\n## Why dirty?\n\nThis module is called dirty because:\n\n* The file format is newline separated JSON\n* Your database lives in the same process as your application, they share memory\n* There is no query language, you just `forEach` through all records\n\nSo dirty means that you will hit a very hard wall with this database after ~1 million records,\nbut it is a wonderful solution for anything smaller than that.\n\n## Tutorial\n\n require('../test/common');\n var db = require('dirty')('user.db');\n\n db.on('load', function() {\n db.set('john', {eyes: 'blue'});\n console.log('Added john, he has %s eyes.', db.get('john').eyes);\n\n db.set('bob', {eyes: 'brown'}, function() {\n console.log('User bob is now saved on disk.')\n });\n\n db.forEach(function(key, val) {\n console.log('Found key: %s, val: %j', key, val);\n });\n });\n\n db.on('drain', function() {\n console.log('All records are saved on disk now.');\n });\n\nOutput:\n\n Added john, he has blue eyes.\n Found key: john, val: {\"eyes\":\"blue\"}\n Found key: bob, val: {\"eyes\":\"brown\"}\n User bob is now saved on disk.\n All records are saved on disk now.\n\n## API\n\n### new Dirty([path])\n\nCreates a new dirty database. If `path` does not exist yet, it is created. You\ncan also omit the `path` if you don't want disk persistence (useful for testing).\n\nThe constructor can be invoked in multiple ways:\n\n require('dirty')('my.db');\n require('dirty').Dirty('my.db');\n new (require('dirty'))('my.db');\n new (require('dirty').Dirty)('my.db');\n\n### dirty.path\n\nThe path of the dirty database.\n\n### dirty.set(key, value, [cb])\n\nSet's the given `key` / `val` pair. The state of the database is affected instantly,\nthe optional `cb` callback is fired when the record was written to disk.\n\n`val` can be any JSON-serializable type, it does not have to be an object.\n\n### dirty.get(key)\n\nRetrieves the value for the given `key`.\n\n### dirty.rm(key, cb)\n\nRemoves the record with the given `key`. This is identical to setting the `key`'s value\nto `undefined`.\n\n### dirty.forEach(fn)\n\nCalls the given `fn` function for every document in the database. The passed\narguments are `key` and `val`. You can return `false` to abort a query (useful\nif you are only interested in a limited number of records).\n\nThis function is blocking and runs at ~4 Mhz.\n\n### dirty event: 'load' (length)\n\nEmitted once the database file has finished loading. It is not safe to access\nrecords before this event fires. Writing records however should be fine.\n\n`length` is the amount of records the database is holding. This only counts each\nkey once, even if it had been overwritten.\n\n### dirty event: 'drain' ()\n\nEmitted whenever all records have been written to disk.\n\n## License\n\nnode-dirty is licensed under the MIT license.\n",
-  "_id": "dirty@0.9.6",
-  "_from": "dirty@0.9.x"
+  "readme": "# node-dirty\n\n## Purpose\n\nA tiny & fast key value store with append-only disk log. Ideal for apps with < 1 million records.\n\n## Installation\n\n```bash\nnpm install dirty\n```\n\n## Why dirty?\n\nThis module is called dirty because:\n\n* The file format is newline separated JSON\n* Your database lives in the same process as your application, they share memory\n* There is no query language, you just `forEach` through all records\n\nSo dirty means that you will hit a very hard wall with this database after ~1 million records,\nbut it is a wonderful solution for anything smaller than that.\n\n## Tutorial\n\n```javascript\n var dirty = require('dirty');\n var db = dirty('user.db');\n\n db.on('load', function() {\n db.set('john', {eyes: 'blue'});\n console.log('Added john, he has %s eyes.', db.get('john').eyes);\n\n db.set('bob', {eyes: 'brown'}, function() {\n console.log('User bob is now saved on disk.')\n });\n\n db.forEach(function(key, val) {\n console.log('Found key: %s, val: %j', key, val);\n });\n });\n\n db.on('drain', function() {\n console.log('All records are saved on disk now.');\n });\n```\nOutput:\n\n Added john, he has blue eyes.\n Found key: john, val: {\"eyes\":\"blue\"}\n Found key: bob, val: {\"eyes\":\"brown\"}\n User bob is now saved on disk.\n All records are saved on disk now.\n\n## API\n\n### new Dirty([path])\n\nCreates a new dirty database. If `path` does not exist yet, it is created. You\ncan also omit the `path` if you don't want disk persistence (useful for testing).\n\nThe constructor can be invoked in multiple ways:\n\n```javascript\nrequire('dirty')('my.db');\nrequire('dirty').Dirty('my.db');\nnew (require('dirty'))('my.db');\nnew (require('dirty').Dirty)('my.db');\n```\n### dirty.path\n\nThe path of the dirty database.\n\n### dirty.set(key, value, [cb])\n\nSet's the given `key` / `val` pair. The state of the database is affected instantly,\nthe optional `cb` callback is fired when the record was written to disk.\n\n`val` can be any JSON-serializable type, it does not have to be an object.\n\n### dirty.get(key)\n\nRetrieves the value for the given `key`.\n\n### dirty.rm(key, cb)\n\nRemoves the record with the given `key`. This is identical to setting the `key`'s value\nto `undefined`.\n\n### dirty.forEach(fn)\n\nCalls the given `fn` function for every document in the database. The passed\narguments are `key` and `val`. You can return `false` to abort a query (useful\nif you are only interested in a limited number of records).\n\nThis function is blocking and runs at ~4 Mhz.\n\n### dirty event: 'load' (length)\n\nEmitted once the database file has finished loading. It is not safe to access\nrecords before this event fires. Writing records however should be fine.\n\n`length` is the amount of records the database is holding. This only counts each\nkey once, even if it had been overwritten.\n\n### dirty event: 'drain' ()\n\nEmitted whenever all records have been written to disk.\n\n## Tests\n\nDirty utilizes the [Mocha](http://visionmedia.github.com/mocha/) test framework.\n\n```bash\ngit clone https://github.com/felixge/node-dirty\ncd node-dirty\nnpm install\nnpm test\n```\n\n## License\n\nnode-dirty is licensed under the MIT license.\n",
+  "readmeFilename": "README.md",
+  "_id": "dirty@0.9.7",
+  "dist": {
+    "shasum": "196c9c9f88badb8818d5539749abe8f9931f244c"
+  },
+  "_from": "dirty@0.9.x",
+  "_resolved": "https://registry.npmjs.org/dirty/-/dirty-0.9.7.tgz"
 }
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/.npmignore tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/.npmignore
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/.npmignore 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/.npmignore 2011-12-06 22:32:37.000000000 +0000
@@ -0,0 +1,3 @@
+test/
+Rakefile
+docs/
\ No newline at end of file
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/CNAME tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/CNAME
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/CNAME 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/CNAME 2012-04-02 17:06:52.000000000 +0000
@@ -0,0 +1 @@
+underscorejs.org
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/LICENSE tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/LICENSE
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/LICENSE 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/LICENSE 2012-01-04 21:17:38.000000000 +0000
@@ -0,0 +1,22 @@
+Copyright (c) 2009-2012 Jeremy Ashkenas, DocumentCloud
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/README.md tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/README.md
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/README.md 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/README.md 2012-01-11 16:41:26.000000000 +0000
@@ -0,0 +1,19 @@
+                       __
+                      /\ \                                 __
+   __  __  ___    \_\ \    __  _ __   ____   ___    ___   _ __   __      /\_\   ____
+  /\ \/\ \ /' _ `\ /'_ \  /'__`\/\ __\/ ,__\ / ___\ / __`\/\ __\/'__`\    \/\ \ /',__\
+  \ \ \_\ \/\ \/\ \/\ \ \ \/\ __/\ \ \//\__, `\/\ \__//\ \ \ \ \ \//\ __/ __ \ \ \/\__, `\
+   \ \____/\ \_\ \_\ \___,_\ \____\\ \_\\/\____/\ \____\ \____/\ \_\\ \____\/\_\ _\ \ \/\____/
+    \/___/ \/_/\/_/\/__,_ /\/____/ \/_/ \/___/ \/____/\/___/ \/_/ \/____/\/_//\ \_\ \/___/
+                                                                              \ \____/
+                                                                               \/___/
+
+Underscore.js is a utility-belt library for JavaScript that provides
+support for the usual functional suspects (each, map, reduce, filter...)
+without extending any core JavaScript objects.
+
+For Docs, License, Tests, and pre-packed downloads, see:
+http://documentcloud.github.com/underscore/
+
+Many thanks to our contributors:
+https://github.com/documentcloud/underscore/contributors
Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/favicon.ico and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/favicon.ico differ
diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/index.html tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/index.html
--- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/index.html 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/index.html 2012-04-10 14:43:40.000000000 +0000
@@ -0,0 +1,2109 @@
+ Underscore.js
+ Underscore is a + utility-belt library for JavaScript that provides a lot of the + functional programming support that you would expect in + Prototype.js + (or Ruby), + but without extending any of the built-in JavaScript objects. It's the + tie to go along with jQuery's tux, + and Backbone.js's suspenders. +

+ +

+ Underscore provides 60-odd functions that support both the usual + functional suspects: map, select, invoke — + as well as more specialized helpers: function binding, javascript + templating, deep equality testing, and so on. It delegates to built-in + functions, if present, so modern browsers will use the + native implementations of forEach, map, reduce, + filter, every, some and indexOf. +

+ +

+ A complete Test & Benchmark Suite + is included for your perusal. +

+ +

+ You may also read through the annotated source code. +

+ +

+ The project is + hosted on GitHub. + You can report bugs and discuss features on the + issues page, + on Freenode in the #documentcloud channel, + or send tweets to @documentcloud. +

+ +

+ Underscore is an open-source component of DocumentCloud. +

+ +

+ Downloads (Right-click, and use "Save As")
+
+ Development Version (1.3.3) - 37kb, Uncompressed with Plentiful Comments
+ Production Version (1.3.3) - 4kb, Minified and Gzipped
+
+ Upgrade warning: versions 1.3.0 and higher remove AMD (RequireJS) support.

Collection Functions (Arrays or Objects)

+ +

+ each_.each(list, iterator, [context]) + Alias: forEach +
+ Iterates over a list of elements, yielding each in turn to an iterator + function. The iterator is bound to the context object, if one is + passed. Each invocation of iterator is called with three arguments: + (element, index, list). If list is a JavaScript object, iterator's + arguments will be (value, key, list). Delegates to the native + forEach function if it exists. +

+
+_.each([1, 2, 3], function(num){ alert(num); });
+=> alerts each number in turn...
+_.each({one : 1, two : 2, three : 3}, function(num, key){ alert(num); });
+=> alerts each number in turn...
+ +

+ map_.map(list, iterator, [context]) + Alias: collect +
+ Produces a new array of values by mapping each value in list + through a transformation function (iterator). If the native map method + exists, it will be used instead. If list is a JavaScript object, + iterator's arguments will be (value, key, list). +

+
+_.map([1, 2, 3], function(num){ return num * 3; });
+=> [3, 6, 9]
+_.map({one : 1, two : 2, three : 3}, function(num, key){ return num * 3; });
+=> [3, 6, 9]
+ +

+ reduce_.reduce(list, iterator, memo, [context]) + Aliases: inject, foldl +
+ Also known as inject and foldl, reduce boils down a + list of values into a single value. Memo is the initial state + of the reduction, and each successive step of it should be returned by + iterator. +

+
+var sum = _.reduce([1, 2, 3], function(memo, num){ return memo + num; }, 0);
+=> 6
+
+ +

+ reduceRight_.reduceRight(list, iterator, memo, [context]) + Alias: foldr +
+ The right-associative version of reduce. Delegates to the + JavaScript 1.8 version of reduceRight, if it exists. Foldr + is not as useful in JavaScript as it would be in a language with lazy + evaluation. +

+
+var list = [[0, 1], [2, 3], [4, 5]];
+var flat = _.reduceRight(list, function(a, b) { return a.concat(b); }, []);
+=> [4, 5, 2, 3, 0, 1]
+
+ +

+ find_.find(list, iterator, [context]) + Alias: detect +
+ Looks through each value in the list, returning the first one that + passes a truth test (iterator). The function returns as + soon as it finds an acceptable element, and doesn't traverse the + entire list. +

+
+var even = _.find([1, 2, 3, 4, 5, 6], function(num){ return num % 2 == 0; });
+=> 2
+
+ +

+ filter_.filter(list, iterator, [context]) + Alias: select +
+ Looks through each value in the list, returning an array of all + the values that pass a truth test (iterator). Delegates to the + native filter method, if it exists. +

+
+var evens = _.filter([1, 2, 3, 4, 5, 6], function(num){ return num % 2 == 0; });
+=> [2, 4, 6]
+
+ +

+ reject_.reject(list, iterator, [context]) +
+ Returns the values in list without the elements that the truth + test (iterator) passes. The opposite of filter. +

+
+var odds = _.reject([1, 2, 3, 4, 5, 6], function(num){ return num % 2 == 0; });
+=> [1, 3, 5]
+
+ +

+ all_.all(list, iterator, [context]) + Alias: every +
+ Returns true if all of the values in the list pass the iterator + truth test. Delegates to the native method every, if present. +

+
+_.all([true, 1, null, 'yes'], _.identity);
+=> false
+
+ +

+ any_.any(list, [iterator], [context]) + Alias: some +
+ Returns true if any of the values in the list pass the + iterator truth test. Short-circuits and stops traversing the list + if a true element is found. Delegates to the native method some, + if present. +

+
+_.any([null, 0, 'yes', false]);
+=> true
+
+ +

+ include_.include(list, value) + Alias: contains +
+ Returns true if the value is present in the list, using + === to test equality. Uses indexOf internally, if list + is an Array. +

+
+_.include([1, 2, 3], 3);
+=> true
+
+ +

+ invoke_.invoke(list, methodName, [*arguments]) +
+ Calls the method named by methodName on each value in the list. + Any extra arguments passed to invoke will be forwarded on to the + method invocation. +

+
+_.invoke([[5, 1, 7], [3, 2, 1]], 'sort');
+=> [[1, 5, 7], [1, 2, 3]]
+
+ +

+ pluck_.pluck(list, propertyName) +
+ A convenient version of what is perhaps the most common use-case for + map: extracting a list of property values. +

+
+var stooges = [{name : 'moe', age : 40}, {name : 'larry', age : 50}, {name : 'curly', age : 60}];
+_.pluck(stooges, 'name');
+=> ["moe", "larry", "curly"]
+
+ +

+ max_.max(list, [iterator], [context]) +
+ Returns the maximum value in list. If iterator is passed, + it will be used on each value to generate the criterion by which the + value is ranked. +

+
+var stooges = [{name : 'moe', age : 40}, {name : 'larry', age : 50}, {name : 'curly', age : 60}];
+_.max(stooges, function(stooge){ return stooge.age; });
+=> {name : 'curly', age : 60};
+
+ +

+ min_.min(list, [iterator], [context]) +
+ Returns the minimum value in list. If iterator is passed, + it will be used on each value to generate the criterion by which the + value is ranked. +

+
+var numbers = [10, 5, 100, 2, 1000];
+_.min(numbers);
+=> 2
+
+ +

+ sortBy_.sortBy(list, iterator, [context]) +
+ Returns a sorted copy of list, ranked in ascending order by the + results of running each value through iterator. Iterator may + also be the string name of the property to sort by (e.g. length). +

+
+_.sortBy([1, 2, 3, 4, 5, 6], function(num){ return Math.sin(num); });
+=> [5, 4, 6, 3, 1, 2]
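A sketch of the string shorthand mentioned above:

```javascript
// Sort by the 'length' property of each string.
_.sortBy(['aaa', 'a', 'aa'], 'length');
// => ["a", "aa", "aaa"]
```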
+
+ +

+ groupBy_.groupBy(list, iterator) +
+ Splits a collection into sets, grouped by the result of running each + value through iterator. If iterator is a string instead of + a function, groups by the property named by iterator on each of + the values. +

+
+_.groupBy([1.3, 2.1, 2.4], function(num){ return Math.floor(num); });
+=> {1: [1.3], 2: [2.1, 2.4]}
+
+_.groupBy(['one', 'two', 'three'], 'length');
+=> {3: ["one", "two"], 5: ["three"]}
+
+ +

+ sortedIndex_.sortedIndex(list, value, [iterator]) +
+ Uses a binary search to determine the index at which the value + should be inserted into the list in order to maintain the list's + sorted order. If an iterator is passed, it will be used to compute + the sort ranking of each value. +

+
+_.sortedIndex([10, 20, 30, 40, 50], 35);
+=> 3
+
+ +

+ shuffle_.shuffle(list) +
+ Returns a shuffled copy of the list, using a version of the + Fisher-Yates shuffle. +

+
+_.shuffle([1, 2, 3, 4, 5, 6]);
+=> [4, 1, 6, 3, 5, 2]
+
+ +

+ toArray_.toArray(list) +
+ Converts the list (anything that can be iterated over), into a + real Array. Useful for transmuting the arguments object. +

+
+(function(){ return _.toArray(arguments).slice(1); })(1, 2, 3, 4);
+=> [2, 3, 4]
+
+ +

+ size_.size(list) +
+ Return the number of values in the list. +

+
+_.size({one : 1, two : 2, three : 3});
+=> 3
+
+ +

Array Functions

+ +

+ Note: All array functions will also work on the arguments object. +
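A minimal sketch of that, using _.rest on arguments:

```javascript
function tail() {
  // arguments is array-like, so the array functions accept it directly
  return _.rest(arguments);
}
tail(1, 2, 3);
// => [2, 3]
```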

+ +

+ first_.first(array, [n]) + Alias: head +
+ Returns the first element of an array. Passing n will + return the first n elements of the array. +

+
+_.first([5, 4, 3, 2, 1]);
+=> 5
+
+ +

+ initial_.initial(array, [n]) +
+ Returns everything but the last entry of the array. Especially useful on + the arguments object. Pass n to exclude the last n elements + from the result. +

+
+_.initial([5, 4, 3, 2, 1]);
+=> [5, 4, 3, 2]
+
+ +

+ last_.last(array, [n]) +
+ Returns the last element of an array. Passing n will return + the last n elements of the array. +

+
+_.last([5, 4, 3, 2, 1]);
+=> 1
+
+ +

+ rest_.rest(array, [index]) + Alias: tail +
+ Returns the rest of the elements in an array. Pass an index + to return the values of the array from that index onward. +

+
+_.rest([5, 4, 3, 2, 1]);
+=> [4, 3, 2, 1]
+
+ +

+ compact_.compact(array) +
+ Returns a copy of the array with all falsy values removed. + In JavaScript, false, null, 0, "", + undefined and NaN are all falsy. +

+
+_.compact([0, 1, false, 2, '', 3]);
+=> [1, 2, 3]
+
+ +

+ flatten_.flatten(array, [shallow]) +
+ Flattens a nested array (the nesting can be to any depth). If you + pass shallow, the array will only be flattened a single level. +

+
+_.flatten([1, [2], [3, [[4]]]]);
+=> [1, 2, 3, 4];
+
+_.flatten([1, [2], [3, [[4]]]], true);
+=> [1, 2, 3, [[4]]];
+
+ +

+ without_.without(array, [*values]) +
+ Returns a copy of the array with all instances of the values + removed. === is used for the equality test. +

+
+_.without([1, 2, 1, 0, 3, 1, 4], 0, 1);
+=> [2, 3, 4]
+
+ +

+ union_.union(*arrays) +
+ Computes the union of the passed-in arrays: the list of unique items, + in order, that are present in one or more of the arrays. +

+
+_.union([1, 2, 3], [101, 2, 1, 10], [2, 1]);
+=> [1, 2, 3, 101, 10]
+
+ +

+ intersection_.intersection(*arrays) +
+ Computes the list of values that are the intersection of all the arrays. + Each value in the result is present in each of the arrays. +

+
+_.intersection([1, 2, 3], [101, 2, 1, 10], [2, 1]);
+=> [1, 2]
+
+ +

+ difference_.difference(array, *others) +
+ Similar to without, but returns the values from array that + are not present in the other arrays. +

+
+_.difference([1, 2, 3, 4, 5], [5, 2, 10]);
+=> [1, 3, 4]
+
+ +

+ uniq_.uniq(array, [isSorted], [iterator]) + Alias: unique +
+ Produces a duplicate-free version of the array, using === to test + object equality. If you know in advance that the array is sorted, + passing true for isSorted will run a much faster algorithm. + If you want to compute unique items based on a transformation, pass an + iterator function. +

+
+_.uniq([1, 2, 1, 3, 1, 4]);
+=> [1, 2, 3, 4]
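A sketch of the iterator form, which computes uniqueness on the transformed values but returns the originals:

```javascript
// 2.1 and 2.4 share the integer part 2, so the first one wins.
_.uniq([1.3, 2.1, 2.4], false, Math.floor);
// => [1.3, 2.1]
```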
+
+ +

+ zip_.zip(*arrays) +
+ Merges together the values of each of the arrays with the + values at the corresponding position. Useful when you have separate + data sources that are coordinated through matching array indexes. + If you're working with a matrix of nested arrays, zip.apply + can transpose the matrix in a similar fashion. +

+
+_.zip(['moe', 'larry', 'curly'], [30, 40, 50], [true, false, false]);
+=> [["moe", 30, true], ["larry", 40, false], ["curly", 50, false]]
+
+ +

+ indexOf_.indexOf(array, value, [isSorted]) +
+ Returns the index at which value can be found in the array, + or -1 if value is not present in the array. Uses the native + indexOf function unless it's missing. If you're working with a + large array, and you know that the array is already sorted, pass true + for isSorted to use a faster binary search. +

+
+_.indexOf([1, 2, 3], 2);
+=> 1
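A sketch of the isSorted fast path:

```javascript
// The array is already sorted, so this uses a binary search internally.
_.indexOf([10, 20, 30, 40], 30, true);
// => 2
```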
+
+ +

+ lastIndexOf_.lastIndexOf(array, value) +
+ Returns the index of the last occurrence of value in the array, + or -1 if value is not present. Uses the native lastIndexOf + function if possible. +

+
+_.lastIndexOf([1, 2, 3, 1, 2, 3], 2);
+=> 4
+
+ +

+ range_.range([start], stop, [step]) +
+ A function to create flexibly-numbered lists of integers, handy for + each and map loops. start, if omitted, defaults + to 0; step defaults to 1. Returns a list of integers + from start to stop, incremented (or decremented) by step, + exclusive. +

+
+_.range(10);
+=> [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+_.range(1, 11);
+=> [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+_.range(0, 30, 5);
+=> [0, 5, 10, 15, 20, 25]
+_.range(0, -10, -1);
+=> [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]
+_.range(0);
+=> []
+
+ +

Function (uh, ahem) Functions

+ +

+ bind_.bind(function, object, [*arguments]) +
+ Bind a function to an object, meaning that whenever + the function is called, the value of this will be the object. + Optionally, bind arguments to the function to pre-fill them, + also known as partial application. +

+
+var func = function(greeting){ return greeting + ': ' + this.name };
+func = _.bind(func, {name : 'moe'}, 'hi');
+func();
+=> 'hi: moe'
+
+ +

+ bindAll_.bindAll(object, [*methodNames]) +
+ Binds a number of methods on the object, specified by + methodNames, to be run in the context of that object whenever they + are invoked. Very handy for binding functions that are going to be used + as event handlers, which would otherwise be invoked with a fairly useless + this. If no methodNames are provided, all of the object's + function properties will be bound to it. +

+
+var buttonView = {
+  label   : 'underscore',
+  onClick : function(){ alert('clicked: ' + this.label); },
+  onHover : function(){ console.log('hovering: ' + this.label); }
+};
+_.bindAll(buttonView);
+jQuery('#underscore_button').bind('click', buttonView.onClick);
+=> When the button is clicked, this.label will have the correct value...
+
+ +

+ memoize_.memoize(function, [hashFunction]) +
+ Memoizes a given function by caching the computed result. Useful + for speeding up slow-running computations. If passed an optional + hashFunction, it will be used to compute the hash key for storing + the result, based on the arguments to the original function. The default + hashFunction just uses the first argument to the memoized function + as the key. +

+
+var fibonacci = _.memoize(function(n) {
+  return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
+});
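A sketch of the optional hashFunction, caching on both arguments instead of just the first:

```javascript
var add = _.memoize(function(a, b) {
  return a + b;
}, function(a, b) {
  return a + ':' + b; // cache key covers both arguments
});
add(1, 2);
// => 3, now cached under "1:2"
```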
+
+ +

+ delay_.delay(function, wait, [*arguments]) +
+ Much like setTimeout, invokes function after wait + milliseconds. If you pass the optional arguments, they will be + forwarded on to the function when it is invoked. +

+
+var log = _.bind(console.log, console);
+_.delay(log, 1000, 'logged later');
+=> 'logged later' // Appears after one second.
+
+ +

+ defer_.defer(function) +
+ Defers invoking the function until the current call stack has cleared, + similar to using setTimeout with a delay of 0. Useful for performing + expensive computations or HTML rendering in chunks without blocking the UI thread + from updating. +

+
+_.defer(function(){ alert('deferred'); });
+// Returns from the function before the alert runs.
+
+ +

+ throttle_.throttle(function, wait) +
+ Creates and returns a new, throttled version of the passed function, + that, when invoked repeatedly, will only actually call the original function + at most once per every wait + milliseconds. Useful for rate-limiting events that occur faster than you + can keep up with. +

+
+var throttled = _.throttle(updatePosition, 100);
+$(window).scroll(throttled);
+
+ +

+ debounce_.debounce(function, wait, [immediate]) +
+ Creates and returns a new debounced version of the passed function that + will postpone its execution until after + wait milliseconds have elapsed since the last time it + was invoked. Useful for implementing behavior that should only happen + after the input has stopped arriving. For example: rendering a + preview of a Markdown comment, recalculating a layout after the window + has stopped being resized, and so on. +

+ +

+ Pass true for the immediate parameter to cause + debounce to trigger the function on the leading instead of the + trailing edge of the wait interval. Useful in circumstances like + preventing accidental double-clicks on a "submit" button from firing a + second time. +

+ +
+var lazyLayout = _.debounce(calculateLayout, 300);
+$(window).resize(lazyLayout);
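A sketch of the immediate variant described above; submitForm and the selector are placeholders:

```javascript
// Fires right away, then swallows repeat clicks for 300ms.
var submitOnce = _.debounce(submitForm, 300, true);
$('#submit-button').click(submitOnce);
```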
+
+ +

+ once_.once(function) +
+ Creates a version of the function that can only be called one time. + Repeated calls to the modified function will have no effect, returning + the value from the original call. Useful for initialization functions, + instead of having to set a boolean flag and then check it later. +

+
+var initialize = _.once(createApplication);
+initialize();
+initialize();
+// Application is only created once.
+
+ +

+ after_.after(count, function) +
+ Creates a version of the function that will only be run after first + being called count times. Useful for grouping asynchronous responses, + where you want to be sure that all the async calls have finished, before + proceeding. +

+
+var renderNotes = _.after(notes.length, render);
+_.each(notes, function(note) {
+  note.asyncSave({success: renderNotes});
+});
+// renderNotes is run once, after all notes have saved.
+
+ +

+ wrap_.wrap(function, wrapper) +
+ Wraps the first function inside of the wrapper function, + passing it as the first argument. This allows the wrapper to + execute code before and after the function runs, adjust the arguments, + and execute it conditionally. +

+
+var hello = function(name) { return "hello: " + name; };
+hello = _.wrap(hello, function(func) {
+  return "before, " + func("moe") + ", after";
+});
+hello();
+=> 'before, hello: moe, after'
+
+ +

+ compose_.compose(*functions) +
+ Returns the composition of a list of functions, where each function + consumes the return value of the function that follows. In math terms, + composing the functions f(), g(), and h() produces + f(g(h())). +

+
+var greet    = function(name){ return "hi: " + name; };
+var exclaim  = function(statement){ return statement + "!"; };
+var welcome = _.compose(exclaim, greet);
+welcome('moe');
+=> 'hi: moe!'
+
+ +

Object Functions

+ +

+ keys_.keys(object) +
+ Retrieve all the names of the object's properties. +

+
+_.keys({one : 1, two : 2, three : 3});
+=> ["one", "two", "three"]
+
+ +

+ values_.values(object) +
+ Return all of the values of the object's properties. +

+
+_.values({one : 1, two : 2, three : 3});
+=> [1, 2, 3]
+
+ +

+ functions_.functions(object) + Alias: methods +
+ Returns a sorted list of the names of every method in an object — + that is to say, the name of every function property of the object. +

+
+_.functions(_);
+=> ["all", "any", "bind", "bindAll", "clone", "compact", "compose" ...
+
+ +

+ extend_.extend(destination, *sources) +
+ Copy all of the properties in the source objects over to the + destination object, and return the destination object. + It's in-order, so the last source will override properties of the same + name in previous arguments. +

+
+_.extend({name : 'moe'}, {age : 50});
+=> {name : 'moe', age : 50}
+
+ +

+ pick_.pick(object, *keys) +
+ Return a copy of the object, filtered to only have values for + the whitelisted keys (or array of valid keys). +

+
+_.pick({name : 'moe', age: 50, userid : 'moe1'}, 'name', 'age');
+=> {name : 'moe', age : 50}
+
+ +

+ defaults_.defaults(object, *defaults) +
+ Fill in missing properties in object with default values from the + defaults objects, and return the object. As soon as the + property is filled, further defaults will have no effect. +

+
+var iceCream = {flavor : "chocolate"};
+_.defaults(iceCream, {flavor : "vanilla", sprinkles : "lots"});
+=> {flavor : "chocolate", sprinkles : "lots"}
+
+ +

+ clone_.clone(object) +
+ Create a shallow-copied clone of the object. Any nested objects + or arrays will be copied by reference, not duplicated. +

+
+_.clone({name : 'moe'});
+=> {name : 'moe'};
+
+ +

+ tap_.tap(object, interceptor) +
+ Invokes interceptor with the object, and then returns object. + The primary purpose of this method is to "tap into" a method chain, in order to perform operations on intermediate results within the chain. +

+
+_.chain([1,2,3,200])
+  .filter(function(num) { return num % 2 == 0; })
+  .tap(alert)
+  .map(function(num) { return num * num })
+  .value();
+=> // [2, 200] (alerted)
+=> [4, 40000]
+
+ +

+ has_.has(object, key) +
+ Does the object contain the given key? Identical to + object.hasOwnProperty(key), but uses a safe reference to the + hasOwnProperty function, in case it's been + overridden accidentally. +

+
+_.has({a: 1, b: 2, c: 3}, "b");
+=> true
+
+ +

+ isEqual_.isEqual(object, other) +
+ Performs an optimized deep comparison between the two objects, to determine + if they should be considered equal. +

+
+var moe   = {name : 'moe', luckyNumbers : [13, 27, 34]};
+var clone = {name : 'moe', luckyNumbers : [13, 27, 34]};
+moe == clone;
+=> false
+_.isEqual(moe, clone);
+=> true
+
+ +

+ isEmpty_.isEmpty(object) +
+ Returns true if object contains no values. +

+
+_.isEmpty([1, 2, 3]);
+=> false
+_.isEmpty({});
+=> true
+
+ +

+ isElement_.isElement(object) +
+ Returns true if object is a DOM element. +

+
+_.isElement(jQuery('body')[0]);
+=> true
+
+ +

+ isArray_.isArray(object) +
+ Returns true if object is an Array. +

+
+(function(){ return _.isArray(arguments); })();
+=> false
+_.isArray([1,2,3]);
+=> true
+
+ +

+ isObject_.isObject(value) +
+ Returns true if value is an Object. +

+
+_.isObject({});
+=> true
+_.isObject(1);
+=> false
+
+ +

+ isArguments_.isArguments(object) +
+ Returns true if object is an Arguments object. +

+
+(function(){ return _.isArguments(arguments); })(1, 2, 3);
+=> true
+_.isArguments([1,2,3]);
+=> false
+
+ +

+ isFunction_.isFunction(object) +
+ Returns true if object is a Function. +

+
+_.isFunction(alert);
+=> true
+
+ +

+ isString_.isString(object) +
+ Returns true if object is a String. +

+
+_.isString("moe");
+=> true
+
+ +

+ isNumber_.isNumber(object) +
+ Returns true if object is a Number (including NaN). +

+
+_.isNumber(8.4 * 5);
+=> true
+
+ +

+ isFinite_.isFinite(object) +
+ Returns true if object is a finite Number. +

+
+_.isFinite(-101);
+=> true
+
+_.isFinite(-Infinity);
+=> false
+
+ +

+ isBoolean_.isBoolean(object) +
+ Returns true if object is either true or false. +

+
+_.isBoolean(null);
+=> false
+
+ +

+ isDate_.isDate(object) +
+ Returns true if object is a Date. +

+
+_.isDate(new Date());
+=> true
+
+ +

+ isRegExp_.isRegExp(object) +
+ Returns true if object is a RegExp. +

+
+_.isRegExp(/moe/);
+=> true
+
+ +

+ isNaN_.isNaN(object) +
+ Returns true if object is NaN.
Note: this is not + the same as the native isNaN function, which will also return + true if the variable is undefined. +

+
+_.isNaN(NaN);
+=> true
+isNaN(undefined);
+=> true
+_.isNaN(undefined);
+=> false
+
+ +

+ isNull_.isNull(object) +
+ Returns true if the value of object is null. +

+
+_.isNull(null);
+=> true
+_.isNull(undefined);
+=> false
+
+ +

+ isUndefined_.isUndefined(variable) +
+ Returns true if variable is undefined. +

+
+_.isUndefined(window.missingVariable);
+=> true
+
+ +

Utility Functions

+ +

+ noConflict_.noConflict() +
+ Give control of the "_" variable back to its previous owner. Returns + a reference to the Underscore object. +

+
+var underscore = _.noConflict();
+ +

+ identity_.identity(value) +
+ Returns the same value that is used as the argument. In math: + f(x) = x
+ This function looks useless, but is used throughout Underscore as + a default iterator. +

+
+var moe = {name : 'moe'};
+moe === _.identity(moe);
+=> true
+ +

+ times_.times(n, iterator) +
+ Invokes the given iterator function n times. +

+
+_(3).times(function(){ genie.grantWish(); });
+ +

+ mixin_.mixin(object) +
+ Allows you to extend Underscore with your own utility functions. Pass + a hash of {name: function} definitions to have your functions + added to the Underscore object, as well as the OOP wrapper. +

+
+_.mixin({
+  capitalize : function(string) {
+    return string.charAt(0).toUpperCase() + string.substring(1).toLowerCase();
+  }
+});
+_("fabio").capitalize();
+=> "Fabio"
+
+ +

+ uniqueId_.uniqueId([prefix]) +
+ Generate a globally-unique id for client-side models or DOM elements + that need one. If prefix is passed, the id will be appended to it. + Without prefix, returns an integer. +

+
+_.uniqueId('contact_');
+=> 'contact_104'
+ +

+ escape_.escape(string) +
+ Escapes a string for insertion into HTML, replacing + &, <, >, ", ', and / characters. +

+
+_.escape('Curly, Larry & Moe');
+=> "Curly, Larry &amp; Moe"
+ +

+ result_.result(object, property) +
+ If the value of the named property is a function then invoke it; otherwise, return it. +

+
+var object = {cheese: 'crumpets', stuff: function(){ return 'nonsense'; }};
+_.result(object, 'cheese');
+=> "crumpets"
+_.result(object, 'stuff');
+=> "nonsense"
+ +

+ template_.template(templateString, [data], [settings]) +
+ Compiles JavaScript templates into functions that can be evaluated + for rendering. Useful for rendering complicated bits of HTML from JSON + data sources. Template functions can both interpolate variables, using + <%= … %>, as well as execute arbitrary JavaScript code, with + <% … %>. If you wish to interpolate a value, and have + it be HTML-escaped, use <%- … %>. When you evaluate a template function, pass in a + data object that has properties corresponding to the template's free + variables. If you're writing a one-off, you can pass the data + object as the second parameter to template in order to render + immediately instead of returning a template function. The settings argument + should be a hash containing any _.templateSettings that should be overridden. +

+ +
+var compiled = _.template("hello: <%= name %>");
+compiled({name : 'moe'});
+=> "hello: moe"
+
+var list = "<% _.each(people, function(name) { %> <li><%= name %></li> <% }); %>";
+_.template(list, {people : ['moe', 'curly', 'larry']});
+=> "<li>moe</li><li>curly</li><li>larry</li>"
+
+var template = _.template("<b><%- value %></b>");
+template({value : '<script>'});
+=> "<b>&lt;script&gt;</b>"
+ +

+ You can also use print from within JavaScript code. This is + sometimes more convenient than using <%= ... %>. +

+ +
+var compiled = _.template("<% print('Hello ' + epithet); %>");
+compiled({epithet: "stooge"});
+=> "Hello stooge."
+ +

+ If ERB-style delimiters aren't your cup of tea, you can change Underscore's + template settings to use different symbols to set off interpolated code. + Define an interpolate regex to match expressions that should be + interpolated verbatim, an escape regex to match expressions that should + be inserted after being HTML escaped, and an evaluate regex to match + expressions that should be evaluated without insertion into the resulting + string. You may define or omit any combination of the three. + For example, to perform + Mustache.js + style templating: +

+ +
+_.templateSettings = {
+  interpolate : /\{\{(.+?)\}\}/g
+};
+
+var template = _.template("Hello {{ name }}!");
+template({name : "Mustache"});
+=> "Hello Mustache!"
+ +

+ By default, template places the values from your data in the local scope + via the with statement. However, you can specify a single variable name + with the variable setting. This can significantly improve the speed + at which a template is able to render. +

+ +
+_.template("<%= data.hasWith %>", {hasWith: 'no'}, {variable: 'data'});
+=> "no"
+ +

+ Precompiling your templates can be a big help when debugging errors you can't + reproduce. This is because precompiled templates can provide line numbers and + a stack trace, something that is not possible when compiling templates on the client. + The source property is available on the compiled template + function for easy precompilation. +

+ +
<script>
+  JST.project = <%= _.template(jstText).source %>;
+</script>
+ + +

Chaining

+ +

+ You can use Underscore in either an object-oriented or a functional style, + depending on your preference. The following two lines of code are + identical ways to double a list of numbers. +

+ +
+_.map([1, 2, 3], function(n){ return n * 2; });
+_([1, 2, 3]).map(function(n){ return n * 2; });
+ +

+ Calling chain on a wrapped object will cause all future method calls to + return wrapped objects as well. When you've finished the computation, + use value to retrieve the final value. Here's an example of chaining + together a map/flatten/reduce, in order to get the word count of + every word in a song. +

+ +
+var lyrics = [
+  {line : 1, words : "I'm a lumberjack and I'm okay"},
+  {line : 2, words : "I sleep all night and I work all day"},
+  {line : 3, words : "He's a lumberjack and he's okay"},
+  {line : 4, words : "He sleeps all night and he works all day"}
+];
+
+_.chain(lyrics)
+  .map(function(line) { return line.words.split(' '); })
+  .flatten()
+  .reduce(function(counts, word) {
+    counts[word] = (counts[word] || 0) + 1;
+    return counts;
+}, {}).value();
+
+=> {lumberjack : 2, all : 4, night : 2 ... }
+ +

+ In addition, the + Array prototype's methods + are proxied through the chained Underscore object, so you can slip a + reverse or a push into your chain, and continue to + modify the array. +
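A sketch of mixing native Array methods into a chain:

```javascript
_.chain([1, 2, 3])
  .push(4)      // proxied Array.prototype.push, still chained
  .reverse()    // proxied Array.prototype.reverse
  .map(function(n) { return n * 10; })
  .value();
// => [40, 30, 20, 10]
```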

+ +

+ chain_.chain(obj) +
+ Returns a wrapped object. Calling methods on this object will continue + to return wrapped objects until value is used. +

+
+var stooges = [{name : 'curly', age : 25}, {name : 'moe', age : 21}, {name : 'larry', age : 23}];
+var youngest = _.chain(stooges)
+  .sortBy(function(stooge){ return stooge.age; })
+  .map(function(stooge){ return stooge.name + ' is ' + stooge.age; })
+  .first()
+  .value();
+=> "moe is 21"
+
+ +

+ value_(obj).value() +
+ Extracts the value of a wrapped object. +

+
+_([1, 2, 3]).value();
+=> [1, 2, 3]
+
+ + + +

+ Underscore.lua, + a Lua port of the functions that are applicable in both languages. + Includes OOP-wrapping and chaining. + The source is + available on GitHub. +

+ +

+ Underscore.php, + a PHP port of the functions that are applicable in both languages. + Includes OOP-wrapping and chaining. + The source is + available on GitHub. +

+ +

+ Underscore-perl, + a Perl port of many of the Underscore.js functions, + aimed at Perl hashes and arrays, also + available on GitHub. +

+ +

+ Underscore.string, + an Underscore extension that adds functions for string-manipulation: + trim, startsWith, contains, capitalize, + reverse, sprintf, and more. +

+ +

+ Ruby's Enumerable module. +

+ +

+ Prototype.js, which provides + JavaScript with collection functions in the manner closest to Ruby's Enumerable. +

+ +

+ Oliver Steele's + Functional JavaScript, + which includes comprehensive higher-order function support as well as string lambdas. +

+ +

+ Michael Aufreiter's Data.js, + a data manipulation + persistence library for JavaScript. +

+ +

+ Python's itertools. +

+ +

Change Log

+ +

+ 1.3.3 (April 10, 2012)
+

    +
  • + Many improvements to _.template, which now provides the + source of the template function as a property, for potentially + even more efficient pre-compilation on the server-side. You may now + also set the variable option when creating a template, + which will cause your passed-in data to be made available under the + variable you named, instead of using a with statement — + significantly improving the speed of rendering the template. +
  • +
  • + Added the pick function, which allows you to filter an + object literal with a whitelist of allowed property names + (see the sketch after this list). +
  • +
  • + Added the result function, for convenience when working + with APIs that allow either functions or raw properties. +
  • +
  • + Added the isFinite function, because sometimes knowing that + a value is a number just ain't quite enough. +
  • +
  • + The sortBy function may now also be passed the string name + of a property to use as the sort order on each object. +
  • +
  • + Fixed uniq to work with sparse arrays. +
  • +
  • + The difference function now performs a shallow flatten + instead of a deep one when computing array differences. +
  • +
  • + The debounce function now takes an immediate + parameter, which will cause the callback to fire on the leading + instead of the trailing edge (also sketched after this list). +
  • +
+
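+ Two quick sketches of these additions (handleSubmit is a hypothetical
+ handler, not part of Underscore):
+
+_.pick({name : 'moe', age : 50, userid : 'moe1'}, 'name', 'age');
+=> {name : 'moe', age : 50}
+
+// Leading-edge debounce: fires immediately, then ignores further
+// calls for 300ms (handy against accidental double-submits).
+var submit = _.debounce(handleSubmit, 300, true);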

+ +

+ 1.3.1 (Jan. 23, 2012)
+

    +
  • + Added an _.has function, as a safer way to use hasOwnProperty. +
  • +
  • + Added _.collect as an alias for _.map. Smalltalkers, rejoice. +
  • +
  • + Reverted an old change so that _.extend will correctly copy + over keys with undefined values again. +
  • +
  • + Bugfix to stop escaping slashes within interpolations in _.template. +
  • +
+

+ +

+ 1.3.0 (Jan. 11, 2012)
+

    +
  • + Removed AMD (RequireJS) support from Underscore. If you'd like to use + Underscore with RequireJS, you can load it as a normal script, wrap + or patch your copy, or download a forked version. +
  • +
+

+ +

+ 1.2.4 (Jan. 4, 2012)
+

    +
  • + You now can (and probably should, as it's simpler) + write _.chain(list) + instead of _(list).chain(). +
  • +
  • + Fix for escaped characters in Underscore templates, and for supporting + customizations of _.templateSettings that only define one or + two of the required regexes. +
  • +
  • + Fix for passing an array as the first argument to an _.wrap'd function. +
  • +
  • + Improved compatibility with ClojureScript, which adds a call + function to String.prototype. +
  • +
+

+ +

+ 1.2.3 (Dec. 7, 2011)
+

    +
  • + Dynamic scope is now preserved for compiled _.template functions, + so you can use the value of this if you like. +
  • +
  • + Sparse array support of _.indexOf, _.lastIndexOf. +
  • +
  • + Both _.reduce and _.reduceRight can now be passed an + explicitly undefined value. (There's no reason why you'd + want to do this.) +
  • +
+

+ +

+ 1.2.2 (Nov. 14, 2011)
+

    +
  • + Continued tweaks to _.isEqual semantics. Now JS primitives are + considered equivalent to their wrapped versions, and arrays are compared + by their numeric properties only (#351). +
  • +
  • + _.escape no longer tries to be smart about not double-escaping + already-escaped HTML entities. Now it just escapes regardless (#350). +
  • +
  • + In _.template, you may now leave semicolons out of evaluated + statements if you wish: <% }) %> (#369). +
  • +
  • + _.after(callback, 0) will now trigger the callback immediately, + making "after" easier to use with asynchronous APIs (#366). +
  • +
+

+ +

+ 1.2.1 (Oct. 24, 2011)
+

    +
  • + Several important bug fixes for _.isEqual, which should now + do better on mutated Arrays, and on non-Array objects with + length properties. (#329) +
  • +
  • + jrburke contributed Underscore exporting for AMD module loaders, + and tonylukasavage for Appcelerator Titanium. + (#335, #338) +
  • +
  • + You can now call _.groupBy(list, 'property') as a shortcut for + grouping values by a particular common property (example after this list). +
  • +
  • + _.throttle'd functions now fire immediately upon invocation, + and are rate-limited thereafter (#170, #266). +
  • +
  • + Most of the _.is[Type] checks no longer ducktype. +
  • +
  • + The _.bind function now also works on constructors, a-la + ES5 ... but you would never want to use _.bind on a + constructor function. +
  • +
  • + _.clone no longer wraps non-object types in Objects. +
  • +
  • + _.find and _.filter are now the preferred names for + _.detect and _.select. +
  • +
+
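+ For instance, grouping a list of words by length:
+
+_.groupBy(['one', 'two', 'three'], 'length');
+=> {3 : ["one", "two"], 5 : ["three"]}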

+ +

+ 1.2.0 (Oct. 5, 2011)
+

    +
  • + The _.isEqual function now + supports true deep equality comparisons, with checks for cyclic structures, + thanks to Kit Cambridge. +
  • +
  • + Underscore templates now support HTML-escaping interpolations, using + <%- ... %> syntax (example after this list). +
  • +
  • + Ryan Tenney contributed _.shuffle, which uses a modified + Fisher-Yates to give you a shuffled copy of an array. +
  • +
  • + _.uniq can now be passed an optional iterator, to determine by + what criteria an object should be considered unique. +
  • +
  • + _.last now takes an optional argument which will return the last + N elements of the list. +
  • +
  • + A new _.initial function was added, as a mirror of _.rest, + which returns all the initial values of a list (except the last N). +
  • +
+
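+ An example of an escaped interpolation:
+
+_.template("<b><%- value %></b>", {value : '<script>'});
+=> "<b>&lt;script&gt;</b>"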

+ +

+ 1.1.7 (July 13, 2011)
+ Added _.groupBy, which aggregates a collection into groups of like items. + Added _.union and _.difference, to complement the + (re-named) _.intersection. + Various improvements for support of sparse arrays. + _.toArray now returns a clone, if directly passed an array. + _.functions now also returns the names of functions that are present + in the prototype chain. +
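+ A quick illustration of union and difference:
+
+_.union([1, 2, 3], [101, 2, 1, 10]);
+=> [1, 2, 3, 101, 10]
+
+_.difference([1, 2, 3, 4, 5], [5, 2, 10]);
+=> [1, 3, 4]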

+ +

+ 1.1.6 (April 18, 2011)
+ Added _.after, which will return a function that only runs after + first being called a specified number of times. + _.invoke can now take a direct function reference. + _.every now requires an iterator function to be passed, which + mirrors the ECMA5 API. + _.extend no longer copies keys when the value is undefined. + _.bind now errors when trying to bind an undefined value. +
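+ A small sketch of after:
+
+var done = _.after(3, function(){ console.log('All three calls finished.'); });
+done(); done(); done();   // logs once, on the third call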

+ +

+ 1.1.5 (Mar 20, 2011)
+ Added an _.defaults function, for use merging together JS objects + representing default options. + Added an _.once function, for manufacturing functions that should + only ever execute a single time. + _.bind now delegates to the native ECMAScript 5 version, + where available. + _.keys now throws an error when used on non-Object values, as in + ECMAScript 5. + Fixed a bug with _.keys when used over sparse arrays. +
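+ Sketches of the two additions (createApplication is a hypothetical
+ setup function):
+
+var iceCream = {flavor : "chocolate"};
+_.defaults(iceCream, {flavor : "vanilla", sprinkles : "lots"});
+=> {flavor : "chocolate", sprinkles : "lots"}
+
+var initialize = _.once(createApplication);
+initialize();
+initialize();   // createApplication only ever runs once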

+ +

+ 1.1.4 (Jan 9, 2011)
+ Improved compliance with ES5's Array methods when passing null + as a value. _.wrap now correctly sets this for the + wrapped function. _.indexOf now takes an optional flag for + finding the insertion index in an array that is guaranteed to already + be sorted. Avoiding the use of .callee, to allow _.isArray + to work properly in ES5's strict mode. +

+ +

+ 1.1.3 (Dec 1, 2010)
+ In CommonJS, Underscore may now be required with just:
+ var _ = require("underscore"). + Added _.throttle and _.debounce functions. + Removed _.breakLoop, in favor of an ECMA5-style un-break-able + each implementation — this removes the try/catch, and you'll now have + better stack traces for exceptions that are thrown within an Underscore iterator. + Improved the isType family of functions for better interoperability + with Internet Explorer host objects. + _.template now correctly escapes backslashes in templates. + Improved _.reduce compatibility with the ECMA5 version: + if you don't pass an initial value, the first item in the collection is used. + _.each no longer returns the iterated collection, for improved + consistency with ES5's forEach. +

+ +

+ 1.1.2
+ Fixed _.contains, which was mistakenly pointing at + _.intersect instead of _.include, like it should + have been. Added _.unique as an alias for _.uniq. +

+ +

+ 1.1.1
+ Improved the speed of _.template, and its handling of multiline + interpolations. Ryan Tenney contributed optimizations to many Underscore + functions. An annotated version of the source code is now available. +

+ +

+ 1.1.0
+ The method signature of _.reduce has been changed to match + the ECMAScript 5 signature, instead of the Ruby/Prototype.js version. + This is a backwards-incompatible change. _.template may now be + called with no arguments, and preserves whitespace. _.contains + is a new alias for _.include. +

+ +

+ 1.0.4
+ Andri Möll contributed the _.memoize + function, which can be used to speed up expensive repeated computations + by caching the results. +
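+ The classic memoization example:
+
+var fibonacci = _.memoize(function(n) {
+  return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
+});
+fibonacci(30);   // each distinct n is computed only once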

+ +

+ 1.0.3
+ Patch that makes _.isEqual return false if any property + of the compared object has a NaN value. Technically the correct + thing to do, but of questionable semantics. Watch out for NaN comparisons. +

+ +

+ 1.0.2
+ Fixes _.isArguments in recent versions of Opera, which have + arguments objects as real Arrays. +

+ +

+ 1.0.1
+ Bugfix for _.isEqual, when comparing two objects with the same + number of undefined keys, but with different names. +

+ +

+ 1.0.0
+ Things have been stable for many months now, so Underscore is now + considered to be out of beta, at 1.0. Improvements since 0.6 + include _.isBoolean, and the ability to have _.extend + take multiple source objects. +

+ +

+ 0.6.0
+ Major release. Incorporates a number of + Mile Frawley's refactors for + safer duck-typing on collection functions, and cleaner internals. A new + _.mixin method that allows you to extend Underscore with utility + functions of your own. Added _.times, which works the same as in + Ruby or Prototype.js. Native support for ECMAScript 5's Array.isArray, + and Object.keys. +

+ +

+ 0.5.8
+ Fixed Underscore's collection functions to work on + NodeLists and + HTMLCollections + once more, thanks to + Justin Tulloss. +

+ +

+ 0.5.7
+ A safer implementation of _.isArguments, and a + faster _.isNumber, thanks to + Jed Schmidt. +

+ +

+ 0.5.6
+ Customizable delimiters for _.template, contributed by + Noah Sloan. +

+ +

+ 0.5.5
+ Fix for a bug in MobileSafari's OOP-wrapper, with the arguments object. +

+ +

+ 0.5.4
+ Fix for multiple single quotes within a template string for + _.template. See: + Rick Strahl's blog post. +

+ +

+ 0.5.2
+ New implementations of isArray, isDate, isFunction, + isNumber, isRegExp, and isString, thanks to + a suggestion from + Robert Kieffer. + Instead of doing Object#toString + comparisons, they now check for expected properties, which is less safe, + but more than an order of magnitude faster. Most other Underscore + functions saw minor speed improvements as a result. + Evgeniy Dolzhenko + contributed _.tap, + similar to Ruby 1.9's, + which is handy for injecting side effects (like logging) into chained calls. +
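+ A sketch of tap in the middle of a chain:
+
+_.chain([1, 2, 3, 200])
+  .filter(function(num) { return num % 2 == 0; })
+  .tap(function(evens) { console.log(evens); })   // logs [2, 200]
+  .map(function(num) { return num * num; })
+  .value();
+=> [4, 40000]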

+ +

+ 0.5.1
+ Added an _.isArguments function. Lots of little safety checks + and optimizations contributed by + Noah Sloan and + Andri Möll. +

+ +

+ 0.5.0
+ [API Changes] _.bindAll now takes the context object as + its first parameter. If no method names are passed, all of the context + object's methods are bound to it, enabling chaining and easier binding. + _.functions now takes a single argument and returns the names + of its Function properties. Calling _.functions(_) will get you + the previous behavior. + Added _.isRegExp so that isEqual can now test for RegExp equality. + All of the "is" functions have been shrunk down into a single definition. + Karl Guertin contributed patches. +

+ +

+ 0.4.7
+ Added isDate, isNaN, and isNull, for completeness. + Optimizations for isEqual when checking equality between Arrays + or Dates. _.keys is now 25%–2X faster (depending on your + browser) which speeds up the functions that rely on it, such as _.each. +

+ +

+ 0.4.6
+ Added the range function, a port of the + Python + function of the same name, for generating flexibly-numbered lists + of integers. Original patch contributed by + Kirill Ishanov. +
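+ For example:
+
+_.range(0, 30, 5);
+=> [0, 5, 10, 15, 20, 25]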

+ +

+ 0.4.5
+ Added rest for Arrays and arguments objects, and aliased + first as head, and rest as tail, + thanks to Luke Sutton's patches. + Added tests ensuring that all Underscore Array functions also work on + arguments objects. +

+ +

+ 0.4.4
+ Added isString, and isNumber, for consistency. Fixed + _.isEqual(NaN, NaN) to return true (which is debatable). +

+ +

+ 0.4.3
+ Started using the native StopIteration object in browsers that support it. + Fixed Underscore setup for CommonJS environments. +

+ +

+ 0.4.2
+ Renamed the unwrapping function to value, for clarity. +

+ +

+ 0.4.1
+ Chained Underscore objects now support the Array prototype methods, so + that you can perform the full range of operations on a wrapped array + without having to break your chain. Added a breakLoop method + to break in the middle of any Underscore iteration. Added an + isEmpty function that works on arrays and objects. +

+ +

+ 0.4.0
+ All Underscore functions can now be called in an object-oriented style, + like so: _([1, 2, 3]).map(...);. Original patch provided by + Marc-André Cournoyer. + Wrapped objects can be chained through multiple + method invocations. A functions method + was added, providing a sorted list of all the functions in Underscore. +

+ +

+ 0.3.3
+ Added the JavaScript 1.8 function reduceRight. Aliased it + as foldr, and aliased reduce as foldl. +

+ +

+ 0.3.2
+ Now runs on stock Rhino + interpreters with: load("underscore.js"). + Added identity as a utility function. +

+ +

+ 0.3.1
+ All iterators are now passed in the original collection as their third + argument, the same as JavaScript 1.6's forEach. Iterating over + objects is now called with (value, key, collection), for details + see _.each. +

+ +

+ 0.3.0
+ Added Dmitry Baranovskiy's + comprehensive optimizations, merged in + Kris Kowal's patches to make Underscore + CommonJS and + Narwhal compliant. +

+ +

+ 0.2.0
+ Added compose and lastIndexOf, renamed inject to + reduce, added aliases for inject, filter, + every, some, and forEach. +

+ +

+ 0.1.1
+ Added noConflict, so that the "Underscore" object can be assigned to + other variables. +

+ +

+ 0.1.0
+ Initial release of Underscore.js. +

+ +

+ + A DocumentCloud Project + +

+ +
+ +
+ + + + + + diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/index.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/index.js --- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/index.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/index.js 2011-12-06 22:32:37.000000000 +0000 @@ -0,0 +1 @@ +module.exports = require('./underscore'); diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/package.json tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/package.json --- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -0,0 +1,29 @@ +{ + "name": "underscore", + "description": "JavaScript's functional programming helper library.", + "homepage": "http://documentcloud.github.com/underscore/", + "keywords": [ + "util", + "functional", + "server", + "client", + "browser" + ], + "author": { + "name": "Jeremy Ashkenas", + "email": "jeremy@documentcloud.org" + }, + "repository": { + "type": "git", + "url": "git://github.com/documentcloud/underscore.git" + }, + "main": "underscore.js", + "version": "1.3.3", + "readme": " __ \n /\\ \\ __ \n __ __ ___ \\_\\ \\ __ _ __ ____ ___ ___ _ __ __ /\\_\\ ____ \n /\\ \\/\\ \\ /' _ `\\ /'_ \\ /'__`\\/\\ __\\/ ,__\\ / ___\\ / __`\\/\\ __\\/'__`\\ \\/\\ \\ /',__\\ \n \\ \\ \\_\\ \\/\\ \\/\\ \\/\\ \\ \\ \\/\\ __/\\ \\ \\//\\__, `\\/\\ \\__//\\ \\ \\ \\ \\ \\//\\ __/ __ \\ \\ \\/\\__, `\\\n \\ \\____/\\ \\_\\ \\_\\ \\___,_\\ \\____\\\\ \\_\\\\/\\____/\\ \\____\\ \\____/\\ \\_\\\\ \\____\\/\\_\\ _\\ \\ \\/\\____/\n \\/___/ \\/_/\\/_/\\/__,_ /\\/____/ \\/_/ \\/___/ \\/____/\\/___/ \\/_/ \\/____/\\/_//\\ \\_\\ \\/___/ \n \\ \\____/ \n \\/___/\n \nUnderscore.js is a utility-belt library for JavaScript that provides \nsupport for the usual functional suspects (each, map, reduce, filter...) \nwithout extending any core JavaScript objects.\n\nFor Docs, License, Tests, and pre-packed downloads, see:\nhttp://documentcloud.github.com/underscore/\n\nMany thanks to our contributors:\nhttps://github.com/documentcloud/underscore/contributors\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/documentcloud/underscore/issues" + }, + "_id": "underscore@1.3.3", + "_from": "underscore@~1.3.3" +} Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/raw/underscore.psd and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/raw/underscore.psd differ diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/underscore-min.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/underscore-min.js --- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/underscore-min.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/underscore-min.js 2012-04-10 14:43:40.000000000 +0000 @@ -0,0 +1,32 @@ +// Underscore.js 1.3.3 +// (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc. +// Underscore is freely distributable under the MIT license. 
+// Portions of Underscore are inspired or borrowed from Prototype, +// Oliver Steele's Functional, and John Resig's Micro-Templating. +// For all details and documentation: +// http://documentcloud.github.com/underscore +(function(){function r(a,c,d){if(a===c)return 0!==a||1/a==1/c;if(null==a||null==c)return a===c;a._chain&&(a=a._wrapped);c._chain&&(c=c._wrapped);if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return!1;switch(e){case "[object String]":return a==""+c;case "[object Number]":return a!=+a?c!=+c:0==a?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source== +c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if("object"!=typeof a||"object"!=typeof c)return!1;for(var f=d.length;f--;)if(d[f]==a)return!0;d.push(a);var f=0,g=!0;if("[object Array]"==e){if(f=a.length,g=f==c.length)for(;f--&&(g=f in a==f in c&&r(a[f],c[f],d)););}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return!1;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&r(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c,h)&&!f--)break; +g=!f}}d.pop();return g}var s=this,I=s._,o={},k=Array.prototype,p=Object.prototype,i=k.slice,J=k.unshift,l=p.toString,K=p.hasOwnProperty,y=k.forEach,z=k.map,A=k.reduce,B=k.reduceRight,C=k.filter,D=k.every,E=k.some,q=k.indexOf,F=k.lastIndexOf,p=Array.isArray,L=Object.keys,t=Function.prototype.bind,b=function(a){return new m(a)};"undefined"!==typeof exports?("undefined"!==typeof module&&module.exports&&(exports=module.exports=b),exports._=b):s._=b;b.VERSION="1.3.3";var j=b.each=b.forEach=function(a, +c,d){if(a!=null)if(y&&a.forEach===y)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e2;a==null&&(a=[]);if(A&& +a.reduce===A){e&&(c=b.bind(c,e));return f?a.reduce(c,d):a.reduce(c)}j(a,function(a,b,i){if(f)d=c.call(e,d,a,b,i);else{d=a;f=true}});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(B&&a.reduceRight===B){e&&(c=b.bind(c,e));return f?a.reduceRight(c,d):a.reduceRight(c)}var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect=function(a, +c,b){var e;G(a,function(a,g,h){if(c.call(b,a,g,h)){e=a;return true}});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(C&&a.filter===C)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(D&&a.every===D)return a.every(c,b);j(a,function(a,g,h){if(!(e=e&&c.call(b, +a,g,h)))return o});return!!e};var G=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(E&&a.some===E)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return o});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;if(q&&a.indexOf===q)return a.indexOf(c)!=-1;return b=G(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= +function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a)&&a[0]===+a[0])return 
Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a)&&a[0]===+a[0])return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]}; +j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a,c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1),true);return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a= +i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}};b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=L||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&& +c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.pick=function(a){var c={};j(b.flatten(i.call(arguments,1)),function(b){b in a&&(c[b]=a[b])});return c};b.defaults=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return r(a,b,[])};b.isEmpty= +function(a){if(a==null)return true;if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=p||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)};b.isArguments=function(a){return l.call(a)=="[object Arguments]"};b.isArguments(arguments)||(b.isArguments=function(a){return!(!a||!b.has(a,"callee"))});b.isFunction=function(a){return l.call(a)=="[object Function]"}; +b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isFinite=function(a){return b.isNumber(a)&&isFinite(a)};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"};b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a, +b){return K.call(a,b)};b.noConflict=function(){s._=I;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.result=function(a,c){if(a==null)return null;var d=a[c];return b.isFunction(d)?d.call(a):d};b.mixin=function(a){j(b.functions(a),function(c){M(c,b[c]=a[c])})};var N=0;b.uniqueId= +function(a){var b=N++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var u=/.^/,n={"\\":"\\","'":"'",r:"\r",n:"\n",t:"\t",u2028:"\u2028",u2029:"\u2029"},v;for(v in n)n[n[v]]=v;var O=/\\|'|\r|\n|\t|\u2028|\u2029/g,P=/\\(\\|'|r|n|t|u2028|u2029)/g,w=function(a){return a.replace(P,function(a,b){return 
n[b]})};b.template=function(a,c,d){d=b.defaults(d||{},b.templateSettings);a="__p+='"+a.replace(O,function(a){return"\\"+n[a]}).replace(d.escape|| +u,function(a,b){return"'+\n_.escape("+w(b)+")+\n'"}).replace(d.interpolate||u,function(a,b){return"'+\n("+w(b)+")+\n'"}).replace(d.evaluate||u,function(a,b){return"';\n"+w(b)+"\n;__p+='"})+"';\n";d.variable||(a="with(obj||{}){\n"+a+"}\n");var a="var __p='';var print=function(){__p+=Array.prototype.join.call(arguments, '')};\n"+a+"return __p;\n",e=new Function(d.variable||"obj","_",a);if(c)return e(c,b);c=function(a){return e.call(this,a,b)};c.source="function("+(d.variable||"obj")+"){\n"+a+"}";return c}; +b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var x=function(a,c){return c?b(a).chain():a},M=function(a,c){m.prototype[a]=function(){var a=i.call(arguments);J.call(a,this._wrapped);return x(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return x(d, +this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return x(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain=true;return this};m.prototype.value=function(){return this._wrapped}}).call(this); diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/underscore.js tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/underscore.js --- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/node_modules/underscore/underscore.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/node_modules/underscore/underscore.js 2012-04-10 14:43:40.000000000 +0000 @@ -0,0 +1,1059 @@ +// Underscore.js 1.3.3 +// (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc. +// Underscore is freely distributable under the MIT license. +// Portions of Underscore are inspired or borrowed from Prototype, +// Oliver Steele's Functional, and John Resig's Micro-Templating. +// For all details and documentation: +// http://documentcloud.github.com/underscore + +(function() { + + // Baseline setup + // -------------- + + // Establish the root object, `window` in the browser, or `global` on the server. + var root = this; + + // Save the previous value of the `_` variable. + var previousUnderscore = root._; + + // Establish the object that gets returned to break out of a loop iteration. + var breaker = {}; + + // Save bytes in the minified (but not gzipped) version: + var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; + + // Create quick reference variables for speed access to core prototypes. + var slice = ArrayProto.slice, + unshift = ArrayProto.unshift, + toString = ObjProto.toString, + hasOwnProperty = ObjProto.hasOwnProperty; + + // All **ECMAScript 5** native function implementations that we hope to use + // are declared here. 
+ var + nativeForEach = ArrayProto.forEach, + nativeMap = ArrayProto.map, + nativeReduce = ArrayProto.reduce, + nativeReduceRight = ArrayProto.reduceRight, + nativeFilter = ArrayProto.filter, + nativeEvery = ArrayProto.every, + nativeSome = ArrayProto.some, + nativeIndexOf = ArrayProto.indexOf, + nativeLastIndexOf = ArrayProto.lastIndexOf, + nativeIsArray = Array.isArray, + nativeKeys = Object.keys, + nativeBind = FuncProto.bind; + + // Create a safe reference to the Underscore object for use below. + var _ = function(obj) { return new wrapper(obj); }; + + // Export the Underscore object for **Node.js**, with + // backwards-compatibility for the old `require()` API. If we're in + // the browser, add `_` as a global object via a string identifier, + // for Closure Compiler "advanced" mode. + if (typeof exports !== 'undefined') { + if (typeof module !== 'undefined' && module.exports) { + exports = module.exports = _; + } + exports._ = _; + } else { + root['_'] = _; + } + + // Current version. + _.VERSION = '1.3.3'; + + // Collection Functions + // -------------------- + + // The cornerstone, an `each` implementation, aka `forEach`. + // Handles objects with the built-in `forEach`, arrays, and raw objects. + // Delegates to **ECMAScript 5**'s native `forEach` if available. + var each = _.each = _.forEach = function(obj, iterator, context) { + if (obj == null) return; + if (nativeForEach && obj.forEach === nativeForEach) { + obj.forEach(iterator, context); + } else if (obj.length === +obj.length) { + for (var i = 0, l = obj.length; i < l; i++) { + if (i in obj && iterator.call(context, obj[i], i, obj) === breaker) return; + } + } else { + for (var key in obj) { + if (_.has(obj, key)) { + if (iterator.call(context, obj[key], key, obj) === breaker) return; + } + } + } + }; + + // Return the results of applying the iterator to each element. + // Delegates to **ECMAScript 5**'s native `map` if available. + _.map = _.collect = function(obj, iterator, context) { + var results = []; + if (obj == null) return results; + if (nativeMap && obj.map === nativeMap) return obj.map(iterator, context); + each(obj, function(value, index, list) { + results[results.length] = iterator.call(context, value, index, list); + }); + if (obj.length === +obj.length) results.length = obj.length; + return results; + }; + + // **Reduce** builds up a single result from a list of values, aka `inject`, + // or `foldl`. Delegates to **ECMAScript 5**'s native `reduce` if available. + _.reduce = _.foldl = _.inject = function(obj, iterator, memo, context) { + var initial = arguments.length > 2; + if (obj == null) obj = []; + if (nativeReduce && obj.reduce === nativeReduce) { + if (context) iterator = _.bind(iterator, context); + return initial ? obj.reduce(iterator, memo) : obj.reduce(iterator); + } + each(obj, function(value, index, list) { + if (!initial) { + memo = value; + initial = true; + } else { + memo = iterator.call(context, memo, value, index, list); + } + }); + if (!initial) throw new TypeError('Reduce of empty array with no initial value'); + return memo; + }; + + // The right-associative version of reduce, also known as `foldr`. + // Delegates to **ECMAScript 5**'s native `reduceRight` if available. + _.reduceRight = _.foldr = function(obj, iterator, memo, context) { + var initial = arguments.length > 2; + if (obj == null) obj = []; + if (nativeReduceRight && obj.reduceRight === nativeReduceRight) { + if (context) iterator = _.bind(iterator, context); + return initial ? 
obj.reduceRight(iterator, memo) : obj.reduceRight(iterator); + } + var reversed = _.toArray(obj).reverse(); + if (context && !initial) iterator = _.bind(iterator, context); + return initial ? _.reduce(reversed, iterator, memo, context) : _.reduce(reversed, iterator); + }; + + // Return the first value which passes a truth test. Aliased as `detect`. + _.find = _.detect = function(obj, iterator, context) { + var result; + any(obj, function(value, index, list) { + if (iterator.call(context, value, index, list)) { + result = value; + return true; + } + }); + return result; + }; + + // Return all the elements that pass a truth test. + // Delegates to **ECMAScript 5**'s native `filter` if available. + // Aliased as `select`. + _.filter = _.select = function(obj, iterator, context) { + var results = []; + if (obj == null) return results; + if (nativeFilter && obj.filter === nativeFilter) return obj.filter(iterator, context); + each(obj, function(value, index, list) { + if (iterator.call(context, value, index, list)) results[results.length] = value; + }); + return results; + }; + + // Return all the elements for which a truth test fails. + _.reject = function(obj, iterator, context) { + var results = []; + if (obj == null) return results; + each(obj, function(value, index, list) { + if (!iterator.call(context, value, index, list)) results[results.length] = value; + }); + return results; + }; + + // Determine whether all of the elements match a truth test. + // Delegates to **ECMAScript 5**'s native `every` if available. + // Aliased as `all`. + _.every = _.all = function(obj, iterator, context) { + var result = true; + if (obj == null) return result; + if (nativeEvery && obj.every === nativeEvery) return obj.every(iterator, context); + each(obj, function(value, index, list) { + if (!(result = result && iterator.call(context, value, index, list))) return breaker; + }); + return !!result; + }; + + // Determine if at least one element in the object matches a truth test. + // Delegates to **ECMAScript 5**'s native `some` if available. + // Aliased as `any`. + var any = _.some = _.any = function(obj, iterator, context) { + iterator || (iterator = _.identity); + var result = false; + if (obj == null) return result; + if (nativeSome && obj.some === nativeSome) return obj.some(iterator, context); + each(obj, function(value, index, list) { + if (result || (result = iterator.call(context, value, index, list))) return breaker; + }); + return !!result; + }; + + // Determine if a given value is included in the array or object using `===`. + // Aliased as `contains`. + _.include = _.contains = function(obj, target) { + var found = false; + if (obj == null) return found; + if (nativeIndexOf && obj.indexOf === nativeIndexOf) return obj.indexOf(target) != -1; + found = any(obj, function(value) { + return value === target; + }); + return found; + }; + + // Invoke a method (with arguments) on every item in a collection. + _.invoke = function(obj, method) { + var args = slice.call(arguments, 2); + return _.map(obj, function(value) { + return (_.isFunction(method) ? method || value : value[method]).apply(value, args); + }); + }; + + // Convenience version of a common use case of `map`: fetching a property. + _.pluck = function(obj, key) { + return _.map(obj, function(value){ return value[key]; }); + }; + + // Return the maximum element or (element-based computation). 
+ _.max = function(obj, iterator, context) { + if (!iterator && _.isArray(obj) && obj[0] === +obj[0]) return Math.max.apply(Math, obj); + if (!iterator && _.isEmpty(obj)) return -Infinity; + var result = {computed : -Infinity}; + each(obj, function(value, index, list) { + var computed = iterator ? iterator.call(context, value, index, list) : value; + computed >= result.computed && (result = {value : value, computed : computed}); + }); + return result.value; + }; + + // Return the minimum element (or element-based computation). + _.min = function(obj, iterator, context) { + if (!iterator && _.isArray(obj) && obj[0] === +obj[0]) return Math.min.apply(Math, obj); + if (!iterator && _.isEmpty(obj)) return Infinity; + var result = {computed : Infinity}; + each(obj, function(value, index, list) { + var computed = iterator ? iterator.call(context, value, index, list) : value; + computed < result.computed && (result = {value : value, computed : computed}); + }); + return result.value; + }; + + // Shuffle an array. + _.shuffle = function(obj) { + var shuffled = [], rand; + each(obj, function(value, index, list) { + rand = Math.floor(Math.random() * (index + 1)); + shuffled[index] = shuffled[rand]; + shuffled[rand] = value; + }); + return shuffled; + }; + + // Sort the object's values by a criterion produced by an iterator. + _.sortBy = function(obj, val, context) { + var iterator = _.isFunction(val) ? val : function(obj) { return obj[val]; }; + return _.pluck(_.map(obj, function(value, index, list) { + return { + value : value, + criteria : iterator.call(context, value, index, list) + }; + }).sort(function(left, right) { + var a = left.criteria, b = right.criteria; + if (a === void 0) return 1; + if (b === void 0) return -1; + return a < b ? -1 : a > b ? 1 : 0; + }), 'value'); + }; + + // Groups the object's values by a criterion. Pass either a string attribute + // to group by, or a function that returns the criterion. + _.groupBy = function(obj, val) { + var result = {}; + var iterator = _.isFunction(val) ? val : function(obj) { return obj[val]; }; + each(obj, function(value, index) { + var key = iterator(value, index); + (result[key] || (result[key] = [])).push(value); + }); + return result; + }; + + // Use a comparator function to figure out at what index an object should + // be inserted so as to maintain order. Uses binary search. + _.sortedIndex = function(array, obj, iterator) { + iterator || (iterator = _.identity); + var low = 0, high = array.length; + while (low < high) { + var mid = (low + high) >> 1; + iterator(array[mid]) < iterator(obj) ? low = mid + 1 : high = mid; + } + return low; + }; + + // Safely convert anything iterable into a real, live array. + _.toArray = function(obj) { + if (!obj) return []; + if (_.isArray(obj)) return slice.call(obj); + if (_.isArguments(obj)) return slice.call(obj); + if (obj.toArray && _.isFunction(obj.toArray)) return obj.toArray(); + return _.values(obj); + }; + + // Return the number of elements in an object. + _.size = function(obj) { + return _.isArray(obj) ? obj.length : _.keys(obj).length; + }; + + // Array Functions + // --------------- + + // Get the first element of an array. Passing **n** will return the first N + // values in the array. Aliased as `head` and `take`. The **guard** check + // allows it to work with `_.map`. + _.first = _.head = _.take = function(array, n, guard) { + return (n != null) && !guard ? slice.call(array, 0, n) : array[0]; + }; + + // Returns everything but the last entry of the array. 
Especially useful on + // the arguments object. Passing **n** will return all the values in + // the array, excluding the last N. The **guard** check allows it to work with + // `_.map`. + _.initial = function(array, n, guard) { + return slice.call(array, 0, array.length - ((n == null) || guard ? 1 : n)); + }; + + // Get the last element of an array. Passing **n** will return the last N + // values in the array. The **guard** check allows it to work with `_.map`. + _.last = function(array, n, guard) { + if ((n != null) && !guard) { + return slice.call(array, Math.max(array.length - n, 0)); + } else { + return array[array.length - 1]; + } + }; + + // Returns everything but the first entry of the array. Aliased as `tail`. + // Especially useful on the arguments object. Passing an **index** will return + // the rest of the values in the array from that index onward. The **guard** + // check allows it to work with `_.map`. + _.rest = _.tail = function(array, index, guard) { + return slice.call(array, (index == null) || guard ? 1 : index); + }; + + // Trim out all falsy values from an array. + _.compact = function(array) { + return _.filter(array, function(value){ return !!value; }); + }; + + // Return a completely flattened version of an array. + _.flatten = function(array, shallow) { + return _.reduce(array, function(memo, value) { + if (_.isArray(value)) return memo.concat(shallow ? value : _.flatten(value)); + memo[memo.length] = value; + return memo; + }, []); + }; + + // Return a version of the array that does not contain the specified value(s). + _.without = function(array) { + return _.difference(array, slice.call(arguments, 1)); + }; + + // Produce a duplicate-free version of the array. If the array has already + // been sorted, you have the option of using a faster algorithm. + // Aliased as `unique`. + _.uniq = _.unique = function(array, isSorted, iterator) { + var initial = iterator ? _.map(array, iterator) : array; + var results = []; + // The `isSorted` flag is irrelevant if the array only contains two elements. + if (array.length < 3) isSorted = true; + _.reduce(initial, function (memo, value, index) { + if (isSorted ? _.last(memo) !== value || !memo.length : !_.include(memo, value)) { + memo.push(value); + results.push(array[index]); + } + return memo; + }, []); + return results; + }; + + // Produce an array that contains the union: each distinct element from all of + // the passed-in arrays. + _.union = function() { + return _.uniq(_.flatten(arguments, true)); + }; + + // Produce an array that contains every item shared between all the + // passed-in arrays. (Aliased as "intersect" for back-compat.) + _.intersection = _.intersect = function(array) { + var rest = slice.call(arguments, 1); + return _.filter(_.uniq(array), function(item) { + return _.every(rest, function(other) { + return _.indexOf(other, item) >= 0; + }); + }); + }; + + // Take the difference between one array and a number of other arrays. + // Only the elements present in just the first array will remain. + _.difference = function(array) { + var rest = _.flatten(slice.call(arguments, 1), true); + return _.filter(array, function(value){ return !_.include(rest, value); }); + }; + + // Zip together multiple lists into a single array -- elements that share + // an index go together.
+ _.zip = function() { + var args = slice.call(arguments); + var length = _.max(_.pluck(args, 'length')); + var results = new Array(length); + for (var i = 0; i < length; i++) results[i] = _.pluck(args, "" + i); + return results; + }; + + // If the browser doesn't supply us with indexOf (I'm looking at you, **MSIE**), + // we need this function. Return the position of the first occurrence of an + // item in an array, or -1 if the item is not included in the array. + // Delegates to **ECMAScript 5**'s native `indexOf` if available. + // If the array is large and already in sort order, pass `true` + // for **isSorted** to use binary search. + _.indexOf = function(array, item, isSorted) { + if (array == null) return -1; + var i, l; + if (isSorted) { + i = _.sortedIndex(array, item); + return array[i] === item ? i : -1; + } + if (nativeIndexOf && array.indexOf === nativeIndexOf) return array.indexOf(item); + for (i = 0, l = array.length; i < l; i++) if (i in array && array[i] === item) return i; + return -1; + }; + + // Delegates to **ECMAScript 5**'s native `lastIndexOf` if available. + _.lastIndexOf = function(array, item) { + if (array == null) return -1; + if (nativeLastIndexOf && array.lastIndexOf === nativeLastIndexOf) return array.lastIndexOf(item); + var i = array.length; + while (i--) if (i in array && array[i] === item) return i; + return -1; + }; + + // Generate an integer Array containing an arithmetic progression. A port of + // the native Python `range()` function. See + // [the Python documentation](http://docs.python.org/library/functions.html#range). + _.range = function(start, stop, step) { + if (arguments.length <= 1) { + stop = start || 0; + start = 0; + } + step = arguments[2] || 1; + + var len = Math.max(Math.ceil((stop - start) / step), 0); + var idx = 0; + var range = new Array(len); + + while(idx < len) { + range[idx++] = start; + start += step; + } + + return range; + }; + + // Function (ahem) Functions + // ------------------ + + // Reusable constructor function for prototype setting. + var ctor = function(){}; + + // Create a function bound to a given object (assigning `this`, and arguments, + // optionally). Binding with arguments is also known as `curry`. + // Delegates to **ECMAScript 5**'s native `Function.bind` if available. + // We check for `func.bind` first, to fail fast when `func` is undefined. + _.bind = function bind(func, context) { + var bound, args; + if (func.bind === nativeBind && nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); + if (!_.isFunction(func)) throw new TypeError; + args = slice.call(arguments, 2); + return bound = function() { + if (!(this instanceof bound)) return func.apply(context, args.concat(slice.call(arguments))); + ctor.prototype = func.prototype; + var self = new ctor; + var result = func.apply(self, args.concat(slice.call(arguments))); + if (Object(result) === result) return result; + return self; + }; + }; + + // Bind all of an object's methods to that object. Useful for ensuring that + // all callbacks defined on an object belong to it. + _.bindAll = function(obj) { + var funcs = slice.call(arguments, 1); + if (funcs.length == 0) funcs = _.functions(obj); + each(funcs, function(f) { obj[f] = _.bind(obj[f], obj); }); + return obj; + }; + + // Memoize an expensive function by storing its results. + _.memoize = function(func, hasher) { + var memo = {}; + hasher || (hasher = _.identity); + return function() { + var key = hasher.apply(this, arguments); + return _.has(memo, key) ? 
memo[key] : (memo[key] = func.apply(this, arguments)); + }; + }; + + // Delays a function for the given number of milliseconds, and then calls + // it with the arguments supplied. + _.delay = function(func, wait) { + var args = slice.call(arguments, 2); + return setTimeout(function(){ return func.apply(null, args); }, wait); + }; + + // Defers a function, scheduling it to run after the current call stack has + // cleared. + _.defer = function(func) { + return _.delay.apply(_, [func, 1].concat(slice.call(arguments, 1))); + }; + + // Returns a function, that, when invoked, will only be triggered at most once + // during a given window of time. + _.throttle = function(func, wait) { + var context, args, timeout, throttling, more, result; + var whenDone = _.debounce(function(){ more = throttling = false; }, wait); + return function() { + context = this; args = arguments; + var later = function() { + timeout = null; + if (more) func.apply(context, args); + whenDone(); + }; + if (!timeout) timeout = setTimeout(later, wait); + if (throttling) { + more = true; + } else { + result = func.apply(context, args); + } + whenDone(); + throttling = true; + return result; + }; + }; + + // Returns a function, that, as long as it continues to be invoked, will not + // be triggered. The function will be called after it stops being called for + // N milliseconds. If `immediate` is passed, trigger the function on the + // leading edge, instead of the trailing. + _.debounce = function(func, wait, immediate) { + var timeout; + return function() { + var context = this, args = arguments; + var later = function() { + timeout = null; + if (!immediate) func.apply(context, args); + }; + if (immediate && !timeout) func.apply(context, args); + clearTimeout(timeout); + timeout = setTimeout(later, wait); + }; + }; + + // Returns a function that will be executed at most one time, no matter how + // often you call it. Useful for lazy initialization. + _.once = function(func) { + var ran = false, memo; + return function() { + if (ran) return memo; + ran = true; + return memo = func.apply(this, arguments); + }; + }; + + // Returns the first function passed as an argument to the second, + // allowing you to adjust arguments, run code before and after, and + // conditionally execute the original function. + _.wrap = function(func, wrapper) { + return function() { + var args = [func].concat(slice.call(arguments, 0)); + return wrapper.apply(this, args); + }; + }; + + // Returns a function that is the composition of a list of functions, each + // consuming the return value of the function that follows. + _.compose = function() { + var funcs = arguments; + return function() { + var args = arguments; + for (var i = funcs.length - 1; i >= 0; i--) { + args = [funcs[i].apply(this, args)]; + } + return args[0]; + }; + }; + + // Returns a function that will only be executed after being called N times. + _.after = function(times, func) { + if (times <= 0) return func(); + return function() { + if (--times < 1) { return func.apply(this, arguments); } + }; + }; + + // Object Functions + // ---------------- + + // Retrieve the names of an object's properties. + // Delegates to **ECMAScript 5**'s native `Object.keys` + _.keys = nativeKeys || function(obj) { + if (obj !== Object(obj)) throw new TypeError('Invalid object'); + var keys = []; + for (var key in obj) if (_.has(obj, key)) keys[keys.length] = key; + return keys; + }; + + // Retrieve the values of an object's properties. 
+ _.values = function(obj) { + return _.map(obj, _.identity); + }; + + // Return a sorted list of the function names available on the object. + // Aliased as `methods` + _.functions = _.methods = function(obj) { + var names = []; + for (var key in obj) { + if (_.isFunction(obj[key])) names.push(key); + } + return names.sort(); + }; + + // Extend a given object with all the properties in passed-in object(s). + _.extend = function(obj) { + each(slice.call(arguments, 1), function(source) { + for (var prop in source) { + obj[prop] = source[prop]; + } + }); + return obj; + }; + + // Return a copy of the object only containing the whitelisted properties. + _.pick = function(obj) { + var result = {}; + each(_.flatten(slice.call(arguments, 1)), function(key) { + if (key in obj) result[key] = obj[key]; + }); + return result; + }; + + // Fill in a given object with default properties. + _.defaults = function(obj) { + each(slice.call(arguments, 1), function(source) { + for (var prop in source) { + if (obj[prop] == null) obj[prop] = source[prop]; + } + }); + return obj; + }; + + // Create a (shallow-cloned) duplicate of an object. + _.clone = function(obj) { + if (!_.isObject(obj)) return obj; + return _.isArray(obj) ? obj.slice() : _.extend({}, obj); + }; + + // Invokes interceptor with the obj, and then returns obj. + // The primary purpose of this method is to "tap into" a method chain, in + // order to perform operations on intermediate results within the chain. + _.tap = function(obj, interceptor) { + interceptor(obj); + return obj; + }; + + // Internal recursive comparison function. + function eq(a, b, stack) { + // Identical objects are equal. `0 === -0`, but they aren't identical. + // See the Harmony `egal` proposal: http://wiki.ecmascript.org/doku.php?id=harmony:egal. + if (a === b) return a !== 0 || 1 / a == 1 / b; + // A strict comparison is necessary because `null == undefined`. + if (a == null || b == null) return a === b; + // Unwrap any wrapped objects. + if (a._chain) a = a._wrapped; + if (b._chain) b = b._wrapped; + // Invoke a custom `isEqual` method if one is provided. + if (a.isEqual && _.isFunction(a.isEqual)) return a.isEqual(b); + if (b.isEqual && _.isFunction(b.isEqual)) return b.isEqual(a); + // Compare `[[Class]]` names. + var className = toString.call(a); + if (className != toString.call(b)) return false; + switch (className) { + // Strings, numbers, dates, and booleans are compared by value. + case '[object String]': + // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is + // equivalent to `new String("5")`. + return a == String(b); + case '[object Number]': + // `NaN`s are equivalent, but non-reflexive. An `egal` comparison is performed for + // other numeric values. + return a != +a ? b != +b : (a == 0 ? 1 / a == 1 / b : a == +b); + case '[object Date]': + case '[object Boolean]': + // Coerce dates and booleans to numeric primitive values. Dates are compared by their + // millisecond representations. Note that invalid dates with millisecond representations + // of `NaN` are not equivalent. + return +a == +b; + // RegExps are compared by their source patterns and flags. + case '[object RegExp]': + return a.source == b.source && + a.global == b.global && + a.multiline == b.multiline && + a.ignoreCase == b.ignoreCase; + } + if (typeof a != 'object' || typeof b != 'object') return false; + // Assume equality for cyclic structures. 
The algorithm for detecting cyclic + // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. + var length = stack.length; + while (length--) { + // Linear search. Performance is inversely proportional to the number of + // unique nested structures. + if (stack[length] == a) return true; + } + // Add the first object to the stack of traversed objects. + stack.push(a); + var size = 0, result = true; + // Recursively compare objects and arrays. + if (className == '[object Array]') { + // Compare array lengths to determine if a deep comparison is necessary. + size = a.length; + result = size == b.length; + if (result) { + // Deep compare the contents, ignoring non-numeric properties. + while (size--) { + // Ensure commutative equality for sparse arrays. + if (!(result = size in a == size in b && eq(a[size], b[size], stack))) break; + } + } + } else { + // Objects with different constructors are not equivalent. + if ('constructor' in a != 'constructor' in b || a.constructor != b.constructor) return false; + // Deep compare objects. + for (var key in a) { + if (_.has(a, key)) { + // Count the expected number of properties. + size++; + // Deep compare each member. + if (!(result = _.has(b, key) && eq(a[key], b[key], stack))) break; + } + } + // Ensure that both objects contain the same number of properties. + if (result) { + for (key in b) { + if (_.has(b, key) && !(size--)) break; + } + result = !size; + } + } + // Remove the first object from the stack of traversed objects. + stack.pop(); + return result; + } + + // Perform a deep comparison to check if two objects are equal. + _.isEqual = function(a, b) { + return eq(a, b, []); + }; + + // Is a given array, string, or object empty? + // An "empty" object has no enumerable own-properties. + _.isEmpty = function(obj) { + if (obj == null) return true; + if (_.isArray(obj) || _.isString(obj)) return obj.length === 0; + for (var key in obj) if (_.has(obj, key)) return false; + return true; + }; + + // Is a given value a DOM element? + _.isElement = function(obj) { + return !!(obj && obj.nodeType == 1); + }; + + // Is a given value an array? + // Delegates to ECMA5's native Array.isArray + _.isArray = nativeIsArray || function(obj) { + return toString.call(obj) == '[object Array]'; + }; + + // Is a given variable an object? + _.isObject = function(obj) { + return obj === Object(obj); + }; + + // Is a given variable an arguments object? + _.isArguments = function(obj) { + return toString.call(obj) == '[object Arguments]'; + }; + if (!_.isArguments(arguments)) { + _.isArguments = function(obj) { + return !!(obj && _.has(obj, 'callee')); + }; + } + + // Is a given value a function? + _.isFunction = function(obj) { + return toString.call(obj) == '[object Function]'; + }; + + // Is a given value a string? + _.isString = function(obj) { + return toString.call(obj) == '[object String]'; + }; + + // Is a given value a number? + _.isNumber = function(obj) { + return toString.call(obj) == '[object Number]'; + }; + + // Is a given object a finite number? + _.isFinite = function(obj) { + return _.isNumber(obj) && isFinite(obj); + }; + + // Is the given value `NaN`? + _.isNaN = function(obj) { + // `NaN` is the only value for which `===` is not reflexive. + return obj !== obj; + }; + + // Is a given value a boolean? + _.isBoolean = function(obj) { + return obj === true || obj === false || toString.call(obj) == '[object Boolean]'; + }; + + // Is a given value a date? 
+ _.isDate = function(obj) { + return toString.call(obj) == '[object Date]'; + }; + + // Is the given value a regular expression? + _.isRegExp = function(obj) { + return toString.call(obj) == '[object RegExp]'; + }; + + // Is a given value equal to null? + _.isNull = function(obj) { + return obj === null; + }; + + // Is a given variable undefined? + _.isUndefined = function(obj) { + return obj === void 0; + }; + + // Has own property? + _.has = function(obj, key) { + return hasOwnProperty.call(obj, key); + }; + + // Utility Functions + // ----------------- + + // Run Underscore.js in *noConflict* mode, returning the `_` variable to its + // previous owner. Returns a reference to the Underscore object. + _.noConflict = function() { + root._ = previousUnderscore; + return this; + }; + + // Keep the identity function around for default iterators. + _.identity = function(value) { + return value; + }; + + // Run a function **n** times. + _.times = function (n, iterator, context) { + for (var i = 0; i < n; i++) iterator.call(context, i); + }; + + // Escape a string for HTML interpolation. + _.escape = function(string) { + return (''+string).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&#x27;').replace(/\//g,'&#x2F;'); + }; + + // If the value of the named property is a function then invoke it; + // otherwise, return it. + _.result = function(object, property) { + if (object == null) return null; + var value = object[property]; + return _.isFunction(value) ? value.call(object) : value; + }; + + // Add your own custom functions to the Underscore object, ensuring that + // they're correctly added to the OOP wrapper as well. + _.mixin = function(obj) { + each(_.functions(obj), function(name){ + addToWrapper(name, _[name] = obj[name]); + }); + }; + + // Generate a unique integer id (unique within the entire client session). + // Useful for temporary DOM ids. + var idCounter = 0; + _.uniqueId = function(prefix) { + var id = idCounter++; + return prefix ? prefix + id : id; + }; + + // By default, Underscore uses ERB-style template delimiters, change the + // following template settings to use alternative delimiters. + _.templateSettings = { + evaluate : /<%([\s\S]+?)%>/g, + interpolate : /<%=([\s\S]+?)%>/g, + escape : /<%-([\s\S]+?)%>/g + }; + + // When customizing `templateSettings`, if you don't want to define an + // interpolation, evaluation or escaping regex, we need one that is + // guaranteed not to match. + var noMatch = /.^/; + + // Certain characters need to be escaped so that they can be put into a + // string literal. + var escapes = { + '\\': '\\', + "'": "'", + 'r': '\r', + 'n': '\n', + 't': '\t', + 'u2028': '\u2028', + 'u2029': '\u2029' + }; + + for (var p in escapes) escapes[escapes[p]] = p; + var escaper = /\\|'|\r|\n|\t|\u2028|\u2029/g; + var unescaper = /\\(\\|'|r|n|t|u2028|u2029)/g; + + // Within an interpolation, evaluation, or escaping, remove HTML escaping + // that had been previously added. + var unescape = function(code) { + return code.replace(unescaper, function(match, escape) { + return escapes[escape]; + }); + }; + + // JavaScript micro-templating, similar to John Resig's implementation. + // Underscore templating handles arbitrary delimiters, preserves whitespace, + // and correctly escapes quotes within interpolated code.
+ _.template = function(text, data, settings) { + settings = _.defaults(settings || {}, _.templateSettings); + + // Compile the template source, taking care to escape characters that + // cannot be included in a string literal and then unescape them in code + // blocks. + var source = "__p+='" + text + .replace(escaper, function(match) { + return '\\' + escapes[match]; + }) + .replace(settings.escape || noMatch, function(match, code) { + return "'+\n_.escape(" + unescape(code) + ")+\n'"; + }) + .replace(settings.interpolate || noMatch, function(match, code) { + return "'+\n(" + unescape(code) + ")+\n'"; + }) + .replace(settings.evaluate || noMatch, function(match, code) { + return "';\n" + unescape(code) + "\n;__p+='"; + }) + "';\n"; + + // If a variable is not specified, place data values in local scope. + if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n'; + + source = "var __p='';" + + "var print=function(){__p+=Array.prototype.join.call(arguments, '')};\n" + + source + "return __p;\n"; + + var render = new Function(settings.variable || 'obj', '_', source); + if (data) return render(data, _); + var template = function(data) { + return render.call(this, data, _); + }; + + // Provide the compiled function source as a convenience for build time + // precompilation. + template.source = 'function(' + (settings.variable || 'obj') + '){\n' + + source + '}'; + + return template; + }; + + // Add a "chain" function, which will delegate to the wrapper. + _.chain = function(obj) { + return _(obj).chain(); + }; + + // The OOP Wrapper + // --------------- + + // If Underscore is called as a function, it returns a wrapped object that + // can be used OO-style. This wrapper holds altered versions of all the + // underscore functions. Wrapped objects may be chained. + var wrapper = function(obj) { this._wrapped = obj; }; + + // Expose `wrapper.prototype` as `_.prototype` + _.prototype = wrapper.prototype; + + // Helper function to continue chaining intermediate results. + var result = function(obj, chain) { + return chain ? _(obj).chain() : obj; + }; + + // A method to easily add functions to the OOP wrapper. + var addToWrapper = function(name, func) { + wrapper.prototype[name] = function() { + var args = slice.call(arguments); + unshift.call(args, this._wrapped); + return result(func.apply(_, args), this._chain); + }; + }; + + // Add all of the Underscore functions to the wrapper object. + _.mixin(_); + + // Add all mutator Array functions to the wrapper. + each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { + var method = ArrayProto[name]; + wrapper.prototype[name] = function() { + var wrapped = this._wrapped; + method.apply(wrapped, arguments); + var length = wrapped.length; + if ((name == 'shift' || name == 'splice') && length === 0) delete wrapped[0]; + return result(wrapped, this._chain); + }; + }); + + // Add all accessor Array functions to the wrapper. + each(['concat', 'join', 'slice'], function(name) { + var method = ArrayProto[name]; + wrapper.prototype[name] = function() { + return result(method.apply(this._wrapped, arguments), this._chain); + }; + }); + + // Start chaining a wrapped Underscore object. + wrapper.prototype.chain = function() { + this._chain = true; + return this; + }; + + // Extracts the result from a wrapped and chained object. 
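+ // Illustrative aside (not part of the original source): once an object is
+ // wrapped and chained, each call below returns the wrapper until value()
+ // unwraps it, e.g.
+ //
+ //     _([2, 1, 3]).chain().sort().value();   // => [1, 2, 3]
+ //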
+ wrapper.prototype.value = function() { + return this._wrapped; + }; + +}).call(this); diff -Nru tilemill-0.10.1~saucy9/node_modules/backbone-dirty/package.json tilemill-0.10.1~saucy10/node_modules/backbone-dirty/package.json --- tilemill-0.10.1~saucy9/node_modules/backbone-dirty/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/backbone-dirty/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -19,7 +19,12 @@ "test": "expresso" }, "readme": "Backbone Dirty\n--------------\nServer-side overrides for Backbone to use `node-dirty` for Model persistence.\n\n### Compatibility\n\nBackbone 0.3.3.\n\n### Usage\n\nPass a filepath to the db (will be created if it doesn't exist yet) when\ncalling `require()`.\n\n var Backbone = require('backbone');\n Backbone.sync = require('backbone-dirty')('app.db').sync;\n\n // Backbone.sync will now load and save models from app.db.\n\n### Conventions\n\n`backbone-dirty` stores models in the `node-dirty` db using the `model.url` as\nits key. Collections retrieve models by matching the Collection url against\nthe initial portion of the Model url.\n\n var orange = new FruitModel({id: 'orange'});\n var apple = new FruitModel({id: 'apple'});\n var banana = new FruitModel({id: 'banana'});\n\n console.log(orange.url()); // fruits/orange\n console.log(apple.url()); // fruits/apple\n console.log(banana.url()); // fruits/banana\n\n var fruits = new FruitCollection();\n\n console.log(fruits.url); // fruits\n\n fruits.fetch(); // retrieves orange, apple, banana\n\n### Authors\n\n- [Will White](http://github.com/willwhite)\n- [Young Hahn](http://github.com/yhahn)\n\n", - "_id": "backbone-dirty@1.1.3", + "readmeFilename": "README.md", "description": "Backbone Dirty -------------- Server-side overrides for Backbone to use `node-dirty` for Model persistence.", - "_from": "backbone-dirty@~1.1.3" + "_id": "backbone-dirty@1.1.3", + "dist": { + "shasum": "dae6623129d42618b9481a2e2e5617176b519a7a" + }, + "_from": "backbone-dirty@~1.1.3", + "_resolved": "https://registry.npmjs.org/backbone-dirty/-/backbone-dirty-1.1.3.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/bones/node_modules/backbone/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/backbone/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/backbone/package.json 2013-10-25 01:44:51.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/backbone/package.json 2013-10-25 23:13:15.000000000 +0000 @@ -23,9 +23,5 @@ "readme": " ____ _ _ _ \n | _ \\ | | | | (_) \n | |_) | __ _ ___| | __| |__ ___ _ __ ___ _ ___ \n | _ < / _` |/ __| |/ /| '_ \\ / _ \\| '_ \\ / _ \\ | / __|\n | |_) | (_| | (__| < | |_) | (_) | | | | __/_| \\__ \\\n |____/ \\__,_|\\___|_|\\_\\|_.__/ \\___/|_| |_|\\___(_) |___/\n _/ | \n |__/\n(_'___________________________________________________'_)\n(_.———————————————————————————————————————————————————._)\n \n \nBackbone supplies structure to JavaScript-heavy applications by providing models key-value binding and custom events, collections with a rich API of enumerable functions, views with declarative event handling, and connects it all to your existing application over a RESTful JSON interface.\n\nFor Docs, License, Tests, and pre-packed downloads, see:\nhttp://documentcloud.github.com/backbone/\n\nTo suggest a feature, report a bug, or general discussion:\nhttp://github.com/documentcloud/backbone/issues/\n\nAll contributors are listed here:\nhttp://github.com/documentcloud/backbone/contributors\n\nSpecial 
thanks to Robert Kieffer for the original philosophy behind Backbone.\nhttp://github.com/broofa\n", "readmeFilename": "README", "_id": "backbone@0.3.3", - "dist": { - "shasum": "8dc7a6a604e02242cd1f5b56fe47ece4310ca91b" - }, - "_from": "backbone@0.3.3", - "_resolved": "https://registry.npmjs.org/backbone/-/backbone-0.3.3.tgz" + "_from": "backbone@0.3.3" } diff -Nru tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/connect/node_modules/formidable/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/connect/node_modules/formidable/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/connect/node_modules/formidable/package.json 2013-10-25 01:44:56.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/connect/node_modules/formidable/package.json 2013-10-25 23:13:16.000000000 +0000 @@ -34,9 +34,5 @@ "readmeFilename": "Readme.md", "dependencies": {}, "_id": "formidable@1.0.14", - "dist": { - "shasum": "5af657e0d5f6297d220ff04f040214ad7cabec0b" - }, - "_from": "formidable@1.0.x", - "_resolved": "https://registry.npmjs.org/formidable/-/formidable-1.0.14.tgz" + "_from": "formidable@1.0.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/connect/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/connect/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/connect/package.json 2013-10-25 01:44:54.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/connect/package.json 2013-10-25 23:13:15.000000000 +0000 @@ -41,9 +41,5 @@ }, "readme": "ERROR: No README data found!", "_id": "connect@1.9.2", - "dist": { - "shasum": "4817f59f2058ad5ebdcadadafce22ae24d18f35d" - }, - "_from": "connect@1.x", - "_resolved": "https://registry.npmjs.org/connect/-/connect-1.9.2.tgz" + "_from": "connect@1.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/mime/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/mime/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/mime/package.json 2013-10-25 01:44:53.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/mime/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -33,9 +33,5 @@ "url": "https://github.com/bentomas/node-mime/issues" }, "_id": "mime@1.2.4", - "dist": { - "shasum": "49f20547bc4845b440379a7ae48821b1fee936a1" - }, - "_from": "mime@1.2.4", - "_resolved": "https://registry.npmjs.org/mime/-/mime-1.2.4.tgz" + "_from": "mime@1.2.4" } diff -Nru tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/qs/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/qs/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/node_modules/qs/package.json 2013-10-25 01:44:53.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/node_modules/qs/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -25,9 +25,5 @@ "url": "https://github.com/visionmedia/node-querystring/issues" }, "_id": "qs@0.4.2", - "dist": { - "shasum": "d4f9a94991c3bbee4d1abec770b014ecae41cbb3" - }, - "_from": "qs@0.4.x", - "_resolved": "https://registry.npmjs.org/qs/-/qs-0.4.2.tgz" + "_from": "qs@0.4.x" } diff -Nru 
tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/express/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/express/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -67,9 +67,5 @@ "url": "https://github.com/visionmedia/express/issues" }, "_id": "express@2.5.11", - "dist": { - "shasum": "846aa1534e2d9baf28d4c523e997edc582181771" - }, - "_from": "express@~2.5.11", - "_resolved": "https://registry.npmjs.org/express/-/express-2.5.11.tgz" + "_from": "express@~2.5.11" } diff -Nru tilemill-0.10.1~saucy9/node_modules/bones/node_modules/jquery/node_modules/htmlparser/package.json tilemill-0.10.1~saucy10/node_modules/bones/node_modules/jquery/node_modules/htmlparser/package.json --- tilemill-0.10.1~saucy9/node_modules/bones/node_modules/jquery/node_modules/htmlparser/package.json 2013-10-25 01:44:54.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/bones/node_modules/jquery/node_modules/htmlparser/package.json 2013-10-25 23:13:18.000000000 +0000 @@ -37,9 +37,5 @@ "readme": "#NodeHtmlParser\nA forgiving HTML/XML/RSS parser written in JS for both the browser and NodeJS (yes, despite the name it works just fine in any modern browser). The parser can handle streams (chunked data) and supports custom handlers for writing custom DOMs/output.\n\n##Installing\n\n\tnpm install htmlparser\n\n##Running Tests\n\n###Run tests under node:\n\tnode runtests.js\n\n###Run tests in browser:\nView runtests.html in any browser\n\n##Usage In Node\n\n```javascript\nvar htmlparser = require(\"htmlparser\");\nvar rawHtml = \"Xyz \n\n```\n\n## Documentation\n\n### Collections\n\n* [each](#each)\n* [map](#map)\n* [filter](#filter)\n* [reject](#reject)\n* [reduce](#reduce)\n* [detect](#detect)\n* [sortBy](#sortBy)\n* [some](#some)\n* [every](#every)\n* [concat](#concat)\n\n### Control Flow\n\n* [series](#series)\n* [parallel](#parallel)\n* [whilst](#whilst)\n* [doWhilst](#doWhilst)\n* [until](#until)\n* [doUntil](#doUntil)\n* [forever](#forever)\n* [waterfall](#waterfall)\n* [compose](#compose)\n* [applyEach](#applyEach)\n* [queue](#queue)\n* [cargo](#cargo)\n* [auto](#auto)\n* [iterator](#iterator)\n* [apply](#apply)\n* [nextTick](#nextTick)\n* [times](#times)\n* [timesSeries](#timesSeries)\n\n### Utils\n\n* [memoize](#memoize)\n* [unmemoize](#unmemoize)\n* [log](#log)\n* [dir](#dir)\n* [noConflict](#noConflict)\n\n\n## Collections\n\n\n\n### each(arr, iterator, callback)\n\nApplies an iterator function to each item in an array, in parallel.\nThe iterator is called with an item from the list and a callback for when it\nhas finished. If the iterator passes an error to this callback, the main\ncallback for the each function is immediately called with the error.\n\nNote, that since this function applies the iterator to each item in parallel\nthere is no guarantee that the iterator functions will complete in order.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err) which must be called once it has \n completed. 
If no error has occured, the callback should be run without \n arguments or with an explicit null argument.\n* callback(err) - A callback which is called after all the iterator functions\n have finished, or an error has occurred.\n\n__Example__\n\n```js\n// assuming openFiles is an array of file names and saveFile is a function\n// to save the modified contents of that file:\n\nasync.each(openFiles, saveFile, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n\n\n### eachSeries(arr, iterator, callback)\n\nThe same as each only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. This means the iterator functions will complete in order.\n\n\n---------------------------------------\n\n\n\n### eachLimit(arr, limit, iterator, callback)\n\nThe same as each only no more than \"limit\" iterators will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that\n the first \"limit\" iterator functions will complete before any others are \nstarted.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* limit - The maximum number of iterators to run at any time.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err) which must be called once it has \n completed. If no error has occured, the callback should be run without \n arguments or with an explicit null argument.\n* callback(err) - A callback which is called after all the iterator functions\n have finished, or an error has occurred.\n\n__Example__\n\n```js\n// Assume documents is an array of JSON objects and requestApi is a\n// function that interacts with a rate-limited REST api.\n\nasync.eachLimit(documents, 20, requestApi, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n\n### map(arr, iterator, callback)\n\nProduces a new array of values by mapping each value in the given array through\nthe iterator function. The iterator is called with an item from the array and a\ncallback for when it has finished processing. The callback takes 2 arguments, \nan error and the transformed item from the array. If the iterator passes an\nerror to this callback, the main callback for the map function is immediately\ncalled with the error.\n\nNote, that since this function applies the iterator to each item in parallel\nthere is no guarantee that the iterator functions will complete in order, however\nthe results array will be in the same order as the original array.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, transformed) which must be called once \n it has completed with an error (which can be null) and a transformed item.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. 
Results is an array of the\n transformed items from the original array.\n\n__Example__\n\n```js\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n\n### mapSeries(arr, iterator, callback)\n\nThe same as map only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n\n---------------------------------------\n\n\n### mapLimit(arr, limit, iterator, callback)\n\nThe same as map only no more than \"limit\" iterators will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that\n the first \"limit\" iterator functions will complete before any others are \nstarted.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* limit - The maximum number of iterators to run at any time.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, transformed) which must be called once \n it has completed with an error (which can be null) and a transformed item.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is an array of the\n transformed items from the original array.\n\n__Example__\n\n```js\nasync.map(['file1','file2','file3'], 1, fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n\n### filter(arr, iterator, callback)\n\n__Alias:__ select\n\nReturns a new array of all the values which pass an async truth test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists. This operation is\nperformed in parallel, but the results array will be in the same order as the\noriginal.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(results) - A callback which is called after all the iterator\n functions have finished.\n\n__Example__\n\n```js\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n```\n\n---------------------------------------\n\n\n### filterSeries(arr, iterator, callback)\n\n__alias:__ selectSeries\n\nThe same as filter only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n---------------------------------------\n\n\n### reject(arr, iterator, callback)\n\nThe opposite of filter. Removes values that pass an async truth test.\n\n---------------------------------------\n\n\n### rejectSeries(arr, iterator, callback)\n\nThe same as reject, only the iterator is applied to each item in the array\nin series.\n\n\n---------------------------------------\n\n\n### reduce(arr, memo, iterator, callback)\n\n__aliases:__ inject, foldl\n\nReduces a list of values into a single value using an async iterator to return\neach successive step. 
Memo is the initial state of the reduction. This\nfunction only operates in series. For performance reasons, it may make sense to\nsplit a call to this function into a parallel map, then use the normal\nArray.prototype.reduce on the results. This function is for situations where\neach step in the reduction needs to be async, if you can get the data before\nreducing it then it's probably a good idea to do so.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* memo - The initial state of the reduction.\n* iterator(memo, item, callback) - A function applied to each item in the\n array to produce the next step in the reduction. The iterator is passed a\n callback(err, reduction) which accepts an optional error as its first \n argument, and the state of the reduction as the second. If an error is \n passed to the callback, the reduction is stopped and the main callback is \n immediately called with the error.\n* callback(err, result) - A callback which is called after all the iterator\n functions have finished. Result is the reduced value.\n\n__Example__\n\n```js\nasync.reduce([1,2,3], 0, function(memo, item, callback){\n // pointless async:\n process.nextTick(function(){\n callback(null, memo + item)\n });\n}, function(err, result){\n // result is now equal to the last value of memo, which is 6\n});\n```\n\n---------------------------------------\n\n\n### reduceRight(arr, memo, iterator, callback)\n\n__Alias:__ foldr\n\nSame as reduce, only operates on the items in the array in reverse order.\n\n\n---------------------------------------\n\n\n### detect(arr, iterator, callback)\n\nReturns the first value in a list that passes an async truth test. The\niterator is applied in parallel, meaning the first iterator to return true will\nfire the detect callback with that result. That means the result might not be\nthe first item in the original array (in terms of order) that passes the test.\n\nIf order within the original array is important then look at detectSeries.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called as soon as any iterator returns\n true, or after all the iterator functions have finished. Result will be\n the first item in the array that passes the truth test (iterator) or the\n value undefined if none passed.\n\n__Example__\n\n```js\nasync.detect(['file1','file2','file3'], fs.exists, function(result){\n // result now equals the first file in the list that exists\n});\n```\n\n---------------------------------------\n\n\n### detectSeries(arr, iterator, callback)\n\nThe same as detect, only the iterator is applied to each item in the array\nin series. 
This means the result is always the first in the original array (in\nterms of array order) that passes the truth test.\n\n\n---------------------------------------\n\n\n### sortBy(arr, iterator, callback)\n\nSorts a list by the results of running each value through an async iterator.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, sortValue) which must be called once it\n has completed with an error (which can be null) and a value to use as the sort\n criteria.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is the items from\n the original array sorted by the values returned by the iterator calls.\n\n__Example__\n\n```js\nasync.sortBy(['file1','file2','file3'], function(file, callback){\n fs.stat(file, function(err, stats){\n callback(err, stats.mtime);\n });\n}, function(err, results){\n // results is now the original array of files sorted by\n // modified date\n});\n```\n\n---------------------------------------\n\n\n### some(arr, iterator, callback)\n\n__Alias:__ any\n\nReturns true if at least one element in the array satisfies an async test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists. Once any iterator\ncall returns true, the main callback is immediately called.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called as soon as any iterator returns\n true, or after all the iterator functions have finished. Result will be\n either true or false depending on the values of the async tests.\n\n__Example__\n\n```js\nasync.some(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then at least one of the files exists\n});\n```\n\n---------------------------------------\n\n\n### every(arr, iterator, callback)\n\n__Alias:__ all\n\nReturns true if every element in the array satisfies an async test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called after all the iterator\n functions have finished. Result will be either true or false depending on\n the values of the async tests.\n\n__Example__\n\n```js\nasync.every(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then every file exists\n});\n```\n\n---------------------------------------\n\n\n### concat(arr, iterator, callback)\n\nApplies an iterator to each item in a list, concatenating the results. Returns the\nconcatenated list. The iterators are called in parallel, and the results are\nconcatenated as they return. 
There is no guarantee that the results array will\nbe returned in the original order of the arguments passed to the iterator function.\n\n__Arguments__\n\n* arr - An array to iterate over\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, results) which must be called once it \n has completed with an error (which can be null) and an array of results.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is an array containing\n the concatenated results of the iterator function.\n\n__Example__\n\n```js\nasync.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){\n // files is now a list of filenames that exist in the 3 directories\n});\n```\n\n---------------------------------------\n\n\n### concatSeries(arr, iterator, callback)\n\nSame as async.concat, but executes in series instead of parallel.\n\n\n## Control Flow\n\n\n### series(tasks, [callback])\n\nRun an array of functions in series, each one running once the previous\nfunction has completed. If any functions in the series pass an error to its\ncallback, no more functions are run and the callback for the series is\nimmediately called with the value of the error. Once the tasks have completed,\nthe results are passed to the final callback as an array.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final callback as an object\ninstead of an array. This can be a more readable way of handling results from\nasync.series.\n\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed\n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.series([\n function(callback){\n // do some stuff ...\n callback(null, 'one');\n },\n function(callback){\n // do some more stuff ...\n callback(null, 'two');\n }\n],\n// optional callback\nfunction(err, results){\n // results is now equal to ['one', 'two']\n});\n\n\n// an example using an object instead of an array\nasync.series({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equal to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n\n### parallel(tasks, [callback])\n\nRun an array of functions in parallel, without waiting until the previous\nfunction has completed. If any of the functions pass an error to its\ncallback, the main callback is immediately called with the value of the error.\nOnce the tasks have completed, the results are passed to the final callback as an\narray.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final callback as an object\ninstead of an array. 
This can be a more readable way of handling results from\nasync.parallel.\n\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed \n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.parallel([\n function(callback){\n setTimeout(function(){\n callback(null, 'one');\n }, 200);\n },\n function(callback){\n setTimeout(function(){\n callback(null, 'two');\n }, 100);\n }\n],\n// optional callback\nfunction(err, results){\n // the results array will equal ['one','two'] even though\n // the second function had a shorter timeout.\n});\n\n\n// an example using an object instead of an array\nasync.parallel({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equals to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n\n### parallelLimit(tasks, limit, [callback])\n\nThe same as parallel only the tasks are executed in parallel with a maximum of \"limit\" \ntasks executing at any time.\n\nNote that the tasks are not executed in batches, so there is no guarantee that \nthe first \"limit\" tasks will complete before any others are started.\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed \n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* limit - The maximum number of tasks to run at any time.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n---------------------------------------\n\n\n### whilst(test, fn, callback)\n\nRepeatedly call fn, while test returns true. Calls the callback when stopped,\nor an error occurs.\n\n__Arguments__\n\n* test() - synchronous truth test to perform before each execution of fn.\n* fn(callback) - A function to call each time the test passes. The function is\n passed a callback(err) which must be called once it has completed with an \n optional error argument.\n* callback(err) - A callback which is called after the test fails and repeated\n execution of fn has stopped.\n\n__Example__\n\n```js\nvar count = 0;\n\nasync.whilst(\n function () { return count < 5; },\n function (callback) {\n count++;\n setTimeout(callback, 1000);\n },\n function (err) {\n // 5 seconds have passed\n }\n);\n```\n\n---------------------------------------\n\n\n### doWhilst(fn, test, callback)\n\nThe post check version of whilst. To reflect the difference in the order of operations `test` and `fn` arguments are switched. `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.\n\n---------------------------------------\n\n\n### until(test, fn, callback)\n\nRepeatedly call fn, until test returns true. Calls the callback when stopped,\nor an error occurs.\n\nThe inverse of async.whilst.\n\n---------------------------------------\n\n\n### doUntil(fn, test, callback)\n\nLike doWhilst except the test is inverted. 
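A minimal sketch of `until` (an illustrative aside, not part of the original README): it inverts the `whilst` example above, and `doUntil` would take the same `fn` and `test` with the arguments swapped:

```js
var count = 0;

async.until(
    function () { return count >= 5; },      // stop once the test returns true
    function (callback) {
        count++;
        setTimeout(callback, 1000);          // one unit of async work per iteration
    },
    function (err) {
        // reached once the test returns true (or an error was passed)
    }
);
```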
Note the argument ordering differs from `until`.\n\n---------------------------------------\n\n\n### forever(fn, callback)\n\nCalls the asynchronous function 'fn' repeatedly, in series, indefinitely.\nIf an error is passed to fn's callback then 'callback' is called with the\nerror, otherwise it will never be called.\n\n---------------------------------------\n\n\n### waterfall(tasks, [callback])\n\nRuns an array of functions in series, each passing their results to the next in\nthe array. However, if any of the functions pass an error to the callback, the\nnext function is not executed and the main callback is immediately called with\nthe error.\n\n__Arguments__\n\n* tasks - An array of functions to run, each function is passed a \n callback(err, result1, result2, ...) it must call on completion. The first\n argument is an error (which can be null) and any further arguments will be \n passed as arguments in order to the next task.\n* callback(err, [results]) - An optional callback to run once all the functions\n have completed. This will be passed the results of the last task's callback.\n\n\n\n__Example__\n\n```js\nasync.waterfall([\n function(callback){\n callback(null, 'one', 'two');\n },\n function(arg1, arg2, callback){\n callback(null, 'three');\n },\n function(arg1, callback){\n // arg1 now equals 'three'\n callback(null, 'done');\n }\n], function (err, result) {\n // result now equals 'done' \n});\n```\n\n---------------------------------------\n\n### compose(fn1, fn2...)\n\nCreates a function which is a composition of the passed asynchronous\nfunctions. Each function consumes the return value of the function that\nfollows. Composing functions f(), g() and h() would produce the result of\nf(g(h())), only this version uses callbacks to obtain the return values.\n\nEach function is executed with the `this` binding of the composed function.\n\n__Arguments__\n\n* functions... - the asynchronous functions to compose\n\n\n__Example__\n\n```js\nfunction add1(n, callback) {\n setTimeout(function () {\n callback(null, n + 1);\n }, 10);\n}\n\nfunction mul3(n, callback) {\n setTimeout(function () {\n callback(null, n * 3);\n }, 10);\n}\n\nvar add1mul3 = async.compose(mul3, add1);\n\nadd1mul3(4, function (err, result) {\n // result now equals 15\n});\n```\n\n---------------------------------------\n\n### applyEach(fns, args..., callback)\n\nApplies the provided arguments to each function in the array, calling the\ncallback after all functions have completed. If you only provide the first\nargument then it will return a function which lets you pass in the\narguments as if it were a single function call.\n\n__Arguments__\n\n* fns - the asynchronous functions to all call with the same arguments\n* args... - any number of separate arguments to pass to the function\n* callback - the final argument should be the callback, called when all\n functions have completed processing\n\n\n__Example__\n\n```js\nasync.applyEach([enableSearch, updateSchema], 'bucket', callback);\n\n// partial application example:\nasync.each(\n buckets,\n async.applyEach([enableSearch, updateSchema]),\n callback\n);\n```\n\n---------------------------------------\n\n\n### applyEachSeries(arr, iterator, callback)\n\nThe same as applyEach only the functions are applied in series.\n\n---------------------------------------\n\n\n### queue(worker, concurrency)\n\nCreates a queue object with the specified concurrency. Tasks added to the\nqueue will be processed in parallel (up to the concurrency limit). 
If all\nworkers are in progress, the task is queued until one is available. Once\na worker has completed a task, the task's callback is called.\n\n__Arguments__\n\n* worker(task, callback) - An asynchronous function for processing a queued\n task, which must call its callback(err) argument when finished, with an \n optional error as an argument.\n* concurrency - An integer for determining how many worker functions should be\n run in parallel.\n\n__Queue objects__\n\nThe queue object returned by this function has the following properties and\nmethods:\n\n* length() - a function returning the number of items waiting to be processed.\n* concurrency - an integer for determining how many worker functions should be\n run in parallel. This property can be changed after a queue is created to\n alter the concurrency on-the-fly.\n* push(task, [callback]) - add a new task to the queue, the callback is called\n once the worker has finished processing the task.\n instead of a single task, an array of tasks can be submitted. the respective callback is used for every task in the list.\n* unshift(task, [callback]) - add a new task to the front of the queue.\n* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued\n* empty - a callback that is called when the last item from the queue is given to a worker\n* drain - a callback that is called when the last item from the queue has returned from the worker\n\n__Example__\n\n```js\n// create a queue object with concurrency 2\n\nvar q = async.queue(function (task, callback) {\n console.log('hello ' + task.name);\n callback();\n}, 2);\n\n\n// assign a callback\nq.drain = function() {\n console.log('all items have been processed');\n}\n\n// add some items to the queue\n\nq.push({name: 'foo'}, function (err) {\n console.log('finished processing foo');\n});\nq.push({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the queue (batch-wise)\n\nq.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the front of the queue\n\nq.unshift({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n```\n\n---------------------------------------\n\n\n### cargo(worker, [payload])\n\nCreates a cargo object with the specified payload. Tasks added to the\ncargo will be processed altogether (up to the payload limit). If the\nworker is in progress, the task is queued until it is available. Once\nthe worker has completed some tasks, each callback of those tasks is called.\n\n__Arguments__\n\n* worker(tasks, callback) - An asynchronous function for processing an array of\n queued tasks, which must call its callback(err) argument when finished, with \n an optional error as an argument.\n* payload - An optional integer for determining how many tasks should be\n processed per round; if omitted, the default is unlimited.\n\n__Cargo objects__\n\nThe cargo object returned by this function has the following properties and\nmethods:\n\n* length() - a function returning the number of items waiting to be processed.\n* payload - an integer for determining how many tasks should be\n process per round. This property can be changed after a cargo is created to\n alter the payload on-the-fly.\n* push(task, [callback]) - add a new task to the queue, the callback is called\n once the worker has finished processing the task.\n instead of a single task, an array of tasks can be submitted. 
the respective callback is used for every task in the list.\n* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued\n* empty - a callback that is called when the last item from the queue is given to a worker\n* drain - a callback that is called when the last item from the queue has returned from the worker\n\n__Example__\n\n```js\n// create a cargo object with payload 2\n\nvar cargo = async.cargo(function (tasks, callback) {\n    for(var i=0; i<tasks.length; i++){\n      console.log('hello ' + tasks[i].name);\n    }\n    callback();\n}, 2);\n\n\n// add some items\n\ncargo.push({name: 'foo'}, function (err) {\n    console.log('finished processing foo');\n});\ncargo.push({name: 'bar'}, function (err) {\n    console.log('finished processing bar');\n});\ncargo.push({name: 'baz'}, function (err) {\n    console.log('finished processing baz');\n});\n```\n\n---------------------------------------\n\n\n### auto(tasks, [callback])\n\nDetermines the best order for running functions based on their requirements.\nEach function can optionally depend on other functions being completed first,\nand each function is run as soon as its requirements are satisfied. If any of\nthe functions pass an error to their callback, that function will not complete\n(so any other functions depending on it will not run) and the main callback\nwill be called immediately with the error. Functions also receive an object\ncontaining the results of functions which have completed so far.\n\nNote, all functions are called with a results object as a second argument, \nso it is unsafe to pass functions in the tasks object which cannot handle the\nextra argument. For example, this snippet of code:\n\n```js\nasync.auto({\n readData: async.apply(fs.readFile, 'data.txt', 'utf-8')\n}, callback);\n```\n\nwill have the effect of calling readFile with the results object as the last\nargument, which will fail:\n\n```js\nfs.readFile('data.txt', 'utf-8', cb, {});\n```\n\nInstead, wrap the call to readFile in a function which does not forward the \nresults object:\n\n```js\nasync.auto({\n readData: function(cb, results){\n fs.readFile('data.txt', 'utf-8', cb);\n }\n}, callback);\n```\n\n__Arguments__\n\n* tasks - An object literal containing named functions or an array of\n  requirements, with the function itself the last item in the array. The key\n  used for each function or array is used when specifying requirements. The \n  function receives two arguments: (1) a callback(err, result) which must be \n  called when finished, passing an error (which can be null) and the result of \n  the function's execution, and (2) a results object, containing the results of\n  the previously executed functions.\n* callback(err, results) - An optional callback which is called when all the\n  tasks have been completed. The callback will receive an error as an argument\n  if any tasks pass an error to their callback.
Results will always be passed\n\tbut if an error occurred, no other tasks will be performed, and the results\n\tobject will only contain partial results.\n \n\n__Example__\n\n```js\nasync.auto({\n get_data: function(callback){\n // async code to get some data\n },\n make_folder: function(callback){\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n },\n write_file: ['get_data', 'make_folder', function(callback){\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n callback(null, filename);\n }],\n email_link: ['write_file', function(callback, results){\n // once the file is written let's email a link to it...\n // results.write_file contains the filename returned by write_file.\n }]\n});\n```\n\nThis is a fairly trivial example, but to do this using the basic parallel and\nseries functions would look like this:\n\n```js\nasync.parallel([\n function(callback){\n // async code to get some data\n },\n function(callback){\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n }\n],\nfunction(err, results){\n async.series([\n function(callback){\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n },\n function(callback){\n // once the file is written let's email a link to it...\n }\n ]);\n});\n```\n\nFor a complicated series of async tasks using the auto function makes adding\nnew tasks much easier and makes the code more readable.\n\n\n---------------------------------------\n\n\n### iterator(tasks)\n\nCreates an iterator function which calls the next function in the array,\nreturning a continuation to call the next one after that. It's also possible to\n'peek' the next iterator by doing iterator.next().\n\nThis function is used internally by the async module but can be useful when\nyou want to manually control the flow of functions in series.\n\n__Arguments__\n\n* tasks - An array of functions to run.\n\n__Example__\n\n```js\nvar iterator = async.iterator([\n function(){ sys.p('one'); },\n function(){ sys.p('two'); },\n function(){ sys.p('three'); }\n]);\n\nnode> var iterator2 = iterator();\n'one'\nnode> var iterator3 = iterator2();\n'two'\nnode> iterator3();\n'three'\nnode> var nextfn = iterator2.next();\nnode> nextfn();\n'three'\n```\n\n---------------------------------------\n\n\n### apply(function, arguments..)\n\nCreates a continuation function with some arguments already applied, a useful\nshorthand when combined with other control flow functions. Any arguments\npassed to the returned function are added to the arguments originally passed\nto apply.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... 
- Any number of arguments to automatically apply when the\n continuation is called.\n\n__Example__\n\n```js\n// using apply\n\nasync.parallel([\n async.apply(fs.writeFile, 'testfile1', 'test1'),\n async.apply(fs.writeFile, 'testfile2', 'test2'),\n]);\n\n\n// the same process without using apply\n\nasync.parallel([\n function(callback){\n fs.writeFile('testfile1', 'test1', callback);\n },\n function(callback){\n fs.writeFile('testfile2', 'test2', callback);\n }\n]);\n```\n\nIt's possible to pass any number of additional arguments when calling the\ncontinuation:\n\n```js\nnode> var fn = async.apply(sys.puts, 'one');\nnode> fn('two', 'three');\none\ntwo\nthree\n```\n\n---------------------------------------\n\n\n### nextTick(callback)\n\nCalls the callback on a later loop around the event loop. In node.js this just\ncalls process.nextTick, in the browser it falls back to setImmediate(callback)\nif available, otherwise setTimeout(callback, 0), which means other higher priority\nevents may precede the execution of the callback.\n\nThis is used internally for browser-compatibility purposes.\n\n__Arguments__\n\n* callback - The function to call on a later loop around the event loop.\n\n__Example__\n\n```js\nvar call_order = [];\nasync.nextTick(function(){\n call_order.push('two');\n // call_order now equals ['one','two']\n});\ncall_order.push('one')\n```\n\n\n### times(n, callback)\n\nCalls the callback n times and accumulates results in the same manner\nyou would use with async.map.\n\n__Arguments__\n\n* n - The number of times to run the function.\n* callback - The function to call n times.\n\n__Example__\n\n```js\n// Pretend this is some complicated async factory\nvar createUser = function(id, callback) {\n callback(null, {\n id: 'user' + id\n })\n}\n// generate 5 users\nasync.times(5, function(n, next){\n createUser(n, function(err, user) {\n next(err, user)\n })\n}, function(err, users) {\n // we should now have 5 users\n});\n```\n\n\n### timesSeries(n, callback)\n\nThe same as times only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n\n## Utils\n\n\n### memoize(fn, [hasher])\n\nCaches the results of an async function. When creating a hash to store function\nresults against, the callback is omitted from the hash and an optional hash\nfunction can be used.\n\nThe cache of results is exposed as the `memo` property of the function returned\nby `memoize`.\n\n__Arguments__\n\n* fn - the function you to proxy and cache results from.\n* hasher - an optional function for generating a custom hash for storing\n results, it has all the arguments applied to it apart from the callback, and\n must be synchronous.\n\n__Example__\n\n```js\nvar slow_fn = function (name, callback) {\n // do something\n callback(null, result);\n};\nvar fn = async.memoize(slow_fn);\n\n// fn can now be used as if it were slow_fn\nfn('some name', function () {\n // callback\n});\n```\n\n\n### unmemoize(fn)\n\nUndoes a memoized function, reverting it to the original, unmemoized\nform. Comes handy in tests.\n\n__Arguments__\n\n* fn - the memoized function\n\n\n### log(function, arguments)\n\nLogs the result of an async function to the console. 
Only works in node.js or\nin browsers that support console.log and console.error (such as FF and Chrome).\nIf multiple arguments are returned from the async function, console.log is\ncalled on each argument in order.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, 'hello ' + name);\n }, 1000);\n};\n```\n```js\nnode> async.log(hello, 'world');\n'hello world'\n```\n\n---------------------------------------\n\n\n### dir(function, arguments)\n\nLogs the result of an async function to the console using console.dir to\ndisplay the properties of the resulting object. Only works in node.js or\nin browsers that support console.dir and console.error (such as FF and Chrome).\nIf multiple arguments are returned from the async function, console.dir is\ncalled on each argument in order.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, {hello: name});\n }, 1000);\n};\n```\n```js\nnode> async.dir(hello, 'world');\n{hello: 'world'}\n```\n\n---------------------------------------\n\n\n### noConflict()\n\nChanges the value of async back to its original value, returning a reference to the\nasync object.\n", "readmeFilename": "README.md", "_id": "async@0.2.9", - "dist": { - "shasum": "df63060fbf3d33286a76aaf6d55a2986d9ff8619" - }, - "_from": "async@~0.2.9", - "_resolved": "https://registry.npmjs.org/async/-/async-0.2.9.tgz" + "_from": "async@~0.2.9" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json 2013-10-25 01:45:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -27,10 +27,6 @@ "url": "https://github.com/felixge/node-delayed-stream/issues" }, "_id": "delayed-stream@0.0.5", - "dist": { - "shasum": "baee50472834ab3c3ac481905d546834a548b8fc" - }, "_from": "delayed-stream@0.0.5", - "_resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -28,10 +28,6 @@ "url": "https://github.com/felixge/node-combined-stream/issues" }, 
"_id": "combined-stream@0.0.4", - "dist": { - "shasum": "5c38e18675140e6f8aeac1dfc6fb350f16ac49fc" - }, "_from": "combined-stream@~0.0.4", - "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/form-data/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/form-data/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/form-data/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/form-data/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -41,9 +41,5 @@ "url": "https://github.com/felixge/node-form-data/issues" }, "_id": "form-data@0.1.2", - "dist": { - "shasum": "a2776685729e771eb60b3ee3d290122f78c97da2" - }, - "_from": "form-data@~0.1.0", - "_resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.2.tgz" + "_from": "form-data@~0.1.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/boom/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/boom/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/boom/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/boom/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -42,9 +42,5 @@ "url": "https://github.com/spumko/boom/issues" }, "_id": "boom@0.4.2", - "dist": { - "shasum": "7a636e9ded4efcefb19cef4947a3c67dfaee911b" - }, - "_from": "boom@0.4.x", - "_resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz" + "_from": "boom@0.4.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -43,9 +43,5 @@ "url": "https://github.com/hueniverse/cryptiles/issues" }, "_id": "cryptiles@0.2.2", - "dist": { - "shasum": "ed91ff1f17ad13d3748288594f8a48a0d26f325c" - }, - "_from": "cryptiles@0.2.x", - "_resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz" + "_from": "cryptiles@0.2.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/hoek/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/hoek/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/hoek/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/hoek/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -44,9 +44,5 @@ "url": "https://github.com/spumko/hoek/issues" }, "_id": "hoek@0.9.1", - "dist": { - "shasum": "3d322462badf07716ea7eb85baf88079cddce505" - }, - "_from": "hoek@0.9.x", - "_resolved": 
"https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz" + "_from": "hoek@0.9.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/sntp/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/sntp/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/sntp/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/node_modules/sntp/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -43,9 +43,5 @@ "url": "https://github.com/hueniverse/sntp/issues" }, "_id": "sntp@0.2.4", - "dist": { - "shasum": "fb885f18b0f3aad189f824862536bceeec750900" - }, - "_from": "sntp@0.2.x", - "_resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz" + "_from": "sntp@0.2.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/hawk/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/hawk/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -48,9 +48,5 @@ "url": "https://github.com/hueniverse/hawk/issues" }, "_id": "hawk@1.0.0", - "dist": { - "shasum": "b90bb169807285411da7ffcb8dd2598502d3b52d" - }, - "_from": "hawk@~1.0.0", - "_resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz" + "_from": "hawk@~1.0.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -38,9 +38,5 @@ "url": "https://github.com/mcavage/node-asn1/issues" }, "_id": "asn1@0.1.11", - "dist": { - "shasum": "007e1b8d6667c4cea50ecfbd9aac8a08f599f57d" - }, - "_from": "asn1@0.1.11", - "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + "_from": "asn1@0.1.11" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -16,10 +16,6 @@ "readme": "# node-assert-plus\n\nThis library is a super small wrapper over node's assert module that has two\nthings: (1) the ability to disable assertions with the environment variable\nNODE_NDEBUG, and (2) some API wrappers for argument testing. Like\n`assert.string(myArg, 'myArg')`. 
As a simple example, most of my code looks\nlike this:\n\n var assert = require('assert-plus');\n\n function fooAccount(options, callback) {\n\t assert.object(options, 'options');\n\t\tassert.number(options.id, 'options.id');\n\t\tassert.bool(options.isManager, 'options.isManager');\n\t\tassert.string(options.name, 'options.name');\n\t\tassert.arrayOfString(options.email, 'options.email');\n\t\tassert.func(callback, 'callback');\n\n // Do stuff\n\t\tcallback(null, {});\n }\n\n# API\n\nAll methods that *aren't* part of node's core assert API are simply assumed to\ntake an argument, and then a string 'name' that's not a message; `AssertionError`\nwill be thrown if the assertion fails with a message like:\n\n AssertionError: foo (string) is required\n\tat test (/home/mark/work/foo/foo.js:3:9)\n\tat Object. (/home/mark/work/foo/foo.js:15:1)\n\tat Module._compile (module.js:446:26)\n\tat Object..js (module.js:464:10)\n\tat Module.load (module.js:353:31)\n\tat Function._load (module.js:311:12)\n\tat Array.0 (module.js:484:10)\n\tat EventEmitter._tickCallback (node.js:190:38)\n\nfrom:\n\n function test(foo) {\n\t assert.string(foo, 'foo');\n }\n\nThere you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`:\n\n function test(foo) {\n\t assert.arrayOfString(foo, 'foo');\n }\n\nYou can assert IFF an argument is not `undefined` (i.e., an optional arg):\n\n assert.optionalString(foo, 'foo');\n\nLastly, you can opt-out of assertion checking altogether by setting the\nenvironment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have\nlots of assertions, and don't want to pay `typeof ()` taxes to v8 in\nproduction.\n\nThe complete list of APIs is:\n\n* assert.bool\n* assert.buffer\n* assert.func\n* assert.number\n* assert.object\n* assert.string\n* assert.arrayOfBool\n* assert.arrayOfFunc\n* assert.arrayOfNumber\n* assert.arrayOfObject\n* assert.arrayOfString\n* assert.optionalBool\n* assert.optionalBuffer\n* assert.optionalFunc\n* assert.optionalNumber\n* assert.optionalObject\n* assert.optionalString\n* assert.optionalArrayOfBool\n* assert.optionalArrayOfFunc\n* assert.optionalArrayOfNumber\n* assert.optionalArrayOfObject\n* assert.optionalArrayOfString\n* assert.AssertionError\n* assert.fail\n* assert.ok\n* assert.equal\n* assert.notEqual\n* assert.deepEqual\n* assert.notDeepEqual\n* assert.strictEqual\n* assert.notStrictEqual\n* assert.throws\n* assert.doesNotThrow\n* assert.ifError\n\n# Installation\n\n npm install assert-plus\n\n## License\n\nThe MIT License (MIT)\nCopyright (c) 2012 Mark Cavage\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n## Bugs\n\nSee .\n", "readmeFilename": "README.md", "_id": "assert-plus@0.1.2", - "dist": { - "shasum": "48cf59454db6d5617d855c23049099480ae07f17" - }, "_from": "assert-plus@0.1.2", - "_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.2.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -14,10 +14,6 @@ "readme": "Node-CType is a way to read and write binary data in a structured and easy to use\nformat. Its name comes from the C header file.\n\nTo get started, simply clone the repository or use npm to install it. Once it is\nthere, simply require it.\n\ngit clone git://github.com/rmustacc/node-ctype\nnpm install ctype\nvar mod_ctype = require('ctype')\n\n\nThere are two APIs that you can use, depending on what abstraction you'd like.\nThe low level API lets you read and write individual integers and floats from\nbuffers. The higher level API lets you read and write structures of these. To\nillustrate this, let's look at how we would read and write a binary\nencoded x,y point.\n\nIn C we would define this structure as follows:\n\ntypedef struct point {\n\tuint16_t\tp_x;\n\tuint16_t\tp_y;\n} point_t;\n\nTo read a binary encoded point from a Buffer, we first need to create a CType\nparser (where we specify the endian and other options) and add the typedef.\n\nvar parser = new mod_ctype.Parser({ endian: 'big' });\nparser.typedef('point_t', [\n\t{ x: { type: 'uint16_t' } },\n\t{ y: { type: 'uint16_t' } }\n]);\n\nFrom here, given a buffer buf and an offset into it, we can read a point.\n\nvar out = parser.readData([ { point: { type: 'point_t' } } ], buffer, 0);\nconsole.log(out);\n{ point: { x: 23, y: 42 } }\n\nAnother way to get the same information would be to use the low level methods.\nNote that these require you to manually deal with the offset. Here's how we'd\nget the same values of x and y from the buffer.\n\nvar x = mod_ctype.ruint16(buf, 'big', 0);\nvar y = mod_ctype.ruint16(buf, 'big', 2);\nconsole.log(x + ', ' + y);\n23, 42\n\nThe true power of this API comes from the ability to define and nest typedefs,\njust as you would in C.
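The nesting the ctype README mentions here is worth a concrete illustration. A minimal sketch, assuming the `Parser` API quoted above behaves as described; the `line_t` typedef and the buffer contents are hypothetical, invented for this example:

```javascript
var mod_ctype = require('ctype');

// Same parser and point_t typedef as in the README excerpt above.
var parser = new mod_ctype.Parser({ endian: 'big' });
parser.typedef('point_t', [
  { x: { type: 'uint16_t' } },
  { y: { type: 'uint16_t' } }
]);

// Nest point_t inside another typedef, as a C struct would embed a struct.
parser.typedef('line_t', [
  { start: { type: 'point_t' } },
  { end: { type: 'point_t' } }
]);

// Eight bytes: two big-endian (x, y) points, (23, 42) and (7, 9).
var buf = new Buffer([0, 23, 0, 42, 0, 7, 0, 9]);
var out = parser.readData([ { line: { type: 'line_t' } } ], buf, 0);
console.log(out);
// { line: { start: { x: 23, y: 42 }, end: { x: 7, y: 9 } } }
```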
The following types are defined by default.\nNote that they return a Number, unless indicated otherwise.\n\n * int8_t\n * int16_t\n * int32_t\n * int64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * uint8_t\n * uint16_t\n * uint32_t\n * uint64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * float\n * double\n * char (either returns a buffer with that character or a uint8_t)\n * char[] (returns an object with the buffer and the number of characters read which is either the total amount requested or until the first 0)\n\n\nctf2json integration:\n\nNode-CType supports consuming the output of ctf2json. Once you read in a JSON file,\nall you have to do to add all the definitions it contains is:\n\nvar data, parser;\ndata = JSON.parse(parsedJSONData);\nparser = mod_ctype.parseCTF(data, { endian: 'big' });\n\nFor more documentation, see the file README.old. Full documentation is in the\nprocess of being rewritten as a series of manual pages which will be available\nin the repository and online for viewing.\n\nTo read the ctio manual page, run the following from the root of the workspace:\n\nman -Mman -s 3ctype ctio\n", "readmeFilename": "README", "_id": "ctype@0.5.2", - "dist": { - "shasum": "a228fd6c5ee38f525010c240d8da14209e0c25b2" - }, "_from": "ctype@0.5.2", - "_resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.2.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/http-signature/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/http-signature/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -31,9 +31,5 @@ "url": "https://github.com/joyent/node-http-signature/issues" }, "_id": "http-signature@0.10.0", - "dist": { - "shasum": "624c92d64e2b86416c8be76681dec50f5e5bea0b" - }, - "_from": "http-signature@~0.10.0", - "_resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.0.tgz" + "_from": "http-signature@~0.10.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/json-stringify-safe/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/json-stringify-safe/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/json-stringify-safe/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/json-stringify-safe/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -28,9 +28,5 @@ "url": "https://github.com/isaacs/json-stringify-safe/issues" }, "_id": "json-stringify-safe@5.0.0", - "dist": { - "shasum": "42de0e4afbc779e984da93cb0fafae2e647f0223" - }, - "_from": "json-stringify-safe@~5.0.0", - "_resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz" + "_from": "json-stringify-safe@~5.0.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/node-uuid/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/node-uuid/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/node-uuid/package.json 2013-10-25
01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/node-uuid/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -30,10 +30,6 @@ "url": "https://github.com/broofa/node-uuid/issues" }, "_id": "node-uuid@1.4.1", - "dist": { - "shasum": "ebcc5c713c080b466142b080f0debc063b42e125" - }, "_from": "node-uuid@~1.4.0", - "_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.1.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/oauth-sign/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/oauth-sign/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/oauth-sign/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/oauth-sign/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -26,9 +26,5 @@ "url": "https://github.com/mikeal/oauth-sign/issues" }, "_id": "oauth-sign@0.3.0", - "dist": { - "shasum": "ede953b25907f6c2353888b62a2f8a0297f69f1b" - }, - "_from": "oauth-sign@~0.3.0", - "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz" + "_from": "oauth-sign@~0.3.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/qs/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/qs/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/qs/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/qs/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -33,9 +33,5 @@ "url": "https://github.com/visionmedia/node-querystring/issues" }, "_id": "qs@0.6.5", - "dist": { - "shasum": "5172fe6969e83a8bf51d3c72aa973a0dcafefe2e" - }, - "_from": "qs@~0.6.0", - "_resolved": "https://registry.npmjs.org/qs/-/qs-0.6.5.tgz" + "_from": "qs@~0.6.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/tunnel-agent/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/tunnel-agent/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/node_modules/tunnel-agent/package.json 2013-10-25 01:45:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/node_modules/tunnel-agent/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -23,10 +23,6 @@ "url": "https://github.com/mikeal/tunnel-agent/issues" }, "_id": "tunnel-agent@0.3.0", - "dist": { - "shasum": "9327e5deaa41ffba0b9279555829afe6758aea9e" - }, "_from": "tunnel-agent@~0.3.0", - "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.3.0.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/request/package.json 2013-10-25 01:44:59.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/request/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -43,9 +43,5 @@ "readme": "# Request -- Simplified HTTP client\n\n[![NPM](https://nodei.co/npm/request.png)](https://nodei.co/npm/request/)\n\n## Super simple to use\n\nRequest is designed to be the simplest way possible to make http calls. 
It supports HTTPS and follows redirects by default.\n\n```javascript\nvar request = require('request');\nrequest('http://www.google.com', function (error, response, body) {\n if (!error && response.statusCode == 200) {\n console.log(body) // Print the google web page.\n }\n})\n```\n\n## Streaming\n\nYou can stream any response to a file stream.\n\n```javascript\nrequest('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))\n```\n\nYou can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types, in this case `application/json`, and use the proper content-type in the PUT request if one is not already provided in the headers.\n\n```javascript\nfs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))\n```\n\nRequest can also pipe to itself. When doing so the content-type and content-length will be preserved in the PUT headers.\n\n```javascript\nrequest.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))\n```\n\nNow let's get fancy.\n\n```javascript\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n if (req.method === 'PUT') {\n req.pipe(request.put('http://mysite.com/doodle.png'))\n } else if (req.method === 'GET' || req.method === 'HEAD') {\n request.get('http://mysite.com/doodle.png').pipe(resp)\n }\n }\n})\n```\n\nYou can also pipe() from an http.ServerRequest instance and to an http.ServerResponse instance. The HTTP method and headers will be sent as well as the entity-body data. Which means that, if you don't really care about security, you can do:\n\n```javascript\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n var x = request('http://mysite.com/doodle.png')\n req.pipe(x)\n x.pipe(resp)\n }\n})\n```\n\nAnd since pipe() returns the destination stream in node 0.5.x you can do one line proxying :)\n\n```javascript\nreq.pipe(request('http://mysite.com/doodle.png')).pipe(resp)\n```\n\nAlso, none of this new functionality conflicts with request's previous features, it just expands them.\n\n```javascript\nvar r = request.defaults({'proxy':'http://localproxy.com'})\n\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n r.get('http://google.com/doodle.png').pipe(resp)\n }\n})\n```\nYou can still use intermediate proxies, the requests will still follow HTTP redirects, etc.\n\n## Forms\n\n`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API.\n\nURL-encoded forms are simple:\n\n```javascript\nrequest.post('http://service.com/upload', {form:{key:'value'}})\n// or\nrequest.post('http://service.com/upload').form({key:'value'})\n```\n\nFor `multipart/form-data` we use the [form-data](https://github.com/felixge/node-form-data) library by [@felixge](https://github.com/felixge).
You don't need to worry about piping the form object or setting the headers, `request` will handle that for you.\n\n```javascript\nvar r = request.post('http://service.com/upload')\nvar form = r.form()\nform.append('my_field', 'my_value')\nform.append('my_buffer', new Buffer([1, 2, 3]))\nform.append('my_file', fs.createReadStream(path.join(__dirname, 'doodle.png')))\nform.append('remote_file', request('http://google.com/doodle.png'))\n```\n\n## HTTP Authentication\n\n```javascript\nrequest.get('http://some.server.com/').auth('username', 'password', false);\n// or\nrequest.get('http://some.server.com/', {\n 'auth': {\n 'user': 'username',\n 'pass': 'password',\n 'sendImmediately': false\n }\n});\n```\n\nIf passed as an option, `auth` should be a hash containing values `user` || `username`, `password` || `pass`, and `sendImmediately` (optional). The method form takes parameters `auth(username, password, sendImmediately)`.\n\n`sendImmediately` defaults to true, which will cause a basic authentication header to be sent. If `sendImmediately` is `false`, then `request` will retry with a proper authentication header after receiving a 401 response from the server (which must contain a `WWW-Authenticate` header indicating the required authentication method).\n\nDigest authentication is supported, but it only works with `sendImmediately` set to `false` (otherwise `request` will send basic authentication on the initial request, which will probably cause the request to fail).\n\n## OAuth Signing\n\n```javascript\n// Twitter OAuth\nvar qs = require('querystring')\n , oauth =\n { callback: 'http://mysite.com/callback/'\n , consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n }\n , url = 'https://api.twitter.com/oauth/request_token'\n ;\nrequest.post({url:url, oauth:oauth}, function (e, r, body) {\n // Ideally, you would take the body in the response\n // and construct a URL that a user clicks on (like a sign in button).\n // The verifier is only available in the response after a user has\n // verified with twitter that they are authorizing your app.\n var access_token = qs.parse(body)\n , oauth =\n { consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n , token: access_token.oauth_token\n , verifier: access_token.oauth_verifier\n }\n , url = 'https://api.twitter.com/oauth/access_token'\n ;\n request.post({url:url, oauth:oauth}, function (e, r, body) {\n var perm_token = qs.parse(body)\n , oauth =\n { consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n , token: perm_token.oauth_token\n , token_secret: perm_token.oauth_token_secret\n }\n , url = 'https://api.twitter.com/1/users/show.json?'\n , params =\n { screen_name: perm_token.screen_name\n , user_id: perm_token.user_id\n }\n ;\n url += qs.stringify(params)\n request.get({url:url, oauth:oauth, json:true}, function (e, r, user) {\n console.log(user)\n })\n })\n})\n```\n\n\n\n### request(options, callback)\n\nThe first argument can be either a url or an options object. The only required option is uri, all others are optional.\n\n* `uri` || `url` - fully qualified uri or a parsed url object from url.parse()\n* `qs` - object containing querystring values to be appended to the uri\n* `method` - http method, defaults to GET\n* `headers` - http headers, defaults to {}\n* `body` - entity body for PATCH, POST and PUT requests.
Must be buffer or string.\n* `form` - when passed an object this will set `body` to a querystring representation of value and adds `Content-type: application/x-www-form-urlencoded; charset=utf-8` header. When passed no options, a FormData instance is returned that will be piped to the request.\n* `auth` - A hash containing values `user` || `username`, `password` || `pass`, and `sendImmediately` (optional). See documentation above.\n* `json` - sets `body` to a JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as json.\n* `multipart` - (experimental) array of objects which contain their own headers and `body` attribute. Sends `multipart/related` request. See example below.\n* `followRedirect` - follow HTTP 3xx responses as redirects. Defaults to true.\n* `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects. Defaults to false.\n* `maxRedirects` - the maximum number of redirects to follow, defaults to 10.\n* `encoding` - Encoding to be used on `setEncoding` of response data. If set to `null`, the body is returned as a Buffer.\n* `pool` - A hash object containing the agents for these requests. If omitted this request will use the global pool which is set to node's default maxSockets.\n* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.\n* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request.\n* `proxy` - An HTTP proxy to be used. Supports proxy auth with Basic Auth the same way it's supported with the `url` parameter, by embedding the auth info in the uri.\n* `oauth` - Options for OAuth HMAC-SHA1 signing, see documentation above.\n* `hawk` - Options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example).\n* `strictSSL` - Set to `true` to require that SSL certificates be valid. Note: to use your own certificate authority, you need to specify an agent that was created with that ca as an option.\n* `jar` - Set to `true` if you want cookies to be remembered for future use, or define your custom cookie jar (see examples section)\n* `aws` - object containing aws signing information, should have the properties `key` and `secret` as well as `bucket` unless you're specifying your bucket as part of the path, or you are making a request that doesn't use a bucket (i.e. GET Services)\n* `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options.\n* `localAddress` - Local interface to bind for network connections.\n\n\nThe callback argument gets 3 arguments. The first is an error when applicable (usually from the http.Client option, not the http.ClientRequest object). The second is an http.ClientResponse object.
The third is the response body String or Buffer.\n\n## Convenience methods\n\nThere are also shorthand methods for different HTTP METHODs and some other conveniences.\n\n### request.defaults(options)\n\nThis method returns a wrapper around the normal request API that defaults to whatever options you pass in to it.\n\n### request.put\n\nSame as request() but defaults to `method: \"PUT\"`.\n\n```javascript\nrequest.put(url)\n```\n\n### request.patch\n\nSame as request() but defaults to `method: \"PATCH\"`.\n\n```javascript\nrequest.patch(url)\n```\n\n### request.post\n\nSame as request() but defaults to `method: \"POST\"`.\n\n```javascript\nrequest.post(url)\n```\n\n### request.head\n\nSame as request() but defaults to `method: \"HEAD\"`.\n\n```javascript\nrequest.head(url)\n```\n\n### request.del\n\nSame as request() but defaults to `method: \"DELETE\"`.\n\n```javascript\nrequest.del(url)\n```\n\n### request.get\n\nAlias to normal request method for uniformity.\n\n```javascript\nrequest.get(url)\n```\n### request.cookie\n\nFunction that creates a new cookie.\n\n```javascript\nrequest.cookie('cookie_string_here')\n```\n### request.jar\n\nFunction that creates a new cookie jar.\n\n```javascript\nrequest.jar()\n```\n\n\n## Examples:\n\n```javascript\n var request = require('request')\n , rand = Math.floor(Math.random()*100000000).toString()\n ;\n request(\n { method: 'PUT'\n , uri: 'http://mikeal.iriscouch.com/testjs/' + rand\n , multipart:\n [ { 'content-type': 'application/json'\n , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})\n }\n , { body: 'I am an attachment' }\n ]\n }\n , function (error, response, body) {\n if(response.statusCode == 201){\n console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)\n } else {\n console.log('error: '+ response.statusCode)\n console.log(body)\n }\n }\n )\n```\nCookies are disabled by default (else, they would be used in subsequent requests). 
To enable cookies set jar to true (either in defaults or in the options sent).\n\n```javascript\nvar request = request.defaults({jar: true})\nrequest('http://www.google.com', function () {\n request('http://images.google.com')\n})\n```\n\nIf you want to use a custom cookie jar (instead of letting request use its own global cookie jar) you do so by setting the jar default or by specifying it as an option:\n\n```javascript\nvar j = request.jar()\nvar request = request.defaults({jar:j})\nrequest('http://www.google.com', function () {\n request('http://images.google.com')\n})\n```\nOR\n\n```javascript\nvar j = request.jar()\nvar cookie = request.cookie('your_cookie_here')\nj.add(cookie)\nrequest({url: 'http://www.google.com', jar: j}, function () {\n request('http://images.google.com')\n})\n```\n", "readmeFilename": "README.md", "_id": "request@2.26.0", - "dist": { - "shasum": "ec88bc2488e82fb9aba94367a96d36d1a0e8f3d6" - }, - "_from": "request@~2.26.0", - "_resolved": "https://registry.npmjs.org/request/-/request-2.26.0.tgz" + "_from": "request@~2.26.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/srs/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/srs/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/srs/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/srs/package.json 2013-10-25 23:13:22.000000000 +0000 @@ -37,9 +37,5 @@ "url": "https://github.com/springmeyer/node-srs/issues" }, "_id": "srs@0.3.6", - "dist": { - "shasum": "36b4e892b120d76bbfb907472a55ddd5773f5af2" - }, - "_from": "srs@~0.3.3", - "_resolved": "https://registry.npmjs.org/srs/-/srs-0.3.6.tgz" + "_from": "srs@~0.3.6" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/zipfile/package.json tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/zipfile/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/node_modules/zipfile/package.json 2013-10-25 01:45:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/node_modules/zipfile/package.json 2013-10-25 23:13:21.000000000 +0000 @@ -51,9 +51,5 @@ "url": "https://github.com/springmeyer/node-zipfile/issues" }, "_id": "zipfile@0.4.2", - "dist": { - "shasum": "e18d1a39c4f444a258d2fb85a8b9a64c0e400fc4" - }, - "_from": "zipfile@~0.4.1", - "_resolved": "https://registry.npmjs.org/zipfile/-/zipfile-0.4.2.tgz" + "_from": "zipfile@~0.4.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/millstone/package.json tilemill-0.10.1~saucy10/node_modules/millstone/package.json --- tilemill-0.10.1~saucy9/node_modules/millstone/package.json 2013-10-25 01:44:52.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/millstone/package.json 2013-10-25 23:13:20.000000000 +0000 @@ -1,6 +1,6 @@ { "name": "millstone", - "version": "0.6.5", + "version": "0.6.8", "main": "./lib/millstone.js", "description": "Prepares datasources in an MML file for consumption in Mapnik", "url": "https://github.com/mapbox/millstone", @@ -23,8 +23,8 @@ "step": "~0.0.5", "generic-pool": "~2.0.3", "request": "~2.26.0", - "srs": "~0.3.3", - "zipfile": "~0.4.1", + "srs": "~0.3.6", + "zipfile": "~0.4.2", "sqlite3": "2.x", "mime": "~1.2.9", "mkdirp": "~0.3.3", @@ -47,10 +47,10 @@ "bugs": { "url": "https://github.com/mapbox/millstone/issues" }, - "_id": "millstone@0.6.5", + "_id": "millstone@0.6.8", "dist": { - "shasum": "06c737d2c9f9de2d785bb7fd3c05c522e70fb478" + "shasum": "dcefacc02d08c2d65a9a1244e389d569f643939b" }, - "_from": "millstone@~0.6.4", -
"_resolved": "https://registry.npmjs.org/millstone/-/millstone-0.6.5.tgz" + "_from": "millstone@~0.6.8", + "_resolved": "https://registry.npmjs.org/millstone/-/millstone-0.6.8.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/.npmignore tilemill-0.10.1~saucy10/node_modules/mocha/.npmignore --- tilemill-0.10.1~saucy9/node_modules/mocha/.npmignore 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -test -examples -*.sock -lib-cov -editors -support diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/.travis.yml tilemill-0.10.1~saucy10/node_modules/mocha/.travis.yml --- tilemill-0.10.1~saucy9/node_modules/mocha/.travis.yml 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/.travis.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -language: node_js -node_js: - - 0.4 - - 0.6 - - 0.8 \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/History.md tilemill-0.10.1~saucy10/node_modules/mocha/History.md --- tilemill-0.10.1~saucy9/node_modules/mocha/History.md 2012-10-02 16:04:44.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/History.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,457 +0,0 @@ - -1.6.0 / 2012-10-02 -================== - - * add object diffs when `err.showDiff` is present - * add hiding of empty suites when pass/failures are toggled - * add faster `.length` checks to `checkGlobals()` before performing the filter - -1.5.0 / 2012-09-21 -================== - - * add `ms()` to `.slow()` and `.timeout()` - * add `Mocha#checkLeaks()` to re-enable global leak checks - * add `this.slow()` option [aheckmann] - * add tab, CR, LF to error diffs for now - * add faster `.checkGlobals()` solution [guille] - * remove `fn.call()` from reduce util - * remove `fn.call()` from filter util - * fix forEach. Closes #582 - * fix relaying of signals [TooTallNate] - * fix TAP reporter grep number - -1.4.2 / 2012-09-01 -================== - - * add support to multiple `Mocha#globals()` calls, and strings - * add `mocha.reporter()` constructor support [jfirebaugh] - * add `mocha.timeout()` - * move query-string parser to utils.js - * move highlight code to utils.js - * fix third-party reporter support [exogen] - * fix client-side API to match node-side [jfirebaugh] - * fix mocha in iframe [joliss] - -1.4.1 / 2012-08-28 -================== - - * add missing `Markdown` export - * fix `Mocha#grep()`, escape regexp strings - * fix reference error when `devicePixelRatio` is not defined. Closes #549 - -1.4.0 / 2012-08-22 -================== - - * add mkdir -p to `mocha init`. Closes #539 - * add `.only()`. Closes #524 - * add `.skip()`. Closes #524 - * change str.trim() to use utils.trim(). Closes #533 - * fix HTML progress indicator retina display - * fix url-encoding of click-to-grep HTML functionality - -1.3.2 / 2012-08-01 -================== - - * fix exports double-execution regression. Closes #531 - -1.3.1 / 2012-08-01 -================== - - * add passes/failures toggling to HTML reporter - * add pending state to `xit()` and `xdescribe()` [Brian Moore] - * add the @charset "UTF-8"; to fix #522 with FireFox. [Jonathan Creamer] - * add border-bottom to #stats links - * add check for runnable in `Runner#uncaught()`. Closes #494 - * add 0.4 and 0.6 back to travis.yml - * add `-E, --growl-errors` to growl on failures only - * add prefixes to debug() names. 
Closes #497 - * add `Mocha#invert()` to js api - * change dot reporter to use sexy unicode dots - * fix error when clicking pending test in HTML reporter - * fix `make tm` - -1.3.0 / 2012-07-05 -================== - - * add window scrolling to `HTML` reporter - * add v8 `--trace-*` option support - * add support for custom reports via `--reporter MODULE` - * add `--invert` switch to invert `--grep` matches - * fix export of `Nyan` reporter. Closes #495 - * fix escaping of `HTML` suite titles. Closes #486 - * fix `done()` called multiple times with an error test - * change `--grep` - regexp escape the input - -1.2.2 / 2012-06-28 -================== - - * Added 0.8.0 support - -1.2.1 / 2012-06-25 -================== - - * Added `this.test.error(err)` support to after each hooks. Closes #287 - * Added: export top-level suite on global mocha object (mocha.suite). Closes #448 - * Fixed `js` code block format error in markdown reporter - * Fixed deprecation warning when using `path.existsSync` - * Fixed --globals with wildcard - * Fixed chars in nyan when his head moves back - * Remove `--growl` from test/mocha.opts. Closes #289 - -1.2.0 / 2012-06-17 -================== - - * Added `nyan` reporter [Atsuya Takagi] - * Added `mocha init ` to copy client files - * Added "specify" synonym for "it" [domenic] - * Added global leak wildcard support [nathanbowser] - * Fixed runner emitter leak. closes #432 - * Fixed omission of .js extension. Closes #454 - -1.1.0 / 2012-05-30 -================== - - * Added: check each `mocha(1)` arg for directories to walk - * Added `--recursive` [tricknotes] - * Added `context` for BDD [hokaccha] - * Added styling for new clickable titles - * Added clickable suite titles to HTML reporter - * Added warning when strings are thrown as errors - * Changed: green arrows again in HTML reporter styling - * Changed ul/li elements instead of divs for better copy-and-pasting [joliss] - * Fixed issue #325 - add better grep support to js api - * Fixed: save timer references to avoid Sinon interfering. - -1.0.3 / 2012-04-30 -================== - - * Fixed string diff newlines - * Fixed: removed mocha.css target. Closes #401 - -1.0.2 / 2012-04-25 -================== - - * Added HTML reporter duration. Closes #47 - * Fixed: one postMessage event listener [exogen] - * Fixed: allow --globals to be used multiple times. Closes #100 [brendannee] - * Fixed #158: removes jquery include from browser tests - * Fixed grep. Closes #372 [brendannee] - * Fixed #166 - When grepping don't display the empty suites - * Removed test/browser/style.css. Closes #385 - -1.0.1 / 2012-04-04 -================== - - * Fixed `.timeout()` in hooks - * Fixed: allow callback for `mocha.run()` in client version - * Fixed browser hook error display. Closes #361 - -1.0.0 / 2012-03-24 -================== - - * Added js API. Closes #265 - * Added: initial run of tests with `--watch`. Closes #345 - * Added: mark `location` as a global on the CS. Closes #311 - * Added `markdown` reporter (github flavour) - * Added: scrolling menu to coverage.html. Closes #335 - * Added source line to html report for Safari [Tyson Tate] - * Added "min" reporter, useful for `--watch` [Jakub Nešetřil] - * Added support for arbitrary compilers via . Closes #338 [Ian Young] - * Added Teamcity export to lib/reporters/index [Michael Riley] - * Fixed chopping of first char in error reporting. 
Closes #334 [reported by topfunky] - * Fixed terrible FF / Opera stack traces - -0.14.1 / 2012-03-06 -================== - - * Added lib-cov to _.npmignore_ - * Added reporter to `mocha.run([reporter])` as argument - * Added some margin-top to the HTML reporter - * Removed jQuery dependency - * Fixed `--watch`: purge require cache. Closes #266 - -0.14.0 / 2012-03-01 -================== - - * Added string diff support for terminal reporters - -0.13.0 / 2012-02-23 -================== - - * Added preliminary test coverage support. Closes #5 - * Added `HTMLCov` reporter - * Added `JSONCov` reporter [kunklejr] - * Added `xdescribe()` and `xit()` to the BDD interface. Closes #263 (docs * Changed: make json reporter output pretty json - * Fixed node-inspector support, swapped `--debug` for `debug` to match node. -needed) -Closes #247 - -0.12.1 / 2012-02-14 -================== - - * Added `npm docs mocha` support [TooTallNate] - * Added a `Context` object used for hook and test-case this. Closes #253 - * Fixed `Suite#clone()` `.ctx` reference. Closes #262 - -0.12.0 / 2012-02-02 -================== - - * Added .coffee `--watch` support. Closes #242 - * Added support to `--require` files relative to the CWD. Closes #241 - * Added quick n dirty syntax highlighting. Closes #248 - * Changed: made HTML progress indicator smaller - * Fixed xunit errors attribute [dhendo] - -0.10.2 / 2012-01-21 -================== - - * Fixed suite count in reporter stats. Closes #222 - * Fixed `done()` after timeout error reporting [Phil Sung] - * Changed the 0-based errors to 1 - -0.10.1 / 2012-01-17 -================== - - * Added support for node 0.7.x - * Fixed absolute path support. Closes #215 [kompiro] - * Fixed `--no-colors` option [Jussi Virtanen] - * Fixed Arial CSS typo in the correct file - -0.10.0 / 2012-01-13 -================== - - * Added `-b, --bail` to exit on first exception [guillermo] - * Added support for `-gc` / `--expose-gc` [TooTallNate] - * Added `qunit`-inspired interface - * Added MIT LICENSE. Closes #194 - * Added: `--watch` all .js in the CWD. Closes #139 - * Fixed `self.test` reference in runner. Closes #189 - * Fixed double reporting of uncaught exceptions after timeout. Closes #195 - -0.8.2 / 2012-01-05 -================== - - * Added test-case context support. Closes #113 - * Fixed exit status. Closes #187 - * Update commander. Closes #190 - -0.8.1 / 2011-12-30 -================== - - * Fixed reporting of uncaught exceptions. Closes #183 - * Fixed error message defaulting [indutny] - * Changed mocha(1) from bash to node for windows [Nathan Rajlich] - -0.8.0 / 2011-12-28 -================== - - * Added `XUnit` reporter [FeeFighters/visionmedia] - * Added `say(1)` notification support [Maciej Małecki] - * Changed: fail when done() is invoked with a non-Error. Closes #171 - * Fixed `err.stack`, defaulting to message. Closes #180 - * Fixed: `make tm` mkdir -p the dest. Closes #137 - * Fixed mocha(1) --help bin name - * Fixed `-d` for `--debug` support - -0.7.1 / 2011-12-22 -================== - - * Removed `mocha-debug(1)`, use `mocha --debug` - * Fixed CWD relative requires - * Fixed growl issue on windows [Raynos] - * Fixed: platform specific line endings [TooTallNate] - * Fixed: escape strings in HTML reporter. Closes #164 - -0.7.0 / 2011-12-18 -================== - - * Added support for IE{7,8} [guille] - * Changed: better browser nextTick implementation [guille] - -0.6.0 / 2011-12-18 -================== - - * Added setZeroTimeout timeout for browser (nicer stack traces). 
Closes #153 - * Added "view source" on hover for HTML reporter to make it obvious - * Changed: replace custom growl with growl lib - * Fixed duplicate reporting for HTML reporter. Closes #154 - * Fixed silent hook errors in the HTML reporter. Closes #150 - -0.5.0 / 2011-12-14 -================== - - * Added: push node_modules directory onto module.paths for relative require Closes #93 - * Added teamcity reporter [blindsey] - * Fixed: recover from uncaught exceptions for tests. Closes #94 - * Fixed: only emit "test end" for uncaught within test, not hook - -0.4.0 / 2011-12-14 -================== - - * Added support for test-specific timeouts via `this.timeout(0)`. Closes #134 - * Added guillermo's client-side EventEmitter. Closes #132 - * Added progress indicator to the HTML reporter - * Fixed slow browser tests. Closes #135 - * Fixed "suite" color for light terminals - * Fixed `require()` leak spotted by [guillermo] - -0.3.6 / 2011-12-09 -================== - - * Removed suite merging (for now) - -0.3.5 / 2011-12-08 -================== - - * Added support for `window.onerror` [guillermo] - * Fixed: clear timeout on uncaught exceptions. Closes #131 [guillermo] - * Added `mocha.css` to PHONY list. - * Added `mocha.js` to PHONY list. - -0.3.4 / 2011-12-08 -================== - - * Added: allow `done()` to be called with non-Error - * Added: return Runner from `mocha.run()`. Closes #126 - * Fixed: run afterEach even on failures. Closes #125 - * Fixed clobbering of current runnable. Closes #121 - -0.3.3 / 2011-12-08 -================== - - * Fixed hook timeouts. Closes #120 - * Fixed uncaught exceptions in hooks - -0.3.2 / 2011-12-05 -================== - - * Fixed weird reporting when `err.message` is not present - -0.3.1 / 2011-12-04 -================== - - * Fixed hook event emitter leak. Closes #117 - * Fixed: export `Spec` constructor. Closes #116 - -0.3.0 / 2011-12-04 -================== - - * Added `-w, --watch`. Closes #72 - * Added `--ignore-leaks` to ignore global leak checking - * Added browser `?grep=pattern` support - * Added `--globals ` to specify accepted globals. Closes #99 - * Fixed `mocha-debug(1)` on some systems. Closes #232 - * Fixed growl total, use `runner.total` - -0.2.0 / 2011-11-30 -================== - - * Added `--globals ` to specify accepted globals. Closes #99 - * Fixed funky highlighting of messages. Closes #97 - * Fixed `mocha-debug(1)`. Closes #232 - * Fixed growl total, use runner.total - -0.1.0 / 2011-11-29 -================== - - * Added `suiteSetup` and `suiteTeardown` to TDD interface [David Henderson] - * Added growl icons. Closes #84 - * Fixed coffee-script support - -0.0.8 / 2011-11-25 -================== - - * Fixed: use `Runner#total` for accurate reporting - -0.0.7 / 2011-11-25 -================== - - * Added `Hook` - * Added `Runnable` - * Changed: `Test` is `Runnable` - * Fixed global leak reporting in hooks - * Fixed: > 2 calls to done() only report the error once - * Fixed: clear timer on failure. Closes #80 - -0.0.6 / 2011-11-25 -================== - - * Fixed return on immediate async error. Closes #80 - -0.0.5 / 2011-11-24 -================== - - * Fixed: make mocha.opts whitespace less picky [kkaefer] - -0.0.4 / 2011-11-24 -================== - - * Added `--interfaces` - * Added `--reporters` - * Added `-c, --colors`. 
Closes #69 - * Fixed hook timeouts - -0.0.3 / 2011-11-23 -================== - - * Added `-C, --no-colors` to explicitly disable - * Added coffee-script support - -0.0.2 / 2011-11-22 -================== - - * Fixed global leak detection due to Safari bind() change - * Fixed: escape html entities in Doc reporter - * Fixed: escape html entities in HTML reporter - * Fixed pending test support for HTML reporter. Closes #66 - -0.0.1 / 2011-11-22 -================== - - * Added `--timeout` second shorthand support, ex `--timeout 3s`. - * Fixed "test end" event for uncaughtExceptions. Closes #61 - -0.0.1-alpha6 / 2011-11-19 -================== - - * Added travis CI support (needs enabling when public) - * Added preliminary browser support - * Added `make mocha.css` target. Closes #45 - * Added stack trace to TAP errors. Closes #52 - * Renamed tearDown to teardown. Closes #49 - * Fixed: cascading hooksc. Closes #30 - * Fixed some colors for non-tty - * Fixed errors thrown in sync test-cases due to nextTick - * Fixed Base.window.width... again give precedence to 0.6.x - -0.0.1-alpha5 / 2011-11-17 -================== - - * Added `doc` reporter. Closes #33 - * Added suite merging. Closes #28 - * Added TextMate bundle and `make tm`. Closes #20 - -0.0.1-alpha4 / 2011-11-15 -================== - - * Fixed getWindowSize() for 0.4.x - -0.0.1-alpha3 / 2011-11-15 -================== - - * Added `-s, --slow ` to specify "slow" test threshold - * Added `mocha-debug(1)` - * Added `mocha.opts` support. Closes #31 - * Added: default [files] to _test/*.js_ - * Added protection against multiple calls to `done()`. Closes #35 - * Changed: bright yellow for slow Dot reporter tests - -0.0.1-alpha1 / 2011-11-08 -================== - - * Missed this one :) - -0.0.1-alpha1 / 2011-11-08 -================== - - * Initial release diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/LICENSE 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -(The MIT License) - -Copyright (c) 20011-2012 TJ Holowaychuk - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/Makefile tilemill-0.10.1~saucy10/node_modules/mocha/Makefile --- tilemill-0.10.1~saucy9/node_modules/mocha/Makefile 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/Makefile 1970-01-01 00:00:00.000000000 +0000 @@ -1,120 +0,0 @@ - -REPORTER ?= dot -TM_DEST = ~/Library/Application\ Support/TextMate/Bundles -TM_BUNDLE = JavaScript\ mocha.tmbundle -SRC = $(shell find lib -name "*.js" -type f | sort) -SUPPORT = $(wildcard support/*.js) - -all: mocha.js - -mocha.js: $(SRC) $(SUPPORT) - @node support/compile $(SRC) - @cat \ - support/head.js \ - _mocha.js \ - support/tail.js \ - support/foot.js \ - > mocha.js - -clean: - rm -f mocha.js - rm -fr lib-cov - rm -f coverage.html - -test-cov: lib-cov - @COV=1 $(MAKE) test REPORTER=html-cov > coverage.html - -lib-cov: - @rm -fr ./$@ - @jscoverage lib $@ - -test: test-unit - -test-all: test-bdd test-tdd test-qunit test-exports test-unit test-grep test-jsapi test-compilers - -test-jsapi: - @node test/jsapi - -test-unit: - @./bin/mocha \ - --reporter $(REPORTER) \ - test/acceptance/*.js \ - test/*.js - -test-compilers: - @./bin/mocha \ - --reporter $(REPORTER) \ - --compilers coffee:coffee-script,foo:./test/compiler/foo \ - test/acceptance/test.coffee \ - test/acceptance/test.foo - -test-bdd: - @./bin/mocha \ - --reporter $(REPORTER) \ - --ui bdd \ - test/acceptance/interfaces/bdd - -test-tdd: - @./bin/mocha \ - --reporter $(REPORTER) \ - --ui tdd \ - test/acceptance/interfaces/tdd - -test-qunit: - @./bin/mocha \ - --reporter $(REPORTER) \ - --ui qunit \ - test/acceptance/interfaces/qunit - -test-exports: - @./bin/mocha \ - --reporter $(REPORTER) \ - --ui exports \ - test/acceptance/interfaces/exports - -test-grep: - @./bin/mocha \ - --reporter $(REPORTER) \ - --grep fast \ - test/acceptance/misc/grep - -test-invert: - @./bin/mocha \ - --reporter $(REPORTER) \ - --grep slow \ - --invert \ - test/acceptance/misc/grep - -test-bail: - @./bin/mocha \ - --reporter $(REPORTER) \ - --bail \ - test/acceptance/misc/bail - -non-tty: - @./bin/mocha \ - --reporter dot \ - test/acceptance/interfaces/bdd 2>&1 > /tmp/dot.out - - @echo dot: - @cat /tmp/dot.out - - @./bin/mocha \ - --reporter list \ - test/acceptance/interfaces/bdd 2>&1 > /tmp/list.out - - @echo list: - @cat /tmp/list.out - - @./bin/mocha \ - --reporter spec \ - test/acceptance/interfaces/bdd 2>&1 > /tmp/spec.out - - @echo spec: - @cat /tmp/spec.out - -tm: - mkdir -p $(TM_DEST) - cp -fr editors/$(TM_BUNDLE) $(TM_DEST) - -.PHONY: test-cov test-jsapi test-compilers watch test test-all test-bdd test-tdd test-qunit test-exports test-unit non-tty test-grep tm clean diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/Readme.md tilemill-0.10.1~saucy10/node_modules/mocha/Readme.md --- tilemill-0.10.1~saucy9/node_modules/mocha/Readme.md 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/Readme.md 2013-06-13 12:45:20.000000000 +0000 @@ -1,41 +1,124 @@ [![Build Status](https://secure.travis-ci.org/visionmedia/mocha.png)](http://travis-ci.org/visionmedia/mocha) - [![Mocha test framework](http://f.cl.ly/items/3l1k0n2A1U3M1I1L210p/Screen%20Shot%202012-02-24%20at%202.21.43%20PM.png)](http://visionmedia.github.com/mocha) + [![Mocha test framework](http://f.cl.ly/items/3l1k0n2A1U3M1I1L210p/Screen%20Shot%202012-02-24%20at%202.21.43%20PM.png)](http://visionmedia.github.io/mocha) - Mocha is a simple, flexible, fun JavaScript test framework for node.js and the browser. 
For more information view the [documentation](http://visionmedia.github.com/mocha). + Mocha is a simple, flexible, fun JavaScript test framework for node.js and the browser. For more information view the [documentation](http://visionmedia.github.io/mocha). ## Contributors ``` -project: mocha -commits: 502 -files : 86 -authors: - 352 Tj Holowaychuk 70.1% - 98 TJ Holowaychuk 19.5% - 21 Guillermo Rauch 4.2% - 6 James Carr 1.2% - 4 Joshua Krall 0.8% - 3 Ben Lindsey 0.6% - 3 Nathan Rajlich 0.6% - 2 FARKAS Máté 0.4% - 2 Quang Van 0.4% - 1 Steve Mason 0.2% - 1 Yuest Wang 0.2% - 1 hokaccha 0.2% - 1 David Henderson 0.2% - 1 Fedor Indutny 0.2% - 1 Fredrik Lindin 0.2% - 1 Harry Brundage 0.2% - 1 Konstantin Käfer 0.2% - 1 Maciej Małecki 0.2% - 1 Raynos 0.2% - 1 Ryunosuke SATO 0.2% + + project : mocha + repo age : 1 year, 7 months + active : 272 days + commits : 1116 + files : 123 + authors : + 504 TJ Holowaychuk 45.2% + 389 Tj Holowaychuk 34.9% + 31 Guillermo Rauch 2.8% + 13 Attila Domokos 1.2% + 9 John Firebaugh 0.8% + 8 Jo Liss 0.7% + 7 Nathan Rajlich 0.6% + 6 James Carr 0.5% + 6 Brendan Nee 0.5% + 5 Aaron Heckmann 0.4% + 4 hokaccha 0.4% + 4 Xavier Antoviaque 0.4% + 4 Joshua Krall 0.4% + 3 Wil Moore III 0.3% + 3 Jesse Dailey 0.3% + 3 Nathan Bowser 0.3% + 3 Tyson Tate 0.3% + 3 Cory Thomas 0.3% + 3 Ryunosuke SATO 0.3% + 3 Paul Miller 0.3% + 3 Ben Lindsey 0.3% + 2 Forbes Lindesay 0.2% + 2 Konstantin Käfer 0.2% + 2 Brian Beck 0.2% + 2 Merrick Christensen 0.2% + 2 Michael Riley 0.2% + 2 David Henderson 0.2% + 2 Nathan Alderson 0.2% + 2 Paul Armstrong 0.2% + 2 Pete Hawkins 0.2% + 2 Quang Van 0.2% + 2 Raynos 0.2% + 2 Jonas Westerlund 0.2% + 2 Domenic Denicola 0.2% + 2 Shawn Krisman 0.2% + 2 Simon Gaeremynck 0.2% + 2 FARKAS Máté 0.2% + 2 Timo Tijhof 0.2% + 2 Justin DuJardin 0.2% + 2 Juzer Ali 0.2% + 2 Ian Storm Taylor 0.2% + 2 Arian Stolwijk 0.2% + 2 domenic 0.2% + 1 Richard Dingwall 0.1% + 1 Russ Bradberry 0.1% + 1 Sasha Koss 0.1% + 1 Seiya Konno 0.1% + 1 Standa Opichal 0.1% + 1 Steve Mason 0.1% + 1 Will Langstroth 0.1% + 1 Yanis Wang 0.1% + 1 Yuest Wang 0.1% + 1 abrkn 0.1% + 1 airportyh 0.1% + 1 fengmk2 0.1% + 1 tgautier@yahoo.com 0.1% + 1 traleig1 0.1% + 1 vlad 0.1% + 1 yuitest 0.1% + 1 Adam Crabtree 0.1% + 1 Andreas Brekken 0.1% + 1 Atsuya Takagi 0.1% + 1 Austin Birch 0.1% + 1 Bjørge Næss 0.1% + 1 Brian Moore 0.1% + 1 Bryan Donovan 0.1% + 1 Casey Foster 0.1% + 1 Corey Butler 0.1% + 1 Dave McKenna 0.1% + 1 Fedor Indutny 0.1% + 1 Florian Margaine 0.1% + 1 Frederico Silva 0.1% + 1 Fredrik Lindin 0.1% + 1 Gareth Murphy 0.1% + 1 Gavin Mogan 0.1% + 1 Greg Perkins 0.1% + 1 Harry Brundage 0.1% + 1 Herman Junge 0.1% + 1 Ian Young 0.1% + 1 Ivan 0.1% + 1 Jaakko Salonen 0.1% + 1 Jakub Nešetřil 0.1% + 1 James Bowes 0.1% + 1 James Lal 0.1% + 1 Jason Barry 0.1% + 1 Javier Aranda 0.1% + 1 Jeff Kunkle 0.1% + 1 Jonathan Creamer 0.1% + 1 Jussi Virtanen 0.1% + 1 Katie Gengler 0.1% + 1 Kazuhito Hokamura 0.1% + 1 Koen Punt 0.1% + 1 Laszlo Bacsi 0.1% + 1 László Bácsi 0.1% + 1 Maciej Małecki 0.1% + 1 Matt Robenolt 0.1% + 1 Matt Smith 0.1% + 1 Matthew Shanley 0.1% + 1 Michael Schoonmaker 0.1% + 1 Phil Sung 0.1% + 1 R56 0.1% ``` ## Links - - [Chai](https://github.com/chaijs/chai) - BDD, TDD, and assert for node & the browser - - [Should.js](http://github.com/visionmedia/should.js) - BDD style assertions for node - - [Expect.js](https://github.com/LearnBoost/expect.js) - BDD style assertions for node & the browser - - [Google Group](http://groups.google.com/group/mochajs) \ No newline at end of file + - [Google 
Group](http://groups.google.com/group/mochajs) + - [Wiki](https://github.com/visionmedia/mocha/wiki) + - Mocha [Extensions and reporters](https://github.com/visionmedia/mocha/wiki) diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/_mocha.js tilemill-0.10.1~saucy10/node_modules/mocha/_mocha.js --- tilemill-0.10.1~saucy9/node_modules/mocha/_mocha.js 2012-10-02 16:02:57.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/_mocha.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,4793 +0,0 @@ - -// CommonJS require() - -function require(p){ - var path = require.resolve(p) - , mod = require.modules[path]; - if (!mod) throw new Error('failed to require "' + p + '"'); - if (!mod.exports) { - mod.exports = {}; - mod.call(mod.exports, mod, mod.exports, require.relative(path)); - } - return mod.exports; - } - -require.modules = {}; - -require.resolve = function (path){ - var orig = path - , reg = path + '.js' - , index = path + '/index.js'; - return require.modules[reg] && reg - || require.modules[index] && index - || orig; - }; - -require.register = function (path, fn){ - require.modules[path] = fn; - }; - -require.relative = function (parent) { - return function(p){ - if ('.' != p.charAt(0)) return require(p); - - var path = parent.split('/') - , segs = p.split('/'); - path.pop(); - - for (var i = 0; i < segs.length; i++) { - var seg = segs[i]; - if ('..' == seg) path.pop(); - else if ('.' != seg) path.push(seg); - } - - return require(path.join('/')); - }; - }; - - -require.register("browser/debug.js", function(module, exports, require){ - -module.exports = function(type){ - return function(){ - - } -}; -}); // module: browser/debug.js - -require.register("browser/diff.js", function(module, exports, require){ - -}); // module: browser/diff.js - -require.register("browser/events.js", function(module, exports, require){ - -/** - * Module exports. - */ - -exports.EventEmitter = EventEmitter; - -/** - * Check if `obj` is an array. - */ - -function isArray(obj) { - return '[object Array]' == {}.toString.call(obj); -} - -/** - * Event emitter constructor. - * - * @api public - */ - -function EventEmitter(){}; - -/** - * Adds a listener. - * - * @api public - */ - -EventEmitter.prototype.on = function (name, fn) { - if (!this.$events) { - this.$events = {}; - } - - if (!this.$events[name]) { - this.$events[name] = fn; - } else if (isArray(this.$events[name])) { - this.$events[name].push(fn); - } else { - this.$events[name] = [this.$events[name], fn]; - } - - return this; -}; - -EventEmitter.prototype.addListener = EventEmitter.prototype.on; - -/** - * Adds a volatile listener. - * - * @api public - */ - -EventEmitter.prototype.once = function (name, fn) { - var self = this; - - function on () { - self.removeListener(name, on); - fn.apply(this, arguments); - }; - - on.listener = fn; - this.on(name, on); - - return this; -}; - -/** - * Removes a listener. - * - * @api public - */ - -EventEmitter.prototype.removeListener = function (name, fn) { - if (this.$events && this.$events[name]) { - var list = this.$events[name]; - - if (isArray(list)) { - var pos = -1; - - for (var i = 0, l = list.length; i < l; i++) { - if (list[i] === fn || (list[i].listener && list[i].listener === fn)) { - pos = i; - break; - } - } - - if (pos < 0) { - return this; - } - - list.splice(pos, 1); - - if (!list.length) { - delete this.$events[name]; - } - } else if (list === fn || (list.listener && list.listener === fn)) { - delete this.$events[name]; - } - } - - return this; -}; - -/** - * Removes all listeners for an event. 
- * - * @api public - */ - -EventEmitter.prototype.removeAllListeners = function (name) { - if (name === undefined) { - this.$events = {}; - return this; - } - - if (this.$events && this.$events[name]) { - this.$events[name] = null; - } - - return this; -}; - -/** - * Gets all listeners for a certain event. - * - * @api public - */ - -EventEmitter.prototype.listeners = function (name) { - if (!this.$events) { - this.$events = {}; - } - - if (!this.$events[name]) { - this.$events[name] = []; - } - - if (!isArray(this.$events[name])) { - this.$events[name] = [this.$events[name]]; - } - - return this.$events[name]; -}; - -/** - * Emits an event. - * - * @api public - */ - -EventEmitter.prototype.emit = function (name) { - if (!this.$events) { - return false; - } - - var handler = this.$events[name]; - - if (!handler) { - return false; - } - - var args = [].slice.call(arguments, 1); - - if ('function' == typeof handler) { - handler.apply(this, args); - } else if (isArray(handler)) { - var listeners = handler.slice(); - - for (var i = 0, l = listeners.length; i < l; i++) { - listeners[i].apply(this, args); - } - } else { - return false; - } - - return true; -}; -}); // module: browser/events.js - -require.register("browser/fs.js", function(module, exports, require){ - -}); // module: browser/fs.js - -require.register("browser/path.js", function(module, exports, require){ - -}); // module: browser/path.js - -require.register("browser/progress.js", function(module, exports, require){ - -/** - * Expose `Progress`. - */ - -module.exports = Progress; - -/** - * Initialize a new `Progress` indicator. - */ - -function Progress() { - this.percent = 0; - this.size(0); - this.fontSize(11); - this.font('helvetica, arial, sans-serif'); -} - -/** - * Set progress size to `n`. - * - * @param {Number} n - * @return {Progress} for chaining - * @api public - */ - -Progress.prototype.size = function(n){ - this._size = n; - return this; -}; - -/** - * Set text to `str`. - * - * @param {String} str - * @return {Progress} for chaining - * @api public - */ - -Progress.prototype.text = function(str){ - this._text = str; - return this; -}; - -/** - * Set font size to `n`. - * - * @param {Number} n - * @return {Progress} for chaining - * @api public - */ - -Progress.prototype.fontSize = function(n){ - this._fontSize = n; - return this; -}; - -/** - * Set font `family`. - * - * @param {String} family - * @return {Progress} for chaining - */ - -Progress.prototype.font = function(family){ - this._font = family; - return this; -}; - -/** - * Update percentage to `n`. - * - * @param {Number} n - * @return {Progress} for chaining - */ - -Progress.prototype.update = function(n){ - this.percent = n; - return this; -}; - -/** - * Draw on `ctx`. 
- * - * @param {CanvasRenderingContext2d} ctx - * @return {Progress} for chaining - */ - -Progress.prototype.draw = function(ctx){ - var percent = Math.min(this.percent, 100) - , size = this._size - , half = size / 2 - , x = half - , y = half - , rad = half - 1 - , fontSize = this._fontSize; - - ctx.font = fontSize + 'px ' + this._font; - - var angle = Math.PI * 2 * (percent / 100); - ctx.clearRect(0, 0, size, size); - - // outer circle - ctx.strokeStyle = '#9f9f9f'; - ctx.beginPath(); - ctx.arc(x, y, rad, 0, angle, false); - ctx.stroke(); - - // inner circle - ctx.strokeStyle = '#eee'; - ctx.beginPath(); - ctx.arc(x, y, rad - 1, 0, angle, true); - ctx.stroke(); - - // text - var text = this._text || (percent | 0) + '%' - , w = ctx.measureText(text).width; - - ctx.fillText( - text - , x - w / 2 + 1 - , y + fontSize / 2 - 1); - - return this; -}; - -}); // module: browser/progress.js - -require.register("browser/tty.js", function(module, exports, require){ - -exports.isatty = function(){ - return true; -}; - -exports.getWindowSize = function(){ - return [window.innerHeight, window.innerWidth]; -}; -}); // module: browser/tty.js - -require.register("context.js", function(module, exports, require){ - -/** - * Expose `Context`. - */ - -module.exports = Context; - -/** - * Initialize a new `Context`. - * - * @api private - */ - -function Context(){} - -/** - * Set or get the context `Runnable` to `runnable`. - * - * @param {Runnable} runnable - * @return {Context} - * @api private - */ - -Context.prototype.runnable = function(runnable){ - if (0 == arguments.length) return this._runnable; - this.test = this._runnable = runnable; - return this; -}; - -/** - * Set test timeout `ms`. - * - * @param {Number} ms - * @return {Context} self - * @api private - */ - -Context.prototype.timeout = function(ms){ - this.runnable().timeout(ms); - return this; -}; - -/** - * Set test slowness threshold `ms`. - * - * @param {Number} ms - * @return {Context} self - * @api private - */ - -Context.prototype.slow = function(ms){ - this.runnable().slow(ms); - return this; -}; - -/** - * Inspect the context void of `._runnable`. - * - * @return {String} - * @api private - */ - -Context.prototype.inspect = function(){ - return JSON.stringify(this, function(key, val){ - if ('_runnable' == key) return; - if ('test' == key) return; - return val; - }, 2); -}; - -}); // module: context.js - -require.register("hook.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Runnable = require('./runnable'); - -/** - * Expose `Hook`. - */ - -module.exports = Hook; - -/** - * Initialize a new `Hook` with the given `title` and callback `fn`. - * - * @param {String} title - * @param {Function} fn - * @api private - */ - -function Hook(title, fn) { - Runnable.call(this, title, fn); - this.type = 'hook'; -} - -/** - * Inherit from `Runnable.prototype`. - */ - -Hook.prototype = new Runnable; -Hook.prototype.constructor = Hook; - - -/** - * Get or set the test `err`. - * - * @param {Error} err - * @return {Error} - * @api public - */ - -Hook.prototype.error = function(err){ - if (0 == arguments.length) { - var err = this._error; - this._error = null; - return err; - } - - this._error = err; -}; - - -}); // module: hook.js - -require.register("interfaces/bdd.js", function(module, exports, require){ - -/** - * Module dependencies. 
- */ - -var Suite = require('../suite') - , Test = require('../test'); - -/** - * BDD-style interface: - * - * describe('Array', function(){ - * describe('#indexOf()', function(){ - * it('should return -1 when not present', function(){ - * - * }); - * - * it('should return the index when present', function(){ - * - * }); - * }); - * }); - * - */ - -module.exports = function(suite){ - var suites = [suite]; - - suite.on('pre-require', function(context, file, mocha){ - - /** - * Execute before running tests. - */ - - context.before = function(fn){ - suites[0].beforeAll(fn); - }; - - /** - * Execute after running tests. - */ - - context.after = function(fn){ - suites[0].afterAll(fn); - }; - - /** - * Execute before each test case. - */ - - context.beforeEach = function(fn){ - suites[0].beforeEach(fn); - }; - - /** - * Execute after each test case. - */ - - context.afterEach = function(fn){ - suites[0].afterEach(fn); - }; - - /** - * Describe a "suite" with the given `title` - * and callback `fn` containing nested suites - * and/or tests. - */ - - context.describe = context.context = function(title, fn){ - var suite = Suite.create(suites[0], title); - suites.unshift(suite); - fn(); - suites.shift(); - return suite; - }; - - /** - * Pending describe. - */ - - context.xdescribe = - context.xcontext = - context.describe.skip = function(title, fn){ - var suite = Suite.create(suites[0], title); - suite.pending = true; - suites.unshift(suite); - fn(); - suites.shift(); - }; - - /** - * Exclusive suite. - */ - - context.describe.only = function(title, fn){ - var suite = context.describe(title, fn); - mocha.grep(suite.fullTitle()); - }; - - /** - * Describe a specification or test-case - * with the given `title` and callback `fn` - * acting as a thunk. - */ - - context.it = context.specify = function(title, fn){ - var suite = suites[0]; - if (suite.pending) var fn = null; - var test = new Test(title, fn); - suite.addTest(test); - return test; - }; - - /** - * Exclusive test-case. - */ - - context.it.only = function(title, fn){ - var test = context.it(title, fn); - mocha.grep(test.fullTitle()); - }; - - /** - * Pending test case. - */ - - context.xit = - context.xspecify = - context.it.skip = function(title){ - context.it(title); - }; - }); -}; - -}); // module: interfaces/bdd.js - -require.register("interfaces/exports.js", function(module, exports, require){ - -/** - * Module dependencies. 
- */ - -var Suite = require('../suite') - , Test = require('../test'); - -/** - * TDD-style interface: - * - * exports.Array = { - * '#indexOf()': { - * 'should return -1 when the value is not present': function(){ - * - * }, - * - * 'should return the correct index when the value is present': function(){ - * - * } - * } - * }; - * - */ - -module.exports = function(suite){ - var suites = [suite]; - - suite.on('require', visit); - - function visit(obj) { - var suite; - for (var key in obj) { - if ('function' == typeof obj[key]) { - var fn = obj[key]; - switch (key) { - case 'before': - suites[0].beforeAll(fn); - break; - case 'after': - suites[0].afterAll(fn); - break; - case 'beforeEach': - suites[0].beforeEach(fn); - break; - case 'afterEach': - suites[0].afterEach(fn); - break; - default: - suites[0].addTest(new Test(key, fn)); - } - } else { - var suite = Suite.create(suites[0], key); - suites.unshift(suite); - visit(obj[key]); - suites.shift(); - } - } - } -}; -}); // module: interfaces/exports.js - -require.register("interfaces/index.js", function(module, exports, require){ - -exports.bdd = require('./bdd'); -exports.tdd = require('./tdd'); -exports.qunit = require('./qunit'); -exports.exports = require('./exports'); - -}); // module: interfaces/index.js - -require.register("interfaces/qunit.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Suite = require('../suite') - , Test = require('../test'); - -/** - * QUnit-style interface: - * - * suite('Array'); - * - * test('#length', function(){ - * var arr = [1,2,3]; - * ok(arr.length == 3); - * }); - * - * test('#indexOf()', function(){ - * var arr = [1,2,3]; - * ok(arr.indexOf(1) == 0); - * ok(arr.indexOf(2) == 1); - * ok(arr.indexOf(3) == 2); - * }); - * - * suite('String'); - * - * test('#length', function(){ - * ok('foo'.length == 3); - * }); - * - */ - -module.exports = function(suite){ - var suites = [suite]; - - suite.on('pre-require', function(context){ - - /** - * Execute before running tests. - */ - - context.before = function(fn){ - suites[0].beforeAll(fn); - }; - - /** - * Execute after running tests. - */ - - context.after = function(fn){ - suites[0].afterAll(fn); - }; - - /** - * Execute before each test case. - */ - - context.beforeEach = function(fn){ - suites[0].beforeEach(fn); - }; - - /** - * Execute after each test case. - */ - - context.afterEach = function(fn){ - suites[0].afterEach(fn); - }; - - /** - * Describe a "suite" with the given `title`. - */ - - context.suite = function(title){ - if (suites.length > 1) suites.shift(); - var suite = Suite.create(suites[0], title); - suites.unshift(suite); - }; - - /** - * Describe a specification or test-case - * with the given `title` and callback `fn` - * acting as a thunk. - */ - - context.test = function(title, fn){ - suites[0].addTest(new Test(title, fn)); - }; - }); -}; - -}); // module: interfaces/qunit.js - -require.register("interfaces/tdd.js", function(module, exports, require){ - -/** - * Module dependencies. 
- */ - -var Suite = require('../suite') - , Test = require('../test'); - -/** - * TDD-style interface: - * - * suite('Array', function(){ - * suite('#indexOf()', function(){ - * suiteSetup(function(){ - * - * }); - * - * test('should return -1 when not present', function(){ - * - * }); - * - * test('should return the index when present', function(){ - * - * }); - * - * suiteTeardown(function(){ - * - * }); - * }); - * }); - * - */ - -module.exports = function(suite){ - var suites = [suite]; - - suite.on('pre-require', function(context, file, mocha){ - - /** - * Execute before each test case. - */ - - context.setup = function(fn){ - suites[0].beforeEach(fn); - }; - - /** - * Execute after each test case. - */ - - context.teardown = function(fn){ - suites[0].afterEach(fn); - }; - - /** - * Execute before the suite. - */ - - context.suiteSetup = function(fn){ - suites[0].beforeAll(fn); - }; - - /** - * Execute after the suite. - */ - - context.suiteTeardown = function(fn){ - suites[0].afterAll(fn); - }; - - /** - * Describe a "suite" with the given `title` - * and callback `fn` containing nested suites - * and/or tests. - */ - - context.suite = function(title, fn){ - var suite = Suite.create(suites[0], title); - suites.unshift(suite); - fn(); - suites.shift(); - return suite; - }; - - /** - * Exclusive test-case. - */ - - context.suite.only = function(title, fn){ - var suite = context.suite(title, fn); - mocha.grep(suite.fullTitle()); - }; - - /** - * Describe a specification or test-case - * with the given `title` and callback `fn` - * acting as a thunk. - */ - - context.test = function(title, fn){ - var test = new Test(title, fn); - suites[0].addTest(test); - return test; - }; - - /** - * Exclusive test-case. - */ - - context.test.only = function(title, fn){ - var test = context.test(title, fn); - mocha.grep(test.fullTitle()); - }; - }); -}; - -}); // module: interfaces/tdd.js - -require.register("mocha.js", function(module, exports, require){ -/*! - * mocha - * Copyright(c) 2011 TJ Holowaychuk - * MIT Licensed - */ - -/** - * Module dependencies. - */ - -var path = require('browser/path') - , utils = require('./utils'); - -/** - * Expose `Mocha`. - */ - -exports = module.exports = Mocha; - -/** - * Expose internals. - */ - -exports.utils = utils; -exports.interfaces = require('./interfaces'); -exports.reporters = require('./reporters'); -exports.Runnable = require('./runnable'); -exports.Context = require('./context'); -exports.Runner = require('./runner'); -exports.Suite = require('./suite'); -exports.Hook = require('./hook'); -exports.Test = require('./test'); - -/** - * Return image `name` path. - * - * @param {String} name - * @return {String} - * @api private - */ - -function image(name) { - return __dirname + '/../images/' + name + '.png'; -} - -/** - * Setup mocha with `options`. 
- * - * Options: - * - * - `ui` name "bdd", "tdd", "exports" etc - * - `reporter` reporter instance, defaults to `mocha.reporters.Dot` - * - `globals` array of accepted globals - * - `timeout` timeout in milliseconds - * - `slow` milliseconds to wait before considering a test slow - * - `ignoreLeaks` ignore global leaks - * - `grep` string or regexp to filter tests with - * - * @param {Object} options - * @api public - */ - -function Mocha(options) { - options = options || {}; - this.files = []; - this.options = options; - this.grep(options.grep); - this.suite = new exports.Suite('', new exports.Context); - this.ui(options.ui); - this.reporter(options.reporter); - if (options.timeout) this.timeout(options.timeout); - if (options.slow) this.slow(options.slow); -} - -/** - * Add test `file`. - * - * @param {String} file - * @api public - */ - -Mocha.prototype.addFile = function(file){ - this.files.push(file); - return this; -}; - -/** - * Set reporter to `reporter`, defaults to "dot". - * - * @param {String|Function} reporter name of a reporter or a reporter constructor - * @api public - */ - -Mocha.prototype.reporter = function(reporter){ - if ('function' == typeof reporter) { - this._reporter = reporter; - } else { - reporter = reporter || 'dot'; - try { - this._reporter = require('./reporters/' + reporter); - } catch (err) { - this._reporter = require(reporter); - } - if (!this._reporter) throw new Error('invalid reporter "' + reporter + '"'); - } - return this; -}; - -/** - * Set test UI `name`, defaults to "bdd". - * - * @param {String} name - * @api public - */ - -Mocha.prototype.ui = function(name){ - name = name || 'bdd'; - this._ui = exports.interfaces[name]; - if (!this._ui) throw new Error('invalid interface "' + name + '"'); - this._ui = this._ui(this.suite); - return this; -}; - -/** - * Load registered files. - * - * @api private - */ - -Mocha.prototype.loadFiles = function(fn){ - var self = this; - var suite = this.suite; - var pending = this.files.length; - this.files.forEach(function(file){ - file = path.resolve(file); - suite.emit('pre-require', global, file, self); - suite.emit('require', require(file), file, self); - suite.emit('post-require', global, file, self); - --pending || (fn && fn()); - }); -}; - -/** - * Enable growl support. - * - * @api private - */ - -Mocha.prototype._growl = function(runner, reporter) { - var notify = require('growl'); - - runner.on('end', function(){ - var stats = reporter.stats; - if (stats.failures) { - var msg = stats.failures + ' of ' + runner.total + ' tests failed'; - notify(msg, { name: 'mocha', title: 'Failed', image: image('error') }); - } else { - notify(stats.passes + ' tests passed in ' + stats.duration + 'ms', { - name: 'mocha' - , title: 'Passed' - , image: image('ok') - }); - } - }); -}; - -/** - * Add regexp to grep, if `re` is a string it is escaped. - * - * @param {RegExp|String} re - * @return {Mocha} - * @api public - */ - -Mocha.prototype.grep = function(re){ - this.options.grep = 'string' == typeof re - ? new RegExp(utils.escapeRegexp(re)) - : re; - return this; -}; - -/** - * Invert `.grep()` matches. - * - * @return {Mocha} - * @api public - */ - -Mocha.prototype.invert = function(){ - this.options.invert = true; - return this; -}; - -/** - * Ignore global leaks. - * - * @return {Mocha} - * @api public - */ - -Mocha.prototype.ignoreLeaks = function(){ - this.options.ignoreLeaks = true; - return this; -}; - -/** - * Enable global leak checking.
- * - * @return {Mocha} - * @api public - */ - -Mocha.prototype.checkLeaks = function(){ - this.options.ignoreLeaks = false; - return this; -}; - -/** - * Enable growl support. - * - * @return {Mocha} - * @api public - */ - -Mocha.prototype.growl = function(){ - this.options.growl = true; - return this; -}; - -/** - * Ignore `globals` array or string. - * - * @param {Array|String} globals - * @return {Mocha} - * @api public - */ - -Mocha.prototype.globals = function(globals){ - this.options.globals = (this.options.globals || []).concat(globals); - return this; -}; - -/** - * Set the timeout in milliseconds. - * - * @param {Number} timeout - * @return {Mocha} - * @api public - */ - -Mocha.prototype.timeout = function(timeout){ - this.suite.timeout(timeout); - return this; -}; - -/** - * Set slowness threshold in milliseconds. - * - * @param {Number} slow - * @return {Mocha} - * @api public - */ - -Mocha.prototype.slow = function(slow){ - this.suite.slow(slow); - return this; -}; - -/** - * Run tests and invoke `fn()` when complete. - * - * @param {Function} fn - * @return {Runner} - * @api public - */ - -Mocha.prototype.run = function(fn){ - if (this.files.length) this.loadFiles(); - var suite = this.suite; - var options = this.options; - var runner = new exports.Runner(suite); - var reporter = new this._reporter(runner); - runner.ignoreLeaks = options.ignoreLeaks; - if (options.grep) runner.grep(options.grep, options.invert); - if (options.globals) runner.globals(options.globals); - if (options.growl) this._growl(runner, reporter); - return runner.run(fn); -}; - -}); // module: mocha.js - -require.register("ms.js", function(module, exports, require){ - -/** - * Helpers. - */ - -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; - -/** - * Parse or format the given `val`. - * - * @param {String|Number} val - * @return {String|Number} - * @api public - */ - -module.exports = function(val){ - if ('string' == typeof val) return parse(val); - return format(val); -} - -/** - * Parse the given `str` and return milliseconds. - * - * @param {String} str - * @return {Number} - * @api private - */ - -function parse(str) { - var m = /^((?:\d+)?\.?\d+) *(ms|seconds?|s|minutes?|m|hours?|h|days?|d|years?|y)?$/i.exec(str); - if (!m) return; - var n = parseFloat(m[1]); - var type = (m[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'y': - return n * 31557600000; - case 'days': - case 'day': - case 'd': - return n * 86400000; - case 'hours': - case 'hour': - case 'h': - return n * 3600000; - case 'minutes': - case 'minute': - case 'm': - return n * 60000; - case 'seconds': - case 'second': - case 's': - return n * 1000; - case 'ms': - return n; - } -} - -/** - * Format the given `ms`. - * - * @param {Number} ms - * @return {String} - * @api public - */ - -function format(ms) { - if (ms == d) return (ms / d) + ' day'; - if (ms > d) return (ms / d) + ' days'; - if (ms == h) return (ms / h) + ' hour'; - if (ms > h) return (ms / h) + ' hours'; - if (ms == m) return (ms / m) + ' minute'; - if (ms > m) return (ms / m) + ' minutes'; - if (ms == s) return (ms / s) + ' second'; - if (ms > s) return (ms / s) + ' seconds'; - return ms + ' ms'; -} -}); // module: ms.js - -require.register("reporters/base.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var tty = require('browser/tty') - , diff = require('browser/diff') - , ms = require('../ms'); - -/** - * Save timer references to avoid Sinon interfering (see GH-237). 
- */ - -var Date = global.Date - , setTimeout = global.setTimeout - , setInterval = global.setInterval - , clearTimeout = global.clearTimeout - , clearInterval = global.clearInterval; - -/** - * Check if both stdio streams are associated with a tty. - */ - -var isatty = tty.isatty(1) && tty.isatty(2); - -/** - * Expose `Base`. - */ - -exports = module.exports = Base; - -/** - * Enable coloring by default. - */ - -exports.useColors = isatty; - -/** - * Default color map. - */ - -exports.colors = { - 'pass': 90 - , 'fail': 31 - , 'bright pass': 92 - , 'bright fail': 91 - , 'bright yellow': 93 - , 'pending': 36 - , 'suite': 0 - , 'error title': 0 - , 'error message': 31 - , 'error stack': 90 - , 'checkmark': 32 - , 'fast': 90 - , 'medium': 33 - , 'slow': 31 - , 'green': 32 - , 'light': 90 - , 'diff gutter': 90 - , 'diff added': 42 - , 'diff removed': 41 -}; - -/** - * Color `str` with the given `type`, - * allowing colors to be disabled, - * as well as user-defined color - * schemes. - * - * @param {String} type - * @param {String} str - * @return {String} - * @api private - */ - -var color = exports.color = function(type, str) { - if (!exports.useColors) return str; - return '\u001b[' + exports.colors[type] + 'm' + str + '\u001b[0m'; -}; - -/** - * Expose term window size, with some - * defaults for when stderr is not a tty. - */ - -exports.window = { - width: isatty - ? process.stdout.getWindowSize - ? process.stdout.getWindowSize(1)[0] - : tty.getWindowSize()[1] - : 75 -}; - -/** - * Expose some basic cursor interactions - * that are common among reporters. - */ - -exports.cursor = { - hide: function(){ - process.stdout.write('\u001b[?25l'); - }, - - show: function(){ - process.stdout.write('\u001b[?25h'); - }, - - deleteLine: function(){ - process.stdout.write('\u001b[2K'); - }, - - beginningOfLine: function(){ - process.stdout.write('\u001b[0G'); - }, - - CR: function(){ - exports.cursor.deleteLine(); - exports.cursor.beginningOfLine(); - } -}; - -/** - * Output the given `failures` as a list.
- * - * @param {Array} failures - * @api public - */ - -exports.list = function(failures){ - console.error(); - failures.forEach(function(test, i){ - // format - var fmt = color('error title', ' %s) %s:\n') - + color('error message', ' %s') - + color('error stack', '\n%s\n'); - - // msg - var err = test.err - , message = err.message || '' - , stack = err.stack || message - , index = stack.indexOf(message) + message.length - , msg = stack.slice(0, index) - , actual = err.actual - , expected = err.expected - , escape = true; - - // explicitly show diff - if (err.showDiff) { - escape = false; - err.actual = actual = JSON.stringify(actual, null, 2); - err.expected = expected = JSON.stringify(expected, null, 2); - } - - // actual / expected diff - if ('string' == typeof actual && 'string' == typeof expected) { - var len = Math.max(actual.length, expected.length); - - if (len < 20) msg = errorDiff(err, 'Chars', escape); - else msg = errorDiff(err, 'Words', escape); - - // linenos - var lines = msg.split('\n'); - if (lines.length > 4) { - var width = String(lines.length).length; - msg = lines.map(function(str, i){ - return pad(++i, width) + ' |' + ' ' + str; - }).join('\n'); - } - - // legend - msg = '\n' - + color('diff removed', 'actual') - + ' ' - + color('diff added', 'expected') - + '\n\n' - + msg - + '\n'; - - // indent - msg = msg.replace(/^/gm, ' '); - - fmt = color('error title', ' %s) %s:\n%s') - + color('error stack', '\n%s\n'); - } - - // indent stack trace without msg - stack = stack.slice(index ? index + 1 : index) - .replace(/^/gm, ' '); - - console.error(fmt, (i + 1), test.fullTitle(), msg, stack); - }); -}; - -/** - * Initialize a new `Base` reporter. - * - * All other reporters generally - * inherit from this reporter, providing - * stats such as test duration, number - * of tests passed / failed etc. - * - * @param {Runner} runner - * @api public - */ - -function Base(runner) { - var self = this - , stats = this.stats = { suites: 0, tests: 0, passes: 0, pending: 0, failures: 0 } - , failures = this.failures = []; - - if (!runner) return; - this.runner = runner; - - runner.on('start', function(){ - stats.start = new Date; - }); - - runner.on('suite', function(suite){ - stats.suites = stats.suites || 0; - suite.root || stats.suites++; - }); - - runner.on('test end', function(test){ - stats.tests = stats.tests || 0; - stats.tests++; - }); - - runner.on('pass', function(test){ - stats.passes = stats.passes || 0; - - var medium = test.slow() / 2; - test.speed = test.duration > test.slow() - ? 'slow' - : test.duration > medium - ? 'medium' - : 'fast'; - - stats.passes++; - }); - - runner.on('fail', function(test, err){ - stats.failures = stats.failures || 0; - stats.failures++; - test.err = err; - failures.push(test); - }); - - runner.on('end', function(){ - stats.end = new Date; - stats.duration = new Date - stats.start; - }); - - runner.on('pending', function(){ - stats.pending++; - }); -} - -/** - * Output common epilogue used by many of - * the bundled reporters. - * - * @api public - */ - -Base.prototype.epilogue = function(){ - var stats = this.stats - , fmt - , tests; - - console.log(); - - function pluralize(n) { - return 1 == n ? 
'test' : 'tests'; - } - - // failure - if (stats.failures) { - fmt = color('bright fail', ' ✖') - + color('fail', ' %d of %d %s failed') - + color('light', ':') - - console.error(fmt, - stats.failures, - this.runner.total, - pluralize(this.runner.total)); - - Base.list(this.failures); - console.error(); - return; - } - - // pass - fmt = color('bright pass', ' ✔') - + color('green', ' %d %s complete') - + color('light', ' (%s)'); - - console.log(fmt, - stats.tests || 0, - pluralize(stats.tests), - ms(stats.duration)); - - // pending - if (stats.pending) { - fmt = color('pending', ' •') - + color('pending', ' %d %s pending'); - - console.log(fmt, stats.pending, pluralize(stats.pending)); - } - - console.log(); -}; - -/** - * Pad the given `str` to `len`. - * - * @param {String} str - * @param {String} len - * @return {String} - * @api private - */ - -function pad(str, len) { - str = String(str); - return Array(len - str.length + 1).join(' ') + str; -} - -/** - * Return a character diff for `err`. - * - * @param {Error} err - * @return {String} - * @api private - */ - -function errorDiff(err, type, escape) { - return diff['diff' + type](err.actual, err.expected).map(function(str){ - if (escape) { - str.value = str.value - .replace(/\t/g, '<tab>') - .replace(/\r/g, '<CR>') - .replace(/\n/g, '<LF>\n'); - } - if (str.added) return colorLines('diff added', str.value); - if (str.removed) return colorLines('diff removed', str.value); - return str.value; - }).join(''); -} - -/** - * Color lines for `str`, using the color `name`. - * - * @param {String} name - * @param {String} str - * @return {String} - * @api private - */ - -function colorLines(name, str) { - return str.split('\n').map(function(str){ - return color(name, str); - }).join('\n'); -} - -}); // module: reporters/base.js -
-require.register("reporters/doc.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , utils = require('../utils'); - -/** - * Expose `Doc`. - */ - -exports = module.exports = Doc; - -/** - * Initialize a new `Doc` reporter. - * - * @param {Runner} runner - * @api public - */ - -function Doc(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , total = runner.total - , indents = 2; - - function indent() { - return Array(indents).join(' '); - } - - runner.on('suite', function(suite){ - if (suite.root) return; - ++indents; - console.log('%s<section class="suite">', indent()); - ++indents; - console.log('%s<h1>%s</h1>', indent(), suite.title); - console.log('%s<dl>', indent()); - }); - - runner.on('suite end', function(suite){ - if (suite.root) return; - console.log('%s</dl>', indent()); - --indents; - console.log('%s</section>', indent()); - --indents; - }); - - runner.on('pass', function(test){ - console.log('%s  <dt>%s</dt>', indent(), test.title); - var code = utils.escape(utils.clean(test.fn.toString())); - console.log('%s  <dd><pre><code>%s</code></pre></dd>', indent(), code); - }); -} - -}); // module: reporters/doc.js -
-require.register("reporters/dot.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , color = Base.color; - -/** - * Expose `Dot`. - */ - -exports = module.exports = Dot; - -/** - * Initialize a new `Dot` matrix test reporter. - * - * @param {Runner} runner - * @api public - */ - -function Dot(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , width = Base.window.width * .75 | 0 - , c = '․' - , n = 0; - - runner.on('start', function(){ - process.stdout.write('\n '); - }); - - runner.on('pending', function(test){ - process.stdout.write(color('pending', c)); - }); - - runner.on('pass', function(test){ - if (++n % width == 0) process.stdout.write('\n '); - if ('slow' == test.speed) { - process.stdout.write(color('bright yellow', c)); - } else { - process.stdout.write(color(test.speed, c)); - } - }); - - runner.on('fail', function(test, err){ - if (++n % width == 0) process.stdout.write('\n '); - process.stdout.write(color('fail', c)); - }); - - runner.on('end', function(){ - console.log(); - self.epilogue(); - }); -} - -/** - * Inherit from `Base.prototype`. - */ - -Dot.prototype = new Base; -Dot.prototype.constructor = Dot; - -}); // module: reporters/dot.js -
-require.register("reporters/html-cov.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var JSONCov = require('./json-cov') - , fs = require('browser/fs'); - -/** - * Expose `HTMLCov`. - */ - -exports = module.exports = HTMLCov; - -/** - * Initialize a new `JsCoverage` reporter. - * - * @param {Runner} runner - * @api public - */ - -function HTMLCov(runner) { - var jade = require('jade') - , file = __dirname + '/templates/coverage.jade' - , str = fs.readFileSync(file, 'utf8') - , fn = jade.compile(str, { filename: file }) - , self = this; - - JSONCov.call(this, runner, false); - - runner.on('end', function(){ - process.stdout.write(fn({ - cov: self.cov - , coverageClass: coverageClass - })); - }); -} - -/** - * Return coverage class for `n`. - * - * @return {String} - * @api private - */ - -function coverageClass(n) { - if (n >= 75) return 'high'; - if (n >= 50) return 'medium'; - if (n >= 25) return 'low'; - return 'terrible'; -} -}); // module: reporters/html-cov.js -
-require.register("reporters/html.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , utils = require('../utils') - , Progress = require('../browser/progress') - , escape = utils.escape; - -/** - * Save timer references to avoid Sinon interfering (see GH-237). - */ - -var Date = global.Date - , setTimeout = global.setTimeout - , setInterval = global.setInterval - , clearTimeout = global.clearTimeout - , clearInterval = global.clearInterval; - -/** - * Expose `Doc`. - */ - -exports = module.exports = HTML; - -/** - * Stats template. - */ - -var statsTemplate = '<ul id="mocha-stats"><li class="progress"><canvas width="40" height="40"></canvas></li><li class="passes"><a href="#">passes:</a> <em>0</em></li><li class="failures"><a href="#">failures:</a> <em>0</em></li><li class="duration">duration: <em>0</em>s</li></ul>'; - -/** - * Initialize a new `Doc` reporter. - * - * @param {Runner} runner - * @api public - */ - -function HTML(runner, root) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , total = runner.total - , stat = fragment(statsTemplate) - , items = stat.getElementsByTagName('li') - , passes = items[1].getElementsByTagName('em')[0] - , passesLink = items[1].getElementsByTagName('a')[0] - , failures = items[2].getElementsByTagName('em')[0] - , failuresLink = items[2].getElementsByTagName('a')[0] - , duration = items[3].getElementsByTagName('em')[0] - , canvas = stat.getElementsByTagName('canvas')[0] - , report = fragment('<ul id="mocha-report"></ul>') - , stack = [report] - , progress - , ctx - - root = root || document.getElementById('mocha'); - - if (canvas.getContext) { - var ratio = window.devicePixelRatio || 1; - canvas.style.width = canvas.width; - canvas.style.height = canvas.height; - canvas.width *= ratio; - canvas.height *= ratio; - ctx = canvas.getContext('2d'); - ctx.scale(ratio, ratio); - progress = new Progress; - } - - if (!root) return error('#mocha div missing, add it to your document'); - - // pass toggle - on(passesLink, 'click', function(){ - unhide(); - var name = /pass/.test(report.className) ? '' : ' pass'; - report.className = report.className.replace(/fail|pass/g, '') + name; - if (report.className.trim()) hideSuitesWithout('test pass'); - }); - - // failure toggle - on(failuresLink, 'click', function(){ - unhide(); - var name = /fail/.test(report.className) ? '' : ' fail'; - report.className = report.className.replace(/fail|pass/g, '') + name; - if (report.className.trim()) hideSuitesWithout('test fail'); - }); - - root.appendChild(stat); - root.appendChild(report); - - if (progress) progress.size(40); - - runner.on('suite', function(suite){ - if (suite.root) return; - - // suite - var url = '?grep=' + encodeURIComponent(suite.fullTitle()); - var el = fragment('<li class="suite"><h1><a href="%s">%s</a></h1></li>', url, escape(suite.title)); - - // container - stack[0].appendChild(el); - stack.unshift(document.createElement('ul')); - el.appendChild(stack[0]); - }); - - runner.on('suite end', function(suite){ - if (suite.root) return; - stack.shift(); - }); - - runner.on('fail', function(test, err){ - if ('hook' == test.type || err.uncaught) runner.emit('test end', test); - }); - - runner.on('test end', function(test){ - window.scrollTo(0, document.body.scrollHeight); - - // TODO: add to stats - var percent = stats.tests / total * 100 | 0; - if (progress) progress.update(percent).draw(ctx); - - // update stats - var ms = new Date - stats.start; - text(passes, stats.passes); - text(failures, stats.failures); - text(duration, (ms / 1000).toFixed(2)); - - // test - if ('passed' == test.state) { - var el = fragment('<li class="test pass %e"><h2>%e<span class="duration">%ems</span></h2></li>', test.speed, test.title, test.duration); - } else if (test.pending) { - var el = fragment('<li class="test pass pending"><h2>%e</h2></li>', test.title); - } else { - var el = fragment('<li class="test fail"><h2>%e</h2></li>', test.title); - var str = test.err.stack || test.err.toString(); - - // FF / Opera do not add the message - if (!~str.indexOf(test.err.message)) { - str = test.err.message + '\n' + str; - } - - // <=IE7 stringifies to [Object Error]. Since it can be overloaded, we - // check for the result of the stringifying. - if ('[object Error]' == str) str = test.err.message; - - // Safari doesn't give you a stack. Let's at least provide a source line. - if (!test.err.stack && test.err.sourceURL && test.err.line !== undefined) { - str += "\n(" + test.err.sourceURL + ":" + test.err.line + ")"; - } - - el.appendChild(fragment('<pre class="error">%e</pre>', str)); - } - - // toggle code - // TODO: defer - if (!test.pending) { - var h2 = el.getElementsByTagName('h2')[0]; - - on(h2, 'click', function(){ - pre.style.display = 'none' == pre.style.display - ? 'inline-block' - : 'none'; - }); - - var pre = fragment('<pre><code>%e</code></pre>', utils.clean(test.fn.toString())); - el.appendChild(pre); - pre.style.display = 'none'; - } - - stack[0].appendChild(el); - }); -} - -/** - * Display error `msg`. - */ - -function error(msg) { - document.body.appendChild(fragment('<div id="error">%s</div>', msg)); -} - -/** - * Return a DOM fragment from `html`. - */ - -function fragment(html) { - var args = arguments - , div = document.createElement('div') - , i = 1; - - div.innerHTML = html.replace(/%([se])/g, function(_, type){ - switch (type) { - case 's': return String(args[i++]); - case 'e': return escape(args[i++]); - } - }); - - return div.firstChild; -} - -/** - * Check for suites that do not have elements - * with `classname`, and hide them. - */ - -function hideSuitesWithout(classname) { - var suites = document.getElementsByClassName('suite'); - for (var i = 0; i < suites.length; i++) { - var els = suites[i].getElementsByClassName(classname); - if (0 == els.length) suites[i].className += ' hidden'; - } -} - -/** - * Unhide .hidden suites. - */ - -function unhide() { - var els = document.getElementsByClassName('suite hidden'); - for (var i = 0; i < els.length; ++i) { - els[i].className = els[i].className.replace('suite hidden', 'suite'); - } -} - -/** - * Set `el` text to `str`. - */ - -function text(el, str) { - if (el.textContent) { - el.textContent = str; - } else { - el.innerText = str; - } -} - -/** - * Listen on `event` with callback `fn`. - */ - -function on(el, event, fn) { - if (el.addEventListener) { - el.addEventListener(event, fn, false); - } else { - el.attachEvent('on' + event, fn); - } -} - -}); // module: reporters/html.js -
-require.register("reporters/index.js", function(module, exports, require){ - -exports.Base = require('./base'); -exports.Dot = require('./dot'); -exports.Doc = require('./doc'); -exports.TAP = require('./tap'); -exports.JSON = require('./json'); -exports.HTML = require('./html'); -exports.List = require('./list'); -exports.Min = require('./min'); -exports.Spec = require('./spec'); -exports.Nyan = require('./nyan'); -exports.XUnit = require('./xunit'); -exports.Markdown = require('./markdown'); -exports.Progress = require('./progress'); -exports.Landing = require('./landing'); -exports.JSONCov = require('./json-cov'); -exports.HTMLCov = require('./html-cov'); -exports.JSONStream = require('./json-stream'); -exports.Teamcity = require('./teamcity'); - -}); // module: reporters/index.js -
-require.register("reporters/json-cov.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base'); - -/** - * Expose `JSONCov`. - */ - -exports = module.exports = JSONCov; - -/** - * Initialize a new `JsCoverage` reporter. - * - * @param {Runner} runner - * @param {Boolean} output - * @api public - */ - -function JSONCov(runner, output) { - var self = this - , output = 1 == arguments.length ? true : output; - - Base.call(this, runner); - - var tests = [] - , failures = [] - , passes = []; - - runner.on('test end', function(test){ - tests.push(test); - }); - - runner.on('pass', function(test){ - passes.push(test); - }); - - runner.on('fail', function(test){ - failures.push(test); - }); - - runner.on('end', function(){ - var cov = global._$jscoverage || {}; - var result = self.cov = map(cov); - result.stats = self.stats; - result.tests = tests.map(clean); - result.failures = failures.map(clean); - result.passes = passes.map(clean); - if (!output) return; - process.stdout.write(JSON.stringify(result, null, 2 )); - }); -} - -/** - * Map jscoverage data to a JSON structure - * suitable for reporting.
- * - * @param {Object} cov - * @return {Object} - * @api private - */ - -function map(cov) { - var ret = { - instrumentation: 'node-jscoverage' - , sloc: 0 - , hits: 0 - , misses: 0 - , coverage: 0 - , files: [] - }; - - for (var filename in cov) { - var data = coverage(filename, cov[filename]); - ret.files.push(data); - ret.hits += data.hits; - ret.misses += data.misses; - ret.sloc += data.sloc; - } - - if (ret.sloc > 0) { - ret.coverage = (ret.hits / ret.sloc) * 100; - } - - return ret; -}; - -/** - * Map jscoverage data for a single source file - * to a JSON structure suitable for reporting. - * - * @param {String} filename name of the source file - * @param {Object} data jscoverage coverage data - * @return {Object} - * @api private - */ - -function coverage(filename, data) { - var ret = { - filename: filename, - coverage: 0, - hits: 0, - misses: 0, - sloc: 0, - source: {} - }; - - data.source.forEach(function(line, num){ - num++; - - if (data[num] === 0) { - ret.misses++; - ret.sloc++; - } else if (data[num] !== undefined) { - ret.hits++; - ret.sloc++; - } - - ret.source[num] = { - source: line - , coverage: data[num] === undefined - ? '' - : data[num] - }; - }); - - ret.coverage = ret.hits / ret.sloc * 100; - - return ret; -} - -/** - * Return a plain-object representation of `test` - * free of cyclic properties etc. - * - * @param {Object} test - * @return {Object} - * @api private - */ - -function clean(test) { - return { - title: test.title - , fullTitle: test.fullTitle() - , duration: test.duration - } -} - -}); // module: reporters/json-cov.js - -require.register("reporters/json-stream.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , color = Base.color; - -/** - * Expose `List`. - */ - -exports = module.exports = List; - -/** - * Initialize a new `List` test reporter. - * - * @param {Runner} runner - * @api public - */ - -function List(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , total = runner.total; - - runner.on('start', function(){ - console.log(JSON.stringify(['start', { total: total }])); - }); - - runner.on('pass', function(test){ - console.log(JSON.stringify(['pass', clean(test)])); - }); - - runner.on('fail', function(test, err){ - console.log(JSON.stringify(['fail', clean(test)])); - }); - - runner.on('end', function(){ - process.stdout.write(JSON.stringify(['end', self.stats])); - }); -} - -/** - * Return a plain-object representation of `test` - * free of cyclic properties etc. - * - * @param {Object} test - * @return {Object} - * @api private - */ - -function clean(test) { - return { - title: test.title - , fullTitle: test.fullTitle() - , duration: test.duration - } -} -}); // module: reporters/json-stream.js - -require.register("reporters/json.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , cursor = Base.cursor - , color = Base.color; - -/** - * Expose `JSON`. - */ - -exports = module.exports = JSONReporter; - -/** - * Initialize a new `JSON` reporter. 
- * - * @param {Runner} runner - * @api public - */ - -function JSONReporter(runner) { - var self = this; - Base.call(this, runner); - - var tests = [] - , failures = [] - , passes = []; - - runner.on('test end', function(test){ - tests.push(test); - }); - - runner.on('pass', function(test){ - passes.push(test); - }); - - runner.on('fail', function(test){ - failures.push(test); - }); - - runner.on('end', function(){ - var obj = { - stats: self.stats - , tests: tests.map(clean) - , failures: failures.map(clean) - , passes: passes.map(clean) - }; - - process.stdout.write(JSON.stringify(obj, null, 2)); - }); -} - -/** - * Return a plain-object representation of `test` - * free of cyclic properties etc. - * - * @param {Object} test - * @return {Object} - * @api private - */ - -function clean(test) { - return { - title: test.title - , fullTitle: test.fullTitle() - , duration: test.duration - } -} -}); // module: reporters/json.js - -require.register("reporters/landing.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , cursor = Base.cursor - , color = Base.color; - -/** - * Expose `Landing`. - */ - -exports = module.exports = Landing; - -/** - * Airplane color. - */ - -Base.colors.plane = 0; - -/** - * Airplane crash color. - */ - -Base.colors['plane crash'] = 31; - -/** - * Runway color. - */ - -Base.colors.runway = 90; - -/** - * Initialize a new `Landing` reporter. - * - * @param {Runner} runner - * @api public - */ - -function Landing(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , width = Base.window.width * .75 | 0 - , total = runner.total - , stream = process.stdout - , plane = color('plane', '✈') - , crashed = -1 - , n = 0; - - function runway() { - var buf = Array(width).join('-'); - return ' ' + color('runway', buf); - } - - runner.on('start', function(){ - stream.write('\n '); - cursor.hide(); - }); - - runner.on('test end', function(test){ - // check if the plane crashed - var col = -1 == crashed - ? width * ++n / total | 0 - : crashed; - - // show the crash - if ('failed' == test.state) { - plane = color('plane crash', '✈'); - crashed = col; - } - - // render landing strip - stream.write('\u001b[4F\n\n'); - stream.write(runway()); - stream.write('\n '); - stream.write(color('runway', Array(col).join('⋅'))); - stream.write(plane) - stream.write(color('runway', Array(width - col).join('⋅') + '\n')); - stream.write(runway()); - stream.write('\u001b[0m'); - }); - - runner.on('end', function(){ - cursor.show(); - console.log(); - self.epilogue(); - }); -} - -/** - * Inherit from `Base.prototype`. - */ - -Landing.prototype = new Base; -Landing.prototype.constructor = Landing; - -}); // module: reporters/landing.js - -require.register("reporters/list.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , cursor = Base.cursor - , color = Base.color; - -/** - * Expose `List`. - */ - -exports = module.exports = List; - -/** - * Initialize a new `List` test reporter. 
- * - * @param {Runner} runner - * @api public - */ - -function List(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , n = 0; - - runner.on('start', function(){ - console.log(); - }); - - runner.on('test', function(test){ - process.stdout.write(color('pass', ' ' + test.fullTitle() + ': ')); - }); - - runner.on('pending', function(test){ - var fmt = color('checkmark', ' -') - + color('pending', ' %s'); - console.log(fmt, test.fullTitle()); - }); - - runner.on('pass', function(test){ - var fmt = color('checkmark', ' ✓') - + color('pass', ' %s: ') - + color(test.speed, '%dms'); - cursor.CR(); - console.log(fmt, test.fullTitle(), test.duration); - }); - - runner.on('fail', function(test, err){ - cursor.CR(); - console.log(color('fail', ' %d) %s'), ++n, test.fullTitle()); - }); - - runner.on('end', self.epilogue.bind(self)); -} - -/** - * Inherit from `Base.prototype`. - */ - -List.prototype = new Base; -List.prototype.constructor = List; - - -}); // module: reporters/list.js -
-require.register("reporters/markdown.js", function(module, exports, require){ -/** - * Module dependencies. - */ - -var Base = require('./base') - , utils = require('../utils'); - -/** - * Expose `Markdown`. - */ - -exports = module.exports = Markdown; - -/** - * Initialize a new `Markdown` reporter. - * - * @param {Runner} runner - * @api public - */ - -function Markdown(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , total = runner.total - , level = 0 - , buf = ''; - - function title(str) { - return Array(level).join('#') + ' ' + str; - } - - function indent() { - return Array(level).join(' '); - } - - function mapTOC(suite, obj) { - var ret = obj; - obj = obj[suite.title] = obj[suite.title] || { suite: suite }; - suite.suites.forEach(function(suite){ - mapTOC(suite, obj); - }); - return ret; - } - - function stringifyTOC(obj, level) { - ++level; - var buf = ''; - var link; - for (var key in obj) { - if ('suite' == key) continue; - if (key) link = ' - [' + key + '](#' + utils.slug(obj[key].suite.fullTitle()) + ')\n'; - if (key) buf += Array(level).join(' ') + link; - buf += stringifyTOC(obj[key], level); - } - --level; - return buf; - } - - function generateTOC(suite) { - var obj = mapTOC(suite, {}); - return stringifyTOC(obj, 0); - } - - generateTOC(runner.suite); - - runner.on('suite', function(suite){ - ++level; - var slug = utils.slug(suite.fullTitle()); - buf += '<a name="' + slug + '"></a>' + '\n'; - buf += title(suite.title) + '\n'; - }); - - runner.on('suite end', function(suite){ - --level; - }); - - runner.on('pass', function(test){ - var code = utils.clean(test.fn.toString()); - buf += test.title + '.\n'; - buf += '\n```js\n'; - buf += code + '\n'; - buf += '```\n\n'; - }); - - runner.on('end', function(){ - process.stdout.write('# TOC\n'); - process.stdout.write(generateTOC(runner.suite)); - process.stdout.write(buf); - }); -} -}); // module: reporters/markdown.js -
-require.register("reporters/min.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base'); - -/** - * Expose `Min`. - */ - -exports = module.exports = Min; - -/** - * Initialize a new `Min` minimal test reporter (best used with --watch).
- * - * @param {Runner} runner - * @api public - */ - -function Min(runner) { - Base.call(this, runner); - - runner.on('start', function(){ - // clear screen - process.stdout.write('\u001b[2J'); - // set cursor position - process.stdout.write('\u001b[1;3H'); - }); - - runner.on('end', this.epilogue.bind(this)); -} - -/** - * Inherit from `Base.prototype`. - */ - -Min.prototype = new Base; -Min.prototype.constructor = Min; - -}); // module: reporters/min.js - -require.register("reporters/nyan.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , color = Base.color; - -/** - * Expose `Dot`. - */ - -exports = module.exports = NyanCat; - -/** - * Initialize a new `Dot` matrix test reporter. - * - * @param {Runner} runner - * @api public - */ - -function NyanCat(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , width = Base.window.width * .75 | 0 - , rainbowColors = this.rainbowColors = self.generateColors() - , colorIndex = this.colorIndex = 0 - , numerOfLines = this.numberOfLines = 4 - , trajectories = this.trajectories = [[], [], [], []] - , nyanCatWidth = this.nyanCatWidth = 11 - , trajectoryWidthMax = this.trajectoryWidthMax = (width - nyanCatWidth) - , scoreboardWidth = this.scoreboardWidth = 5 - , tick = this.tick = 0 - , n = 0; - - runner.on('start', function(){ - Base.cursor.hide(); - self.draw('start'); - }); - - runner.on('pending', function(test){ - self.draw('pending'); - }); - - runner.on('pass', function(test){ - self.draw('pass'); - }); - - runner.on('fail', function(test, err){ - self.draw('fail'); - }); - - runner.on('end', function(){ - Base.cursor.show(); - for (var i = 0; i < self.numberOfLines; i++) write('\n'); - self.epilogue(); - }); -} - -/** - * Draw the nyan cat with runner `status`. - * - * @param {String} status - * @api private - */ - -NyanCat.prototype.draw = function(status){ - this.appendRainbow(); - this.drawScoreboard(); - this.drawRainbow(); - this.drawNyanCat(status); - this.tick = !this.tick; -}; - -/** - * Draw the "scoreboard" showing the number - * of passes, failures and pending tests. - * - * @api private - */ - -NyanCat.prototype.drawScoreboard = function(){ - var stats = this.stats; - var colors = Base.colors; - - function draw(color, n) { - write(' '); - write('\u001b[' + color + 'm' + n + '\u001b[0m'); - write('\n'); - } - - draw(colors.green, stats.passes); - draw(colors.fail, stats.failures); - draw(colors.pending, stats.pending); - write('\n'); - - this.cursorUp(this.numberOfLines); -}; - -/** - * Append the rainbow. - * - * @api private - */ - -NyanCat.prototype.appendRainbow = function(){ - var segment = this.tick ? '_' : '-'; - var rainbowified = this.rainbowify(segment); - - for (var index = 0; index < this.numberOfLines; index++) { - var trajectory = this.trajectories[index]; - if (trajectory.length >= this.trajectoryWidthMax) trajectory.shift(); - trajectory.push(rainbowified); - } -}; - -/** - * Draw the rainbow. - * - * @api private - */ - -NyanCat.prototype.drawRainbow = function(){ - var self = this; - - this.trajectories.forEach(function(line, index) { - write('\u001b[' + self.scoreboardWidth + 'C'); - write(line.join('')); - write('\n'); - }); - - this.cursorUp(this.numberOfLines); -}; - -/** - * Draw the nyan cat with `status`. 
- * - * @param {String} status - * @api private - */ - -NyanCat.prototype.drawNyanCat = function(status) { - var self = this; - var startWidth = this.scoreboardWidth + this.trajectories[0].length; - - [0, 1, 2, 3].forEach(function(index) { - write('\u001b[' + startWidth + 'C'); - - switch (index) { - case 0: - write('_,------,'); - write('\n'); - break; - case 1: - var padding = self.tick ? ' ' : ' '; - write('_|' + padding + '/\\_/\\ '); - write('\n'); - break; - case 2: - var padding = self.tick ? '_' : '__'; - var tail = self.tick ? '~' : '^'; - var face; - switch (status) { - case 'pass': - face = '( ^ .^)'; - break; - case 'fail': - face = '( o .o)'; - break; - default: - face = '( - .-)'; - } - write(tail + '|' + padding + face + ' '); - write('\n'); - break; - case 3: - var padding = self.tick ? ' ' : ' '; - write(padding + '"" "" '); - write('\n'); - break; - } - }); - - this.cursorUp(this.numberOfLines); -}; - -/** - * Move cursor up `n`. - * - * @param {Number} n - * @api private - */ - -NyanCat.prototype.cursorUp = function(n) { - write('\u001b[' + n + 'A'); -}; - -/** - * Move cursor down `n`. - * - * @param {Number} n - * @api private - */ - -NyanCat.prototype.cursorDown = function(n) { - write('\u001b[' + n + 'B'); -}; - -/** - * Generate rainbow colors. - * - * @return {Array} - * @api private - */ - -NyanCat.prototype.generateColors = function(){ - var colors = []; - - for (var i = 0; i < (6 * 7); i++) { - var pi3 = Math.floor(Math.PI / 3); - var n = (i * (1.0 / 6)); - var r = Math.floor(3 * Math.sin(n) + 3); - var g = Math.floor(3 * Math.sin(n + 2 * pi3) + 3); - var b = Math.floor(3 * Math.sin(n + 4 * pi3) + 3); - colors.push(36 * r + 6 * g + b + 16); - } - - return colors; -}; - -/** - * Apply rainbow to the given `str`. - * - * @param {String} str - * @return {String} - * @api private - */ - -NyanCat.prototype.rainbowify = function(str){ - var color = this.rainbowColors[this.colorIndex % this.rainbowColors.length]; - this.colorIndex += 1; - return '\u001b[38;5;' + color + 'm' + str + '\u001b[0m'; -}; - -/** - * Stdout helper. - */ - -function write(string) { - process.stdout.write(string); -} - -/** - * Inherit from `Base.prototype`. - */ - -NyanCat.prototype = new Base; -NyanCat.prototype.constructor = NyanCat; - - -}); // module: reporters/nyan.js - -require.register("reporters/progress.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , cursor = Base.cursor - , color = Base.color; - -/** - * Expose `Progress`. - */ - -exports = module.exports = Progress; - -/** - * General progress bar color. - */ - -Base.colors.progress = 90; - -/** - * Initialize a new `Progress` bar test reporter. 
- * - * @param {Runner} runner - * @param {Object} options - * @api public - */ - -function Progress(runner, options) { - Base.call(this, runner); - - var self = this - , options = options || {} - , stats = this.stats - , width = Base.window.width * .50 | 0 - , total = runner.total - , complete = 0 - , max = Math.max; - - // default chars - options.open = options.open || '['; - options.complete = options.complete || '▬'; - options.incomplete = options.incomplete || '⋅'; - options.close = options.close || ']'; - options.verbose = false; - - // tests started - runner.on('start', function(){ - console.log(); - cursor.hide(); - }); - - // tests complete - runner.on('test end', function(){ - complete++; - var incomplete = total - complete - , percent = complete / total - , n = width * percent | 0 - , i = width - n; - - cursor.CR(); - process.stdout.write('\u001b[J'); - process.stdout.write(color('progress', ' ' + options.open)); - process.stdout.write(Array(n).join(options.complete)); - process.stdout.write(Array(i).join(options.incomplete)); - process.stdout.write(color('progress', options.close)); - if (options.verbose) { - process.stdout.write(color('progress', ' ' + complete + ' of ' + total)); - } - }); - - // tests are complete, output some stats - // and the failures if any - runner.on('end', function(){ - cursor.show(); - console.log(); - self.epilogue(); - }); -} - -/** - * Inherit from `Base.prototype`. - */ - -Progress.prototype = new Base; -Progress.prototype.constructor = Progress; - - -}); // module: reporters/progress.js - -require.register("reporters/spec.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , cursor = Base.cursor - , color = Base.color; - -/** - * Expose `Spec`. - */ - -exports = module.exports = Spec; - -/** - * Initialize a new `Spec` test reporter. - * - * @param {Runner} runner - * @api public - */ - -function Spec(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , indents = 0 - , n = 0; - - function indent() { - return Array(indents).join(' ') - } - - runner.on('start', function(){ - console.log(); - }); - - runner.on('suite', function(suite){ - ++indents; - console.log(color('suite', '%s%s'), indent(), suite.title); - }); - - runner.on('suite end', function(suite){ - --indents; - if (1 == indents) console.log(); - }); - - runner.on('test', function(test){ - process.stdout.write(indent() + color('pass', ' ◦ ' + test.title + ': ')); - }); - - runner.on('pending', function(test){ - var fmt = indent() + color('pending', ' - %s'); - console.log(fmt, test.title); - }); - - runner.on('pass', function(test){ - if ('fast' == test.speed) { - var fmt = indent() - + color('checkmark', ' ✓') - + color('pass', ' %s '); - cursor.CR(); - console.log(fmt, test.title); - } else { - var fmt = indent() - + color('checkmark', ' ✓') - + color('pass', ' %s ') - + color(test.speed, '(%dms)'); - cursor.CR(); - console.log(fmt, test.title, test.duration); - } - }); - - runner.on('fail', function(test, err){ - cursor.CR(); - console.log(indent() + color('fail', ' %d) %s'), ++n, test.title); - }); - - runner.on('end', self.epilogue.bind(self)); -} - -/** - * Inherit from `Base.prototype`. - */ - -Spec.prototype = new Base; -Spec.prototype.constructor = Spec; - - -}); // module: reporters/spec.js - -require.register("reporters/tap.js", function(module, exports, require){ - -/** - * Module dependencies. 
- */ - -var Base = require('./base') - , cursor = Base.cursor - , color = Base.color; - -/** - * Expose `TAP`. - */ - -exports = module.exports = TAP; - -/** - * Initialize a new `TAP` reporter. - * - * @param {Runner} runner - * @api public - */ - -function TAP(runner) { - Base.call(this, runner); - - var self = this - , stats = this.stats - , n = 1; - - runner.on('start', function(){ - var total = runner.grepTotal(runner.suite); - console.log('%d..%d', 1, total); - }); - - runner.on('test end', function(){ - ++n; - }); - - runner.on('pending', function(test){ - console.log('ok %d %s # SKIP -', n, title(test)); - }); - - runner.on('pass', function(test){ - console.log('ok %d %s', n, title(test)); - }); - - runner.on('fail', function(test, err){ - console.log('not ok %d %s', n, title(test)); - console.log(err.stack.replace(/^/gm, ' ')); - }); -} - -/** - * Return a TAP-safe title of `test` - * - * @param {Object} test - * @return {String} - * @api private - */ - -function title(test) { - return test.fullTitle().replace(/#/g, ''); -} - -}); // module: reporters/tap.js - -require.register("reporters/teamcity.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base'); - -/** - * Expose `Teamcity`. - */ - -exports = module.exports = Teamcity; - -/** - * Initialize a new `Teamcity` reporter. - * - * @param {Runner} runner - * @api public - */ - -function Teamcity(runner) { - Base.call(this, runner); - var stats = this.stats; - - runner.on('start', function() { - console.log("##teamcity[testSuiteStarted name='mocha.suite']"); - }); - - runner.on('test', function(test) { - console.log("##teamcity[testStarted name='" + escape(test.fullTitle()) + "']"); - }); - - runner.on('fail', function(test, err) { - console.log("##teamcity[testFailed name='" + escape(test.fullTitle()) + "' message='" + escape(err.message) + "']"); - }); - - runner.on('pending', function(test) { - console.log("##teamcity[testIgnored name='" + escape(test.fullTitle()) + "' message='pending']"); - }); - - runner.on('test end', function(test) { - console.log("##teamcity[testFinished name='" + escape(test.fullTitle()) + "' duration='" + test.duration + "']"); - }); - - runner.on('end', function() { - console.log("##teamcity[testSuiteFinished name='mocha.suite' duration='" + stats.duration + "']"); - }); -} - -/** - * Escape the given `str`. - */ - -function escape(str) { - return str - .replace(/\|/g, "||") - .replace(/\n/g, "|n") - .replace(/\r/g, "|r") - .replace(/\[/g, "|[") - .replace(/\]/g, "|]") - .replace(/\u0085/g, "|x") - .replace(/\u2028/g, "|l") - .replace(/\u2029/g, "|p") - .replace(/'/g, "|'"); -} - -}); // module: reporters/teamcity.js - -require.register("reporters/xunit.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var Base = require('./base') - , utils = require('../utils') - , escape = utils.escape; - -/** - * Save timer references to avoid Sinon interfering (see GH-237). - */ - -var Date = global.Date - , setTimeout = global.setTimeout - , setInterval = global.setInterval - , clearTimeout = global.clearTimeout - , clearInterval = global.clearInterval; - -/** - * Expose `XUnit`. - */ - -exports = module.exports = XUnit; - -/** - * Initialize a new `XUnit` reporter. 
- *
- * @param {Runner} runner
- * @api public
- */
-
-function XUnit(runner) {
-  Base.call(this, runner);
-  var stats = this.stats
-    , tests = []
-    , self = this;
-
-  runner.on('pass', function(test){
-    tests.push(test);
-  });
-
-  runner.on('fail', function(test){
-    tests.push(test);
-  });
-
-  runner.on('end', function(){
-    console.log(tag('testsuite', {
-        name: 'Mocha Tests'
-      , tests: stats.tests
-      , failures: stats.failures
-      , errors: stats.failures
-      , skip: stats.tests - stats.failures - stats.passes
-      , timestamp: (new Date).toUTCString()
-      , time: stats.duration / 1000
-    }, false));
-
-    tests.forEach(test);
-    console.log('</testsuite>');
-  });
-}
-
-/**
- * Inherit from `Base.prototype`.
- */
-
-XUnit.prototype = new Base;
-XUnit.prototype.constructor = XUnit;
-
-
-/**
- * Output tag for the given `test.`
- */
-
-function test(test) {
-  var attrs = {
-      classname: test.parent.fullTitle()
-    , name: test.title
-    , time: test.duration / 1000
-  };
-
-  if ('failed' == test.state) {
-    var err = test.err;
-    attrs.message = escape(err.message);
-    console.log(tag('testcase', attrs, false, tag('failure', attrs, false, cdata(err.stack))));
-  } else if (test.pending) {
-    console.log(tag('testcase', attrs, false, tag('skipped', {}, true)));
-  } else {
-    console.log(tag('testcase', attrs, true) );
-  }
-}
-
-/**
- * HTML tag helper.
- */
-
-function tag(name, attrs, close, content) {
-  var end = close ? '/>' : '>'
-    , pairs = []
-    , tag;
-
-  for (var key in attrs) {
-    pairs.push(key + '="' + escape(attrs[key]) + '"');
-  }
-
-  tag = '<' + name + (pairs.length ? ' ' + pairs.join(' ') : '') + end;
-  if (content) tag += content + '</' + name + end;
-  return tag;
-}
-
-/**
- * Return cdata escaped CDATA `str`.
- */
-
-function cdata(str) {
-  return '<![CDATA[' + escape(str) + ']]>';
-}
-
-}); // module: reporters/xunit.js
-
-require.register("runnable.js", function(module, exports, require){
-
-/**
- * Module dependencies.
- */
-
-var EventEmitter = require('browser/events').EventEmitter
-  , debug = require('browser/debug')('mocha:runnable');
-
-/**
- * Save timer references to avoid Sinon interfering (see GH-237).
- */
-
-var Date = global.Date
-  , setTimeout = global.setTimeout
-  , setInterval = global.setInterval
-  , clearTimeout = global.clearTimeout
-  , clearInterval = global.clearInterval;
-
-/**
- * Expose `Runnable`.
- */
-
-module.exports = Runnable;
-
-/**
- * Initialize a new `Runnable` with the given `title` and callback `fn`.
- *
- * @param {String} title
- * @param {Function} fn
- * @api private
- */
-
-function Runnable(title, fn) {
-  this.title = title;
-  this.fn = fn;
-  this.async = fn && fn.length;
-  this.sync = ! this.async;
-  this._timeout = 2000;
-  this._slow = 75;
-  this.timedOut = false;
-}
-
-/**
- * Inherit from `EventEmitter.prototype`.
- */
-
-Runnable.prototype = new EventEmitter;
-Runnable.prototype.constructor = Runnable;
-
-
-/**
- * Set & get timeout `ms`.
- *
- * @param {Number} ms
- * @return {Runnable|Number} ms or self
- * @api private
- */
-
-Runnable.prototype.timeout = function(ms){
-  if (0 == arguments.length) return this._timeout;
-  debug('timeout %d', ms);
-  this._timeout = ms;
-  if (this.timer) this.resetTimeout();
-  return this;
-};
-
-/**
- * Set & get slow `ms`.
- *
- * @param {Number} ms
- * @return {Runnable|Number} ms or self
- * @api private
- */
-
-Runnable.prototype.slow = function(ms){
-  if (0 === arguments.length) return this._slow;
-  debug('timeout %d', ms);
-  this._slow = ms;
-  return this;
-};
-
-/**
- * Return the full title generated by recursively
- * concatenating the parent's full title.
- * - * @return {String} - * @api public - */ - -Runnable.prototype.fullTitle = function(){ - return this.parent.fullTitle() + ' ' + this.title; -}; - -/** - * Clear the timeout. - * - * @api private - */ - -Runnable.prototype.clearTimeout = function(){ - clearTimeout(this.timer); -}; - -/** - * Inspect the runnable void of private properties. - * - * @return {String} - * @api private - */ - -Runnable.prototype.inspect = function(){ - return JSON.stringify(this, function(key, val){ - if ('_' == key[0]) return; - if ('parent' == key) return '#'; - if ('ctx' == key) return '#'; - return val; - }, 2); -}; - -/** - * Reset the timeout. - * - * @api private - */ - -Runnable.prototype.resetTimeout = function(){ - var self = this - , ms = this.timeout(); - - this.clearTimeout(); - if (ms) { - this.timer = setTimeout(function(){ - self.callback(new Error('timeout of ' + ms + 'ms exceeded')); - self.timedOut = true; - }, ms); - } -}; - -/** - * Run the test and invoke `fn(err)`. - * - * @param {Function} fn - * @api private - */ - -Runnable.prototype.run = function(fn){ - var self = this - , ms = this.timeout() - , start = new Date - , ctx = this.ctx - , finished - , emitted; - - if (ctx) ctx.runnable(this); - - // timeout - if (this.async) { - if (ms) { - this.timer = setTimeout(function(){ - done(new Error('timeout of ' + ms + 'ms exceeded')); - self.timedOut = true; - }, ms); - } - } - - // called multiple times - function multiple(err) { - if (emitted) return; - emitted = true; - self.emit('error', err || new Error('done() called multiple times')); - } - - // finished - function done(err) { - if (self.timedOut) return; - if (finished) return multiple(err); - self.clearTimeout(); - self.duration = new Date - start; - finished = true; - fn(err); - } - - // for .resetTimeout() - this.callback = done; - - // async - if (this.async) { - try { - this.fn.call(ctx, function(err){ - if (err instanceof Error) return done(err); - if (null != err) return done(new Error('done() invoked with non-Error: ' + err)); - done(); - }); - } catch (err) { - done(err); - } - return; - } - - // sync - try { - if (!this.pending) this.fn.call(ctx); - this.duration = new Date - start; - fn(); - } catch (err) { - fn(err); - } -}; - -}); // module: runnable.js - -require.register("runner.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var EventEmitter = require('browser/events').EventEmitter - , debug = require('browser/debug')('mocha:runner') - , Test = require('./test') - , utils = require('./utils') - , filter = utils.filter - , keys = utils.keys - , noop = function(){}; - -/** - * Expose `Runner`. - */ - -module.exports = Runner; - -/** - * Initialize a `Runner` for the given `suite`. 
- * - * Events: - * - * - `start` execution started - * - `end` execution complete - * - `suite` (suite) test suite execution started - * - `suite end` (suite) all tests (and sub-suites) have finished - * - `test` (test) test execution started - * - `test end` (test) test completed - * - `hook` (hook) hook execution started - * - `hook end` (hook) hook complete - * - `pass` (test) test passed - * - `fail` (test, err) test failed - * - * @api public - */ - -function Runner(suite) { - var self = this; - this._globals = []; - this.suite = suite; - this.total = suite.total(); - this.failures = 0; - this.on('test end', function(test){ self.checkGlobals(test); }); - this.on('hook end', function(hook){ self.checkGlobals(hook); }); - this.grep(/.*/); - this.globals(utils.keys(global).concat(['errno'])); -} - -/** - * Inherit from `EventEmitter.prototype`. - */ - -Runner.prototype = new EventEmitter; -Runner.prototype.constructor = Runner; - - -/** - * Run tests with full titles matching `re`. Updates runner.total - * with number of tests matched. - * - * @param {RegExp} re - * @param {Boolean} invert - * @return {Runner} for chaining - * @api public - */ - -Runner.prototype.grep = function(re, invert){ - debug('grep %s', re); - this._grep = re; - this._invert = invert; - this.total = this.grepTotal(this.suite); - return this; -}; - -/** - * Returns the number of tests matching the grep search for the - * given suite. - * - * @param {Suite} suite - * @return {Number} - * @api public - */ - -Runner.prototype.grepTotal = function(suite) { - var self = this; - var total = 0; - - suite.eachTest(function(test){ - var match = self._grep.test(test.fullTitle()); - if (self._invert) match = !match; - if (match) total++; - }); - - return total; -}; - -/** - * Allow the given `arr` of globals. - * - * @param {Array} arr - * @return {Runner} for chaining - * @api public - */ - -Runner.prototype.globals = function(arr){ - if (0 == arguments.length) return this._globals; - debug('globals %j', arr); - utils.forEach(arr, function(arr){ - this._globals.push(arr); - }, this); - return this; -}; - -/** - * Check for global variable leaks. - * - * @api private - */ - -Runner.prototype.checkGlobals = function(test){ - if (this.ignoreLeaks) return; - var ok = this._globals; - var globals = keys(global); - var isNode = process.kill; - var leaks; - - // check length - 2 ('errno' and 'location' globals) - if (isNode && 1 == ok.length - globals.length) return - else if (2 == ok.length - globals.length) return; - - leaks = filterLeaks(ok, globals); - this._globals = this._globals.concat(leaks); - - if (leaks.length > 1) { - this.fail(test, new Error('global leaks detected: ' + leaks.join(', ') + '')); - } else if (leaks.length) { - this.fail(test, new Error('global leak detected: ' + leaks[0])); - } -}; - -/** - * Fail the given `test`. - * - * @param {Test} test - * @param {Error} err - * @api private - */ - -Runner.prototype.fail = function(test, err){ - ++this.failures; - test.state = 'failed'; - if ('string' == typeof err) { - err = new Error('the string "' + err + '" was thrown, throw an Error :)'); - } - this.emit('fail', test, err); -}; - -/** - * Fail the given `hook` with `err`. - * - * Hook failures (currently) hard-end due - * to that fact that a failing hook will - * surely cause subsequent tests to fail, - * causing jumbled reporting. 
- * - * @param {Hook} hook - * @param {Error} err - * @api private - */ - -Runner.prototype.failHook = function(hook, err){ - this.fail(hook, err); - this.emit('end'); -}; - -/** - * Run hook `name` callbacks and then invoke `fn()`. - * - * @param {String} name - * @param {Function} function - * @api private - */ - -Runner.prototype.hook = function(name, fn){ - var suite = this.suite - , hooks = suite['_' + name] - , self = this - , timer; - - function next(i) { - var hook = hooks[i]; - if (!hook) return fn(); - self.currentRunnable = hook; - - self.emit('hook', hook); - - hook.on('error', function(err){ - self.failHook(hook, err); - }); - - hook.run(function(err){ - hook.removeAllListeners('error'); - var testError = hook.error(); - if (testError) self.fail(self.test, testError); - if (err) return self.failHook(hook, err); - self.emit('hook end', hook); - next(++i); - }); - } - - process.nextTick(function(){ - next(0); - }); -}; - -/** - * Run hook `name` for the given array of `suites` - * in order, and callback `fn(err)`. - * - * @param {String} name - * @param {Array} suites - * @param {Function} fn - * @api private - */ - -Runner.prototype.hooks = function(name, suites, fn){ - var self = this - , orig = this.suite; - - function next(suite) { - self.suite = suite; - - if (!suite) { - self.suite = orig; - return fn(); - } - - self.hook(name, function(err){ - if (err) { - self.suite = orig; - return fn(err); - } - - next(suites.pop()); - }); - } - - next(suites.pop()); -}; - -/** - * Run hooks from the top level down. - * - * @param {String} name - * @param {Function} fn - * @api private - */ - -Runner.prototype.hookUp = function(name, fn){ - var suites = [this.suite].concat(this.parents()).reverse(); - this.hooks(name, suites, fn); -}; - -/** - * Run hooks from the bottom up. - * - * @param {String} name - * @param {Function} fn - * @api private - */ - -Runner.prototype.hookDown = function(name, fn){ - var suites = [this.suite].concat(this.parents()); - this.hooks(name, suites, fn); -}; - -/** - * Return an array of parent Suites from - * closest to furthest. - * - * @return {Array} - * @api private - */ - -Runner.prototype.parents = function(){ - var suite = this.suite - , suites = []; - while (suite = suite.parent) suites.push(suite); - return suites; -}; - -/** - * Run the current test and callback `fn(err)`. - * - * @param {Function} fn - * @api private - */ - -Runner.prototype.runTest = function(fn){ - var test = this.test - , self = this; - - try { - test.on('error', function(err){ - self.fail(test, err); - }); - test.run(fn); - } catch (err) { - fn(err); - } -}; - -/** - * Run tests in the given `suite` and invoke - * the callback `fn()` when complete. 
- * - * @param {Suite} suite - * @param {Function} fn - * @api private - */ - -Runner.prototype.runTests = function(suite, fn){ - var self = this - , tests = suite.tests - , test; - - function next(err) { - // if we bail after first err - if (self.failures && suite._bail) return fn(); - - // next test - test = tests.shift(); - - // all done - if (!test) return fn(); - - // grep - var match = self._grep.test(test.fullTitle()); - if (self._invert) match = !match; - if (!match) return next(); - - // pending - if (test.pending) { - self.emit('pending', test); - self.emit('test end', test); - return next(); - } - - // execute test and hook(s) - self.emit('test', self.test = test); - self.hookDown('beforeEach', function(){ - self.currentRunnable = self.test; - self.runTest(function(err){ - test = self.test; - - if (err) { - self.fail(test, err); - self.emit('test end', test); - return self.hookUp('afterEach', next); - } - - test.state = 'passed'; - self.emit('pass', test); - self.emit('test end', test); - self.hookUp('afterEach', next); - }); - }); - } - - this.next = next; - next(); -}; - -/** - * Run the given `suite` and invoke the - * callback `fn()` when complete. - * - * @param {Suite} suite - * @param {Function} fn - * @api private - */ - -Runner.prototype.runSuite = function(suite, fn){ - var total = this.grepTotal(suite) - , self = this - , i = 0; - - debug('run suite %s', suite.fullTitle()); - - if (!total) return fn(); - - this.emit('suite', this.suite = suite); - - function next() { - var curr = suite.suites[i++]; - if (!curr) return done(); - self.runSuite(curr, next); - } - - function done() { - self.suite = suite; - self.hook('afterAll', function(){ - self.emit('suite end', suite); - fn(); - }); - } - - this.hook('beforeAll', function(){ - self.runTests(suite, next); - }); -}; - -/** - * Handle uncaught exceptions. - * - * @param {Error} err - * @api private - */ - -Runner.prototype.uncaught = function(err){ - debug('uncaught exception %s', err.message); - var runnable = this.currentRunnable; - if (!runnable || 'failed' == runnable.state) return; - runnable.clearTimeout(); - err.uncaught = true; - this.fail(runnable, err); - - // recover from test - if ('test' == runnable.type) { - this.emit('test end', runnable); - this.hookUp('afterEach', this.next); - return; - } - - // bail on hooks - this.emit('end'); -}; - -/** - * Run the root suite and invoke `fn(failures)` - * on completion. - * - * @param {Function} fn - * @return {Runner} for chaining - * @api public - */ - -Runner.prototype.run = function(fn){ - var self = this - , fn = fn || function(){}; - - debug('start'); - - // uncaught callback - function uncaught(err) { - self.uncaught(err); - } - - // callback - this.on('end', function(){ - debug('end'); - process.removeListener('uncaughtException', uncaught); - fn(self.failures); - }); - - // run suites - this.emit('start'); - this.runSuite(this.suite, function(){ - debug('finished running'); - self.emit('end'); - }); - - // uncaught exception - process.on('uncaughtException', uncaught); - - return this; -}; - -/** - * Filter leaks with the given globals flagged as `ok`. 
- * - * @param {Array} ok - * @param {Array} globals - * @return {Array} - * @api private - */ - -function filterLeaks(ok, globals) { - return filter(globals, function(key){ - var matched = filter(ok, function(ok){ - if (~ok.indexOf('*')) return 0 == key.indexOf(ok.split('*')[0]); - return key == ok; - }); - return matched.length == 0 && (!global.navigator || 'onerror' !== key); - }); -} - -}); // module: runner.js - -require.register("suite.js", function(module, exports, require){ - -/** - * Module dependencies. - */ - -var EventEmitter = require('browser/events').EventEmitter - , debug = require('browser/debug')('mocha:suite') - , milliseconds = require('./ms') - , utils = require('./utils') - , Hook = require('./hook'); - -/** - * Expose `Suite`. - */ - -exports = module.exports = Suite; - -/** - * Create a new `Suite` with the given `title` - * and parent `Suite`. When a suite with the - * same title is already present, that suite - * is returned to provide nicer reporter - * and more flexible meta-testing. - * - * @param {Suite} parent - * @param {String} title - * @return {Suite} - * @api public - */ - -exports.create = function(parent, title){ - var suite = new Suite(title, parent.ctx); - suite.parent = parent; - if (parent.pending) suite.pending = true; - title = suite.fullTitle(); - parent.addSuite(suite); - return suite; -}; - -/** - * Initialize a new `Suite` with the given - * `title` and `ctx`. - * - * @param {String} title - * @param {Context} ctx - * @api private - */ - -function Suite(title, ctx) { - this.title = title; - this.ctx = ctx; - this.suites = []; - this.tests = []; - this.pending = false; - this._beforeEach = []; - this._beforeAll = []; - this._afterEach = []; - this._afterAll = []; - this.root = !title; - this._timeout = 2000; - this._slow = 75; - this._bail = false; -} - -/** - * Inherit from `EventEmitter.prototype`. - */ - -Suite.prototype = new EventEmitter; -Suite.prototype.constructor = Suite; - - -/** - * Return a clone of this `Suite`. - * - * @return {Suite} - * @api private - */ - -Suite.prototype.clone = function(){ - var suite = new Suite(this.title); - debug('clone'); - suite.ctx = this.ctx; - suite.timeout(this.timeout()); - suite.slow(this.slow()); - suite.bail(this.bail()); - return suite; -}; - -/** - * Set timeout `ms` or short-hand such as "2s". - * - * @param {Number|String} ms - * @return {Suite|Number} for chaining - * @api private - */ - -Suite.prototype.timeout = function(ms){ - if (0 == arguments.length) return this._timeout; - if ('string' == typeof ms) ms = milliseconds(ms); - debug('timeout %d', ms); - this._timeout = parseInt(ms, 10); - return this; -}; - -/** - * Set slow `ms` or short-hand such as "2s". - * - * @param {Number|String} ms - * @return {Suite|Number} for chaining - * @api private - */ - -Suite.prototype.slow = function(ms){ - if (0 === arguments.length) return this._slow; - if ('string' == typeof ms) ms = milliseconds(ms); - debug('slow %d', ms); - this._slow = ms; - return this; -}; - -/** - * Sets whether to bail after first error. - * - * @parma {Boolean} bail - * @return {Suite|Number} for chaining - * @api private - */ - -Suite.prototype.bail = function(bail){ - if (0 == arguments.length) return this._bail; - debug('bail %s', bail); - this._bail = bail; - return this; -}; - -/** - * Run `fn(test[, done])` before running tests. 
- * - * @param {Function} fn - * @return {Suite} for chaining - * @api private - */ - -Suite.prototype.beforeAll = function(fn){ - if (this.pending) return this; - var hook = new Hook('"before all" hook', fn); - hook.parent = this; - hook.timeout(this.timeout()); - hook.slow(this.slow()); - hook.ctx = this.ctx; - this._beforeAll.push(hook); - this.emit('beforeAll', hook); - return this; -}; - -/** - * Run `fn(test[, done])` after running tests. - * - * @param {Function} fn - * @return {Suite} for chaining - * @api private - */ - -Suite.prototype.afterAll = function(fn){ - if (this.pending) return this; - var hook = new Hook('"after all" hook', fn); - hook.parent = this; - hook.timeout(this.timeout()); - hook.slow(this.slow()); - hook.ctx = this.ctx; - this._afterAll.push(hook); - this.emit('afterAll', hook); - return this; -}; - -/** - * Run `fn(test[, done])` before each test case. - * - * @param {Function} fn - * @return {Suite} for chaining - * @api private - */ - -Suite.prototype.beforeEach = function(fn){ - if (this.pending) return this; - var hook = new Hook('"before each" hook', fn); - hook.parent = this; - hook.timeout(this.timeout()); - hook.slow(this.slow()); - hook.ctx = this.ctx; - this._beforeEach.push(hook); - this.emit('beforeEach', hook); - return this; -}; - -/** - * Run `fn(test[, done])` after each test case. - * - * @param {Function} fn - * @return {Suite} for chaining - * @api private - */ - -Suite.prototype.afterEach = function(fn){ - if (this.pending) return this; - var hook = new Hook('"after each" hook', fn); - hook.parent = this; - hook.timeout(this.timeout()); - hook.slow(this.slow()); - hook.ctx = this.ctx; - this._afterEach.push(hook); - this.emit('afterEach', hook); - return this; -}; - -/** - * Add a test `suite`. - * - * @param {Suite} suite - * @return {Suite} for chaining - * @api private - */ - -Suite.prototype.addSuite = function(suite){ - suite.parent = this; - suite.timeout(this.timeout()); - suite.slow(this.slow()); - suite.bail(this.bail()); - this.suites.push(suite); - this.emit('suite', suite); - return this; -}; - -/** - * Add a `test` to this suite. - * - * @param {Test} test - * @return {Suite} for chaining - * @api private - */ - -Suite.prototype.addTest = function(test){ - test.parent = this; - test.timeout(this.timeout()); - test.slow(this.slow()); - test.ctx = this.ctx; - this.tests.push(test); - this.emit('test', test); - return this; -}; - -/** - * Return the full title generated by recursively - * concatenating the parent's full title. - * - * @return {String} - * @api public - */ - -Suite.prototype.fullTitle = function(){ - if (this.parent) { - var full = this.parent.fullTitle(); - if (full) return full + ' ' + this.title; - } - return this.title; -}; - -/** - * Return the total number of tests. - * - * @return {Number} - * @api public - */ - -Suite.prototype.total = function(){ - return utils.reduce(this.suites, function(sum, suite){ - return sum + suite.total(); - }, 0) + this.tests.length; -}; - -/** - * Iterates through each suite recursively to find - * all tests. Applies a function in the format - * `fn(test)`. - * - * @param {Function} fn - * @return {Suite} - * @api private - */ - -Suite.prototype.eachTest = function(fn){ - utils.forEach(this.tests, fn); - utils.forEach(this.suites, function(suite){ - suite.eachTest(fn); - }); - return this; -}; - -}); // module: suite.js - -require.register("test.js", function(module, exports, require){ - -/** - * Module dependencies. 
- */
-
-var Runnable = require('./runnable');
-
-/**
- * Expose `Test`.
- */
-
-module.exports = Test;
-
-/**
- * Initialize a new `Test` with the given `title` and callback `fn`.
- *
- * @param {String} title
- * @param {Function} fn
- * @api private
- */
-
-function Test(title, fn) {
-  Runnable.call(this, title, fn);
-  this.pending = !fn;
-  this.type = 'test';
-}
-
-/**
- * Inherit from `Runnable.prototype`.
- */
-
-Test.prototype = new Runnable;
-Test.prototype.constructor = Test;
-
-
-}); // module: test.js
-
-require.register("utils.js", function(module, exports, require){
-
-/**
- * Module dependencies.
- */
-
-var fs = require('browser/fs')
-  , path = require('browser/path')
-  , join = path.join
-  , debug = require('browser/debug')('mocha:watch');
-
-/**
- * Ignored directories.
- */
-
-var ignore = ['node_modules', '.git'];
-
-/**
- * Escape special characters in the given string of html.
- *
- * @param {String} html
- * @return {String}
- * @api private
- */
-
-exports.escape = function(html){
-  return String(html)
-    .replace(/&/g, '&amp;')
-    .replace(/"/g, '&quot;')
-    .replace(/</g, '&lt;')
-    .replace(/>/g, '&gt;');
-};
-
-/**
- * Array#forEach (<=IE8)
- *
- * @param {Array} array
- * @param {Function} fn
- * @param {Object} scope
- * @api private
- */
-
-exports.forEach = function(arr, fn, scope){
-  for (var i = 0, l = arr.length; i < l; i++)
-    fn.call(scope, arr[i], i);
-};
-
-/**
- * Array#indexOf (<=IE8)
- *
- * @parma {Array} arr
- * @param {Object} obj to find index of
- * @param {Number} start
- * @api private
- */
-
-exports.indexOf = function(arr, obj, start){
-  for (var i = start || 0, l = arr.length; i < l; i++) {
-    if (arr[i] === obj)
-      return i;
-  }
-  return -1;
-};
-
-/**
- * Array#reduce (<=IE8)
- *
- * @param {Array} array
- * @param {Function} fn
- * @param {Object} initial value
- * @api private
- */
-
-exports.reduce = function(arr, fn, val){
-  var rval = val;
-
-  for (var i = 0, l = arr.length; i < l; i++) {
-    rval = fn(rval, arr[i], i, arr);
-  }
-
-  return rval;
-};
-
-/**
- * Array#filter (<=IE8)
- *
- * @param {Array} array
- * @param {Function} fn
- * @api private
- */
-
-exports.filter = function(arr, fn){
-  var ret = [];
-
-  for (var i = 0, l = arr.length; i < l; i++) {
-    var val = arr[i];
-    if (fn(val, i, arr)) ret.push(val);
-  }
-
-  return ret;
-};
-
-/**
- * Object.keys (<=IE8)
- *
- * @param {Object} obj
- * @return {Array} keys
- * @api private
- */
-
-exports.keys = Object.keys || function(obj) {
-  var keys = []
-    , has = Object.prototype.hasOwnProperty // for `window` on <=IE8
-
-  for (var key in obj) {
-    if (has.call(obj, key)) {
-      keys.push(key);
-    }
-  }
-
-  return keys;
-};
-
-/**
- * Watch the given `files` for changes
- * and invoke `fn(file)` on modification.
- *
- * @param {Array} files
- * @param {Function} fn
- * @api private
- */
-
-exports.watch = function(files, fn){
-  var options = { interval: 100 };
-  files.forEach(function(file){
-    debug('file %s', file);
-    fs.watchFile(file, options, function(curr, prev){
-      if (prev.mtime < curr.mtime) fn(file);
-    });
-  });
-};
-
-/**
- * Ignored files.
- */
-
-function ignored(path){
-  return !~ignore.indexOf(path);
-}
-
-/**
- * Lookup files in the given `dir`.
- *
- * @return {Array}
- * @api private
- */
-
-exports.files = function(dir, ret){
-  ret = ret || [];
-
-  fs.readdirSync(dir)
-    .filter(ignored)
-    .forEach(function(path){
-      path = join(dir, path);
-      if (fs.statSync(path).isDirectory()) {
-        exports.files(path, ret);
-      } else if (path.match(/\.(js|coffee)$/)) {
-        ret.push(path);
-      }
-    });
-
-  return ret;
-};
-
-/**
- * Compute a slug from the given `str`.
- *
- * @param {String} str
- * @return {String}
- * @api private
- */
-
-exports.slug = function(str){
-  return str
-    .toLowerCase()
-    .replace(/ +/g, '-')
-    .replace(/[^-\w]/g, '');
-};
-
-/**
- * Strip the function definition from `str`,
- * and re-indent for pre whitespace.
- */
-
-exports.clean = function(str) {
-  str = str
-    .replace(/^function *\(.*\) *{/, '')
-    .replace(/\s+\}$/, '');
-
-  var spaces = str.match(/^\n?( *)/)[1].length
-    , re = new RegExp('^ {' + spaces + '}', 'gm');
-
-  str = str.replace(re, '');
-
-  return exports.trim(str);
-};
-
-/**
- * Escape regular expression characters in `str`.
- *
- * @param {String} str
- * @return {String}
- * @api private
- */
-
-exports.escapeRegexp = function(str){
-  return str.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&");
-};
-
-/**
- * Trim the given `str`.
- *
- * @param {String} str
- * @return {String}
- * @api private
- */
-
-exports.trim = function(str){
-  return str.replace(/^\s+|\s+$/g, '');
-};
-
-/**
- * Parse the given `qs`.
- *
- * @param {String} qs
- * @return {Object}
- * @api private
- */
-
-exports.parseQuery = function(qs){
-  return exports.reduce(qs.replace('?', '').split('&'), function(obj, pair){
-    var i = pair.indexOf('=')
-      , key = pair.slice(0, i)
-      , val = pair.slice(++i);
-
-    obj[key] = decodeURIComponent(val);
-    return obj;
-  }, {});
-};
-
-/**
- * Highlight the given string of `js`.
- *
- * @param {String} js
- * @return {String}
- * @api private
- */
-
-function highlight(js) {
-  return js
-    .replace(/</g, '&lt;')
-    .replace(/>/g, '&gt;')
-    .replace(/\/\/(.*)/gm, '<span class="comment">//$1</span>')
-    .replace(/('.*?')/gm, '<span class="string">$1</span>')
-    .replace(/(\d+\.\d+)/gm, '<span class="number">$1</span>')
-    .replace(/(\d+)/gm, '<span class="number">$1</span>')
-    .replace(/\bnew *(\w+)/gm, '<span class="keyword">new</span> <span class="init">$1</span>')
-    .replace(/\b(function|new|throw|return|var|if|else)\b/gm, '<span class="keyword">$1</span>')
-}
-
-/**
- * Highlight the contents of tag `name`.
- *
- * @param {String} name
- * @api private
- */
-
-exports.highlightTags = function(name) {
-  var code = document.getElementsByTagName(name);
-  for (var i = 0, len = code.length; i < len; ++i) {
-    code[i].innerHTML = highlight(code[i].innerHTML);
-  }
-};
-
-}); // module: utils.js
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/bin/_mocha tilemill-0.10.1~saucy10/node_modules/mocha/bin/_mocha
--- tilemill-0.10.1~saucy9/node_modules/mocha/bin/_mocha 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/bin/_mocha 2013-09-15 15:29:27.000000000 +0000
@@ -5,10 +5,10 @@
  */

 var program = require('commander')
-  , exec = require('child_process').exec
+  , sprintf = require('util').format
   , path = require('path')
   , fs = require('fs')
-  , vm = require('vm')
+  , glob = require('glob')
   , resolve = path.resolve
   , exists = fs.existsSync || path.existsSync
   , Mocha = require('../')
@@ -44,6 +44,12 @@
 var globals = [];

 /**
+ * Requires.
+ */
+
+var requires = [];
+
+/**
  * Images.
  */
@@ -70,10 +76,12 @@
     .option('-G, --growl', 'enable growl notification support')
     .option('-d, --debug', "enable node's debugger, synonym for node --debug")
     .option('-b, --bail', "bail after first test failure")
+    .option('-A, --async-only', "force all tests to take a callback (async)")
+    .option('-S, --sort', "sort test files")
     .option('--recursive', 'include sub directories')
     .option('--debug-brk', "enable node's debugger breaking on the first line")
     .option('--globals <names>', 'allow the given comma-delimited global [names]', list, [])
-    .option('--ignore-leaks', 'ignore global variable leaks')
+    .option('--check-leaks', 'check for global variable leaks')
     .option('--interfaces', 'display available interfaces')
     .option('--reporters', 'display available reporters')
     .option('--compilers <ext>:<module>,...', 'use the given module(s) to compile files', list, [])
@@ -93,7 +101,8 @@
   var tmpl = fs.readFileSync(join(__dirname, '..', 'lib/template.html'));
   fs.writeFileSync(join(path, 'mocha.css'), css);
   fs.writeFileSync(join(path, 'mocha.js'), js);
-  fs.writeFileSync(join(path, 'tests.html'), tmpl);
+  fs.writeFileSync(join(path, 'tests.js'), '');
+  fs.writeFileSync(join(path, 'index.html'), tmpl);
   process.exit(0);
 });
@@ -115,7 +124,7 @@
   console.log('    list - spec-style listing');
   console.log('    tap - test-anything-protocol');
   console.log('    landing - unicode landing strip');
-  console.log('    xunit - xunit reportert');
+  console.log('    xunit - xunit reporter');
   console.log('    teamcity - teamcity ci support');
   console.log('    html-cov - HTML test coverage');
   console.log('    json-cov - JSON test coverage');
@@ -144,11 +153,9 @@
 module.paths.push(cwd, join(cwd, 'node_modules'));

 program.on('require', function(mod){
-  var abs = exists(mod)
-    || exists(mod + '.js');
-
-  if (abs) mod = join(cwd, mod);
-  require(mod);
+  var abs = exists(mod) || exists(mod + '.js');
+  if (abs) mod = resolve(mod);
+  requires.push(mod);
 });

 // mocha.opts support
@@ -224,14 +231,18 @@

 if (program.invert) mocha.invert();

-// --ignore-leaks
+// --check-leaks

-if (program.ignoreLeaks) mocha.ignoreLeaks();
+if (program.checkLeaks) mocha.checkLeaks();

 // --growl

 if (program.growl) mocha.growl();

+// --async-only
+
+if (program.asyncOnly) mocha.asyncOnly();
+
 // --globals

 mocha.globals(globals);
@@ -251,6 +262,12 @@

 var re = new RegExp('\\.(' + extensions.join('|') + ')$');

+// requires
+
+requires.forEach(function(mod) {
+  require(mod);
+});
+
 // files

 var files = []
@@ -270,6 +287,10 @@
     return resolve(path);
   });

+if (program.sort) {
+  files.sort();
+}
+
 // --watch

 if (program.watch) {
@@ -281,22 +302,25 @@
     process.exit();
   });

-  var frames = [
-      '  \u001b[96m◜ \u001b[90mwatching\u001b[0m'
-    , '  \u001b[96m◠ \u001b[90mwatching\u001b[0m'
-    , '  \u001b[96m◝ \u001b[90mwatching\u001b[0m'
-    , '  \u001b[96m◞ \u001b[90mwatching\u001b[0m'
-    , '  \u001b[96m◡ \u001b[90mwatching\u001b[0m'
-    , '  \u001b[96m◟ \u001b[90mwatching\u001b[0m'
-  ];
+  var spinner = 'win32' == process.platform
+    ?
['|','/','-','\\'] + : ['◜','◠','◝','◞','◡','◟']; + + var frames = spinner.map(function(c) { + return sprintf(' \u001b[96m%s \u001b[90mwatching\u001b[0m', c); + }); var watchFiles = utils.files(cwd); function loadAndRun() { - mocha.files = files; - mocha.run(function(){ - play(frames); - }); + try { + mocha.files = files; + mocha.run(function(){ + play(frames); + }); + } catch(e) { + console.log(e.stack); + } } function purge() { @@ -383,7 +407,14 @@ function lookupFiles(path, recursive) { var files = []; - if (!exists(path)) path += '.js'; + if (!exists(path)) { + if (exists(path + '.js')) { + path += '.js' + } else { + return glob.sync(path); + } + } + var stat = fs.statSync(path); if (stat.isFile()) return path; @@ -412,6 +443,6 @@ play.timer = setInterval(function(){ var str = arr[i++ % len]; - process.stdout.write('\r' + str); + process.stdout.write('\u001b[0G' + str); }, interval); } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/bin/mocha tilemill-0.10.1~saucy10/node_modules/mocha/bin/mocha --- tilemill-0.10.1~saucy9/node_modules/mocha/bin/mocha 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/bin/mocha 2013-06-12 15:25:22.000000000 +0000 @@ -8,8 +8,10 @@ var spawn = require('child_process').spawn , args = [ __dirname + '/_mocha' ]; -process.argv.slice(2).forEach(function (arg) { - switch (arg) { +process.argv.slice(2).forEach(function(arg){ + var flag = arg.split('=')[0]; + + switch (flag) { case '-d': args.unshift('--debug'); break; @@ -26,6 +28,8 @@ case '--harmony': case '--harmony-proxies': case '--harmony-collections': + case '--harmony-generators': + case '--prof': args.unshift(arg); break; default: diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/component.json tilemill-0.10.1~saucy10/node_modules/mocha/component.json --- tilemill-0.10.1~saucy9/node_modules/mocha/component.json 2012-10-02 16:03:51.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/component.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -{ - "name": "mocha", - "repo": "visionmedia/mocha", - "version": "1.6.0", - "description": "simple, flexible, fun test framework", - "keywords": ["test", "mocha", "bdd", "tdd"], - "styles": ["mocha.css"], - "scripts": [] -} \ No newline at end of file Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/mocha/images/error.png and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/mocha/images/error.png differ Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/mocha/images/ok.png and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/mocha/images/ok.png differ diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/browser/debug.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/browser/debug.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/browser/debug.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/browser/debug.js 2013-04-03 15:26:56.000000000 +0000 @@ -1,6 +1,5 @@ module.exports = function(type){ return function(){ - } -}; \ No newline at end of file +}; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/browser/tty.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/browser/tty.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/browser/tty.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/browser/tty.js 2013-05-31 20:47:05.000000000 +0000 @@ -4,5 +4,10 @@ }; exports.getWindowSize = function(){ - return [window.innerHeight, window.innerWidth]; -}; \ No newline at end of file + if 
('innerHeight' in global) { + return [global.innerHeight, global.innerWidth]; + } else { + // In a Web Worker, the DOM Window is not available. + return [640, 480]; + } +}; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/hook.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/hook.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/hook.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/hook.js 2013-04-03 15:26:56.000000000 +0000 @@ -47,4 +47,3 @@ this._error = err; }; - diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/bdd.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/bdd.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/bdd.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/bdd.js 2013-08-09 08:27:14.000000000 +0000 @@ -4,11 +4,12 @@ */ var Suite = require('../suite') - , Test = require('../test'); + , Test = require('../test') + , utils = require('../utils'); /** * BDD-style interface: - * + * * describe('Array', function(){ * describe('#indexOf()', function(){ * it('should return -1 when not present', function(){ @@ -20,7 +21,7 @@ * }); * }); * }); - * + * */ module.exports = function(suite){ @@ -65,11 +66,11 @@ * and callback `fn` containing nested suites * and/or tests. */ - + context.describe = context.context = function(title, fn){ var suite = Suite.create(suites[0], title); suites.unshift(suite); - fn(); + fn.call(suite); suites.shift(); return suite; }; @@ -84,7 +85,7 @@ var suite = Suite.create(suites[0], title); suite.pending = true; suites.unshift(suite); - fn(); + fn.call(suite); suites.shift(); }; @@ -117,7 +118,8 @@ context.it.only = function(title, fn){ var test = context.it(title, fn); - mocha.grep(test.fullTitle()); + var reString = '^' + utils.escapeRegexp(test.fullTitle()) + '$'; + mocha.grep(new RegExp(reString)); }; /** diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/exports.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/exports.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/exports.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/exports.js 2013-04-03 15:26:56.000000000 +0000 @@ -8,19 +8,19 @@ /** * TDD-style interface: - * + * * exports.Array = { * '#indexOf()': { * 'should return -1 when the value is not present': function(){ - * + * * }, * * 'should return the correct index when the value is present': function(){ - * + * * } * } * }; - * + * */ module.exports = function(suite){ @@ -57,4 +57,4 @@ } } } -}; \ No newline at end of file +}; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/qunit.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/qunit.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/qunit.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/qunit.js 2013-08-09 08:27:14.000000000 +0000 @@ -4,37 +4,38 @@ */ var Suite = require('../suite') - , Test = require('../test'); + , Test = require('../test') + , utils = require('../utils'); /** * QUnit-style interface: - * + * * suite('Array'); - * + * * test('#length', function(){ * var arr = [1,2,3]; * ok(arr.length == 3); * }); - * + * * test('#indexOf()', function(){ * var arr = [1,2,3]; * ok(arr.indexOf(1) == 0); * ok(arr.indexOf(2) == 1); * ok(arr.indexOf(3) == 2); * }); - * + * * suite('String'); - * + * * test('#length', function(){ * ok('foo'.length == 3); * }); - * + * */ 
module.exports = function(suite){ var suites = [suite]; - suite.on('pre-require', function(context){ + suite.on('pre-require', function(context, file, mocha){ /** * Execute before running tests. @@ -71,11 +72,21 @@ /** * Describe a "suite" with the given `title`. */ - + context.suite = function(title){ if (suites.length > 1) suites.shift(); var suite = Suite.create(suites[0], title); suites.unshift(suite); + return suite; + }; + + /** + * Exclusive test-case. + */ + + context.suite.only = function(title, fn){ + var suite = context.suite(title, fn); + mocha.grep(suite.fullTitle()); }; /** @@ -85,7 +96,27 @@ */ context.test = function(title, fn){ - suites[0].addTest(new Test(title, fn)); + var test = new Test(title, fn); + suites[0].addTest(test); + return test; + }; + + /** + * Exclusive test-case. + */ + + context.test.only = function(title, fn){ + var test = context.test(title, fn); + var reString = '^' + utils.escapeRegexp(test.fullTitle()) + '$'; + mocha.grep(new RegExp(reString)); + }; + + /** + * Pending test case. + */ + + context.test.skip = function(title){ + context.test(title); }; }); }; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/tdd.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/tdd.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/interfaces/tdd.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/interfaces/tdd.js 2013-08-09 08:27:14.000000000 +0000 @@ -4,7 +4,8 @@ */ var Suite = require('../suite') - , Test = require('../test'); + , Test = require('../test') + , utils = require('../utils');; /** * TDD-style interface: @@ -14,7 +15,7 @@ * suiteSetup(function(){ * * }); - * + * * test('should return -1 when not present', function(){ * * }); @@ -77,12 +78,23 @@ context.suite = function(title, fn){ var suite = Suite.create(suites[0], title); suites.unshift(suite); - fn(); + fn.call(suite); suites.shift(); return suite; }; /** + * Pending suite. + */ + context.suite.skip = function(title, fn) { + var suite = Suite.create(suites[0], title); + suite.pending = true; + suites.unshift(suite); + fn.call(suite); + suites.shift(); + }; + + /** * Exclusive test-case. */ @@ -98,8 +110,10 @@ */ context.test = function(title, fn){ + var suite = suites[0]; + if (suite.pending) var fn = null; var test = new Test(title, fn); - suites[0].addTest(test); + suite.addTest(test); return test; }; @@ -109,7 +123,16 @@ context.test.only = function(title, fn){ var test = context.test(title, fn); - mocha.grep(test.fullTitle()); + var reString = '^' + utils.escapeRegexp(test.fullTitle()) + '$'; + mocha.grep(new RegExp(reString)); + }; + + /** + * Pending test case. 
+ */ + + context.test.skip = function(title){ + context.test(title); }; }); }; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/mocha.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/mocha.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/mocha.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/mocha.js 2013-08-30 15:30:17.000000000 +0000 @@ -52,6 +52,7 @@ * - `reporter` reporter instance, defaults to `mocha.reporters.Dot` * - `globals` array of accepted globals * - `timeout` timeout in milliseconds + * - `bail` bail on the first test failure * - `slow` milliseconds to wait before considering a test slow * - `ignoreLeaks` ignore global leaks * - `grep` string or regexp to filter tests with @@ -67,12 +68,26 @@ this.grep(options.grep); this.suite = new exports.Suite('', new exports.Context); this.ui(options.ui); + this.bail(options.bail); this.reporter(options.reporter); - if (options.timeout) this.timeout(options.timeout); + if (null != options.timeout) this.timeout(options.timeout); if (options.slow) this.slow(options.slow); } /** + * Enable or disable bailing on the first failure. + * + * @param {Boolean} [bail] + * @api public + */ + +Mocha.prototype.bail = function(bail){ + if (0 == arguments.length) bail = true; + this.suite.bail(bail); + return this; +}; + +/** * Add test `file`. * * @param {String} file @@ -87,7 +102,7 @@ /** * Set reporter to `reporter`, defaults to "dot". * - * @param {String|Function} reporter name of a reporter or a reporter constructor + * @param {String|Function} reporter name or constructor * @api public */ @@ -194,12 +209,13 @@ /** * Ignore global leaks. * + * @param {Boolean} ignore * @return {Mocha} * @api public */ -Mocha.prototype.ignoreLeaks = function(){ - this.options.ignoreLeaks = true; +Mocha.prototype.ignoreLeaks = function(ignore){ + this.options.ignoreLeaks = !!ignore; return this; }; @@ -267,6 +283,18 @@ }; /** + * Makes all tests async (accepting a callback) + * + * @return {Mocha} + * @api public + */ + +Mocha.prototype.asyncOnly = function(){ + this.options.asyncOnly = true; + return this; +}; + +/** * Run tests and invoke `fn()` when complete. * * @param {Function} fn @@ -280,7 +308,8 @@ var options = this.options; var runner = new exports.Runner(suite); var reporter = new this._reporter(runner); - runner.ignoreLeaks = options.ignoreLeaks; + runner.ignoreLeaks = false !== options.ignoreLeaks; + runner.asyncOnly = options.asyncOnly; if (options.grep) runner.grep(options.grep, options.invert); if (options.globals) runner.globals(options.globals); if (options.growl) this._growl(runner, reporter); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/ms.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/ms.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/ms.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/ms.js 2013-08-30 03:01:20.000000000 +0000 @@ -1,4 +1,3 @@ - /** * Helpers. */ @@ -7,19 +6,28 @@ var m = s * 60; var h = m * 60; var d = h * 24; +var y = d * 365.25; /** * Parse or format the given `val`. * + * Options: + * + * - `long` verbose formatting [false] + * * @param {String|Number} val + * @param {Object} options * @return {String|Number} * @api public */ -module.exports = function(val){ +module.exports = function(val, options){ + options = options || {}; if ('string' == typeof val) return parse(val); - return format(val); -} + return options.long + ? long(val) + : short(val); +}; /** * Parse the given `str` and return milliseconds. 
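The ms.js rewrite above and below changes both the module's API (a new `options` argument) and its formatting behavior. A minimal usage sketch of the rewritten helper; the require path and sample values are illustrative assumptions, not part of the diff:

```javascript
// Illustrative usage of the rewritten ms.js helper; the path assumes
// mocha's bundled layout inside this package.
var ms = require('mocha/lib/ms');

ms('2s');                  // parse: "2s" -> 2000 (milliseconds)
ms(90000);                 // new short format: rounds to "2m"
ms(90000, { long: true }); // new `long` option: "2 minutes"
```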
@@ -30,52 +38,74 @@ */ function parse(str) { - var m = /^((?:\d+)?\.?\d+) *(ms|seconds?|s|minutes?|m|hours?|h|days?|d|years?|y)?$/i.exec(str); - if (!m) return; - var n = parseFloat(m[1]); - var type = (m[2] || 'ms').toLowerCase(); + var match = /^((?:\d+)?\.?\d+) *(ms|seconds?|s|minutes?|m|hours?|h|days?|d|years?|y)?$/i.exec(str); + if (!match) return; + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); switch (type) { case 'years': case 'year': case 'y': - return n * 31557600000; + return n * y; case 'days': case 'day': case 'd': - return n * 86400000; + return n * d; case 'hours': case 'hour': case 'h': - return n * 3600000; + return n * h; case 'minutes': case 'minute': case 'm': - return n * 60000; + return n * m; case 'seconds': case 'second': case 's': - return n * 1000; + return n * s; case 'ms': return n; } } /** - * Format the given `ms`. + * Short format for `ms`. * * @param {Number} ms * @return {String} - * @api public + * @api private */ -function format(ms) { - if (ms == d) return (ms / d) + ' day'; - if (ms > d) return (ms / d) + ' days'; - if (ms == h) return (ms / h) + ' hour'; - if (ms > h) return (ms / h) + ' hours'; - if (ms == m) return (ms / m) + ' minute'; - if (ms > m) return (ms / m) + ' minutes'; - if (ms == s) return (ms / s) + ' second'; - if (ms > s) return (ms / s) + ' seconds'; - return ms + ' ms'; -} \ No newline at end of file +function short(ms) { + if (ms >= d) return Math.round(ms / d) + 'd'; + if (ms >= h) return Math.round(ms / h) + 'h'; + if (ms >= m) return Math.round(ms / m) + 'm'; + if (ms >= s) return Math.round(ms / s) + 's'; + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function long(ms) { + return plural(ms, d, 'day') + || plural(ms, h, 'hour') + || plural(ms, m, 'minute') + || plural(ms, s, 'second') + || ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, n, name) { + if (ms < n) return; + if (ms < n * 1.5) return Math.floor(ms / n) + ' ' + name; + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/base.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/base.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/base.js 2012-10-02 16:01:57.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/base.js 2013-09-15 15:29:27.000000000 +0000 @@ -62,6 +62,23 @@ }; /** + * Default symbol map. 
+ */ + +exports.symbols = { + ok: '✓', + err: '✖', + dot: '․' +}; + +// With node.js on Windows: use symbols available in terminal default fonts +if ('win32' == process.platform) { + exports.symbols.ok = '\u221A'; + exports.symbols.err = '\u00D7'; + exports.symbols.dot = '.'; +} + +/** * Color `str` with the given `type`, * allowing colors to be disabled, * as well as user-defined color @@ -144,19 +161,21 @@ , expected = err.expected , escape = true; + // uncaught + if (err.uncaught) { + msg = 'Uncaught ' + msg; + } + // explicitly show diff - if (err.showDiff) { + if (err.showDiff && sameType(actual, expected)) { escape = false; - err.actual = actual = JSON.stringify(actual, null, 2); - err.expected = expected = JSON.stringify(expected, null, 2); + err.actual = actual = stringify(actual); + err.expected = expected = stringify(expected); } // actual / expected diff if ('string' == typeof actual && 'string' == typeof expected) { - var len = Math.max(actual.length, expected.length); - - if (len < 20) msg = errorDiff(err, 'Chars', escape); - else msg = errorDiff(err, 'Words', escape); + msg = errorDiff(err, 'WordsWithSpace', escape); // linenos var lines = msg.split('\n'); @@ -211,6 +230,8 @@ if (!runner) return; this.runner = runner; + runner.stats = stats; + runner.on('start', function(){ stats.start = new Date; }); @@ -263,48 +284,38 @@ */ Base.prototype.epilogue = function(){ - var stats = this.stats - , fmt - , tests; + var stats = this.stats; + var tests; + var fmt; console.log(); - function pluralize(n) { - return 1 == n ? 'test' : 'tests'; - } - - // failure - if (stats.failures) { - fmt = color('bright fail', ' ✖') - + color('fail', ' %d of %d %s failed') - + color('light', ':') - - console.error(fmt, - stats.failures, - this.runner.total, - pluralize(this.runner.total)); - - Base.list(this.failures); - console.error(); - return; - } - - // pass - fmt = color('bright pass', ' ✔') - + color('green', ' %d %s complete') + // passes + fmt = color('bright pass', ' ') + + color('green', ' %d passing') + color('light', ' (%s)'); console.log(fmt, - stats.tests || 0, - pluralize(stats.tests), + stats.passes || 0, ms(stats.duration)); // pending if (stats.pending) { - fmt = color('pending', ' •') - + color('pending', ' %d %s pending'); + fmt = color('pending', ' ') + + color('pending', ' %d pending'); + + console.log(fmt, stats.pending); + } + + // failures + if (stats.failures) { + fmt = color('fail', ' %d failing'); - console.log(fmt, stats.pending, pluralize(stats.pending)); + console.error(fmt, + stats.failures); + + Base.list(this.failures); + console.error(); } console.log(); @@ -360,3 +371,31 @@ return color(name, str); }).join('\n'); } + +/** + * Stringify `obj`. + * + * @param {Mixed} obj + * @return {String} + * @api private + */ + +function stringify(obj) { + if (obj instanceof RegExp) return obj.toString(); + return JSON.stringify(obj, null, 2); +} + +/** + * Check that a / b have the same type. 
+ *
+ * @param {Object} a
+ * @param {Object} b
+ * @return {Boolean}
+ * @api private
+ */
+
+function sameType(a, b) {
+  a = Object.prototype.toString.call(a);
+  b = Object.prototype.toString.call(b);
+  return a == b;
+}
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/doc.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/doc.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/doc.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/doc.js 2013-04-03 15:26:56.000000000 +0000
@@ -36,7 +36,7 @@
     ++indents;
     console.log('%s<section class="suite">', indent());
     ++indents;
-    console.log('%s<h1>%s</h1>', indent(), suite.title);
+    console.log('%s<h1>%s</h1>', indent(), utils.escape(suite.title));
     console.log('%s<dl>', indent());
   });
@@ -49,7 +49,7 @@
   });

   runner.on('pass', function(test){
-    console.log('%s  <dt>%s</dt>', indent(), test.title);
+    console.log('%s  <dt>%s</dt>', indent(), utils.escape(test.title));
     var code = utils.escape(utils.clean(test.fn.toString()));
     console.log('%s  <dd><pre><code>%s</code></pre></dd>', indent(), code);
   });
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/dot.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/dot.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/dot.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/dot.js 2013-04-03 15:26:56.000000000 +0000
@@ -25,7 +25,6 @@
   var self = this
     , stats = this.stats
     , width = Base.window.width * .75 | 0
-    , c = '․'
     , n = 0;

   runner.on('start', function(){
@@ -33,21 +32,21 @@
   });

   runner.on('pending', function(test){
-    process.stdout.write(color('pending', c));
+    process.stdout.write(color('pending', Base.symbols.dot));
   });

   runner.on('pass', function(test){
     if (++n % width == 0) process.stdout.write('\n  ');
     if ('slow' == test.speed) {
-      process.stdout.write(color('bright yellow', c));
+      process.stdout.write(color('bright yellow', Base.symbols.dot));
     } else {
-      process.stdout.write(color(test.speed, c));
+      process.stdout.write(color(test.speed, Base.symbols.dot));
     }
   });

   runner.on('fail', function(test, err){
     if (++n % width == 0) process.stdout.write('\n  ');
-    process.stdout.write(color('fail', c));
+    process.stdout.write(color('fail', Base.symbols.dot));
   });

   runner.on('end', function(){
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/html.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/html.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/html.js 2012-10-02 15:43:02.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/html.js 2013-04-03 15:26:56.000000000 +0000
@@ -28,7 +28,7 @@
  * Stats template.
  */

-var statsTemplate = '<ul id="stats">'
+var statsTemplate = '<ul id="mocha-stats">'
   + '<li class="progress"><canvas width="40" height="40"></canvas></li>'
   + '<li class="passes"><a href="#">passes:</a> <em>0</em></li>'
   + '<li class="failures"><a href="#">failures:</a> <em>0</em></li>'
@@ -56,7 +56,7 @@
     , failuresLink = items[2].getElementsByTagName('a')[0]
     , duration = items[3].getElementsByTagName('em')[0]
     , canvas = stat.getElementsByTagName('canvas')[0]
-    , report = fragment('<ul id="report"></ul>')
+    , report = fragment('<ul id="mocha-report"></ul>')
     , stack = [report]
     , progress
     , ctx
@@ -116,14 +116,12 @@
   });

   runner.on('fail', function(test, err){
-    if ('hook' == test.type || err.uncaught) runner.emit('test end', test);
+    if ('hook' == test.type) runner.emit('test end', test);
   });

   runner.on('test end', function(test){
-    window.scrollTo(0, document.body.scrollHeight);
-
-    // TODO: add to stats
-    var percent = stats.tests / total * 100 | 0;
+    var percent = stats.tests / this.total * 100 | 0;
     if (progress) progress.update(percent).draw(ctx);

     // update stats
@@ -134,11 +132,11 @@

     // test
     if ('passed' == test.state) {
-      var el = fragment('<li class="test pass %e"><h2>%e<span class="duration">%ems</span></h2></li>', test.speed, test.title, test.duration);
+      var el = fragment('<li class="test pass %e"><h2>%e<span class="duration">%ems</span> <a href="?grep=%e" class="replay">‣</a></h2></li>', test.speed, test.title, test.duration, encodeURIComponent(test.fullTitle()));
     } else if (test.pending) {
       var el = fragment('<li class="test pass pending"><h2>%e</h2></li>', test.title);
     } else {
-      var el = fragment('<li class="test fail"><h2>%e</h2></li>', test.title);
+      var el = fragment('<li class="test fail"><h2>%e <a href="?grep=%e" class="replay">‣</a></h2></li>', test.title, encodeURIComponent(test.fullTitle()));
       var str = test.err.stack || test.err.toString();

       // FF / Opera do not add the message
@@ -165,7 +163,7 @@
       on(h2, 'click', function(){
         pre.style.display = 'none' == pre.style.display
-          ? 'inline-block'
+          ? 'block'
           : 'none';
       });

@@ -174,7 +172,8 @@
       pre.style.display = 'none';
     }

-    stack[0].appendChild(el);
+    // Don't call .appendChild if #mocha-report was already .shift()'ed off the stack.
+    if (stack[0]) stack[0].appendChild(el);
   });
 }

@@ -183,7 +182,7 @@
  */

 function error(msg) {
-  document.body.appendChild(fragment('<div id="error">%s</div>', msg));
+  document.body.appendChild(fragment('<div id="mocha-error">%s</div>', msg));
 }

 /**
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/json-cov.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/json-cov.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/json-cov.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/json-cov.js 2013-04-03 15:26:56.000000000 +0000
@@ -80,6 +80,10 @@
     ret.sloc += data.sloc;
   }

+  ret.files.sort(function(a, b) {
+    return a.filename.localeCompare(b.filename);
+  });
+
   if (ret.sloc > 0) {
     ret.coverage = (ret.hits / ret.sloc) * 100;
   }
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/list.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/list.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/list.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/list.js 2013-04-03 15:26:56.000000000 +0000
@@ -42,7 +42,7 @@
   });

   runner.on('pass', function(test){
-    var fmt = color('checkmark', '  ✓')
+    var fmt = color('checkmark', '  '+Base.symbols.dot)
       + color('pass', ' %s: ')
       + color(test.speed, '%dms');
     cursor.CR();
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/markdown.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/markdown.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/markdown.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/markdown.js 2013-04-03 15:26:56.000000000 +0000
@@ -23,7 +23,6 @@
   var self = this
     , stats = this.stats
-    , total = runner.total
     , level = 0
     , buf = '';

@@ -68,7 +67,7 @@
   runner.on('suite', function(suite){
     ++level;
     var slug = utils.slug(suite.fullTitle());
-    buf += '<a name="' + slug + '" />' + '\n';
+    buf += '<a name="' + slug + '"></a>' + '\n';
     buf += title(suite.title) + '\n';
   });
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/min.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/min.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/min.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/min.js 2013-04-03 15:26:56.000000000 +0000
@@ -20,7 +20,7 @@
 function Min(runner) {
   Base.call(this, runner);
-  
+
   runner.on('start', function(){
     // clear screen
     process.stdout.write('\u001b[2J');
@@ -35,4 +35,4 @@
  * Inherit from `Base.prototype`.
  */

-Min.prototype.__proto__ = Base.prototype;
\ No newline at end of file
+Min.prototype.__proto__ = Base.prototype;
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/nyan.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/nyan.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/nyan.js 2012-09-21 17:42:09.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/nyan.js 2013-08-03 17:05:00.000000000 +0000
@@ -1,4 +1,3 @@
-
 /**
  * Module dependencies.
  */
@@ -21,7 +20,6 @@

 function NyanCat(runner) {
   Base.call(this, runner);
-
   var self = this
     , stats = this.stats
     , width = Base.window.width * .75 | 0
@@ -37,19 +35,19 @@

   runner.on('start', function(){
     Base.cursor.hide();
-    self.draw('start');
+    self.draw();
   });

   runner.on('pending', function(test){
-    self.draw('pending');
+    self.draw();
   });

   runner.on('pass', function(test){
-    self.draw('pass');
+    self.draw();
   });

   runner.on('fail', function(test, err){
-    self.draw('fail');
+    self.draw();
   });

   runner.on('end', function(){
@@ -60,17 +58,16 @@
 }

 /**
- * Draw the nyan cat with runner `status`.
+ * Draw the nyan cat * - * @param {String} status * @api private */ -NyanCat.prototype.draw = function(status){ +NyanCat.prototype.draw = function(){ this.appendRainbow(); this.drawScoreboard(); this.drawRainbow(); - this.drawNyanCat(status); + this.drawNyanCat(); this.tick = !this.tick; }; @@ -135,58 +132,62 @@ }; /** - * Draw the nyan cat with `status`. + * Draw the nyan cat * - * @param {String} status * @api private */ -NyanCat.prototype.drawNyanCat = function(status) { +NyanCat.prototype.drawNyanCat = function() { var self = this; var startWidth = this.scoreboardWidth + this.trajectories[0].length; + var color = '\u001b[' + startWidth + 'C'; + var padding = ''; - [0, 1, 2, 3].forEach(function(index) { - write('\u001b[' + startWidth + 'C'); + write(color); + write('_,------,'); + write('\n'); - switch (index) { - case 0: - write('_,------,'); - write('\n'); - break; - case 1: - var padding = self.tick ? ' ' : ' '; - write('_|' + padding + '/\\_/\\ '); - write('\n'); - break; - case 2: - var padding = self.tick ? '_' : '__'; - var tail = self.tick ? '~' : '^'; - var face; - switch (status) { - case 'pass': - face = '( ^ .^)'; - break; - case 'fail': - face = '( o .o)'; - break; - default: - face = '( - .-)'; - } - write(tail + '|' + padding + face + ' '); - write('\n'); - break; - case 3: - var padding = self.tick ? ' ' : ' '; - write(padding + '"" "" '); - write('\n'); - break; - } - }); + write(color); + padding = self.tick ? ' ' : ' '; + write('_|' + padding + '/\\_/\\ '); + write('\n'); + + write(color); + padding = self.tick ? '_' : '__'; + var tail = self.tick ? '~' : '^'; + var face; + write(tail + '|' + padding + this.face() + ' '); + write('\n'); + + write(color); + padding = self.tick ? ' ' : ' '; + write(padding + '"" "" '); + write('\n'); this.cursorUp(this.numberOfLines); }; /** + * Draw nyan cat face. + * + * @return {String} + * @api private + */ + +NyanCat.prototype.face = function() { + var stats = this.stats; + if (stats.failures) { + return '( x .x)'; + } else if (stats.pending) { + return '( o .o)'; + } else if(stats.passes) { + return '( ^ .^)'; + } else { + return '( - .-)'; + } +} + +/** * Move cursor up `n`. 
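For orientation, the nyan.js hunks above drop the per-event `status` argument: `draw()` now takes no parameters, and the new `face()` method derives the cat's expression from the runner's accumulated stats instead. A minimal standalone sketch of that selection logic:

```javascript
// Same precedence as the face() method added above: any failure wins,
// then pending, then passes; the neutral face means nothing has run yet.
function face(stats) {
  if (stats.failures) return '( x .x)';
  if (stats.pending) return '( o .o)';
  if (stats.passes) return '( ^ .^)';
  return '( - .-)';
}

console.log(face({failures: 0, pending: 0, passes: 3})); // => '( ^ .^)'
console.log(face({failures: 1, pending: 2, passes: 3})); // => '( x .x)'
```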
* * @param {Number} n diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/progress.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/progress.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/progress.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/progress.js 2013-04-03 15:26:56.000000000 +0000 @@ -41,7 +41,7 @@ // default chars options.open = options.open || '['; options.complete = options.complete || '▬'; - options.incomplete = options.incomplete || '⋅'; + options.incomplete = options.incomplete || Base.symbols.dot; options.close = options.close || ']'; options.verbose = false; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/spec.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/spec.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/spec.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/spec.js 2013-04-03 15:26:56.000000000 +0000 @@ -58,13 +58,13 @@ runner.on('pass', function(test){ if ('fast' == test.speed) { var fmt = indent() - + color('checkmark', ' ✓') + + color('checkmark', ' ' + Base.symbols.ok) + color('pass', ' %s '); cursor.CR(); console.log(fmt, test.title); } else { var fmt = indent() - + color('checkmark', ' ✓') + + color('checkmark', ' ' + Base.symbols.ok) + color('pass', ' %s ') + color(test.speed, '(%dms)'); cursor.CR(); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/tap.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/tap.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/tap.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/tap.js 2013-04-03 15:26:56.000000000 +0000 @@ -25,7 +25,9 @@ var self = this , stats = this.stats - , n = 1; + , n = 1 + , passes = 0 + , failures = 0; runner.on('start', function(){ var total = runner.grepTotal(runner.suite); @@ -41,12 +43,20 @@ }); runner.on('pass', function(test){ + passes++; console.log('ok %d %s', n, title(test)); }); runner.on('fail', function(test, err){ + failures++; console.log('not ok %d %s', n, title(test)); - console.log(err.stack.replace(/^/gm, ' ')); + if (err.stack) console.log(err.stack.replace(/^/gm, ' ')); + }); + + runner.on('end', function(){ + console.log('# tests ' + (passes + failures)); + console.log('# pass ' + passes); + console.log('# fail ' + failures); }); } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/templates/coverage.jade tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/templates/coverage.jade --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/templates/coverage.jade 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/templates/coverage.jade 2013-04-03 15:26:56.000000000 +0000 @@ -34,12 +34,12 @@ tbody for line, number in file.source if line.coverage > 0 - tr.hit + tr.hit td.line= number td.hits= line.coverage td.source= line.source else if 0 === line.coverage - tr.miss + tr.miss td.line= number td.hits 0 td.source= line.source @@ -47,4 +47,4 @@ tr td.line= number td.hits - td.source= line.source || ' ' \ No newline at end of file + td.source= line.source || ' ' diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/templates/menu.jade tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/templates/menu.jade --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/templates/menu.jade 2012-09-21 17:42:09.000000000 +0000 +++ 
tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/templates/menu.jade 2013-06-13 12:45:20.000000000 +0000 @@ -10,4 +10,4 @@ if segments.length span.dirname= segments.join('/') + '/' span.basename= basename - a#logo(href='http://visionmedia.github.com/mocha/') m \ No newline at end of file + a#logo(href='http://visionmedia.github.io/mocha/') m diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/templates/style.html tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/templates/style.html --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/templates/style.html 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/templates/style.html 2013-09-15 15:29:27.000000000 +0000 @@ -79,8 +79,10 @@ border-bottom-color: #ddd; -webkit-border-radius: 2px; -moz-border-radius: 2px; + border-radius: 2px; -webkit-box-shadow: inset 0 0 10px #eee; -moz-box-shadow: inset 0 0 10px #eee; + box-shadow: inset 0 0 10px #eee; overflow-x: auto; } @@ -89,8 +91,10 @@ padding: 1px; -webkit-border-radius: 3px; -moz-border-radius: 3px; + border-radius: 3px; -webkit-box-shadow: 0 3px 10px #dedede, 0 1px 5px #888; -moz-box-shadow: 0 3px 10px #dedede, 0 1px 5px #888; + box-shadow: 0 3px 10px #dedede, 0 1px 5px #888; max-width: 100%; } @@ -126,10 +130,19 @@ -webkit-box-shadow: 0 0 2px #888 , inset 5px 0 20px rgba(0,0,0,.5) , inset 5px 0 3px rgba(0,0,0,.3); + box-shadow: 0 0 2px #888 + , inset 5px 0 20px rgba(0,0,0,.5) + , inset 5px 0 3px rgba(0,0,0,.3); -webkit-font-smoothing: antialiased; background: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGYAAABmCAMAAAAOARRQAAABelBMVEUjJSU6OzshIyM5OjoqKy02NjgsLS01NTYjJCUzNTUgISMlJSc0NTUvMDA6PDwlJyg1NjYoKis2NjYrLS02ODkpKyw0NDYrLC04ODovLzA4Ojo0NDUtLy86OjwjIyU4OTosLS82ODgtLS8hIyQvMTEnKCooKSsrKy0qLCwkJSUnKCkrLCwpKiwwMjIxMzMqLC0tLS0pKissLC00NTYwMDIwMTQpKysoKSovMDEtLzA2OTkxMzUrKywvLy8qKyszNTY5OzsqKiw6OjswMDExNDUoKiozNDUvMDIyNDY1Njg2Njk5OTozMzU0NjY4ODkiIyUiIyQ4OTkuMDEmKCowMjQwMTErLS4qKywwMTMhIiMpKiopKy0tLjAkJScxNDQvLzExNDYyNDQmKCk5OTslJig5OjskJSYxMzQrLS8gISIwMTIoKCk1NTUlJSUnJygwMDA4ODgiIiMhISI8PDw6Ojo5OTkpKSojIyQ7OzsyMjIpKSssLCw6Ozw1NjlrfLakAAAg2UlEQVR42jR6i3ea6rYvPgANIAhVXh8WvkQlioUiFlFcBtAmoiRNdzxqu9p0J7vrdK29zuPeex77nnvO/35n1r1ndHRktI0jTOacv/l7lCBK5UqVpOha/YxmWK7BC4TQFKVXrbYsnimqxuuMVlOQ0XltWjUdCwRJ1M+tC1KudOs9q6+da2adUewG0SC0SwELfHtgDds93VEuydEbl3QMWeNoYkR7b/0x1ZRobGI3mLwzAhePqTAwhg6aogjNsGy7/jwQ4rkdqe7CWLxF8k9LfMVFyRS7VJqtkrW8Vt/bkR8FZJao16ipknbC3Yw2lM7laO6HBEOadEZ2tpf65c4v8e3u7FyU6qbiNNyCuzXZ6pawgnwgmrpTT/Q7w2EZmiIJ0dzWDI7mhQ80IfRnMu2kzA5r5r1pIFoia+/d93HRYp1GV8TbrkWoU/+jdI0Ff6yGwTjT1Hn8J+8m1rKpGiYPuNiHnMtNMIv+zpsk84MYTNW1/+DpwXLvckdOCMYowVNPREe0QlM8xRHXXFhcNDzupwsSmb5pH+0t0RP2Qk+QtI7F1Qm6JRC6ZPBtPq/dq/kH+jxtCljn9TIpW6rQIgmSVyj6lPICIw4N/taka41PFUInth0je9+jO6Kt1G4/a7V2LEgG02B0pHVuCZrgltSKMuIl5SyufUv9mYuQi+mFgzbBEtFo2g+Dh4sSTrLNu8JPh00sQydpb00tqXBvqRN7Q7kqzcnIxCGnvZt/WmJacoOEO6Dcn8Qre03pOCSQxbMOXUuDNx9SxuLz4W1I18gvjViQ67zV0rxdWL8Te/TQkuo8STS41DR48W7L6YP2uWIqiUV8rd6Gbf/rnegKZeG8TpAM6afhGze9JAOxbLjsnUXEbrZ9vLYd7MT32cPF5mKKxmjy7huaoD9n62GOxni3iIJwv0IzZAZjdZkUtolCNLVfYZNaquFjGszVVf+J0vrz4CawoKdHnOzb0NMH7CDBOybfYNJ4rfeMyFNjkFYVTzMFs87rnPGXLUOeNKRVc0LnU7/UIgelzsy3CMuth0YfvnY0wsD3vODUL3eJcKqHQpm8yM3XZQWJxO6Un9iYloyyLpOwN2obHy6W6gbpcb44XmyC+mg+itAcaprGcrwZCqMj/GmtKn0zPvpTz/Cv1dw21XwP3cRupg3H3MF/S71eTKj1YrdwKdc2Mw0fRmb2sFf8lW3aU6JbIZSEPqvXvjM7G/aApyXlXeqKfMq0g/Su3rUGJPSPrtGElgknrZM3xUXqsAP6zMCNVn5u8aJnSNpJv2uru7t2jfRziW2+GuhqfldUNbPk71olwo+46ePUo1U3WKk/e5YK07F/wGRgcpODmQnIlVeHCWBE4puBi2jq28UKpqiN1/4UOrGz59TNYrrQHtd+11sG
40BGD+pXdelNqGOg4NXe8W4eacJV/NS9/2Umtym6WQqveqR9xdCMElpxnbkalM4Vf9uaEcWZaKdyibEIjWKxJZPN95niCL3GiaXyssIrHxoLkqkzLCXULN46/f2h3tQJgyip+Tk9EAjJ9aJshq7t8X45aowSKspMSvPf7r9R8yxNptIaHS5ozuEm6luPDApugyNP8OaqiQ4BjaequXA54SLC83eHIY2r+CZp4409Xqw8Aa2oI7XkCrQi+in0w5AqF/kLNrcUz+qkl/lAobY1jSnx5OJNhyXIz3qfNFlXc0TKaglNwdWkWYt9QQ1Kr6W8zue21iNrdJk+N5oCr2O9nEtWKC7IS5J/zdDEYrmnAYfg6agCy+qcgz7ZofeDc4PbUWSvkshWuAc7OjiUyLkj+RAtdlwXJcjxdpkTTHDhK8lBCi8+JtvDVL1W6elmOM++YS0LuSlaP1oUvAeiW3cFnvTr8EbTz1tsSMYdGeZe40sRWu5uAfj7q+ZoKv2FNQ0p5XY1lmlcigHZqTPpabufEVrNuNPi165w3uCVQJHyJqmSJ7ZHnguqwtCmwViIJijj04ba2JNYtB+yORf5gg1/9t9iw4vUpeqiunSAbf+IBdj/b+iG2qrHvuNP0Vd/+ThVZT/lrvHYjjgDbbyxaqgHNM2uhxa1GW3UedZYhMMwM4mQhltouK+IV4NdbIQNM+8Yv311RZk9kT4tiYR4LkyFcuPpdcjuhUuFqBAWRZa11lcZ3gEBlXywsNhrt+plISZP5DlsV9l4EgY6J3yZPTUcMrgaWAT3oI79eSbGEbcJpr6BD8kyDiVt+G0/hXosQN4NFXKlfWIfsIs0BHODVok1/IGnKFHJYIquh8Xo+2+bkQNTGgWmN/fZ0Y33LSj6lr1GyV7mWIKg7ZTRZPGuhF/zjRNcQ1UPtSYgnWQxSs0yrVhwNDcdGMNSNe2JT3WuzbAM3HykyAajS3Uphf6STKEqxLas9EnmnhA/lyj9Uj+JoY7SVgVmGLl46Rm2u98sbkap2lzAdKBG4r6LgulQOSSjQv1GWdQ0jtDUK/mAaqM1Uqjpu4k3Rvfvxv7YTxLSK+wN3E5jVIzmF23uZ7hiH/sVP49D7tvoKp4S8b1LuvRlivVB/algbhcFITYVXvDpLzpDfplR2uD5V4XJFxpjmIpLc9Y5sB2TpBRix7Bme6GZIq+06v3XzNeTcA4obQIKxrnT4C2JpOqD92dbmSX8MGazly5EsZVMvSU1f4RZwyu8iQXbVdeLlZrjuTT1jrY1uk5c7iZ7RsvhhluqAkq4JpVQAg7RJFtSu+xgJ8Pv6O1j5DkLxT8mkbfyRW5DrQmG7hiDIjCgBsADbjuof6YHLGeV6a5Q1Smx9joUXPpdaaDx97A/Wq00oJkdR7ZYuQRfS533JtxO1erduqWOYIt3wh0wpbLuCNIYkwxbswbikCUu2CDCS+Q+7rgVtfRcm+SOcdKPRlZ/rE7wNVUEE39KTS5uvUKN1PUnkloPkyzhyGQ8qkouEjJ3H/VXdqG6asSRiw3ecMlBvDDt8dDhBHXMwZ2Cajzjr7/76T+IavqPYvz6r7//E/3X3+N//h/0QozbjPgPiir69P/8X3/9F/yv8b/827/++98WItPu5/Hvwd8YPf5bp/2/lX/T/+Of/0MJ/lYTa+L/Ef+d9vN/3/2T6P/+jyTzu/evf6U7vxN7B6pJkRtAF6jUr8I+P8RsP/ptGhfqFk+pQ/DgAy6NJtRYJdXmp4gK7WLqLKJ+MaKhGjOojvL+SnIWrkpy0SLHDe4QuyNzaEA15mLMCcmE8Em+4HdOihW4/ZWuppJEmzeAwcDtv7MuLc9y2V5atvxXNe3S4DUMt5/Qy2LM9kSYKiVWBuKlfp4nxTntpuW03JbIlkiRvBXmT23g1I2OYe6IizUHPIq6zm6mbfsbteKmi/sg9J+ocQBMctGFO7iljo8TPN+z3jxw4do+ZwfqoR9dkNTKHyM305GpTkfhcHexVkPVGEbUOjuo9f0UMPHBFlGEx0SLvJvVRKTwW7PSew5oPme+E42+frJa9cGt2njS3dK5kIif2eYbhuSEQXEqMVfUjhGIuin0G0/W5ezJyJQy3SpMLai4M0JUWb5u1k9tny5bd1pPwYBpQuDCXZl62xg4CdVEAtflXHs6JKmP/pH6mOl796Lgopj0o8d5kKh00hxG3OSdEE/QBo9Hgr8JJqAeLDwJohG5j/DGh61Rc/+tf22/8kEnxHNCEjo0ElvvGfESZkqmz2BDcKV1H1buSkhkdg7p1IMGs2s17nYjpblrWuE2K9WEO/hcRp5e9oOF/QBmOaDtgil+oaU6szPrdwW65fOB0KUTsVUn7LFU7J8e6cxJIl9+FHw5MQMzuQJ+4oxMH3iW/5GK+hWuG0T+gTLs+fAjdtUd58TmIUq04EeyRCYCjkldow234aIgR5bqwrtZosZ+6YEqAmDqatJ9lWasz4IquKALPtd92hGI3Z2BdzzZue+REl1Om4DIWD+RrtUTOJLI+S0jHowXXdAxsGLSd40zYNuEUlOGhrwL6c7tcOtUOvpJCP7QBQS19H+GvZn05ewjlVLz+IGKoC9TyfQjLMBNmXCuqqtTdOSukZW48B0HqgSTCBrBnlFvF4CG2Su7yFzqmJFURK3UmTT3ru050r0ptUpMilYnBJWfl2Bv6kPlUuE1kxxpdzui9AubsR2N2boVSu81OulAwBqoSr1LZ0LLYOomyZHmjqnXlP72s8LnDouEJjtodBvdHaG1jMySYO7crWd90MpCRyCG14vb5IE7Arupw/y/RcCm/Tm3zK6zYj8PYNaGldiUfkB/LHWcmf2lVM+mwyU27a0qq2tscrQ/vzBjN26DnntIrOyGizzXK35yKQdYnUABkyN4saz3WD/viF+eCcsXnIajdWYJWaYHRstIis9CS+tqnFGmz2j5uzfr3Z4prqgK4XOT/PyftvjZqIm8lhkfxJ7Ol3CJF1piYBGAG8wtAk56Drw1YwmOpcz+NdfkSpSLplRXLXHL0Rquj6YW/gabqgK7Dgr6NwtH0B/AN7XrN+MVJ6AmXmUuqmQulrNNYPmH0RoDogydOKLo/QbfYNARSQQKISRCzRXU+q9WWJFL3LZW6u34CkeG97xC0NNGaJ0bvK6SnZS3zPskr5EtuCgjMWR5o2x5BqhKmDWJPRe7JMEOyRb5uUKlHaGVtq5ivSOaSliSXp9SQm2qk8MRJh10MAp9QQ2H5t59J8rjiwSZtoIfMGjlLPVNdYl/LBR0AO6WLGDmkLkIPRE45Y9MftdAK/yNu1Hn6tzOQTesgQ+8fSzB19wO91vCnO23vOWQdwJ63SJrYjdfKFW6W281PKs2k8iT9ai1cgJ4sa3xqdvmtxR8/+D1B8AKc2u+6JftryRhMWSQtoSBgIyyQGyxcnELuAasXN12oSriU4RMz1DD6RL0TSV+om7i1Yt+jEE/jnawM8cX/UhN4nkiv/w9eALrzNhXuQfOzFL0Fi6SjF7/4Qn8rLYBoa85cvgAnkCEBP+HPbEnquVXCZsMS/yzYw2Vru60P/+
nJPYKkzZFjmbykzUoEqV836T5q3fP/L383dF82tx18/AZgZczMAgyeWYKmSZIqtHL+e+O4ZRcq9VI3g/qPeCoiK4pcgEqdbS0S/Be54sbVQOuJVPNBblIghzeasNu7h/g+Sz1IdhI5lCwq1nUb3Ji4OCIcqQZqtqJ5w7rXrg/DA9IgVmEGhDgGecEwnCTHffXcXs0V3OCEVzYDKS1vp/oX+ng+6XVU86UjA6FMO2RXOOOrqY1GgPvrAk9HV/BXtCu5RuwF8qgdGDLsBcui4E33ymdBip1X8uKyhIWT8qNRDsXz+gvO9UiEC0d8RG4Tf2x8H4slljgHtCBcxHLTWOYJm5H/fCPCzOgf9qgOUxTRZ0Pc6ha5yLuLVT9ntvIa6gacE99mCovdUumTQdRP4RPsS9129eEe2uSvvGh0bV4Y3QPPhPZMqhZWSMa5R0Hc1SGO4IVOQc0FrirlibTVfKRrYkD8kz3b+X65/QkUNaZdrdl3mCap0Hf3YcCw/LiouJYNbqz88UqeDYv93yO7vvXtgl4XCyAO4ODkY6W+83+LZU//p3/zXNGGrUKClCiOnL27iJZbNWDF02XXAOeFlB7IaADoMH1Yqr+UP9biyZDEa/iJt4MDeIz6GKTdLVBfWGVtRN4fdT2rgReX8UXwF2zOrradm4J0nyTgdPnai3RvzpZvCKDUqjOwD/QA6EDaMCLewX6QWYVnHY1sx1bd8ovYnPm1ZvPH+rE20lWjOCnZ66/xDt0QAl15FjfBcZp+i9OU0RNPQ0t3x2pSNWo8eiYudwsnuP1Hq6iH1LJCJynkYsfgJ0p3pF6SoQk2l+jqE8CPk+ziGJRSKjs+W5AO185umPdkYzlK4wl7TC9NxyyDP7ZoyYVoXiuS6SjnInlLWrwz1i8bGTKXX0AVQWkSfIlglW3zRJRJ8bg5VgE6ZEnqNu9B++0GNQvDQJvFize4ESNKBJP+8vA3LM4AX5SIBq08Mob+7QMTCZx4nwP/64+4BnlZC+8WtlP/CXw6t1PwMwkJ3jhP1FiXLhDF/3I6FGUzO2DSi9ABxKyyL9paZxSEz40ZCPQToDAJu1959k7QdbVxgB4icsu2s4zsTPJhcEDo+N1GX4zSk/wriRh8AqwL62972i9HJHd1ydaLXVzvKvOfGGw5RVcUVMiKXFH4APdkQU/dc5BX0YfKTNZYXCW9mb8bc8mufoQP6BbdQmT99ZjoYfr/go4TgQX9IDgztim7wyFeGMfbNaeqj8Dzs38pgcqwSv2hbqB3oSGKWKy+sesY7p57wAHldqE6NDudk/W7s/zjrK4rZFlFvaGxnSZdHbc1y47qDN6xkoK8O3bfr2j41dlJZ71rB4dlDqapPFa8N6xBrprUdtenUCHwxKNhw1uuTBh+9uU45k4REpQABN2bAO9DSLqoIL26gNroWgup5pUMxHUNSq4Gyz47vBPvilpo5f9OYI2ddAqTqmnxXERxQJ3UK8fHbVE9HagHi3+tqNRoNsArdmAxHA5LwtQo9ZAaNKUTljnokljo2x8scqVpEEIPc01fPCdHOCg0DeWBz8D5TVAAfx8aRH5X2ZYNI3ebKDZdeJ+oBDAxmRqJ30Eh2/DaeAy5diVNMpEDmXiPDsGTzBLXy8eVDdJoIafgx/gxMyQi454QrW56nCyeELgSuNNEmYkflF+t3CZQOVRWjKhIuCclmQSlAXT3+4JGG75B4t/5hQ+ldMP4LsAW6z3XmU6IJJwpnGVnsgUZhoY1fZlwTR8wSU7xRejf2uCx9Z5trVTRRJP9KnEb134dEieil6eCOGWgboI7xsqsqM99jfJLTePjygKlH2CVxxsse9QRzTBFjD/Kjqitr/CCTBt/SJ6nLxz7cKP9pFqBpp0lN5y+adKNsZjrPuroemZauH9aTTFD3EKHW8S55XBLFQAt1jgxTQCTwxmx/JyfsZDN1RroN3VaxpSenpIX7K+ZbL8VdlQDcI4Cbzg3QJLa9yVqNxUelu+EtxLVqeekaAvSJkO6sSVqbUajxqhKshNpvZqoeApF0k/0P0ikkwUcbdwc4A1ejN7Oo0O15kG7hTMoK3hZRBCX7YYeLW0wvcXx/18n/u37yLgzBYVBUvORGli+sfRcX/74uD6P4hq+7xu54TlWJLFzT63uwUDwuEDdOjJQqx7JV+ZjaEAPi7t0MMrR4Q8Rkf18uxD6RK0RKh0hL8YU+DeL97i4pa5ZSyAfXKwZRS/8gXcxdZXm62RBDj8U3sN8x95b5PpPs/mCBKYvpaA50pN5Ct/499AFTtwQ5vgeSh+NHrKIi4NVpwM/XzRaNfJD856lPE6M21zWPguFsH7jbLVyEDfRmt4VwrhCJ5VTYmcSPfGgO5clfN+vbaDZ7sakU5+2vZ2WCDY031NxJarVytfDDVtiafcTGO2rJ/taoL3zChN2qmjxofczTOYQPPVQPh0JVtYgdUQINcSiNEEy58UdYXX1MpWUCEBx7LbcGtAm8XWRQTVOaoV3ySri4RShhs/B/0m4jX6OAwXOvcA09bNSG4czEGv/Wey6V/jbTCNTW6awXdNTcA1GsPe1E9fZdGl7R0vyoVpIdJtfC6d32NNErrvq/R+d65VG+YOwRXppXxOCYyGNSf1K3x6VxAW/vtz4EC1SgCOSPdN62sLsoIzuDfg8GwZAbquVO8HIuFP/ToVoeUB7nnwMF35a1wK1tI6fkrqFKhQdeJpwyls0pIy8AZde3/6LUUbFaYJthyUJSU/kqDXTLQElnn0Jr4B2RVghNrmNmoEn7pXIeshPguXVsvwoTdmClq49JJU3LWhHyWTrJL9bRP6VKv3tZoA/th77p5Jw++OEENvyvWy/pNeExiDUVQaXIRGh8xySZTI36yueFaSXo1uJY0RnXYgEOoWWOJHeaVuX/bGNhHsh2yinznl/++NJcE9j6fBPRcBdq9hb8awNw8U7Bl6GM7x69EDOIIbX/npZ++amlHR9L/35mE/2Ss4gb0xCcY4VyTFLRE796vHysLAamqcyO+aFQyJIDBNslbH2/MrAvZiSEIedc/cqjmv4fbda2pXbv+F5a2szSsdkm9noiNURXt8edUhGUF6fSZWd1IJaXKFwD+49R6eCXD4Bkef7j9tRtNMVgW8BhRz/Qpy1TmeYk0doyjZoJSbePOReVHgkFsCFuQJ+Lgc4BxeAsK/cOiNDRmdNw0ctYhn/nQ498dYI5znzGLoJi1rav7Cn88rL3wLePVtDK5gl77Tki3gHEsIAQ2+IKgarj7Y8W1IQzV5V9N+0TjLqbg68WfKcOmBCOj3JkwJhVIkwDhc+JorXuZEPMEh0vvH3x7iqf+VAwXgd4diZiaJD1zHL9Snx6Wfg4IugreyhabQkcir+y5XgDtdx3Avs7lkeeCBwDvZoTUCXx5QrZkcEqWfYEiEYRs/EphmRALSNGR1Iclgdr5VFoELpzF4++f35w3/j0t5ucW3n2ch4PQCLuUXupsPRR7UA5FjSKrMtPcKAZJfagO4lGE7FH3YKMjorpK0ZxAv+i2JkJhtAMWWWFej4RhPR/cJ3DxwocCvXDi4SG
ZU4cu+K32XndiFWgopAl+0GApcwf1XvymJcFs39jExIBO4yUjU9MExBLQYc9H+W7+IgdESPRpciT+rKZPebVtaVq+1GYO/5xTAL3HASjNTGIgMvdjWbgc7JvdE1zIFpuC0U9ESiZyzBixzxWxj4Kwh8My34q+FK3KNLtmsA1qyrmKSNQOXCPUZd+ONelBTvFoUI/CYsqa/RhtKiyMf2CgSFqEPk59Y3uqnlZ8gFpswfSYyko23yVZYxzKGxGm49Zqxg1l8oz5Ra9XaRwHkuxepmgyhm0SoNy2KlbcEqK+9QqS9PNx9Ihm9U7gsR55SSJ1FBDNnkuWKxIZ0SDpXuOGwZdoUbOMDPHP4vBAgz2VlSEJAHZGJVbYIg7l/FO5KfIVvxC8pPPxMGcNMoevFDeStt2iqztE10n2TA4dgJH76YS9HDhKHD3iCx6ieFX84BAI3QQnngh76f5ruPQVbr5qZmck/5UjDc26lfrOvUBWy0Ogl8bCoOkMOns81TnC3cuUS9KW8+9A+fe3XYZOFUPG1u5epSSmDLw0s5s2F0W30ANeo+zJkJQz9SPZgzwYpEoktofhGVfmLOAB20boCbW1QWq/NpET/hnMecw/uSyAH4NJc3ECOU4nnkK1fj3S/i5dwb3R7k00AqQQUwt7Ie1qV0aY/VQX0J8hLPy7eBNXMHYZYDNxHZ2Qh6AuXJxq+AeRec/Q+JLhZV6hpXwQEzw7bf5v9uUf2vpq3qlhmy0IIGTkwYdCfSAFmqbdo+3XvDTDjFJde0mbeQLcn2n31xaAqJ0ixO/CLsT4I4G4DoncVTgRGNBtsCcjISWT+oeXZ4Iedw/8OsJI1aPnNKLX/60VvcZb94uasRxCkqlPQ11u1Sa2hHvB80WQENxVyzjns0/PiEByyil21Te6oisk3mNCEMrhouCFO3yEZTHHOCMy9eb/4Tmi8cVf3Lf7P53SY2hX3PSN033As3ETIMLHWumWEO9JXHA2y2SIBlIPpLGG2qvNsCIlIr+B1SWAqRKm2w6Blf7U+zCSBwJrfHG5i8J5Gax/cVonMlon7aHJX/gSvucIncRP93XCqkv7D8IFKFsLiBgHqUpXhE3pYjEcV1dk/JD9zFVCfEaQIVX8Jmfz7IIofcBKQ4OaG+C3xC2veX9CD+iAFXDNaGg9eTVxvkbJRJlW4Nk9Wk13kn696jWppRDe/8pDrYMO9ZyxZ98ReKSz9kWKLLyk2zCZgAniCkLJVX3n1M9DYbomyahWiv/KixRIV9hj/oFz87I+HLznbPTjpa+D+bZQnMuRsljTpv90vQUt/pK7jCFnA30B/jtroSF2/m/gpWn1aQs5WeA6ghzF8SdqWI20fghdSeDOCSCmLgTkfaGgGDmw7nHFkRzGtag57IHS2na06I+gzEphXo1w/Zx2BM/jKL2nZoFjHggtFQjYi8nSVRSXIE58RPbBObXk7uuIL9+rs/5Zo7suJInEUxgsiZZAWS25iBtpEiZeBgDtghEoAE0sjcayNq85M4tbu/LF5h51335PsGzQ09O875+vUS89lkWMyNOFoip2PuyWyMP/iU2XIZdfCCJNDjebDoBLQdpy7QQZC7s9c0wjHJervQNDu2jWzBW5MSAJMr7bP+Iv92BkS/GGgzjEn7MF1IRKFwwzbjbS4/slGOmhx9cZrFu7HSEefojNv3r0UaKfKOWzXsq1zEugbzlMDFsacRJJI/iJlK3vtkZ+PLZIVMFlKA32wbq2Kd5T0uCLZ1CPkAfCdzkz2EYscjDcZq2AWfziN2covN4kXE1lQXPPLTNM1xx3tbiepcO/t3SWm4w87qfh99SL0ZnY+LKFPLPeXVM2mIIoVWt+9Nk0I7nY4O79iGYqxZ8RVz289an6NVdJWnSKZvJQCAuHNiVaDxPAFoH392t9wot5t0/qmU95eEWNbU2udUW5sN9JVqcYlvAIfLeYC33oUzzxZgSktsv21mA7Uly1FA5VnoJFh6N244Wmv3YJGFv/TCPryaw+ZORlpZjQdq/2DYXr3EZskfed0G61P09ipTKmlTQ1067Rg5+PAk5FlQ9e0SWbGf2B/08kqymOTMVOznsALHHNFH4LFRKl2F/NOiYFl9khNHnSu9Ak5sq26Ynl/i2fdTle29Y1ugqmR5Yj4YT9pvslFyYCbw0mNFr5rVQm1LvkG27QMq9ph3t8fmn6r6SQ4oSbr5tz+J1kIawGzDxb6VYOvvWhobDTXfBeNv3b4aNm5XUinsCGqG2q/45m3+LoCOsddFceYhRx1Tsss9PLdPfJdErFMjYd3gddjiP0+XQjcRadZP6bwNLySvunFf20Czy6JqdEW2a96KxdYdOryBv1BjbuUq2yCHeh+6sk7fGmmPi50pe/1l5TyPe5oHW9oPnhPswLyf2TFDdCyYlhwBCstv5C1HwlW7xWoGT9XZt4qVj5WryLPLLD6h/5cMLEjWzgCeAIKNsLak92aBqBsHl4AJwl2N4jfvbSkBExGimv0nFvv09uDScQbjx+w4kPQjgjlW+g9ws9VEJvI2k8N6XxVu0uIwovgTFdunG24gBtaDi+y1YLQwZ8mwbip5fVlO3k0n0AEr/ETbtu8Vjkm+nNSiEb7X/3fMjBL5A8PdgG+/FnbexbFFExmEfetXAnisEKy5z44WVPpQZjSy/jzeGn4yDRsFGqhh87QPaDBWhlo37IFbe/C0xynS91d2tP/AJoJS0sVF6iwAAAAAElFTkSuQmCC"); } +#menu::after { + display: block; + content: ''; + padding-top: 80px; +} + #logo { position: fixed; bottom: 10px; @@ -143,8 +156,10 @@ text-align: center; -webkit-border-radius: 20px; -moz-border-radius: 20px; + border-radius: 20px; -webkit-box-shadow: 0 0 3px rgba(0,0,0,.2); -moz-box-shadow: 0 0 3px rgba(0,0,0,.2); + box-shadow: 0 0 3px rgba(0,0,0,.2); color: inherit; } @@ -191,6 +206,7 @@ width: 17px; -webkit-border-radius: 10px; -moz-border-radius: 10px; + border-radius: 10px; padding: 2px 3px; text-align: center; } @@ -202,8 +218,10 @@ padding: 10px; -webkit-box-shadow: inset 0 0 2px #eee; -moz-box-shadow: inset 0 0 2px #eee; + box-shadow: inset 0 0 2px #eee; -webkit-border-radius: 5px; -moz-border-radius: 5px; + border-radius: 5px; } #stats div { @@ -254,6 +272,7 @@ color: #363636; 
-webkit-border-radius: 3px; -moz-border-radius: 3px; + border-radius: 3px; } table thead { diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/xunit.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/xunit.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/reporters/xunit.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/reporters/xunit.js 2013-06-13 13:24:42.000000000 +0000 @@ -39,7 +39,7 @@ runner.on('pass', function(test){ tests.push(test); }); - + runner.on('fail', function(test){ tests.push(test); }); @@ -50,13 +50,13 @@ , tests: stats.tests , failures: stats.failures , errors: stats.failures - , skip: stats.tests - stats.failures - stats.passes + , skipped: stats.tests - stats.failures - stats.passes , timestamp: (new Date).toUTCString() - , time: stats.duration / 1000 + , time: (stats.duration / 1000) || 0 }, false)); tests.forEach(test); - console.log(''); + console.log(''); }); } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/runnable.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/runnable.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/runnable.js 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/runnable.js 2013-05-31 16:09:18.000000000 +0000 @@ -4,7 +4,8 @@ */ var EventEmitter = require('events').EventEmitter - , debug = require('debug')('mocha:runnable'); + , debug = require('debug')('mocha:runnable') + , milliseconds = require('./ms'); /** * Save timer references to avoid Sinon interfering (see GH-237). @@ -17,6 +18,12 @@ , clearInterval = global.clearInterval; /** + * Object#toString(). + */ + +var toString = Object.prototype.toString; + +/** * Expose `Runnable`. */ @@ -49,13 +56,14 @@ /** * Set & get timeout `ms`. * - * @param {Number} ms + * @param {Number|String} ms * @return {Runnable|Number} ms or self * @api private */ Runnable.prototype.timeout = function(ms){ if (0 == arguments.length) return this._timeout; + if ('string' == typeof ms) ms = milliseconds(ms); debug('timeout %d', ms); this._timeout = ms; if (this.timer) this.resetTimeout(); @@ -65,13 +73,14 @@ /** * Set & get slow `ms`. 
* - * @param {Number} ms + * @param {Number|String} ms * @return {Runnable|Number} ms or self * @api private */ Runnable.prototype.slow = function(ms){ if (0 === arguments.length) return this._slow; + if ('string' == typeof ms) ms = milliseconds(ms); debug('timeout %d', ms); this._slow = ms; return this; @@ -122,16 +131,14 @@ */ Runnable.prototype.resetTimeout = function(){ - var self = this - , ms = this.timeout(); + var self = this; + var ms = this.timeout() || 1e9; this.clearTimeout(); - if (ms) { - this.timer = setTimeout(function(){ - self.callback(new Error('timeout of ' + ms + 'ms exceeded')); - self.timedOut = true; - }, ms); - } + this.timer = setTimeout(function(){ + self.callback(new Error('timeout of ' + ms + 'ms exceeded')); + self.timedOut = true; + }, ms); }; /** @@ -185,7 +192,7 @@ if (this.async) { try { this.fn.call(ctx, function(err){ - if (err instanceof Error) return done(err); + if (err instanceof Error || toString.call(err) === "[object Error]") return done(err); if (null != err) return done(new Error('done() invoked with non-Error: ' + err)); done(); }); @@ -194,7 +201,11 @@ } return; } - + + if (this.asyncOnly) { + return done(new Error('--async-only option in use without declaring `done()`')); + } + // sync try { if (!this.pending) this.fn.call(ctx); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/runner.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/runner.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/runner.js 2012-09-21 17:45:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/runner.js 2013-09-15 15:29:27.000000000 +0000 @@ -1,4 +1,3 @@ - /** * Module dependencies. */ @@ -8,8 +7,20 @@ , Test = require('./test') , utils = require('./utils') , filter = utils.filter - , keys = utils.keys - , noop = function(){}; + , keys = utils.keys; + +/** + * Non-enumerable globals. + */ + +var globals = [ + 'setTimeout', + 'clearTimeout', + 'setInterval', + 'clearInterval', + 'XMLHttpRequest', + 'Date' +]; /** * Expose `Runner`. @@ -32,6 +43,7 @@ * - `hook end` (hook) hook complete * - `pass` (test) test passed * - `fail` (test, err) test failed + * - `pending` (test) test pending * * @api public */ @@ -45,10 +57,19 @@ this.on('test end', function(test){ self.checkGlobals(test); }); this.on('hook end', function(hook){ self.checkGlobals(hook); }); this.grep(/.*/); - this.globals(utils.keys(global).concat(['errno'])); + this.globals(this.globalProps().concat(['errno'])); } /** + * Wrapper for setImmediate, process.nextTick, or browser polyfill. + * + * @param {Function} fn + * @api private + */ + +Runner.immediately = global.setImmediate || process.nextTick; + +/** * Inherit from `EventEmitter.prototype`. */ @@ -95,6 +116,25 @@ }; /** + * Return a list of global properties. + * + * @return {Array} + * @api private + */ + +Runner.prototype.globalProps = function() { + var props = utils.keys(global); + + // non-enumerables + for (var i = 0; i < globals.length; ++i) { + if (~utils.indexOf(props, globals[i])) continue; + props.push(globals[i]); + } + + return props; +}; + +/** * Allow the given `arr` of globals. 
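The runnable.js hunks above route string values through the bundled ms() helper, so `timeout` and `slow` now accept human-readable durations as well as raw milliseconds. A usage sketch, assuming mocha's usual BDD globals (`describe`/`it`) are in scope:

```javascript
describe('slow upstream service', function(){
  this.timeout('2s');  // parsed to 2000 by ms()
  this.slow('0.5s');   // parsed to 500

  it('eventually responds', function(done){
    setTimeout(done, 100); // stand-in for a real async call
  });
});
```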
* * @param {Array} arr @@ -120,14 +160,17 @@ Runner.prototype.checkGlobals = function(test){ if (this.ignoreLeaks) return; var ok = this._globals; - var globals = keys(global); + var globals = this.globalProps(); var isNode = process.kill; var leaks; // check length - 2 ('errno' and 'location' globals) - if (isNode && 1 == ok.length - globals.length) return + if (isNode && 1 == ok.length - globals.length) return; else if (2 == ok.length - globals.length) return; + if(this.prevGlobalsLength == globals.length) return; + this.prevGlobalsLength = globals.length; + leaks = filterLeaks(ok, globals); this._globals = this._globals.concat(leaks); @@ -149,9 +192,11 @@ Runner.prototype.fail = function(test, err){ ++this.failures; test.state = 'failed'; + if ('string' == typeof err) { err = new Error('the string "' + err + '" was thrown, throw an Error :)'); } + this.emit('fail', test, err); }; @@ -190,8 +235,11 @@ function next(i) { var hook = hooks[i]; if (!hook) return fn(); + if (self.failures && suite.bail()) return fn(); self.currentRunnable = hook; + hook.ctx.currentTest = self.test; + self.emit('hook', hook); hook.on('error', function(err){ @@ -204,11 +252,12 @@ if (testError) self.fail(self.test, testError); if (err) return self.failHook(hook, err); self.emit('hook end', hook); + delete hook.ctx.currentTest; next(++i); }); } - process.nextTick(function(){ + Runner.immediately(function(){ next(0); }); }; @@ -300,6 +349,8 @@ var test = this.test , self = this; + if (this.asyncOnly) test.asyncOnly = true; + try { test.on('error', function(err){ self.fail(test, err); @@ -321,7 +372,7 @@ Runner.prototype.runTests = function(suite, fn){ var self = this - , tests = suite.tests + , tests = suite.tests.slice() , test; function next(err) { @@ -449,13 +500,12 @@ var self = this , fn = fn || function(){}; - debug('start'); - - // uncaught callback - function uncaught(err) { + function uncaught(err){ self.uncaught(err); } + debug('start'); + // callback this.on('end', function(){ debug('end'); @@ -487,6 +537,21 @@ function filterLeaks(ok, globals) { return filter(globals, function(key){ + // Firefox and Chrome exposes iframes as index inside the window object + if (/^d+/.test(key)) return false; + + // in firefox + // if runner runs in an iframe, this iframe's window.getInterface method not init at first + // it is assigned in some seconds + if (global.navigator && /^getInterface/.test(key)) return false; + + // an iframe could be approached by window[iframeIndex] + // in ie6,7,8 and opera, iframeIndex is enumerable, this could cause leak + if (global.navigator && /^\d+/.test(key)) return false; + + // Opera and IE expose global variables for HTML element IDs (issue #243) + if (/^mocha-/.test(key)) return false; + var matched = filter(ok, function(ok){ if (~ok.indexOf('*')) return 0 == key.indexOf(ok.split('*')[0]); return key == ok; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/template.html tilemill-0.10.1~saucy10/node_modules/mocha/lib/template.html --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/template.html 2012-09-21 17:42:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/template.html 2013-04-19 18:57:05.000000000 +0000 @@ -1,14 +1,16 @@ + Mocha +
            - + diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/lib/utils.js tilemill-0.10.1~saucy10/node_modules/mocha/lib/utils.js --- tilemill-0.10.1~saucy9/node_modules/mocha/lib/utils.js 2012-09-21 17:45:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/lib/utils.js 2013-08-03 17:03:13.000000000 +0000 @@ -1,4 +1,3 @@ - /** * Module dependencies. */ @@ -63,7 +62,7 @@ /** * Array#reduce (<=IE8) - * + * * @param {Array} array * @param {Function} fn * @param {Object} initial value @@ -163,7 +162,7 @@ path = join(dir, path); if (fs.statSync(path).isDirectory()) { exports.files(path, ret); - } else if (path.match(/\.(js|coffee)$/)) { + } else if (path.match(/\.(js|coffee|litcoffee|coffee.md)$/)) { ret.push(path); } }); @@ -196,8 +195,8 @@ .replace(/^function *\(.*\) *{/, '') .replace(/\s+\}$/, ''); - var spaces = str.match(/^\n?( *)/)[1].length - , re = new RegExp('^ {' + spaces + '}', 'gm'); + var whitespace = str.match(/^\n?(\s*)/)[1] + , re = new RegExp('^' + whitespace, 'gm'); str = str.replace(re, ''); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/mocha.css tilemill-0.10.1~saucy10/node_modules/mocha/mocha.css --- tilemill-0.10.1~saucy9/node_modules/mocha/mocha.css 2012-10-02 15:42:45.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/mocha.css 2013-09-15 15:29:27.000000000 +0000 @@ -1,7 +1,12 @@ -@charset "UTF-8"; +@charset "utf-8"; + body { + margin:0; +} + +#mocha { font: 20px/1.5 "Helvetica Neue", Helvetica, Arial, sans-serif; - padding: 60px 50px; + margin: 60px 50px; } #mocha ul, #mocha li { @@ -37,7 +42,7 @@ font-size: .8em; } -.hidden { +#mocha .hidden { display: none; } @@ -53,21 +58,12 @@ #mocha .test { margin-left: 15px; -} - -#mocha .test:hover h2::after { - position: relative; - top: 0; - right: -10px; - content: '(view source)'; - font-size: 12px; - font-family: arial; - color: #888; + overflow: hidden; } #mocha .test.pending:hover h2::after { content: '(pending)'; - font-family: arial; + font-family: arial, sans-serif; } #mocha .test.pass.medium .duration { @@ -134,10 +130,14 @@ #mocha .test pre.error { color: #c00; + max-height: 300px; + overflow: auto; } #mocha .test pre { - display: inline-block; + display: block; + float: left; + clear: left; font: 12px/1.5 monaco, monospace; margin: 5px; padding: 15px; @@ -145,59 +145,116 @@ border-bottom-color: #ddd; -webkit-border-radius: 3px; -webkit-box-shadow: 0 1px 3px #eee; + -moz-border-radius: 3px; + -moz-box-shadow: 0 1px 3px #eee; + border-radius: 3px; +} + +#mocha .test h2 { + position: relative; } -#report.pass .test.fail { +#mocha .test a.replay { + position: absolute; + top: 3px; + right: 0; + text-decoration: none; + vertical-align: middle; + display: block; + width: 15px; + height: 15px; + line-height: 15px; + text-align: center; + background: #eee; + font-size: 15px; + -moz-border-radius: 15px; + border-radius: 15px; + -webkit-transition: opacity 200ms; + -moz-transition: opacity 200ms; + transition: opacity 200ms; + opacity: 0.3; + color: #888; +} + +#mocha .test:hover a.replay { + opacity: 1; +} + +#mocha-report.pass .test.fail { display: none; } -#report.fail .test.pass { +#mocha-report.fail .test.pass { display: none; } -#error { +#mocha-report.pending .test.pass, +#mocha-report.pending .test.fail { + display: none; +} +#mocha-report.pending .test.pass.pending { + display: block; +} + +#mocha-error { color: #c00; - font-size: 1.5 em; + font-size: 1.5em; font-weight: 100; letter-spacing: 1px; } -#stats { +#mocha-stats { position: fixed; top: 15px; right: 10px; font-size: 
12px; margin: 0; color: #888; + z-index: 1; } -#stats .progress { +#mocha-stats .progress { float: right; padding-top: 0; } -#stats em { +#mocha-stats em { color: black; } -#stats a { +#mocha-stats a { text-decoration: none; color: inherit; } -#stats a:hover { +#mocha-stats a:hover { border-bottom: 1px solid #eee; } -#stats li { +#mocha-stats li { display: inline-block; margin: 0 5px; list-style: none; padding-top: 11px; } -code .comment { color: #ddd } -code .init { color: #2F6FAD } -code .string { color: #5890AD } -code .keyword { color: #8A6343 } -code .number { color: #2F6FAD } +#mocha-stats canvas { + width: 40px; + height: 40px; +} + +#mocha code .comment { color: #ddd } +#mocha code .init { color: #2F6FAD } +#mocha code .string { color: #5890AD } +#mocha code .keyword { color: #8A6343 } +#mocha code .number { color: #2F6FAD } + +@media screen and (max-device-width: 480px) { + #mocha { + margin: 60px 0px; + } + + #mocha #stats { + position: absolute; + } +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/mocha.js tilemill-0.10.1~saucy10/node_modules/mocha/mocha.js --- tilemill-0.10.1~saucy9/node_modules/mocha/mocha.js 2012-10-02 16:02:57.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/mocha.js 2013-09-15 15:29:27.000000000 +0000 @@ -1,6 +1,5 @@ ;(function(){ - // CommonJS require() function require(p){ @@ -32,11 +31,11 @@ require.relative = function (parent) { return function(p){ if ('.' != p.charAt(0)) return require(p); - + var path = parent.split('/') , segs = p.split('/'); path.pop(); - + for (var i = 0; i < segs.length; i++) { var seg = segs[i]; if ('..' == seg) path.pop(); @@ -52,12 +51,366 @@ module.exports = function(type){ return function(){ - } }; + }); // module: browser/debug.js require.register("browser/diff.js", function(module, exports, require){ +/* See LICENSE file for terms of use */ + +/* + * Text diff implementation. + * + * This library supports the following APIS: + * JsDiff.diffChars: Character by character diff + * JsDiff.diffWords: Word (as defined by \b regex) diff which ignores whitespace + * JsDiff.diffLines: Line based diff + * + * JsDiff.diffCss: Diff targeted at CSS content + * + * These methods are based on the implementation proposed in + * "An O(ND) Difference Algorithm and its Variations" (Myers, 1986). 
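The vendored browser/diff.js that begins here gives the browser bundle the JsDiff API (`diffChars`, `diffWords`, `diffLines`, `diffCss`), which the reporters use for actual/expected diffs. An illustrative call against the implementation below, where each returned component is `{value, added, removed}`:

```javascript
var parts = JsDiff.diffWords('beep boop', 'beep boob blah');

parts.forEach(function(part){
  // added/removed are undefined on unchanged spans
  var mark = part.added ? '+' : part.removed ? '-' : ' ';
  console.log(mark, JSON.stringify(part.value));
});
//   "beep "
// + "boob blah"
// - "boop"
```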
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927
+ */
+var JsDiff = (function() {
+  /*jshint maxparams: 5*/
+  function clonePath(path) {
+    return { newPos: path.newPos, components: path.components.slice(0) };
+  }
+  function removeEmpty(array) {
+    var ret = [];
+    for (var i = 0; i < array.length; i++) {
+      if (array[i]) {
+        ret.push(array[i]);
+      }
+    }
+    return ret;
+  }
+  function escapeHTML(s) {
+    var n = s;
+    n = n.replace(/&/g, '&amp;');
+    n = n.replace(/</g, '&lt;');
+    n = n.replace(/>/g, '&gt;');
+    n = n.replace(/"/g, '&quot;');
+
+    return n;
+  }
+
+  var Diff = function(ignoreWhitespace) {
+    this.ignoreWhitespace = ignoreWhitespace;
+  };
+  Diff.prototype = {
+    diff: function(oldString, newString) {
+      // Handle the identity case (this is due to unrolling editLength == 0
+      if (newString === oldString) {
+        return [{ value: newString }];
+      }
+      if (!newString) {
+        return [{ value: oldString, removed: true }];
+      }
+      if (!oldString) {
+        return [{ value: newString, added: true }];
+      }
+
+      newString = this.tokenize(newString);
+      oldString = this.tokenize(oldString);
+
+      var newLen = newString.length, oldLen = oldString.length;
+      var maxEditLength = newLen + oldLen;
+      var bestPath = [{ newPos: -1, components: [] }];
+
+      // Seed editLength = 0
+      var oldPos = this.extractCommon(bestPath[0], newString, oldString, 0);
+      if (bestPath[0].newPos+1 >= newLen && oldPos+1 >= oldLen) {
+        return bestPath[0].components;
+      }
+
+      for (var editLength = 1; editLength <= maxEditLength; editLength++) {
+        for (var diagonalPath = -1*editLength; diagonalPath <= editLength; diagonalPath+=2) {
+          var basePath;
+          var addPath = bestPath[diagonalPath-1],
+              removePath = bestPath[diagonalPath+1];
+          oldPos = (removePath ? removePath.newPos : 0) - diagonalPath;
+          if (addPath) {
+            // No one else is going to attempt to use this value, clear it
+            bestPath[diagonalPath-1] = undefined;
+          }
+
+          var canAdd = addPath && addPath.newPos+1 < newLen;
+          var canRemove = removePath && 0 <= oldPos && oldPos < oldLen;
+          if (!canAdd && !canRemove) {
+            bestPath[diagonalPath] = undefined;
+            continue;
+          }
+
+          // Select the diagonal that we want to branch from.
We select the prior + // path whose position in the new string is the farthest from the origin + // and does not pass the bounds of the diff graph + if (!canAdd || (canRemove && addPath.newPos < removePath.newPos)) { + basePath = clonePath(removePath); + this.pushComponent(basePath.components, oldString[oldPos], undefined, true); + } else { + basePath = clonePath(addPath); + basePath.newPos++; + this.pushComponent(basePath.components, newString[basePath.newPos], true, undefined); + } + + var oldPos = this.extractCommon(basePath, newString, oldString, diagonalPath); + + if (basePath.newPos+1 >= newLen && oldPos+1 >= oldLen) { + return basePath.components; + } else { + bestPath[diagonalPath] = basePath; + } + } + } + }, + + pushComponent: function(components, value, added, removed) { + var last = components[components.length-1]; + if (last && last.added === added && last.removed === removed) { + // We need to clone here as the component clone operation is just + // as shallow array clone + components[components.length-1] = + {value: this.join(last.value, value), added: added, removed: removed }; + } else { + components.push({value: value, added: added, removed: removed }); + } + }, + extractCommon: function(basePath, newString, oldString, diagonalPath) { + var newLen = newString.length, + oldLen = oldString.length, + newPos = basePath.newPos, + oldPos = newPos - diagonalPath; + while (newPos+1 < newLen && oldPos+1 < oldLen && this.equals(newString[newPos+1], oldString[oldPos+1])) { + newPos++; + oldPos++; + + this.pushComponent(basePath.components, newString[newPos], undefined, undefined); + } + basePath.newPos = newPos; + return oldPos; + }, + + equals: function(left, right) { + var reWhitespace = /\S/; + if (this.ignoreWhitespace && !reWhitespace.test(left) && !reWhitespace.test(right)) { + return true; + } else { + return left === right; + } + }, + join: function(left, right) { + return left + right; + }, + tokenize: function(value) { + return value; + } + }; + + var CharDiff = new Diff(); + + var WordDiff = new Diff(true); + var WordWithSpaceDiff = new Diff(); + WordDiff.tokenize = WordWithSpaceDiff.tokenize = function(value) { + return removeEmpty(value.split(/(\s+|\b)/)); + }; + + var CssDiff = new Diff(true); + CssDiff.tokenize = function(value) { + return removeEmpty(value.split(/([{}:;,]|\s+)/)); + }; + + var LineDiff = new Diff(); + LineDiff.tokenize = function(value) { + return value.split(/^/m); + }; + + return { + Diff: Diff, + + diffChars: function(oldStr, newStr) { return CharDiff.diff(oldStr, newStr); }, + diffWords: function(oldStr, newStr) { return WordDiff.diff(oldStr, newStr); }, + diffWordsWithSpace: function(oldStr, newStr) { return WordWithSpaceDiff.diff(oldStr, newStr); }, + diffLines: function(oldStr, newStr) { return LineDiff.diff(oldStr, newStr); }, + + diffCss: function(oldStr, newStr) { return CssDiff.diff(oldStr, newStr); }, + + createPatch: function(fileName, oldStr, newStr, oldHeader, newHeader) { + var ret = []; + + ret.push('Index: ' + fileName); + ret.push('==================================================================='); + ret.push('--- ' + fileName + (typeof oldHeader === 'undefined' ? '' : '\t' + oldHeader)); + ret.push('+++ ' + fileName + (typeof newHeader === 'undefined' ? 
'' : '\t' + newHeader)); + + var diff = LineDiff.diff(oldStr, newStr); + if (!diff[diff.length-1].value) { + diff.pop(); // Remove trailing newline add + } + diff.push({value: '', lines: []}); // Append an empty value to make cleanup easier + + function contextLines(lines) { + return lines.map(function(entry) { return ' ' + entry; }); + } + function eofNL(curRange, i, current) { + var last = diff[diff.length-2], + isLast = i === diff.length-2, + isLastOfType = i === diff.length-3 && (current.added !== last.added || current.removed !== last.removed); + + // Figure out if this is the last line for the given file and missing NL + if (!/\n$/.test(current.value) && (isLast || isLastOfType)) { + curRange.push('\\ No newline at end of file'); + } + } + + var oldRangeStart = 0, newRangeStart = 0, curRange = [], + oldLine = 1, newLine = 1; + for (var i = 0; i < diff.length; i++) { + var current = diff[i], + lines = current.lines || current.value.replace(/\n$/, '').split('\n'); + current.lines = lines; + + if (current.added || current.removed) { + if (!oldRangeStart) { + var prev = diff[i-1]; + oldRangeStart = oldLine; + newRangeStart = newLine; + + if (prev) { + curRange = contextLines(prev.lines.slice(-4)); + oldRangeStart -= curRange.length; + newRangeStart -= curRange.length; + } + } + curRange.push.apply(curRange, lines.map(function(entry) { return (current.added?'+':'-') + entry; })); + eofNL(curRange, i, current); + + if (current.added) { + newLine += lines.length; + } else { + oldLine += lines.length; + } + } else { + if (oldRangeStart) { + // Close out any changes that have been output (or join overlapping) + if (lines.length <= 8 && i < diff.length-2) { + // Overlapping + curRange.push.apply(curRange, contextLines(lines)); + } else { + // end the range and output + var contextSize = Math.min(lines.length, 4); + ret.push( + '@@ -' + oldRangeStart + ',' + (oldLine-oldRangeStart+contextSize) + + ' +' + newRangeStart + ',' + (newLine-newRangeStart+contextSize) + + ' @@'); + ret.push.apply(ret, curRange); + ret.push.apply(ret, contextLines(lines.slice(0, contextSize))); + if (lines.length <= 4) { + eofNL(ret, i, current); + } + + oldRangeStart = 0; newRangeStart = 0; curRange = []; + } + } + oldLine += lines.length; + newLine += lines.length; + } + } + + return ret.join('\n') + '\n'; + }, + + applyPatch: function(oldStr, uniDiff) { + var diffstr = uniDiff.split('\n'); + var diff = []; + var remEOFNL = false, + addEOFNL = false; + + for (var i = (diffstr[0][0]==='I'?4:0); i < diffstr.length; i++) { + if(diffstr[i][0] === '@') { + var meh = diffstr[i].split(/@@ -(\d+),(\d+) \+(\d+),(\d+) @@/); + diff.unshift({ + start:meh[3], + oldlength:meh[2], + oldlines:[], + newlength:meh[4], + newlines:[] + }); + } else if(diffstr[i][0] === '+') { + diff[0].newlines.push(diffstr[i].substr(1)); + } else if(diffstr[i][0] === '-') { + diff[0].oldlines.push(diffstr[i].substr(1)); + } else if(diffstr[i][0] === ' ') { + diff[0].newlines.push(diffstr[i].substr(1)); + diff[0].oldlines.push(diffstr[i].substr(1)); + } else if(diffstr[i][0] === '\\') { + if (diffstr[i-1][0] === '+') { + remEOFNL = true; + } else if(diffstr[i-1][0] === '-') { + addEOFNL = true; + } + } + } + + var str = oldStr.split('\n'); + for (var i = diff.length - 1; i >= 0; i--) { + var d = diff[i]; + for (var j = 0; j < d.oldlength; j++) { + if(str[d.start-1+j] !== d.oldlines[j]) { + return false; + } + } + Array.prototype.splice.apply(str,[d.start-1,+d.oldlength].concat(d.newlines)); + } + + if (remEOFNL) { + while (!str[str.length-1]) { + 
str.pop(); + } + } else if (addEOFNL) { + str.push(''); + } + return str.join('\n'); + }, + + convertChangesToXML: function(changes){ + var ret = []; + for ( var i = 0; i < changes.length; i++) { + var change = changes[i]; + if (change.added) { + ret.push(''); + } else if (change.removed) { + ret.push(''); + } + + ret.push(escapeHTML(change.value)); + + if (change.added) { + ret.push(''); + } else if (change.removed) { + ret.push(''); + } + } + return ret.join(''); + }, + + // See: http://code.google.com/p/google-diff-match-patch/wiki/API + convertChangesToDMP: function(changes){ + var ret = [], change; + for ( var i = 0; i < changes.length; i++) { + change = changes[i]; + ret.push([(change.added ? 1 : change.removed ? -1 : 0), change.value]); + } + return ret; + } + }; +})(); + +if (typeof module !== 'undefined') { + module.exports = JsDiff; +} }); // module: browser/diff.js @@ -386,8 +739,14 @@ }; exports.getWindowSize = function(){ - return [window.innerHeight, window.innerWidth]; + if ('innerHeight' in global) { + return [global.innerHeight, global.innerWidth]; + } else { + // In a Web Worker, the DOM Window is not available. + return [640, 480]; + } }; + }); // module: browser/tty.js require.register("context.js", function(module, exports, require){ @@ -494,7 +853,9 @@ * Inherit from `Runnable.prototype`. */ -Hook.prototype = new Runnable; +function F(){}; +F.prototype = Runnable.prototype; +Hook.prototype = new F; Hook.prototype.constructor = Hook; @@ -516,7 +877,6 @@ this._error = err; }; - }); // module: hook.js require.register("interfaces/bdd.js", function(module, exports, require){ @@ -526,11 +886,12 @@ */ var Suite = require('../suite') - , Test = require('../test'); + , Test = require('../test') + , utils = require('../utils'); /** * BDD-style interface: - * + * * describe('Array', function(){ * describe('#indexOf()', function(){ * it('should return -1 when not present', function(){ @@ -542,7 +903,7 @@ * }); * }); * }); - * + * */ module.exports = function(suite){ @@ -587,11 +948,11 @@ * and callback `fn` containing nested suites * and/or tests. 
*/ - + context.describe = context.context = function(title, fn){ var suite = Suite.create(suites[0], title); suites.unshift(suite); - fn(); + fn.call(suite); suites.shift(); return suite; }; @@ -606,7 +967,7 @@ var suite = Suite.create(suites[0], title); suite.pending = true; suites.unshift(suite); - fn(); + fn.call(suite); suites.shift(); }; @@ -639,7 +1000,8 @@ context.it.only = function(title, fn){ var test = context.it(title, fn); - mocha.grep(test.fullTitle()); + var reString = '^' + utils.escapeRegexp(test.fullTitle()) + '$'; + mocha.grep(new RegExp(reString)); }; /** @@ -667,19 +1029,19 @@ /** * TDD-style interface: - * + * * exports.Array = { * '#indexOf()': { * 'should return -1 when the value is not present': function(){ - * + * * }, * * 'should return the correct index when the value is present': function(){ - * + * * } * } * }; - * + * */ module.exports = function(suite){ @@ -717,6 +1079,7 @@ } } }; + }); // module: interfaces/exports.js require.register("interfaces/index.js", function(module, exports, require){ @@ -735,37 +1098,38 @@ */ var Suite = require('../suite') - , Test = require('../test'); + , Test = require('../test') + , utils = require('../utils'); /** * QUnit-style interface: - * + * * suite('Array'); - * + * * test('#length', function(){ * var arr = [1,2,3]; * ok(arr.length == 3); * }); - * + * * test('#indexOf()', function(){ * var arr = [1,2,3]; * ok(arr.indexOf(1) == 0); * ok(arr.indexOf(2) == 1); * ok(arr.indexOf(3) == 2); * }); - * + * * suite('String'); - * + * * test('#length', function(){ * ok('foo'.length == 3); * }); - * + * */ module.exports = function(suite){ var suites = [suite]; - suite.on('pre-require', function(context){ + suite.on('pre-require', function(context, file, mocha){ /** * Execute before running tests. @@ -802,11 +1166,21 @@ /** * Describe a "suite" with the given `title`. */ - + context.suite = function(title){ if (suites.length > 1) suites.shift(); var suite = Suite.create(suites[0], title); suites.unshift(suite); + return suite; + }; + + /** + * Exclusive test-case. + */ + + context.suite.only = function(title, fn){ + var suite = context.suite(title, fn); + mocha.grep(suite.fullTitle()); }; /** @@ -816,7 +1190,27 @@ */ context.test = function(title, fn){ - suites[0].addTest(new Test(title, fn)); + var test = new Test(title, fn); + suites[0].addTest(test); + return test; + }; + + /** + * Exclusive test-case. + */ + + context.test.only = function(title, fn){ + var test = context.test(title, fn); + var reString = '^' + utils.escapeRegexp(test.fullTitle()) + '$'; + mocha.grep(new RegExp(reString)); + }; + + /** + * Pending test case. + */ + + context.test.skip = function(title){ + context.test(title); }; }); }; @@ -830,7 +1224,8 @@ */ var Suite = require('../suite') - , Test = require('../test'); + , Test = require('../test') + , utils = require('../utils');; /** * TDD-style interface: @@ -840,7 +1235,7 @@ * suiteSetup(function(){ * * }); - * + * * test('should return -1 when not present', function(){ * * }); @@ -903,12 +1298,23 @@ context.suite = function(title, fn){ var suite = Suite.create(suites[0], title); suites.unshift(suite); - fn(); + fn.call(suite); suites.shift(); return suite; }; /** + * Pending suite. + */ + context.suite.skip = function(title, fn) { + var suite = Suite.create(suites[0], title); + suite.pending = true; + suites.unshift(suite); + fn.call(suite); + suites.shift(); + }; + + /** * Exclusive test-case. 
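A recurring change across these interface hunks: `.only` no longer passes the raw title to `mocha.grep()` but builds an anchored, escaped RegExp, so titles containing regex metacharacters (such as `#indexOf()`) select exactly one test instead of acting as a pattern. The idea in isolation, with the escaping mirroring the `utils.escapeRegexp` helper referenced above:

```javascript
// Backslash-escape regex metacharacters, then anchor the whole title.
function escapeRegexp(str) {
  return str.replace(/[-\\^$*+?.()|[\]{}]/g, '\\$&');
}

function onlyPattern(fullTitle) {
  return new RegExp('^' + escapeRegexp(fullTitle) + '$');
}

onlyPattern('Array #indexOf()').test('Array #indexOf()');       // true
onlyPattern('Array #indexOf()').test('Array #indexOf() twice'); // false, anchored
```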
*/ @@ -924,8 +1330,10 @@ */ context.test = function(title, fn){ + var suite = suites[0]; + if (suite.pending) var fn = null; var test = new Test(title, fn); - suites[0].addTest(test); + suite.addTest(test); return test; }; @@ -935,7 +1343,16 @@ context.test.only = function(title, fn){ var test = context.test(title, fn); - mocha.grep(test.fullTitle()); + var reString = '^' + utils.escapeRegexp(test.fullTitle()) + '$'; + mocha.grep(new RegExp(reString)); + }; + + /** + * Pending test case. + */ + + context.test.skip = function(title){ + context.test(title); }; }); }; @@ -997,6 +1414,7 @@ * - `reporter` reporter instance, defaults to `mocha.reporters.Dot` * - `globals` array of accepted globals * - `timeout` timeout in milliseconds + * - `bail` bail on the first test failure * - `slow` milliseconds to wait before considering a test slow * - `ignoreLeaks` ignore global leaks * - `grep` string or regexp to filter tests with @@ -1012,12 +1430,26 @@ this.grep(options.grep); this.suite = new exports.Suite('', new exports.Context); this.ui(options.ui); + this.bail(options.bail); this.reporter(options.reporter); - if (options.timeout) this.timeout(options.timeout); + if (null != options.timeout) this.timeout(options.timeout); if (options.slow) this.slow(options.slow); } /** + * Enable or disable bailing on the first failure. + * + * @param {Boolean} [bail] + * @api public + */ + +Mocha.prototype.bail = function(bail){ + if (0 == arguments.length) bail = true; + this.suite.bail(bail); + return this; +}; + +/** * Add test `file`. * * @param {String} file @@ -1032,7 +1464,7 @@ /** * Set reporter to `reporter`, defaults to "dot". * - * @param {String|Function} reporter name of a reporter or a reporter constructor + * @param {String|Function} reporter name or constructor * @api public */ @@ -1139,12 +1571,13 @@ /** * Ignore global leaks. * + * @param {Boolean} ignore * @return {Mocha} * @api public */ -Mocha.prototype.ignoreLeaks = function(){ - this.options.ignoreLeaks = true; +Mocha.prototype.ignoreLeaks = function(ignore){ + this.options.ignoreLeaks = !!ignore; return this; }; @@ -1212,6 +1645,18 @@ }; /** + * Makes all tests async (accepting a callback) + * + * @return {Mocha} + * @api public + */ + +Mocha.prototype.asyncOnly = function(){ + this.options.asyncOnly = true; + return this; +}; + +/** * Run tests and invoke `fn()` when complete. * * @param {Function} fn @@ -1225,7 +1670,8 @@ var options = this.options; var runner = new exports.Runner(suite); var reporter = new this._reporter(runner); - runner.ignoreLeaks = options.ignoreLeaks; + runner.ignoreLeaks = false !== options.ignoreLeaks; + runner.asyncOnly = options.asyncOnly; if (options.grep) runner.grep(options.grep, options.invert); if (options.globals) runner.globals(options.globals); if (options.growl) this._growl(runner, reporter); @@ -1235,7 +1681,6 @@ }); // module: mocha.js require.register("ms.js", function(module, exports, require){ - /** * Helpers. */ @@ -1244,19 +1689,28 @@ var m = s * 60; var h = m * 60; var d = h * 24; +var y = d * 365.25; /** * Parse or format the given `val`. * + * Options: + * + * - `long` verbose formatting [false] + * * @param {String|Number} val + * @param {Object} options * @return {String|Number} * @api public */ -module.exports = function(val){ +module.exports = function(val, options){ + options = options || {}; if ('string' == typeof val) return parse(val); - return format(val); -} + return options.long + ? long(val) + : short(val); +}; /** * Parse the given `str` and return milliseconds. 
@@ -1267,55 +1721,78 @@ */ function parse(str) { - var m = /^((?:\d+)?\.?\d+) *(ms|seconds?|s|minutes?|m|hours?|h|days?|d|years?|y)?$/i.exec(str); - if (!m) return; - var n = parseFloat(m[1]); - var type = (m[2] || 'ms').toLowerCase(); + var match = /^((?:\d+)?\.?\d+) *(ms|seconds?|s|minutes?|m|hours?|h|days?|d|years?|y)?$/i.exec(str); + if (!match) return; + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); switch (type) { case 'years': case 'year': case 'y': - return n * 31557600000; + return n * y; case 'days': case 'day': case 'd': - return n * 86400000; + return n * d; case 'hours': case 'hour': case 'h': - return n * 3600000; + return n * h; case 'minutes': case 'minute': case 'm': - return n * 60000; + return n * m; case 'seconds': case 'second': case 's': - return n * 1000; + return n * s; case 'ms': return n; } } /** - * Format the given `ms`. + * Short format for `ms`. * * @param {Number} ms * @return {String} - * @api public + * @api private */ -function format(ms) { - if (ms == d) return (ms / d) + ' day'; - if (ms > d) return (ms / d) + ' days'; - if (ms == h) return (ms / h) + ' hour'; - if (ms > h) return (ms / h) + ' hours'; - if (ms == m) return (ms / m) + ' minute'; - if (ms > m) return (ms / m) + ' minutes'; - if (ms == s) return (ms / s) + ' second'; - if (ms > s) return (ms / s) + ' seconds'; - return ms + ' ms'; +function short(ms) { + if (ms >= d) return Math.round(ms / d) + 'd'; + if (ms >= h) return Math.round(ms / h) + 'h'; + if (ms >= m) return Math.round(ms / m) + 'm'; + if (ms >= s) return Math.round(ms / s) + 's'; + return ms + 'ms'; } + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function long(ms) { + return plural(ms, d, 'day') + || plural(ms, h, 'hour') + || plural(ms, m, 'minute') + || plural(ms, s, 'second') + || ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, n, name) { + if (ms < n) return; + if (ms < n * 1.5) return Math.floor(ms / n) + ' ' + name; + return Math.ceil(ms / n) + ' ' + name + 's'; +} + }); // module: ms.js require.register("reporters/base.js", function(module, exports, require){ @@ -1383,6 +1860,23 @@ }; /** + * Default symbol map. 
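For reference, the rewritten ms.js above converts in both directions: `parse()` handles strings, while numbers go through `short()` by default or `long()` with the `{ long: true }` option. Illustrative values under the implementation shown (the require path is however the bundle registers it):

```javascript
var ms = require('./ms');

ms('2s');                     // => 2000     (parse)
ms('1.5h');                   // => 5400000  (fractional values are allowed)
ms(60000);                    // => '1m'     (short format, the default)
ms(86400000, { long: true }); // => '1 day'  (long format, pluralized as needed)
```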
 require.register("reporters/base.js", function(module, exports, require){
@@ -1383,6 +1860,23 @@
 };

 /**
+ * Default symbol map.
+ */
+
+exports.symbols = {
+  ok: '✓',
+  err: '✖',
+  dot: '․'
+};
+
+// With node.js on Windows: use symbols available in terminal default fonts
+if ('win32' == process.platform) {
+  exports.symbols.ok = '\u221A';
+  exports.symbols.err = '\u00D7';
+  exports.symbols.dot = '.';
+}
+
+/**
  * Color `str` with the given `type`,
  * allowing colors to be disabled,
  * as well as user-defined color
@@ -1465,19 +1959,21 @@
     , expected = err.expected
     , escape = true;

+  // uncaught
+  if (err.uncaught) {
+    msg = 'Uncaught ' + msg;
+  }
+
   // explicitly show diff
-  if (err.showDiff) {
+  if (err.showDiff && sameType(actual, expected)) {
     escape = false;
-    err.actual = actual = JSON.stringify(actual, null, 2);
-    err.expected = expected = JSON.stringify(expected, null, 2);
+    err.actual = actual = stringify(actual);
+    err.expected = expected = stringify(expected);
   }

   // actual / expected diff
   if ('string' == typeof actual && 'string' == typeof expected) {
-    var len = Math.max(actual.length, expected.length);
-
-    if (len < 20) msg = errorDiff(err, 'Chars', escape);
-    else msg = errorDiff(err, 'Words', escape);
+    msg = errorDiff(err, 'WordsWithSpace', escape);

     // linenos
     var lines = msg.split('\n');
@@ -1532,6 +2028,8 @@
   if (!runner) return;
   this.runner = runner;

+  runner.stats = stats;
+
   runner.on('start', function(){
     stats.start = new Date;
   });
@@ -1584,48 +2082,38 @@
  */

 Base.prototype.epilogue = function(){
-  var stats = this.stats
-    , fmt
-    , tests;
+  var stats = this.stats;
+  var tests;
+  var fmt;

   console.log();

-  function pluralize(n) {
-    return 1 == n ? 'test' : 'tests';
-  }
-
-  // failure
-  if (stats.failures) {
-    fmt = color('bright fail', '  ✖')
-      + color('fail', ' %d of %d %s failed')
-      + color('light', ':')
-
-    console.error(fmt,
-      stats.failures,
-      this.runner.total,
-      pluralize(this.runner.total));
-
-    Base.list(this.failures);
-    console.error();
-    return;
-  }
-
-  // pass
-  fmt = color('bright pass', '  ✔')
-    + color('green', ' %d %s complete')
+  // passes
+  fmt = color('bright pass', ' ')
+    + color('green', ' %d passing')
     + color('light', ' (%s)');

   console.log(fmt,
-    stats.tests || 0,
-    pluralize(stats.tests),
+    stats.passes || 0,
     ms(stats.duration));

   // pending
   if (stats.pending) {
-    fmt = color('pending', '  •')
-      + color('pending', ' %d %s pending');
+    fmt = color('pending', ' ')
+      + color('pending', ' %d pending');
+
+    console.log(fmt, stats.pending);
+  }
+
+  // failures
+  if (stats.failures) {
+    fmt = color('fail', '  %d failing');

-    console.log(fmt, stats.pending, pluralize(stats.pending));
+    console.error(fmt,
+      stats.failures);
+
+    Base.list(this.failures);
+    console.error();
   }

   console.log();
@@ -1682,6 +2170,34 @@
   }).join('\n');
 }

+/**
+ * Stringify `obj`.
+ *
+ * @param {Mixed} obj
+ * @return {String}
+ * @api private
+ */
+
+function stringify(obj) {
+  if (obj instanceof RegExp) return obj.toString();
+  return JSON.stringify(obj, null, 2);
+}
+
+/**
+ * Check that a / b have the same type.
+ *
+ * @param {Object} a
+ * @param {Object} b
+ * @return {Boolean}
+ * @api private
+ */
+
+function sameType(a, b) {
+  a = Object.prototype.toString.call(a);
+  b = Object.prototype.toString.call(b);
+  return a == b;
+}
+
 }); // module: reporters/base.js

 require.register("reporters/doc.js", function(module, exports, require){
@@ -1723,7 +2239,7 @@
     ++indents;
     console.log('%s<section class="suite">', indent());
     ++indents;
-    console.log('%s<h1>%s</h1>', indent(), suite.title);
+    console.log('%s<h1>%s</h1>', indent(), utils.escape(suite.title));
     console.log('%s<dl>', indent());
   });

@@ -1736,7 +2252,7 @@
   });

   runner.on('pass', function(test){
-    console.log('%s  <dt>%s</dt>', indent(), test.title);
+    console.log('%s  <dt>%s</dt>', indent(), utils.escape(test.title));
     var code = utils.escape(utils.clean(test.fn.toString()));
     console.log('%s  <dd><pre><code>%s</code></pre></dd>', indent(), code);
   });
@@ -1772,7 +2288,6 @@
   var self = this
     , stats = this.stats
     , width = Base.window.width * .75 | 0
-    , c = '․'
     , n = 0;

   runner.on('start', function(){
@@ -1780,21 +2295,21 @@
   });

   runner.on('pending', function(test){
-    process.stdout.write(color('pending', c));
+    process.stdout.write(color('pending', Base.symbols.dot));
   });

   runner.on('pass', function(test){
     if (++n % width == 0) process.stdout.write('\n  ');
     if ('slow' == test.speed) {
-      process.stdout.write(color('bright yellow', c));
+      process.stdout.write(color('bright yellow', Base.symbols.dot));
     } else {
-      process.stdout.write(color(test.speed, c));
+      process.stdout.write(color(test.speed, Base.symbols.dot));
     }
   });

   runner.on('fail', function(test, err){
     if (++n % width == 0) process.stdout.write('\n  ');
-    process.stdout.write(color('fail', c));
+    process.stdout.write(color('fail', Base.symbols.dot));
   });

   runner.on('end', function(){
@@ -1807,7 +2322,9 @@
  * Inherit from `Base.prototype`.
  */

-Dot.prototype = new Base;
+function F(){};
+F.prototype = Base.prototype;
+Dot.prototype = new F;
 Dot.prototype.constructor = Dot;

 }); // module: reporters/dot.js
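From here on, the diff repeatedly swaps `X.prototype = new Base` for an empty intermediate constructor; `Dot`, `Landing`, `List`, `Min`, `NyanCat`, `Progress`, `Spec`, and later `Runnable`, `Runner`, `Suite` and `Test` all receive the same treatment. The point of the `F` indirection is to share the parent prototype without actually running the parent constructor, which for these reporters expects a live `runner` and attaches event listeners. A minimal illustration with hypothetical names:

```js
function Base(runner) {
  // The real reporters subscribe to runner events here, so calling
  // `new Base` just to build a prototype chain would have side effects.
  this.runner = runner;
}
Base.prototype.epilogue = function(){ return 'summary'; };

// The pattern used throughout this diff; equivalent to
// Dot.prototype = Object.create(Base.prototype) on newer engines.
function F(){}
F.prototype = Base.prototype;

function Dot(runner) { Base.call(this, runner); }
Dot.prototype = new F;
Dot.prototype.constructor = Dot;

var dot = new Dot({/* runner stand-in */});
console.log(dot.epilogue());      // 'summary', found on Base.prototype
console.log(dot instanceof Base); // true
```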
@@ -1897,7 +2414,7 @@
 /**
  * Stats template.
  */

-var statsTemplate = '<ul id="stats">'
+var statsTemplate = '<ul id="mocha-stats">'
   + '<li class="progress"><canvas width="40" height="40"></canvas></li>'
   + '<li class="passes"><a href="#">passes:</a> <em>0</em></li>'
   + '<li class="failures"><a href="#">failures:</a> <em>0</em></li>'
@@ -1925,7 +2442,7 @@
     , failuresLink = items[2].getElementsByTagName('a')[0]
     , duration = items[3].getElementsByTagName('em')[0]
     , canvas = stat.getElementsByTagName('canvas')[0]
-    , report = fragment('<ul id="report"></ul>')
+    , report = fragment('<ul id="mocha-report"></ul>')
     , stack = [report]
     , progress
     , ctx
@@ -1985,14 +2502,12 @@
   });

   runner.on('fail', function(test, err){
-    if ('hook' == test.type || err.uncaught) runner.emit('test end', test);
+    if ('hook' == test.type) runner.emit('test end', test);
   });

   runner.on('test end', function(test){
-    window.scrollTo(0, document.body.scrollHeight);
-
     // TODO: add to stats
-    var percent = stats.tests / total * 100 | 0;
+    var percent = stats.tests / this.total * 100 | 0;
     if (progress) progress.update(percent).draw(ctx);

     // update stats
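In the `test end` handler that follows, passing and failing tests each gain a replay link whose `href` is `?grep=` plus `encodeURIComponent(test.fullTitle())`; `mocha.run` at the bottom of this bundle reads `query.grep` back out of `location.search` and feeds it to `mocha.grep()`, so following the link re-runs just that test. A sketch of the round trip; the real `Mocha.utils.parseQuery` may differ in detail:

```js
// Building the href the way the HTML reporter does:
var fullTitle = 'Array #indexOf() returns -1 when the value is absent';
var href = '?grep=' + encodeURIComponent(fullTitle);

// Roughly what a parseQuery helper has to do on the next page load
// (illustrative; not quoted from mocha's utils):
function parseQuery(qs) {
  return qs.replace('?', '').split('&').reduce(function(obj, pair) {
    var i = pair.indexOf('=');
    obj[pair.slice(0, i)] = decodeURIComponent(pair.slice(i + 1));
    return obj;
  }, {});
}

var query = parseQuery(href);
console.log(query.grep === fullTitle); // true, so mocha.grep(query.grep) isolates the test
```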
@@ -2003,11 +2518,11 @@

     // test
     if ('passed' == test.state) {
-      var el = fragment('<li class="test pass %e"><h2>%e<span class="duration">%ems</span></h2></li>', test.speed, test.title, test.duration);
+      var el = fragment('<li class="test pass %e"><h2>%e<span class="duration">%ems</span> <a href="?grep=%e" class="replay">‣</a></h2></li>', test.speed, test.title, test.duration, encodeURIComponent(test.fullTitle()));
     } else if (test.pending) {
       var el = fragment('<li class="test pass pending"><h2>%e</h2></li>', test.title);
     } else {
-      var el = fragment('<li class="test fail"><h2>%e</h2></li>', test.title);
+      var el = fragment('<li class="test fail"><h2>%e <a href="?grep=%e" class="replay">‣</a></h2></li>', test.title, encodeURIComponent(test.fullTitle()));
       var str = test.err.stack || test.err.toString();

       // FF / Opera do not add the message
@@ -2034,7 +2549,7 @@

       on(h2, 'click', function(){
         pre.style.display = 'none' == pre.style.display
-          ? 'inline-block'
+          ? 'block'
           : 'none';
       });

@@ -2043,7 +2558,8 @@
       pre.style.display = 'none';
     }

-    stack[0].appendChild(el);
+    // Don't call .appendChild if #mocha-report was already .shift()'ed off the stack.
+    if (stack[0]) stack[0].appendChild(el);
   });
 }

@@ -2052,7 +2568,7 @@
  */

 function error(msg) {
-  document.body.appendChild(fragment('<div id="error">%s</div>', msg));
+  document.body.appendChild(fragment('<div id="mocha-error">%s</div>', msg));
 }

 /**
@@ -2230,6 +2746,10 @@
     ret.sloc += data.sloc;
   }

+  ret.files.sort(function(a, b) {
+    return a.filename.localeCompare(b.filename);
+  });
+
   if (ret.sloc > 0) {
     ret.coverage = (ret.hits / ret.sloc) * 100;
   }
@@ -2534,7 +3054,9 @@
  * Inherit from `Base.prototype`.
  */

-Landing.prototype = new Base;
+function F(){};
+F.prototype = Base.prototype;
+Landing.prototype = new F;
 Landing.prototype.constructor = Landing;

 }); // module: reporters/landing.js
@@ -2584,7 +3106,7 @@
   });

   runner.on('pass', function(test){
-    var fmt = color('checkmark', '  ✓')
+    var fmt = color('checkmark', '  '+Base.symbols.dot)
       + color('pass', ' %s: ')
       + color(test.speed, '%dms');
     cursor.CR();
@@ -2603,7 +3125,9 @@
  * Inherit from `Base.prototype`.
  */

-List.prototype = new Base;
+function F(){};
+F.prototype = Base.prototype;
+List.prototype = new F;
 List.prototype.constructor = List;

@@ -2635,7 +3159,6 @@
   var self = this
     , stats = this.stats
-    , total = runner.total
     , level = 0
     , buf = '';

@@ -2680,7 +3203,7 @@
   runner.on('suite', function(suite){
     ++level;
     var slug = utils.slug(suite.fullTitle());
-    buf += '<a name="' + slug + '" />' + '\n';
+    buf += '<a name="' + slug + '"></a>' + '\n';
     buf += title(suite.title) + '\n';
   });

@@ -2727,7 +3250,7 @@

 function Min(runner) {
   Base.call(this, runner);
-  
+
   runner.on('start', function(){
     // clear screen
     process.stdout.write('\u001b[2J');
@@ -2742,13 +3265,15 @@
  * Inherit from `Base.prototype`.
  */

-Min.prototype = new Base;
+function F(){};
+F.prototype = Base.prototype;
+Min.prototype = new F;
 Min.prototype.constructor = Min;

+
 }); // module: reporters/min.js

 require.register("reporters/nyan.js", function(module, exports, require){
-
 /**
  * Module dependencies.
  */
@@ -2771,7 +3296,6 @@

 function NyanCat(runner) {
   Base.call(this, runner);
-
   var self = this
     , stats = this.stats
     , width = Base.window.width * .75 | 0
@@ -2787,19 +3311,19 @@

   runner.on('start', function(){
     Base.cursor.hide();
-    self.draw('start');
+    self.draw();
   });

   runner.on('pending', function(test){
-    self.draw('pending');
+    self.draw();
   });

   runner.on('pass', function(test){
-    self.draw('pass');
+    self.draw();
   });

   runner.on('fail', function(test, err){
-    self.draw('fail');
+    self.draw();
   });

   runner.on('end', function(){
@@ -2810,17 +3334,16 @@
 }

 /**
- * Draw the nyan cat with runner `status`.
+ * Draw the nyan cat
  *
- * @param {String} status
  * @api private
  */

-NyanCat.prototype.draw = function(status){
+NyanCat.prototype.draw = function(){
   this.appendRainbow();
   this.drawScoreboard();
   this.drawRainbow();
-  this.drawNyanCat(status);
+  this.drawNyanCat();
   this.tick = !this.tick;
 };
@@ -2885,58 +3408,62 @@
 };

 /**
- * Draw the nyan cat with `status`.
+ * Draw the nyan cat
  *
- * @param {String} status
  * @api private
  */

-NyanCat.prototype.drawNyanCat = function(status) {
+NyanCat.prototype.drawNyanCat = function() {
   var self = this;
   var startWidth = this.scoreboardWidth + this.trajectories[0].length;
+  var color = '\u001b[' + startWidth + 'C';
+  var padding = '';

-  [0, 1, 2, 3].forEach(function(index) {
-    write('\u001b[' + startWidth + 'C');
+  write(color);
+  write('_,------,');
+  write('\n');

-    switch (index) {
-      case 0:
-        write('_,------,');
-        write('\n');
-        break;
-      case 1:
-        var padding = self.tick ? '  ' : '   ';
-        write('_|' + padding + '/\\_/\\ ');
-        write('\n');
-        break;
-      case 2:
-        var padding = self.tick ? '_' : '__';
-        var tail = self.tick ? '~' : '^';
-        var face;
-        switch (status) {
-          case 'pass':
-            face = '( ^ .^)';
-            break;
-          case 'fail':
-            face = '( o .o)';
-            break;
-          default:
-            face = '( - .-)';
-        }
-        write(tail + '|' + padding + face + ' ');
-        write('\n');
-        break;
-      case 3:
-        var padding = self.tick ? ' ' : '  ';
-        write(padding + '"" "" ');
-        write('\n');
-        break;
-    }
-  });
+  write(color);
+  padding = self.tick ? '  ' : '   ';
+  write('_|' + padding + '/\\_/\\ ');
+  write('\n');
+
+  write(color);
+  padding = self.tick ? '_' : '__';
+  var tail = self.tick ? '~' : '^';
+  var face;
+  write(tail + '|' + padding + this.face() + ' ');
+  write('\n');
+
+  write(color);
+  padding = self.tick ? ' ' : '  ';
+  write(padding + '"" "" ');
+  write('\n');

   this.cursorUp(this.numberOfLines);
 };

 /**
+ * Draw nyan cat face.
+ *
+ * @return {String}
+ * @api private
+ */
+
+NyanCat.prototype.face = function() {
+  var stats = this.stats;
+  if (stats.failures) {
+    return '( x .x)';
+  } else if (stats.pending) {
+    return '( o .o)';
+  } else if(stats.passes) {
+    return '( ^ .^)';
+  } else {
+    return '( - .-)';
+  }
+}
+
+/**
  * Move cursor up `n`.
  *
  * @param {Number} n
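The rewrite above also changes how the cat is drawn: instead of threading a `status` string through every event, `face()` consults the stats object that `Base.call(this, runner)` set up, and each sprite row is positioned with the ANSI "cursor forward" sequence (`'\u001b[' + n + 'C'`, bound here to the slightly misleading local name `color`) before the cursor is rewound upward. A small sketch of that terminal trick, independent of mocha:

```js
// '\u001b[<n>C' moves the cursor n columns right; '\u001b[<n>A' moves it
// n rows up. Together they let a multi-line sprite be redrawn in place.
function right(n) { return '\u001b[' + n + 'C'; }
function up(n) { return '\u001b[' + n + 'A'; }

function drawSprite(col, rows) {
  rows.forEach(function(row) {
    process.stdout.write(right(col) + row + '\n');
  });
  process.stdout.write(up(rows.length)); // rewind so the next tick overdraws
}

// Illustrative frame, loosely shaped like the cat above:
drawSprite(10, ['_,------,', '_|  /\\_/\\ ', '~|_( ^ .^) ', ' "" "" ']);
```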
@@ -3006,7 +3533,9 @@
  * Inherit from `Base.prototype`.
  */

-NyanCat.prototype = new Base;
+function F(){};
+F.prototype = Base.prototype;
+NyanCat.prototype = new F;
 NyanCat.prototype.constructor = NyanCat;

@@ -3056,7 +3585,7 @@
   // default chars
   options.open = options.open || '[';
   options.complete = options.complete || '▬';
-  options.incomplete = options.incomplete || '⋅';
+  options.incomplete = options.incomplete || Base.symbols.dot;
   options.close = options.close || ']';
   options.verbose = false;

@@ -3098,7 +3627,9 @@
  * Inherit from `Base.prototype`.
  */

-Progress.prototype = new Base;
+function F(){};
+F.prototype = Base.prototype;
+Progress.prototype = new F;
 Progress.prototype.constructor = Progress;

@@ -3165,13 +3696,13 @@
   runner.on('pass', function(test){
     if ('fast' == test.speed) {
       var fmt = indent()
-        + color('checkmark', '  ✓')
+        + color('checkmark', '  ' + Base.symbols.ok)
         + color('pass', ' %s ');
       cursor.CR();
       console.log(fmt, test.title);
     } else {
       var fmt = indent()
-        + color('checkmark', '  ✓')
+        + color('checkmark', '  ' + Base.symbols.ok)
         + color('pass', ' %s ')
         + color(test.speed, '(%dms)');
       cursor.CR();
@@ -3191,7 +3722,9 @@
  * Inherit from `Base.prototype`.
*/ -Spec.prototype = new Base; +function F(){}; +F.prototype = Base.prototype; +Spec.prototype = new F; Spec.prototype.constructor = Spec; @@ -3225,7 +3758,9 @@ var self = this , stats = this.stats - , n = 1; + , n = 1 + , passes = 0 + , failures = 0; runner.on('start', function(){ var total = runner.grepTotal(runner.suite); @@ -3241,12 +3776,20 @@ }); runner.on('pass', function(test){ + passes++; console.log('ok %d %s', n, title(test)); }); runner.on('fail', function(test, err){ + failures++; console.log('not ok %d %s', n, title(test)); - console.log(err.stack.replace(/^/gm, ' ')); + if (err.stack) console.log(err.stack.replace(/^/gm, ' ')); + }); + + runner.on('end', function(){ + console.log('# tests ' + (passes + failures)); + console.log('# pass ' + passes); + console.log('# fail ' + failures); }); } @@ -3375,7 +3918,7 @@ runner.on('pass', function(test){ tests.push(test); }); - + runner.on('fail', function(test){ tests.push(test); }); @@ -3386,13 +3929,13 @@ , tests: stats.tests , failures: stats.failures , errors: stats.failures - , skip: stats.tests - stats.failures - stats.passes + , skipped: stats.tests - stats.failures - stats.passes , timestamp: (new Date).toUTCString() - , time: stats.duration / 1000 + , time: (stats.duration / 1000) || 0 }, false)); tests.forEach(test); - console.log(''); + console.log(''); }); } @@ -3400,7 +3943,9 @@ * Inherit from `Base.prototype`. */ -XUnit.prototype = new Base; +function F(){}; +F.prototype = Base.prototype; +XUnit.prototype = new F; XUnit.prototype.constructor = XUnit; @@ -3461,7 +4006,8 @@ */ var EventEmitter = require('browser/events').EventEmitter - , debug = require('browser/debug')('mocha:runnable'); + , debug = require('browser/debug')('mocha:runnable') + , milliseconds = require('./ms'); /** * Save timer references to avoid Sinon interfering (see GH-237). @@ -3474,6 +4020,12 @@ , clearInterval = global.clearInterval; /** + * Object#toString(). + */ + +var toString = Object.prototype.toString; + +/** * Expose `Runnable`. */ @@ -3501,20 +4053,23 @@ * Inherit from `EventEmitter.prototype`. */ -Runnable.prototype = new EventEmitter; +function F(){}; +F.prototype = EventEmitter.prototype; +Runnable.prototype = new F; Runnable.prototype.constructor = Runnable; /** * Set & get timeout `ms`. * - * @param {Number} ms + * @param {Number|String} ms * @return {Runnable|Number} ms or self * @api private */ Runnable.prototype.timeout = function(ms){ if (0 == arguments.length) return this._timeout; + if ('string' == typeof ms) ms = milliseconds(ms); debug('timeout %d', ms); this._timeout = ms; if (this.timer) this.resetTimeout(); @@ -3524,13 +4079,14 @@ /** * Set & get slow `ms`. 
* - * @param {Number} ms + * @param {Number|String} ms * @return {Runnable|Number} ms or self * @api private */ Runnable.prototype.slow = function(ms){ if (0 === arguments.length) return this._slow; + if ('string' == typeof ms) ms = milliseconds(ms); debug('timeout %d', ms); this._slow = ms; return this; @@ -3581,16 +4137,14 @@ */ Runnable.prototype.resetTimeout = function(){ - var self = this - , ms = this.timeout(); + var self = this; + var ms = this.timeout() || 1e9; this.clearTimeout(); - if (ms) { - this.timer = setTimeout(function(){ - self.callback(new Error('timeout of ' + ms + 'ms exceeded')); - self.timedOut = true; - }, ms); - } + this.timer = setTimeout(function(){ + self.callback(new Error('timeout of ' + ms + 'ms exceeded')); + self.timedOut = true; + }, ms); }; /** @@ -3644,7 +4198,7 @@ if (this.async) { try { this.fn.call(ctx, function(err){ - if (err instanceof Error) return done(err); + if (err instanceof Error || toString.call(err) === "[object Error]") return done(err); if (null != err) return done(new Error('done() invoked with non-Error: ' + err)); done(); }); @@ -3653,7 +4207,11 @@ } return; } - + + if (this.asyncOnly) { + return done(new Error('--async-only option in use without declaring `done()`')); + } + // sync try { if (!this.pending) this.fn.call(ctx); @@ -3667,7 +4225,6 @@ }); // module: runnable.js require.register("runner.js", function(module, exports, require){ - /** * Module dependencies. */ @@ -3677,8 +4234,20 @@ , Test = require('./test') , utils = require('./utils') , filter = utils.filter - , keys = utils.keys - , noop = function(){}; + , keys = utils.keys; + +/** + * Non-enumerable globals. + */ + +var globals = [ + 'setTimeout', + 'clearTimeout', + 'setInterval', + 'clearInterval', + 'XMLHttpRequest', + 'Date' +]; /** * Expose `Runner`. @@ -3701,6 +4270,7 @@ * - `hook end` (hook) hook complete * - `pass` (test) test passed * - `fail` (test, err) test failed + * - `pending` (test) test pending * * @api public */ @@ -3714,14 +4284,25 @@ this.on('test end', function(test){ self.checkGlobals(test); }); this.on('hook end', function(hook){ self.checkGlobals(hook); }); this.grep(/.*/); - this.globals(utils.keys(global).concat(['errno'])); + this.globals(this.globalProps().concat(['errno'])); } /** + * Wrapper for setImmediate, process.nextTick, or browser polyfill. + * + * @param {Function} fn + * @api private + */ + +Runner.immediately = global.setImmediate || process.nextTick; + +/** * Inherit from `EventEmitter.prototype`. */ -Runner.prototype = new EventEmitter; +function F(){}; +F.prototype = EventEmitter.prototype; +Runner.prototype = new F; Runner.prototype.constructor = Runner; @@ -3766,6 +4347,25 @@ }; /** + * Return a list of global properties. + * + * @return {Array} + * @api private + */ + +Runner.prototype.globalProps = function() { + var props = utils.keys(global); + + // non-enumerables + for (var i = 0; i < globals.length; ++i) { + if (~utils.indexOf(props, globals[i])) continue; + props.push(globals[i]); + } + + return props; +}; + +/** * Allow the given `arr` of globals. 
* * @param {Array} arr @@ -3791,15 +4391,20 @@ Runner.prototype.checkGlobals = function(test){ if (this.ignoreLeaks) return; var ok = this._globals; - var globals = keys(global); + var globals = this.globalProps(); var isNode = process.kill; var leaks; // check length - 2 ('errno' and 'location' globals) - if (isNode && 1 == ok.length - globals.length) return + if (isNode && 1 == ok.length - globals.length) return; else if (2 == ok.length - globals.length) return; + if(this.prevGlobalsLength == globals.length) return; + this.prevGlobalsLength = globals.length; + + //console.time('filterLeaksTime'); leaks = filterLeaks(ok, globals); + //console.timeEnd('filterLeaksTime'); this._globals = this._globals.concat(leaks); if (leaks.length > 1) { @@ -3820,9 +4425,11 @@ Runner.prototype.fail = function(test, err){ ++this.failures; test.state = 'failed'; + if ('string' == typeof err) { err = new Error('the string "' + err + '" was thrown, throw an Error :)'); } + this.emit('fail', test, err); }; @@ -3861,8 +4468,11 @@ function next(i) { var hook = hooks[i]; if (!hook) return fn(); + if (self.failures && suite.bail()) return fn(); self.currentRunnable = hook; + hook.ctx.currentTest = self.test; + self.emit('hook', hook); hook.on('error', function(err){ @@ -3875,11 +4485,12 @@ if (testError) self.fail(self.test, testError); if (err) return self.failHook(hook, err); self.emit('hook end', hook); + delete hook.ctx.currentTest; next(++i); }); } - process.nextTick(function(){ + Runner.immediately(function(){ next(0); }); }; @@ -3971,6 +4582,8 @@ var test = this.test , self = this; + if (this.asyncOnly) test.asyncOnly = true; + try { test.on('error', function(err){ self.fail(test, err); @@ -3992,7 +4605,7 @@ Runner.prototype.runTests = function(suite, fn){ var self = this - , tests = suite.tests + , tests = suite.tests.slice() , test; function next(err) { @@ -4120,13 +4733,12 @@ var self = this , fn = fn || function(){}; - debug('start'); - - // uncaught callback - function uncaught(err) { + function uncaught(err){ self.uncaught(err); } + debug('start'); + // callback this.on('end', function(){ debug('end'); @@ -4158,6 +4770,21 @@ function filterLeaks(ok, globals) { return filter(globals, function(key){ + // Firefox and Chrome exposes iframes as index inside the window object + if (/^d+/.test(key)) return false; + + // in firefox + // if runner runs in an iframe, this iframe's window.getInterface method not init at first + // it is assigned in some seconds + if (global.navigator && /^getInterface/.test(key)) return false; + + // an iframe could be approached by window[iframeIndex] + // in ie6,7,8 and opera, iframeIndex is enumerable, this could cause leak + if (global.navigator && /^\d+/.test(key)) return false; + + // Opera and IE expose global variables for HTML element IDs (issue #243) + if (/^mocha-/.test(key)) return false; + var matched = filter(ok, function(ok){ if (~ok.indexOf('*')) return 0 == key.indexOf(ok.split('*')[0]); return key == ok; @@ -4237,7 +4864,9 @@ * Inherit from `EventEmitter.prototype`. */ -Suite.prototype = new EventEmitter; +function F(){}; +F.prototype = EventEmitter.prototype; +Suite.prototype = new F; Suite.prototype.constructor = Suite; @@ -4502,14 +5131,15 @@ * Inherit from `Runnable.prototype`. */ -Test.prototype = new Runnable; +function F(){}; +F.prototype = Runnable.prototype; +Test.prototype = new F; Test.prototype.constructor = Test; }); // module: test.js require.register("utils.js", function(module, exports, require){ - /** * Module dependencies. 
*/ @@ -4574,7 +5204,7 @@ /** * Array#reduce (<=IE8) - * + * * @param {Array} array * @param {Function} fn * @param {Object} initial value @@ -4674,7 +5304,7 @@ path = join(dir, path); if (fs.statSync(path).isDirectory()) { exports.files(path, ret); - } else if (path.match(/\.(js|coffee)$/)) { + } else if (path.match(/\.(js|coffee|litcoffee|coffee.md)$/)) { ret.push(path); } }); @@ -4707,8 +5337,8 @@ .replace(/^function *\(.*\) *{/, '') .replace(/\s+\}$/, ''); - var spaces = str.match(/^\n?( *)/)[1].length - , re = new RegExp('^ {' + spaces + '}', 'gm'); + var whitespace = str.match(/^\n?(\s*)/)[1] + , re = new RegExp('^' + whitespace, 'gm'); str = str.replace(re, ''); @@ -4793,6 +5423,19 @@ }; }); // module: utils.js +// The global object is "self" in Web Workers. +global = (function() { return this; })(); + +/** + * Save timer references to avoid Sinon interfering (see GH-237). + */ + +var Date = global.Date; +var setTimeout = global.setTimeout; +var setInterval = global.setInterval; +var clearTimeout = global.clearTimeout; +var clearInterval = global.clearInterval; + /** * Node shims. * @@ -4802,105 +5445,110 @@ * the browser. */ -process = {}; +var process = {}; process.exit = function(status){}; process.stdout = {}; -global = window; /** - * next tick implementation. + * Remove uncaughtException listener. */ -process.nextTick = (function(){ - // postMessage behaves badly on IE8 - if (window.ActiveXObject || !window.postMessage) { - return function(fn){ fn() }; +process.removeListener = function(e){ + if ('uncaughtException' == e) { + global.onerror = function() {}; } +}; - // based on setZeroTimeout by David Baron - // - http://dbaron.org/log/20100309-faster-timeouts - var timeouts = [] - , name = 'mocha-zero-timeout' +/** + * Implements uncaughtException listener. + */ - window.addEventListener('message', function(e){ - if (e.source == window && e.data == name) { - if (e.stopPropagation) e.stopPropagation(); - if (timeouts.length) timeouts.shift()(); - } - }, true); +process.on = function(e, fn){ + if ('uncaughtException' == e) { + global.onerror = function(err, url, line){ + fn(new Error(err + ' (' + url + ':' + line + ')')); + }; + } +}; + +/** + * Expose mocha. + */ - return function(fn){ - timeouts.push(fn); - window.postMessage(name, '*'); +var Mocha = global.Mocha = require('mocha'), + mocha = global.mocha = new Mocha({ reporter: 'html' }); + +var immediateQueue = [] + , immediateTimeout; + +function timeslice() { + var immediateStart = new Date().getTime(); + while (immediateQueue.length && (new Date().getTime() - immediateStart) < 100) { + immediateQueue.shift()(); } -})(); + if (immediateQueue.length) { + immediateTimeout = setTimeout(timeslice, 0); + } else { + immediateTimeout = null; + } +} /** - * Remove uncaughtException listener. + * High-performance override of Runner.immediately. */ -process.removeListener = function(e){ - if ('uncaughtException' == e) { - window.onerror = null; +Mocha.Runner.immediately = function(callback) { + immediateQueue.push(callback); + if (!immediateTimeout) { + immediateTimeout = setTimeout(timeslice, 0); } }; /** - * Implements uncaughtException listener. + * Override ui to ensure that the ui functions are initialized. + * Normally this would happen in Mocha.prototype.loadFiles. 
*/ -process.on = function(e, fn){ - if ('uncaughtException' == e) { - window.onerror = fn; - } +mocha.ui = function(ui){ + Mocha.prototype.ui.call(this, ui); + this.suite.emit('pre-require', global, null, this); + return this; }; -// boot -;(function(){ +/** + * Setup mocha with the given setting options. + */ - /** - * Expose mocha. - */ - - var Mocha = window.Mocha = require('mocha'), - mocha = window.mocha = new Mocha({ reporter: 'html' }); - - /** - * Override ui to ensure that the ui functions are initialized. - * Normally this would happen in Mocha.prototype.loadFiles. - */ - - mocha.ui = function(ui){ - Mocha.prototype.ui.call(this, ui); - this.suite.emit('pre-require', window, null, this); - return this; - }; +mocha.setup = function(opts){ + if ('string' == typeof opts) opts = { ui: opts }; + for (var opt in opts) this[opt](opts[opt]); + return this; +}; - /** - * Setup mocha with the given setting options. - */ - - mocha.setup = function(opts){ - if ('string' == typeof opts) opts = { ui: opts }; - for (var opt in opts) this[opt](opts[opt]); - return this; - }; +/** + * Run mocha, returning the Runner. + */ - /** - * Run mocha, returning the Runner. - */ - - mocha.run = function(fn){ - var options = mocha.options; - mocha.globals('location'); +mocha.run = function(fn){ + var options = mocha.options; + mocha.globals('location'); - var query = Mocha.utils.parseQuery(window.location.search || ''); - if (query.grep) mocha.grep(query.grep); + var query = Mocha.utils.parseQuery(global.location.search || ''); + if (query.grep) mocha.grep(query.grep); + if (query.invert) mocha.invert(); - return Mocha.prototype.run.call(mocha, function(){ + return Mocha.prototype.run.call(mocha, function(){ + // The DOM Document is not available in Web Workers. + if (global.document) { Mocha.utils.highlightTags('code'); - if (fn) fn(); - }); - }; -})(); + } + if (fn) fn(); + }); +}; + +/** + * Expose the process shim. + */ + +Mocha.process = process; })(); \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/commander/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/commander/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/commander/package.json 2012-10-11 02:23:14.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/commander/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -29,6 +29,10 @@ "node": ">= 0.4.x" }, "readme": "# Commander.js\n\n The complete solution for [node.js](http://nodejs.org) command-line interfaces, inspired by Ruby's [commander](https://github.com/visionmedia/commander).\n\n [![Build Status](https://secure.travis-ci.org/visionmedia/commander.js.png)](http://travis-ci.org/visionmedia/commander.js)\n\n## Installation\n\n $ npm install commander\n\n## Option parsing\n\n Options with commander are defined with the `.option()` method, also serving as documentation for the options. 
The example below parses args and options from `process.argv`, leaving remaining args as the `program.args` array which were not consumed by options.\n\n```js\n#!/usr/bin/env node\n\n/**\n * Module dependencies.\n */\n\nvar program = require('commander');\n\nprogram\n .version('0.0.1')\n .option('-p, --peppers', 'Add peppers')\n .option('-P, --pineapple', 'Add pineapple')\n .option('-b, --bbq', 'Add bbq sauce')\n .option('-c, --cheese [type]', 'Add the specified type of cheese [marble]', 'marble')\n .parse(process.argv);\n\nconsole.log('you ordered a pizza with:');\nif (program.peppers) console.log(' - peppers');\nif (program.pineapple) console.log(' - pineappe');\nif (program.bbq) console.log(' - bbq');\nconsole.log(' - %s cheese', program.cheese);\n```\n\n Short flags may be passed as a single arg, for example `-abc` is equivalent to `-a -b -c`. Multi-word options such as \"--template-engine\" are camel-cased, becoming `program.templateEngine` etc.\n\n## Automated --help\n\n The help information is auto-generated based on the information commander already knows about your program, so the following `--help` info is for free:\n\n``` \n $ ./examples/pizza --help\n\n Usage: pizza [options]\n\n Options:\n\n -V, --version output the version number\n -p, --peppers Add peppers\n -P, --pineapple Add pineappe\n -b, --bbq Add bbq sauce\n -c, --cheese Add the specified type of cheese [marble]\n -h, --help output usage information\n\n```\n\n## Coercion\n\n```js\nfunction range(val) {\n return val.split('..').map(Number);\n}\n\nfunction list(val) {\n return val.split(',');\n}\n\nprogram\n .version('0.0.1')\n .usage('[options] ')\n .option('-i, --integer ', 'An integer argument', parseInt)\n .option('-f, --float ', 'A float argument', parseFloat)\n .option('-r, --range ..', 'A range', range)\n .option('-l, --list ', 'A list', list)\n .option('-o, --optional [value]', 'An optional value')\n .parse(process.argv);\n\nconsole.log(' int: %j', program.integer);\nconsole.log(' float: %j', program.float);\nconsole.log(' optional: %j', program.optional);\nprogram.range = program.range || [];\nconsole.log(' range: %j..%j', program.range[0], program.range[1]);\nconsole.log(' list: %j', program.list);\nconsole.log(' args: %j', program.args);\n```\n\n## Custom help\n\n You can display arbitrary `-h, --help` information\n by listening for \"--help\". 
Commander will automatically\n exit once you are done so that the remainder of your program\n does not execute causing undesired behaviours, for example\n in the following executable \"stuff\" will not output when\n `--help` is used.\n\n```js\n#!/usr/bin/env node\n\n/**\n * Module dependencies.\n */\n\nvar program = require('../');\n\nfunction list(val) {\n return val.split(',').map(Number);\n}\n\nprogram\n .version('0.0.1')\n .option('-f, --foo', 'enable some foo')\n .option('-b, --bar', 'enable some bar')\n .option('-B, --baz', 'enable some baz');\n\n// must be before .parse() since\n// node's emit() is immediate\n\nprogram.on('--help', function(){\n console.log(' Examples:');\n console.log('');\n console.log(' $ custom-help --help');\n console.log(' $ custom-help -h');\n console.log('');\n});\n\nprogram.parse(process.argv);\n\nconsole.log('stuff');\n```\n\nyielding the following help output:\n\n```\n\nUsage: custom-help [options]\n\nOptions:\n\n -h, --help output usage information\n -V, --version output the version number\n -f, --foo enable some foo\n -b, --bar enable some bar\n -B, --baz enable some baz\n\nExamples:\n\n $ custom-help --help\n $ custom-help -h\n\n```\n\n## .prompt(msg, fn)\n\n Single-line prompt:\n\n```js\nprogram.prompt('name: ', function(name){\n console.log('hi %s', name);\n});\n```\n\n Multi-line prompt:\n\n```js\nprogram.prompt('description:', function(name){\n console.log('hi %s', name);\n});\n```\n\n Coercion:\n\n```js\nprogram.prompt('Age: ', Number, function(age){\n console.log('age: %j', age);\n});\n```\n\n```js\nprogram.prompt('Birthdate: ', Date, function(date){\n console.log('date: %s', date);\n});\n```\n\n## .password(msg[, mask], fn)\n\nPrompt for password without echoing:\n\n```js\nprogram.password('Password: ', function(pass){\n console.log('got \"%s\"', pass);\n process.stdin.destroy();\n});\n```\n\nPrompt for password with mask char \"*\":\n\n```js\nprogram.password('Password: ', '*', function(pass){\n console.log('got \"%s\"', pass);\n process.stdin.destroy();\n});\n```\n\n## .confirm(msg, fn)\n\n Confirm with the given `msg`:\n\n```js\nprogram.confirm('continue? 
', function(ok){\n console.log(' got %j', ok);\n});\n```\n\n## .choose(list, fn)\n\n Let the user choose from a `list`:\n\n```js\nvar list = ['tobi', 'loki', 'jane', 'manny', 'luna'];\n\nconsole.log('Choose the coolest pet:');\nprogram.choose(list, function(i){\n console.log('you chose %d \"%s\"', i, list[i]);\n});\n```\n\n## Links\n\n - [API documentation](http://visionmedia.github.com/commander.js/)\n - [ascii tables](https://github.com/LearnBoost/cli-table)\n - [progress bars](https://github.com/visionmedia/node-progress)\n - [more progress bars](https://github.com/substack/node-multimeter)\n - [examples](https://github.com/visionmedia/commander.js/tree/master/examples)\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + "readmeFilename": "Readme.md", + "bugs": { + "url": "https://github.com/visionmedia/commander.js/issues" + }, "_id": "commander@0.6.1", "_from": "commander@0.6.1" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/History.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/History.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/History.md 2012-05-04 21:05:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/History.md 2013-02-06 23:41:55.000000000 +0000 @@ -1,4 +1,19 @@ +0.7.2 / 2013-02-06 +================== + + * fix package.json + * fix: Mobile Safari (private mode) is broken with debug + * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript + +0.7.1 / 2013-02-05 +================== + + * add repository URL to package.json + * add DEBUG_COLORED to force colored output + * add browserify support + * fix component. 
Closes #24 + 0.7.0 / 2012-05-04 ================== diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/Makefile tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/Makefile --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/Makefile 2012-05-04 21:03:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/Makefile 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ - -debug.component.js: head.js debug.js tail.js - cat $^ > $@ - diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/Readme.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/Readme.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/Readme.md 2012-03-16 21:57:43.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/Readme.md 2013-02-05 16:51:47.000000000 +0000 @@ -1,7 +1,7 @@ # debug - tiny node.js debugging utility. + tiny node.js debugging utility modelled after node core's debugging technique. ## Installation @@ -9,24 +9,9 @@ $ npm install debug ``` -## Example +## Usage - This module is modelled after node core's debugging technique, allowing you to enable one or more topic-specific debugging functions, for example core does the following within many modules: - -```js -var debug; -if (process.env.NODE_DEBUG && /cluster/.test(process.env.NODE_DEBUG)) { - debug = function(x) { - var prefix = process.pid + ',' + - (process.env.NODE_WORKER_ID ? 'Worker' : 'Master'); - console.error(prefix, x); - }; -} else { - debug = function() { }; -} -``` - - This concept is extremely simple but it works well. With `debug` you simply invoke the exported function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. A unique color is selected per-function for visibility. + With `debug` you simply invoke the exported function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. A unique color is selected per-function for visibility. Example _app.js_: diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/component.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/component.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/component.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/component.json 2013-02-06 23:41:35.000000000 +0000 @@ -0,0 +1,9 @@ +{ + "name": "debug", + "repo": "visionmedia/debug", + "description": "small debugging utility", + "version": "0.7.2", + "keywords": ["debug", "log", "debugger"], + "scripts": ["index.js", "debug.js"], + "dependencies": {} +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/debug.component.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/debug.component.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/debug.component.js 2012-05-04 21:03:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/debug.component.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,120 +0,0 @@ -;(function(){ - -/** - * Create a debugger with the given `name`. 
- * - * @param {String} name - * @return {Type} - * @api public - */ - -function debug(name) { - if (!debug.enabled(name)) return function(){}; - - return function(fmt){ - var curr = new Date; - var ms = curr - (debug[name] || curr); - debug[name] = curr; - - fmt = name - + ' ' - + fmt - + ' +' + debug.humanize(ms); - - // This hackery is required for IE8 - // where `console.log` doesn't have 'apply' - window.console - && console.log - && Function.prototype.apply.call(console.log, console, arguments); - } -} - -/** - * The currently active debug mode names. - */ - -debug.names = []; -debug.skips = []; - -/** - * Enables a debug mode by name. This can include modes - * separated by a colon and wildcards. - * - * @param {String} name - * @api public - */ - -debug.enable = function(name) { - localStorage.debug = name; - - var split = (name || '').split(/[\s,]+/) - , len = split.length; - - for (var i = 0; i < len; i++) { - name = split[i].replace('*', '.*?'); - if (name[0] === '-') { - debug.skips.push(new RegExp('^' + name.substr(1) + '$')); - } - else { - debug.names.push(new RegExp('^' + name + '$')); - } - } -}; - -/** - * Disable debug output. - * - * @api public - */ - -debug.disable = function(){ - debug.enable(''); -}; - -/** - * Humanize the given `ms`. - * - * @param {Number} m - * @return {String} - * @api private - */ - -debug.humanize = function(ms) { - var sec = 1000 - , min = 60 * 1000 - , hour = 60 * min; - - if (ms >= hour) return (ms / hour).toFixed(1) + 'h'; - if (ms >= min) return (ms / min).toFixed(1) + 'm'; - if (ms >= sec) return (ms / sec | 0) + 's'; - return ms + 'ms'; -}; - -/** - * Returns true if the given mode name is enabled, false otherwise. - * - * @param {String} name - * @return {Boolean} - * @api public - */ - -debug.enabled = function(name) { - for (var i = 0, len = debug.skips.length; i < len; i++) { - if (debug.skips[i].test(name)) { - return false; - } - } - for (var i = 0, len = debug.names.length; i < len; i++) { - if (debug.names[i].test(name)) { - return true; - } - } - return false; -}; - -// persist - -if (window.localStorage) debug.enable(localStorage.debug); - module.exports = debug; - -})(); \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/debug.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/debug.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/debug.js 2012-05-04 21:01:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/debug.js 2013-02-06 21:58:25.000000000 +0000 @@ -1,5 +1,11 @@ /** + * Expose `debug()` as the module. + */ + +module.exports = debug; + +/** * Create a debugger with the given `name`. 
* * @param {String} name @@ -44,7 +50,9 @@ */ debug.enable = function(name) { - localStorage.debug = name; + try { + localStorage.debug = name; + } catch(e){} var split = (name || '').split(/[\s,]+/) , len = split.length; @@ -113,4 +121,4 @@ // persist -if (window.localStorage) debug.enable(localStorage.debug); \ No newline at end of file +if (window.localStorage) debug.enable(localStorage.debug); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/head.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/head.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/head.js 2012-05-04 21:02:56.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/head.js 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -;(function(){ diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/index.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/index.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/index.js 2012-01-22 18:13:17.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/index.js 2013-02-05 16:51:47.000000000 +0000 @@ -1,2 +1,5 @@ - -module.exports = require('./lib/debug'); \ No newline at end of file +if ('undefined' == typeof window) { + module.exports = require('./lib/debug'); +} else { + module.exports = require('./debug'); +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/lib/debug.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/lib/debug.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/lib/debug.js 2012-05-04 21:05:49.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/lib/debug.js 2013-02-06 21:58:25.000000000 +0000 @@ -1,4 +1,3 @@ - /** * Module dependencies. */ @@ -113,10 +112,10 @@ var ms = curr - (prev[name] || curr); prev[name] = curr; - fmt = ' \033[9' + c + 'm' + name + ' ' - + '\033[3' + c + 'm\033[90m' - + fmt + '\033[3' + c + 'm' - + ' +' + humanize(ms) + '\033[0m'; + fmt = ' \u001b[9' + c + 'm' + name + ' ' + + '\u001b[3' + c + 'm\u001b[90m' + + fmt + '\u001b[3' + c + 'm' + + ' +' + humanize(ms) + '\u001b[0m'; console.error.apply(this, arguments); } @@ -129,7 +128,7 @@ colored.enabled = plain.enabled = true; - return isatty + return isatty || process.env.DEBUG_COLORS ? 
colored : plain; } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/package.json 2012-10-11 02:23:14.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -1,6 +1,10 @@ { "name": "debug", - "version": "0.7.0", + "version": "0.7.2", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" + }, "description": "small debugging utility", "keywords": [ "debug", @@ -15,17 +19,22 @@ "devDependencies": { "mocha": "*" }, - "main": "index", - "browserify": "debug.component.js", + "main": "lib/debug.js", + "browserify": "debug.js", "engines": { "node": "*" }, "component": { "scripts": { - "debug": "debug.component.js" + "debug/index.js": "index.js", + "debug/debug.js": "debug.js" } }, - "readme": "\n# debug\n\n tiny node.js debugging utility.\n\n## Installation\n\n```\n$ npm install debug\n```\n\n## Example\n\n This module is modelled after node core's debugging technique, allowing you to enable one or more topic-specific debugging functions, for example core does the following within many modules:\n\n```js\nvar debug;\nif (process.env.NODE_DEBUG && /cluster/.test(process.env.NODE_DEBUG)) {\n debug = function(x) {\n var prefix = process.pid + ',' +\n (process.env.NODE_WORKER_ID ? 'Worker' : 'Master');\n console.error(prefix, x);\n };\n} else {\n debug = function() { };\n}\n```\n\n This concept is extremely simple but it works well. With `debug` you simply invoke the exported function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. A unique color is selected per-function for visibility.\n \nExample _app.js_:\n\n```js\nvar debug = require('debug')('http')\n , http = require('http')\n , name = 'My App';\n\n// fake app\n\ndebug('booting %s', name);\n\nhttp.createServer(function(req, res){\n debug(req.method + ' ' + req.url);\n res.end('hello\\n');\n}).listen(3000, function(){\n debug('listening');\n});\n\n// fake worker of some kind\n\nrequire('./worker');\n```\n\nExample _worker.js_:\n\n```js\nvar debug = require('debug')('worker');\n\nsetInterval(function(){\n debug('doing some work');\n}, 1000);\n```\n\n The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples:\n\n ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png)\n\n ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png)\n\n## Millisecond diff\n\n When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the \"+NNNms\" will show you how much time was spent between calls.\n\n ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png)\n\n When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below:\n \n ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png)\n\n## Conventions\n\n If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. 
If you have more than one debuggers you _should_ prefix them with your library name and use \":\" to separate features. For example \"bodyParser\" from Connect would then be \"connect:bodyParser\". \n\n## Wildcards\n\n The \"*\" character may be used as a wildcard. Suppose for example your library has debuggers named \"connect:bodyParser\", \"connect:compress\", \"connect:session\", instead of listing all three with `DEBUG=connect:bodyParser,connect.compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`.\n\n You can also exclude specific debuggers by prefixing them with a \"-\" character. For example, `DEBUG=* -connect:*` would include all debuggers except those starting with \"connect:\".\n\n## Browser support\n\n Debug works in the browser as well, currently persisted by `localStorage`. For example if you have `worker:a` and `worker:b` as shown below, and wish to debug both type `debug.enable('worker:*')` in the console and refresh the page, this will remain until you disable with `debug.disable()`. \n\n```js\na = debug('worker:a');\nb = debug('worker:b');\n\nsetInterval(function(){\n a('doing some work');\n}, 1000);\n\nsetInterval(function(){\n a('doing some work');\n}, 1200);\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - "_id": "debug@0.7.0", + "readme": "\n# debug\n\n tiny node.js debugging utility modelled after node core's debugging technique.\n\n## Installation\n\n```\n$ npm install debug\n```\n\n## Usage\n\n With `debug` you simply invoke the exported function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. 
A unique color is selected per-function for visibility.\n \nExample _app.js_:\n\n```js\nvar debug = require('debug')('http')\n , http = require('http')\n , name = 'My App';\n\n// fake app\n\ndebug('booting %s', name);\n\nhttp.createServer(function(req, res){\n debug(req.method + ' ' + req.url);\n res.end('hello\\n');\n}).listen(3000, function(){\n debug('listening');\n});\n\n// fake worker of some kind\n\nrequire('./worker');\n```\n\nExample _worker.js_:\n\n```js\nvar debug = require('debug')('worker');\n\nsetInterval(function(){\n debug('doing some work');\n}, 1000);\n```\n\n The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples:\n\n ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png)\n\n ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png)\n\n## Millisecond diff\n\n When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the \"+NNNms\" will show you how much time was spent between calls.\n\n ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png)\n\n When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below:\n \n ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png)\n\n## Conventions\n\n If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use \":\" to separate features. For example \"bodyParser\" from Connect would then be \"connect:bodyParser\". \n\n## Wildcards\n\n The \"*\" character may be used as a wildcard. Suppose for example your library has debuggers named \"connect:bodyParser\", \"connect:compress\", \"connect:session\", instead of listing all three with `DEBUG=connect:bodyParser,connect.compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`.\n\n You can also exclude specific debuggers by prefixing them with a \"-\" character. For example, `DEBUG=* -connect:*` would include all debuggers except those starting with \"connect:\".\n\n## Browser support\n\n Debug works in the browser as well, currently persisted by `localStorage`. For example if you have `worker:a` and `worker:b` as shown below, and wish to debug both type `debug.enable('worker:*')` in the console and refresh the page, this will remain until you disable with `debug.disable()`. 
\n\n```js\na = debug('worker:a');\nb = debug('worker:b');\n\nsetInterval(function(){\n a('doing some work');\n}, 1000);\n\nsetInterval(function(){\n a('doing some work');\n}, 1200);\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + "readmeFilename": "Readme.md", + "bugs": { + "url": "https://github.com/visionmedia/debug/issues" + }, + "_id": "debug@0.7.2", "_from": "debug@*" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/tail.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/tail.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/debug/tail.js 2012-05-04 21:03:55.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/debug/tail.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ - - module.exports = debug; - -})(); \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/LICENSE 2011-03-29 16:35:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -Software License Agreement (BSD License) - -Copyright (c) 2009-2011, Kevin Decker - -All rights reserved. - -Redistribution and use of this software in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above - copyright notice, this list of conditions and the - following disclaimer. - -* Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the - following disclaimer in the documentation and/or other - materials provided with the distribution. - -* Neither the name of Kevin Decker nor the names of its - contributors may be used to endorse or promote products - derived from this software without specific prior - written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR -IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
-IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/README.md
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/README.md	2011-05-31 02:18:40.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/README.md	2012-10-19 23:43:24.000000000 +0000
@@ -1,5 +1,7 @@
 # jsdiff

+[![Build Status](https://secure.travis-ci.org/kpdecker/jsdiff.png)](http://travis-ci.org/kpdecker/jsdiff)
+
 A javascript text differencing implementation.

 Based on the algorithm proposed in
@@ -45,6 +47,11 @@
 * oldHeader : Additional information to include in the old file header
 * newHeader : Additional information to include in thew new file header

+* JsDiff.applyPatch(oldStr, diffStr)
+  Applies a unified diff patch.
+
+  Return a string containing new version of provided data.
+
 * convertChangesToXML(changes)
   Converts a list of changes to a serialized XML format
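The `JsDiff.applyPatch` entry added to the README above pairs with `createPatch` from the same API. A hedged usage sketch, assuming the module is loaded with `require('diff')` as in its own documentation:

```js
var JsDiff = require('diff');

var oldStr = 'beep boop\n';
var newStr = 'beep boob blah\n';

// createPatch emits a unified diff with Index/---/+++ headers...
var patch = JsDiff.createPatch('example.txt', oldStr, newStr);

// ...and applyPatch replays that diff against the original text.
var patched = JsDiff.applyPatch(oldStr, patch);
console.log(patched === newStr); // expected: true

// Per the implementation later in this diff, a hunk that no longer
// matches its context makes applyPatch return false rather than a string.
console.log(JsDiff.applyPatch('something else\n', patch)); // false
```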
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/diff.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/diff.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/diff.js	2011-12-24 20:49:13.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/diff.js	2013-09-12 04:22:08.000000000 +0000
@@ -1,20 +1,21 @@
-/* See license.txt for terms of usage */
+/* See LICENSE file for terms of use */

 /*
  * Text diff implementation.
- * 
+ *
  * This library supports the following APIS:
  * JsDiff.diffChars: Character by character diff
  * JsDiff.diffWords: Word (as defined by \b regex) diff which ignores whitespace
  * JsDiff.diffLines: Line based diff
- * 
+ *
  * JsDiff.diffCss: Diff targeted at CSS content
- * 
+ *
  * These methods are based on the implementation proposed in
 * "An O(ND) Difference Algorithm and its Variations" (Myers, 1986).
  * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927
  */
 var JsDiff = (function() {
+  /*jshint maxparams: 5*/
   function clonePath(path) {
     return { newPos: path.newPos, components: path.components.slice(0) };
   }
@@ -29,22 +30,21 @@
   }
   function escapeHTML(s) {
     var n = s;
-    n = n.replace(/&/g, "&amp;");
-    n = n.replace(/</g, "&lt;");
-    n = n.replace(/>/g, "&gt;");
-    n = n.replace(/"/g, "&quot;");
+    n = n.replace(/&/g, '&amp;');
+    n = n.replace(/</g, '&lt;');
+    n = n.replace(/>/g, '&gt;');
+    n = n.replace(/"/g, '&quot;');

     return n;
   }
-
-  var fbDiff = function(ignoreWhitespace) {
+  var Diff = function(ignoreWhitespace) {
     this.ignoreWhitespace = ignoreWhitespace;
   };
-  fbDiff.prototype = {
+  Diff.prototype = {
       diff: function(oldString, newString) {
       // Handle the identity case (this is due to unrolling editLength == 0
-      if (newString == oldString) {
+      if (newString === oldString) {
         return [{ value: newString }];
       }
       if (!newString) {
@@ -127,7 +127,7 @@
       while (newPos+1 < newLen && oldPos+1 < oldLen && this.equals(newString[newPos+1], oldString[oldPos+1])) {
         newPos++;
         oldPos++;
-        
+
         this.pushComponent(basePath.components, newString[newPos], undefined, undefined);
       }
       basePath.newPos = newPos;
@@ -139,7 +139,7 @@
       if (this.ignoreWhitespace && !reWhitespace.test(left) && !reWhitespace.test(right)) {
         return true;
       } else {
-        return left == right;
+        return left === right;
       }
     },
     join: function(left, right) {
@@ -149,27 +149,31 @@
       return value;
     }
   };
-  
-  var CharDiff = new fbDiff();
-  
-  var WordDiff = new fbDiff(true);
-  WordDiff.tokenize = function(value) {
+
+  var CharDiff = new Diff();
+
+  var WordDiff = new Diff(true);
+  var WordWithSpaceDiff = new Diff();
+  WordDiff.tokenize = WordWithSpaceDiff.tokenize = function(value) {
     return removeEmpty(value.split(/(\s+|\b)/));
   };
-  
-  var CssDiff = new fbDiff(true);
+
+  var CssDiff = new Diff(true);
   CssDiff.tokenize = function(value) {
     return removeEmpty(value.split(/([{}:;,]|\s+)/));
   };
-  
-  var LineDiff = new fbDiff();
+
+  var LineDiff = new Diff();
   LineDiff.tokenize = function(value) {
     return value.split(/^/m);
   };
-  
+
   return {
+    Diff: Diff,
+
     diffChars: function(oldStr, newStr) { return CharDiff.diff(oldStr, newStr); },
     diffWords: function(oldStr, newStr) { return WordDiff.diff(oldStr, newStr); },
+    diffWordsWithSpace: function(oldStr, newStr) { return WordWithSpaceDiff.diff(oldStr, newStr); },
     diffLines: function(oldStr, newStr) { return LineDiff.diff(oldStr, newStr); },
     diffCss: function(oldStr, newStr) { return CssDiff.diff(oldStr, newStr); },

@@ -177,16 +181,16 @@
     createPatch: function(fileName, oldStr, newStr, oldHeader, newHeader) {
       var ret = [];

-      ret.push("Index: " + fileName);
-      ret.push("===================================================================");
-      ret.push("--- " + fileName + (typeof oldHeader === "undefined" ? "" : "\t" + oldHeader));
-      ret.push("+++ " + fileName + (typeof newHeader === "undefined" ? "" : "\t" + newHeader));
+      ret.push('Index: ' + fileName);
+      ret.push('===================================================================');
+      ret.push('--- ' + fileName + (typeof oldHeader === 'undefined' ? '' : '\t' + oldHeader));
+      ret.push('+++ ' + fileName + (typeof newHeader === 'undefined' ?
'' : '\t' + newHeader)); var diff = LineDiff.diff(oldStr, newStr); if (!diff[diff.length-1].value) { diff.pop(); // Remove trailing newline add } - diff.push({value: "", lines: []}); // Append an empty value to make cleanup easier + diff.push({value: '', lines: []}); // Append an empty value to make cleanup easier function contextLines(lines) { return lines.map(function(entry) { return ' ' + entry; }); @@ -194,7 +198,7 @@ function eofNL(curRange, i, current) { var last = diff[diff.length-2], isLast = i === diff.length-2, - isLastOfType = i === diff.length-3 && (current.added === !last.added || current.removed === !last.removed); + isLastOfType = i === diff.length-3 && (current.added !== last.added || current.removed !== last.removed); // Figure out if this is the last line for the given file and missing NL if (!/\n$/.test(current.value) && (isLast || isLastOfType)) { @@ -206,7 +210,7 @@ oldLine = 1, newLine = 1; for (var i = 0; i < diff.length; i++) { var current = diff[i], - lines = current.lines || current.value.replace(/\n$/, "").split("\n"); + lines = current.lines || current.value.replace(/\n$/, '').split('\n'); current.lines = lines; if (current.added || current.removed) { @@ -214,14 +218,14 @@ var prev = diff[i-1]; oldRangeStart = oldLine; newRangeStart = newLine; - + if (prev) { curRange = contextLines(prev.lines.slice(-4)); oldRangeStart -= curRange.length; newRangeStart -= curRange.length; } } - curRange.push.apply(curRange, lines.map(function(entry) { return (current.added?"+":"-") + entry; })); + curRange.push.apply(curRange, lines.map(function(entry) { return (current.added?'+':'-') + entry; })); eofNL(curRange, i, current); if (current.added) { @@ -239,9 +243,9 @@ // end the range and output var contextSize = Math.min(lines.length, 4); ret.push( - "@@ -" + oldRangeStart + "," + (oldLine-oldRangeStart+contextSize) - + " +" + newRangeStart + "," + (newLine-newRangeStart+contextSize) - + " @@"); + '@@ -' + oldRangeStart + ',' + (oldLine-oldRangeStart+contextSize) + + ' +' + newRangeStart + ',' + (newLine-newRangeStart+contextSize) + + ' @@'); ret.push.apply(ret, curRange); ret.push.apply(ret, contextLines(lines.slice(0, contextSize))); if (lines.length <= 4) { @@ -259,29 +263,92 @@ return ret.join('\n') + '\n'; }, + applyPatch: function(oldStr, uniDiff) { + var diffstr = uniDiff.split('\n'); + var diff = []; + var remEOFNL = false, + addEOFNL = false; + + for (var i = (diffstr[0][0]==='I'?4:0); i < diffstr.length; i++) { + if(diffstr[i][0] === '@') { + var meh = diffstr[i].split(/@@ -(\d+),(\d+) \+(\d+),(\d+) @@/); + diff.unshift({ + start:meh[3], + oldlength:meh[2], + oldlines:[], + newlength:meh[4], + newlines:[] + }); + } else if(diffstr[i][0] === '+') { + diff[0].newlines.push(diffstr[i].substr(1)); + } else if(diffstr[i][0] === '-') { + diff[0].oldlines.push(diffstr[i].substr(1)); + } else if(diffstr[i][0] === ' ') { + diff[0].newlines.push(diffstr[i].substr(1)); + diff[0].oldlines.push(diffstr[i].substr(1)); + } else if(diffstr[i][0] === '\\') { + if (diffstr[i-1][0] === '+') { + remEOFNL = true; + } else if(diffstr[i-1][0] === '-') { + addEOFNL = true; + } + } + } + + var str = oldStr.split('\n'); + for (var i = diff.length - 1; i >= 0; i--) { + var d = diff[i]; + for (var j = 0; j < d.oldlength; j++) { + if(str[d.start-1+j] !== d.oldlines[j]) { + return false; + } + } + Array.prototype.splice.apply(str,[d.start-1,+d.oldlength].concat(d.newlines)); + } + + if (remEOFNL) { + while (!str[str.length-1]) { + str.pop(); + } + } else if (addEOFNL) { + str.push(''); + } + return 
str.join('\n'); + }, + convertChangesToXML: function(changes){ var ret = []; for ( var i = 0; i < changes.length; i++) { var change = changes[i]; if (change.added) { - ret.push("<ins>"); + ret.push('<ins>'); } else if (change.removed) { - ret.push("<del>"); + ret.push('<del>'); } ret.push(escapeHTML(change.value)); if (change.added) { - ret.push("</ins>"); + ret.push('</ins>'); } else if (change.removed) { - ret.push("</del>"); + ret.push('</del>'); } } - return ret.join(""); + return ret.join(''); + }, + + // See: http://code.google.com/p/google-diff-match-patch/wiki/API + convertChangesToDMP: function(changes){ + var ret = [], change; + for ( var i = 0; i < changes.length; i++) { + change = changes[i]; + ret.push([(change.added ? 1 : change.removed ? -1 : 0), change.value]); + } + return ret; } }; })(); -if (typeof module !== "undefined") { +if (typeof module !== 'undefined') { module.exports = JsDiff; } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/index.html tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/index.html --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/index.html 2011-05-31 02:01:21.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/index.html 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@
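Since the hunks above both document and implement the new `applyPatch` entry point, a small round-trip sketch may help. This is an editor's illustration, not part of the package: the file name and strings are invented, and it assumes the module resolves as `require("diff")`. Note from the implementation above that `applyPatch` returns `false` when a hunk fails to apply.

```javascript
var JsDiff = require("diff")

var oldStr = "line one\nline two\n"
var newStr = "line one\nline 2\n"

// createPatch emits a unified diff; applyPatch replays it.
var patch = JsDiff.createPatch("example.txt", oldStr, newStr)
var result = JsDiff.applyPatch(oldStr, patch)

console.log(result === newStr) // true when the patch applies cleanly
```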
                    - - - - - diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/package.json 2012-10-11 02:23:14.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -1,6 +1,6 @@ { "name": "diff", - "version": "1.0.2", + "version": "1.0.7", "description": "A javascript text diff implementation.", "keywords": [ "diff", @@ -14,8 +14,8 @@ } ], "bugs": { - "email": "kpdecker@gmail.com", - "url": "http://github.com/kpdecker/jsdiff/issues" + "url": "http://github.com/kpdecker/jsdiff/issues", + "email": "kpdecker@gmail.com" }, "licenses": [ { @@ -32,11 +32,19 @@ }, "main": "./diff", "scripts": { - "test": "expresso test/*" + "test": "node_modules/.bin/mocha test/*.js" }, "dependencies": {}, - "devDependencies": {}, - "readme": "# jsdiff\n\nA javascript text differencing implementation.\n\nBased on the algorithm proposed in\n[\"An O(ND) Difference Algorithm and its Variations\" (Myers, 1986)](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927).\n\n## Installation\n\n npm install diff\n\nor\n\n git clone git://github.com/kpdecker/jsdiff.git\n\n## API\n\n* JsDiff.diffChars(oldStr, newStr)\n Diffs two blocks of text, comparing character by character.\n\n Returns a list of change objects (See below).\n\n* JsDiff.diffWords(oldStr, newStr)\n Diffs two blocks of text, comparing word by word.\n\n Returns a list of change objects (See below).\n\n* JsDiff.diffLines(oldStr, newStr)\n Diffs two blocks of text, comparing line by line.\n\n Returns a list of change objects (See below).\n\n* JsDiff.diffCss(oldStr, newStr)\n Diffs two blocks of text, comparing CSS tokens.\n\n Returns a list of change objects (See below).\n\n* JsDiff.createPatch(fileName, oldStr, newStr, oldHeader, newHeader)\n Creates a unified diff patch.\n\n Parameters:\n * fileName : String to be output in the filename sections of the patch\n * oldStr : Original string value\n * newStr : New string value\n * oldHeader : Additional information to include in the old file header\n * newHeader : Additional information to include in thew new file header\n\n* convertChangesToXML(changes)\n Converts a list of changes to a serialized XML format\n\n### Change Objects\nMany of the methods above return change objects. These objects are consist of the following fields:\n\n* value: Text content\n* added: True if the value was inserted into the new string\n* removed: True of the value was removed from the old string\n\nNote that some cases may omit a particular flag field. 
Comparison on the flag fields should always be done in a truthy or falsy manner.\n\n## [Example](http://kpdecker.github.com/jsdiff)\n\n## License\n\nSoftware License Agreement (BSD License)\n\nCopyright (c) 2009-2011, Kevin Decker kpdecker@gmail.com\n\nAll rights reserved.\n\nRedistribution and use of this software in source and binary forms, with or without modification,\nare permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above\n copyright notice, this list of conditions and the\n following disclaimer.\n\n* Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the\n following disclaimer in the documentation and/or other\n materials provided with the distribution.\n\n* Neither the name of Kevin Decker nor the names of its\n contributors may be used to endorse or promote products\n derived from this software without specific prior\n written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR\nIMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND\nFITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER\nIN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT\nOF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n", - "_id": "diff@1.0.2", - "_from": "diff@1.0.2" + "devDependencies": { + "mocha": "~1.6", + "should": "~1.2" + }, + "optionalDependencies": {}, + "files": [ + "diff.js" + ], + "readme": "# jsdiff\n\n[![Build Status](https://secure.travis-ci.org/kpdecker/jsdiff.png)](http://travis-ci.org/kpdecker/jsdiff)\n\nA javascript text differencing implementation.\n\nBased on the algorithm proposed in\n[\"An O(ND) Difference Algorithm and its Variations\" (Myers, 1986)](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927).\n\n## Installation\n\n npm install diff\n\nor\n\n git clone git://github.com/kpdecker/jsdiff.git\n\n## API\n\n* JsDiff.diffChars(oldStr, newStr)\n Diffs two blocks of text, comparing character by character.\n\n Returns a list of change objects (See below).\n\n* JsDiff.diffWords(oldStr, newStr)\n Diffs two blocks of text, comparing word by word.\n\n Returns a list of change objects (See below).\n\n* JsDiff.diffLines(oldStr, newStr)\n Diffs two blocks of text, comparing line by line.\n\n Returns a list of change objects (See below).\n\n* JsDiff.diffCss(oldStr, newStr)\n Diffs two blocks of text, comparing CSS tokens.\n\n Returns a list of change objects (See below).\n\n* JsDiff.createPatch(fileName, oldStr, newStr, oldHeader, newHeader)\n Creates a unified diff patch.\n\n Parameters:\n * fileName : String to be output in the filename sections of the patch\n * oldStr : Original string value\n * newStr : New string value\n * oldHeader : Additional information to include in the old file header\n * newHeader : Additional information to include in thew new file header\n\n* JsDiff.applyPatch(oldStr, diffStr)\n Applies a unified diff patch.\n\n Return a string containing new version of provided data.\n\n* convertChangesToXML(changes)\n Converts a list of changes to a 
serialized XML format\n\n### Change Objects\nMany of the methods above return change objects. These objects are consist of the following fields:\n\n* value: Text content\n* added: True if the value was inserted into the new string\n* removed: True of the value was removed from the old string\n\nNote that some cases may omit a particular flag field. Comparison on the flag fields should always be done in a truthy or falsy manner.\n\n## [Example](http://kpdecker.github.com/jsdiff)\n\n## License\n\nSoftware License Agreement (BSD License)\n\nCopyright (c) 2009-2011, Kevin Decker kpdecker@gmail.com\n\nAll rights reserved.\n\nRedistribution and use of this software in source and binary forms, with or without modification,\nare permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above\n copyright notice, this list of conditions and the\n following disclaimer.\n\n* Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the\n following disclaimer in the documentation and/or other\n materials provided with the distribution.\n\n* Neither the name of Kevin Decker nor the names of its\n contributors may be used to endorse or promote products\n derived from this software without specific prior\n written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR\nIMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND\nFITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER\nIN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT\nOF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n", + "readmeFilename": "README.md", + "_id": "diff@1.0.7", + "_from": "diff@1.0.7" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/style.css tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/style.css --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/diff/style.css 2011-05-31 02:01:21.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/diff/style.css 1970-01-01 00:00:00.000000000 +0000 @@ -1,81 +0,0 @@ -* { - margin: 0; - padding: 0; -} -html, body { - background: #EEE; - font: 12px sans-serif; -} -body { - padding-top: 1.8em; - -webkit-box-sizing: border-box; - -moz-box-sizing: border-box; - box-sizing: border-box; -} -html, body, table, tbody, tr, td { - height: 100% -} -table { - table-layout: fixed; - width: 100%; -} -td { - width: 33%; - padding: 3px 4px; - border: 1px solid transparent; - vertical-align: top; - font: 1em monospace; - text-align: left; - white-space: pre-wrap; -} -h1 { - display: inline; - font-size: 100%; -} -del { - text-decoration: none; - color: #b30000; - background: #fadad7; -} -ins { - background: #eaf2c2; - color: #406619; - text-decoration: none; -} - -#settings { - position: absolute; - top: 0; - left: 5px; - right: 5px; - height: 2em; - line-height: 2em; -} -#settings label { - margin-left: 1em; -} - -.source { - position: absolute; - right: 1%; - top: .2em; -} - -[contentEditable] { - background: #F9F9F9; - border-color: #BBB #D9D9D9 
#DDD; - border-radius: 4px; - -webkit-user-modify: read-write-plaintext-only; - outline: none; -} -[contentEditable]:focus { - background: #FFF; - border-color: #6699cc; - box-shadow: 0 0 4px #2175c9; -} - -@-moz-document url-prefix() { - body { - height: 99%; /* Hide scroll bar in Firefox */ - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/.npmignore tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/.npmignore --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/.npmignore 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/.npmignore 2013-07-03 16:11:34.000000000 +0000 @@ -0,0 +1,2 @@ +.*.swp +test/a/ diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/.travis.yml tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/.travis.yml --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/.travis.yml 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/.travis.yml 2012-08-07 00:38:13.000000000 +0000 @@ -0,0 +1,3 @@ +language: node_js +node_js: + - 0.8 diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/LICENSE 2012-07-24 01:27:41.000000000 +0000 @@ -0,0 +1,27 @@ +Copyright (c) Isaac Z. Schlueter ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/README.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/README.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/README.md 2013-04-21 05:13:16.000000000 +0000 @@ -0,0 +1,250 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +## Attention: node-glob users! + +The API has changed dramatically between 2.x and 3.x. 
This library is +now 100% JavaScript, and the integer flags have been replaced with an +options object. + +Also, there's an event emitter class, proper tests, and all the other +things you've come to expect from node modules. + +And best of all, no compilation! + +## Usage + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Features + +Please see the [minimatch +documentation](https://github.com/isaacs/minimatch) for more details. + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob(pattern, [options], cb) + +* `pattern` {String} Pattern to be matched +* `options` {Object} +* `cb` {Function} + * `err` {Error | null} + * `matches` {Array} filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` {String} Pattern to be matched +* `options` {Object} +* return: {Array} filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` {String} pattern to search for +* `options` {Object} +* `cb` {Function} Called when an error occurs, or matches are found + * `err` {Error | null} + * `matches` {Array} filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `error` The error encountered. When an error is encountered, the + glob object is in an undefined state, and should be discarded. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `statCache` Collection of all the stat results the glob search + performed. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `1` - Path exists, and is not a directory + * `2` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the matched filename. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `abort` Stop the search.
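As a concrete sketch of the event-based flow documented above (editor's illustration; the pattern and option are invented):

```javascript
var Glob = require("glob").Glob

// Starts walking the filesystem immediately; no callback is needed
// if you consume the events directly.
var g = new Glob("lib/**/*.js", {nocase: true})

g.on("match", function (file) {
  console.log("matched:", file)
})

g.on("end", function (matches) {
  console.log("done:", matches.length, "matches")
})

g.on("error", function (er) {
  console.error("glob failed:", er)
})
```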
+ +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the glob object, as well. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. It will cause + ELOOP to be triggered one level sooner in the case of cyclical + symbolic links. +* `silent` When an unusual error is encountered + when attempting to read a directory, a warning will be printed to + stderr. Set the `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered + when attempting to read a directory, the process will just continue on + in search of other matches. Set the `strict` option to raise an error + in these cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary to + set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `sync` Perform a synchronous glob search. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. + Set this flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `nocase` Perform a case-insensitive match. Note that case-insensitive + filesystems will sometimes result in glob returning results that are + case-insensitively matched anyway, since readdir and stat will not + raise an error. +* `debug` Set to enable debug logging in minimatch and glob. +* `globDebug` Set to enable debug logging in glob, but not minimatch. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. 
Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. 
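To make the caching trade-off above concrete, here is a hedged sketch of seeding a second search with the caches of a first one (editor's illustration; the patterns are invented, and this is only safe if the tree does not change between the two walks):

```javascript
var Glob = require("glob").Glob

// First walk: keep the Glob object around so its caches survive.
// Note the Glob constructor does not shift arguments, so an explicit
// (possibly empty) options object is passed.
var first = new Glob("test/**/*.js", {}, function (er, files) {
  if (er) throw er

  // Second walk over the same tree, reusing the first walk's
  // cache and statCache to avoid duplicate readdir/stat syscalls.
  new Glob("test/**/*-test.js", {
    cache: first.cache,
    statCache: first.statCache
  }, function (er2, tests) {
    if (er2) throw er2
    console.log(files.length, "files,", tests.length, "tests")
  })
})
```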
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/examples/g.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/examples/g.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/examples/g.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/examples/g.js 2012-01-18 23:32:32.000000000 +0000 @@ -0,0 +1,9 @@ +var Glob = require("../").Glob + +var pattern = "test/a/**/[cg]/../[cg]" +console.log(pattern) + +var mg = new Glob(pattern, {mark: true, sync:true}, function (er, matches) { + console.log("matches", matches) +}) +console.log("after") diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/examples/usr-local.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/examples/usr-local.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/examples/usr-local.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/examples/usr-local.js 2012-01-19 00:07:24.000000000 +0000 @@ -0,0 +1,9 @@ +var Glob = require("../").Glob + +var pattern = "{./*/*,/*,/usr/local/*}" +console.log(pattern) + +var mg = new Glob(pattern, {mark: true}, function (er, matches) { + console.log("matches", matches) +}) +console.log("after") diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/glob.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/glob.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/glob.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/glob.js 2013-07-03 16:11:34.000000000 +0000 @@ -0,0 +1,675 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// readdir(PREFIX) as ENTRIES +// If fails, END +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $]) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $]) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + + + +module.exports = glob + +var fs = require("graceful-fs") +, minimatch = require("minimatch") +, Minimatch = minimatch.Minimatch +, inherits = require("inherits") +, EE = require("events").EventEmitter +, path = require("path") +, isDir = {} +, assert = require("assert").ok + +function glob (pattern, options, cb) { + if (typeof options === "function") cb = options, options = {} + if (!options) options = {} + + if (typeof options === "number") { + deprecated() + return + } + + var g = new Glob(pattern, options, cb) + return g.sync ? 
g.found : g +} + +glob.fnmatch = deprecated + +function deprecated () { + throw new Error("glob's interface has changed. Please see the docs.") +} + +glob.sync = globSync +function globSync (pattern, options) { + if (typeof options === "number") { + deprecated() + return + } + + options = options || {} + options.sync = true + return glob(pattern, options) +} + + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (!(this instanceof Glob)) { + return new Glob(pattern, options, cb) + } + + if (typeof cb === "function") { + this.on("error", cb) + this.on("end", function (matches) { + cb(null, matches) + }) + } + + options = options || {} + + this.EOF = {} + this._emitQueue = [] + + this.maxDepth = options.maxDepth || 1000 + this.maxLength = options.maxLength || Infinity + this.cache = options.cache || {} + this.statCache = options.statCache || {} + + this.changedCwd = false + var cwd = process.cwd() + if (!options.hasOwnProperty("cwd")) this.cwd = cwd + else { + this.cwd = options.cwd + this.changedCwd = path.resolve(options.cwd) !== cwd + } + + this.root = options.root || path.resolve(this.cwd, "/") + this.root = path.resolve(this.root) + if (process.platform === "win32") + this.root = this.root.replace(/\\/g, "/") + + this.nomount = !!options.nomount + + if (!pattern) { + throw new Error("must provide pattern") + } + + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + this.strict = options.strict !== false + this.dot = !!options.dot + this.mark = !!options.mark + this.sync = !!options.sync + this.nounique = !!options.nounique + this.nonull = !!options.nonull + this.nosort = !!options.nosort + this.nocase = !!options.nocase + this.stat = !!options.stat + + this.debug = !!options.debug || !!options.globDebug + if (this.debug) + this.log = console.error + + this.silent = !!options.silent + + var mm = this.minimatch = new Minimatch(pattern, options) + this.options = mm.options + pattern = this.pattern = mm.pattern + + this.error = null + this.aborted = false + + // list of all the patterns that ** has resolved to, so + // we can avoid visiting multiple times. + this._globstars = {} + + EE.call(this) + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {<filename>: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. + this.matches = new Array(n) + + this.minimatch.set.forEach(iterator.bind(this)) + function iterator (pattern, i, set) { + this._process(pattern, 0, i, function (er) { + if (er) this.emit("error", er) + if (-- n <= 0) this._finish() + }) + } +} + +Glob.prototype.log = function () {} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + + var nou = this.nounique + , all = nou ?
[] : {} + + for (var i = 0, l = this.matches.length; i < l; i ++) { + var matches = this.matches[i] + this.log("matches[%d] =", i, matches) + // do like the shell, and spit out the literal glob + if (!matches) { + if (this.nonull) { + var literal = this.minimatch.globSet[i] + if (nou) all.push(literal) + else all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) all.push.apply(all, m) + else m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) all = Object.keys(all) + + if (!this.nosort) { + all = all.sort(this.nocase ? alphasorti : alphasort) + } + + if (this.mark) { + // at *some* point we statted all of these + all = all.map(function (m) { + var sc = this.cache[m] + if (!sc) + return m + var isDir = (Array.isArray(sc) || sc === 2) + if (isDir && m.slice(-1) !== "/") { + return m + "/" + } + if (!isDir && m.slice(-1) === "/") { + return m.replace(/\/+$/, "") + } + return m + }, this) + } + + this.log("emitting end", all) + + this.EOF = this.found = all + this.emitMatch(this.EOF) +} + +function alphasorti (a, b) { + a = a.toLowerCase() + b = b.toLowerCase() + return alphasort(a, b) +} + +function alphasort (a, b) { + return a > b ? 1 : a < b ? -1 : 0 +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit("abort") +} + +Glob.prototype.pause = function () { + if (this.paused) return + if (this.sync) + this.emit("error", new Error("Can't pause/resume sync glob")) + this.paused = true + this.emit("pause") +} + +Glob.prototype.resume = function () { + if (!this.paused) return + if (this.sync) + this.emit("error", new Error("Can't pause/resume sync glob")) + this.paused = false + this.emit("resume") + this._processEmitQueue() + //process.nextTick(this.emit.bind(this, "resume")) +} + +Glob.prototype.emitMatch = function (m) { + if (!this.stat || this.statCache[m] || m === this.EOF) { + this._emitQueue.push(m) + this._processEmitQueue() + } else { + this._stat(m, function(exists, isDir) { + if (exists) { + this._emitQueue.push(m) + this._processEmitQueue() + } + }) + } +} + +Glob.prototype._processEmitQueue = function (m) { + while (!this._processingEmitQueue && + !this.paused) { + this._processingEmitQueue = true + var m = this._emitQueue.shift() + if (!m) { + this._processingEmitQueue = false + break + } + + this.log('emit!', m === this.EOF ? "end" : "match") + + this.emit(m === this.EOF ? "end" : "match", m) + this._processingEmitQueue = false + } +} + +Glob.prototype._process = function (pattern, depth, index, cb_) { + assert(this instanceof Glob) + + var cb = function cb (er, res) { + assert(this instanceof Glob) + if (this.paused) { + if (!this._processQueue) { + this._processQueue = [] + this.once("resume", function () { + var q = this._processQueue + this._processQueue = null + q.forEach(function (cb) { cb() }) + }) + } + this._processQueue.push(cb_.bind(this, er, res)) + } else { + cb_.call(this, er, res) + } + }.bind(this) + + if (this.aborted) return cb() + + if (depth > this.maxDepth) return cb() + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === "string") { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + prefix = pattern.join("/") + this._stat(prefix, function (exists, isDir) { + // either it's there, or it isn't. + // nothing more to do, either way. 
+ if (exists) { + if (prefix && isAbsolute(prefix) && !this.nomount) { + if (prefix.charAt(0) === "/") { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + } + } + + if (process.platform === "win32") + prefix = prefix.replace(/\\/g, "/") + + this.matches[index] = this.matches[index] || {} + this.matches[index][prefix] = true + this.emitMatch(prefix) + } + return cb() + }) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's "absolute" like /foo/bar, + // or "relative" like "../baz" + prefix = pattern.slice(0, n) + prefix = prefix.join("/") + break + } + + // get the list of entries. + var read + if (prefix === null) read = "." + else if (isAbsolute(prefix) || isAbsolute(pattern.join("/"))) { + if (!prefix || !isAbsolute(prefix)) { + prefix = path.join("/", prefix) + } + read = prefix = path.resolve(prefix) + + // if (process.platform === "win32") + // read = prefix = prefix.replace(/^[a-zA-Z]:|\\/g, "/") + + this.log('absolute: ', prefix, this.root, pattern, read) + } else { + read = prefix + } + + this.log('readdir(%j)', read, this.cwd, this.root) + + return this._readdir(read, function (er, entries) { + if (er) { + // not a directory! + // this means that, whatever else comes after this, it can never match + return cb() + } + + // globstar is special + if (pattern[n] === minimatch.GLOBSTAR) { + // test without the globstar, and with every child both below + // and replacing the globstar. + var s = [ pattern.slice(0, n).concat(pattern.slice(n + 1)) ] + entries.forEach(function (e) { + if (e.charAt(0) === "." && !this.dot) return + // instead of the globstar + s.push(pattern.slice(0, n).concat(e).concat(pattern.slice(n + 1))) + // below the globstar + s.push(pattern.slice(0, n).concat(e).concat(pattern.slice(n))) + }, this) + + s = s.filter(function (pattern) { + var key = gsKey(pattern) + var seen = !this._globstars[key] + this._globstars[key] = true + return seen + }, this) + + if (!s.length) + return cb() + + // now asyncForEach over this + var l = s.length + , errState = null + s.forEach(function (gsPattern) { + this._process(gsPattern, depth + 1, index, function (er) { + if (errState) return + if (er) return cb(errState = er) + if (--l <= 0) return cb() + }) + }, this) + + return + } + + // not a globstar + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = pattern[n] + var rawGlob = pattern[n]._glob + , dotOk = this.dot || rawGlob.charAt(0) === "." + + entries = entries.filter(function (e) { + return (e.charAt(0) !== "." || dotOk) && + e.match(pattern[n]) + }) + + // If n === pattern.length - 1, then there's no need for the extra stat + // *unless* the user has specified "mark" or "stat" explicitly. + // We know that they exist, since the readdir returned them. 
+ if (n === pattern.length - 1 && + !this.mark && + !this.stat) { + entries.forEach(function (e) { + if (prefix) { + if (prefix !== "/") e = prefix + "/" + e + else e = prefix + e + } + if (e.charAt(0) === "/" && !this.nomount) { + e = path.join(this.root, e) + } + + if (process.platform === "win32") + e = e.replace(/\\/g, "/") + + this.matches[index] = this.matches[index] || {} + this.matches[index][e] = true + this.emitMatch(e) + }, this) + return cb.call(this) + } + + + // now test all the remaining entries as stand-ins for that part + // of the pattern. + var l = entries.length + , errState = null + if (l === 0) return cb() // no matches possible + entries.forEach(function (e) { + var p = pattern.slice(0, n).concat(e).concat(pattern.slice(n + 1)) + this._process(p, depth + 1, index, function (er) { + if (errState) return + if (er) return cb(errState = er) + if (--l === 0) return cb.call(this) + }) + }, this) + }) + +} + +function gsKey (pattern) { + return '**' + pattern.map(function (p) { + return (p === minimatch.GLOBSTAR) ? '**' : (''+p) + }).join('/') +} + +Glob.prototype._stat = function (f, cb) { + assert(this instanceof Glob) + var abs = f + if (f.charAt(0) === "/") { + abs = path.join(this.root, f) + } else if (this.changedCwd) { + abs = path.resolve(this.cwd, f) + } + + if (f.length > this.maxLength) { + var er = new Error("Path name too long") + er.code = "ENAMETOOLONG" + er.path = f + return this._afterStat(f, abs, cb, er) + } + + this.log('stat', [this.cwd, f, '=', abs]) + + if (!this.stat && this.cache.hasOwnProperty(f)) { + var exists = this.cache[f] + , isDir = exists && (Array.isArray(exists) || exists === 2) + if (this.sync) return cb.call(this, !!exists, isDir) + return process.nextTick(cb.bind(this, !!exists, isDir)) + } + + var stat = this.statCache[abs] + if (this.sync || stat) { + var er + try { + stat = fs.statSync(abs) + } catch (e) { + er = e + } + this._afterStat(f, abs, cb, er, stat) + } else { + fs.stat(abs, this._afterStat.bind(this, f, abs, cb)) + } +} + +Glob.prototype._afterStat = function (f, abs, cb, er, stat) { + var exists + assert(this instanceof Glob) + + if (abs.slice(-1) === "/" && stat && !stat.isDirectory()) { + this.log("should be ENOTDIR, fake it") + + er = new Error("ENOTDIR, not a directory '" + abs + "'") + er.path = abs + er.code = "ENOTDIR" + stat = null + } + + var emit = !this.statCache[abs] + this.statCache[abs] = stat + + if (er || !stat) { + exists = false + } else { + exists = stat.isDirectory() ? 2 : 1 + if (emit) + this.emit('stat', f, stat) + } + this.cache[f] = this.cache[f] || exists + cb.call(this, !!exists, exists === 2) +} + +Glob.prototype._readdir = function (f, cb) { + assert(this instanceof Glob) + var abs = f + if (f.charAt(0) === "/") { + abs = path.join(this.root, f) + } else if (isAbsolute(f)) { + abs = f + } else if (this.changedCwd) { + abs = path.resolve(this.cwd, f) + } + + if (f.length > this.maxLength) { + var er = new Error("Path name too long") + er.code = "ENAMETOOLONG" + er.path = f + return this._afterReaddir(f, abs, cb, er) + } + + this.log('readdir', [this.cwd, f, abs]) + if (this.cache.hasOwnProperty(f)) { + var c = this.cache[f] + if (Array.isArray(c)) { + if (this.sync) return cb.call(this, null, c) + return process.nextTick(cb.bind(this, null, c)) + } + + if (!c || c === 1) { + // either ENOENT or ENOTDIR + var code = c ? "ENOTDIR" : "ENOENT" + , er = new Error((c ? 
"Not a directory" : "Not found") + ": " + f) + er.path = f + er.code = code + this.log(f, er) + if (this.sync) return cb.call(this, er) + return process.nextTick(cb.bind(this, er)) + } + + // at this point, c === 2, meaning it's a dir, but we haven't + // had to read it yet, or c === true, meaning it's *something* + // but we don't have any idea what. Need to read it, either way. + } + + if (this.sync) { + var er, entries + try { + entries = fs.readdirSync(abs) + } catch (e) { + er = e + } + return this._afterReaddir(f, abs, cb, er, entries) + } + + fs.readdir(abs, this._afterReaddir.bind(this, f, abs, cb)) +} + +Glob.prototype._afterReaddir = function (f, abs, cb, er, entries) { + assert(this instanceof Glob) + if (entries && !er) { + this.cache[f] = entries + // if we haven't asked to stat everything for suresies, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. This also gets us one step + // further into ELOOP territory. + if (!this.mark && !this.stat) { + entries.forEach(function (e) { + if (f === "/") e = f + e + else e = f + "/" + e + this.cache[e] = true + }, this) + } + + return cb.call(this, er, entries) + } + + // now handle errors, and cache the information + if (er) switch (er.code) { + case "ENOTDIR": // totally normal. means it *does* exist. + this.cache[f] = 1 + return cb.call(this, er) + case "ENOENT": // not terribly unusual + case "ELOOP": + case "ENAMETOOLONG": + case "UNKNOWN": + this.cache[f] = false + return cb.call(this, er) + default: // some unusual error. Treat as failure. + this.cache[f] = false + if (this.strict) this.emit("error", er) + if (!this.silent) console.error("glob error", er) + return cb.call(this, er) + } +} + +var isAbsolute = process.platform === "win32" ? absWin : absUnix + +function absWin (p) { + if (absUnix(p)) return true + // pull off the device/UNC bit from a windows path. + // from node's lib/path.js + var splitDeviceRe = + /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/ + , result = splitDeviceRe.exec(p) + , device = result[1] || '' + , isUnc = device && device.charAt(1) !== ':' + , isAbsolute = !!result[2] || isUnc // UNC paths are always absolute + + return isAbsolute +} + +function absUnix (p) { + return p.charAt(0) === "/" || p === "" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/.npmignore tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/.npmignore --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/.npmignore 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/.npmignore 2011-11-28 22:53:47.000000000 +0000 @@ -0,0 +1 @@ +node_modules/ diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/LICENSE 2013-05-27 04:05:36.000000000 +0000 @@ -0,0 +1,27 @@ +Copyright (c) Isaac Z. Schlueter ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. 
Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/README.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/README.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/README.md 2013-07-11 15:25:53.000000000 +0000 @@ -0,0 +1,26 @@ +# graceful-fs + +graceful-fs functions as a drop-in replacement for the fs module, +making various improvements. + +The improvements are meant to normalize behavior across different +platforms and environments, and to make filesystem access more +resilient to errors. + +## Improvements over fs module + +graceful-fs: + +* Queues up `open` and `readdir` calls, and retries them once + something closes if there is an EMFILE error from too many file + descriptors. +* fixes `lchmod` for Node versions prior to 0.6.2. +* implements `fs.lutimes` if possible. Otherwise it becomes a noop. +* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or + `lchown` if the user isn't root. +* makes `lchmod` and `lchown` become noops, if not available. +* retries reading a file if `read` results in EAGAIN error. + +On Windows, it retries renaming a file for up to one second if `EACCESS` +or `EPERM` error occurs, likely because antivirus software has locked +the directory. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/graceful-fs.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/graceful-fs.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/graceful-fs.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/graceful-fs.js 2013-09-07 19:17:21.000000000 +0000 @@ -0,0 +1,159 @@ +// Monkey-patching the fs module. +// It's ugly, but there is simply no other way to do this. 
+var fs = module.exports = require('fs') + +var assert = require('assert') + +// fix up some busted stuff, mostly on windows and old nodes +require('./polyfills.js') + +// The EMFILE enqueuing stuff + +var util = require('util') + +function noop () {} + +var debug = noop +var util = require('util') +if (util.debuglog) + debug = util.debuglog('gfs') +else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS: ' + m.split(/\n/).join('\nGFS: ') + console.error(m) + } + +if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug('fds', fds) + debug(queue) + assert.equal(queue.length, 0) + }) +} + + +var originalOpen = fs.open +fs.open = open + +function open(path, flags, mode, cb) { + if (typeof mode === "function") cb = mode, mode = null + if (typeof cb !== "function") cb = noop + new OpenReq(path, flags, mode, cb) +} + +function OpenReq(path, flags, mode, cb) { + this.path = path + this.flags = flags + this.mode = mode + this.cb = cb + Req.call(this) +} + +util.inherits(OpenReq, Req) + +OpenReq.prototype.process = function() { + originalOpen.call(fs, this.path, this.flags, this.mode, this.done) +} + +var fds = {} +OpenReq.prototype.done = function(er, fd) { + debug('open done', er, fd) + if (fd) + fds['fd' + fd] = this.path + Req.prototype.done.call(this, er, fd) +} + + +var originalReaddir = fs.readdir +fs.readdir = readdir + +function readdir(path, cb) { + if (typeof cb !== "function") cb = noop + new ReaddirReq(path, cb) +} + +function ReaddirReq(path, cb) { + this.path = path + this.cb = cb + Req.call(this) +} + +util.inherits(ReaddirReq, Req) + +ReaddirReq.prototype.process = function() { + originalReaddir.call(fs, this.path, this.done) +} + +ReaddirReq.prototype.done = function(er, files) { + Req.prototype.done.call(this, er, files) + onclose() +} + + +var originalClose = fs.close +fs.close = close + +function close (fd, cb) { + debug('close', fd) + if (typeof cb !== "function") cb = noop + delete fds['fd' + fd] + originalClose.call(fs, fd, function(er) { + onclose() + cb(er) + }) +} + + +var originalCloseSync = fs.closeSync +fs.closeSync = closeSync + +function closeSync (fd) { + try { + return originalCloseSync(fd) + } finally { + onclose() + } +} + + +// Req class +function Req () { + // start processing + this.done = this.done.bind(this) + this.failures = 0 + this.process() +} + +Req.prototype.done = function (er, result) { + var tryAgain = false + if (er) { + var code = er.code + var tryAgain = code === "EMFILE" + if (process.platform === "win32") + tryAgain = tryAgain || code === "OK" + } + + if (tryAgain) { + this.failures ++ + enqueue(this) + } else { + var cb = this.cb + cb(er, result) + } +} + +var queue = [] + +function enqueue(req) { + queue.push(req) + debug('enqueue %d %s', queue.length, req.constructor.name, req) +} + +function onclose() { + var req = queue.shift() + if (req) { + debug('process', req.constructor.name, req) + req.process() + } +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -0,0 +1,48 @@ +{ + "author": { + 
"name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me" + }, + "name": "graceful-fs", + "description": "A drop-in replacement for fs, making various improvements.", + "version": "2.0.1", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-graceful-fs.git" + }, + "main": "graceful-fs.js", + "engines": { + "node": ">=0.4.0" + }, + "directories": { + "test": "test" + }, + "scripts": { + "test": "tap test/*.js" + }, + "keywords": [ + "fs", + "module", + "reading", + "retry", + "retries", + "queue", + "error", + "errors", + "handling", + "EMFILE", + "EAGAIN", + "EINVAL", + "EPERM", + "EACCESS" + ], + "license": "BSD", + "readme": "# graceful-fs\n\ngraceful-fs functions as a drop-in replacement for the fs module,\nmaking various improvements.\n\nThe improvements are meant to normalize behavior across different\nplatforms and environments, and to make filesystem access more\nresilient to errors.\n\n## Improvements over fs module\n\ngraceful-fs:\n\n* Queues up `open` and `readdir` calls, and retries them once\n something closes if there is an EMFILE error from too many file\n descriptors.\n* fixes `lchmod` for Node versions prior to 0.6.2.\n* implements `fs.lutimes` if possible. Otherwise it becomes a noop.\n* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or\n `lchown` if the user isn't root.\n* makes `lchmod` and `lchown` become noops, if not available.\n* retries reading a file if `read` results in EAGAIN error.\n\nOn Windows, it retries renaming a file for up to one second if `EACCESS`\nor `EPERM` error occurs, likely because antivirus software has locked\nthe directory.\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/isaacs/node-graceful-fs/issues" + }, + "_id": "graceful-fs@2.0.1", + "_from": "graceful-fs@~2.0.0" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/polyfills.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/polyfills.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/polyfills.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/graceful-fs/polyfills.js 2013-07-11 07:08:07.000000000 +0000 @@ -0,0 +1,228 @@ +var fs = require('fs') +var constants = require('constants') + +var origCwd = process.cwd +var cwd = null +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +var chdir = process.chdir +process.chdir = function(d) { + cwd = null + chdir.call(process, d) +} + +// (re-)implement some things that are known busted or missing. + +// lchmod, broken prior to 0.6.2 +// back-port the fix here. +if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + fs.lchmod = function (path, mode, callback) { + callback = callback || noop + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + callback(err || err2) + }) + }) + }) + } + + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + var err, err2 + try { + var ret = fs.fchmodSync(fd, mode) + } catch (er) { + err = er + } + try { + fs.closeSync(fd) + } catch (er) { + err2 = er + } + if (err || err2) throw (err || err2) + return ret + } +} + + +// lutimes implementation, or no-op +if (!fs.lutimes) { + if (constants.hasOwnProperty("O_SYMLINK")) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + cb = cb || noop + if (er) return cb(er) + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + return cb(er || er2) + }) + }) + }) + } + + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + , err + , err2 + , ret + + try { + var ret = fs.futimesSync(fd, at, mt) + } catch (er) { + err = er + } + try { + fs.closeSync(fd) + } catch (er) { + err2 = er + } + if (err || err2) throw (err || err2) + return ret + } + + } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) { + // maybe utimensat will be bound soonish? + fs.lutimes = function (path, at, mt, cb) { + fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb) + } + + fs.lutimesSync = function (path, at, mt) { + return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW) + } + + } else { + fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) } + fs.lutimesSync = function () {} + } +} + + +// https://github.com/isaacs/node-graceful-fs/issues/4 +// Chown should not fail on einval or eperm if non-root. + +fs.chown = chownFix(fs.chown) +fs.fchown = chownFix(fs.fchown) +fs.lchown = chownFix(fs.lchown) + +fs.chownSync = chownFixSync(fs.chownSync) +fs.fchownSync = chownFixSync(fs.fchownSync) +fs.lchownSync = chownFixSync(fs.lchownSync) + +function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er, res) { + if (chownErOk(er)) er = null + cb(er, res) + }) + } +} + +function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } +} + +function chownErOk (er) { + // if there's no getuid, or if getuid() is something other than 0, + // and the error is EINVAL or EPERM, then just ignore it. + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // When running as root, or if other types of errors are encountered, + // then it's strict. + if (!er || (!process.getuid || process.getuid() !== 0) + && (er.code === "EINVAL" || er.code === "EPERM")) return true +} + + +// if lchmod/lchown do not exist, then make them no-ops +if (!fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + process.nextTick(cb) + } + fs.lchmodSync = function () {} +} +if (!fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + process.nextTick(cb) + } + fs.lchownSync = function () {} +} + + + +// on Windows, A/V software can lock the directory, causing this +// to fail with an EACCES or EPERM if the directory contains newly +// created files. Try again on failure, for up to 1 second. 
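+// The wrapper below retries by re-invoking the original rename from
+// its own callback, so it polls as fast as the event loop allows; once
+// the one-second window has passed, the most recent error is handed to
+// the caller unchanged.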
+if (process.platform === "win32") {
+ var rename_ = fs.rename
+ fs.rename = function rename (from, to, cb) {
+ var start = Date.now()
+ rename_(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 1000) {
+ return rename_(from, to, CB)
+ }
+ cb(er)
+ })
+ }
+}
+
+
+// if read() returns EAGAIN, then just try it again.
+var read = fs.read
+fs.read = function (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return read.call(fs, fd, buffer, offset, length, position, callback)
+}
+
+var readSync = fs.readSync
+fs.readSync = function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+}
+
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/LICENSE
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/LICENSE 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/LICENSE 2013-08-19 22:09:32.000000000 +0000
@@ -0,0 +1,16 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/README.md
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/README.md 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/README.md 2013-05-16 14:24:38.000000000 +0000
@@ -0,0 +1,42 @@
+Browser-friendly inheritance fully compatible with standard node.js
+[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).
+
+This package exports the standard `inherits` from the node.js `util`
+module in a node environment, but also provides an alternative,
+browser-friendly implementation through the [browser
+field](https://gist.github.com/shtylman/4339901). The alternative
+implementation is a literal copy of the standard one, kept in a
+standalone module to avoid requiring `util`. It also has a shim for old
+browsers with no `Object.create` support.
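+
+For instance (a minimal sketch; the `main`/`browser` pair is quoted
+from this package's own `package.json`, and the assertions mirror its
+`test.js`), a bundler that honors the browser field picks up the
+standalone file, while calling code stays the same in both
+environments:
+
+```js
+// package.json routes node and browsers to different files:
+//   "main": "./inherits.js"             (thin wrapper over util.inherits)
+//   "browser": "./inherits_browser.js"  (standalone copy, no util needed)
+var inherits = require('inherits');
+
+function Parent() {}
+function Child() { Parent.call(this); }
+
+inherits(Child, Parent);
+
+var c = new Child();
+// c instanceof Parent     -> true
+// Child.super_ === Parent -> true
+```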
+
+While ensuring that you are using the standard `inherits`
+implementation in a node.js environment, it allows bundlers such as
+[browserify](https://github.com/substack/node-browserify) to omit the
+full `util` package from your client code when all you need is the
+`inherits` function. This is worthwhile, because the browser shim for
+the `util` package is large and `inherits` is often the only function
+you need from it.
+
+It's recommended to use this package instead of
+`require('util').inherits` for any code that may be used in the
+browser as well as in node.js.
+
+## usage
+
+```js
+var inherits = require('inherits');
+// then use exactly as the standard one
+```
+
+## note on version ~1.0
+
+Version ~1.0 had a completely different motivation and is compatible
+neither with 2.0 nor with the standard node.js `inherits`.
+
+If you are using version ~1.0 and planning to switch to ~2.0, be
+careful:
+
+* the new version uses `super_` instead of `super` for referencing
+ the superclass
+* the new version overwrites the current prototype, while the old one
+ preserved any existing fields on it
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits.js 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits.js 2013-05-16 14:22:57.000000000 +0000
@@ -0,0 +1 @@
+module.exports = require('util').inherits
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits_browser.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits_browser.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits_browser.js 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/inherits_browser.js 2013-05-16 14:39:58.000000000 +0000
@@ -0,0 +1,23 @@
+if (typeof Object.create === 'function') {
+ // implementation from standard node.js 'util' module
+ module.exports = function inherits(ctor, superCtor) {
+ ctor.super_ = superCtor
+ ctor.prototype = Object.create(superCtor.prototype, {
+ constructor: {
+ value: ctor,
+ enumerable: false,
+ writable: true,
+ configurable: true
+ }
+ });
+ };
+} else {
+ // old school shim for old browsers
+ module.exports = function inherits(ctor, superCtor) {
+ ctor.super_ = superCtor
+ var TempCtor = function () {}
+ TempCtor.prototype = superCtor.prototype
+ ctor.prototype = new TempCtor()
+ ctor.prototype.constructor = ctor
+ }
+}
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/package.json
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/package.json 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/package.json 2013-10-25 23:13:13.000000000 +0000
@@ -0,0 +1,32 @@
+{
+ "name": "inherits",
+ "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()",
+ "version": "2.0.1",
+ "keywords": [
+ "inheritance",
+ "class",
+ "klass",
+ "oop",
+ "object-oriented",
+ "inherits",
+ "browser",
+ "browserify"
+ ],
+ "main": "./inherits.js",
+ 
"browser": "./inherits_browser.js", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/inherits" + }, + "license": "ISC", + "scripts": { + "test": "node test" + }, + "readme": "Browser-friendly inheritance fully compatible with standard node.js\n[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).\n\nThis package exports standard `inherits` from node.js `util` module in\nnode environment, but also provides alternative browser-friendly\nimplementation through [browser\nfield](https://gist.github.com/shtylman/4339901). Alternative\nimplementation is a literal copy of standard one located in standalone\nmodule to avoid requiring of `util`. It also has a shim for old\nbrowsers with no `Object.create` support.\n\nWhile keeping you sure you are using standard `inherits`\nimplementation in node.js environment, it allows bundlers such as\n[browserify](https://github.com/substack/node-browserify) to not\ninclude full `util` package to your client code if all you need is\njust `inherits` function. It worth, because browser shim for `util`\npackage is large and `inherits` is often the single function you need\nfrom it.\n\nIt's recommended to use this package instead of\n`require('util').inherits` for any code that has chances to be used\nnot only in node.js but in browser too.\n\n## usage\n\n```js\nvar inherits = require('inherits');\n// then use exactly as the standard one\n```\n\n## note on version ~1.0\n\nVersion ~1.0 had completely different motivation and is not compatible\nneither with 2.0 nor with standard node.js `inherits`.\n\nIf you are using version ~1.0 and planning to switch to ~2.0, be\ncareful:\n\n* new version uses `super_` instead of `super` for referencing\n superclass\n* new version overwrites current prototype while old one preserves any\n existing fields on it\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/isaacs/inherits/issues" + }, + "_id": "inherits@2.0.1", + "_from": "inherits@2" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/test.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/test.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/inherits/test.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/inherits/test.js 2013-05-16 14:43:08.000000000 +0000 @@ -0,0 +1,25 @@ +var inherits = require('./inherits.js') +var assert = require('assert') + +function test(c) { + assert(c.constructor === Child) + assert(c.constructor.super_ === Parent) + assert(Object.getPrototypeOf(c) === Child.prototype) + assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype) + assert(c instanceof Child) + assert(c instanceof Parent) +} + +function Child() { + Parent.call(this) + test(this) +} + +function Parent() {} + +inherits(Child, Parent) + +var c = new Child +test(c) + +console.log('ok') diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/LICENSE 2011-07-29 19:08:13.000000000 +0000 @@ -0,0 +1,23 @@ +Copyright 2009, 2010, 2011 Isaac Z. Schlueter. 
+All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/README.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/README.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/README.md 2013-02-21 15:47:44.000000000 +0000 @@ -0,0 +1,218 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +Eventually, it will replace the C binding in node-glob. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +### Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. 
**Note that this is different from the way that `**` is
+handled by ruby's `Dir` class.**
+
+If an escaped pattern has no matches, and the `nonull` flag is set,
+then minimatch.match returns the pattern as-provided, rather than
+interpreting the character escapes. For example,
+`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
+`"*a?"`. This is akin to setting the `nullglob` option in bash, except
+that it does not resolve escaped pattern characters.
+
+If brace expansion is not disabled, then it is performed before any
+other interpretation of the glob pattern. Thus, a pattern like
+`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
+**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
+checked for validity. Since those two are valid, matching proceeds.
+
+
+## Minimatch Class
+
+Create a minimatch object by instantiating the `minimatch.Minimatch` class.
+
+```javascript
+var Minimatch = require("minimatch").Minimatch
+var mm = new Minimatch(pattern, options)
+```
+
+### Properties
+
+* `pattern` The original pattern the minimatch object represents.
+* `options` The options supplied to the constructor.
+* `set` A 2-dimensional array of regexp or string expressions.
+ Each row in the
+ array corresponds to a brace-expanded pattern. Each item in the row
+ corresponds to a single path-part. For example, the pattern
+ `{a,b/c}/d` would expand to a set of patterns like:
+
+ [ [ a, d ]
+ , [ b, c, d ] ]
+
+ If a portion of the pattern doesn't have any "magic" in it
+ (that is, it's something like `"foo"` rather than `fo*o?`), then it
+ will be left as a string rather than converted to a regular
+ expression.
+
+* `regexp` Created by the `makeRe` method. A single regular expression
+ expressing the entire pattern. This is useful in cases where you wish
+ to use the pattern somewhat like `fnmatch(3)` with `FNM_PATHNAME` enabled.
+* `negate` True if the pattern is negated.
+* `comment` True if the pattern is a comment.
+* `empty` True if the pattern is `""`.
+
+### Methods
+
+* `makeRe` Generate the `regexp` member if necessary, and return it.
+ Will return `false` if the pattern is invalid.
+* `match(fname)` Return true if the filename matches the pattern, or
+ false otherwise.
+* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
+ filename, and match it against a single row in the `regExpSet`. This
+ method is mainly for internal use, but is exposed so that it can be
+ used by a glob-walker that needs to avoid excessive filesystem calls.
+
+All other methods are internal, and will be called as necessary.
+
+## Functions
+
+The top-level exported function has a `cache` property, which is an LRU
+cache set to store 100 items. So, calling these methods repeatedly
+with the same pattern and options will use the same Minimatch object,
+saving the cost of parsing it multiple times.
+
+### minimatch(path, pattern, options)
+
+Main export. Tests a path against the pattern using the options.
+
+```javascript
+var isJS = minimatch(file, "*.js", { matchBase: true })
+```
+
+### minimatch.filter(pattern, options)
+
+Returns a function that tests its
+supplied argument, suitable for use with `Array.filter`. Example:
+
+```javascript
+var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
+```
+
+### minimatch.match(list, pattern, options)
+
+Match against the list of
+files, in the style of fnmatch or glob. If nothing is matched, and
+options.nonull is set, then return a list containing the pattern itself.
+
+```javascript
+var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
+```
+
+### minimatch.makeRe(pattern, options)
+
+Make a regular expression object from the pattern.
+
+## Options
+
+All options are `false` by default.
+
+### debug
+
+Dump a ton of stuff to stderr.
+
+### nobrace
+
+Do not expand `{a,b}` and `{1..3}` brace sets.
+
+### noglobstar
+
+Disable `**` matching against multiple folder names.
+
+### dot
+
+Allow patterns to match filenames starting with a period, even if
+the pattern does not explicitly have a period in that spot.
+
+Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
+is set.
+
+### noext
+
+Disable "extglob" style patterns like `+(a|b)`.
+
+### nocase
+
+Perform a case-insensitive match.
+
+### nonull
+
+When a match is not found by `minimatch.match`, return a list containing
+the pattern itself if this option is set. When not set, an empty list
+is returned if there are no matches.
+
+### matchBase
+
+If set, then patterns without slashes will be matched against the
+basename of the path, if that path contains slashes. For example,
+`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
+
+### nocomment
+
+Suppress the behavior of treating `#` at the start of a pattern as a
+comment.
+
+### nonegate
+
+Suppress the behavior of treating a leading `!` character as negation.
+
+### flipNegate
+
+Returns from negate expressions the same as if they were not negated.
+(Ie, true on a hit, false on a miss.)
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/minimatch.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/minimatch.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/minimatch.js 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/minimatch.js 2013-04-12 19:27:25.000000000 +0000
@@ -0,0 +1,1079 @@
+;(function (require, exports, module, platform) {
+
+if (module) module.exports = minimatch
+else exports.minimatch = minimatch
+
+if (!require) {
+ require = function (id) {
+ switch (id) {
+ case "sigmund": return function sigmund (obj) {
+ return JSON.stringify(obj)
+ }
+ case "path": return { basename: function (f) {
+ f = f.split(/[\/\\]/)
+ var e = f.pop()
+ if (!e) e = f.pop()
+ return e
+ }}
+ case "lru-cache": return function LRUCache () {
+ // not quite an LRU, but still space-limited.
+ var cache = {}
+ var cnt = 0
+ this.set = function (k, v) {
+ cnt ++
+ if (cnt >= 100) cache = {}
+ cache[k] = v
+ }
+ this.get = function (k) { return cache[k] }
+ }
+ }
+ }
+}
+
+minimatch.Minimatch = Minimatch
+
+var LRU = require("lru-cache")
+ , cache = minimatch.cache = new LRU({max: 100})
+ , GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+ , sigmund = require("sigmund")
+
+var path = require("path")
+ // any single thing other than /
+ // don't need to escape / when using new RegExp()
+ , qmark = "[^/]"
+
+ // * => any number of characters
+ , star = qmark + "*?"
+
+ // ** when dots are allowed. Anything goes, except .. and .
+ // not (^ or / followed by one or two dots followed by $ or /),
+ // followed by anything, any number of times.
+ , twoStarDot = "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?"
+
+ // not a ^ or / followed by a dot,
+ // followed by anything, any number of times.
+ , twoStarNoDot = "(?:(?!(?:\\\/|^)\\.).)*?"
+
+ // characters that need to be escaped in RegExp.
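+ // ("!" is not a RegExp metacharacter; escaping it is harmless, and it
+ // lets a "\!" in a glob come through as a literal "!".)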
+ , reSpecials = charSet("().*{}+?[]^$\\!")
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+ return s.split("").reduce(function (set, c) {
+ set[c] = true
+ return set
+ }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.monkeyPatch = monkeyPatch
+function monkeyPatch () {
+ var desc = Object.getOwnPropertyDescriptor(String.prototype, "match")
+ var orig = desc.value
+ desc.value = function (p) {
+ if (p instanceof Minimatch) return p.match(this)
+ return orig.call(this, p)
+ }
+ Object.defineProperty(String.prototype, "match", desc)
+}
+
+minimatch.filter = filter
+function filter (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
+ }
+}
+
+function ext (a, b) {
+ a = a || {}
+ b = b || {}
+ var t = {}
+ Object.keys(b).forEach(function (k) {
+ t[k] = b[k]
+ })
+ Object.keys(a).forEach(function (k) {
+ t[k] = a[k]
+ })
+ return t
+}
+
+minimatch.defaults = function (def) {
+ if (!def || !Object.keys(def).length) return minimatch
+
+ var orig = minimatch
+
+ var m = function minimatch (p, pattern, options) {
+ return orig(p, pattern, ext(def, options))
+ }
+
+ m.Minimatch = function Minimatch (pattern, options) {
+ return new orig.Minimatch(pattern, ext(def, options))
+ }
+
+ return m
+}
+
+Minimatch.defaults = function (def) {
+ if (!def || !Object.keys(def).length) return Minimatch
+ return minimatch.defaults(def).Minimatch
+}
+
+
+function minimatch (p, pattern, options) {
+ if (typeof pattern !== "string") {
+ throw new TypeError("glob pattern string required")
+ }
+
+ if (!options) options = {}
+
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === "#") {
+ return false
+ }
+
+ // "" only matches ""
+ if (pattern.trim() === "") return p === ""
+
+ return new Minimatch(pattern, options).match(p)
+}
+
+function Minimatch (pattern, options) {
+ if (!(this instanceof Minimatch)) {
+ return new Minimatch(pattern, options, cache)
+ }
+
+ if (typeof pattern !== "string") {
+ throw new TypeError("glob pattern string required")
+ }
+
+ if (!options) options = {}
+ pattern = pattern.trim()
+
+ // windows: need to use /, not \
+ // On other platforms, \ is a valid (albeit bad) filename char.
+ if (platform === "win32") {
+ pattern = pattern.split("\\").join("/")
+ }
+
+ // lru storage.
+ // these things aren't particularly big, but walking down the string
+ // and turning it into a regexp can get pretty costly.
+ var cacheKey = pattern + "\n" + sigmund(options)
+ var cached = minimatch.cache.get(cacheKey)
+ if (cached) return cached
+ minimatch.cache.set(cacheKey, this)
+
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
+
+ // make the set of regexps etc.
+ this.make()
+}
+
+Minimatch.prototype.make = make
+function make () {
+ // don't do it more than once.
+ if (this._made) return
+
+ var pattern = this.pattern
+ var options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === "#") {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
+ }
+
+ // step 1: figure out negation, etc.
+ this.parseNegate()
+
+ // step 2: expand braces
+ var set = this.globSet = this.braceExpand()
+
+ if (options.debug) console.error(this.pattern, set)
+
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + if (options.debug) console.error(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + if (options.debug) console.error(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return -1 === s.indexOf(false) + }) + + if (options.debug) console.error(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + , negate = false + , options = this.options + , negateOffset = 0 + + if (options.nonegate) return + + for ( var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === "!" + ; i ++) { + negate = !negate + negateOffset ++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return new Minimatch(pattern, options).braceExpand() +} + +Minimatch.prototype.braceExpand = braceExpand +function braceExpand (pattern, options) { + options = options || this.options + pattern = typeof pattern === "undefined" + ? this.pattern : pattern + + if (typeof pattern === "undefined") { + throw new Error("undefined pattern") + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + var escaping = false + + // examples and comments refer to this crazy pattern: + // a{b,c{d,e},{f,g}h}x{y,z} + // expected: + // abxy + // abxz + // acdxy + // acdxz + // acexy + // acexz + // afhxy + // afhxz + // aghxy + // aghxz + + // everything before the first \{ is just a prefix. + // So, we pluck that off, and work with the rest, + // and then prepend it to everything we find. + if (pattern.charAt(0) !== "{") { + // console.error(pattern) + var prefix = null + for (var i = 0, l = pattern.length; i < l; i ++) { + var c = pattern.charAt(i) + // console.error(i, c) + if (c === "\\") { + escaping = !escaping + } else if (c === "{" && !escaping) { + prefix = pattern.substr(0, i) + break + } + } + + // actually no sets, all { were escaped. + if (prefix === null) { + // console.error("no sets") + return [pattern] + } + + var tail = braceExpand(pattern.substr(i), options) + return tail.map(function (t) { + return prefix + t + }) + } + + // now we have something like: + // {b,c{d,e},{f,g}h}x{y,z} + // walk through the set, expanding each part, until + // the set ends. then, we'll expand the suffix. + // If the set only has a single member, then'll put the {} back + + // first, handle numeric sets, since they're easier + var numset = pattern.match(/^\{(-?[0-9]+)\.\.(-?[0-9]+)\}/) + if (numset) { + // console.error("numset", numset[1], numset[2]) + var suf = braceExpand(pattern.substr(numset[0].length), options) + , start = +numset[1] + , end = +numset[2] + , inc = start > end ? 
-1 : 1 + , set = [] + for (var i = start; i != (end + inc); i += inc) { + // append all the suffixes + for (var ii = 0, ll = suf.length; ii < ll; ii ++) { + set.push(i + suf[ii]) + } + } + return set + } + + // ok, walk through the set + // We hope, somewhat optimistically, that there + // will be a } at the end. + // If the closing brace isn't found, then the pattern is + // interpreted as braceExpand("\\" + pattern) so that + // the leading \{ will be interpreted literally. + var i = 1 // skip the \{ + , depth = 1 + , set = [] + , member = "" + , sawEnd = false + , escaping = false + + function addMember () { + set.push(member) + member = "" + } + + // console.error("Entering for") + FOR: for (i = 1, l = pattern.length; i < l; i ++) { + var c = pattern.charAt(i) + // console.error("", i, c) + + if (escaping) { + escaping = false + member += "\\" + c + } else { + switch (c) { + case "\\": + escaping = true + continue + + case "{": + depth ++ + member += "{" + continue + + case "}": + depth -- + // if this closes the actual set, then we're done + if (depth === 0) { + addMember() + // pluck off the close-brace + i ++ + break FOR + } else { + member += c + continue + } + + case ",": + if (depth === 1) { + addMember() + } else { + member += c + } + continue + + default: + member += c + continue + } // switch + } // else + } // for + + // now we've either finished the set, and the suffix is + // pattern.substr(i), or we have *not* closed the set, + // and need to escape the leading brace + if (depth !== 0) { + // console.error("didn't close", pattern) + return braceExpand("\\" + pattern, options) + } + + // x{y,z} -> ["xy", "xz"] + // console.error("set", set) + // console.error("suffix", pattern.substr(i)) + var suf = braceExpand(pattern.substr(i), options) + // ["b", "c{d,e}","{f,g}h"] -> + // [["b"], ["cd", "ce"], ["fh", "gh"]] + var addBraces = set.length === 1 + // console.error("set pre-expanded", set) + set = set.map(function (p) { + return braceExpand(p, options) + }) + // console.error("set expanded", set) + + + // [["b"], ["cd", "ce"], ["fh", "gh"]] -> + // ["b", "cd", "ce", "fh", "gh"] + set = set.reduce(function (l, r) { + return l.concat(r) + }) + + if (addBraces) { + set = set.map(function (s) { + return "{" + s + "}" + }) + } + + // now attach the suffixes. + var ret = [] + for (var i = 0, l = set.length; i < l; i ++) { + for (var ii = 0, ll = suf.length; ii < ll; ii ++) { + ret.push(set[i] + suf[ii]) + } + } + return ret +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === "**") return GLOBSTAR + if (pattern === "") return "" + + var re = "" + , hasMagic = !!options.nocase + , escaping = false + // ? 
=> one single character + , patternListStack = [] + , plType + , stateChar + , inClass = false + , reClassStart = -1 + , classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + , patternStart = pattern.charAt(0) === "." ? "" // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))" + : "(?!\\.)" + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case "*": + re += star + hasMagic = true + break + case "?": + re += qmark + hasMagic = true + break + default: + re += "\\"+stateChar + break + } + stateChar = false + } + } + + for ( var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i ++ ) { + + if (options.debug) { + console.error("%s\t%s %s %j", pattern, i, re, c) + } + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += "\\" + c + escaping = false + continue + } + + SWITCH: switch (c) { + case "/": + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case "\\": + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case "?": + case "*": + case "+": + case "@": + case "!": + if (options.debug) { + console.error("%s\t%s %s %j <-- stateChar", pattern, i, re, c) + } + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + if (c === "!" && i === classStart + 1) c = "^" + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case "(": + if (inClass) { + re += "(" + continue + } + + if (!stateChar) { + re += "\\(" + continue + } + + plType = stateChar + patternListStack.push({ type: plType + , start: i - 1 + , reStart: re.length }) + // negation is (?:(?!js)[^/]*) + re += stateChar === "!" ? "(?:(?!" : "(?:" + stateChar = false + continue + + case ")": + if (inClass || !patternListStack.length) { + re += "\\)" + continue + } + + hasMagic = true + re += ")" + plType = patternListStack.pop().type + // negation is (?:(?!js)[^/]*) + // The others are (?:) + switch (plType) { + case "!": + re += "[^/]*?)" + break + case "?": + case "+": + case "*": re += plType + case "@": break // the default anyway + } + continue + + case "|": + if (inClass || !patternListStack.length || escaping) { + re += "\\|" + escaping = false + continue + } + + re += "|" + continue + + // these are mostly the same in regexp and glob + case "[": + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += "\\" + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case "]": + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += "\\" + c + escaping = false + continue + } + + // finish up the class. 
+ hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === "^" && inClass)) { + re += "\\" + } + + re += c + + } // switch + } // for + + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + var cs = pattern.substr(classStart + 1) + , sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + "\\[" + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + var pl + while (pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + 3) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = "\\" + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + "|" + }) + + // console.error("tail=%j\n %s", tail, tail) + var t = pl.type === "*" ? star + : pl.type === "?" ? qmark + : "\\" + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + + t + "\\(" + + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += "\\\\" + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case ".": + case "[": + case "(": addPatternStart = true + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== "" && hasMagic) re = "(?=.)" + re + + if (addPatternStart) re = patternStart + re + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [ re, hasMagic ] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? "i" : "" + , regExp = new RegExp("^" + re + "$", flags) + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). 
This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) return this.regexp = false + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + , flags = options.nocase ? "i" : "" + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === "string") ? regExpEscape(p) + : p._src + }).join("\\\/") + }).join("|") + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = "^(?:" + re + ")$" + + // can match anything, as long as it's not this. + if (this.negate) re = "^(?!" + re + ").*$" + + try { + return this.regexp = new RegExp(re, flags) + } catch (ex) { + return this.regexp = false + } +} + +minimatch.match = function (list, pattern, options) { + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + // console.error("match", f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === "" + + if (f === "/" && partial) return true + + var options = this.options + + // windows: need to use /, not \ + // On other platforms, \ is a valid (albeit bad) filename char. + if (platform === "win32") { + f = f.split("\\").join("/") + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + if (options.debug) { + console.error(this.pattern, "split", f) + } + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + // console.error(this.pattern, "set", set) + + for (var i = 0, l = set.length; i < l; i ++) { + var pattern = set[i] + var hit = this.matchOne(f, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + if (options.debug) { + console.error("matchOne", + { "this": this + , file: file + , pattern: pattern }) + } + + if (options.matchBase && pattern.length === 1) { + file = path.basename(file.join("/")).split("/") + } + + if (options.debug) { + console.error("matchOne", file.length, pattern.length) + } + + for ( var fi = 0 + , pi = 0 + , fl = file.length + , pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi ++, pi ++ ) { + + if (options.debug) { + console.error("matchOne loop") + } + var p = pattern[pi] + , f = file[fi] + + if (options.debug) { + console.error(pattern, p, f) + } + + // should be impossible. + // some invalid regexp stuff in the set. 
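+ // (make() filtered out any row containing a failed parse, so this
+ // check is purely defensive.)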
+ if (p === false) return false + + if (p === GLOBSTAR) { + if (options.debug) + console.error('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + , pr = pi + 1 + if (pr === pl) { + if (options.debug) + console.error('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for ( ; fi < fl; fi ++) { + if (file[fi] === "." || file[fi] === ".." || + (!options.dot && file[fi].charAt(0) === ".")) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + WHILE: while (fr < fl) { + var swallowee = file[fr] + + if (options.debug) { + console.error('\nglobstar while', + file, fr, pattern, pr, swallowee) + } + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + if (options.debug) + console.error('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === "." || swallowee === ".." || + (!options.dot && swallowee.charAt(0) === ".")) { + if (options.debug) + console.error("dot detected!", file, fr, pattern, pr) + break WHILE + } + + // ** swallows a segment, and continue. + if (options.debug) + console.error('globstar swallow a segment, and continue') + fr ++ + } + } + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + // console.error("\n>>> no match, partial?", file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === "string") { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + if (options.debug) { + console.error("string match", p, f, hit) + } + } else { + hit = f.match(p) + if (options.debug) { + console.error("pattern match", p, f, hit) + } + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. 
+ if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === "") + return emptyFileEnd + } + + // should be unreachable. + throw new Error("wtf?") +} + + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, "$1") +} + + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&") +} + +})( typeof require === "function" ? require : null, + this, + typeof module === "object" ? module : null, + typeof process === "object" ? process.platform : "win32" + ) diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/.npmignore tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/.npmignore --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/.npmignore 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/.npmignore 2011-12-09 01:04:59.000000000 +0000 @@ -0,0 +1 @@ +/node_modules diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/AUTHORS tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/AUTHORS --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/AUTHORS 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/AUTHORS 2012-08-27 16:36:47.000000000 +0000 @@ -0,0 +1,8 @@ +# Authors, sorted by whether or not they are me +Isaac Z. Schlueter +Carlos Brito Lage +Marko Mikulicic +Trent Mick +Kevin O'Hara +Marco Rogers +Jesse Dailey diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE 2011-07-29 19:10:32.000000000 +0000 @@ -0,0 +1,23 @@ +Copyright 2009, 2010, 2011 Isaac Z. Schlueter. +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md 2013-03-26 00:24:58.000000000 +0000 @@ -0,0 +1,97 @@ +# lru cache + +A cache object that deletes the least-recently-used items. + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n) { return n * 2 } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = LRU(options) + , otherCache = LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n){return n.length}`. The default is + `function(n){return 1}`, which is fine if you want to store `n` + like-sized things. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. + +## API + +* `set(key, value)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. 
+ + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/bench.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/bench.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/bench.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/bench.js 2013-08-19 19:55:38.000000000 +0000 @@ -0,0 +1,25 @@ +var LRU = require('lru-cache'); + +var max = +process.argv[2] || 10240; +var more = 102400; + +var cache = LRU({ + max: max, maxAge: 86400e3 +}); + +// fill cache +for (var i = 0; i < max; ++i) { + cache.set(i, {}); +} + +var start = process.hrtime(); + +// adding more items +for ( ; i < max+more; ++i) { + cache.set(i, {}); +} + +var end = process.hrtime(start); +var msecs = end[0] * 1E3 + end[1] / 1E6; + +console.log('adding %d items took %d ms', more, msecs.toPrecision(5)); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js 2013-08-19 21:26:27.000000000 +0000 @@ -0,0 +1,263 @@ +;(function () { // closure for web browsers + +if (typeof module === 'object' && module.exports) { + module.exports = LRUCache +} else { + // just set the global for non-node platforms. + this.LRUCache = LRUCache +} + +function hOP (obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key) +} + +function naiveLength () { return 1 } + +function LRUCache (options) { + if (!(this instanceof LRUCache)) { + return new LRUCache(options) + } + + var max + if (typeof options === 'number') { + max = options + options = { max: max } + } + + if (!options) options = {} + + max = options.max + + var lengthCalculator = options.length || naiveLength + + if (typeof lengthCalculator !== "function") { + lengthCalculator = naiveLength + } + + if (!max || !(typeof max === "number") || max <= 0 ) { + // a little bit silly. maybe this should throw? 
+ max = Infinity + } + + var allowStale = options.stale || false + + var maxAge = options.maxAge || null + + var dispose = options.dispose + + var cache = Object.create(null) // hash of items by key + , lruList = Object.create(null) // list of items in order of use recency + , mru = 0 // most recently used + , lru = 0 // least recently used + , length = 0 // number of items in the list + , itemCount = 0 + + + // resize the cache when the max changes. + Object.defineProperty(this, "max", + { set : function (mL) { + if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity + max = mL + // if it gets above double max, trim right away. + // otherwise, do it whenever it's convenient. + if (length > max) trim() + } + , get : function () { return max } + , enumerable : true + }) + + // resize the cache when the lengthCalculator changes. + Object.defineProperty(this, "lengthCalculator", + { set : function (lC) { + if (typeof lC !== "function") { + lengthCalculator = naiveLength + length = itemCount + for (var key in cache) { + cache[key].length = 1 + } + } else { + lengthCalculator = lC + length = 0 + for (var key in cache) { + cache[key].length = lengthCalculator(cache[key].value) + length += cache[key].length + } + } + + if (length > max) trim() + } + , get : function () { return lengthCalculator } + , enumerable : true + }) + + Object.defineProperty(this, "length", + { get : function () { return length } + , enumerable : true + }) + + + Object.defineProperty(this, "itemCount", + { get : function () { return itemCount } + , enumerable : true + }) + + this.forEach = function (fn, thisp) { + thisp = thisp || this + var i = 0; + for (var k = mru - 1; k >= 0 && i < itemCount; k--) if (lruList[k]) { + i++ + var hit = lruList[k] + if (maxAge && (Date.now() - hit.now > maxAge)) { + del(hit) + if (!allowStale) hit = undefined + } + if (hit) { + fn.call(thisp, hit.value, hit.key, this) + } + } + } + + this.keys = function () { + var keys = new Array(itemCount) + var i = 0 + for (var k = mru - 1; k >= 0 && i < itemCount; k--) if (lruList[k]) { + var hit = lruList[k] + keys[i++] = hit.key + } + return keys + } + + this.values = function () { + var values = new Array(itemCount) + var i = 0 + for (var k = mru - 1; k >= 0 && i < itemCount; k--) if (lruList[k]) { + var hit = lruList[k] + values[i++] = hit.value + } + return values + } + + this.reset = function () { + if (dispose) { + for (var k in cache) { + dispose(k, cache[k].value) + } + } + cache = {} + lruList = {} + lru = 0 + mru = 0 + length = 0 + itemCount = 0 + } + + // Provided for debugging/dev purposes only. No promises whatsoever that + // this API stays stable. + this.dump = function () { + return cache + } + + this.dumpLru = function () { + return lruList + } + + this.set = function (key, value) { + if (hOP(cache, key)) { + // dispose of the old one before overwriting + if (dispose) dispose(key, cache[key].value) + if (maxAge) cache[key].now = Date.now() + cache[key].value = value + this.get(key) + return true + } + + var len = lengthCalculator(value) + var age = maxAge ? Date.now() : 0 + var hit = new Entry(key, value, mru++, len, age) + + // oversized objects fall out of cache automatically. 
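+    // (the rejected value is never stored, but dispose still runs for it)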
+ if (hit.length > max) { + if (dispose) dispose(key, value) + return false + } + + length += hit.length + lruList[hit.lu] = cache[key] = hit + itemCount ++ + + if (length > max) trim() + return true + } + + this.has = function (key) { + if (!hOP(cache, key)) return false + var hit = cache[key] + if (maxAge && (Date.now() - hit.now > maxAge)) { + return false + } + return true + } + + this.get = function (key) { + return get(key, true) + } + + this.peek = function (key) { + return get(key, false) + } + + function get (key, doUse) { + var hit = cache[key] + if (hit) { + if (maxAge && (Date.now() - hit.now > maxAge)) { + del(hit) + if (!allowStale) hit = undefined + } else { + if (doUse) use(hit) + } + if (hit) hit = hit.value + } + return hit + } + + function use (hit) { + shiftLU(hit) + hit.lu = mru ++ + lruList[hit.lu] = hit + } + + this.del = function (key) { + del(cache[key]) + } + + function trim () { + while (lru < mru && length > max) + del(lruList[lru]) + } + + function shiftLU(hit) { + delete lruList[ hit.lu ] + while (lru < mru && !lruList[lru]) lru ++ + } + + function del(hit) { + if (hit) { + if (dispose) dispose(hit.key, hit.value) + length -= hit.length + itemCount -- + delete cache[ hit.key ] + shiftLU(hit) + } + } +} + +// classy, since V8 prefers predictable objects. +function Entry (key, value, mru, len, age) { + this.key = key + this.value = value + this.lu = mru + this.length = len + this.now = age +} + +})() diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -0,0 +1,62 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "2.3.1", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "scripts": { + "test": "tap test --gc" + }, + "main": "lib/lru-cache.js", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-lru-cache.git" + }, + "devDependencies": { + "tap": "", + "weak": "" + }, + "license": { + "type": "MIT", + "url": "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE" + }, + "contributors": [ + { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me" + }, + { + "name": "Carlos Brito Lage", + "email": "carlos@carloslage.net" + }, + { + "name": "Marko Mikulicic", + "email": "marko.mikulicic@isti.cnr.it" + }, + { + "name": "Trent Mick", + "email": "trentm@gmail.com" + }, + { + "name": "Kevin O'Hara", + "email": "kevinohara80@gmail.com" + }, + { + "name": "Marco Rogers", + "email": "marco.rogers@gmail.com" + }, + { + "name": "Jesse Dailey", + "email": "jesse.dailey@gmail.com" + } + ], + "readme": "# lru cache\n\nA cache object that deletes the least-recently-used items.\n\n## Usage:\n\n```javascript\nvar LRU = require(\"lru-cache\")\n , options = { max: 500\n , length: function (n) { return n * 2 }\n , dispose: function (key, n) { n.close() }\n , maxAge: 1000 * 60 * 60 }\n , cache = LRU(options)\n , otherCache = LRU(50) // sets just the max size\n\ncache.set(\"key\", \"value\")\ncache.get(\"key\") // \"value\"\n\ncache.reset() // empty the cache\n```\n\nIf you put more stuff in it, then items will fall out.\n\nIf you try to put an oversized thing in it, then it'll fall out right\naway.\n\n## Options\n\n* `max` The maximum size of the cache, checked by applying the length\n function to all values in the cache. Not setting this is kind of\n silly, since that's the whole purpose of this lib, but it defaults\n to `Infinity`.\n* `maxAge` Maximum age in ms. Items are not pro-actively pruned out\n as they age, but if you try to get an item that is too old, it'll\n drop it and return undefined instead of giving it to you.\n* `length` Function that is used to calculate the length of stored\n items. If you're storing strings or buffers, then you probably want\n to do something like `function(n){return n.length}`. The default is\n `function(n){return 1}`, which is fine if you want to store `n`\n like-sized things.\n* `dispose` Function that is called on items when they are dropped\n from the cache. This can be handy if you want to close file\n descriptors or do other cleanup tasks when items are no longer\n accessible. Called with `key, value`. It's called *before*\n actually removing the item from the internal cache, so if you want\n to immediately put it back in, you'll have to do that in a\n `nextTick` or `setTimeout` callback or it won't do anything.\n* `stale` By default, if you set a `maxAge`, it'll only actually pull\n stale items out of the cache when you `get(key)`. (That is, it's\n not pre-emptively doing a `setTimeout` or anything.) If you set\n `stale:true`, it'll return the stale value before deleting it. If\n you don't set this, then it'll return `undefined` when you try to\n get a stale entry, as if it had already been deleted.\n\n## API\n\n* `set(key, value)`\n* `get(key) => value`\n\n Both of these will update the \"recently used\"-ness of the key.\n They do what you think.\n\n* `peek(key)`\n\n Returns the key value (or `undefined` if not found) without\n updating the \"recently used\"-ness of the key.\n\n (If you find yourself using this a lot, you *might* be using the\n wrong sort of data structure, but there are some use cases where\n it's handy.)\n\n* `del(key)`\n\n Deletes a key out of the cache.\n\n* `reset()`\n\n Clear the cache entirely, throwing away all values.\n\n* `has(key)`\n\n Check if a key is in the cache, without updating the recent-ness\n or deleting it for being stale.\n\n* `forEach(function(value,key,cache), [thisp])`\n\n Just like `Array.prototype.forEach`. Iterates over all the keys\n in the cache, in order of recent-ness. 
(Ie, more recently used\n items are iterated over first.)\n\n* `keys()`\n\n Return an array of the keys in the cache.\n\n* `values()`\n\n Return an array of the values in the cache.\n",
+ "readmeFilename": "README.md",
+ "bugs": {
+ "url": "https://github.com/isaacs/node-lru-cache/issues"
+ },
+ "_id": "lru-cache@2.3.1",
+ "_from": "lru-cache@2"
+}
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/LICENSE
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/LICENSE 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/LICENSE 2012-08-13 15:57:47.000000000 +0000
@@ -0,0 +1,27 @@
+Copyright (c) Isaac Z. Schlueter ("Author")
+All rights reserved.
+
+The BSD License
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md 2012-08-13 15:57:47.000000000 +0000
@@ -0,0 +1,53 @@
+# sigmund
+
+Quick and dirty signatures for Objects.
+
+This is like a much faster `deepEquals` comparison, which returns a
+string key suitable for caches and the like.
+
+## Usage
+
+```javascript
+function doSomething (someObj) {
+    var key = sigmund(someObj, maxDepth) // max depth defaults to 10
+    var cached = cache.get(key)
+    if (cached) return cached
+
+    var result = expensiveCalculation(someObj)
+    cache.set(key, result)
+    return result
+}
+```
+
+The resulting key will be as unique and reproducible as calling
+`JSON.stringify` or `util.inspect` on the object, but is much faster.
+In order to achieve this speed, some differences are glossed over.
+For example, the object `{0:'foo'}` will be treated identically to the
+array `['foo']`.
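+
+A minimal sketch of that gloss, assuming `sigmund` is loaded as in the
+Usage example above:
+
+```javascript
+var sigmund = require('sigmund')
+
+// the object key '0' and the array index 0 flatten to the same notes,
+// so both signatures come out as '{0foo'
+console.log(sigmund({0: 'foo'}) === sigmund(['foo'])) // true
+```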
+
+Also, just as there is no way to summon the soul from the scribblings
+of a cocaine-addled psychoanalyst, there is no way to revive the object
+from the signature string that sigmund gives you. In fact, it's
+barely even readable.
+
+As with `sys.inspect` and `JSON.stringify`, larger objects will
+produce larger signature strings.
+
+Because sigmund is a bit less strict than the more thorough
+alternatives, the strings will be shorter, and also there is a
+slightly higher chance for collisions. For example, these objects
+have the same signature:
+
+    var obj1 = {a:'b',c:/def/,g:['h','i',{j:'',k:'l'}]}
+    var obj2 = {a:'b',c:'/def/',g:['h','i','{jkl']}
+
+Like a good Freudian, sigmund is most effective when you already have
+some understanding of what you're looking for. It can help you help
+yourself, but you must be willing to do some work as well.
+
+Cycles are handled, and cyclical objects are silently omitted (though
+the key is included in the signature output.)
+
+The second argument is the maximum depth, which defaults to 10,
+because that is the maximum object traversal depth covered by most
+insurance carriers.
diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/bench.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/bench.js
--- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/bench.js 1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/bench.js 2012-08-13 15:57:47.000000000 +0000
@@ -0,0 +1,283 @@
+// different ways to id objects
+// use a req/res pair, since it's crazy deep and cyclical
+
+// sparseFE10 and sigmund are usually pretty close, which is to be expected,
+// since they are essentially the same algorithm, except that sigmund handles
+// regular expression objects properly.
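+// (invoked as `node bench.js`, wired up as `npm run bench` in package.json)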
+ + +var http = require('http') +var util = require('util') +var sigmund = require('./sigmund.js') +var sreq, sres, creq, cres, test + +http.createServer(function (q, s) { + sreq = q + sres = s + sres.end('ok') + this.close(function () { setTimeout(function () { + start() + }, 200) }) +}).listen(1337, function () { + creq = http.get({ port: 1337 }) + creq.on('response', function (s) { cres = s }) +}) + +function start () { + test = [sreq, sres, creq, cres] + // test = sreq + // sreq.sres = sres + // sreq.creq = creq + // sreq.cres = cres + + for (var i in exports.compare) { + console.log(i) + var hash = exports.compare[i]() + console.log(hash) + console.log(hash.length) + console.log('') + } + + require('bench').runMain() +} + +function customWs (obj, md, d) { + d = d || 0 + var to = typeof obj + if (to === 'undefined' || to === 'function' || to === null) return '' + if (d > md || !obj || to !== 'object') return ('' + obj).replace(/[\n ]+/g, '') + + if (Array.isArray(obj)) { + return obj.map(function (i, _, __) { + return customWs(i, md, d + 1) + }).reduce(function (a, b) { return a + b }, '') + } + + var keys = Object.keys(obj) + return keys.map(function (k, _, __) { + return k + ':' + customWs(obj[k], md, d + 1) + }).reduce(function (a, b) { return a + b }, '') +} + +function custom (obj, md, d) { + d = d || 0 + var to = typeof obj + if (to === 'undefined' || to === 'function' || to === null) return '' + if (d > md || !obj || to !== 'object') return '' + obj + + if (Array.isArray(obj)) { + return obj.map(function (i, _, __) { + return custom(i, md, d + 1) + }).reduce(function (a, b) { return a + b }, '') + } + + var keys = Object.keys(obj) + return keys.map(function (k, _, __) { + return k + ':' + custom(obj[k], md, d + 1) + }).reduce(function (a, b) { return a + b }, '') +} + +function sparseFE2 (obj, maxDepth) { + var seen = [] + var soFar = '' + function ch (v, depth) { + if (depth > maxDepth) return + if (typeof v === 'function' || typeof v === 'undefined') return + if (typeof v !== 'object' || !v) { + soFar += v + return + } + if (seen.indexOf(v) !== -1 || depth === maxDepth) return + seen.push(v) + soFar += '{' + Object.keys(v).forEach(function (k, _, __) { + // pseudo-private values. skip those. + if (k.charAt(0) === '_') return + var to = typeof v[k] + if (to === 'function' || to === 'undefined') return + soFar += k + ':' + ch(v[k], depth + 1) + }) + soFar += '}' + } + ch(obj, 0) + return soFar +} + +function sparseFE (obj, maxDepth) { + var seen = [] + var soFar = '' + function ch (v, depth) { + if (depth > maxDepth) return + if (typeof v === 'function' || typeof v === 'undefined') return + if (typeof v !== 'object' || !v) { + soFar += v + return + } + if (seen.indexOf(v) !== -1 || depth === maxDepth) return + seen.push(v) + soFar += '{' + Object.keys(v).forEach(function (k, _, __) { + // pseudo-private values. skip those. + if (k.charAt(0) === '_') return + var to = typeof v[k] + if (to === 'function' || to === 'undefined') return + soFar += k + ch(v[k], depth + 1) + }) + } + ch(obj, 0) + return soFar +} + +function sparse (obj, maxDepth) { + var seen = [] + var soFar = '' + function ch (v, depth) { + if (depth > maxDepth) return + if (typeof v === 'function' || typeof v === 'undefined') return + if (typeof v !== 'object' || !v) { + soFar += v + return + } + if (seen.indexOf(v) !== -1 || depth === maxDepth) return + seen.push(v) + soFar += '{' + for (var k in v) { + // pseudo-private values. skip those. 
+ if (k.charAt(0) === '_') continue + var to = typeof v[k] + if (to === 'function' || to === 'undefined') continue + soFar += k + ch(v[k], depth + 1) + } + } + ch(obj, 0) + return soFar +} + +function noCommas (obj, maxDepth) { + var seen = [] + var soFar = '' + function ch (v, depth) { + if (depth > maxDepth) return + if (typeof v === 'function' || typeof v === 'undefined') return + if (typeof v !== 'object' || !v) { + soFar += v + return + } + if (seen.indexOf(v) !== -1 || depth === maxDepth) return + seen.push(v) + soFar += '{' + for (var k in v) { + // pseudo-private values. skip those. + if (k.charAt(0) === '_') continue + var to = typeof v[k] + if (to === 'function' || to === 'undefined') continue + soFar += k + ':' + ch(v[k], depth + 1) + } + soFar += '}' + } + ch(obj, 0) + return soFar +} + + +function flatten (obj, maxDepth) { + var seen = [] + var soFar = '' + function ch (v, depth) { + if (depth > maxDepth) return + if (typeof v === 'function' || typeof v === 'undefined') return + if (typeof v !== 'object' || !v) { + soFar += v + return + } + if (seen.indexOf(v) !== -1 || depth === maxDepth) return + seen.push(v) + soFar += '{' + for (var k in v) { + // pseudo-private values. skip those. + if (k.charAt(0) === '_') continue + var to = typeof v[k] + if (to === 'function' || to === 'undefined') continue + soFar += k + ':' + ch(v[k], depth + 1) + soFar += ',' + } + soFar += '}' + } + ch(obj, 0) + return soFar +} + +exports.compare = +{ + // 'custom 2': function () { + // return custom(test, 2, 0) + // }, + // 'customWs 2': function () { + // return customWs(test, 2, 0) + // }, + 'JSON.stringify (guarded)': function () { + var seen = [] + return JSON.stringify(test, function (k, v) { + if (typeof v !== 'object' || !v) return v + if (seen.indexOf(v) !== -1) return undefined + seen.push(v) + return v + }) + }, + + 'flatten 10': function () { + return flatten(test, 10) + }, + + // 'flattenFE 10': function () { + // return flattenFE(test, 10) + // }, + + 'noCommas 10': function () { + return noCommas(test, 10) + }, + + 'sparse 10': function () { + return sparse(test, 10) + }, + + 'sparseFE 10': function () { + return sparseFE(test, 10) + }, + + 'sparseFE2 10': function () { + return sparseFE2(test, 10) + }, + + sigmund: function() { + return sigmund(test, 10) + }, + + + // 'util.inspect 1': function () { + // return util.inspect(test, false, 1, false) + // }, + // 'util.inspect undefined': function () { + // util.inspect(test) + // }, + // 'util.inspect 2': function () { + // util.inspect(test, false, 2, false) + // }, + // 'util.inspect 3': function () { + // util.inspect(test, false, 3, false) + // }, + // 'util.inspect 4': function () { + // util.inspect(test, false, 4, false) + // }, + // 'util.inspect Infinity': function () { + // util.inspect(test, false, Infinity, false) + // } +} + +/** results +**/ diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -0,0 +1,41 @@ +{ + "name": "sigmund", + "version": "1.0.0", + "description": "Quick and dirty signatures for Objects.", + "main": 
"sigmund.js", + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "tap": "~0.3.0" + }, + "scripts": { + "test": "tap test/*.js", + "bench": "node bench.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/sigmund" + }, + "keywords": [ + "object", + "signature", + "key", + "data", + "psychoanalysis" + ], + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "license": "BSD", + "readme": "# sigmund\n\nQuick and dirty signatures for Objects.\n\nThis is like a much faster `deepEquals` comparison, which returns a\nstring key suitable for caches and the like.\n\n## Usage\n\n```javascript\nfunction doSomething (someObj) {\n var key = sigmund(someObj, maxDepth) // max depth defaults to 10\n var cached = cache.get(key)\n if (cached) return cached)\n\n var result = expensiveCalculation(someObj)\n cache.set(key, result)\n return result\n}\n```\n\nThe resulting key will be as unique and reproducible as calling\n`JSON.stringify` or `util.inspect` on the object, but is much faster.\nIn order to achieve this speed, some differences are glossed over.\nFor example, the object `{0:'foo'}` will be treated identically to the\narray `['foo']`.\n\nAlso, just as there is no way to summon the soul from the scribblings\nof a cocain-addled psychoanalyst, there is no way to revive the object\nfrom the signature string that sigmund gives you. In fact, it's\nbarely even readable.\n\nAs with `sys.inspect` and `JSON.stringify`, larger objects will\nproduce larger signature strings.\n\nBecause sigmund is a bit less strict than the more thorough\nalternatives, the strings will be shorter, and also there is a\nslightly higher chance for collisions. For example, these objects\nhave the same signature:\n\n var obj1 = {a:'b',c:/def/,g:['h','i',{j:'',k:'l'}]}\n var obj2 = {a:'b',c:'/def/',g:['h','i','{jkl']}\n\nLike a good Freudian, sigmund is most effective when you already have\nsome understanding of what you're looking for. 
It can help you help\nyourself, but you must be willing to do some work as well.\n\nCycles are handled, and cyclical objects are silently omitted (though\nthe key is included in the signature output.)\n\nThe second argument is the maximum depth, which defaults to 10,\nbecause that is the maximum object traversal depth covered by most\ninsurance carriers.\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/isaacs/sigmund/issues" + }, + "_id": "sigmund@1.0.0", + "_from": "sigmund@~1.0.0" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/sigmund.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/sigmund.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/sigmund.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/node_modules/sigmund/sigmund.js 2012-08-13 15:57:47.000000000 +0000 @@ -0,0 +1,39 @@ +module.exports = sigmund +function sigmund (subject, maxSessions) { + maxSessions = maxSessions || 10; + var notes = []; + var analysis = ''; + var RE = RegExp; + + function psychoAnalyze (subject, session) { + if (session > maxSessions) return; + + if (typeof subject === 'function' || + typeof subject === 'undefined') { + return; + } + + if (typeof subject !== 'object' || !subject || + (subject instanceof RE)) { + analysis += subject; + return; + } + + if (notes.indexOf(subject) !== -1 || session === maxSessions) return; + + notes.push(subject); + analysis += '{'; + Object.keys(subject).forEach(function (issue, _, __) { + // pseudo-private values. skip those. + if (issue.charAt(0) === '_') return; + var to = typeof subject[issue]; + if (to === 'function' || to === 'undefined') return; + analysis += issue; + psychoAnalyze(subject[issue], session + 1); + }); + } + psychoAnalyze(subject, 0); + return analysis; +} + +// vim: set softtabstop=4 shiftwidth=4: diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/node_modules/minimatch/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/node_modules/minimatch/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -0,0 +1,39 @@ +{ + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me" + }, + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "0.2.12", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap test" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "lru-cache": "2", + "sigmund": "~1.0.0" + }, + "devDependencies": { + "tap": "" + }, + "license": { + "type": "MIT", + "url": "http://github.com/isaacs/minimatch/raw/master/LICENSE" + }, + "readme": "# minimatch\n\nA minimal matching utility.\n\n[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch)\n\n\nThis is the matching library used internally by npm.\n\nEventually, it will replace the C binding in node-glob.\n\nIt works by converting glob expressions into JavaScript `RegExp`\nobjects.\n\n## Usage\n\n```javascript\nvar minimatch = require(\"minimatch\")\n\nminimatch(\"bar.foo\", \"*.foo\") // true!\nminimatch(\"bar.foo\", \"*.bar\") // false!\n```\n\n## Features\n\nSupports these glob features:\n\n* Brace Expansion\n* Extended glob matching\n* \"Globstar\" `**` matching\n\nSee:\n\n* `man sh`\n* `man bash`\n* `man 3 fnmatch`\n* `man 5 gitignore`\n\n### Comparisons to other fnmatch/glob implementations\n\nWhile strict compliance with the existing standards is a worthwhile\ngoal, some discrepancies exist between minimatch and other\nimplementations, and are intentional.\n\nIf the pattern starts with a `!` character, then it is negated. Set the\n`nonegate` flag to suppress this behavior, and treat leading `!`\ncharacters normally. This is perhaps relevant if you wish to start the\npattern with a negative extglob pattern like `!(a|B)`. Multiple `!`\ncharacters at the start of a pattern will negate the pattern multiple\ntimes.\n\nIf a pattern starts with `#`, then it is treated as a comment, and\nwill not match anything. Use `\\#` to match a literal `#` at the\nstart of a line, or set the `nocomment` flag to suppress this behavior.\n\nThe double-star character `**` is supported by default, unless the\n`noglobstar` flag is set. This is supported in the manner of bsdglob\nand bash 4.1, where `**` only has special significance if it is the only\nthing in a path part. That is, `a/**/b` will match `a/x/y/b`, but\n`a/**b` will not. **Note that this is different from the way that `**` is\nhandled by ruby's `Dir` class.**\n\nIf an escaped pattern has no matches, and the `nonull` flag is set,\nthen minimatch.match returns the pattern as-provided, rather than\ninterpreting the character escapes. For example,\n`minimatch.match([], \"\\\\*a\\\\?\")` will return `\"\\\\*a\\\\?\"` rather than\n`\"*a?\"`. This is akin to setting the `nullglob` option in bash, except\nthat it does not resolve escaped pattern characters.\n\nIf brace expansion is not disabled, then it is performed before any\nother interpretation of the glob pattern. Thus, a pattern like\n`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded\n**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are\nchecked for validity. 
Since those two are valid, matching proceeds.\n\n\n## Minimatch Class\n\nCreate a minimatch object by instanting the `minimatch.Minimatch` class.\n\n```javascript\nvar Minimatch = require(\"minimatch\").Minimatch\nvar mm = new Minimatch(pattern, options)\n```\n\n### Properties\n\n* `pattern` The original pattern the minimatch object represents.\n* `options` The options supplied to the constructor.\n* `set` A 2-dimensional array of regexp or string expressions.\n Each row in the\n array corresponds to a brace-expanded pattern. Each item in the row\n corresponds to a single path-part. For example, the pattern\n `{a,b/c}/d` would expand to a set of patterns like:\n\n [ [ a, d ]\n , [ b, c, d ] ]\n\n If a portion of the pattern doesn't have any \"magic\" in it\n (that is, it's something like `\"foo\"` rather than `fo*o?`), then it\n will be left as a string rather than converted to a regular\n expression.\n\n* `regexp` Created by the `makeRe` method. A single regular expression\n expressing the entire pattern. This is useful in cases where you wish\n to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.\n* `negate` True if the pattern is negated.\n* `comment` True if the pattern is a comment.\n* `empty` True if the pattern is `\"\"`.\n\n### Methods\n\n* `makeRe` Generate the `regexp` member if necessary, and return it.\n Will return `false` if the pattern is invalid.\n* `match(fname)` Return true if the filename matches the pattern, or\n false otherwise.\n* `matchOne(fileArray, patternArray, partial)` Take a `/`-split\n filename, and match it against a single row in the `regExpSet`. This\n method is mainly for internal use, but is exposed so that it can be\n used by a glob-walker that needs to avoid excessive filesystem calls.\n\nAll other methods are internal, and will be called as necessary.\n\n## Functions\n\nThe top-level exported function has a `cache` property, which is an LRU\ncache set to store 100 items. So, calling these methods repeatedly\nwith the same pattern and options will use the same Minimatch object,\nsaving the cost of parsing it multiple times.\n\n### minimatch(path, pattern, options)\n\nMain export. Tests a path against the pattern using the options.\n\n```javascript\nvar isJS = minimatch(file, \"*.js\", { matchBase: true })\n```\n\n### minimatch.filter(pattern, options)\n\nReturns a function that tests its\nsupplied argument, suitable for use with `Array.filter`. Example:\n\n```javascript\nvar javascripts = fileList.filter(minimatch.filter(\"*.js\", {matchBase: true}))\n```\n\n### minimatch.match(list, pattern, options)\n\nMatch against the list of\nfiles, in the style of fnmatch or glob. 
If nothing is matched, and\noptions.nonull is set, then return a list containing the pattern itself.\n\n```javascript\nvar javascripts = minimatch.match(fileList, \"*.js\", {matchBase: true}))\n```\n\n### minimatch.makeRe(pattern, options)\n\nMake a regular expression object from the pattern.\n\n## Options\n\nAll options are `false` by default.\n\n### debug\n\nDump a ton of stuff to stderr.\n\n### nobrace\n\nDo not expand `{a,b}` and `{1..3}` brace sets.\n\n### noglobstar\n\nDisable `**` matching against multiple folder names.\n\n### dot\n\nAllow patterns to match filenames starting with a period, even if\nthe pattern does not explicitly have a period in that spot.\n\nNote that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`\nis set.\n\n### noext\n\nDisable \"extglob\" style patterns like `+(a|b)`.\n\n### nocase\n\nPerform a case-insensitive match.\n\n### nonull\n\nWhen a match is not found by `minimatch.match`, return a list containing\nthe pattern itself. When set, an empty list is returned if there are\nno matches.\n\n### matchBase\n\nIf set, then patterns without slashes will be matched\nagainst the basename of the path if it contains slashes. For example,\n`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.\n\n### nocomment\n\nSuppress the behavior of treating `#` at the start of a pattern as a\ncomment.\n\n### nonegate\n\nSuppress the behavior of treating a leading `!` character as negation.\n\n### flipNegate\n\nReturns from negate expressions the same as if they were not negated.\n(Ie, true on a hit, false on a miss.)\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/isaacs/minimatch/issues" + }, + "_id": "minimatch@0.2.12", + "_from": "minimatch@~0.2.11" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/glob/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/glob/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -0,0 +1,39 @@ +{ + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "name": "glob", + "description": "a little globber", + "version": "3.2.3", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "engines": { + "node": "*" + }, + "dependencies": { + "minimatch": "~0.2.11", + "graceful-fs": "~2.0.0", + "inherits": "2" + }, + "devDependencies": { + "tap": "~0.4.0", + "mkdirp": "0", + "rimraf": "1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "license": "BSD", + "readme": "# Glob\n\nMatch files using the patterns the shell uses, like stars and stuff.\n\nThis is a glob implementation in JavaScript. It uses the `minimatch`\nlibrary to do its matching.\n\n## Attention: node-glob users!\n\nThe API has changed dramatically between 2.x and 3.x. 
This library is\nnow 100% JavaScript, and the integer flags have been replaced with an\noptions object.\n\nAlso, there's an event emitter class, proper tests, and all the other\nthings you've come to expect from node modules.\n\nAnd best of all, no compilation!\n\n## Usage\n\n```javascript\nvar glob = require(\"glob\")\n\n// options is optional\nglob(\"**/*.js\", options, function (er, files) {\n // files is an array of filenames.\n // If the `nonull` option is set, and nothing\n // was found, then files is [\"**/*.js\"]\n // er is an error object or null.\n})\n```\n\n## Features\n\nPlease see the [minimatch\ndocumentation](https://github.com/isaacs/minimatch) for more details.\n\nSupports these glob features:\n\n* Brace Expansion\n* Extended glob matching\n* \"Globstar\" `**` matching\n\nSee:\n\n* `man sh`\n* `man bash`\n* `man 3 fnmatch`\n* `man 5 gitignore`\n* [minimatch documentation](https://github.com/isaacs/minimatch)\n\n## glob(pattern, [options], cb)\n\n* `pattern` {String} Pattern to be matched\n* `options` {Object}\n* `cb` {Function}\n * `err` {Error | null}\n * `matches` {Array} filenames found matching the pattern\n\nPerform an asynchronous glob search.\n\n## glob.sync(pattern, [options])\n\n* `pattern` {String} Pattern to be matched\n* `options` {Object}\n* return: {Array} filenames found matching the pattern\n\nPerform a synchronous glob search.\n\n## Class: glob.Glob\n\nCreate a Glob object by instanting the `glob.Glob` class.\n\n```javascript\nvar Glob = require(\"glob\").Glob\nvar mg = new Glob(pattern, options, cb)\n```\n\nIt's an EventEmitter, and starts walking the filesystem to find matches\nimmediately.\n\n### new glob.Glob(pattern, [options], [cb])\n\n* `pattern` {String} pattern to search for\n* `options` {Object}\n* `cb` {Function} Called when an error occurs, or matches are found\n * `err` {Error | null}\n * `matches` {Array} filenames found matching the pattern\n\nNote that if the `sync` flag is set in the options, then matches will\nbe immediately available on the `g.found` member.\n\n### Properties\n\n* `minimatch` The minimatch object that the glob uses.\n* `options` The options object passed in.\n* `error` The error encountered. When an error is encountered, the\n glob object is in an undefined state, and should be discarded.\n* `aborted` Boolean which is set to true when calling `abort()`. There\n is no way at this time to continue a glob search after aborting, but\n you can re-use the statCache to avoid having to duplicate syscalls.\n* `statCache` Collection of all the stat results the glob search\n performed.\n* `cache` Convenience object. Each field has the following possible\n values:\n * `false` - Path does not exist\n * `true` - Path exists\n * `1` - Path exists, and is not a directory\n * `2` - Path exists, and is a directory\n * `[file, entries, ...]` - Path exists, is a directory, and the\n array value is the results of `fs.readdir`\n\n### Events\n\n* `end` When the matching is finished, this is emitted with all the\n matches found. If the `nonull` option is set, and no match was found,\n then the `matches` list contains the original pattern. 
The matches\n are sorted, unless the `nosort` flag is set.\n* `match` Every time a match is found, this is emitted with the matched.\n* `error` Emitted when an unexpected error is encountered, or whenever\n any fs error occurs if `options.strict` is set.\n* `abort` When `abort()` is called, this event is raised.\n\n### Methods\n\n* `abort` Stop the search.\n\n### Options\n\nAll the options that can be passed to Minimatch can also be passed to\nGlob to change pattern matching behavior. Also, some have been added,\nor have glob-specific ramifications.\n\nAll options are false by default, unless otherwise noted.\n\nAll options are added to the glob object, as well.\n\n* `cwd` The current working directory in which to search. Defaults\n to `process.cwd()`.\n* `root` The place where patterns starting with `/` will be mounted\n onto. Defaults to `path.resolve(options.cwd, \"/\")` (`/` on Unix\n systems, and `C:\\` or some such on Windows.)\n* `dot` Include `.dot` files in normal matches and `globstar` matches.\n Note that an explicit dot in a portion of the pattern will always\n match dot files.\n* `nomount` By default, a pattern starting with a forward-slash will be\n \"mounted\" onto the root setting, so that a valid filesystem path is\n returned. Set this flag to disable that behavior.\n* `mark` Add a `/` character to directory matches. Note that this\n requires additional stat calls.\n* `nosort` Don't sort the results.\n* `stat` Set to true to stat *all* results. This reduces performance\n somewhat, and is completely unnecessary, unless `readdir` is presumed\n to be an untrustworthy indicator of file existence. It will cause\n ELOOP to be triggered one level sooner in the case of cyclical\n symbolic links.\n* `silent` When an unusual error is encountered\n when attempting to read a directory, a warning will be printed to\n stderr. Set the `silent` option to true to suppress these warnings.\n* `strict` When an unusual error is encountered\n when attempting to read a directory, the process will just continue on\n in search of other matches. Set the `strict` option to raise an error\n in these cases.\n* `cache` See `cache` property above. Pass in a previously generated\n cache object to save some fs calls.\n* `statCache` A cache of results of filesystem information, to prevent\n unnecessary stat calls. While it should not normally be necessary to\n set this, you may pass the statCache from one glob() call to the\n options object of another, if you know that the filesystem will not\n change between calls. (See \"Race Conditions\" below.)\n* `sync` Perform a synchronous glob search.\n* `nounique` In some cases, brace-expanded patterns can result in the\n same file showing up multiple times in the result set. By default,\n this implementation prevents duplicates in the result set.\n Set this flag to disable that behavior.\n* `nonull` Set to never return an empty set, instead returning a set\n containing the pattern itself. This is the default in glob(3).\n* `nocase` Perform a case-insensitive match. 
Note that case-insensitive\n filesystems will sometimes result in glob returning results that are\n case-insensitively matched anyway, since readdir and stat will not\n raise an error.\n* `debug` Set to enable debug logging in minimatch and glob.\n* `globDebug` Set to enable debug logging in glob, but not minimatch.\n\n## Comparisons to other fnmatch/glob implementations\n\nWhile strict compliance with the existing standards is a worthwhile\ngoal, some discrepancies exist between node-glob and other\nimplementations, and are intentional.\n\nIf the pattern starts with a `!` character, then it is negated. Set the\n`nonegate` flag to suppress this behavior, and treat leading `!`\ncharacters normally. This is perhaps relevant if you wish to start the\npattern with a negative extglob pattern like `!(a|B)`. Multiple `!`\ncharacters at the start of a pattern will negate the pattern multiple\ntimes.\n\nIf a pattern starts with `#`, then it is treated as a comment, and\nwill not match anything. Use `\\#` to match a literal `#` at the\nstart of a line, or set the `nocomment` flag to suppress this behavior.\n\nThe double-star character `**` is supported by default, unless the\n`noglobstar` flag is set. This is supported in the manner of bsdglob\nand bash 4.1, where `**` only has special significance if it is the only\nthing in a path part. That is, `a/**/b` will match `a/x/y/b`, but\n`a/**b` will not.\n\nIf an escaped pattern has no matches, and the `nonull` flag is set,\nthen glob returns the pattern as-provided, rather than\ninterpreting the character escapes. For example,\n`glob.match([], \"\\\\*a\\\\?\")` will return `\"\\\\*a\\\\?\"` rather than\n`\"*a?\"`. This is akin to setting the `nullglob` option in bash, except\nthat it does not resolve escaped pattern characters.\n\nIf brace expansion is not disabled, then it is performed before any\nother interpretation of the glob pattern. Thus, a pattern like\n`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded\n**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are\nchecked for validity. Since those two are valid, matching proceeds.\n\n## Windows\n\n**Please only use forward-slashes in glob expressions.**\n\nThough windows uses either `/` or `\\` as its path separator, only `/`\ncharacters are used by this glob implementation. You must use\nforward-slashes **only** in glob expressions. Back-slashes will always\nbe interpreted as escape characters, not path separators.\n\nResults from absolute patterns such as `/foo/*` are mounted onto the\nroot setting using `path.join`. On windows, this will by default result\nin `/foo/*` matching `C:\\foo\\bar.txt`.\n\n## Race Conditions\n\nGlob searching, by its very nature, is susceptible to race conditions,\nsince it relies on directory walking and such.\n\nAs a result, it is possible that a file that exists when glob looks for\nit may have been deleted or modified by the time it returns the result.\n\nAs part of its internal implementation, this program caches all stat\nand readdir calls that it makes, in order to cut down on system\noverhead. However, this also makes it even more susceptible to races,\nespecially if the cache or statCache objects are reused between glob\ncalls.\n\nUsers are thus advised not to use a glob result as a guarantee of\nfilesystem state in the face of rapid changes. 
For the vast majority\nof operations, this is never a problem.\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/isaacs/node-glob/issues" + }, + "_id": "glob@3.2.3", + "_from": "glob@3.2.3" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/History.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/History.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/History.md 2012-04-08 18:02:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/History.md 2012-12-30 17:28:12.000000000 +0000 @@ -1,4 +1,19 @@ +1.7.0 / 2012-12-30 +================== + + * support transient notifications in Gnome + +1.6.1 / 2012-09-25 +================== + + * restore compatibility with node < 0.8 [fgnass] + +1.6.0 / 2012-09-06 +================== + + * add notification center support [drudge] + 1.5.1 / 2012-04-08 ================== diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/Readme.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/Readme.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/Readme.md 2012-02-08 15:53:29.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/Readme.md 2012-12-15 22:54:17.000000000 +0000 @@ -1,11 +1,17 @@ # Growl for nodejs -Growl support for Nodejs. This is essentially a port of my [Ruby Growl Library](http://github.com/visionmedia/growl). Ubuntu/Linux support added thanks to [@niftylettuce](http://github.com/niftylettuce). You'll need [growlnotify(1)](http://growl.info/extras.php#growlnotify). +Growl support for Nodejs. This is essentially a port of my [Ruby Growl Library](http://github.com/visionmedia/growl). Ubuntu/Linux support added thanks to [@niftylettuce](http://github.com/niftylettuce). ## Installation +### Install + ### Mac OS X (Darwin): + Install [growlnotify(1)](http://growl.info/extras.php#growlnotify). On OS X 10.8, Notification Center is supported using [terminal-notifier](https://github.com/alloy/terminal-notifier). 
To install: + + $ sudo gem install terminal-notifier + Install [npm](http://npmjs.org/) and run: $ npm install growl diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/lib/growl.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/lib/growl.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/lib/growl.js 2012-04-08 18:02:19.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/lib/growl.js 2012-12-15 22:54:17.000000000 +0000 @@ -5,33 +5,60 @@ */ var exec = require('child_process').exec + , fs = require('fs') , path = require('path') + , exists = fs.existsSync || path.existsSync , os = require('os') + , quote = JSON.stringify , cmd; +function which(name) { + var paths = process.env.PATH.split(':'); + var loc; + + for (var i = 0, len = paths.length; i < len; ++i) { + loc = path.join(paths[i], name); + if (exists(loc)) return loc; + } +} + switch(os.type()) { case 'Darwin': - cmd = { - type: "Darwin" - , pkg: "growlnotify" - , msg: '-m' - , sticky: '--sticky' - , priority: { - cmd: '--priority' - , range: [ - -2 - , -1 - , 0 - , 1 - , 2 - , "Very Low" - , "Moderate" - , "Normal" - , "High" - , "Emergency" - ] - } - }; + if (which('terminal-notifier')) { + cmd = { + type: "Darwin-NotificationCenter" + , pkg: "terminal-notifier" + , msg: '-message' + , title: '-title' + , subtitle: '-subtitle' + , priority: { + cmd: '-execute' + , range: [] + } + }; + } else { + cmd = { + type: "Darwin-Growl" + , pkg: "growlnotify" + , msg: '-m' + , sticky: '--sticky' + , priority: { + cmd: '--priority' + , range: [ + -2 + , -1 + , 0 + , 1 + , 2 + , "Very Low" + , "Moderate" + , "Normal" + , "High" + , "Emergency" + ] + } + }; + } break; case 'Linux': cmd = { @@ -127,7 +154,7 @@ // image if (image = options.image) { switch(cmd.type) { - case 'Darwin': + case 'Darwin-Growl': var flag, ext = path.extname(image).substr(1) flag = flag || ext == 'icns' && 'iconpath' flag = flag || /^[A-Z]/.test(image) && 'appIcon' @@ -138,9 +165,11 @@ break; case 'Linux': args.push(cmd.icon + " " + image); + // libnotify defaults to sticky, set a hint for transient notifications + if (!options.sticky) args.push('--hint=int:transient:1'); break; case 'Windows': - args.push(cmd.icon + '"' + image.replace(/\\/g, "\\\\") + '"'); + args.push(cmd.icon + quote(image)); break; } } @@ -158,28 +187,45 @@ } // name - if (options.name && cmd.type === "Darwin") { + if (options.name && cmd.type === "Darwin-Growl") { args.push('--name', options.name); } switch(cmd.type) { - case 'Darwin': + case 'Darwin-Growl': + args.push(cmd.msg); + args.push(quote(msg)); + if (options.title) args.push(quote(options.title)); + break; + case 'Darwin-NotificationCenter': + args.push(cmd.msg); + args.push(quote(msg)); + if (options.title) { + args.push(cmd.title); + args.push(quote(options.title)); + } + if (options.subtitle) { + args.push(cmd.subtitle); + args.push(quote(options.title)); + } + break; + case 'Darwin-Growl': args.push(cmd.msg); - args.push('"' + msg + '"'); - if (options.title) args.push(options.title); + args.push(quote(msg)); + if (options.title) args.push(quote(options.title)); break; case 'Linux': if (options.title) { - args.push("'" + options.title + "'"); + args.push(quote(options.title)); args.push(cmd.msg); - args.push("'" + msg + "'"); + args.push(quote(msg)); } else { - args.push("'" + msg + "'"); + args.push(quote(msg)); } break; case 'Windows': - args.push('"' + msg + '"'); - if (options.title) args.push(cmd.title + '"' + options.title + '"'); + 
args.push(quote(msg)); + if (options.title) args.push(cmd.title + quote(options.title)); break; } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/package.json 2012-10-11 02:23:14.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -1,13 +1,14 @@ { "name": "growl", - "version": "1.5.1", + "version": "1.7.0", "description": "Growl unobtrusive notifications", "author": { "name": "TJ Holowaychuk", "email": "tj@vision-media.ca" }, "main": "./lib/growl.js", - "readme": "# Growl for nodejs\n\nGrowl support for Nodejs. This is essentially a port of my [Ruby Growl Library](http://github.com/visionmedia/growl). Ubuntu/Linux support added thanks to [@niftylettuce](http://github.com/niftylettuce). You'll need [growlnotify(1)](http://growl.info/extras.php#growlnotify).\n\n## Installation\n\n### Mac OS X (Darwin):\n\n Install [npm](http://npmjs.org/) and run:\n \n $ npm install growl\n\n### Ubuntu (Linux):\n\n Install `notify-send` through the [libnotify-bin](http://packages.ubuntu.com/libnotify-bin) package:\n\n $ sudo apt-get install libnotify-bin\n\n Install [npm](http://npmjs.org/) and run:\n \n $ npm install growl\n\n### Windows:\n\n Download and install [Growl for Windows](http://www.growlforwindows.com/gfw/default.aspx)\n\n Download [growlnotify](http://www.growlforwindows.com/gfw/help/growlnotify.aspx) - **IMPORTANT :** Unpack growlnotify to a folder that is present in your path!\n\n Install [npm](http://npmjs.org/) and run:\n \n $ npm install growl\n\n## Examples\n\nCallback functions are optional\n\n var growl = require('growl')\n growl('You have mail!')\n growl('5 new messages', { sticky: true })\n growl('5 new emails', { title: 'Email Client', image: 'Safari', sticky: true })\n growl('Message with title', { title: 'Title'})\n growl('Set priority', { priority: 2 })\n growl('Show Safari icon', { image: 'Safari' })\n growl('Show icon', { image: 'path/to/icon.icns' })\n growl('Show image', { image: 'path/to/my.image.png' })\n growl('Show png filesystem icon', { image: 'png' })\n growl('Show pdf filesystem icon', { image: 'article.pdf' })\n growl('Show pdf filesystem icon', { image: 'article.pdf' }, function(err){\n // ... 
notified\n })\n\n## Options\n\n - title\n - notification title\n - name\n - application name\n - priority\n - priority for the notification (default is 0)\n - sticky\n - weither or not the notification should remainin until closed\n - image\n - Auto-detects the context:\n - path to an icon sets --iconpath\n - path to an image sets --image\n - capitalized word sets --appIcon\n - filename uses extname as --icon\n - otherwise treated as --icon\n \n## License \n\n(The MIT License)\n\nCopyright (c) 2009 TJ Holowaychuk \n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "_id": "growl@1.5.1", - "_from": "growl@1.5.x" + "readme": "# Growl for nodejs\n\nGrowl support for Nodejs. This is essentially a port of my [Ruby Growl Library](http://github.com/visionmedia/growl). Ubuntu/Linux support added thanks to [@niftylettuce](http://github.com/niftylettuce). \n\n## Installation\n\n### Install \n\n### Mac OS X (Darwin):\n\n Install [growlnotify(1)](http://growl.info/extras.php#growlnotify). On OS X 10.8, Notification Center is supported using [terminal-notifier](https://github.com/alloy/terminal-notifier). To install:\n \n $ sudo gem install terminal-notifier\n \n Install [npm](http://npmjs.org/) and run:\n \n $ npm install growl\n\n### Ubuntu (Linux):\n\n Install `notify-send` through the [libnotify-bin](http://packages.ubuntu.com/libnotify-bin) package:\n\n $ sudo apt-get install libnotify-bin\n\n Install [npm](http://npmjs.org/) and run:\n \n $ npm install growl\n\n### Windows:\n\n Download and install [Growl for Windows](http://www.growlforwindows.com/gfw/default.aspx)\n\n Download [growlnotify](http://www.growlforwindows.com/gfw/help/growlnotify.aspx) - **IMPORTANT :** Unpack growlnotify to a folder that is present in your path!\n\n Install [npm](http://npmjs.org/) and run:\n \n $ npm install growl\n\n## Examples\n\nCallback functions are optional\n\n var growl = require('growl')\n growl('You have mail!')\n growl('5 new messages', { sticky: true })\n growl('5 new emails', { title: 'Email Client', image: 'Safari', sticky: true })\n growl('Message with title', { title: 'Title'})\n growl('Set priority', { priority: 2 })\n growl('Show Safari icon', { image: 'Safari' })\n growl('Show icon', { image: 'path/to/icon.icns' })\n growl('Show image', { image: 'path/to/my.image.png' })\n growl('Show png filesystem icon', { image: 'png' })\n growl('Show pdf filesystem icon', { image: 'article.pdf' })\n growl('Show pdf filesystem icon', { image: 'article.pdf' }, function(err){\n // ... 
notified\n })\n\n## Options\n\n - title\n - notification title\n - name\n - application name\n - priority\n - priority for the notification (default is 0)\n - sticky\n - weither or not the notification should remainin until closed\n - image\n - Auto-detects the context:\n - path to an icon sets --iconpath\n - path to an image sets --image\n - capitalized word sets --appIcon\n - filename uses extname as --icon\n - otherwise treated as --icon\n \n## License \n\n(The MIT License)\n\nCopyright (c) 2009 TJ Holowaychuk \n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", + "readmeFilename": "Readme.md", + "_id": "growl@1.7.0", + "_from": "growl@1.7.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/test.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/test.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/growl/test.js 2011-12-17 16:34:37.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/growl/test.js 2012-12-15 22:54:17.000000000 +0000 @@ -14,3 +14,7 @@ growl('Show pdf filesystem icon', { image: 'article.pdf' }, function(){ console.log('callback'); }) +growl('Show pdf filesystem icon', { title: 'Use show()', image: 'article.pdf' }) +growl('here \' are \n some \\ characters that " need escaping', {}, function(error, stdout, stderr) { + if (error !== null) throw new Error('escaping failed:\n' + stdout + stderr); +}) diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/jade/node_modules/mkdirp/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/jade/node_modules/mkdirp/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/jade/node_modules/mkdirp/package.json 2012-10-11 02:23:16.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/jade/node_modules/mkdirp/package.json 2013-10-25 23:13:15.000000000 +0000 @@ -27,6 +27,10 @@ "node": "*" }, "readme": "mkdirp\n======\n\nLike `mkdir -p`, but in node.js!\n\nexample\n=======\n\npow.js\n------\n var mkdirp = require('mkdirp');\n \n mkdirp('/tmp/foo/bar/baz', function (err) {\n if (err) console.error(err)\n else console.log('pow!')\n });\n\nOutput\n pow!\n\nAnd now /tmp/foo/bar/baz exists, huzzah!\n\nmethods\n=======\n\nvar mkdirp = require('mkdirp');\n\nmkdirp(dir, mode, cb)\n---------------------\n\nCreate a new directory and any necessary subdirectories at `dir` with octal\npermission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\nmkdirp.sync(dir, 
mode)\n----------------------\n\nSynchronously create a new directory and any necessary subdirectories at `dir`\nwith octal permission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\ninstall\n=======\n\nWith [npm](http://npmjs.org) do:\n\n npm install mkdirp\n\nlicense\n=======\n\nMIT/X11\n", + "readmeFilename": "README.markdown", + "bugs": { + "url": "https://github.com/substack/node-mkdirp/issues" + }, "_id": "mkdirp@0.3.0", "_from": "mkdirp@0.3.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/jade/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/jade/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/jade/package.json 2012-10-11 02:23:15.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/jade/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -38,7 +38,10 @@ "scripts": { "prepublish": "npm prune" }, + "bugs": { + "url": "https://github.com/visionmedia/jade/issues" + }, + "readme": "ERROR: No README data found!", "_id": "jade@0.26.3", - "readme": "ERROR: No README.md file found!", "_from": "jade@0.26.3" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/.npmignore tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/.npmignore --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/.npmignore 2011-09-10 05:36:52.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -node_modules/ -npm-debug.log \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/.travis.yml tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/.travis.yml --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/.travis.yml 2012-03-31 19:50:45.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/.travis.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -language: node_js -node_js: - - 0.4 - - 0.6 diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/LICENSE tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/LICENSE --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/LICENSE 2011-04-16 23:09:28.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -Copyright 2010 James Halliday (mail@substack.net) - -This project is free software released under the MIT/X11 license: - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/README.markdown tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/README.markdown --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/README.markdown 2012-03-31 19:50:45.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/README.markdown 1970-01-01 00:00:00.000000000 +0000 @@ -1,61 +0,0 @@ -mkdirp -====== - -Like `mkdir -p`, but in node.js! - -[![build status](https://secure.travis-ci.org/substack/node-mkdirp.png)](http://travis-ci.org/substack/node-mkdirp) - -example -======= - -pow.js ------- - var mkdirp = require('mkdirp'); - - mkdirp('/tmp/foo/bar/baz', function (err) { - if (err) console.error(err) - else console.log('pow!') - }); - -Output - pow! - -And now /tmp/foo/bar/baz exists, huzzah! - -methods -======= - -var mkdirp = require('mkdirp'); - -mkdirp(dir, mode, cb) ---------------------- - -Create a new directory and any necessary subdirectories at `dir` with octal -permission string `mode`. - -If `mode` isn't specified, it defaults to `0777 & (~process.umask())`. - -`cb(err, made)` fires with the error or the first directory `made` -that had to be created, if any. - -mkdirp.sync(dir, mode) ----------------------- - -Synchronously create a new directory and any necessary subdirectories at `dir` -with octal permission string `mode`. - -If `mode` isn't specified, it defaults to `0777 & (~process.umask())`. - -Returns the first directory that had to be created, if any. 
- -install -======= - -With [npm](http://npmjs.org) do: - - npm install mkdirp - -license -======= - -MIT/X11 diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/examples/pow.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/examples/pow.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/examples/pow.js 2012-03-31 07:42:54.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/examples/pow.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -var mkdirp = require('mkdirp'); - -mkdirp('/tmp/foo/bar/baz', function (err) { - if (err) console.error(err) - else console.log('pow!') -}); diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/index.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/index.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/index.js 2012-06-05 15:53:46.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/index.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,94 +0,0 @@ -var path = require('path'); -var fs = require('fs'); - -module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; - -function mkdirP (p, mode, f, made) { - if (typeof mode === 'function' || mode === undefined) { - f = mode; - mode = 0777 & (~process.umask()); - } - if (!made) made = null; - - var cb = f || function () {}; - if (typeof mode === 'string') mode = parseInt(mode, 8); - p = path.resolve(p); - - fs.mkdir(p, mode, function (er) { - if (!er) { - made = made || p; - return cb(null, made); - } - switch (er.code) { - case 'ENOENT': - mkdirP(path.dirname(p), mode, function (er, made) { - if (er) cb(er, made); - else mkdirP(p, mode, cb, made); - }); - break; - - case 'EISDIR': - case 'EPERM': - // Operation not permitted or already is a dir. - // This is the error you get when trying to mkdir('c:/') - // on windows, or mkdir('/') on unix. Make sure it's a - // dir by falling through to the EEXIST case. - case 'EROFS': - // a read-only file system. - // However, the dir could already exist, in which case - // the EROFS error will be obscuring a EEXIST! - // Fallthrough to that case. - case 'EEXIST': - fs.stat(p, function (er2, stat) { - // if the stat fails, then that's super weird. - // let the original error be the failure reason. 
- if (er2 || !stat.isDirectory()) cb(er, made) - else cb(null, made); - }); - break; - - default: - cb(er, made); - break; - } - }); -} - -mkdirP.sync = function sync (p, mode, made) { - if (mode === undefined) { - mode = 0777 & (~process.umask()); - } - if (!made) made = null; - - if (typeof mode === 'string') mode = parseInt(mode, 8); - p = path.resolve(p); - - try { - fs.mkdirSync(p, mode); - made = made || p; - } - catch (err0) { - switch (err0.code) { - case 'ENOENT' : - made = sync(path.dirname(p), mode, made); - sync(p, mode, made); - break; - - case 'EEXIST' : - var stat; - try { - stat = fs.statSync(p); - } - catch (err1) { - throw err0; - } - if (!stat.isDirectory()) throw err0; - break; - default : - throw err0 - break; - } - } - - return made; -}; diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/mkdirp/package.json 2012-10-11 02:23:14.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/mkdirp/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -{ - "name": "mkdirp", - "description": "Recursively mkdir, like `mkdir -p`", - "version": "0.3.3", - "author": { - "name": "James Halliday", - "email": "mail@substack.net", - "url": "http://substack.net" - }, - "main": "./index", - "keywords": [ - "mkdir", - "directory" - ], - "repository": { - "type": "git", - "url": "http://github.com/substack/node-mkdirp.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "devDependencies": { - "tap": "~0.2.4" - }, - "license": "MIT/X11", - "engines": { - "node": "*" - }, - "readme": "mkdirp\n======\n\nLike `mkdir -p`, but in node.js!\n\n[![build status](https://secure.travis-ci.org/substack/node-mkdirp.png)](http://travis-ci.org/substack/node-mkdirp)\n\nexample\n=======\n\npow.js\n------\n var mkdirp = require('mkdirp');\n \n mkdirp('/tmp/foo/bar/baz', function (err) {\n if (err) console.error(err)\n else console.log('pow!')\n });\n\nOutput\n pow!\n\nAnd now /tmp/foo/bar/baz exists, huzzah!\n\nmethods\n=======\n\nvar mkdirp = require('mkdirp');\n\nmkdirp(dir, mode, cb)\n---------------------\n\nCreate a new directory and any necessary subdirectories at `dir` with octal\npermission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\n`cb(err, made)` fires with the error or the first directory `made`\nthat had to be created, if any.\n\nmkdirp.sync(dir, mode)\n----------------------\n\nSynchronously create a new directory and any necessary subdirectories at `dir`\nwith octal permission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\nReturns the first directory that had to be created, if any.\n\ninstall\n=======\n\nWith [npm](http://npmjs.org) do:\n\n npm install mkdirp\n\nlicense\n=======\n\nMIT/X11\n", - "_id": "mkdirp@0.3.3", - "_from": "mkdirp@0.3.3" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/.npmignore tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/.npmignore --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/.npmignore 2012-03-08 01:57:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -node_modules diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/History.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/History.md --- 
tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/History.md 2012-09-07 20:36:23.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/History.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ - -0.3.0 / 2012-09-07 -================== - - * fix `ms()` [visionmedia] - -0.2.0 / 2012-09-03 -================== - - * add component.json [visionmedia] - * add days support [visionmedia] - * add hours support [visionmedia] - * add minutes support [visionmedia] - * add seconds support [visionmedia] - * add ms string support [visionmedia] - * refactor tests to facilitate ms(number) [visionmedia] - -0.1.0 / 2012-03-07 -================== - - * Initial release diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/Makefile tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/Makefile --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/Makefile 2012-09-03 19:53:27.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/Makefile 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ - -test: - @./node_modules/.bin/mocha test/test.js - -test-browser: - ./node_modules/.bin/serve test/ - -.PHONY: test diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/README.md tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/README.md --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/README.md 2012-09-03 19:53:27.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -# ms.js: miliseconds conversion utility - -```js -ms('1d') // 86400000 -ms('10h') // 36000000 -ms('2h') // 7200000 -ms('1m') // 60000 -ms('5s') // 5000 -ms('100') // 100 -ms(100) // 100 -``` - -```js -ms(60000) // "1 minute" -ms(2 * 60000) // "2 minutes" -ms(ms('10 hours')) // "10 hours" -``` - -- Node/Browser compatible. Published as `ms` in NPM. -- If a number is supplied to `ms`, it returns it immediately. -- If a string that contains the number is supplied, it returns it as -a number (e.g: it returns `100` for `'100'`). -- If you pass a string with a number and a valid unit, the number of -equivalent ms is returned. diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/component.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/component.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/component.json 2012-09-03 19:53:27.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/component.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -{ - "name": "ms", - "repo": "guille/ms.js", - "description": "ms parsing / formatting", - "keywords": ["ms", "parse", "format"], - "main": "ms.js" -} \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/ms.js tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/ms.js --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/ms.js 2012-09-07 20:35:55.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/ms.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,81 +0,0 @@ - -/** - * Helpers. - */ - -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; - -/** - * Parse or format the given `val`. - * - * @param {String|Number} val - * @return {String|Number} - * @api public - */ - -module.exports = function(val){ - if ('string' == typeof val) return parse(val); - return format(val); -} - -/** - * Parse the given `str` and return milliseconds. 
- * - * @param {String} str - * @return {Number} - * @api private - */ - -function parse(str) { - var m = /^((?:\d+)?\.?\d+) *(ms|seconds?|s|minutes?|m|hours?|h|days?|d|years?|y)?$/i.exec(str); - if (!m) return; - var n = parseFloat(m[1]); - var type = (m[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'y': - return n * 31557600000; - case 'days': - case 'day': - case 'd': - return n * 86400000; - case 'hours': - case 'hour': - case 'h': - return n * 3600000; - case 'minutes': - case 'minute': - case 'm': - return n * 60000; - case 'seconds': - case 'second': - case 's': - return n * 1000; - case 'ms': - return n; - } -} - -/** - * Format the given `ms`. - * - * @param {Number} ms - * @return {String} - * @api public - */ - -function format(ms) { - if (ms == d) return (ms / d) + ' day'; - if (ms > d) return (ms / d) + ' days'; - if (ms == h) return (ms / h) + ' hour'; - if (ms > h) return (ms / h) + ' hours'; - if (ms == m) return (ms / m) + ' minute'; - if (ms > m) return (ms / m) + ' minutes'; - if (ms == s) return (ms / s) + ' second'; - if (ms > s) return (ms / s) + ' seconds'; - return ms + ' ms'; -} \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/package.json tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/node_modules/ms/package.json 2012-10-11 02:23:14.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/node_modules/ms/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -{ - "name": "ms", - "version": "0.3.0", - "description": "Tiny ms conversion utility", - "main": "./ms", - "devDependencies": { - "mocha": "*", - "expect.js": "*", - "serve": "*" - }, - "readme": "# ms.js: miliseconds conversion utility\n\n```js\nms('1d') // 86400000\nms('10h') // 36000000\nms('2h') // 7200000\nms('1m') // 60000\nms('5s') // 5000\nms('100') // 100\nms(100) // 100\n```\n\n```js\nms(60000) // \"1 minute\"\nms(2 * 60000) // \"2 minutes\"\nms(ms('10 hours')) // \"10 hours\"\n```\n\n- Node/Browser compatible. 
Published as `ms` in NPM.\n- If a number is supplied to `ms`, it returns it immediately.\n- If a string that contains the number is supplied, it returns it as\na number (e.g: it returns `100` for `'100'`).\n- If you pass a string with a number and a valid unit, the number of\nequivalent ms is returned.\n", - "_id": "ms@0.3.0", - "_from": "ms@0.3.0" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/package.json tilemill-0.10.1~saucy10/node_modules/mocha/package.json --- tilemill-0.10.1~saucy9/node_modules/mocha/package.json 2012-10-11 02:23:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/package.json 2013-10-25 23:13:11.000000000 +0000 @@ -1,8 +1,9 @@ { "name": "mocha", - "version": "1.6.0", + "version": "1.13.0", "description": "simple, flexible, fun test framework", "keywords": [ + "mocha", "test", "bdd", "tdd", @@ -29,18 +30,30 @@ }, "dependencies": { "commander": "0.6.1", - "growl": "1.5.x", + "growl": "1.7.x", "jade": "0.26.3", - "diff": "1.0.2", + "diff": "1.0.7", "debug": "*", - "mkdirp": "0.3.3", - "ms": "0.3.0" + "mkdirp": "0.3.5", + "glob": "3.2.3" }, "devDependencies": { "should": "*", "coffee-script": "1.2" }, - "readme": " [![Build Status](https://secure.travis-ci.org/visionmedia/mocha.png)](http://travis-ci.org/visionmedia/mocha)\n\n [![Mocha test framework](http://f.cl.ly/items/3l1k0n2A1U3M1I1L210p/Screen%20Shot%202012-02-24%20at%202.21.43%20PM.png)](http://visionmedia.github.com/mocha)\n\n Mocha is a simple, flexible, fun JavaScript test framework for node.js and the browser. For more information view the [documentation](http://visionmedia.github.com/mocha).\n\n## Contributors\n\n```\nproject: mocha\ncommits: 502\nfiles : 86\nauthors: \n 352\tTj Holowaychuk 70.1%\n 98\tTJ Holowaychuk 19.5%\n 21\tGuillermo Rauch 4.2%\n 6\tJames Carr 1.2%\n 4\tJoshua Krall 0.8%\n 3\tBen Lindsey 0.6%\n 3\tNathan Rajlich 0.6%\n 2\tFARKAS Máté 0.4%\n 2\tQuang Van 0.4%\n 1\tSteve Mason 0.2%\n 1\tYuest Wang 0.2%\n 1\thokaccha 0.2%\n 1\tDavid Henderson 0.2%\n 1\tFedor Indutny 0.2%\n 1\tFredrik Lindin 0.2%\n 1\tHarry Brundage 0.2%\n 1\tKonstantin Käfer 0.2%\n 1\tMaciej Małecki 0.2%\n 1\tRaynos 0.2%\n 1\tRyunosuke SATO 0.2%\n```\n\n## Links\n\n - [Chai](https://github.com/chaijs/chai) - BDD, TDD, and assert for node & the browser\n - [Should.js](http://github.com/visionmedia/should.js) - BDD style assertions for node\n - [Expect.js](https://github.com/LearnBoost/expect.js) - BDD style assertions for node & the browser\n - [Google Group](http://groups.google.com/group/mochajs)", - "_id": "mocha@1.6.0", + "files": [ + "bin", + "images", + "lib", + "index.js", + "mocha.css", + "mocha.js" + ], + "readme": " [![Build Status](https://secure.travis-ci.org/visionmedia/mocha.png)](http://travis-ci.org/visionmedia/mocha)\n\n [![Mocha test framework](http://f.cl.ly/items/3l1k0n2A1U3M1I1L210p/Screen%20Shot%202012-02-24%20at%202.21.43%20PM.png)](http://visionmedia.github.io/mocha)\n\n Mocha is a simple, flexible, fun JavaScript test framework for node.js and the browser. 
For more information view the [documentation](http://visionmedia.github.io/mocha).\n\n## Contributors\n\n```\n\n project : mocha\n repo age : 1 year, 7 months\n active : 272 days\n commits : 1116\n files : 123\n authors :\n 504 TJ Holowaychuk 45.2%\n 389 Tj Holowaychuk 34.9%\n 31 Guillermo Rauch 2.8%\n 13 Attila Domokos 1.2%\n 9 John Firebaugh 0.8%\n 8 Jo Liss 0.7%\n 7 Nathan Rajlich 0.6%\n 6 James Carr 0.5%\n 6 Brendan Nee 0.5%\n 5 Aaron Heckmann 0.4%\n 4 hokaccha 0.4%\n 4 Xavier Antoviaque 0.4%\n 4 Joshua Krall 0.4%\n 3 Wil Moore III 0.3%\n 3 Jesse Dailey 0.3%\n 3 Nathan Bowser 0.3%\n 3 Tyson Tate 0.3%\n 3 Cory Thomas 0.3%\n 3 Ryunosuke SATO 0.3%\n 3 Paul Miller 0.3%\n 3 Ben Lindsey 0.3%\n 2 Forbes Lindesay 0.2%\n 2 Konstantin Käfer 0.2%\n 2 Brian Beck 0.2%\n 2 Merrick Christensen 0.2%\n 2 Michael Riley 0.2%\n 2 David Henderson 0.2%\n 2 Nathan Alderson 0.2%\n 2 Paul Armstrong 0.2%\n 2 Pete Hawkins 0.2%\n 2 Quang Van 0.2%\n 2 Raynos 0.2%\n 2 Jonas Westerlund 0.2%\n 2 Domenic Denicola 0.2%\n 2 Shawn Krisman 0.2%\n 2 Simon Gaeremynck 0.2%\n 2 FARKAS Máté 0.2%\n 2 Timo Tijhof 0.2%\n 2 Justin DuJardin 0.2%\n 2 Juzer Ali 0.2%\n 2 Ian Storm Taylor 0.2%\n 2 Arian Stolwijk 0.2%\n 2 domenic 0.2%\n 1 Richard Dingwall 0.1%\n 1 Russ Bradberry 0.1%\n 1 Sasha Koss 0.1%\n 1 Seiya Konno 0.1%\n 1 Standa Opichal 0.1%\n 1 Steve Mason 0.1%\n 1 Will Langstroth 0.1%\n 1 Yanis Wang 0.1%\n 1 Yuest Wang 0.1%\n 1 abrkn 0.1%\n 1 airportyh 0.1%\n 1 fengmk2 0.1%\n 1 tgautier@yahoo.com 0.1%\n 1 traleig1 0.1%\n 1 vlad 0.1%\n 1 yuitest 0.1%\n 1 Adam Crabtree 0.1%\n 1 Andreas Brekken 0.1%\n 1 Atsuya Takagi 0.1%\n 1 Austin Birch 0.1%\n 1 Bjørge Næss 0.1%\n 1 Brian Moore 0.1%\n 1 Bryan Donovan 0.1%\n 1 Casey Foster 0.1%\n 1 Corey Butler 0.1%\n 1 Dave McKenna 0.1%\n 1 Fedor Indutny 0.1%\n 1 Florian Margaine 0.1%\n 1 Frederico Silva 0.1%\n 1 Fredrik Lindin 0.1%\n 1 Gareth Murphy 0.1%\n 1 Gavin Mogan 0.1%\n 1 Greg Perkins 0.1%\n 1 Harry Brundage 0.1%\n 1 Herman Junge 0.1%\n 1 Ian Young 0.1%\n 1 Ivan 0.1%\n 1 Jaakko Salonen 0.1%\n 1 Jakub Nešetřil 0.1%\n 1 James Bowes 0.1%\n 1 James Lal 0.1%\n 1 Jason Barry 0.1%\n 1 Javier Aranda 0.1%\n 1 Jeff Kunkle 0.1%\n 1 Jonathan Creamer 0.1%\n 1 Jussi Virtanen 0.1%\n 1 Katie Gengler 0.1%\n 1 Kazuhito Hokamura 0.1%\n 1 Koen Punt 0.1%\n 1 Laszlo Bacsi 0.1%\n 1 László Bácsi 0.1%\n 1 Maciej Małecki 0.1%\n 1 Matt Robenolt 0.1%\n 1 Matt Smith 0.1%\n 1 Matthew Shanley 0.1%\n 1 Michael Schoonmaker 0.1%\n 1 Phil Sung 0.1%\n 1 R56 0.1%\n```\n\n## Links\n\n - [Google Group](http://groups.google.com/group/mochajs)\n - [Wiki](https://github.com/visionmedia/mocha/wiki)\n - Mocha [Extensions and reporters](https://github.com/visionmedia/mocha/wiki)\n", + "readmeFilename": "Readme.md", + "bugs": { + "url": "https://github.com/visionmedia/mocha/issues" + }, + "_id": "mocha@1.13.0", "_from": "mocha@*" } diff -Nru tilemill-0.10.1~saucy9/node_modules/mocha/test.js tilemill-0.10.1~saucy10/node_modules/mocha/test.js --- tilemill-0.10.1~saucy9/node_modules/mocha/test.js 2012-09-19 16:26:51.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/mocha/test.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ - -describe('test', function(){ - it('should work with objects', function(){ - var a = { name: 'tobi', age: 2, species: 'ferret' }; - var b = { name: 'jane', age: 8, species: 'ferret' }; - a.should.eql(b); - }) - - it('should work with arrays', function(){ - var a = [1,2,{ name: 'tobi' },4,5] - var b = [1,2,{ name: 'jane' },4,4, 'extra stuff', 'more extra'] - a.should.eql(b); - }) - - it('should work 
with strings', function(){ - 'some\nfoo\nbar'.should.equal('some\nbar\nbaz'); - }) -}) \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/modestmaps/package.json tilemill-0.10.1~saucy10/node_modules/modestmaps/package.json --- tilemill-0.10.1~saucy9/node_modules/modestmaps/package.json 2012-10-11 02:23:11.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/modestmaps/package.json 2013-10-25 23:13:17.000000000 +0000 @@ -40,6 +40,18 @@ }, "directories": {}, "readme": "Modest Maps JS is a BSD-licensed display and interaction library for tile-based\nmaps in Javascript.\n\nOur intent is to provide a minimal, extensible, customizable, and free display\nlibrary for discriminating designers and developers who want to use interactive\nmaps in their own projects. Modest Maps provides a core set of features in a\ntight, clean package, with plenty of hooks for additional functionality.\n\n# [Documentation](https://github.com/modestmaps/modestmaps-js/wiki)\n\n## Building\n\nThis package includes a copy of [YUICompressor](http://developer.yahoo.com/yui/compressor/),\nwhich requires a version of Java on your system. To create a new build of\nModest Maps (only necessary for development), run `make` from the root\ndirectory.\n\n## Developing with npm:\n\nModest Maps includes a `package.json` file to guide usage of its code on the\nserver-side, and to handle certain dependencies.\n\nTo install developer dependencies - needed for documentation and tests -\nyou'll need [npm](http://npmjs.org/):\n\n npm install --dev\n\n## Tests\n\nTests require `expresso` to be installed by `npm`, as noted above. To run tests,\n\n make tests\n", + "readmeFilename": "README.md", + "repository": { + "type": "git", + "url": "git://github.com/modestmaps/modestmaps-js.git" + }, + "bugs": { + "url": "https://github.com/modestmaps/modestmaps-js/issues" + }, "_id": "modestmaps@3.3.5", - "_from": "modestmaps@3.3.5" + "dist": { + "shasum": "3fd3205bb2a012069831461f20497f54b7aeab9a" + }, + "_from": "modestmaps@3.3.5", + "_resolved": "https://registry.npmjs.org/modestmaps/-/modestmaps-3.3.5.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/.jshintrc tilemill-0.10.1~saucy10/node_modules/node-gyp/.jshintrc --- tilemill-0.10.1~saucy9/node_modules/node-gyp/.jshintrc 2013-06-24 21:20:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/.jshintrc 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -{ - "asi": true, - "laxcomma": true, - "es5": true, - "node": true, - "strict": false -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/.npmignore 2013-06-24 20:57:53.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -gyp/test diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/LICENSE 2012-06-29 20:31:36.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2012 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, 
and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/README.md 2013-06-05 23:08:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,163 +0,0 @@ -node-gyp -========= -### Node.js native addon build tool - -`node-gyp` is a cross-platform command-line tool written in Node.js for compiling -native addon modules for Node.js, which takes away the pain of dealing with the -various differences in build platforms. It is the replacement to the `node-waf` -program which is removed for node `v0.8`. If you have a native addon for node that -still has a `wscript` file, then you should definitely add a `binding.gyp` file -to support the latest versions of node. - -Multiple target versions of node are supported (i.e. `0.8`, `0.9`, `0.10`, ..., `1.0`, -etc.), regardless of what version of node is actually installed on your system -(`node-gyp` downloads the necessary development files for the target version). - -#### Features: - - * Easy to use, consistent interface - * Same commands to build your module on every platform - * Supports multiple target versions of Node - - -Installation ------------- - -You can install with `npm`: - -``` bash -$ npm install -g node-gyp -``` - -You will also need to install: - - * On Unix: - * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) - * `make` - * A proper C/C++ compiler toolchain, like GCC - * On Windows: - * [Python][windows-python] ([`v2.7.3`][windows-python-v2.7.3] recommended, `v3.x.x` is __*not*__ supported) - * Windows XP/Vista/7: - * Microsoft Visual Studio C++ 2010 ([Express][msvc2010] version works well) - * For 64-bit builds of node and native modules you will _**also**_ need the [Windows 7 64-bit SDK][win7sdk] - * If the install fails, try uninstalling any C++ 2010 x64&x86 Redistributable that you have installed first. - * If you get errors that the 64-bit compilers are not installed you may also need the [compiler update for the Windows SDK 7.1] - * Windows 7/8: - * Microsoft Visual Studio C++ 2012 for Windows Desktop ([Express][msvc2012] version works well) - -Note that OS X is just a flavour of Unix and so needs `python`, `make`, and C/C++. -An easy way to obtain these is to install XCode from Apple, -and then use it to install the command line tools (under Preferences -> Downloads). - -How to Use ----------- - -To compile your native addon, first go to its root directory: - -``` bash -$ cd my_node_addon -``` - -The next step is to generate the appropriate project build files for the current -platform. 
Use `configure` for that: - -``` bash -$ node-gyp configure -``` - -__Note__: The `configure` step looks for the `binding.gyp` file in the current -directory to processs. See below for instructions on creating the `binding.gyp` file. - -Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file -(on Windows) in the `build/` directory. Next invoke the `build` command: - -``` bash -$ node-gyp build -``` - -Now you have your compiled `.node` bindings file! The compiled bindings end up -in `build/Debug/` or `build/Release/`, depending on the build mode. At this point -you can require the `.node` file with Node and run your tests! - -__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or -`-d`) switch when running the either `configure` or `build` command. - - -The "binding.gyp" file ----------------------- - -Previously when node had `node-waf` you had to write a `wscript` file. The -replacement for that is the `binding.gyp` file, which describes the configuration -to build your module in a JSON-like format. This file gets placed in the root of -your package, alongside the `package.json` file. - -A barebones `gyp` file appropriate for building a node addon looks like: - -``` python -{ - "targets": [ - { - "target_name": "binding", - "sources": [ "src/binding.cc" ] - } - ] -} -``` - -Some additional resources for writing `gyp` files: - - * ["Hello World" node addon example](https://github.com/joyent/node/tree/master/test/addons/hello-world) - * [gyp user documentation](http://code.google.com/p/gyp/wiki/GypUserDocumentation) - * [gyp input format reference](http://code.google.com/p/gyp/wiki/InputFormatReference) - * [*"binding.gyp" files out in the wild* wiki page](https://github.com/TooTallNate/node-gyp/wiki/%22binding.gyp%22-files-out-in-the-wild) - - -Commands --------- - -`node-gyp` responds to the following commands: - -| **Command** | **Description** -|:--------------|:--------------------------------------------------------------- -| `build` | Invokes `make`/`msbuild.exe` and builds the native addon -| `clean` | Removes any the `build` dir if it exists -| `configure` | Generates project build files for the current platform -| `rebuild` | Runs "clean", "configure" and "build" all in a row -| `install` | Installs node development header files for the given version -| `list` | Lists the currently installed node development file versions -| `remove` | Removes the node development header files for the given version - - -License -------- - -(The MIT License) - -Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -[windows-python]: http://www.python.org/getit/windows -[windows-python-v2.7.3]: http://www.python.org/download/releases/2.7.3#download -[msvc2010]: http://go.microsoft.com/?linkid=9709949 -[msvc2012]: http://go.microsoft.com/?linkid=9816758 -[win7sdk]: http://www.microsoft.com/en-us/download/details.aspx?id=8279 -[compiler update for the Windows SDK 7.1]: http://www.microsoft.com/en-us/download/details.aspx?id=4422 diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/addon.gypi tilemill-0.10.1~saucy10/node_modules/node-gyp/addon.gypi --- tilemill-0.10.1~saucy9/node_modules/node-gyp/addon.gypi 2013-08-01 16:36:39.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/addon.gypi 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -{ - 'target_defaults': { - 'type': 'loadable_module', - 'product_prefix': '', - 'include_dirs': [ - '<(node_root_dir)/src', - '<(node_root_dir)/deps/uv/include', - '<(node_root_dir)/deps/v8/include' - ], - - 'target_conditions': [ - ['_type=="loadable_module"', { - 'product_extension': 'node', - 'defines': [ 'BUILDING_NODE_EXTENSION' ], - }], - ['_type=="static_library"', { - # set to `1` to *disable* the -T thin archive 'ld' flag. - # older linkers don't support this flag. - 'standalone_static_library': '<(standalone_static_library)' - }], - ], - - 'conditions': [ - [ 'OS=="mac"', { - 'defines': [ '_DARWIN_USE_64_BIT_INODE=1' ], - 'libraries': [ '-undefined dynamic_lookup' ], - 'xcode_settings': { - 'DYLIB_INSTALL_NAME_BASE': '@rpath' - }, - }], - [ 'OS=="win"', { - 'libraries': [ - '-lkernel32.lib', - '-luser32.lib', - '-lgdi32.lib', - '-lwinspool.lib', - '-lcomdlg32.lib', - '-ladvapi32.lib', - '-lshell32.lib', - '-lole32.lib', - '-loleaut32.lib', - '-luuid.lib', - '-lodbc32.lib', - '-lDelayImp.lib', - '-l<(node_root_dir)/$(Configuration)/node.lib' - ], - # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent' - # needs to have dll-interface to be used by clients of class 'node::ObjectWrap' - 'msvs_disabled_warnings': [ 4251 ], - }, { - # OS!="win" - 'defines': [ '_LARGEFILE_SOURCE', '_FILE_OFFSET_BITS=64' ], - }], - [ 'OS=="freebsd" or OS=="openbsd" or OS=="solaris" or (OS=="linux" and target_arch!="ia32")', { - 'cflags': [ '-fPIC' ], - }] - ] - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/bin/node-gyp.js tilemill-0.10.1~saucy10/node_modules/node-gyp/bin/node-gyp.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/bin/node-gyp.js 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/bin/node-gyp.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,133 +0,0 @@ -#!/usr/bin/env node - -/** - * Set the title. - */ - -process.title = 'node-gyp' - -/** - * Module dependencies. - */ - -var gyp = require('../') -var log = require('npmlog') - -/** - * Process and execute the selected commands. 
- */ - -var prog = gyp() -var completed = false -prog.parseArgv(process.argv) - -if (prog.todo.length === 0) { - if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { - console.log('v%s', prog.version) - } else { - console.log('%s', prog.usage()) - } - return process.exit(0) -} - -log.info('it worked if it ends with', 'ok') -log.verbose('cli', process.argv) -log.info('using', 'node-gyp@%s', prog.version) -log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch) - - -/** - * Change dir if -C/--directory was passed. - */ - -var dir = prog.opts.directory -if (dir) { - var fs = require('fs') - try { - var stat = fs.statSync(dir) - if (stat.isDirectory()) { - log.info('chdir', dir) - process.chdir(dir) - } else { - log.warn('chdir', dir + ' is not a directory') - } - } catch (e) { - if (e.code === 'ENOENT') { - log.warn('chdir', dir + ' is not a directory') - } else { - log.warn('chdir', 'error during chdir() "%s"', e.message) - } - } -} - -function run () { - var command = prog.todo.shift() - if (!command) { - // done! - completed = true - log.info('ok') - return - } - - prog.commands[command.name](command.args, function (err) { - if (err) { - log.error(command.name + ' error') - log.error('stack', err.stack) - errorMessage() - log.error('not ok') - return process.exit(1) - } - if (command.name == 'list') { - var versions = arguments[1] - if (versions.length > 0) { - versions.forEach(function (version) { - console.log(version) - }) - } else { - console.log('No node development files installed. Use `node-gyp install` to install a version.') - } - } else if (arguments.length >= 2) { - console.log.apply(console, [].slice.call(arguments, 1)) - } - - // now run the next command in the queue - process.nextTick(run) - }) -} - -process.on('exit', function (code) { - if (!completed && !code) { - log.error('Completion callback never invoked!') - issueMessage() - process.exit(6) - } -}) - -process.on('uncaughtException', function (err) { - log.error('UNCAUGHT EXCEPTION') - log.error('stack', err.stack) - issueMessage() - process.exit(7) -}) - -function errorMessage () { - // copied from npm's lib/util/error-handler.js - var os = require('os') - log.error('System', os.type() + ' ' + os.release()) - log.error('command', process.argv - .map(JSON.stringify).join(' ')) - log.error('cwd', process.cwd()) - log.error('node -v', process.version) - log.error('node-gyp -v', 'v' + prog.package.version) -} - -function issueMessage () { - errorMessage() - log.error('', [ 'This is a bug in `node-gyp`.' - , 'Try to update node-gyp and file an Issue if it does not help:' - , ' ' - ].join('\n')) -} - -// start running the given commands! -run() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/.npmignore 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -*.pyc diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/AUTHORS tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/AUTHORS --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/AUTHORS 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/AUTHORS 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -# Names should be added to this file like so: -# Name or Organization - -Google Inc. -Bloomberg Finance L.P. 
- -Steven Knight -Ryan Norton diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/DEPS tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/DEPS --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/DEPS 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/DEPS 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -# DEPS file for gclient use in buildbot execution of gyp tests. -# -# (You don't need to use gclient for normal GYP development work.) - -vars = { - "chrome_trunk": "http://src.chromium.org/svn/trunk", - "googlecode_url": "http://%s.googlecode.com/svn", -} - -deps = { - "scons": - Var("chrome_trunk") + "/src/third_party/scons@44099", -} - -deps_os = { - "win": { - "third_party/cygwin": - Var("chrome_trunk") + "/deps/third_party/cygwin@66844", - - "third_party/python_26": - Var("chrome_trunk") + "/tools/third_party/python_26@89111", - - "src/third_party/pefile": - (Var("googlecode_url") % "pefile") + "/trunk@63", - }, -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/LICENSE 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) 2009 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/MANIFEST tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/MANIFEST --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/MANIFEST 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/MANIFEST 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -setup.py -gyp -LICENSE -AUTHORS -pylib/gyp/MSVSNew.py -pylib/gyp/MSVSProject.py -pylib/gyp/MSVSToolFile.py -pylib/gyp/MSVSUserFile.py -pylib/gyp/MSVSVersion.py -pylib/gyp/SCons.py -pylib/gyp/__init__.py -pylib/gyp/common.py -pylib/gyp/input.py -pylib/gyp/xcodeproj_file.py -pylib/gyp/generator/__init__.py -pylib/gyp/generator/gypd.py -pylib/gyp/generator/gypsh.py -pylib/gyp/generator/make.py -pylib/gyp/generator/msvs.py -pylib/gyp/generator/scons.py -pylib/gyp/generator/xcode.py diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/OWNERS tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/OWNERS --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/OWNERS 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/OWNERS 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -* diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/PRESUBMIT.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/PRESUBMIT.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/PRESUBMIT.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/PRESUBMIT.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,116 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -"""Top-level presubmit script for GYP. - -See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts -for more details about the presubmit API built into gcl. -""" - - -PYLINT_BLACKLIST = [ - # TODO: fix me. - # From SCons, not done in google style. - 'test/lib/TestCmd.py', - 'test/lib/TestCommon.py', - 'test/lib/TestGyp.py', - # Needs style fix. - 'pylib/gyp/generator/scons.py', - 'pylib/gyp/generator/xcode.py', -] - - -PYLINT_DISABLED_WARNINGS = [ - # TODO: fix me. - # Many tests include modules they don't use. - 'W0611', - # Include order doesn't properly include local files? - 'F0401', - # Some use of built-in names. - 'W0622', - # Some unused variables. - 'W0612', - # Operator not preceded/followed by space. - 'C0323', - 'C0322', - # Unnecessary semicolon. - 'W0301', - # Unused argument. - 'W0613', - # String has no effect (docstring in wrong place). - 'W0105', - # Comma not followed by space. - 'C0324', - # Access to a protected member. - 'W0212', - # Bad indent. - 'W0311', - # Line too long. - 'C0301', - # Undefined variable. - 'E0602', - # Not exception type specified. - 'W0702', - # No member of that name. - 'E1101', - # Dangerous default {}. - 'W0102', - # Others, too many to sort. - 'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231', - 'R0201', 'E0101', 'C0321', - # ************* Module copy - # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect - 'W0104', -] - - -def CheckChangeOnUpload(input_api, output_api): - report = [] - report.extend(input_api.canned_checks.PanProjectChecks( - input_api, output_api)) - return report - - -def CheckChangeOnCommit(input_api, output_api): - report = [] - - # Accept any year number from 2009 to the current year. 
- current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) - years_re = '(' + '|'.join(allowed_years) + ')' - - # The (c) is deprecated, but tolerate it until it's removed from all files. - license = ( - r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n' - r'.*? Use of this source code is governed by a BSD-style license that ' - r'can be\n' - r'.*? found in the LICENSE file\.\n' - ) % { - 'year': years_re, - } - - report.extend(input_api.canned_checks.PanProjectChecks( - input_api, output_api, license_header=license)) - report.extend(input_api.canned_checks.CheckTreeIsOpen( - input_api, output_api, - 'http://gyp-status.appspot.com/status', - 'http://gyp-status.appspot.com/current')) - - import sys - old_sys_path = sys.path - try: - sys.path = ['pylib', 'test/lib'] + sys.path - report.extend(input_api.canned_checks.RunPylint( - input_api, - output_api, - black_list=PYLINT_BLACKLIST, - disabled_warnings=PYLINT_DISABLED_WARNINGS)) - finally: - sys.path = old_sys_path - return report - - -def GetPreferredTrySlaves(): - return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android'] diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/codereview.settings tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/codereview.settings --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/codereview.settings 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/codereview.settings 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -# This file is used by gcl to get repository specific information. -CODE_REVIEW_SERVER: codereview.chromium.org -CC_LIST: gyp-developer@googlegroups.com -VIEW_VC: http://code.google.com/p/gyp/source/detail?r= -TRY_ON_UPLOAD: True -TRYSERVER_PROJECT: gyp -TRYSERVER_PATCHLEVEL: 0 -TRYSERVER_ROOT: trunk -TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -// Copyright (c) 2013 Google Inc. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is -// then used during the final link for modules that have large PDBs. Otherwise, -// the linker will generate a pdb with a page size of 1KB, which imposes a limit -// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler -// (rather than the linker), this limit is avoided. With this in place PDBs may -// grow to 2GB. -// -// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyp tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyp --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyp 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyp 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import sys - -# TODO(mark): sys.path manipulation is some temporary testing stuff. -try: - import gyp -except ImportError, e: - import os.path - sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) - import gyp - -if __name__ == '__main__': - sys.exit(gyp.main(sys.argv[1:])) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyp.bat tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyp.bat --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyp.bat 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyp.bat 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0/gyp" %* diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyp_dummy.c tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyp_dummy.c --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyp_dummy.c 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyp_dummy.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -int main() { - return 0; -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyptest.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyptest.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/gyptest.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/gyptest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,267 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -__doc__ = """ -gyptest.py -- test runner for GYP tests. -""" - -import os -import optparse -import subprocess -import sys - -class CommandRunner: - """ - Executor class for commands, including "commands" implemented by - Python functions. - """ - verbose = True - active = True - - def __init__(self, dictionary={}): - self.subst_dictionary(dictionary) - - def subst_dictionary(self, dictionary): - self._subst_dictionary = dictionary - - def subst(self, string, dictionary=None): - """ - Substitutes (via the format operator) the values in the specified - dictionary into the specified command. - - The command can be an (action, string) tuple. In all cases, we - perform substitution on strings and don't worry if something isn't - a string. (It's probably a Python function to be executed.) - """ - if dictionary is None: - dictionary = self._subst_dictionary - if dictionary: - try: - string = string % dictionary - except TypeError: - pass - return string - - def display(self, command, stdout=None, stderr=None): - if not self.verbose: - return - if type(command) == type(()): - func = command[0] - args = command[1:] - s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args))) - if type(command) == type([]): - # TODO: quote arguments containing spaces - # TODO: handle meta characters? - s = ' '.join(command) - else: - s = self.subst(command) - if not s.endswith('\n'): - s += '\n' - sys.stdout.write(s) - sys.stdout.flush() - - def execute(self, command, stdout=None, stderr=None): - """ - Executes a single command. 
- """ - if not self.active: - return 0 - if type(command) == type(''): - command = self.subst(command) - cmdargs = shlex.split(command) - if cmdargs[0] == 'cd': - command = (os.chdir,) + tuple(cmdargs[1:]) - if type(command) == type(()): - func = command[0] - args = command[1:] - return func(*args) - else: - if stdout is sys.stdout: - # Same as passing sys.stdout, except python2.4 doesn't fail on it. - subout = None - else: - # Open pipe for anything else so Popen works on python2.4. - subout = subprocess.PIPE - if stderr is sys.stderr: - # Same as passing sys.stderr, except python2.4 doesn't fail on it. - suberr = None - elif stderr is None: - # Merge with stdout if stderr isn't specified. - suberr = subprocess.STDOUT - else: - # Open pipe for anything else so Popen works on python2.4. - suberr = subprocess.PIPE - p = subprocess.Popen(command, - shell=(sys.platform == 'win32'), - stdout=subout, - stderr=suberr) - p.wait() - if stdout is None: - self.stdout = p.stdout.read() - elif stdout is not sys.stdout: - stdout.write(p.stdout.read()) - if stderr not in (None, sys.stderr): - stderr.write(p.stderr.read()) - return p.returncode - - def run(self, command, display=None, stdout=None, stderr=None): - """ - Runs a single command, displaying it first. - """ - if display is None: - display = command - self.display(display) - return self.execute(command, stdout, stderr) - - -class Unbuffered: - def __init__(self, fp): - self.fp = fp - def write(self, arg): - self.fp.write(arg) - self.fp.flush() - def __getattr__(self, attr): - return getattr(self.fp, attr) - -sys.stdout = Unbuffered(sys.stdout) -sys.stderr = Unbuffered(sys.stderr) - - -def find_all_gyptest_files(directory): - result = [] - for root, dirs, files in os.walk(directory): - if '.svn' in dirs: - dirs.remove('.svn') - result.extend([ os.path.join(root, f) for f in files - if f.startswith('gyptest') and f.endswith('.py') ]) - result.sort() - return result - - -def main(argv=None): - if argv is None: - argv = sys.argv - - usage = "gyptest.py [-ahlnq] [-f formats] [test ...]" - parser = optparse.OptionParser(usage=usage) - parser.add_option("-a", "--all", action="store_true", - help="run all tests") - parser.add_option("-C", "--chdir", action="store", default=None, - help="chdir to the specified directory") - parser.add_option("-f", "--format", action="store", default='', - help="run tests with the specified formats") - parser.add_option("-G", '--gyp_option', action="append", default=[], - help="Add -G options to the gyp command line") - parser.add_option("-l", "--list", action="store_true", - help="list available tests and exit") - parser.add_option("-n", "--no-exec", action="store_true", - help="no execute, just print the command line") - parser.add_option("--passed", action="store_true", - help="report passed tests") - parser.add_option("--path", action="append", default=[], - help="additional $PATH directory") - parser.add_option("-q", "--quiet", action="store_true", - help="quiet, don't print test command lines") - opts, args = parser.parse_args(argv[1:]) - - if opts.chdir: - os.chdir(opts.chdir) - - if opts.path: - extra_path = [os.path.abspath(p) for p in opts.path] - extra_path = os.pathsep.join(extra_path) - os.environ['PATH'] += os.pathsep + extra_path - - if not args: - if not opts.all: - sys.stderr.write('Specify -a to get all tests.\n') - return 1 - args = ['test'] - - tests = [] - for arg in args: - if os.path.isdir(arg): - tests.extend(find_all_gyptest_files(os.path.normpath(arg))) - else: - tests.append(arg) - - if 
opts.list: - for test in tests: - print test - sys.exit(0) - - CommandRunner.verbose = not opts.quiet - CommandRunner.active = not opts.no_exec - cr = CommandRunner() - - os.environ['PYTHONPATH'] = os.path.abspath('test/lib') - if not opts.quiet: - sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH']) - - passed = [] - failed = [] - no_result = [] - - if opts.format: - format_list = opts.format.split(',') - else: - # TODO: not duplicate this mapping from pylib/gyp/__init__.py - format_list = { - 'freebsd7': ['make'], - 'freebsd8': ['make'], - 'openbsd5': ['make'], - 'cygwin': ['msvs'], - 'win32': ['msvs', 'ninja'], - 'linux2': ['make', 'ninja'], - 'linux3': ['make', 'ninja'], - 'darwin': ['make', 'ninja', 'xcode'], - }[sys.platform] - - for format in format_list: - os.environ['TESTGYP_FORMAT'] = format - if not opts.quiet: - sys.stdout.write('TESTGYP_FORMAT=%s\n' % format) - - gyp_options = [] - for option in opts.gyp_option: - gyp_options += ['-G', option] - if gyp_options and not opts.quiet: - sys.stdout.write('Extra Gyp options: %s\n' % gyp_options) - - for test in tests: - status = cr.run([sys.executable, test] + gyp_options, - stdout=sys.stdout, - stderr=sys.stderr) - if status == 2: - no_result.append(test) - elif status: - failed.append(test) - else: - passed.append(test) - - if not opts.quiet: - def report(description, tests): - if tests: - if len(tests) == 1: - sys.stdout.write("\n%s the following test:\n" % description) - else: - fmt = "\n%s the following %d tests:\n" - sys.stdout.write(fmt % (description, len(tests))) - sys.stdout.write("\t" + "\n\t".join(tests) + "\n") - - if opts.passed: - report("Passed", passed) - report("Failed", failed) - report("No result from", no_result) - - if failed: - return 1 - else: - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,339 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""New implementation of Visual Studio project generation for SCons.""" - -import os -import random - -import gyp.common - -# hashlib is supplied as of Python 2.5 as the replacement interface for md5 -# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import md5 otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_md5 = hashlib.md5 -except ImportError: - import md5 - _new_md5 = md5.new - - -# Initialize random number generator -random.seed() - -# GUIDs for project types -ENTRY_TYPE_GUIDS = { - 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}', - 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}', -} - -#------------------------------------------------------------------------------ -# Helper functions - - -def MakeGuid(name, seed='msvs_new'): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. 
This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. - d = _new_md5(str(seed) + str(name)).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] - + '-' + d[20:32] + '}') - return guid - -#------------------------------------------------------------------------------ - - -class MSVSSolutionEntry(object): - def __cmp__(self, other): - # Sort by name then guid (so things are in order on vs2008). - return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) - - -class MSVSFolder(MSVSSolutionEntry): - """Folder in a Visual Studio project or solution.""" - - def __init__(self, path, name = None, entries = None, - guid = None, items = None): - """Initializes the folder. - - Args: - path: Full path to the folder. - name: Name of the folder. - entries: List of folder entries to nest inside this folder. May contain - Folder or Project objects. May be None, if the folder is empty. - guid: GUID to use for folder, if not None. - items: List of solution items to include in the folder project. May be - None, if the folder does not directly contain items. - """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - - self.path = path - self.guid = guid - - # Copy passed lists (or set to empty lists) - self.entries = sorted(list(entries or [])) - self.items = list(items or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). - self.guid = MakeGuid(self.path, seed='msvs_folder') - return self.guid - - -#------------------------------------------------------------------------------ - - -class MSVSProject(MSVSSolutionEntry): - """Visual Studio project.""" - - def __init__(self, path, name = None, dependencies = None, guid = None, - spec = None, build_file = None, config_platform_overrides = None, - fixpath_prefix = None): - """Initializes the project. - - Args: - path: Absolute path to the project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. - dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. - spec: Dictionary specifying how to build this project. - build_file: Filename of the .gyp file that the vcproj file comes from. - config_platform_overrides: optional dict of configuration platforms to - used in place of the default for this target. 
- fixpath_prefix: the path used to adjust the behavior of _fixpath - """ - self.path = path - self.guid = guid - self.spec = spec - self.build_file = build_file - # Use project filename if name not specified - self.name = name or os.path.splitext(os.path.basename(path))[0] - - # Copy passed lists (or set to empty lists) - self.dependencies = list(dependencies or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['project'] - - if config_platform_overrides: - self.config_platform_overrides = config_platform_overrides - else: - self.config_platform_overrides = {} - self.fixpath_prefix = fixpath_prefix - self.msbuild_toolset = None - - def set_dependencies(self, dependencies): - self.dependencies = list(dependencies or []) - - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. - self.guid = MakeGuid(self.name) - return self.guid - - def set_msbuild_toolset(self, msbuild_toolset): - self.msbuild_toolset = msbuild_toolset - -#------------------------------------------------------------------------------ - - -class MSVSSolution: - """Visual Studio solution.""" - - def __init__(self, path, version, entries=None, variants=None, - websiteProperties=True): - """Initializes the solution. - - Args: - path: Path to solution file. - version: Format version to emit. - entries: List of entries in solution. May contain Folder or Project - objects. May be None, if the folder is empty. - variants: List of build variant strings. If none, a default list will - be used. - websiteProperties: Flag to decide if the website properties section - is generated. - """ - self.path = path - self.websiteProperties = websiteProperties - self.version = version - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - - if variants: - # Copy passed list - self.variants = variants[:] - else: - # Use default - self.variants = ['Debug|Win32', 'Release|Win32'] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. - - - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. - self.Write() - - - def Write(self, writer=gyp.common.WriteOnDiff): - """Writes the solution file to disk. - - Raises: - IndexError: An entry appears multiple times. - """ - # Walk the entry tree and collect all the folders and projects. - all_entries = set() - entries_to_check = self.entries[:] - while entries_to_check: - e = entries_to_check.pop(0) - - # If this entry has been visited, nothing to do. - if e in all_entries: - continue - - all_entries.add(e) - - # If this is a folder, check its entries too. 
- if isinstance(e, MSVSFolder): - entries_to_check += e.entries - - all_entries = sorted(all_entries) - - # Open file and print header - f = writer(self.path) - f.write('Microsoft Visual Studio Solution File, ' - 'Format Version %s\r\n' % self.version.SolutionVersion()) - f.write('# %s\r\n' % self.version.Description()) - - # Project entries - sln_root = os.path.split(self.path)[0] - for e in all_entries: - relative_path = gyp.common.RelativePath(e.path, sln_root) - # msbuild does not accept an empty folder_name. - # use '.' in case relative_path is empty. - folder_name = relative_path.replace('/', '\\') or '.' - f.write('Project("%s") = "%s", "%s", "%s"\r\n' % ( - e.entry_type_guid, # Entry type GUID - e.name, # Folder name - folder_name, # Folder name (again) - e.get_guid(), # Entry GUID - )) - - # TODO(rspangler): Need a way to configure this stuff - if self.websiteProperties: - f.write('\tProjectSection(WebsiteProperties) = preProject\r\n' - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - '\tEndProjectSection\r\n') - - if isinstance(e, MSVSFolder): - if e.items: - f.write('\tProjectSection(SolutionItems) = preProject\r\n') - for i in e.items: - f.write('\t\t%s = %s\r\n' % (i, i)) - f.write('\tEndProjectSection\r\n') - - if isinstance(e, MSVSProject): - if e.dependencies: - f.write('\tProjectSection(ProjectDependencies) = postProject\r\n') - for d in e.dependencies: - f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid())) - f.write('\tEndProjectSection\r\n') - - f.write('EndProject\r\n') - - # Global section - f.write('Global\r\n') - - # Configurations (variants) - f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n') - for v in self.variants: - f.write('\t\t%s = %s\r\n' % (v, v)) - f.write('\tEndGlobalSection\r\n') - - # Sort config guids for easier diffing of solution changes. - config_guids = [] - config_guids_overrides = {} - for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides - config_guids.sort() - - f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n') - for g in config_guids: - for v in self.variants: - nv = config_guids_overrides[g].get(v, v) - # Pick which project configuration to build for this solution - # configuration. - f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - - # Enable project in this solution configuration. 
- f.write('\t\t%s.%s.Build.0 = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - f.write('\tEndGlobalSection\r\n') - - # TODO(rspangler): Should be able to configure this stuff too (though I've - # never seen this be any different) - f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n') - f.write('\t\tHideSolutionNode = FALSE\r\n') - f.write('\tEndGlobalSection\r\n') - - # Folder mappings - # TODO(rspangler): Should omit this section if there are no folders - f.write('\tGlobalSection(NestedProjects) = preSolution\r\n') - for e in all_entries: - if not isinstance(e, MSVSFolder): - continue # Does not apply to projects, only folders - for subentry in e.entries: - f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid())) - f.write('\tEndGlobalSection\r\n') - - f.write('EndGlobal\r\n') - - f.close() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,208 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.common -import gyp.easy_xml as easy_xml - -#------------------------------------------------------------------------------ - - -class Tool(object): - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self._attrs = attrs or {} - self._attrs['Name'] = name - - def _GetSpecification(self): - """Creates an element for the tool. - - Returns: - A new xml.dom.Element for the tool. - """ - return ['Tool', self._attrs] - -class Filter(object): - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version, name, guid=None, platforms=None): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - name: Name of the project. - guid: GUID to use for project, if not None. - platforms: Array of string, the supported platforms. If null, ['Win32'] - """ - self.project_path = project_path - self.version = version - self.name = name - self.guid = guid - - # Default to Win32 for platforms. - if not platforms: - platforms = ['Win32'] - - # Initialize the specifications of the various sections. - self.platform_section = ['Platforms'] - for platform in platforms: - self.platform_section.append(['Platform', {'Name': platform}]) - self.tool_files_section = ['ToolFiles'] - self.configurations_section = ['Configurations'] - self.files_section = ['Files'] - - # Keep a dict keyed on filename to speed up access. 
- self.files_dict = dict() - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - self.tool_files_section.append(['ToolFile', {'RelativePath': path}]) - - def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): - """Returns the specification for a configuration. - - Args: - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - Returns: - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - node_attrs = attrs.copy() - node_attrs['Name'] = config_name - specification = [config_type, node_attrs] - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - specification.append(t._GetSpecification()) - else: - specification.append(Tool(t)._GetSpecification()) - return specification - - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools) - self.configurations_section.append(spec) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. - - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. - """ - for f in files: - if isinstance(f, Filter): - node = ['Filter', {'Name': f.name}] - self._AddFilesToNode(node, f.contents) - else: - node = ['File', {'RelativePath': f}] - self.files_dict[f] = node - parent.append(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.files_section, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). - """ - # Find the file node with the right relative path - parent = self.files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' 
% path) - - # Add the config to the file node - spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs, - tools) - parent.append(spec) - - def WriteIfChanged(self): - """Writes the project file.""" - # First create XML content definition - content = [ - 'VisualStudioProject', - {'ProjectType': 'Visual C++', - 'Version': self.version.ProjectVersion(), - 'Name': self.name, - 'ProjectGUID': self.guid, - 'RootNamespace': self.name, - 'Keyword': 'Win32Proj' - }, - self.platform_section, - self.tool_files_section, - self.configurations_section, - ['References'], # empty section - self.files_section, - ['Globals'] # empty section - ] - easy_xml.WriteXmlIfChanged(content, self.project_path, - encoding="Windows-1252") diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1046 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Code to validate and convert settings of the Microsoft build tools. - -This file contains code to validate and convert settings of the Microsoft -build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), -and ValidateMSBuildSettings() are the entry points. - -This file was created by comparing the projects created by Visual Studio 2008 -and Visual Studio 2010 for all available settings through the user interface. -The MSBuild schemas were also considered. They are typically found in the -MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild -""" - -import sys -import re - -# Dictionaries of settings validators. The key is the tool name, the value is -# a dictionary mapping setting names to validation functions. -_msvs_validators = {} -_msbuild_validators = {} - - -# A dictionary of settings converters. The key is the tool name, the value is -# a dictionary mapping setting names to conversion functions. -_msvs_to_msbuild_converters = {} - - -# Tool name mapping from MSVS to MSBuild. -_msbuild_name_of_tool = {} - - -class _Tool(object): - """Represents a tool used by MSVS or MSBuild. - - Attributes: - msvs_name: The name of the tool in MSVS. - msbuild_name: The name of the tool in MSBuild. - """ - - def __init__(self, msvs_name, msbuild_name): - self.msvs_name = msvs_name - self.msbuild_name = msbuild_name - - -def _AddTool(tool): - """Adds a tool to the four dictionaries used to process settings. - - This only defines the tool. Each setting also needs to be added. - - Args: - tool: The _Tool object to be added. - """ - _msvs_validators[tool.msvs_name] = {} - _msbuild_validators[tool.msbuild_name] = {} - _msvs_to_msbuild_converters[tool.msvs_name] = {} - _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name - - -def _GetMSBuildToolSettings(msbuild_settings, tool): - """Returns an MSBuild tool dictionary. Creates it if needed.""" - return msbuild_settings.setdefault(tool.msbuild_name, {}) - - -class _Type(object): - """Type of settings (Base class).""" - - def ValidateMSVS(self, value): - """Verifies that the value is legal for MSVS. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSVS. 
- """ - - def ValidateMSBuild(self, value): - """Verifies that the value is legal for MSBuild. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSBuild. - """ - - def ConvertToMSBuild(self, value): - """Returns the MSBuild equivalent of the MSVS value given. - - Args: - value: the MSVS value to convert. - - Returns: - the MSBuild equivalent. - - Raises: - ValueError if value is not valid. - """ - return value - - -class _String(_Type): - """A setting that's just a string.""" - - def ValidateMSVS(self, value): - if not isinstance(value, basestring): - raise ValueError('expected string; got %r' % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, basestring): - raise ValueError('expected string; got %r' % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - return ConvertVCMacrosToMSBuild(value) - - -class _StringList(_Type): - """A settings that's a list of strings.""" - - def ValidateMSVS(self, value): - if not isinstance(value, basestring) and not isinstance(value, list): - raise ValueError('expected string list; got %r' % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, basestring) and not isinstance(value, list): - raise ValueError('expected string list; got %r' % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - if isinstance(value, list): - return [ConvertVCMacrosToMSBuild(i) for i in value] - else: - return ConvertVCMacrosToMSBuild(value) - - -class _Boolean(_Type): - """Boolean settings, can have the values 'false' or 'true'.""" - - def _Validate(self, value): - if value != 'true' and value != 'false': - raise ValueError('expected bool; got %r' % value) - - def ValidateMSVS(self, value): - self._Validate(value) - - def ValidateMSBuild(self, value): - self._Validate(value) - - def ConvertToMSBuild(self, value): - self._Validate(value) - return value - - -class _Integer(_Type): - """Integer settings.""" - - def __init__(self, msbuild_base=10): - _Type.__init__(self) - self._msbuild_base = msbuild_base - - def ValidateMSVS(self, value): - # Try to convert, this will raise ValueError if invalid. - self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - # Try to convert, this will raise ValueError if invalid. - int(value, self._msbuild_base) - - def ConvertToMSBuild(self, value): - msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x' - return msbuild_format % int(value) - - -class _Enumeration(_Type): - """Type of settings that is an enumeration. - - In MSVS, the values are indexes like '0', '1', and '2'. - MSBuild uses text labels that are more representative, like 'Win32'. - - Constructor args: - label_list: an array of MSBuild labels that correspond to the MSVS index. - In the rare cases where MSVS has skipped an index value, None is - used in the array to indicate the unused spot. - new: an array of labels that are new to MSBuild. - """ - - def __init__(self, label_list, new=None): - _Type.__init__(self) - self._label_list = label_list - self._msbuild_values = set(value for value in label_list - if value is not None) - if new is not None: - self._msbuild_values.update(new) - - def ValidateMSVS(self, value): - # Try to convert. It will raise an exception if not valid. 
- self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - if value not in self._msbuild_values: - raise ValueError('unrecognized enumerated value %s' % value) - - def ConvertToMSBuild(self, value): - index = int(value) - if index < 0 or index >= len(self._label_list): - raise ValueError('index value (%d) not in expected range [0, %d)' % - (index, len(self._label_list))) - label = self._label_list[index] - if label is None: - raise ValueError('converted value for %s not specified.' % value) - return label - - -# Instantiate the various generic types. -_boolean = _Boolean() -_integer = _Integer() -# For now, we don't do any special validation on these types: -_string = _String() -_file_name = _String() -_folder_name = _String() -_file_list = _StringList() -_folder_list = _StringList() -_string_list = _StringList() -# Some boolean settings went from numerical values to boolean. The -# mapping is 0: default, 1: false, 2: true. -_newly_boolean = _Enumeration(['', 'false', 'true']) - - -def _Same(tool, name, setting_type): - """Defines a setting that has the same name in MSVS and MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - _Renamed(tool, name, name, setting_type) - - -def _Renamed(tool, msvs_name, msbuild_name, setting_type): - """Defines a setting for which the name has changed. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting. - msbuild_name: the name of the MSBuild setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS - _msbuild_validators[tool.msbuild_name][msbuild_name] = ( - setting_type.ValidateMSBuild) - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _Moved(tool, settings_name, msbuild_tool_name, setting_type): - _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name, - setting_type) - - -def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name, - msbuild_settings_name, setting_type): - """Defines a setting that may have moved to a new section. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_settings_name: the MSVS name of the setting. - msbuild_tool_name: the name of the MSBuild tool to place the setting under. - msbuild_settings_name: the MSBuild name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) - tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_settings_name] = ( - setting_type.ValidateMSVS) - validator = setting_type.ValidateMSBuild - _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate - - -def _MSVSOnly(tool, name, setting_type): - """Defines a setting that is only found in MSVS. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. 
- """ - - def _Translate(unused_value, unused_msbuild_settings): - # Since this is for MSVS only settings, no translation will happen. - pass - - _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate - - -def _MSBuildOnly(tool, name, setting_type): - """Defines a setting that is only found in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild - - -def _ConvertedToAdditionalOption(tool, msvs_name, flag): - """Defines a setting that's handled via a command line option in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting that if 'true' becomes a flag - flag: the flag to insert at the end of the AdditionalOptions - """ - - def _Translate(value, msbuild_settings): - if value == 'true': - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if 'AdditionalOptions' in tool_settings: - new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag) - else: - new_flags = flag - tool_settings['AdditionalOptions'] = new_flags - _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _CustomGeneratePreprocessedFile(tool, msvs_name): - def _Translate(value, msbuild_settings): - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if value == '0': - tool_settings['PreprocessToFile'] = 'false' - tool_settings['PreprocessSuppressLineNumbers'] = 'false' - elif value == '1': # /P - tool_settings['PreprocessToFile'] = 'true' - tool_settings['PreprocessSuppressLineNumbers'] = 'false' - elif value == '2': # /EP /P - tool_settings['PreprocessToFile'] = 'true' - tool_settings['PreprocessSuppressLineNumbers'] = 'true' - else: - raise ValueError('value must be one of [0, 1, 2]; got %s' % value) - # Create a bogus validator that looks for '0', '1', or '2' - msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS - _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator - msbuild_validator = _boolean.ValidateMSBuild - msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] - msbuild_tool_validators['PreprocessToFile'] = msbuild_validator - msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir') -fix_vc_macro_slashes_regex = re.compile( - r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list) -) - -def FixVCMacroSlashes(s): - """Replace macros which have excessive following slashes. - - These macros are known to have a built-in trailing slash. Furthermore, many - scripts hiccup on processing paths with extra slashes in the middle. - - This list is probably not exhaustive. Add as needed. - """ - if '$' in s: - s = fix_vc_macro_slashes_regex.sub(r'\1', s) - return s - - -def ConvertVCMacrosToMSBuild(s): - """Convert the the MSVS macros found in the string to the MSBuild equivalent. - - This list is probably not exhaustive. Add as needed. 
- """ - if '$' in s: - replace_map = { - '$(ConfigurationName)': '$(Configuration)', - '$(InputDir)': '%(RootDir)%(Directory)', - '$(InputExt)': '%(Extension)', - '$(InputFileName)': '%(Filename)%(Extension)', - '$(InputName)': '%(Filename)', - '$(InputPath)': '%(FullPath)', - '$(ParentName)': '$(ProjectFileName)', - '$(PlatformName)': '$(Platform)', - '$(SafeInputName)': '%(Filename)', - } - for old, new in replace_map.iteritems(): - s = s.replace(old, new) - s = FixVCMacroSlashes(s) - return s - - -def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): - """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). - - Args: - msvs_settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - - Returns: - A dictionary of MSBuild settings. The key is either the MSBuild tool name - or the empty string (for the global settings). The values are themselves - dictionaries of settings and their values. - """ - msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): - if msvs_tool_name in _msvs_to_msbuild_converters: - msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): - if msvs_setting in msvs_tool: - # Invoke the translation function. - try: - msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError, e: - print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e)) - else: - # We don't know this setting. Give a warning. - print >> stderr, ('Warning: unrecognized setting %s/%s ' - 'while converting to MSBuild.' % - (msvs_tool_name, msvs_setting)) - else: - print >> stderr, ('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name) - return msbuild_settings - - -def ValidateMSVSSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSVS. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msvs_validators, settings, stderr) - - -def ValidateMSBuildSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSBuild. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msbuild_validators, settings, stderr) - - -def _ValidateSettings(validators, settings, stderr): - """Validates that the settings are valid for MSBuild or MSVS. - - We currently only validate the names of the settings, not their values. - - Args: - validators: A dictionary of tools and their validators. - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. 
- """ - for tool_name in settings: - if tool_name in validators: - tool_validators = validators[tool_name] - for setting, value in settings[tool_name].iteritems(): - if setting in tool_validators: - try: - tool_validators[setting](value) - except ValueError, e: - print >> stderr, ('Warning: for %s/%s, %s' % - (tool_name, setting, e)) - else: - print >> stderr, ('Warning: unrecognized setting %s/%s' % - (tool_name, setting)) - else: - print >> stderr, ('Warning: unrecognized tool %s' % tool_name) - - -# MSVS and MBuild names of the tools. -_compile = _Tool('VCCLCompilerTool', 'ClCompile') -_link = _Tool('VCLinkerTool', 'Link') -_midl = _Tool('VCMIDLTool', 'Midl') -_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile') -_lib = _Tool('VCLibrarianTool', 'Lib') -_manifest = _Tool('VCManifestTool', 'Manifest') - - -_AddTool(_compile) -_AddTool(_link) -_AddTool(_midl) -_AddTool(_rc) -_AddTool(_lib) -_AddTool(_manifest) -# Add sections only found in the MSBuild settings. -_msbuild_validators[''] = {} -_msbuild_validators['ProjectReference'] = {} -_msbuild_validators['ManifestResourceCompile'] = {} - -# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and -# ClCompile in MSBuild. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for -# the schema of the MSBuild ClCompile settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I -_Same(_compile, 'AdditionalOptions', _string_list) -_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI -_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa -_Same(_compile, 'BrowseInformationFile', _file_name) -_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS -_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za -_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd -_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT -_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false' -_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx -_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except -_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope -_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI -_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU -_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc -_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X -_Same(_compile, 'MinimalRebuild', _boolean) # /Gm -_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl -_Same(_compile, 'OmitFramePointers', _boolean) # /Oy -_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D -_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd -_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR -_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes -_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc -_Same(_compile, 'StringPooling', _boolean) # /GF -_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t -_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u -_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U -_Same(_compile, 'UseFullPaths', _boolean) # /FC -_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL -_Same(_compile, 'XMLDocumentationFileName', _file_name) - -_Same(_compile, 'AssemblerOutput', - _Enumeration(['NoListing', - 'AssemblyCode', # /FA - 'All', # /FAcs - 'AssemblyAndMachineCode', # /FAc - 
'AssemblyAndSourceCode'])) # /FAs -_Same(_compile, 'BasicRuntimeChecks', - _Enumeration(['Default', - 'StackFrameRuntimeCheck', # /RTCs - 'UninitializedLocalUsageCheck', # /RTCu - 'EnableFastChecks'])) # /RTC1 -_Same(_compile, 'BrowseInformation', - _Enumeration(['false', - 'true', # /FR - 'true'])) # /Fr -_Same(_compile, 'CallingConvention', - _Enumeration(['Cdecl', # /Gd - 'FastCall', # /Gr - 'StdCall'])) # /Gz -_Same(_compile, 'CompileAs', - _Enumeration(['Default', - 'CompileAsC', # /TC - 'CompileAsCpp'])) # /TP -_Same(_compile, 'DebugInformationFormat', - _Enumeration(['', # Disabled - 'OldStyle', # /Z7 - None, - 'ProgramDatabase', # /Zi - 'EditAndContinue'])) # /ZI -_Same(_compile, 'EnableEnhancedInstructionSet', - _Enumeration(['NotSet', - 'StreamingSIMDExtensions', # /arch:SSE - 'StreamingSIMDExtensions2'])) # /arch:SSE2 -_Same(_compile, 'ErrorReporting', - _Enumeration(['None', # /errorReport:none - 'Prompt', # /errorReport:prompt - 'Queue'], # /errorReport:queue - new=['Send'])) # /errorReport:send" -_Same(_compile, 'ExceptionHandling', - _Enumeration(['false', - 'Sync', # /EHsc - 'Async'], # /EHa - new=['SyncCThrow'])) # /EHs -_Same(_compile, 'FavorSizeOrSpeed', - _Enumeration(['Neither', - 'Speed', # /Ot - 'Size'])) # /Os -_Same(_compile, 'FloatingPointModel', - _Enumeration(['Precise', # /fp:precise - 'Strict', # /fp:strict - 'Fast'])) # /fp:fast -_Same(_compile, 'InlineFunctionExpansion', - _Enumeration(['Default', - 'OnlyExplicitInline', # /Ob1 - 'AnySuitable'], # /Ob2 - new=['Disabled'])) # /Ob0 -_Same(_compile, 'Optimization', - _Enumeration(['Disabled', # /Od - 'MinSpace', # /O1 - 'MaxSpeed', # /O2 - 'Full'])) # /Ox -_Same(_compile, 'RuntimeLibrary', - _Enumeration(['MultiThreaded', # /MT - 'MultiThreadedDebug', # /MTd - 'MultiThreadedDLL', # /MD - 'MultiThreadedDebugDLL'])) # /MDd -_Same(_compile, 'StructMemberAlignment', - _Enumeration(['Default', - '1Byte', # /Zp1 - '2Bytes', # /Zp2 - '4Bytes', # /Zp4 - '8Bytes', # /Zp8 - '16Bytes'])) # /Zp16 -_Same(_compile, 'WarningLevel', - _Enumeration(['TurnOffAllWarnings', # /W0 - 'Level1', # /W1 - 'Level2', # /W2 - 'Level3', # /W3 - 'Level4'], # /W4 - new=['EnableAllWarnings'])) # /Wall - -# Options found in MSVS that have been renamed in MSBuild. -_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking', - _boolean) # /Gy -_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions', - _boolean) # /Oi -_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C -_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo -_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp -_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile', - _file_name) # Used with /Yc and /Yu -_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile', - _file_name) # /Fp -_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader', - _Enumeration(['NotUsing', # VS recognized '' for this value too. - 'Create', # /Yc - 'Use'])) # /Yu -_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX - -_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J') - -# MSVS options not found in MSBuild. -_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean) -_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean) - -# MSBuild options not found in MSVS. 
-_MSBuildOnly(_compile, 'BuildingInIDE', _boolean) -_MSBuildOnly(_compile, 'CompileAsManaged', - _Enumeration([], new=['false', - 'true', # /clr - 'Pure', # /clr:pure - 'Safe', # /clr:safe - 'OldSyntax'])) # /clr:oldSyntax -_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch -_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP -_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi -_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors -_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we -_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu - -# Defines a setting that needs very customized processing -_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile') - - -# Directives for converting MSVS VCLinkerTool to MSBuild Link. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for -# the schema of the MSBuild Link settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_link, 'AdditionalDependencies', _file_list) -_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH -# /MANIFESTDEPENDENCY: -_Same(_link, 'AdditionalManifestDependencies', _file_list) -_Same(_link, 'AdditionalOptions', _string_list) -_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE -_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION -_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE -_Same(_link, 'BaseAddress', _string) # /BASE -_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK -_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD -_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN -_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE -_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC -_Same(_link, 'EntryPointSymbol', _string) # /ENTRY -_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE -_Same(_link, 'FunctionOrder', _file_name) # /ORDER -_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG -_Same(_link, 'GenerateMapFile', _boolean) # /MAP -_Same(_link, 'HeapCommitSize', _string) -_Same(_link, 'HeapReserveSize', _string) # /HEAP -_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB -_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL -_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB -_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER -_Same(_link, 'KeyFile', _file_name) # /KEYFILE -_Same(_link, 'ManifestFile', _file_name) # /ManifestFile -_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS -_Same(_link, 'MapFileName', _file_name) -_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT -_Same(_link, 'MergeSections', _string) # /MERGE -_Same(_link, 'MidlCommandFile', _file_name) # /MIDL -_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF -_Same(_link, 'OutputFile', _file_name) # /OUT -_Same(_link, 'PerUserRedirection', _boolean) -_Same(_link, 'Profile', _boolean) # /PROFILE -_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD -_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB -_Same(_link, 'RegisterOutput', _boolean) -_Same(_link, 'SetChecksum', _boolean) # /RELEASE -_Same(_link, 'StackCommitSize', _string) -_Same(_link, 'StackReserveSize', _string) # /STACK -_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED -_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD -_Same(_link, 
'SuppressStartupBanner', _boolean) # /NOLOGO -_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD -_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY -_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT -_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID -_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true' -_Same(_link, 'Version', _string) # /VERSION - -_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF -_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED -_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE -_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF -_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE -_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE - -_subsystem_enumeration = _Enumeration( - ['NotSet', - 'Console', # /SUBSYSTEM:CONSOLE - 'Windows', # /SUBSYSTEM:WINDOWS - 'Native', # /SUBSYSTEM:NATIVE - 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION - 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER - 'EFI ROM', # /SUBSYSTEM:EFI_ROM - 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER - 'WindowsCE'], # /SUBSYSTEM:WINDOWSCE - new=['POSIX']) # /SUBSYSTEM:POSIX - -_target_machine_enumeration = _Enumeration( - ['NotSet', - 'MachineX86', # /MACHINE:X86 - None, - 'MachineARM', # /MACHINE:ARM - 'MachineEBC', # /MACHINE:EBC - 'MachineIA64', # /MACHINE:IA64 - None, - 'MachineMIPS', # /MACHINE:MIPS - 'MachineMIPS16', # /MACHINE:MIPS16 - 'MachineMIPSFPU', # /MACHINE:MIPSFPU - 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16 - None, - None, - None, - 'MachineSH4', # /MACHINE:SH4 - None, - 'MachineTHUMB', # /MACHINE:THUMB - 'MachineX64']) # /MACHINE:X64 - -_Same(_link, 'AssemblyDebug', - _Enumeration(['', - 'true', # /ASSEMBLYDEBUG - 'false'])) # /ASSEMBLYDEBUG:DISABLE -_Same(_link, 'CLRImageType', - _Enumeration(['Default', - 'ForceIJWImage', # /CLRIMAGETYPE:IJW - 'ForcePureILImage', # /Switch="CLRIMAGETYPE:PURE - 'ForceSafeILImage'])) # /Switch="CLRIMAGETYPE:SAFE -_Same(_link, 'CLRThreadAttribute', - _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE - 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA - 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA -_Same(_link, 'DataExecutionPrevention', - _Enumeration(['', - 'false', # /NXCOMPAT:NO - 'true'])) # /NXCOMPAT -_Same(_link, 'Driver', - _Enumeration(['NotSet', - 'Driver', # /Driver - 'UpOnly', # /DRIVER:UPONLY - 'WDM'])) # /DRIVER:WDM -_Same(_link, 'LinkTimeCodeGeneration', - _Enumeration(['Default', - 'UseLinkTimeCodeGeneration', # /LTCG - 'PGInstrument', # /LTCG:PGInstrument - 'PGOptimization', # /LTCG:PGOptimize - 'PGUpdate'])) # /LTCG:PGUpdate -_Same(_link, 'ShowProgress', - _Enumeration(['NotSet', - 'LinkVerbose', # /VERBOSE - 'LinkVerboseLib'], # /VERBOSE:Lib - new=['LinkVerboseICF', # /VERBOSE:ICF - 'LinkVerboseREF', # /VERBOSE:REF - 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH - 'LinkVerboseCLR'])) # /VERBOSE:CLR -_Same(_link, 'SubSystem', _subsystem_enumeration) -_Same(_link, 'TargetMachine', _target_machine_enumeration) -_Same(_link, 'UACExecutionLevel', - _Enumeration(['AsInvoker', # /level='asInvoker' - 'HighestAvailable', # /level='highestAvailable' - 'RequireAdministrator'])) # /level='requireAdministrator' - - -# Options found in MSVS that have been renamed in MSBuild. 
-_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting', - _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE - 'PromptImmediately', # /ERRORREPORT:PROMPT - 'QueueForNextLogin'], # /ERRORREPORT:QUEUE - new=['SendErrorReport'])) # /ERRORREPORT:SEND -_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries', - _file_list) # /NODEFAULTLIB -_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY -_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET - -_Moved(_link, 'GenerateManifest', '', _boolean) -_Moved(_link, 'IgnoreImportLibrary', '', _boolean) -_Moved(_link, 'LinkIncremental', '', _newly_boolean) -_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean) -_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean) -_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean) -# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp. -_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list) - -# MSBuild options not found in MSVS. -_MSBuildOnly(_link, 'BuildingInIDE', _boolean) -_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH -_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false' -_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS -_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND -_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND -_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX -_MSBuildOnly(_link, 'MinimumRequiredVersion', _string) -_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false' -_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN -_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION -_MSBuildOnly(_link, 'ForceFileOutput', - _Enumeration([], new=['Enabled', # /FORCE - # /FORCE:MULTIPLE - 'MultiplyDefinedSymbolOnly', - 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED -_MSBuildOnly(_link, 'CreateHotPatchableImage', - _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN - 'X86Image', # /FUNCTIONPADMIN:5 - 'X64Image', # /FUNCTIONPADMIN:6 - 'ItaniumImage'])) # /FUNCTIONPADMIN:16 -_MSBuildOnly(_link, 'CLRSupportLastError', - _Enumeration([], new=['Enabled', # /CLRSupportLastError - 'Disabled', # /CLRSupportLastError:NO - # /CLRSupportLastError:SYSTEMDLL - 'SystemDlls'])) - - -# Directives for converting VCResourceCompilerTool to ResourceCompile. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for -# the schema of the MSBuild ResourceCompile settings. - -_Same(_rc, 'AdditionalOptions', _string_list) -_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I -_Same(_rc, 'Culture', _Integer(msbuild_base=16)) -_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X -_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D -_Same(_rc, 'ResourceOutputFileName', _string) # /fo -_Same(_rc, 'ShowProgress', _boolean) # /v -# There is no UI in VisualStudio 2008 to set the following properties. -# However they are found in CL and other tools. Include them here for -# completeness, as they are very likely to have the same usage pattern. -_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u - -# MSBuild options not found in MSVS. 
-_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n -_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name) - - -# Directives for converting VCMIDLTool to Midl. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for -# the schema of the MSBuild Midl settings. - -_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I -_Same(_midl, 'AdditionalOptions', _string_list) -_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt -_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation -_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check -_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum -_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref -_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data -_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf -_Same(_midl, 'GenerateTypeLibrary', _boolean) -_Same(_midl, 'HeaderFileName', _file_name) # /h -_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir -_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid -_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203 -_Same(_midl, 'OutputDirectory', _string) # /out -_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D -_Same(_midl, 'ProxyFileName', _file_name) # /proxy -_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o -_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_midl, 'TypeLibraryName', _file_name) # /tlb -_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U -_Same(_midl, 'WarnAsError', _boolean) # /WX - -_Same(_midl, 'DefaultCharType', - _Enumeration(['Unsigned', # /char unsigned - 'Signed', # /char signed - 'Ascii'])) # /char ascii7 -_Same(_midl, 'TargetEnvironment', - _Enumeration(['NotSet', - 'Win32', # /env win32 - 'Itanium', # /env ia64 - 'X64'])) # /env x64 -_Same(_midl, 'EnableErrorChecks', - _Enumeration(['EnableCustom', - 'None', # /error none - 'All'])) # /error all -_Same(_midl, 'StructMemberAlignment', - _Enumeration(['NotSet', - '1', # Zp1 - '2', # Zp2 - '4', # Zp4 - '8'])) # Zp8 -_Same(_midl, 'WarningLevel', - _Enumeration(['0', # /W0 - '1', # /W1 - '2', # /W2 - '3', # /W3 - '4'])) # /W4 - -_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata -_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters', - _boolean) # /robust - -# MSBuild options not found in MSVS. -_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config -_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub -_MSBuildOnly(_midl, 'GenerateClientFiles', - _Enumeration([], new=['Stub', # /client stub - 'None'])) # /client none -_MSBuildOnly(_midl, 'GenerateServerFiles', - _Enumeration([], new=['Stub', # /client stub - 'None'])) # /client none -_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL -_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub -_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn -_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_midl, 'TypeLibFormat', - _Enumeration([], new=['NewFormat', # /newtlb - 'OldFormat'])) # /oldtlb - - -# Directives for converting VCLibrarianTool to Lib. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for -# the schema of the MSBuild Lib settings. 
- -_Same(_lib, 'AdditionalDependencies', _file_list) -_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH -_Same(_lib, 'AdditionalOptions', _string_list) -_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT -_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE -_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB -_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB -_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF -_Same(_lib, 'OutputFile', _file_name) # /OUT -_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO -_Same(_lib, 'UseUnicodeResponseFiles', _boolean) -_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG - -# TODO(jeanluc) _link defines the same value that gets moved to -# ProjectReference. We may want to validate that they are consistent. -_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean) - -# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp. -_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list) - -_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false' -_MSBuildOnly(_lib, 'ErrorReporting', - _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT - 'QueueForNextLogin', # /ERRORREPORT:QUEUE - 'SendErrorReport', # /ERRORREPORT:SEND - 'NoErrorReport'])) # /ERRORREPORT:NONE -_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string) -_MSBuildOnly(_lib, 'Name', _file_name) # /NAME -_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE -_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration) -_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration) -_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX -_MSBuildOnly(_lib, 'Verbose', _boolean) - - -# Directives for converting VCManifestTool to Mt. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for -# the schema of the MSBuild Lib settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest -_Same(_manifest, 'AdditionalOptions', _string_list) -_Same(_manifest, 'AssemblyIdentity', _string) # /identity: -_Same(_manifest, 'ComponentFileName', _file_name) # /dll -_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs -_Same(_manifest, 'InputResourceManifests', _string) # /inputresource -_Same(_manifest, 'OutputManifestFile', _file_name) # /out -_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs -_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements -_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb: -_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate -_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name) -_Same(_manifest, 'VerboseOutput', _boolean) # /verbose - -# Options that have moved location. -_MovedAndRenamed(_manifest, 'ManifestResourceFile', - 'ManifestResourceCompile', - 'ResourceOutputFileName', - _file_name) -_Moved(_manifest, 'EmbedManifest', '', _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name) -_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean) -_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean) - -# MSBuild options not found in MSVS. 
-_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
-_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean)  # /category
-_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
-             _file_name)  # /managedassemblyname
-_MSBuildOnly(_manifest, 'OutputResourceManifests', _string)  # /outputresource
-_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean)  # /nodependency
-_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,1482 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the MSVSSettings.py file."""
-
-import StringIO
-import unittest
-import gyp.MSVSSettings as MSVSSettings
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
-  def setUp(self):
-    self.stderr = StringIO.StringIO()
-
-  def _ExpectedWarnings(self, expected):
-    """Compares recorded lines to expected warnings."""
-    self.stderr.seek(0)
-    actual = self.stderr.read().split('\n')
-    actual = [line for line in actual if line]
-    self.assertEqual(sorted(expected), sorted(actual))
-
-  def testValidateMSVSSettings_tool_names(self):
-    """Tests that only MSVS tool names are allowed."""
-    MSVSSettings.ValidateMSVSSettings(
-        {'VCCLCompilerTool': {},
-         'VCLinkerTool': {},
-         'VCMIDLTool': {},
-         'foo': {},
-         'VCResourceCompilerTool': {},
-         'VCLibrarianTool': {},
-         'VCManifestTool': {},
-         'ClCompile': {}},
-        self.stderr)
-    self._ExpectedWarnings([
-        'Warning: unrecognized tool foo',
-        'Warning: unrecognized tool ClCompile'])
-
-  def testValidateMSVSSettings_settings(self):
-    """Tests that for invalid MSVS settings."""
-    MSVSSettings.ValidateMSVSSettings(
-        {'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': ['string1', 'string2'],
-            'AdditionalUsingDirectories': 'folder1;folder2',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': '0',
-            'BasicRuntimeChecks': '5',
-            'BrowseInformation': 'fdkslj',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'CallingConvention': '-1',
-            'CompileAs': '1',
-            'DebugInformationFormat': '2',
-            'DefaultCharIsUnsigned': 'true',
-            'Detect64BitPortabilityProblems': 'true',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'string1;string2',
-            'EnableEnhancedInstructionSet': '1',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnableFunctionLevelLinking': 'true',
-            'EnableIntrinsicFunctions': 'true',
-            'EnablePREfast': 'true',
-            'Enableprefast': 'bogus',
-            'ErrorReporting': '1',
-            'ExceptionHandling': '1',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': '1',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': '1',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2',
-            'ForcedUsingFiles': 'file1;file2',
-            'GeneratePreprocessedFile': '1',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': '1',
-            'KeepComments': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFile': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMP': 'true',
-            'Optimization': '1',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderThrough': 'a_file_name',
-            'PreprocessorDefinitions': 'string1;string2',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': '1',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1',
-            'SuppressStartupBanner': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'string1;string2',
-            'UseFullPaths': 'true',
-            'UsePrecompiledHeader': '1',
-            'UseUnicodeResponseFiles': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '1',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name',
-            'ZZXYZ': 'bogus'},
-         'VCLinkerTool': {
-            'AdditionalDependencies': 'file1;file2',
-            'AdditionalLibraryDirectories': 'folder1;folder2',
-            'AdditionalManifestDependencies': 'file1;file2',
-            'AdditionalOptions': 'a string1',
-            'AddModuleNamesToAssembly': 'file1;file2',
-            'AllowIsolation': 'true',
-            'AssemblyDebug': '2',
-            'AssemblyLinkResource': 'file1;file2',
-            'BaseAddress': 'a string1',
-            'CLRImageType': '2',
-            'CLRThreadAttribute': '2',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '2',
-            'DelayLoadDLLs': 'file1;file2',
-            'DelaySign': 'true',
-            'Driver': '2',
-            'EmbedManagedResourceFile': 'file1;file2',
-            'EnableCOMDATFolding': '2',
-            'EnableUAC': 'true',
-            'EntryPointSymbol': 'a string1',
-            'ErrorReporting': '2',
-            'FixedBaseAddress': '2',
-            'ForceSymbolReferences': 'file1;file2',
-            'FunctionOrder': 'a_file_name',
-            'GenerateDebugInformation': 'true',
-            'GenerateManifest': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': 'a string1',
-            'HeapReserveSize': 'a string1',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreDefaultLibraryNames': 'file1;file2',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreImportLibrary': 'true',
-            'ImportLibrary': 'a_file_name',
-            'KeyContainer': 'a_file_name',
-            'KeyFile': 'a_file_name',
-            'LargeAddressAware': '2',
-            'LinkIncremental': '2',
-            'LinkLibraryDependencies': 'true',
-            'LinkTimeCodeGeneration': '2',
-            'ManifestFile': 'a_file_name',
-            'MapExports': 'true',
-            'MapFileName': 'a_file_name',
-            'MergedIDLBaseFileName': 'a_file_name',
-            'MergeSections': 'a string1',
-            'MidlCommandFile': 'a_file_name',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OptimizeForWindows98': '1',
-            'OptimizeReferences': '2',
-            'OutputFile': 'a_file_name',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': 'a_file_name',
-            'ProgramDatabaseFile': 'a_file_name',
-            'RandomizedBaseAddress': '2',
-            'RegisterOutput': 'true',
-            'ResourceOnlyDLL': 'true',
-            'SetChecksum': 'true',
-            'ShowProgress': '2',
-            'StackCommitSize': 'a string1',
-            'StackReserveSize': 'a string1',
-            'StripPrivateSymbols': 'a_file_name',
-            'SubSystem': '2',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'true',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNet': 'true',
-            'TargetMachine': '2',
-            'TerminalServerAware': '2',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'TypeLibraryResourceID': '33',
-            'UACExecutionLevel': '2',
-            'UACUIAccess': 'true',
-            'UseLibraryDependencyInputs': 'true',
-            'UseUnicodeResponseFiles': 'true',
-            'Version': 'a string1'},
-         'VCMIDLTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': 'a string1',
-            'CPreprocessOptions': 'a string1',
-            'DefaultCharType': '1',
-            'DLLDataFileName': 'a_file_name',
-            'EnableErrorChecks': '1',
-            'ErrorCheckAllocations': 'true',
-            'ErrorCheckBounds': 'true',
-            'ErrorCheckEnumRange': 'true',
-            'ErrorCheckRefPointers': 'true',
-            'ErrorCheckStubData': 'true',
-            'GenerateStublessProxies': 'true',
-            'GenerateTypeLibrary': 'true',
-            'HeaderFileName': 'a_file_name',
-            'IgnoreStandardIncludePath': 'true',
-            'InterfaceIdentifierFileName': 'a_file_name',
-            'MkTypLibCompatible': 'true',
-            'notgood': 'bogus',
-            'OutputDirectory': 'a string1',
-            'PreprocessorDefinitions': 'string1;string2',
-            'ProxyFileName': 'a_file_name',
-            'RedirectOutputAndErrors': 'a_file_name',
-            'StructMemberAlignment': '1',
-            'SuppressStartupBanner': 'true',
-            'TargetEnvironment': '1',
-            'TypeLibraryName': 'a_file_name',
-            'UndefinePreprocessorDefinitions': 'string1;string2',
-            'ValidateParameters': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '1'},
-         'VCResourceCompilerTool': {
-            'AdditionalOptions': 'a string1',
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'Culture': '1003',
-            'IgnoreStandardIncludePath': 'true',
-            'notgood2': 'bogus',
-            'PreprocessorDefinitions': 'string1;string2',
-            'ResourceOutputFileName': 'a string1',
-            'ShowProgress': 'true',
-            'SuppressStartupBanner': 'true',
-            'UndefinePreprocessorDefinitions': 'string1;string2'},
-         'VCLibrarianTool': {
-            'AdditionalDependencies': 'file1;file2',
-            'AdditionalLibraryDirectories': 'folder1;folder2',
-            'AdditionalOptions': 'a string1',
-            'ExportNamedFunctions': 'string1;string2',
-            'ForceSymbolReferences': 'a string1',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2',
-            'LinkLibraryDependencies': 'true',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OutputFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'UseUnicodeResponseFiles': 'true'},
-         'VCManifestTool': {
-            'AdditionalManifestFiles': 'file1;file2',
-            'AdditionalOptions': 'a string1',
-            'AssemblyIdentity': 'a string1',
-            'ComponentFileName': 'a_file_name',
-            'DependencyInformationFile': 'a_file_name',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'a string1',
-            'ManifestResourceFile': 'a_file_name',
-            'OutputManifestFile': 'a_file_name',
-            'RegistrarScriptFile': 'a_file_name',
-            'ReplacementsFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'UpdateFileHashes': 'truel',
-            'UpdateFileHashesSearchPath': 'a_file_name',
-            'UseFAT32Workaround': 'true',
-            'UseUnicodeResponseFiles': 'true',
-            'VerboseOutput': 'true'}},
-        self.stderr)
-    self._ExpectedWarnings([
-        'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
-        'index value (5) not in expected range [0, 4)',
-        'Warning: for VCCLCompilerTool/BrowseInformation, '
-        "invalid literal for int() with base 10: 'fdkslj'",
-        'Warning: for VCCLCompilerTool/CallingConvention, '
-        'index value (-1) not in expected range [0, 3)',
-        'Warning: for VCCLCompilerTool/DebugInformationFormat, '
-        'converted value for 2 not specified.',
-        'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
-        'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
-        'Warning: for VCLinkerTool/TargetMachine, '
-        'converted value for 2 not specified.',
-        'Warning: unrecognized setting VCMIDLTool/notgood',
-        'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
-        'Warning: for VCManifestTool/UpdateFileHashes, '
-        "expected bool; got 'truel'"
-        ''])
-
-  def testValidateMSBuildSettings_settings(self):
-    """Tests that for invalid MSBuild settings."""
-    MSVSSettings.ValidateMSBuildSettings(
-        {'ClCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': ['string1', 'string2'],
-            'AdditionalUsingDirectories': 'folder1;folder2',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': 'NoListing',
-            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
-            'BrowseInformation': 'false',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'BuildingInIDE': 'true',
-            'CallingConvention': 'Cdecl',
-            'CompileAs': 'CompileAsC',
-            'CompileAsManaged': 'Pure',
-            'CreateHotpatchableImage': 'true',
-            'DebugInformationFormat': 'ProgramDatabase',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'string1;string2',
-            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnablePREfast': 'true',
-            'Enableprefast': 'bogus',
-            'ErrorReporting': 'Prompt',
-            'ExceptionHandling': 'SyncCThrow',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': 'Neither',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': 'Precise',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2',
-            'ForcedUsingFiles': 'file1;file2',
-            'FunctionLevelLinking': 'false',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': 'OnlyExplicitInline',
-            'IntrinsicFunctions': 'false',
-            'MinimalRebuild': 'true',
-            'MultiProcessorCompilation': 'true',
-            'ObjectFileName': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMPSupport': 'true',
-            'Optimization': 'Disabled',
-            'PrecompiledHeader': 'NotUsing',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderOutputFile': 'a_file_name',
-            'PreprocessKeepComments': 'true',
-            'PreprocessorDefinitions': 'string1;string2',
-            'PreprocessOutputPath': 'a string1',
-            'PreprocessSuppressLineNumbers': 'false',
-            'PreprocessToFile': 'false',
-            'ProcessorNumber': '33',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': 'MultiThreaded',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1Byte',
-            'SuppressStartupBanner': 'true',
-            'TrackerLogDirectory': 'a_folder',
-            'TreatSpecificWarningsAsErrors': 'string1;string2',
-            'TreatWarningAsError': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'string1;string2',
-            'UseFullPaths': 'true',
-            'UseUnicodeForAssemblerListing': 'true',
-            'WarningLevel': 'TurnOffAllWarnings',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name',
-            'ZZXYZ': 'bogus'},
-         'Link': {
-            'AdditionalDependencies': 'file1;file2',
-            'AdditionalLibraryDirectories': 'folder1;folder2',
-            'AdditionalManifestDependencies': 'file1;file2',
-            'AdditionalOptions': 'a string1',
-            'AddModuleNamesToAssembly': 'file1;file2',
-            'AllowIsolation': 'true',
-            'AssemblyDebug': '',
-            'AssemblyLinkResource': 'file1;file2',
-            'BaseAddress': 'a string1',
-            'BuildingInIDE': 'true',
-            'CLRImageType': 'ForceIJWImage',
-            'CLRSupportLastError': 'Enabled',
-            'CLRThreadAttribute': 'MTAThreadingAttribute',
-            'CLRUnmanagedCodeCheck': 'true',
-            'CreateHotPatchableImage': 'X86Image',
-            'DataExecutionPrevention': 'false',
-            'DelayLoadDLLs': 'file1;file2',
-            'DelaySign': 'true',
-            'Driver': 'NotSet',
-            'EmbedManagedResourceFile': 'file1;file2',
-            'EnableCOMDATFolding': 'false',
-            'EnableUAC': 'true',
-            'EntryPointSymbol': 'a string1',
-            'FixedBaseAddress': 'false',
-            'ForceFileOutput': 'Enabled',
-            'ForceSymbolReferences': 'file1;file2',
-            'FunctionOrder': 'a_file_name',
-            'GenerateDebugInformation': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': 'a string1',
-            'HeapReserveSize': 'a string1',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreSpecificDefaultLibraries': 'a_file_list',
-            'ImageHasSafeExceptionHandlers': 'true',
-            'ImportLibrary': 'a_file_name',
-            'KeyContainer': 'a_file_name',
-            'KeyFile': 'a_file_name',
-            'LargeAddressAware': 'false',
-            'LinkDLL': 'true',
-            'LinkErrorReporting': 'SendErrorReport',
-            'LinkStatus': 'true',
-            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
-            'ManifestFile': 'a_file_name',
-            'MapExports': 'true',
-            'MapFileName': 'a_file_name',
-            'MergedIDLBaseFileName': 'a_file_name',
-            'MergeSections': 'a string1',
-            'MidlCommandFile': 'a_file_name',
-            'MinimumRequiredVersion': 'a string1',
-            'ModuleDefinitionFile': 'a_file_name',
-            'MSDOSStubFileName': 'a_file_name',
-            'NoEntryPoint': 'true',
-            'OptimizeReferences': 'false',
-            'OutputFile': 'a_file_name',
-            'PerUserRedirection': 'true',
-            'PreventDllBinding': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': 'a_file_name',
-            'ProgramDatabaseFile': 'a_file_name',
-            'RandomizedBaseAddress': 'false',
-            'RegisterOutput': 'true',
-            'SectionAlignment': '33',
-            'SetChecksum': 'true',
-            'ShowProgress': 'LinkVerboseREF',
-            'SpecifySectionAttributes': 'a string1',
-            'StackCommitSize': 'a string1',
-            'StackReserveSize': 'a string1',
-            'StripPrivateSymbols': 'a_file_name',
-            'SubSystem': 'Console',
-            'SupportNobindOfDelayLoadedDLL': 'true',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'true',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNET': 'true',
-            'TargetMachine': 'MachineX86',
-            'TerminalServerAware': 'false',
-            'TrackerLogDirectory': 'a_folder',
-            'TreatLinkerWarningAsErrors': 'true',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'TypeLibraryResourceID': '33',
-            'UACExecutionLevel': 'AsInvoker',
-            'UACUIAccess': 'true',
-            'Version': 'a string1'},
-         'ResourceCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': 'a string1',
-            'Culture': '0x236',
-            'IgnoreStandardIncludePath': 'true',
-            'NullTerminateStrings': 'true',
-            'PreprocessorDefinitions': 'string1;string2',
-            'ResourceOutputFileName': 'a string1',
-            'ShowProgress': 'true',
-            'SuppressStartupBanner': 'true',
-            'TrackerLogDirectory': 'a_folder',
-            'UndefinePreprocessorDefinitions': 'string1;string2'},
-         'Midl': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': 'a string1',
-            'ApplicationConfigurationMode': 'true',
-            'ClientStubFile': 'a_file_name',
-            'CPreprocessOptions': 'a string1',
-            'DefaultCharType': 'Signed',
-            'DllDataFileName': 'a_file_name',
-            'EnableErrorChecks': 'EnableCustom',
-            'ErrorCheckAllocations': 'true',
-            'ErrorCheckBounds': 'true',
-            'ErrorCheckEnumRange': 'true',
-            'ErrorCheckRefPointers': 'true',
-            'ErrorCheckStubData': 'true',
-            'GenerateClientFiles': 'Stub',
-            'GenerateServerFiles': 'None',
-            'GenerateStublessProxies': 'true',
-            'GenerateTypeLibrary': 'true',
-            'HeaderFileName': 'a_file_name',
-            'IgnoreStandardIncludePath': 'true',
-            'InterfaceIdentifierFileName': 'a_file_name',
-            'LocaleID': '33',
-            'MkTypLibCompatible': 'true',
-            'OutputDirectory': 'a string1',
-            'PreprocessorDefinitions': 'string1;string2',
-            'ProxyFileName': 'a_file_name',
-            'RedirectOutputAndErrors': 'a_file_name',
-            'ServerStubFile': 'a_file_name',
-            'StructMemberAlignment': 'NotSet',
-            'SuppressCompilerWarnings': 'true',
-            'SuppressStartupBanner': 'true',
-            'TargetEnvironment': 'Itanium',
-            'TrackerLogDirectory': 'a_folder',
-            'TypeLibFormat': 'NewFormat',
-            'TypeLibraryName': 'a_file_name',
-            'UndefinePreprocessorDefinitions': 'string1;string2',
-            'ValidateAllParameters': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '1'},
-         'Lib': {
-            'AdditionalDependencies': 'file1;file2',
-            'AdditionalLibraryDirectories': 'folder1;folder2',
-            'AdditionalOptions': 'a string1',
-            'DisplayLibrary': 'a string1',
-            'ErrorReporting': 'PromptImmediately',
-            'ExportNamedFunctions': 'string1;string2',
-            'ForceSymbolReferences': 'a string1',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2',
-            'LinkTimeCodeGeneration': 'true',
-            'MinimumRequiredVersion': 'a string1',
-            'ModuleDefinitionFile': 'a_file_name',
-            'Name': 'a_file_name',
-            'OutputFile': 'a_file_name',
-            'RemoveObjects': 'file1;file2',
-            'SubSystem': 'Console',
-            'SuppressStartupBanner': 'true',
-            'TargetMachine': 'MachineX86i',
-            'TrackerLogDirectory': 'a_folder',
-            'TreatLibWarningAsErrors': 'true',
-            'UseUnicodeResponseFiles': 'true',
-            'Verbose': 'true'},
-         'Manifest': {
-            'AdditionalManifestFiles': 'file1;file2',
-            'AdditionalOptions': 'a string1',
-            'AssemblyIdentity': 'a string1',
-            'ComponentFileName': 'a_file_name',
-            'EnableDPIAwareness': 'fal',
-            'GenerateCatalogFiles': 'truel',
-            'GenerateCategoryTags': 'true',
-            'InputResourceManifests': 'a string1',
-            'ManifestFromManagedAssembly': 'a_file_name',
-            'notgood3': 'bogus',
-            'OutputManifestFile': 'a_file_name',
-            'OutputResourceManifests': 'a string1',
-            'RegistrarScriptFile': 'a_file_name',
-            'ReplacementsFile': 'a_file_name',
-            'SuppressDependencyElement': 'true',
-            'SuppressStartupBanner': 'true',
-            'TrackerLogDirectory': 'a_folder',
-            'TypeLibraryFile': 'a_file_name',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'a_file_name',
-            'VerboseOutput': 'true'},
-         'ProjectReference': {
-            'LinkLibraryDependencies': 'true',
-            'UseLibraryDependencyInputs': 'true'},
-         'ManifestResourceCompile': {
-            'ResourceOutputFileName': 'a_file_name'},
-         '': {
-            'EmbedManifest': 'true',
-            'GenerateManifest': 'true',
-            'IgnoreImportLibrary': 'true',
-            'LinkIncremental': 'false'}},
-        self.stderr)
-    self._ExpectedWarnings([
-        'Warning: unrecognized setting ClCompile/Enableprefast',
-        'Warning: unrecognized setting ClCompile/ZZXYZ',
-        'Warning: unrecognized setting Manifest/notgood3',
-        'Warning: for Manifest/GenerateCatalogFiles, '
-        "expected bool; got 'truel'",
-        'Warning: for Lib/TargetMachine, unrecognized enumerated value '
-        'MachineX86i',
-        "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
-
-  def testConvertToMSBuildSettings_empty(self):
-    """Tests an empty conversion."""
-    msvs_settings = {}
-    expected_msbuild_settings = {}
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-  def testConvertToMSBuildSettings_minimal(self):
-    """Tests a minimal conversion."""
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/foo',
-            'BasicRuntimeChecks': '0',
-        },
-        'VCLinkerTool': {
-            'LinkTimeCodeGeneration': '1',
-            'ErrorReporting': '1',
-            'DataExecutionPrevention': '2',
-        },
-    }
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/foo',
-            'BasicRuntimeChecks': 'Default',
-        },
-        'Link': {
-            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
-            'LinkErrorReporting': 'PromptImmediately',
-            'DataExecutionPrevention': 'true',
-        },
-    }
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-  def testConvertToMSBuildSettings_warnings(self):
-    """Tests conversion that generates warnings."""
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': '1',
-            'AdditionalOptions': '2',
-            # These are incorrect values:
-            'BasicRuntimeChecks': '12',
-            'BrowseInformation': '21',
-            'UsePrecompiledHeader': '13',
-            'GeneratePreprocessedFile': '14'},
-        'VCLinkerTool': {
-            # These are incorrect values:
-            'Driver': '10',
-            'LinkTimeCodeGeneration': '31',
-            'ErrorReporting': '21',
-            'FixedBaseAddress': '6'},
-        'VCResourceCompilerTool': {
-            # Custom
-            'Culture': '1003'}}
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': '1',
-            'AdditionalOptions': '2'},
-        'Link': {},
-        'ResourceCompile': {
-            # Custom
-            'Culture': '0x03eb'}}
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([
-        'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
-        'MSBuild, index value (12) not in expected range [0, 4)',
-        'Warning: while converting VCCLCompilerTool/BrowseInformation to '
-        'MSBuild, index value (21) not in expected range [0, 3)',
-        'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
-        'MSBuild, index value (13) not in expected range [0, 3)',
-        'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
-        'MSBuild, value must be one of [0, 1, 2]; got 14',
-
-        'Warning: while converting VCLinkerTool/Driver to '
-        'MSBuild, index value (10) not in expected range [0, 4)',
-        'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
-        'MSBuild, index value (31) not in expected range [0, 5)',
-        'Warning: while converting VCLinkerTool/ErrorReporting to '
-        'MSBuild, index value (21) not in expected range [0, 3)',
-        'Warning: while converting VCLinkerTool/FixedBaseAddress to '
-        'MSBuild, index value (6) not in expected range [0, 3)',
-        ])
-
-  def testConvertToMSBuildSettings_full_synthetic(self):
-    """Tests conversion of all the MSBuild settings."""
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': '0',
-            'BasicRuntimeChecks': '1',
-            'BrowseInformation': '2',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'CallingConvention': '0',
-            'CompileAs': '1',
-            'DebugInformationFormat': '4',
-            'DefaultCharIsUnsigned': 'true',
-            'Detect64BitPortabilityProblems': 'true',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'd1;d2;d3',
-            'EnableEnhancedInstructionSet': '0',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnableFunctionLevelLinking': 'true',
-            'EnableIntrinsicFunctions': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': '1',
-            'ExceptionHandling': '2',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': '0',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': '1',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2;file3',
-            'ForcedUsingFiles': 'file1;file2;file3',
-            'GeneratePreprocessedFile': '1',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': '2',
-            'KeepComments': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFile': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMP': 'true',
-            'Optimization': '3',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderThrough': 'a_file_name',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': '0',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1',
-            'SuppressStartupBanner': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'UseFullPaths': 'true',
-            'UsePrecompiledHeader': '1',
-            'UseUnicodeResponseFiles': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '2',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name'},
-        'VCLinkerTool': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
-            'AdditionalManifestDependencies': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AddModuleNamesToAssembly': 'file1;file2;file3',
-            'AllowIsolation': 'true',
-            'AssemblyDebug': '0',
-            'AssemblyLinkResource': 'file1;file2;file3',
-            'BaseAddress': 'a_string',
-            'CLRImageType': '1',
-            'CLRThreadAttribute': '2',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '0',
-            'DelayLoadDLLs': 'file1;file2;file3',
-            'DelaySign': 'true',
-            'Driver': '1',
-            'EmbedManagedResourceFile': 'file1;file2;file3',
-            'EnableCOMDATFolding': '0',
-            'EnableUAC': 'true',
-            'EntryPointSymbol': 'a_string',
-            'ErrorReporting': '0',
-            'FixedBaseAddress': '1',
-            'ForceSymbolReferences': 'file1;file2;file3',
-            'FunctionOrder': 'a_file_name',
-            'GenerateDebugInformation': 'true',
-            'GenerateManifest': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': 'a_string',
-            'HeapReserveSize': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreDefaultLibraryNames': 'file1;file2;file3',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreImportLibrary': 'true',
-            'ImportLibrary': 'a_file_name',
-            'KeyContainer': 'a_file_name',
-            'KeyFile': 'a_file_name',
-            'LargeAddressAware': '2',
-            'LinkIncremental': '1',
-            'LinkLibraryDependencies': 'true',
-            'LinkTimeCodeGeneration': '2',
-            'ManifestFile': 'a_file_name',
-            'MapExports': 'true',
-            'MapFileName': 'a_file_name',
-            'MergedIDLBaseFileName': 'a_file_name',
-            'MergeSections': 'a_string',
-            'MidlCommandFile': 'a_file_name',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OptimizeForWindows98': '1',
-            'OptimizeReferences': '0',
-            'OutputFile': 'a_file_name',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': 'a_file_name',
-            'ProgramDatabaseFile': 'a_file_name',
-            'RandomizedBaseAddress': '1',
-            'RegisterOutput': 'true',
-            'ResourceOnlyDLL': 'true',
-            'SetChecksum': 'true',
-            'ShowProgress': '0',
-            'StackCommitSize': 'a_string',
-            'StackReserveSize': 'a_string',
-            'StripPrivateSymbols': 'a_file_name',
-            'SubSystem': '2',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'true',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNet': 'true',
-            'TargetMachine': '3',
-            'TerminalServerAware': '2',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'TypeLibraryResourceID': '33',
-            'UACExecutionLevel': '1',
-            'UACUIAccess': 'true',
-            'UseLibraryDependencyInputs': 'false',
-            'UseUnicodeResponseFiles': 'true',
-            'Version': 'a_string'},
-        'VCResourceCompilerTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'Culture': '1003',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ResourceOutputFileName': 'a_string',
-            'ShowProgress': 'true',
-            'SuppressStartupBanner': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
-        'VCMIDLTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'CPreprocessOptions': 'a_string',
-            'DefaultCharType': '0',
-            'DLLDataFileName': 'a_file_name',
-            'EnableErrorChecks': '2',
-            'ErrorCheckAllocations': 'true',
-            'ErrorCheckBounds': 'true',
-            'ErrorCheckEnumRange': 'true',
-            'ErrorCheckRefPointers': 'true',
-            'ErrorCheckStubData': 'true',
-            'GenerateStublessProxies': 'true',
-            'GenerateTypeLibrary': 'true',
-            'HeaderFileName': 'a_file_name',
-            'IgnoreStandardIncludePath': 'true',
-            'InterfaceIdentifierFileName': 'a_file_name',
-            'MkTypLibCompatible': 'true',
-            'OutputDirectory': 'a_string',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ProxyFileName': 'a_file_name',
-            'RedirectOutputAndErrors': 'a_file_name',
-            'StructMemberAlignment': '3',
-            'SuppressStartupBanner': 'true',
-            'TargetEnvironment': '1',
-            'TypeLibraryName': 'a_file_name',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'ValidateParameters': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '4'},
-        'VCLibrarianTool': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'ExportNamedFunctions': 'd1;d2;d3',
-            'ForceSymbolReferences': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
-            'LinkLibraryDependencies': 'true',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OutputFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'UseUnicodeResponseFiles': 'true'},
-        'VCManifestTool': {
-            'AdditionalManifestFiles': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AssemblyIdentity': 'a_string',
-            'ComponentFileName': 'a_file_name',
-            'DependencyInformationFile': 'a_file_name',
-            'EmbedManifest': 'true',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'a_string',
-            'ManifestResourceFile': 'my_name',
-            'OutputManifestFile': 'a_file_name',
-            'RegistrarScriptFile': 'a_file_name',
-            'ReplacementsFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'a_file_name',
-            'UseFAT32Workaround': 'true',
-            'UseUnicodeResponseFiles': 'true',
-            'VerboseOutput': 'true'}}
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string /J',
-            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': 'NoListing',
-            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
-            'BrowseInformation': 'true',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'CallingConvention': 'Cdecl',
-            'CompileAs': 'CompileAsC',
-            'DebugInformationFormat': 'EditAndContinue',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'd1;d2;d3',
-            'EnableEnhancedInstructionSet': 'NotSet',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': 'Prompt',
-            'ExceptionHandling': 'Async',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': 'Neither',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': 'Strict',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2;file3',
-            'ForcedUsingFiles': 'file1;file2;file3',
-            'FunctionLevelLinking': 'true',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': 'AnySuitable',
-            'IntrinsicFunctions': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFileName': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMPSupport': 'true',
-            'Optimization': 'Full',
-            'PrecompiledHeader': 'Create',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderOutputFile': 'a_file_name',
-            'PreprocessKeepComments': 'true',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'PreprocessSuppressLineNumbers': 'false',
-            'PreprocessToFile': 'true',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': 'MultiThreaded',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1Byte',
-            'SuppressStartupBanner': 'true',
-            'TreatWarningAsError': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'UseFullPaths': 'true',
-            'WarningLevel': 'Level2',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name'},
-        'Link': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalManifestDependencies': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AddModuleNamesToAssembly': 'file1;file2;file3',
-            'AllowIsolation': 'true',
-            'AssemblyDebug': '',
-            'AssemblyLinkResource': 'file1;file2;file3',
-            'BaseAddress': 'a_string',
-            'CLRImageType': 'ForceIJWImage',
-            'CLRThreadAttribute': 'STAThreadingAttribute',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '',
-            'DelayLoadDLLs': 'file1;file2;file3',
-            'DelaySign': 'true',
-            'Driver': 'Driver',
-            'EmbedManagedResourceFile': 'file1;file2;file3',
-            'EnableCOMDATFolding': '',
-            'EnableUAC': 'true',
-            'EntryPointSymbol': 'a_string',
-            'FixedBaseAddress': 'false',
-            'ForceSymbolReferences': 'file1;file2;file3',
-            'FunctionOrder': 'a_file_name',
-            'GenerateDebugInformation': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': 'a_string',
-            'HeapReserveSize': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
-            'ImportLibrary': 'a_file_name',
-            'KeyContainer': 'a_file_name',
-            'KeyFile': 'a_file_name',
-            'LargeAddressAware': 'true',
-            'LinkErrorReporting': 'NoErrorReport',
-            'LinkTimeCodeGeneration': 'PGInstrument',
-            'ManifestFile': 'a_file_name',
-            'MapExports': 'true',
-            'MapFileName': 'a_file_name',
-            'MergedIDLBaseFileName': 'a_file_name',
-            'MergeSections': 'a_string',
-            'MidlCommandFile': 'a_file_name',
-            'ModuleDefinitionFile': 'a_file_name',
-            'NoEntryPoint': 'true',
-            'OptimizeReferences': '',
-            'OutputFile': 'a_file_name',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': 'a_file_name',
-            'ProgramDatabaseFile': 'a_file_name',
-            'RandomizedBaseAddress': 'false',
-            'RegisterOutput': 'true',
-            'SetChecksum': 'true',
-            'ShowProgress': 'NotSet',
-            'StackCommitSize': 'a_string',
-            'StackReserveSize': 'a_string',
-            'StripPrivateSymbols': 'a_file_name',
-            'SubSystem': 'Windows',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'true',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNET': 'true',
-            'TargetMachine': 'MachineARM',
-            'TerminalServerAware': 'true',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'TypeLibraryResourceID': '33',
-            'UACExecutionLevel': 'HighestAvailable',
-            'UACUIAccess': 'true',
-            'Version': 'a_string'},
-        'ResourceCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'Culture': '0x03eb',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ResourceOutputFileName': 'a_string',
-            'ShowProgress': 'true',
-            'SuppressStartupBanner': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
-        'Midl': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'CPreprocessOptions': 'a_string',
-            'DefaultCharType': 'Unsigned',
-            'DllDataFileName': 'a_file_name',
-            'EnableErrorChecks': 'All',
-            'ErrorCheckAllocations': 'true',
-            'ErrorCheckBounds': 'true',
-            'ErrorCheckEnumRange': 'true',
-            'ErrorCheckRefPointers': 'true',
-            'ErrorCheckStubData': 'true',
-            'GenerateStublessProxies': 'true',
-            'GenerateTypeLibrary': 'true',
-            'HeaderFileName': 'a_file_name',
-            'IgnoreStandardIncludePath': 'true',
-            'InterfaceIdentifierFileName': 'a_file_name',
-            'MkTypLibCompatible': 'true',
-            'OutputDirectory': 'a_string',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ProxyFileName': 'a_file_name',
-            'RedirectOutputAndErrors': 'a_file_name',
-            'StructMemberAlignment': '4',
-            'SuppressStartupBanner': 'true',
-            'TargetEnvironment': 'Win32',
-            'TypeLibraryName': 'a_file_name',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'ValidateAllParameters': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '4'},
-        'Lib': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'ExportNamedFunctions': 'd1;d2;d3',
-            'ForceSymbolReferences': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OutputFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'UseUnicodeResponseFiles': 'true'},
-        'Manifest': {
-            'AdditionalManifestFiles': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AssemblyIdentity': 'a_string',
-            'ComponentFileName': 'a_file_name',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'a_string',
-            'OutputManifestFile': 'a_file_name',
-            'RegistrarScriptFile': 'a_file_name',
-            'ReplacementsFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'a_file_name',
-            'VerboseOutput': 'true'},
-        'ManifestResourceCompile': {
-            'ResourceOutputFileName': 'my_name'},
-        'ProjectReference': {
-            'LinkLibraryDependencies': 'true',
-            'UseLibraryDependencyInputs': 'false'},
-        '': {
-            'EmbedManifest': 'true',
-            'GenerateManifest': 'true',
-            'IgnoreImportLibrary': 'true',
-            'LinkIncremental': 'false'}}
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-  def testConvertToMSBuildSettings_actual(self):
-    """Tests the conversion of an actual project.
-
-    A VS2008 project with most of the options defined was created through the
-    VS2008 IDE.  It was then converted to VS2010.  The tool settings found in
-    the .vcproj and .vcxproj files were converted to the two dictionaries
-    msvs_settings and expected_msbuild_settings.
-
-    Note that for many settings, the VS2010 converter adds macros like
-    %(AdditionalIncludeDirectories) to make sure than inherited values are
-    included.  Since the Gyp projects we generate do not use inheritance,
-    we removed these macros.  They were:
-        ClCompile:
-            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
-            AdditionalOptions: ' %(AdditionalOptions)'
-            AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
-            DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
-            ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
-            ForcedUsingFiles: ';%(ForcedUsingFiles)',
-            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
-            UndefinePreprocessorDefinitions:
-                ';%(UndefinePreprocessorDefinitions)',
-        Link:
-            AdditionalDependencies: ';%(AdditionalDependencies)',
-            AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
-            AdditionalManifestDependencies:
-                ';%(AdditionalManifestDependencies)',
-            AdditionalOptions: ' %(AdditionalOptions)',
-            AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
-            AssemblyLinkResource: ';%(AssemblyLinkResource)',
-            DelayLoadDLLs: ';%(DelayLoadDLLs)',
-            EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
-            ForceSymbolReferences: ';%(ForceSymbolReferences)',
-            IgnoreSpecificDefaultLibraries:
-                ';%(IgnoreSpecificDefaultLibraries)',
-        ResourceCompile:
-            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
-            AdditionalOptions: ' %(AdditionalOptions)',
-            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
-        Manifest:
-            AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
-            AdditionalOptions: ' %(AdditionalOptions)',
-            InputResourceManifests: ';%(InputResourceManifests)',
-    """
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/more',
-            'AdditionalUsingDirectories': 'test',
-            'AssemblerListingLocation': '$(IntDir)\\a',
-            'AssemblerOutput': '1',
-            'BasicRuntimeChecks': '3',
-            'BrowseInformation': '1',
-            'BrowseInformationFile': '$(IntDir)\\e',
-            'BufferSecurityCheck': 'false',
-            'CallingConvention': '1',
-            'CompileAs': '1',
-            'DebugInformationFormat': '4',
-            'DefaultCharIsUnsigned': 'true',
-            'Detect64BitPortabilityProblems': 'true',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'abc',
-            'EnableEnhancedInstructionSet': '1',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnableFunctionLevelLinking': 'true',
-            'EnableIntrinsicFunctions': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': '2',
-            'ExceptionHandling': '2',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': '2',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': '1',
-            'ForceConformanceInForLoopScope': 'false',
-            'ForcedIncludeFiles': 'def',
-            'ForcedUsingFiles': 'ge',
-            'GeneratePreprocessedFile': '2',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': '1',
-            'KeepComments': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFile': '$(IntDir)\\b',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMP': 'true',
-            'Optimization': '3',
-            'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
-            'PrecompiledHeaderThrough': 'StdAfx.hd',
-            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
-            'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
-            'RuntimeLibrary': '3',
-            'RuntimeTypeInfo': 'false',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '3',
-            'SuppressStartupBanner': 'false',
-            'TreatWChar_tAsBuiltInType': 'false',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'wer',
-            'UseFullPaths': 'true',
-            'UsePrecompiledHeader': '0',
-            'UseUnicodeResponseFiles': 'false',
-            'WarnAsError': 'true',
-            'WarningLevel': '3',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': '$(IntDir)\\c'},
-        'VCLinkerTool': {
-            'AdditionalDependencies': 'zx',
-            'AdditionalLibraryDirectories': 'asd',
-            'AdditionalManifestDependencies': 's2',
-            'AdditionalOptions': '/mor2',
-            'AddModuleNamesToAssembly': 'd1',
-            'AllowIsolation': 'false',
-            'AssemblyDebug': '1',
-            'AssemblyLinkResource': 'd5',
-            'BaseAddress': '23423',
-            'CLRImageType': '3',
-            'CLRThreadAttribute': '1',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '0',
-            'DelayLoadDLLs': 'd4',
-            'DelaySign': 'true',
-            'Driver': '2',
-            'EmbedManagedResourceFile': 'd2',
-            'EnableCOMDATFolding': '1',
-            'EnableUAC': 'false',
-            'EntryPointSymbol': 'f5',
-            'ErrorReporting': '2',
-            'FixedBaseAddress': '1',
-            'ForceSymbolReferences': 'd3',
-            'FunctionOrder': 'fssdfsd',
-            'GenerateDebugInformation': 'true',
-            'GenerateManifest': 'false',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': '13',
-            'HeapReserveSize': '12',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreDefaultLibraryNames': 'flob;flok',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreImportLibrary': 'true',
-            'ImportLibrary': 'f4',
-            'KeyContainer': 'f7',
-            'KeyFile': 'f6',
-            'LargeAddressAware': '2',
-            'LinkIncremental': '0',
-            'LinkLibraryDependencies': 'false',
-            'LinkTimeCodeGeneration': '1',
-            'ManifestFile':
-                '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
-            'MapExports': 'true',
-            'MapFileName': 'd5',
-            'MergedIDLBaseFileName': 'f2',
-            'MergeSections': 'f5',
-            'MidlCommandFile': 'f1',
-            'ModuleDefinitionFile': 'sdsd',
-            'OptimizeForWindows98': '2',
-            'OptimizeReferences': '2',
-            'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
-            'ProgramDatabaseFile': 'Flob.pdb',
-            'RandomizedBaseAddress': '1',
-            'RegisterOutput': 'true',
-            'ResourceOnlyDLL': 'true',
-            'SetChecksum': 'false',
-            'ShowProgress': '1',
-            'StackCommitSize': '15',
-            'StackReserveSize': '14',
-            'StripPrivateSymbols': 'd3',
-            'SubSystem': '1',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'false',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNet': 'true',
-            'TargetMachine': '1',
-            'TerminalServerAware': '1',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'f3',
-            'TypeLibraryResourceID': '12',
-            'UACExecutionLevel': '2',
-            'UACUIAccess': 'true',
-            'UseLibraryDependencyInputs': 'true',
-            'UseUnicodeResponseFiles': 'false',
-            'Version': '333'},
-        'VCResourceCompilerTool': {
-            'AdditionalIncludeDirectories': 'f3',
-            'AdditionalOptions': '/more3',
-            'Culture': '3084',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
-            'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
-            'ShowProgress': 'true'},
-        'VCManifestTool': {
-            'AdditionalManifestFiles': 'sfsdfsd',
-            'AdditionalOptions': 'afdsdafsd',
-            'AssemblyIdentity': 'sddfdsadfsa',
-            'ComponentFileName': 'fsdfds',
-            'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
-            'EmbedManifest': 'false',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'asfsfdafs',
-            'ManifestResourceFile':
-                '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
-            'OutputManifestFile': '$(TargetPath).manifestdfs',
-            'RegistrarScriptFile': 'sdfsfd',
-            'ReplacementsFile': 'sdffsd',
-            'SuppressStartupBanner': 'false',
-            'TypeLibraryFile': 'sfsd',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'sfsd',
-            'UseFAT32Workaround': 'true',
-            'UseUnicodeResponseFiles': 'false',
-            'VerboseOutput': 'true'}}
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/more /J',
-            'AdditionalUsingDirectories': 'test',
-            'AssemblerListingLocation': '$(IntDir)a',
-            'AssemblerOutput': 'AssemblyCode',
-            'BasicRuntimeChecks': 'EnableFastChecks',
-            'BrowseInformation': 'true',
-            'BrowseInformationFile': '$(IntDir)e',
-            'BufferSecurityCheck': 'false',
-            'CallingConvention': 'FastCall',
-            'CompileAs': 'CompileAsC',
-            'DebugInformationFormat': 'EditAndContinue',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'abc',
-            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': 'Queue',
-            'ExceptionHandling': 'Async',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': 'Size',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': 'Strict',
-            'ForceConformanceInForLoopScope': 'false',
-            'ForcedIncludeFiles': 'def',
-            'ForcedUsingFiles': 'ge',
-            'FunctionLevelLinking': 'true',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': 'OnlyExplicitInline',
-            'IntrinsicFunctions': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFileName': '$(IntDir)b',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMPSupport': 'true',
-            'Optimization': 'Full',
-            'PrecompiledHeader': 'NotUsing',  # Actual conversion gives ''
-            'PrecompiledHeaderFile': 'StdAfx.hd',
-            'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
-            'PreprocessKeepComments': 'true',
-            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
-            'PreprocessSuppressLineNumbers': 'true',
-            'PreprocessToFile': 'true',
-            'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
-            'RuntimeLibrary': 'MultiThreadedDebugDLL',
-            'RuntimeTypeInfo': 'false',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '4Bytes',
-            'SuppressStartupBanner': 'false',
-            'TreatWarningAsError': 'true',
-            'TreatWChar_tAsBuiltInType': 'false',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'wer',
-            'UseFullPaths': 'true',
-            'WarningLevel': 'Level3',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': '$(IntDir)c'},
-        'Link': {
-            'AdditionalDependencies': 'zx',
-            'AdditionalLibraryDirectories': 'asd',
-            'AdditionalManifestDependencies': 's2',
-            'AdditionalOptions': '/mor2',
-            'AddModuleNamesToAssembly': 'd1',
-            'AllowIsolation': 'false',
-            'AssemblyDebug': 'true',
-            'AssemblyLinkResource': 'd5',
-            'BaseAddress': '23423',
-            'CLRImageType': 'ForceSafeILImage',
-            'CLRThreadAttribute': 'MTAThreadingAttribute',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '',
-            'DelayLoadDLLs': 'd4',
-            'DelaySign': 'true',
-            'Driver': 'UpOnly',
-            'EmbedManagedResourceFile': 'd2',
-            'EnableCOMDATFolding': 'false',
-            'EnableUAC': 'false',
-            'EntryPointSymbol': 'f5',
-            'FixedBaseAddress': 'false',
-            'ForceSymbolReferences': 'd3',
-            'FunctionOrder': 'fssdfsd',
-            'GenerateDebugInformation': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': '13',
-            'HeapReserveSize': '12',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreSpecificDefaultLibraries': 'flob;flok',
-            'ImportLibrary': 'f4',
-            'KeyContainer': 'f7',
-            'KeyFile': 'f6',
-            'LargeAddressAware': 'true',
-            'LinkErrorReporting': 'QueueForNextLogin',
-            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
-            'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
-            'MapExports': 'true',
-            'MapFileName': 'd5',
-            'MergedIDLBaseFileName': 'f2',
-            'MergeSections': 'f5',
-            'MidlCommandFile': 'f1',
-            'ModuleDefinitionFile': 'sdsd',
-            'NoEntryPoint': 'true',
-            'OptimizeReferences': 'true',
-            'OutputFile': '$(OutDir)$(ProjectName)2.exe',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
-            'ProgramDatabaseFile': 'Flob.pdb',
-            'RandomizedBaseAddress': 'false',
-            'RegisterOutput': 'true',
-            'SetChecksum': 'false',
-            'ShowProgress': 'LinkVerbose',
-            'StackCommitSize': '15',
-            'StackReserveSize': '14',
-            'StripPrivateSymbols': 'd3',
-            'SubSystem': 'Console',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'false',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNET': 'true',
-            'TargetMachine': 'MachineX86',
-            'TerminalServerAware': 'false',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'f3',
-            'TypeLibraryResourceID': '12',
-            'UACExecutionLevel': 'RequireAdministrator',
-            'UACUIAccess': 'true',
-            'Version': '333'},
-        'ResourceCompile': {
-            'AdditionalIncludeDirectories': 'f3',
-            'AdditionalOptions': '/more3',
-            'Culture': '0x0c0c',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
-            'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
-            'ShowProgress': 'true'},
-        'Manifest': {
-            'AdditionalManifestFiles': 'sfsdfsd',
-            'AdditionalOptions': 'afdsdafsd',
-            'AssemblyIdentity': 'sddfdsadfsa',
-            'ComponentFileName': 'fsdfds',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'asfsfdafs',
-            'OutputManifestFile': '$(TargetPath).manifestdfs',
-            'RegistrarScriptFile': 'sdfsfd',
-            'ReplacementsFile': 'sdffsd',
-            'SuppressStartupBanner': 'false',
-            'TypeLibraryFile': 'sfsd',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'sfsd',
-            'VerboseOutput': 'true'},
-        'ProjectReference': {
-            'LinkLibraryDependencies': 'false',
-            'UseLibraryDependencyInputs': 'true'},
-        '': {
-            'EmbedManifest': 'false',
-            'GenerateManifest': 'false',
-            'IgnoreImportLibrary': 'true',
-            'LinkIncremental': ''
-        },
-        'ManifestResourceCompile': {
-            'ResourceOutputFileName':
-                '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
-    }
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-
-if __name__ == '__main__':
-  unittest.main()
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py 2013-07-02 17:57:25.000000000 +0000
17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.common -import gyp.easy_xml as easy_xml - - -class Writer(object): - """Visual Studio XML tool file writer.""" - - def __init__(self, tool_file_path, name): - """Initializes the tool file. - - Args: - tool_file_path: Path to the tool file. - name: Name of the tool file. - """ - self.tool_file_path = tool_file_path - self.name = name - self.rules_section = ['Rules'] - - def AddCustomBuildRule(self, name, cmd, description, - additional_dependencies, - outputs, extensions): - """Adds a rule to the tool file. - - Args: - name: Name of the rule. - description: Description of the rule. - cmd: Command line of the rule. - additional_dependencies: other files which may trigger the rule. - outputs: outputs of the rule. - extensions: extensions handled by the rule. - """ - rule = ['CustomBuildRule', - {'Name': name, - 'ExecutionDescription': description, - 'CommandLine': cmd, - 'Outputs': ';'.join(outputs), - 'FileExtensions': ';'.join(extensions), - 'AdditionalDependencies': - ';'.join(additional_dependencies) - }] - self.rules_section.append(rule) - - def WriteIfChanged(self): - """Writes the tool file.""" - content = ['VisualStudioToolFile', - {'Version': '8.00', - 'Name': self.name - }, - self.rules_section - ] - easy_xml.WriteXmlIfChanged(content, self.tool_file_path, - encoding="Windows-1252") diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import os -import re -import socket # for gethostname - -import gyp.common -import gyp.easy_xml as easy_xml - - -#------------------------------------------------------------------------------ - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. Note that this happens at GYP time, so anything needing to - be built needs to have a full path.""" - if '/' in command or '\\' in command: - # If the command already has path elements (either relative or - # absolute), then assume it is constructed properly. - return command - else: - # Search through the path list and find an existing file that - # we can access. 
- paths = os.environ.get('PATH','').split(os.pathsep) - for path in paths: - item = os.path.join(path, command) - if os.path.isfile(item) and os.access(item, os.X_OK): - return item - return command - -def _QuoteWin32CommandLineArgs(args): - new_args = [] - for arg in args: - # Replace all double-quotes with double-double-quotes to escape - # them for cmd shell, and then quote the whole thing if there - # are any. - if arg.find('"') != -1: - arg = '""'.join(arg.split('"')) - arg = '"%s"' % arg - - # Otherwise, if there are any spaces, quote the whole arg. - elif re.search(r'[ \t\n]', arg): - arg = '"%s"' % arg - new_args.append(arg) - return new_args - -class Writer(object): - """Visual Studio XML user user file writer.""" - - def __init__(self, user_file_path, version, name): - """Initializes the user file. - - Args: - user_file_path: Path to the user file. - version: Version info. - name: Name of the user file. - """ - self.user_file_path = user_file_path - self.version = version - self.name = name - self.configurations = {} - - def AddConfig(self, name): - """Adds a configuration to the project. - - Args: - name: Configuration name. - """ - self.configurations[name] = ['Configuration', {'Name': name}] - - def AddDebugSettings(self, config_name, command, environment = {}, - working_directory=""): - """Adds a DebugSettings node to the user file for a particular config. - - Args: - command: command line to run. First element in the list is the - executable. All elements of the command will be quoted if - necessary. - working_directory: other files which may trigger the rule. (optional) - """ - command = _QuoteWin32CommandLineArgs(command) - - abs_command = _FindCommandInPath(command[0]) - - if environment and isinstance(environment, dict): - env_list = ['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()] - environment = ' '.join(env_list) - else: - environment = '' - - n_cmd = ['DebugSettings', - {'Command': abs_command, - 'WorkingDirectory': working_directory, - 'CommandArguments': " ".join(command[1:]), - 'RemoteMachine': socket.gethostname(), - 'Environment': environment, - 'EnvironmentMerge': 'true', - # Currently these are all "dummy" values that we're just setting - # in the default manner that MSVS does it. We could use some of - # these to add additional capabilities, I suppose, but they might - # not have parity with other platforms then. - 'Attach': 'false', - 'DebuggerType': '3', # 'auto' debugger - 'Remote': '1', - 'RemoteCommand': '', - 'HttpUrl': '', - 'PDBPath': '', - 'SQLDebugging': '', - 'DebuggerFlavor': '0', - 'MPIRunCommand': '', - 'MPIRunArguments': '', - 'MPIRunWorkingDirectory': '', - 'ApplicationCommand': '', - 'ApplicationArguments': '', - 'ShimCommand': '', - 'MPIAcceptMode': '', - 'MPIAcceptFilter': '' - }] - - # Find the config, and add it if it doesn't exist. - if config_name not in self.configurations: - self.AddConfig(config_name) - - # Add the DebugSettings onto the appropriate config. 
- self.configurations[config_name].append(n_cmd) - - def WriteIfChanged(self): - """Writes the user file.""" - configs = ['Configurations'] - for config, spec in sorted(self.configurations.iteritems()): - configs.append(spec) - - content = ['VisualStudioUserFile', - {'Version': self.version.ProjectVersion(), - 'Name': self.name - }, - configs] - easy_xml.WriteXmlIfChanged(content, self.user_file_path, - encoding="Windows-1252") diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,212 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions shared amongst the Windows generators.""" - -import copy -import os - - -_TARGET_TYPE_EXT = { - 'executable': '.exe', - 'shared_library': '.dll' -} - - -def _GetLargePdbShimCcPath(): - """Returns the path of the large_pdb_shim.cc file.""" - this_dir = os.path.abspath(os.path.dirname(__file__)) - src_dir = os.path.abspath(os.path.join(this_dir, '..', '..')) - win_data_dir = os.path.join(src_dir, 'data', 'win') - large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc') - return large_pdb_shim_cc - - -def _DeepCopySomeKeys(in_dict, keys): - """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|. - - Arguments: - in_dict: The dictionary to copy. - keys: The keys to be copied. If a key is in this list and doesn't exist in - |in_dict| this is not an error. - Returns: - The partially deep-copied dictionary. - """ - d = {} - for key in keys: - if key not in in_dict: - continue - d[key] = copy.deepcopy(in_dict[key]) - return d - - -def _SuffixName(name, suffix): - """Add a suffix to the end of a target. - - Arguments: - name: name of the target (foo#target) - suffix: the suffix to be added - Returns: - Target name with suffix added (foo_suffix#target) - """ - parts = name.rsplit('#', 1) - parts[0] = '%s_%s' % (parts[0], suffix) - return '#'.join(parts) - - -def _ShardName(name, number): - """Add a shard number to the end of a target. - - Arguments: - name: name of the target (foo#target) - number: shard number - Returns: - Target name with shard added (foo_1#target) - """ - return _SuffixName(name, str(number)) - - -def ShardTargets(target_list, target_dicts): - """Shard some targets apart to work around the linkers limits. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - Returns: - Tuple of the new sharded versions of the inputs. - """ - # Gather the targets to shard, and how many pieces. - targets_to_shard = {} - for t in target_dicts: - shards = int(target_dicts[t].get('msvs_shard', 0)) - if shards: - targets_to_shard[t] = shards - # Shard target_list. - new_target_list = [] - for t in target_list: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - new_target_list.append(_ShardName(t, i)) - else: - new_target_list.append(t) - # Shard target_dict. 
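# [Editor's note] Illustrative sketch, not part of the patch: the shard
# naming convention implemented by _SuffixName/_ShardName above, where a
# qualified target 'foo#toolset' sharded N ways becomes foo_0#toolset
# through foo_N-1#toolset. The helper name below is hypothetical.
def shard_name(name, number):
    parts = name.rsplit('#', 1)            # keep the toolset suffix intact
    parts[0] = '%s_%s' % (parts[0], number)
    return '#'.join(parts)

print([shard_name('base/base.gyp:base#target', i) for i in range(3)])
# -> ['base/base.gyp:base_0#target', 'base/base.gyp:base_1#target',
#     'base/base.gyp:base_2#target']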
- new_target_dicts = {} - for t in target_dicts: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - name = _ShardName(t, i) - new_target_dicts[name] = copy.copy(target_dicts[t]) - new_target_dicts[name]['target_name'] = _ShardName( - new_target_dicts[name]['target_name'], i) - sources = new_target_dicts[name].get('sources', []) - new_sources = [] - for pos in range(i, len(sources), targets_to_shard[t]): - new_sources.append(sources[pos]) - new_target_dicts[name]['sources'] = new_sources - else: - new_target_dicts[t] = target_dicts[t] - # Shard dependencies. - for t in new_target_dicts: - dependencies = copy.copy(new_target_dicts[t].get('dependencies', [])) - new_dependencies = [] - for d in dependencies: - if d in targets_to_shard: - for i in range(targets_to_shard[d]): - new_dependencies.append(_ShardName(d, i)) - else: - new_dependencies.append(d) - new_target_dicts[t]['dependencies'] = new_dependencies - - return (new_target_list, new_target_dicts) - - -def InsertLargePdbShims(target_list, target_dicts, vars): - """Insert a shim target that forces the linker to use 4KB pagesize PDBs. - - This is a workaround for targets with PDBs greater than 1GB in size, the - limit for the 1KB pagesize PDBs created by the linker by default. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - Tuple of the shimmed version of the inputs. - """ - # Determine which targets need shimming. - targets_to_shim = [] - for t in target_dicts: - target_dict = target_dicts[t] - # We only want to shim targets that have msvs_large_pdb enabled. - if not int(target_dict.get('msvs_large_pdb', 0)): - continue - # This is intended for executable, shared_library and loadable_module - # targets where every configuration is set up to produce a PDB output. - # If any of these conditions is not true then the shim logic will fail - # below. - targets_to_shim.append(t) - - large_pdb_shim_cc = _GetLargePdbShimCcPath() - - for t in targets_to_shim: - target_dict = target_dicts[t] - target_name = target_dict.get('target_name') - - base_dict = _DeepCopySomeKeys(target_dict, - ['configurations', 'default_configuration', 'toolset']) - - # This is the dict for copying the source file (part of the GYP tree) - # to the intermediate directory of the project. This is necessary because - # we can't always build a relative path to the shim source file (on Windows - # GYP and the project may be on different drives), and Ninja hates absolute - # paths (it ends up generating the .obj and .obj.d alongside the source - # file, polluting GYPs tree). - copy_suffix = '_large_pdb_copy' - copy_target_name = target_name + '_' + copy_suffix - full_copy_target_name = _SuffixName(t, copy_suffix) - shim_cc_basename = os.path.basename(large_pdb_shim_cc) - shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name - shim_cc_path = shim_cc_dir + '/' + shim_cc_basename - copy_dict = copy.deepcopy(base_dict) - copy_dict['target_name'] = copy_target_name - copy_dict['type'] = 'none' - copy_dict['sources'] = [ large_pdb_shim_cc ] - copy_dict['copies'] = [{ - 'destination': shim_cc_dir, - 'files': [ large_pdb_shim_cc ] - }] - - # This is the dict for the PDB generating shim target. It depends on the - # copy target. 
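# [Editor's note] Illustrative, not part of the patch: the source striping in
# ShardTargets above assigns shard i every shards-th source starting at index
# i, i.e. a round-robin deal, which Python slicing expresses directly.
sources = ['a.cc', 'b.cc', 'c.cc', 'd.cc', 'e.cc']
shards = 2
for i in range(shards):
    print(i, sources[i::shards])
# -> 0 ['a.cc', 'c.cc', 'e.cc']
#    1 ['b.cc', 'd.cc']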
- shim_suffix = '_large_pdb_shim' - shim_target_name = target_name + '_' + shim_suffix - full_shim_target_name = _SuffixName(t, shim_suffix) - shim_dict = copy.deepcopy(base_dict) - shim_dict['target_name'] = shim_target_name - shim_dict['type'] = 'static_library' - shim_dict['sources'] = [ shim_cc_path ] - shim_dict['dependencies'] = [ full_copy_target_name ] - - # Set up the shim to output its PDB to the same location as the final linker - # target. - for config in shim_dict.get('configurations').itervalues(): - msvs = config.setdefault('msvs_settings') - - linker = msvs.pop('VCLinkerTool') # We want to clear this dict. - pdb_path = linker.get('ProgramDatabaseFile') - - compiler = msvs.setdefault('VCCLCompilerTool', {}) - compiler.setdefault('DebugInformationFormat', '3') - compiler.setdefault('ProgramDataBaseFileName', pdb_path) - - # Add the new targets. - target_list.append(full_copy_target_name) - target_list.append(full_shim_target_name) - target_dicts[full_copy_target_name] = copy_dict - target_dicts[full_shim_target_name] = shim_dict - - # Update the original target to depend on the shim target. - target_dict.setdefault('dependencies', []).append(full_shim_target_name) - - return (target_list, target_dicts) \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,373 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Handle version information related to Visual Studio.""" - -import errno -import os -import re -import subprocess -import sys -import gyp - - -class VisualStudioVersion(object): - """Information regarding a version of Visual Studio.""" - - def __init__(self, short_name, description, - solution_version, project_version, flat_sln, uses_vcxproj, - path, sdk_based, default_toolset=None): - self.short_name = short_name - self.description = description - self.solution_version = solution_version - self.project_version = project_version - self.flat_sln = flat_sln - self.uses_vcxproj = uses_vcxproj - self.path = path - self.sdk_based = sdk_based - self.default_toolset = default_toolset - - def ShortName(self): - return self.short_name - - def Description(self): - """Get the full description of the version.""" - return self.description - - def SolutionVersion(self): - """Get the version number of the sln files.""" - return self.solution_version - - def ProjectVersion(self): - """Get the version number of the vcproj or vcxproj files.""" - return self.project_version - - def FlatSolution(self): - return self.flat_sln - - def UsesVcxproj(self): - """Returns true if this version uses a vcxproj file.""" - return self.uses_vcxproj - - def ProjectExtension(self): - """Returns the file extension for the project.""" - return self.uses_vcxproj and '.vcxproj' or '.vcproj' - - def Path(self): - """Returns the path to Visual Studio installation.""" - return self.path - - def ToolPath(self, tool): - """Returns the path to a given compiler tool.
""" - return os.path.normpath(os.path.join(self.path, "VC/bin", tool)) - - def DefaultToolset(self): - """Returns the msbuild toolset version that will be used in the absence - of a user override.""" - return self.default_toolset - - def SetupScript(self, target_arch): - """Returns a command (with arguments) to be used to set up the - environment.""" - # Check if we are running in the SDK command line environment and use - # the setup script from the SDK if so. |target_arch| should be either - # 'x86' or 'x64'. - assert target_arch in ('x86', 'x64') - sdk_dir = os.environ.get('WindowsSDKDir') - if self.sdk_based and sdk_dir: - return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')), - '/' + target_arch] - else: - # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls - # vcvars32, which it can only find if VS??COMNTOOLS is set, which it - # isn't always. - if target_arch == 'x86': - return [os.path.normpath( - os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))] - else: - assert target_arch == 'x64' - arg = 'x86_amd64' - if (os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or - os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'): - # Use the 64-on-64 compiler if we can. - arg = 'amd64' - return [os.path.normpath( - os.path.join(self.path, 'VC/vcvarsall.bat')), arg] - - -def _RegistryQueryBase(sysdir, key, value): - """Use reg.exe to read a particular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - sysdir: The system subdirectory to attempt to launch reg.exe from. - key: The registry key to read from. - value: The particular value to read. - Return: - stdout from reg.exe, or None for failure. - """ - # Skip if not on Windows or Python Win32 setup issue - if sys.platform not in ('win32', 'cygwin'): - return None - # Setup params to pass to and attempt to launch reg.exe - cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'), - 'query', key] - if value: - cmd.extend(['/v', value]) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid - # Note that the error text may be in [1] in some cases - text = p.communicate()[0] - # Check return code from reg.exe; officially 0==success and 1==error - if p.returncode: - return None - return text - - -def _RegistryQuery(key, value=None): - """Use reg.exe to read a particular key through _RegistryQueryBase. - - First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If - that fails, it falls back to System32. Sysnative is available on Vista and - up and available on Windows Server 2003 and XP through KB patch 942589. Note - that Sysnative will always fail if using 64-bit python due to it being a - virtual directory and System32 will work correctly in the first place. - - KB 942589 - http://support.microsoft.com/kb/942589/en-us. - - Arguments: - key: The registry key. - value: The particular registry value to read (optional). - Return: - stdout from reg.exe, or None for failure. - """ - text = None - try: - text = _RegistryQueryBase('Sysnative', key, value) - except OSError, e: - if e.errno == errno.ENOENT: - text = _RegistryQueryBase('System32', key, value) - else: - raise - return text - - -def _RegistryGetValue(key, value): - """Use reg.exe to obtain the value of a registry key. - - Args: - key: The registry key. - value: The particular registry value to read. 
- Return: - contents of the registry key's value, or None on failure. - """ - text = _RegistryQuery(key, value) - if not text: - return None - # Extract value. - match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text) - if not match: - return None - return match.group(1) - - -def _RegistryKeyExists(key): - """Use reg.exe to see if a key exists. - - Args: - key: The registry key to check. - Return: - True if the key exists - """ - if not _RegistryQuery(key): - return False - return True - - -def _CreateVersion(name, path, sdk_based=False): - """Sets up MSVS project generation. - - Setup is based off the GYP_MSVS_VERSION environment variable or whatever is - autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is - passed in that doesn't match a value in versions python will throw a error. - """ - if path: - path = os.path.normpath(path) - versions = { - '2012': VisualStudioVersion('2012', - 'Visual Studio 2012', - solution_version='12.00', - project_version='4.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v110'), - '2012e': VisualStudioVersion('2012e', - 'Visual Studio 2012', - solution_version='12.00', - project_version='4.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v110'), - '2010': VisualStudioVersion('2010', - 'Visual Studio 2010', - solution_version='11.00', - project_version='4.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based), - '2010e': VisualStudioVersion('2010e', - 'Visual Studio 2010', - solution_version='11.00', - project_version='4.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based), - '2008': VisualStudioVersion('2008', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2008e': VisualStudioVersion('2008e', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2005': VisualStudioVersion('2005', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2005e': VisualStudioVersion('2005e', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - } - return versions[str(name)] - - -def _ConvertToCygpath(path): - """Convert to cygwin path if we are using cygwin.""" - if sys.platform == 'cygwin': - p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE) - path = p.communicate()[0].strip() - return path - - -def _DetectVisualStudioVersions(versions_to_check, force_express): - """Collect the list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. - Only versions 8-10 are considered. - Possibilities are: - 2005(e) - Visual Studio 2005 (8) - 2008(e) - Visual Studio 2008 (9) - 2010(e) - Visual Studio 2010 (10) - 2012(e) - Visual Studio 2012 (11) - Where (e) is e for express editions of MSVS and blank otherwise. 
- """ - version_to_year = { - '8.0': '2005', '9.0': '2008', '10.0': '2010', '11.0': '2012'} - versions = [] - for version in versions_to_check: - # Old method of searching for which VS version is installed - # We don't use the 2010-encouraged-way because we also want to get the - # path to the binaries, which it doesn't offer. - keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version, - r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version, - r'HKLM\Software\Microsoft\VCExpress\%s' % version, - r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], 'InstallDir') - if not path: - continue - path = _ConvertToCygpath(path) - # Check for full. - full_path = os.path.join(path, 'devenv.exe') - express_path = os.path.join(path, 'vcexpress.exe') - if not force_express and os.path.exists(full_path): - # Add this one. - versions.append(_CreateVersion(version_to_year[version], - os.path.join(path, '..', '..'))) - # Check for express. - elif os.path.exists(express_path): - # Add this one. - versions.append(_CreateVersion(version_to_year[version] + 'e', - os.path.join(path, '..', '..'))) - - # The old method above does not work when only SDK is installed. - keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7', - r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7'] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], version) - if not path: - continue - path = _ConvertToCygpath(path) - versions.append(_CreateVersion(version_to_year[version] + 'e', - os.path.join(path, '..'), sdk_based=True)) - - return versions - - -def SelectVisualStudioVersion(version='auto'): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. - if version == 'auto': - version = os.environ.get('GYP_MSVS_VERSION', 'auto') - version_map = { - 'auto': ('10.0', '9.0', '8.0', '11.0'), - '2005': ('8.0',), - '2005e': ('8.0',), - '2008': ('9.0',), - '2008e': ('9.0',), - '2010': ('10.0',), - '2010e': ('10.0',), - '2012': ('11.0',), - '2012e': ('11.0',), - } - override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH') - if override_path: - msvs_version = os.environ.get('GYP_MSVS_VERSION') - if not msvs_version or 'e' not in msvs_version: - raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be ' - 'set to an "e" version (e.g. 2010e)') - return _CreateVersion(msvs_version, override_path, sdk_based=True) - version = str(version) - versions = _DetectVisualStudioVersions(version_map[version], 'e' in version) - if not versions: - if version == 'auto': - # Default to 2005 if we couldn't find anything - return _CreateVersion('2005', None) - else: - return _CreateVersion(version, None) - return versions[0] diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/SCons.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/SCons.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/SCons.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/SCons.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,199 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -SCons generator. 
- -This contains class definitions and supporting functions for generating -pieces of SCons files for the different types of GYP targets. -""" - -import os - - -def WriteList(fp, list, prefix='', - separator=',\n ', - preamble=None, - postamble=None): - fp.write(preamble or '') - fp.write((separator or ' ').join([prefix + l for l in list])) - fp.write(postamble or '') - - -class TargetBase(object): - """ - Base class for a SCons representation of a GYP target. - """ - is_ignored = False - target_prefix = '' - target_suffix = '' - def __init__(self, spec): - self.spec = spec - def full_product_name(self): - """ - Returns the full name of the product being built: - - * Uses 'product_name' if it's set, else prefix + 'target_name'. - * Prepends 'product_dir' if set. - * Appends SCons suffix variables for the target type (or - product_extension). - """ - suffix = self.target_suffix - product_extension = self.spec.get('product_extension') - if product_extension: - suffix = '.' + product_extension - prefix = self.spec.get('product_prefix', self.target_prefix) - name = self.spec['target_name'] - name = prefix + self.spec.get('product_name', name) + suffix - product_dir = self.spec.get('product_dir') - if product_dir: - name = os.path.join(product_dir, name) - else: - name = os.path.join(self.out_dir, name) - return name - - def write_input_files(self, fp): - """ - Writes the definition of the input files (sources). - """ - sources = self.spec.get('sources') - if not sources: - fp.write('\ninput_files = []\n') - return - preamble = '\ninput_files = [\n ' - postamble = ',\n]\n' - WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble) - - def builder_call(self): - """ - Returns the actual SCons builder call to build this target. - """ - name = self.full_product_name() - return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name) - def write_target(self, fp, src_dir='', pre=''): - """ - Writes the lines necessary to build this target. - """ - fp.write('\n' + pre) - fp.write('_outputs = %s\n' % self.builder_call()) - fp.write('target_files.extend(_outputs)\n') - - -class NoneTarget(TargetBase): - """ - A GYP target type of 'none', implicitly or explicitly. - """ - def write_target(self, fp, src_dir='', pre=''): - fp.write('\ntarget_files.extend(input_files)\n') - - -class SettingsTarget(TargetBase): - """ - A GYP target type of 'settings'. - """ - is_ignored = True - - -compilable_sources_template = """ -_result = [] -for infile in input_files: - if env.compilable(infile): - if (type(infile) == type('') - and (infile.startswith(%(src_dir)r) - or not os.path.isabs(env.subst(infile)))): - # Force files below the build directory by replacing all '..' - # elements in the path with '__': - base, ext = os.path.splitext(os.path.normpath(infile)) - base = [d == '..' and '__' or d for d in base.split('/')] - base = os.path.join(*base) - object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base - if not infile.startswith(%(src_dir)r): - infile = %(src_dir)r + infile - infile = env.%(name)s(object, infile)[0] - else: - infile = env.%(name)s(infile)[0] - _result.append(infile) -input_files = _result -""" - -class CompilableSourcesTargetBase(TargetBase): - """ - An abstract base class for targets that compile their source files. - - We explicitly transform compilable files into object files, - even though SCons could infer that for us, because we want - to control where the object file ends up. (The implicit rules - in SCons always put the object file next to the source file.) 
- """ - intermediate_builder_name = None - def write_target(self, fp, src_dir='', pre=''): - if self.intermediate_builder_name is None: - raise NotImplementedError - if src_dir and not src_dir.endswith('/'): - src_dir += '/' - variables = { - 'src_dir': src_dir, - 'name': self.intermediate_builder_name, - } - fp.write(compilable_sources_template % variables) - super(CompilableSourcesTargetBase, self).write_target(fp) - - -class ProgramTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'executable'. - """ - builder_name = 'GypProgram' - intermediate_builder_name = 'StaticObject' - target_prefix = '${PROGPREFIX}' - target_suffix = '${PROGSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -class StaticLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'static_library'. - """ - builder_name = 'GypStaticLibrary' - intermediate_builder_name = 'StaticObject' - target_prefix = '${LIBPREFIX}' - target_suffix = '${LIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class SharedLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'shared_library'. - """ - builder_name = 'GypSharedLibrary' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class LoadableModuleTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'loadable_module'. - """ - builder_name = 'GypLoadableModule' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -TargetMap = { - None : NoneTarget, - 'none' : NoneTarget, - 'settings' : SettingsTarget, - 'executable' : ProgramTarget, - 'static_library' : StaticLibraryTarget, - 'shared_library' : SharedLibraryTarget, - 'loadable_module' : LoadableModuleTarget, -} - - -def Target(spec): - return TargetMap[spec.get('type')](spec) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/__init__.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/__init__.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/__init__.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/__init__.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,532 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import gyp.input -import optparse -import os.path -import re -import shlex -import sys -import traceback -from gyp.common import GypError - -# Default debug modes for GYP -debug = {} - -# List of "official" debug modes, but you can use anything you like. -DEBUG_GENERAL = 'general' -DEBUG_VARIABLES = 'variables' -DEBUG_INCLUDES = 'includes' - - -def DebugOutput(mode, message, *args): - if 'all' in gyp.debug or mode in gyp.debug: - ctx = ('unknown', 0, 'unknown') - try: - f = traceback.extract_stack(limit=2) - if f: - ctx = f[0][:3] - except: - pass - if args: - message %= args - print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), - ctx[1], ctx[2], message) - -def FindBuildFiles(): - extension = '.gyp' - files = os.listdir(os.getcwd()) - build_files = [] - for file in files: - if file.endswith(extension): - build_files.append(file) - return build_files - - -def Load(build_files, format, default_variables={}, - includes=[], depth='.', params=None, check=False, - circular_check=True): - """ - Loads one or more specified build files. 
- default_variables and includes will be copied before use. - Returns the generator for the specified format and the - data returned by loading the specified build files. - """ - if params is None: - params = {} - - flavor = None - if '-' in format: - format, params['flavor'] = format.split('-', 1) - - default_variables = copy.copy(default_variables) - - # Default variables provided by this program and its modules should be - # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, - # avoiding collisions with user and automatic variables. - default_variables['GENERATOR'] = format - - # Format can be a custom python file, or by default the name of a module - # within gyp.generator. - if format.endswith('.py'): - generator_name = os.path.splitext(format)[0] - path, generator_name = os.path.split(generator_name) - - # Make sure the path to the custom generator is in sys.path - # Don't worry about removing it once we are done. Keeping the path - # to each generator that is used in sys.path is likely harmless and - # arguably a good idea. - path = os.path.abspath(path) - if path not in sys.path: - sys.path.insert(0, path) - else: - generator_name = 'gyp.generator.' + format - - # These parameters are passed in order (as opposed to by key) - # because ActivePython cannot handle key parameters to __import__. - generator = __import__(generator_name, globals(), locals(), generator_name) - for (key, val) in generator.generator_default_variables.items(): - default_variables.setdefault(key, val) - - # Give the generator the opportunity to set additional variables based on - # the params it will receive in the output phase. - if getattr(generator, 'CalculateVariables', None): - generator.CalculateVariables(default_variables, params) - - # Give the generator the opportunity to set generator_input_info based on - # the params it will receive in the output phase. - if getattr(generator, 'CalculateGeneratorInputInfo', None): - generator.CalculateGeneratorInputInfo(params) - - # Fetch the generator specific info that gets fed to input, we use getattr - # so we can default things and the generators only have to provide what - # they need. - generator_input_info = { - 'generator_wants_absolute_build_file_paths': - getattr(generator, 'generator_wants_absolute_build_file_paths', False), - 'generator_handles_variants': - getattr(generator, 'generator_handles_variants', False), - 'non_configuration_keys': - getattr(generator, 'generator_additional_non_configuration_keys', []), - 'path_sections': - getattr(generator, 'generator_additional_path_sections', []), - 'extra_sources_for_rules': - getattr(generator, 'generator_extra_sources_for_rules', []), - 'generator_supports_multiple_toolsets': - getattr(generator, 'generator_supports_multiple_toolsets', False), - 'generator_wants_static_library_dependencies_adjusted': - getattr(generator, - 'generator_wants_static_library_dependencies_adjusted', True), - 'generator_wants_sorted_dependencies': - getattr(generator, 'generator_wants_sorted_dependencies', False), - } - - # Process the input specific to this generator. - result = gyp.input.Load(build_files, default_variables, includes[:], - depth, generator_input_info, check, circular_check, - params['parallel']) - return [generator] + result - -def NameValueListToDict(name_value_list): - """ - Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary - of the pairs. If a string is simply NAME, then the value in the dictionary - is set to True. 
If VALUE can be converted to an integer, it is. - """ - result = { } - for item in name_value_list: - tokens = item.split('=', 1) - if len(tokens) == 2: - # If we can make it an int, use that, otherwise, use the string. - try: - token_value = int(tokens[1]) - except ValueError: - token_value = tokens[1] - # Set the variable to the supplied value. - result[tokens[0]] = token_value - else: - # No value supplied, treat it as a boolean and set it. - result[tokens[0]] = True - return result - -def ShlexEnv(env_name): - flags = os.environ.get(env_name, []) - if flags: - flags = shlex.split(flags) - return flags - -def FormatOpt(opt, value): - if opt.startswith('--'): - return '%s=%s' % (opt, value) - return opt + value - -def RegenerateAppendFlag(flag, values, predicate, env_name, options): - """Regenerate a list of command line flags, for an option of action='append'. - - The |env_name|, if given, is checked in the environment and used to generate - an initial list of options, then the options that were specified on the - command line (given in |values|) are appended. This matches the handling of - environment variables and command line flags where command line flags override - the environment, while not requiring the environment to be set when the flags - are used again. - """ - flags = [] - if options.use_environment and env_name: - for flag_value in ShlexEnv(env_name): - value = FormatOpt(flag, predicate(flag_value)) - if value in flags: - flags.remove(value) - flags.append(value) - if values: - for flag_value in values: - flags.append(FormatOpt(flag, predicate(flag_value))) - return flags - -def RegenerateFlags(options): - """Given a parsed options object, and taking the environment variables into - account, returns a list of flags that should regenerate an equivalent options - object (even in the absence of the environment variables.) - - Any path options will be normalized relative to depth. - - The format flag is not included, as it is assumed the calling generator will - set that as appropriate. - """ - def FixPath(path): - path = gyp.common.FixIfRelativePath(path, options.depth) - if not path: - return os.path.curdir - return path - - def Noop(value): - return value - - # We always want to ignore the environment when regenerating, to avoid - # duplicate or changed flags in the environment at the time of regeneration. - flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): - opt = metadata['opt'] - value = getattr(options, name) - value_predicate = metadata['type'] == 'path' and FixPath or Noop - action = metadata['action'] - env_name = metadata['env_name'] - if action == 'append': - flags.extend(RegenerateAppendFlag(opt, value, value_predicate, - env_name, options)) - elif action in ('store', None): # None is a synonym for 'store'. 
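# [Editor's note] Illustrative, not part of the patch: expected behavior of
# NameValueListToDict above for -D/GYP_DEFINES style inputs, assuming the
# function is available in this module's scope; bare names map to True and
# integer-looking values are converted to ints.
print(NameValueListToDict(['OS=linux', 'werror=', 'jobs=4', 'component']))
# -> {'OS': 'linux', 'werror': '', 'jobs': 4, 'component': True}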
- if value: - flags.append(FormatOpt(opt, value_predicate(value))) - elif options.use_environment and env_name and os.environ.get(env_name): - flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) - elif action in ('store_true', 'store_false'): - if ((action == 'store_true' and value) or - (action == 'store_false' and not value)): - flags.append(opt) - elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' - 'for %s flag %r env_name %r' % (action, opt, - env_name)) - else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) - - return flags - -class RegeneratableOptionParser(optparse.OptionParser): - def __init__(self): - self.__regeneratable_options = {} - optparse.OptionParser.__init__(self) - - def add_option(self, *args, **kw): - """Add an option to the parser. - - This accepts the same arguments as OptionParser.add_option, plus the - following: - regenerate: can be set to False to prevent this option from being included - in regeneration. - env_name: name of environment variable that additional values for this - option come from. - type: adds type='path', to tell the regenerator that the values of - this option need to be made relative to options.depth - """ - env_name = kw.pop('env_name', None) - if 'dest' in kw and kw.pop('regenerate', True): - dest = kw['dest'] - - # The path type is needed for regenerating, for optparse we can just treat - # it as a string. - type = kw.get('type') - if type == 'path': - kw['type'] = 'string' - - self.__regeneratable_options[dest] = { - 'action': kw.get('action'), - 'type': type, - 'env_name': env_name, - 'opt': args[0], - } - - optparse.OptionParser.add_option(self, *args, **kw) - - def parse_args(self, *args): - values, args = optparse.OptionParser.parse_args(self, *args) - values._regeneration_metadata = self.__regeneratable_options - return values, args - -def gyp_main(args): - my_name = os.path.basename(sys.argv[0]) - - parser = RegeneratableOptionParser() - usage = 'usage: %s [options ...] [build_file ...]' - parser.set_usage(usage.replace('%s', '%prog')) - parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', - env_name='GYP_DEFINES', - help='sets variable VAR to value VAL') - parser.add_option('-f', '--format', dest='formats', action='append', - env_name='GYP_GENERATORS', regenerate=False, - help='output formats to generate') - parser.add_option('--msvs-version', dest='msvs_version', - regenerate=False, - help='Deprecated; use -G msvs_version=MSVS_VERSION instead') - parser.add_option('-I', '--include', dest='includes', action='append', - metavar='INCLUDE', type='path', - help='files to include in all loaded .gyp files') - parser.add_option('--depth', dest='depth', metavar='PATH', type='path', - help='set DEPTH gyp variable to a relative path to PATH') - parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', - action='append', default=[], help='turn on a debugging ' - 'mode for debugging GYP. 
Supported modes are "variables", ' - '"includes" and "general" or "all" for all of them.') - parser.add_option('-S', '--suffix', dest='suffix', default='', - help='suffix to add to generated files') - parser.add_option('-G', dest='generator_flags', action='append', default=[], - metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', - help='sets generator flag FLAG to VAL') - parser.add_option('--generator-output', dest='generator_output', - action='store', default=None, metavar='DIR', type='path', - env_name='GYP_GENERATOR_OUTPUT', - help='puts generated build files under DIR') - parser.add_option('--ignore-environment', dest='use_environment', - action='store_false', default=True, regenerate=False, - help='do not read options from environment variables') - parser.add_option('--check', dest='check', action='store_true', - help='check format of gyp files') - parser.add_option('--parallel', action='store_true', - env_name='GYP_PARALLEL', - help='Use multiprocessing for speed (experimental)') - parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', - default=None, metavar='DIR', type='path', - help='directory to use as the root of the source tree') - parser.add_option('--build', dest='configs', action='append', - help='configuration for build after project generation') - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. - parser.add_option('--no-circular-check', dest='circular_check', - action='store_false', default=True, regenerate=False, - help="don't check for circular relationships between files") - - # We read a few things from ~/.gyp, so set up a var for that. - home_vars = ['HOME'] - if sys.platform in ('cygwin', 'win32'): - home_vars.append('USERPROFILE') - home = None - home_dot_gyp = None - for home_var in home_vars: - home = os.getenv(home_var) - if home != None: - home_dot_gyp = os.path.join(home, '.gyp') - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - else: - break - - # TODO(thomasvl): add support for ~/.gyp/defaults - - options, build_files_arg = parser.parse_args(args) - build_files = build_files_arg - - if not options.formats: - # If no format was given on the command line, then check the env variable. - generate_formats = [] - if options.use_environment: - generate_formats = os.environ.get('GYP_GENERATORS', []) - if generate_formats: - generate_formats = re.split('[\s,]', generate_formats) - if generate_formats: - options.formats = generate_formats - else: - # Nothing in the variable, default based on platform. - if sys.platform == 'darwin': - options.formats = ['xcode'] - elif sys.platform in ('win32', 'cygwin'): - options.formats = ['msvs'] - else: - options.formats = ['make'] - - if not options.generator_output and options.use_environment: - g_o = os.environ.get('GYP_GENERATOR_OUTPUT') - if g_o: - options.generator_output = g_o - - if not options.parallel and options.use_environment: - p = os.environ.get('GYP_PARALLEL') - options.parallel = bool(p and p != '0') - - for mode in options.debug: - gyp.debug[mode] = 1 - - # Do an extra check to avoid work when we're not debugging. 
- if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, 'running with these options:') - for option, value in sorted(options.__dict__.items()): - if option[0] == '_': - continue - if isinstance(value, basestring): - DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) - else: - DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) - - if not build_files: - build_files = FindBuildFiles() - if not build_files: - raise GypError((usage + '\n\n%s: error: no build_file') % - (my_name, my_name)) - - # TODO(mark): Chromium-specific hack! - # For Chromium, the gyp "depth" variable should always be a relative path - # to Chromium's top-level "src" directory. If no depth variable was set - # on the command line, try to find a "src" directory by looking at the - # absolute path to each build file's directory. The first "src" component - # found will be treated as though it were the path used for --depth. - if not options.depth: - for build_file in build_files: - build_file_dir = os.path.abspath(os.path.dirname(build_file)) - build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): - if build_file_dir_components[index] == 'src': - options.depth = os.path.sep.join(build_file_dir_components) - break - del build_file_dir_components[index] - - # If the inner loop found something, break without advancing to another - # build file. - if options.depth: - break - - if not options.depth: - raise GypError('Could not automatically locate src directory. This is' - 'a temporary Chromium feature that will be removed. Use' - '--depth as a workaround.') - - # If toplevel-dir is not set, we assume that depth is the root of our source - # tree. - if not options.toplevel_dir: - options.toplevel_dir = options.depth - - # -D on the command line sets variable defaults - D isn't just for define, - # it's for default. Perhaps there should be a way to force (-F?) a - # variable's value so that it can't be overridden by anything else. - cmdline_default_variables = {} - defines = [] - if options.use_environment: - defines += ShlexEnv('GYP_DEFINES') - if options.defines: - defines += options.defines - cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, - "cmdline_default_variables: %s", cmdline_default_variables) - - # Set up includes. - includes = [] - - # If ~/.gyp/include.gypi exists, it'll be forcibly included into every - # .gyp file that's loaded, before anything else is included. - if home_dot_gyp != None: - default_include = os.path.join(home_dot_gyp, 'include.gypi') - if os.path.exists(default_include): - print 'Using overrides found in ' + default_include - includes.append(default_include) - - # Command-line --include files come after the default include. - if options.includes: - includes.extend(options.includes) - - # Generator flags should be prefixed with the target generator since they - # are global across all generator runs. - gen_flags = [] - if options.use_environment: - gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS') - if options.generator_flags: - gen_flags += options.generator_flags - generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) - - # TODO: Remove this and the option after we've gotten folks to move to the - # generator flag. 
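# [Editor's note] Illustrative sketch, not part of the patch, of the
# Chromium 'src' depth auto-detection above: walk the build file's absolute
# directory from the deepest component upward and treat everything up to the
# first 'src' component as --depth. The function name below is hypothetical.
import os

def find_src_depth(build_file_dir):
    components = build_file_dir.split(os.path.sep)
    for index in range(len(components) - 1, -1, -1):
        if components[index] == 'src':
            return os.path.sep.join(components)
        del components[index]
    return None

print(find_src_depth('/home/me/chromium/src/build'))  # -> /home/me/chromium/src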
- if options.msvs_version: - print >>sys.stderr, \ - 'DEPRECATED: Use generator flag (-G msvs_version=' + \ - options.msvs_version + ') instead of --msvs-version=' + \ - options.msvs_version - generator_flags['msvs_version'] = options.msvs_version - - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = {'options': options, - 'build_files': build_files, - 'generator_flags': generator_flags, - 'cwd': os.getcwd(), - 'build_files_arg': build_files_arg, - 'gyp_binary': sys.argv[0], - 'home_dot_gyp': home_dot_gyp, - 'parallel': options.parallel} - - # Start with the default variables from the command line. - [generator, flat_list, targets, data] = Load(build_files, format, - cmdline_default_variables, - includes, options.depth, - params, options.check, - options.circular_check) - - # TODO(mark): Pass |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - # NOTE: flat_list is the flattened dependency graph specifying the order - # that targets may be built. Build systems that operate serially or that - # need to have dependencies defined before dependents reference them should - # generate targets in the order specified in flat_list. - generator.GenerateOutput(flat_list, targets, data, params) - - if options.configs: - valid_configs = targets[flat_list[0]]['configurations'].keys() - for conf in options.configs: - if conf not in valid_configs: - raise GypError('Invalid config specified via --build: %s' % conf) - generator.PerformBuild(data, options.configs, params) - - # Done - return 0 - - -def main(args): - try: - return gyp_main(args) - except GypError, e: - sys.stderr.write("gyp: %s\n" % e) - return 1 - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/__init__.pyc and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/__init__.pyc differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/common.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/common.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/common.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/common.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,491 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import with_statement - -import errno -import filecmp -import os.path -import re -import tempfile -import sys - - -# A minimal memoizing decorator. It'll blow up if the args aren't immutable, -# among other "problems". -class memoize(object): - def __init__(self, func): - self.func = func - self.cache = {} - def __call__(self, *args): - try: - return self.cache[args] - except KeyError: - result = self.func(*args) - self.cache[args] = result - return result - - -class GypError(Exception): - """Error class representing an error, which is to be presented - to the user. The main entry point will catch and display this. 
- """ - pass - - -def ExceptionAppend(e, msg): - """Append a message to the given exception's message.""" - if not e.args: - e.args = (msg,) - elif len(e.args) == 1: - e.args = (str(e.args[0]) + ' ' + msg,) - else: - e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:] - - -def ParseQualifiedTarget(target): - # Splits a qualified target into a build file, target name and toolset. - - # NOTE: rsplit is used to disambiguate the Windows drive letter separator. - target_split = target.rsplit(':', 1) - if len(target_split) == 2: - [build_file, target] = target_split - else: - build_file = None - - target_split = target.rsplit('#', 1) - if len(target_split) == 2: - [target, toolset] = target_split - else: - toolset = None - - return [build_file, target, toolset] - - -def ResolveTarget(build_file, target, toolset): - # This function resolves a target into a canonical form: - # - a fully defined build file, either absolute or relative to the current - # directory - # - a target name - # - a toolset - # - # build_file is the file relative to which 'target' is defined. - # target is the qualified target. - # toolset is the default toolset for that target. - [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) - - if parsed_build_file: - if build_file: - # If a relative path, parsed_build_file is relative to the directory - # containing build_file. If build_file is not in the current directory, - # parsed_build_file is not a usable path as-is. Resolve it by - # interpreting it as relative to build_file. If parsed_build_file is - # absolute, it is usable as a path regardless of the current directory, - # and os.path.join will return it as-is. - build_file = os.path.normpath(os.path.join(os.path.dirname(build_file), - parsed_build_file)) - # Further (to handle cases like ../cwd), make it relative to cwd) - if not os.path.isabs(build_file): - build_file = RelativePath(build_file, '.') - else: - build_file = parsed_build_file - - if parsed_toolset: - toolset = parsed_toolset - - return [build_file, target, toolset] - - -def BuildFile(fully_qualified_target): - # Extracts the build file from the fully qualified target. - return ParseQualifiedTarget(fully_qualified_target)[0] - - -def GetEnvironFallback(var_list, default): - """Look up a key in the environment, with fallback to secondary keys - and finally falling back to a default value.""" - for var in var_list: - if var in os.environ: - return os.environ[var] - return default - - -def QualifiedTarget(build_file, target, toolset): - # "Qualified" means the file that a target was defined in and the target - # name, separated by a colon, suffixed by a # and the toolset name: - # /path/to/file.gyp:target_name#toolset - fully_qualified = build_file + ':' + target - if toolset: - fully_qualified = fully_qualified + '#' + toolset - return fully_qualified - - -@memoize -def RelativePath(path, relative_to): - # Assuming both |path| and |relative_to| are relative to the current - # directory, returns a relative path that identifies path relative to - # relative_to. - - # Convert to normalized (and therefore absolute paths). - path = os.path.realpath(path) - relative_to = os.path.realpath(relative_to) - - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." 
components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \ - path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return '' - - # Turn it back into a string and we're done. - return os.path.join(*relative_split) - - -@memoize -def InvertRelativePath(path, toplevel_dir=None): - """Given a path like foo/bar that is relative to toplevel_dir, return - the inverse relative path back to the toplevel_dir. - - E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path))) - should always produce the empty string, unless the path contains symlinks. - """ - if not path: - return path - toplevel_dir = '.' if toplevel_dir is None else toplevel_dir - return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path)) - - -def FixIfRelativePath(path, relative_to): - # Like RelativePath but returns |path| unchanged if it is absolute. - if os.path.isabs(path): - return path - return RelativePath(path, relative_to) - - -def UnrelativePath(path, relative_to): - # Assuming that |relative_to| is relative to the current directory, and |path| - # is a path relative to the dirname of |relative_to|, returns a path that - # identifies |path| relative to the current directory. - rel_dir = os.path.dirname(relative_to) - return os.path.normpath(os.path.join(rel_dir, path)) - - -# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at -# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 -# and the documentation for various shells. - -# _quote is a pattern that should match any argument that needs to be quoted -# with double-quotes by EncodePOSIXShellArgument. It matches the following -# characters appearing anywhere in an argument: -# \t, \n, space parameter separators -# # comments -# $ expansions (quoted to always expand within one argument) -# % called out by IEEE 1003.1 XCU.2.2 -# & job control -# ' quoting -# (, ) subshell execution -# *, ?, [ pathname expansion -# ; command delimiter -# <, >, | redirection -# = assignment -# {, } brace expansion (bash) -# ~ tilde expansion -# It also matches the empty string, because "" (or '') is the only way to -# represent an empty string literal argument to a POSIX shell. -# -# This does not match the characters in _escape, because those need to be -# backslash-escaped regardless of whether they appear in a double-quoted -# string. -_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$') - -# _escape is a pattern that should match any character that needs to be -# escaped with a backslash, whether or not the argument matched the _quote -# pattern. _escape is used with re.sub to backslash anything in _escape's -# first match group, hence the (parentheses) in the regular expression. -# -# _escape matches the following characters appearing anywhere in an argument: -# " to prevent POSIX shells from interpreting this character for quoting -# \ to prevent POSIX shells from interpreting this character for escaping -# ` to prevent POSIX shells from interpreting this character for command -# substitution -# Missing from this list is $, because the desired behavior of -# EncodePOSIXShellArgument is to permit parameter (variable) expansion. -# -# Also missing from this list is !, which bash will interpret as the history -# expansion character when history is enabled. 
bash does not enable history -# by default in non-interactive shells, so this is not thought to be a problem. -# ! was omitted from this list because bash interprets "\!" as a literal string -# including the backslash character (avoiding history expansion but retaining -# the backslash), which would not be correct for argument encoding. Handling -# this case properly would also be problematic because bash allows the history -# character to be changed with the histchars shell variable. Fortunately, -# as history is not enabled in non-interactive shells and -# EncodePOSIXShellArgument is only expected to encode for non-interactive -# shells, there is no room for error here by ignoring !. -_escape = re.compile(r'(["\\`])') - -def EncodePOSIXShellArgument(argument): - """Encodes |argument| suitably for consumption by POSIX shells. - - argument may be quoted and escaped as necessary to ensure that POSIX shells - treat the returned value as a literal representing the argument passed to - this function. Parameter (variable) expansions beginning with $ are allowed - to remain intact without escaping the $, to allow the argument to contain - references to variables to be expanded by the shell. - """ - - if not isinstance(argument, str): - argument = str(argument) - - if _quote.search(argument): - quote = '"' - else: - quote = '' - - encoded = quote + re.sub(_escape, r'\\\1', argument) + quote - - return encoded - - -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. - - Returns EncodePOSIXShellArgument for each item in list, and joins them - together using the space character as an argument separator. - """ - - encoded_arguments = [] - for argument in list: - encoded_arguments.append(EncodePOSIXShellArgument(argument)) - return ' '.join(encoded_arguments) - - -def DeepDependencyTargets(target_dicts, roots): - """Returns the recursive list of target dependencies.""" - dependencies = set() - pending = set(roots) - while pending: - # Pluck out one. - r = pending.pop() - # Skip if visited already. - if r in dependencies: - continue - # Add it. - dependencies.add(r) - # Add its children. - spec = target_dicts[r] - pending.update(set(spec.get('dependencies', []))) - pending.update(set(spec.get('dependencies_original', []))) - return list(dependencies - set(roots)) - - -def BuildFileTargets(target_list, build_file): - """From a target_list, returns the subset from the specified build_file. - """ - return [p for p in target_list if BuildFile(p) == build_file] - - -def AllTargets(target_list, target_dicts, build_file): - """Returns all targets (direct and dependencies) for the specified build_file. - """ - bftargets = BuildFileTargets(target_list, build_file) - deptargets = DeepDependencyTargets(target_dicts, bftargets) - return bftargets + deptargets - - -def WriteOnDiff(filename): - """Write to a file only if the new contents differ. - - Arguments: - filename: name of the file to potentially write to. - Returns: - A file like object which will write to temporary file and only overwrite - the target if it differs (on close). - """ - - class Writer: - """Wrapper around file which only covers the target if it differs.""" - def __init__(self): - # Pick temporary file. - tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix='.tmp', - prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) - try: - self.tmp_file = os.fdopen(tmp_fd, 'wb') - except Exception: - # Don't leave turds behind. 
- os.unlink(self.tmp_path) - raise - - def __getattr__(self, attrname): - # Delegate everything else to self.tmp_file - return getattr(self.tmp_file, attrname) - - def close(self): - try: - # Close tmp file. - self.tmp_file.close() - # Determine if different. - same = False - try: - same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(self.tmp_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) - if sys.platform == 'win32' and os.path.exists(filename): - # NOTE: on windows (but not cygwin) rename will not replace an - # existing file, so it must be preceded with a remove. Sadly there - # is no way to make the switch atomic. - os.remove(filename) - os.rename(self.tmp_path, filename) - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - return Writer() - - -def GetFlavor(params): - """Returns |params.flavor| if it's set, the system's default flavor else.""" - flavors = { - 'cygwin': 'win', - 'win32': 'win', - 'darwin': 'mac', - } - - if 'flavor' in params: - return params['flavor'] - if sys.platform in flavors: - return flavors[sys.platform] - if sys.platform.startswith('sunos'): - return 'solaris' - if sys.platform.startswith('freebsd'): - return 'freebsd' - if sys.platform.startswith('openbsd'): - return 'openbsd' - if sys.platform.startswith('aix'): - return 'aix' - - return 'linux' - - -def CopyTool(flavor, out_path): - """Finds (mac|sun|win)_tool.gyp in the gyp directory and copies it - to |out_path|.""" - prefix = { 'solaris': 'sun', 'mac': 'mac', 'win': 'win' }.get(flavor, None) - if not prefix: - return - - # Slurp input file. - source_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix) - with open(source_path) as source_file: - source = source_file.readlines() - - # Add header and write it out. - tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix) - with open(tool_path, 'w') as tool_file: - tool_file.write( - ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:])) - - # Make file executable. - os.chmod(tool_path, 0755) - - -# From Alex Martelli, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) 
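`WriteOnDiff` above exists so that regenerating byte-identical output does not touch mtimes and trigger spurious rebuilds. A minimal Python 3 sketch of the same write-temp/compare/rename idea, leaving out the umask fixup and the Windows-specific remove-before-rename handled in the original:

```python
import filecmp
import os
import tempfile

def write_on_diff(filename, data):
    """Write data to filename only if the contents would actually change."""
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(filename) or '.')
    try:
        with os.fdopen(fd, 'w') as tmp_file:
            tmp_file.write(data)
        if os.path.exists(filename) and filecmp.cmp(tmp_path, filename,
                                                    shallow=False):
            os.unlink(tmp_path)             # identical: keep old file's mtime
        else:
            os.replace(tmp_path, filename)  # atomic rename into place
    except Exception:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)             # don't leave the temp file behind
        raise

write_on_diff('out.txt', 'hello\n')  # a second call with same data is a no-op
```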
- -def uniquer(seq, idfun=None): - if idfun is None: - idfun = lambda x: x - seen = {} - result = [] - for item in seq: - marker = idfun(item) - if marker in seen: continue - seen[marker] = 1 - result.append(item) - return result - - -class CycleError(Exception): - """An exception raised when an unexpected cycle is detected.""" - def __init__(self, nodes): - self.nodes = nodes - def __str__(self): - return 'CycleError: cycle involving: ' + str(self.nodes) - - -def TopologicallySorted(graph, get_edges): - """Topologically sort based on a user provided edge definition. - - Args: - graph: A list of node names. - get_edges: A function mapping from node name to a hashable collection - of node names which this node has outgoing edges to. - Returns: - A list containing all of the node in graph in topological order. - It is assumed that calling get_edges once for each node and caching is - cheaper than repeatedly calling get_edges. - Raises: - CycleError in the event of a cycle. - Example: - graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'} - def GetEdges(node): - return re.findall(r'\$\(([^))]\)', graph[node]) - print TopologicallySorted(graph.keys(), GetEdges) - ==> - ['a', 'c', b'] - """ - get_edges = memoize(get_edges) - visited = set() - visiting = set() - ordered_nodes = [] - def Visit(node): - if node in visiting: - raise CycleError(visiting) - if node in visited: - return - visited.add(node) - visiting.add(node) - for neighbor in get_edges(node): - Visit(neighbor) - visiting.remove(node) - ordered_nodes.insert(0, node) - for node in sorted(graph): - Visit(node) - return ordered_nodes Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/common.pyc and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/common.pyc differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/common_test.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/common_test.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/common_test.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/common_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
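The docstring example in `TopologicallySorted` above does not run as written: the regex `r'\$\(([^))]\)'` is garbled and the expected output contains a stray `b']`. A corrected, self-contained Python 3 version of that example (the sort itself is inlined here so the snippet runs without gyp on the path):

```python
import re

# 'a' references b and c, 'c' references b, so a valid order is a, c, b.
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}

def get_edges(node):
    return re.findall(r'\$\(([^)]+)\)', graph[node])

def topologically_sorted(nodes, get_edges):
    """Depth-first topological sort, matching gyp's algorithm."""
    visited, visiting, ordered = set(), set(), []
    def visit(node):
        if node in visiting:
            raise ValueError('cycle involving %r' % visiting)
        if node in visited:
            return
        visited.add(node)
        visiting.add(node)
        for neighbor in get_edges(node):
            visit(neighbor)
        visiting.remove(node)
        ordered.insert(0, node)   # prepend: node precedes its dependencies' uses
    for node in sorted(nodes):
        visit(node)
    return ordered

print(topologically_sorted(graph.keys(), get_edges))  # ['a', 'c', 'b']
```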
- -"""Unit tests for the common.py file.""" - -import gyp.common -import unittest -import sys - - -class TestTopologicallySorted(unittest.TestCase): - def test_Valid(self): - """Test that sorting works on a valid graph with one possible order.""" - graph = { - 'a': ['b', 'c'], - 'b': [], - 'c': ['d'], - 'd': ['b'], - } - def GetEdge(node): - return tuple(graph[node]) - self.assertEqual( - gyp.common.TopologicallySorted(graph.keys(), GetEdge), - ['a', 'c', 'd', 'b']) - - def test_Cycle(self): - """Test that an exception is thrown on a cyclic graph.""" - graph = { - 'a': ['b'], - 'b': ['c'], - 'c': ['d'], - 'd': ['a'], - } - def GetEdge(node): - return tuple(graph[node]) - self.assertRaises( - gyp.common.CycleError, gyp.common.TopologicallySorted, - graph.keys(), GetEdge) - - -class TestGetFlavor(unittest.TestCase): - """Test that gyp.common.GetFlavor works as intended""" - original_platform = '' - - def setUp(self): - self.original_platform = sys.platform - - def tearDown(self): - sys.platform = self.original_platform - - def assertFlavor(self, expected, argument, param): - sys.platform = argument - self.assertEqual(expected, gyp.common.GetFlavor(param)) - - def test_platform_default(self): - self.assertFlavor('freebsd', 'freebsd9' , {}) - self.assertFlavor('freebsd', 'freebsd10', {}) - self.assertFlavor('openbsd', 'openbsd5' , {}) - self.assertFlavor('solaris', 'sunos5' , {}); - self.assertFlavor('solaris', 'sunos' , {}); - self.assertFlavor('linux' , 'linux2' , {}); - self.assertFlavor('linux' , 'linux3' , {}); - - def test_param(self): - self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) - - -if __name__ == '__main__': - unittest.main() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,157 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import re -import os - - -def XmlToString(content, encoding='utf-8', pretty=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Visual Studio files have a lot of pre-defined structures. This function makes - it easy to represent these structures as Python data structures, instead of - having to create a lot of function calls. - - Each XML element of the content is represented as a list composed of: - 1. The name of the element, a string, - 2. The attributes of the element, a dictionary (optional), and - 3+. The content of the element, if any. Strings are simple text nodes and - lists are child elements. - - Example 1: - - becomes - ['test'] - - Example 2: - - This is - it! - - - becomes - ['myelement', {'a':'value1', 'b':'value2'}, - ['childtype', 'This is'], - ['childtype', 'it!'], - ] - - Args: - content: The structured content to be converted. - encoding: The encoding to report on the first XML line. - pretty: True if we want pretty printing with indents and new lines. - - Returns: - The XML content as a string. - """ - # We create a huge list of all the elements of the file. 
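The nested-list convention documented above (element name first, then an optional attribute dict, then text nodes or child lists) is easy to exercise on its own. This Python 3 sketch converts such a specification to a string using the standard library rather than gyp's hand-rolled serializer, so the escaping details differ slightly:

```python
import xml.etree.ElementTree as ET

def spec_to_element(spec):
    """Convert an easy_xml-style list spec into an ElementTree element."""
    name, rest = spec[0], spec[1:]
    attrs = {}
    if rest and isinstance(rest[0], dict):      # optional attribute dict
        attrs, rest = rest[0], rest[1:]
    elem = ET.Element(name, {k: str(v) for k, v in attrs.items()})
    for child in rest:
        if isinstance(child, str):              # simple text node
            elem.text = (elem.text or '') + child
        else:                                   # nested child element
            elem.append(spec_to_element(child))
    return elem

spec = ['myelement', {'a': 'value1', 'b': 'value2'},
        ['childtype', 'This is'],
        ['childtype', 'it!']]
print(ET.tostring(spec_to_element(spec), encoding='unicode'))
# <myelement a="value1" b="value2"><childtype>This is</childtype>
#   <childtype>it!</childtype></myelement>   (on one line)
```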
- xml_parts = ['' % encoding] - if pretty: - xml_parts.append('\n') - _ConstructContentList(xml_parts, content, pretty) - - # Convert it to a string - return ''.join(xml_parts) - - -def _ConstructContentList(xml_parts, specification, pretty, level=0): - """ Appends the XML parts corresponding to the specification. - - Args: - xml_parts: A list of XML parts to be appended to. - specification: The specification of the element. See EasyXml docs. - pretty: True if we want pretty printing with indents and new lines. - level: Indentation level. - """ - # The first item in a specification is the name of the element. - if pretty: - indentation = ' ' * level - new_line = '\n' - else: - indentation = '' - new_line = '' - name = specification[0] - if not isinstance(name, str): - raise Exception('The first item of an EasyXml specification should be ' - 'a string. Specification was ' + str(specification)) - xml_parts.append(indentation + '<' + name) - - # Optionally in second position is a dictionary of the attributes. - rest = specification[1:] - if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].iteritems()): - xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) - rest = rest[1:] - if rest: - xml_parts.append('>') - all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) - multi_line = not all_strings - if multi_line and new_line: - xml_parts.append(new_line) - for child_spec in rest: - # If it's a string, append a text node. - # Otherwise recurse over that child definition - if isinstance(child_spec, str): - xml_parts.append(_XmlEscape(child_spec)) - else: - _ConstructContentList(xml_parts, child_spec, pretty, level + 1) - if multi_line and indentation: - xml_parts.append(indentation) - xml_parts.append('%s' % (name, new_line)) - else: - xml_parts.append('/>%s' % new_line) - - -def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, - win32=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Args: - content: The structured content to be written. - path: Location of the file. - encoding: The encoding to report on the first line of the XML file. - pretty: True if we want pretty printing with indents and new lines. 
- """ - xml_string = XmlToString(content, encoding, pretty) - if win32 and os.linesep != '\r\n': - xml_string = xml_string.replace('\n', '\r\n') - - # Get the old content - try: - f = open(path, 'r') - existing = f.read() - f.close() - except: - existing = None - - # It has changed, write it - if existing != xml_string: - f = open(path, 'w') - f.write(xml_string) - f.close() - - -_xml_escape_map = { - '"': '"', - "'": ''', - '<': '<', - '>': '>', - '&': '&', - '\n': ' ', - '\r': ' ', -} - - -_xml_escape_re = re.compile( - "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) - - -def _XmlEscape(value, attr=False): - """ Escape a string for inclusion in XML.""" - def replace(match): - m = match.string[match.start() : match.end()] - # don't replace single quotes in attrs - if attr and m == "'": - return m - return _xml_escape_map[m] - return _xml_escape_re.sub(replace, value) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,103 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the easy_xml.py file. """ - -import gyp.easy_xml as easy_xml -import unittest -import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - - def setUp(self): - self.stderr = StringIO.StringIO() - - def test_EasyXml_simple(self): - self.assertEqual( - easy_xml.XmlToString(['test']), - '') - - self.assertEqual( - easy_xml.XmlToString(['test'], encoding='Windows-1252'), - '') - - def test_EasyXml_simple_with_attributes(self): - self.assertEqual( - easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]), - '') - - def test_EasyXml_escaping(self): - original = '\'"\r&\nfoo' - converted = '<test>\'" & foo' - converted_apos = converted.replace("'", ''') - self.assertEqual( - easy_xml.XmlToString(['test3', {'a': original}, original]), - '%s' % - (converted, converted_apos)) - - def test_EasyXml_pretty(self): - self.assertEqual( - easy_xml.XmlToString( - ['test3', - ['GrandParent', - ['Parent1', - ['Child'] - ], - ['Parent2'] - ] - ], - pretty=True), - '\n' - '\n' - ' \n' - ' \n' - ' \n' - ' \n' - ' \n' - ' \n' - '\n') - - - def test_EasyXml_complex(self): - # We want to create: - target = ( - '' - '' - '' - '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}' - 'Win32Proj' - 'automated_ui_tests' - '' - '' - '' - 'Application' - 'Unicode' - '' - '') - - xml = easy_xml.XmlToString( - ['Project', - ['PropertyGroup', {'Label': 'Globals'}, - ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'], - ['Keyword', 'Win32Proj'], - ['RootNamespace', 'automated_ui_tests'] - ], - ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}], - ['PropertyGroup', - {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'", - 'Label': 'Configuration'}, - ['ConfigurationType', 'Application'], - ['CharacterSet', 'Unicode'] - ] - ]) - self.assertEqual(xml, target) - - -if __name__ == '__main__': - unittest.main() Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.pyc and 
/tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.pyc differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1099 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This generates makefiles suitable for inclusion into the Android build system -# via an Android.mk file. It is based on make.py, the standard makefile -# generator. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level GypAndroid.mk. This means that all -# variables in .mk-files clobber one another, and furthermore that any -# variables set potentially clash with other Android build system variables. -# Try to avoid setting global variables where possible. - -import gyp -import gyp.common -import gyp.generator.make as make # Reuse global functions from make backend. -import os -import re -import subprocess - -generator_default_variables = { - 'OS': 'android', - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.so', - 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)', - 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)', - 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)', - 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', - 'LIB_DIR': '$(obj).$(TOOLSET)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. - 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(RULE_SOURCES)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - 'CONFIGURATION_NAME': '$(GYP_DEFAULT_CONFIGURATION)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - - -# Generator-specific gyp specs. -generator_additional_non_configuration_keys = [ - # Boolean to declare that this target does not want its name mangled. - 'android_unmangled_name', -] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -SHARED_FOOTER = """\ -# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from -# all the included sub-makefiles. This is just here to clarify. -gyp_all_modules: -""" - -header = """\ -# This file is generated by gyp; do not edit. 
- -""" - -android_standard_include_paths = set([ - # JNI_H_INCLUDE in build/core/binary.mk - 'dalvik/libnativehelper/include/nativehelper', - # from SRC_HEADERS in build/core/config.mk - 'system/core/include', - 'hardware/libhardware/include', - 'hardware/libhardware_legacy/include', - 'hardware/ril/include', - 'dalvik/libnativehelper/include', - 'frameworks/native/include', - 'frameworks/native/opengl/include', - 'frameworks/base/include', - 'frameworks/base/opengl/include', - 'frameworks/base/native/include', - 'external/skia/include', - # TARGET_C_INCLUDES in build/core/combo/TARGET_linux-arm.mk - 'bionic/libc/arch-arm/include', - 'bionic/libc/include', - 'bionic/libstdc++/include', - 'bionic/libc/kernel/common', - 'bionic/libc/kernel/arch-arm', - 'bionic/libm/include', - 'bionic/libm/include/arm', - 'bionic/libthread_db/include', - ]) - - -# Map gyp target types to Android module classes. -MODULE_CLASSES = { - 'static_library': 'STATIC_LIBRARIES', - 'shared_library': 'SHARED_LIBRARIES', - 'executable': 'EXECUTABLES', -} - - -def IsCPPExtension(ext): - return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx' - - -def Sourceify(path): - """Convert a path to its source directory form. The Android backend does not - support options.generator_output, so this function is a noop.""" - return path - - -# Map from qualified target to path to output. -# For Android, the target of these maps is a tuple ('static', 'modulename'), -# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, -# since we link by module. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class AndroidMkWriter(object): - """AndroidMkWriter packages up the writing of one target-specific Android.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, android_top_dir): - self.android_top_dir = android_top_dir - - def Write(self, qualified_target, relative_target, base_path, output_filename, - spec, configs, part_of_all): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - relative_target: qualified target name relative to the root - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - make.ensure_directory_exists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.qualified_target = qualified_target - self.relative_target = relative_target - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - - self.android_class = MODULE_CLASSES.get(self.type, 'GYP') - self.android_module = self.ComputeAndroidModule(spec) - (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) - self.output = self.output_binary = self.ComputeOutput(spec) - - # Standard header. 
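The `Write()` method here is essentially a template engine for one Android.mk module stanza. A heavily simplified, hypothetical sketch (Python 3; the `_gyp` suffix matches the naming convention described later in `ComputeAndroidModule`, everything else is reduced to the bare minimum) of the header it emits at this point:

```python
MODULE_CLASSES = {
    'static_library': 'STATIC_LIBRARIES',
    'shared_library': 'SHARED_LIBRARIES',
    'executable': 'EXECUTABLES',
}

def write_module_header(spec):
    """Emit the opening lines of an Android.mk stanza for one gyp target."""
    lines = ['include $(CLEAR_VARS)', '']
    lines.append('LOCAL_MODULE_CLASS := ' +
                 MODULE_CLASSES.get(spec['type'], 'GYP'))
    lines.append('LOCAL_MODULE := %s_gyp' % spec['target_name'])
    lines.append('LOCAL_MODULE_TAGS := optional')
    if spec.get('toolset') == 'host':
        lines.append('LOCAL_IS_HOST_MODULE := true')
    return '\n'.join(lines)

print(write_module_header(
    {'type': 'static_library', 'target_name': 'foo', 'toolset': 'target'}))
```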
- self.WriteLn('include $(CLEAR_VARS)\n') - - # Module class and name. - self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class) - self.WriteLn('LOCAL_MODULE := ' + self.android_module) - # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. - # The library module classes fail if the stem is set. ComputeOutputParts - # makes sure that stem == modulename in these cases. - if self.android_stem != self.android_module: - self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem) - self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix) - self.WriteLn('LOCAL_MODULE_TAGS := optional') - if self.toolset == 'host': - self.WriteLn('LOCAL_IS_HOST_MODULE := true') - - # Grab output directories; needed for Actions and Rules. - self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)') - self.WriteLn('gyp_shared_intermediate_dir := ' - '$(call intermediates-dir-for,GYP,shared)') - self.WriteLn() - - # List files this target depends on so that actions/rules/copies/sources - # can depend on the list. - # TODO: doesn't pull in things through transitive link deps; needed? - target_dependencies = [x[1] for x in deps if x[0] == 'path'] - self.WriteLn('# Make sure our deps are built first.') - self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES', - local_pathify=True) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs) - - # Rules must be early like actions. - if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs) - - # GYP generated outputs. - self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True) - - # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend - # on both our dependency targets and our generated files. - self.WriteLn('# Make sure our deps and generated files are built first.') - self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) ' - '$(GYP_GENERATED_OUTPUTS)') - self.WriteLn() - - # Sources. - if spec.get('sources', []) or extra_sources: - self.WriteSources(spec, configs, extra_sources) - - self.WriteTarget(spec, configs, deps, link_deps, part_of_all) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = ('path', self.output_binary) - - # Update global list of link dependencies. - if self.type == 'static_library': - target_link_deps[qualified_target] = ('static', self.android_module) - elif self.type == 'shared_library': - target_link_deps[qualified_target] = ('shared', self.android_module) - - self.fp.close() - return self.android_module - - - def WriteActions(self, actions, extra_sources, extra_outputs): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - """ - for action in actions: - name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, - action['action_name'])) - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. - # Collect the output dirs we'll need. 
- dirs = set() - for out in outputs: - if not out.startswith('$'): - print ('WARNING: Action for target "%s" writes output to local path ' - '"%s".' % (self.target, out)) - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - - # Prepare the actual command. - command = gyp.common.EncodePOSIXShellList(action['action']) - if 'message' in action: - quiet_cmd = 'Gyp action: %s ($@)' % action['message'] - else: - quiet_cmd = 'Gyp action: %s ($@)' % name - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - cd_action = 'cd $(gyp_local_path)/%s; ' % self.path - command = cd_action + command - - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the gyp_* - # variables for the action rule with an absolute version so that the - # output goes in the right place. - # Only write the gyp_* rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) - self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) - self.WriteLn('%s: gyp_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' % - main_output) - self.WriteLn('%s: gyp_shared_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' % - main_output) - - # Android's envsetup.sh adds a number of directories to the path including - # the built host binary directory. This causes actions/rules invoked by - # gyp to sometimes use these instead of system versions, e.g. bison. - # The built host binaries may not be suitable, and can cause errors. - # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable - # set by envsetup. - self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))' - % main_output) - - for input in inputs: - assert ' ' not in input, ( - "Spaces in action input filenames not supported (%s)" % input) - for output in outputs: - assert ' ' not in output, ( - "Spaces in action output filenames not supported (%s)" % output) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % - (main_output, ' '.join(map(self.LocalPathify, inputs)))) - self.WriteLn('\t@echo "%s"' % quiet_cmd) - self.WriteLn('\t$(hide)%s\n' % command) - for output in outputs[1:]: - # Make each output depend on the main output, with an empty command - # to force make to notice that the mtime has changed. - self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output)) - - extra_outputs += outputs - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs): - """Write Makefile code for any 'rules' from the gyp input. 
- - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - """ - if len(rules) == 0: - return - rule_trigger = '%s_rule_trigger' % self.android_module - - did_write_rule = False - for rule in rules: - if len(rule.get('rule_sources', [])) == 0: - continue - did_write_rule = True - name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, - rule['rule_name'])) - self.WriteLn('\n### Generated for rule "%s":' % name) - self.WriteLn('# "%s":' % rule) - - inputs = rule.get('inputs') - for rule_source in rule.get('rule_sources', []): - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root, - rule_source_dirname) - for out in rule['outputs']] - - dirs = set() - for out in outputs: - if not out.startswith('$'): - print ('WARNING: Rule for target %s writes output to local path %s' - % (self.target, out)) - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - extra_outputs += outputs - if int(rule.get('process_outputs_as_sources', False)): - extra_sources.extend(outputs) - - components = [] - for component in rule['action']: - component = self.ExpandInputRoot(component, rule_source_root, - rule_source_dirname) - if '$(RULE_SOURCES)' in component: - component = component.replace('$(RULE_SOURCES)', - rule_source) - components.append(component) - - command = gyp.common.EncodePOSIXShellList(components) - cd_action = 'cd $(gyp_local_path)/%s; ' % self.path - command = cd_action + command - if dirs: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - # We set up a rule to build the first output, and then set up - # a rule for each additional output to depend on the first. - outputs = map(self.LocalPathify, outputs) - main_output = outputs[0] - self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) - self.WriteLn('%s: gyp_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' - % main_output) - self.WriteLn('%s: gyp_shared_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' - % main_output) - - # See explanation in WriteActions. - self.WriteLn('%s: export PATH := ' - '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) - - main_output_deps = self.LocalPathify(rule_source) - if inputs: - main_output_deps += ' ' - main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs]) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % - (main_output, main_output_deps)) - self.WriteLn('\t%s\n' % command) - for output in outputs[1:]: - self.WriteLn('%s: %s' % (output, main_output)) - self.WriteLn('.PHONY: %s' % (rule_trigger)) - self.WriteLn('%s: %s' % (rule_trigger, main_output)) - self.WriteLn('') - if did_write_rule: - extra_sources.append(rule_trigger) # Force all rules to run. - self.WriteLn('### Finished generating for all rules') - self.WriteLn('') - - - def WriteCopies(self, copies, extra_outputs): - """Write Makefile code for any 'copies' from the gyp input. 
- - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - """ - self.WriteLn('### Generated for copy rule.') - - variable = make.StringToMakefileVariable(self.relative_target + '_copies') - outputs = [] - for copy in copies: - for path in copy['files']: - # The Android build system does not allow generation of files into the - # source tree. The destination should start with a variable, which will - # typically be $(gyp_intermediate_dir) or - # $(gyp_shared_intermediate_dir). Note that we can't use an assertion - # because some of the gyp tests depend on this. - if not copy['destination'].startswith('$'): - print ('WARNING: Copy rule for target %s writes output to ' - 'local path %s' % (self.target, copy['destination'])) - - # LocalPathify() calls normpath, stripping trailing slashes. - path = Sourceify(self.LocalPathify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.LocalPathify(os.path.join(copy['destination'], - filename))) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' % - (output, path)) - self.WriteLn('\t@echo Copying: $@') - self.WriteLn('\t$(hide) mkdir -p $(dir $@)') - self.WriteLn('\t$(hide) $(ACP) -r $< $@') - self.WriteLn() - outputs.append(output) - self.WriteLn('%s = %s' % (variable, - ' '.join(map(make.QuoteSpaces, outputs)))) - extra_outputs.append('$(%s)' % variable) - self.WriteLn() - - - def WriteSourceFlags(self, spec, configs): - """Write out the flags and include paths used to compile source files for - the current target. - - Args: - spec, configs: input from gyp. - """ - config = configs[spec['default_configuration']] - extracted_includes = [] - - self.WriteLn('\n# Flags passed to both C and C++ files.') - cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( - config.get('cflags')) - extracted_includes.extend(includes_from_cflags) - self.WriteList(cflags, 'MY_CFLAGS') - - cflags_c, includes_from_cflags_c = self.ExtractIncludesFromCFlags( - config.get('cflags_c')) - extracted_includes.extend(includes_from_cflags_c) - self.WriteList(cflags_c, 'MY_CFLAGS_C') - - self.WriteList(config.get('defines'), 'MY_DEFS', prefix='-D', - quoter=make.EscapeCppDefine) - self.WriteLn('LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)') - - # Undefine ANDROID for host modules - # TODO: the source code should not use macro ANDROID to tell if it's host or - # target module. - if self.toolset == 'host': - self.WriteLn('# Undefine ANDROID for host modules') - self.WriteLn('LOCAL_CFLAGS += -UANDROID') - - self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS') - includes = list(config.get('include_dirs', [])) - includes.extend(extracted_includes) - includes = map(Sourceify, map(self.LocalPathify, includes)) - includes = self.NormalizeIncludePaths(includes) - self.WriteList(includes, 'LOCAL_C_INCLUDES') - self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) ' - '$(LOCAL_C_INCLUDES)') - - self.WriteLn('\n# Flags passed to only C++ (and not C) files.') - self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS') - - - def WriteSources(self, spec, configs, extra_sources): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - We need to handle shared_intermediate directory source files as - a special case by copying them to the intermediate directory and - treating them as a genereated sources. Otherwise the Android build - rules won't pick them up. 
- - Args: - spec, configs: input from gyp. - extra_sources: Sources generated from Actions or Rules. - """ - sources = filter(make.Compilable, spec.get('sources', [])) - generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] - extra_sources = filter(make.Compilable, extra_sources) - - # Determine and output the C++ extension used by these sources. - # We simply find the first C++ file and use that extension. - all_sources = sources + extra_sources - local_cpp_extension = '.cpp' - for source in all_sources: - (root, ext) = os.path.splitext(source) - if IsCPPExtension(ext): - local_cpp_extension = ext - break - if local_cpp_extension != '.cpp': - self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension) - - # We need to move any non-generated sources that are coming from the - # shared intermediate directory out of LOCAL_SRC_FILES and put them - # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files - # that don't match our local_cpp_extension, since Android will only - # generate Makefile rules for a single LOCAL_CPP_EXTENSION. - local_files = [] - for source in sources: - (root, ext) = os.path.splitext(source) - if '$(gyp_shared_intermediate_dir)' in source: - extra_sources.append(source) - elif '$(gyp_intermediate_dir)' in source: - extra_sources.append(source) - elif IsCPPExtension(ext) and ext != local_cpp_extension: - extra_sources.append(source) - else: - local_files.append(os.path.normpath(os.path.join(self.path, source))) - - # For any generated source, if it is coming from the shared intermediate - # directory then we add a Make rule to copy them to the local intermediate - # directory first. This is because the Android LOCAL_GENERATED_SOURCES - # must be in the local module intermediate directory for the compile rules - # to work properly. If the file has the wrong C++ extension, then we add - # a rule to copy that to intermediates and use the new version. - final_generated_sources = [] - # If a source file gets copied, we still need to add the orginal source - # directory as header search path, for GCC searches headers in the - # directory that contains the source file by default. - origin_src_dirs = [] - for source in extra_sources: - local_file = source - if not '$(gyp_intermediate_dir)/' in local_file: - basename = os.path.basename(local_file) - local_file = '$(gyp_intermediate_dir)/' + basename - (root, ext) = os.path.splitext(local_file) - if IsCPPExtension(ext) and ext != local_cpp_extension: - local_file = root + local_cpp_extension - if local_file != source: - self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source))) - self.WriteLn('\tmkdir -p $(@D); cp $< $@') - origin_src_dirs.append(os.path.dirname(source)) - final_generated_sources.append(local_file) - - # We add back in all of the non-compilable stuff to make sure that the - # make rules have dependencies on them. - final_generated_sources.extend(generated_not_sources) - self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES') - - origin_src_dirs = gyp.common.uniquer(origin_src_dirs) - origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) - self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS') - - self.WriteList(local_files, 'LOCAL_SRC_FILES') - - # Write out the flags used to compile the source; this must be done last - # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. 
- self.WriteSourceFlags(spec, configs) - - - def ComputeAndroidModule(self, spec): - """Return the Android module name used for a gyp spec. - - We use the complete qualified target name to avoid collisions between - duplicate targets in different directories. We also add a suffix to - distinguish gyp-generated module names. - """ - - if int(spec.get('android_unmangled_name', 0)): - assert self.type != 'shared_library' or self.target.startswith('lib') - return self.target - - if self.type == 'shared_library': - # For reasons of convention, the Android build system requires that all - # shared library modules are named 'libfoo' when generating -l flags. - prefix = 'lib_' - else: - prefix = '' - - if spec['toolset'] == 'host': - suffix = '_host_gyp' - else: - suffix = '_gyp' - - if self.path: - name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix) - else: - name = '%s%s%s' % (prefix, self.target, suffix) - - return make.StringToMakefileVariable(name) - - - def ComputeOutputParts(self, spec): - """Return the 'output basename' of a gyp spec, split into filename + ext. - - Android libraries must be named the same thing as their module name, - otherwise the linker can't find them, so product_name and so on must be - ignored if we are building a library, and the "lib" prepending is - not done for Android. - """ - assert self.type != 'loadable_module' # TODO: not supported? - - target = spec['target_name'] - target_prefix = '' - target_ext = '' - if self.type == 'static_library': - target = self.ComputeAndroidModule(spec) - target_ext = '.a' - elif self.type == 'shared_library': - target = self.ComputeAndroidModule(spec) - target_ext = '.so' - elif self.type == 'none': - target_ext = '.stamp' - elif self.type != 'executable': - print ("ERROR: What output file should be generated?", - "type", self.type, "target", target) - - if self.type != 'static_library' and self.type != 'shared_library': - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' + product_ext - - target_stem = target_prefix + target - return (target_stem, target_ext) - - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - return ''.join(self.ComputeOutputParts(spec)) - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - if self.type == 'executable' and self.toolset == 'host': - # We install host executables into shared_intermediate_dir so they can be - # run by gyp rules that refer to PRODUCT_DIR. - path = '$(gyp_shared_intermediate_dir)' - elif self.type == 'shared_library': - if self.toolset == 'host': - path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)' - else: - path = '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)' - else: - # Other targets just get built into their intermediate dir. - if self.toolset == 'host': - path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class, - self.android_module) - else: - path = '$(call intermediates-dir-for,%s,%s)' % (self.android_class, - self.android_module) - - assert spec.get('product_dir') is None # TODO: not supported? - return os.path.join(path, self.ComputeOutputBasename(spec)) - - - def NormalizeLdFlags(self, ld_flags): - """ Clean up ldflags from gyp file. 
- Remove any ldflags that contain android_top_dir. - - Args: - ld_flags: ldflags from gyp files. - - Returns: - clean ldflags - """ - clean_ldflags = [] - for flag in ld_flags: - if self.android_top_dir in flag: - continue - clean_ldflags.append(flag) - return clean_ldflags - - def NormalizeIncludePaths(self, include_paths): - """ Normalize include_paths. - Convert absolute paths to relative to the Android top directory; - filter out include paths that are already brought in by the Android build - system. - - Args: - include_paths: A list of unprocessed include paths. - Returns: - A list of normalized include paths. - """ - normalized = [] - for path in include_paths: - if path[0] == '/': - path = gyp.common.RelativePath(path, self.android_top_dir) - - # Filter out the Android standard search path. - if path not in android_standard_include_paths: - normalized.append(path) - return normalized - - def ExtractIncludesFromCFlags(self, cflags): - """Extract includes "-I..." out from cflags - - Args: - cflags: A list of compiler flags, which may be mixed with "-I.." - Returns: - A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed. - """ - clean_cflags = [] - include_paths = [] - if cflags: - for flag in cflags: - if flag.startswith('-I'): - include_paths.append(flag[2:]) - else: - clean_cflags.append(flag) - - return (clean_cflags, include_paths) - - def ComputeAndroidLibraryModuleNames(self, libraries): - """Compute the Android module names from libraries, ie spec.get('libraries') - - Args: - libraries: the value of spec.get('libraries') - Returns: - A tuple (static_lib_modules, dynamic_lib_modules) - """ - static_lib_modules = [] - dynamic_lib_modules = [] - for libs in libraries: - # Libs can have multiple words. - for lib in libs.split(): - # Filter the system libraries, which are added by default by the Android - # build system. - if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or - lib.endswith('libgcc.a')): - continue - match = re.search(r'([^/]+)\.a$', lib) - if match: - static_lib_modules.append(match.group(1)) - continue - match = re.search(r'([^/]+)\.so$', lib) - if match: - dynamic_lib_modules.append(match.group(1)) - continue - # "-lstlport" -> libstlport - if lib.startswith('-l'): - if lib.endswith('_static'): - static_lib_modules.append('lib' + lib[2:]) - else: - dynamic_lib_modules.append('lib' + lib[2:]) - return (static_lib_modules, dynamic_lib_modules) - - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if 'dependencies' in spec: - deps.extend([target_outputs[dep] for dep in spec['dependencies'] - if target_outputs[dep]]) - for dep in spec['dependencies']: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteTargetFlags(self, spec, configs, link_deps): - """Write Makefile code to specify the link flags and library dependencies. - - spec, configs: input from gyp. 
- link_deps: link dependency list; see ComputeDeps() - """ - config = configs[spec['default_configuration']] - - # LDFLAGS - ldflags = list(config.get('ldflags', [])) - static_flags, dynamic_flags = self.ComputeAndroidLibraryModuleNames( - ldflags) - self.WriteLn('') - self.WriteList(self.NormalizeLdFlags(ldflags), 'LOCAL_LDFLAGS') - - # Libraries (i.e. -lfoo) - libraries = gyp.common.uniquer(spec.get('libraries', [])) - static_libs, dynamic_libs = self.ComputeAndroidLibraryModuleNames( - libraries) - - # Link dependencies (i.e. libfoo.a, libfoo.so) - static_link_deps = [x[1] for x in link_deps if x[0] == 'static'] - shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared'] - self.WriteLn('') - self.WriteList(static_flags + static_libs + static_link_deps, - 'LOCAL_STATIC_LIBRARIES') - self.WriteLn('# Enable grouping to fix circular references') - self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true') - self.WriteLn('') - self.WriteList(dynamic_flags + dynamic_libs + shared_link_deps, - 'LOCAL_SHARED_LIBRARIES') - - - def WriteTarget(self, spec, configs, deps, link_deps, part_of_all): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn('### Rules for final target.') - - if self.type != 'none': - self.WriteTargetFlags(spec, configs, link_deps) - - # Add to the set of targets which represent the gyp 'all' target. We use the - # name 'gyp_all_modules' as the Android build system doesn't allow the use - # of the Make target 'all' and because 'all_modules' is the equivalent of - # the Make target 'all' on Android. - if part_of_all: - self.WriteLn('# Add target alias to "gyp_all_modules" target.') - self.WriteLn('.PHONY: gyp_all_modules') - self.WriteLn('gyp_all_modules: %s' % self.android_module) - self.WriteLn('') - - # Add an alias from the gyp target name to the Android module name. This - # simplifies manual builds of the target, and is required by the test - # framework. - if self.target != self.android_module: - self.WriteLn('# Alias gyp target name.') - self.WriteLn('.PHONY: %s' % self.target) - self.WriteLn('%s: %s' % (self.target, self.android_module)) - self.WriteLn('') - - # Add the command to trigger build of the target type depending - # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY - # NOTE: This has to come last! - modifier = '' - if self.toolset == 'host': - modifier = 'HOST_' - if self.type == 'static_library': - self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier) - elif self.type == 'shared_library': - self.WriteLn('LOCAL_PRELINK_MODULE := false') - self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier) - elif self.type == 'executable': - if self.toolset == 'host': - self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)') - else: - # Don't install target executables for now, as it results in them being - # included in ROM. This can be revisited if there's a reason to install - # them later. 
- self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') - self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier) - else: - self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp') - self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') - self.WriteLn() - self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk') - self.WriteLn() - self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)') - self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') - self.WriteLn('\t$(hide) mkdir -p $(dir $@)') - self.WriteLn('\t$(hide) touch $@') - - - def WriteList(self, value_list, variable=None, prefix='', - quoter=make.QuoteIfNecessary, local_pathify=False): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. - """ - values = '' - if value_list: - value_list = [quoter(prefix + l) for l in value_list] - if local_pathify: - value_list = [self.LocalPathify(l) for l in value_list] - values = ' \\\n\t' + ' \\\n\t'.join(value_list) - self.fp.write('%s :=%s\n\n' % (variable, values)) - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def LocalPathify(self, path): - """Convert a subdirectory-relative path into a normalized path which starts - with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). - Absolute paths, or paths that contain variables, are just normalized.""" - if '$(' in path or os.path.isabs(path): - # path is not a file in the project tree in this case, but calling - # normpath is still important for trimming trailing slashes. - return os.path.normpath(path) - local_path = os.path.join('$(LOCAL_PATH)', self.path, path) - local_path = os.path.normpath(local_path) - # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) - # - i.e. that the resulting path is still inside the project tree. The - # path may legitimately have ended up containing just $(LOCAL_PATH), though, - # so we don't look for a slash. - assert local_path.startswith('$(LOCAL_PATH)'), ( - 'Path %s attempts to escape from gyp path %s !)' % (path, self.path)) - return local_path - - - def ExpandInputRoot(self, template, expansion, dirname): - if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: - return template - path = template % { - 'INPUT_ROOT': expansion, - 'INPUT_DIRNAME': dirname, - } - return path - - -def PerformBuild(data, configurations, params): - # The android backend only supports the default configuration. - options = params['options'] - makefile = os.path.abspath(os.path.join(options.toplevel_dir, - 'GypAndroid.mk')) - env = dict(os.environ) - env['ONE_SHOT_MAKEFILE'] = makefile - arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] - print 'Building: %s' % arguments - subprocess.check_call(arguments, env=env) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - limit_to_target_all = generator_flags.get('limit_to_target_all', False) - android_top_dir = os.environ.get('ANDROID_BUILD_TOP') - assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.' - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. 
Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - assert not options.generator_output, ( - 'The Android backend does not support options.generator_output.') - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' - makefile_name = 'GypAndroid' + options.suffix + '.mk' - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - assert not options.generator_output, ( - 'The Android backend does not support options.generator_output.') - make.ensure_directory_exists(makefile_path) - root_makefile = open(makefile_path, 'w') - - root_makefile.write(header) - - # We set LOCAL_PATH just once, here, to the top of the project tree. This - # allows all the other paths we use to be relative to the Android.mk file, - # as the Android build system expects. - root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n') - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - android_modules = {} - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - relative_build_file = gyp.common.RelativePath(build_file, - options.toplevel_dir) - build_files.add(relative_build_file) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' 
+ toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - part_of_all = (qualified_target in needed_targets and - not int(spec.get('suppress_wildcard', False))) - if limit_to_target_all and not part_of_all: - continue - - relative_target = gyp.common.QualifiedTarget(relative_build_file, target, - toolset) - writer = AndroidMkWriter(android_top_dir) - android_module = writer.Write(qualified_target, relative_target, base_path, - output_file, spec, configs, - part_of_all=part_of_all) - if android_module in android_modules: - print ('ERROR: Android module names must be unique. The following ' - 'targets both generate Android module name %s.\n %s\n %s' % - (android_module, android_modules[android_module], - qualified_target)) - return - android_modules[android_module] = qualified_target - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add(mkfile_rel_path) - - # Some tools need to know the absolute path of the top directory. - root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n') - root_makefile.write('GYP_DEFAULT_CONFIGURATION := %s\n' % - default_configuration) - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include_file in sorted(include_list): - root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n') - root_makefile.write('\n') - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import collections -import os -import gyp -import gyp.common -import gyp.msvs_emulation -import json -import sys - -generator_supports_multiple_toolsets = True - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = { -} -for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR', - 'LIB_DIR', 'SHARED_LIB_DIR']: - # Some gyp steps fail if these are empty(!). - generator_default_variables[dirname] = 'dir' -for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', - 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', - 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', - 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', - 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', - 'CONFIGURATION_NAME']: - generator_default_variables[unused] = '' - - -def CalculateVariables(default_variables, params): - generator_flags = params.get('generator_flags', {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - default_variables.setdefault('OS', gyp.common.GetFlavor(params)) - - flavor = gyp.common.GetFlavor(params) - if flavor =='win': - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. 
- import gyp.generator.msvs as msvs_generator - generator_additional_non_configuration_keys = getattr(msvs_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(msvs_generator, - 'generator_additional_path_sections', []) - - # Set a variable so conditions can be based on msvs_version. - msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or - '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - if generator_flags.get('adjust_static_libraries', False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - -def GenerateOutput(target_list, target_dicts, data, params): - # Map of target -> list of targets it depends on. - edges = {} - - # Queue of targets to visit. - targets_to_visit = target_list[:] - - while len(targets_to_visit) > 0: - target = targets_to_visit.pop() - if target in edges: - continue - edges[target] = [] - - for dep in target_dicts[target].get('dependencies', []): - edges[target].append(dep) - targets_to_visit.append(dep) - - filename = 'dump.json' - f = open(filename, 'w') - json.dump(edges, f) - f.close() - print 'Wrote json to %s.' % filename diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,277 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""GYP backend that generates Eclipse CDT settings files. - -This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML -files that can be imported into an Eclipse CDT project. The XML file contains a -list of include paths and symbols (i.e. defines). - -Because a full .cproject definition is not created by this generator, it's not -possible to properly define the include dirs and symbols for each file -individually. Instead, one set of includes/symbols is generated for the entire -project. This works fairly well (and is a vast improvement in general), but may -still result in a few indexer issues here and there. - -This generator has no automated tests, so expect it to be broken. 
-""" - -from xml.sax.saxutils import escape -import os.path -import subprocess -import gyp -import gyp.common -import shlex - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = { -} - -for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: - # Some gyp steps fail if these are empty(!). - generator_default_variables[dirname] = 'dir' - -for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', - 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', - 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', - 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', - 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', - 'CONFIGURATION_NAME']: - generator_default_variables[unused] = '' - -# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as -# part of the path when dealing with generated headers. This value will be -# replaced dynamically for each configuration. -generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \ - '$SHARED_INTERMEDIATE_DIR' - - -def CalculateVariables(default_variables, params): - generator_flags = params.get('generator_flags', {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - default_variables.setdefault('OS', gyp.common.GetFlavor(params)) - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - if generator_flags.get('adjust_static_libraries', False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - -def GetAllIncludeDirectories(target_list, target_dicts, - shared_intermediate_dirs, config_name): - """Calculate the set of include directories to be used. - - Returns: - A list including all the include_dir's specified for every target followed - by any include directories that were added as cflag compiler options. - """ - - gyp_includes_set = set() - compiler_includes_list = [] - - for target_name in target_list: - target = target_dicts[target_name] - if config_name in target['configurations']: - config = target['configurations'][config_name] - - # Look for any include dirs that were explicitly added via cflags. This - # may be done in gyp files to force certain includes to come at the end. - # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and - # remove this. - cflags = config['cflags'] - for cflag in cflags: - include_dir = '' - if cflag.startswith('-I'): - include_dir = cflag[2:] - if include_dir and not include_dir in compiler_includes_list: - compiler_includes_list.append(include_dir) - - # Find standard gyp include dirs. - if config.has_key('include_dirs'): - include_dirs = config['include_dirs'] - for shared_intermediate_dir in shared_intermediate_dirs: - for include_dir in include_dirs: - include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR', - shared_intermediate_dir) - if not os.path.isabs(include_dir): - base_dir = os.path.dirname(target_name) - - include_dir = base_dir + '/' + include_dir - include_dir = os.path.abspath(include_dir) - - if not include_dir in gyp_includes_set: - gyp_includes_set.add(include_dir) - - - # Generate a list that has all the include dirs. - all_includes_list = list(gyp_includes_set) - all_includes_list.sort() - for compiler_include in compiler_includes_list: - if not compiler_include in gyp_includes_set: - all_includes_list.append(compiler_include) - - # All done. 
- return all_includes_list
-
-
-def GetCompilerPath(target_list, target_dicts, data):
- """Determine a command that can be used to invoke the compiler.
-
- Returns:
- If this is a gyp project that has explicit make settings, try to determine
- the compiler from that. Otherwise, see if a compiler was specified via the
- CC_target environment variable.
- """
-
- # First, see if the compiler is configured in make's settings.
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings_dict = data[build_file].get('make_global_settings', {})
- for key, value in make_global_settings_dict:
- if key in ['CC', 'CXX']:
- return value
-
- # Check to see if the compiler was specified as an environment variable.
- for key in ['CC_target', 'CC', 'CXX']:
- compiler = os.environ.get(key)
- if compiler:
- return compiler
-
- return 'gcc'
-
-
-def GetAllDefines(target_list, target_dicts, data, config_name):
- """Calculate the defines for a project.
-
- Returns:
- A dict that includes explicit defines declared in gyp files along with all of
- the default defines that the compiler uses.
- """
-
- # Get defines declared in the gyp files.
- all_defines = {}
- for target_name in target_list:
- target = target_dicts[target_name]
-
- if config_name in target['configurations']:
- config = target['configurations'][config_name]
- for define in config['defines']:
- split_define = define.split('=', 1)
- if len(split_define) == 1:
- split_define.append('1')
- if split_define[0].strip() in all_defines:
- # Already defined
- continue
-
- all_defines[split_define[0].strip()] = split_define[1].strip()
-
- # Get default compiler defines (if possible).
- cc_target = GetCompilerPath(target_list, target_dicts, data)
- if cc_target:
- command = shlex.split(cc_target)
- command.extend(['-E', '-dM', '-'])
- cpp_proc = subprocess.Popen(args=command, cwd='.',
- stdin=subprocess.PIPE, stdout=subprocess.PIPE)
- cpp_output = cpp_proc.communicate()[0]
- cpp_lines = cpp_output.split('\n')
- for cpp_line in cpp_lines:
- if not cpp_line.strip():
- continue
- cpp_line_parts = cpp_line.split(' ', 2)
- key = cpp_line_parts[1]
- if len(cpp_line_parts) >= 3:
- val = cpp_line_parts[2]
- else:
- val = '1'
- all_defines[key] = val
-
- return all_defines
-
-
-def WriteIncludePaths(out, eclipse_langs, include_dirs):
- """Write the includes section of a CDT settings export file."""
-
- out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">\n')
- out.write('    <language name="holder for library settings"></language>\n')
- for lang in eclipse_langs:
- out.write('    <language name="%s">\n' % lang)
- for include_dir in include_dirs:
- out.write('      <includepath workspace_path="false">%s</includepath>\n' %
- include_dir)
- out.write('    </language>\n')
- out.write('  </section>\n')
-
-
-def WriteMacros(out, eclipse_langs, defines):
- """Write the macros section of a CDT settings export file."""
-
- out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">\n')
- out.write('    <language name="holder for library settings"></language>\n')
- for lang in eclipse_langs:
- out.write('    <language name="%s">\n' % lang)
- for key in sorted(defines.iterkeys()):
- out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
- (escape(key), escape(defines[key])))
- out.write('    </language>\n')
- out.write('  </section>\n')
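Aside: `GetAllDefines` above harvests the compiler's built-in macros by piping an empty translation unit through `cc -E -dM -` and parsing the resulting `#define KEY VALUE` lines. A minimal standalone sketch of the same technique, assuming a gcc-compatible compiler on `PATH` (`harvest_defines` is an illustrative name, not part of gyp):

```python
import shlex
import subprocess

def harvest_defines(compiler='gcc'):
    # -E -dM preprocesses the (empty) input and prints every macro the
    # compiler predefines, one "#define KEY VALUE" per line.
    command = shlex.split(compiler) + ['-E', '-dM', '-']
    proc = subprocess.Popen(command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    output = proc.communicate(input=b'')[0].decode()

    defines = {}
    for line in output.splitlines():
        parts = line.split(' ', 2)  # ['#define', KEY, VALUE?]
        if len(parts) < 2 or parts[0] != '#define':
            continue
        defines[parts[1]] = parts[2] if len(parts) == 3 else '1'
    return defines

print(sorted(harvest_defines())[:5])  # e.g. ['_LP64', '__ATOMIC_ACQUIRE', ...]
```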
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name):
- options = params['options']
- generator_flags = params.get('generator_flags', {})
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
- config_name)
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
- # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
- # SHARED_INTERMEDIATE_DIR. Include both possible locations.
- shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
- os.path.join(toplevel_build, 'gen')]
-
- if not os.path.exists(toplevel_build):
- os.makedirs(toplevel_build)
- out = open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), 'w')
-
- out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
- out.write('<cdtprojectproperties>\n')
-
- eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
- 'GNU C++', 'GNU C', 'Assembly']
- include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs, config_name)
- WriteIncludePaths(out, eclipse_langs, include_dirs)
- defines = GetAllDefines(target_list, target_dicts, data, config_name)
- WriteMacros(out, eclipse_langs, defines)
-
- out.write('</cdtprojectproperties>\n')
- out.close()
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Generate an XML settings file that can be imported into a CDT project."""
-
- if params['options'].generator_output:
- raise NotImplementedError, "--generator_output not implemented for eclipse"
-
- user_config = params.get('generator_flags', {}).get('config', None)
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name)
-
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,87 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypd output module
-
-This module produces gyp input as its output. Output files are given the
-.gypd extension to avoid overwriting the .gyp files that they are generated
-from. Internal references to .gyp files (such as those found in
-"dependencies" sections) are not adjusted to point to .gypd files instead;
-unlike other paths, which are relative to the .gyp or .gypd file, such paths
-are relative to the directory from which gyp was run to create the .gypd file.
-
-This generator module is intended to be a sample and a debugging aid, hence
-the "d" for "debug" in .gypd. It is useful to inspect the results of the
-various merges, expansions, and conditional evaluations performed by gyp
-and to see a representation of what would be fed to a generator module.
-
-It's not advisable to rename .gypd files produced by this module to .gyp,
-because they will have all merges, expansions, and evaluations already
-performed and the relevant constructs not present in the output; paths to
-dependencies may be wrong; and various sections that do not belong in .gyp
-files such as "included_files" and "*_excluded" will be present.
-Output will also be stripped of comments. This is not intended to be a
-general-purpose gyp pretty-printer; for that, you probably just want to
-run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
-comments but won't do all of the other things done to this module's output.
-
-The specific formatting of the output generated by this module is subject
-to change.
-"""
-
-
-import gyp.common
-import errno
-import os
-import pprint
-
-
-# These variables should just be spit back out as variable references.
-_generator_identity_variables = [
- 'EXECUTABLE_PREFIX',
- 'EXECUTABLE_SUFFIX',
- 'INTERMEDIATE_DIR',
- 'PRODUCT_DIR',
- 'RULE_INPUT_ROOT',
- 'RULE_INPUT_DIRNAME',
- 'RULE_INPUT_EXT',
- 'RULE_INPUT_NAME',
- 'RULE_INPUT_PATH',
- 'SHARED_INTERMEDIATE_DIR',
-]
-
-# gypd doesn't define a default value for OS like many other generator
-# modules. Specify "-D OS=whatever" on the command line to provide a value.
-generator_default_variables = {
-}
-
-# gypd supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# TODO(mark): This always uses <, which isn't right. The input module should
-# notify the generator to tell it which phase it is operating in, and this
-# module should use < for the early phase and then switch to > for the late
-# phase. Bonus points for carrying @ back into the output too.
-for v in _generator_identity_variables:
- generator_default_variables[v] = '<(%s)' % v
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- output_files = {}
- for qualified_target in target_list:
- [input_file, target] = \
- gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
-
- if input_file[-4:] != '.gyp':
- continue
- input_file_stem = input_file[:-4]
- output_file = input_file_stem + params['options'].suffix + '.gypd'
-
- if not output_file in output_files:
- output_files[output_file] = input_file
-
- for output_file, input_file in output_files.iteritems():
- output = open(output_file, 'w')
- pprint.pprint(data[input_file], output)
- output.close()
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,56 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypsh output module
-
-gypsh is a GYP shell. It's not really a generator per se. All it does is
-fire up an interactive Python session with a few local variables set to the
-variables passed to the generator. Like gypd, it's intended as a debugging
-aid, to facilitate the exploration of .gyp structures after being processed
-by the input module.
-
-The expected usage is "gyp -f gypsh -D OS=desired_os".
-"""
-
-
-import code
-import sys
-
-
-# All of this stuff about generator variables was lovingly ripped from gypd.py.
-# That module has a much better description of what's going on and why.
-_generator_identity_variables = [
- 'EXECUTABLE_PREFIX',
- 'EXECUTABLE_SUFFIX',
- 'INTERMEDIATE_DIR',
- 'PRODUCT_DIR',
- 'RULE_INPUT_ROOT',
- 'RULE_INPUT_DIRNAME',
- 'RULE_INPUT_EXT',
- 'RULE_INPUT_NAME',
- 'RULE_INPUT_PATH',
- 'SHARED_INTERMEDIATE_DIR',
-]
-
-generator_default_variables = {
-}
-
-for v in _generator_identity_variables:
- generator_default_variables[v] = '<(%s)' % v
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- locals = {
- 'target_list': target_list,
- 'target_dicts': target_dicts,
- 'data': data,
- }
-
- # Use a banner that looks like the stock Python one and like what
- # code.interact uses by default, but tack on something to indicate what
- # locals are available, and identify gypsh.
- banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
- (sys.version, sys.platform, repr(sorted(locals.keys())))
-
- code.interact(banner, local=locals)
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py 2013-07-02 18:05:49.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,2151 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Notes:
-#
-# This is all roughly based on the Makefile system used by the Linux
-# kernel, but is a non-recursive make -- we put the entire dependency
-# graph in front of make and let it figure it out.
-#
-# The code below generates a separate .mk file for each target, but
-# all are sourced by the top-level Makefile. This means that all
-# variables in .mk-files clobber one another. Be careful to use :=
-# where appropriate for immediate evaluation, and similarly to watch
-# that you're not relying on a variable value to last between different
-# .mk files.
-#
-# TODOs:
-#
-# Global settings and utility functions are currently stuffed in the
-# toplevel Makefile. It may make sense to generate some .mk files on
-# the side to keep the files readable.
-
-import os
-import re
-import sys
-import subprocess
-import gyp
-import gyp.common
-import gyp.xcode_emulation
-from gyp.common import GetEnvironFallback
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
- 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
- 'PRODUCT_DIR': '$(builddir)',
- 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
- 'RULE_INPUT_PATH': '$(abspath $<)',
- 'RULE_INPUT_EXT': '$(suffix $<)',
- 'RULE_INPUT_NAME': '$(notdir $<)',
- 'CONFIGURATION_NAME': '$(BUILDTYPE)',
-}
-
-# Make supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# Request sorted dependencies in the order from dependents to dependencies.
-generator_wants_sorted_dependencies = False
-
-# Placates pylint.
-generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == 'mac': - default_variables.setdefault('OS', 'mac') - default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') - default_variables.setdefault('SHARED_LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - default_variables.setdefault('LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Make generator. - import gyp.generator.xcode as xcode_generator - global generator_additional_non_configuration_keys - generator_additional_non_configuration_keys = getattr(xcode_generator, - 'generator_additional_non_configuration_keys', []) - global generator_additional_path_sections - generator_additional_path_sections = getattr(xcode_generator, - 'generator_additional_path_sections', []) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr(xcode_generator, - 'generator_extra_sources_for_rules', []) - COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'}) - else: - operating_system = flavor - if flavor == 'android': - operating_system = 'linux' # Keep this legacy behavior for now. - default_variables.setdefault('OS', operating_system) - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') - default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)') - default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)') - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - android_ndk_version = generator_flags.get('android_ndk_version', None) - # Android NDK requires a strict link order. - if android_ndk_version: - global generator_wants_sorted_dependencies - generator_wants_sorted_dependencies = True - - -def ensure_directory_exists(path): - dir = os.path.dirname(path) - if dir and not os.path.exists(dir): - os.makedirs(dir) - - -# The .d checking code below uses these functions: -# wildcard, sort, foreach, shell, wordlist -# wildcard can handle spaces, the rest can't. -# Since I could find no way to make foreach work with spaces in filenames -# correctly, the .d files have spaces replaced with another character. The .d -# file for -# Chromium\ Framework.framework/foo -# is for example -# out/Release/.deps/out/Release/Chromium?Framework.framework/foo -# This is the replacement character. -SPACE_REPLACEMENT = '?' - - -LINK_COMMANDS_LINUX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. 
-# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" - -LINK_COMMANDS_MAC = """\ -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -# TODO(thakis): Find out and document the difference between shared_library and -# loadable_module on mac. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -# TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass -# -bundle -single_module here (for osmesa.so). -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" - -LINK_COMMANDS_ANDROID = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -quiet_cmd_link_host = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) -cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. 
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-# Header of toplevel Makefile.
-# This should go into the build tree, but it's easier to keep it here for now.
-SHARED_HEADER = ("""\
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := %(srcdir)s
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= %(builddir)s
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= %(default_configuration)s
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-%(make_global_settings)s
-
-# C++ apps need to be linked with g++.
-#
-# Note: flock is used to serialize linking. Linking is a memory-intensive
-# process so running parallel links can often lead to thrashing. To disable
-# the serialization, override LINK via an environment variable as follows:
-#
-# export LINK=g++
-#
-# This will allow make to invoke N linker processes as specified in -jN.
-LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
-
-CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CFLAGS)
-CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CXXFLAGS)
-LINK.target ?= %(LINK.target)s
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= %(CC.host)s
-CFLAGS.host ?=
-CXX.host ?= %(CXX.host)s
-CXXFLAGS.host ?=
-LINK.host ?= %(LINK.host)s
-LDFLAGS.host ?=
-AR.host ?= %(AR.host)s
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
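Aside: the quoted make-manual caveat is why the generated Makefile round-trips spaces through the `SPACE_REPLACEMENT` character (`?`) defined earlier; the `replace_spaces`/`unreplace_spaces` helpers follow just below. The same round-trip as a Python sketch, assuming `?` never occurs in a real path, as the surrounding comments require:

```python
SPACE_REPLACEMENT = '?'  # must never appear in a real path

def replace_spaces(path):
    return path.replace(' ', SPACE_REPLACEMENT)

def unreplace_spaces(path):
    return path.replace(SPACE_REPLACEMENT, ' ')

path = 'out/Release/Chromium Framework.framework/foo'
assert unreplace_spaces(replace_spaces(path)) == path
print(replace_spaces(path))  # out/Release/Chromium?Framework.framework/foo
```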
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
-unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \\
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters."""
-r"""
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-"""
-"""
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-%(extra_commands)s
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
-
-%(link_commands)s
-"""
-
-r"""
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
-"""
-"""
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command.
Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain """ + SPACE_REPLACEMENT + \ - """ instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\\ - for p in $(POSTBUILDS); do\\ - eval $$p;\\ - E=$$?;\\ - if [ $$E -ne 0 ]; then\\ - break;\\ - fi;\\ - done;\\ - if [ $$E -ne 0 ]; then\\ - rm -rf "$@";\\ - exit $$E;\\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \ - SPACE_REPLACEMENT + """ for -# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \ - """ characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "%(default_target)s" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: %(default_target)s -%(default_target)s: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -""") - -SHARED_HEADER_MAC_COMMANDS = """ -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. 
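Aside: the `command_changed` helper above works around make's lack of a string-equality function by substituting each command into the other; only when both substitutions yield the empty string are the commands equal. Roughly the same idea as a Python sketch (like the make original, an approximation rather than a general equality test); the deleted listing resumes below with the precompiled-header commands:

```python
def command_changed(logged_cmd, new_cmd):
    # Equal iff substituting each string into the other leaves nothing.
    return bool(new_cmd.replace(logged_cmd, '') or
                logged_cmd.replace(new_cmd, ''))

assert not command_changed('gcc -c foo.c', 'gcc -c foo.c')
assert command_changed('gcc -c foo.c', 'gcc -O2 -c foo.c')
```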
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" -""" - -SHARED_HEADER_SUN_COMMANDS = """ -# gyp-sun-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_sun_tool = SUNTOOL $(4) $< -cmd_sun_tool = ./gyp-sun-tool $(4) $< "$@" -""" - - -def WriteRootHeaderSuffixRules(writer): - extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower) - - writer.write('# Suffix rules, putting all outputs into $(obj).\n') - for ext in extensions: - writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext) - writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) - - writer.write('\n# Try building from generated source, too.\n') - for ext in extensions: - writer.write( - '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext) - writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) - writer.write('\n') - for ext in extensions: - writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext) - writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) - writer.write('\n') - - -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\ -# Suffix rules, putting all outputs into $(obj). -""") - - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\ -# Try building from generated source, too. -""") - - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Maps every compilable file extension to the do_cmd that compiles it. 
-COMPILABLE_EXTENSIONS = { - '.c': 'cc', - '.cc': 'cxx', - '.cpp': 'cxx', - '.cxx': 'cxx', - '.s': 'cc', - '.S': 'cc', -} - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS): - if res: - return True - return False - - -def Linkable(filename): - """Return true if the file is linkable (should be on the link line).""" - return filename.endswith('.o') - - -def Target(filename): - """Translate a compilable filename to its .o target.""" - return os.path.splitext(filename)[0] + '.o' - - -def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ - return "'" + s.replace("'", "'\\''") + "'" - - -def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. We must escape it for - string to be interpreted literally.""" - return s.replace('$', '$$') - - -def EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = EscapeShellArgument(s) - s = EscapeMakeVariableExpansion(s) - # '#' characters must be escaped even embedded in a string, else Make will - # treat it as the start of a comment. - return s.replace('#', r'\#') - - -def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" - if '"' in string: - string = '"' + string.replace('"', '\\"') + '"' - return string - - -def StringToMakefileVariable(string): - """Convert a string to a value that is acceptable as a make variable name.""" - return re.sub('[^a-zA-Z0-9_]', '_', string) - - -srcdir_prefix = '' -def Sourceify(path): - """Convert a path to its source directory form.""" - if '$(' in path: - return path - if os.path.isabs(path): - return path - return srcdir_prefix + path - - -def QuoteSpaces(s, quote=r'\ '): - return s.replace(' ', quote) - - -# Map from qualified target to path to output. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class MakefileWriter: - """MakefileWriter packages up the writing of one target-specific foobar.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, generator_flags, flavor): - self.generator_flags = generator_flags - self.flavor = flavor - - self.suffix_rules_srcdir = {} - self.suffix_rules_objdir1 = {} - self.suffix_rules_objdir2 = {} - - # Generate suffix rules for all compilable extensions. - for ext in COMPILABLE_EXTENSIONS.keys(): - # Suffix rules for source folder. - self.suffix_rules_srcdir.update({ext: ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD - @$(call do_cmd,%s,1) -""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) - - # Suffix rules for generated source files. 
- self.suffix_rules_objdir1.update({ext: ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD - @$(call do_cmd,%s,1) -""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) - self.suffix_rules_objdir2.update({ext: ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD - @$(call do_cmd,%s,1) -""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) - - - def Write(self, qualified_target, base_path, output_filename, spec, configs, - part_of_all): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - ensure_directory_exists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.qualified_target = qualified_target - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - if self.flavor == 'mac': - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - else: - self.xcode_settings = None - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - extra_link_deps = [] - extra_mac_bundle_resources = [] - mac_bundle_deps = [] - - if self.is_mac_bundle: - self.output = self.ComputeMacBundleOutput(spec) - self.output_binary = self.ComputeMacBundleBinaryOutput(spec) - else: - self.output = self.output_binary = self.ComputeOutput(spec) - - self.is_standalone_static_library = bool( - spec.get('standalone_static_library', 0)) - self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', - 'shared_library') - if (self.is_standalone_static_library or - self.type in self._INSTALLABLE_TARGETS): - self.alias = os.path.basename(self.output) - install_path = self._InstallableTargetInstallPath() - else: - self.alias = self.output - install_path = self.output - - self.WriteLn("TOOLSET := " + self.toolset) - self.WriteLn("TARGET := " + self.target) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all) - - # Rules must be early like actions. - if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs, part_of_all) - - # Bundle resources. - if self.is_mac_bundle: - all_mac_bundle_resources = ( - spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources) - self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps) - self.WriteMacInfoPlist(mac_bundle_deps) - - # Sources. 
- all_sources = spec.get('sources', []) + extra_sources - if all_sources: - self.WriteSources( - configs, deps, all_sources, extra_outputs, - extra_link_deps, part_of_all, - gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)), - self.Pchify)) - sources = filter(Compilable, all_sources) - if sources: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) - extensions = set([os.path.splitext(s)[1] for s in sources]) - for ext in extensions: - if ext in self.suffix_rules_srcdir: - self.WriteLn(self.suffix_rules_srcdir[ext]) - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) - for ext in extensions: - if ext in self.suffix_rules_objdir1: - self.WriteLn(self.suffix_rules_objdir1[ext]) - for ext in extensions: - if ext in self.suffix_rules_objdir2: - self.WriteLn(self.suffix_rules_objdir2[ext]) - self.WriteLn('# End of this set of suffix rules') - - # Add dependency from bundle to bundle binary. - if self.is_mac_bundle: - mac_bundle_deps.append(self.output_binary) - - self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps, - mac_bundle_deps, extra_outputs, part_of_all) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = install_path - - # Update global list of link dependencies. - if self.type in ('static_library', 'shared_library'): - target_link_deps[qualified_target] = self.output_binary - - # Currently any versions have the same effect, but in future the behavior - # could be different. - if self.generator_flags.get('android_ndk_version', None): - self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps) - - self.fp.close() - - - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): - """Write a "sub-project" Makefile. - - This is a small, wrapper Makefile that calls the top-level Makefile to build - the targets from a single gyp file (i.e. a sub-project). - - Arguments: - output_filename: sub-project Makefile name to write - makefile_path: path to the top-level Makefile - targets: list of "all" targets for this sub-project - build_dir: build output directory, relative to the sub-project - """ - ensure_directory_exists(output_filename) - self.fp = open(output_filename, 'w') - self.fp.write(header) - # For consistency with other builders, put sub-project build output in the - # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). - self.WriteLn('export builddir_name ?= %s' % - os.path.join(os.path.dirname(output_filename), build_dir)) - self.WriteLn('.PHONY: all') - self.WriteLn('all:') - if makefile_path: - makefile_path = ' -C ' + makefile_path - self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets))) - self.fp.close() - - - def WriteActions(self, actions, extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for action in actions: - name = StringToMakefileVariable('%s_%s' % (self.qualified_target, - action['action_name'])) - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. 
- # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - - # Write the actual command. - action_commands = action['action'] - if self.flavor == 'mac': - action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action_commands] - command = gyp.common.EncodePOSIXShellList(action_commands) - if 'message' in action: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) - else: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name)) - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - cd_action = 'cd %s; ' % Sourceify(self.path or '.') - - # command and cd_action get written to a toplevel variable called - # cmd_foo. Toplevel variables can't handle things that change per - # makefile like $(TARGET), so hardcode the target. - command = command.replace('$(TARGET)', self.target) - cd_action = cd_action.replace('$(TARGET)', self.target) - - # Set LD_LIBRARY_PATH in case the action runs an executable from this - # build which links to shared libs from this build. - # actions run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:' - '$(builddir)/lib.target:$$LD_LIBRARY_PATH; ' - 'export LD_LIBRARY_PATH; ' - '%s%s' - % (name, cd_action, command)) - self.WriteLn() - outputs = map(self.Absolutify, outputs) - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the obj - # variable for the action rule with an absolute version so that the output - # goes in the right place. - # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - # Same for environment. - self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0])) - self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) - self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv()) - - for input in inputs: - assert ' ' not in input, ( - "Spaces in action input filenames not supported (%s)" % input) - for output in outputs: - assert ' ' not in output, ( - "Spaces in action output filenames not supported (%s)" % output) - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), - part_of_all=part_of_all, command=name) - - # Stuff the outputs in a variable so we can refer to them later. - outputs_variable = 'action_%s_outputs' % name - self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs))) - extra_outputs.append('$(%s)' % outputs_variable) - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all): - """Write Makefile code for any 'rules' from the gyp input. 
- - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for rule in rules: - name = StringToMakefileVariable('%s_%s' % (self.qualified_target, - rule['rule_name'])) - count = 0 - self.WriteLn('### Generated for rule %s:' % name) - - all_outputs = [] - - for rule_source in rule.get('rule_sources', []): - dirs = set() - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root, - rule_source_dirname) - for out in rule['outputs']] - - for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - if int(rule.get('process_outputs_as_sources', False)): - extra_sources += outputs - if int(rule.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - inputs = map(Sourceify, map(self.Absolutify, [rule_source] + - rule.get('inputs', []))) - actions = ['$(call do_cmd,%s_%d)' % (name, count)] - - if name == 'resources_grit': - # HACK: This is ugly. Grit intentionally doesn't touch the - # timestamp of its output file when the file doesn't change, - # which is fine in hash-based dependency systems like scons - # and forge, but not kosher in the make world. After some - # discussion, hacking around it here seems like the least - # amount of pain. - actions += ['@touch --no-create $@'] - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - outputs = map(self.Absolutify, outputs) - all_outputs += outputs - # Only write the 'obj' and 'builddir' rules for the "primary" output - # (:1); it's superfluous for the "extra outputs", and this avoids - # accidentally writing duplicate dummy rules for those outputs. - self.WriteLn('%s: obj := $(abs_obj)' % outputs[0]) - self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0]) - self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions) - for output in outputs: - assert ' ' not in output, ( - "Spaces in rule filenames not yet supported (%s)" % output) - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - action = [self.ExpandInputRoot(ac, rule_source_root, - rule_source_dirname) - for ac in rule['action']] - mkdirs = '' - if len(dirs) > 0: - mkdirs = 'mkdir -p %s; ' % ' '.join(dirs) - cd_action = 'cd %s; ' % Sourceify(self.path or '.') - - # action, cd_action, and mkdirs get written to a toplevel variable - # called cmd_foo. Toplevel variables can't handle things that change - # per makefile like $(TARGET), so hardcode the target. - if self.flavor == 'mac': - action = [gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action] - action = gyp.common.EncodePOSIXShellList(action) - action = action.replace('$(TARGET)', self.target) - cd_action = cd_action.replace('$(TARGET)', self.target) - mkdirs = mkdirs.replace('$(TARGET)', self.target) - - # Set LD_LIBRARY_PATH in case the rule runs an executable from this - # build which links to shared libs from this build. 
- # rules run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH=" - "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "export LD_LIBRARY_PATH; " - "%(cd_action)s%(mkdirs)s%(action)s" % { - 'action': action, - 'cd_action': cd_action, - 'count': count, - 'mkdirs': mkdirs, - 'name': name, - }) - self.WriteLn( - 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % { - 'count': count, - 'name': name, - }) - self.WriteLn() - count += 1 - - outputs_variable = 'rule_%s_outputs' % name - self.WriteList(all_outputs, outputs_variable) - extra_outputs.append('$(%s)' % outputs_variable) - - self.WriteLn('### Finished generating for rule: %s' % name) - self.WriteLn() - self.WriteLn('### Finished generating for all rules') - self.WriteLn('') - - - def WriteCopies(self, copies, extra_outputs, part_of_all): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn('### Generated for copy rule.') - - variable = StringToMakefileVariable(self.qualified_target + '_copies') - outputs = [] - for copy in copies: - for path in copy['files']: - # Absolutify() may call normpath, and will strip trailing slashes. - path = Sourceify(self.Absolutify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.Absolutify(os.path.join(copy['destination'], - filename))) - - # If the output path has variables in it, which happens in practice for - # 'copies', writing the environment as target-local doesn't work, - # because the variables are already needed for the target name. - # Copying the environment variables into global make variables doesn't - # work either, because then the .d files will potentially contain spaces - # after variable expansion, and .d file handling cannot handle spaces. - # As a workaround, manually expand variables at gyp time. Since 'copies' - # can't run scripts, there's no need to write the env then. - # WriteDoCmd() will escape spaces for .d files. 
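Aside: the workaround described in the comment above, expanding `$(VAR)`-style references at gyp time rather than leaving them for make, looks roughly like this; `expand_env_vars` is an illustrative stand-in for the `gyp.xcode_emulation.ExpandEnvVars` call used in the code that follows:

```python
import re

def expand_env_vars(path, env):
    # Replace $(VAR) with env['VAR'], leaving unknown references intact.
    def lookup(match):
        return env.get(match.group(1), match.group(0))
    return re.sub(r'\$\((\w+)\)', lookup, path)

env = {'PRODUCT_NAME': 'TileMill'}  # hypothetical values for illustration
print(expand_env_vars('$(PRODUCT_NAME).app/Contents/Resources', env))
# -> TileMill.app/Contents/Resources
```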
- env = self.GetSortedXcodeEnv()
- output = gyp.xcode_emulation.ExpandEnvVars(output, env)
- path = gyp.xcode_emulation.ExpandEnvVars(path, env)
- self.WriteDoCmd([output], [path], 'copy', part_of_all)
- outputs.append(output)
- self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
- extra_outputs.append('$(%s)' % variable)
- self.WriteLn()
-
-
- def WriteMacBundleResources(self, resources, bundle_deps):
- """Writes Makefile code for 'mac_bundle_resources'."""
- self.WriteLn('### Generated for mac_bundle_resources')
-
- for output, res in gyp.xcode_emulation.GetMacBundleResources(
- generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
- map(Sourceify, map(self.Absolutify, resources))):
- self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
- part_of_all=True)
- bundle_deps.append(output)
-
-
- def WriteMacInfoPlist(self, bundle_deps):
- """Write Makefile code for bundle Info.plist files."""
- info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
- generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
- lambda p: Sourceify(self.Absolutify(p)))
- if not info_plist:
- return
- if defines:
- # Create an intermediate file to store preprocessed results.
- intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
- os.path.basename(info_plist))
- self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
- quoter=EscapeCppDefine)
- self.WriteMakeRule([intermediate_plist], [info_plist],
- ['$(call do_cmd,infoplist)',
- # "Convert" the plist so that any weird whitespace changes from the
- # preprocessor do not affect the XML parser in mac_tool.
- '@plutil -convert xml1 $@ $@'])
- info_plist = intermediate_plist
- # plists can contain envvars; substitute them into the file.
- self.WriteSortedXcodeEnv(
- out, self.GetSortedXcodeEnv(additional_settings=extra_env))
- self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
- part_of_all=True)
- bundle_deps.append(out)
-
-
- def WriteSources(self, configs, deps, sources,
- extra_outputs, extra_link_deps,
- part_of_all, precompiled_header):
- """Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
-
- configs, deps, sources: input from gyp.
- extra_outputs: a list of extra outputs this action should be dependent on;
- used to serialize action/rules before compilation
- extra_link_deps: a list that will be filled in with any outputs of
- compilation (to be used in link lines)
- part_of_all: flag indicating this target is part of 'all'
- """
-
- # Write configuration-specific variables for CFLAGS, etc.
- for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D', - quoter=EscapeCppDefine) - - if self.flavor == 'mac': - cflags = self.xcode_settings.GetCflags(configname) - cflags_c = self.xcode_settings.GetCflagsC(configname) - cflags_cc = self.xcode_settings.GetCflagsCC(configname) - cflags_objc = self.xcode_settings.GetCflagsObjC(configname) - cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname) - else: - cflags = config.get('cflags') - cflags_c = config.get('cflags_c') - cflags_cc = config.get('cflags_cc') - - self.WriteLn("# Flags passed to all source files."); - self.WriteList(cflags, 'CFLAGS_%s' % configname) - self.WriteLn("# Flags passed to only C files."); - self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname) - self.WriteLn("# Flags passed to only C++ files."); - self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname) - if self.flavor == 'mac': - self.WriteLn("# Flags passed to only ObjC files."); - self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname) - self.WriteLn("# Flags passed to only ObjC++ files."); - self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname) - includes = config.get('include_dirs') - if includes: - includes = map(Sourceify, map(self.Absolutify, includes)) - self.WriteList(includes, 'INCS_%s' % configname, prefix='-I') - - compilable = filter(Compilable, sources) - objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable))) - self.WriteList(objs, 'OBJS') - - for obj in objs: - assert ' ' not in obj, ( - "Spaces in object filenames not supported (%s)" % obj) - self.WriteLn('# Add to the list of files we specially track ' - 'dependencies for.') - self.WriteLn('all_deps += $(OBJS)') - self.WriteLn() - - # Make sure our dependencies are built first. - if deps: - self.WriteMakeRule(['$(OBJS)'], deps, - comment = 'Make sure our dependencies are built ' - 'before any of us.', - order_only = True) - - # Make sure the actions and rules run first. - # If they generate any extra headers etc., the per-.o file dep tracking - # will catch the proper rebuilds, so order only is still ok here. - if extra_outputs: - self.WriteMakeRule(['$(OBJS)'], extra_outputs, - comment = 'Make sure our actions/rules run ' - 'before any of us.', - order_only = True) - - pchdeps = precompiled_header.GetObjDependencies(compilable, objs ) - if pchdeps: - self.WriteLn('# Dependencies from obj files to their precompiled headers') - for source, obj, gch in pchdeps: - self.WriteLn('%s: %s' % (obj, gch)) - self.WriteLn('# End precompiled header dependencies') - - if objs: - extra_link_deps.append('$(OBJS)') - self.WriteLn("""\ -# CFLAGS et al overrides must be target-local. 
-# See "Target-specific Variable Values" in the GNU Make manual.""") - self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") - self.WriteLn("$(OBJS): GYP_CFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('c') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE))") - self.WriteLn("$(OBJS): GYP_CXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('cc') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE))") - if self.flavor == 'mac': - self.WriteLn("$(OBJS): GYP_OBJCFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('m') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE)) " - "$(CFLAGS_OBJC_$(BUILDTYPE))") - self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('mm') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE)) " - "$(CFLAGS_OBJCC_$(BUILDTYPE))") - - self.WritePchTargets(precompiled_header.GetPchBuildCommands()) - - # If there are any object files in our input file list, link them into our - # output. - extra_link_deps += filter(Linkable, sources) - - self.WriteLn() - - def WritePchTargets(self, pch_commands): - """Writes make rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - extra_flags = { - 'c': '$(CFLAGS_C_$(BUILDTYPE))', - 'cc': '$(CFLAGS_CC_$(BUILDTYPE))', - 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))', - 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))', - }[lang] - var_name = { - 'c': 'GYP_PCH_CFLAGS', - 'cc': 'GYP_PCH_CXXFLAGS', - 'm': 'GYP_PCH_OBJCFLAGS', - 'mm': 'GYP_PCH_OBJCXXFLAGS', - }[lang] - self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) + - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "$(CFLAGS_$(BUILDTYPE)) " + - extra_flags) - - self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input)) - self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang) - self.WriteLn('') - assert ' ' not in gch, ( - "Spaces in gch filenames not supported (%s)" % gch) - self.WriteLn('all_deps += %s' % gch) - self.WriteLn('') - - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - assert not self.is_mac_bundle - - if self.flavor == 'mac' and self.type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module'): - return self.xcode_settings.GetExecutablePath() - - target = spec['target_name'] - target_prefix = '' - target_ext = '' - if self.type == 'static_library': - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.a' - elif self.type in ('loadable_module', 'shared_library'): - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.so' - elif self.type == 'none': - target = '%s.stamp' % target - elif self.type != 'executable': - print ("ERROR: What output file should be generated?", - "type", self.type, "target", target) - - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' 
+ product_ext - - return target_prefix + target + target_ext - - - def _InstallImmediately(self): - return self.toolset == 'target' and self.flavor == 'mac' and self.type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module') - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - assert not self.is_mac_bundle - - path = os.path.join('$(obj).' + self.toolset, self.path) - if self.type == 'executable' or self._InstallImmediately(): - path = '$(builddir)' - path = spec.get('product_dir', path) - return os.path.join(path, self.ComputeOutputBasename(spec)) - - - def ComputeMacBundleOutput(self, spec): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables['PRODUCT_DIR'] - return os.path.join(path, self.xcode_settings.GetWrapperName()) - - - def ComputeMacBundleBinaryOutput(self, spec): - """Return the 'output' (full output path) to the binary in a bundle.""" - path = generator_default_variables['PRODUCT_DIR'] - return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if 'dependencies' in spec: - deps.extend([target_outputs[dep] for dep in spec['dependencies'] - if target_outputs[dep]]) - for dep in spec['dependencies']: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? - # This hack makes it work: - # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteDependencyOnExtraOutputs(self, target, extra_outputs): - self.WriteMakeRule([self.output_binary], extra_outputs, - comment = 'Build our special outputs first.', - order_only = True) - - - def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, - extra_outputs, part_of_all): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - extra_outputs: any extra outputs that our target should depend on - part_of_all: flag indicating this target is part of 'all' - """ - - self.WriteLn('### Rules for final target.') - - if extra_outputs: - self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) - self.WriteMakeRule(extra_outputs, deps, - comment=('Preserve order dependency of ' - 'special output on deps.'), - order_only = True) - - target_postbuilds = {} - if self.type != 'none': - for configname in sorted(configs.keys()): - config = configs[configname] - if self.flavor == 'mac': - ldflags = self.xcode_settings.GetLdflags(configname, - generator_default_variables['PRODUCT_DIR'], - lambda p: Sourceify(self.Absolutify(p))) - - # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. 
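Before the link rules below, it is worth making the naming logic of ComputeOutputBasename above concrete. This is a stripped-down restatement for the non-mac case, ignoring the product_prefix/product_name/product_extension overrides; the target names are invented for the worked cases:

```python
# Simplified restatement of ComputeOutputBasename (non-mac flavor,
# no product_* overrides from the gyp spec).
def output_basename(target, target_type):
    prefix, ext = '', ''
    if target_type in ('static_library', 'shared_library',
                       'loadable_module'):
        if target.startswith('lib'):
            target = target[3:]      # avoid names like 'liblibfoo.so'
        prefix = 'lib'
        ext = '.a' if target_type == 'static_library' else '.so'
    elif target_type == 'none':
        return '%s.stamp' % target   # stamp file for no-output targets
    return prefix + target + ext     # executables pass through unchanged

assert output_basename('foobar', 'loadable_module') == 'libfoobar.so'
assert output_basename('libfoobar', 'static_library') == 'libfoobar.a'
assert output_basename('check', 'none') == 'check.stamp'
```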
- gyp_to_build = gyp.common.InvertRelativePath(self.path) - target_postbuild = self.xcode_settings.GetTargetPostbuilds( - configname, - QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, - self.output))), - QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, - self.output_binary)))) - if target_postbuild: - target_postbuilds[configname] = target_postbuild - else: - ldflags = config.get('ldflags', []) - # Compute an rpath for this output if needed. - if any(dep.endswith('.so') for dep in deps): - # We want to get the literal string "$ORIGIN" into the link command, - # so we need lots of escaping. - ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset) - ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' % - self.toolset) - self.WriteList(ldflags, 'LDFLAGS_%s' % configname) - if self.flavor == 'mac': - self.WriteList(self.xcode_settings.GetLibtoolflags(configname), - 'LIBTOOLFLAGS_%s' % configname) - libraries = spec.get('libraries') - if libraries: - # Remove duplicate entries - libraries = gyp.common.uniquer(libraries) - if self.flavor == 'mac': - libraries = self.xcode_settings.AdjustLibraries(libraries) - self.WriteList(libraries, 'LIBS') - self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % - QuoteSpaces(self.output_binary)) - self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)) - - if self.flavor == 'mac': - self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % - QuoteSpaces(self.output_binary)) - - # Postbuild actions. Like actions, but implicitly depend on the target's - # output. - postbuilds = [] - if self.flavor == 'mac': - if target_postbuilds: - postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))') - postbuilds.extend( - gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) - - if postbuilds: - # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), - # so we must output its definition first, since we declare variables - # using ":=". - self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) - - for configname in target_postbuilds: - self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' % - (QuoteSpaces(self.output), - configname, - gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))) - - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) - for i in xrange(len(postbuilds)): - if not postbuilds[i].startswith('$'): - postbuilds[i] = EscapeShellArgument(postbuilds[i]) - self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) - self.WriteLn('%s: POSTBUILDS := %s' % ( - QuoteSpaces(self.output), ' '.join(postbuilds))) - - # A bundle directory depends on its dependencies such as bundle resources - # and bundle binary. When all dependencies have been built, the bundle - # needs to be packaged. - if self.is_mac_bundle: - # If the framework doesn't contain a binary, then nothing depends - # on the actions -- make the framework depend on them directly too. - self.WriteDependencyOnExtraOutputs(self.output, extra_outputs) - - # Bundle dependencies. Note that the code below adds actions to this - # target, so if you move these two lines, move the lines below as well. - self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS') - self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output)) - - # After the framework is built, package it. Needs to happen before - # postbuilds, since postbuilds depend on this. 
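The rpath flag written a few lines up goes through two rounds of unescaping before the linker sees it, which is why the source needs both a backslash and a doubled dollar sign. A small sketch of what each stage observes (toolset 'target' assumed):

```python
# What the generator writes into the Makefile:
makefile_flag = r'-Wl,-rpath=\$$ORIGIN/lib.target/'
# make collapses '$$' to '$' before handing the recipe to the shell:
shell_sees = makefile_flag.replace('$$', '$')
assert shell_sees == r'-Wl,-rpath=\$ORIGIN/lib.target/'
# The shell then strips the backslash, so the linker receives the
# literal text '$ORIGIN/lib.target/', which the dynamic loader expands
# at run time relative to the binary's own location.
```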
- if self.type in ('shared_library', 'loadable_module'):
- self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
- self.xcode_settings.GetFrameworkVersion())
-
- # Bundle postbuilds can depend on the whole bundle, so run them after
- # the bundle is packaged, not just after the bundle binary is done.
- if postbuilds:
- self.WriteLn('\t@$(call do_postbuilds)')
- postbuilds = [] # Don't write postbuilds for target's output.
-
- # Needed by test/mac/gyptest-rebuild.py.
- self.WriteLn('\t@true # No-op, used by tests')
-
- # Since this target depends on binary and resources which are in
- # nested subfolders, the framework directory will usually be older
- # than its dependencies. To prevent this rule from executing
- # on every build (expensive, especially with postbuilds), explicitly
- # update the time on the framework directory.
- self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
-
- if postbuilds:
- assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
- 'on the bundle, not the binary (target \'%s\')' % self.target)
- assert 'product_dir' not in spec, ('Postbuilds do not work with '
- 'custom product_dir')
-
- if self.type == 'executable':
- self.WriteLn('%s: LD_INPUTS := %s' % (
- QuoteSpaces(self.output_binary),
- ' '.join(map(QuoteSpaces, link_deps))))
- if self.toolset == 'host' and self.flavor == 'android':
- self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
- postbuilds=postbuilds)
-
- elif self.type == 'static_library':
- for link_dep in link_deps:
- assert ' ' not in link_dep, (
- "Spaces in alink input filenames not supported (%s)" % link_dep)
- if (self.flavor not in ('mac', 'openbsd', 'win') and not
- self.is_standalone_static_library):
- self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'shared_library':
- self.WriteLn('%s: LD_INPUTS := %s' % (
- QuoteSpaces(self.output_binary),
- ' '.join(map(QuoteSpaces, link_deps))))
- self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'loadable_module':
- for link_dep in link_deps:
- assert ' ' not in link_dep, (
- "Spaces in module input filenames not supported (%s)" % link_dep)
- if self.toolset == 'host' and self.flavor == 'android':
- self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd(
- [self.output_binary], link_deps, 'solink_module', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'none':
- # Write a stamp line.
- self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
- postbuilds=postbuilds)
- else:
- print "WARNING: no output for", self.type, target
-
- # Add an alias for each target (if there are any outputs).
- # Installable target aliases are created below.
- if ((self.output and self.output != self.target) and
- (self.type not in self._INSTALLABLE_TARGETS)):
- self.WriteMakeRule([self.target], [self.output],
- comment='Add target alias', phony = True)
- if part_of_all:
- self.WriteMakeRule(['all'], [self.target],
- comment = 'Add target alias to "all" target.',
- phony = True)
-
- # Add special-case rules for our installable targets.
- # 1) They need to install to the build dir or "product" dir.
- # 2) They get shortcuts for building (e.g. "make chrome").
- # 3) They are part of "make all".
- if (self.type in self._INSTALLABLE_TARGETS or
- self.is_standalone_static_library):
- if self.type == 'shared_library':
- file_desc = 'shared library'
- elif self.type == 'static_library':
- file_desc = 'static library'
- else:
- file_desc = 'executable'
- install_path = self._InstallableTargetInstallPath()
- installable_deps = [self.output]
- if (self.flavor == 'mac' and not 'product_dir' in spec and
- self.toolset == 'target'):
- # On mac, products are created in install_path immediately.
- assert install_path == self.output, '%s != %s' % (
- install_path, self.output)
-
- # Point the target alias to the final binary output.
- self.WriteMakeRule([self.target], [install_path],
- comment='Add target alias', phony = True)
- if install_path != self.output:
- assert not self.is_mac_bundle # See comment a few lines above.
- self.WriteDoCmd([install_path], [self.output], 'copy',
- comment = 'Copy this to the %s output path.' %
- file_desc, part_of_all=part_of_all)
- installable_deps.append(install_path)
- if self.output != self.alias and self.alias != self.target:
- self.WriteMakeRule([self.alias], installable_deps,
- comment = 'Short alias for building this %s.' %
- file_desc, phony = True)
- if part_of_all:
- self.WriteMakeRule(['all'], [install_path],
- comment = 'Add %s to "all" target.' % file_desc,
- phony = True)
-
-
- def WriteList(self, value_list, variable=None, prefix='',
- quoter=QuoteIfNecessary):
- """Write a variable definition that is a list of values.
-
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo = blaha blahb
- but in a pretty-printed style.
- """
- values = ''
- if value_list:
- value_list = [quoter(prefix + l) for l in value_list]
- values = ' \\\n\t' + ' \\\n\t'.join(value_list)
- self.fp.write('%s :=%s\n\n' % (variable, values))
-
-
- def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
- postbuilds=False):
- """Write a Makefile rule that uses do_cmd.
-
- This makes the outputs dependent on the command line that was run,
- as well as supporting the V= make command-line flag.
- """
- suffix = ''
- if postbuilds:
- assert ',' not in command
- suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
- self.WriteMakeRule(outputs, inputs,
- actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
- comment = comment,
- force = True)
- # Add our outputs to the list of targets we read depfiles from.
- # all_deps is only used for deps file reading, and for deps files we replace
- # spaces with ? because escaping doesn't work with make's $(sort) and
- # other functions.
- outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
- self.WriteLn('all_deps += %s' % ' '.join(outputs))
-
-
- def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
- order_only=False, force=False, phony=False):
- """Write a Makefile rule, with some extra tricks.
-
- outputs: a list of outputs for the rule (note: this is not directly
- supported by make; see comments below)
- inputs: a list of inputs for the rule
- actions: a list of shell commands to run for the rule
- comment: a comment to put in the Makefile above the rule (also useful
- for making this Python script's code self-documenting)
- order_only: if true, makes the dependency order-only
- force: if true, include FORCE_DO_CMD as an order-only dep
- phony: if true, the rule does not actually generate the named output, the
- output is just a name to run the rule
- """
- outputs = map(QuoteSpaces, outputs)
- inputs = map(QuoteSpaces, inputs)
-
- if comment:
- self.WriteLn('# ' + comment)
- if phony:
- self.WriteLn('.PHONY: ' + ' '.join(outputs))
- # TODO(evanm): just make order_only a list of deps instead of these hacks.
- if order_only:
- order_insert = '| '
- pick_output = ' '.join(outputs)
- else:
- order_insert = ''
- pick_output = outputs[0]
- if force:
- force_append = ' FORCE_DO_CMD'
- else:
- force_append = ''
- if actions:
- self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
- self.WriteLn('%s: %s%s%s' % (pick_output, order_insert, ' '.join(inputs),
- force_append))
- if actions:
- for action in actions:
- self.WriteLn('\t%s' % action)
- if not order_only and len(outputs) > 1:
- # If we have more than one output, a rule like
- # foo bar: baz
- # means that for *each* output we must run the action, potentially
- # in parallel. That is not what we're trying to write -- what
- # we want is that we run the action once and it generates all
- # the files.
- # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
- # discusses this problem and has this solution:
- # 1) Write the naive rule that would produce parallel runs of
- # the action.
- # 2) Make the outputs serialized on each other, so we won't start
- # a parallel run until the first run finishes, at which point
- # we'll have generated all the outputs and we're done.
- self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
- # Add a dummy command to the "extra outputs" rule, otherwise make seems to
- # think these outputs haven't (couldn't have?) changed, and thus doesn't
- # flag them as changed (i.e. include in '$?') when evaluating dependent
- # rules, which in turn causes do_cmd() to skip running dependent commands.
- self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
- self.WriteLn()
-
-
- def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
- """Write a set of LOCAL_XXX definitions for Android NDK.
-
- These variable definitions will be used by Android NDK but do nothing for
- non-Android applications.
-
- Arguments:
- module_name: Android NDK module name, which must be unique among all
- module names.
- all_sources: A list of source files (will be filtered by Compilable).
- link_deps: A list of link dependencies, which must be sorted in
- the order from dependencies to dependents.
- """
- if self.type not in ('executable', 'shared_library', 'static_library'):
- return
-
- self.WriteLn('# Variable definitions for Android applications')
- self.WriteLn('include $(CLEAR_VARS)')
- self.WriteLn('LOCAL_MODULE := ' + module_name)
- self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
- '$(DEFS_$(BUILDTYPE)) '
- # LOCAL_CFLAGS is applied to both C and C++. There is
- # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
- # sources.
- '$(CFLAGS_C_$(BUILDTYPE)) '
- # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
- # LOCAL_C_INCLUDES does not expect it.
So put it in - # LOCAL_CFLAGS. - '$(INCS_$(BUILDTYPE))') - # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred. - self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))') - self.WriteLn('LOCAL_C_INCLUDES :=') - self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)') - - # Detect the C++ extension. - cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0} - default_cpp_ext = '.cpp' - for filename in all_sources: - ext = os.path.splitext(filename)[1] - if ext in cpp_ext: - cpp_ext[ext] += 1 - if cpp_ext[ext] > cpp_ext[default_cpp_ext]: - default_cpp_ext = ext - self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext) - - self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)), - 'LOCAL_SRC_FILES') - - # Filter out those which do not match prefix and suffix and produce - # the resulting list without prefix and suffix. - def DepsToModules(deps, prefix, suffix): - modules = [] - for filepath in deps: - filename = os.path.basename(filepath) - if filename.startswith(prefix) and filename.endswith(suffix): - modules.append(filename[len(prefix):-len(suffix)]) - return modules - - # Retrieve the default value of 'SHARED_LIB_SUFFIX' - params = {'flavor': 'linux'} - default_variables = {} - CalculateVariables(default_variables, params) - - self.WriteList( - DepsToModules(link_deps, - generator_default_variables['SHARED_LIB_PREFIX'], - default_variables['SHARED_LIB_SUFFIX']), - 'LOCAL_SHARED_LIBRARIES') - self.WriteList( - DepsToModules(link_deps, - generator_default_variables['STATIC_LIB_PREFIX'], - generator_default_variables['STATIC_LIB_SUFFIX']), - 'LOCAL_STATIC_LIBRARIES') - - if self.type == 'executable': - self.WriteLn('include $(BUILD_EXECUTABLE)') - elif self.type == 'shared_library': - self.WriteLn('include $(BUILD_SHARED_LIBRARY)') - elif self.type == 'static_library': - self.WriteLn('include $(BUILD_STATIC_LIBRARY)') - self.WriteLn() - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def GetSortedXcodeEnv(self, additional_settings=None): - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, "$(abs_builddir)", - os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)", - additional_settings) - - - def GetSortedXcodePostbuildEnv(self): - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - 'CHROMIUM_STRIP_SAVE_FILE', '') - # Even if strip_save_file is empty, explicitly write it. Else a postbuild - # might pick up an export from an earlier target. - return self.GetSortedXcodeEnv( - additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file}) - - - def WriteSortedXcodeEnv(self, target, env): - for k, v in env: - # For - # foo := a\ b - # the escaped space does the right thing. For - # export foo := a\ b - # it does not -- the backslash is written to the env as literal character. - # So don't escape spaces in |env[k]|. 
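To make the escaping caveat just above concrete: a target path may contain spaces and must be quoted on the rule's left-hand side, while the exported value itself must stay unescaped. A tiny sketch with an invented bundle path and setting:

```python
# Hypothetical target path and Xcode-style env setting.
target, k, v = 'out/Default/My App.app', 'PRODUCT_NAME', 'My App'
# Only the target name gets its spaces escaped; the value does not,
# because 'export foo := a\ b' would put a literal backslash into the
# child environment.
line = '%s: export %s := %s' % (target.replace(' ', '\\ '), k, v)
assert line == 'out/Default/My\\ App.app: export PRODUCT_NAME := My App'
```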
- self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v)) - - - def Objectify(self, path): - """Convert a path to its output directory form.""" - if '$(' in path: - path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset) - if not '$(obj)' in path: - path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path) - return path - - - def Pchify(self, path, lang): - """Convert a prefix header path to its output directory form.""" - path = self.Absolutify(path) - if '$(' in path: - path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' % - (self.toolset, lang)) - return path - return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path) - - - def Absolutify(self, path): - """Convert a subdirectory-relative path into a base-relative path. - Skips over paths that contain variables.""" - if '$(' in path: - # Don't call normpath in this case, as it might collapse the - # path too aggressively if it features '..'. However it's still - # important to strip trailing slashes. - return path.rstrip('/') - return os.path.normpath(os.path.join(self.path, path)) - - - def ExpandInputRoot(self, template, expansion, dirname): - if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: - return template - path = template % { - 'INPUT_ROOT': expansion, - 'INPUT_DIRNAME': dirname, - } - return path - - - def _InstallableTargetInstallPath(self): - """Returns the location of the final output for an installable target.""" - # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files - # rely on this. Emulate this behavior for mac. - - # XXX(TooTallNate): disabling this code since we don't want this behavior... - #if (self.type == 'shared_library' and - # (self.flavor != 'mac' or self.toolset != 'target')): - # # Install all shared libs into a common directory (per toolset) for - # # convenient access with LD_LIBRARY_PATH. 
- # return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias) - return '$(builddir)/' + self.alias - - -def WriteAutoRegenerationRule(params, root_makefile, makefile_name, - build_files): - """Write the target to regenerate the Makefile.""" - options = params['options'] - build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params['build_files_arg']] - gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], - options.toplevel_dir) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join('.', gyp_binary) - root_makefile.write( - "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" - "cmd_regen_makefile = %(cmd)s\n" - "%(makefile_name)s: %(deps)s\n" - "\t$(call do_cmd,regen_makefile)\n\n" % { - 'makefile_name': makefile_name, - 'deps': ' '.join(map(Sourceify, build_files)), - 'cmd': gyp.common.EncodePOSIXShellList( - [gyp_binary, '-fmake'] + - gyp.RegenerateFlags(options) + - build_files_args)}) - - -def PerformBuild(data, configurations, params): - options = params['options'] - for config in configurations: - arguments = ['make'] - if options.toplevel_dir and options.toplevel_dir != '.': - arguments += '-C', options.toplevel_dir - arguments.append('BUILDTYPE=' + config) - print 'Building [%s]: %s' % (config, arguments) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - android_ndk_version = generator_flags.get('android_ndk_version', None) - default_target = generator_flags.get('default_target', 'all') - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join(options.generator_output, output_file) - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' 
- makefile_name = 'Makefile' + options.suffix - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join(options.generator_output, makefile_path) - srcdir = gyp.common.RelativePath(srcdir, options.generator_output) - srcdir_prefix = '$(srcdir)/' - - flock_command= 'flock' - header_params = { - 'default_target': default_target, - 'builddir': builddir_name, - 'default_configuration': default_configuration, - 'flock': flock_command, - 'flock_index': 1, - 'link_commands': LINK_COMMANDS_LINUX, - 'extra_commands': '', - 'srcdir': srcdir, - } - if flavor == 'mac': - flock_command = './gyp-mac-tool flock' - header_params.update({ - 'flock': flock_command, - 'flock_index': 2, - 'link_commands': LINK_COMMANDS_MAC, - 'extra_commands': SHARED_HEADER_MAC_COMMANDS, - }) - elif flavor == 'android': - header_params.update({ - 'link_commands': LINK_COMMANDS_ANDROID, - }) - elif flavor == 'solaris': - header_params.update({ - 'flock': './gyp-sun-tool flock', - 'flock_index': 2, - 'extra_commands': SHARED_HEADER_SUN_COMMANDS, - }) - elif flavor == 'freebsd': - # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. - header_params.update({ - 'flock': 'lockf', - }) - - header_params.update({ - 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), - 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), - 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), - 'LINK.target': GetEnvironFallback(('LD_target', 'LD'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host',), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'), - 'LINK.host': GetEnvironFallback(('LD_host',), 'g++'), - }) - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_array = data[build_file].get('make_global_settings', []) - wrappers = {} - wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command - for key, value in make_global_settings_array: - if key.endswith('_wrapper'): - wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value - make_global_settings = '' - for key, value in make_global_settings_array: - if re.match('.*_wrapper', key): - continue - if value[0] != '$': - value = '$(abspath %s)' % value - wrapper = wrappers.get(key) - if wrapper: - value = '%s %s' % (wrapper, value) - del wrappers[key] - if key in ('CC', 'CC.host', 'CXX', 'CXX.host'): - make_global_settings += ( - 'ifneq (,$(filter $(origin %s), undefined default))\n' % key) - # Let gyp-time envvars win over global settings. - if key in os.environ: - value = os.environ[key] - make_global_settings += ' %s = %s\n' % (key, value) - make_global_settings += 'endif\n' - else: - make_global_settings += '%s ?= %s\n' % (key, value) - # TODO(ukai): define cmd when only wrapper is specified in - # make_global_settings. - - header_params['make_global_settings'] = make_global_settings - - ensure_directory_exists(makefile_path) - root_makefile = open(makefile_path, 'w') - root_makefile.write(SHARED_HEADER % header_params) - # Currently any versions have the same effect, but in future the behavior - # could be different. 
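One helper used heavily in the header_params block above is GetEnvironFallback, which returns the first environment variable in the given tuple that is set, else the supplied default. A minimal equivalent for illustration (the real helper lives in gyp.common):

```python
import os

def environ_fallback(var_list, default):
    # The first variable in var_list that is set in the environment
    # wins; otherwise fall back to the default (e.g. '$(CC)' or 'gcc').
    for var in var_list:
        if var in os.environ:
            return os.environ[var]
    return default

# Mirrors the 'CC.target' entry above.
cc_target = environ_fallback(('CC_target', 'CC'), '$(CC)')
```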
- if android_ndk_version: - root_makefile.write( - '# Define LOCAL_PATH for build of Android applications.\n' - 'LOCAL_PATH := $(call my-dir)\n' - '\n') - for toolset in toolsets: - root_makefile.write('TOOLSET := %s\n' % toolset) - WriteRootHeaderSuffixRules(root_makefile) - - # Put build-time support tools next to the root Makefile. - dest_path = os.path.dirname(makefile_path) - gyp.common.CopyTool(flavor, dest_path) - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - - this_make_global_settings = data[build_file].get('make_global_settings', []) - assert make_global_settings_array == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets.") - - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - if flavor == 'mac': - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - writer = MakefileWriter(generator_flags, flavor) - writer.Write(qualified_target, base_path, output_file, spec, configs, - part_of_all=qualified_target in needed_targets) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add(mkfile_rel_path) - - # Write out per-gyp (sub-project) Makefiles. - depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [target_dicts[target]['target_name'] for target in target_list - if target.startswith(build_file) and - target in needed_targets] - # Only generate Makefiles for gyp files with targets. 
- if not gyp_targets: - continue - base_path, output_file = CalculateMakefilePath(build_file, - os.path.splitext(os.path.basename(build_file))[0] + '.Makefile') - makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path), - os.path.dirname(output_file)) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, - builddir_name) - - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include_file in sorted(include_list): - # We wrap each .mk include in an if statement so users can tell make to - # not load a file by setting NO_LOAD. The below make code says, only - # load the .mk file if the .mk filename doesn't start with a token in - # NO_LOAD. - root_makefile.write( - "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n" - " $(findstring $(join ^,$(prefix)),\\\n" - " $(join ^," + include_file + ")))),)\n") - root_makefile.write(" include " + include_file + "\n") - root_makefile.write("endif\n") - root_makefile.write('\n') - - if (not generator_flags.get('standalone') - and generator_flags.get('auto_regeneration', True)): - WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/make.pyc and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/make.pyc differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,3123 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import ntpath -import os -import posixpath -import re -import subprocess -import sys - -import gyp.common -import gyp.easy_xml as easy_xml -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSSettings as MSVSSettings -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSUtil as MSVSUtil -import gyp.MSVSVersion as MSVSVersion -from gyp.common import GypError - - -# Regular expression for validating Visual Studio GUIDs. If the GUID -# contains lowercase hex letters, MSVS will be fine. However, -# IncrediBuild BuildConsole will parse the solution file, but then -# silently skip building the target causing hard to track down errors. -# Note that this only happens with the BuildConsole, and does not occur -# if IncrediBuild is executed from inside Visual Studio. This regex -# validates that the string looks like a GUID with all uppercase hex -# letters. 
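The uppercase-only constraint described in the comment above is enforced by the regex defined just below. A quick illustration with an invented GUID, showing that a lowercase variant is rejected:

```python
import re

# Same pattern as VALID_MSVS_GUID_CHARS below: uppercase hex and
# dashes only, so IncrediBuild's BuildConsole does not silently skip
# the target.
valid_guid = re.compile(r'^[A-F0-9\-]+$')

assert valid_guid.match('C9F5F3FE-5D5F-4B45-A5D5-4C09594C78D2')
assert not valid_guid.match('c9f5f3fe-5d5f-4b45-a5d5-4c09594c78d2')
```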
-VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')
-
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '.exe',
- 'STATIC_LIB_PREFIX': '',
- 'SHARED_LIB_PREFIX': '',
- 'STATIC_LIB_SUFFIX': '.lib',
- 'SHARED_LIB_SUFFIX': '.dll',
- 'INTERMEDIATE_DIR': '$(IntDir)',
- 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
- 'OS': 'win',
- 'PRODUCT_DIR': '$(OutDir)',
- 'LIB_DIR': '$(OutDir)lib',
- 'RULE_INPUT_ROOT': '$(InputName)',
- 'RULE_INPUT_DIRNAME': '$(InputDir)',
- 'RULE_INPUT_EXT': '$(InputExt)',
- 'RULE_INPUT_NAME': '$(InputFileName)',
- 'RULE_INPUT_PATH': '$(InputPath)',
- 'CONFIGURATION_NAME': '$(ConfigurationName)',
-}
-
-
-# The msvs-specific sections that hold paths
-generator_additional_path_sections = [
- 'msvs_cygwin_dirs',
- 'msvs_props',
-]
-
-
-generator_additional_non_configuration_keys = [
- 'msvs_cygwin_dirs',
- 'msvs_cygwin_shell',
- 'msvs_large_pdb',
- 'msvs_shard',
-]
-
-
-# List of precompiled header related keys.
-precomp_keys = [
- 'msvs_precompiled_header',
- 'msvs_precompiled_source',
-]
-
-
-cached_username = None
-
-
-cached_domain = None
-
-
-# TODO(gspencer): Switch the os.environ calls to be
-# win32api.GetDomainName() and win32api.GetUserName() once the
-# python version in depot_tools has been updated to work on Vista
-# 64-bit.
-def _GetDomainAndUserName():
- if sys.platform not in ('win32', 'cygwin'):
- return ('DOMAIN', 'USERNAME')
- global cached_username
- global cached_domain
- if not cached_domain or not cached_username:
- domain = os.environ.get('USERDOMAIN')
- username = os.environ.get('USERNAME')
- if not domain or not username:
- call = subprocess.Popen(['net', 'config', 'Workstation'],
- stdout=subprocess.PIPE)
- config = call.communicate()[0]
- username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
- username_match = username_re.search(config)
- if username_match:
- username = username_match.group(1)
- domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
- domain_match = domain_re.search(config)
- if domain_match:
- domain = domain_match.group(1)
- cached_domain = domain
- cached_username = username
- return (cached_domain, cached_username)
-
-fixpath_prefix = None
-
-
-def _NormalizedSource(source):
- """Normalize the path.
-
- But not if that gets rid of a variable, as this may expand to something
- larger than one directory.
-
- Arguments:
- source: The path to be normalized.
-
- Returns:
- The normalized path.
- """
- normalized = os.path.normpath(source)
- if source.count('$') == normalized.count('$'):
- source = normalized
- return source
-
-
-def _FixPath(path):
- """Convert paths to a form that will make sense in a vcproj file.
-
- Arguments:
- path: The path to convert, may contain / etc.
- Returns:
- The path with all slashes made into backslashes.
- """
- if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
- path = os.path.join(fixpath_prefix, path)
- path = path.replace('/', '\\')
- path = _NormalizedSource(path)
- if path and path[-1] == '\\':
- path = path[:-1]
- return path
-
-
-def _FixPaths(paths):
- """Fix each of the paths of the list."""
- return [_FixPath(i) for i in paths]
-
-
-def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
- list_excluded=True):
- """Converts a list of split source file paths into a vcproj folder hierarchy.
-
- Arguments:
- sources: A list of source file paths split.
- prefix: A list of source file path layers meant to apply to each of sources.
- excluded: A set of excluded files.
- - Returns: - A hierarchy of filenames and MSVSProject.Filter objects that matches the - layout of the source tree. - For example: - _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']], - prefix=['joe']) - --> - [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']), - MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])] - """ - if not prefix: prefix = [] - result = [] - excluded_result = [] - folders = dict() - # Gather files into the final result, excluded, or folders. - for s in sources: - if len(s) == 1: - filename = _NormalizedSource('\\'.join(prefix + s)) - if filename in excluded: - excluded_result.append(filename) - else: - result.append(filename) - else: - if not folders.get(s[0]): - folders[s[0]] = [] - folders[s[0]].append(s[1:]) - # Add a folder for excluded files. - if excluded_result and list_excluded: - excluded_folder = MSVSProject.Filter('_excluded_files', - contents=excluded_result) - result.append(excluded_folder) - # Populate all the folders. - for f in folders: - contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f], - excluded=excluded, - list_excluded=list_excluded) - contents = MSVSProject.Filter(f, contents=contents) - result.append(contents) - - return result - - -def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): - if not value: return - _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset) - - -def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): - # TODO(bradnelson): ugly hack, fix this more generally!!! - if 'Directories' in setting or 'Dependencies' in setting: - if type(value) == str: - value = value.replace('/', '\\') - else: - value = [i.replace('/', '\\') for i in value] - if not tools.get(tool_name): - tools[tool_name] = dict() - tool = tools[tool_name] - if tool.get(setting): - if only_if_unset: return - if type(tool[setting]) == list: - tool[setting] += value - else: - raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - 'not allowed, previous value: %s' % ( - value, setting, tool_name, str(tool[setting]))) - else: - tool[setting] = value - - -def _ConfigPlatform(config_data): - return config_data.get('msvs_configuration_platform', 'Win32') - - -def _ConfigBaseName(config_name, platform_name): - if config_name.endswith('_' + platform_name): - return config_name[0:-len(platform_name) - 1] - else: - return config_name - - -def _ConfigFullName(config_name, config_data): - platform_name = _ConfigPlatform(config_data) - return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name) - - -def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, - quote_cmd, do_setup_env): - - if [x for x in cmd if '$(InputDir)' in x]: - input_dir_preamble = ( - 'set INPUTDIR=$(InputDir)\n' - 'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n' - 'set INPUTDIR=%INPUTDIR:~0,-1%\n' - ) - else: - input_dir_preamble = '' - - if cygwin_shell: - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) - # Prepare command. 
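Stepping back to the configuration-name helpers above (_ConfigPlatform, _ConfigBaseName, _ConfigFullName): the munging is plain string surgery, restated here with sample configuration data:

```python
# Restatement of _ConfigFullName with hypothetical config data.
def config_full_name(config_name, config_data):
    platform = config_data.get('msvs_configuration_platform', 'Win32')
    if config_name.endswith('_' + platform):
        # Strip a trailing '_<platform>' from the gyp config name.
        config_name = config_name[:-len(platform) - 1]
    return '%s|%s' % (config_name, platform)

assert config_full_name('Debug_x64',
                        {'msvs_configuration_platform': 'x64'}) == 'Debug|x64'
assert config_full_name('Release', {}) == 'Release|Win32'
```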
- direct_cmd = cmd - direct_cmd = [i.replace('$(IntDir)', - '`cygpath -m "${INTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(OutDir)', - '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(InputDir)', - '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd] - if has_input_path: - direct_cmd = [i.replace('$(InputPath)', - '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd] - direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] - # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) - direct_cmd = ' '.join(direct_cmd) - # TODO(quote): regularize quoting path names throughout the module - cmd = '' - if do_setup_env: - cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' - cmd += 'set CYGWIN=nontsec&& ' - if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0: - cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& ' - if direct_cmd.find('INTDIR') >= 0: - cmd += 'set INTDIR=$(IntDir)&& ' - if direct_cmd.find('OUTDIR') >= 0: - cmd += 'set OUTDIR=$(OutDir)&& ' - if has_input_path and direct_cmd.find('INPUTPATH') >= 0: - cmd += 'set INPUTPATH=$(InputPath) && ' - cmd += 'bash -c "%(cmd)s"' - cmd = cmd % {'cygwin_dir': cygwin_dir, - 'cmd': direct_cmd} - return input_dir_preamble + cmd - else: - # Convert cat --> type to mimic unix. - if cmd[0] == 'cat': - command = ['type'] - else: - command = [cmd[0].replace('/', '\\')] - # Add call before command to ensure that commands can be tied together one - # after the other without aborting in Incredibuild, since IB makes a bat - # file out of the raw command string, and some commands (like python) are - # actually batch files themselves. - command.insert(0, 'call') - # Fix the paths - # TODO(quote): This is a really ugly heuristic, and will miss path fixing - # for arguments like "--arg=path" or "/opt:path". - # If the argument starts with a slash or dash, it's probably a command line - # switch - arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]] - arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments] - arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] - if quote_cmd: - # Support a mode for using cmd directly. - # Convert any paths to native form (first element is used directly). - # TODO(quote): regularize quoting path names throughout the module - arguments = ['"%s"' % i for i in arguments] - # Collapse into a single command. - return input_dir_preamble + ' '.join(command + arguments) - - -def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): - # Currently this weird argument munging is used to duplicate the way a - # python script would need to be run as part of the chrome tree. - # Eventually we should add some sort of rule_default option to set this - # per project. For now the behavior chrome needs is the default. - mcs = rule.get('msvs_cygwin_shell') - if mcs is None: - mcs = int(spec.get('msvs_cygwin_shell', 1)) - elif isinstance(mcs, str): - mcs = int(mcs) - quote_cmd = int(rule.get('msvs_quote_cmd', 1)) - return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, - quote_cmd, do_setup_env=do_setup_env) - - -def _AddActionStep(actions_dict, inputs, outputs, description, command): - """Merge action into an existing list of actions. - - Care must be taken so that actions which have overlapping inputs either don't - get assigned to the same input, or get collapsed into one. 
- - Arguments: - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - inputs: list of inputs - outputs: list of outputs - description: description of the action - command: command line to execute - """ - # Require there to be at least one input (call sites will ensure this). - assert inputs - - action = { - 'inputs': inputs, - 'outputs': outputs, - 'description': description, - 'command': command, - } - - # Pick where to stick this action. - # While less than optimal in terms of build time, attach them to the first - # input for now. - chosen_input = inputs[0] - - # Add it there. - if chosen_input not in actions_dict: - actions_dict[chosen_input] = [] - actions_dict[chosen_input].append(action) - - -def _AddCustomBuildToolForMSVS(p, spec, primary_input, - inputs, outputs, description, cmd): - """Add a custom build tool to execute something. - - Arguments: - p: the target project - spec: the target project dict - primary_input: input file to attach the build tool to - inputs: list of inputs - outputs: list of outputs - description: description of the action - cmd: command line to execute - """ - inputs = _FixPaths(inputs) - outputs = _FixPaths(outputs) - tool = MSVSProject.Tool( - 'VCCustomBuildTool', - {'Description': description, - 'AdditionalDependencies': ';'.join(inputs), - 'Outputs': ';'.join(outputs), - 'CommandLine': cmd, - }) - # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): - p.AddFileConfig(_FixPath(primary_input), - _ConfigFullName(config_name, c_data), tools=[tool]) - - -def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): - """Add actions accumulated into an actions_dict, merging as needed. - - Arguments: - p: the target project - spec: the target project dict - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - """ - for primary_input in actions_dict: - inputs = set() - outputs = set() - descriptions = [] - commands = [] - for action in actions_dict[primary_input]: - inputs.update(set(action['inputs'])) - outputs.update(set(action['outputs'])) - descriptions.append(action['description']) - commands.append(action['command']) - # Add the custom build step for one input file. - description = ', and also '.join(descriptions) - command = '\r\n'.join(commands) - _AddCustomBuildToolForMSVS(p, spec, - primary_input=primary_input, - inputs=inputs, - outputs=outputs, - description=description, - cmd=command) - - -def _RuleExpandPath(path, input_file): - """Given the input file to which a rule applied, string substitute a path. - - Arguments: - path: a path to string expand - input_file: the file to which the rule applied. - Returns: - The string substituted path. - """ - path = path.replace('$(InputName)', - os.path.splitext(os.path.split(input_file)[1])[0]) - path = path.replace('$(InputDir)', os.path.dirname(input_file)) - path = path.replace('$(InputExt)', - os.path.splitext(os.path.split(input_file)[1])[1]) - path = path.replace('$(InputFileName)', os.path.split(input_file)[1]) - path = path.replace('$(InputPath)', input_file) - return path - - -def _FindRuleTriggerFiles(rule, sources): - """Find the list of files which a particular rule applies to. - - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The list of sources that trigger a particular rule. 
- """ - rule_ext = rule['extension'] - return [s for s in sources if s.endswith('.' + rule_ext)] - - -def _RuleInputsAndOutputs(rule, trigger_file): - """Find the inputs and outputs generated by a rule. - - Arguments: - rule: the rule in question. - trigger_file: the main trigger for this rule. - Returns: - The pair of (inputs, outputs) involved in this rule. - """ - raw_inputs = _FixPaths(rule.get('inputs', [])) - raw_outputs = _FixPaths(rule.get('outputs', [])) - inputs = set() - outputs = set() - inputs.add(trigger_file) - for i in raw_inputs: - inputs.add(_RuleExpandPath(i, trigger_file)) - for o in raw_outputs: - outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) - - -def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): - """Generate a native rules file. - - Arguments: - p: the target project - rules: the set of rules to include - output_dir: the directory in which the project/gyp resides - spec: the project dict - options: global generator options - """ - rules_filename = '%s%s.rules' % (spec['target_name'], - options.suffix) - rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename), - spec['target_name']) - # Add each rule. - for r in rules: - rule_name = r['rule_name'] - rule_ext = r['extension'] - inputs = _FixPaths(r.get('inputs', [])) - outputs = _FixPaths(r.get('outputs', [])) - # Skip a rule with no action and no inputs. - if 'action' not in r and not r.get('rule_sources', []): - continue - cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, - do_setup_env=True) - rules_file.AddCustomBuildRule(name=rule_name, - description=r.get('message', rule_name), - extensions=[rule_ext], - additional_dependencies=inputs, - outputs=outputs, - cmd=cmd) - # Write out rules file. - rules_file.WriteIfChanged() - - # Add rules file to project. - p.AddToolFile(rules_filename) - - -def _Cygwinify(path): - path = path.replace('$(OutDir)', '$(OutDirCygwin)') - path = path.replace('$(IntDir)', '$(IntDirCygwin)') - return path - - -def _GenerateExternalRules(rules, output_dir, spec, - sources, options, actions_to_add): - """Generate an external makefile to do a set of rules. - - Arguments: - rules: the list of rules to include - output_dir: path containing project and gyp files - spec: project specification data - sources: set of sources known - options: global generator options - actions_to_add: The list of actions we will add to. - """ - filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix) - mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) - # Find cygwin style versions of some paths. - mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') - mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') - # Gather stuff needed to emit all: target. - all_inputs = set() - all_outputs = set() - all_output_dirs = set() - first_outputs = [] - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(set(inputs)) - all_outputs.update(set(outputs)) - # Only use one target from each rule as the dependency for - # 'all' so we don't try to build each rule multiple times. - first_outputs.append(list(outputs)[0]) - # Get the unique output directories for this rule. 
-      output_dirs = [os.path.split(i)[0] for i in outputs]
-      for od in output_dirs:
-        all_output_dirs.add(od)
-  first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
-  # Write out all: target, including mkdir for each output directory.
-  mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
-  for od in all_output_dirs:
-    if od:
-      mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
-  mk_file.write('\n')
-  # Define how each output is generated.
-  for rule in rules:
-    trigger_files = _FindRuleTriggerFiles(rule, sources)
-    for tf in trigger_files:
-      # Get all the inputs and outputs for this rule for this trigger file.
-      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
-      inputs = [_Cygwinify(i) for i in inputs]
-      outputs = [_Cygwinify(i) for i in outputs]
-      # Prepare the command line for this rule.
-      cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
-      cmd = ['"%s"' % i for i in cmd]
-      cmd = ' '.join(cmd)
-      # Add it to the makefile.
-      mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
-      mk_file.write('\t%s\n\n' % cmd)
-  # Close up the file.
-  mk_file.close()
-
-  # Add makefile to list of sources.
-  sources.add(filename)
-  # Add a build action to call makefile.
-  cmd = ['make',
-         'OutDir=$(OutDir)',
-         'IntDir=$(IntDir)',
-         '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
-         '-f', filename]
-  cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
-  # Insert makefile as 0'th input, so it gets the action attached there,
-  # as this is easier to understand in the IDE.
-  all_inputs = list(all_inputs)
-  all_inputs.insert(0, filename)
-  _AddActionStep(actions_to_add,
-                 inputs=_FixPaths(all_inputs),
-                 outputs=_FixPaths(all_outputs),
-                 description='Running external rules for %s' %
-                 spec['target_name'],
-                 command=cmd)
-
-
-def _EscapeEnvironmentVariableExpansion(s):
-  """Escapes % characters.
-
-  Escapes any % characters so that Windows-style environment variable
-  expansions will leave them alone.
-  See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
-  to understand why we have to do this.
-
-  Args:
-    s: The string to be escaped.
-
-  Returns:
-    The escaped string.
-  """
-  s = s.replace('%', '%%')
-  return s
-
-
-quote_replacer_regex = re.compile(r'(\\*)"')
-
-
-def _EscapeCommandLineArgumentForMSVS(s):
-  """Escapes a Windows command-line argument.
-
-  So that the Win32 CommandLineToArgv function will turn the escaped result
-  back into the original string.
-  See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
-  ("Parsing C++ Command-Line Arguments") to understand why we have to do
-  this.
-
-  Args:
-    s: the string to be escaped.
-  Returns:
-    the escaped string.
-  """
-
-  def _Replace(match):
-    # For a literal quote, CommandLineToArgv requires an odd number of
-    # backslashes preceding it, and it produces half as many literal
-    # backslashes (rounded down). So we need to produce 2n+1 backslashes.
-    return 2 * match.group(1) + '\\"'
-
-  # Escape all quotes so that they are interpreted literally.
-  s = quote_replacer_regex.sub(_Replace, s)
-  # Now add unescaped quotes so that any whitespace is interpreted literally.
-  s = '"' + s + '"'
-  return s
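As a quick sanity check of the rule just above (n backslashes before a quote become 2n, plus one escaping backslash), the following self-contained snippet replays `_EscapeCommandLineArgumentForMSVS`:

```python
import re

quote_replacer_regex = re.compile(r'(\\*)"')

def escape_for_msvs(s):
    # Same two steps as the function above: escape every quote literally,
    # then wrap the whole argument in unescaped quotes so whitespace is kept.
    s = quote_replacer_regex.sub(lambda m: 2 * m.group(1) + '\\"', s)
    return '"' + s + '"'

print(escape_for_msvs('say "hello world"'))
# "say \"hello world\""  -- CommandLineToArgv turns this back into the input
```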
-delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
-
-
-def _EscapeVCProjCommandLineArgListItem(s):
-  """Escapes command line arguments for MSVS.
-
-  The VCProj format stores string lists in a single string using commas and
-  semi-colons as separators, which must be quoted if they are to be
-  interpreted literally. However, command-line arguments may already have
-  quotes, and the VCProj parser is ignorant of the backslash escaping
-  convention used by CommandLineToArgv, so the command-line quotes and the
-  VCProj quotes may not be the same quotes. So to store a general
-  command-line argument in a VCProj list, we need to parse the existing
-  quoting according to VCProj's convention and quote any delimiters that are
-  not already quoted by that convention. The quotes that we add will also be
-  seen by CommandLineToArgv, so if backslashes precede them then we also
-  have to escape those backslashes according to the CommandLineToArgv
-  convention.
-
-  Args:
-    s: the string to be escaped.
-  Returns:
-    the escaped string.
-  """
-
-  def _Replace(match):
-    # For a non-literal quote, CommandLineToArgv requires an even number of
-    # backslashes preceding it, and it produces half as many literal
-    # backslashes. So we need to produce 2n backslashes.
-    return 2 * match.group(1) + '"' + match.group(2) + '"'
-
-  segments = s.split('"')
-  # The unquoted segments are at the even-numbered indices.
-  for i in range(0, len(segments), 2):
-    segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
-  # Concatenate back into a single string
-  s = '"'.join(segments)
-  if len(segments) % 2 == 0:
-    # String ends while still quoted according to VCProj's convention. This
-    # means the delimiter and the next list item that follow this one in the
-    # .vcproj file will be misinterpreted as part of this item. There is
-    # nothing we can do about this. Adding an extra quote would correct the
-    # problem in the VCProj but cause the same problem on the final
-    # command-line. Moving the item to the end of the list does work, but
-    # that's only possible if there's only one such item. Let's just warn
-    # the user.
-    print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
-                          'quotes in ' + s)
-  return s
-
-
-def _EscapeCppDefineForMSVS(s):
-  """Escapes a CPP define so that it will reach the compiler unaltered."""
-  s = _EscapeEnvironmentVariableExpansion(s)
-  s = _EscapeCommandLineArgumentForMSVS(s)
-  s = _EscapeVCProjCommandLineArgListItem(s)
-  # cl.exe replaces literal # characters with = in preprocessor definitions
-  # for some reason. Octal-encode to work around that.
-  s = s.replace('#', '\\%03o' % ord('#'))
-  return s
-
-
-quote_replacer_regex2 = re.compile(r'(\\+)"')
-
-
-def _EscapeCommandLineArgumentForMSBuild(s):
-  """Escapes a Windows command-line argument for use by MSBuild."""
-
-  def _Replace(match):
-    return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
-
-  # Escape all quotes so that they are interpreted literally.
-  s = quote_replacer_regex2.sub(_Replace, s)
-  return s
-
-
-def _EscapeMSBuildSpecialCharacters(s):
-  escape_dictionary = {
-      '%': '%25',
-      '$': '%24',
-      '@': '%40',
-      "'": '%27',
-      ';': '%3B',
-      '?': '%3F',
-      '*': '%2A'
-  }
-  result = ''.join([escape_dictionary.get(c, c) for c in s])
-  return result
-
-
-def _EscapeCppDefineForMSBuild(s):
-  """Escapes a CPP define so that it will reach the compiler unaltered."""
-  s = _EscapeEnvironmentVariableExpansion(s)
-  s = _EscapeCommandLineArgumentForMSBuild(s)
-  s = _EscapeMSBuildSpecialCharacters(s)
-  # cl.exe replaces literal # characters with = in preprocessor definitions
-  # for some reason. Octal-encode to work around that.
-  s = s.replace('#', '\\%03o' % ord('#'))
-  return s
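For illustration, here is the percent-encoding table from `_EscapeMSBuildSpecialCharacters` in action (standalone copy; `msbuild_escape` is my name for it, and the sample define is made up):

```python
# MSBuild treats %, $, @, ', ;, ? and * specially, so a define that should
# reach cl.exe verbatim must have them percent-encoded first.
escape_dictionary = {'%': '%25', '$': '%24', '@': '%40', "'": '%27',
                     ';': '%3B', '?': '%3F', '*': '%2A'}

def msbuild_escape(s):
    return ''.join(escape_dictionary.get(c, c) for c in s)

print(msbuild_escape('PRODUCT_NAME=$(Sample);v1'))
# PRODUCT_NAME=%24(Sample)%3Bv1  -- MSBuild no longer expands or splits it
```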
-def _GenerateRulesForMSVS(p, output_dir, options, spec,
-                          sources, excluded_sources,
-                          actions_to_add):
-  """Generate all the rules for a particular project.
-
-  Arguments:
-    p: the project
-    output_dir: directory to emit rules to
-    options: global options passed to the generator
-    spec: the specification for this project
-    sources: the set of all known source files in this project
-    excluded_sources: the set of sources excluded from normal processing
-    actions_to_add: deferred list of actions to add in
-  """
-  rules = spec.get('rules', [])
-  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
-  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
-  # Handle rules that use a native rules file.
-  if rules_native:
-    _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
-
-  # Handle external rules (non-native rules).
-  if rules_external:
-    _GenerateExternalRules(rules_external, output_dir, spec,
-                           sources, options, actions_to_add)
-  _AdjustSourcesForRules(rules, sources, excluded_sources)
-
-
-def _AdjustSourcesForRules(rules, sources, excluded_sources):
-  # Add outputs generated by each rule (if applicable).
-  for rule in rules:
-    # Skip this rule if it does not process outputs as sources.
-    if int(rule.get('process_outputs_as_sources', False)):
-      # Add in the outputs from this rule.
-      trigger_files = _FindRuleTriggerFiles(rule, sources)
-      for trigger_file in trigger_files:
-        inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
-        inputs = set(_FixPaths(inputs))
-        outputs = set(_FixPaths(outputs))
-        inputs.remove(_FixPath(trigger_file))
-        sources.update(inputs)
-        excluded_sources.update(inputs)
-        sources.update(outputs)
-
-
-def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
-  """Take inputs with actions attached out of the list of exclusions.
-
-  Arguments:
-    excluded_sources: list of source files not to be built.
-    actions_to_add: dict of actions keyed on source file they're attached to.
-  Returns:
-    excluded_sources with files that have actions attached removed.
-  """
-  must_keep = set(_FixPaths(actions_to_add.keys()))
-  return [s for s in excluded_sources if s not in must_keep]
-
-
-def _GetDefaultConfiguration(spec):
-  return spec['configurations'][spec['default_configuration']]
-
-
-def _GetGuidOfProject(proj_path, spec):
-  """Get the guid for the project.
-
-  Arguments:
-    proj_path: Path of the vcproj or vcxproj file to generate.
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    the guid.
-  Raises:
-    ValueError: if the specified GUID is invalid.
-  """
-  # Pluck out the default configuration.
-  default_config = _GetDefaultConfiguration(spec)
-  # Decide the guid of the project.
-  guid = default_config.get('msvs_guid')
-  if guid:
-    if VALID_MSVS_GUID_CHARS.match(guid) is None:
-      raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
-                       (guid, VALID_MSVS_GUID_CHARS.pattern))
-    guid = '{%s}' % guid
-  guid = guid or MSVSNew.MakeGuid(proj_path)
-  return guid
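When no `msvs_guid` is supplied, `MSVSNew.MakeGuid` derives one from the project path. The exact hashing details live in MSVSNew and are not shown in this hunk; the sketch below is only an assumed illustration of the idea — a deterministic, hash-derived GUID, so regenerating projects does not churn their identities:

```python
import hashlib

def make_guid_sketch(name, seed='msvs_new'):
    # Assumption: hash the (seeded) project path and format the digest as a
    # GUID. Deterministic input -> the same GUID on every regeneration.
    d = hashlib.md5((seed + name).encode('utf-8')).hexdigest().upper()
    return '{%s-%s-%s-%s-%s}' % (d[:8], d[8:12], d[12:16], d[16:20], d[20:32])

print(make_guid_sketch('base/base.vcproj'))  # stable across runs
```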
-def _GetMsbuildToolsetOfProject(proj_path, spec, version):
-  """Get the platform toolset for the project.
-
-  Arguments:
-    proj_path: Path of the vcproj or vcxproj file to generate.
-    spec: The target dictionary containing the properties of the target.
-    version: The MSVSVersion object.
-  Returns:
-    the platform toolset string or None.
-  """
-  # Pluck out the default configuration.
-  default_config = _GetDefaultConfiguration(spec)
-  toolset = default_config.get('msbuild_toolset')
-  if not toolset and version.DefaultToolset():
-    toolset = version.DefaultToolset()
-  return toolset
-
-
-def _GenerateProject(project, options, version, generator_flags):
-  """Generates a vcproj file.
-
-  Arguments:
-    project: the MSVSProject object.
-    options: global generator options.
-    version: the MSVSVersion object.
-    generator_flags: dict of generator-specific flags.
-  Returns:
-    A list of source files that cannot be found on disk.
-  """
-  default_config = _GetDefaultConfiguration(project.spec)
-
-  # Skip emitting anything if told to with msvs_existing_vcproj option.
-  if default_config.get('msvs_existing_vcproj'):
-    return []
-
-  if version.UsesVcxproj():
-    return _GenerateMSBuildProject(project, options, version, generator_flags)
-  else:
-    return _GenerateMSVSProject(project, options, version, generator_flags)
-
-
-def _GenerateMSVSProject(project, options, version, generator_flags):
-  """Generates a .vcproj file. It may create .rules and .user files too.
-
-  Arguments:
-    project: The project object we will generate the file for.
-    options: Global options passed to the generator.
-    version: The VisualStudioVersion object.
-    generator_flags: dict of generator-specific flags.
-  """
-  spec = project.spec
-  vcproj_dir = os.path.dirname(project.path)
-  if vcproj_dir and not os.path.exists(vcproj_dir):
-    os.makedirs(vcproj_dir)
-
-  platforms = _GetUniquePlatforms(spec)
-  p = MSVSProject.Writer(project.path, version, spec['target_name'],
-                         project.guid, platforms)
-
-  # Get directory project file is in.
-  project_dir = os.path.split(project.path)[0]
-  gyp_path = _NormalizedSource(project.build_file)
-  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
-  config_type = _GetMSVSConfigurationType(spec, project.build_file)
-  for config_name, config in spec['configurations'].iteritems():
-    _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
-
-  # Prepare list of sources and excluded sources.
-  gyp_file = os.path.split(project.build_file)[1]
-  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
-                                                    gyp_file)
-
-  # Add rules.
-  actions_to_add = {}
-  _GenerateRulesForMSVS(p, project_dir, options, spec,
-                        sources, excluded_sources,
-                        actions_to_add)
-  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
-  sources, excluded_sources, excluded_idl = (
-      _AdjustSourcesAndConvertToFilterHierarchy(
-          spec, options, project_dir, sources, excluded_sources,
-          list_excluded))
-
-  # Add in files.
-  missing_sources = _VerifySourcesExist(sources, project_dir)
-  p.AddFiles(sources)
-
-  _AddToolFilesToMSVS(p, spec)
-  _HandlePreCompiledHeaders(p, sources, spec)
-  _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
-  _AddCopies(actions_to_add, spec)
-  _WriteMSVSUserFile(project.path, version, spec)
-
-  # NOTE: this stanza must appear after all actions have been decided.
-  # Don't exclude sources with actions attached, or they won't run.
-  excluded_sources = _FilterActionsFromExcluded(
-      excluded_sources, actions_to_add)
-  _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
-                              list_excluded)
-  _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
-
-  # Write it out.
-  p.WriteIfChanged()
-
-  return missing_sources
-def _GetUniquePlatforms(spec):
-  """Returns the list of unique platforms for this spec, e.g. ['win32', ...].
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The list of unique platforms.
-  """
-  # Gather list of unique platforms.
-  platforms = set()
-  for configuration in spec['configurations']:
-    platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
-  platforms = list(platforms)
-  return platforms
-
-
-def _CreateMSVSUserFile(proj_path, version, spec):
-  """Generates a .user file for the user running this Gyp program.
-
-  Arguments:
-    proj_path: The path of the project file being created. The .user file
-      shares the same path (with an appropriate suffix).
-    version: The VisualStudioVersion object.
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The MSVSUserFile object created.
-  """
-  (domain, username) = _GetDomainAndUserName()
-  vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
-  user_file = MSVSUserFile.Writer(vcuser_filename, version,
-                                  spec['target_name'])
-  return user_file
-
-
-def _GetMSVSConfigurationType(spec, build_file):
-  """Returns the configuration type for this project.
-
-  It's a number defined by Microsoft. May raise an exception.
-
-  Args:
-    spec: The target dictionary containing the properties of the target.
-    build_file: The path of the gyp file.
-  Returns:
-    An integer, the configuration type.
-  """
-  try:
-    config_type = {
-        'executable': '1',  # .exe
-        'shared_library': '2',  # .dll
-        'loadable_module': '2',  # .dll
-        'static_library': '4',  # .lib
-        'none': '10',  # Utility type
-    }[spec['type']]
-  except KeyError:
-    if spec.get('type'):
-      raise GypError('Target type %s is not a valid target type for '
-                     'target %s in %s.' %
-                     (spec['type'], spec['target_name'], build_file))
-    else:
-      raise GypError('Missing type field for target %s in %s.' %
-                     (spec['target_name'], build_file))
-  return config_type
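The next function fills a nested per-tool settings dict through `_ToolAppend`, which is defined earlier in this file and not shown in this hunk. A minimal sketch of the assumed behavior (my reimplementation of the `only_if_unset` and list-merging conventions the call sites rely on):

```python
def tool_append(tools, tool_name, setting, value, only_if_unset=False):
    # Assumed shape of _ToolAppend: accumulate settings per tool in a nested
    # dict, extending list values rather than overwriting them.
    if not value:
        return
    settings = tools.setdefault(tool_name, {})
    if only_if_unset and setting in settings:
        return
    if setting in settings and isinstance(settings[setting], list):
        settings[setting] = settings[setting] + value
    else:
        settings[setting] = value

tools = {}
tool_append(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
tool_append(tools, 'VCLinkerTool', 'AdditionalDependencies', ['winmm.lib'])
print(tools)
# {'VCLinkerTool': {'AdditionalDependencies': ['$(NOINHERIT)', 'winmm.lib']}}
```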
-def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
-  """Adds a configuration to the MSVS project.
-
-  Many settings in a vcproj file are specific to a configuration.  This
-  function adds the main part of the vcproj file that's configuration
-  specific.
-
-  Arguments:
-    p: The target project being generated.
-    spec: The target dictionary containing the properties of the target.
-    config_type: The configuration type, a number as defined by Microsoft.
-    config_name: The name of the configuration.
-    config: The dictionary that defines the special processing to be done
-      for this configuration.
-  """
-  # Get the information for this configuration
-  include_dirs, resource_include_dirs = _GetIncludeDirs(config)
-  libraries = _GetLibraries(spec)
-  out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
-  defines = _GetDefines(config)
-  defines = [_EscapeCppDefineForMSVS(d) for d in defines]
-  disabled_warnings = _GetDisabledWarnings(config)
-  prebuild = config.get('msvs_prebuild')
-  postbuild = config.get('msvs_postbuild')
-  def_file = _GetModuleDefinition(spec)
-  precompiled_header = config.get('msvs_precompiled_header')
-
-  # Prepare the list of tools as a dictionary.
-  tools = dict()
-  # Add in user specified msvs_settings.
-  msvs_settings = config.get('msvs_settings', {})
-  MSVSSettings.ValidateMSVSSettings(msvs_settings)
-
-  # Prevent default library inheritance from the environment.
-  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies',
-              ['$(NOINHERIT)'])
-
-  for tool in msvs_settings:
-    settings = config['msvs_settings'][tool]
-    for setting in settings:
-      _ToolAppend(tools, tool, setting, settings[setting])
-  # Add the information to the appropriate tool
-  _ToolAppend(tools, 'VCCLCompilerTool',
-              'AdditionalIncludeDirectories', include_dirs)
-  _ToolAppend(tools, 'VCResourceCompilerTool',
-              'AdditionalIncludeDirectories', resource_include_dirs)
-  # Add in libraries.
-  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
-  if out_file:
-    _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
-  # Add defines.
-  _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
-  _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
-              defines)
-  # Change program database directory to prevent collisions.
-  _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
-              '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
-  # Add disabled warnings.
-  _ToolAppend(tools, 'VCCLCompilerTool',
-              'DisableSpecificWarnings', disabled_warnings)
-  # Add Pre-build.
-  _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
-  # Add Post-build.
-  _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
-  # Turn on precompiled headers if appropriate.
-  if precompiled_header:
-    precompiled_header = os.path.split(precompiled_header)[1]
-    _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
-    _ToolAppend(tools, 'VCCLCompilerTool',
-                'PrecompiledHeaderThrough', precompiled_header)
-    _ToolAppend(tools, 'VCCLCompilerTool',
-                'ForcedIncludeFiles', precompiled_header)
-  # Loadable modules don't generate import libraries;
-  # tell dependent projects to not expect one.
-  if spec['type'] == 'loadable_module':
-    _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
-  # Set the module definition file if any.
-  if def_file:
-    _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
-
-  _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
-
-
-def _GetIncludeDirs(config):
-  """Returns the list of directories to be used for #include directives.
-
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-      for this configuration.
-  Returns:
-    The list of directory paths.
-  """
-  # TODO(bradnelson): include_dirs should really be flexible enough not to
-  # require this sort of thing.
-  include_dirs = (
-      config.get('include_dirs', []) +
-      config.get('msvs_system_include_dirs', []))
-  resource_include_dirs = config.get('resource_include_dirs', include_dirs)
-  include_dirs = _FixPaths(include_dirs)
-  resource_include_dirs = _FixPaths(resource_include_dirs)
-  return include_dirs, resource_include_dirs
-
-
-def _GetLibraries(spec):
-  """Returns the list of libraries for this configuration.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The list of libraries.
-  """
-  libraries = spec.get('libraries', [])
-  # Strip out -l, as it is not used on windows (but is needed so we can pass
-  # in libraries that are assumed to be in the default library path).
-  # Also remove duplicate entries, leaving only the last duplicate, while
-  # preserving order.
-  found = set()
-  unique_libraries_list = []
-  for entry in reversed(libraries):
-    library = re.sub('^\-l', '', entry)
-    if not os.path.splitext(library)[1]:
-      library += '.lib'
-    if library not in found:
-      found.add(library)
-      unique_libraries_list.append(library)
-  unique_libraries_list.reverse()
-  return unique_libraries_list
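The reversed-iteration de-duplication in `_GetLibraries` keeps the *last* occurrence of each library while preserving the remaining relative order; a standalone replay (the helper name is mine):

```python
import os
import re

def unique_libraries(libraries):
    # Same steps as _GetLibraries above: strip -l, default to .lib, then
    # walk the list backwards so the last duplicate wins.
    found, result = set(), []
    for entry in reversed(libraries):
        library = re.sub(r'^-l', '', entry)
        if not os.path.splitext(library)[1]:
            library += '.lib'
        if library not in found:
            found.add(library)
            result.append(library)
    result.reverse()
    return result

print(unique_libraries(['-lwinmm', 'user32.lib', 'winmm.lib']))
# ['user32.lib', 'winmm.lib']  -- the earlier -lwinmm duplicate is dropped
```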
-def _GetOutputFilePathAndTool(spec, msbuild):
-  """Returns the path and tool to use for this target.
-
-  Figures out the path of the file this spec will create and the name of
-  the VC tool that will create it.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    msbuild: True if the output path is for an MSBuild project.
-  Returns:
-    A triple of (file path, name of the vc tool, name of the msbuild tool)
-  """
-  # Select a name for the output file.
-  out_file = ''
-  vc_tool = ''
-  msbuild_tool = ''
-  output_file_map = {
-      'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
-      'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
-      'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
-      'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
-  }
-  output_file_props = output_file_map.get(spec['type'])
-  if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
-    vc_tool, msbuild_tool, out_dir, suffix = output_file_props
-    if spec.get('standalone_static_library', 0):
-      out_dir = '$(OutDir)'
-    out_dir = spec.get('product_dir', out_dir)
-    product_extension = spec.get('product_extension')
-    if product_extension:
-      suffix = '.' + product_extension
-    elif msbuild:
-      suffix = '$(TargetExt)'
-    prefix = spec.get('product_prefix', '')
-    product_name = spec.get('product_name', '$(ProjectName)')
-    out_file = ntpath.join(out_dir, prefix + product_name + suffix)
-  return out_file, vc_tool, msbuild_tool
-
-
-def _GetDefines(config):
-  """Returns the list of preprocessor definitions for this configuration.
-
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-      for this configuration.
-  Returns:
-    The list of preprocessor definitions.
-  """
-  defines = []
-  for d in config.get('defines', []):
-    if type(d) == list:
-      fd = '='.join([str(dpart) for dpart in d])
-    else:
-      fd = str(d)
-    defines.append(fd)
-  return defines
-
-
-def _GetDisabledWarnings(config):
-  return [str(i) for i in config.get('msvs_disabled_warnings', [])]
-
-
-def _GetModuleDefinition(spec):
-  def_file = ''
-  if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
-    def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
-    if len(def_files) == 1:
-      def_file = _FixPath(def_files[0])
-    elif def_files:
-      raise ValueError(
-          'Multiple module definition files in one target, target %s lists '
-          'multiple .def files: %s' % (
-              spec['target_name'], ' '.join(def_files)))
-  return def_file
-
-
-def _ConvertToolsToExpectedForm(tools):
-  """Convert tools to a form expected by Visual Studio.
-
-  Arguments:
-    tools: A dictionary of settings; the tool name is the key.
-  Returns:
-    A list of Tool objects.
-  """
-  tool_list = []
-  for tool, settings in tools.iteritems():
-    # Collapse settings with lists.
-    settings_fixed = {}
-    for setting, value in settings.iteritems():
-      if type(value) == list:
-        if ((tool == 'VCLinkerTool' and
-             setting == 'AdditionalDependencies') or
-            setting == 'AdditionalOptions'):
-          settings_fixed[setting] = ' '.join(value)
-        else:
-          settings_fixed[setting] = ';'.join(value)
-      else:
-        settings_fixed[setting] = value
-    # Add in this tool.
-    tool_list.append(MSVSProject.Tool(tool, settings_fixed))
-  return tool_list
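To illustrate the collapsing rule just above: linker dependencies and `AdditionalOptions` are space-separated (command-line style), while every other list setting uses the vcproj `;` convention:

```python
# Sample settings dict as accumulated by _ToolAppend (values are made up).
settings = {'AdditionalOptions': ['/MP', '/bigobj'],
            'PreprocessorDefinitions': ['NDEBUG', 'WIN32']}

print(' '.join(settings['AdditionalOptions']))        # /MP /bigobj
print(';'.join(settings['PreprocessorDefinitions']))  # NDEBUG;WIN32
```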
-def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
-  """Add to the project file the configuration specified by config.
-
-  Arguments:
-    p: The target project being generated.
-    spec: the target project dict.
-    tools: A dictionary of settings; the tool name is the key.
-    config: The dictionary that defines the special processing to be done
-      for this configuration.
-    config_type: The configuration type, a number as defined by Microsoft.
-    config_name: The name of the configuration.
-  """
-  attributes = _GetMSVSAttributes(spec, config, config_type)
-  # Add in this configuration.
-  tool_list = _ConvertToolsToExpectedForm(tools)
-  p.AddConfig(_ConfigFullName(config_name, config),
-              attrs=attributes, tools=tool_list)
-
-
-def _GetMSVSAttributes(spec, config, config_type):
-  # Prepare configuration attributes.
-  prepared_attrs = {}
-  source_attrs = config.get('msvs_configuration_attributes', {})
-  for a in source_attrs:
-    prepared_attrs[a] = source_attrs[a]
-  # Add props files.
-  vsprops_dirs = config.get('msvs_props', [])
-  vsprops_dirs = _FixPaths(vsprops_dirs)
-  if vsprops_dirs:
-    prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
-  # Set configuration type.
-  prepared_attrs['ConfigurationType'] = config_type
-  output_dir = prepared_attrs.get('OutputDirectory',
-                                  '$(SolutionDir)$(ConfigurationName)')
-  prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
-  if 'IntermediateDirectory' not in prepared_attrs:
-    intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
-    prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
-  else:
-    intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
-    intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
-    prepared_attrs['IntermediateDirectory'] = intermediate
-  return prepared_attrs
-
-
-def _AddNormalizedSources(sources_set, sources_array):
-  sources = [_NormalizedSource(s) for s in sources_array]
-  sources_set.update(set(sources))
-
-
-def _PrepareListOfSources(spec, generator_flags, gyp_file):
-  """Prepare list of sources and excluded sources.
-
-  Besides the sources specified directly in the spec, adds the gyp file so
-  that a change to it will cause a re-compile. Also adds appropriate sources
-  for actions and copies. Assumes later stage will un-exclude files which
-  have custom build steps attached.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    generator_flags: dict of generator-specific flags.
-    gyp_file: The name of the gyp file.
-  Returns:
-    A pair of (set of sources, set of excluded sources).
-    The sources will be relative to the gyp file.
-  """
-  sources = set()
-  _AddNormalizedSources(sources, spec.get('sources', []))
-  excluded_sources = set()
-  # Add in the gyp file.
-  if not generator_flags.get('standalone'):
-    sources.add(gyp_file)
-
-  # Add in 'action' inputs and outputs.
-  for a in spec.get('actions', []):
-    inputs = a['inputs']
-    inputs = [_NormalizedSource(i) for i in inputs]
-    # Add all inputs to sources and excluded sources.
-    inputs = set(inputs)
-    sources.update(inputs)
-    excluded_sources.update(inputs)
-    if int(a.get('process_outputs_as_sources', False)):
-      _AddNormalizedSources(sources, a.get('outputs', []))
-  # Add in 'copies' inputs and outputs.
-  for cpy in spec.get('copies', []):
-    _AddNormalizedSources(sources, cpy.get('files', []))
-  return (sources, excluded_sources)
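The next helper turns this flat, Windows-style source list into the nested filter hierarchy shown in the IDE. The pivotal step is splitting each fixed-up path on backslashes before handing the pieces to `_ConvertSourcesToFilterHierarchy` (defined earlier in this file, not shown in this hunk):

```python
# Paths after _FixPaths use backslashes; splitting yields the folder chain
# that _ConvertSourcesToFilterHierarchy groups into MSVSProject.Filter nodes.
sources = ['src\\main.cc', 'src\\util\\str.cc', 'README']
print([s.split('\\') for s in sources])
# [['src', 'main.cc'], ['src', 'util', 'str.cc'], ['README']]
```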
-def _AdjustSourcesAndConvertToFilterHierarchy(
-    spec, options, gyp_dir, sources, excluded_sources, list_excluded):
-  """Adjusts the list of sources and excluded sources.
-
-  Also converts the sets to lists.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    options: Global generator options.
-    gyp_dir: The path to the gyp file being processed.
-    sources: A set of sources to be included for this project.
-    excluded_sources: A set of sources to be excluded for this project.
-    list_excluded: Whether excluded files should be listed in the project.
-  Returns:
-    A trio of (list of sources, list of excluded sources,
-               list of excluded IDL files)
-  """
-  # Exclude excluded sources coming into the generator.
-  excluded_sources.update(set(spec.get('sources_excluded', [])))
-  # Add excluded sources into sources for good measure.
-  sources.update(excluded_sources)
-  # Convert to proper windows form.
-  # NOTE: sources goes from being a set to a list here.
-  # NOTE: excluded_sources goes from being a set to a list here.
-  sources = _FixPaths(sources)
-  # Convert to proper windows form.
-  excluded_sources = _FixPaths(excluded_sources)
-
-  excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
-
-  precompiled_related = _GetPrecompileRelatedFiles(spec)
-  # Find the excluded ones, minus the precompiled header related ones.
-  fully_excluded = [i for i in excluded_sources
-                    if i not in precompiled_related]
-
-  # Convert to folders and the right slashes.
-  sources = [i.split('\\') for i in sources]
-  sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
-                                             list_excluded=list_excluded)
-
-  return sources, excluded_sources, excluded_idl
-
-
-def _IdlFilesHandledNonNatively(spec, sources):
-  # If any non-native rules use 'idl' as an extension exclude idl files.
-  # Gather a list here to use later.
-  using_idl = False
-  for rule in spec.get('rules', []):
-    if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
-      using_idl = True
-      break
-  if using_idl:
-    excluded_idl = [i for i in sources if i.endswith('.idl')]
-  else:
-    excluded_idl = []
-  return excluded_idl
-
-
-def _GetPrecompileRelatedFiles(spec):
-  # Gather a list of precompiled header related sources.
-  precompiled_related = []
-  for _, config in spec['configurations'].iteritems():
-    for k in precomp_keys:
-      f = config.get(k)
-      if f:
-        precompiled_related.append(_FixPath(f))
-  return precompiled_related
-
-
-def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
-                                list_excluded):
-  exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
-  for file_name, excluded_configs in exclusions.iteritems():
-    if (not list_excluded and
-        len(excluded_configs) == len(spec['configurations'])):
-      # If we're not listing excluded files, then they won't appear in the
-      # project, so don't try to configure them to be excluded.
-      pass
-    else:
-      for config_name, config in excluded_configs:
-        p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
-                        {'ExcludedFromBuild': 'true'})
-def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
-  exclusions = {}
-  # Exclude excluded sources from being built.
-  for f in excluded_sources:
-    excluded_configs = []
-    for config_name, config in spec['configurations'].iteritems():
-      precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
-      # Don't do this for ones that are precompiled header related.
-      if f not in precomped:
-        excluded_configs.append((config_name, config))
-    exclusions[f] = excluded_configs
-  # If any non-native rules use 'idl' as an extension exclude idl files.
-  # Exclude them now.
-  for f in excluded_idl:
-    excluded_configs = []
-    for config_name, config in spec['configurations'].iteritems():
-      excluded_configs.append((config_name, config))
-    exclusions[f] = excluded_configs
-  return exclusions
-
-
-def _AddToolFilesToMSVS(p, spec):
-  # Add in tool files (rules).
-  tool_files = set()
-  for _, config in spec['configurations'].iteritems():
-    for f in config.get('msvs_tool_files', []):
-      tool_files.add(f)
-  for f in tool_files:
-    p.AddToolFile(f)
-
-
-def _HandlePreCompiledHeaders(p, sources, spec):
-  # Pre-compiled header source stubs need a different compiler flag
-  # (generate precompiled header) and any source file not of the same
-  # kind (i.e. C vs. C++) as the precompiled header source stub needs
-  # to have use of precompiled headers disabled.
-  extensions_excluded_from_precompile = []
-  for config_name, config in spec['configurations'].iteritems():
-    source = config.get('msvs_precompiled_source')
-    if source:
-      source = _FixPath(source)
-      # UsePrecompiledHeader=1 when using precompiled headers.
-      tool = MSVSProject.Tool('VCCLCompilerTool',
-                              {'UsePrecompiledHeader': '1'})
-      p.AddFileConfig(source, _ConfigFullName(config_name, config),
-                      {}, tools=[tool])
-      basename, extension = os.path.splitext(source)
-      if extension == '.c':
-        extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
-      else:
-        extensions_excluded_from_precompile = ['.c']
-
-  def DisableForSourceTree(source_tree):
-    for source in source_tree:
-      if isinstance(source, MSVSProject.Filter):
-        DisableForSourceTree(source.contents)
-      else:
-        basename, extension = os.path.splitext(source)
-        if extension in extensions_excluded_from_precompile:
-          for config_name, config in spec['configurations'].iteritems():
-            tool = MSVSProject.Tool('VCCLCompilerTool',
-                                    {'UsePrecompiledHeader': '0',
-                                     'ForcedIncludeFiles': '$(NOINHERIT)'})
-            p.AddFileConfig(_FixPath(source),
-                            _ConfigFullName(config_name, config),
-                            {}, tools=[tool])
-  # Do nothing if there was no precompiled source.
-  if extensions_excluded_from_precompile:
-    DisableForSourceTree(sources)
-
-
-def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
-  # Add actions.
-  actions = spec.get('actions', [])
-  # Don't setup_env every time. When all the actions are run together in one
-  # batch file in VS, the PATH will grow too long.
-  # Membership in this set means that the cygwin environment has been set up,
-  # and does not need to be set up again.
-  have_setup_env = set()
-  for a in actions:
-    # Attach actions to the gyp file if nothing else is there.
-    inputs = a.get('inputs') or [relative_path_of_gyp_file]
-    attached_to = inputs[0]
-    need_setup_env = attached_to not in have_setup_env
-    cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
-                                   do_setup_env=need_setup_env)
-    have_setup_env.add(attached_to)
-    # Add the action.
-    _AddActionStep(actions_to_add,
-                   inputs=inputs,
-                   outputs=a.get('outputs', []),
-                   description=a.get('message', a['action_name']),
-                   command=cmd)
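Tying this back to `_AddActionStep` and `_AddAccumulatedActionsToMSVS` from earlier in this hunk, here is a tiny standalone trace of the `actions_to_add` structure (the two sample action dicts are made up):

```python
# Two hypothetical actions whose first input is the same file.
actions_to_add = {}
for action in [
    {'inputs': ['a.proto', 'gen.py'], 'outputs': ['a.pb.cc'],
     'description': 'Generating a.pb.cc', 'command': 'python gen.py a.proto'},
    {'inputs': ['a.proto'], 'outputs': ['a.pb.h'],
     'description': 'Generating a.pb.h', 'command': 'python genh.py a.proto'},
]:
    # Same attachment rule as _AddActionStep: pile onto the first input.
    actions_to_add.setdefault(action['inputs'][0], []).append(action)

# _AddAccumulatedActionsToMSVS later merges everything attached to 'a.proto'
# into a single custom build step with a combined description:
merged = ', and also '.join(a['description'] for a in actions_to_add['a.proto'])
print(merged)  # Generating a.pb.cc, and also Generating a.pb.h
```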
-def _WriteMSVSUserFile(project_path, version, spec):
-  # Add run_as and test targets.
-  if 'run_as' in spec:
-    run_as = spec['run_as']
-    action = run_as.get('action', [])
-    environment = run_as.get('environment', [])
-    working_directory = run_as.get('working_directory', '.')
-  elif int(spec.get('test', 0)):
-    action = ['$(TargetPath)', '--gtest_print_time']
-    environment = []
-    working_directory = '.'
-  else:
-    return  # Nothing to add
-  # Write out the user file.
-  user_file = _CreateMSVSUserFile(project_path, version, spec)
-  for config_name, c_data in spec['configurations'].iteritems():
-    user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
-                               action, environment, working_directory)
-  user_file.WriteIfChanged()
-
-
-def _AddCopies(actions_to_add, spec):
-  copies = _GetCopies(spec)
-  for inputs, outputs, cmd, description in copies:
-    _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
-                   description=description, command=cmd)
-
-
-def _GetCopies(spec):
-  copies = []
-  # Add copies.
-  for cpy in spec.get('copies', []):
-    for src in cpy.get('files', []):
-      dst = os.path.join(cpy['destination'], os.path.basename(src))
-      # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
-      # outputs, so do the same for our generated command line.
-      if src.endswith('/'):
-        src_bare = src[:-1]
-        base_dir = posixpath.split(src_bare)[0]
-        outer_dir = posixpath.split(src_bare)[1]
-        cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
-            _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
-        copies.append(([src], ['dummy_copies', dst], cmd,
-                       'Copying %s to %s' % (src, dst)))
-      else:
-        cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
-            _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
-        copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
-  return copies
-
-
-def _GetPathDict(root, path):
-  # |path| will eventually be empty (in the recursive calls) if it was
-  # initially relative; otherwise it will eventually end up as '\', 'D:\',
-  # etc.
-  if not path or path.endswith(os.sep):
-    return root
-  parent, folder = os.path.split(path)
-  parent_dict = _GetPathDict(root, parent)
-  if folder not in parent_dict:
-    parent_dict[folder] = dict()
-  return parent_dict[folder]
-
-
-def _DictsToFolders(base_path, bucket, flat):
-  # Convert to folders recursively.
-  children = []
-  for folder, contents in bucket.iteritems():
-    if type(contents) == dict:
-      folder_children = _DictsToFolders(os.path.join(base_path, folder),
-                                        contents, flat)
-      if flat:
-        children += folder_children
-      else:
-        folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
-                                             name='(' + folder + ')',
-                                             entries=folder_children)
-        children.append(folder_children)
-    else:
-      children.append(contents)
-  return children
-
-
-def _CollapseSingles(parent, node):
-  # Recursively explore the tree of dicts looking for projects which are
-  # the sole item in a folder which has the same name as the project. Bring
-  # such projects up one level.
-  if (type(node) == dict and
-      len(node) == 1 and
-      node.keys()[0] == parent + '.vcproj'):
-    return node[node.keys()[0]]
-  if type(node) != dict:
-    return node
-  for child in node:
-    node[child] = _CollapseSingles(child, node[child])
-  return node
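A standalone replay of the nested-dict layout `_GetPathDict` builds (helper name is mine, simplified with `setdefault`, and project objects replaced by strings) before `_DictsToFolders` and `_CollapseSingles` post-process it:

```python
import os

def get_path_dict(root, path):
    # Same recursion as _GetPathDict above: walk/extend a dict tree keyed
    # on the folder components of the path.
    if not path or path.endswith(os.sep):
        return root
    parent, folder = os.path.split(path)
    return get_path_dict(root, parent).setdefault(folder, {})

root = {}
get_path_dict(root, 'src/base')['base.vcproj'] = '<project object>'
get_path_dict(root, 'src/net')['net.vcproj'] = '<project object>'
print(root)
# {'src': {'base': {'base.vcproj': '<project object>'},
#          'net': {'net.vcproj': '<project object>'}}}
```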
-def _GatherSolutionFolders(sln_projects, project_objects, flat):
-  root = {}
-  # Convert into a tree of dicts on path.
-  for p in sln_projects:
-    gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
-    gyp_dir = os.path.dirname(gyp_file)
-    path_dict = _GetPathDict(root, gyp_dir)
-    path_dict[target + '.vcproj'] = project_objects[p]
-  # Walk down from the top until we hit a folder that has more than one
-  # entry. In practice, this strips the top-level "src/" dir from the
-  # hierarchy in the solution.
-  while len(root) == 1 and type(root[root.keys()[0]]) == dict:
-    root = root[root.keys()[0]]
-  # Collapse singles.
-  root = _CollapseSingles('', root)
-  # Merge buckets until everything is a root entry.
-  return _DictsToFolders('', root, flat)
-
-
-def _GetPathOfProject(qualified_target, spec, options, msvs_version):
-  default_config = _GetDefaultConfiguration(spec)
-  proj_filename = default_config.get('msvs_existing_vcproj')
-  if not proj_filename:
-    proj_filename = (spec['target_name'] + options.suffix +
-                     msvs_version.ProjectExtension())
-
-  build_file = gyp.common.BuildFile(qualified_target)
-  proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
-  fix_prefix = None
-  if options.generator_output:
-    project_dir_path = os.path.dirname(os.path.abspath(proj_path))
-    proj_path = os.path.join(options.generator_output, proj_path)
-    fix_prefix = gyp.common.RelativePath(project_dir_path,
-                                         os.path.dirname(proj_path))
-  return proj_path, fix_prefix
-
-
-def _GetPlatformOverridesOfProject(spec):
-  # Prepare a dict indicating which project configurations are used for which
-  # solution configurations for this target.
-  config_platform_overrides = {}
-  for config_name, c in spec['configurations'].iteritems():
-    config_fullname = _ConfigFullName(config_name, c)
-    platform = c.get('msvs_target_platform', _ConfigPlatform(c))
-    fixed_config_fullname = '%s|%s' % (
-        _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
-    config_platform_overrides[config_fullname] = fixed_config_fullname
-  return config_platform_overrides
-
-
-def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
-  """Create a MSVSProject object for the targets found in target list.
-
-  Arguments:
-    target_list: the list of targets to generate project objects for.
-    target_dicts: the dictionary of specifications.
-    options: global generator options.
-    msvs_version: the MSVSVersion object.
-  Returns:
-    A dictionary of created projects, keyed by qualified target.
-  """
-  global fixpath_prefix
-  # Generate each project.
-  projects = {}
-  for qualified_target in target_list:
-    spec = target_dicts[qualified_target]
-    if spec['toolset'] != 'target':
-      raise GypError(
-          'Multiple toolsets not supported in msvs build (target %s)' %
-          qualified_target)
-    proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
-                                                  options, msvs_version)
-    guid = _GetGuidOfProject(proj_path, spec)
-    overrides = _GetPlatformOverridesOfProject(spec)
-    build_file = gyp.common.BuildFile(qualified_target)
-    # Create object for this project.
-    obj = MSVSNew.MSVSProject(
-        proj_path,
-        name=spec['target_name'],
-        guid=guid,
-        spec=spec,
-        build_file=build_file,
-        config_platform_overrides=overrides,
-        fixpath_prefix=fixpath_prefix)
-    # Set project toolset if any (MS build only)
-    if msvs_version.UsesVcxproj():
-      obj.set_msbuild_toolset(
-          _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
-    projects[qualified_target] = obj
-  # Set all the dependencies
-  for project in projects.values():
-    deps = project.spec.get('dependencies', [])
-    deps = [projects[d] for d in deps]
-    project.set_dependencies(deps)
-  return projects
-
-
-def CalculateVariables(default_variables, params):
-  """Generates variables that require params to be known."""
-
-  generator_flags = params.get('generator_flags', {})
-
-  # Select project file format version (if unset, default to auto detecting).
-  msvs_version = MSVSVersion.SelectVisualStudioVersion(
-      generator_flags.get('msvs_version', 'auto'))
-  # Stash msvs_version for later (so we don't have to probe the system twice).
-  params['msvs_version'] = msvs_version
-
-  # Set a variable so conditions can be based on msvs_version.
-  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
-  # To determine processor word size on Windows, in addition to checking
-  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
-  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
-  # contains the actual word size of the system when running through WOW64).
-  if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
-      os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
-    default_variables['MSVS_OS_BITS'] = 64
-  else:
-    default_variables['MSVS_OS_BITS'] = 32
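The word-size probe above is easy to check in isolation; here is the same test refactored over an explicit environment dict (the refactoring is mine):

```python
import os

def msvs_os_bits(environ=os.environ):
    # Same test as CalculateVariables above: a 32-bit process on 64-bit
    # Windows reports an x86 PROCESSOR_ARCHITECTURE, but WOW64 exposes the
    # real machine word size via PROCESSOR_ARCHITEW6432.
    if ('64' in environ.get('PROCESSOR_ARCHITECTURE', '') or
        '64' in environ.get('PROCESSOR_ARCHITEW6432', '')):
        return 64
    return 32

print(msvs_os_bits({'PROCESSOR_ARCHITECTURE': 'x86',
                    'PROCESSOR_ARCHITEW6432': 'AMD64'}))  # 64
```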
-def PerformBuild(data, configurations, params):
-  options = params['options']
-  msvs_version = params['msvs_version']
-  devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
-
-  for build_file, build_file_dict in data.iteritems():
-    (build_file_root, build_file_ext) = os.path.splitext(build_file)
-    if build_file_ext != '.gyp':
-      continue
-    sln_path = build_file_root + options.suffix + '.sln'
-    if options.generator_output:
-      sln_path = os.path.join(options.generator_output, sln_path)
-
-    for config in configurations:
-      arguments = [devenv, sln_path, '/Build', config]
-      print 'Building [%s]: %s' % (config, arguments)
-      rtn = subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  """Generate .sln and .vcproj files.
-
-  This is the entry point for this generator.
-
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    data: Dictionary containing per .gyp data.
-    params: Dictionary of additional generator parameters.
-  """
-  global fixpath_prefix
-
-  options = params['options']
-
-  # Get the project file format version back out of where we stashed it in
-  # GeneratorCalculatedVariables.
-  msvs_version = params['msvs_version']
-
-  generator_flags = params.get('generator_flags', {})
-
-  # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
-  (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list,
-                                                      target_dicts)
-
-  # Optionally use the large PDB workaround for targets marked with
-  # 'msvs_large_pdb': 1.
-  (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
-      target_list, target_dicts, generator_default_variables)
-
-  # Prepare the set of configurations.
-  configs = set()
-  for qualified_target in target_list:
-    spec = target_dicts[qualified_target]
-    for config_name, config in spec['configurations'].iteritems():
-      configs.add(_ConfigFullName(config_name, config))
-  configs = list(configs)
-
-  # Figure out all the projects that will be generated and their guids
-  project_objects = _CreateProjectObjects(target_list, target_dicts, options,
-                                          msvs_version)
-
-  # Generate each project.
-  missing_sources = []
-  for project in project_objects.values():
-    fixpath_prefix = project.fixpath_prefix
-    missing_sources.extend(_GenerateProject(project, options, msvs_version,
-                                            generator_flags))
-  fixpath_prefix = None
-
-  for build_file in data:
-    # Validate build_file extension
-    if not build_file.endswith('.gyp'):
-      continue
-    sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
-    if options.generator_output:
-      sln_path = os.path.join(options.generator_output, sln_path)
-    # Get projects in the solution, and their dependents.
-    sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
-    sln_projects += gyp.common.DeepDependencyTargets(target_dicts,
-                                                     sln_projects)
-    # Create folder hierarchy.
-    root_entries = _GatherSolutionFolders(
-        sln_projects, project_objects, flat=msvs_version.FlatSolution())
-    # Create solution.
-    sln = MSVSNew.MSVSSolution(sln_path,
-                               entries=root_entries,
-                               variants=configs,
-                               websiteProperties=False,
-                               version=msvs_version)
-    sln.Write()
-
-  if missing_sources:
-    error_message = "Missing input files:\n" + \
-                    '\n'.join(set(missing_sources))
-    if generator_flags.get('msvs_error_on_missing_sources', False):
-      raise GypError(error_message)
-    else:
-      print >> sys.stdout, "Warning: " + error_message
-
-
-def _GenerateMSBuildFiltersFile(filters_path, source_files,
-                                extension_to_rule_name):
-  """Generate the filters file.
-
-  This file is used by Visual Studio to organize the presentation of source
-  files into folders.
-
-  Arguments:
-    filters_path: The path of the file to be created.
-    source_files: The hierarchical structure of all the sources.
-    extension_to_rule_name: A dictionary mapping file extensions to rules.
-  """
-  filter_group = []
-  source_group = []
-  _AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
-                           filter_group, source_group)
-  if filter_group:
-    content = ['Project',
-               {'ToolsVersion': '4.0',
-                'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
-               },
-               ['ItemGroup'] + filter_group,
-               ['ItemGroup'] + source_group
-              ]
-    easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
-  elif os.path.exists(filters_path):
-    # We don't need this filter anymore. Delete the old filter file.
-    os.unlink(filters_path)
-
-
-def _AppendFiltersForMSBuild(parent_filter_name, sources,
-                             extension_to_rule_name,
-                             filter_group, source_group):
-  """Creates the list of filters and sources to be added in the filter file.
-
-  Args:
-    parent_filter_name: The name of the filter under which the sources are
-      found.
-    sources: The hierarchy of filters and sources to process.
-    extension_to_rule_name: A dictionary mapping file extensions to rules.
-    filter_group: The list to which filter entries will be appended.
-    source_group: The list to which source entries will be appended.
-  """
-  for source in sources:
-    if isinstance(source, MSVSProject.Filter):
-      # We have a sub-filter. Create the name of that sub-filter.
-      if not parent_filter_name:
-        filter_name = source.name
-      else:
-        filter_name = '%s\\%s' % (parent_filter_name, source.name)
-      # Add the filter to the group.
- filter_group.append( - ['Filter', {'Include': filter_name}, - ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]]) - # Recurse and add its dependents. - _AppendFiltersForMSBuild(filter_name, source.contents, - extension_to_rule_name, - filter_group, source_group) - else: - # It's a source. Create a source entry. - _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name) - source_entry = [element, {'Include': source}] - # Specify the filter it is part of, if any. - if parent_filter_name: - source_entry.append(['Filter', parent_filter_name]) - source_group.append(source_entry) - - -def _MapFileToMsBuildSourceType(source, extension_to_rule_name): - """Returns the group and element type of the source file. - - Arguments: - source: The source file name. - extension_to_rule_name: A dictionary mapping file extensions to rules. - - Returns: - A pair of (group this file should be part of, the label of element) - """ - _, ext = os.path.splitext(source) - if ext in extension_to_rule_name: - group = 'rule' - element = extension_to_rule_name[ext] - elif ext in ['.cc', '.cpp', '.c', '.cxx']: - group = 'compile' - element = 'ClCompile' - elif ext in ['.h', '.hxx']: - group = 'include' - element = 'ClInclude' - elif ext == '.rc': - group = 'resource' - element = 'ResourceCompile' - elif ext == '.idl': - group = 'midl' - element = 'Midl' - else: - group = 'none' - element = 'None' - return (group, element) - - -def _GenerateRulesForMSBuild(output_dir, options, spec, - sources, excluded_sources, - props_files_of_rules, targets_files_of_rules, - actions_to_add, extension_to_rule_name): - # MSBuild rules are implemented using three files: an XML file, a .targets - # file and a .props file. - # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx - # for more details. - rules = spec.get('rules', []) - rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))] - rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))] - - msbuild_rules = [] - for rule in rules_native: - # Skip a rule with no action and no inputs. - if 'action' not in rule and not rule.get('rule_sources', []): - continue - msbuild_rule = MSBuildRule(rule, spec) - msbuild_rules.append(msbuild_rule) - extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name - if msbuild_rules: - base = spec['target_name'] + options.suffix - props_name = base + '.props' - targets_name = base + '.targets' - xml_name = base + '.xml' - - props_files_of_rules.add(props_name) - targets_files_of_rules.add(targets_name) - - props_path = os.path.join(output_dir, props_name) - targets_path = os.path.join(output_dir, targets_name) - xml_path = os.path.join(output_dir, xml_name) - - _GenerateMSBuildRulePropsFile(props_path, msbuild_rules) - _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules) - _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules) - - if rules_external: - _GenerateExternalRules(rules_external, output_dir, spec, - sources, options, actions_to_add) - _AdjustSourcesForRules(rules, sources, excluded_sources) - - -class MSBuildRule(object): - """Used to store information used to generate an MSBuild rule. - - Attributes: - rule_name: The rule name, sanitized to use in XML. - target_name: The name of the target. - after_targets: The name of the AfterTargets element. - before_targets: The name of the BeforeTargets element. - depends_on: The name of the DependsOn element. - compute_output: The name of the ComputeOutput element. 
- dirs_to_make: The name of the DirsToMake element. - inputs: The name of the _inputs element. - tlog: The name of the _tlog element. - extension: The extension this rule applies to. - description: The message displayed when this rule is invoked. - additional_dependencies: A string listing additional dependencies. - outputs: The outputs of this rule. - command: The command used to run the rule. - """ - - def __init__(self, rule, spec): - self.display_name = rule['rule_name'] - # Assure that the rule name is only characters and numbers - self.rule_name = re.sub(r'\W', '_', self.display_name) - # Create the various element names, following the example set by the - # Visual Studio 2008 to 2010 conversion. I don't know if VS2010 - # is sensitive to the exact names. - self.target_name = '_' + self.rule_name - self.after_targets = self.rule_name + 'AfterTargets' - self.before_targets = self.rule_name + 'BeforeTargets' - self.depends_on = self.rule_name + 'DependsOn' - self.compute_output = 'Compute%sOutput' % self.rule_name - self.dirs_to_make = self.rule_name + 'DirsToMake' - self.inputs = self.rule_name + '_inputs' - self.tlog = self.rule_name + '_tlog' - self.extension = rule['extension'] - if not self.extension.startswith('.'): - self.extension = '.' + self.extension - - self.description = MSVSSettings.ConvertVCMacrosToMSBuild( - rule.get('message', self.rule_name)) - old_additional_dependencies = _FixPaths(rule.get('inputs', [])) - self.additional_dependencies = ( - ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) - for i in old_additional_dependencies])) - old_outputs = _FixPaths(rule.get('outputs', [])) - self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) - for i in old_outputs]) - old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True, - do_setup_env=True) - self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) - - -def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules): - """Generate the .props file.""" - content = ['Project', - {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}] - for rule in msbuild_rules: - content.extend([ - ['PropertyGroup', - {'Condition': "'$(%s)' == '' and '$(%s)' == '' and " - "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets, - rule.after_targets) - }, - [rule.before_targets, 'Midl'], - [rule.after_targets, 'CustomBuild'], - ], - ['PropertyGroup', - [rule.depends_on, - {'Condition': "'$(ConfigurationType)' != 'Makefile'"}, - '_SelectedFiles;$(%s)' % rule.depends_on - ], - ], - ['ItemDefinitionGroup', - [rule.rule_name, - ['CommandLineTemplate', rule.command], - ['Outputs', rule.outputs], - ['ExecutionDescription', rule.description], - ['AdditionalDependencies', rule.additional_dependencies], - ], - ] - ]) - easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules): - """Generate the .targets file.""" - content = ['Project', - {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003' - } - ] - item_group = [ - 'ItemGroup', - ['PropertyPageSchema', - {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'} - ] - ] - for rule in msbuild_rules: - item_group.append( - ['AvailableItemName', - {'Include': rule.rule_name}, - ['Targets', rule.target_name], - ]) - content.append(item_group) - - for rule in msbuild_rules: - content.append( - ['UsingTask', - {'TaskName': rule.rule_name, - 'TaskFactory': 'XamlTaskFactory', - 'AssemblyName': 'Microsoft.Build.Tasks.v4.0' - }, - ['Task', 
'$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'], - ]) - for rule in msbuild_rules: - rule_name = rule.rule_name - target_outputs = '%%(%s.Outputs)' % rule_name - target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);' - '$(MSBuildProjectFile)') % (rule_name, rule_name) - rule_inputs = '%%(%s.Identity)' % rule_name - extension_condition = ("'%(Extension)'=='.obj' or " - "'%(Extension)'=='.res' or " - "'%(Extension)'=='.rsc' or " - "'%(Extension)'=='.lib'") - remove_section = [ - 'ItemGroup', - {'Condition': "'@(SelectedFiles)' != ''"}, - [rule_name, - {'Remove': '@(%s)' % rule_name, - 'Condition': "'%(Identity)' != '@(SelectedFiles)'" - } - ] - ] - inputs_section = [ - 'ItemGroup', - [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}] - ] - logging_section = [ - 'ItemGroup', - [rule.tlog, - {'Include': '%%(%s.Outputs)' % rule_name, - 'Condition': ("'%%(%s.Outputs)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" % - (rule_name, rule_name)) - }, - ['Source', "@(%s, '|')" % rule_name], - ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs], - ], - ] - message_section = [ - 'Message', - {'Importance': 'High', - 'Text': '%%(%s.ExecutionDescription)' % rule_name - } - ] - write_tlog_section = [ - 'WriteLinesToFile', - {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - 'File': '$(IntDir)$(ProjectName).write.1.tlog', - 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog, - rule.tlog) - } - ] - read_tlog_section = [ - 'WriteLinesToFile', - {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - 'File': '$(IntDir)$(ProjectName).read.1.tlog', - 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog) - } - ] - command_and_input_section = [ - rule_name, - {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule_name, rule_name), - 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name, - 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name, - 'Inputs': rule_inputs - } - ] - content.extend([ - ['Target', - {'Name': rule.target_name, - 'BeforeTargets': '$(%s)' % rule.before_targets, - 'AfterTargets': '$(%s)' % rule.after_targets, - 'Condition': "'@(%s)' != ''" % rule_name, - 'DependsOnTargets': '$(%s);%s' % (rule.depends_on, - rule.compute_output), - 'Outputs': target_outputs, - 'Inputs': target_inputs - }, - remove_section, - inputs_section, - logging_section, - message_section, - write_tlog_section, - read_tlog_section, - command_and_input_section, - ], - ['PropertyGroup', - ['ComputeLinkInputsTargets', - '$(ComputeLinkInputsTargets);', - '%s;' % rule.compute_output - ], - ['ComputeLibInputsTargets', - '$(ComputeLibInputsTargets);', - '%s;' % rule.compute_output - ], - ], - ['Target', - {'Name': rule.compute_output, - 'Condition': "'@(%s)' != ''" % rule_name - }, - ['ItemGroup', - [rule.dirs_to_make, - {'Condition': "'@(%s)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name), - 'Include': '%%(%s.Outputs)' % rule_name - } - ], - ['Link', - {'Include': '%%(%s.Identity)' % rule.dirs_to_make, - 'Condition': extension_condition - } - ], - ['Lib', - {'Include': '%%(%s.Identity)' % rule.dirs_to_make, - 'Condition': extension_condition - } - ], - ['ImpLib', - {'Include': '%%(%s.Identity)' % rule.dirs_to_make, - 'Condition': extension_condition - } - ], - ], - ['MakeDir', - {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" % - rule.dirs_to_make) - } - ] - ], - ]) - 
easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): - # Generate the .xml file - content = [ - 'ProjectSchemaDefinitions', - {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;' - 'assembly=Microsoft.Build.Framework'), - 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml', - 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib', - 'xmlns:transformCallback': - 'Microsoft.Cpp.Dev10.ConvertPropertyCallback' - } - ] - for rule in msbuild_rules: - content.extend([ - ['Rule', - {'Name': rule.rule_name, - 'PageTemplate': 'tool', - 'DisplayName': rule.display_name, - 'Order': '200' - }, - ['Rule.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'ItemType': rule.rule_name - } - ] - ], - ['Rule.Categories', - ['Category', - {'Name': 'General'}, - ['Category.DisplayName', - ['sys:String', 'General'], - ], - ], - ['Category', - {'Name': 'Command Line', - 'Subtype': 'CommandLine' - }, - ['Category.DisplayName', - ['sys:String', 'Command Line'], - ], - ], - ], - ['StringListProperty', - {'Name': 'Inputs', - 'Category': 'Command Line', - 'IsRequired': 'true', - 'Switch': ' ' - }, - ['StringListProperty.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'ItemType': rule.rule_name, - 'SourceType': 'Item' - } - ] - ], - ], - ['StringProperty', - {'Name': 'CommandLineTemplate', - 'DisplayName': 'Command Line', - 'Visible': 'False', - 'IncludeInCommandLine': 'False' - } - ], - ['DynamicEnumProperty', - {'Name': rule.before_targets, - 'Category': 'General', - 'EnumProvider': 'Targets', - 'IncludeInCommandLine': 'False' - }, - ['DynamicEnumProperty.DisplayName', - ['sys:String', 'Execute Before'], - ], - ['DynamicEnumProperty.Description', - ['sys:String', 'Specifies the targets for the build customization' - ' to run before.' 
- ], - ], - ['DynamicEnumProperty.ProviderSettings', - ['NameValuePair', - {'Name': 'Exclude', - 'Value': '^%s|^Compute' % rule.before_targets - } - ] - ], - ['DynamicEnumProperty.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'HasConfigurationCondition': 'true' - } - ] - ], - ], - ['DynamicEnumProperty', - {'Name': rule.after_targets, - 'Category': 'General', - 'EnumProvider': 'Targets', - 'IncludeInCommandLine': 'False' - }, - ['DynamicEnumProperty.DisplayName', - ['sys:String', 'Execute After'], - ], - ['DynamicEnumProperty.Description', - ['sys:String', ('Specifies the targets for the build customization' - ' to run after.') - ], - ], - ['DynamicEnumProperty.ProviderSettings', - ['NameValuePair', - {'Name': 'Exclude', - 'Value': '^%s|^Compute' % rule.after_targets - } - ] - ], - ['DynamicEnumProperty.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'ItemType': '', - 'HasConfigurationCondition': 'true' - } - ] - ], - ], - ['StringListProperty', - {'Name': 'Outputs', - 'DisplayName': 'Outputs', - 'Visible': 'False', - 'IncludeInCommandLine': 'False' - } - ], - ['StringProperty', - {'Name': 'ExecutionDescription', - 'DisplayName': 'Execution Description', - 'Visible': 'False', - 'IncludeInCommandLine': 'False' - } - ], - ['StringListProperty', - {'Name': 'AdditionalDependencies', - 'DisplayName': 'Additional Dependencies', - 'IncludeInCommandLine': 'False', - 'Visible': 'false' - } - ], - ['StringProperty', - {'Subtype': 'AdditionalOptions', - 'Name': 'AdditionalOptions', - 'Category': 'Command Line' - }, - ['StringProperty.DisplayName', - ['sys:String', 'Additional Options'], - ], - ['StringProperty.Description', - ['sys:String', 'Additional Options'], - ], - ], - ], - ['ItemType', - {'Name': rule.rule_name, - 'DisplayName': rule.display_name - } - ], - ['FileExtension', - {'Name': '*' + rule.extension, - 'ContentType': rule.rule_name - } - ], - ['ContentType', - {'Name': rule.rule_name, - 'DisplayName': '', - 'ItemType': rule.rule_name - } - ] - ]) - easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True) - - -def _GetConfigurationAndPlatform(name, settings): - configuration = name.rsplit('_', 1)[0] - platform = settings.get('msvs_configuration_platform', 'Win32') - return (configuration, platform) - - -def _GetConfigurationCondition(name, settings): - return (r"'$(Configuration)|$(Platform)'=='%s|%s'" % - _GetConfigurationAndPlatform(name, settings)) - - -def _GetMSBuildProjectConfigurations(configurations): - group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] - for (name, settings) in sorted(configurations.iteritems()): - configuration, platform = _GetConfigurationAndPlatform(name, settings) - designation = '%s|%s' % (configuration, platform) - group.append( - ['ProjectConfiguration', {'Include': designation}, - ['Configuration', configuration], - ['Platform', platform]]) - return [group] - - -def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): - namespace = os.path.splitext(gyp_file_name)[0] - return [ - ['PropertyGroup', {'Label': 'Globals'}, - ['ProjectGuid', guid], - ['Keyword', 'Win32Proj'], - ['RootNamespace', namespace], - ] - ] - - -def _GetMSBuildConfigurationDetails(spec, build_file): - properties = {} - for name, settings in spec['configurations'].iteritems(): - msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) - condition = _GetConfigurationCondition(name, settings) - character_set = msbuild_attributes.get('CharacterSet') - _AddConditionalProperty(properties, condition, 
'ConfigurationType', - msbuild_attributes['ConfigurationType']) - if character_set: - _AddConditionalProperty(properties, condition, 'CharacterSet', - character_set) - return _GetMSBuildPropertyGroup(spec, 'Configuration', properties) - - -def _GetMSBuildLocalProperties(msbuild_toolset): - # Currently the only local property we support is PlatformToolset - properties = {} - if msbuild_toolset: - properties = [ - ['PropertyGroup', {'Label': 'Locals'}, - ['PlatformToolset', msbuild_toolset], - ] - ] - return properties - - -def _GetMSBuildPropertySheets(configurations): - user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' - additional_props = {} - props_specified = False - for name, settings in sorted(configurations.iteritems()): - configuration = _GetConfigurationCondition(name, settings) - if settings.has_key('msbuild_props'): - additional_props[configuration] = _FixPaths(settings['msbuild_props']) - props_specified = True - else: - additional_props[configuration] = '' - - if not props_specified: - return [ - ['ImportGroup', - {'Label': 'PropertySheets'}, - ['Import', - {'Project': user_props, - 'Condition': "exists('%s')" % user_props, - 'Label': 'LocalAppDataPlatform' - } - ] - ] - ] - else: - sheets = [] - for condition, props in additional_props.iteritems(): - import_group = [ - 'ImportGroup', - {'Label': 'PropertySheets', - 'Condition': condition - }, - ['Import', - {'Project': user_props, - 'Condition': "exists('%s')" % user_props, - 'Label': 'LocalAppDataPlatform' - } - ] - ] - for props_file in props: - import_group.append(['Import', {'Project':props_file}]) - sheets.append(import_group) - return sheets - -def _ConvertMSVSBuildAttributes(spec, config, build_file): - config_type = _GetMSVSConfigurationType(spec, build_file) - msvs_attributes = _GetMSVSAttributes(spec, config, config_type) - msbuild_attributes = {} - for a in msvs_attributes: - if a in ['IntermediateDirectory', 'OutputDirectory']: - directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) - if not directory.endswith('\\'): - directory += '\\' - msbuild_attributes[a] = directory - elif a == 'CharacterSet': - msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a]) - elif a == 'ConfigurationType': - msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) - else: - print 'Warning: Do not know how to convert MSVS attribute ' + a - return msbuild_attributes - - -def _ConvertMSVSCharacterSet(char_set): - if char_set.isdigit(): - char_set = { - '0': 'MultiByte', - '1': 'Unicode', - '2': 'MultiByte', - }[char_set] - return char_set - - -def _ConvertMSVSConfigurationType(config_type): - if config_type.isdigit(): - config_type = { - '1': 'Application', - '2': 'DynamicLibrary', - '4': 'StaticLibrary', - '10': 'Utility' - }[config_type] - return config_type - - -def _GetMSBuildAttributes(spec, config, build_file): - if 'msbuild_configuration_attributes' not in config: - msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file) - - else: - config_type = _GetMSVSConfigurationType(spec, build_file) - config_type = _ConvertMSVSConfigurationType(config_type) - msbuild_attributes = config.get('msbuild_configuration_attributes', {}) - msbuild_attributes.setdefault('ConfigurationType', config_type) - output_dir = msbuild_attributes.get('OutputDirectory', - '$(SolutionDir)$(Configuration)') - msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\' - if 'IntermediateDirectory' not in msbuild_attributes: - intermediate = _FixPath('$(Configuration)') + 
'\\' - msbuild_attributes['IntermediateDirectory'] = intermediate - if 'CharacterSet' in msbuild_attributes: - msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet( - msbuild_attributes['CharacterSet']) - if 'TargetName' not in msbuild_attributes: - prefix = spec.get('product_prefix', '') - product_name = spec.get('product_name', '$(ProjectName)') - target_name = prefix + product_name - msbuild_attributes['TargetName'] = target_name - if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec: - ext = spec.get('product_extension') - msbuild_attributes['TargetExt'] = '.' + ext - - # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile' - # (depending on the tool used) to avoid MSB8012 warning. - msbuild_tool_map = { - 'executable': 'Link', - 'shared_library': 'Link', - 'loadable_module': 'Link', - 'static_library': 'Lib', - } - msbuild_tool = msbuild_tool_map.get(spec['type']) - if msbuild_tool: - msbuild_settings = config['finalized_msbuild_settings'] - out_file = msbuild_settings[msbuild_tool].get('OutputFile') - if out_file: - msbuild_attributes['TargetPath'] = _FixPath(out_file) - - return msbuild_attributes - - -def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): - # TODO(jeanluc) We could optimize out the following and do it only if - # there are actions. - # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'. - new_paths = [] - cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0] - if cygwin_dirs: - cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs) - new_paths.append(cyg_path) - # TODO(jeanluc) Change the convention to have both a cygwin_dir and a - # python_dir. - python_path = cyg_path.replace('cygwin\\bin', 'python_26') - new_paths.append(python_path) - if new_paths: - new_paths = '$(ExecutablePath);' + ';'.join(new_paths) - - properties = {} - for (name, configuration) in sorted(configurations.iteritems()): - condition = _GetConfigurationCondition(name, configuration) - attributes = _GetMSBuildAttributes(spec, configuration, build_file) - msbuild_settings = configuration['finalized_msbuild_settings'] - _AddConditionalProperty(properties, condition, 'IntDir', - attributes['IntermediateDirectory']) - _AddConditionalProperty(properties, condition, 'OutDir', - attributes['OutputDirectory']) - _AddConditionalProperty(properties, condition, 'TargetName', - attributes['TargetName']) - if 'TargetExt' in attributes: - _AddConditionalProperty(properties, condition, 'TargetExt', - attributes['TargetExt']) - - if attributes.get('TargetPath'): - _AddConditionalProperty(properties, condition, 'TargetPath', - attributes['TargetPath']) - - if new_paths: - _AddConditionalProperty(properties, condition, 'ExecutablePath', - new_paths) - tool_settings = msbuild_settings.get('', {}) - for name, value in sorted(tool_settings.iteritems()): - formatted_value = _GetValueFormattedForMSBuild('', name, value) - _AddConditionalProperty(properties, condition, name, formatted_value) - return _GetMSBuildPropertyGroup(spec, None, properties) - - -def _AddConditionalProperty(properties, condition, name, value): - """Adds a property / conditional value pair to a dictionary. - - Arguments: - properties: The dictionary to be modified. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of condition for which this value is true. - condition: The condition under which the named property has the value. - name: The name of the property. 
- value: The value of the property. - """ - if name not in properties: - properties[name] = {} - values = properties[name] - if value not in values: - values[value] = [] - conditions = values[value] - conditions.append(condition) - - -# Regex for msvs variable references ( i.e. $(FOO) ). -MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)') - - -def _GetMSBuildPropertyGroup(spec, label, properties): - """Returns a PropertyGroup definition for the specified properties. - - Arguments: - spec: The target project dict. - label: An optional label for the PropertyGroup. - properties: The dictionary to be converted. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of condition for which this value is true. - """ - group = ['PropertyGroup'] - if label: - group.append({'Label': label}) - num_configurations = len(spec['configurations']) - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_varible. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - edges = set() - for value in sorted(properties[node].keys()): - # Add to edges all $(...) references to variables. - # - # Variable references that refer to names not in properties are excluded - # These can exist for instance to refer built in definitions like - # $(SolutionDir). - # - # Self references are ignored. Self reference is used in a few places to - # append to the default value. I.e. PATH=$(PATH);other_path - edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value) - if v in properties and v != node])) - return edges - properties_ordered = gyp.common.TopologicallySorted( - properties.keys(), GetEdges) - # Walk properties in the reverse of a topological sort on - # user_of_variable -> used_variable as this ensures variables are - # defined before they are used. - # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - for name in reversed(properties_ordered): - values = properties[name] - for value, conditions in sorted(values.iteritems()): - if len(conditions) == num_configurations: - # If the value is the same all configurations, - # just add one unconditional entry. - group.append([name, value]) - else: - for condition in conditions: - group.append([name, {'Condition': condition}, value]) - return [group] - - -def _GetMSBuildToolSettingsSections(spec, configurations): - groups = [] - for (name, configuration) in sorted(configurations.iteritems()): - msbuild_settings = configuration['finalized_msbuild_settings'] - group = ['ItemDefinitionGroup', - {'Condition': _GetConfigurationCondition(name, configuration)} - ] - for tool_name, tool_settings in sorted(msbuild_settings.iteritems()): - # Skip the tool named '' which is a holder of global settings handled - # by _GetMSBuildConfigurationGlobalProperties. 
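`_GetMSBuildPropertyGroup` above relies on `gyp.common.TopologicallySorted` to emit each property before any property that references it through `$(Name)`, walking the result in reverse (reversing a topological sort on user -> used edges yields a sort on the reversed edges). A standalone sketch of the same ordering; cycles are not handled here, and references to names outside the dict (built-ins like `$(SolutionDir)`) are skipped just as in the original:

```python
import re

MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')

def ordered_properties(properties):
    """Return property names so each is defined before it is referenced."""
    visited, order = set(), []
    def visit(name):
        if name in visited:
            return
        visited.add(name)
        for ref in MSVS_VARIABLE_REFERENCE.findall(properties[name]):
            if ref in properties and ref != name:  # skip builtins/self-refs
                visit(ref)
        order.append(name)  # dependencies appended first, so no reverse needed
    for name in sorted(properties):
        visit(name)
    return order

props = {'OutDir': '$(BaseDir)\\out\\', 'BaseDir': 'C:\\build',
         'IntDir': '$(OutDir)obj\\'}
print(ordered_properties(props))  # ['BaseDir', 'OutDir', 'IntDir']
```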
- if tool_name: - if tool_settings: - tool = [tool_name] - for name, value in sorted(tool_settings.iteritems()): - formatted_value = _GetValueFormattedForMSBuild(tool_name, name, - value) - tool.append([name, formatted_value]) - group.append(tool) - groups.append(group) - return groups - - -def _FinalizeMSBuildSettings(spec, configuration): - if 'msbuild_settings' in configuration: - converted = False - msbuild_settings = configuration['msbuild_settings'] - MSVSSettings.ValidateMSBuildSettings(msbuild_settings) - else: - converted = True - msvs_settings = configuration.get('msvs_settings', {}) - msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) - include_dirs, resource_include_dirs = _GetIncludeDirs(configuration) - libraries = _GetLibraries(spec) - out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True) - defines = _GetDefines(configuration) - if converted: - # Visual Studio 2010 has TR1 - defines = [d for d in defines if d != '_HAS_TR1=0'] - # Warn of ignored settings - ignored_settings = ['msvs_prebuild', 'msvs_postbuild', 'msvs_tool_files'] - for ignored_setting in ignored_settings: - value = configuration.get(ignored_setting) - if value: - print ('Warning: The automatic conversion to MSBuild does not handle ' - '%s. Ignoring setting of %s' % (ignored_setting, str(value))) - - defines = [_EscapeCppDefineForMSBuild(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(configuration) - # TODO(jeanluc) Validate & warn that we don't translate - # prebuild = configuration.get('msvs_prebuild') - # postbuild = configuration.get('msvs_postbuild') - def_file = _GetModuleDefinition(spec) - precompiled_header = configuration.get('msvs_precompiled_header') - - # Add the information to the appropriate tool - # TODO(jeanluc) We could optimize and generate these settings only if - # the corresponding files are found, e.g. don't generate ResourceCompile - # if you don't have any resources. - _ToolAppend(msbuild_settings, 'ClCompile', - 'AdditionalIncludeDirectories', include_dirs) - _ToolAppend(msbuild_settings, 'ResourceCompile', - 'AdditionalIncludeDirectories', resource_include_dirs) - # Add in libraries, note that even for empty libraries, we want this - # set, to prevent inheriting default libraries from the enviroment. - _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies', - libraries) - if out_file: - _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file, - only_if_unset=True) - # Add defines. - _ToolAppend(msbuild_settings, 'ClCompile', - 'PreprocessorDefinitions', defines) - _ToolAppend(msbuild_settings, 'ResourceCompile', - 'PreprocessorDefinitions', defines) - # Add disabled warnings. - _ToolAppend(msbuild_settings, 'ClCompile', - 'DisableSpecificWarnings', disabled_warnings) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use') - _ToolAppend(msbuild_settings, 'ClCompile', - 'PrecompiledHeaderFile', precompiled_header) - _ToolAppend(msbuild_settings, 'ClCompile', - 'ForcedIncludeFiles', precompiled_header) - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec['type'] == 'loadable_module': - _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true') - # Set the module definition file if any. 
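`_FinalizeMSBuildSettings` above leans on the `_ToolAppend`/`_ToolSetOrAppend` helpers defined earlier in msvs.py, outside this hunk. The helper below is a minimal guess at the append behaviour those calls depend on (list-valued settings accumulate, `only_if_unset` guards defaults); it is an illustration, not the actual gyp code:

```python
def tool_append(settings, tool, name, value, only_if_unset=False):
    """Hypothetical sketch: settings maps tool name -> {setting: value}."""
    if value is None:
        return
    tool_settings = settings.setdefault(tool, {})
    if only_if_unset and name in tool_settings:
        return
    if name in tool_settings and isinstance(tool_settings[name], list):
        tool_settings[name].extend(value)  # accumulate list-valued settings
    else:
        tool_settings[name] = value

msbuild = {}
tool_append(msbuild, 'ClCompile', 'PreprocessorDefinitions', ['WIN32'])
tool_append(msbuild, 'ClCompile', 'PreprocessorDefinitions', ['NDEBUG'])
print(msbuild)  # {'ClCompile': {'PreprocessorDefinitions': ['WIN32', 'NDEBUG']}}
```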
- if def_file: - _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file) - configuration['finalized_msbuild_settings'] = msbuild_settings - - -def _GetValueFormattedForMSBuild(tool_name, name, value): - if type(value) == list: - # For some settings, VS2010 does not automatically extends the settings - # TODO(jeanluc) Is this what we want? - if name in ['AdditionalIncludeDirectories', - 'AdditionalLibraryDirectories', - 'AdditionalOptions', - 'DelayLoadDLLs', - 'DisableSpecificWarnings', - 'PreprocessorDefinitions']: - value.append('%%(%s)' % name) - # For most tools, entries in a list should be separated with ';' but some - # settings use a space. Check for those first. - exceptions = { - 'ClCompile': ['AdditionalOptions'], - 'Link': ['AdditionalOptions'], - 'Lib': ['AdditionalOptions']} - if tool_name in exceptions and name in exceptions[tool_name]: - char = ' ' - else: - char = ';' - formatted_value = char.join( - [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value]) - else: - formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value) - return formatted_value - - -def _VerifySourcesExist(sources, root_dir): - """Verifies that all source files exist on disk. - - Checks that all regular source files, i.e. not created at run time, - exist on disk. Missing files cause needless recompilation but no otherwise - visible errors. - - Arguments: - sources: A recursive list of Filter/file names. - root_dir: The root directory for the relative path names. - Returns: - A list of source files that cannot be found on disk. - """ - missing_sources = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - missing_sources.extend(_VerifySourcesExist(source.contents, root_dir)) - else: - if '$' not in source: - full_path = os.path.join(root_dir, source) - if not os.path.exists(full_path): - missing_sources.append(full_path) - return missing_sources - - -def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name, - actions_spec, sources_handled_by_action, list_excluded): - groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule'] - grouped_sources = {} - for g in groups: - grouped_sources[g] = [] - - _AddSources2(spec, sources, exclusions, grouped_sources, - extension_to_rule_name, sources_handled_by_action, list_excluded) - sources = [] - for g in groups: - if grouped_sources[g]: - sources.append(['ItemGroup'] + grouped_sources[g]) - if actions_spec: - sources.append(['ItemGroup'] + actions_spec) - return sources - - -def _AddSources2(spec, sources, exclusions, grouped_sources, - extension_to_rule_name, sources_handled_by_action, - list_excluded): - extensions_excluded_from_precompile = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - _AddSources2(spec, source.contents, exclusions, grouped_sources, - extension_to_rule_name, sources_handled_by_action, - list_excluded) - else: - if not source in sources_handled_by_action: - detail = [] - excluded_configurations = exclusions.get(source, []) - if len(excluded_configurations) == len(spec['configurations']): - detail.append(['ExcludedFromBuild', 'true']) - else: - for config_name, configuration in sorted(excluded_configurations): - condition = _GetConfigurationCondition(config_name, configuration) - detail.append(['ExcludedFromBuild', - {'Condition': condition}, - 'true']) - # Add precompile if needed - for config_name, configuration in spec['configurations'].iteritems(): - precompiled_source = configuration.get('msvs_precompiled_source', '') - if precompiled_source != 
'': - precompiled_source = _FixPath(precompiled_source) - if not extensions_excluded_from_precompile: - # If the precompiled header is generated by a C source, we must - # not try to use it for C++ sources, and vice versa. - basename, extension = os.path.splitext(precompiled_source) - if extension == '.c': - extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx'] - else: - extensions_excluded_from_precompile = ['.c'] - - if precompiled_source == source: - condition = _GetConfigurationCondition(config_name, configuration) - detail.append(['PrecompiledHeader', - {'Condition': condition}, - 'Create' - ]) - else: - # Turn off precompiled header usage for source files of a - # different type than the file that generated the - # precompiled header. - for extension in extensions_excluded_from_precompile: - if source.endswith(extension): - detail.append(['PrecompiledHeader', '']) - detail.append(['ForcedIncludeFiles', '']) - - group, element = _MapFileToMsBuildSourceType(source, - extension_to_rule_name) - grouped_sources[group].append([element, {'Include': source}] + detail) - - -def _GetMSBuildProjectReferences(project): - references = [] - if project.dependencies: - group = ['ItemGroup'] - for dependency in project.dependencies: - guid = dependency.guid - project_dir = os.path.split(project.path)[0] - relative_path = gyp.common.RelativePath(dependency.path, project_dir) - project_ref = ['ProjectReference', - {'Include': relative_path}, - ['Project', guid], - ['ReferenceOutputAssembly', 'false'] - ] - for config in dependency.spec.get('configurations', {}).itervalues(): - # If it's disabled in any config, turn it off in the reference. - if config.get('msvs_2010_disable_uldi_when_referenced', 0): - project_ref.append(['UseLibraryDependencyInputs', 'false']) - break - group.append(project_ref) - references.append(group) - return references - - -def _GenerateMSBuildProject(project, options, version, generator_flags): - spec = project.spec - configurations = spec['configurations'] - project_dir, project_file_name = os.path.split(project.path) - msbuildproj_dir = os.path.dirname(project.path) - if msbuildproj_dir and not os.path.exists(msbuildproj_dir): - os.makedirs(msbuildproj_dir) - # Prepare list of sources and excluded sources. - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, - gyp_file) - # Add rules. - actions_to_add = {} - props_files_of_rules = set() - targets_files_of_rules = set() - extension_to_rule_name = {} - list_excluded = generator_flags.get('msvs_list_excluded_files', True) - _GenerateRulesForMSBuild(project_dir, options, spec, - sources, excluded_sources, - props_files_of_rules, targets_files_of_rules, - actions_to_add, extension_to_rule_name) - sources, excluded_sources, excluded_idl = ( - _AdjustSourcesAndConvertToFilterHierarchy(spec, options, - project_dir, sources, - excluded_sources, - list_excluded)) - _AddActions(actions_to_add, spec, project.build_file) - _AddCopies(actions_to_add, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. 
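`_VerifySourcesExist` above walks a tree in which each node is either a plain gyp-relative path or an `MSVSProject.Filter` whose `.contents` nests further sources; paths containing `$` are assumed to be generated at build time and skipped. A self-contained equivalent with a stand-in `Filter` class:

```python
import os

class Filter(object):
    """Stand-in for MSVSProject.Filter: a named group of nested sources."""
    def __init__(self, name, contents):
        self.name, self.contents = name, contents

def missing_sources(sources, root_dir):
    missing = []
    for source in sources:
        if isinstance(source, Filter):
            missing.extend(missing_sources(source.contents, root_dir))
        elif '$' not in source:  # skip macro/generated paths
            full = os.path.join(root_dir, source)
            if not os.path.exists(full):
                missing.append(full)
    return missing

tree = [Filter('src', ['main.cc', Filter('gen', ['$(IntDir)/x.cc'])])]
print(missing_sources(tree, '/tmp/project'))
```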
- excluded_sources = _FilterActionsFromExcluded( - excluded_sources, actions_to_add) - - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( - spec, actions_to_add) - - _GenerateMSBuildFiltersFile(project.path + '.filters', sources, - extension_to_rule_name) - missing_sources = _VerifySourcesExist(sources, project_dir) - - for configuration in configurations.itervalues(): - _FinalizeMSBuildSettings(spec, configuration) - - # Add attributes to root element - - import_default_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]] - import_cpp_props_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]] - import_cpp_targets_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]] - macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]] - - content = [ - 'Project', - {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003', - 'ToolsVersion': version.ProjectVersion(), - 'DefaultTargets': 'Build' - }] - - content += _GetMSBuildProjectConfigurations(configurations) - content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name) - content += import_default_section - content += _GetMSBuildConfigurationDetails(spec, project.build_file) - content += _GetMSBuildLocalProperties(project.msbuild_toolset) - content += import_cpp_props_section - content += _GetMSBuildExtensions(props_files_of_rules) - content += _GetMSBuildPropertySheets(configurations) - content += macro_section - content += _GetMSBuildConfigurationGlobalProperties(spec, configurations, - project.build_file) - content += _GetMSBuildToolSettingsSections(spec, configurations) - content += _GetMSBuildSources( - spec, sources, exclusions, extension_to_rule_name, actions_spec, - sources_handled_by_action, list_excluded) - content += _GetMSBuildProjectReferences(project) - content += import_cpp_targets_section - content += _GetMSBuildExtensionTargets(targets_files_of_rules) - - # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS: - # has_run_as = _WriteMSVSUserFile(project.path, version, spec) - - easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True) - - return missing_sources - - -def _GetMSBuildExtensions(props_files_of_rules): - extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}] - for props_file in props_files_of_rules: - extensions.append(['Import', {'Project': props_file}]) - return [extensions] - - -def _GetMSBuildExtensionTargets(targets_files_of_rules): - targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}] - for targets_file in sorted(targets_files_of_rules): - targets_node.append(['Import', {'Project': targets_file}]) - return [targets_node] - - -def _GenerateActionsForMSBuild(spec, actions_to_add): - """Add actions accumulated into an actions_to_add, merging as needed. - - Arguments: - spec: the target project dict - actions_to_add: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - - Returns: - A pair of (action specification, the sources handled by this action). 
- """ - sources_handled_by_action = set() - actions_spec = [] - for primary_input, actions in actions_to_add.iteritems(): - inputs = set() - outputs = set() - descriptions = [] - commands = [] - for action in actions: - inputs.update(set(action['inputs'])) - outputs.update(set(action['outputs'])) - descriptions.append(action['description']) - cmd = action['command'] - # For most actions, add 'call' so that actions that invoke batch files - # return and continue executing. msbuild_use_call provides a way to - # disable this but I have not seen any adverse effect from doing that - # for everything. - if action.get('msbuild_use_call', True): - cmd = 'call ' + cmd - commands.append(cmd) - # Add the custom build action for one input file. - description = ', and also '.join(descriptions) - - # We can't join the commands simply with && because the command line will - # get too long. See also _AddActions: cygwin's setup_env mustn't be called - # for every invocation or the command that sets the PATH will grow too - # long. - command = ( - '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands)) - _AddMSBuildAction(spec, - primary_input, - inputs, - outputs, - command, - description, - sources_handled_by_action, - actions_spec) - return actions_spec, sources_handled_by_action - - -def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description, - sources_handled_by_action, actions_spec): - command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) - primary_input = _FixPath(primary_input) - inputs_array = _FixPaths(inputs) - outputs_array = _FixPaths(outputs) - additional_inputs = ';'.join([i for i in inputs_array - if i != primary_input]) - outputs = ';'.join(outputs_array) - sources_handled_by_action.add(primary_input) - action_spec = ['CustomBuild', {'Include': primary_input}] - action_spec.extend( - # TODO(jeanluc) 'Document' for all or just if as_sources? - [['FileType', 'Document'], - ['Command', command], - ['Message', description], - ['Outputs', outputs] - ]) - if additional_inputs: - action_spec.append(['AdditionalInputs', additional_inputs]) - actions_spec.append(action_spec) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the msvs.py file. 
""" - -import gyp.generator.msvs as msvs -import unittest -import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - - def setUp(self): - self.stderr = StringIO.StringIO() - - def test_GetLibraries(self): - self.assertEqual( - msvs._GetLibraries({}), - []) - self.assertEqual( - msvs._GetLibraries({'libraries': []}), - []) - self.assertEqual( - msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}), - ['a.lib']) - self.assertEqual( - msvs._GetLibraries({'libraries': ['-la']}), - ['a.lib']) - self.assertEqual( - msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib', - '-lb.lib', 'd.lib', 'a.lib']}), - ['c.lib', 'b.lib', 'd.lib', 'a.lib']) - -if __name__ == '__main__': - unittest.main() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py 2013-07-02 18:05:49.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1809 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import hashlib -import multiprocessing -import os.path -import re -import signal -import subprocess -import sys -import gyp -import gyp.common -import gyp.msvs_emulation -import gyp.MSVSUtil as MSVSUtil -import gyp.xcode_emulation - -from gyp.common import GetEnvironFallback -import gyp.ninja_syntax as ninja_syntax - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_PREFIX': 'lib', - - # Gyp expects the following variables to be expandable by the build - # system to the appropriate locations. Ninja prefers paths to be - # known at gyp time. To resolve this, introduce special - # variables starting with $! and $| (which begin with a $ so gyp knows it - # should be treated specially, but is otherwise an invalid - # ninja/shell variable) that are passed to gyp here but expanded - # before writing out into the target .ninja files; see - # ExpandSpecial. - # $! is used for variables that represent a path and that can only appear at - # the start of a string, while $| is used for variables that can appear - # anywhere in a string. - 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR', - 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen', - 'PRODUCT_DIR': '$!PRODUCT_DIR', - 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME', - - # Special variables that may be used by gyp 'rule' targets. - # We generate definitions for these variables on the fly when processing a - # rule. - 'RULE_INPUT_ROOT': '${root}', - 'RULE_INPUT_DIRNAME': '${dirname}', - 'RULE_INPUT_PATH': '${source}', - 'RULE_INPUT_EXT': '${ext}', - 'RULE_INPUT_NAME': '${name}', -} - -# Placates pylint. -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - -# TODO: figure out how to not build extra host objects in the non-cross-compile -# case when this is enabled, and enable unconditionally. 
-generator_supports_multiple_toolsets = ( - os.environ.get('GYP_CROSSCOMPILE') or - os.environ.get('AR_host') or - os.environ.get('CC_host') or - os.environ.get('CXX_host') or - os.environ.get('AR_target') or - os.environ.get('CC_target') or - os.environ.get('CXX_target')) - - -def StripPrefix(arg, prefix): - if arg.startswith(prefix): - return arg[len(prefix):] - return arg - - -def QuoteShellArgument(arg, flavor): - """Quote a string such that it will be interpreted as a single argument - by the shell.""" - # Rather than attempting to enumerate the bad shell characters, just - # whitelist common OK ones and quote anything else. - if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg): - return arg # No quoting necessary. - if flavor == 'win': - return gyp.msvs_emulation.QuoteForRspFile(arg) - return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" - - -def Define(d, flavor): - """Takes a preprocessor define and returns a -D parameter that's ninja- and - shell-escaped.""" - if flavor == 'win': - # cl.exe replaces literal # characters with = in preprocesor definitions for - # some reason. Octal-encode to work around that. - d = d.replace('#', '\\%03o' % ord('#')) - return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor) - - -class Target: - """Target represents the paths used within a single gyp target. - - Conceptually, building a single target A is a series of steps: - - 1) actions/rules/copies generates source/resources/etc. - 2) compiles generates .o files - 3) link generates a binary (library/executable) - 4) bundle merges the above in a mac bundle - - (Any of these steps can be optional.) - - From a build ordering perspective, a dependent target B could just - depend on the last output of this series of steps. - - But some dependent commands sometimes need to reach inside the box. - For example, when linking B it needs to get the path to the static - library generated by A. - - This object stores those paths. To keep things simple, member - variables only store concrete paths to single files, while methods - compute derived values like "the last output of the target". - """ - def __init__(self, type): - # Gyp type ("static_library", etc.) of this target. - self.type = type - # File representing whether any input dependencies necessary for - # dependent actions have completed. - self.preaction_stamp = None - # File representing whether any input dependencies necessary for - # dependent compiles have completed. - self.precompile_stamp = None - # File representing the completion of actions/rules/copies, if any. - self.actions_stamp = None - # Path to the output of the link step, if any. - self.binary = None - # Path to the file representing the completion of building the bundle, - # if any. - self.bundle = None - # On Windows, incremental linking requires linking against all the .objs - # that compose a .lib (rather than the .lib itself). That list is stored - # here. - self.component_objs = None - # Windows only. The import .lib is the output of a build step, but - # because dependents only link against the lib (not both the lib and the - # dll) we keep track of the import library here. - self.import_lib = None - - def Linkable(self): - """Return true if this is a target that can be linked against.""" - return self.type in ('static_library', 'shared_library') - - def UsesToc(self, flavor): - """Return true if the target should produce a restat rule based on a TOC - file.""" - # For bundles, the .TOC should be produced for the binary, not for - # FinalOutput(). 
But the naive approach would put the TOC file into the - # bundle, so don't do this for bundles for now. - if flavor == 'win' or self.bundle: - return False - return self.type in ('shared_library', 'loadable_module') - - def PreActionInput(self, flavor): - """Return the path, if any, that should be used as a dependency of - any dependent action step.""" - if self.UsesToc(flavor): - return self.FinalOutput() + '.TOC' - return self.FinalOutput() or self.preaction_stamp - - def PreCompileInput(self): - """Return the path, if any, that should be used as a dependency of - any dependent compile step.""" - return self.actions_stamp or self.precompile_stamp - - def FinalOutput(self): - """Return the last output of the target, which depends on all prior - steps.""" - return self.bundle or self.binary or self.actions_stamp - - -# A small discourse on paths as used within the Ninja build: -# All files we produce (both at gyp and at build time) appear in the -# build directory (e.g. out/Debug). -# -# Paths within a given .gyp file are always relative to the directory -# containing the .gyp file. Call these "gyp paths". This includes -# sources as well as the starting directory a given gyp rule/action -# expects to be run from. We call the path from the source root to -# the gyp file the "base directory" within the per-.gyp-file -# NinjaWriter code. -# -# All paths as written into the .ninja files are relative to the build -# directory. Call these paths "ninja paths". -# -# We translate between these two notions of paths with two helper -# functions: -# -# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) -# into the equivalent ninja path. -# -# - GypPathToUniqueOutput translates a gyp path into a ninja path to write -# an output file; the result can be namespaced such that it is unique -# to the input file name as well as the output target name. - -class NinjaWriter: - def __init__(self, qualified_target, target_outputs, base_dir, build_dir, - output_file, flavor, toplevel_dir=None): - """ - base_dir: path from source root to directory containing this gyp file, - by gyp semantics, all input paths are relative to this - build_dir: path from source root to build output - toplevel_dir: path to the toplevel directory - """ - - self.qualified_target = qualified_target - self.target_outputs = target_outputs - self.base_dir = base_dir - self.build_dir = build_dir - self.ninja = ninja_syntax.Writer(output_file) - self.flavor = flavor - self.abs_build_dir = None - if toplevel_dir is not None: - self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, - build_dir)) - self.obj_ext = '.obj' if flavor == 'win' else '.o' - if flavor == 'win': - # See docstring of msvs_emulation.GenerateEnvironmentFiles(). - self.win_env = {} - for arch in ('x86', 'x64'): - self.win_env[arch] = 'environment.' + arch - - # Relative path from build output dir to base dir. - build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) - self.build_to_base = os.path.join(build_to_top, base_dir) - # Relative path from base dir to build dir. - base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) - self.base_to_build = os.path.join(base_to_top, build_dir) - - def ExpandSpecial(self, path, product_dir=None): - """Expand specials like $!PRODUCT_DIR in |path|. - - If |product_dir| is None, assumes the cwd is already the product - dir. Otherwise, |product_dir| is the relative path to the product - dir. 
- """ - - PRODUCT_DIR = '$!PRODUCT_DIR' - if PRODUCT_DIR in path: - if product_dir: - path = path.replace(PRODUCT_DIR, product_dir) - else: - path = path.replace(PRODUCT_DIR + '/', '') - path = path.replace(PRODUCT_DIR + '\\', '') - path = path.replace(PRODUCT_DIR, '.') - - INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' - if INTERMEDIATE_DIR in path: - int_dir = self.GypPathToUniqueOutput('gen') - # GypPathToUniqueOutput generates a path relative to the product dir, - # so insert product_dir in front if it is provided. - path = path.replace(INTERMEDIATE_DIR, - os.path.join(product_dir or '', int_dir)) - - CONFIGURATION_NAME = '$|CONFIGURATION_NAME' - path = path.replace(CONFIGURATION_NAME, self.config_name) - - return path - - def ExpandRuleVariables(self, path, root, dirname, source, ext, name): - if self.flavor == 'win': - path = self.msvs_settings.ConvertVSMacros( - path, config=self.config_name) - path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) - path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], - dirname) - path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) - path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) - path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) - return path - - def GypPathToNinja(self, path, env=None): - """Translate a gyp path to a ninja path, optionally expanding environment - variable references in |path| with |env|. - - See the above discourse on path conversions.""" - if env: - if self.flavor == 'mac': - path = gyp.xcode_emulation.ExpandEnvVars(path, env) - elif self.flavor == 'win': - path = gyp.msvs_emulation.ExpandMacros(path, env) - if path.startswith('$!'): - expanded = self.ExpandSpecial(path) - if self.flavor == 'win': - expanded = os.path.normpath(expanded) - return expanded - if '$|' in path: - path = self.ExpandSpecial(path) - assert '$' not in path, path - return os.path.normpath(os.path.join(self.build_to_base, path)) - - def GypPathToUniqueOutput(self, path, qualified=True): - """Translate a gyp path to a ninja path for writing output. - - If qualified is True, qualify the resulting filename with the name - of the target. This is necessary when e.g. compiling the same - path twice for two separate output targets. - - See the above discourse on path conversions.""" - - path = self.ExpandSpecial(path) - assert not path.startswith('$'), path - - # Translate the path following this scheme: - # Input: foo/bar.gyp, target targ, references baz/out.o - # Output: obj/foo/baz/targ.out.o (if qualified) - # obj/foo/baz/out.o (otherwise) - # (and obj.host instead of obj for cross-compiles) - # - # Why this scheme and not some other one? - # 1) for a given input, you can compute all derived outputs by matching - # its path, even if the input is brought via a gyp file with '..'. - # 2) simple files like libraries and stamps have a simple filename. - - obj = 'obj' - if self.toolset != 'target': - obj += '.' + self.toolset - - path_dir, path_basename = os.path.split(path) - if qualified: - path_basename = self.name + '.' + path_basename - return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, - path_basename)) - - def WriteCollapsedDependencies(self, name, targets): - """Given a list of targets, return a path for a single file - representing the result of building all the targets or None. 
- - Uses a stamp file if necessary.""" - - assert targets == filter(None, targets), targets - if len(targets) == 0: - return None - if len(targets) > 1: - stamp = self.GypPathToUniqueOutput(name + '.stamp') - targets = self.ninja.build(stamp, 'stamp', targets) - self.ninja.newline() - return targets[0] - - def WriteSpec(self, spec, config_name, generator_flags, - case_sensitive_filesystem): - """The main entry point for NinjaWriter: write the build rules for a spec. - - Returns a Target object, which represents the output paths for this spec. - Returns None if there are no outputs (e.g. a settings-only 'none' type - target).""" - - self.config_name = config_name - self.name = spec['target_name'] - self.toolset = spec['toolset'] - config = spec['configurations'][config_name] - self.target = Target(spec['type']) - self.is_standalone_static_library = bool( - spec.get('standalone_static_library', 0)) - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - self.xcode_settings = self.msvs_settings = None - if self.flavor == 'mac': - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - if self.flavor == 'win': - self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, - generator_flags) - arch = self.msvs_settings.GetArch(config_name) - self.ninja.variable('arch', self.win_env[arch]) - - # Compute predepends for all rules. - # actions_depends is the dependencies this target depends on before running - # any of its action/rule/copy steps. - # compile_depends is the dependencies this target depends on before running - # any of its compile steps. - actions_depends = [] - compile_depends = [] - # TODO(evan): it is rather confusing which things are lists and which - # are strings. Fix these. - if 'dependencies' in spec: - for dep in spec['dependencies']: - if dep in self.target_outputs: - target = self.target_outputs[dep] - actions_depends.append(target.PreActionInput(self.flavor)) - compile_depends.append(target.PreCompileInput()) - actions_depends = filter(None, actions_depends) - compile_depends = filter(None, compile_depends) - actions_depends = self.WriteCollapsedDependencies('actions_depends', - actions_depends) - compile_depends = self.WriteCollapsedDependencies('compile_depends', - compile_depends) - self.target.preaction_stamp = actions_depends - self.target.precompile_stamp = compile_depends - - # Write out actions, rules, and copies. These must happen before we - # compile any sources, so compute a list of predependencies for sources - # while we do it. - extra_sources = [] - mac_bundle_depends = [] - self.target.actions_stamp = self.WriteActionsRulesCopies( - spec, extra_sources, actions_depends, mac_bundle_depends) - - # If we have actions/rules/copies, we depend directly on those, but - # otherwise we depend on dependent target's actions/rules/copies etc. - # We never need to explicitly depend on previous target's link steps, - # because no compile ever depends on them. - compile_depends_stamp = (self.target.actions_stamp or compile_depends) - - # Write out the compilation steps, if any. 
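`WriteCollapsedDependencies` above funnels any number of prerequisite outputs through a single stamp file, so later build edges need only one order-only input. A sketch of the idea with the ninja text written by hand (the real code goes through `gyp.ninja_syntax.Writer`, and a `stamp` rule, e.g. a touch command, is assumed to be declared elsewhere in the generated file):

```python
def collapse(name, targets):
    """Return (single path to depend on, extra ninja text to emit)."""
    targets = [t for t in targets if t]
    if not targets:
        return None, ''
    if len(targets) == 1:
        return targets[0], ''  # one target: no stamp needed
    stamp = 'obj/%s.stamp' % name
    edge = 'build %s: stamp %s\n' % (stamp, ' '.join(targets))
    return stamp, edge

stamp, text = collapse('actions_depends', ['gen/a.h', 'gen/b.h'])
print(stamp)
print(text)
```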
- link_deps = [] - sources = spec.get('sources', []) + extra_sources - if sources: - pch = None - if self.flavor == 'win': - gyp.msvs_emulation.VerifyMissingSources( - sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) - pch = gyp.msvs_emulation.PrecompiledHeader( - self.msvs_settings, config_name, self.GypPathToNinja, - self.GypPathToUniqueOutput, self.obj_ext) - else: - pch = gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, self.GypPathToNinja, - lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) - link_deps = self.WriteSources( - config_name, config, sources, compile_depends_stamp, pch, - case_sensitive_filesystem, spec) - # Some actions/rules output 'sources' that are already object files. - link_deps += [self.GypPathToNinja(f) - for f in sources if f.endswith(self.obj_ext)] - - if self.flavor == 'win' and self.target.type == 'static_library': - self.target.component_objs = link_deps - - # Write out a link step, if needed. - output = None - if link_deps or self.target.actions_stamp or actions_depends: - output = self.WriteTarget(spec, config_name, config, link_deps, - self.target.actions_stamp or actions_depends) - if self.is_mac_bundle: - mac_bundle_depends.append(output) - - # Bundle all of the above together, if needed. - if self.is_mac_bundle: - output = self.WriteMacBundle(spec, mac_bundle_depends) - - if not output: - return None - - assert self.target.FinalOutput(), output - return self.target - - def _WinIdlRule(self, source, prebuild, outputs): - """Handle the implicit VS .idl rule for one source file. Fills |outputs| - with files that are generated.""" - outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( - source, self.config_name) - outdir = self.GypPathToNinja(outdir) - def fix_path(path, rel=None): - path = os.path.join(outdir, path) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - path = self.ExpandRuleVariables( - path, root, dirname, source, ext, basename) - if rel: - path = os.path.relpath(path, rel) - return path - vars = [(name, fix_path(value, outdir)) for name, value in vars] - output = [fix_path(p) for p in output] - vars.append(('outdir', outdir)) - vars.append(('idlflags', flags)) - input = self.GypPathToNinja(source) - self.ninja.build(output, 'idl', input, - variables=vars, order_only=prebuild) - outputs.extend(output) - - def WriteWinIdlFiles(self, spec, prebuild): - """Writes rules to match MSVS's implicit idl handling.""" - assert self.flavor == 'win' - if self.msvs_settings.HasExplicitIdlRules(spec): - return [] - outputs = [] - for source in filter(lambda x: x.endswith('.idl'), spec['sources']): - self._WinIdlRule(source, prebuild, outputs) - return outputs - - def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, - mac_bundle_depends): - """Write out the Actions, Rules, and Copies steps. 
Return a path - representing the outputs of these steps.""" - outputs = [] - extra_mac_bundle_resources = [] - - if 'actions' in spec: - outputs += self.WriteActions(spec['actions'], extra_sources, prebuild, - extra_mac_bundle_resources) - if 'rules' in spec: - outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, - extra_mac_bundle_resources) - if 'copies' in spec: - outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends) - - if 'sources' in spec and self.flavor == 'win': - outputs += self.WriteWinIdlFiles(spec, prebuild) - - stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs) - - if self.is_mac_bundle: - mac_bundle_resources = spec.get('mac_bundle_resources', []) + \ - extra_mac_bundle_resources - self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends) - self.WriteMacInfoPlist(mac_bundle_depends) - - return stamp - - def GenerateDescription(self, verb, message, fallback): - """Generate and return a description of a build step. - - |verb| is the short summary, e.g. ACTION or RULE. - |message| is a hand-written description, or None if not available. - |fallback| is the gyp-level name of the step, usable as a fallback. - """ - if self.toolset != 'target': - verb += '(%s)' % self.toolset - if message: - return '%s %s' % (verb, self.ExpandSpecial(message)) - else: - return '%s %s: %s' % (verb, self.name, fallback) - - def WriteActions(self, actions, extra_sources, prebuild, - extra_mac_bundle_resources): - # Actions cd into the base directory. - env = self.GetSortedXcodeEnv() - if self.flavor == 'win': - env = self.msvs_settings.GetVSMacroEnv( - '$!PRODUCT_DIR', config=self.config_name) - all_outputs = [] - for action in actions: - # First write out a rule for the action. - name = '%s_%s' % (action['action_name'], - hashlib.md5(self.qualified_target).hexdigest()) - description = self.GenerateDescription('ACTION', - action.get('message', None), - name) - is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) - if self.flavor == 'win' else False) - args = action['action'] - rule_name, _ = self.WriteNewNinjaRule(name, args, description, - is_cygwin, env=env) - - inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] - if int(action.get('process_outputs_as_sources', False)): - extra_sources += action['outputs'] - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += action['outputs'] - outputs = [self.GypPathToNinja(o, env) for o in action['outputs']] - - # Then write out an edge using the rule. - self.ninja.build(outputs, rule_name, inputs, - order_only=prebuild) - all_outputs += outputs - - self.ninja.newline() - - return all_outputs - - def WriteRules(self, rules, extra_sources, prebuild, - extra_mac_bundle_resources): - env = self.GetSortedXcodeEnv() - all_outputs = [] - for rule in rules: - # First write out a rule for the rule action. - name = '%s_%s' % (rule['rule_name'], - hashlib.md5(self.qualified_target).hexdigest()) - # Skip a rule with no action and no inputs. 
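Action and rule names in `WriteActions`/`WriteRules` above are made unique across the whole build by suffixing an md5 digest of the qualified target (gyp file, target name, toolset), since the same action name can legitimately appear in many targets. Condensed, with the encode added for Python 3 (the original is Python 2 and hashes the str directly):

```python
import hashlib

def unique_rule_name(step_name, qualified_target):
    digest = hashlib.md5(qualified_target.encode('utf-8')).hexdigest()
    return '%s_%s' % (step_name, digest)

# Example qualified target string is illustrative, not from the source.
print(unique_rule_name('generate_version', 'src/app.gyp:app#target'))
```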
- if 'action' not in rule and not rule.get('rule_sources', []): - continue - args = rule['action'] - description = self.GenerateDescription( - 'RULE', - rule.get('message', None), - ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) - is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) - if self.flavor == 'win' else False) - rule_name, args = self.WriteNewNinjaRule( - name, args, description, is_cygwin, env=env) - - # TODO: if the command references the outputs directly, we should - # simplify it to just use $out. - - # Rules can potentially make use of some special variables which - # must vary per source file. - # Compute the list of variables we'll need to provide. - special_locals = ('source', 'root', 'dirname', 'ext', 'name') - needed_variables = set(['source']) - for argument in args: - for var in special_locals: - if ('${%s}' % var) in argument: - needed_variables.add(var) - - def cygwin_munge(path): - if is_cygwin: - return path.replace('\\', '/') - return path - - # For each source file, write an edge that generates all the outputs. - for source in rule.get('rule_sources', []): - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - - # Gather the list of inputs and outputs, expanding $vars if possible. - outputs = [self.ExpandRuleVariables(o, root, dirname, - source, ext, basename) - for o in rule['outputs']] - inputs = [self.ExpandRuleVariables(i, root, dirname, - source, ext, basename) - for i in rule.get('inputs', [])] - - if int(rule.get('process_outputs_as_sources', False)): - extra_sources += outputs - if int(rule.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - - extra_bindings = [] - for var in needed_variables: - if var == 'root': - extra_bindings.append(('root', cygwin_munge(root))) - elif var == 'dirname': - extra_bindings.append(('dirname', cygwin_munge(dirname))) - elif var == 'source': - # '$source' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - source_expanded = self.ExpandSpecial(source, self.base_to_build) - extra_bindings.append(('source', cygwin_munge(source_expanded))) - elif var == 'ext': - extra_bindings.append(('ext', ext)) - elif var == 'name': - extra_bindings.append(('name', cygwin_munge(basename))) - else: - assert var == None, repr(var) - - inputs = [self.GypPathToNinja(i, env) for i in inputs] - outputs = [self.GypPathToNinja(o, env) for o in outputs] - extra_bindings.append(('unique_name', - hashlib.md5(outputs[0]).hexdigest())) - self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), - implicit=inputs, - order_only=prebuild, - variables=extra_bindings) - - all_outputs.extend(outputs) - - return all_outputs - - def WriteCopies(self, copies, prebuild, mac_bundle_depends): - outputs = [] - env = self.GetSortedXcodeEnv() - for copy in copies: - for path in copy['files']: - # Normalize the path so trailing slashes don't confuse us. - path = os.path.normpath(path) - basename = os.path.split(path)[1] - src = self.GypPathToNinja(path, env) - dst = self.GypPathToNinja(os.path.join(copy['destination'], basename), - env) - outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) - if self.is_mac_bundle: - # gyp has mac_bundle_resources to copy things into a bundle's - # Resources folder, but there's no built-in way to copy files to other - # places in the bundle. Hence, some targets use copies for this. 
Check - # if this file is copied into the current bundle, and if so add it to - # the bundle depends so that dependent targets get rebuilt if the copy - # input changes. - if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()): - mac_bundle_depends.append(dst) - - return outputs - - def WriteMacBundleResources(self, resources, bundle_depends): - """Writes ninja edges for 'mac_bundle_resources'.""" - for output, res in gyp.xcode_emulation.GetMacBundleResources( - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.xcode_settings, map(self.GypPathToNinja, resources)): - self.ninja.build(output, 'mac_tool', res, - variables=[('mactool_cmd', 'copy-bundle-resource')]) - bundle_depends.append(output) - - def WriteMacInfoPlist(self, bundle_depends): - """Write build rules for bundle Info.plist files.""" - info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.xcode_settings, self.GypPathToNinja) - if not info_plist: - return - if defines: - # Create an intermediate file to store preprocessed results. - intermediate_plist = self.GypPathToUniqueOutput( - os.path.basename(info_plist)) - defines = ' '.join([Define(d, self.flavor) for d in defines]) - info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist, - variables=[('defines',defines)]) - - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - self.ninja.build(out, 'mac_tool', info_plist, - variables=[('mactool_cmd', 'copy-info-plist'), - ('env', env)]) - bundle_depends.append(out) - - def WriteSources(self, config_name, config, sources, predepends, - precompiled_header, case_sensitive_filesystem, spec): - """Write build rules to compile all of |sources|.""" - if self.toolset == 'host': - self.ninja.variable('ar', '$ar_host') - self.ninja.variable('cc', '$cc_host') - self.ninja.variable('cxx', '$cxx_host') - self.ninja.variable('ld', '$ld_host') - - extra_defines = [] - if self.flavor == 'mac': - cflags = self.xcode_settings.GetCflags(config_name) - cflags_c = self.xcode_settings.GetCflagsC(config_name) - cflags_cc = self.xcode_settings.GetCflagsCC(config_name) - cflags_objc = ['$cflags_c'] + \ - self.xcode_settings.GetCflagsObjC(config_name) - cflags_objcc = ['$cflags_cc'] + \ - self.xcode_settings.GetCflagsObjCC(config_name) - elif self.flavor == 'win': - cflags = self.msvs_settings.GetCflags(config_name) - cflags_c = self.msvs_settings.GetCflagsC(config_name) - cflags_cc = self.msvs_settings.GetCflagsCC(config_name) - extra_defines = self.msvs_settings.GetComputedDefines(config_name) - pdbpath = self.msvs_settings.GetCompilerPdbName( - config_name, self.ExpandSpecial) - if not pdbpath: - obj = 'obj' - if self.toolset != 'target': - obj += '.' 
+ self.toolset - pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, - self.name + '.pdb')) - self.WriteVariableList('pdbname', [pdbpath]) - self.WriteVariableList('pchprefix', [self.name]) - else: - cflags = config.get('cflags', []) - cflags_c = config.get('cflags_c', []) - cflags_cc = config.get('cflags_cc', []) - - defines = config.get('defines', []) + extra_defines - self.WriteVariableList('defines', [Define(d, self.flavor) for d in defines]) - if self.flavor == 'win': - self.WriteVariableList('rcflags', - [QuoteShellArgument(self.ExpandSpecial(f), self.flavor) - for f in self.msvs_settings.GetRcflags(config_name, - self.GypPathToNinja)]) - - include_dirs = config.get('include_dirs', []) - if self.flavor == 'win': - include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs, - config_name) - self.WriteVariableList('includes', - [QuoteShellArgument('-I' + self.GypPathToNinja(i), self.flavor) - for i in include_dirs]) - - pch_commands = precompiled_header.GetPchBuildCommands() - if self.flavor == 'mac': - self.WriteVariableList('cflags_pch_c', - [precompiled_header.GetInclude('c')]) - self.WriteVariableList('cflags_pch_cc', - [precompiled_header.GetInclude('cc')]) - self.WriteVariableList('cflags_pch_objc', - [precompiled_header.GetInclude('m')]) - self.WriteVariableList('cflags_pch_objcc', - [precompiled_header.GetInclude('mm')]) - - self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags)) - self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c)) - self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc)) - if self.flavor == 'mac': - self.WriteVariableList('cflags_objc', map(self.ExpandSpecial, - cflags_objc)) - self.WriteVariableList('cflags_objcc', map(self.ExpandSpecial, - cflags_objcc)) - self.ninja.newline() - outputs = [] - for source in sources: - filename, ext = os.path.splitext(source) - ext = ext[1:] - obj_ext = self.obj_ext - if ext in ('cc', 'cpp', 'cxx'): - command = 'cxx' - elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): - command = 'cc' - elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. - command = 'cc_s' - elif (self.flavor == 'win' and ext == 'asm' and - self.msvs_settings.GetArch(config_name) == 'x86' and - not self.msvs_settings.HasExplicitAsmRules(spec)): - # Asm files only get auto assembled for x86 (not x64). - command = 'asm' - # Add the _asm suffix as msvs is capable of handling .cc and - # .asm files of the same name without collision. - obj_ext = '_asm.obj' - elif self.flavor == 'mac' and ext == 'm': - command = 'objc' - elif self.flavor == 'mac' and ext == 'mm': - command = 'objcxx' - elif self.flavor == 'win' and ext == 'rc': - command = 'rc' - obj_ext = '.res' - else: - # Ignore unhandled extensions. - continue - input = self.GypPathToNinja(source) - output = self.GypPathToUniqueOutput(filename + obj_ext) - # Ninja's depfile handling gets confused when the case of a filename - # changes on a case-insensitive file system. To work around that, always - # convert .o filenames to lowercase on such file systems. See - # https://github.com/martine/ninja/issues/402 for details. 
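
The comment closing this hunk references ninja issue 402: on a case-insensitive filesystem, depfile entries for `Foo.o` and `foo.o` are distinct strings naming the same file, so the generator folds object paths to lowercase. A small sketch of both halves of the workaround, assuming `build.ninja` has already been created in the build directory (the probe mirrors the `BUILD.NINJA` existence check that appears later in this diff, in GenerateOutputForConfig):

```python
import os

def filesystem_is_case_sensitive(build_dir):
    # build.ninja exists by the time this runs, so probing for the
    # uppercase name succeeds only on a case-insensitive filesystem.
    return not os.path.exists(os.path.join(build_dir, 'BUILD.NINJA'))

def object_path(path, case_sensitive):
    # Ninja's depfile handling treats 'Foo.o' and 'foo.o' as different
    # strings, so fold to lowercase where the OS would not distinguish them.
    return path if case_sensitive else path.lower()

print(object_path('obj/Foo/Bar.o', case_sensitive=False))  # -> obj/foo/bar.o
```
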
- if not case_sensitive_filesystem: - output = output.lower() - implicit = precompiled_header.GetObjDependencies([input], [output]) - variables = [] - if self.flavor == 'win': - variables, output, implicit = precompiled_header.GetFlagsModifications( - input, output, implicit, command, cflags_c, cflags_cc, - self.ExpandSpecial) - self.ninja.build(output, command, input, - implicit=[gch for _, _, gch in implicit], - order_only=predepends, variables=variables) - outputs.append(output) - - self.WritePchTargets(pch_commands) - - self.ninja.newline() - return outputs - - def WritePchTargets(self, pch_commands): - """Writes ninja rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - var_name = { - 'c': 'cflags_pch_c', - 'cc': 'cflags_pch_cc', - 'm': 'cflags_pch_objc', - 'mm': 'cflags_pch_objcc', - }[lang] - - map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', } - cmd = map.get(lang) - self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)]) - - def WriteLink(self, spec, config_name, config, link_deps): - """Write out a link step. Fills out target.binary. """ - - command = { - 'executable': 'link', - 'loadable_module': 'solink_module', - 'shared_library': 'solink', - }[spec['type']] - - implicit_deps = set() - solibs = set() - - if 'dependencies' in spec: - # Two kinds of dependencies: - # - Linkable dependencies (like a .a or a .so): add them to the link line. - # - Non-linkable dependencies (like a rule that generates a file - # and writes a stamp file): add them to implicit_deps - extra_link_deps = set() - for dep in spec['dependencies']: - target = self.target_outputs.get(dep) - if not target: - continue - linkable = target.Linkable() - if linkable: - if (self.flavor == 'win' and - target.component_objs and - self.msvs_settings.IsUseLibraryDependencyInputs(config_name)): - extra_link_deps |= set(target.component_objs) - elif self.flavor == 'win' and target.import_lib: - extra_link_deps.add(target.import_lib) - elif target.UsesToc(self.flavor): - solibs.add(target.binary) - implicit_deps.add(target.binary + '.TOC') - else: - extra_link_deps.add(target.binary) - - final_output = target.FinalOutput() - if not linkable or final_output != target.binary: - implicit_deps.add(final_output) - - link_deps.extend(list(extra_link_deps)) - - extra_bindings = [] - if self.is_mac_bundle: - output = self.ComputeMacBundleBinaryOutput() - else: - output = self.ComputeOutput(spec) - extra_bindings.append(('postbuilds', - self.GetPostbuildCommand(spec, output, output))) - - is_executable = spec['type'] == 'executable' - if self.flavor == 'mac': - ldflags = self.xcode_settings.GetLdflags(config_name, - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.GypPathToNinja) - elif self.flavor == 'win': - manifest_name = self.GypPathToUniqueOutput( - self.ComputeOutputFileName(spec)) - ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name, - self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable) - self.WriteVariableList('manifests', manifest_files) - else: - ldflags = config.get('ldflags', []) - if is_executable and len(solibs): - ldflags.append('-Wl,-rpath=\$$ORIGIN/lib/') - ldflags.append('-Wl,-rpath-link=lib/') - self.WriteVariableList('ldflags', - gyp.common.uniquer(map(self.ExpandSpecial, - ldflags))) - - libraries = gyp.common.uniquer(map(self.ExpandSpecial, - spec.get('libraries', []))) - if self.flavor == 'mac': - libraries = self.xcode_settings.AdjustLibraries(libraries) - 
elif self.flavor == 'win': - libraries = self.msvs_settings.AdjustLibraries(libraries) - self.WriteVariableList('libs', libraries) - - self.target.binary = output - - if command in ('solink', 'solink_module'): - extra_bindings.append(('soname', os.path.split(output)[1])) - extra_bindings.append(('lib', - gyp.common.EncodePOSIXShellArgument(output))) - if self.flavor == 'win': - extra_bindings.append(('dll', output)) - if '/NOENTRY' not in ldflags: - self.target.import_lib = output + '.lib' - extra_bindings.append(('implibflag', - '/IMPLIB:%s' % self.target.import_lib)) - output = [output, self.target.import_lib] - else: - output = [output, output + '.TOC'] - - if len(solibs): - extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) - - self.ninja.build(output, command, link_deps, - implicit=list(implicit_deps), - variables=extra_bindings) - - def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): - if spec['type'] == 'none': - # TODO(evan): don't call this function for 'none' target types, as - # it doesn't do anything, and we fake out a 'binary' with a stamp file. - self.target.binary = compile_deps - elif spec['type'] == 'static_library': - self.target.binary = self.ComputeOutput(spec) - variables = [] - postbuild = self.GetPostbuildCommand( - spec, self.target.binary, self.target.binary) - if postbuild: - variables.append(('postbuilds', postbuild)) - if self.xcode_settings: - variables.append(('libtool_flags', - self.xcode_settings.GetLibtoolflags(config_name))) - if (self.flavor not in ('mac', 'win') and not - self.is_standalone_static_library): - self.ninja.build(self.target.binary, 'alink_thin', link_deps, - order_only=compile_deps, variables=variables) - else: - if self.msvs_settings: - libflags = self.msvs_settings.GetLibFlags(config_name, - self.GypPathToNinja) - variables.append(('libflags', libflags)) - self.ninja.build(self.target.binary, 'alink', link_deps, - order_only=compile_deps, variables=variables) - else: - self.WriteLink(spec, config_name, config, link_deps) - return self.target.binary - - def WriteMacBundle(self, spec, mac_bundle_depends): - assert self.is_mac_bundle - package_framework = spec['type'] in ('shared_library', 'loadable_module') - output = self.ComputeMacBundleOutput() - postbuild = self.GetPostbuildCommand(spec, output, self.target.binary, - is_command_start=not package_framework) - variables = [] - if postbuild: - variables.append(('postbuilds', postbuild)) - if package_framework: - variables.append(('version', self.xcode_settings.GetFrameworkVersion())) - self.ninja.build(output, 'package_framework', mac_bundle_depends, - variables=variables) - else: - self.ninja.build(output, 'stamp', mac_bundle_depends, - variables=variables) - self.target.bundle = output - return output - - def GetSortedXcodeEnv(self, additional_settings=None): - """Returns the variables Xcode would set for build steps.""" - assert self.abs_build_dir - abs_build_dir = self.abs_build_dir - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, abs_build_dir, - os.path.join(abs_build_dir, self.build_to_base), self.config_name, - additional_settings) - - def GetSortedXcodePostbuildEnv(self): - """Returns the variables Xcode would set for postbuild steps.""" - postbuild_settings = {} - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. 
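
For shared-library links, the hunk above collects per-edge ninja bindings: a `soname` everywhere, plus a DLL import library on Windows (skipped under `/NOENTRY`) and a `.TOC` sibling file elsewhere. A rough standalone model of that bookkeeping; `solink_bindings` is a hypothetical helper, not gyp API:

```python
import os

def solink_bindings(output, flavor, noentry=False):
    # Per-edge ninja variables for a shared-library link. On Windows the
    # linker also emits an import library next to the DLL unless /NOENTRY
    # was passed; on other platforms a .TOC file rides along so dependents
    # can avoid relinking when only non-public symbols change.
    bindings = [('soname', os.path.split(output)[1])]
    outputs = [output]
    if flavor == 'win':
        bindings.append(('dll', output))
        if not noentry:
            implib = output + '.lib'
            bindings.append(('implibflag', '/IMPLIB:%s' % implib))
            outputs.append(implib)
    else:
        outputs.append(output + '.TOC')
    return outputs, bindings

print(solink_bindings('out/Debug/libfoo.so', 'linux'))
```
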
- strip_save_file = self.xcode_settings.GetPerTargetSetting( - 'CHROMIUM_STRIP_SAVE_FILE') - if strip_save_file: - postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file - return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) - - def GetPostbuildCommand(self, spec, output, output_binary, - is_command_start=False): - """Returns a shell command that runs all the postbuilds, and removes - |output| if any of them fails. If |is_command_start| is False, then the - returned string will start with ' && '.""" - if not self.xcode_settings or spec['type'] == 'none' or not output: - return '' - output = QuoteShellArgument(output, self.flavor) - target_postbuilds = self.xcode_settings.GetTargetPostbuilds( - self.config_name, - os.path.normpath(os.path.join(self.base_to_build, output)), - QuoteShellArgument( - os.path.normpath(os.path.join(self.base_to_build, output_binary)), - self.flavor), - quiet=True) - postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) - postbuilds = target_postbuilds + postbuilds - if not postbuilds: - return '' - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert(0, gyp.common.EncodePOSIXShellList( - ['cd', self.build_to_base])) - env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) - # G will be non-null if any postbuild fails. Run all postbuilds in a - # subshell. - commands = env + ' (' + \ - ' && '.join([ninja_syntax.escape(command) for command in postbuilds]) - command_string = (commands + '); G=$$?; ' - # Remove the final output if any postbuild failed. - '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)') - if is_command_start: - return '(' + command_string + ' && ' - else: - return '$ && (' + command_string - - def ComputeExportEnvString(self, env): - """Given an environment, returns a string looking like - 'export FOO=foo; export BAR="${FOO} bar;' - that exports |env| to the shell.""" - export_str = [] - for k, v in env: - export_str.append('export %s=%s;' % - (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v)))) - return ' '.join(export_str) - - def ComputeMacBundleOutput(self): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']) - return os.path.join(path, self.xcode_settings.GetWrapperName()) - - def ComputeMacBundleBinaryOutput(self): - """Return the 'output' (full output path) to the binary in a bundle.""" - assert self.is_mac_bundle - path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']) - return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - def ComputeOutputFileName(self, spec, type=None): - """Compute the filename of the final output for the current target.""" - if not type: - type = spec['type'] - - default_variables = copy.copy(generator_default_variables) - CalculateVariables(default_variables, {'flavor': self.flavor}) - - # Compute filename prefix: the product prefix, or a default for - # the product type. - DEFAULT_PREFIX = { - 'loadable_module': default_variables['SHARED_LIB_PREFIX'], - 'shared_library': default_variables['SHARED_LIB_PREFIX'], - 'static_library': default_variables['STATIC_LIB_PREFIX'], - 'executable': default_variables['EXECUTABLE_PREFIX'], - } - prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, '')) - - # Compute filename extension: the product extension, or a default - # for the product type. 
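
GetPostbuildCommand in the hunk above wraps all postbuild steps in one subshell and deletes the output if any step fails, so a half-written binary never survives a failed postbuild. A simplified reconstruction of the string it assembles, with ninja's `$$` escaping reduced to plain `$` for readability:

```python
def postbuild_command(env_exports, postbuilds, output, is_command_start=False):
    # Run every postbuild inside a single subshell, capture that subshell's
    # exit status in G, remove the possibly half-written |output| on
    # failure, and propagate the status to the caller.
    commands = env_exports + ' (' + ' && '.join(postbuilds)
    command_string = (commands +
                      '); G=$?; ((exit $G) || rm -rf %s) && exit $G)' % output)
    if is_command_start:
        # Open the parenthesis that command_string's trailing ')' closes.
        return '(' + command_string + ' && '
    # '$ ' is a ninja-escaped space when spliced after the link command.
    return '$ && (' + command_string

print(postbuild_command('export FOO=bar;', ['cd base', 'strip app'], 'app',
                        is_command_start=True))
```
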
- DEFAULT_EXTENSION = { - 'loadable_module': default_variables['SHARED_LIB_SUFFIX'], - 'shared_library': default_variables['SHARED_LIB_SUFFIX'], - 'static_library': default_variables['STATIC_LIB_SUFFIX'], - 'executable': default_variables['EXECUTABLE_SUFFIX'], - } - extension = spec.get('product_extension') - if extension: - extension = '.' + extension - else: - extension = DEFAULT_EXTENSION.get(type, '') - - if 'product_name' in spec: - # If we were given an explicit name, use that. - target = spec['product_name'] - else: - # Otherwise, derive a name from the target name. - target = spec['target_name'] - if prefix == 'lib': - # Snip out an extra 'lib' from libs if appropriate. - target = StripPrefix(target, 'lib') - - if type in ('static_library', 'loadable_module', 'shared_library', - 'executable'): - return '%s%s%s' % (prefix, target, extension) - elif type == 'none': - return '%s.stamp' % target - else: - raise Exception('Unhandled output type %s' % type) - - def ComputeOutput(self, spec, type=None): - """Compute the path for the final output of the spec.""" - assert not self.is_mac_bundle or type - - if not type: - type = spec['type'] - - if self.flavor == 'win': - override = self.msvs_settings.GetOutputName(self.config_name, - self.ExpandSpecial) - if override: - return override - - if self.flavor == 'mac' and type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module'): - filename = self.xcode_settings.GetExecutablePath() - else: - filename = self.ComputeOutputFileName(spec, type) - - if 'product_dir' in spec: - path = os.path.join(spec['product_dir'], filename) - return self.ExpandSpecial(path) - - # Some products go into the output root, libraries go into shared library - # dir, and everything else goes into the normal place. - type_in_output_root = ['executable', 'loadable_module'] - if self.flavor == 'mac' and self.toolset == 'target': - type_in_output_root += ['shared_library', 'static_library'] - elif self.flavor == 'win' and self.toolset == 'target': - type_in_output_root += ['shared_library'] - - if type in type_in_output_root or self.is_standalone_static_library: - return filename - elif type == 'shared_library': - libdir = 'lib' - if self.toolset != 'target': - libdir = os.path.join('lib', '%s' % self.toolset) - return os.path.join(libdir, filename) - else: - return self.GypPathToUniqueOutput(filename, qualified=False) - - def WriteVariableList(self, var, values): - assert not isinstance(values, str) - if values is None: - values = [] - self.ninja.variable(var, ' '.join(values)) - - def WriteNewNinjaRule(self, name, args, description, is_cygwin, env): - """Write out a new ninja "rule" statement for a given command. - - Returns the name of the new rule, and a copy of |args| with variables - expanded.""" - - if self.flavor == 'win': - args = [self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name) - for arg in args] - description = self.msvs_settings.ConvertVSMacros( - description, config=self.config_name) - elif self.flavor == 'mac': - # |env| is an empty list on non-mac. - args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] - description = gyp.xcode_emulation.ExpandEnvVars(description, env) - - # TODO: we shouldn't need to qualify names; we do it because - # currently the ninja rule namespace is global, but it really - # should be scoped to the subninja. - rule_name = self.name - if self.toolset == 'target': - rule_name += '.' + self.toolset - rule_name += '.' 
+ name - rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) - - # Remove variable references, but not if they refer to the magic rule - # variables. This is not quite right, as it also protects these for - # actions, not just for rules where they are valid. Good enough. - protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ] - protect = '(?!' + '|'.join(map(re.escape, protect)) + ')' - description = re.sub(protect + r'\$', '_', description) - - # gyp dictates that commands are run from the base directory. - # cd into the directory before running, and adjust paths in - # the arguments to point to the proper locations. - rspfile = None - rspfile_content = None - args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] - if self.flavor == 'win': - rspfile = rule_name + '.$unique_name.rsp' - # The cygwin case handles this inside the bash sub-shell. - run_in = '' if is_cygwin else ' ' + self.build_to_base - if is_cygwin: - rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( - args, self.build_to_base) - else: - rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) - command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable + - rspfile + run_in) - else: - env = self.ComputeExportEnvString(env) - command = gyp.common.EncodePOSIXShellList(args) - command = 'cd %s; ' % self.build_to_base + env + command - - # GYP rules/actions express being no-ops by not touching their outputs. - # Avoid executing downstream dependencies in this case by specifying - # restat=1 to ninja. - self.ninja.rule(rule_name, command, description, restat=True, - rspfile=rspfile, rspfile_content=rspfile_content) - self.ninja.newline() - - return rule_name, args - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - global generator_additional_non_configuration_keys - global generator_additional_path_sections - flavor = gyp.common.GetFlavor(params) - if flavor == 'mac': - default_variables.setdefault('OS', 'mac') - default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') - default_variables.setdefault('SHARED_LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - default_variables.setdefault('LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Ninja generator. - import gyp.generator.xcode as xcode_generator - generator_additional_non_configuration_keys = getattr(xcode_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(xcode_generator, - 'generator_additional_path_sections', []) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr(xcode_generator, - 'generator_extra_sources_for_rules', []) - elif flavor == 'win': - default_variables.setdefault('OS', 'win') - default_variables['EXECUTABLE_SUFFIX'] = '.exe' - default_variables['STATIC_LIB_PREFIX'] = '' - default_variables['STATIC_LIB_SUFFIX'] = '.lib' - default_variables['SHARED_LIB_PREFIX'] = '' - default_variables['SHARED_LIB_SUFFIX'] = '.dll' - generator_flags = params.get('generator_flags', {}) - - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. 
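
A subtle detail in WriteNewNinjaRule above: a literal `$` in a rule description is neutralized unless it introduces one of the per-source magic variables, via a negative lookahead. The substitution can be reproduced on its own:

```python
import re

def protect_description(description):
    # Replace '$' with '_' unless it starts one of the magic per-source
    # rule variables, which ninja must still expand at build time.
    protect = ['${root}', '${dirname}', '${source}', '${ext}', '${name}']
    guard = '(?!' + '|'.join(map(re.escape, protect)) + ')'
    return re.sub(guard + r'\$', '_', description)

print(protect_description('RULE ${source} costs $5'))
# -> 'RULE ${source} costs _5'
```
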
- import gyp.generator.msvs as msvs_generator - generator_additional_non_configuration_keys = getattr(msvs_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(msvs_generator, - 'generator_additional_path_sections', []) - - # Set a variable so conditions can be based on msvs_version. - msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or - '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - else: - operating_system = flavor - if flavor == 'android': - operating_system = 'linux' # Keep this legacy behavior for now. - default_variables.setdefault('OS', operating_system) - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') - default_variables.setdefault('SHARED_LIB_DIR', - os.path.join('$!PRODUCT_DIR', 'lib')) - default_variables.setdefault('LIB_DIR', - os.path.join('$!PRODUCT_DIR', 'obj')) - - -def OpenOutput(path, mode='w'): - """Open |path| for writing, creating directories if necessary.""" - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - return open(path, mode) - - -def CommandWithWrapper(cmd, wrappers, prog): - wrapper = wrappers.get(cmd, '') - if wrapper: - return wrapper + ' ' + prog - return prog - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name): - options = params['options'] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get('generator_flags', {}) - - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to ninja easier, ninja doesn't put anything here. - generator_dir = os.path.relpath(params['options'].generator_output or '.') - - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get('output_dir', 'out') - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(generator_dir, - output_dir, - config_name)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - master_ninja = ninja_syntax.Writer( - OpenOutput(os.path.join(toplevel_build, 'build.ninja')), - width=120) - case_sensitive_filesystem = not os.path.exists( - os.path.join(toplevel_build, 'BUILD.NINJA')) - - # Put build-time support tools in out/{config_name}. - gyp.common.CopyTool(flavor, toplevel_build) - - # Grab make settings for CC/CXX. - # The rules are - # - The priority from low to high is gcc/g++, the 'make_global_settings' in - # gyp, the environment variable. - # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set - # to cc/cxx.
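
The word-size probe in this hunk consults PROCESSOR_ARCHITEW6432 as well as PROCESSOR_ARCHITECTURE because a 32-bit process under WOW64 sees a 32-bit PROCESSOR_ARCHITECTURE even on 64-bit Windows. As a standalone check (`msvs_os_bits` is an illustrative name):

```python
import os

def msvs_os_bits(environ=os.environ):
    # PROCESSOR_ARCHITECTURE reflects the current process's word size; a
    # 32-bit process on 64-bit Windows reports x86 there but exposes the
    # real architecture through PROCESSOR_ARCHITEW6432 (set under WOW64).
    if ('64' in environ.get('PROCESSOR_ARCHITECTURE', '') or
        '64' in environ.get('PROCESSOR_ARCHITEW6432', '')):
        return 64
    return 32

print(msvs_os_bits({'PROCESSOR_ARCHITECTURE': 'x86',
                    'PROCESSOR_ARCHITEW6432': 'AMD64'}))  # -> 64
```
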
- if flavor == 'win': - cc = 'cl.exe' - cxx = 'cl.exe' - ld = 'link.exe' - gyp.msvs_emulation.GenerateEnvironmentFiles( - toplevel_build, generator_flags, OpenOutput) - ld_host = '$ld' - else: - cc = 'gcc' - cxx = 'g++' - ld = '$cxx' - ld_host = '$cxx_host' - - cc_host = None - cxx_host = None - cc_host_global_setting = None - cxx_host_global_setting = None - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings = data[build_file].get('make_global_settings', []) - build_to_root = gyp.common.InvertRelativePath(build_dir, - options.toplevel_dir) - flock = 'flock' - if flavor == 'mac': - flock = './gyp-mac-tool flock' - wrappers = {} - if flavor != 'win': - wrappers['LINK'] = flock + ' linker.lock' - for key, value in make_global_settings: - if key == 'CC': - cc = os.path.join(build_to_root, value) - if key == 'CXX': - cxx = os.path.join(build_to_root, value) - if key == 'LD': - ld = os.path.join(build_to_root, value) - if key == 'CC.host': - cc_host = os.path.join(build_to_root, value) - cc_host_global_setting = value - if key == 'CXX.host': - cxx_host = os.path.join(build_to_root, value) - cxx_host_global_setting = value - if key == 'LD.host': - ld_host = os.path.join(build_to_root, value) - if key.endswith('_wrapper'): - wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) - - cc = GetEnvironFallback(['CC_target', 'CC'], cc) - master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc)) - cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx) - master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx)) - ld = GetEnvironFallback(['LD_target', 'LD'], ld) - - if not cc_host: - cc_host = cc - if not cxx_host: - cxx_host = cxx - - if flavor == 'win': - master_ninja.variable('ld', ld) - master_ninja.variable('idl', 'midl.exe') - master_ninja.variable('ar', 'lib.exe') - master_ninja.variable('rc', 'rc.exe') - master_ninja.variable('asm', 'ml.exe') - master_ninja.variable('mt', 'mt.exe') - master_ninja.variable('use_dep_database', '1') - else: - master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld)) - master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar')) - - master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar')) - cc_host = GetEnvironFallback(['CC_host'], cc_host) - cxx_host = GetEnvironFallback(['CXX_host'], cxx_host) - ld_host = GetEnvironFallback(['LD_host'], ld_host) - - # The environment variable could be used in 'make_global_settings', like - # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here. 
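
The toolchain setup above implements the precedence stated in the earlier comment: built-in gcc/g++ defaults are overridden by 'make_global_settings', which in turn lose to environment variables. A compact model of that resolution; `environ_fallback` mirrors gyp.common.GetEnvironFallback, while the three-step driver below it is illustrative:

```python
import os

def environ_fallback(names, default, environ=os.environ):
    # Return the first of |names| that is set in the environment, else the
    # supplied default (mirrors gyp.common.GetEnvironFallback).
    for name in names:
        if name in environ:
            return environ[name]
    return default

settings = dict([('CC', 'clang')])           # parsed make_global_settings
cc = settings.get('CC', 'gcc')               # 'gcc' is the built-in default
cc = environ_fallback(['CC_target', 'CC'], cc,
                      environ={'CC': 'ccache gcc'})
print(cc)  # -> 'ccache gcc': the environment variable wins
```
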
- if '$(CC)' in cc_host and cc_host_global_setting: - cc_host = cc_host_global_setting.replace('$(CC)', cc) - if '$(CXX)' in cxx_host and cxx_host_global_setting: - cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx) - master_ninja.variable('cc_host', - CommandWithWrapper('CC.host', wrappers, cc_host)) - master_ninja.variable('cxx_host', - CommandWithWrapper('CXX.host', wrappers, cxx_host)) - if flavor == 'win': - master_ninja.variable('ld_host', ld_host) - else: - master_ninja.variable('ld_host', CommandWithWrapper( - 'LINK', wrappers, ld_host)) - - master_ninja.newline() - - if flavor != 'win': - master_ninja.rule( - 'cc', - description='CC $out', - command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' - '$cflags_pch_c -c $in -o $out'), - depfile='$out.d') - master_ninja.rule( - 'cc_s', - description='CC $out', - command=('$cc $defines $includes $cflags $cflags_c ' - '$cflags_pch_c -c $in -o $out')) - master_ninja.rule( - 'cxx', - description='CXX $out', - command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc ' - '$cflags_pch_cc -c $in -o $out'), - depfile='$out.d') - else: - cc_command = ('ninja -t msvc -o $out -e $arch ' - '-- ' - '$cc /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname ') - cxx_command = ('ninja -t msvc -o $out -e $arch ' - '-- ' - '$cxx /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname ') - master_ninja.rule( - 'cc', - description='CC $out', - command=cc_command, - depfile='$out.d', - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_c') - master_ninja.rule( - 'cxx', - description='CXX $out', - command=cxx_command, - depfile='$out.d', - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_cc') - master_ninja.rule( - 'idl', - description='IDL $in', - command=('%s gyp-win-tool midl-wrapper $arch $outdir ' - '$tlb $h $dlldata $iid $proxy $in ' - '$idlflags' % sys.executable)) - master_ninja.rule( - 'rc', - description='RC $in', - # Note: $in must be last otherwise rc.exe complains. - command=('%s gyp-win-tool rc-wrapper ' - '$arch $rc $defines $includes $rcflags /fo$out $in' % - sys.executable)) - master_ninja.rule( - 'asm', - description='ASM $in', - command=('%s gyp-win-tool asm-wrapper ' - '$arch $asm $defines $includes /c /Fo $out $in' % - sys.executable)) - - if flavor != 'mac' and flavor != 'win': - master_ninja.rule( - 'alink', - description='AR $out', - command='rm -f $out && $ar rcs $out $in') - master_ninja.rule( - 'alink_thin', - description='AR $out', - command='rm -f $out && $ar rcsT $out $in') - - # This allows targets that only need to depend on $lib's API to declare an - # order-only dependency on $lib.TOC and avoid relinking such downstream - # dependencies when $lib changes only in non-public ways. - # The resulting string leaves an uninterpolated %{suffix} which - # is used in the final substitution below. - mtime_preserving_solink_base = ( - 'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then ' - '%(solink)s && %(extract_toc)s > ${lib}.TOC; else ' - '%(solink)s && %(extract_toc)s > ${lib}.tmp && ' - 'if ! 
cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; ' - 'fi; fi' - % { 'solink': - '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s', - 'extract_toc': - ('{ readelf -d ${lib} | grep SONAME ; ' - 'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')}) - - master_ninja.rule( - 'solink', - description='SOLINK $lib', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive ' - '$libs'})) - master_ninja.rule( - 'solink_module', - description='SOLINK(module) $lib', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group $libs'})) - master_ninja.rule( - 'link', - description='LINK $out', - command=('$ld $ldflags -o $out ' - '-Wl,--start-group $in $solibs -Wl,--end-group $libs')) - elif flavor == 'win': - master_ninja.rule( - 'alink', - description='LIB $out', - command=('%s gyp-win-tool link-wrapper $arch ' - '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % - sys.executable), - rspfile='$out.rsp', - rspfile_content='$in_newline $libflags') - dlldesc = 'LINK(DLL) $dll' - dllcmd = ('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo $implibflag /DLL /OUT:$dll ' - '/PDB:$dll.pdb @$dll.rsp' % sys.executable) - dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' - 'cmd /c if exist $dll.manifest del $dll.manifest' % - sys.executable) - dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' - '$mt -nologo -manifest $manifests -out:$dll.manifest' % - sys.executable) - master_ninja.rule('solink', description=dlldesc, command=dllcmd, - rspfile='$dll.rsp', - rspfile_content='$libs $in_newline $ldflags', - restat=True) - master_ninja.rule('solink_module', description=dlldesc, command=dllcmd, - rspfile='$dll.rsp', - rspfile_content='$libs $in_newline $ldflags', - restat=True) - # Note that ldflags goes at the end so that it has the option of - # overriding default settings earlier in the command line. - master_ninja.rule( - 'link', - description='LINK $out', - command=('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && ' - '%s gyp-win-tool manifest-wrapper $arch ' - 'cmd /c if exist $out.manifest del $out.manifest && ' - '%s gyp-win-tool manifest-wrapper $arch ' - '$mt -nologo -manifest $manifests -out:$out.manifest' % - (sys.executable, sys.executable, sys.executable)), - rspfile='$out.rsp', - rspfile_content='$in_newline $libs $ldflags') - else: - master_ninja.rule( - 'objc', - description='OBJC $out', - command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc ' - '$cflags_pch_objc -c $in -o $out'), - depfile='$out.d') - master_ninja.rule( - 'objcxx', - description='OBJCXX $out', - command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc ' - '$cflags_pch_objcc -c $in -o $out'), - depfile='$out.d') - master_ninja.rule( - 'alink', - description='LIBTOOL-STATIC $out, POSTBUILDS', - command='rm -f $out && ' - './gyp-mac-tool filter-libtool libtool $libtool_flags ' - '-static -o $out $in' - '$postbuilds') - - # Record the public interface of $lib in $lib.TOC. See the corresponding - # comment in the posix section above for details. - mtime_preserving_solink_base = ( - 'if [ ! -e $lib -o ! -e ${lib}.TOC ] || ' - # Always force dependent targets to relink if this library - # reexports something. Handling this correctly would require - # recursive TOC dumping but this is rare in practice, so punt. 
- 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then ' - '%(solink)s && %(extract_toc)s > ${lib}.TOC; ' - 'else ' - '%(solink)s && %(extract_toc)s > ${lib}.tmp && ' - 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then ' - 'mv ${lib}.tmp ${lib}.TOC ; ' - 'fi; ' - 'fi' - % { 'solink': '$ld -shared $ldflags -o $lib %(suffix)s', - 'extract_toc': - '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' - 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}) - - # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass - # -bundle -single_module here (for osmesa.so). - master_ninja.rule( - 'solink', - description='SOLINK $lib, POSTBUILDS', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '$in $solibs $libs$postbuilds'})) - master_ninja.rule( - 'solink_module', - description='SOLINK(module) $lib, POSTBUILDS', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '$in $solibs $libs$postbuilds'})) - - master_ninja.rule( - 'link', - description='LINK $out, POSTBUILDS', - command=('$ld $ldflags -o $out ' - '$in $solibs $libs$postbuilds')) - master_ninja.rule( - 'infoplist', - description='INFOPLIST $out', - command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && ' - 'plutil -convert xml1 $out $out')) - master_ninja.rule( - 'mac_tool', - description='MACTOOL $mactool_cmd $in', - command='$env ./gyp-mac-tool $mactool_cmd $in $out') - master_ninja.rule( - 'package_framework', - description='PACKAGE FRAMEWORK $out, POSTBUILDS', - command='./gyp-mac-tool package-framework $out $version$postbuilds ' - '&& touch $out') - if flavor == 'win': - master_ninja.rule( - 'stamp', - description='STAMP $out', - command='%s gyp-win-tool stamp $out' % sys.executable) - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) - else: - master_ninja.rule( - 'stamp', - description='STAMP $out', - command='${postbuilds}touch $out') - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='rm -rf $out && cp -af $in $out') - master_ninja.newline() - - all_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, - target_dicts, - os.path.normpath(build_file)): - all_targets.add(target) - all_outputs = set() - - # target_outputs is a map from qualified target name to a Target object. - target_outputs = {} - # target_short_names is a map from target short name to a list of Target - # objects. - target_short_names = {} - for qualified_target in target_list: - # qualified_target is like: third_party/icu/icu.gyp:icui18n#target - build_file, name, toolset = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get('make_global_settings', []) - assert make_global_settings == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets.") - - spec = target_dicts[qualified_target] - if flavor == 'mac': - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) - - base_path = os.path.dirname(build_file) - obj = 'obj' - if toolset != 'target': - obj += '.' 
+ toolset - output_file = os.path.join(obj, base_path, name + '.ninja') - - abs_build_dir = os.path.abspath(toplevel_build) - writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir, - OpenOutput(os.path.join(toplevel_build, output_file)), - flavor, toplevel_dir=options.toplevel_dir) - master_ninja.subninja(output_file) - - target = writer.WriteSpec( - spec, config_name, generator_flags, case_sensitive_filesystem) - if target: - if name != target.FinalOutput() and spec['toolset'] == 'target': - target_short_names.setdefault(name, []).append(target) - target_outputs[qualified_target] = target - if qualified_target in all_targets: - all_outputs.add(target.FinalOutput()) - - if target_short_names: - # Write a short name to build this target. This benefits both the - # "build chrome" case as well as the gyp tests, which expect to be - # able to run actions and build libraries by their short name. - master_ninja.newline() - master_ninja.comment('Short names for targets.') - for short_name in target_short_names: - master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in - target_short_names[short_name]]) - - if all_outputs: - master_ninja.newline() - master_ninja.build('all', 'phony', list(all_outputs)) - master_ninja.default(generator_flags.get('default_target', 'all')) - - -def PerformBuild(data, configurations, params): - options = params['options'] - for config in configurations: - builddir = os.path.join(options.toplevel_dir, 'out', config) - arguments = ['ninja', '-C', builddir] - print 'Building [%s]: %s' % (config, arguments) - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - (target_list, target_dicts, data, params, config_name) = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - user_config = params.get('generator_flags', {}).get('config', None) - if gyp.common.GetFlavor(params) == 'win': - target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) - target_list, target_dicts = MSVSUtil.InsertLargePdbShims( - target_list, target_dicts, generator_default_variables) - - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, - user_config) - else: - config_names = target_dicts[target_list[0]]['configurations'].keys() - if params['parallel']: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name)) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the ninja.py file. """ - -import gyp.generator.ninja as ninja -import unittest -import StringIO -import sys -import TestCommon - - -class TestPrefixesAndSuffixes(unittest.TestCase): - if sys.platform in ('win32', 'cygwin'): - def test_BinaryNamesWindows(self): - writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'win') - spec = { 'target_name': 'wee' } - self.assertTrue(writer.ComputeOutputFileName(spec, 'executable'). - endswith('.exe')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - endswith('.dll')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - endswith('.lib')) - - if sys.platform == 'linux2': - def test_BinaryNamesLinux(self): - writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'linux') - spec = { 'target_name': 'wee' } - self.assertTrue('.' not in writer.ComputeOutputFileName(spec, - 'executable')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - startswith('lib')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - startswith('lib')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - endswith('.so')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - endswith('.a')) - -if __name__ == '__main__': - unittest.main() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/scons.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/scons.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/scons.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/scons.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1072 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import gyp -import gyp.common -import gyp.SCons as SCons -import os.path -import pprint -import re -import subprocess - - -# TODO: remove when we delete the last WriteList() call in this module -WriteList = SCons.WriteList - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': '${LIBPREFIX}', - 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}', - 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}', - 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}', - 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}', - 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}', - 'OS': 'linux', - 'PRODUCT_DIR': '$TOP_BUILDDIR', - 'SHARED_LIB_DIR': '$LIB_DIR', - 'LIB_DIR': '$LIB_DIR', - 'RULE_INPUT_ROOT': '${SOURCE.filebase}', - 'RULE_INPUT_DIRNAME': '${SOURCE.dir}', - 'RULE_INPUT_EXT': '${SOURCE.suffix}', - 'RULE_INPUT_NAME': '${SOURCE.file}', - 'RULE_INPUT_PATH': '${SOURCE.abspath}', - 'CONFIGURATION_NAME': '${CONFIG_NAME}', -} - -# Tell GYP how to process the input for us. -generator_handles_variants = True -generator_wants_absolute_build_file_paths = True - - -def FixPath(path, prefix): - if not os.path.isabs(path) and not path[0] == '$': - path = prefix + path - return path - - -header = """\ -# This file is generated; do not edit. 
-""" - - -_alias_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Alias( - ['_%(target_name)s_action'], - %(inputs)s, - _action -) -env.AlwaysBuild(_outputs) -""" - -_run_as_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -""" - -_run_as_template_suffix = """ -_run_as_target = env.Alias('run_%(target_name)s', target_files, _action) -env.Requires(_run_as_target, [ - Alias('%(target_name)s'), -]) -env.AlwaysBuild(_run_as_target) -""" - -_command_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Command( - %(outputs)s, - %(inputs)s, - _action -) -""" - -# This is copied from the default SCons action, updated to handle symlinks. -_copy_action_template = """ -import shutil -import SCons.Action - -def _copy_files_or_dirs_or_symlinks(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): - for file in src: - shutil.copy2(file, dest) - return 0 - elif os.path.islink(src): - linkto = os.readlink(src) - os.symlink(linkto, dest) - return 0 - elif os.path.isfile(src): - return shutil.copy2(src, dest) - else: - return shutil.copytree(src, dest, 1) - -def _copy_files_or_dirs_or_symlinks_str(dest, src): - return 'Copying %s to %s ...' % (src, dest) - -GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks, - _copy_files_or_dirs_or_symlinks_str, - convert=str) -""" - -_rule_template = """ -%(name)s_additional_inputs = %(inputs)s -%(name)s_outputs = %(outputs)s -def %(name)s_emitter(target, source, env): - return (%(name)s_outputs, source + %(name)s_additional_inputs) -if GetOption('verbose'): - %(name)s_action = Action([%(action)s]) -else: - %(name)s_action = Action([%(action)s], %(message)s) -env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action, - emitter=%(name)s_emitter) - -_outputs = [] -_processed_input_files = [] -for infile in input_files: - if (type(infile) == type('') - and not os.path.isabs(infile) - and not infile[0] == '$'): - infile = %(src_dir)r + infile - if str(infile).endswith('.%(extension)s'): - _generated = env.%(name)s(infile) - env.Precious(_generated) - _outputs.append(_generated) - %(process_outputs_as_sources_line)s - else: - _processed_input_files.append(infile) -prerequisites.extend(_outputs) -input_files = _processed_input_files -""" - -_spawn_hack = """ -import re -import SCons.Platform.posix -needs_shell = re.compile('["\\'>= 2.5: - return os.sysconf('SC_NPROCESSORS_ONLN') - else: # Mac OS X with Python < 2.5: - return int(os.popen2("sysctl -n hw.ncpu")[1].read()) - # Windows: - if os.environ.has_key('NUMBER_OF_PROCESSORS'): - return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1) - return 1 # Default - -# Support PROGRESS= to show progress in different ways. -p = ARGUMENTS.get('PROGRESS') -if p == 'spinner': - Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'], - interval=5, - file=open('/dev/tty', 'w')) -elif p == 'name': - Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w')) - -# Set the default -j value based on the number of processors. -SetOption('num_jobs', GetProcessorCount() + 1) - -# Have SCons use its cached dependency information. -SetOption('implicit_cache', 1) - -# Only re-calculate MD5 checksums if a timestamp has changed. 
-Decider('MD5-timestamp') - -# Since we set the -j value by default, suppress SCons warnings about being -# unable to support parallel build on versions of Python with no threading. -default_warnings = ['no-no-parallel-support'] -SetOption('warn', default_warnings + GetOption('warn')) - -AddOption('--mode', nargs=1, dest='conf_list', default=[], - action='append', help='Configuration to build.') - -AddOption('--verbose', dest='verbose', default=False, - action='store_true', help='Verbose command-line output.') - - -# -sconscript_file_map = %(sconscript_files)s - -class LoadTarget: - ''' - Class for deciding if a given target sconscript is to be included - based on a list of included target names, optionally prefixed with '-' - to exclude a target name. - ''' - def __init__(self, load): - ''' - Initialize a class with a list of names for possible loading. - - Arguments: - load: list of elements in the LOAD= specification - ''' - self.included = set([c for c in load if not c.startswith('-')]) - self.excluded = set([c[1:] for c in load if c.startswith('-')]) - - if not self.included: - self.included = set(['all']) - - def __call__(self, target): - ''' - Returns True if the specified target's sconscript file should be - loaded, based on the initialized included and excluded lists. - ''' - return (target in self.included or - ('all' in self.included and not target in self.excluded)) - -if 'LOAD' in ARGUMENTS: - load = ARGUMENTS['LOAD'].split(',') -else: - load = [] -load_target = LoadTarget(load) - -sconscript_files = [] -for target, sconscript in sconscript_file_map.iteritems(): - if load_target(target): - sconscript_files.append(sconscript) - - -target_alias_list= [] - -conf_list = GetOption('conf_list') -if conf_list: - # In case the same --mode= value was specified multiple times. - conf_list = list(set(conf_list)) -else: - conf_list = [%(default_configuration)r] - -sconsbuild_dir = Dir(%(sconsbuild_dir)s) - - -def FilterOut(self, **kw): - kw = SCons.Environment.copy_non_reserved_keywords(kw) - for key, val in kw.items(): - envval = self.get(key, None) - if envval is None: - # No existing variable in the environment, so nothing to delete. - continue - - for vremove in val: - # Use while not if, so we can handle duplicates. - while vremove in envval: - envval.remove(vremove) - - self[key] = envval - - # TODO(sgk): SCons.Environment.Append() has much more logic to deal - # with various types of values. We should handle all those cases in here - # too. (If variable is a dict, etc.) 
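
The LoadTarget class in the generated SConstruct above implements a small include/exclude filter: bare names in LOAD= include a target, a '-' prefix excludes one, and an empty list means 'all'. The same behavior extracted into a few lines (standalone, outside the template and its %-escaping):

```python
def make_load_filter(load):
    # Names in |load| select targets to build; a leading '-' excludes a
    # target instead. With no explicit inclusions, everything is included.
    included = set(c for c in load if not c.startswith('-'))
    excluded = set(c[1:] for c in load if c.startswith('-'))
    if not included:
        included = set(['all'])
    def should_load(target):
        return (target in included or
                ('all' in included and target not in excluded))
    return should_load

f = make_load_filter(['-tests'])
print(f('base'), f('tests'))  # -> True False
```
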
- - -non_compilable_suffixes = { - 'LINUX' : set([ - '.bdic', - '.css', - '.dat', - '.fragment', - '.gperf', - '.h', - '.hh', - '.hpp', - '.html', - '.hxx', - '.idl', - '.in', - '.in0', - '.in1', - '.js', - '.mk', - '.rc', - '.sigs', - '', - ]), - 'WINDOWS' : set([ - '.h', - '.hh', - '.hpp', - '.dat', - '.idl', - '.in', - '.in0', - '.in1', - ]), -} - -def compilable(env, file): - base, ext = os.path.splitext(str(file)) - if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]: - return False - return True - -def compilable_files(env, sources): - return [x for x in sources if compilable(env, x)] - -def GypProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def GypTestProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(*result) - return result - -def GypLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Library(target, source, *args, **kw) - return result - -def GypLoadableModule(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.LoadableModule(target, source, *args, **kw) - return result - -def GypStaticLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.StaticLibrary(target, source, *args, **kw) - return result - -def GypSharedLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.SharedLibrary(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def add_gyp_methods(env): - env.AddMethod(GypProgram) - env.AddMethod(GypTestProgram) - env.AddMethod(GypLibrary) - env.AddMethod(GypLoadableModule) - env.AddMethod(GypStaticLibrary) - env.AddMethod(GypSharedLibrary) - - env.AddMethod(FilterOut) - - env.AddMethod(compilable) - - -base_env = Environment( - tools = %(scons_tools)s, - INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate', - LIB_DIR='$TOP_BUILDDIR/lib', - OBJ_DIR='$TOP_BUILDDIR/obj', - SCONSBUILD_DIR=sconsbuild_dir.abspath, - SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate', - SRC_DIR=Dir(%(src_dir)r), - TARGET_PLATFORM='LINUX', - TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME', - LIBPATH=['$LIB_DIR'], -) - -if not GetOption('verbose'): - base_env.SetDefault( - ARCOMSTR='Creating library $TARGET', - ASCOMSTR='Assembling $TARGET', - CCCOMSTR='Compiling $TARGET', - CONCATSOURCECOMSTR='ConcatSource $TARGET', - CXXCOMSTR='Compiling $TARGET', - LDMODULECOMSTR='Building loadable module $TARGET', - LINKCOMSTR='Linking $TARGET', - MANIFESTCOMSTR='Updating manifest for $TARGET', - MIDLCOMSTR='Compiling IDL $TARGET', - PCHCOMSTR='Precompiling $TARGET', - RANLIBCOMSTR='Indexing $TARGET', - RCCOMSTR='Compiling resource $TARGET', - SHCCCOMSTR='Compiling $TARGET', - SHCXXCOMSTR='Compiling $TARGET', - SHLINKCOMSTR='Linking $TARGET', - SHMANIFESTCOMSTR='Updating manifest for $TARGET', - ) - -add_gyp_methods(base_env) - -for conf in conf_list: - env = base_env.Clone(CONFIG_NAME=conf) - SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath) - for sconscript in sconscript_files: - target_alias = env.SConscript(sconscript, exports=['env']) - if target_alias: - target_alias_list.extend(target_alias) - -Default(Alias('all', target_alias_list)) - -help_fmt = ''' -Usage: hammer 
[SCONS_OPTIONS] [VARIABLES] [TARGET] ... - -Local command-line build options: - --mode=CONFIG Configuration to build: - --mode=Debug [default] - --mode=Release - --verbose Print actual executed command lines. - -Supported command-line build variables: - LOAD=[module,...] Comma-separated list of components to load in the - dependency graph ('-' prefix excludes) - PROGRESS=type Display a progress indicator: - name: print each evaluated target name - spinner: print a spinner every 5 targets - -The following TARGET names can also be used as LOAD= module names: - -%%s -''' - -if GetOption('help'): - def columnar_text(items, width=78, indent=2, sep=2): - result = [] - colwidth = max(map(len, items)) + sep - cols = (width - indent) / colwidth - if cols < 1: - cols = 1 - rows = (len(items) + cols - 1) / cols - indent = '%%*s' %% (indent, '') - sep = indent - for row in xrange(0, rows): - result.append(sep) - for i in xrange(row, len(items), rows): - result.append('%%-*s' %% (colwidth, items[i])) - sep = '\\n' + indent - result.append('\\n') - return ''.join(result) - - load_list = set(sconscript_file_map.keys()) - target_aliases = set(map(str, target_alias_list)) - - common = load_list and target_aliases - load_only = load_list - common - target_only = target_aliases - common - help_text = [help_fmt %% columnar_text(sorted(list(common)))] - if target_only: - fmt = "The following are additional TARGET names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(target_only)))) - if load_only: - fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(load_only)))) - Help(''.join(help_text)) -""" - -# TEMPLATE END -############################################################################# - - -def GenerateSConscriptWrapper(build_file, build_file_data, name, - output_filename, sconscript_files, - default_configuration): - """ - Generates the "wrapper" SConscript file (analogous to the Visual Studio - solution) that calls all the individual target SConscript files. - """ - output_dir = os.path.dirname(output_filename) - src_dir = build_file_data['_DEPTH'] - src_dir_rel = gyp.common.RelativePath(src_dir, output_dir) - if not src_dir_rel: - src_dir_rel = '.' - scons_settings = build_file_data.get('scons_settings', {}) - sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#') - scons_tools = scons_settings.get('tools', ['default']) - - sconscript_file_lines = ['dict('] - for target in sorted(sconscript_files.keys()): - sconscript = sconscript_files[target] - sconscript_file_lines.append(' %s = %r,' % (target, sconscript)) - sconscript_file_lines.append(')') - - fp = open(output_filename, 'w') - fp.write(header) - fp.write(_wrapper_template % { - 'default_configuration' : default_configuration, - 'name' : name, - 'scons_tools' : repr(scons_tools), - 'sconsbuild_dir' : repr(sconsbuild_dir), - 'sconscript_files' : '\n'.join(sconscript_file_lines), - 'src_dir' : src_dir_rel, - }) - fp.close() - - # Generate the SConstruct file that invokes the wrapper SConscript. - dir, fname = os.path.split(output_filename) - SConstruct = os.path.join(dir, 'SConstruct') - fp = open(SConstruct, 'w') - fp.write(header) - fp.write('SConscript(%s)\n' % repr(fname)) - fp.close() - - -def TargetFilename(target, build_file=None, output_suffix=''): - """Returns the .scons file name for the specified target. 
-  """
-  if build_file is None:
-    build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
-  output_file = os.path.join(os.path.dirname(build_file),
-                             target + output_suffix + '.scons')
-  return output_file
-
-
-def PerformBuild(data, configurations, params):
-  options = params['options']
-
-  # Due to the way we test gyp on the chromium trybots
-  # we need to look for 'scons.py' as well as the more common 'scons'
-  # TODO(sbc): update the trybots to have a more normal install
-  # of scons.
-  scons = 'scons'
-  paths = os.environ['PATH'].split(os.pathsep)
-  for scons_name in ['scons', 'scons.py']:
-    for path in paths:
-      test_scons = os.path.join(path, scons_name)
-      print 'looking for: %s' % test_scons
-      if os.path.exists(test_scons):
-        print "found scons: %s" % test_scons
-        scons = test_scons
-        break
-
-  for config in configurations:
-    arguments = [scons, '-C', options.toplevel_dir, '--mode=%s' % config]
-    print "Building [%s]: %s" % (config, arguments)
-    subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  """
-  Generates all the output files for the specified targets.
-  """
-  options = params['options']
-
-  if options.generator_output:
-    def output_path(filename):
-      return filename.replace(params['cwd'], options.generator_output)
-  else:
-    def output_path(filename):
-      return filename
-
-  default_configuration = None
-
-  for qualified_target in target_list:
-    spec = target_dicts[qualified_target]
-    if spec['toolset'] != 'target':
-      raise Exception(
-          'Multiple toolsets not supported in scons build (target %s)' %
-          qualified_target)
-    scons_target = SCons.Target(spec)
-    if scons_target.is_ignored:
-      continue
-
-    # TODO: assumes the default_configuration of the first non-Default
-    # target is the correct default for all targets.
-    # Need a better model for handling variation between targets.
-    if (not default_configuration and
-        spec['default_configuration'] != 'Default'):
-      default_configuration = spec['default_configuration']
-
-    build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
-    output_file = TargetFilename(target, build_file, options.suffix)
-    if options.generator_output:
-      output_file = output_path(output_file)
-
-    if not spec.has_key('libraries'):
-      spec['libraries'] = []
-
-    # Add dependent static library targets to the 'libraries' value.
-    deps = spec.get('dependencies', [])
-    spec['scons_dependencies'] = []
-    for d in deps:
-      td = target_dicts[d]
-      target_name = td['target_name']
-      spec['scons_dependencies'].append("Alias('%s')" % target_name)
-      if td['type'] in ('static_library', 'shared_library'):
-        libname = td.get('product_name', target_name)
-        spec['libraries'].append('lib' + libname)
-      if td['type'] == 'loadable_module':
-        prereqs = spec.get('scons_prerequisites', [])
-        # TODO: parameterize with <(SHARED_LIBRARY_*) variables?
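A minimal Python 3 sketch of the PATH probe in `PerformBuild` above: `shutil.which` performs the same per-directory lookup the loop spells out by hand (note that `which()` also requires the execute bit, which the plain `os.path.exists` test does not).

```python
import shutil

def find_scons():
    """Locate a scons entry point; 'scons.py' covers the trybot installs."""
    for name in ('scons', 'scons.py'):
        found = shutil.which(name)  # walks PATH one directory at a time
        if found:
            return found
    return 'scons'  # fall back to the bare name and let the shell resolve it

print(find_scons())
```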
- td_target = SCons.Target(td) - td_target.target_prefix = '${SHLIBPREFIX}' - td_target.target_suffix = '${SHLIBSUFFIX}' - - GenerateSConscript(output_file, spec, build_file, data[build_file]) - - if not default_configuration: - default_configuration = 'Default' - - for build_file in sorted(data.keys()): - path, ext = os.path.splitext(build_file) - if ext != '.gyp': - continue - output_dir, basename = os.path.split(path) - output_filename = path + '_main' + options.suffix + '.scons' - - all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file) - sconscript_files = {} - for t in all_targets: - scons_target = SCons.Target(target_dicts[t]) - if scons_target.is_ignored: - continue - bf, target = gyp.common.ParseQualifiedTarget(t)[:2] - target_filename = TargetFilename(target, bf, options.suffix) - tpath = gyp.common.RelativePath(target_filename, output_dir) - sconscript_files[target] = tpath - - output_filename = output_path(output_filename) - if sconscript_files: - GenerateSConscriptWrapper(build_file, data[build_file], basename, - output_filename, sconscript_files, - default_configuration) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1239 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import filecmp -import gyp.common -import gyp.xcodeproj_file -import errno -import os -import sys -import posixpath -import re -import shutil -import subprocess -import tempfile - - -# Project files generated by this module will use _intermediate_var as a -# custom Xcode setting whose value is a DerivedSources-like directory that's -# project-specific and configuration-specific. The normal choice, -# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive -# as it is likely that multiple targets within a single project file will want -# to access the same set of generated files. The other option, -# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, -# it is not configuration-specific. INTERMEDIATE_DIR is defined as -# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). -_intermediate_var = 'INTERMEDIATE_DIR' - -# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all -# targets that share the same BUILT_PRODUCTS_DIR. -_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR' - -_library_search_paths_var = 'LIBRARY_SEARCH_PATHS' - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.dylib', - # INTERMEDIATE_DIR is a place for targets to build up intermediate products. - # It is specific to each build environment. It is only guaranteed to exist - # and be constant within the context of a project, corresponding to a single - # input file. Some build environments may allow their intermediate directory - # to be shared on a wider scale, but this is not guaranteed. 
- 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var, - 'OS': 'mac', - 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)', - 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)', - 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)', - 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)', - 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)', - 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)', - 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)', - 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var, - 'CONFIGURATION_NAME': '$(CONFIGURATION)', -} - -# The Xcode-specific sections that hold paths. -generator_additional_path_sections = [ - 'mac_bundle_resources', - 'mac_framework_headers', - 'mac_framework_private_headers', - # 'mac_framework_dirs', input already handles _dirs endings. -] - -# The Xcode-specific keys that exist on targets and aren't moved down to -# configurations. -generator_additional_non_configuration_keys = [ - 'mac_bundle', - 'mac_bundle_resources', - 'mac_framework_headers', - 'mac_framework_private_headers', - 'xcode_create_dependents_test_runner', -] - -# We want to let any rules apply to files that are resources also. -generator_extra_sources_for_rules = [ - 'mac_bundle_resources', - 'mac_framework_headers', - 'mac_framework_private_headers', -] - -# Xcode's standard set of library directories, which don't need to be duplicated -# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. -xcode_standard_library_dirs = frozenset([ - '$(SDKROOT)/usr/lib', - '$(SDKROOT)/usr/local/lib', -]) - -def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) - if len(configuration_names) == 0: - configuration_names = ['Default'] - for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ - 'name': configuration_name}) - xccl.AppendProperty('buildConfigurations', xcbc) - xccl.SetProperty('defaultConfigurationName', configuration_names[0]) - return xccl - - -class XcodeProject(object): - def __init__(self, gyp_path, path, build_file_dict): - self.gyp_path = gyp_path - self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) - projectDirPath = gyp.common.RelativePath( - os.path.dirname(os.path.abspath(self.gyp_path)), - os.path.dirname(path) or '.') - self.project.SetProperty('projectDirPath', projectDirPath) - self.project_file = \ - gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) - self.build_file_dict = build_file_dict - - # TODO(mark): add destructor that cleans up self.path if created_dir is - # True and things didn't complete successfully. Or do something even - # better with "try"? - self.created_dir = False - try: - os.makedirs(self.path) - self.created_dir = True - except OSError, e: - if e.errno != errno.EEXIST: - raise - - def Finalize1(self, xcode_targets, serialize_all_tests): - # Collect a list of all of the build configuration names used by the - # various targets in the file. It is very heavily advised to keep each - # target in an entire project (even across multiple project files) using - # the same set of configuration names. - configurations = [] - for xct in self.project.GetProperty('targets'): - xccl = xct.GetProperty('buildConfigurationList') - xcbcs = xccl.GetProperty('buildConfigurations') - for xcbc in xcbcs: - name = xcbc.GetProperty('name') - if name not in configurations: - configurations.append(name) - - # Replace the XCConfigurationList attached to the PBXProject object with - # a new one specifying all of the configuration names used by the various - # targets. 
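The configuration-name collection in `Finalize1` above reduces to an order-preserving dedup: the first target to mention a configuration fixes its position in the list. A standalone sketch with a simplified target shape (the real code walks `XCBuildConfiguration` objects, not plain dicts):

```python
def collect_configurations(targets):
    """Collect configuration names across targets, first mention wins."""
    configurations = []
    for target in targets:
        for name in target.get('configurations', []):
            if name not in configurations:
                configurations.append(name)
    return configurations

targets = [
    {'configurations': ['Debug', 'Release']},
    {'configurations': ['Release', 'Coverage']},
]
print(collect_configurations(targets))  # ['Debug', 'Release', 'Coverage']
```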
-    try:
-      xccl = CreateXCConfigurationList(configurations)
-      self.project.SetProperty('buildConfigurationList', xccl)
-    except:
-      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
-      raise
-
-    # The need for this setting is explained above where _intermediate_var is
-    # defined.  The comments below about wanting to avoid project-wide build
-    # settings apply here too, but this needs to be set on a project-wide basis
-    # so that files relative to the _intermediate_var setting can be displayed
-    # properly in the Xcode UI.
-    #
-    # Note that for configuration-relative files such as anything relative to
-    # _intermediate_var, for the purposes of UI tree view display, Xcode will
-    # only resolve the configuration name once, when the project file is
-    # opened.  If the active build configuration is changed, the project file
-    # must be closed and reopened if it is desired for the tree view to update.
-    # This is filed as Apple radar 6588391.
-    xccl.SetBuildSetting(_intermediate_var,
-                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
-    xccl.SetBuildSetting(_shared_intermediate_var,
-                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
-
-    # Set user-specified project-wide build settings and config files.  This
-    # is intended to be used very sparingly.  Really, almost everything should
-    # go into target-specific build settings sections.  The project-wide
-    # settings are only intended to be used in cases where Xcode attempts to
-    # resolve variable references in a project context as opposed to a target
-    # context, such as when resolving sourceTree references while building up
-    # the tree view for UI display.
-    # Any values set globally are applied to all configurations, then any
-    # per-configuration values are applied.
-    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
-      xccl.SetBuildSetting(xck, xcv)
-    if 'xcode_config_file' in self.build_file_dict:
-      config_ref = self.project.AddOrGetFileInRootGroup(
-          self.build_file_dict['xcode_config_file'])
-      xccl.SetBaseConfiguration(config_ref)
-    build_file_configurations = self.build_file_dict.get('configurations', {})
-    if build_file_configurations:
-      for config_name in configurations:
-        build_file_configuration_named = \
-            build_file_configurations.get(config_name, {})
-        if build_file_configuration_named:
-          xcc = xccl.ConfigurationNamed(config_name)
-          for xck, xcv in build_file_configuration_named.get('xcode_settings',
-                                                             {}).iteritems():
-            xcc.SetBuildSetting(xck, xcv)
-          if 'xcode_config_file' in build_file_configuration_named:
-            config_ref = self.project.AddOrGetFileInRootGroup(
-                build_file_configurations[config_name]['xcode_config_file'])
-            xcc.SetBaseConfiguration(config_ref)
-
-    # Sort the targets based on how they appeared in the input.
-    # TODO(mark): Like a lot of other things here, this assumes internal
-    # knowledge of PBXProject - in this case, of its "targets" property.
-
-    # ordinary_targets are ordinary targets that are already in the project
-    # file.  run_test_targets are the targets that run unittests and should be
-    # used for the Run All Tests target.  support_targets are the action/rule
-    # targets used by GYP file targets, just kept for the assert check.
-    ordinary_targets = []
-    run_test_targets = []
-    support_targets = []
-
-    # targets is the full list of targets in the project.
-    targets = []
-
-    # does the project define its own "all" target?
-    has_custom_all = False
-
-    # targets_for_all is the list of ordinary_targets that should be listed
-    # in this project's "All" target.
It includes each non_runtest_target - # that does not have suppress_wildcard set. - targets_for_all = [] - - for target in self.build_file_dict['targets']: - target_name = target['target_name'] - toolset = target['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name, - toolset) - xcode_target = xcode_targets[qualified_target] - # Make sure that the target being added to the sorted list is already in - # the unsorted list. - assert xcode_target in self.project._properties['targets'] - targets.append(xcode_target) - ordinary_targets.append(xcode_target) - if xcode_target.support_target: - support_targets.append(xcode_target.support_target) - targets.append(xcode_target.support_target) - - if not int(target.get('suppress_wildcard', False)): - targets_for_all.append(xcode_target) - - if target_name.lower() == 'all': - has_custom_all = True; - - # If this target has a 'run_as' attribute, add its target to the - # targets, and add it to the test targets. - if target.get('run_as'): - # Make a target to run something. It should have one - # dependency, the parent xcode target. - xccl = CreateXCConfigurationList(configurations) - run_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run ' + target_name, - 'productName': xcode_target.GetProperty('productName'), - 'buildConfigurationList': xccl, - }, - parent=self.project) - run_target.AddDependency(xcode_target) - - command = target['run_as'] - script = '' - if command.get('working_directory'): - script = script + 'cd "%s"\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - command.get('working_directory')) - - if command.get('environment'): - script = script + "\n".join( - ['export %s="%s"' % - (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" - - # Some test end up using sockets, files on disk, etc. and can get - # confused if more then one test runs at a time. The generator - # flag 'xcode_serialize_all_test_runs' controls the forcing of all - # tests serially. It defaults to True. To get serial runs this - # little bit of python does the same as the linux flock utility to - # make sure only one runs at a time. - command_prefix = '' - if serialize_all_tests: - command_prefix = \ -"""python -c "import fcntl, subprocess, sys -file = open('$TMPDIR/GYP_serialize_test_runs', 'a') -fcntl.flock(file.fileno(), fcntl.LOCK_EX) -sys.exit(subprocess.call(sys.argv[1:]))" """ - - # If we were unable to exec for some reason, we want to exit - # with an error, and fixup variable references to be shell - # syntax instead of xcode syntax. - script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - gyp.common.EncodePOSIXShellList(command.get('action'))) - - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - run_target.AppendProperty('buildPhases', ssbp) - - # Add the run target to the project file. - targets.append(run_target) - run_test_targets.append(run_target) - xcode_target.test_runner = run_target - - - # Make sure that the list of targets being replaced is the same length as - # the one replacing it, but allow for the added test runner targets. - assert len(self.project._properties['targets']) == \ - len(ordinary_targets) + len(support_targets) - - self.project._properties['targets'] = targets - - # Get rid of unnecessary levels of depth in groups like the Source group. 
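The `command_prefix` above inlines this same trick into a shell one-liner. As a standalone, runnable sketch (Unix-only because of `fcntl`; the lock path is illustrative): invoke it as `python serialize.py <command...>` and concurrent invocations will execute one at a time.

```python
import fcntl
import os
import subprocess
import sys
import tempfile

# An exclusive lock on a shared scratch file serializes every process that
# opens the same path, exactly like the flock(1) utility.
lock_path = os.path.join(tempfile.gettempdir(), 'GYP_serialize_test_runs')
with open(lock_path, 'a') as lock_file:
    fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)  # blocks until free
    sys.exit(subprocess.call(sys.argv[1:]))
```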
- self.project.RootGroupsTakeOverOnlyChildren(True) - - # Sort the groups nicely. Do this after sorting the targets, because the - # Products group is sorted based on the order of the targets. - self.project.SortGroups() - - # Create an "All" target if there's more than one target in this project - # file and the project didn't define its own "All" target. Put a generated - # "All" target first so that people opening up the project for the first - # time will build everything by default. - if len(targets_for_all) > 1 and not has_custom_all: - xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'All', - }, - parent=self.project) - - for target in targets_for_all: - all_target.AddDependency(target) - - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._properties. It's important to get the "All" target first, - # though. - self.project._properties['targets'].insert(0, all_target) - - # The same, but for run_test_targets. - if len(run_test_targets) > 1: - xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'Run All Tests', - }, - parent=self.project) - for run_test_target in run_test_targets: - run_all_tests_target.AddDependency(run_test_target) - - # Insert after the "All" target, which must exist if there is more than - # one run_test_target. - self.project._properties['targets'].insert(1, run_all_tests_target) - - def Finalize2(self, xcode_targets, xcode_target_to_target_dict): - # Finalize2 needs to happen in a separate step because the process of - # updating references to other projects depends on the ordering of targets - # within remote project files. Finalize1 is responsible for sorting duty, - # and once all project files are sorted, Finalize2 can come in and update - # these references. - - # To support making a "test runner" target that will run all the tests - # that are direct dependents of any given target, we look for - # xcode_create_dependents_test_runner being set on an Aggregate target, - # and generate a second target that will run the tests runners found under - # the marked target. - for bf_tgt in self.build_file_dict['targets']: - if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)): - tgt_name = bf_tgt['target_name'] - toolset = bf_tgt['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, - tgt_name, toolset) - xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): - # Collect all the run test targets. - all_run_tests = [] - pbxtds = xcode_target.GetProperty('dependencies') - for pbxtd in pbxtds: - pbxcip = pbxtd.GetProperty('targetProxy') - dependency_xct = pbxcip.GetProperty('remoteGlobalIDString') - if hasattr(dependency_xct, 'test_runner'): - all_run_tests.append(dependency_xct.test_runner) - - # Directly depend on all the runners as they depend on the target - # that builds them. - if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run %s Tests' % tgt_name, - 'productName': tgt_name, - }, - parent=self.project) - for run_test_target in all_run_tests: - run_all_target.AddDependency(run_test_target) - - # Insert the test runner after the related target. 
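Both of the insertions described above rely only on list positioning. A toy sketch of the ordering contract (target names invented):

```python
targets = ['libcore', 'app', 'unit_tests']

# A generated "All" target goes first so a fresh checkout builds everything.
targets.insert(0, 'All')

# A test runner lands immediately after the target it exercises.
idx = targets.index('unit_tests')
targets.insert(idx + 1, 'Run unit_tests Tests')

print(targets)
# ['All', 'libcore', 'app', 'unit_tests', 'Run unit_tests Tests']
```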
- idx = self.project._properties['targets'].index(xcode_target) - self.project._properties['targets'].insert(idx + 1, run_all_target) - - # Update all references to other projects, to make sure that the lists of - # remote products are complete. Otherwise, Xcode will fill them in when - # it opens the project file, which will result in unnecessary diffs. - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._other_pbxprojects. - for other_pbxproject in self.project._other_pbxprojects.keys(): - self.project.AddOrGetProjectReference(other_pbxproject) - - self.project.SortRemoteProductReferences() - - # Give everything an ID. - self.project_file.ComputeIDs() - - # Make sure that no two objects in the project file have the same ID. If - # multiple objects wind up with the same ID, upon loading the file, Xcode - # will only recognize one object (the last one in the file?) and the - # results are unpredictable. - self.project_file.EnsureNoIDCollisions() - - def Write(self): - # Write the project file to a temporary location first. Xcode watches for - # changes to the project file and presents a UI sheet offering to reload - # the project when it does change. However, in some cases, especially when - # multiple projects are open or when Xcode is busy, things don't work so - # seamlessly. Sometimes, Xcode is able to detect that a project file has - # changed but can't unload it because something else is referencing it. - # To mitigate this problem, and to avoid even having Xcode present the UI - # sheet when an open project is rewritten for inconsequential changes, the - # project file is written to a temporary file in the xcodeproj directory - # first. The new temporary file is then compared to the existing project - # file, if any. If they differ, the new file replaces the old; otherwise, - # the new project file is simply deleted. Xcode properly detects a file - # being renamed over an open project file as a change and so it remains - # able to present the "project file changed" sheet under this system. - # Writing to a temporary file first also avoids the possible problem of - # Xcode rereading an incomplete project file. - (output_fd, new_pbxproj_path) = \ - tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.', - dir=self.path) - - try: - output_file = os.fdopen(output_fd, 'wb') - - self.project_file.Print(output_file) - output_file.close() - - pbxproj_path = os.path.join(self.path, 'project.pbxproj') - - same = False - try: - same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(new_pbxproj_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - - os.chmod(new_pbxproj_path, 0666 & ~umask) - os.rename(new_pbxproj_path, pbxproj_path) - - except Exception: - # Don't leave turds behind. 
In fact, if this code was responsible for - # creating the xcodeproj directory, get rid of that too. - os.unlink(new_pbxproj_path) - if self.created_dir: - shutil.rmtree(self.path, True) - raise - - -cached_xcode_version = None -def InstalledXcodeVersion(): - """Fetches the installed version of Xcode, returns empty string if it is - unable to figure it out.""" - - global cached_xcode_version - if not cached_xcode_version is None: - return cached_xcode_version - - # Default to an empty string - cached_xcode_version = '' - - # Collect the xcodebuild's version information. - try: - import subprocess - cmd = ['/usr/bin/xcodebuild', '-version'] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) - xcodebuild_version_info = proc.communicate()[0] - # Any error, return empty string - if proc.returncode: - xcodebuild_version_info = '' - except OSError: - # We failed to launch the tool - xcodebuild_version_info = '' - - # Pull out the Xcode version itself. - match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE) - if match_line: - cached_xcode_version = match_line.group(1) - # Done! - return cached_xcode_version - - -def AddSourceToTarget(source, type, pbxp, xct): - # TODO(mark): Perhaps source_extensions and library_extensions can be made a - # little bit fancier. - source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] - - # .o is conceptually more of a "source" than a "library," but Xcode thinks - # of "sources" as things to compile and "libraries" (or "frameworks") as - # things to link with. Adding an object file to an Xcode target's frameworks - # phase works properly. - library_extensions = ['a', 'dylib', 'framework', 'o'] - - basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) - if ext: - ext = ext[1:].lower() - - if ext in source_extensions and type != 'none': - xct.SourcesPhase().AddFile(source) - elif ext in library_extensions and type != 'none': - xct.FrameworksPhase().AddFile(source) - else: - # Files that aren't added to a sources or frameworks build phase can still - # go into the project file, just not as part of a build phase. - pbxp.AddOrGetFileInRootGroup(source) - - -def AddResourceToTarget(resource, pbxp, xct): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - xct.ResourcesPhase().AddFile(resource) - - -def AddHeaderToTarget(header, pbxp, xct, is_public): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public] - xct.HeadersPhase().AddFile(header, settings) - - -_xcode_variable_re = re.compile('(\$\((.*?)\))') -def ExpandXcodeVariables(string, expansions): - """Expands Xcode-style $(VARIABLES) in string per the expansions dict. - - In some rare cases, it is appropriate to expand Xcode variables when a - project file is generated. For any substring $(VAR) in string, if VAR is a - key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. - Any $(VAR) substring in string for which VAR is not a key in the expansions - dict will remain in the returned string. 
- """ - - matches = _xcode_variable_re.findall(string) - if matches == None: - return string - - matches.reverse() - for match in matches: - (to_replace, variable) = match - if not variable in expansions: - continue - - replacement = expansions[variable] - string = re.sub(re.escape(to_replace), replacement, string) - - return string - - -def EscapeXCodeArgument(s): - """We must escape the arguments that we give to XCode so that it knows not to - split on spaces and to respect backslash and quote literals.""" - s = s.replace('\\', '\\\\') - s = s.replace('"', '\\"') - return '"' + s + '"' - - - -def PerformBuild(data, configurations, params): - options = params['options'] - - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - - for config in configurations: - arguments = ['xcodebuild', '-project', xcodeproj_path] - arguments += ['-configuration', config] - print "Building [%s]: %s" % (config, arguments) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - parallel_builds = generator_flags.get('xcode_parallel_builds', True) - serialize_all_tests = \ - generator_flags.get('xcode_serialize_all_test_runs', True) - project_version = generator_flags.get('xcode_project_version', None) - skip_excluded_files = \ - not generator_flags.get('xcode_list_excluded_files', True) - xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) - xcode_projects[build_file] = xcp - pbxp = xcp.project - - if parallel_builds: - pbxp.SetProperty('attributes', - {'BuildIndependentTargetsInParallel': 'YES'}) - if project_version: - xcp.project_file.SetXcodeVersion(project_version) - - # Add gyp/gypi files to project - if not generator_flags.get('standalone'): - main_group = pbxp.GetProperty('mainGroup') - build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) - main_group.AppendChild(build_group) - for included_file in build_file_dict['included_files']: - build_group.AddOrGetFileByPath(included_file, False) - - xcode_targets = {} - xcode_target_to_target_dict = {} - for qualified_target in target_list: - [build_file, target_name, toolset] = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in xcode build (target %s)' % - qualified_target) - configuration_names = [spec['default_configuration']] - for configuration_name in sorted(spec['configurations'].keys()): - if configuration_name not in configuration_names: - configuration_names.append(configuration_name) - xcp = xcode_projects[build_file] - pbxp = xcp.project - - # Set up the configurations for the target according to the list of names - # supplied. - xccl = CreateXCConfigurationList(configuration_names) - - # Create an XCTarget subclass object for the target. 
The type with - # "+bundle" appended will be used if the target has "mac_bundle" set. - # loadable_modules not in a mac_bundle are mapped to - # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets - # to create a single-file mh_bundle. - _types = { - 'executable': 'com.apple.product-type.tool', - 'loadable_module': 'com.googlecode.gyp.xcode.bundle', - 'shared_library': 'com.apple.product-type.library.dynamic', - 'static_library': 'com.apple.product-type.library.static', - 'executable+bundle': 'com.apple.product-type.application', - 'loadable_module+bundle': 'com.apple.product-type.bundle', - 'shared_library+bundle': 'com.apple.product-type.framework', - } - - target_properties = { - 'buildConfigurationList': xccl, - 'name': target_name, - } - - type = spec['type'] - is_bundle = int(spec.get('mac_bundle', 0)) - if type != 'none': - type_bundle_key = type - if is_bundle: - type_bundle_key += '+bundle' - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget - try: - target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: - gyp.common.ExceptionAppend(e, "-- unknown product type while " - "writing target %s" % target_name) - raise - else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget - assert not is_bundle, ( - 'mac_bundle targets cannot have type none (target "%s")' % - target_name) - - target_product_name = spec.get('product_name') - if target_product_name is not None: - target_properties['productName'] = target_product_name - - xct = xctarget_type(target_properties, parent=pbxp, - force_outdir=spec.get('product_dir'), - force_prefix=spec.get('product_prefix'), - force_extension=spec.get('product_extension')) - pbxp.AppendProperty('targets', xct) - xcode_targets[qualified_target] = xct - xcode_target_to_target_dict[xct] = spec - - spec_actions = spec.get('actions', []) - spec_rules = spec.get('rules', []) - - # Xcode has some "issues" with checking dependencies for the "Compile - # sources" step with any source files/headers generated by actions/rules. - # To work around this, if a target is building anything directly (not - # type "none"), then a second target is used to run the GYP actions/rules - # and is made a dependency of this target. This way the work is done - # before the dependency checks for what should be recompiled. - support_xct = None - if type != 'none' and (spec_actions or spec_rules): - support_xccl = CreateXCConfigurationList(configuration_names); - support_target_properties = { - 'buildConfigurationList': support_xccl, - 'name': target_name + ' Support', - } - if target_product_name: - support_target_properties['productName'] = \ - target_product_name + ' Support' - support_xct = \ - gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, - parent=pbxp) - pbxp.AppendProperty('targets', support_xct) - xct.AddDependency(support_xct) - # Hang the support target off the main target so it can be tested/found - # by the generator during Finalize. - xct.support_target = support_xct - - prebuild_index = 0 - - # Add custom shell script phases for "actions" sections. - for action in spec_actions: - # There's no need to write anything into the script to ensure that the - # output directories already exist, because Xcode will look at the - # declared outputs and automatically ensure that they exist for us. - - # Do we have a message to print when this action runs? 
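The `'+bundle'` key trick above reduces to plain dict arithmetic. A sketch over a subset of the real `_types` table:

```python
_types = {
    'executable': 'com.apple.product-type.tool',
    'executable+bundle': 'com.apple.product-type.application',
    'shared_library': 'com.apple.product-type.library.dynamic',
    'shared_library+bundle': 'com.apple.product-type.framework',
}

def product_type(gyp_type, is_bundle):
    key = gyp_type + '+bundle' if is_bundle else gyp_type
    return _types[key]  # a KeyError here is what gyp wraps in a rich message

print(product_type('executable', True))  # com.apple.product-type.application
```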
- message = action.get('message') - if message: - message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message) - else: - message = '' - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(action['action']) - - # Convert Xcode-type variable references to sh-compatible environment - # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - action_string) - - script = '' - # Include the optional message - if message_sh: - script += message_sh + '\n' - # Be sure the script runs in exec, and that if exec fails, the script - # exits signalling an error. - script += 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'inputPaths': action['inputs'], - 'name': 'Action "' + action['action_name'] + '"', - 'outputPaths': action['outputs'], - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # TODO(mark): Should verify that at most one of these is specified. - if int(action.get('process_outputs_as_sources', False)): - for output in action['outputs']: - AddSourceToTarget(output, type, pbxp, xct) - - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - for output in action['outputs']: - AddResourceToTarget(output, pbxp, xct) - - # tgt_mac_bundle_resources holds the list of bundle resources so - # the rule processing can check against it. - if is_bundle: - tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) - else: - tgt_mac_bundle_resources = [] - - # Add custom shell script phases driving "make" for "rules" sections. - # - # Xcode's built-in rule support is almost powerful enough to use directly, - # but there are a few significant deficiencies that render them unusable. - # There are workarounds for some of its inadequacies, but in aggregate, - # the workarounds added complexity to the generator, and some workarounds - # actually require input files to be crafted more carefully than I'd like. - # Consequently, until Xcode rules are made more capable, "rules" input - # sections will be handled in Xcode output by shell script build phases - # performed prior to the compilation phase. - # - # The following problems with Xcode rules were found. The numbers are - # Apple radar IDs. I hope that these shortcomings are addressed, I really - # liked having the rules handled directly in Xcode during the period that - # I was prototyping this. - # - # 6588600 Xcode compiles custom script rule outputs too soon, compilation - # fails. This occurs when rule outputs from distinct inputs are - # interdependent. The only workaround is to put rules and their - # inputs in a separate target from the one that compiles the rule - # outputs. This requires input file cooperation and it means that - # process_outputs_as_sources is unusable. - # 6584932 Need to declare that custom rule outputs should be excluded from - # compilation. A possible workaround is to lie to Xcode about a - # rule's output, giving it a dummy file it doesn't know how to - # compile. The rule action script would need to touch the dummy. 
- # 6584839 I need a way to declare additional inputs to a custom rule. - # A possible workaround is a shell script phase prior to - # compilation that touches a rule's primary input files if any - # would-be additional inputs are newer than the output. Modifying - # the source tree - even just modification times - feels dirty. - # 6564240 Xcode "custom script" build rules always dump all environment - # variables. This is a low-prioroty problem and is not a - # show-stopper. - rules_by_ext = {} - for rule in spec_rules: - rules_by_ext[rule['extension']] = rule - - # First, some definitions: - # - # A "rule source" is a file that was listed in a target's "sources" - # list and will have a rule applied to it on the basis of matching the - # rule's "extensions" attribute. Rule sources are direct inputs to - # rules. - # - # Rule definitions may specify additional inputs in their "inputs" - # attribute. These additional inputs are used for dependency tracking - # purposes. - # - # A "concrete output" is a rule output with input-dependent variables - # resolved. For example, given a rule with: - # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], - # if the target's "sources" list contained "one.ext" and "two.ext", - # the "concrete output" for rule input "two.ext" would be "two.cc". If - # a rule specifies multiple outputs, each input file that the rule is - # applied to will have the same number of concrete outputs. - # - # If any concrete outputs are outdated or missing relative to their - # corresponding rule_source or to any specified additional input, the - # rule action must be performed to generate the concrete outputs. - - # concrete_outputs_by_rule_source will have an item at the same index - # as the rule['rule_sources'] that it corresponds to. Each item is a - # list of all of the concrete outputs for the rule_source. - concrete_outputs_by_rule_source = [] - - # concrete_outputs_all is a flat list of all concrete outputs that this - # rule is able to produce, given the known set of input files - # (rule_sources) that apply to it. - concrete_outputs_all = [] - - # messages & actions are keyed by the same indices as rule['rule_sources'] - # and concrete_outputs_by_rule_source. They contain the message and - # action to perform after resolving input-dependent variables. The - # message is optional, in which case None is stored for each rule source. - messages = [] - actions = [] - - for rule_source in rule.get('rule_sources', []): - rule_source_dirname, rule_source_basename = \ - posixpath.split(rule_source) - (rule_source_root, rule_source_ext) = \ - posixpath.splitext(rule_source_basename) - - # These are the same variable names that Xcode uses for its own native - # rule support. Because Xcode's rule engine is not being used, they - # need to be expanded as they are written to the makefile. - rule_input_dict = { - 'INPUT_FILE_BASE': rule_source_root, - 'INPUT_FILE_SUFFIX': rule_source_ext, - 'INPUT_FILE_NAME': rule_source_basename, - 'INPUT_FILE_PATH': rule_source, - 'INPUT_FILE_DIRNAME': rule_source_dirname, - } - - concrete_outputs_for_this_rule_source = [] - for output in rule.get('outputs', []): - # Fortunately, Xcode and make both use $(VAR) format for their - # variables, so the expansion is the only transformation necessary. - # Any remaning $(VAR)-type variables in the string can be given - # directly to make, which will pick up the correct settings from - # what Xcode puts into the environment. 
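A self-contained sketch of that expansion step: a simplified stand-in for `ExpandXcodeVariables` plus the `rule_input_dict` built above (the regex and sample paths approximate the real ones).

```python
import posixpath
import re

_var_re = re.compile(r'\$\((\w+)\)')

def expand(template, expansions):
    # Replace $(VAR) when VAR is known; leave unknown references untouched,
    # just as ExpandXcodeVariables does.
    return _var_re.sub(
        lambda m: expansions.get(m.group(1), m.group(0)), template)

rule_source = 'protos/user.ext'
dirname, basename = posixpath.split(rule_source)
root, ext = posixpath.splitext(basename)
expansions = {
    'INPUT_FILE_BASE': root,
    'INPUT_FILE_SUFFIX': ext,
    'INPUT_FILE_NAME': basename,
    'INPUT_FILE_PATH': rule_source,
    'INPUT_FILE_DIRNAME': dirname,
}
print(expand('$(INPUT_FILE_DIRNAME)/$(INPUT_FILE_BASE).cc', expansions))
# -> protos/user.cc
```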
- concrete_output = ExpandXcodeVariables(output, rule_input_dict) - concrete_outputs_for_this_rule_source.append(concrete_output) - - # Add all concrete outputs to the project. - pbxp.AddOrGetFileInRootGroup(concrete_output) - - concrete_outputs_by_rule_source.append( \ - concrete_outputs_for_this_rule_source) - concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) - - # TODO(mark): Should verify that at most one of these is specified. - if int(rule.get('process_outputs_as_sources', False)): - for output in concrete_outputs_for_this_rule_source: - AddSourceToTarget(output, type, pbxp, xct) - - # If the file came from the mac_bundle_resources list or if the rule - # is marked to process outputs as bundle resource, do so. - was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources - if was_mac_bundle_resource or \ - int(rule.get('process_outputs_as_mac_bundle_resources', False)): - for output in concrete_outputs_for_this_rule_source: - AddResourceToTarget(output, pbxp, xct) - - # Do we have a message to print when this rule runs? - message = rule.get('message') - if message: - message = gyp.common.EncodePOSIXShellArgument(message) - message = ExpandXcodeVariables(message, rule_input_dict) - messages.append(message) - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(rule['action']) - - action = ExpandXcodeVariables(action_string, rule_input_dict) - actions.append(action) - - if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibilty for collision here. Consider - # target "t" rule "A_r" and target "t_A" rule "r". - makefile_name = '%s.make' % re.sub( - '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name'])) - makefile_path = os.path.join(xcode_projects[build_file].path, - makefile_name) - # TODO(mark): try/close? Write to a temporary file and swap it only - # if it's got changes? - makefile = open(makefile_path, 'wb') - - # make will build the first target in the makefile by default. By - # convention, it's called "all". List all (or at least one) - # concrete output for each rule source as a prerequisite of the "all" - # target. - makefile.write('all: \\\n') - for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): - # Only list the first (index [0]) concrete output of each input - # in the "all" target. Otherwise, a parallel make (-j > 1) would - # attempt to process each input multiple times simultaneously. - # Otherwise, "all" could just contain the entire list of - # concrete_outputs_all. - concrete_output = \ - concrete_outputs_by_rule_source[concrete_output_index][0] - if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (concrete_output, eol)) - - for (rule_source, concrete_outputs, message, action) in \ - zip(rule['rule_sources'], concrete_outputs_by_rule_source, - messages, actions): - makefile.write('\n') - - # Add a rule that declares it can build each concrete output of a - # rule source. Collect the names of the directories that are - # required. 
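What the "all" stanza writer above produces, as a runnable miniature (the paths are invented):

```python
concrete_outputs_by_rule_source = [
    ['gen/one.cc', 'gen/one.h'],   # outputs for rule source one.ext
    ['gen/two.cc', 'gen/two.h'],   # outputs for rule source two.ext
]

# Only the first output per source goes into "all", so a parallel make
# never runs the same recipe twice for one input.
firsts = [outputs[0] for outputs in concrete_outputs_by_rule_source]
lines = ['all: \\']
for i, first in enumerate(firsts):
    eol = '' if i == len(firsts) - 1 else ' \\'
    lines.append('    %s%s' % (first, eol))
print('\n'.join(lines))
```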
- concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): - concrete_output = concrete_outputs[concrete_output_index] - if concrete_output_index == 0: - bol = '' - else: - bol = ' ' - makefile.write('%s%s \\\n' % (bol, concrete_output)) - - concrete_output_dir = posixpath.dirname(concrete_output) - if (concrete_output_dir and - concrete_output_dir not in concrete_output_dirs): - concrete_output_dirs.append(concrete_output_dir) - - makefile.write(' : \\\n') - - # The prerequisites for this rule are the rule source itself and - # the set of additional rule inputs, if any. - prerequisites = [rule_source] - prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): - prerequisite = prerequisites[prerequisite_index] - if prerequisite_index == len(prerequisites) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (prerequisite, eol)) - - # Make sure that output directories exist before executing the rule - # action. - if len(concrete_output_dirs) > 0: - makefile.write('\t@mkdir -p "%s"\n' % - '" "'.join(concrete_output_dirs)) - - # The rule message and action have already had the necessary variable - # substitutions performed. - if message: - # Mark it with note: so Xcode picks it up in build output. - makefile.write('\t@echo note: %s\n' % message) - makefile.write('\t%s\n' % action) - - makefile.close() - - # It might be nice to ensure that needed output directories exist - # here rather than in each target in the Makefile, but that wouldn't - # work if there ever was a concrete output that had an input-dependent - # variable anywhere other than in the leaf position. - - # Don't declare any inputPaths or outputPaths. If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - - # To help speed things up, pass -j COUNT to make so it does some work - # in parallel. Don't use ncpus because Xcode will build ncpus targets - # in parallel and if each target happens to have a rules step, there - # would be ncpus^2 things going. With a machine that has 2 quad-core - # Xeons, a build can quickly run out of processes based on - # scheduling/other tasks, and randomly failing builds are no good. - script = \ -"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" -if [ "${JOB_COUNT}" -gt 4 ]; then - JOB_COUNT=4 -fi -exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" -exit 1 -""" % makefile_name - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'name': 'Rule "' + rule['rule_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. 
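The `JOB_COUNT` shell snippet above, restated in Python (the cap of 4 is the generator's choice; `os.cpu_count` is the Python 3 spelling):

```python
import os

# Cap make's parallelism: Xcode already builds ncpus targets at once, so
# an uncapped -j would schedule roughly ncpus^2 jobs.
job_count = min(os.cpu_count() or 1, 4)
print('make -f rules.make -j %d' % job_count)
```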
- xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # Extra rule inputs also go into the project file. Concrete outputs were - # already added when they were computed. - groups = ['inputs', 'inputs_excluded'] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith('_excluded')] - for group in groups: - for item in rule.get(group, []): - pbxp.AddOrGetFileInRootGroup(item) - - # Add "sources". - for source in spec.get('sources', []): - (source_root, source_extension) = posixpath.splitext(source) - if source_extension[1:] not in rules_by_ext: - # AddSourceToTarget will add the file to a root group if it's not - # already there. - AddSourceToTarget(source, type, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(source) - - # Add "mac_bundle_resources" and "mac_framework_private_headers" if - # it's a bundle of any type. - if is_bundle: - for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) - if resource_extension[1:] not in rules_by_ext: - AddResourceToTarget(resource, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(resource) - - for header in spec.get('mac_framework_private_headers', []): - AddHeaderToTarget(header, pbxp, xct, False) - - # Add "mac_framework_headers". These can be valid for both frameworks - # and static libraries. - if is_bundle or type == 'static_library': - for header in spec.get('mac_framework_headers', []): - AddHeaderToTarget(header, pbxp, xct, True) - - # Add "copies". - pbxcp_dict = {} - for copy_group in spec.get('copies', []): - dest = copy_group['destination'] - if dest[0] not in ('/', '$'): - # Relative paths are relative to $(SRCROOT). - dest = '$(SRCROOT)/' + dest - - # Coalesce multiple "copies" sections in the same target with the same - # "destination" property into the same PBXCopyFilesBuildPhase, otherwise - # they'll wind up with ID collisions. - pbxcp = pbxcp_dict.get(dest, None) - if pbxcp is None: - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ - 'name': 'Copy to ' + copy_group['destination'] - }, - parent=xct) - pbxcp.SetDestination(dest) - - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties['buildPhases'].insert(prebuild_index, pbxcp) - - pbxcp_dict[dest] = pbxcp - - for file in copy_group['files']: - pbxcp.AddFile(file) - - # Excluded files can also go into the project file. - if not skip_excluded_files: - for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers', - 'mac_framework_private_headers']: - excluded_key = key + '_excluded' - for item in spec.get(excluded_key, []): - pbxp.AddOrGetFileInRootGroup(item) - - # So can "inputs" and "outputs" sections of "actions" groups. - groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith('_excluded')] - for action in spec.get('actions', []): - for group in groups: - for item in action.get(group, []): - # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not - # sources. - if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): - pbxp.AddOrGetFileInRootGroup(item) - - for postbuild in spec.get('postbuilds', []): - action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) - script = 'exec ' + action_string_sh + '\nexit 1\n' - - # Make the postbuild step depend on the output of ld or ar from this - # target. 
Apparently putting the script step after the link step isn't - # sufficient to ensure proper ordering in all cases. With an input - # declared but no outputs, the script step should run every time, as - # desired. - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'], - 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - xct.AppendProperty('buildPhases', ssbp) - - # Add dependencies before libraries, because adding a dependency may imply - # adding a library. It's preferable to keep dependencies listed first - # during a link phase so that they can override symbols that would - # otherwise be provided by libraries, which will usually include system - # libraries. On some systems, ld is finicky and even requires the - # libraries to be ordered in such a way that unresolved symbols in - # earlier-listed libraries may only be resolved by later-listed libraries. - # The Mac linker doesn't work that way, but other platforms do, and so - # their linker invocations need to be constructed in this way. There's - # no compelling reason for Xcode's linker invocations to differ. - - if 'dependencies' in spec: - for dependency in spec['dependencies']: - xct.AddDependency(xcode_targets[dependency]) - # The support project also gets the dependencies (in case they are - # needed for the actions/rules to work). - if support_xct: - support_xct.AddDependency(xcode_targets[dependency]) - - if 'libraries' in spec: - for library in spec['libraries']: - xct.FrameworksPhase().AddFile(library) - # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. - # I wish Xcode handled this automatically. - library_dir = posixpath.dirname(library) - if library_dir not in xcode_standard_library_dirs and ( - not xct.HasBuildSetting(_library_search_paths_var) or - library_dir not in xct.GetBuildSetting(_library_search_paths_var)): - xct.AppendBuildSetting(_library_search_paths_var, library_dir) - - for configuration_name in configuration_names: - configuration = spec['configurations'][configuration_name] - xcbc = xct.ConfigurationNamed(configuration_name) - for include_dir in configuration.get('mac_framework_dirs', []): - xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) - for include_dir in configuration.get('include_dirs', []): - xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) - if 'defines' in configuration: - for define in configuration['defines']: - set_define = EscapeXCodeArgument(define) - xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) - if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): - xcbc.SetBuildSetting(xck, xcv) - if 'xcode_config_file' in configuration: - config_ref = pbxp.AddOrGetFileInRootGroup( - configuration['xcode_config_file']) - xcbc.SetBaseConfiguration(config_ref) - - build_files = [] - for build_file, build_file_dict in data.iteritems(): - if build_file.endswith('.gyp'): - build_files.append(build_file) - - for build_file in build_files: - xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) - - for build_file in build_files: - xcode_projects[build_file].Finalize2(xcode_targets, - xcode_target_to_target_dict) - - for build_file in build_files: - xcode_projects[build_file].Write() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/input.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/input.py --- 
tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/input.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/input.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,2679 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from compiler.ast import Const -from compiler.ast import Dict -from compiler.ast import Discard -from compiler.ast import List -from compiler.ast import Module -from compiler.ast import Node -from compiler.ast import Stmt -import compiler -import copy -import gyp.common -import multiprocessing -import optparse -import os.path -import re -import shlex -import signal -import subprocess -import sys -import threading -import time -from gyp.common import GypError - - -# A list of types that are treated as linkable. -linkable_types = ['executable', 'shared_library', 'loadable_module'] - -# A list of sections that contain links to other targets. -dependency_sections = ['dependencies', 'export_dependent_settings'] - -# base_path_sections is a list of sections defined by GYP that contain -# pathnames. The generators can provide more keys, the two lists are merged -# into path_sections, but you should call IsPathSection instead of using either -# list directly. -base_path_sections = [ - 'destination', - 'files', - 'include_dirs', - 'inputs', - 'libraries', - 'outputs', - 'sources', -] -path_sections = [] - -is_path_section_charset = set('=+?!') -is_path_section_match_re = re.compile('_(dir|file|path)s?$') - -def IsPathSection(section): - # If section ends in one of these characters, it's applied to a section - # without the trailing characters. '/' is notably absent from this list, - # because there's no way for a regular expression to be treated as a path. - while section[-1:] in is_path_section_charset: - section = section[:-1] - return section in path_sections or is_path_section_match_re.search(section) - -# base_non_configuraiton_keys is a list of key names that belong in the target -# itself and should not be propagated into its configurations. It is merged -# with a list that can come from the generator to -# create non_configuration_keys. -base_non_configuration_keys = [ - # Sections that must exist inside targets and not configurations. - 'actions', - 'configurations', - 'copies', - 'default_configuration', - 'dependencies', - 'dependencies_original', - 'link_languages', - 'libraries', - 'postbuilds', - 'product_dir', - 'product_extension', - 'product_name', - 'product_prefix', - 'rules', - 'run_as', - 'sources', - 'standalone_static_library', - 'suppress_wildcard', - 'target_name', - 'toolset', - 'toolsets', - 'type', - 'variants', - - # Sections that can be found inside targets or configurations, but that - # should not be propagated from targets into their configurations. - 'variables', -] -non_configuration_keys = [] - -# Keys that do not belong inside a configuration dictionary. -invalid_configuration_keys = [ - 'actions', - 'all_dependent_settings', - 'configurations', - 'dependencies', - 'direct_dependent_settings', - 'libraries', - 'link_settings', - 'sources', - 'standalone_static_library', - 'target_name', - 'type', -] - -# Controls how the generator want the build file paths. -absolute_build_file_paths = False - -# Controls whether or not the generator supports multiple toolsets. 
-multiple_toolsets = False - - -def GetIncludedBuildFiles(build_file_path, aux_data, included=None): - """Return a list of all build files included into build_file_path. - - The returned list will contain build_file_path as well as all other files - that it included, either directly or indirectly. Note that the list may - contain files that were included into a conditional section that evaluated - to false and was not merged into build_file_path's dict. - - aux_data is a dict containing a key for each build file or included build - file. Those keys provide access to dicts whose "included" keys contain - lists of all other files included by the build file. - - included should be left at its default None value by external callers. It - is used for recursion. - - The returned list will not contain any duplicate entries. Each build file - in the list will be relative to the current directory. - """ - - if included == None: - included = [] - - if build_file_path in included: - return included - - included.append(build_file_path) - - for included_build_file in aux_data[build_file_path].get('included', []): - GetIncludedBuildFiles(included_build_file, aux_data, included) - - return included - - -def CheckedEval(file_contents): - """Return the eval of a gyp file. - - The gyp file is restricted to dictionaries and lists only, and - repeated keys are not allowed. - - Note that this is slower than eval() is. - """ - - ast = compiler.parse(file_contents) - assert isinstance(ast, Module) - c1 = ast.getChildren() - assert c1[0] is None - assert isinstance(c1[1], Stmt) - c2 = c1[1].getChildren() - assert isinstance(c2[0], Discard) - c3 = c2[0].getChildren() - assert len(c3) == 1 - return CheckNode(c3[0], []) - - -def CheckNode(node, keypath): - if isinstance(node, Dict): - c = node.getChildren() - dict = {} - for n in range(0, len(c), 2): - assert isinstance(c[n], Const) - key = c[n].getChildren()[0] - if key in dict: - raise GypError("Key '" + key + "' repeated at level " + - repr(len(keypath) + 1) + " with key path '" + - '.'.join(keypath) + "'") - kp = list(keypath) # Make a copy of the list for descending this node. - kp.append(key) - dict[key] = CheckNode(c[n + 1], kp) - return dict - elif isinstance(node, List): - c = node.getChildren() - children = [] - for index, child in enumerate(c): - kp = list(keypath) # Copy list. - kp.append(repr(index)) - children.append(CheckNode(child, kp)) - return children - elif isinstance(node, Const): - return node.getChildren()[0] - else: - raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \ - "': " + repr(node) - - -def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, - is_target, check): - if build_file_path in data: - return data[build_file_path] - - if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() - else: - raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) - - build_file_data = None - try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, - None) - except SyntaxError, e: - e.filename = build_file_path - raise - except Exception, e: - gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) - raise - - data[build_file_path] = build_file_data - aux_data[build_file_path] = {} - - # Scan for includes and merge them in. 
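A minimal sketch of the restricted evaluation that `CheckedEval`/`CheckNode` above implement — assuming Python 3's `ast` module in place of the Python 2 `compiler` module the original imports; `checked_eval` and `_check` are hypothetical names, not part of gyp:

```python
# Illustrative sketch (not part of the original file): the same
# "literals only, no duplicate keys" policy as CheckedEval/CheckNode,
# expressed with the modern `ast` module instead of the removed
# Python 2 `compiler` module.
import ast

def checked_eval(file_contents):
    return _check(ast.parse(file_contents, mode='eval').body, [])

def _check(node, keypath):
    if isinstance(node, ast.Dict):
        result = {}
        for key_node, value_node in zip(node.keys, node.values):
            key = _check(key_node, keypath)
            if key in result:
                raise SyntaxError("Key %r repeated at key path %r" %
                                  (key, '.'.join(keypath)))
            result[key] = _check(value_node, keypath + [str(key)])
        return result
    if isinstance(node, ast.List):
        return [_check(child, keypath + [str(i)])
                for i, child in enumerate(node.elts)]
    if isinstance(node, ast.Constant):  # strings, ints, etc.
        return node.value
    raise TypeError("Unknown AST node at key path %r" % '.'.join(keypath))

# checked_eval("{'targets': [{'target_name': 'a'}]}") parses normally;
# checked_eval("{'a': 1, 'a': 2}") raises SyntaxError instead of
# silently keeping the last duplicate, mirroring CheckNode above.
```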
- try: - if is_target: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, includes, check) - else: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, None, check) - except Exception, e: - gyp.common.ExceptionAppend(e, - 'while reading includes of ' + build_file_path) - raise - - return build_file_data - - -def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, - variables, includes, check): - includes_list = [] - if includes != None: - includes_list.extend(includes) - if 'includes' in subdict: - for include in subdict['includes']: - # "include" is specified relative to subdict_path, so compute the real - # path to include by appending the provided "include" to the directory - # in which subdict_path resides. - relative_include = \ - os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) - includes_list.append(relative_include) - # Unhook the includes list, it's no longer needed. - del subdict['includes'] - - # Merge in the included files. - for include in includes_list: - if not 'included' in aux_data[subdict_path]: - aux_data[subdict_path]['included'] = [] - aux_data[subdict_path]['included'].append(include) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) - - MergeDicts(subdict, - LoadOneBuildFile(include, data, aux_data, variables, None, - False, check), - subdict_path, include) - - # Recurse into subdictionaries. - for k, v in subdict.iteritems(): - if v.__class__ == dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables, - None, check) - elif v.__class__ == list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables, - check) - - -# This recurses into lists so that it can look for dicts. -def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, - variables, check): - for item in sublist: - if item.__class__ == dict: - LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data, - variables, None, check) - elif item.__class__ == list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, - variables, check) - -# Processes toolsets in all the targets. This recurses into condition entries -# since they can contain toolsets as well. -def ProcessToolsetsInDict(data): - if 'targets' in data: - target_list = data['targets'] - new_target_list = [] - for target in target_list: - # If this target already has an explicit 'toolset', and no 'toolsets' - # list, don't modify it further. - if 'toolset' in target and 'toolsets' not in target: - new_target_list.append(target) - continue - if multiple_toolsets: - toolsets = target.get('toolsets', ['target']) - else: - toolsets = ['target'] - # Make sure this 'toolsets' definition is only processed once. - if 'toolsets' in target: - del target['toolsets'] - if len(toolsets) > 0: - # Optimization: only do copies if more than one toolset is specified. - for build in toolsets[1:]: - new_target = copy.deepcopy(target) - new_target['toolset'] = build - new_target_list.append(new_target) - target['toolset'] = toolsets[0] - new_target_list.append(target) - data['targets'] = new_target_list - if 'conditions' in data: - for condition in data['conditions']: - if isinstance(condition, list): - for condition_dict in condition[1:]: - ProcessToolsetsInDict(condition_dict) - - -# TODO(mark): I don't love this name. 
It just means that it's going to load -# a build file that contains targets and is expected to provide a targets dict -# that contains the targets... -def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, - depth, check, load_dependencies): - # If depth is set, predefine the DEPTH variable to be a relative path from - # this build file's directory to the directory identified by depth. - if depth: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) - if d == '': - variables['DEPTH'] = '.' - else: - variables['DEPTH'] = d.replace('\\', '/') - - # If the generator needs absolue paths, then do so. - if absolute_build_file_paths: - build_file_path = os.path.abspath(build_file_path) - - if build_file_path in data['target_build_files']: - # Already loaded. - return False - data['target_build_files'].add(build_file_path) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, - "Loading Target Build File '%s'", build_file_path) - - build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, - includes, True, check) - - # Store DEPTH for later use in generators. - build_file_data['_DEPTH'] = depth - - # Set up the included_files key indicating which .gyp files contributed to - # this target dict. - if 'included_files' in build_file_data: - raise GypError(build_file_path + ' must not contain included_files key') - - included = GetIncludedBuildFiles(build_file_path, aux_data) - build_file_data['included_files'] = [] - for included_file in included: - # included_file is relative to the current directory, but it needs to - # be made relative to build_file_path's directory. - included_relative = \ - gyp.common.RelativePath(included_file, - os.path.dirname(build_file_path)) - build_file_data['included_files'].append(included_relative) - - # Do a first round of toolsets expansion so that conditions can be defined - # per toolset. - ProcessToolsetsInDict(build_file_data) - - # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict( - build_file_data, PHASE_EARLY, variables, build_file_path) - - # Since some toolsets might have been defined conditionally, perform - # a second round of toolsets expansion now. - ProcessToolsetsInDict(build_file_data) - - # Look at each project's target_defaults dict, and merge settings into - # targets. - if 'target_defaults' in build_file_data: - if 'targets' not in build_file_data: - raise GypError("Unable to find targets in build file %s" % - build_file_path) - - index = 0 - while index < len(build_file_data['targets']): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data['targets'][index] - new_target_dict = copy.deepcopy(build_file_data['target_defaults']) - MergeDicts(new_target_dict, old_target_dict, - build_file_path, build_file_path) - build_file_data['targets'][index] = new_target_dict - index += 1 - - # No longer needed. 
- del build_file_data['target_defaults'] - - # Look for dependencies. This means that dependency resolution occurs - # after "pre" conditionals and variable expansion, but before "post" - - # in other words, you can't put a "dependencies" section inside a "post" - # conditional within a target. - - dependencies = [] - if 'targets' in build_file_data: - for target_dict in build_file_data['targets']: - if 'dependencies' not in target_dict: - continue - for dependency in target_dict['dependencies']: - dependencies.append( - gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) - - if load_dependencies: - for dependency in dependencies: - try: - LoadTargetBuildFile(dependency, data, aux_data, variables, - includes, depth, check, load_dependencies) - except Exception, e: - gyp.common.ExceptionAppend( - e, 'while loading dependencies of %s' % build_file_path) - raise - else: - return (build_file_path, dependencies) - - -def CallLoadTargetBuildFile(global_flags, - build_file_path, data, - aux_data, variables, - includes, depth, check): - """Wrapper around LoadTargetBuildFile for parallel processing. - - This wrapper is used when LoadTargetBuildFile is executed in - a worker process. - """ - - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - - # Apply globals so that the worker process behaves the same. - for key, value in global_flags.iteritems(): - globals()[key] = value - - # Save the keys so we can return data that changed. - data_keys = set(data) - aux_data_keys = set(aux_data) - - result = LoadTargetBuildFile(build_file_path, data, - aux_data, variables, - includes, depth, check, False) - if not result: - return result - - (build_file_path, dependencies) = result - - data_out = {} - for key in data: - if key == 'target_build_files': - continue - if key not in data_keys: - data_out[key] = data[key] - aux_data_out = {} - for key in aux_data: - if key not in aux_data_keys: - aux_data_out[key] = aux_data[key] - - # This gets serialized and sent back to the main process via a pipe. - # It's handled in LoadTargetBuildFileCallback. - return (build_file_path, - data_out, - aux_data_out, - dependencies) - except Exception, e: - print >>sys.stderr, 'Exception: ', e - return None - - -class ParallelProcessingError(Exception): - pass - - -class ParallelState(object): - """Class to keep track of state when processing input files in parallel. - - If build files are loaded in parallel, use this to keep track of - state during farming out and processing parallel jobs. It's stored - in a global so that the callback function can have access to it. - """ - - def __init__(self): - # The multiprocessing pool. - self.pool = None - # The condition variable used to protect this object and notify - # the main loop when there might be more data to process. - self.condition = None - # The "data" dict that was passed to LoadTargetBuildFileParallel - self.data = None - # The "aux_data" dict that was passed to LoadTargetBuildFileParallel - self.aux_data = None - # The number of parallel calls outstanding; decremented when a response - # was received. - self.pending = 0 - # The set of all build files that have been scheduled, so we don't - # schedule the same one twice. - self.scheduled = set() - # A list of dependency build file paths that haven't been scheduled yet. - self.dependencies = [] - # Flag to indicate if there was an error in a child process. - self.error = False - - def LoadTargetBuildFileCallback(self, result): - """Handle the results of running LoadTargetBuildFile in another process. 
- """ - self.condition.acquire() - if not result: - self.error = True - self.condition.notify() - self.condition.release() - return - (build_file_path0, data0, aux_data0, dependencies0) = result - self.data['target_build_files'].add(build_file_path0) - for key in data0: - self.data[key] = data0[key] - for key in aux_data0: - self.aux_data[key] = aux_data0[key] - for new_dependency in dependencies0: - if new_dependency not in self.scheduled: - self.scheduled.add(new_dependency) - self.dependencies.append(new_dependency) - self.pending -= 1 - self.condition.notify() - self.condition.release() - - -def LoadTargetBuildFileParallel(build_file_path, data, aux_data, - variables, includes, depth, check): - parallel_state = ParallelState() - parallel_state.condition = threading.Condition() - parallel_state.dependencies = [build_file_path] - parallel_state.scheduled = set([build_file_path]) - parallel_state.pending = 0 - parallel_state.data = data - parallel_state.aux_data = aux_data - - try: - parallel_state.condition.acquire() - while parallel_state.dependencies or parallel_state.pending: - if parallel_state.error: - print >>sys.stderr, ( - '\n' - 'Note: an error occurred while running gyp using multiprocessing.\n' - 'For more verbose output, set GYP_PARALLEL=0 in your environment.\n' - 'If the error only occurs when GYP_PARALLEL=1, ' - 'please report a bug!') - break - if not parallel_state.dependencies: - parallel_state.condition.wait() - continue - - dependency = parallel_state.dependencies.pop() - - parallel_state.pending += 1 - data_in = {} - data_in['target_build_files'] = data['target_build_files'] - aux_data_in = {} - global_flags = { - 'path_sections': globals()['path_sections'], - 'non_configuration_keys': globals()['non_configuration_keys'], - 'absolute_build_file_paths': globals()['absolute_build_file_paths'], - 'multiple_toolsets': globals()['multiple_toolsets']} - - if not parallel_state.pool: - parallel_state.pool = multiprocessing.Pool(8) - parallel_state.pool.apply_async( - CallLoadTargetBuildFile, - args = (global_flags, dependency, - data_in, aux_data_in, - variables, includes, depth, check), - callback = parallel_state.LoadTargetBuildFileCallback) - except KeyboardInterrupt, e: - parallel_state.pool.terminate() - raise e - - parallel_state.condition.release() - if parallel_state.error: - sys.exit() - - -# Look for the bracket that matches the first bracket seen in a -# string, and return the start and end as a tuple. For example, if -# the input is something like "<(foo <(bar)) blah", then it would -# return (1, 13), indicating the entire string except for the leading -# "<" and trailing " blah". -LBRACKETS= set('{[(') -BRACKETS = {'}': '{', ']': '[', ')': '('} -def FindEnclosingBracketGroup(input_str): - stack = [] - start = -1 - for index, char in enumerate(input_str): - if char in LBRACKETS: - stack.append(char) - if start == -1: - start = index - elif char in BRACKETS: - if not stack: - return (-1, -1) - if stack.pop() != BRACKETS[char]: - return (-1, -1) - if not stack: - return (start, index + 1) - return (-1, -1) - - -canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$') - - -def IsStrCanonicalInt(string): - """Returns True if |string| is in its canonical integer form. - - The canonical form is such that str(int(string)) == string. - """ - return isinstance(string, str) and canonical_int_re.match(string) - - -# This matches things like "<(asdf)", "(?P<(?:(?:!?@?)|\|)?)' - '(?P[-a-zA-Z0-9_.]+)?' 
- '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# This matches the same as early_variable_re, but with '>' instead of '<'. -late_variable_re = re.compile( - '(?P(?P>(?:(?:!?@?)|\|)?)' - '(?P[-a-zA-Z0-9_.]+)?' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# This matches the same as early_variable_re, but with '^' instead of '<'. -latelate_variable_re = re.compile( - '(?P(?P[\^](?:(?:!?@?)|\|)?)' - '(?P[-a-zA-Z0-9_.]+)?' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# Global cache of results from running commands so they don't have to be run -# more then once. -cached_command_results = {} - - -def FixupPlatformCommand(cmd): - if sys.platform == 'win32': - if type(cmd) == list: - cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:] - else: - cmd = re.sub('^cat ', 'type ', cmd) - return cmd - - -PHASE_EARLY = 0 -PHASE_LATE = 1 -PHASE_LATELATE = 2 - - -def ExpandVariables(input, phase, variables, build_file): - # Look for the pattern that gets expanded into variables - if phase == PHASE_EARLY: - variable_re = early_variable_re - expansion_symbol = '<' - elif phase == PHASE_LATE: - variable_re = late_variable_re - expansion_symbol = '>' - elif phase == PHASE_LATELATE: - variable_re = latelate_variable_re - expansion_symbol = '^' - else: - assert False - - input_str = str(input) - if IsStrCanonicalInt(input_str): - return int(input_str) - - # Do a quick scan to determine if an expensive regex search is warranted. - if expansion_symbol not in input_str: - return input_str - - # Get the entire list of matches as a list of MatchObject instances. - # (using findall here would return strings instead of MatchObjects). - matches = list(variable_re.finditer(input_str)) - if not matches: - return input_str - - output = input_str - # Reverse the list of matches so that replacements are done right-to-left. - # That ensures that earlier replacements won't mess up the string in a - # way that causes later calls to find the earlier substituted text instead - # of what's intended for replacement. - matches.reverse() - for match_group in matches: - match = match_group.groupdict() - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match) - # match['replace'] is the substring to look for, match['type'] - # is the character code for the replacement type (< > ! <| >| <@ - # >@ !@), match['is_array'] contains a '[' for command - # arrays, and match['content'] is the name of the variable (< >) - # or command to run (!). match['command_string'] is an optional - # command string. Currently, only 'pymod_do_main' is supported. - - # run_command is true if a ! variant is used. - run_command = '!' in match['type'] - command_string = match['command_string'] - - # file_list is true if a | variant is used. - file_list = '|' in match['type'] - - # Capture these now so we can adjust them later. - replace_start = match_group.start('replace') - replace_end = match_group.end('replace') - - # Find the ending paren, and re-evaluate the contained string. - (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) - - # Adjust the replacement range to match the entire command - # found by FindEnclosingBracketGroup (since the variable_re - # probably doesn't match the entire command if it contained - # nested variables). - replace_end = replace_start + c_end - - # Find the "real" replacement, matching the appropriate closing - # paren, and adjust the replacement start and end. - replacement = input_str[replace_start:replace_end] - - # Figure out what the contents of the variable parens are. 
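The bracket-matching contract documented above can be checked directly; a short sketch, assuming `FindEnclosingBracketGroup` is in scope as defined:

```python
# The example from the comment above: the returned span runs from the
# first bracket to its matching closer, excluding the leading "<" and
# the trailing " blah".
s = '<(foo <(bar)) blah'
start, end = FindEnclosingBracketGroup(s)
print((start, end))   # -> (1, 13)
print(s[start:end])   # -> '(foo <(bar))'
```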
- contents_start = replace_start + c_start + 1 - contents_end = replace_end - 1 - contents = input_str[contents_start:contents_end] - - # Do filter substitution now for <|(). - # Admittedly, this is different than the evaluation order in other - # contexts. However, since filtration has no chance to run on <|(), - # this seems like the only obvious way to give them access to filters. - if file_list: - processed_variables = copy.deepcopy(variables) - ProcessListFiltersInDict(contents, processed_variables) - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, - processed_variables, build_file) - else: - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, variables, build_file) - - # Strip off leading/trailing whitespace so that variable matches are - # simpler below (and because they are rarely needed). - contents = contents.strip() - - # expand_to_list is true if an @ variant is used. In that case, - # the expansion should result in a list. Note that the caller - # is to be expecting a list in return, and not all callers do - # because not all are working in list context. Also, for list - # expansions, there can be no other text besides the variable - # expansion in the input string. - expand_to_list = '@' in match['type'] and input_str == replacement - - if run_command or file_list: - # Find the build file's directory, so commands can be run or file lists - # generated relative to it. - build_file_dir = os.path.dirname(build_file) - if build_file_dir == '': - # If build_file is just a leaf filename indicating a file in the - # current directory, build_file_dir might be an empty string. Set - # it to None to signal to subprocess.Popen that it should run the - # command in the current directory. - build_file_dir = None - - # Support <|(listfile.txt ...) which generates a file - # containing items from a gyp list, generated at gyp time. - # This works around actions/rules which have more inputs than will - # fit on the command line. - if file_list: - if type(contents) == list: - contents_list = contents - else: - contents_list = contents.split(' ') - replacement = contents_list[0] - path = replacement - if not os.path.isabs(path): - path = os.path.join(build_file_dir, path) - f = gyp.common.WriteOnDiff(path) - for i in contents_list[1:]: - f.write('%s\n' % i) - f.close() - - elif run_command: - use_shell = True - if match['is_array']: - contents = eval(contents) - use_shell = False - - # Check for a cached value to avoid executing commands, or generating - # file lists more than once. - # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is - # possible that the command being invoked depends on the current - # directory. For that case the syntax needs to be extended so that the - # directory is also used in cache_key (it becomes a tuple). - # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, - # someone could author a set of GYP files where each time the command - # is invoked it produces different output by design. When the need - # arises, the syntax should be extended to support no caching off a - # command's output so it is run every time. - cache_key = str(contents) - cached_value = cached_command_results.get(cache_key, None) - if cached_value is None: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Executing command '%s' in directory '%s'", - contents, build_file_dir) - - replacement = '' - - if command_string == 'pymod_do_main': - # (sources/) etc. 
to resolve to - # and empty list if undefined. This allows actions to: - # 'action!': [ - # '>@(_sources!)', - # ], - # 'action/': [ - # '>@(_sources/)', - # ], - replacement = [] - else: - raise GypError('Undefined variable ' + contents + - ' in ' + build_file) - else: - replacement = variables[contents] - - if isinstance(replacement, list): - for item in replacement: - if (not contents[-1] == '/' and - not isinstance(item, str) and not isinstance(item, int)): - raise GypError('Variable ' + contents + - ' must expand to a string or list of strings; ' + - 'list contains a ' + - item.__class__.__name__) - # Run through the list and handle variable expansions in it. Since - # the list is guaranteed not to contain dicts, this won't do anything - # with conditions sections. - ProcessVariablesAndConditionsInList(replacement, phase, variables, - build_file) - elif not isinstance(replacement, str) and \ - not isinstance(replacement, int): - raise GypError('Variable ' + contents + - ' must expand to a string or list of strings; ' + - 'found a ' + replacement.__class__.__name__) - - if expand_to_list: - # Expanding in list context. It's guaranteed that there's only one - # replacement to do in |input_str| and that it's this replacement. See - # above. - if isinstance(replacement, list): - # If it's already a list, make a copy. - output = replacement[:] - else: - # Split it the same way sh would split arguments. - output = shlex.split(str(replacement)) - else: - # Expanding in string context. - encoded_replacement = '' - if isinstance(replacement, list): - # When expanding a list into string context, turn the list items - # into a string in a way that will work with a subprocess call. - # - # TODO(mark): This isn't completely correct. This should - # call a generator-provided function that observes the - # proper list-to-argument quoting rules on a specific - # platform instead of just calling the POSIX encoding - # routine. - encoded_replacement = gyp.common.EncodePOSIXShellList(replacement) - else: - encoded_replacement = replacement - - output = output[:replace_start] + str(encoded_replacement) + \ - output[replace_end:] - # Prepare for the next match iteration. - input_str = output - - # Look for more matches now that we've replaced some, to deal with - # expanding local variables (variables defined in the same - # variables block as this one). - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output) - if isinstance(output, list): - if output and isinstance(output[0], list): - # Leave output alone if it's a list of lists. - # We don't want such lists to be stringified. - pass - else: - new_output = [] - for item in output: - new_output.append( - ExpandVariables(item, phase, variables, build_file)) - output = new_output - else: - output = ExpandVariables(output, phase, variables, build_file) - - # Convert all strings that are canonically-represented integers into integers. - if isinstance(output, list): - for index in xrange(0, len(output)): - if IsStrCanonicalInt(output[index]): - output[index] = int(output[index]) - elif IsStrCanonicalInt(output): - output = int(output) - - return output - - -def ProcessConditionsInDict(the_dict, phase, variables, build_file): - # Process a 'conditions' or 'target_conditions' section in the_dict, - # depending on phase. 
-  # early -> conditions
-  # late -> target_conditions
-  # latelate -> no conditions
-  #
-  # Each item in a conditions list consists of cond_expr, a string expression
-  # evaluated as the condition, and true_dict, a dict that will be merged into
-  # the_dict if cond_expr evaluates to true. Optionally, a third item,
-  # false_dict, may be present. false_dict is merged into the_dict if
-  # cond_expr evaluates to false.
-  #
-  # Any dict merged into the_dict will be recursively processed for nested
-  # conditionals and other expansions, also according to phase, immediately
-  # prior to being merged.
-
-  if phase == PHASE_EARLY:
-    conditions_key = 'conditions'
-  elif phase == PHASE_LATE:
-    conditions_key = 'target_conditions'
-  elif phase == PHASE_LATELATE:
-    return
-  else:
-    assert False
-
-  if not conditions_key in the_dict:
-    return
-
-  conditions_list = the_dict[conditions_key]
-  # Unhook the conditions list, it's no longer needed.
-  del the_dict[conditions_key]
-
-  for condition in conditions_list:
-    if not isinstance(condition, list):
-      raise GypError(conditions_key + ' must be a list')
-    if len(condition) != 2 and len(condition) != 3:
-      # It's possible that condition[0] won't work in which case this
-      # attempt will raise its own IndexError. That's probably fine.
-      raise GypError(conditions_key + ' ' + condition[0] +
-                     ' must be length 2 or 3, not ' + str(len(condition)))
-
-    [cond_expr, true_dict] = condition[0:2]
-    false_dict = None
-    if len(condition) == 3:
-      false_dict = condition[2]
-
-    # Do expansions on the condition itself. Since the condition can naturally
-    # contain variable references without needing to resort to GYP expansion
-    # syntax, this is of dubious value for variables, but someone might want to
-    # use a command expansion directly inside a condition.
-    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
-                                         build_file)
-    if not isinstance(cond_expr_expanded, str) and \
-       not isinstance(cond_expr_expanded, int):
-      raise ValueError, \
-            'Variable expansion in this context permits str and int ' + \
-            'only, found ' + cond_expr_expanded.__class__.__name__
-
-    try:
-      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
-
-      if eval(ast_code, {'__builtins__': None}, variables):
-        merge_dict = true_dict
-      else:
-        merge_dict = false_dict
-    except SyntaxError, e:
-      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
-                                 'at character %d.' %
-                                 (str(e.args[0]), e.text, build_file, e.offset),
-                                 e.filename, e.lineno, e.offset, e.text)
-      raise syntax_error
-    except NameError, e:
-      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
-                                 (cond_expr_expanded, build_file))
-      raise GypError(e)
-
-    if merge_dict != None:
-      # Expand variables and nested conditionals in the merge_dict before
-      # merging it.
-      ProcessVariablesAndConditionsInDict(merge_dict, phase,
-                                          variables, build_file)
-
-      MergeDicts(the_dict, merge_dict, build_file, build_file)
-
-
-def LoadAutomaticVariablesFromDict(variables, the_dict):
-  # Any keys with plain string values in the_dict become automatic variables.
-  # The variable name is the key name with a "_" character prepended.
-  for key, value in the_dict.iteritems():
-    if isinstance(value, str) or isinstance(value, int) or \
-       isinstance(value, list):
-      variables['_' + key] = value
-
-
-def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
-  # Any key in the_dict's "variables" dict, if it has one, becomes a
-  # variable. The variable name is the key name in the "variables" dict.
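The condition evaluation above boils down to compiling the expanded expression and evaluating it with builtins disabled, so only the supplied variable bindings resolve; a standalone sketch with made-up bindings:

```python
# Sketch of how a GYP condition string is evaluated against the variables
# dict, mirroring ProcessConditionsInDict above; the bindings are made up.
variables = {'OS': 'mac', 'component': 'shared_library'}
cond_expr_expanded = 'OS=="mac" and component=="shared_library"'
ast_code = compile(cond_expr_expanded, '<string>', 'eval')
print(eval(ast_code, {'__builtins__': None}, variables))  # -> True
```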
- # Variables that end with the % character are set only if they are unset in - # the variables dict. the_dict_key is the name of the key that accesses - # the_dict in the_dict's parent dict. If the_dict's parent is not a dict - # (it could be a list or it could be parentless because it is a root dict), - # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): - if not isinstance(value, str) and not isinstance(value, int) and \ - not isinstance(value, list): - continue - - if key.endswith('%'): - variable_name = key[:-1] - if variable_name in variables: - # If the variable is already set, don't set it. - continue - if the_dict_key is 'variables' and variable_name in the_dict: - # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a varaibles sub-dict of a - # variables dict), use the_dict's definition. - value = the_dict[variable_name] - else: - variable_name = key - - variables[variable_name] = value - - -def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, - build_file, the_dict_key=None): - """Handle all variable and command expansion and conditional evaluation. - - This function is the public entry point for all variable expansions and - conditional evaluations. The variables_in dictionary will not be modified - by this function. - """ - - # Make a copy of the variables_in dict that can be modified during the - # loading of automatics and the loading of the variables dict. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - - if 'variables' in the_dict: - # Make sure all the local variables are added to the variables - # list before we process them so that you can reference one - # variable from another. They will be fully expanded by recursion - # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): - variables[key] = value - - # Handle the associated variables dict first, so that any variable - # references within can be resolved prior to using them as variables. - # Pass a copy of the variables dict to avoid having it be tainted. - # Otherwise, it would have extra automatics added for everything that - # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, - variables, build_file, 'variables') - - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - for key, value in the_dict.iteritems(): - # Skip "variables", which was already processed if present. - if key != 'variables' and isinstance(value, str): - expanded = ExpandVariables(value, phase, variables, build_file) - if not isinstance(expanded, str) and not isinstance(expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ + ' for ' + key - the_dict[key] = expanded - - # Variable expansion may have resulted in changes to automatics. Reload. - # TODO(mark): Optimization: only reload if no changes were made. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Process conditions in this dict. This is done after variable expansion - # so that conditions may take advantage of expanded variables. For example, - # if the_dict contains: - # {'type': '<(library_type)', - # 'conditions': [['_type=="static_library"', { ... 
}]]}, - # _type, as used in the condition, will only be set to the value of - # library_type if variable expansion is performed before condition - # processing. However, condition processing should occur prior to recursion - # so that variables (both automatic and "variables" dict type) may be - # adjusted by conditions sections, merged into the_dict, and have the - # intended impact on contained dicts. - # - # This arrangement means that a "conditions" section containing a "variables" - # section will only have those variables effective in subdicts, not in - # the_dict. The workaround is to put a "conditions" section within a - # "variables" section. For example: - # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will not result in "IS_MAC" being appended to the "defines" list in the - # current scope but would result in it being appended to the "defines" list - # within "my_subdict". By comparison: - # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will append "IS_MAC" to both "defines" lists. - - # Evaluate conditions sections, allowing variable expansions within them - # as well as nested conditionals. This will process a 'conditions' or - # 'target_conditions' section, perform appropriate merging and recursive - # conditional and variable processing, and then remove the conditions section - # from the_dict if it is present. - ProcessConditionsInDict(the_dict, phase, variables, build_file) - - # Conditional processing may have resulted in changes to automatics or the - # variables dict. Reload. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Recurse into child dicts, or process child lists which may result in - # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): - # Skip "variables" and string values, which were already processed if - # present. - if key == 'variables' or isinstance(value, str): - continue - if isinstance(value, dict): - # Pass a copy of the variables dict so that subdicts can't influence - # parents. - ProcessVariablesAndConditionsInDict(value, phase, variables, - build_file, key) - elif isinstance(value, list): - # The list itself can't influence the variables dict, and - # ProcessVariablesAndConditionsInList will make copies of the variables - # dict if it needs to pass it to something that can influence it. No - # copy is necessary here. - ProcessVariablesAndConditionsInList(value, phase, variables, - build_file) - elif not isinstance(value, int): - raise TypeError, 'Unknown type ' + value.__class__.__name__ + \ - ' for ' + key - - -def ProcessVariablesAndConditionsInList(the_list, phase, variables, - build_file): - # Iterate using an index so that new values can be assigned into the_list. - index = 0 - while index < len(the_list): - item = the_list[index] - if isinstance(item, dict): - # Make a copy of the variables dict so that it won't influence anything - # outside of its own scope. 
- ProcessVariablesAndConditionsInDict(item, phase, variables, build_file) - elif isinstance(item, list): - ProcessVariablesAndConditionsInList(item, phase, variables, build_file) - elif isinstance(item, str): - expanded = ExpandVariables(item, phase, variables, build_file) - if isinstance(expanded, str) or isinstance(expanded, int): - the_list[index] = expanded - elif isinstance(expanded, list): - the_list[index:index+1] = expanded - index += len(expanded) - - # index now identifies the next item to examine. Continue right now - # without falling into the index increment below. - continue - else: - raise ValueError, \ - 'Variable expansion in this context permits strings and ' + \ - 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \ - index - elif not isinstance(item, int): - raise TypeError, 'Unknown type ' + item.__class__.__name__ + \ - ' at index ' + index - index = index + 1 - - -def BuildTargetsDict(data): - """Builds a dict mapping fully-qualified target names to their target dicts. - - |data| is a dict mapping loaded build files by pathname relative to the - current directory. Values in |data| are build file contents. For each - |data| value with a "targets" key, the value of the "targets" key is taken - as a list containing target dicts. Each target's fully-qualified name is - constructed from the pathname of the build file (|data| key) and its - "target_name" property. These fully-qualified names are used as the keys - in the returned dict. These keys provide access to the target dicts, - the dicts in the "targets" lists. - """ - - targets = {} - for build_file in data['target_build_files']: - for target in data[build_file].get('targets', []): - target_name = gyp.common.QualifiedTarget(build_file, - target['target_name'], - target['toolset']) - if target_name in targets: - raise GypError('Duplicate target definitions for ' + target_name) - targets[target_name] = target - - return targets - - -def QualifyDependencies(targets): - """Make dependency links fully-qualified relative to the current directory. - - |targets| is a dict mapping fully-qualified target names to their target - dicts. For each target in this dict, keys known to contain dependency - links are examined, and any dependencies referenced will be rewritten - so that they are fully-qualified and relative to the current directory. - All rewritten dependencies are suitable for use as keys to |targets| or a - similar dict. - """ - - all_dependency_sections = [dep + op - for dep in dependency_sections - for op in ('', '!', '/')] - - for target, target_dict in targets.iteritems(): - target_build_file = gyp.common.BuildFile(target) - toolset = target_dict['toolset'] - for dependency_key in all_dependency_sections: - dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): - dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dependencies[index], toolset) - if not multiple_toolsets: - # Ignore toolset specification in the dependency if it is specified. - dep_toolset = toolset - dependency = gyp.common.QualifiedTarget(dep_file, - dep_target, - dep_toolset) - dependencies[index] = dependency - - # Make sure anything appearing in a list other than "dependencies" also - # appears in the "dependencies" list. 
- if dependency_key != 'dependencies' and \ - dependency not in target_dict['dependencies']: - raise GypError('Found ' + dependency + ' in ' + dependency_key + - ' of ' + target + ', but not in dependencies') - - -def ExpandWildcardDependencies(targets, data): - """Expands dependencies specified as build_file:*. - - For each target in |targets|, examines sections containing links to other - targets. If any such section contains a link of the form build_file:*, it - is taken as a wildcard link, and is expanded to list each target in - build_file. The |data| dict provides access to build file dicts. - - Any target that does not wish to be included by wildcard can provide an - optional "suppress_wildcard" key in its target dict. When present and - true, a wildcard dependency link will not include such targets. - - All dependency names, including the keys to |targets| and the values in each - dependency list, must be qualified when this function is called. - """ - - for target, target_dict in targets.iteritems(): - toolset = target_dict['toolset'] - target_build_file = gyp.common.BuildFile(target) - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - - # Loop this way instead of "for dependency in" or "for index in xrange" - # because the dependencies list will be modified within the loop body. - index = 0 - while index < len(dependencies): - (dependency_build_file, dependency_target, dependency_toolset) = \ - gyp.common.ParseQualifiedTarget(dependencies[index]) - if dependency_target != '*' and dependency_toolset != '*': - # Not a wildcard. Keep it moving. - index = index + 1 - continue - - if dependency_build_file == target_build_file: - # It's an error for a target to depend on all other targets in - # the same file, because a target cannot depend on itself. - raise GypError('Found wildcard in ' + dependency_key + ' of ' + - target + ' referring to same build file') - - # Take the wildcard out and adjust the index so that the next - # dependency in the list will be processed the next time through the - # loop. - del dependencies[index] - index = index - 1 - - # Loop through the targets in the other build file, adding them to - # this target's list of dependencies in place of the removed - # wildcard. 
- dependency_target_dicts = data[dependency_build_file]['targets'] - for dependency_target_dict in dependency_target_dicts: - if int(dependency_target_dict.get('suppress_wildcard', False)): - continue - dependency_target_name = dependency_target_dict['target_name'] - if (dependency_target != '*' and - dependency_target != dependency_target_name): - continue - dependency_target_toolset = dependency_target_dict['toolset'] - if (dependency_toolset != '*' and - dependency_toolset != dependency_target_toolset): - continue - dependency = gyp.common.QualifiedTarget(dependency_build_file, - dependency_target_name, - dependency_target_toolset) - index = index + 1 - dependencies.insert(index, dependency) - - index = index + 1 - - -def Unify(l): - """Removes duplicate elements from l, keeping the first element.""" - seen = {} - return [seen.setdefault(e, e) for e in l if e not in seen] - - -def RemoveDuplicateDependencies(targets): - """Makes sure every dependency appears only once in all targets's dependency - lists.""" - for target_name, target_dict in targets.iteritems(): - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - if dependencies: - target_dict[dependency_key] = Unify(dependencies) - - -def Filter(l, item): - """Removes item from l.""" - res = {} - return [res.setdefault(e, e) for e in l if e != item] - - -def RemoveSelfDependencies(targets): - """Remove self dependencies from targets that have the prune_self_dependency - variable set.""" - for target_name, target_dict in targets.iteritems(): - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - if dependencies: - for t in dependencies: - if t == target_name: - if targets[t].get('variables', {}).get('prune_self_dependency', 0): - target_dict[dependency_key] = Filter(dependencies, target_name) - - -class DependencyGraphNode(object): - """ - - Attributes: - ref: A reference to an object that this DependencyGraphNode represents. - dependencies: List of DependencyGraphNodes on which this one depends. - dependents: List of DependencyGraphNodes that depend on this one. - """ - - class CircularException(GypError): - pass - - def __init__(self, ref): - self.ref = ref - self.dependencies = [] - self.dependents = [] - - def FlattenToList(self): - # flat_list is the sorted list of dependencies - actually, the list items - # are the "ref" attributes of DependencyGraphNodes. Every target will - # appear in flat_list after all of its dependencies, and before all of its - # dependents. - flat_list = [] - - # in_degree_zeros is the list of DependencyGraphNodes that have no - # dependencies not in flat_list. Initially, it is a copy of the children - # of this node, because when the graph was built, nodes with no - # dependencies were made implicit dependents of the root node. - in_degree_zeros = set(self.dependents[:]) - - while in_degree_zeros: - # Nodes in in_degree_zeros have no dependencies not in flat_list, so they - # can be appended to flat_list. Take these nodes out of in_degree_zeros - # as work progresses, so that the next node to process from the list can - # always be accessed at a consistent position. - node = in_degree_zeros.pop() - flat_list.append(node.ref) - - # Look at dependents of the node just added to flat_list. Some of them - # may now belong in in_degree_zeros. 
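The in_degree_zeros bookkeeping described above is Kahn's topological sort; a self-contained sketch of the same idea, using a hypothetical `flatten` helper over plain dicts rather than DependencyGraphNode objects:

```python
# Sketch of the flattening algorithm used by FlattenToList above
# (Kahn's topological sort). `flatten` is a hypothetical standalone
# helper, not part of gyp.
from collections import defaultdict

def flatten(dependencies):
    """dependencies maps node -> list of nodes it depends on;
    every node must appear as a key."""
    dependents = defaultdict(list)  # reverse edges
    unmet = {}                      # node -> count of unmet dependencies
    for node, deps in dependencies.items():
        unmet[node] = len(deps)
        for dep in deps:
            dependents[dep].append(node)
    # Nodes with no dependencies are ready immediately, like the implicit
    # dependents of the root node in the code above.
    ready = [node for node, count in unmet.items() if count == 0]
    flat_list = []
    while ready:
        node = ready.pop()
        flat_list.append(node)
        # A dependent becomes ready once all of its dependencies are
        # already in flat_list.
        for dependent in dependents[node]:
            unmet[dependent] -= 1
            if unmet[dependent] == 0:
                ready.append(dependent)
    if len(flat_list) != len(unmet):
        raise ValueError('cycle in dependency graph')  # cf. CircularException
    return flat_list

# flatten({'a': ['b'], 'b': ['c'], 'c': []}) -> ['c', 'b', 'a']
```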
-      for node_dependent in node.dependents:
-        is_in_degree_zero = True
-        for node_dependent_dependency in node_dependent.dependencies:
-          if not node_dependent_dependency.ref in flat_list:
-            # The dependent has one or more dependencies not in flat_list.
-            # There will be more chances to add it to flat_list when examining
-            # it again as a dependent of those other dependencies, provided
-            # that there are no cycles.
-            is_in_degree_zero = False
-            break
-
-        if is_in_degree_zero:
-          # All of the dependent's dependencies are already in flat_list. Add
-          # it to in_degree_zeros where it will be processed in a future
-          # iteration of the outer loop.
-          in_degree_zeros.add(node_dependent)
-
-    return flat_list
-
-  def DirectDependencies(self, dependencies=None):
-    """Returns a list of just direct dependencies."""
-    if dependencies == None:
-      dependencies = []
-
-    for dependency in self.dependencies:
-      # Check for None, corresponding to the root node.
-      if dependency.ref != None and dependency.ref not in dependencies:
-        dependencies.append(dependency.ref)
-
-    return dependencies
-
-  def _AddImportedDependencies(self, targets, dependencies=None):
-    """Given a list of direct dependencies, adds indirect dependencies that
-    other dependencies have declared to export their settings.
-
-    This method does not operate on self. Rather, it operates on the list
-    of dependencies in the |dependencies| argument. For each dependency in
-    that list, if any declares that it exports the settings of one of its
-    own dependencies, those dependencies whose settings are "passed through"
-    are added to the list. As new items are added to the list, they too will
-    be processed, so it is possible to import settings through multiple levels
-    of dependencies.
-
-    This method is not terribly useful on its own, it depends on being
-    "primed" with a list of direct dependencies such as one provided by
-    DirectDependencies. DirectAndImportedDependencies is intended to be the
-    public entry point.
-    """
-
-    if dependencies == None:
-      dependencies = []
-
-    index = 0
-    while index < len(dependencies):
-      dependency = dependencies[index]
-      dependency_dict = targets[dependency]
-      # Add any dependencies whose settings should be imported to the list
-      # if not already present. Newly-added items will be checked for
-      # their own imports when the list iteration reaches them.
-      # Rather than simply appending new items, insert them after the
-      # dependency that exported them. This is done to more closely match
-      # the depth-first method used by DeepDependencies.
-      add_index = 1
-      for imported_dependency in \
-          dependency_dict.get('export_dependent_settings', []):
-        if imported_dependency not in dependencies:
-          dependencies.insert(index + add_index, imported_dependency)
-          add_index = add_index + 1
-      index = index + 1
-
-    return dependencies
-
-  def DirectAndImportedDependencies(self, targets, dependencies=None):
-    """Returns a list of a target's direct dependencies and all indirect
-    dependencies that a dependency has advertised settings should be exported
-    through the dependency for.
-    """
-
-    dependencies = self.DirectDependencies(dependencies)
-    return self._AddImportedDependencies(targets, dependencies)
-
-  def DeepDependencies(self, dependencies=None):
-    """Returns a list of all of a target's dependencies, recursively."""
-    if dependencies == None:
-      dependencies = []
-
-    for dependency in self.dependencies:
-      # Check for None, corresponding to the root node.
- if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - dependency.DeepDependencies(dependencies) - - return dependencies - - def LinkDependencies(self, targets, dependencies=None, initial=True): - """Returns a list of dependency targets that are linked into this target. - - This function has a split personality, depending on the setting of - |initial|. Outside callers should always leave |initial| at its default - setting. - - When adding a target to the list of dependencies, this function will - recurse into itself with |initial| set to False, to collect dependencies - that are linked into the linkable target for which the list is being built. - """ - if dependencies == None: - dependencies = [] - - # Check for None, corresponding to the root node. - if self.ref == None: - return dependencies - - # It's kind of sucky that |targets| has to be passed into this function, - # but that's presently the easiest way to access the target dicts so that - # this function can find target types. - - if 'target_name' not in targets[self.ref]: - raise GypError("Missing 'target_name' field in target.") - - if 'type' not in targets[self.ref]: - raise GypError("Missing 'type' field in target %s" % - targets[self.ref]['target_name']) - - target_type = targets[self.ref]['type'] - - is_linkable = target_type in linkable_types - - if initial and not is_linkable: - # If this is the first target being examined and it's not linkable, - # return an empty list of link dependencies, because the link - # dependencies are intended to apply to the target itself (initial is - # True) and this target won't be linked. - return dependencies - - # Don't traverse 'none' targets if explicitly excluded. - if (target_type == 'none' and - not targets[self.ref].get('dependencies_traverse', True)): - if self.ref not in dependencies: - dependencies.append(self.ref) - return dependencies - - # Executables and loadable modules are already fully and finally linked. - # Nothing else can be a link dependency of them, there can only be - # dependencies in the sense that a dependent target might run an - # executable or load the loadable_module. - if not initial and target_type in ('executable', 'loadable_module'): - return dependencies - - # The target is linkable, add it to the list of link dependencies. - if self.ref not in dependencies: - dependencies.append(self.ref) - if initial or not is_linkable: - # If this is a subsequent target and it's linkable, don't look any - # further for linkable dependencies, as they'll already be linked into - # this target linkable. Always look at dependencies of the initial - # target, and always look at dependencies of non-linkables. - for dependency in self.dependencies: - dependency.LinkDependencies(targets, dependencies, False) - - return dependencies - - -def BuildDependencyList(targets): - # Create a DependencyGraphNode for each target. Put it into a dict for easy - # access. - dependency_nodes = {} - for target, spec in targets.iteritems(): - if target not in dependency_nodes: - dependency_nodes[target] = DependencyGraphNode(target) - - # Set up the dependency links. Targets that have no dependencies are treated - # as dependent on root_node. 
-  root_node = DependencyGraphNode(None)
-  for target, spec in targets.iteritems():
-    target_node = dependency_nodes[target]
-    target_build_file = gyp.common.BuildFile(target)
-    dependencies = spec.get('dependencies')
-    if not dependencies:
-      target_node.dependencies = [root_node]
-      root_node.dependents.append(target_node)
-    else:
-      for dependency in dependencies:
-        dependency_node = dependency_nodes.get(dependency)
-        if not dependency_node:
-          raise GypError("Dependency '%s' not found while "
-                         "trying to load target %s" % (dependency, target))
-        target_node.dependencies.append(dependency_node)
-        dependency_node.dependents.append(target_node)
-
-  flat_list = root_node.FlattenToList()
-
-  # If there's anything left unvisited, there must be a circular dependency
-  # (cycle). If you need to figure out what's wrong, look for elements of
-  # targets that are not in flat_list.
-  if len(flat_list) != len(targets):
-    raise DependencyGraphNode.CircularException(
-        'Some targets not reachable, cycle in dependency graph detected: ' +
-        ' '.join(set(flat_list) ^ set(targets)))
-
-  return [dependency_nodes, flat_list]
-
-
-def VerifyNoGYPFileCircularDependencies(targets):
-  # Create a DependencyGraphNode for each gyp file containing a target. Put
-  # it into a dict for easy access.
-  dependency_nodes = {}
-  for target in targets.iterkeys():
-    build_file = gyp.common.BuildFile(target)
-    if not build_file in dependency_nodes:
-      dependency_nodes[build_file] = DependencyGraphNode(build_file)
-
-  # Set up the dependency links.
-  for target, spec in targets.iteritems():
-    build_file = gyp.common.BuildFile(target)
-    build_file_node = dependency_nodes[build_file]
-    target_dependencies = spec.get('dependencies', [])
-    for dependency in target_dependencies:
-      try:
-        dependency_build_file = gyp.common.BuildFile(dependency)
-      except GypError, e:
-        gyp.common.ExceptionAppend(
-            e, 'while computing dependencies of .gyp file %s' % build_file)
-        raise
-
-      if dependency_build_file == build_file:
-        # A .gyp file is allowed to refer back to itself.
-        continue
-      dependency_node = dependency_nodes.get(dependency_build_file)
-      if not dependency_node:
-        raise GypError("Dependency '%s' not found" % dependency_build_file)
-      if dependency_node not in build_file_node.dependencies:
-        build_file_node.dependencies.append(dependency_node)
-        dependency_node.dependents.append(build_file_node)
-
-
-  # Files that have no dependencies are treated as dependent on root_node.
-  root_node = DependencyGraphNode(None)
-  for build_file_node in dependency_nodes.itervalues():
-    if len(build_file_node.dependencies) == 0:
-      build_file_node.dependencies.append(root_node)
-      root_node.dependents.append(build_file_node)
-
-  flat_list = root_node.FlattenToList()
-
-  # If there's anything left unvisited, there must be a circular dependency
-  # (cycle).
-  if len(flat_list) != len(dependency_nodes):
-    bad_files = []
-    for file in dependency_nodes.iterkeys():
-      if not file in flat_list:
-        bad_files.append(file)
-    raise DependencyGraphNode.CircularException, \
-        'Some files not reachable, cycle in .gyp file dependency graph ' + \
-        'detected involving some or all of: ' + \
-        ' '.join(bad_files)
-
-
-def DoDependentSettings(key, flat_list, targets, dependency_nodes):
-  # key should be one of all_dependent_settings, direct_dependent_settings,
-  # or link_settings.
- - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - - if key == 'all_dependent_settings': - dependencies = dependency_nodes[target].DeepDependencies() - elif key == 'direct_dependent_settings': - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) - elif key == 'link_settings': - dependencies = dependency_nodes[target].LinkDependencies(targets) - else: - raise GypError("DoDependentSettings doesn't know how to determine " - 'dependencies for ' + key) - - for dependency in dependencies: - dependency_dict = targets[dependency] - if not key in dependency_dict: - continue - dependency_build_file = gyp.common.BuildFile(dependency) - MergeDicts(target_dict, dependency_dict[key], - build_file, dependency_build_file) - - - def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, - sort_dependencies): - # Recompute target "dependencies" properties. For each static library - # target, remove "dependencies" entries referring to other static libraries, - # unless the dependency has the "hard_dependency" attribute set. For each - # linkable target, add a "dependencies" entry referring to all of the - # target's computed list of link dependencies (including static libraries) - # if no such entry is already present. - for target in flat_list: - target_dict = targets[target] - target_type = target_dict['type'] - - if target_type == 'static_library': - if not 'dependencies' in target_dict: - continue - - target_dict['dependencies_original'] = target_dict.get( - 'dependencies', [])[:] - - # A static library should not depend on another static library unless - # the dependency relationship is "hard," which should only be done when - # a dependent relies on some side effect other than just the build - # product, like a rule or action output. Further, if a target has a - # non-hard dependency, but that dependency exports a hard dependency, - # the non-hard dependency can safely be removed, but the exported hard - # dependency must be added to the target to keep the same dependency - # ordering. - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - - # Remove every non-hard static library dependency and remove every - # non-static library dependency that isn't a direct dependency. - if (dependency_dict['type'] == 'static_library' and \ - not dependency_dict.get('hard_dependency', False)) or \ - (dependency_dict['type'] != 'static_library' and \ - not dependency in target_dict['dependencies']): - # Take the dependency out of the list, and don't increment index - # because the next dependency to analyze will shift into the index - # formerly occupied by the one being removed. - del dependencies[index] - else: - index = index + 1 - - # Update the dependencies. If the dependencies list is empty, it's not - # needed, so unhook it. - if len(dependencies) > 0: - target_dict['dependencies'] = dependencies - else: - del target_dict['dependencies'] - - elif target_type in linkable_types: - # Get a list of dependency targets that should be linked into this - # target. Add them to the dependencies list if they're not already - # present. 
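The pruning rule that AdjustStaticLibraryDependencies applies to static_library targets can be restated as a hypothetical standalone filter (all names below are assumptions for illustration):

```python
# A static_library keeps a dependency only if it is a "hard" static library
# (its side effects, e.g. rule/action outputs, matter) or a non-static
# direct dependency; soft static deps are left for the linkable ancestor.
def prune_static_deps(direct_and_imported, target_deps, targets):
    kept = []
    for dep in direct_and_imported:
        d = targets[dep]
        if d['type'] == 'static_library':
            if d.get('hard_dependency', False):
                kept.append(dep)
        elif dep in target_deps:
            kept.append(dep)
    return kept

targets = {
    'gen':  {'type': 'static_library', 'hard_dependency': True},
    'util': {'type': 'static_library'},
    'dl':   {'type': 'shared_library'},
}
print(prune_static_deps(['gen', 'util', 'dl'], ['gen', 'util', 'dl'], targets))
# -> ['gen', 'dl']   ('util' is soft, so the linkable ancestor links it)
```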
- - link_dependencies = dependency_nodes[target].LinkDependencies(targets) - for dependency in link_dependencies: - if dependency == target: - continue - if not 'dependencies' in target_dict: - target_dict['dependencies'] = [] - if not dependency in target_dict['dependencies']: - target_dict['dependencies'].append(dependency) - # Sort the dependencies list in the order from dependents to dependencies. - # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D. - # Note: flat_list is already sorted in the order from dependencies to - # dependents. - if sort_dependencies and 'dependencies' in target_dict: - target_dict['dependencies'] = [dep for dep in reversed(flat_list) - if dep in target_dict['dependencies']] - - - # Initialize this here to speed up MakePathRelative. - exception_re = re.compile(r'''["']?[-/$<>^]''') - - - def MakePathRelative(to_file, fro_file, item): - # If item is a relative path, it's relative to the build file dict that it's - # coming from. Fix it up to make it relative to the build file dict that - # it's going into. - # Exception: any |item| that begins with these special characters is - # returned without modification. - # / Used when a path is already absolute (shortcut optimization; - # such paths would be returned as absolute anyway) - # $ Used for build environment variables - # - Used for some build environment flags (such as -lapr-1 in a - # "libraries" section) - # < Used for our own variable and command expansions (see ExpandVariables) - # > Used for our own variable and command expansions (see ExpandVariables) - # ^ Used for our own variable and command expansions (see ExpandVariables) - # - # "/' Used when a value is quoted. If these are present, then we - # check the second character instead. - # - if to_file == fro_file or exception_re.match(item): - return item - else: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - ret = os.path.normpath(os.path.join( - gyp.common.RelativePath(os.path.dirname(fro_file), - os.path.dirname(to_file)), - item)).replace('\\', '/') - if item[-1] == '/': - ret += '/' - return ret - - def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): - # Python documentation recommends that objects which do not support - # hashing set this value to None. Python library objects follow this rule. - is_hashable = lambda val: val.__hash__ - - # If x is hashable, returns whether x is in s. Else returns whether x is in l. - def is_in_set_or_list(x, s, l): - if is_hashable(x): - return x in s - return x in l - - prepend_index = 0 - - # Make membership testing of hashables in |to| (in particular, strings) - # faster. - hashable_to_set = set(x for x in to if is_hashable(x)) - for item in fro: - singleton = False - if isinstance(item, str) or isinstance(item, int): - # The cheap and easy case. - if is_paths: - to_item = MakePathRelative(to_file, fro_file, item) - else: - to_item = item - - if not isinstance(item, str) or not item.startswith('-'): - # Any string that doesn't begin with a "-" is a singleton - it can - # only appear once in a list, to be enforced by the list merge append - # or prepend. - singleton = True - elif isinstance(item, dict): - # Make a copy of the dictionary, continuing to look for paths to fix. - # The other intelligent aspects of merge processing won't apply because - # item is being merged into an empty dict. 
- to_item = {} - MergeDicts(to_item, item, to_file, fro_file) - elif isinstance(item, list): - # Recurse, making a copy of the list. If the list contains any - # descendant dicts, path fixing will occur. Note that here, custom - # values for is_paths and append are dropped; those are only to be - # applied to |to| and |fro|, not sublists of |fro|. append shouldn't - # matter anyway because the new |to_item| list is empty. - to_item = [] - MergeLists(to_item, item, to_file, fro_file) - else: - raise TypeError, \ - 'Attempt to merge list item of unsupported type ' + \ - item.__class__.__name__ - - if append: - # If appending a singleton that's already in the list, don't append. - # This ensures that the earliest occurrence of the item will stay put. - if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to): - to.append(to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - else: - # If prepending a singleton that's already in the list, remove the - # existing instance and proceed with the prepend. This ensures that the - # item appears at the earliest possible position in the list. - while singleton and to_item in to: - to.remove(to_item) - - # Don't just insert everything at index 0. That would prepend the new - # items to the list in reverse order, which would be an unwelcome - # surprise. - to.insert(prepend_index, to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - prepend_index = prepend_index + 1 - - -def MergeDicts(to, fro, to_file, fro_file): - # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): - # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give - # copy semantics. Something else may want to merge from the |fro| dict - # later, and having the same dict ref pointed to twice in the tree isn't - # what anyone wants considering that the dicts may subsequently be - # modified. - if k in to: - bad_merge = False - if isinstance(v, str) or isinstance(v, int): - if not (isinstance(to[k], str) or isinstance(to[k], int)): - bad_merge = True - elif v.__class__ != to[k].__class__: - bad_merge = True - - if bad_merge: - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[k].__class__.__name__ + \ - ' for key ' + k - if isinstance(v, str) or isinstance(v, int): - # Overwrite the existing value, if any. Cheap and easy. - is_path = IsPathSection(k) - if is_path: - to[k] = MakePathRelative(to_file, fro_file, v) - else: - to[k] = v - elif isinstance(v, dict): - # Recurse, guaranteeing copies will be made of objects that require it. - if not k in to: - to[k] = {} - MergeDicts(to[k], v, to_file, fro_file) - elif isinstance(v, list): - # Lists in dicts can be merged with different policies, depending on - # how the key in the "from" dict (k, the from-key) is written. - # - # If the from-key has ...the to-list will have this action - # this character appended:... applied when receiving the from-list: - # = replace - # + prepend - # ? set, only if to-list does not yet exist - # (none) append - # - # This logic is list-specific, but since it relies on the associated - # dict key, it's checked in this dict-oriented function. 
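A toy illustration of the four list-merge policies tabulated above; `merge_list_key` is a hypothetical simplification, not gyp's MergeDicts/MergeLists pair, and it skips the incompatible-suffix checks shown below.

```python
# One function, four policies: '=' replace, '+' prepend, '?' set-if-absent,
# no suffix appends.
def merge_list_key(to, key, fro_list):
    base, ext = (key[:-1], key[-1]) if key[-1] in '=+?' else (key, '')
    if ext == '?' and base in to:
        return                                 # set only if absent
    if ext == '=' or base not in to:
        to[base] = []                          # replace (or create)
    if ext == '+':
        to[base] = list(fro_list) + to[base]   # prepend
    else:
        to[base] = to[base] + list(fro_list)   # append

to = {'defines': ['A'], 'cflags': ['-O2']}
merge_list_key(to, 'defines',  ['B'])    # append  -> ['A', 'B']
merge_list_key(to, 'cflags=',  ['-O0'])  # replace -> ['-O0']
merge_list_key(to, 'cflags+',  ['-g'])   # prepend -> ['-g', '-O0']
merge_list_key(to, 'sources?', ['x.c'])  # set     -> ['x.c'] (was absent)
print(to)
```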
- ext = k[-1] - append = True - if ext == '=': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '?'] - to[list_base] = [] - elif ext == '+': - list_base = k[:-1] - lists_incompatible = [list_base + '=', list_base + '?'] - append = False - elif ext == '?': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '=', list_base + '+'] - else: - list_base = k - lists_incompatible = [list_base + '=', list_base + '?'] - - # Some combinations of merge policies appearing together are meaningless. - # It's stupid to replace and append simultaneously, for example. Append - # and prepend are the only policies that can coexist. - for list_incompatible in lists_incompatible: - if list_incompatible in fro: - raise GypError('Incompatible list policies ' + k + ' and ' + - list_incompatible) - - if list_base in to: - if ext == '?': - # If the key ends in "?", the list will only be merged if it doesn't - # already exist. - continue - if not isinstance(to[list_base], list): - # This may not have been checked above if merging in a list with an - # extension character. - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[list_base].__class__.__name__ + \ - ' for key ' + list_base + ' (' + k + ')' - else: - to[list_base] = [] - - # Call MergeLists, which will make copies of objects that require it. - # MergeLists can recurse back into MergeDicts, although this will be - # to make copies of dicts (with paths fixed); there will be no - # subsequent dict "merging" once entering a list because lists are - # always replaced, appended to, or prepended to. - is_paths = IsPathSection(list_base) - MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) - else: - raise TypeError, \ - 'Attempt to merge dict value of unsupported type ' + \ - v.__class__.__name__ + ' for key ' + k - - - def MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, visited): - # Skip if previously visited. - if configuration in visited: - return - - # Look at this configuration. - configuration_dict = target_dict['configurations'][configuration] - - # Merge in parents. - for parent in configuration_dict.get('inherit_from', []): - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, parent, visited + [configuration]) - - # Merge it into the new config. - MergeDicts(new_configuration_dict, configuration_dict, - build_file, build_file) - - # Drop abstract. - if 'abstract' in new_configuration_dict: - del new_configuration_dict['abstract'] - - - def SetUpConfigurations(target, target_dict): - # key_suffixes is a list of key suffixes that might appear on key names. - # These suffixes are handled in conditional evaluations (for =, +, and ?) - # and rules/exclude processing (for ! and /). Keys with these suffixes - # should be treated the same as keys without. - key_suffixes = ['=', '+', '?', '!', '/'] - - build_file = gyp.common.BuildFile(target) - - # Provide a single configuration by default if none exists. - # TODO(mark): Signal an error if default_configuration exists but - # configurations does not. 
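MergeConfigWithInheritance above merges parents depth-first before the configuration itself and drops the 'abstract' marker. A rough standalone equivalent, under assumptions (hypothetical `resolve_config`; real gyp routes everything through MergeDicts):

```python
def resolve_config(configs, name, visited=()):
    """Merge parents first (depth-first), then the config itself;
    lists append, scalars overwrite -- a rough stand-in for MergeDicts."""
    if name in visited:                      # guard against inherit cycles
        return {}
    merged = {}
    for parent in configs[name].get('inherit_from', []):
        for k, v in resolve_config(configs, parent, visited + (name,)).items():
            merged[k] = merged.get(k, []) + v if isinstance(v, list) else v
    for k, v in configs[name].items():
        if k in ('inherit_from', 'abstract'):
            continue                         # markers don't survive the merge
        merged[k] = merged.get(k, []) + v if isinstance(v, list) else v
    return merged

configs = {
    'Common': {'abstract': 1, 'defines': ['COMMON']},
    'Debug':  {'inherit_from': ['Common'], 'defines': ['DEBUG']},
}
print(resolve_config(configs, 'Debug'))  # {'defines': ['COMMON', 'DEBUG']}
```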
- if not 'configurations' in target_dict: - target_dict['configurations'] = {'Default': {}} - if not 'default_configuration' in target_dict: - concrete = [i for i in target_dict['configurations'].iterkeys() - if not target_dict['configurations'][i].get('abstract')] - target_dict['default_configuration'] = sorted(concrete)[0] - - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - # Skip abstract configurations (saves work only). - if old_configuration_dict.get('abstract'): - continue - # Configurations inherit (most) settings from the enclosing target scope. - # Get the inheritance relationship right by making a copy of the target - # dict. - new_configuration_dict = copy.deepcopy(target_dict) - - # Take out the bits that don't belong in a "configurations" section. - # Since configuration setup is done before conditional, exclude, and rules - # processing, be careful with handling of the suffix characters used in - # those phases. - delete_keys = [] - for key in new_configuration_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if key_base in non_configuration_keys: - delete_keys.append(key) - - for key in delete_keys: - del new_configuration_dict[key] - - # Merge in configuration (with all its parents first). - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, []) - - # Put the new result back into the target dict as a configuration. - target_dict['configurations'][configuration] = new_configuration_dict - - # Now drop all the abstract ones. - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - if old_configuration_dict.get('abstract'): - del target_dict['configurations'][configuration] - - # Now that all of the target's configurations have been built, go through - # the target dict's keys and remove everything that's been moved into a - # "configurations" section. - delete_keys = [] - for key in target_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if not key_base in non_configuration_keys: - delete_keys.append(key) - for key in delete_keys: - del target_dict[key] - - # Check the configurations to see if they contain invalid keys. - for configuration in target_dict['configurations'].keys(): - configuration_dict = target_dict['configurations'][configuration] - for key in configuration_dict.keys(): - if key in invalid_configuration_keys: - raise GypError('%s not allowed in the %s configuration, found in ' - 'target %s' % (key, configuration, target)) - - - -def ProcessListFiltersInDict(name, the_dict): - """Process regular expression and exclusion-based filters on lists. - - An exclusion list is in a dict key named with a trailing "!", like - "sources!". Every item in such a list is removed from the associated - main list, which in this example, would be "sources". Removed items are - placed into a "sources_excluded" list in the dict. - - Regular expression (regex) filters are contained in dict keys named with a - trailing "/", such as "sources/" to operate on the "sources" list. Regex - filters in a dict take the form: - 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'], - ['include', '_mac\\.cc$'] ], - The first filter says to exclude all files ending in _linux.cc, _mac.cc, and - _win.cc. 
The second filter then includes all files ending in _mac.cc that - are now or were once in the "sources" list. Items matching an "exclude" - filter are subject to the same processing as would occur if they were listed - by name in an exclusion list (ending in "!"). Items matching an "include" - filter are brought back into the main list if previously excluded by an - exclusion list or exclusion regex filter. Subsequent matching "exclude" - patterns can still cause items to be excluded after matching an "include". - """ - - # Look through the dictionary for any lists whose keys end in "!" or "/". - # These are lists that will be treated as exclude lists and regular - # expression-based exclude/include lists. Collect the lists that are - # needed first, looking for the lists that they operate on, and assemble - # them into |lists|. This is done in a separate loop up front, because - # the _included and _excluded keys need to be added to the_dict, and that - # can't be done while iterating through it. - - lists = [] - del_lists = [] - for key, value in the_dict.iteritems(): - operation = key[-1] - if operation != '!' and operation != '/': - continue - - if not isinstance(value, list): - raise ValueError, name + ' key ' + key + ' must be list, not ' + \ - value.__class__.__name__ - - list_key = key[:-1] - if list_key not in the_dict: - # This happens when there's a list like "sources!" but no corresponding - # "sources" list. Since there's nothing for it to operate on, queue up - # the "sources!" list for deletion now. - del_lists.append(key) - continue - - if not isinstance(the_dict[list_key], list): - raise ValueError, name + ' key ' + list_key + \ - ' must be list, not ' + \ - value.__class__.__name__ + ' when applying ' + \ - {'!': 'exclusion', '/': 'regex'}[operation] - - if not list_key in lists: - lists.append(list_key) - - # Delete the lists that are known to be unneeded at this point. - for del_list in del_lists: - del the_dict[del_list] - - for list_key in lists: - the_list = the_dict[list_key] - - # Initialize the list_actions list, which is parallel to the_list. Each - # item in list_actions identifies whether the corresponding item in - # the_list should be excluded, unconditionally preserved (included), or - # whether no exclusion or inclusion has been applied. Items for which - # no exclusion or inclusion has been applied (yet) have value -1, items - # excluded have value 0, and items included have value 1. Includes and - # excludes override previous actions. All items in list_actions are - # initialized to -1 because no excludes or includes have been processed - # yet. - list_actions = list((-1,) * len(the_list)) - - exclude_key = list_key + '!' - if exclude_key in the_dict: - for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): - if exclude_item == the_list[index]: - # This item matches the exclude_item, so set its action to 0 - # (exclude). - list_actions[index] = 0 - - # The "whatever!" list is no longer needed, dump it. - del the_dict[exclude_key] - - regex_key = list_key + '/' - if regex_key in the_dict: - for regex_item in the_dict[regex_key]: - [action, pattern] = regex_item - pattern_re = re.compile(pattern) - - if action == 'exclude': - # This item matches an exclude regex, so set its value to 0 (exclude). - action_value = 0 - elif action == 'include': - # This item matches an include regex, so set its value to 1 (include). - action_value = 1 - else: - # This is an action that doesn't make any sense. 
- raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \ - ' key ' + regex_key - - for index in xrange(0, len(the_list)): - list_item = the_list[index] - if list_actions[index] == action_value: - # Even if the regex matches, nothing will change so continue (regex - # searches are expensive). - continue - if pattern_re.search(list_item): - # Regular expression match. - list_actions[index] = action_value - - # The "whatever/" list is no longer needed, dump it. - del the_dict[regex_key] - - # Add excluded items to the excluded list. - # - # Note that exclude_key ("sources!") is different from excluded_key - # ("sources_excluded"). The exclude_key list is input and it was already - # processed and deleted; the excluded_key list is output and it's about - # to be created. - excluded_key = list_key + '_excluded' - if excluded_key in the_dict: - raise GypError(name + ' key ' + excluded_key + - ' must not be present prior' - ' to applying exclusion/regex filters for ' + list_key) - - excluded_list = [] - - # Go backwards through the list_actions list so that as items are deleted, - # the indices of items that haven't been seen yet don't shift. That means - # that things need to be prepended to excluded_list to maintain them in the - # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): - if list_actions[index] == 0: - # Dump anything with action 0 (exclude). Keep anything with action 1 - # (include) or -1 (no include or exclude seen for the item). - excluded_list.insert(0, the_list[index]) - del the_list[index] - - # If anything was excluded, put the excluded list into the_dict at - # excluded_key. - if len(excluded_list) > 0: - the_dict[excluded_key] = excluded_list - - # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.iteritems(): - if isinstance(value, dict): - ProcessListFiltersInDict(key, value) - elif isinstance(value, list): - ProcessListFiltersInList(key, value) - - - def ProcessListFiltersInList(name, the_list): - for item in the_list: - if isinstance(item, dict): - ProcessListFiltersInDict(name, item) - elif isinstance(item, list): - ProcessListFiltersInList(name, item) - - - def ValidateTargetType(target, target_dict): - """Ensures the 'type' field on the target is one of the known types. - - Arguments: - target: string, name of target. - target_dict: dict, target spec. - - Raises an exception on error. - """ - VALID_TARGET_TYPES = ('executable', 'loadable_module', - 'static_library', 'shared_library', - 'none') - target_type = target_dict.get('type', None) - if target_type not in VALID_TARGET_TYPES: - raise GypError("Target %s has an invalid target type '%s'. " - "Must be one of %s." % - (target, target_type, '/'.join(VALID_TARGET_TYPES))) - if (target_dict.get('standalone_static_library', 0) and - not target_type == 'static_library'): - raise GypError('Target %s has type %s but standalone_static_library flag is' - ' only valid for static_library type.' % (target, - target_type)) - - - def ValidateSourcesInTarget(target, target_dict, build_file): - # TODO: Check if MSVC allows this for loadable_module targets. 
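The exclusion ('!') and regex ('/') filter pass above condenses into a toy helper; `apply_filters` and its flattened return shape are assumptions for illustration, but the action flags (-1/0/1, later rules overriding earlier ones) mirror the deleted code.

```python
import re

def apply_filters(the_list, excludes=(), regexes=()):
    # -1 = untouched, 0 = exclude, 1 = include; later matches override.
    actions = [-1] * len(the_list)
    for item in excludes:
        for i, x in enumerate(the_list):
            if x == item:
                actions[i] = 0
    for action, pattern in regexes:
        value = 0 if action == 'exclude' else 1
        for i, x in enumerate(the_list):
            if re.search(pattern, x):
                actions[i] = value
    kept = [x for x, a in zip(the_list, actions) if a != 0]
    dropped = [x for x, a in zip(the_list, actions) if a == 0]
    return kept, dropped

sources = ['a.cc', 'a_linux.cc', 'a_mac.cc', 'a_win.cc']
kept, dropped = apply_filters(
    sources,
    regexes=[('exclude', r'_(linux|mac|win)\.cc$'), ('include', r'_mac\.cc$')])
print(kept)     # ['a.cc', 'a_mac.cc']
print(dropped)  # ['a_linux.cc', 'a_win.cc']
```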
- if target_dict.get('type', None) not in ('static_library', 'shared_library'): - return - sources = target_dict.get('sources', []) - basenames = {} - for source in sources: - name, ext = os.path.splitext(source) - is_compiled_file = ext in [ - '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] - if not is_compiled_file: - continue - basename = os.path.basename(name) # Don't include extension. - basenames.setdefault(basename, []).append(source) - - error = '' - for basename, files in basenames.iteritems(): - if len(files) > 1: - error += ' %s: %s\n' % (basename, ' '.join(files)) - - if error: - print('static library %s has several files with the same basename:\n' % - target + error + 'Some build systems, e.g. MSVC08, ' - 'cannot handle that.') - raise GypError('Duplicate basenames in sources section, see list above') - - -def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): - """Ensures that the rules sections in target_dict are valid and consistent, - and determines which sources they apply to. - - Arguments: - target: string, name of target. - target_dict: dict, target spec containing "rules" and "sources" lists. - extra_sources_for_rules: a list of keys to scan for rule matches in - addition to 'sources'. - """ - - # Dicts to map between values found in rules' 'rule_name' and 'extension' - # keys and the rule dicts themselves. - rule_names = {} - rule_extensions = {} - - rules = target_dict.get('rules', []) - for rule in rules: - # Make sure that there's no conflict among rule names and extensions. - rule_name = rule['rule_name'] - if rule_name in rule_names: - raise GypError('rule %s exists in duplicate, target %s' % - (rule_name, target)) - rule_names[rule_name] = rule - - rule_extension = rule['extension'] - if rule_extension in rule_extensions: - raise GypError(('extension %s associated with multiple rules, ' + - 'target %s rules %s and %s') % - (rule_extension, target, - rule_extensions[rule_extension]['rule_name'], - rule_name)) - rule_extensions[rule_extension] = rule - - # Make sure rule_sources isn't already there. It's going to be - # created below if needed. - if 'rule_sources' in rule: - raise GypError( - 'rule_sources must not exist in input, target %s rule %s' % - (target, rule_name)) - extension = rule['extension'] - - rule_sources = [] - source_keys = ['sources'] - source_keys.extend(extra_sources_for_rules) - for source_key in source_keys: - for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) - if source_extension.startswith('.'): - source_extension = source_extension[1:] - if source_extension == extension: - rule_sources.append(source) - - if len(rule_sources) > 0: - rule['rule_sources'] = rule_sources - - -def ValidateRunAsInTarget(target, target_dict, build_file): - target_name = target_dict.get('target_name') - run_as = target_dict.get('run_as') - if not run_as: - return - if not isinstance(run_as, dict): - raise GypError("The 'run_as' in target %s from file %s should be a " - "dictionary." % - (target_name, build_file)) - action = run_as.get('action') - if not action: - raise GypError("The 'run_as' in target %s from file %s must have an " - "'action' section." % - (target_name, build_file)) - if not isinstance(action, list): - raise GypError("The 'action' for 'run_as' in target %s from file %s " - "must be a list." 
% - (target_name, build_file)) - working_directory = run_as.get('working_directory') - if working_directory and not isinstance(working_directory, str): - raise GypError("The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % - (target_name, build_file)) - environment = run_as.get('environment') - if environment and not isinstance(environment, dict): - raise GypError("The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % - (target_name, build_file)) - - - def ValidateActionsInTarget(target, target_dict, build_file): - '''Validates the inputs to the actions in a target.''' - target_name = target_dict.get('target_name') - actions = target_dict.get('actions', []) - for action in actions: - action_name = action.get('action_name') - if not action_name: - raise GypError("Anonymous action in target %s. " - "An action must have an 'action_name' field." % - target_name) - inputs = action.get('inputs', None) - if inputs is None: - raise GypError('Action in target %s has no inputs.' % target_name) - action_command = action.get('action') - if action_command and not action_command[0]: - raise GypError("Empty action as command in target %s." % target_name) - - - def TurnIntIntoStrInDict(the_dict): - """Given dict the_dict, recursively converts all integers into strings. - """ - # Use items instead of iteritems because there's no need to try to look at - # reinserted keys and their associated values. - for k, v in the_dict.items(): - if isinstance(v, int): - v = str(v) - the_dict[k] = v - elif isinstance(v, dict): - TurnIntIntoStrInDict(v) - elif isinstance(v, list): - TurnIntIntoStrInList(v) - - if isinstance(k, int): - the_dict[str(k)] = v - del the_dict[k] - - - def TurnIntIntoStrInList(the_list): - """Given list the_list, recursively converts all integers into strings. - """ - for index in xrange(0, len(the_list)): - item = the_list[index] - if isinstance(item, int): - the_list[index] = str(item) - elif isinstance(item, dict): - TurnIntIntoStrInDict(item) - elif isinstance(item, list): - TurnIntIntoStrInList(item) - - - def VerifyNoCollidingTargets(targets): - """Verify that no two targets in the same directory share the same name. - - Arguments: - targets: A list of targets in the form 'path/to/file.gyp:target_name'. - """ - # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'. - used = {} - for target in targets: - # Separate out 'path/to/file.gyp', 'target_name' from - # 'path/to/file.gyp:target_name'. - path, name = target.rsplit(':', 1) - # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'. - subdir, gyp = os.path.split(path) - # Use '.' for the current directory '', so that the error messages make - # more sense. - if not subdir: - subdir = '.' - # Prepare a key like 'path/to:target_name'. - key = subdir + ':' + name - if key in used: - # Complain if this target is already used. - raise GypError('Duplicate target name "%s" in directory "%s" used both ' - 'in "%s" and "%s".' % (name, subdir, gyp, used[key])) - used[key] = gyp - - - def Load(build_files, variables, includes, depth, generator_input_info, check, - circular_check, parallel): - # Set up path_sections and non_configuration_keys with the default data plus - # the generator-specific data. 
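The collision key that VerifyNoCollidingTargets builds from a qualified target is easy to see in isolation (the `collision_key` helper below is hypothetical):

```python
import os

def collision_key(qualified_target):
    """Split 'path/to/file.gyp:target' into the 'subdir:name' key plus the
    owning .gyp file, as the check above does."""
    path, name = qualified_target.rsplit(':', 1)
    subdir, gyp = os.path.split(path)
    return ('%s:%s' % (subdir or '.', name), gyp)

print(collision_key('lib/foo.gyp:util'))   # ('lib:util', 'foo.gyp')
print(collision_key('bar.gyp:util'))       # ('.:util', 'bar.gyp')
```

Two qualified targets that map to the same first tuple element collide, which is exactly when the GypError above fires.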
- global path_sections - path_sections = base_path_sections[:] - path_sections.extend(generator_input_info['path_sections']) - - global non_configuration_keys - non_configuration_keys = base_non_configuration_keys[:] - non_configuration_keys.extend(generator_input_info['non_configuration_keys']) - - # TODO(mark) handle variants if the generator doesn't want them directly. - generator_handles_variants = \ - generator_input_info['generator_handles_variants'] - - global absolute_build_file_paths - absolute_build_file_paths = \ - generator_input_info['generator_wants_absolute_build_file_paths'] - - global multiple_toolsets - multiple_toolsets = generator_input_info[ - 'generator_supports_multiple_toolsets'] - - # A generator can have other lists (in addition to sources) be processed - # for rules. - extra_sources_for_rules = generator_input_info['extra_sources_for_rules'] - - # Load build files. This loads every target-containing build file into - # the |data| dictionary such that the keys to |data| are build file names, - # and the values are the entire build file contents after "early" or "pre" - # processing has been done and includes have been resolved. - # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as - # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps - # track of the keys corresponding to "target" files. - data = {'target_build_files': set()} - aux_data = {} - for build_file in build_files: - # Normalize paths everywhere. This is important because paths will be - # used as keys to the data dict and for references between input files. - build_file = os.path.normpath(build_file) - try: - if parallel: - print >>sys.stderr, 'Using parallel processing.' - LoadTargetBuildFileParallel(build_file, data, aux_data, - variables, includes, depth, check) - else: - LoadTargetBuildFile(build_file, data, aux_data, - variables, includes, depth, check, True) - except Exception, e: - gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) - raise - - # Build a dict to access each target's subdict by qualified name. - targets = BuildTargetsDict(data) - - # Fully qualify all dependency links. - QualifyDependencies(targets) - - # Remove self-dependencies from targets that have 'prune_self_dependencies' - # set to 1. - RemoveSelfDependencies(targets) - - # Expand dependencies specified as build_file:*. - ExpandWildcardDependencies(targets, data) - - # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.iteritems(): - tmp_dict = {} - for key_base in dependency_sections: - for op in ('', '!', '/'): - key = key_base + op - if key in target_dict: - tmp_dict[key] = target_dict[key] - del target_dict[key] - ProcessListFiltersInDict(target_name, tmp_dict) - # Write the results back to |target_dict|. - for key in tmp_dict: - target_dict[key] = tmp_dict[key] - - # Make sure every dependency appears at most once. - RemoveDuplicateDependencies(targets) - - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) - - [dependency_nodes, flat_list] = BuildDependencyList(targets) - - # Check that no two targets in the same directory have the same name. - VerifyNoCollidingTargets(flat_list) - - # Handle dependent settings of various types. 
- for settings_type in ['all_dependent_settings', - 'direct_dependent_settings', - 'link_settings']: - DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) - - # Take out the dependent settings now that they've been published to all - # of the targets that require them. - for target in flat_list: - if settings_type in targets[target]: - del targets[target][settings_type] - - # Make sure static libraries don't declare dependencies on other static - # libraries, but that linkables depend on all unlinked static libraries - # that they need so that their link steps will be correct. - gii = generator_input_info - if gii['generator_wants_static_library_dependencies_adjusted']: - AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, - gii['generator_wants_sorted_dependencies']) - - # Apply "post"/"late"/"target" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATE, variables, build_file) - - # Move everything that can go into a "configurations" section into one. - for target in flat_list: - target_dict = targets[target] - SetUpConfigurations(target, target_dict) - - # Apply exclude (!) and regex (/) list filters. - for target in flat_list: - target_dict = targets[target] - ProcessListFiltersInDict(target, target_dict) - - # Apply "latelate" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATELATE, variables, build_file) - - # Make sure that the rules make sense, and build up rule_sources lists as - # needed. Not all generators will need to use the rule_sources lists, but - # some may, and it seems best to build the list in a common spot. - # Also validate actions and run_as elements in targets. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ValidateTargetType(target, target_dict) - # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to - # scalesystemdependent_arm_additions.c or similar. - if 'arm' not in variables.get('target_arch', ''): - ValidateSourcesInTarget(target, target_dict, build_file) - ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) - - # Generators might not expect ints. Turn them into strs. - TurnIntIntoStrInDict(data) - - # TODO(mark): Return |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - return [flat_list, targets, data] Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/input.pyc and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/input.pyc differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,223 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. -""" - -import fcntl -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest) - else: - shutil.copyfile(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - tools_dir = os.environ.get('DEVELOPER_BIN_DIR', '/usr/bin') - args = [os.path.join(tools_dir, 'ibtool'), '--errors', '--warnings', - '--notices', '--output-format', 'human-readable-text', '--compile', - dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _CopyStringsFile(self, source, dest): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source).read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'w') - args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code', - 'UTF-16', source] - subprocess.call(args, stdout=fp) - fp.close() - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except Exception: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16BE" - elif header.startswith("\xFF\xFE"): - return "UTF-16LE" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Go through all the environment variables and replace them as variables in - # the file. - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - lines = string.replace(lines, evar, os.environ[key]) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out the PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out 'libtool: file: foo.o has no symbols'.""" - libtool_re = re.compile(r'^libtool: file: .* has no symbols$') - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line): - print >>sys.stderr, line - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! 
- os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - - if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,771 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -This module helps emulate Visual Studio 2008 behavior on top of other -build systems, primarily ninja. -""" - -import os -import re -import subprocess -import sys - -import gyp.MSVSVersion - -windows_quoter_regex = re.compile(r'(\\*)"') - -def QuoteForRspFile(arg): - """Quote a command line argument so that it appears as one argument when - processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for - Windows programs).""" - # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment - # threads. These are actually the quoting rules for CommandLineToArgvW, not - # for the shell, because the shell doesn't do anything in Windows. This - # works more or less because most programs (including the compiler, etc.) - # use that function to handle command line arguments. - - # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes - # preceding it, and results in n backslashes + the quote. So we substitute - # in 2* what we match, +1 more, plus the quote. - arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg) - - # %'s also need to be doubled otherwise they're interpreted as batch - # positional arguments. Also make sure to escape the % so that they're - # passed literally through escaping so they can be singled to just the - # original %. Otherwise, trying to pass the literal representation that - # looks like an environment variable to the shell (e.g. %PATH%) would fail. - arg = arg.replace('%', '%%') - - # These commands are used in rsp files, so no escaping for the shell (via ^) - # is necessary. - - # Finally, wrap the whole thing in quotes so that the above quote rule - # applies and whitespace isn't a word break. - return '"' + arg + '"' - - -def EncodeRspFileList(args): - """Process a list of arguments using QuoteForRspFile.""" - # Note that the first argument is assumed to be the command. Don't add - # quotes around it because then built-ins like 'echo', etc. won't work. - # Take care to normpath only the path in the case of 'call ../x.bat' because - # otherwise the whole thing is incorrectly interpreted as a path and not - # normalized correctly. 
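To make the CommandLineToArgvW quoting rules above concrete, here is a hedged copy of the logic as a standalone demo (not an import of gyp's own helper):

```python
import re

# Same pattern as windows_quoter_regex above: a run of backslashes
# followed by a double quote.
quoter = re.compile(r'(\\*)"')

def quote_for_rsp(arg):
    # 2n+1 backslashes before each literal quote: double the captured run,
    # then add one more backslash plus the quote itself.
    arg = quoter.sub(lambda m: 2 * m.group(1) + '\\"', arg)
    arg = arg.replace('%', '%%')   # keep %VAR% literal in batch context
    return '"' + arg + '"'         # wrap so whitespace isn't a word break

print(quote_for_rsp('say "hi"'))       # "say \"hi\""
print(quote_for_rsp('back\\"slash'))   # "back\\\"slash"
print(quote_for_rsp('%PATH%'))         # "%%PATH%%"
```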
- if not args: return '' - if args[0].startswith('call '): - call, program = args[0].split(' ', 1) - program = call + ' ' + os.path.normpath(program) - else: - program = os.path.normpath(args[0]) - return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:]) - - -def _GenericRetrieve(root, default, path): - """Given a list of dictionary keys |path| and a tree of dicts |root|, find - value at path, or return |default| if any of the path doesn't exist.""" - if not root: - return default - if not path: - return root - return _GenericRetrieve(root.get(path[0]), default, path[1:]) - - -def _AddPrefix(element, prefix): - """Add |prefix| to |element| or each subelement if element is iterable.""" - if element is None: - return element - # Note, not Iterable because we don't want to handle strings like that. - if isinstance(element, list) or isinstance(element, tuple): - return [prefix + e for e in element] - else: - return prefix + element - - -def _DoRemapping(element, map): - """If |element| then remap it through |map|. If |element| is iterable then - each item will be remapped. Any elements not found will be removed.""" - if map is not None and element is not None: - if not callable(map): - map = map.get # Assume it's a dict, otherwise a callable to do the remap. - if isinstance(element, list) or isinstance(element, tuple): - element = filter(None, [map(elem) for elem in element]) - else: - element = map(element) - return element - - -def _AppendOrReturn(append, element): - """If |append| is None, simply return |element|. If |append| is not None, - then add |element| to it, adding each item in |element| if it's a list or - tuple.""" - if append is not None and element is not None: - if isinstance(element, list) or isinstance(element, tuple): - append.extend(element) - else: - append.append(element) - else: - return element - - -def _FindDirectXInstallation(): - """Try to find an installation location for the DirectX SDK. Check for the - standard environment variable, and if that doesn't exist, try to find - via the registry. May return None if not found in either location.""" - # Return previously calculated value, if there is one - if hasattr(_FindDirectXInstallation, 'dxsdk_dir'): - return _FindDirectXInstallation.dxsdk_dir - - dxsdk_dir = os.environ.get('DXSDK_DIR') - if not dxsdk_dir: - # Set up params to pass to and attempt to launch reg.exe. - cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s'] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - for line in p.communicate()[0].splitlines(): - if 'InstallPath' in line: - dxsdk_dir = line.split(' ')[3] + "\\" - - # Cache return value - _FindDirectXInstallation.dxsdk_dir = dxsdk_dir - return dxsdk_dir - - -class MsvsSettings(object): - """A class that understands the gyp 'msvs_...' values (especially the - msvs_settings field). They largely correspond to the VS2008 IDE DOM. This - class helps map those settings to command line options.""" - - def __init__(self, spec, generator_flags): - self.spec = spec - self.vs_version = GetVSVersion(generator_flags) - self.dxsdk_dir = _FindDirectXInstallation() - - # Try to find an installation location for the Windows DDK by checking - # the WDK_DIR environment variable, may be None. 
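The `_GenericRetrieve`/`_AddPrefix` helpers above compose into the settings-to-flag lookups used throughout this class; a minimal standalone demo (the snake_case name below is an assumption):

```python
def generic_retrieve(root, default, path):
    """Walk |path| of dict keys through |root|; |default| on any miss."""
    if not root:
        return default
    if not path:
        return root
    return generic_retrieve(root.get(path[0]), default, path[1:])

settings = {'VCCLCompilerTool': {'WarningLevel': '4'}}
level = generic_retrieve(settings, None, ['VCCLCompilerTool', 'WarningLevel'])
print('/W' + level)    # '/W4' -- prefix application, as in _AddPrefix
```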
- self.wdk_dir = os.environ.get('WDK_DIR') - - supported_fields = [ - ('msvs_configuration_attributes', dict), - ('msvs_settings', dict), - ('msvs_system_include_dirs', list), - ('msvs_disabled_warnings', list), - ('msvs_precompiled_header', str), - ('msvs_precompiled_source', str), - ('msvs_configuration_platform', str), - ('msvs_target_platform', str), - ] - configs = spec['configurations'] - for field, default in supported_fields: - setattr(self, field, {}) - for configname, config in configs.iteritems(): - getattr(self, field)[configname] = config.get(field, default()) - - self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.']) - - def GetVSMacroEnv(self, base_to_build=None, config=None): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents.""" - target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64' - replacements = { - '$(OutDir)\\': base_to_build + '\\' if base_to_build else '', - '$(IntDir)': '$!INTERMEDIATE_DIR', - '$(InputPath)': '${source}', - '$(InputName)': '${root}', - '$(ProjectName)': self.spec['target_name'], - '$(PlatformName)': target_platform, - '$(ProjectDir)\\': '', - } - # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when - # Visual Studio is actually installed. - if self.vs_version.Path(): - replacements['$(VSInstallDir)'] = self.vs_version.Path() - replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(), - 'VC') + '\\' - # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be - # set. This happens when the SDK is sync'd via src-internal, rather than - # by typical end-user installation of the SDK. If it's not set, we don't - # want to leave the unexpanded variable in the path, so simply strip it. - replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else '' - replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else '' - return replacements - - def ConvertVSMacros(self, s, base_to_build=None, config=None): - """Convert from VS macro names to something equivalent.""" - env = self.GetVSMacroEnv(base_to_build, config=config) - return ExpandMacros(s, env) - - def AdjustLibraries(self, libraries): - """Strip -l from library if it's specified with that.""" - return [lib[2:] if lib.startswith('-l') else lib for lib in libraries] - - def _GetAndMunge(self, field, path, default, prefix, append, map): - """Retrieve a value from |field| at |path| or return |default|. If - |append| is specified, and the item is found, it will be appended to that - object instead of returned. If |map| is specified, results will be - remapped through |map| before being returned or appended.""" - result = _GenericRetrieve(field, default, path) - result = _DoRemapping(result, map) - result = _AddPrefix(result, prefix) - return _AppendOrReturn(append, result) - - class _GetWrapper(object): - def __init__(self, parent, field, base_path, append=None): - self.parent = parent - self.field = field - self.base_path = [base_path] - self.append = append - def __call__(self, name, map=None, prefix='', default=None): - return self.parent._GetAndMunge(self.field, self.base_path + [name], - default=default, prefix=prefix, append=self.append, map=map) - - def GetArch(self, config): - """Get architecture based on msvs_configuration_platform and - msvs_target_platform. 
Returns either 'x86' or 'x64'.""" - configuration_platform = self.msvs_configuration_platform.get(config, '') - platform = self.msvs_target_platform.get(config, '') - if not platform: # If no specific override, use the configuration's. - platform = configuration_platform - # Map from platform to architecture. - return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86') - - def _TargetConfig(self, config): - """Returns the target-specific configuration.""" - # There are two levels of architecture/platform specification in VS. The - # first level is globally for the configuration (this is what we consider - # "the" config at the gyp level, which will be something like 'Debug' or - # 'Release_x64'), and a second target-specific configuration, which is an - # override for the global one. |config| is remapped here to take into - # account the local target-specific overrides to the global configuration. - arch = self.GetArch(config) - if arch == 'x64' and not config.endswith('_x64'): - config += '_x64' - if arch == 'x86' and config.endswith('_x64'): - config = config.rsplit('_', 1)[0] - return config - - def _Setting(self, path, config, - default=None, prefix='', append=None, map=None): - """_GetAndMunge for msvs_settings.""" - return self._GetAndMunge( - self.msvs_settings[config], path, default, prefix, append, map) - - def _ConfigAttrib(self, path, config, - default=None, prefix='', append=None, map=None): - """_GetAndMunge for msvs_configuration_attributes.""" - return self._GetAndMunge( - self.msvs_configuration_attributes[config], - path, default, prefix, append, map) - - def AdjustIncludeDirs(self, include_dirs, config): - """Updates include_dirs to expand VS specific paths, and adds the system - include dirs used for platform SDK and similar.""" - config = self._TargetConfig(config) - includes = include_dirs + self.msvs_system_include_dirs[config] - includes.extend(self._Setting( - ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[])) - return [self.ConvertVSMacros(p, config=config) for p in includes] - - def GetComputedDefines(self, config): - """Returns the set of defines that are injected to the defines list based - on other VS settings.""" - config = self._TargetConfig(config) - defines = [] - if self._ConfigAttrib(['CharacterSet'], config) == '1': - defines.extend(('_UNICODE', 'UNICODE')) - if self._ConfigAttrib(['CharacterSet'], config) == '2': - defines.append('_MBCS') - defines.extend(self._Setting( - ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[])) - return defines - - def GetCompilerPdbName(self, config, expand_special): - """Get the pdb file name that should be used for compiler invocations, or - None if there's no explicit name specified.""" - config = self._TargetConfig(config) - pdbname = self._Setting( - ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config) - if pdbname: - pdbname = expand_special(self.ConvertVSMacros(pdbname)) - return pdbname - - def GetOutputName(self, config, expand_special): - """Gets the explicitly overridden output name for a target or returns None - if it's not overridden.""" - config = self._TargetConfig(config) - type = self.spec['type'] - root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool' - # TODO(scottmg): Handle OutputDirectory without OutputFile. 
- output_file = self._Setting((root, 'OutputFile'), config) - if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) - return output_file - - def GetPDBName(self, config, expand_special): - """Gets the explicitly overridden pdb name for a target or returns None - if it's not overridden.""" - config = self._TargetConfig(config) - output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config) - if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) - return output_file - - def GetCflags(self, config): - """Returns the flags that need to be added to .c and .cc compilations.""" - config = self._TargetConfig(config) - cflags = [] - cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]]) - cl = self._GetWrapper(self, self.msvs_settings[config], - 'VCCLCompilerTool', append=cflags) - cl('Optimization', - map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O') - cl('InlineFunctionExpansion', prefix='/Ob') - cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy') - cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi') - cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O') - cl('WholeProgramOptimization', map={'true': '/GL'}) - cl('WarningLevel', prefix='/W') - cl('WarnAsError', map={'true': '/WX'}) - cl('DebugInformationFormat', - map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z') - cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'}) - cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'}) - cl('MinimalRebuild', map={'true': '/Gm'}) - cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'}) - cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC') - cl('RuntimeLibrary', - map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M') - cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH') - cl('DefaultCharIsUnsigned', map={'true': '/J'}) - cl('TreatWChar_tAsBuiltInType', - map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t') - cl('EnablePREfast', map={'true': '/analyze'}) - cl('AdditionalOptions', prefix='') - cflags.extend(['/FI' + f for f in self._Setting( - ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])]) - # ninja handles parallelism by itself, don't have the compiler do it too. - cflags = filter(lambda x: not x.startswith('/MP'), cflags) - return cflags - - def GetPrecompiledHeader(self, config, gyp_to_build_path): - """Returns an object that handles the generation of precompiled header - build steps.""" - config = self._TargetConfig(config) - return _PchHelper(self, config, gyp_to_build_path) - - def _GetPchFlags(self, config, extension): - """Get the flags to be added to the cflags for precompiled header support. - """ - config = self._TargetConfig(config) - # The PCH is only built once by a particular source file. Usage of PCH must - # only be for the same language (i.e. C vs. C++), so only include the pch - # flags when the language matches. - if self.msvs_precompiled_header[config]: - source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1] - if _LanguageMatchesForPch(source_ext, extension): - pch = os.path.split(self.msvs_precompiled_header[config])[1] - return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' 
+ pch + '.pch'] - return [] - - def GetCflagsC(self, config): - """Returns the flags that need to be added to .c compilations.""" - config = self._TargetConfig(config) - return self._GetPchFlags(config, '.c') - - def GetCflagsCC(self, config): - """Returns the flags that need to be added to .cc compilations.""" - config = self._TargetConfig(config) - return ['/TP'] + self._GetPchFlags(config, '.cc') - - def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): - """Get and normalize the list of paths in AdditionalLibraryDirectories - setting.""" - config = self._TargetConfig(config) - libpaths = self._Setting((root, 'AdditionalLibraryDirectories'), - config, default=[]) - libpaths = [os.path.normpath( - gyp_to_build_path(self.ConvertVSMacros(p, config=config))) - for p in libpaths] - return ['/LIBPATH:"' + p + '"' for p in libpaths] - - def GetLibFlags(self, config, gyp_to_build_path): - """Returns the flags that need to be added to lib commands.""" - config = self._TargetConfig(config) - libflags = [] - lib = self._GetWrapper(self, self.msvs_settings[config], - 'VCLibrarianTool', append=libflags) - libflags.extend(self._GetAdditionalLibraryDirectories( - 'VCLibrarianTool', config, gyp_to_build_path)) - lib('LinkTimeCodeGeneration', map={'true': '/LTCG'}) - lib('AdditionalOptions') - return libflags - - def _GetDefFileAsLdflags(self, spec, ldflags, gyp_to_build_path): - """.def files get implicitly converted to a ModuleDefinitionFile for the - linker in the VS generator. Emulate that behaviour here.""" - def_file = '' - if spec['type'] in ('shared_library', 'loadable_module', 'executable'): - def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] - if len(def_files) == 1: - ldflags.append('/DEF:"%s"' % gyp_to_build_path(def_files[0])) - elif len(def_files) > 1: - raise Exception("Multiple .def files") - - def GetLdflags(self, config, gyp_to_build_path, expand_special, - manifest_base_name, is_executable): - """Returns the flags that need to be added to link commands, and the - manifest files.""" - config = self._TargetConfig(config) - ldflags = [] - ld = self._GetWrapper(self, self.msvs_settings[config], - 'VCLinkerTool', append=ldflags) - self._GetDefFileAsLdflags(self.spec, ldflags, gyp_to_build_path) - ld('GenerateDebugInformation', map={'true': '/DEBUG'}) - ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:') - ldflags.extend(self._GetAdditionalLibraryDirectories( - 'VCLinkerTool', config, gyp_to_build_path)) - ld('DelayLoadDLLs', prefix='/DELAYLOAD:') - out = self.GetOutputName(config, expand_special) - if out: - ldflags.append('/OUT:' + out) - pdb = self.GetPDBName(config, expand_special) - if pdb: - ldflags.append('/PDB:' + pdb) - ld('AdditionalOptions', prefix='') - ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:') - ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE') - ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') - ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED') - ld('RandomizedBaseAddress', - map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE') - ld('DataExecutionPrevention', - map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT') - ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:') - ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:') - ld('LinkTimeCodeGeneration', map={'1': '/LTCG'}) - ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') - ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) - 
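The cl(...) and ld(...) calls above all funnel through the same remap-then-prefix idea: a raw VS setting value is looked up in a small map, then glued onto a flag prefix. A standalone sketch of that pattern (hypothetical `translate` helper; the real _GetAndMunge pipeline additionally handles callables, appending, and missing keys):

```python
def translate(value, prefix='', mapping=None):
    # Remap the raw VS setting value, then glue the flag prefix on.
    if mapping is not None:
        value = mapping[value]
    return prefix + value

assert translate('2', prefix='/O',
                 mapping={'0': 'd', '1': '1', '2': '2', '3': 'x'}) == '/O2'
assert translate('3', prefix='/M',
                 mapping={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}) == '/MDd'
assert translate('1', prefix='/SUBSYSTEM:',
                 mapping={'1': 'CONSOLE', '2': 'WINDOWS'}) == '/SUBSYSTEM:CONSOLE'
```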
ld('EntryPointSymbol', prefix='/ENTRY:') - ld('Profile', map={'true': '/PROFILE'}) - ld('LargeAddressAware', - map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE') - # TODO(scottmg): This should sort of be somewhere else (not really a flag). - ld('AdditionalDependencies', prefix='') - - # If the base address is not specifically controlled, DYNAMICBASE should - # be on by default. - base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED', - ldflags) - if not base_flags: - ldflags.append('/DYNAMICBASE') - - # If the NXCOMPAT flag has not been specified, default to on. Despite the - # documentation that says this only defaults to on when the subsystem is - # Vista or greater (which applies to the linker), the IDE defaults it on - # unless it's explicitly off. - if not filter(lambda x: 'NXCOMPAT' in x, ldflags): - ldflags.append('/NXCOMPAT') - - have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags) - manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags( - config, manifest_base_name, is_executable and not have_def_file) - ldflags.extend(manifest_flags) - manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path) - manifest_files.append(intermediate_manifest_file) - - return ldflags, manifest_files - - def _GetLdManifestFlags(self, config, name, allow_isolation): - """Returns the set of flags that need to be added to the link to generate - a default manifest, as well as the name of the generated file.""" - # Add manifest flags that mirror the defaults in VS. Chromium dev builds - # do not currently use any non-default settings, but we could parse - # VCManifestTool blocks if Chromium or other projects need them in the - # future. Of particular note, we do not yet support EmbedManifest because - # it complicates incremental linking. - output_name = name + '.intermediate.manifest' - flags = [ - '/MANIFEST', - '/ManifestFile:' + output_name, - '''/MANIFESTUAC:"level='asInvoker' uiAccess='false'"''' - ] - if allow_isolation: - flags.append('/ALLOWISOLATION') - return flags, output_name - - def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): - """Gets additional manifest files that are added to the default one - generated by the linker.""" - files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config, - default=[]) - if (self._Setting( - ('VCManifestTool', 'EmbedManifest'), config, default='') == 'true'): - print 'gyp/msvs_emulation.py: "EmbedManifest: true" not yet supported.' 
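The base-address and NXCOMPAT defaulting described above amounts to two containment checks over the accumulated flag list. A standalone sketch (hypothetical function name, plain list standing in for the real ldflags):

```python
def apply_security_defaults(ldflags):
    # /DYNAMICBASE is implied unless the base address was pinned.
    if not [f for f in ldflags if 'DYNAMICBASE' in f or f == '/FIXED']:
        ldflags.append('/DYNAMICBASE')
    # /NXCOMPAT defaults on, matching the IDE rather than the linker docs.
    if not [f for f in ldflags if 'NXCOMPAT' in f]:
        ldflags.append('/NXCOMPAT')
    return ldflags

assert apply_security_defaults([]) == ['/DYNAMICBASE', '/NXCOMPAT']
assert apply_security_defaults(['/FIXED']) == ['/FIXED', '/NXCOMPAT']
```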
- if isinstance(files, str): - files = files.split(';') - return [os.path.normpath( - gyp_to_build_path(self.ConvertVSMacros(f, config=config))) - for f in files] - - def IsUseLibraryDependencyInputs(self, config): - """Returns whether the target should be linked via Use Library Dependency - Inputs (using component .objs of a given .lib).""" - config = self._TargetConfig(config) - uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config) - return uldi == 'true' - - def GetRcflags(self, config, gyp_to_ninja_path): - """Returns the flags that need to be added to invocations of the resource - compiler.""" - config = self._TargetConfig(config) - rcflags = [] - rc = self._GetWrapper(self, self.msvs_settings[config], - 'VCResourceCompilerTool', append=rcflags) - rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I') - rcflags.append('/I' + gyp_to_ninja_path('.')) - rc('PreprocessorDefinitions', prefix='/d') - # /l arg must be in hex without leading '0x' - rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:]) - return rcflags - - def BuildCygwinBashCommandLine(self, args, path_to_base): - """Build a command line that runs args via cygwin bash. We assume that all - incoming paths are in Windows normpath'd form, so they need to be - converted to posix style for the part of the command line that's passed to - bash. We also have to do some Visual Studio macro emulation here because - various rules use magic VS names for things. Also note that rules that - contain ninja variables cannot be fixed here (for example ${source}), so - the outer generator needs to make sure that the paths that are written out - are in posix style, if the command line will be used here.""" - cygwin_dir = os.path.normpath( - os.path.join(path_to_base, self.msvs_cygwin_dirs[0])) - cd = ('cd %s' % path_to_base).replace('\\', '/') - args = [a.replace('\\', '/').replace('"', '\\"') for a in args] - args = ["'%s'" % a.replace("'", "'\\''") for a in args] - bash_cmd = ' '.join(args) - cmd = ( - 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir + - 'bash -c "%s ; %s"' % (cd, bash_cmd)) - return cmd - - def IsRuleRunUnderCygwin(self, rule): - """Determine if an action should be run under cygwin. If the variable is - unset, or set to 1 we use cygwin.""" - return int(rule.get('msvs_cygwin_shell', - self.spec.get('msvs_cygwin_shell', 1))) != 0 - - def _HasExplicitRuleForExtension(self, spec, extension): - """Determine if there's an explicit rule for a particular extension.""" - for rule in spec.get('rules', []): - if rule['extension'] == extension: - return True - return False - - def HasExplicitIdlRules(self, spec): - """Determine if there's an explicit rule for idl files. When there isn't we - need to generate implicit rules to build MIDL .idl files.""" - return self._HasExplicitRuleForExtension(spec, 'idl') - - def HasExplicitAsmRules(self, spec): - """Determine if there's an explicit rule for asm files. When there isn't we - need to generate implicit rules to assemble .asm files.""" - return self._HasExplicitRuleForExtension(spec, 'asm') - - def GetIdlBuildData(self, source, config): - """Determine the implicit outputs for an idl file. 
Returns output - directory, outputs, and variables and flags that are required.""" - config = self._TargetConfig(config) - midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool') - def midl(name, default=None): - return self.ConvertVSMacros(midl_get(name, default=default), - config=config) - tlb = midl('TypeLibraryName', default='${root}.tlb') - header = midl('HeaderFileName', default='${root}.h') - dlldata = midl('DLLDataFileName', default='dlldata.c') - iid = midl('InterfaceIdentifierFileName', default='${root}_i.c') - proxy = midl('ProxyFileName', default='${root}_p.c') - # Note that .tlb is not included in the outputs as it is not always - # generated depending on the content of the input idl file. - outdir = midl('OutputDirectory', default='') - output = [header, dlldata, iid, proxy] - variables = [('tlb', tlb), - ('h', header), - ('dlldata', dlldata), - ('iid', iid), - ('proxy', proxy)] - # TODO(scottmg): Are there configuration settings to set these flags? - target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64' - flags = ['/char', 'signed', '/env', target_platform, '/Oicf'] - return outdir, output, variables, flags - - -def _LanguageMatchesForPch(source_ext, pch_source_ext): - c_exts = ('.c',) - cc_exts = ('.cc', '.cxx', '.cpp') - return ((source_ext in c_exts and pch_source_ext in c_exts) or - (source_ext in cc_exts and pch_source_ext in cc_exts)) - - -class PrecompiledHeader(object): - """Helper to generate dependencies and build rules to handle generation of - precompiled headers. Interface matches the GCH handler in xcode_emulation.py. - """ - def __init__( - self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext): - self.settings = settings - self.config = config - pch_source = self.settings.msvs_precompiled_source[self.config] - self.pch_source = gyp_to_build_path(pch_source) - filename, _ = os.path.splitext(pch_source) - self.output_obj = gyp_to_unique_output(filename + obj_ext).lower() - - def _PchHeader(self): - """Get the header that will appear in an #include line for all source - files.""" - return os.path.split(self.settings.msvs_precompiled_header[self.config])[1] - - def GetObjDependencies(self, sources, objs): - """Given a list of sources files and the corresponding object files, - returns a list of the pch files that should be depended upon. 
The - additional wrapping in the return value is for interface compatability - with make.py on Mac, and xcode_emulation.py.""" - if not self._PchHeader(): - return [] - pch_ext = os.path.splitext(self.pch_source)[1] - for source in sources: - if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext): - return [(None, None, self.output_obj)] - return [] - - def GetPchBuildCommands(self): - """Not used on Windows as there are no additional build steps required - (instead, existing steps are modified in GetFlagsModifications below).""" - return [] - - def GetFlagsModifications(self, input, output, implicit, command, - cflags_c, cflags_cc, expand_special): - """Get the modified cflags and implicit dependencies that should be used - for the pch compilation step.""" - if input == self.pch_source: - pch_output = ['/Yc' + self._PchHeader()] - if command == 'cxx': - return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))], - self.output_obj, []) - elif command == 'cc': - return ([('cflags_c', map(expand_special, cflags_c + pch_output))], - self.output_obj, []) - return [], output, implicit - - -vs_version = None -def GetVSVersion(generator_flags): - global vs_version - if not vs_version: - vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( - generator_flags.get('msvs_version', 'auto')) - return vs_version - -def _GetVsvarsSetupArgs(generator_flags, arch): - vs = GetVSVersion(generator_flags) - return vs.SetupScript() - -def ExpandMacros(string, expansions): - """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv - for the canonical way to retrieve a suitable dict.""" - if '$' in string: - for old, new in expansions.iteritems(): - assert '$(' not in new, new - string = string.replace(old, new) - return string - -def _ExtractImportantEnvironment(output_of_set): - """Extracts environment variables required for the toolchain to run from - a textual dump output by the cmd.exe 'set' command.""" - envvars_to_save = ( - 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma. - 'include', - 'lib', - 'libpath', - 'path', - 'pathext', - 'systemroot', - 'temp', - 'tmp', - ) - env = {} - for line in output_of_set.splitlines(): - for envvar in envvars_to_save: - if re.match(envvar + '=', line.lower()): - var, setting = line.split('=', 1) - if envvar == 'path': - # Our own rules (for running gyp-win-tool) and other actions in - # Chromium rely on python being in the path. Add the path to this - # python here so that if it's not in the path when ninja is run - # later, python will still be found. - setting = os.path.dirname(sys.executable) + os.pathsep + setting - env[var.upper()] = setting - break - for required in ('SYSTEMROOT', 'TEMP', 'TMP'): - if required not in env: - raise Exception('Environment variable "%s" ' - 'required to be set to valid path' % required) - return env - -def _FormatAsEnvironmentBlock(envvar_dict): - """Format as an 'environment block' directly suitable for CreateProcess. - Briefly this is a list of key=value\0, terminated by an additional \0. See - CreateProcess documentation for more details.""" - block = '' - nul = '\0' - for key, value in envvar_dict.iteritems(): - block += key + '=' + value + nul - block += nul - return block - -def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out): - """It's not sufficient to have the absolute path to the compiler, linker, - etc. on Windows, as those tools rely on .dlls being in the PATH. 
We also - need to support both x86 and x64 compilers within the same build (to support - msvs_target_platform hackery). Different architectures require a different - compiler binary, and different supporting environment variables (INCLUDE, - LIB, LIBPATH). So, we extract the environment here, wrap all invocations - of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which - sets up the environment, and then we do not prefix the compiler with - an absolute path, instead preferring something like "cl.exe" in the rule - which will then run whichever the environment setup has put in the path. - When the following procedure to generate environment files does not - meet your requirement (e.g. for custom toolchains), you can pass - "-G ninja_use_custom_environment_files" to the gyp to suppress file - generation and use custom environment files prepared by yourself.""" - if generator_flags.get('ninja_use_custom_environment_files', 0): - return - vs = GetVSVersion(generator_flags) - for arch in ('x86', 'x64'): - args = vs.SetupScript(arch) - args.extend(('&&', 'set')) - popen = subprocess.Popen( - args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - variables, _ = popen.communicate() - env = _ExtractImportantEnvironment(variables) - env_block = _FormatAsEnvironmentBlock(env) - f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') - f.write(env_block) - f.close() - -def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): - """Emulate behavior of msvs_error_on_missing_sources present in the msvs - generator: Check that all regular source files, i.e. not created at run time, - exist on disk. Missing files cause needless recompilation when building via - VS, and we want this check to match for people/bots that build using ninja, - so they're not surprised when the VS build fails.""" - if int(generator_flags.get('msvs_error_on_missing_sources', 0)): - no_specials = filter(lambda x: '$' not in x, sources) - relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] - missing = filter(lambda x: not os.path.exists(x), relative) - if missing: - # They'll look like out\Release\..\..\stuff\things.cc, so normalize the - # path for a slightly less crazy looking output. - cleaned_up = [os.path.normpath(x) for x in missing] - raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up)) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,152 +0,0 @@ -# This file comes from -# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py -# Do not edit! Edit the upstream one instead. - -"""Python module for generating .ninja files. - -Note that this is emphatically not a required piece of Ninja; it's -just a helpful utility for build-file-generation systems that already -use Python. 
-""" - -import textwrap -import re - -def escape_path(word): - return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:') - -class Writer(object): - def __init__(self, output, width=78): - self.output = output - self.width = width - - def newline(self): - self.output.write('\n') - - def comment(self, text): - for line in textwrap.wrap(text, self.width - 2): - self.output.write('# ' + line + '\n') - - def variable(self, key, value, indent=0): - if value is None: - return - if isinstance(value, list): - value = ' '.join(filter(None, value)) # Filter out empty strings. - self._line('%s = %s' % (key, value), indent) - - def rule(self, name, command, description=None, depfile=None, - generator=False, restat=False, rspfile=None, rspfile_content=None): - self._line('rule %s' % name) - self.variable('command', command, indent=1) - if description: - self.variable('description', description, indent=1) - if depfile: - self.variable('depfile', depfile, indent=1) - if generator: - self.variable('generator', '1', indent=1) - if restat: - self.variable('restat', '1', indent=1) - if rspfile: - self.variable('rspfile', rspfile, indent=1) - if rspfile_content: - self.variable('rspfile_content', rspfile_content, indent=1) - - def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, - variables=None): - outputs = self._as_list(outputs) - all_inputs = self._as_list(inputs)[:] - out_outputs = list(map(escape_path, outputs)) - all_inputs = list(map(escape_path, all_inputs)) - - if implicit: - implicit = map(escape_path, self._as_list(implicit)) - all_inputs.append('|') - all_inputs.extend(implicit) - if order_only: - order_only = map(escape_path, self._as_list(order_only)) - all_inputs.append('||') - all_inputs.extend(order_only) - - self._line('build %s: %s %s' % (' '.join(out_outputs), - rule, - ' '.join(all_inputs))) - - if variables: - if isinstance(variables, dict): - iterator = variables.iteritems() - else: - iterator = iter(variables) - - for key, val in iterator: - self.variable(key, val, indent=1) - - return outputs - - def include(self, path): - self._line('include %s' % path) - - def subninja(self, path): - self._line('subninja %s' % path) - - def default(self, paths): - self._line('default %s' % ' '.join(self._as_list(paths))) - - def _count_dollars_before_index(self, s, i): - """Returns the number of '$' characters right in front of s[i].""" - dollar_count = 0 - dollar_index = i - 1 - while dollar_index > 0 and s[dollar_index] == '$': - dollar_count += 1 - dollar_index -= 1 - return dollar_count - - def _line(self, text, indent=0): - """Write 'text' word-wrapped at self.width characters.""" - leading_space = ' ' * indent - while len(leading_space) + len(text) > self.width: - # The text is too wide; wrap if possible. - - # Find the rightmost space that would obey our width constraint and - # that's not an escaped space. - available_space = self.width - len(leading_space) - len(' $') - space = available_space - while True: - space = text.rfind(' ', 0, space) - if space < 0 or \ - self._count_dollars_before_index(text, space) % 2 == 0: - break - - if space < 0: - # No such space; just use the first unescaped space we can find. - space = available_space - 1 - while True: - space = text.find(' ', space + 1) - if space < 0 or \ - self._count_dollars_before_index(text, space) % 2 == 0: - break - if space < 0: - # Give up on breaking. 
- break - - self.output.write(leading_space + text[0:space] + ' $\n') - text = text[space+1:] - - # Subsequent lines are continuations, so indent them. - leading_space = ' ' * (indent+2) - - self.output.write(leading_space + text + '\n') - - def _as_list(self, input): - if input is None: - return [] - if isinstance(input, list): - return input - return [input] - - -def escape(string): - """Escape a string such that it can be embedded into a Ninja file without - further interpretation.""" - assert '\n' not in string, 'Ninja syntax does not allow newlines' - # We only have one special metacharacter: '$'. - return string.replace('$', '$$') diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/sun_tool.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/sun_tool.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/sun_tool.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/sun_tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""These functions are executed via gyp-sun-tool when using the Makefile -generator.""" - -import fcntl -import os -import struct -import subprocess -import sys - - -def main(args): - executor = SunTool() - executor.Dispatch(args) - - -class SunTool(object): - """This class performs all the SunOS tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - # Note that the stock python on SunOS has a bug - # where fcntl.flock(fd, LOCK_EX) always fails - # with EBADF, that's why we use this F_SETLK - # hack instead. - fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) - op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) - fcntl.fcntl(fd, fcntl.F_SETLK, op) - return subprocess.call(cmd_list) - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,193 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions for Windows builds. - -These functions are executed via gyp-win-tool when using the ninja generator. 
-""" - -from ctypes import windll, wintypes -import os -import shutil -import subprocess -import sys - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) - - -def main(args): - executor = WinTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class LinkLock(object): - """A flock-style lock to limit the number of concurrent links to one. - - Uses a session-local mutex based on the file's directory. - """ - def __enter__(self): - name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_') - self.mutex = windll.kernel32.CreateMutexW( - wintypes.c_int(0), - wintypes.c_int(0), - wintypes.create_unicode_buffer(name)) - assert self.mutex - result = windll.kernel32.WaitForSingleObject( - self.mutex, wintypes.c_int(0xFFFFFFFF)) - # 0x80 means another process was killed without releasing the mutex, but - # that this process has been given ownership. This is fine for our - # purposes. - assert result in (0, 0x80), ( - "%s, %s" % (result, windll.kernel32.GetLastError())) - - def __exit__(self, type, value, traceback): - windll.kernel32.ReleaseMutex(self.mutex) - windll.kernel32.CloseHandle(self.mutex) - - -class WinTool(object): - """This class performs all the Windows tooling steps. The methods can either - be executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like recursive-mirror to RecursiveMirror.""" - return name_string.title().replace('-', '') - - def _GetEnv(self, arch): - """Gets the saved environment from a file for a given architecture.""" - # The environment is saved as an "environment block" (see CreateProcess - # and msvs_emulation for details). We convert to a dict here. - # Drop last 2 NULs, one for list terminator, one for trailing vs. separator. - pairs = open(arch).read()[:-2].split('\0') - kvs = [item.split('=', 1) for item in pairs] - return dict(kvs) - - def ExecStamp(self, path): - """Simple stamp command.""" - open(path, 'w').close() - - def ExecRecursiveMirror(self, source, dest): - """Emulation of rm -rf out && cp -af in out.""" - if os.path.exists(dest): - if os.path.isdir(dest): - shutil.rmtree(dest) - else: - os.unlink(dest) - if os.path.isdir(source): - shutil.copytree(source, dest) - else: - shutil.copy2(source, dest) - - def ExecLinkWrapper(self, arch, *args): - """Filter diagnostic output from link that looks like: - ' Creating library ui.dll.lib and object ui.dll.exp' - This happens when there are exports from the dll or exe. - """ - with LinkLock(): - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if not line.startswith(' Creating library '): - print line - return popen.returncode - - def ExecManifestWrapper(self, arch, *args): - """Run manifest tool with environment set. 
Strip out undesirable warning - (some XML blocks are recognized by the OS loader, but not the manifest - tool).""" - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if line and 'manifest authoring warning 81010002' not in line: - print line - return popen.returncode - - def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, - *flags): - """Filter noisy filenames output from MIDL compile step that isn't - quietable via command line flags. - """ - args = ['midl', '/nologo'] + list(flags) + [ - '/out', outdir, - '/tlb', tlb, - '/h', h, - '/dlldata', dlldata, - '/iid', iid, - '/proxy', proxy, - idl] - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - # Filter junk out of stdout, and write filtered versions. Output we want - # to filter is pairs of lines that look like this: - # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl - # objidl.idl - lines = out.splitlines() - prefix = 'Processing ' - processing = set(os.path.basename(x) for x in lines if x.startswith(prefix)) - for line in lines: - if not line.startswith(prefix) and line not in processing: - print line - return popen.returncode - - def ExecAsmWrapper(self, arch, *args): - """Filter logo banner from invocations of asm.exe.""" - env = self._GetEnv(arch) - # MSVS doesn't assemble x64 asm files. - if arch == 'environment.x64': - return 0 - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if (not line.startswith('Copyright (C) Microsoft Corporation') and - not line.startswith('Microsoft (R) Macro Assembler') and - not line.startswith(' Assembling: ') and - line): - print line - return popen.returncode - - def ExecRcWrapper(self, arch, *args): - """Filter logo banner from invocations of rc.exe. Older versions of RC - don't support the /nologo flag.""" - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and - not line.startswith('Copyright (C) Microsoft Corporation') and - line): - print line - return popen.returncode - - def ExecActionWrapper(self, arch, rspfile, *dir): - """Runs an action command line from a response file using the environment - for |arch|. If |dir| is supplied, use that as the working directory.""" - env = self._GetEnv(arch) - args = open(rspfile).read() - dir = dir[0] if dir else None - popen = subprocess.Popen(args, shell=True, env=env, cwd=dir) - popen.wait() - return popen.returncode - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1083 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -This module contains classes that help to emulate xcodebuild behavior on top of -other build systems, such as make and ninja. -""" - -import gyp.common -import os.path -import re -import shlex -import subprocess -import sys -from gyp.common import GypError - -class XcodeSettings(object): - """A class that understands the gyp 'xcode_settings' object.""" - - # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached - # at class-level for efficiency. - _sdk_path_cache = {} - - def __init__(self, spec): - self.spec = spec - - # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. - # This means self.xcode_settings[config] always contains all settings - # for that config -- the per-target settings as well. Settings that are - # the same for all configs are implicitly per-target settings. - self.xcode_settings = {} - configs = spec['configurations'] - for configname, config in configs.iteritems(): - self.xcode_settings[configname] = config.get('xcode_settings', {}) - - # This is only non-None temporarily during the execution of some methods. - self.configname = None - - # Used by _AdjustLibrary to match .a and .dylib entries in libraries. - self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$') - - def _Settings(self): - assert self.configname - return self.xcode_settings[self.configname] - - def _Test(self, test_key, cond_key, default): - return self._Settings().get(test_key, default) == cond_key - - def _Appendf(self, lst, test_key, format_str, default=None): - if test_key in self._Settings(): - lst.append(format_str % str(self._Settings()[test_key])) - elif default: - lst.append(format_str % str(default)) - - def _WarnUnimplemented(self, test_key): - if test_key in self._Settings(): - print 'Warning: Ignoring not yet implemented key "%s".' % test_key - - def _IsBundle(self): - return int(self.spec.get('mac_bundle', 0)) != 0 - - def GetFrameworkVersion(self): - """Returns the framework version of the current target. Only valid for - bundles.""" - assert self._IsBundle() - return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A') - - def GetWrapperExtension(self): - """Returns the bundle extension (.app, .framework, .plugin, etc). Only - valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] in ('loadable_module', 'shared_library'): - default_wrapper_extension = { - 'loadable_module': 'bundle', - 'shared_library': 'framework', - }[self.spec['type']] - wrapper_extension = self.GetPerTargetSetting( - 'WRAPPER_EXTENSION', default=default_wrapper_extension) - return '.' + self.spec.get('product_extension', wrapper_extension) - elif self.spec['type'] == 'executable': - return '.app' - else: - assert False, "Don't know extension for '%s', target '%s'" % ( - self.spec['type'], self.spec['target_name']) - - def GetProductName(self): - """Returns PRODUCT_NAME.""" - return self.spec.get('product_name', self.spec['target_name']) - - def GetFullProductName(self): - """Returns FULL_PRODUCT_NAME.""" - if self._IsBundle(): - return self.GetWrapperName() - else: - return self._GetStandaloneBinaryPath() - - def GetWrapperName(self): - """Returns the directory name of the bundle represented by this target. - Only valid for bundles.""" - assert self._IsBundle() - return self.GetProductName() + self.GetWrapperExtension() - - def GetBundleContentsFolderPath(self): - """Returns the qualified path to the bundle's contents folder. E.g. 
- Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] == 'shared_library': - return os.path.join( - self.GetWrapperName(), 'Versions', self.GetFrameworkVersion()) - else: - # loadable_modules have a 'Contents' folder like executables. - return os.path.join(self.GetWrapperName(), 'Contents') - - def GetBundleResourceFolder(self): - """Returns the qualified path to the bundle's resource folder. E.g. - Chromium.app/Contents/Resources. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), 'Resources') - - def GetBundlePlistPath(self): - """Returns the qualified path to the bundle's plist file. E.g. - Chromium.app/Contents/Info.plist. Only valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] in ('executable', 'loadable_module'): - return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist') - else: - return os.path.join(self.GetBundleContentsFolderPath(), - 'Resources', 'Info.plist') - - def GetProductType(self): - """Returns the PRODUCT_TYPE of this target.""" - if self._IsBundle(): - return { - 'executable': 'com.apple.product-type.application', - 'loadable_module': 'com.apple.product-type.bundle', - 'shared_library': 'com.apple.product-type.framework', - }[self.spec['type']] - else: - return { - 'executable': 'com.apple.product-type.tool', - 'loadable_module': 'com.apple.product-type.library.dynamic', - 'shared_library': 'com.apple.product-type.library.dynamic', - 'static_library': 'com.apple.product-type.library.static', - }[self.spec['type']] - - def GetMachOType(self): - """Returns the MACH_O_TYPE of this target.""" - # Weird, but matches Xcode. - if not self._IsBundle() and self.spec['type'] == 'executable': - return '' - return { - 'executable': 'mh_execute', - 'static_library': 'staticlib', - 'shared_library': 'mh_dylib', - 'loadable_module': 'mh_bundle', - }[self.spec['type']] - - def _GetBundleBinaryPath(self): - """Returns the name of the bundle binary of by this target. - E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] in ('shared_library'): - path = self.GetBundleContentsFolderPath() - elif self.spec['type'] in ('executable', 'loadable_module'): - path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS') - return os.path.join(path, self.GetExecutableName()) - - def _GetStandaloneExecutableSuffix(self): - if 'product_extension' in self.spec: - return '.' + self.spec['product_extension'] - return { - 'executable': '', - 'static_library': '.a', - 'shared_library': '.dylib', - 'loadable_module': '.so', - }[self.spec['type']] - - def _GetStandaloneExecutablePrefix(self): - return self.spec.get('product_prefix', { - 'executable': '', - 'static_library': 'lib', - 'shared_library': 'lib', - # Non-bundled loadable_modules are called foo.so for some reason - # (that is, .so and no prefix) with the xcode build -- match that. - 'loadable_module': '', - }[self.spec['type']]) - - def _GetStandaloneBinaryPath(self): - """Returns the name of the non-bundle binary represented by this target. - E.g. hello_world. 
Only valid for non-bundles.""" - assert not self._IsBundle() - assert self.spec['type'] in ( - 'executable', 'shared_library', 'static_library', 'loadable_module'), ( - 'Unexpected type %s' % self.spec['type']) - target = self.spec['target_name'] - if self.spec['type'] == 'static_library': - if target[:3] == 'lib': - target = target[3:] - elif self.spec['type'] in ('loadable_module', 'shared_library'): - if target[:3] == 'lib': - target = target[3:] - - target_prefix = self._GetStandaloneExecutablePrefix() - target = self.spec.get('product_name', target) - target_ext = self._GetStandaloneExecutableSuffix() - return target_prefix + target + target_ext - - def GetExecutableName(self): - """Returns the executable name of the bundle represented by this target. - E.g. Chromium.""" - if self._IsBundle(): - return self.spec.get('product_name', self.spec['target_name']) - else: - return self._GetStandaloneBinaryPath() - - def GetExecutablePath(self): - """Returns the directory name of the bundle represented by this target. E.g. - Chromium.app/Contents/MacOS/Chromium.""" - if self._IsBundle(): - return self._GetBundleBinaryPath() - else: - return self._GetStandaloneBinaryPath() - - def _GetSdkVersionInfoItem(self, sdk, infoitem): - job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - out = job.communicate()[0] - if job.returncode != 0: - sys.stderr.write(out + '\n') - raise GypError('Error %d running xcodebuild' % job.returncode) - return out.rstrip('\n') - - def _SdkPath(self): - sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx') - if sdk_root not in XcodeSettings._sdk_path_cache: - XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem( - sdk_root, 'Path') - return XcodeSettings._sdk_path_cache[sdk_root] - - def _AppendPlatformVersionMinFlags(self, lst): - self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') - if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings(): - # TODO: Implement this better? - sdk_path_basename = os.path.basename(self._SdkPath()) - if sdk_path_basename.lower().startswith('iphonesimulator'): - self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET', - '-mios-simulator-version-min=%s') - else: - self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET', - '-miphoneos-version-min=%s') - - def GetCflags(self, configname): - """Returns flags that need to be added to .c, .cc, .m, and .mm - compilations.""" - # This functions (and the similar ones below) do not offer complete - # emulation of all xcode_settings keys. They're implemented on demand. - - self.configname = configname - cflags = [] - - sdk_root = self._SdkPath() - if 'SDKROOT' in self._Settings(): - cflags.append('-isysroot %s' % sdk_root) - - if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'): - cflags.append('-Wconstant-conversion') - - if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'): - cflags.append('-funsigned-char') - - if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'): - cflags.append('-fasm-blocks') - - if 'GCC_DYNAMIC_NO_PIC' in self._Settings(): - if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES': - cflags.append('-mdynamic-no-pic') - else: - pass - # TODO: In this case, it depends on the target. 
xcode passes - # mdynamic-no-pic by default for executable and possibly static lib - # according to mento - - if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'): - cflags.append('-mpascal-strings') - - self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s') - - if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'): - dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf') - if dbg_format == 'dwarf': - cflags.append('-gdwarf-2') - elif dbg_format == 'stabs': - raise NotImplementedError('stabs debug format is not supported yet.') - elif dbg_format == 'dwarf-with-dsym': - cflags.append('-gdwarf-2') - else: - raise NotImplementedError('Unknown debug format %s' % dbg_format) - - if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'): - cflags.append('-fvisibility=hidden') - - if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'): - cflags.append('-Werror') - - if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'): - cflags.append('-Wnewline-eof') - - self._AppendPlatformVersionMinFlags(cflags) - - # TODO: - if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'): - self._WarnUnimplemented('COPY_PHASE_STRIP') - self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS') - self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS') - - # TODO: This is exported correctly, but assigning to it is not supported. - self._WarnUnimplemented('MACH_O_TYPE') - self._WarnUnimplemented('PRODUCT_TYPE') - - archs = self._Settings().get('ARCHS', ['i386']) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented('ARCHS') - archs = ['i386'] - cflags.append('-arch ' + archs[0]) - - if archs[0] in ('i386', 'x86_64'): - if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'): - cflags.append('-msse3') - if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES', - default='NO'): - cflags.append('-mssse3') # Note 3rd 's'. - if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'): - cflags.append('-msse4.1') - if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'): - cflags.append('-msse4.2') - - cflags += self._Settings().get('WARNING_CFLAGS', []) - - config = self.spec['configurations'][self.configname] - framework_dirs = config.get('mac_framework_dirs', []) - for directory in framework_dirs: - cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) - - self.configname = None - return cflags - - def GetCflagsC(self, configname): - """Returns flags that need to be added to .c, and .m compilations.""" - self.configname = configname - cflags_c = [] - self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s') - cflags_c += self._Settings().get('OTHER_CFLAGS', []) - self.configname = None - return cflags_c - - def GetCflagsCC(self, configname): - """Returns flags that need to be added to .cc, and .mm compilations.""" - self.configname = configname - cflags_cc = [] - - clang_cxx_language_standard = self._Settings().get( - 'CLANG_CXX_LANGUAGE_STANDARD') - # Note: Don't make c++0x to c++11 so that c++0x can be used with older - # clangs that don't understand c++11 yet (like Xcode 4.2's). 
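As a quick standalone illustration of how these two settings surface as clang flags (a plain dict stands in for the parsed xcode_settings; this is a sketch, not gyp's API):

```python
settings = {'CLANG_CXX_LANGUAGE_STANDARD': 'c++0x',
            'CLANG_CXX_LIBRARY': 'libc++'}
cflags_cc = []
std = settings.get('CLANG_CXX_LANGUAGE_STANDARD')
if std:
    # Passed through unmapped, so 'c++0x' stays 'c++0x' per the note above.
    cflags_cc.append('-std=%s' % std)
if 'CLANG_CXX_LIBRARY' in settings:
    cflags_cc.append('-stdlib=%s' % settings['CLANG_CXX_LIBRARY'])
print(cflags_cc)  # ['-std=c++0x', '-stdlib=libc++']
```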
- if clang_cxx_language_standard: - cflags_cc.append('-std=%s' % clang_cxx_language_standard) - - self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s') - - if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'): - cflags_cc.append('-fno-rtti') - if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'): - cflags_cc.append('-fno-exceptions') - if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'): - cflags_cc.append('-fvisibility-inlines-hidden') - if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'): - cflags_cc.append('-fno-threadsafe-statics') - # Note: This flag is a no-op for clang, it only has an effect for gcc. - if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'): - cflags_cc.append('-Wno-invalid-offsetof') - - other_ccflags = [] - - for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']): - # TODO: More general variable expansion. Missing in many other places too. - if flag in ('$inherited', '$(inherited)', '${inherited}'): - flag = '$OTHER_CFLAGS' - if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'): - other_ccflags += self._Settings().get('OTHER_CFLAGS', []) - else: - other_ccflags.append(flag) - cflags_cc += other_ccflags - - self.configname = None - return cflags_cc - - def _AddObjectiveCGarbageCollectionFlags(self, flags): - gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported') - if gc_policy == 'supported': - flags.append('-fobjc-gc') - elif gc_policy == 'required': - flags.append('-fobjc-gc-only') - - def GetCflagsObjC(self, configname): - """Returns flags that need to be added to .m compilations.""" - self.configname = configname - cflags_objc = [] - - self._AddObjectiveCGarbageCollectionFlags(cflags_objc) - - self.configname = None - return cflags_objc - - def GetCflagsObjCC(self, configname): - """Returns flags that need to be added to .mm compilations.""" - self.configname = configname - cflags_objcc = [] - self._AddObjectiveCGarbageCollectionFlags(cflags_objcc) - if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'): - cflags_objcc.append('-fobjc-call-cxx-cdtors') - self.configname = None - return cflags_objcc - - def GetInstallNameBase(self): - """Return DYLIB_INSTALL_NAME_BASE for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if (self.spec['type'] != 'shared_library' and - (self.spec['type'] != 'loadable_module' or self._IsBundle())): - return None - install_base = self.GetPerTargetSetting( - 'DYLIB_INSTALL_NAME_BASE', - default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib') - return install_base - - def _StandardizePath(self, path): - """Do :standardizepath processing for path.""" - # I'm not quite sure what :standardizepath does. Just call normpath(), - # but don't let @executable_path/../foo collapse to foo. - if '/' in path: - prefix, rest = '', path - if path.startswith('@'): - prefix, rest = path.split('/', 1) - rest = os.path.normpath(rest) # :standardizepath - path = os.path.join(prefix, rest) - return path - - def GetInstallName(self): - """Return LD_DYLIB_INSTALL_NAME for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. 
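_StandardizePath's guess at Xcode's :standardizepath operator (see the method above) can be exercised on its own. A standalone sketch with a hypothetical name but the same logic:

```python
import os.path

def standardize_path(path):
    # normpath() the tail, but keep an @executable_path/-style prefix
    # from being collapsed into the rest of the path.
    if '/' in path:
        prefix, rest = '', path
        if path.startswith('@'):
            prefix, rest = path.split('/', 1)
        path = os.path.join(prefix, os.path.normpath(rest))
    return path

assert standardize_path('@executable_path/../Frameworks') == \
    '@executable_path/../Frameworks'
assert standardize_path('/usr/local/./lib') == '/usr/local/lib'
```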
- if (self.spec['type'] != 'shared_library' and - (self.spec['type'] != 'loadable_module' or self._IsBundle())): - return None - - default_install_name = \ - '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)' - install_name = self.GetPerTargetSetting( - 'LD_DYLIB_INSTALL_NAME', default=default_install_name) - - # Hardcode support for the variables used in chromium for now, to - # unblock people using the make build. - if '$' in install_name: - assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/' - '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), ( - 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported ' - 'yet in target \'%s\' (got \'%s\')' % - (self.spec['target_name'], install_name)) - - install_name = install_name.replace( - '$(DYLIB_INSTALL_NAME_BASE:standardizepath)', - self._StandardizePath(self.GetInstallNameBase())) - if self._IsBundle(): - # These are only valid for bundles, hence the |if|. - install_name = install_name.replace( - '$(WRAPPER_NAME)', self.GetWrapperName()) - install_name = install_name.replace( - '$(PRODUCT_NAME)', self.GetProductName()) - else: - assert '$(WRAPPER_NAME)' not in install_name - assert '$(PRODUCT_NAME)' not in install_name - - install_name = install_name.replace( - '$(EXECUTABLE_PATH)', self.GetExecutablePath()) - return install_name - - def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path): - """Checks if ldflag contains a filename and if so remaps it from - gyp-directory-relative to build-directory-relative.""" - # This list is expanded on demand. - # They get matched as: - # -exported_symbols_list file - # -Wl,exported_symbols_list file - # -Wl,exported_symbols_list,file - LINKER_FILE = '(\S+)' - WORD = '\S+' - linker_flags = [ - ['-exported_symbols_list', LINKER_FILE], # Needed for NaCl. - ['-unexported_symbols_list', LINKER_FILE], - ['-reexported_symbols_list', LINKER_FILE], - ['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting. - ] - for flag_pattern in linker_flags: - regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern)) - m = regex.match(ldflag) - if m: - ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \ - ldflag[m.end(1):] - # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, - # TODO(thakis): Update ffmpeg.gyp): - if ldflag.startswith('-L'): - ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):]) - return ldflag - - def GetLdflags(self, configname, product_dir, gyp_to_build_path): - """Returns flags that need to be passed to the linker. - - Args: - configname: The name of the configuration to get ld flags for. - product_dir: The directory where products such static and dynamic - libraries are placed. This is added to the library search path. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. - """ - self.configname = configname - ldflags = [] - - # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS - # can contain entries that depend on this. Explicitly absolutify these. 
- for ldflag in self._Settings().get('OTHER_LDFLAGS', []): - ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) - - if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): - ldflags.append('-Wl,-dead_strip') - - if self._Test('PREBINDING', 'YES', default='NO'): - ldflags.append('-Wl,-prebind') - - self._Appendf( - ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') - self._Appendf( - ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') - - self._AppendPlatformVersionMinFlags(ldflags) - - if 'SDKROOT' in self._Settings(): - ldflags.append('-isysroot ' + self._SdkPath()) - - for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): - ldflags.append('-L' + gyp_to_build_path(library_path)) - - if 'ORDER_FILE' in self._Settings(): - ldflags.append('-Wl,-order_file ' + - '-Wl,' + gyp_to_build_path( - self._Settings()['ORDER_FILE'])) - - archs = self._Settings().get('ARCHS', ['i386']) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented('ARCHS') - archs = ['i386'] - ldflags.append('-arch ' + archs[0]) - - # Xcode adds the product directory by default. - ldflags.append('-L' + product_dir) - - install_name = self.GetInstallName() - if install_name: - ldflags.append('-install_name ' + install_name.replace(' ', r'\ ')) - - for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): - ldflags.append('-Wl,-rpath,' + rpath) - - config = self.spec['configurations'][self.configname] - framework_dirs = config.get('mac_framework_dirs', []) - for directory in framework_dirs: - ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath())) - - self.configname = None - return ldflags - - def GetLibtoolflags(self, configname): - """Returns flags that need to be passed to the static linker. - - Args: - configname: The name of the configuration to get ld flags for. - """ - self.configname = configname - libtoolflags = [] - - for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []): - libtoolflags.append(libtoolflag) - # TODO(thakis): ARCHS? - - self.configname = None - return libtoolflags - - def GetPerTargetSettings(self): - """Gets a list of all the per-target settings. This will only fetch keys - whose values are the same across all configurations.""" - first_pass = True - result = {} - for configname in sorted(self.xcode_settings.keys()): - if first_pass: - result = dict(self.xcode_settings[configname]) - first_pass = False - else: - for key, value in self.xcode_settings[configname].iteritems(): - if key not in result: - continue - elif result[key] != value: - del result[key] - return result - - def GetPerTargetSetting(self, setting, default=None): - """Tries to get xcode_settings.setting from spec. Assumes that the setting - has the same value in all configurations and throws otherwise.""" - first_pass = True - result = None - for configname in sorted(self.xcode_settings.keys()): - if first_pass: - result = self.xcode_settings[configname].get(setting, None) - first_pass = False - else: - assert result == self.xcode_settings[configname].get(setting, None), ( - "Expected per-target setting for '%s', got per-config setting " - "(target %s)" % (setting, spec['target_name'])) - if result is None: - return default - return result - - def _GetStripPostbuilds(self, configname, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - neccessary to strip this target's binary. 
These should be run as postbuilds - before the actual postbuilds run.""" - self.configname = configname - - result = [] - if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and - self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')): - - default_strip_style = 'debugging' - if self._IsBundle(): - default_strip_style = 'non-global' - elif self.spec['type'] == 'executable': - default_strip_style = 'all' - - strip_style = self._Settings().get('STRIP_STYLE', default_strip_style) - strip_flags = { - 'all': '', - 'non-global': '-x', - 'debugging': '-S', - }[strip_style] - - explicit_strip_flags = self._Settings().get('STRIPFLAGS', '') - if explicit_strip_flags: - strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags) - - if not quiet: - result.append('echo STRIP\\(%s\\)' % self.spec['target_name']) - result.append('strip %s %s' % (strip_flags, output_binary)) - - self.configname = None - return result - - def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - necessary to massage this target's debug information. These should be run - as postbuilds before the actual postbuilds run.""" - self.configname = configname - - # For static libraries, no dSYMs are created. - result = [] - if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and - self._Test( - 'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and - self.spec['type'] != 'static_library'): - if not quiet: - result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name']) - result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM')) - - self.configname = None - return result - - def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False): - """Returns a list of shell commands that contain the shell commands - to run as postbuilds for this target, before the actual postbuilds.""" - # dSYMs need to build before stripping happens. - return ( - self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) + - self._GetStripPostbuilds(configname, output_binary, quiet)) - - def _AdjustLibrary(self, library): - if library.endswith('.framework'): - l = '-framework ' + os.path.splitext(os.path.basename(library))[0] - else: - m = self.library_re.match(library) - if m: - l = '-l' + m.group(1) - else: - l = library - return l.replace('$(SDKROOT)', self._SdkPath()) - - def AdjustLibraries(self, libraries): - """Transforms entries like 'Cocoa.framework' in libraries into entries like - '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. - """ - libraries = [ self._AdjustLibrary(library) for library in libraries] - return libraries - - -class MacPrefixHeader(object): - """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. - - This feature consists of several pieces: - * If GCC_PREFIX_HEADER is present, all compilations in that project get an - additional |-include path_to_prefix_header| cflag. - * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is - instead compiled, and all other compilations in the project get an - additional |-include path_to_compiled_header| instead. - + Compiled prefix headers have the extension gch. There is one gch file for - every language used in the project (c, cc, m, mm), since gch files for - different languages aren't compatible. - + gch files themselves are built with the target's normal cflags, but they - obviously don't get the |-include| flag.
Instead, they need a -x flag that - describes their language. - + All o files in the target need to depend on the gch file, to make sure - it's built before any o file is built. - - This class helps with some of these tasks, but it needs help from the build - system for writing dependencies to the gch files, for writing build commands - for the gch files, and for figuring out the location of the gch files. - """ - def __init__(self, xcode_settings, - gyp_path_to_build_path, gyp_path_to_build_output): - """If xcode_settings is None, all methods on this class are no-ops. - - Args: - gyp_path_to_build_path: A function that takes a gyp-relative path, - and returns a path relative to the build directory. - gyp_path_to_build_output: A function that takes a gyp-relative path and - a language code ('c', 'cc', 'm', or 'mm'), and that returns a path - to where the output of precompiling that path for that language - should be placed (without the trailing '.gch'). - """ - # This doesn't support per-configuration prefix headers. Good enough - # for now. - self.header = None - self.compile_headers = False - if xcode_settings: - self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER') - self.compile_headers = xcode_settings.GetPerTargetSetting( - 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO' - self.compiled_headers = {} - if self.header: - if self.compile_headers: - for lang in ['c', 'cc', 'm', 'mm']: - self.compiled_headers[lang] = gyp_path_to_build_output( - self.header, lang) - self.header = gyp_path_to_build_path(self.header) - - def GetInclude(self, lang): - """Gets the cflags to include the prefix header for language |lang|.""" - if self.compile_headers and lang in self.compiled_headers: - return '-include %s' % self.compiled_headers[lang] - elif self.header: - return '-include %s' % self.header - else: - return '' - - def _Gch(self, lang): - """Returns the actual file name of the prefix header for language |lang|.""" - assert self.compile_headers - return self.compiled_headers[lang] + '.gch' - - def GetObjDependencies(self, sources, objs): - """Given a list of source files and the corresponding object files, returns - a list of (source, object, gch) tuples, where |gch| is the build-directory - relative path to the gch file each object file depends on. |sources[i]| - has to be the source file belonging to |objs[i]|.""" - if not self.header or not self.compile_headers: - return [] - - result = [] - for source, obj in zip(sources, objs): - ext = os.path.splitext(source)[1] - lang = { - '.c': 'c', - '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc', - '.m': 'm', - '.mm': 'mm', - }.get(ext, None) - if lang: - result.append((source, obj, self._Gch(lang))) - return result - - def GetPchBuildCommands(self): - """Returns [(path_to_gch, language_flag, language, header)]. - |path_to_gch| and |header| are relative to the build directory. - """ - if not self.header or not self.compile_headers: - return [] - return [ - (self._Gch('c'), '-x c-header', 'c', self.header), - (self._Gch('cc'), '-x c++-header', 'cc', self.header), - (self._Gch('m'), '-x objective-c-header', 'm', self.header), - (self._Gch('mm'), '-x objective-c++-header', 'mm', self.header), - ] - - -def MergeGlobalXcodeSettingsToSpec(global_dict, spec): - """Merges the global xcode_settings dictionary into each configuration of the - target represented by spec. For keys that are both in the global and the local - xcode_settings dict, the local key gets precedence.
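The extension-to-language mapping that drives GetObjDependencies above is easy to show standalone; file names and output paths here are invented:

```python
import os

# Language code inferred from a source file's extension (as in the method above).
EXT_TO_LANG = {
    '.c': 'c',
    '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
    '.m': 'm',
    '.mm': 'mm',
}

def gch_dependencies(sources, objs, compiled_headers):
    # Yield (source, object, gch) triples; compiled_headers maps lang -> path.
    for source, obj in zip(sources, objs):
        lang = EXT_TO_LANG.get(os.path.splitext(source)[1])
        if lang:
            yield source, obj, compiled_headers[lang] + '.gch'

headers = dict((lang, 'out/prefix-' + lang) for lang in ('c', 'cc', 'm', 'mm'))
for dep in gch_dependencies(['a.c', 'b.mm', 'c.S'], ['a.o', 'b.o', 'c.o'], headers):
    print(dep)
```

c.S is skipped, since assembly sources have no prefix-header language.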
- """ - # The xcode generator special-cases global xcode_settings and does something - # that amounts to merging in the global xcode_settings into each local - # xcode_settings dict. - global_xcode_settings = global_dict.get('xcode_settings', {}) - for config in spec['configurations'].values(): - if 'xcode_settings' in config: - new_settings = global_xcode_settings.copy() - new_settings.update(config['xcode_settings']) - config['xcode_settings'] = new_settings - - -def IsMacBundle(flavor, spec): - """Returns if |spec| should be treated as a bundle. - - Bundles are directories with a certain subdirectory structure, instead of - just a single file. Bundle rules do not produce a binary but also package - resources into that directory.""" - is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') - if is_mac_bundle: - assert spec['type'] != 'none', ( - 'mac_bundle targets cannot have type none (target "%s")' % - spec['target_name']) - return is_mac_bundle - - -def GetMacBundleResources(product_dir, xcode_settings, resources): - """Yields (output, resource) pairs for every resource in |resources|. - Only call this for mac bundle targets. - - Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. - xcode_settings: The XcodeSettings of the current target. - resources: A list of bundle resources, relative to the build directory. - """ - dest = os.path.join(product_dir, - xcode_settings.GetBundleResourceFolder()) - for res in resources: - output = dest - - # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. - assert ' ' not in res, ( - "Spaces in resource filenames not supported (%s)" % res) - - # Split into (path,file). - res_parts = os.path.split(res) - - # Now split the path into (prefix,maybe.lproj). - lproj_parts = os.path.split(res_parts[0]) - # If the resource lives in a .lproj bundle, add that to the destination. - if lproj_parts[1].endswith('.lproj'): - output = os.path.join(output, lproj_parts[1]) - - output = os.path.join(output, res_parts[1]) - # Compiled XIB files are referred to by .nib. - if output.endswith('.xib'): - output = output[0:-3] + 'nib' - - yield output, res - - -def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): - """Returns (info_plist, dest_plist, defines, extra_env), where: - * |info_plist| is the sourc plist path, relative to the - build directory, - * |dest_plist| is the destination plist path, relative to the - build directory, - * |defines| is a list of preprocessor defines (empty if the plist - shouldn't be preprocessed, - * |extra_env| is a dict of env variables that should be exported when - invoking |mac_tool copy-info-plist|. - - Only call this for mac bundle targets. - - Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. - xcode_settings: The XcodeSettings of the current target. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. - """ - info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') - if not info_plist: - return None, None, [], {} - - # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. 
- assert ' ' not in info_plist, ( - "Spaces in Info.plist filenames not supported (%s)" % info_plist) - - info_plist = gyp_path_to_build_path(info_plist) - - # If explicitly set to preprocess the plist, invoke the C preprocessor and - # specify any defines as -D flags. - if xcode_settings.GetPerTargetSetting( - 'INFOPLIST_PREPROCESS', default='NO') == 'YES': - # Create an intermediate file based on the path. - defines = shlex.split(xcode_settings.GetPerTargetSetting( - 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default='')) - else: - defines = [] - - dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) - extra_env = xcode_settings.GetPerTargetSettings() - - return info_plist, dest_plist, defines, extra_env - - -def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, - additional_settings=None): - """Return the environment variables that Xcode would set. See - http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 - for a full list. - - Args: - xcode_settings: An XcodeSettings object. If this is None, this function - returns an empty dict. - built_products_dir: Absolute path to the built products dir. - srcroot: Absolute path to the source root. - configuration: The build configuration name. - additional_settings: An optional dict with more values to add to the - result. - """ - if not xcode_settings: return {} - - # This function is considered a friend of XcodeSettings, so let it reach into - # its implementation details. - spec = xcode_settings.spec - - # These are filled in on an as-needed basis. - env = { - 'BUILT_PRODUCTS_DIR' : built_products_dir, - 'CONFIGURATION' : configuration, - 'PRODUCT_NAME' : xcode_settings.GetProductName(), - # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME - 'SRCROOT' : srcroot, - 'SOURCE_ROOT': '${SRCROOT}', - # This is not true for static libraries, but currently the env is only - # written for bundles: - 'TARGET_BUILD_DIR' : built_products_dir, - 'TEMP_DIR' : '${TMPDIR}', - } - if xcode_settings.GetPerTargetSetting('SDKROOT'): - env['SDKROOT'] = xcode_settings._SdkPath() - else: - env['SDKROOT'] = '' - - if spec['type'] in ( - 'executable', 'static_library', 'shared_library', 'loadable_module'): - env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() - env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath() - env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName() - mach_o_type = xcode_settings.GetMachOType() - if mach_o_type: - env['MACH_O_TYPE'] = mach_o_type - env['PRODUCT_TYPE'] = xcode_settings.GetProductType() - if xcode_settings._IsBundle(): - env['CONTENTS_FOLDER_PATH'] = \ - xcode_settings.GetBundleContentsFolderPath() - env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ - xcode_settings.GetBundleResourceFolder() - env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() - env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() - - install_name = xcode_settings.GetInstallName() - if install_name: - env['LD_DYLIB_INSTALL_NAME'] = install_name - install_name_base = xcode_settings.GetInstallNameBase() - if install_name_base: - env['DYLIB_INSTALL_NAME_BASE'] = install_name_base - - if not additional_settings: - additional_settings = {} - else: - # Flatten lists to strings.
- for k in additional_settings: - if not isinstance(additional_settings[k], str): - additional_settings[k] = ' '.join(additional_settings[k]) - additional_settings.update(env) - - for k in additional_settings: - additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) - - return additional_settings - - -def _NormalizeEnvVarReferences(str): - """Takes a string containing variable references in the form ${FOO}, $(FOO), - or $FOO, and returns a string with all variable references in the form ${FOO}. - """ - # $FOO -> ${FOO} - str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str) - - # $(FOO) -> ${FOO} - matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str) - for match in matches: - to_replace, variable = match - assert '$(' not in match, '$($(FOO)) variables not supported: ' + match - str = str.replace(to_replace, '${' + variable + '}') - - return str - - -def ExpandEnvVars(string, expansions): - """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the - expansions list. If the variable expands to something that references - another variable, this variable is expanded as well if it's in env -- - until no variables present in env are left.""" - for k, v in reversed(expansions): - string = string.replace('${' + k + '}', v) - string = string.replace('$(' + k + ')', v) - string = string.replace('$' + k, v) - return string - - -def _TopologicallySortedEnvVarKeys(env): - """Takes a dict |env| whose values are strings that can refer to other keys, - for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of - env such that key2 is after key1 in L if env[key2] refers to env[key1]. - - Throws an Exception in case of dependency cycles. - """ - # Since environment variables can refer to other variables, the evaluation - # order is important. Below is the logic to compute the dependency graph - # and sort it. - regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}') - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_variable. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - # We can then reverse the result of the topological sort at the end. - # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - matches = set([v for v in regex.findall(env[node]) if v in env]) - for dependee in matches: - assert '${' not in dependee, 'Nested variables not supported: ' + dependee - return matches - - try: - # Topologically sort, and then reverse, because we used an edge definition - # that's inverted from the expected result of this function (see comment - # above).
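The edge-reversal trick described in the comment above can also be expressed with the stdlib graphlib (Python 3.9+; not what gyp uses, which is gyp.common.TopologicallySorted plus an explicit reverse). graphlib's predecessor convention yields the dependency-first order directly:

```python
import re
from graphlib import TopologicalSorter

env = {  # invented example values
    'BUILT_PRODUCTS_DIR': 'out/Release',
    'TARGET_BUILD_DIR': '${BUILT_PRODUCTS_DIR}',
    'WRAPPER_NAME': 'Foo.app',
    'INFOPLIST_PATH': '${WRAPPER_NAME}/Contents/Info.plist',
}
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')

# Each key's predecessors are the keys its value references.
graph = dict((k, set(v for v in regex.findall(env[k]) if v in env)) for k in env)
order = list(TopologicalSorter(graph).static_order())
print(order)  # referenced keys always precede the keys that use them
```

TopologicalSorter raises graphlib.CycleError on cyclic definitions, analogous to the CycleError handling below.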
- order = gyp.common.TopologicallySorted(env.keys(), GetEdges) - order.reverse() - return order - except gyp.common.CycleError, e: - raise GypError( - 'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) - - -def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot, - configuration, additional_settings=None): - env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, - additional_settings) - return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)] - - -def GetSpecPostbuildCommands(spec, quiet=False): - """Returns the list of postbuilds explicitly defined on |spec|, in a form - executable by a shell.""" - postbuilds = [] - for postbuild in spec.get('postbuilds', []): - if not quiet: - postbuilds.append('echo POSTBUILD\\(%s\\) %s' % ( - spec['target_name'], postbuild['postbuild_name'])) - postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action'])) - return postbuilds Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.pyc and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.pyc differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py 2013-07-02 18:04:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,2870 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode project file generator. - -This module is both an Xcode project file generator and a documentation of the -Xcode project file format. Knowledge of the project file format was gained -based on extensive experience with Xcode, and by making changes to projects in -Xcode.app and observing the resultant changes in the associated project files. - -XCODE PROJECT FILES - -The generator targets the file format as written by Xcode 3.2 (specifically, -3.2.6), but past experience has taught that the format has not changed -significantly in the past several years, and future versions of Xcode are able -to read older project files. - -Xcode project files are "bundled": the project "file" from an end-user's -perspective is actually a directory with an ".xcodeproj" extension. The -project file from this module's perspective is actually a file inside this -directory, always named "project.pbxproj". This file contains a complete -description of the project and is all that is needed to use the xcodeproj. -Other files contained in the xcodeproj directory are simply used to store -per-user settings, such as the state of various UI elements in the Xcode -application. - -The project.pbxproj file is a property list, stored in a format almost -identical to the NeXTstep property list format. The file is able to carry -Unicode data, and is encoded in UTF-8. The root element in the property list -is a dictionary that contains several properties of minimal interest, and two -properties of immense interest. The most important property is a dictionary -named "objects". The entire structure of the project is represented by the -children of this property. The objects dictionary is keyed by unique 96-bit -values represented by 24 uppercase hexadecimal characters. 
Each value in the -objects dictionary is itself a dictionary, describing an individual object. - -Each object in the dictionary is a member of a class, which is identified by -the "isa" property of each object. A variety of classes are represented in a -project file. Objects can refer to other objects by ID, using the 24-character -hexadecimal object key. A project's objects form a tree, with a root object -of class PBXProject at the root. As an example, the PBXProject object serves -as parent to an XCConfigurationList object defining the build configurations -used in the project, a PBXGroup object serving as a container for all files -referenced in the project, and a list of target objects, each of which defines -a target in the project. There are several different types of target object, -such as PBXNativeTarget and PBXAggregateTarget. In this module, this -relationship is expressed by having each target type derive from an abstract -base named XCTarget. - -The project.pbxproj file's root dictionary also contains a property, sibling to -the "objects" dictionary, named "rootObject". The value of rootObject is a -24-character object key referring to the root PBXProject object in the -objects dictionary. - -In Xcode, every file used as input to a target or produced as a final product -of a target must appear somewhere in the hierarchy rooted at the PBXGroup -object referenced by the PBXProject's mainGroup property. A PBXGroup is -generally represented as a folder in the Xcode application. PBXGroups can -contain other PBXGroups as well as PBXFileReferences, which are pointers to -actual files. - -Each XCTarget contains a list of build phases, represented in this module by -the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations -are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the -"Compile Sources" and "Link Binary With Libraries" phases displayed in the -Xcode application. Files used as input to these phases (for example, source -files in the former case and libraries and frameworks in the latter) are -represented by PBXBuildFile objects, referenced by elements of "files" lists -in XCTarget objects. Each PBXBuildFile object refers to a PBXFileReference -object as a "weak" reference: it does not "own" the PBXFileReference, which is -owned by the root object's mainGroup or a descendant group. In most cases, the -layer of indirection between an XCBuildPhase and a PBXFileReference via a -PBXBuildFile appears extraneous, but there's actually one reason for this: -file-specific compiler flags are added to the PBXBuildFile object so as to -allow a single file to be a member of multiple targets while having distinct -compiler flags for each. These flags can be modified in the Xcode application -in the "Build" tab of a File Info window. - -When a project is open in the Xcode application, Xcode will rewrite it. As -such, this module is careful to adhere to the formatting used by Xcode, to -avoid insignificant changes appearing in the file when it is used in the -Xcode application. This will keep version control repositories happy, and -makes it possible to compare a project file used in Xcode to one generated by -this module to determine if any significant changes were made in the -application. - -Xcode has its own way of assigning 24-character identifiers to each object, -which is not duplicated here.
Because the identifier is only generated -once, when an object is created, and is then left unchanged, there is no need -to attempt to duplicate Xcode's behavior in this area. The generator is free -to select any identifier, even at random, to refer to the objects it creates, -and Xcode will retain those identifiers and use them when subsequently -rewriting the project file. However, a generator that chose identifiers at -random would produce new identifiers each time the project files are -generated, leading to difficulties comparing "used" project files to -"pristine" ones produced by this module, and causing the appearance of changes -as every object identifier is changed when updated projects are checked in to -a version control repository. To mitigate this problem, this module chooses -identifiers in a more deterministic way, by hashing a description of each -object as well as its parent and ancestor objects. This strategy should result -in minimal "shift" in IDs as successive generations of project files are -produced. - -THIS MODULE - -This module introduces several classes, all derived from the XCObject class. -Nearly all of the "brains" are built into the XCObject class, which understands -how to create and modify objects, maintain the proper tree structure, compute -identifiers, and print objects. For the most part, classes derived from -XCObject need only provide a _schema class object, a dictionary that -expresses what properties objects of the class may contain. - -Given this structure, it's possible to build a minimal project file by creating -objects of the appropriate types and making the proper connections: - - config_list = XCConfigurationList() - group = PBXGroup() - project = PBXProject({'buildConfigurationList': config_list, - 'mainGroup': group}) - -With the project object set up, it can be added to an XCProjectFile object. -XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject -subclass that does not actually correspond to a class type found in a project -file. Rather, it is used to represent the project file's root dictionary. -Printing an XCProjectFile will print the entire project file, including the -full "objects" dictionary. - - project_file = XCProjectFile({'rootObject': project}) - project_file.ComputeIDs() - project_file.Print() - -Xcode project files are always encoded in UTF-8. This module will accept -strings of either the str class or the unicode class. Strings of class str -are assumed to already be encoded in UTF-8. Obviously, if you're just using -ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. -Strings of class unicode are handled properly and encoded in UTF-8 when -a project file is output. -""" - -import gyp.common -import posixpath -import re -import struct -import sys - -# hashlib is supplied as of Python 2.5 as the replacement interface for sha -# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import sha otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_sha1 = hashlib.sha1 -except ImportError: - import sha - _new_sha1 = sha.new - - -# See XCObject._EncodeString. This pattern is used to determine when a string -# can be printed unquoted. Strings that match this pattern may be printed -# unquoted. Strings that do not match must be quoted and may be further -# transformed to be properly encoded.
Note that this expression matches the -# characters listed with "+", for 1 or more occurrences: if a string is empty, -# it must not match this pattern, because it needs to be encoded as "". -_unquoted = re.compile('^[A-Za-z0-9$./_]+$') - -# Strings that match this pattern are quoted regardless of what _unquoted says. -# Oddly, Xcode will quote any string with a run of three or more underscores. -_quoted = re.compile('___') - -# This pattern should match any character that needs to be escaped by -# XCObject._EncodeString. See that function. -_escaped = re.compile('[\\\\"]|[^ -~]') - - -# Used by SourceTreeAndPathFromPath -_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$') - -def SourceTreeAndPathFromPath(input_path): - """Given input_path, returns a tuple with sourceTree and path values. - - Examples: - input_path (source_tree, output_path) - '$(VAR)/path' ('VAR', 'path') - '$(VAR)' ('VAR', None) - 'path' (None, 'path') - """ - - source_group_match = _path_leading_variable.match(input_path) - if source_group_match: - source_tree = source_group_match.group(1) - output_path = source_group_match.group(3) # This may be None. - else: - source_tree = None - output_path = input_path - - return (source_tree, output_path) - -def ConvertVariablesToShellSyntax(input_string): - return re.sub('\$\((.*?)\)', '${\\1}', input_string) - -class XCObject(object): - """The abstract base of all class types used in Xcode project files. - - Class variables: - _schema: A dictionary defining the properties of this class. The keys to - _schema are string property keys as used in project files. Values - are a list of four or five elements: - [ is_list, property_type, is_strong, is_required, default ] - is_list: True if the property described is a list, as opposed - to a single element. - property_type: The type to use as the value of the property, - or if is_list is True, the type to use for each - element of the value's list. property_type must - be an XCObject subclass, or one of the built-in - types str, int, or dict. - is_strong: If property_type is an XCObject subclass, is_strong - is True to assert that this class "owns," or serves - as parent, to the property value (or, if is_list is - True, values). is_strong must be False if - property_type is not an XCObject subclass. - is_required: True if the property is required for the class. - Note that is_required being True does not preclude - an empty string ("", in the case of property_type - str) or list ([], in the case of is_list True) from - being set for the property. - default: Optional. If is_required is True, default may be set - to provide a default value for objects that do not supply - their own value. If is_required is True and default - is not provided, users of the class must supply their own - value for the property. - Note that although the values of the array are expressed in - boolean terms, subclasses provide values as integers to conserve - horizontal space. - _should_print_single_line: False in XCObject. Subclasses whose objects - should be written to the project file in the - alternate single-line format, such as - PBXFileReference and PBXBuildFile, should - set this to True. - _encode_transforms: Used by _EncodeString to encode unprintable characters. - The index into this list is the ordinal of the - character to transform; each value is a string - used to represent the character in the output. XCObject - provides an _encode_transforms list suitable for most - XCObject subclasses.
- _alternate_encode_transforms: Provided for subclasses that wish to use - the alternate encoding rules. Xcode seems - to use these rules when printing objects in - single-line format. Subclasses that desire - this behavior should set _encode_transforms - to _alternate_encode_transforms. - _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs - to construct this object's ID. Most classes that need custom - hashing behavior should do it by overriding Hashables, - but in some cases an object's parent may wish to push a - hashable value into its child, and it can do so by appending - to _hashables. - Attributes: - id: The object's identifier, a 24-character uppercase hexadecimal string. - Usually, objects being created should not set id until the entire - project file structure is built. At that point, ComputeIDs() should - be called on the root object to assign deterministic values for id to - each object in the tree. - parent: The object's parent. This is set by a parent XCObject when a child - object is added to it. - _properties: The object's property dictionary. An object's properties are - described by its class' _schema variable. - """ - - _schema = {} - _should_print_single_line = False - - # See _EncodeString. - _encode_transforms = [] - i = 0 - while i < ord(' '): - _encode_transforms.append('\\U%04x' % i) - i = i + 1 - _encode_transforms[7] = '\\a' - _encode_transforms[8] = '\\b' - _encode_transforms[9] = '\\t' - _encode_transforms[10] = '\\n' - _encode_transforms[11] = '\\v' - _encode_transforms[12] = '\\f' - _encode_transforms[13] = '\\n' - - _alternate_encode_transforms = list(_encode_transforms) - _alternate_encode_transforms[9] = chr(9) - _alternate_encode_transforms[10] = chr(10) - _alternate_encode_transforms[11] = chr(11) - - def __init__(self, properties=None, id=None, parent=None): - self.id = id - self.parent = parent - self._properties = {} - self._hashables = [] - self._SetDefaultsFromSchema() - self.UpdateProperties(properties) - - def __repr__(self): - try: - name = self.Name() - except NotImplementedError: - return '<%s at 0x%x>' % (self.__class__.__name__, id(self)) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Copy(self): - """Make a copy of this object. - - The new object will have its own copy of lists and dicts. Any XCObject - objects owned by this object (marked "strong") will be copied in the - new object, even those found in lists. If this object has any weak - references to other XCObjects, the same references are added to the new - object without making a copy. - """ - - that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): - is_strong = self._schema[key][2] - - if isinstance(value, XCObject): - if is_strong: - new_value = value.Copy() - new_value.parent = that - that._properties[key] = new_value - else: - that._properties[key] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - that._properties[key] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe to - # call Copy. - that._properties[key] = [] - for item in value: - new_item = item.Copy() - new_item.parent = that - that._properties[key].append(new_item) - else: - that._properties[key] = value[:] - elif isinstance(value, dict): - # dicts are never strong.
- if is_strong: - raise TypeError, 'Strong dict for key ' + key + ' in ' + \ - self.__class__.__name__ - else: - that._properties[key] = value.copy() - else: - raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \ - ' for key ' + key + ' in ' + self.__class__.__name__ - - return that - - def Name(self): - """Return the name corresponding to an object. - - Not all objects necessarily need to be nameable, and not all that do have - a "name" property. Override as needed. - """ - - # If the schema indicates that "name" is required, try to access the - # property even if it doesn't exist. This will result in a KeyError - # being raised for the property that should be present, which seems more - # appropriate than NotImplementedError in this case. - if 'name' in self._properties or \ - ('name' in self._schema and self._schema['name'][3]): - return self._properties['name'] - - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement Name' - - def Comment(self): - """Return a comment string for the object. - - Most objects just use their name as the comment, but PBXProject uses - different values. - - The returned comment is not escaped and does not have any comment marker - strings applied to it. - """ - - return self.Name() - - def Hashables(self): - hashables = [self.__class__.__name__] - - name = self.Name() - if name != None: - hashables.append(name) - - hashables.extend(self._hashables) - - return hashables - - def HashablesForChild(self): - return None - - def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None): - """Set "id" properties deterministically. - - An object's "id" property is set based on a hash of its class type and - name, as well as the class type and name of all ancestor objects. As - such, it is only advisable to call ComputeIDs once an entire project file - tree is built. - - If recursive is True, recurse into all descendant objects and update their - hashes. - - If overwrite is True, any existing value set in the "id" property will be - replaced. - """ - - def _HashUpdate(hash, data): - """Update hash with data's length and contents. - - If the hash were updated only with the value of data, it would be - possible for clowns to induce collisions by manipulating the names of - their objects. By adding the length, it's exceedingly less likely that - ID collisions will be encountered, intentionally or not. - """ - - hash.update(struct.pack('>i', len(data))) - hash.update(data) - - if seed_hash is None: - seed_hash = _new_sha1() - - hash = seed_hash.copy() - - hashables = self.Hashables() - assert len(hashables) > 0 - for hashable in hashables: - _HashUpdate(hash, hashable) - - if recursive: - hashables_for_child = self.HashablesForChild() - if hashables_for_child is None: - child_hash = hash - else: - assert len(hashables_for_child) > 0 - child_hash = seed_hash.copy() - for hashable in hashables_for_child: - _HashUpdate(child_hash, hashable) - - for child in self.Children(): - child.ComputeIDs(recursive, overwrite, child_hash) - - if overwrite or self.id is None: - # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest - # is 160 bits. Instead of throwing out 64 bits of the digest, xor them - # into the portion that gets used.
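The XOR fold described in the comment above, as a standalone sketch: a 160-bit SHA-1 digest is reduced to the 96 bits an Xcode ID carries by folding the extra words back in rather than discarding them:

```python
import hashlib
import struct

def fold_to_id(data):
    digest = hashlib.sha1(data).digest()          # 20 bytes = five 32-bit words
    words = struct.unpack('>5I', digest)          # big-endian, as above
    id_ints = [0, 0, 0]
    for index, word in enumerate(words):
        id_ints[index % 3] ^= word                # fold words 3 and 4 back in
    return '%08X%08X%08X' % tuple(id_ints)

print(fold_to_id(b'PBXFileReference/foo.c'))      # 24 uppercase hex characters
```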
- assert hash.digest_size % 4 == 0 - digest_int_count = hash.digest_size / 4 - digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) - id_ints = [0, 0, 0] - for index in xrange(0, digest_int_count): - id_ints[index % 3] ^= digest_ints[index] - self.id = '%08X%08X%08X' % tuple(id_ints) - - def EnsureNoIDCollisions(self): - """Verifies that no two objects have the same ID. Checks all descendants. - """ - - ids = {} - descendants = self.Descendants() - for descendant in descendants: - if descendant.id in ids: - other = ids[descendant.id] - raise KeyError, \ - 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \ - (descendant.id, str(descendant._properties), - str(other._properties), self._properties['rootObject'].Name()) - ids[descendant.id] = descendant - - def Children(self): - """Returns a list of all of this object's owned (strong) children.""" - - children = [] - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong) = attributes[0:3] - if is_strong and property in self._properties: - if not is_list: - children.append(self._properties[property]) - else: - children.extend(self._properties[property]) - return children - - def Descendants(self): - """Returns a list of all of this object's descendants, including this - object. - """ - - children = self.Children() - descendants = [self] - for child in children: - descendants.extend(child.Descendants()) - return descendants - - def PBXProjectAncestor(self): - # The base case for recursion is defined at PBXProject.PBXProjectAncestor. - if self.parent: - return self.parent.PBXProjectAncestor() - return None - - def _EncodeComment(self, comment): - """Encodes a comment to be placed in the project file output, mimicking - Xcode behavior. - """ - - # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If - # the string already contains a "*/", it is turned into "(*)/". This keeps - # the file writer from outputting something that would be treated as the - # end of a comment in the middle of something intended to be entirely a - # comment. - - return '/* ' + comment.replace('*/', '(*)/') + ' */' - - def _EncodeTransform(self, match): - # This function works closely with _EncodeString. It will only be called - # by re.sub with match.group(0) containing a character matched by - # the _escaped expression. - char = match.group(0) - - # Backslashes (\) and quotation marks (") are always replaced with a - # backslash-escaped version of the same. Everything else gets its - # replacement from the class' _encode_transforms array. - if char == '\\': - return '\\\\' - if char == '"': - return '\\"' - return self._encode_transforms[ord(char)] - - def _EncodeString(self, value): - """Encodes a string to be placed in the project file output, mimicking - Xcode behavior. - """ - - # Use quotation marks when any character outside of the range A-Z, a-z, 0-9, - # $ (dollar sign), . (period), and _ (underscore) is present. Also use - # quotation marks to represent empty strings. - # - # Escape " (double-quote) and \ (backslash) by preceding them with a - # backslash. - # - # Some characters below the printable ASCII range are encoded specially: - # 7 ^G BEL is encoded as "\a" - # 8 ^H BS is encoded as "\b" - # 11 ^K VT is encoded as "\v" - # 12 ^L NP is encoded as "\f" - # 127 ^?
DEL is passed through as-is without escaping - # - In PBXFileReference and PBXBuildFile objects: - # 9 ^I HT is passed through as-is without escaping - # 10 ^J NL is passed through as-is without escaping - # 13 ^M CR is passed through as-is without escaping - # - In other objects: - # 9 ^I HT is encoded as "\t" - # 10 ^J NL is encoded as "\n" - # 13 ^M CR is encoded as "\n" rendering it indistinguishable from - # 10 ^J NL - # All other nonprintable characters within the ASCII range (0 through 127 - # inclusive) are encoded as "\U001f" referring to the Unicode code point in - # hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e". - # Characters above the ASCII range are passed through to the output encoded - # as UTF-8 without any escaping. These mappings are contained in the - # class' _encode_transforms list. - - if _unquoted.search(value) and not _quoted.search(value): - return value - - return '"' + _escaped.sub(self._EncodeTransform, value) + '"' - - def _XCPrint(self, file, tabs, line): - file.write('\t' * tabs + line) - - def _XCPrintableValue(self, tabs, value, flatten_list=False): - """Returns a representation of value that may be printed in a project file, - mimicking Xcode's behavior. - - _XCPrintableValue can handle str and int values, XCObjects (which are - made printable by returning their id property), and list and dict objects - composed of any of the above types. When printing a list or dict, and - _should_print_single_line is False, the tabs parameter is used to determine - how much to indent the lines corresponding to the items in the list or - dict. - - If flatten_list is True, single-element lists will be transformed into - strings. - """ - - printable = '' - comment = None - - if self._should_print_single_line: - sep = ' ' - element_tabs = '' - end_tabs = '' - else: - sep = '\n' - element_tabs = '\t' * (tabs + 1) - end_tabs = '\t' * tabs - - if isinstance(value, XCObject): - printable += value.id - comment = value.Comment() - elif isinstance(value, str): - printable += self._EncodeString(value) - elif isinstance(value, unicode): - printable += self._EncodeString(value.encode('utf-8')) - elif isinstance(value, int): - printable += str(value) - elif isinstance(value, list): - if flatten_list and len(value) <= 1: - if len(value) == 0: - printable += self._EncodeString('') - else: - printable += self._EncodeString(value[0]) - else: - printable = '(' + sep - for item in value: - printable += element_tabs + \ - self._XCPrintableValue(tabs + 1, item, flatten_list) + \ - ',' + sep - printable += end_tabs + ')' - elif isinstance(value, dict): - printable = '{' + sep - for item_key, item_value in sorted(value.iteritems()): - printable += element_tabs + \ - self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ - self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ - sep - printable += end_tabs + '}' - else: - raise TypeError, "Can't make " + value.__class__.__name__ + ' printable' - - if comment != None: - printable += ' ' + self._EncodeComment(comment) - - return printable - - def _XCKVPrint(self, file, tabs, key, value): - """Prints a key and value, members of an XCObject's _properties dictionary, - to file. - - tabs is an int identifying the indentation level. If the class' - _should_print_single_line variable is True, tabs is ignored and the - key-value pair will be followed by a space instead of a newline.
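The quoting decision described above comes down to the two module-level patterns; a small sketch reusing the same regular expressions:

```python
import re

_unquoted = re.compile('^[A-Za-z0-9$./_]+$')
_quoted = re.compile('___')

def needs_quotes(value):
    # Quote empty strings, anything outside [A-Za-z0-9$./_], and ___ runs.
    return not (_unquoted.search(value) and not _quoted.search(value))

for s in ('main.c', 'SRCROOT', 'a b.c', '', 'foo___bar'):
    print(repr(s), needs_quotes(s))
```

Only main.c and SRCROOT print unquoted; the rest would be wrapped in double quotes (and escaped as needed).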
- """ - - if self._should_print_single_line: - printable = '' - after_kv = ' ' - else: - printable = '\t' * tabs - after_kv = '\n' - - # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy - # objects without comments. Sometimes it prints them with comments, but - # the majority of the time, it doesn't. To avoid unnecessary changes to - # the project file after Xcode opens it, don't write comments for - # remoteGlobalIDString. This is a sucky hack and it would certainly be - # cleaner to extend the schema to indicate whether or not a comment should - # be printed, but since this is the only case where the problem occurs and - # Xcode itself can't seem to make up its mind, the hack will suffice. - # - # Also see PBXContainerItemProxy._schema['remoteGlobalIDString']. - if key == 'remoteGlobalIDString' and isinstance(self, - PBXContainerItemProxy): - value_to_print = value.id - else: - value_to_print = value - - # PBXBuildFile's settings property is represented in the output as a dict, - # but a hack here has it represented as a string. Arrange to strip off the - # quotes so that it shows up in the output as expected. - if key == 'settings' and isinstance(self, PBXBuildFile): - strip_value_quotes = True - else: - strip_value_quotes = False - - # In another one-off, let's set flatten_list on buildSettings properties - # of XCBuildConfiguration objects, because that's how Xcode treats them. - if key == 'buildSettings' and isinstance(self, XCBuildConfiguration): - flatten_list = True - else: - flatten_list = False - - try: - printable_key = self._XCPrintableValue(tabs, key, flatten_list) - printable_value = self._XCPrintableValue(tabs, value_to_print, - flatten_list) - if strip_value_quotes and len(printable_value) > 1 and \ - printable_value[0] == '"' and printable_value[-1] == '"': - printable_value = printable_value[1:-1] - printable += printable_key + ' = ' + printable_value + ';' + after_kv - except TypeError, e: - gyp.common.ExceptionAppend(e, - 'while printing key "%s"' % key) - raise - - self._XCPrint(file, 0, printable) - - def Print(self, file=sys.stdout): - """Prints a reprentation of this object to file, adhering to Xcode output - formatting. - """ - - self.VerifyHasRequiredProperties() - - if self._should_print_single_line: - # When printing an object in a single line, Xcode doesn't put any space - # between the beginning of a dictionary (or presumably a list) and the - # first contained item, so you wind up with snippets like - # ...CDEF = {isa = PBXFileReference; fileRef = 0123... - # If it were me, I would have put a space in there after the opening - # curly, but I guess this is just another one of those inconsistencies - # between how Xcode prints PBXFileReference and PBXBuildFile objects as - # compared to other objects. Mimic Xcode's behavior here by using an - # empty string for sep. - sep = '' - end_tabs = 0 - else: - sep = '\n' - end_tabs = 2 - - # Start the object. For example, '\t\tPBXProject = {\n'. - self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep) - - # "isa" isn't in the _properties dictionary, it's an intrinsic property - # of the class which the object belongs to. Xcode always outputs "isa" - # as the first element of an object dictionary. - self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) - - # The remaining elements of an object dictionary are sorted alphabetically. - for property, value in sorted(self._properties.iteritems()): - self._XCKVPrint(file, 3, property, value) - - # End the object. 
- self._XCPrint(file, end_tabs, '};\n') - - def UpdateProperties(self, properties, do_copy=False): - """Merge the supplied properties into the _properties dictionary. - - The input properties must adhere to the class schema or a KeyError or - TypeError exception will be raised. If adding an object of an XCObject - subclass and the schema indicates a strong relationship, the object's - parent will be set to this object. - - If do_copy is True, then lists, dicts, strong-owned XCObjects, and - strong-owned XCObjects in lists will be copied instead of having their - references added. - """ - - if properties is None: - return - - for property, value in properties.iteritems(): - # Make sure the property is in the schema. - if not property in self._schema: - raise KeyError, property + ' not in ' + self.__class__.__name__ - - # Make sure the property conforms to the schema. - (is_list, property_type, is_strong) = self._schema[property][0:3] - if is_list: - if value.__class__ != list: - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + \ - ' must be list, not ' + value.__class__.__name__ - for item in value: - if not isinstance(item, property_type) and \ - not (item.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - 'item of ' + property + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - item.__class__.__name__ - elif not isinstance(value, property_type) and \ - not (value.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + ' must be ' + \ - property_type.__name__ + ', not ' + value.__class__.__name__ - - # Checks passed, perform the assignment. - if do_copy: - if isinstance(value, XCObject): - if is_strong: - self._properties[property] = value.Copy() - else: - self._properties[property] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - self._properties[property] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe - # to call Copy. - self._properties[property] = [] - for item in value: - self._properties[property].append(item.Copy()) - else: - self._properties[property] = value[:] - elif isinstance(value, dict): - self._properties[property] = value.copy() - else: - raise TypeError, "Don't know how to copy a " + \ - value.__class__.__name__ + ' object for ' + \ - property + ' in ' + self.__class__.__name__ - else: - self._properties[property] = value - - # Set up the child's back-reference to this object. Don't use |value| - # any more because it may not be right if do_copy is true. - if is_strong: - if not is_list: - self._properties[property].parent = self - else: - for item in self._properties[property]: - item.parent = self - - def HasProperty(self, key): - return key in self._properties - - def GetProperty(self, key): - return self._properties[key] - - def SetProperty(self, key, value): - self.UpdateProperties({key: value}) - - def DelProperty(self, key): - if key in self._properties: - del self._properties[key] - - def AppendProperty(self, key, value): - # TODO(mark): Support ExtendProperty too (and make this call that)? - - # Schema validation. 
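A reduced standalone version of the schema check UpdateProperties performs (the schema and property names are invented; the real code also handles strong ownership and unicode/str coercion):

```python
# [is_list, property_type, is_strong, is_required], as in the _schema format.
SCHEMA = {
    'name': [0, str, 0, 0],
    'children': [1, str, 0, 1],
}

def validate(properties):
    for key, value in properties.items():
        if key not in SCHEMA:
            raise KeyError('%s not in schema' % key)
        is_list, prop_type = SCHEMA[key][0:2]
        if is_list:
            if not isinstance(value, list):
                raise TypeError('%s must be a list' % key)
            for item in value:
                if not isinstance(item, prop_type):
                    raise TypeError('items of %s must be %s' % (key, prop_type.__name__))
        elif not isinstance(value, prop_type):
            raise TypeError('%s must be %s' % (key, prop_type.__name__))

validate({'name': 'Source', 'children': ['a.c', 'b.c']})  # passes silently
```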
- if not key in self._schema: - raise KeyError, key + ' not in ' + self.__class__.__name__ - - (is_list, property_type, is_strong) = self._schema[key][0:3] - if not is_list: - raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list' - if not isinstance(value, property_type): - raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - value.__class__.__name__ - - # If the property doesn't exist yet, create a new empty list to receive the - # item. - if not key in self._properties: - self._properties[key] = [] - - # Set up the ownership link. - if is_strong: - value.parent = self - - # Store the item. - self._properties[key].append(value) - - def VerifyHasRequiredProperties(self): - """Ensure that all properties identified as required by the schema are - set. - """ - - # TODO(mark): A stronger verification mechanism is needed. Some - # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and not property in self._properties: - raise KeyError, self.__class__.__name__ + ' requires ' + property - - def _SetDefaultsFromSchema(self): - """Assign object default values according to the schema. This will not - overwrite properties that have already been set.""" - - defaults = {} - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and len(attributes) >= 5 and \ - not property in self._properties: - default = attributes[4] - - defaults[property] = default - - if len(defaults) > 0: - # Use do_copy=True so that each new object gets its own copy of strong - # objects, lists, and dicts. - self.UpdateProperties(defaults, do_copy=True) - - -class XCHierarchicalElement(XCObject): - """Abstract base for PBXGroup and PBXFileReference. Not represented in a - project file.""" - - # TODO(mark): Do name and path belong here? Probably so. - # If path is set and name is not, name may have a default value. Name will - # be set to the basename of path, if the basename of path is different from - # the full value of path. If path is already just a leaf name, name will - # not be set. - _schema = XCObject._schema.copy() - _schema.update({ - 'comments': [0, str, 0, 0], - 'fileEncoding': [0, str, 0, 0], - 'includeInIndex': [0, int, 0, 0], - 'indentWidth': [0, int, 0, 0], - 'lineEnding': [0, int, 0, 0], - 'sourceTree': [0, str, 0, 1, ''], - 'tabWidth': [0, int, 0, 0], - 'usesTabs': [0, int, 0, 0], - 'wrapsLines': [0, int, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - if 'path' in self._properties and not 'name' in self._properties: - path = self._properties['path'] - name = posixpath.basename(path) - if name != '' and path != name: - self.SetProperty('name', name) - - if 'path' in self._properties and \ - (not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == ''): - # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take - # the variable out and make the path be relative to that variable by - # assigning the variable name as the sourceTree. 
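The sourceTree extraction in the comment above is driven by SourceTreeAndPathFromPath; a quick demonstration of its three documented cases:

```python
import re

_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')

def source_tree_and_path(input_path):
    m = _path_leading_variable.match(input_path)
    if m:
        return m.group(1), m.group(3)  # group(3) may be None
    return None, input_path

for p in ('$(SDKROOT)/usr/lib', '$(SDKROOT)', 'src/foo.c'):
    print(p, '->', source_tree_and_path(p))
# ('SDKROOT', 'usr/lib'), ('SDKROOT', None), (None, 'src/foo.c')
```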
- (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path']) - if source_tree != None: - self._properties['sourceTree'] = source_tree - if path != None: - self._properties['path'] = path - if source_tree != None and path is None and \ - not 'name' in self._properties: - # The path was of the form "$(SDKROOT)" with no path following it. - # This object is now relative to that variable, so it has no path - # attribute of its own. It does, however, keep a name. - del self._properties['path'] - self._properties['name'] = source_tree - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - elif 'path' in self._properties: - return self._properties['path'] - else: - # This happens in the case of the root PBXGroup. - return None - - def Hashables(self): - """Custom hashables for XCHierarchicalElements. - - XCHierarchicalElements are special. Generally, their hashes shouldn't - change if the paths don't change. The normal XCObject implementation of - Hashables adds a hashable for each object, which means that if - the hierarchical structure changes (possibly due to changes caused when - TakeOverOnlyChild runs and encounters slight changes in the hierarchy), - the hashes will change. For example, if a project file initially contains - a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent - a/b. If someone later adds a/f2 to the project file, a/b can no longer be - collapsed, and f1 winds up with parent b and grandparent a. That would - be sufficient to change f1's hash. - - To counteract this problem, hashables for all XCHierarchicalElements except - for the main group (which has neither a name nor a path) are taken to be - just the set of path components. Because hashables are inherited from - parents, this provides assurance that a/b/f1 has the same set of hashables - whether its parent is b or a/b. - - The main group is a special case. As it is permitted to have no name or - path, it is permitted to use the standard XCObject hash mechanism. This - is not considered a problem because there can be only one main group. - """ - - if self == self.PBXProjectAncestor()._properties['mainGroup']: - # super - return XCObject.Hashables(self) - - hashables = [] - - # Put the name in first, ensuring that if TakeOverOnlyChild collapses - # children into a top-level group like "Source", the name always goes - # into the list of hashables without interfering with path components. - if 'name' in self._properties: - # Make it less likely for people to manipulate hashes by following the - # pattern of always pushing an object type value onto the list first. - hashables.append(self.__class__.__name__ + '.name') - hashables.append(self._properties['name']) - - # NOTE: This still has the problem that if an absolute path is encountered, - # including paths with a sourceTree, they'll still inherit their parents' - # hashables, even though the paths aren't relative to their parents. This - # is not expected to be much of a problem in practice. - path = self.PathFromSourceTreeAndPath() - if path != None: - components = path.split(posixpath.sep) - for component in components: - hashables.append(self.__class__.__name__ + '.path') - hashables.append(component) - - hashables.extend(self._hashables) - - return hashables - - def Compare(self, other): - # Allow comparison of these types. PBXGroup has the highest sort rank; - # PBXVariantGroup is treated as equal to PBXFileReference. 
-    valid_class_types = {
-      PBXFileReference: 'file',
-      PBXGroup: 'group',
-      PBXVariantGroup: 'file',
-    }
-    self_type = valid_class_types[self.__class__]
-    other_type = valid_class_types[other.__class__]
-
-    if self_type == other_type:
-      # If the two objects are of the same sort rank, compare their names.
-      return cmp(self.Name(), other.Name())
-
-    # Otherwise, sort groups before everything else.
-    if self_type == 'group':
-      return -1
-    return 1
-
-  def CompareRootGroup(self, other):
-    # This function should be used only to compare direct children of the
-    # containing PBXProject's mainGroup. These groups should appear in the
-    # listed order.
-    # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
-    # generator should have a way of influencing this list rather than having
-    # to hardcode for the generator here.
-    order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
-             'Build']
-
-    # If the groups aren't in the listed order, do a name comparison.
-    # Otherwise, groups in the listed order should come before those that
-    # aren't.
-    self_name = self.Name()
-    other_name = other.Name()
-    self_in = isinstance(self, PBXGroup) and self_name in order
-    other_in = isinstance(other, PBXGroup) and other_name in order
-    if not self_in and not other_in:
-      return self.Compare(other)
-    if self_name in order and not other_name in order:
-      return -1
-    if other_name in order and not self_name in order:
-      return 1
-
-    # If both groups are in the listed order, go by the defined order.
-    self_index = order.index(self_name)
-    other_index = order.index(other_name)
-    if self_index < other_index:
-      return -1
-    if self_index > other_index:
-      return 1
-    return 0
-
-  def PathFromSourceTreeAndPath(self):
-    # Turn the object's sourceTree and path properties into a single flat
-    # string of a form comparable to the path parameter. If there's a
-    # sourceTree property other than "", wrap it in $(...) for the
-    # comparison.
-    components = []
-    if self._properties['sourceTree'] != '':
-      components.append('$(' + self._properties['sourceTree'] + ')')
-    if 'path' in self._properties:
-      components.append(self._properties['path'])
-
-    if len(components) > 0:
-      return posixpath.join(*components)
-
-    return None
-
-  def FullPath(self):
-    # Returns a full path to self relative to the project file, or relative
-    # to some other source tree. Start with self, and walk up the chain of
-    # parents prepending their paths, if any, until no more parents are
-    # available (project-relative path) or until a path relative to some
-    # source tree is found.
-    xche = self
-    path = None
-    while isinstance(xche, XCHierarchicalElement) and \
-          (path is None or \
-           (not path.startswith('/') and not path.startswith('$'))):
-      this_path = xche.PathFromSourceTreeAndPath()
-      if this_path != None and path != None:
-        path = posixpath.join(this_path, path)
-      elif this_path != None:
-        path = this_path
-      xche = xche.parent
-
-    return path
-
-
-class PBXGroup(XCHierarchicalElement):
-  """
-  Attributes:
-    _children_by_path: Maps pathnames of children of this PBXGroup to the
-      actual child XCHierarchicalElement objects.
-    _variant_children_by_name_and_path: Maps (name, path) tuples of
-      PBXVariantGroup children to the actual child PBXVariantGroup objects.
-  """
-
-  _schema = XCHierarchicalElement._schema.copy()
-  _schema.update({
-    'children': [1, XCHierarchicalElement, 1, 1, []],
-    'name': [0, str, 0, 0],
-    'path': [0, str, 0, 0],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCHierarchicalElement.__init__(self, properties, id, parent)
-    self._children_by_path = {}
-    self._variant_children_by_name_and_path = {}
-    for child in self._properties.get('children', []):
-      self._AddChildToDicts(child)
-
-  def Hashables(self):
-    # super
-    hashables = XCHierarchicalElement.Hashables(self)
-
-    # It is not sufficient to just rely on name and parent to build a unique
-    # hashable: a node could have two child PBXGroups sharing a common name.
-    # To add entropy, the hashable is enhanced with the names of all its
-    # children.
-    for child in self._properties.get('children', []):
-      child_name = child.Name()
-      if child_name != None:
-        hashables.append(child_name)
-
-    return hashables
-
-  def HashablesForChild(self):
-    # To avoid a circular reference the hashables used to compute a child id do
-    # not include the child names.
-    return XCHierarchicalElement.Hashables(self)
-
-  def _AddChildToDicts(self, child):
-    # Sets up this PBXGroup object's dicts to reference the child properly.
-    child_path = child.PathFromSourceTreeAndPath()
-    if child_path:
-      if child_path in self._children_by_path:
-        raise ValueError, 'Found multiple children with path ' + child_path
-      self._children_by_path[child_path] = child
-
-    if isinstance(child, PBXVariantGroup):
-      child_name = child._properties.get('name', None)
-      key = (child_name, child_path)
-      if key in self._variant_children_by_name_and_path:
-        raise ValueError, 'Found multiple PBXVariantGroup children with ' + \
-                          'name ' + str(child_name) + ' and path ' + \
-                          str(child_path)
-      self._variant_children_by_name_and_path[key] = child
-
-  def AppendChild(self, child):
-    # Callers should use this instead of calling
-    # AppendProperty('children', child) directly because this function
-    # maintains the group's dicts.
-    self.AppendProperty('children', child)
-    self._AddChildToDicts(child)
-
-  def GetChildByName(self, name):
-    # This is not currently optimized with a dict as GetChildByPath is because
-    # it has few callers. Most callers probably want GetChildByPath. This
-    # function is only useful to get children that have names but no paths,
-    # which is rare. The children of the main group ("Source", "Products",
-    # etc.) are pretty much the only case where this is likely to come up.
-    #
-    # TODO(mark): Maybe this should raise an error if more than one child is
-    # present with the same name.
-    if not 'children' in self._properties:
-      return None
-
-    for child in self._properties['children']:
-      if child.Name() == name:
-        return child
-
-    return None
-
-  def GetChildByPath(self, path):
-    if not path:
-      return None
-
-    if path in self._children_by_path:
-      return self._children_by_path[path]
-
-    return None
-
-  def GetChildByRemoteObject(self, remote_object):
-    # This method is a little bit esoteric. Given a remote_object, which
-    # should be a PBXFileReference in another project file, this method will
-    # return this group's PBXReferenceProxy object serving as a local proxy
-    # for the remote PBXFileReference.
-    #
-    # This function might benefit from a dict optimization as GetChildByPath
-    # for some workloads, but profiling shows that it's not currently a
-    # problem.
-    if not 'children' in self._properties:
-      return None
-
-    for child in self._properties['children']:
-      if not isinstance(child, PBXReferenceProxy):
-        continue
-
-      container_proxy = child._properties['remoteRef']
-      if container_proxy._properties['remoteGlobalIDString'] == remote_object:
-        return child
-
-    return None
-
-  def AddOrGetFileByPath(self, path, hierarchical):
-    """Returns an existing or new file reference corresponding to path.
-
-    If hierarchical is True, this method will create or use the necessary
-    hierarchical group structure corresponding to path. Otherwise, it will
-    look in and create an item in the current group only.
-
-    If an existing matching reference is found, it is returned, otherwise, a
-    new one will be created, added to the correct group, and returned.
-
-    If path identifies a directory by virtue of carrying a trailing slash,
-    this method returns a PBXFileReference of "folder" type. If path
-    identifies a variant, by virtue of it identifying a file inside a directory
-    with an ".lproj" extension, this method returns a PBXVariantGroup
-    containing the variant named by path, and possibly other variants. For
-    all other paths, a "normal" PBXFileReference will be returned.
-    """
-
-    # Adding or getting a directory? Directories end with a trailing slash.
-    is_dir = False
-    if path.endswith('/'):
-      is_dir = True
-    path = posixpath.normpath(path)
-    if is_dir:
-      path = path + '/'
-
-    # Adding or getting a variant? Variants are files inside directories
-    # with an ".lproj" extension. Xcode uses variants for localization. For
-    # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
-    # MainMenu.nib inside path/to, and give it a variant named Language. In
-    # this example, grandparent would be set to path/to and parent_root would
-    # be set to Language.
-    variant_name = None
-    parent = posixpath.dirname(path)
-    grandparent = posixpath.dirname(parent)
-    parent_basename = posixpath.basename(parent)
-    (parent_root, parent_ext) = posixpath.splitext(parent_basename)
-    if parent_ext == '.lproj':
-      variant_name = parent_root
-    if grandparent == '':
-      grandparent = None
-
-    # Putting a directory inside a variant group is not currently supported.
-    assert not is_dir or variant_name is None
-
-    path_split = path.split(posixpath.sep)
-    if len(path_split) == 1 or \
-       ((is_dir or variant_name != None) and len(path_split) == 2) or \
-       not hierarchical:
-      # The PBXFileReference or PBXVariantGroup will be added to or gotten from
-      # this PBXGroup, no recursion necessary.
-      if variant_name is None:
-        # Add or get a PBXFileReference.
-        file_ref = self.GetChildByPath(path)
-        if file_ref != None:
-          assert file_ref.__class__ == PBXFileReference
-        else:
-          file_ref = PBXFileReference({'path': path})
-          self.AppendChild(file_ref)
-      else:
-        # Add or get a PBXVariantGroup. The variant group name is the same
-        # as the basename (MainMenu.nib in the example above). grandparent
-        # specifies the path to the variant group itself, and path_split[-2:]
-        # is the path of the specific variant relative to its group.
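        # A worked example of the decomposition at this step (values assume
        # path = 'path/to/Language.lproj/MainMenu.nib', per the comment above):
        #   variant_group_name = 'MainMenu.nib'
        #   grandparent        = 'path/to'
        #   variant_path       = 'Language.lproj/MainMenu.nib'
        #   variant_name       = 'Language'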
-        variant_group_name = posixpath.basename(path)
-        variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
-            variant_group_name, grandparent)
-        variant_path = posixpath.sep.join(path_split[-2:])
-        variant_ref = variant_group_ref.GetChildByPath(variant_path)
-        if variant_ref != None:
-          assert variant_ref.__class__ == PBXFileReference
-        else:
-          variant_ref = PBXFileReference({'name': variant_name,
-                                          'path': variant_path})
-          variant_group_ref.AppendChild(variant_ref)
-        # The caller is interested in the variant group, not the specific
-        # variant file.
-        file_ref = variant_group_ref
-      return file_ref
-    else:
-      # Hierarchical recursion. Add or get a PBXGroup corresponding to the
-      # outermost path component, and then recurse into it, chopping off that
-      # path component.
-      next_dir = path_split[0]
-      group_ref = self.GetChildByPath(next_dir)
-      if group_ref != None:
-        assert group_ref.__class__ == PBXGroup
-      else:
-        group_ref = PBXGroup({'path': next_dir})
-        self.AppendChild(group_ref)
-      return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
-                                          hierarchical)
-
-  def AddOrGetVariantGroupByNameAndPath(self, name, path):
-    """Returns an existing or new PBXVariantGroup for name and path.
-
-    If a PBXVariantGroup identified by the name and path arguments is already
-    present as a child of this object, it is returned. Otherwise, a new
-    PBXVariantGroup with the correct properties is created, added as a child,
-    and returned.
-
-    This method will generally be called by AddOrGetFileByPath, which knows
-    when to create a variant group based on the structure of the pathnames
-    passed to it.
-    """
-
-    key = (name, path)
-    if key in self._variant_children_by_name_and_path:
-      variant_group_ref = self._variant_children_by_name_and_path[key]
-      assert variant_group_ref.__class__ == PBXVariantGroup
-      return variant_group_ref
-
-    variant_group_properties = {'name': name}
-    if path != None:
-      variant_group_properties['path'] = path
-    variant_group_ref = PBXVariantGroup(variant_group_properties)
-    self.AppendChild(variant_group_ref)
-
-    return variant_group_ref
-
-  def TakeOverOnlyChild(self, recurse=False):
-    """If this PBXGroup has only one child and it's also a PBXGroup, take
-    it over by making all of its children this object's children.
-
-    This function will continue to take over only children when those children
-    are groups. If there are three PBXGroups representing a, b, and c, with
-    c inside b and b inside a, and a and b have no other children, this will
-    result in a taking over both b and c, forming a PBXGroup for a/b/c.
-
-    If recurse is True, this function will recurse into children and ask them
-    to collapse themselves by taking over only children as well. Assuming
-    an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
-    (d1, d2, and f are files, the rest are groups), recursion will result in
-    a group for a/b/c containing a group for d3/e.
-    """
-
-    # At this stage, check that child class types are PBXGroup exactly,
-    # instead of using isinstance. The only subclass of PBXGroup,
-    # PBXVariantGroup, should not participate in reparenting in the same way:
-    # reparenting by merging different object types would be wrong.
-    while len(self._properties['children']) == 1 and \
-          self._properties['children'][0].__class__ == PBXGroup:
-      # Loop to take over the innermost only-child group possible.
-
-      child = self._properties['children'][0]
-
-      # Assume the child's properties, including its children. Save a copy
-      # of this object's old properties, because they'll still be needed.
-      # This object retains its existing id and parent attributes.
-      old_properties = self._properties
-      self._properties = child._properties
-      self._children_by_path = child._children_by_path
-
-      if not 'sourceTree' in self._properties or \
-         self._properties['sourceTree'] == '':
-        # The child was relative to its parent. Fix up the path. Note that
-        # children with a sourceTree other than "" are not relative to
-        # their parents, so no path fix-up is needed in that case.
-        if 'path' in old_properties:
-          if 'path' in self._properties:
-            # Both the original parent and child have paths set.
-            self._properties['path'] = posixpath.join(old_properties['path'],
-                                                      self._properties['path'])
-          else:
-            # Only the original parent has a path, use it.
-            self._properties['path'] = old_properties['path']
-        if 'sourceTree' in old_properties:
-          # The original parent had a sourceTree set, use it.
-          self._properties['sourceTree'] = old_properties['sourceTree']
-
-      # If the original parent had a name set, keep using it. If the original
-      # parent didn't have a name but the child did, let the child's name
-      # live on. If the name attribute seems unnecessary now, get rid of it.
-      if 'name' in old_properties and old_properties['name'] != None and \
-         old_properties['name'] != self.Name():
-        self._properties['name'] = old_properties['name']
-      if 'name' in self._properties and 'path' in self._properties and \
-         self._properties['name'] == self._properties['path']:
-        del self._properties['name']
-
-      # Notify all children of their new parent.
-      for child in self._properties['children']:
-        child.parent = self
-
-    # If asked to recurse, recurse.
-    if recurse:
-      for child in self._properties['children']:
-        if child.__class__ == PBXGroup:
-          child.TakeOverOnlyChild(recurse)
-
-  def SortGroup(self):
-    self._properties['children'] = \
-        sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))
-
-    # Recurse.
-    for child in self._properties['children']:
-      if isinstance(child, PBXGroup):
-        child.SortGroup()
-
-
-class XCFileLikeElement(XCHierarchicalElement):
-  # Abstract base for objects that can be used as the fileRef property of
-  # PBXBuildFile.
-
-  def PathHashables(self):
-    # A PBXBuildFile that refers to this object will call this method to
-    # obtain additional hashables specific to this XCFileLikeElement. Don't
-    # just use this object's hashables, they're not specific and unique enough
-    # on their own (without access to the parent hashables.) Instead, provide
-    # hashables that identify this object by path by getting its hashables as
-    # well as the hashables of ancestor XCHierarchicalElement objects.
-
-    hashables = []
-    xche = self
-    while xche != None and isinstance(xche, XCHierarchicalElement):
-      xche_hashables = xche.Hashables()
-      for index in xrange(0, len(xche_hashables)):
-        hashables.insert(index, xche_hashables[index])
-      xche = xche.parent
-    return hashables
-
-
-class XCContainerPortal(XCObject):
-  # Abstract base for objects that can be used as the containerPortal property
-  # of PBXContainerItemProxy.
-  pass
-
-
-class XCRemoteObject(XCObject):
-  # Abstract base for objects that can be used as the remoteGlobalIDString
-  # property of PBXContainerItemProxy.
-  pass
-
-
-class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
-  _schema = XCFileLikeElement._schema.copy()
-  _schema.update({
-    'explicitFileType': [0, str, 0, 0],
-    'lastKnownFileType': [0, str, 0, 0],
-    'name': [0, str, 0, 0],
-    'path': [0, str, 0, 1],
-  })
-
-  # Weird output rules for PBXFileReference.
-  _should_print_single_line = True
-  # super
-  _encode_transforms = XCFileLikeElement._alternate_encode_transforms
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCFileLikeElement.__init__(self, properties, id, parent)
-    if 'path' in self._properties and self._properties['path'].endswith('/'):
-      self._properties['path'] = self._properties['path'][:-1]
-      is_dir = True
-    else:
-      is_dir = False
-
-    if 'path' in self._properties and \
-       not 'lastKnownFileType' in self._properties and \
-       not 'explicitFileType' in self._properties:
-      # TODO(mark): This is the replacement for a replacement for a quick hack.
-      # It is no longer incredibly sucky, but this list needs to be extended.
-      extension_map = {
-        'a': 'archive.ar',
-        'app': 'wrapper.application',
-        'bdic': 'file',
-        'bundle': 'wrapper.cfbundle',
-        'c': 'sourcecode.c.c',
-        'cc': 'sourcecode.cpp.cpp',
-        'cpp': 'sourcecode.cpp.cpp',
-        'css': 'text.css',
-        'cxx': 'sourcecode.cpp.cpp',
-        'dylib': 'compiled.mach-o.dylib',
-        'framework': 'wrapper.framework',
-        'h': 'sourcecode.c.h',
-        'hxx': 'sourcecode.cpp.h',
-        'icns': 'image.icns',
-        'java': 'sourcecode.java',
-        'js': 'sourcecode.javascript',
-        'm': 'sourcecode.c.objc',
-        'mm': 'sourcecode.cpp.objcpp',
-        'nib': 'wrapper.nib',
-        'o': 'compiled.mach-o.objfile',
-        'pdf': 'image.pdf',
-        'pl': 'text.script.perl',
-        'plist': 'text.plist.xml',
-        'pm': 'text.script.perl',
-        'png': 'image.png',
-        'py': 'text.script.python',
-        'r': 'sourcecode.rez',
-        'rez': 'sourcecode.rez',
-        's': 'sourcecode.asm',
-        'storyboard': 'file.storyboard',
-        'strings': 'text.plist.strings',
-        'ttf': 'file',
-        'xcconfig': 'text.xcconfig',
-        'xcdatamodel': 'wrapper.xcdatamodel',
-        'xib': 'file.xib',
-        'y': 'sourcecode.yacc',
-      }
-
-      if is_dir:
-        file_type = 'folder'
-      else:
-        basename = posixpath.basename(self._properties['path'])
-        (root, ext) = posixpath.splitext(basename)
-        # Check the map using a lowercase extension.
-        # TODO(mark): Maybe it should try with the original case first and fall
-        # back to lowercase, in case there are any instances where case
-        # matters. There currently aren't.
-        if ext != '':
-          ext = ext[1:].lower()
-
-        # TODO(mark): "text" is the default value, but "file" is appropriate
-        # for unrecognized files not containing text. Xcode seems to choose
-        # based on content.
-        file_type = extension_map.get(ext, 'text')
-
-      self._properties['lastKnownFileType'] = file_type
-
-
-class PBXVariantGroup(PBXGroup, XCFileLikeElement):
-  """PBXVariantGroup is used by Xcode to represent localizations."""
-  # No additions to the schema relative to PBXGroup.
-  pass
-
-
-# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
-# because it uses PBXContainerItemProxy, defined below.
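# A minimal, self-contained sketch of the extension lookup performed in
# PBXFileReference.__init__ above. The helper name _guess_file_type and the
# two-entry sample map are illustrative only, not part of this module; the
# 'folder' special case, lowercase matching, and 'text' fallback mirror the
# code above.
#
#   import posixpath
#
#   def _guess_file_type(path, is_dir=False):
#     extension_map = {'cc': 'sourcecode.cpp.cpp', 'h': 'sourcecode.c.h'}
#     if is_dir:
#       return 'folder'
#     ext = posixpath.splitext(posixpath.basename(path))[1]
#     if ext != '':
#       ext = ext[1:].lower()
#     return extension_map.get(ext, 'text')
#
#   _guess_file_type('src/foo.CC')  # -> 'sourcecode.cpp.cpp'
#   _guess_file_type('README')      # -> 'text'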
-
-
-class XCBuildConfiguration(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'baseConfigurationReference': [0, PBXFileReference, 0, 0],
-    'buildSettings': [0, dict, 0, 1, {}],
-    'name': [0, str, 0, 1],
-  })
-
-  def HasBuildSetting(self, key):
-    return key in self._properties['buildSettings']
-
-  def GetBuildSetting(self, key):
-    return self._properties['buildSettings'][key]
-
-  def SetBuildSetting(self, key, value):
-    # TODO(mark): If a list, copy?
-    self._properties['buildSettings'][key] = value
-
-  def AppendBuildSetting(self, key, value):
-    if not key in self._properties['buildSettings']:
-      self._properties['buildSettings'][key] = []
-    self._properties['buildSettings'][key].append(value)
-
-  def DelBuildSetting(self, key):
-    if key in self._properties['buildSettings']:
-      del self._properties['buildSettings'][key]
-
-  def SetBaseConfiguration(self, value):
-    self._properties['baseConfigurationReference'] = value
-
-class XCConfigurationList(XCObject):
-  # _configs is the default list of configurations.
-  _configs = [ XCBuildConfiguration({'name': 'Debug'}),
-               XCBuildConfiguration({'name': 'Release'}) ]
-
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
-    'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
-    'defaultConfigurationName': [0, str, 0, 1, 'Release'],
-  })
-
-  def Name(self):
-    return 'Build configuration list for ' + \
-           self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
-
-  def ConfigurationNamed(self, name):
-    """Convenience accessor to obtain an XCBuildConfiguration by name."""
-    for configuration in self._properties['buildConfigurations']:
-      if configuration._properties['name'] == name:
-        return configuration
-
-    raise KeyError, name
-
-  def DefaultConfiguration(self):
-    """Convenience accessor to obtain the default XCBuildConfiguration."""
-    return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
-
-  def HasBuildSetting(self, key):
-    """Determines the state of a build setting in all XCBuildConfiguration
-    child objects.
-
-    If all child objects have key in their build settings, and the value is the
-    same in all child objects, returns 1.
-
-    If no child objects have the key in their build settings, returns 0.
-
-    If some, but not all, child objects have the key in their build settings,
-    or if any children have different values for the key, returns -1.
-    """
-
-    has = None
-    value = None
-    for configuration in self._properties['buildConfigurations']:
-      configuration_has = configuration.HasBuildSetting(key)
-      if has is None:
-        has = configuration_has
-      elif has != configuration_has:
-        return -1
-
-      if configuration_has:
-        configuration_value = configuration.GetBuildSetting(key)
-        if value is None:
-          value = configuration_value
-        elif value != configuration_value:
-          return -1
-
-    if not has:
-      return 0
-
-    return 1
-
-  def GetBuildSetting(self, key):
-    """Gets the build setting for key.
-
-    All child XCBuildConfiguration objects must have the same value set for the
-    setting, or a ValueError will be raised.
-    """
-
-    # TODO(mark): This is wrong for build settings that are lists. The list
-    # contents should be compared (and a list copy returned?)
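    # For example, with Debug {'CFLAGS': '-g'} and Release {'CFLAGS': '-g'},
    # GetBuildSetting('CFLAGS') returns '-g'; if Release instead carried
    # '-O2', the loop below would raise ValueError for the variant values.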
-
-    value = None
-    for configuration in self._properties['buildConfigurations']:
-      configuration_value = configuration.GetBuildSetting(key)
-      if value is None:
-        value = configuration_value
-      else:
-        if value != configuration_value:
-          raise ValueError, 'Variant values for ' + key
-
-    return value
-
-  def SetBuildSetting(self, key, value):
-    """Sets the build setting for key to value in all child
-    XCBuildConfiguration objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.SetBuildSetting(key, value)
-
-  def AppendBuildSetting(self, key, value):
-    """Appends value to the build setting for key, which is treated as a list,
-    in all child XCBuildConfiguration objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.AppendBuildSetting(key, value)
-
-  def DelBuildSetting(self, key):
-    """Deletes the build setting key from all child XCBuildConfiguration
-    objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.DelBuildSetting(key)
-
-  def SetBaseConfiguration(self, value):
-    """Sets the build configuration in all child XCBuildConfiguration objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.SetBaseConfiguration(value)
-
-
-class PBXBuildFile(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'fileRef': [0, XCFileLikeElement, 0, 1],
-    'settings': [0, str, 0, 0],  # hack, it's a dict
-  })
-
-  # Weird output rules for PBXBuildFile.
-  _should_print_single_line = True
-  _encode_transforms = XCObject._alternate_encode_transforms
-
-  def Name(self):
-    # Example: "main.cc in Sources"
-    return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # It is not sufficient to just rely on Name() to get the
-    # XCFileLikeElement's name, because that is not a complete pathname.
-    # PathHashables returns hashables unique enough that no two
-    # PBXBuildFiles should wind up with the same set of hashables, unless
-    # someone adds the same file multiple times to the same target. That
-    # would be considered invalid anyway.
-    hashables.extend(self._properties['fileRef'].PathHashables())
-
-    return hashables
-
-
-class XCBuildPhase(XCObject):
-  """Abstract base for build phase classes. Not represented in a project
-  file.
-
-  Attributes:
-    _files_by_path: A dict mapping each path of a child in the files list
-      (keys) to the corresponding PBXBuildFile children (values).
-    _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
-      to the corresponding PBXBuildFile children (values).
-  """
-
-  # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
-  # actually have a "files" list. XCBuildPhase should not have "files" but
-  # another abstract subclass of it should provide this, and concrete build
-  # phase types that do have "files" lists should be derived from that new
-  # abstract subclass. XCBuildPhase should only provide buildActionMask and
-  # runOnlyForDeploymentPostprocessing, and not files or the various
-  # file-related methods and attributes.
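  # A shape sketch of the two dicts documented above, with hypothetical
  # contents (the path and objects are made up for illustration):
  #   _files_by_path              = {'src/foo.cc': <PBXBuildFile>}
  #   _files_by_xcfilelikeelement = {<PBXFileReference 'src/foo.cc'>: <PBXBuildFile>}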
-
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'buildActionMask': [0, int, 0, 1, 0x7fffffff],
-    'files': [1, PBXBuildFile, 1, 1, []],
-    'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCObject.__init__(self, properties, id, parent)
-
-    self._files_by_path = {}
-    self._files_by_xcfilelikeelement = {}
-    for pbxbuildfile in self._properties.get('files', []):
-      self._AddBuildFileToDicts(pbxbuildfile)
-
-  def FileGroup(self, path):
-    # Subclasses must override this by returning a two-element tuple. The
-    # first item in the tuple should be the PBXGroup to which "path" should be
-    # added, either as a child or deeper descendant. The second item should
-    # be a boolean indicating whether files should be added into hierarchical
-    # groups or one single flat group.
-    raise NotImplementedError, \
-          self.__class__.__name__ + ' must implement FileGroup'
-
-  def _AddPathToDict(self, pbxbuildfile, path):
-    """Adds path to the dict tracking paths belonging to this build phase.
-
-    If the path is already a member of this build phase, raises an exception.
-    """
-
-    if path in self._files_by_path:
-      raise ValueError, 'Found multiple build files with path ' + path
-    self._files_by_path[path] = pbxbuildfile
-
-  def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
-    """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
-
-    If path is specified, then it is the path that is being added to the
-    phase, and pbxbuildfile must contain either a PBXFileReference directly
-    referencing that path, or it must contain a PBXVariantGroup that itself
-    contains a PBXFileReference referencing the path.
-
-    If path is not specified, either the PBXFileReference's path or the paths
-    of all children of the PBXVariantGroup are taken as being added to the
-    phase.
-
-    If the path is already present in the phase, raises an exception.
-
-    If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
-    is already present in the phase, referenced by a different PBXBuildFile
-    object, raises an exception. This does not raise an exception when
-    a PBXFileReference or PBXVariantGroup reappear and are referenced by the
-    same PBXBuildFile that has already introduced them, because in the case
-    of PBXVariantGroup objects, they may correspond to multiple paths that are
-    not all added simultaneously. When this situation occurs, the path needs
-    to be added to _files_by_path, but nothing needs to change in
-    _files_by_xcfilelikeelement, and the caller should have avoided adding
-    the PBXBuildFile if it is already present in the list of children.
-    """
-
-    xcfilelikeelement = pbxbuildfile._properties['fileRef']
-
-    paths = []
-    if path != None:
-      # It's best when the caller provides the path.
-      if isinstance(xcfilelikeelement, PBXVariantGroup):
-        paths.append(path)
-    else:
-      # If the caller didn't provide a path, there can be either multiple
-      # paths (PBXVariantGroup) or one.
-      if isinstance(xcfilelikeelement, PBXVariantGroup):
-        for variant in xcfilelikeelement._properties['children']:
-          paths.append(variant.FullPath())
-      else:
-        paths.append(xcfilelikeelement.FullPath())
-
-    # Add the paths first, because if something's going to raise, the
-    # messages provided by _AddPathToDict are more useful owing to its
-    # having access to a real pathname and not just an object's Name().
-    for a_path in paths:
-      self._AddPathToDict(pbxbuildfile, a_path)
-
-    # If another PBXBuildFile references this XCFileLikeElement, there's a
-    # problem.
-    if xcfilelikeelement in self._files_by_xcfilelikeelement and \
-       self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
-      raise ValueError, 'Found multiple build files for ' + \
-                        xcfilelikeelement.Name()
-    self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
-
-  def AppendBuildFile(self, pbxbuildfile, path=None):
-    # Callers should use this instead of calling
-    # AppendProperty('files', pbxbuildfile) directly because this function
-    # maintains the object's dicts. Better yet, callers can just call AddFile
-    # with a pathname and not worry about building their own PBXBuildFile
-    # objects.
-    self.AppendProperty('files', pbxbuildfile)
-    self._AddBuildFileToDicts(pbxbuildfile, path)
-
-  def AddFile(self, path, settings=None):
-    (file_group, hierarchical) = self.FileGroup(path)
-    file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
-
-    if file_ref in self._files_by_xcfilelikeelement and \
-       isinstance(file_ref, PBXVariantGroup):
-      # There's already a PBXBuildFile in this phase corresponding to the
-      # PBXVariantGroup. path just provides a new variant that belongs to
-      # the group. Add the path to the dict.
-      pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
-      self._AddBuildFileToDicts(pbxbuildfile, path)
-    else:
-      # Add a new PBXBuildFile to get file_ref into the phase.
-      if settings is None:
-        pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
-      else:
-        pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings})
-      self.AppendBuildFile(pbxbuildfile, path)
-
-
-class PBXHeadersBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Headers'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXResourcesBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Resources'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXSourcesBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Sources'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXFrameworksBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Frameworks'
-
-  def FileGroup(self, path):
-    (root, ext) = posixpath.splitext(path)
-    if ext != '':
-      ext = ext[1:].lower()
-    if ext == 'o':
-      # .o files are added to Xcode Frameworks phases, but conceptually aren't
-      # frameworks, they're more like sources or intermediates. Redirect them
-      # to show up in one of those other groups.
-      return self.PBXProjectAncestor().RootGroupForPath(path)
-    else:
-      return (self.PBXProjectAncestor().FrameworksGroup(), False)
-
-
-class PBXShellScriptBuildPhase(XCBuildPhase):
-  _schema = XCBuildPhase._schema.copy()
-  _schema.update({
-    'inputPaths': [1, str, 0, 1, []],
-    'name': [0, str, 0, 0],
-    'outputPaths': [1, str, 0, 1, []],
-    'shellPath': [0, str, 0, 1, '/bin/sh'],
-    'shellScript': [0, str, 0, 1],
-    'showEnvVarsInLog': [0, int, 0, 0],
-  })
-
-  def Name(self):
-    if 'name' in self._properties:
-      return self._properties['name']
-
-    return 'ShellScript'
-
-
-class PBXCopyFilesBuildPhase(XCBuildPhase):
-  _schema = XCBuildPhase._schema.copy()
-  _schema.update({
-    'dstPath': [0, str, 0, 1],
-    'dstSubfolderSpec': [0, int, 0, 1],
-    'name': [0, str, 0, 0],
-  })
-
-  # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is
-  # "DIR", match group 3 is "path" or None.
-  path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
-
-  # path_tree_to_subfolder maps names of Xcode variables to the associated
-  # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
-  path_tree_to_subfolder = {
-    'BUILT_PRODUCTS_DIR': 16,  # Products Directory
-    # Other types that can be chosen via the Xcode UI.
-    # TODO(mark): Map Xcode variable names to these.
-    # : 1,  # Wrapper
-    # : 6,  # Executables
-    # : 7,  # Resources
-    # : 15,  # Java Resources
-    # : 10,  # Frameworks
-    # : 11,  # Shared Frameworks
-    # : 12,  # Shared Support
-    # : 13,  # PlugIns
-  }
-
-  def Name(self):
-    if 'name' in self._properties:
-      return self._properties['name']
-
-    return 'CopyFiles'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-  def SetDestination(self, path):
-    """Set the dstSubfolderSpec and dstPath properties from path.
-
-    path may be specified in the same notation used for XCHierarchicalElements,
-    specifically, "$(DIR)/path".
-    """
-
-    path_tree_match = self.path_tree_re.search(path)
-    if path_tree_match:
-      # Everything else needs to be relative to an Xcode variable.
-      path_tree = path_tree_match.group(1)
-      relative_path = path_tree_match.group(3)
-
-      if path_tree in self.path_tree_to_subfolder:
-        subfolder = self.path_tree_to_subfolder[path_tree]
-        if relative_path is None:
-          relative_path = ''
-      else:
-        # The path starts with an unrecognized Xcode variable
-        # name like $(SRCROOT). Xcode will still handle this
-        # as an "absolute path" that starts with the variable.
-        subfolder = 0
-        relative_path = path
-    elif path.startswith('/'):
-      # Special case. Absolute paths are in dstSubfolderSpec 0.
-      subfolder = 0
-      relative_path = path[1:]
-    else:
-      raise ValueError, 'Can\'t use path %s in a %s' % \
-                        (path, self.__class__.__name__)
-
-    self._properties['dstPath'] = relative_path
-    self._properties['dstSubfolderSpec'] = subfolder
-
-
-class PBXBuildRule(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'compilerSpec': [0, str, 0, 1],
-    'filePatterns': [0, str, 0, 0],
-    'fileType': [0, str, 0, 1],
-    'isEditable': [0, int, 0, 1, 1],
-    'outputFiles': [1, str, 0, 1, []],
-    'script': [0, str, 0, 0],
-  })
-
-  def Name(self):
-    # Not very inspired, but it's what Xcode uses.
-    return self.__class__.__name__
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # Use the hashables of the weak objects that this object refers to.
-    hashables.append(self._properties['fileType'])
-    if 'filePatterns' in self._properties:
-      hashables.append(self._properties['filePatterns'])
-    return hashables
-
-
-class PBXContainerItemProxy(XCObject):
-  # When referencing an item in this project file, containerPortal is the
-  # PBXProject root object of this project file. When referencing an item in
-  # another project file, containerPortal is a PBXFileReference identifying
-  # the other project file.
-  #
-  # When serving as a proxy to an XCTarget (in this project file or another),
-  # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
-  # project file), proxyType is 2. Type 2 is used for references to the
-  # products of the other project file's targets.
-  #
-  # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
-  # a comment, indicating that it's tracked internally simply as a string, but
-  # sometimes it's printed with a comment (usually when the object is initially
-  # created), indicating that it's tracked as a project file object at least
-  # sometimes. This module always tracks it as an object, but contains a hack
-  # to prevent it from printing the comment in the project file output. See
-  # _XCKVPrint.
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'containerPortal': [0, XCContainerPortal, 0, 1],
-    'proxyType': [0, int, 0, 1],
-    'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
-    'remoteInfo': [0, str, 0, 1],
-  })
-
-  def __repr__(self):
-    props = self._properties
-    name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
-    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
-  def Name(self):
-    # Admittedly not the best name, but it's what Xcode uses.
-    return self.__class__.__name__
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # Use the hashables of the weak objects that this object refers to.
-    hashables.extend(self._properties['containerPortal'].Hashables())
-    hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
-    return hashables
-
-
-class PBXTargetDependency(XCObject):
-  # The "target" property accepts an XCTarget object, and obviously not
-  # NoneType. But XCTarget is defined below, so it can't be put into the
-  # schema yet. The definition of PBXTargetDependency can't be moved below
-  # XCTarget because XCTarget's own schema references PBXTargetDependency.
-  # Python doesn't deal well with this circular relationship, and doesn't have
-  # a real way to do forward declarations. To work around, the type of
-  # the "target" property is reset below, after XCTarget is defined.
-  #
-  # At least one of "name" and "target" is required.
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'name': [0, str, 0, 0],
-    'target': [0, None.__class__, 0, 0],
-    'targetProxy': [0, PBXContainerItemProxy, 1, 1],
-  })
-
-  def __repr__(self):
-    name = self._properties.get('name') or self._properties['target'].Name()
-    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
-  def Name(self):
-    # Admittedly not the best name, but it's what Xcode uses.
-    return self.__class__.__name__
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # Use the hashables of the weak objects that this object refers to.
-    hashables.extend(self._properties['targetProxy'].Hashables())
-    return hashables
-
-
-class PBXReferenceProxy(XCFileLikeElement):
-  _schema = XCFileLikeElement._schema.copy()
-  _schema.update({
-    'fileType': [0, str, 0, 1],
-    'path': [0, str, 0, 1],
-    'remoteRef': [0, PBXContainerItemProxy, 1, 1],
-  })
-
-
-class XCTarget(XCRemoteObject):
-  # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
-  # to allow PBXProject to be used in the remoteGlobalIDString property of
-  # PBXContainerItemProxy.
-  #
-  # Setting a "name" property at instantiation may also affect "productName",
-  # which may in turn affect the "PRODUCT_NAME" build setting in children of
-  # "buildConfigurationList". See __init__ below.
-  _schema = XCRemoteObject._schema.copy()
-  _schema.update({
-    'buildConfigurationList': [0, XCConfigurationList, 1, 1,
-                               XCConfigurationList()],
-    'buildPhases': [1, XCBuildPhase, 1, 1, []],
-    'dependencies': [1, PBXTargetDependency, 1, 1, []],
-    'name': [0, str, 0, 1],
-    'productName': [0, str, 0, 1],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None,
-               force_outdir=None, force_prefix=None, force_extension=None):
-    # super
-    XCRemoteObject.__init__(self, properties, id, parent)
-
-    # Set up additional defaults not expressed in the schema. If a "name"
-    # property was supplied, set "productName" if it is not present. Also set
-    # the "PRODUCT_NAME" build setting in each configuration, but only if
-    # the setting is not present in any build configuration.
-    if 'name' in self._properties:
-      if not 'productName' in self._properties:
-        self.SetProperty('productName', self._properties['name'])
-
-    if 'productName' in self._properties:
-      if 'buildConfigurationList' in self._properties:
-        configs = self._properties['buildConfigurationList']
-        if configs.HasBuildSetting('PRODUCT_NAME') == 0:
-          configs.SetBuildSetting('PRODUCT_NAME',
-                                  self._properties['productName'])
-
-  def AddDependency(self, other):
-    pbxproject = self.PBXProjectAncestor()
-    other_pbxproject = other.PBXProjectAncestor()
-    if pbxproject == other_pbxproject:
-      # Add a dependency to another target in the same project file.
-      container = PBXContainerItemProxy({'containerPortal': pbxproject,
-                                         'proxyType': 1,
-                                         'remoteGlobalIDString': other,
-                                         'remoteInfo': other.Name()})
-      dependency = PBXTargetDependency({'target': other,
-                                        'targetProxy': container})
-      self.AppendProperty('dependencies', dependency)
-    else:
-      # Add a dependency to a target in a different project file.
-      other_project_ref = \
-          pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
-      container = PBXContainerItemProxy({
-        'containerPortal': other_project_ref,
-        'proxyType': 1,
-        'remoteGlobalIDString': other,
-        'remoteInfo': other.Name(),
-      })
-      dependency = PBXTargetDependency({'name': other.Name(),
-                                        'targetProxy': container})
-      self.AppendProperty('dependencies', dependency)
-
-  # Proxy all of these through to the build configuration list.
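  # For instance, target.SetBuildSetting('PRODUCT_NAME', 'foo') below simply
  # forwards to the owned XCConfigurationList, touching every configuration.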
-
-  def ConfigurationNamed(self, name):
-    return self._properties['buildConfigurationList'].ConfigurationNamed(name)
-
-  def DefaultConfiguration(self):
-    return self._properties['buildConfigurationList'].DefaultConfiguration()
-
-  def HasBuildSetting(self, key):
-    return self._properties['buildConfigurationList'].HasBuildSetting(key)
-
-  def GetBuildSetting(self, key):
-    return self._properties['buildConfigurationList'].GetBuildSetting(key)
-
-  def SetBuildSetting(self, key, value):
-    return self._properties['buildConfigurationList'].SetBuildSetting(key, \
-                                                                      value)
-
-  def AppendBuildSetting(self, key, value):
-    return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
-                                                                         value)
-
-  def DelBuildSetting(self, key):
-    return self._properties['buildConfigurationList'].DelBuildSetting(key)
-
-
-# Redefine the type of the "target" property. See PBXTargetDependency._schema
-# above.
-PBXTargetDependency._schema['target'][1] = XCTarget
-
-
-class PBXNativeTarget(XCTarget):
-  # buildPhases is overridden in the schema to be able to set defaults.
-  #
-  # NOTE: Contrary to most objects, it is advisable to set parent when
-  # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
-  # object. A parent reference is required for a PBXNativeTarget during
-  # construction to be able to set up the target defaults for productReference,
-  # because a PBXBuildFile object must be created for the target and it must
-  # be added to the PBXProject's mainGroup hierarchy.
-  _schema = XCTarget._schema.copy()
-  _schema.update({
-    'buildPhases': [1, XCBuildPhase, 1, 1,
-                    [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
-    'buildRules': [1, PBXBuildRule, 1, 1, []],
-    'productReference': [0, PBXFileReference, 0, 1],
-    'productType': [0, str, 0, 1],
-  })
-
-  # Mapping from Xcode product-types to settings. The settings are:
-  #   filetype : used for explicitFileType in the project file
-  #   prefix : the prefix for the file name
-  #   suffix : the suffix for the file name
-  _product_filetypes = {
-    'com.apple.product-type.application': ['wrapper.application',
-                                           '', '.app'],
-    'com.apple.product-type.bundle': ['wrapper.cfbundle',
-                                      '', '.bundle'],
-    'com.apple.product-type.framework': ['wrapper.framework',
-                                         '', '.framework'],
-    'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
-                                               'lib', '.dylib'],
-    'com.apple.product-type.library.static': ['archive.ar',
-                                              'lib', '.a'],
-    'com.apple.product-type.tool': ['compiled.mach-o.executable',
-                                    '', ''],
-    'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
-                                        '', '.so'],
-  }
-
-  def __init__(self, properties=None, id=None, parent=None,
-               force_outdir=None, force_prefix=None, force_extension=None):
-    # super
-    XCTarget.__init__(self, properties, id, parent)
-
-    if 'productName' in self._properties and \
-       'productType' in self._properties and \
-       not 'productReference' in self._properties and \
-       self._properties['productType'] in self._product_filetypes:
-      products_group = None
-      pbxproject = self.PBXProjectAncestor()
-      if pbxproject != None:
-        products_group = pbxproject.ProductsGroup()
-
-      if products_group != None:
-        (filetype, prefix, suffix) = \
-            self._product_filetypes[self._properties['productType']]
-        # Xcode does not have a distinct type for loadable modules that are
-        # pure BSD targets (not in a bundle wrapper). GYP allows such modules
-        # to be specified by setting a target type to loadable_module without
-        # having mac_bundle set. These are mapped to the pseudo-product type
-        # com.googlecode.gyp.xcode.bundle.
-        #
-        # By picking up this special type and converting it to a dynamic
-        # library (com.apple.product-type.library.dynamic) with fix-ups,
-        # single-file loadable modules can be produced.
-        #
-        # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
-        # (as opposed to mh_dylib). In order for linking to succeed,
-        # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
-        # cleared. They are meaningless for type mh_bundle.
-        #
-        # Finally, the .so extension is forcibly applied over the default
-        # (.dylib), unless another forced extension is already selected.
-        # .dylib is plainly wrong, and .bundle is used by loadable_modules in
-        # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
-        # choice because it's used as the extension on many other systems that
-        # don't distinguish between linkable shared libraries and non-linkable
-        # loadable modules, but there's precedent: Python loadable modules on
-        # Mac OS X use an .so extension.
-        if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle':
-          self._properties['productType'] = \
-              'com.apple.product-type.library.dynamic'
-          self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
-          self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
-          self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
-          if force_extension is None:
-            force_extension = suffix[1:]
-
-        if force_extension is not None:
-          # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
-          if filetype.startswith('wrapper.'):
-            self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
-          else:
-            # Extension override.
-            suffix = '.' + force_extension
-            self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
-
-          if filetype.startswith('compiled.mach-o.executable'):
-            product_name = self._properties['productName']
-            product_name += suffix
-            suffix = ''
-            self.SetProperty('productName', product_name)
-            self.SetBuildSetting('PRODUCT_NAME', product_name)
-
-        # Xcode handles most prefixes based on the target type, however there
-        # are exceptions. If a "BSD Dynamic Library" target is added in the
-        # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
-        # behavior.
-        if force_prefix is not None:
-          prefix = force_prefix
-        if filetype.startswith('wrapper.'):
-          self.SetBuildSetting('WRAPPER_PREFIX', prefix)
-        else:
-          self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
-
-        if force_outdir is not None:
-          self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
-
-        # TODO(tvl): Remove the below hack.
-        # http://code.google.com/p/gyp/issues/detail?id=122
-
-        # Some targets include the prefix in the target_name. These targets
-        # really should just add a product_name setting that doesn't include
-        # the prefix. For example:
-        #   target_name = 'libevent', product_name = 'event'
-        # This check cleans up for them.
-        product_name = self._properties['productName']
-        prefix_len = len(prefix)
-        if prefix_len and (product_name[:prefix_len] == prefix):
-          product_name = product_name[prefix_len:]
-          self.SetProperty('productName', product_name)
-          self.SetBuildSetting('PRODUCT_NAME', product_name)
-
-        ref_props = {
-          'explicitFileType': filetype,
-          'includeInIndex': 0,
-          'path': prefix + product_name + suffix,
-          'sourceTree': 'BUILT_PRODUCTS_DIR',
-        }
-        file_ref = PBXFileReference(ref_props)
-        products_group.AppendChild(file_ref)
-        self.SetProperty('productReference', file_ref)
-
-  def GetBuildPhaseByType(self, type):
-    if not 'buildPhases' in self._properties:
-      return None
-
-    the_phase = None
-    for phase in self._properties['buildPhases']:
-      if isinstance(phase, type):
-        # Some phases may be present in multiples in a well-formed project
-        # file, but phases like PBXSourcesBuildPhase may only be present
-        # singly, and this function is intended as an aid for retrieving
-        # those singly-present phases. Loop over the entire list of phases
-        # and assert if more than one of the desired type is found.
-        assert the_phase is None
-        the_phase = phase
-
-    return the_phase
-
-  def HeadersPhase(self):
-    headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
-    if headers_phase is None:
-      headers_phase = PBXHeadersBuildPhase()
-
-      # The headers phase should come before the resources, sources, and
-      # frameworks phases, if any.
-      insert_at = len(self._properties['buildPhases'])
-      for index in xrange(0, len(self._properties['buildPhases'])):
-        phase = self._properties['buildPhases'][index]
-        if isinstance(phase, PBXResourcesBuildPhase) or \
-           isinstance(phase, PBXSourcesBuildPhase) or \
-           isinstance(phase, PBXFrameworksBuildPhase):
-          insert_at = index
-          break
-
-      self._properties['buildPhases'].insert(insert_at, headers_phase)
-      headers_phase.parent = self
-
-    return headers_phase
-
-  def ResourcesPhase(self):
-    resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
-    if resources_phase is None:
-      resources_phase = PBXResourcesBuildPhase()
-
-      # The resources phase should come before the sources and frameworks
-      # phases, if any.
-      insert_at = len(self._properties['buildPhases'])
-      for index in xrange(0, len(self._properties['buildPhases'])):
-        phase = self._properties['buildPhases'][index]
-        if isinstance(phase, PBXSourcesBuildPhase) or \
-           isinstance(phase, PBXFrameworksBuildPhase):
-          insert_at = index
-          break
-
-      self._properties['buildPhases'].insert(insert_at, resources_phase)
-      resources_phase.parent = self
-
-    return resources_phase
-
-  def SourcesPhase(self):
-    sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
-    if sources_phase is None:
-      sources_phase = PBXSourcesBuildPhase()
-      self.AppendProperty('buildPhases', sources_phase)
-
-    return sources_phase
-
-  def FrameworksPhase(self):
-    frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
-    if frameworks_phase is None:
-      frameworks_phase = PBXFrameworksBuildPhase()
-      self.AppendProperty('buildPhases', frameworks_phase)
-
-    return frameworks_phase
-
-  def AddDependency(self, other):
-    # super
-    XCTarget.AddDependency(self, other)
-
-    static_library_type = 'com.apple.product-type.library.static'
-    shared_library_type = 'com.apple.product-type.library.dynamic'
-    framework_type = 'com.apple.product-type.framework'
-    if isinstance(other, PBXNativeTarget) and \
-       'productType' in self._properties and \
-       self._properties['productType'] != static_library_type and \
-       'productType' in other._properties and \
-       (other._properties['productType'] == static_library_type or \
-        ((other._properties['productType'] == shared_library_type or \
-          other._properties['productType'] == framework_type) and \
-         ((not other.HasBuildSetting('MACH_O_TYPE')) or
-          other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
-
-      file_ref = other.GetProperty('productReference')
-
-      pbxproject = self.PBXProjectAncestor()
-      other_pbxproject = other.PBXProjectAncestor()
-      if pbxproject != other_pbxproject:
-        other_project_product_group = \
-            pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
-        file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
-
-      self.FrameworksPhase().AppendProperty('files',
-                                            PBXBuildFile({'fileRef': file_ref}))
-
-
-class PBXAggregateTarget(XCTarget):
-  pass
-
-
-class PBXProject(XCContainerPortal):
-  # A PBXProject is really just an XCObject, the XCContainerPortal thing is
-  # just to allow PBXProject to be used in the containerPortal property of
-  # PBXContainerItemProxy.
-  """
-
-  Attributes:
-    path: "sample.xcodeproj". TODO(mark) Document me!
-    _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
-      value is a reference to the dict in the projectReferences list
-      associated with the keyed PBXProject.
-  """
-
-  _schema = XCContainerPortal._schema.copy()
-  _schema.update({
-    'attributes': [0, dict, 0, 0],
-    'buildConfigurationList': [0, XCConfigurationList, 1, 1,
-                               XCConfigurationList()],
-    'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'],
-    'hasScannedForEncodings': [0, int, 0, 1, 1],
-    'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
-    'projectDirPath': [0, str, 0, 1, ''],
-    'projectReferences': [1, dict, 0, 0],
-    'projectRoot': [0, str, 0, 1, ''],
-    'targets': [1, XCTarget, 1, 1, []],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None, path=None):
-    self.path = path
-    self._other_pbxprojects = {}
-    # super
-    return XCContainerPortal.__init__(self, properties, id, parent)
-
-  def Name(self):
-    name = self.path
-    if name[-10:] == '.xcodeproj':
-      name = name[:-10]
-    return posixpath.basename(name)
-
-  def Path(self):
-    return self.path
-
-  def Comment(self):
-    return 'Project object'
-
-  def Children(self):
-    # super
-    children = XCContainerPortal.Children(self)
-
-    # Add children that the schema doesn't know about. Maybe there's a more
-    # elegant way around this, but this is the only case where we need to own
-    # objects in a dictionary (that is itself in a list), and three lines for
-    # a one-off isn't that big a deal.
-    if 'projectReferences' in self._properties:
-      for reference in self._properties['projectReferences']:
-        children.append(reference['ProductGroup'])
-
-    return children
-
-  def PBXProjectAncestor(self):
-    return self
-
-  def _GroupByName(self, name):
-    if not 'mainGroup' in self._properties:
-      self.SetProperty('mainGroup', PBXGroup())
-
-    main_group = self._properties['mainGroup']
-    group = main_group.GetChildByName(name)
-    if group is None:
-      group = PBXGroup({'name': name})
-      main_group.AppendChild(group)
-
-    return group
-
-  # SourceGroup and ProductsGroup are created by default in Xcode's own
-  # templates.
-  def SourceGroup(self):
-    return self._GroupByName('Source')
-
-  def ProductsGroup(self):
-    return self._GroupByName('Products')
-
-  # IntermediatesGroup is used to collect source-like files that are generated
-  # by rules or script phases and are placed in intermediate directories such
-  # as DerivedSources.
-  def IntermediatesGroup(self):
-    return self._GroupByName('Intermediates')
-
-  # FrameworksGroup and ProjectsGroup are top-level groups used to collect
-  # frameworks and projects.
-  def FrameworksGroup(self):
-    return self._GroupByName('Frameworks')
-
-  def ProjectsGroup(self):
-    return self._GroupByName('Projects')
-
-  def RootGroupForPath(self, path):
-    """Returns a PBXGroup child of this object to which path should be added.
-
-    This method is intended to choose between SourceGroup and
-    IntermediatesGroup on the basis of whether path is present in a source
-    directory or an intermediates directory. For the purposes of this
-    determination, any path located within a derived file directory such as
-    PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
-    directory.
-
-    The returned value is a two-element tuple. The first element is the
-    PBXGroup, and the second element specifies whether that group should be
-    organized hierarchically (True) or as a single flat list (False).
-    """
-
-    # TODO(mark): make this a class variable and bind to self on call?
-    # Also, this list is nowhere near exhaustive.
-    # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
-    # gyp.generator.xcode. There should probably be some way for that module
-    # to push the names in, rather than having to hard-code them here.
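    # Illustration of the routing implemented below:
    #   RootGroupForPath('$(INTERMEDIATE_DIR)/gen.cc') -> (IntermediatesGroup, True)
    #   RootGroupForPath('src/main.cc')                -> (SourceGroup, True)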
-    source_tree_groups = {
-      'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
-      'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
-      'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
-      'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
-    }
-
-    (source_tree, path) = SourceTreeAndPathFromPath(path)
-    if source_tree != None and source_tree in source_tree_groups:
-      (group_func, hierarchical) = source_tree_groups[source_tree]
-      group = group_func()
-      return (group, hierarchical)
-
-    # TODO(mark): make additional choices based on file extension.
-
-    return (self.SourceGroup(), True)
-
-  def AddOrGetFileInRootGroup(self, path):
-    """Returns a PBXFileReference corresponding to path in the correct group
-    according to RootGroupForPath's heuristics.
-
-    If an existing PBXFileReference for path exists, it will be returned.
-    Otherwise, one will be created and returned.
-    """
-
-    (group, hierarchical) = self.RootGroupForPath(path)
-    return group.AddOrGetFileByPath(path, hierarchical)
-
-  def RootGroupsTakeOverOnlyChildren(self, recurse=False):
-    """Calls TakeOverOnlyChild for all groups in the main group."""
-
-    for group in self._properties['mainGroup']._properties['children']:
-      if isinstance(group, PBXGroup):
-        group.TakeOverOnlyChild(recurse)
-
-  def SortGroups(self):
-    # Sort the children of the mainGroup (like "Source" and "Products")
-    # according to their defined order.
-    self._properties['mainGroup']._properties['children'] = \
-        sorted(self._properties['mainGroup']._properties['children'],
-               cmp=lambda x,y: x.CompareRootGroup(y))
-
-    # Sort everything else by putting groups before files, and going
-    # alphabetically by name within sections of groups and files. SortGroup
-    # is recursive.
-    for group in self._properties['mainGroup']._properties['children']:
-      if not isinstance(group, PBXGroup):
-        continue
-
-      if group.Name() == 'Products':
-        # The Products group is a special case. Instead of sorting
-        # alphabetically, sort things in the order of the targets that
-        # produce the products. To do this, just build up a new list of
-        # products based on the targets.
-        products = []
-        for target in self._properties['targets']:
-          if not isinstance(target, PBXNativeTarget):
-            continue
-          product = target._properties['productReference']
-          # Make sure that the product is already in the products group.
-          assert product in group._properties['children']
-          products.append(product)
-
-        # Make sure that this process doesn't miss anything that was already
-        # in the products group.
-        assert len(products) == len(group._properties['children'])
-        group._properties['children'] = products
-      else:
-        group.SortGroup()
-
-  def AddOrGetProjectReference(self, other_pbxproject):
-    """Add a reference to another project file (via PBXProject object) to this
-    one.
-
-    Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
-    this project file that contains a PBXReferenceProxy object for each
-    product of each PBXNativeTarget in the other project file. ProjectRef is
-    a PBXFileReference to the other project file.
-
-    If this project file already references the other project file, the
-    existing ProductGroup and ProjectRef are returned. The ProductGroup will
-    still be updated if necessary.
-    """
-
-    if not 'projectReferences' in self._properties:
-      self._properties['projectReferences'] = []
-
-    product_group = None
-    project_ref = None
-
-    if not other_pbxproject in self._other_pbxprojects:
-      # This project file isn't yet linked to the other one. Establish the
-      # link.
-      product_group = PBXGroup({'name': 'Products'})
-
-      # ProductGroup is strong.
-      product_group.parent = self
-
-      # There's nothing unique about this PBXGroup, and if left alone, it will
-      # wind up with the same set of hashables as all other PBXGroup objects
-      # owned by the projectReferences list. Add the hashables of the
-      # remote PBXProject that it's related to.
-      product_group._hashables.extend(other_pbxproject.Hashables())
-
-      # The other project reports its path as relative to the same directory
-      # that this project's path is relative to. The other project's path
-      # is not necessarily already relative to this project. Figure out the
-      # pathname that this project needs to use to refer to the other one.
-      this_path = posixpath.dirname(self.Path())
-      projectDirPath = self.GetProperty('projectDirPath')
-      if projectDirPath:
-        if posixpath.isabs(projectDirPath[0]):
-          this_path = projectDirPath
-        else:
-          this_path = posixpath.join(this_path, projectDirPath)
-      other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
-
-      # ProjectRef is weak (it's owned by the mainGroup hierarchy).
-      project_ref = PBXFileReference({
-          'lastKnownFileType': 'wrapper.pb-project',
-          'path': other_path,
-          'sourceTree': 'SOURCE_ROOT',
-        })
-      self.ProjectsGroup().AppendChild(project_ref)
-
-      ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
-      self._other_pbxprojects[other_pbxproject] = ref_dict
-      self.AppendProperty('projectReferences', ref_dict)
-
-      # Xcode seems to sort this list case-insensitively
-      self._properties['projectReferences'] = \
-          sorted(self._properties['projectReferences'], cmp=lambda x,y:
-                 cmp(x['ProjectRef'].Name().lower(),
-                     y['ProjectRef'].Name().lower()))
-    else:
-      # The link already exists. Pull out the relevant data.
-      project_ref_dict = self._other_pbxprojects[other_pbxproject]
-      product_group = project_ref_dict['ProductGroup']
-      project_ref = project_ref_dict['ProjectRef']
-
-    self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
-
-    return [product_group, project_ref]
-
-  def _SetUpProductReferences(self, other_pbxproject, product_group,
-                              project_ref):
-    # TODO(mark): This only adds references to products in other_pbxproject
-    # when they don't exist in this pbxproject. Perhaps it should also
-    # remove references from this pbxproject that are no longer present in
-    # other_pbxproject. Perhaps it should update various properties if they
-    # change.
-    for target in other_pbxproject._properties['targets']:
-      if not isinstance(target, PBXNativeTarget):
-        continue
-
-      other_fileref = target._properties['productReference']
-      if product_group.GetChildByRemoteObject(other_fileref) is None:
-        # Xcode sets remoteInfo to the name of the target and not the name
-        # of its product, despite this proxy being a reference to the product.
-        container_item = PBXContainerItemProxy({
-            'containerPortal': project_ref,
-            'proxyType': 2,
-            'remoteGlobalIDString': other_fileref,
-            'remoteInfo': target.Name()
-          })
-        # TODO(mark): Does sourceTree get copied straight over from the other
-        # project? Can the other project ever have lastKnownFileType here
-        # instead of explicitFileType? (Use it if so?) Can path ever be
-        # unset? (I don't think so.) Can other_fileref have name set, and
-        # does it impact the PBXReferenceProxy if so? These are the questions
-        # that perhaps will be answered one day.
- reference_proxy = PBXReferenceProxy({ - 'fileType': other_fileref._properties['explicitFileType'], - 'path': other_fileref._properties['path'], - 'sourceTree': other_fileref._properties['sourceTree'], - 'remoteRef': container_item, - }) - - product_group.AppendChild(reference_proxy) - - def SortRemoteProductReferences(self): - # For each remote project file, sort the associated ProductGroup in the - # same order that the targets are sorted in the remote project file. This - # is the sort order used by Xcode. - - def CompareProducts(x, y, remote_products): - # x and y are PBXReferenceProxy objects. Go through their associated - # PBXContainerItem to get the remote PBXFileReference, which will be - # present in the remote_products list. - x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString'] - y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString'] - x_index = remote_products.index(x_remote) - y_index = remote_products.index(y_remote) - - # Use the order of each remote PBXFileReference in remote_products to - # determine the sort order. - return cmp(x_index, y_index) - - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): - # Build up a list of products in the remote project file, ordered the - # same as the targets that produce them. - remote_products = [] - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - remote_products.append(target._properties['productReference']) - - # Sort the PBXReferenceProxy children according to the list of remote - # products. - product_group = ref_dict['ProductGroup'] - product_group._properties['children'] = sorted( - product_group._properties['children'], - cmp=lambda x, y: CompareProducts(x, y, remote_products)) - - -class XCProjectFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'archiveVersion': [0, int, 0, 1, 1], - 'classes': [0, dict, 0, 1, {}], - 'objectVersion': [0, int, 0, 1, 45], - 'rootObject': [0, PBXProject, 1, 1], - }) - - def SetXcodeVersion(self, version): - version_to_object_version = { - '2.4': 45, - '3.0': 45, - '3.1': 45, - '3.2': 46, - } - if not version in version_to_object_version: - supported_str = ', '.join(sorted(version_to_object_version.keys())) - raise Exception( - 'Unsupported Xcode version %s (supported: %s)' % - ( version, supported_str ) ) - compatibility_version = 'Xcode %s' % version - self._properties['rootObject'].SetProperty('compatibilityVersion', - compatibility_version) - self.SetProperty('objectVersion', version_to_object_version[version]); - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - # Although XCProjectFile is implemented here as an XCObject, it's not a - # proper object in the Xcode sense, and it certainly doesn't have its own - # ID. Pass through an attempt to update IDs to the real root object. - if recursive: - self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash) - - def Print(self, file=sys.stdout): - self.VerifyHasRequiredProperties() - - # Add the special "objects" property, which will be caught and handled - # separately during printing. This structure allows a fairly standard - # loop do the normal printing. 
-    self._properties['objects'] = {}
-    self._XCPrint(file, 0, '// !$*UTF8*$!\n')
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, '{ ')
-    else:
-      self._XCPrint(file, 0, '{\n')
-    for property, value in sorted(self._properties.iteritems(),
-                                  cmp=lambda x, y: cmp(x, y)):
-      if property == 'objects':
-        self._PrintObjects(file)
-      else:
-        self._XCKVPrint(file, 1, property, value)
-    self._XCPrint(file, 0, '}\n')
-    del self._properties['objects']
-
-  def _PrintObjects(self, file):
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, 'objects = {')
-    else:
-      self._XCPrint(file, 1, 'objects = {\n')
-
-    objects_by_class = {}
-    for object in self.Descendants():
-      if object == self:
-        continue
-      class_name = object.__class__.__name__
-      if not class_name in objects_by_class:
-        objects_by_class[class_name] = []
-      objects_by_class[class_name].append(object)
-
-    for class_name in sorted(objects_by_class):
-      self._XCPrint(file, 0, '\n')
-      self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
-      for object in sorted(objects_by_class[class_name],
-                           cmp=lambda x, y: cmp(x.id, y.id)):
-        object.Print(file)
-      self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
-
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, '}; ')
-    else:
-      self._XCPrint(file, 1, '};\n')
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,69 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Applies a fix to CR LF TAB handling in xml.dom.
-
-Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
-Working around this: http://bugs.python.org/issue5752
-TODO(bradnelson): Consider dropping this when we drop XP support.
-"""
-
-
-import xml.dom.minidom
-
-
-def _Replacement_write_data(writer, data, is_attrib=False):
-  """Writes datachars to writer."""
-  data = data.replace("&", "&amp;").replace("<", "&lt;")
-  data = data.replace("\"", "&quot;").replace(">", "&gt;")
-  if is_attrib:
-    data = data.replace(
-        "\r", "&#xD;").replace(
-        "\n", "&#xA;").replace(
-        "\t", "&#x9;")
-  writer.write(data)
-
-
-def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
-  # indent = current indentation
-  # addindent = indentation to add to higher levels
-  # newl = newline string
-  writer.write(indent+"<" + self.tagName)
-
-  attrs = self._get_attributes()
-  a_names = attrs.keys()
-  a_names.sort()
-
-  for a_name in a_names:
-    writer.write(" %s=\"" % a_name)
-    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
-    writer.write("\"")
-  if self.childNodes:
-    writer.write(">%s" % newl)
-    for node in self.childNodes:
-      node.writexml(writer, indent + addindent, addindent, newl)
-    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
-  else:
-    writer.write("/>%s" % newl)
-
-
-class XmlFix(object):
-  """Object to manage temporary patching of xml.dom.minidom."""
-
-  def __init__(self):
-    # Preserve current xml.dom.minidom functions.
-    self.write_data = xml.dom.minidom._write_data
-    self.writexml = xml.dom.minidom.Element.writexml
-    # Inject replacement versions of a function and a method.
- xml.dom.minidom._write_data = _Replacement_write_data - xml.dom.minidom.Element.writexml = _Replacement_writexml - - def Cleanup(self): - if self.write_data: - xml.dom.minidom._write_data = self.write_data - xml.dom.minidom.Element.writexml = self.writexml - self.write_data = None - - def __del__(self): - self.Cleanup() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylintrc tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylintrc --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/pylintrc 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/pylintrc 1970-01-01 00:00:00.000000000 +0000 @@ -1,307 +0,0 @@ -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Profiled execution. -profile=no - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Pickle collected data for later comparisons. -persistent=yes - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - - -[MESSAGES CONTROL] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). -# C0103: Invalid name "NN" (should match [a-z_][a-z0-9_]{2,30}$) -# C0111: Missing docstring -# C0302: Too many lines in module (NN) -# R0902: Too many instance attributes (N/7) -# R0903: Too few public methods (N/2) -# R0904: Too many public methods (NN/20) -# R0912: Too many branches (NN/12) -# R0913: Too many arguments (N/5) -# R0914: Too many local variables (NN/15) -# R0915: Too many statements (NN/50) -# W0141: Used builtin function 'map' -# W0142: Used * or ** magic -# W0232: Class has no __init__ method -# W0511: TODO -# W0603: Using the global statement -# -# These should be enabled eventually: -# C0112: Empty docstring -# C0301: Line too long (NN/80) -# C0321: More than one statement on single line -# C0322: Operator not preceded by a space -# C0323: Operator not followed by a space -# C0324: Comma not followed by a space -# E0101: Explicit return in __init__ -# E0102: function already defined line NN -# E1002: Use of super on an old style class -# E1101: Instance of 'XX' has no 'YY' member -# E1103: Instance of 'XX' has no 'XX' member (but some types could not be inferred) -# E0602: Undefined variable 'XX' -# F0401: Unable to import 'XX' -# R0201: Method could be a function -# R0801: Similar lines in N files -# W0102: Dangerous default value {} as argument -# W0104: Statement seems to have no effect -# W0105: String statement has no effect -# W0108: Lambda may not be necessary -# W0201: Attribute 'XX' defined outside __init__ -# W0212: Access to a protected member XX of a client class -# W0221: Arguments number differs from overridden method -# W0223: Method 'XX' is abstract in class 'YY' but is not overridden -# W0231: __init__ method from base class 'XX' is not called -# W0301: Unnecessary semicolon -# W0311: Bad indentation. 
Found NN spaces, expected NN -# W0401: Wildcard import XX -# W0402: Uses of a deprecated module 'string' -# W0403: Relative import 'XX', should be 'YY.XX' -# W0404: Reimport 'XX' (imported line NN) -# W0601: Global variable 'XX' undefined at the module level -# W0602: Using global for 'XX' but no assignment is done -# W0611: Unused import pprint -# W0612: Unused variable 'XX' -# W0613: Unused argument 'XX' -# W0614: Unused import XX from wildcard import -# W0621: Redefining name 'XX' from outer scope (line NN) -# W0622: Redefining built-in 'NN' -# W0631: Using possibly undefined loop variable 'XX' -# W0701: Raising a string exception -# W0702: No exception type(s) specified -disable=C0103,C0111,C0302,R0902,R0903,R0904,R0912,R0913,R0914,R0915,W0141,W0142,W0232,W0511,W0603,C0112,C0301,C0321,C0322,C0323,C0324,E0101,E0102,E1002,E1101,E1103,E0602,F0401,R0201,R0801,W0102,W0104,W0105,W0108,W0201,W0212,W0221,W0223,W0231,W0301,W0311,W0401,W0402,W0403,W0404,W0601,W0602,W0611,W0612,W0613,W0614,W0621,W0622,W0631,W0701,W0702 - - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html -output-format=text - -# Include message's id in output -include-ids=yes - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=no - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Add a comment according to your evaluation note. This is used by the global -# evaluation report (RP0004). -comment=no - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the beginning of the name of dummy variables -# (i.e. not used). -dummy-variables-rgx=_|dummy - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). -ignored-classes=SQLObject - -# When zope mode is activated, add a predefined set of Zope acquired attributes -# to generated-members. -zope=no - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E0201 when accessed. Python regular -# expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. 
-ignore-docstrings=yes - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=80 - -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - - -[BASIC] - -# Required attributes for module, separated by a comma -required-attributes= - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -function-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct list comprehension / -# generator expression variable names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Regular expression which should only match functions or classes name which do -# not require a docstring -no-docstring-rgx=__.*__ - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.* - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branchs=12 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - - -[CLASSES] - -# List of interface methods to ignore, separated by a comma. This is used for -# instance to not check methods defines in Zope's Interface base class. -ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. 
-valid-classmethod-first-arg=cls - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,string,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/samples/samples tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/samples/samples --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/samples/samples 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/samples/samples 1970-01-01 00:00:00.000000000 +0000 @@ -1,81 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os.path -import shutil -import sys - - -gyps = [ - 'app/app.gyp', - 'base/base.gyp', - 'build/temp_gyp/googleurl.gyp', - 'build/all.gyp', - 'build/common.gypi', - 'build/external_code.gypi', - 'chrome/test/security_tests/security_tests.gyp', - 'chrome/third_party/hunspell/hunspell.gyp', - 'chrome/chrome.gyp', - 'media/media.gyp', - 'net/net.gyp', - 'printing/printing.gyp', - 'sdch/sdch.gyp', - 'skia/skia.gyp', - 'testing/gmock.gyp', - 'testing/gtest.gyp', - 'third_party/bzip2/bzip2.gyp', - 'third_party/icu38/icu38.gyp', - 'third_party/libevent/libevent.gyp', - 'third_party/libjpeg/libjpeg.gyp', - 'third_party/libpng/libpng.gyp', - 'third_party/libxml/libxml.gyp', - 'third_party/libxslt/libxslt.gyp', - 'third_party/lzma_sdk/lzma_sdk.gyp', - 'third_party/modp_b64/modp_b64.gyp', - 'third_party/npapi/npapi.gyp', - 'third_party/sqlite/sqlite.gyp', - 'third_party/zlib/zlib.gyp', - 'v8/tools/gyp/v8.gyp', - 'webkit/activex_shim/activex_shim.gyp', - 'webkit/activex_shim_dll/activex_shim_dll.gyp', - 'webkit/build/action_csspropertynames.py', - 'webkit/build/action_cssvaluekeywords.py', - 'webkit/build/action_jsconfig.py', - 'webkit/build/action_makenames.py', - 'webkit/build/action_maketokenizer.py', - 'webkit/build/action_useragentstylesheets.py', - 'webkit/build/rule_binding.py', - 'webkit/build/rule_bison.py', - 'webkit/build/rule_gperf.py', - 'webkit/tools/test_shell/test_shell.gyp', - 'webkit/webkit.gyp', -] - - -def Main(argv): - if len(argv) != 3 or argv[1] not in ['push', 'pull']: - print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0] - return 1 - - path_to_chrome = argv[2] - - for g in gyps: - chrome_file = os.path.join(path_to_chrome, g) - local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1]) - if argv[1] == 'push': - print 'Copying %s to %s' % (local_file, chrome_file) - shutil.copyfile(local_file, chrome_file) - elif argv[1] == 'pull': - print 'Copying %s to %s' % (chrome_file, local_file) - shutil.copyfile(chrome_file, local_file) - else: - assert False - - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv)) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/samples/samples.bat tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/samples/samples.bat --- 
tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/samples/samples.bat 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/samples/samples.bat 1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-@rem Copyright (c) 2009 Google Inc. All rights reserved.
-@rem Use of this source code is governed by a BSD-style license that can be
-@rem found in the LICENSE file.
-
-@python %~dp0/samples %*
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/setup.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/setup.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/setup.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/setup.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from distutils.core import setup
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.command.install_scripts import install_scripts
-
-setup(
-  name='gyp',
-  version='0.1',
-  description='Generate Your Projects',
-  author='Chromium Authors',
-  author_email='chromium-dev@googlegroups.com',
-  url='http://code.google.com/p/gyp',
-  package_dir = {'': 'pylib'},
-  packages=['gyp', 'gyp.generator'],
-
-  scripts = ['gyp'],
-  cmdclass = {'install': install,
-              'install_lib': install_lib,
-              'install_scripts': install_scripts},
-)
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/README tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/README
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/README 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/README 1970-01-01 00:00:00.000000000 +0000
@@ -1,15 +0,0 @@
-pretty_vcproj:
-  Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
-
-  The key/value pairs are used to resolve vsprops names.
-
-  For example, if I want to diff the base.vcproj project:
-
-  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
-  pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
-
-  And you can use your favorite diff tool to see the changes.
-
-  Note: In the case of base.vcproj, the original vcproj is one level up the generated one.
-  I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
-  before you perform the diff.
\ No newline at end of file
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/Xcode/README tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/Xcode/README
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/Xcode/README 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/Xcode/README 1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3 please install both the gyp.pbfilespec and gyp.xclangspec files in
-
-~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-
-and restart Xcode.
\ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -/* - gyp.pbfilespec - GYP source file spec for Xcode 3 - - There is not much documentation available regarding the format - of .pbfilespec files. As a starting point, see for instance the - outdated documentation at: - http://maxao.free.fr/xcode-plugin-interface/specifications.html - and the files in: - /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ - - Place this file in directory: - ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ -*/ - -( - { - Identifier = sourcecode.gyp; - BasedOn = sourcecode; - Name = "GYP Files"; - Extensions = ("gyp", "gypi"); - MIMETypes = ("text/gyp"); - Language = "xcode.lang.gyp"; - IsTextFile = YES; - IsSourceFile = YES; - } -) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec 1970-01-01 00:00:00.000000000 +0000 @@ -1,226 +0,0 @@ -/* - Copyright (c) 2011 Google Inc. All rights reserved. - Use of this source code is governed by a BSD-style license that can be - found in the LICENSE file. - - gyp.xclangspec - GYP language specification for Xcode 3 - - There is not much documentation available regarding the format - of .xclangspec files. As a starting point, see for instance the - outdated documentation at: - http://maxao.free.fr/xcode-plugin-interface/specifications.html - and the files in: - /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ - - Place this file in directory: - ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ -*/ - -( - - { - Identifier = "xcode.lang.gyp.keyword"; - Syntax = { - Words = ( - "and", - "or", - " (caar gyp-parse-history) target-point) - (setq gyp-parse-history (cdr gyp-parse-history)))) - -(defun gyp-parse-point () - "The point of the last parse state added by gyp-parse-to." - (caar gyp-parse-history)) - -(defun gyp-parse-sections () - "A list of section symbols holding at the last parse state point." - (cdar gyp-parse-history)) - -(defun gyp-inside-dictionary-p () - "Predicate returning true if the parser is inside a dictionary." - (not (eq (cadar gyp-parse-history) 'list))) - -(defun gyp-add-parse-history (point sections) - "Add parse state SECTIONS to the parse history at POINT so that parsing can be - resumed instantly." - (while (>= (caar gyp-parse-history) point) - (setq gyp-parse-history (cdr gyp-parse-history))) - (setq gyp-parse-history (cons (cons point sections) gyp-parse-history))) - -(defun gyp-parse-to (target-point) - "Parses from (point) to TARGET-POINT adding the parse state information to - gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a - string literal has been parsed. 
Returns nil if no further parsing can be - done, otherwise returns the position of the start of a parsed string, leaving - the point at the end of the string." - (let ((parsing t) - string-start) - (while parsing - (setq string-start nil) - ;; Parse up to a character that starts a sexp, or if the nesting - ;; level decreases. - (let ((state (parse-partial-sexp (gyp-parse-point) - target-point - -1 - t)) - (sections (gyp-parse-sections))) - (if (= (nth 0 state) -1) - (setq sections (cdr sections)) ; pop out a level - (cond ((looking-at-p "['\"]") ; a string - (setq string-start (point)) - (goto-char (scan-sexps (point) 1)) - (if (gyp-inside-dictionary-p) - ;; Look for sections inside a dictionary - (let ((section (gyp-section-name - (buffer-substring-no-properties - (+ 1 string-start) - (- (point) 1))))) - (setq sections (cons section (cdr sections))))) - ;; Stop after the string so it can be fontified. - (setq target-point (point))) - ((looking-at-p "{") - ;; Inside a dictionary. Increase nesting. - (forward-char 1) - (setq sections (cons 'unknown sections))) - ((looking-at-p "\\[") - ;; Inside a list. Increase nesting - (forward-char 1) - (setq sections (cons 'list sections))) - ((not (eobp)) - ;; other - (forward-char 1)))) - (gyp-add-parse-history (point) sections) - (setq parsing (< (point) target-point)))) - string-start)) - -(defun gyp-section-at-point () - "Transform the last parse state, which is a list of nested sections and return - the section symbol that should be used to determine font-lock information for - the string. Can return nil indicating the string should not have any attached - section." - (let ((sections (gyp-parse-sections))) - (cond - ((eq (car sections) 'conditions) - ;; conditions can occur in a variables section, but we still want to - ;; highlight it as a keyword. - nil) - ((and (eq (car sections) 'list) - (eq (cadr sections) 'list)) - ;; conditions and sources can have items in [[ ]] - (caddr sections)) - (t (cadr sections))))) - -(defun gyp-section-match (limit) - "Parse from (point) to LIMIT returning by means of match data what was - matched. The group of the match indicates what style font-lock should apply. - See also `gyp-add-font-lock-keywords'." - (gyp-invalidate-parse-states-after (point)) - (let ((group nil) - (string-start t)) - (while (and (< (point) limit) - (not group) - string-start) - (setq string-start (gyp-parse-to limit)) - (if string-start - (setq group (case (gyp-section-at-point) - ('dependencies 1) - ('variables 2) - ('conditions 2) - ('sources 3) - ('defines 4) - (nil nil))))) - (if group - (progn - ;; Set the match data to indicate to the font-lock mechanism the - ;; highlighting to be performed. - (set-match-data (append (list string-start (point)) - (make-list (* (1- group) 2) nil) - (list (1+ string-start) (1- (point))))) - t)))) - -;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for -;;; canonical list of keywords. -(defun gyp-add-font-lock-keywords () - "Add gyp-mode keywords to font-lock mechanism." - ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match - ;; so that we can do the font-locking in a single font-lock pass. 
- (font-lock-add-keywords - nil - (list - ;; Top-level keywords - (list (concat "['\"]\\(" - (regexp-opt (list "action" "action_name" "actions" "cflags" - "conditions" "configurations" "copies" "defines" - "dependencies" "destination" - "direct_dependent_settings" - "export_dependent_settings" "extension" "files" - "include_dirs" "includes" "inputs" "libraries" - "link_settings" "mac_bundle" "message" - "msvs_external_rule" "outputs" "product_name" - "process_outputs_as_sources" "rules" "rule_name" - "sources" "suppress_wildcard" - "target_conditions" "target_defaults" - "target_defines" "target_name" "toolsets" - "targets" "type" "variables" "xcode_settings")) - "[!/+=]?\\)") 1 'font-lock-keyword-face t) - ;; Type of target - (list (concat "['\"]\\(" - (regexp-opt (list "loadable_module" "static_library" - "shared_library" "executable" "none")) - "\\)") 1 'font-lock-type-face t) - (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1 - 'font-lock-function-name-face t) - (list 'gyp-section-match - (list 1 'font-lock-function-name-face t t) ; dependencies - (list 2 'font-lock-variable-name-face t t) ; variables, conditions - (list 3 'font-lock-constant-face t t) ; sources - (list 4 'font-lock-preprocessor-face t t)) ; preprocessor - ;; Variable expansion - (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t) - ;; Command expansion - (list " "%s"' % (src, dst) - - print '}' - - -def main(): - if len(sys.argv) < 2: - print >>sys.stderr, __doc__ - print >>sys.stderr - print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) - return 1 - - edges = LoadEdges('dump.json', sys.argv[1:]) - - WriteGraph(edges) - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/pretty_gyp.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/pretty_gyp.py --- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/pretty_gyp.py 2013-07-02 17:57:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/pretty_gyp.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,155 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Pretty-prints the contents of a GYP file.""" - -import sys -import re - - -# Regex to remove comments when we're counting braces. -COMMENT_RE = re.compile(r'\s*#.*') - -# Regex to remove quoted strings when we're counting braces. -# It takes into account quoted quotes, and makes sure that the quotes match. -# NOTE: It does not handle quotes that span more than one line, or -# cases where an escaped quote is preceeded by an escaped backslash. -QUOTE_RE_STR = r'(?P[\'"])(.*?)(? 0: - after = True - - # This catches the special case of a closing brace having something - # other than just whitespace ahead of it -- we don't want to - # unindent that until after this line is printed so it stays with - # the previous indentation level. - if cnt < 0 and closing_prefix_re.match(stripline): - after = True - return (cnt, after) - - -def prettyprint_input(lines): - """Does the main work of indenting the input based on the brace counts.""" - indent = 0 - basic_offset = 2 - last_line = "" - for line in lines: - if COMMENT_RE.match(line): - print line - else: - line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. 
-      if len(line) > 0:
-        (brace_diff, after) = count_braces(line)
-        if brace_diff != 0:
-          if after:
-            print " " * (basic_offset * indent) + line
-            indent += brace_diff
-          else:
-            indent += brace_diff
-            print " " * (basic_offset * indent) + line
-        else:
-          print " " * (basic_offset * indent) + line
-      else:
-        print ""
-      last_line = line
-
-
-def main():
-  if len(sys.argv) > 1:
-    data = open(sys.argv[1]).read().splitlines()
-  else:
-    data = sys.stdin.read().splitlines()
-  # Split up the double braces.
-  lines = split_double_braces(data)
-
-  # Indent and print the output.
-  prettyprint_input(lines)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/pretty_sln.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/pretty_sln.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/pretty_sln.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/pretty_sln.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,168 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Prints the information in a sln file in a diffable way.
-
-   It first outputs each project in alphabetical order with its
-   dependencies.
-
-   Then it outputs a possible build order.
-"""
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
-
-import os
-import re
-import sys
-import pretty_vcproj
-
-def BuildProject(project, built, projects, deps):
-  # If all dependencies are done, we can build it; otherwise we try to build
-  # the dependency first.
-  # This is not infinite-recursion proof.
-  for dep in deps[project]:
-    if dep not in built:
-      BuildProject(dep, built, projects, deps)
-  print project
-  built.append(project)
-
-def ParseSolution(solution_file):
-  # All projects, their clsid and paths.
-  projects = dict()
-
-  # A list of dependencies associated with a project.
-  dependencies = dict()
-
-  # Regular expressions that match the SLN format.
-  # The first line of a project definition.
-  begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
-                              '}"\) = "(.*)", "(.*)", "(.*)"$'))
-  # The last line of a project definition.
-  end_project = re.compile('^EndProject$')
-  # The first line of a dependency list.
-  begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
-  # The last line of a dependency list.
-  end_dep = re.compile('EndProjectSection$')
-  # A line describing a dependency.
-  dep_line = re.compile(' *({.*}) = ({.*})$')
-
-  in_deps = False
-  solution = open(solution_file)
-  for line in solution:
-    results = begin_project.search(line)
-    if results:
-      # Hack to remove icu because the diff is too different.
-      if results.group(1).find('icu') != -1:
-        continue
-      # We remove "_gyp" from the names because it helps to diff them.
-      current_project = results.group(1).replace('_gyp', '')
-      projects[current_project] = [results.group(2).replace('_gyp', ''),
-                                   results.group(3),
-                                   results.group(2)]
-      dependencies[current_project] = []
-      continue
-
-    results = end_project.search(line)
-    if results:
-      current_project = None
-      continue
-
-    results = begin_dep.search(line)
-    if results:
-      in_deps = True
-      continue
-
-    results = end_dep.search(line)
-    if results:
-      in_deps = False
-      continue
-
-    results = dep_line.search(line)
-    if results and in_deps and current_project:
-      dependencies[current_project].append(results.group(1))
-      continue
-
-  # Change all dependencies clsid to name instead.
-  for project in dependencies:
-    # For each dependency in this project
-    new_dep_array = []
-    for dep in dependencies[project]:
-      # Look for the project name matching this clsid
-      for project_info in projects:
-        if projects[project_info][1] == dep:
-          new_dep_array.append(project_info)
-    dependencies[project] = sorted(new_dep_array)
-
-  return (projects, dependencies)
-
-def PrintDependencies(projects, deps):
-  print "---------------------------------------"
-  print "Dependencies for all projects"
-  print "---------------------------------------"
-  print "-- --"
-
-  for (project, dep_list) in sorted(deps.items()):
-    print "Project : %s" % project
-    print "Path : %s" % projects[project][0]
-    if dep_list:
-      for dep in dep_list:
-        print " - %s" % dep
-    print ""
-
-  print "-- --"
-
-def PrintBuildOrder(projects, deps):
-  print "---------------------------------------"
-  print "Build order "
-  print "---------------------------------------"
-  print "-- --"
-
-  built = []
-  for (project, _) in sorted(deps.items()):
-    if project not in built:
-      BuildProject(project, built, projects, deps)
-
-  print "-- --"
-
-def PrintVCProj(projects):
-
-  for project in projects:
-    print "-------------------------------------"
-    print "-------------------------------------"
-    print project
-    print project
-    print project
-    print "-------------------------------------"
-    print "-------------------------------------"
-
-    project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
-                                                projects[project][2]))
-
-    pretty = pretty_vcproj
-    argv = [ '',
-             project_path,
-             '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
-           ]
-    argv.extend(sys.argv[3:])
-    pretty.main(argv)
-
-def main():
-  # Check if we have exactly 1 parameter.
-  if len(sys.argv) < 2:
-    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
-    return 1
-
-  (projects, deps) = ParseSolution(sys.argv[1])
-  PrintDependencies(projects, deps)
-  PrintBuildOrder(projects, deps)
-
-  if '--recursive' in sys.argv:
-    PrintVCProj(projects)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/pretty_vcproj.py tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/gyp/tools/pretty_vcproj.py 2013-07-02 17:57:25.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/gyp/tools/pretty_vcproj.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,329 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Make the format of a vcproj really pretty.
-
-   This script normalizes and sorts an xml. It also fetches all the properties
-   inside linked vsprops and includes them explicitly in the vcproj.
-
-   It outputs the resulting xml to stdout.
-"""
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
-
-import os
-import sys
-
-from xml.dom.minidom import parse
-from xml.dom.minidom import Node
-
-REPLACEMENTS = dict()
-ARGUMENTS = None
-
-
-class CmpTuple(object):
-  """Compare function between 2 tuples."""
-  def __call__(self, x, y):
-    return cmp(x[0], y[0])
-
-
-class CmpNode(object):
-  """Compare function between 2 xml nodes."""
-
-  def __call__(self, x, y):
-    def get_string(node):
-      node_string = "node"
-      node_string += node.nodeName
-      if node.nodeValue:
-        node_string += node.nodeValue
-
-      if node.attributes:
-        # We first sort by name, if present.
-        node_string += node.getAttribute("Name")
-
-        all_nodes = []
-        for (name, value) in node.attributes.items():
-          all_nodes.append((name, value))
-
-        all_nodes.sort(CmpTuple())
-        for (name, value) in all_nodes:
-          node_string += name
-          node_string += value
-
-      return node_string
-
-    return cmp(get_string(x), get_string(y))
-
-
-def PrettyPrintNode(node, indent=0):
-  if node.nodeType == Node.TEXT_NODE:
-    if node.data.strip():
-      print '%s%s' % (' '*indent, node.data.strip())
-    return
-
-  if node.childNodes:
-    node.normalize()
-  # Get the number of attributes
-  attr_count = 0
-  if node.attributes:
-    attr_count = node.attributes.length
-
-  # Print the main tag
-  if attr_count == 0:
-    print '%s<%s>' % (' '*indent, node.nodeName)
-  else:
-    print '%s<%s' % (' '*indent, node.nodeName)
-
-    all_attributes = []
-    for (name, value) in node.attributes.items():
-      all_attributes.append((name, value))
-    all_attributes.sort(CmpTuple())
-    for (name, value) in all_attributes:
-      print '%s %s="%s"' % (' '*indent, name, value)
-    print '%s>' % (' '*indent)
-  if node.nodeValue:
-    print '%s %s' % (' '*indent, node.nodeValue)
-
-  for sub_node in node.childNodes:
-    PrettyPrintNode(sub_node, indent=indent+2)
-  print '%s</%s>' % (' '*indent, node.nodeName)
-
-
-def FlattenFilter(node):
-  """Returns a list of all the node and sub nodes."""
-  node_list = []
-
-  if (node.attributes and
-      node.getAttribute('Name') == '_excluded_files'):
-      # We don't add the "_excluded_files" filter.
-      return []
-
-  for current in node.childNodes:
-    if current.nodeName == 'Filter':
-      node_list.extend(FlattenFilter(current))
-    else:
-      node_list.append(current)
-
-  return node_list
-
-
-def FixFilenames(filenames, current_directory):
-  new_list = []
-  for filename in filenames:
-    if filename:
-      for key in REPLACEMENTS:
-        filename = filename.replace(key, REPLACEMENTS[key])
-      os.chdir(current_directory)
-      filename = filename.strip('"\' ')
-      if filename.startswith('$'):
-        new_list.append(filename)
-      else:
-        new_list.append(os.path.abspath(filename))
-  return new_list
-
-
-def AbsoluteNode(node):
-  """Makes all the properties we know about in this node absolute."""
-  if node.attributes:
-    for (name, value) in node.attributes.items():
-      if name in ['InheritedPropertySheets', 'RelativePath',
-                  'AdditionalIncludeDirectories',
-                  'IntermediateDirectory', 'OutputDirectory',
-                  'AdditionalLibraryDirectories']:
-        # We want to fix up these paths
-        path_list = value.split(';')
-        new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
-        node.setAttribute(name, ';'.join(new_list))
-      if not value:
-        node.removeAttribute(name)
-
-
-def CleanupVcproj(node):
-  """For each sub node, we call recursively this function."""
-  for sub_node in node.childNodes:
-    AbsoluteNode(sub_node)
-    CleanupVcproj(sub_node)
-
-  # Normalize the node, and remove all extraneous whitespace.
-  for sub_node in node.childNodes:
-    if sub_node.nodeType == Node.TEXT_NODE:
-      sub_node.data = sub_node.data.replace("\r", "")
-      sub_node.data = sub_node.data.replace("\n", "")
-      sub_node.data = sub_node.data.rstrip()
-
-  # Fix all the semicolon separated attributes to be sorted, and we also
-  # remove the dups.
-  if node.attributes:
-    for (name, value) in node.attributes.items():
-      sorted_list = sorted(value.split(';'))
-      unique_list = []
-      for i in sorted_list:
-        if not unique_list.count(i):
-          unique_list.append(i)
-      node.setAttribute(name, ';'.join(unique_list))
-      if not value:
-        node.removeAttribute(name)
-
-  if node.childNodes:
-    node.normalize()
-
-  # For each node, take a copy, and remove it from the list.
-  node_array = []
-  while node.childNodes and node.childNodes[0]:
-    # Take a copy of the node and remove it from the list.
-    current = node.childNodes[0]
-    node.removeChild(current)
-
-    # If the child is a filter, we want to append all its children
-    # to this same list.
-    if current.nodeName == 'Filter':
-      node_array.extend(FlattenFilter(current))
-    else:
-      node_array.append(current)
-
-
-  # Sort the list.
-  node_array.sort(CmpNode())
-
-  # Insert the nodes in the correct order.
-  for new_node in node_array:
-    # But don't append empty tool node.
-    if new_node.nodeName == 'Tool':
-      if new_node.attributes and new_node.attributes.length == 1:
-        # This one was empty.
-        continue
-    if new_node.nodeName == 'UserMacro':
-      continue
-    node.appendChild(new_node)
-
-
-def GetConfiguationNodes(vcproj):
-  #TODO(nsylvain): Find a better way to navigate the xml.
-  nodes = []
-  for node in vcproj.childNodes:
-    if node.nodeName == "Configurations":
-      for sub_node in node.childNodes:
-        if sub_node.nodeName == "Configuration":
-          nodes.append(sub_node)
-
-  return nodes
-
-
-def GetChildrenVsprops(filename):
-  dom = parse(filename)
-  if dom.documentElement.attributes:
-    vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
-    return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
-  return []
-
-def SeekToNode(node1, child2):
-  # A text node does not have properties.
-  if child2.nodeType == Node.TEXT_NODE:
-    return None
-
-  # Get the name of the current node.
-  current_name = child2.getAttribute("Name")
-  if not current_name:
-    # There is no name. We don't know how to merge.
-    return None
-
-  # Look through all the nodes to find a match.
-  for sub_node in node1.childNodes:
-    if sub_node.nodeName == child2.nodeName:
-      name = sub_node.getAttribute("Name")
-      if name == current_name:
-        return sub_node
-
-  # No match. We give up.
-  return None
-
-
-def MergeAttributes(node1, node2):
-  # No attributes to merge?
-  if not node2.attributes:
-    return
-
-  for (name, value2) in node2.attributes.items():
-    # Don't merge the 'Name' attribute.
-    if name == 'Name':
-      continue
-    value1 = node1.getAttribute(name)
-    if value1:
-      # The attribute exists in the main node. If it's equal, we leave it
-      # untouched, otherwise we concatenate it.
-      if value1 != value2:
-        node1.setAttribute(name, ';'.join([value1, value2]))
-    else:
-      # The attribute does not exist in the main node. We append this one.
-      node1.setAttribute(name, value2)
-
-    # If the attribute was a property sheet attribute, we remove it, since
-    # they are useless.
-    if name == 'InheritedPropertySheets':
-      node1.removeAttribute(name)
-
-
-def MergeProperties(node1, node2):
-  MergeAttributes(node1, node2)
-  for child2 in node2.childNodes:
-    child1 = SeekToNode(node1, child2)
-    if child1:
-      MergeProperties(child1, child2)
-    else:
-      node1.appendChild(child2.cloneNode(True))
-
-
-def main(argv):
-  """Main function of this vcproj prettifier."""
-  global ARGUMENTS
-  ARGUMENTS = argv
-
-  # Check if we have exactly 1 parameter.
-  if len(argv) < 2:
-    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
-           '[key2=value2]' % argv[0])
-    return 1
-
-  # Parse the keys
-  for i in range(2, len(argv)):
-    (key, value) = argv[i].split('=')
-    REPLACEMENTS[key] = value
-
-  # Open the vcproj and parse the xml.
-  dom = parse(argv[1])
-
-  # First thing we need to do is find the Configuration Node and merge them
-  # with the vsprops they include.
-  for configuration_node in GetConfiguationNodes(dom.documentElement):
-    # Get the property sheets associated with this configuration.
-    vsprops = configuration_node.getAttribute('InheritedPropertySheets')
-
-    # Fix the filenames to be absolute.
-    vsprops_list = FixFilenames(vsprops.strip().split(';'),
-                                os.path.dirname(argv[1]))
-
-    # Extend the list of vsprops with all vsprops contained in the current
-    # vsprops.
-    for current_vsprops in vsprops_list:
-      vsprops_list.extend(GetChildrenVsprops(current_vsprops))
-
-    # Now that we have all the vsprops, we need to merge them.
-    for current_vsprops in vsprops_list:
-      MergeProperties(configuration_node,
-                      parse(current_vsprops).documentElement)
-
-  # Now that everything is merged, we need to cleanup the xml.
-  CleanupVcproj(dom.documentElement)
-
-  # Finally, we use the pretty xml function to print the vcproj back to the
-  # user.
-  #print dom.toprettyxml(newl="\n")
-  PrettyPrintNode(dom.documentElement)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/build.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/build.js
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/build.js 2013-06-05 21:09:06.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/build.js 1970-01-01 00:00:00.000000000 +0000
@@ -1,275 +0,0 @@
-
-module.exports = exports = build
-
-/**
- * Module dependencies.
- */
-
-var fs = require('graceful-fs')
-  , rm = require('rimraf')
-  , path = require('path')
-  , glob = require('glob')
-  , log = require('npmlog')
-  , which = require('which')
-  , mkdirp = require('mkdirp')
-  , exec = require('child_process').exec
-  , win = process.platform == 'win32'
-
-exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module'
-
-function build (gyp, argv, callback) {
-
-  var makeCommand = gyp.opts.make || process.env.MAKE
-                 || (process.platform.indexOf('bsd') != -1 ? 'gmake' : 'make')
-    , command = win ? 'msbuild' : makeCommand
-    , buildDir = path.resolve('build')
-    , configPath = path.resolve(buildDir, 'config.gypi')
-    , jobs = gyp.opts.jobs || process.env.JOBS
-    , buildType
-    , config
-    , arch
-    , nodeDir
-    , copyDevLib
-
-  loadConfigGypi()
-
-  /**
-   * Load the "config.gypi" file that was generated during "configure".
- */ - - function loadConfigGypi () { - fs.readFile(configPath, 'utf8', function (err, data) { - if (err) { - if (err.code == 'ENOENT') { - callback(new Error('You must run `node-gyp configure` first!')) - } else { - callback(err) - } - return - } - config = JSON.parse(data.replace(/\#.+\n/, '')) - - // get the 'arch', 'buildType', and 'nodeDir' vars from the config - buildType = config.target_defaults.default_configuration - arch = config.variables.target_arch - nodeDir = config.variables.nodedir - copyDevLib = config.variables.copy_dev_lib == 'true' - - if ('debug' in gyp.opts) { - buildType = gyp.opts.debug ? 'Debug' : 'Release' - } - if (!buildType) { - buildType = 'Release' - } - - log.verbose('build type', buildType) - log.verbose('architecture', arch) - log.verbose('node dev dir', nodeDir) - - if (win) { - findSolutionFile() - } else { - doWhich() - } - }) - } - - /** - * On Windows, find the first build/*.sln file. - */ - - function findSolutionFile () { - glob('build/*.sln', function (err, files) { - if (err) return callback(err) - if (files.length === 0) { - return callback(new Error('Could not find *.sln file. Did you run "configure"?')) - } - guessedSolution = files[0] - log.verbose('found first Solution file', guessedSolution) - doWhich() - }) - } - - /** - * Uses node-which to locate the msbuild / make executable. - */ - - function doWhich () { - // First make sure we have the build command in the PATH - which(command, function (err, execPath) { - if (err) { - if (win && /not found/.test(err.message)) { - // On windows and no 'msbuild' found. Let's guess where it is - findMsbuild() - } else { - // Some other error or 'make' not found on Unix, report that to the user - callback(err) - } - return - } - log.verbose('`which` succeeded for `' + command + '`', execPath) - copyNodeLib() - }) - } - - /** - * Search for the location of "msbuild.exe" file on Windows. - */ - - function findMsbuild () { - log.verbose('could not find "msbuild.exe" in PATH - finding location in registry') - var notfoundErr = new Error('Can\'t find "msbuild.exe". Do you have Microsoft Visual Studio C++ 2008+ installed?') - var cmd = 'reg query "HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions" /s' - if (process.arch !== 'ia32') - cmd += ' /reg:32' - exec(cmd, function (err, stdout, stderr) { - var reVers = /ToolsVersions\\([^\\]+)$/i - , rePath = /\r\n[ \t]+MSBuildToolsPath[ \t]+REG_SZ[ \t]+([^\r]+)/i - , msbuilds = [] - , r - , msbuildPath - if (err) { - return callback(notfoundErr) - } - stdout.split('\r\n\r\n').forEach(function(l) { - if (!l) return - l = l.trim() - if (r = reVers.exec(l.substring(0, l.indexOf('\r\n')))) { - var ver = parseFloat(r[1], 10) - if (ver >= 3.5) { - if (r = rePath.exec(l)) { - msbuilds.push({ - version: ver, - path: r[1] - }) - } - } - } - }) - msbuilds.sort(function (x, y) { - return (x.version < y.version ? -1 : 1) - }) - ;(function verifyMsbuild () { - if (!msbuilds.length) return callback(notfoundErr); - msbuildPath = path.resolve(msbuilds.pop().path, 'msbuild.exe') - fs.stat(msbuildPath, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - if (msbuilds.length) { - return verifyMsbuild() - } else { - callback(notfoundErr) - } - } else { - callback(err) - } - return - } - command = msbuildPath - copyNodeLib() - }) - })() - }) - } - - /** - * Copies the node.lib file for the current target architecture into the - * current proper dev dir location. 
- */ - - function copyNodeLib () { - if (!win || !copyDevLib) return doBuild() - - var buildDir = path.resolve(nodeDir, buildType) - , archNodeLibPath = path.resolve(nodeDir, arch, 'node.lib') - , buildNodeLibPath = path.resolve(buildDir, 'node.lib') - - mkdirp(buildDir, function (err, isNew) { - if (err) return callback(err) - log.verbose('"' + buildType + '" dir needed to be created?', isNew) - var rs = fs.createReadStream(archNodeLibPath) - , ws = fs.createWriteStream(buildNodeLibPath) - log.verbose('copying "node.lib" for ' + arch, buildNodeLibPath) - rs.pipe(ws) - rs.on('error', callback) - ws.on('error', callback) - rs.on('end', doBuild) - }) - } - - /** - * Actually spawn the process and compile the module. - */ - - function doBuild () { - - // Enable Verbose build - var verbose = log.levels[log.level] <= log.levels.verbose - if (!win && verbose) { - argv.push('V=1') - } - if (win && !verbose) { - argv.push('/clp:Verbosity=minimal') - } - - if (win) { - // Turn off the Microsoft logo on Windows - argv.push('/nologo') - } - - // Specify the build type, Release by default - if (win) { - var p = arch === 'x64' ? 'x64' : 'Win32' - argv.push('/p:Configuration=' + buildType + ';Platform=' + p) - if (jobs) { - if (!isNaN(parseInt(jobs, 10))) { - argv.push('/m:' + parseInt(jobs, 10)) - } else if (jobs.toUpperCase() === 'MAX') { - argv.push('/m:' + require('os').cpus().length) - } - } - } else { - argv.push('BUILDTYPE=' + buildType) - // Invoke the Makefile in the 'build' dir. - argv.push('-C') - argv.push('build') - if (jobs) { - if (!isNaN(parseInt(jobs, 10))) { - argv.push('--jobs') - argv.push(parseInt(jobs, 10)) - } else if (jobs.toUpperCase() === 'MAX') { - argv.push('--jobs') - argv.push(require('os').cpus().length) - } - } - } - - if (win) { - // did the user specify their own .sln file? - var hasSln = argv.some(function (arg) { - return path.extname(arg) == '.sln' - }) - if (!hasSln) { - argv.unshift(gyp.opts.solution || guessedSolution) - } - } - - var proc = gyp.spawn(command, argv) - proc.on('exit', onExit) - } - - /** - * Invoked after the make/msbuild command exits. - */ - - function onExit (code, signal) { - if (code !== 0) { - return callback(new Error('`' + command + '` failed with exit code: ' + code)) - } - if (signal) { - return callback(new Error('`' + command + '` got signal: ' + signal)) - } - callback() - } - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/clean.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/clean.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/clean.js 2012-06-29 20:31:36.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/clean.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ - -module.exports = exports = clean - -exports.usage = 'Removes any generated build files and the "out" dir' - -/** - * Module dependencies. - */ - -var rm = require('rimraf') -var log = require('npmlog') - - -function clean (gyp, argv, callback) { - - // Remove the 'build' dir - var buildDir = 'build' - - log.verbose('clean', 'removing "%s" directory', buildDir) - rm(buildDir, callback) - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/configure.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/configure.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/configure.js 2013-08-25 00:38:12.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/configure.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,431 +0,0 @@ - -module.exports = exports = configure - -/** - * Module dependencies. 
- */ - -var fs = require('graceful-fs') - , path = require('path') - , glob = require('glob') - , log = require('npmlog') - , osenv = require('osenv') - , which = require('which') - , semver = require('semver') - , mkdirp = require('mkdirp') - , cp = require('child_process') - , spawn = cp.spawn - , execFile = cp.execFile - , win = process.platform == 'win32' - -exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module' - -function configure (gyp, argv, callback) { - - var python = gyp.opts.python || process.env.PYTHON || 'python' - , buildDir = path.resolve('build') - , hasVCExpress = false - , hasVC2012Express = false - , hasWin71SDK = false - , hasWin8SDK = false - , configNames = [ 'config.gypi', 'common.gypi' ] - , configs = [] - , nodeDir - - - if (win) { - checkVCExpress(function () { - if (hasVCExpress || hasVC2012Express) { - checkWinSDK(function () { - checkPython() - }) - } else { - checkPython() - } - }) - } else { - checkPython() - } - - // Check if Python is in the $PATH - function checkPython () { - log.verbose('check python', 'checking for Python executable "%s" in the PATH', python) - which(python, function (err, execPath) { - if (err) { - log.verbose('`which` failed', python, err) - if (win) { - guessPython() - } else { - failNoPython() - } - } else { - log.verbose('`which` succeeded', python, execPath) - checkPythonVersion() - } - }) - } - - // Called on Windows when "python" isn't available in the current $PATH. - // We're gonna check if "%SystemDrive%\python27\python.exe" exists. - function guessPython () { - log.verbose('could not find "' + python + '". guessing location') - var rootDir = process.env.SystemDrive || 'C:\\' - if (rootDir[rootDir.length - 1] !== '\\') { - rootDir += '\\' - } - var pythonPath = path.resolve(rootDir, 'Python27', 'python.exe') - log.verbose('ensuring that file exists:', pythonPath) - fs.stat(pythonPath, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - failNoPython() - } else { - callback(err) - } - return - } - python = pythonPath - checkPythonVersion() - }) - } - - function checkPythonVersion () { - var env = { TERM: 'dumb', PATH: process.env.PATH }; - execFile(python, ['-c', 'import platform; print(platform.python_version());'], { env: env }, function (err, stdout) { - if (err) { - return callback(err) - } - log.verbose('check python version', '`%s -c "import platform; print(platform.python_version());"` returned: %j', python, stdout) - var version = stdout.trim() - if (~version.indexOf('+')) { - log.silly('stripping "+" sign(s) from version') - version = version.replace(/\+/g, '') - } - if (~version.indexOf('rc')) { - log.silly('stripping "rc" identifier from version') - version = version.replace(/rc(.*)$/ig, '') - } - var range = semver.Range('>=2.5.0 <3.0.0'); - if (range.test(version)) { - getNodeDir() - } else { - failPythonVersion(version) - } - }) - } - - function failNoPython () { - callback(new Error('Can\'t find Python executable "' + python + - '", you can set the PYTHON env variable.')) - } - - function failPythonVersion (badVersion) { - callback(new Error('Python executable "' + python + - '" is v' + badVersion + ', which is not supported by gyp.\n' + - 'You can pass the --python switch to point to Python >= v2.5.0 & < 3.0.0.')) - } - - function checkWinSDK(cb) { - checkWin71SDK(function() { - checkWin8SDK(cb) - }) - } - - function checkWin71SDK(cb) { - spawn('reg', ['query', 'HKLM\\Software\\Microsoft\\Microsoft SDKs\\Windows\\v7.1', '/v', 'InstallationFolder']) 
- .on('exit', function (code) { - hasWin71SDK = (code === 0) - cb() - }) - } - - function checkWin8SDK(cb) { - var cp = spawn('reg', ['query', 'HKLM\\Software\\Microsoft\\Windows Kits\\Installed Products', '/f', 'Windows Software Development Kit x86', '/reg:32']) - cp.on('exit', function (code) { - hasWin8SDK = (code === 0) - cb() - }) - } - - function checkVC2012Express64(cb) { - var cp = spawn('reg', ['query', 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\VCExpress\\11.0\\Setup\\VC', '/v', 'ProductDir']) - cp.on('exit', function (code) { - hasVC2012Express = (code === 0) - cb() - }) - } - - function checkVC2012Express(cb) { - var cp = spawn('reg', ['query', 'HKLM\\SOFTWARE\\Microsoft\\VCExpress\\11.0\\Setup\\VC', '/v', 'ProductDir']) - cp.on('exit', function (code) { - hasVC2012Express = (code === 0) - if (code !== 0) { - checkVC2012Express64(cb) - } else { - cb() - } - }) - } - - function checkVCExpress64(cb) { - var cp = spawn('cmd', ['/C', '%WINDIR%\\SysWOW64\\reg', 'query', 'HKLM\\Software\\Microsoft\\VCExpress\\10.0\\Setup\\VC', '/v', 'ProductDir']) - cp.on('exit', function (code) { - hasVCExpress = (code === 0) - checkVC2012Express(cb) - }) - } - - function checkVCExpress(cb) { - spawn('reg', ['query', 'HKLM\\Software\\Microsoft\\VCExpress\\10.0\\Setup\\VC', '/v', 'ProductDir']) - .on('exit', function (code) { - if (code !== 0) { - checkVCExpress64(cb) - } else { - checkVC2012Express(cb) - } - }) - } - - function getNodeDir () { - - // 'python' should be set by now - process.env.PYTHON = python - - if (gyp.opts.nodedir) { - // --nodedir was specified. use that for the dev files - nodeDir = gyp.opts.nodedir.replace(/^~/, osenv.home()) - - log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir) - createBuildDir() - - } else { - // if no --nodedir specified, ensure node dependencies are installed - var version - var versionStr - - if (gyp.opts.target) { - // if --target was given, then determine a target version to compile for - versionStr = gyp.opts.target - log.verbose('get node dir', 'compiling against --target node version: %s', versionStr) - } else { - // if no --target was specified then use the current host node version - versionStr = process.version - log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', versionStr) - } - - // make sure we have a valid version - try { - version = semver.parse(versionStr) - } catch (e) { - return callback(e) - } - if (!version) { - return callback(new Error('Invalid version number: ' + versionStr)) - } - - // ensure that the target node version's dev files are installed - gyp.opts.ensure = true - gyp.commands.install([ versionStr ], function (err, version) { - if (err) return callback(err) - log.verbose('get node dir', 'target node version installed:', version) - nodeDir = path.resolve(gyp.devDir, version) - createBuildDir() - }) - } - } - - function createBuildDir () { - log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir) - mkdirp(buildDir, function (err, isNew) { - if (err) return callback(err) - log.verbose('build dir', '"build" dir needed to be created?', isNew) - createConfigFile() - }) - } - - function createConfigFile (err) { - if (err) return callback(err) - - var configFilename = 'config.gypi' - var configPath = path.resolve(buildDir, configFilename) - - log.verbose('build/' + configFilename, 'creating config file') - - var config = process.config || {} - , defaults = config.target_defaults - , variables = config.variables - - // default 
"config.variables" - if (!variables) variables = config.variables = {} - - // default "config.defaults" - if (!defaults) defaults = config.target_defaults = {} - - // don't inherit the "defaults" from node's `process.config` object. - // doing so could cause problems in cases where the `node` executable was - // compiled on a different machine (with different lib/include paths) than - // the machine where the addon is being built to - defaults.cflags = [] - defaults.defines = [] - defaults.include_dirs = [] - defaults.libraries = [] - - // set the default_configuration prop - if ('debug' in gyp.opts) { - defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release' - } - if (!defaults.default_configuration) { - defaults.default_configuration = 'Release' - } - - // set the target_arch variable - variables.target_arch = gyp.opts.arch || process.arch || 'ia32' - - // set the toolset for VCExpress users - if (win) { - if (hasVC2012Express && hasWin8SDK) { - defaults.msbuild_toolset = 'v110' - } else if (hasVCExpress && hasWin71SDK) { - defaults.msbuild_toolset = 'Windows7.1SDK' - } - } - - // set the node development directory - variables.nodedir = nodeDir - - // don't copy dev libraries with nodedir option - variables.copy_dev_lib = !gyp.opts.nodedir - - // disable -T "thin" static archives by default - variables.standalone_static_library = gyp.opts.thin ? 0 : 1; - - // loop through the rest of the opts and add the unknown ones as variables. - // this allows for module-specific configure flags like: - // - // $ node-gyp configure --shared-libxml2 - Object.keys(gyp.opts).forEach(function (opt) { - if (opt === 'argv') return - if (opt in gyp.configDefs) return - variables[opt.replace(/-/g, '_')] = gyp.opts[opt] - }) - - // ensures that any boolean values from `process.config` get stringified - function boolsToString (k, v) { - if (typeof v === 'boolean') - return String(v) - return v - } - - log.silly('build/' + configFilename, config) - - // now write out the config.gypi file to the build/ dir - var prefix = '# Do not edit. 
File was generated by node-gyp\'s "configure" step' - , json = JSON.stringify(config, boolsToString, 2) - log.verbose('build/' + configFilename, 'writing out config file: %s', configPath) - configs.push(configPath) - fs.writeFile(configPath, [prefix, json, ''].join('\n'), findConfigs) - } - - function findConfigs (err) { - if (err) return callback(err) - var name = configNames.shift() - if (!name) return runGyp() - var fullPath = path.resolve(name) - log.verbose(name, 'checking for gypi file: %s', fullPath) - fs.stat(fullPath, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - findConfigs() // check next gypi filename - } else { - callback(err) - } - } else { - log.verbose(name, 'found gypi file') - configs.push(fullPath) - findConfigs() - } - }) - } - - function runGyp (err) { - if (err) return callback(err) - - if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) { - if (win) { - log.verbose('gyp', 'gyp format was not specified; forcing "msvs"') - // force the 'make' target for non-Windows - argv.push('-f', 'msvs') - } else { - log.verbose('gyp', 'gyp format was not specified; forcing "make"') - // force the 'make' target for non-Windows - argv.push('-f', 'make') - } - } - - function hasMsvsVersion () { - return argv.some(function (arg) { - return arg.indexOf('msvs_version') === 0 - }) - } - - if (win && !hasMsvsVersion()) { - if ('msvs_version' in gyp.opts) { - argv.push('-G', 'msvs_version=' + gyp.opts.msvs_version) - } else { - argv.push('-G', 'msvs_version=auto') - } - } - - // include all the ".gypi" files that were found - configs.forEach(function (config) { - argv.push('-I', config) - }) - - // this logic ported from the old `gyp_addon` python file - var gyp_script = path.resolve(__dirname, '..', 'gyp', 'gyp') - var addon_gypi = path.resolve(__dirname, '..', 'addon.gypi') - var common_gypi = path.resolve(nodeDir, 'common.gypi') - var output_dir = 'build' - if (win) { - // Windows expects an absolute path - output_dir = buildDir - } - - argv.push('-I', addon_gypi) - argv.push('-I', common_gypi) - argv.push('-Dlibrary=shared_library') - argv.push('-Dvisibility=default') - argv.push('-Dnode_root_dir=' + nodeDir) - argv.push('-Dmodule_root_dir=' + process.cwd()) - argv.push('--depth=.') - - // tell gyp to write the Makefile/Solution files into output_dir - argv.push('--generator-output', output_dir) - - // tell make to write its output into the same dir - argv.push('-Goutput_dir=.') - - // enforce use of the "binding.gyp" file - argv.unshift('binding.gyp') - - // execute `gyp` from the current target nodedir - argv.unshift(gyp_script) - - var cp = gyp.spawn(python, argv) - cp.on('exit', onCpExit) - } - - /** - * Called when the `gyp` child process exits. - */ - - function onCpExit (code, signal) { - if (code !== 0) { - callback(new Error('`gyp` failed with exit code: ' + code)) - } else { - // we're done - callback() - } - } - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/install.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/install.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/install.js 2013-09-06 21:25:22.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/install.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,354 +0,0 @@ - -module.exports = exports = install - -exports.usage = 'Install node development files for the specified node version.' - -/** - * Module dependencies. 
- */ - -var fs = require('graceful-fs') - , osenv = require('osenv') - , tar = require('tar') - , rm = require('rimraf') - , path = require('path') - , zlib = require('zlib') - , log = require('npmlog') - , semver = require('semver') - , fstream = require('fstream') - , request = require('request') - , minimatch = require('minimatch') - , mkdir = require('mkdirp') - , win = process.platform == 'win32' - -function install (gyp, argv, callback) { - - // ensure no double-callbacks happen - function cb (err) { - if (cb.done) return - cb.done = true - if (err) { - log.warn('install', 'got an error, rolling back install') - // roll-back the install if anything went wrong - gyp.commands.remove([ version ], function (err2) { - callback(err) - }) - } else { - callback(null, version) - } - } - - var distUrl = gyp.opts['dist-url'] || gyp.opts.disturl || 'http://nodejs.org/dist' - - - // Determine which node dev files version we are installing - var versionStr = argv[0] || gyp.opts.target || process.version - log.verbose('install', 'input version string %j', versionStr) - - // parse the version to normalize and ensure it's valid - var version = semver.parse(versionStr) - if (!version) { - return callback(new Error('Invalid version number: ' + versionStr)) - } - - if (semver.lt(versionStr, '0.8.0')) { - return callback(new Error('Minimum target version is `0.8.0` or greater. Got: ' + versionStr)) - } - - // 0.x.y-pre versions are not published yet and cannot be installed. Bail. - if (version.prerelease[0] === 'pre') { - log.verbose('detected "pre" node version', versionStr) - if (gyp.opts.nodedir) { - log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir) - callback() - } else { - callback(new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead')) - } - return - } - - // flatten version into String - version = version.version - log.verbose('install', 'installing version: %s', version) - - // the directory where the dev files will be installed - var devDir = path.resolve(gyp.devDir, version) - - // If '--ensure' was passed, then don't *always* install the version; - // check if it is already installed, and only install when needed - if (gyp.opts.ensure) { - log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed') - fs.stat(devDir, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - log.verbose('install', 'version not already installed, continuing with install', version) - go() - } else if (err.code == 'EACCES') { - eaccesFallback() - } else { - cb(err) - } - return - } - log.verbose('install', 'version is already installed, need to check "installVersion"') - var installVersionFile = path.resolve(devDir, 'installVersion') - fs.readFile(installVersionFile, 'ascii', function (err, ver) { - if (err && err.code != 'ENOENT') { - return cb(err) - } - var installVersion = parseInt(ver, 10) || 0 - log.verbose('got "installVersion"', installVersion) - log.verbose('needs "installVersion"', gyp.package.installVersion) - if (installVersion < gyp.package.installVersion) { - log.verbose('install', 'version is no good; reinstalling') - go() - } else { - log.verbose('install', 'version is good') - cb() - } - }) - }) - } else { - go() - } - - function download (url) { - log.http('GET', url) - - var req = null - var requestOpts = { - uri: url - , headers: { - 'User-Agent': 'node-gyp v' + gyp.version + ' (node ' + process.version + ')' - } - } - - // basic support for a proxy server - var proxyUrl = gyp.opts.proxy - || 
process.env.http_proxy - || process.env.HTTP_PROXY - || process.env.npm_config_proxy - if (proxyUrl) { - if (/^https?:\/\//i.test(proxyUrl)) { - log.verbose('download', 'using proxy url: "%s"', proxyUrl) - requestOpts.proxy = proxyUrl - } else { - log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl) - } - } - try { - // The "request" constructor can throw sometimes apparently :( - // See: https://github.com/TooTallNate/node-gyp/issues/114 - req = request(requestOpts) - } catch (e) { - cb(e) - } - if (req) { - req.on('response', function (res) { - log.http(res.statusCode, url) - }) - } - return req - } - - function go () { - - log.verbose('ensuring nodedir is created', devDir) - - // first create the dir for the node dev files - mkdir(devDir, function (err, created) { - if (err) { - if (err.code == 'EACCES') { - eaccesFallback() - } else { - cb(err) - } - return - } - - if (created) { - log.verbose('created nodedir', created) - } - - // now download the node tarball - var tarballUrl = distUrl + '/v' + version + '/node-v' + version + '.tar.gz' - , badDownload = false - , extractCount = 0 - , gunzip = zlib.createGunzip() - , extracter = tar.Extract({ path: devDir, strip: 1, filter: isValid }) - - // checks if a file to be extracted from the tarball is valid. - // only .h header files and the gyp files get extracted - function isValid () { - var name = this.path.substring(devDir.length + 1) - var isValid = valid(name) - if (name === '' && this.type === 'Directory') { - // the first directory entry is ok - return true - } - if (isValid) { - log.verbose('extracted file from tarball', name) - extractCount++ - } else { - // invalid - log.silly('ignoring from tarball', name) - } - return isValid - } - - gunzip.on('error', cb) - extracter.on('error', cb) - extracter.on('end', afterTarball) - - // download the tarball, gunzip and extract! - var req = download(tarballUrl) - if (!req) return - - // something went wrong downloading the tarball? 
- req.on('error', function (err) { - badDownload = true - cb(err) - }) - - req.on('close', function () { - if (extractCount === 0) { - cb(new Error('Connection closed while downloading tarball file')) - } - }) - - req.on('response', function (res) { - if (res.statusCode !== 200) { - badDownload = true - cb(new Error(res.statusCode + ' status code downloading tarball')) - return - } - // start unzipping and untaring - req.pipe(gunzip).pipe(extracter) - }) - - // invoked after the tarball has finished being extracted - function afterTarball () { - if (badDownload) return - if (extractCount === 0) { - return cb(new Error('There was a fatal problem while downloading/extracting the tarball')) - } - log.verbose('tarball', 'done parsing tarball') - var async = 0 - - if (win) { - // need to download node.lib - async++ - downloadNodeLib(deref) - } - - // write the "installVersion" file - async++ - var installVersionPath = path.resolve(devDir, 'installVersion') - fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref) - - if (async === 0) { - // no async tasks required - cb() - } - - function deref (err) { - if (err) return cb(err) - --async || cb() - } - } - - function downloadNodeLib (done) { - log.verbose('on Windows; need to download `node.lib`...') - var dir32 = path.resolve(devDir, 'ia32') - , dir64 = path.resolve(devDir, 'x64') - , nodeLibPath32 = path.resolve(dir32, 'node.lib') - , nodeLibPath64 = path.resolve(dir64, 'node.lib') - , nodeLibUrl32 = distUrl + '/v' + version + '/node.lib' - , nodeLibUrl64 = distUrl + '/v' + version + '/x64/node.lib' - - log.verbose('32-bit node.lib dir', dir32) - log.verbose('64-bit node.lib dir', dir64) - log.verbose('`node.lib` 32-bit url', nodeLibUrl32) - log.verbose('`node.lib` 64-bit url', nodeLibUrl64) - - var async = 2 - mkdir(dir32, function (err) { - if (err) return done(err) - log.verbose('streaming 32-bit node.lib to:', nodeLibPath32) - - var req = download(nodeLibUrl32) - if (!req) return - req.on('error', done) - req.on('response', function (res) { - if (res.statusCode !== 200) { - done(new Error(res.statusCode + ' status code downloading 32-bit node.lib')) - return - } - - var ws = fs.createWriteStream(nodeLibPath32) - ws.on('error', cb) - req.pipe(ws) - }) - req.on('end', function () { - --async || done() - }) - }) - mkdir(dir64, function (err) { - if (err) return done(err) - log.verbose('streaming 64-bit node.lib to:', nodeLibPath64) - - var req = download(nodeLibUrl64) - if (!req) return - req.on('error', done) - req.on('response', function (res) { - if (res.statusCode !== 200) { - done(new Error(res.statusCode + ' status code downloading 64-bit node.lib')) - return - } - - var ws = fs.createWriteStream(nodeLibPath64) - ws.on('error', cb) - req.pipe(ws) - }) - req.on('end', function () { - --async || done() - }) - }) - } // downloadNodeLib() - - }) // mkdir() - - } // go() - - /** - * Checks if a given filename is "valid" for this installation. - */ - - function valid (file) { - // header files - return minimatch(file, '*.h', { matchBase: true }) || - minimatch(file, '*.gypi', { matchBase: true }) - } - - /** - * The EACCES fallback is a workaround for npm's `sudo` behavior, where - * it drops the permissions before invoking any child processes (like - * node-gyp). So what happens is the "nobody" user doesn't have - * permission to create the dev dir. As a fallback, make the tmpdir() be - * the dev dir for this installation. This is not ideal, but at least - * the compilation will succeed... 
- */ - - function eaccesFallback () { - var tmpdir = osenv.tmpdir() - gyp.devDir = path.resolve(tmpdir, '.node-gyp') - log.warn('EACCES', 'user "%s" does not have permission to access the dev dir "%s"', osenv.user(), devDir) - log.warn('EACCES', 'attempting to reinstall using temporary dev dir "%s"', gyp.devDir) - if (process.cwd() == tmpdir) { - log.verbose('tmpdir == cwd', 'automatically will remove dev files after to save disk space') - gyp.todo.push({ name: 'remove', args: argv }) - } - gyp.commands.install(argv, cb) - } - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/list.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/list.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/list.js 2012-07-16 22:23:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/list.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ - -module.exports = exports = list - -exports.usage = 'Prints a listing of the currently installed node development files' - -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , path = require('path') - , log = require('npmlog') - -function list (gyp, args, callback) { - - var devDir = gyp.devDir - log.verbose('list', 'using node-gyp dir:', devDir) - - // readdir() the node-gyp dir - fs.readdir(devDir, onreaddir) - - function onreaddir (err, versions) { - if (err && err.code != 'ENOENT') { - return callback(err) - } - if (Array.isArray(versions)) { - versions = versions.filter(function (v) { return v != 'current' }) - } else { - versions = [] - } - callback(null, versions) - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/node-gyp.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/node-gyp.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/node-gyp.js 2013-06-24 21:23:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/node-gyp.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,224 +0,0 @@ - -/** - * Module exports. - */ - -module.exports = exports = gyp - -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , path = require('path') - , nopt = require('nopt') - , log = require('npmlog') - , child_process = require('child_process') - , EE = require('events').EventEmitter - , inherits = require('util').inherits - , commands = [ - // Module build commands - 'build' - , 'clean' - , 'configure' - , 'rebuild' - // Development Header File management commands - , 'install' - , 'list' - , 'remove' - ] - , aliases = { - 'ls': 'list' - , 'rm': 'remove' - } - -// differentiate node-gyp's logs from npm's -log.heading = 'gyp' - -/** - * The `gyp` function. - */ - -function gyp () { - return new Gyp() -} - -function Gyp () { - var self = this - - // set the dir where node-gyp dev files get installed - // TODO: make this *more* configurable? - // see: https://github.com/TooTallNate/node-gyp/issues/21 - var homeDir = process.env.HOME || process.env.USERPROFILE - if (!homeDir) { - throw new Error( - "node-gyp requires that the user's home directory is specified " + - "in either of the environmental variables HOME or USERPROFILE" - ); - } - this.devDir = path.resolve(homeDir, '.node-gyp') - - this.commands = {} - - commands.forEach(function (command) { - self.commands[command] = function (argv, callback) { - log.verbose('command', command, argv) - return require('./' + command)(self, argv, callback) - } - }) -} -inherits(Gyp, EE) -exports.Gyp = Gyp -var proto = Gyp.prototype - -/** - * Export the contents of the package.json. 
- */ - -proto.package = require('../package') - -/** - * nopt configuration definitions - */ - -proto.configDefs = { - help: Boolean // everywhere - , arch: String // 'configure' - , debug: Boolean // 'build' - , directory: String // bin - , make: String // 'build' - , msvs_version: String // 'configure' - , ensure: Boolean // 'install' - , solution: String // 'build' (windows only) - , proxy: String // 'install' - , nodedir: String // 'configure' - , loglevel: String // everywhere - , python: String // 'configure' - , 'dist-url': String // 'install' - , jobs: String // 'build' - , thin: String // 'configure' -} - -/** - * nopt shorthands - */ - -proto.shorthands = { - release: '--no-debug' - , C: '--directory' - , debug: '--debug' - , j: '--jobs' - , silly: '--loglevel=silly' - , verbose: '--loglevel=verbose' -} - -/** - * expose the command aliases for the bin file to use. - */ - -proto.aliases = aliases - -/** - * Parses the given argv array and sets the 'opts', - * 'argv' and 'command' properties. - */ - -proto.parseArgv = function parseOpts (argv) { - this.opts = nopt(this.configDefs, this.shorthands, argv) - this.argv = this.opts.argv.remain.slice() - - var commands = this.todo = [] - - // create a copy of the argv array with aliases mapped - argv = this.argv.map(function (arg) { - // is this an alias? - if (arg in this.aliases) { - arg = this.aliases[arg] - } - return arg - }, this) - - // process the mapped args into "command" objects ("name" and "args" props) - argv.slice().forEach(function (arg) { - if (arg in this.commands) { - var args = argv.splice(0, argv.indexOf(arg)) - argv.shift() - if (commands.length > 0) { - commands[commands.length - 1].args = args - } - commands.push({ name: arg, args: [] }) - } - }, this) - if (commands.length > 0) { - commands[commands.length - 1].args = argv.splice(0) - } - - // support for inheriting config env variables from npm - var npm_config_prefix = 'npm_config_' - Object.keys(process.env).forEach(function (name) { - if (name.indexOf(npm_config_prefix) !== 0) return - var val = process.env[name] - if (name === npm_config_prefix + 'loglevel') { - log.level = val - } else { - // add the user-defined options to the config - name = name.substring(npm_config_prefix.length) - this.opts[name] = val - } - }, this) - - if (this.opts.loglevel) { - log.level = this.opts.loglevel - } - log.resume() -} - -/** - * Spawns a child process and emits a 'spawn' event. - */ - -proto.spawn = function spawn (command, args, opts) { - if (!opts) opts = {} - if (!opts.silent && !opts.customFds) { - opts.customFds = [ 0, 1, 2 ] - } - var cp = child_process.spawn(command, args, opts) - log.info('spawn', command) - log.info('spawn args', args) - return cp -} - -/** - * Returns the usage instructions for node-gyp. - */ - -proto.usage = function usage () { - var str = [ - '' - , ' Usage: node-gyp [options]' - , '' - , ' where is one of:' - , commands.map(function (c) { - return ' - ' + c + ' - ' + require('./' + c).usage - }).join('\n') - , '' - , ' for specific command usage and options try:' - , ' $ node-gyp --help' - , '' - , 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..') - , 'node@' + process.versions.node - ].join('\n') - return str -} - -/** - * Version number getter. 
- */ - -Object.defineProperty(proto, 'version', { - get: function () { - return this.package.version - } - , enumerable: true -}) - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/rebuild.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/rebuild.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/rebuild.js 2012-07-16 23:18:21.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/rebuild.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ - -module.exports = exports = rebuild - -exports.usage = 'Runs "clean", "configure" and "build" all at once' - -var log = require('npmlog') - -function rebuild (gyp, argv, callback) { - - gyp.todo.push( - { name: 'clean', args: [] } - , { name: 'configure', args: [] } - , { name: 'build', args: [] } - ) - process.nextTick(callback) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/remove.js tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/remove.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/lib/remove.js 2013-06-28 16:39:28.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/lib/remove.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,56 +0,0 @@ - -module.exports = exports = remove - -exports.usage = 'Removes the node development files for the specified version' - -/** - * Module dependencies. - */ - -var fs = require('fs') - , rm = require('rimraf') - , path = require('path') - , log = require('npmlog') - , semver = require('semver') - -function remove (gyp, argv, callback) { - - var devDir = gyp.devDir - log.verbose('remove', 'using node-gyp dir:', devDir) - - // get the user-specified version to remove - var v = argv[0] || gyp.opts.target - log.verbose('remove', 'removing target version:', v) - - if (!v) { - return callback(new Error('You must specify a version number to remove. 
Ex: "' + process.version + '"')) - } - - // parse the version to normalize and make sure it's valid - var version = semver.parse(v) - - if (!version) { - return callback(new Error('Invalid version number: ' + v)) - } - - // flatten the version Array into a String - version = version.version - - var versionPath = path.resolve(gyp.devDir, version) - log.verbose('remove', 'removing development files for version:', version) - - // first check if its even installed - fs.stat(versionPath, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - callback(null, 'version was already uninstalled: ' + version) - } else { - callback(err) - } - return - } - // Go ahead and delete the dir - rm(versionPath, callback) - }) - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/.npmignore 2012-03-20 00:55:34.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -.*.swp -node_modules/ -examples/deep-copy/ -examples/path/ -examples/filter-copy/ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/.travis.yml tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/.travis.yml --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/.travis.yml 2011-11-28 23:32:45.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/.travis.yml 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -language: node_js -node_js: - - 0.6 diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/LICENSE 2012-09-12 00:44:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/README.md 2011-11-01 00:01:37.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ -Like FS streams, but with stat on them, and supporting directories and -symbolic links, as well as normal files. Also, you can use this to set -the stats on a file, even if you don't change its contents, or to create -a symlink, etc. - -So, for example, you can "write" a directory, and it'll call `mkdir`. You -can specify a uid and gid, and it'll call `chown`. You can specify a -`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink -and provide a `linkpath` and it'll call `symlink`. - -Note that it won't automatically resolve symbolic links. So, if you -call `fstream.Reader('/some/symlink')` then you'll get an object -that stats and then ends immediately (since it has no data). To follow -symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow: -true })`. - -There are various checks to make sure that the bytes emitted are the -same as the intended size, if the size is set. - -## Examples - -```javascript -fstream - .Writer({ path: "path/to/file" - , mode: 0755 - , size: 6 - }) - .write("hello\n") - .end() -``` - -This will create the directories if they're missing, and then write -`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have -been written when it's done. - -```javascript -fstream - .Writer({ path: "path/to/file" - , mode: 0755 - , size: 6 - , flags: "a" - }) - .write("hello\n") - .end() -``` - -You can pass flags in, if you want to append to a file. - -```javascript -fstream - .Writer({ path: "path/to/symlink" - , linkpath: "./file" - , SymbolicLink: true - , mode: "0755" // octal strings supported - }) - .end() -``` - -If isSymbolicLink is a function, it'll be called, and if it returns -true, then it'll treat it as a symlink. If it's not a function, then -any truish value will make a symlink, or you can set `type: -'SymbolicLink'`, which does the same thing. - -Note that the linkpath is relative to the symbolic link location, not -the parent dir or cwd. - -```javascript -fstream - .Reader("path/to/dir") - .pipe(fstream.Writer("path/to/other/dir")) -``` - -This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other -dir exists and isn't a directory, then it'll emit an error. It'll also -set the uid, gid, mode, etc. to be identical. In this way, it's more -like `rsync -a` than simply a copy. 
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js 2012-03-27 05:24:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,131 +0,0 @@ -var fstream = require("../fstream.js") -var path = require("path") - -var r = fstream.Reader({ path: path.dirname(__dirname) - , filter: function () { - return !this.basename.match(/^\./) && - !this.basename.match(/^node_modules$/) - !this.basename.match(/^deep-copy$/) - !this.basename.match(/^filter-copy$/) - } - }) - -// this writer will only write directories -var w = fstream.Writer({ path: path.resolve(__dirname, "filter-copy") - , type: "Directory" - , filter: function () { - return this.type === "Directory" - } - }) - -var indent = "" -var escape = {} - -r.on("entry", appears) -r.on("ready", function () { - console.error("ready to begin!", r.path) -}) - -function appears (entry) { - console.error(indent + "a %s appears!", entry.type, entry.basename, typeof entry.basename) - if (foggy) { - console.error("FOGGY!") - var p = entry - do { - console.error(p.depth, p.path, p._paused) - } while (p = p.parent) - - throw new Error("\033[mshould not have entries while foggy") - } - indent += "\t" - entry.on("data", missile(entry)) - entry.on("end", runaway(entry)) - entry.on("entry", appears) -} - -var foggy -function missile (entry) { - if (entry.type === "Directory") { - var ended = false - entry.once("end", function () { ended = true }) - return function (c) { - // throw in some pathological pause()/resume() behavior - // just for extra fun. - process.nextTick(function () { - if (!foggy && !ended) { // && Math.random() < 0.3) { - console.error(indent +"%s casts a spell", entry.basename) - console.error("\na slowing fog comes over the battlefield...\n\033[32m") - entry.pause() - entry.once("resume", liftFog) - foggy = setTimeout(liftFog, 1000) - - function liftFog (who) { - if (!foggy) return - if (who) { - console.error("%s breaks the spell!", who && who.path) - } else { - console.error("the spell expires!") - } - console.error("\033[mthe fog lifts!\n") - clearTimeout(foggy) - foggy = null - if (entry._paused) entry.resume() - } - - } - }) - } - } - - return function (c) { - var e = Math.random() < 0.5 - console.error(indent + "%s %s for %d damage!", - entry.basename, - e ? "is struck" : "fires a chunk", - c.length) - } -} - -function runaway (entry) { return function () { - var e = Math.random() < 0.5 - console.error(indent + "%s %s", - entry.basename, - e ? "turns to flee" : "is vanquished!") - indent = indent.slice(0, -1) -}} - - -w.on("entry", attacks) -//w.on("ready", function () { attacks(w) }) -function attacks (entry) { - console.error(indent + "%s %s!", entry.basename, - entry.type === "Directory" ? "calls for backup" : "attacks") - entry.on("entry", attacks) -} - -ended = false -var i = 1 -r.on("end", function () { - if (foggy) clearTimeout(foggy) - console.error("\033[mIT'S OVER!!") - console.error("A WINNAR IS YOU!") - - console.log("ok " + (i ++) + " A WINNAR IS YOU") - ended = true - // now go through and verify that everything in there is a dir. 
- var p = path.resolve(__dirname, "filter-copy") - var checker = fstream.Reader({ path: p }) - checker.checker = true - checker.on("child", function (e) { - var ok = e.type === "Directory" - console.log((ok ? "" : "not ") + "ok " + (i ++) + - " should be a dir: " + - e.path.substr(checker.path.length + 1)) - }) -}) - -process.on("exit", function () { - console.log((ended ? "" : "not ") + "ok " + (i ++) + " ended") -}) - -r.pipe(w) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/pipe.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/pipe.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/pipe.js 2012-03-20 01:18:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/pipe.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,115 +0,0 @@ -var fstream = require("../fstream.js") -var path = require("path") - -var r = fstream.Reader({ path: path.dirname(__dirname) - , filter: function () { - return !this.basename.match(/^\./) && - !this.basename.match(/^node_modules$/) - !this.basename.match(/^deep-copy$/) - } - }) - -var w = fstream.Writer({ path: path.resolve(__dirname, "deep-copy") - , type: "Directory" - }) - -var indent = "" -var escape = {} - -r.on("entry", appears) -r.on("ready", function () { - console.error("ready to begin!", r.path) -}) - -function appears (entry) { - console.error(indent + "a %s appears!", entry.type, entry.basename, typeof entry.basename, entry) - if (foggy) { - console.error("FOGGY!") - var p = entry - do { - console.error(p.depth, p.path, p._paused) - } while (p = p.parent) - - throw new Error("\033[mshould not have entries while foggy") - } - indent += "\t" - entry.on("data", missile(entry)) - entry.on("end", runaway(entry)) - entry.on("entry", appears) -} - -var foggy -function missile (entry) { - if (entry.type === "Directory") { - var ended = false - entry.once("end", function () { ended = true }) - return function (c) { - // throw in some pathological pause()/resume() behavior - // just for extra fun. - process.nextTick(function () { - if (!foggy && !ended) { // && Math.random() < 0.3) { - console.error(indent +"%s casts a spell", entry.basename) - console.error("\na slowing fog comes over the battlefield...\n\033[32m") - entry.pause() - entry.once("resume", liftFog) - foggy = setTimeout(liftFog, 10) - - function liftFog (who) { - if (!foggy) return - if (who) { - console.error("%s breaks the spell!", who && who.path) - } else { - console.error("the spell expires!") - } - console.error("\033[mthe fog lifts!\n") - clearTimeout(foggy) - foggy = null - if (entry._paused) entry.resume() - } - - } - }) - } - } - - return function (c) { - var e = Math.random() < 0.5 - console.error(indent + "%s %s for %d damage!", - entry.basename, - e ? "is struck" : "fires a chunk", - c.length) - } -} - -function runaway (entry) { return function () { - var e = Math.random() < 0.5 - console.error(indent + "%s %s", - entry.basename, - e ? "turns to flee" : "is vanquished!") - indent = indent.slice(0, -1) -}} - - -w.on("entry", attacks) -//w.on("ready", function () { attacks(w) }) -function attacks (entry) { - console.error(indent + "%s %s!", entry.basename, - entry.type === "Directory" ? 
"calls for backup" : "attacks") - entry.on("entry", attacks) -} - -ended = false -r.on("end", function () { - if (foggy) clearTimeout(foggy) - console.error("\033[mIT'S OVER!!") - console.error("A WINNAR IS YOU!") - - console.log("ok 1 A WINNAR IS YOU") - ended = true -}) - -process.on("exit", function () { - console.log((ended ? "" : "not ") + "ok 2 ended") -}) - -r.pipe(w) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/reader.js 2011-11-29 01:49:37.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -var fstream = require("../fstream.js") -var tap = require("tap") -var fs = require("fs") -var path = require("path") -var children = -1 -var dir = path.dirname(__dirname) - -var gotReady = false -var ended = false - -tap.test("reader test", function (t) { - - var r = fstream.Reader({ path: dir - , filter: function () { - // return this.parent === r - return this.parent === r || this === r - } - }) - - r.on("ready", function () { - gotReady = true - children = fs.readdirSync(dir).length - console.error("Setting expected children to "+children) - t.equal(r.type, "Directory", "should be a directory") - }) - - r.on("entry", function (entry) { - children -- - if (!gotReady) { - t.fail("children before ready!") - } - t.equal(entry.dirname, r.path, "basename is parent dir") - }) - - r.on("error", function (er) { - t.fail(er) - t.end() - process.exit(1) - }) - - r.on("end", function () { - t.equal(children, 0, "should have seen all children") - ended = true - }) - - var closed = false - r.on("close", function () { - t.ok(ended, "saw end before close") - t.notOk(closed, "close should only happen once") - closed = true - t.end() - }) - -}) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js 2011-11-29 00:28:35.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -var fstream = require("../fstream.js") - , closed = false - -fstream - .Writer({ path: "path/to/symlink" - , linkpath: "./file" - , isSymbolicLink: true - , mode: "0755" // octal strings supported - }) - .on("close", function () { - closed = true - var fs = require("fs") - var s = fs.lstatSync("path/to/symlink") - var isSym = s.isSymbolicLink() - console.log((isSym?"":"not ") +"ok 1 should be symlink") - var t = fs.readlinkSync("path/to/symlink") - var isTarget = t === "./file" - console.log((isTarget?"":"not ") +"ok 2 should link to ./file") - }) - .end() - -process.on("exit", function () { - console.log((closed?"":"not ")+"ok 3 should be closed") -}) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/fstream.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/fstream.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/fstream.js 2011-11-19 00:01:59.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/fstream.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -exports.Abstract = 
require("./lib/abstract.js") -exports.Reader = require("./lib/reader.js") -exports.Writer = require("./lib/writer.js") - -exports.File = - { Reader: require("./lib/file-reader.js") - , Writer: require("./lib/file-writer.js") } - -exports.Dir = - { Reader : require("./lib/dir-reader.js") - , Writer : require("./lib/dir-writer.js") } - -exports.Link = - { Reader : require("./lib/link-reader.js") - , Writer : require("./lib/link-writer.js") } - -exports.Proxy = - { Reader : require("./lib/proxy-reader.js") - , Writer : require("./lib/proxy-writer.js") } - -exports.Reader.Dir = exports.DirReader = exports.Dir.Reader -exports.Reader.File = exports.FileReader = exports.File.Reader -exports.Reader.Link = exports.LinkReader = exports.Link.Reader -exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader - -exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer -exports.Writer.File = exports.FileWriter = exports.File.Writer -exports.Writer.Link = exports.LinkWriter = exports.Link.Writer -exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer - -exports.collect = require("./lib/collect.js") diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/abstract.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/abstract.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/abstract.js 2013-07-24 03:36:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/abstract.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,85 +0,0 @@ -// the parent class for all fstreams. - -module.exports = Abstract - -var Stream = require("stream").Stream - , inherits = require("inherits") - -function Abstract () { - Stream.call(this) -} - -inherits(Abstract, Stream) - -Abstract.prototype.on = function (ev, fn) { - if (ev === "ready" && this.ready) { - process.nextTick(fn.bind(this)) - } else { - Stream.prototype.on.call(this, ev, fn) - } - return this -} - -Abstract.prototype.abort = function () { - this._aborted = true - this.emit("abort") -} - -Abstract.prototype.destroy = function () {} - -Abstract.prototype.warn = function (msg, code) { - var me = this - , er = decorate(msg, code, me) - if (!me.listeners("warn")) { - console.error("%s %s\n" + - "path = %s\n" + - "syscall = %s\n" + - "fstream_type = %s\n" + - "fstream_path = %s\n" + - "fstream_unc_path = %s\n" + - "fstream_class = %s\n" + - "fstream_stack =\n%s\n", - code || "UNKNOWN", - er.stack, - er.path, - er.syscall, - er.fstream_type, - er.fstream_path, - er.fstream_unc_path, - er.fstream_class, - er.fstream_stack.join("\n")) - } else { - me.emit("warn", er) - } -} - -Abstract.prototype.info = function (msg, code) { - this.emit("info", msg, code) -} - -Abstract.prototype.error = function (msg, code, th) { - var er = decorate(msg, code, this) - if (th) throw er - else this.emit("error", er) -} - -function decorate (er, code, me) { - if (!(er instanceof Error)) er = new Error(er) - er.code = er.code || code - er.path = er.path || me.path - er.fstream_type = er.fstream_type || me.type - er.fstream_path = er.fstream_path || me.path - if (me._path !== me.path) { - er.fstream_unc_path = er.fstream_unc_path || me._path - } - if (me.linkpath) { - er.fstream_linkpath = er.fstream_linkpath || me.linkpath - } - er.fstream_class = er.fstream_class || me.constructor.name - er.fstream_stack = er.fstream_stack || - new Error().stack.split(/\n/).slice(3).map(function (s) { - return s.replace(/^ at /, "") - }) - - return er -} diff -Nru 
tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/collect.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/collect.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/collect.js 2012-12-28 00:57:42.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/collect.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -module.exports = collect - -function collect (stream) { - if (stream._collected) return - - stream._collected = true - stream.pause() - - stream.on("data", save) - stream.on("end", save) - var buf = [] - function save (b) { - if (typeof b === "string") b = new Buffer(b) - if (Buffer.isBuffer(b) && !b.length) return - buf.push(b) - } - - stream.on("entry", saveEntry) - var entryBuffer = [] - function saveEntry (e) { - collect(e) - entryBuffer.push(e) - } - - stream.on("proxy", proxyPause) - function proxyPause (p) { - p.pause() - } - - - // replace the pipe method with a new version that will - // unlock the buffered stuff. if you just call .pipe() - // without a destination, then it'll re-play the events. - stream.pipe = (function (orig) { return function (dest) { - // console.error(" === open the pipes", dest && dest.path) - - // let the entries flow through one at a time. - // Once they're all done, then we can resume completely. - var e = 0 - ;(function unblockEntry () { - var entry = entryBuffer[e++] - // console.error(" ==== unblock entry", entry && entry.path) - if (!entry) return resume() - entry.on("end", unblockEntry) - if (dest) dest.add(entry) - else stream.emit("entry", entry) - })() - - function resume () { - stream.removeListener("entry", saveEntry) - stream.removeListener("data", save) - stream.removeListener("end", save) - - stream.pipe = orig - if (dest) stream.pipe(dest) - - buf.forEach(function (b) { - if (b) stream.emit("data", b) - else stream.emit("end") - }) - - stream.resume() - } - - return dest - }})(stream.pipe) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js 2013-07-24 03:37:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,250 +0,0 @@ -// A thing that emits "entry" events with Reader objects -// Pausing it causes it to stop emitting entry events, and also -// pauses the current entry if there is one. - -module.exports = DirReader - -var fs = require("graceful-fs") - , fstream = require("../fstream.js") - , Reader = fstream.Reader - , inherits = require("inherits") - , mkdir = require("mkdirp") - , path = require("path") - , Reader = require("./reader.js") - , assert = require("assert").ok - -inherits(DirReader, Reader) - -function DirReader (props) { - var me = this - if (!(me instanceof DirReader)) throw new Error( - "DirReader must be called as constructor.") - - // should already be established as a Directory type - if (props.type !== "Directory" || !props.Directory) { - throw new Error("Non-directory type "+ props.type) - } - - me.entries = null - me._index = -1 - me._paused = false - me._length = -1 - - if (props.sort) { - this.sort = props.sort - } - - Reader.call(this, props) -} - -DirReader.prototype._getEntries = function () { - var me = this - - // race condition. 
might pause() before calling _getEntries, - // and then resume, and try to get them a second time. - if (me._gotEntries) return - me._gotEntries = true - - fs.readdir(me._path, function (er, entries) { - if (er) return me.error(er) - - me.entries = entries - - me.emit("entries", entries) - if (me._paused) me.once("resume", processEntries) - else processEntries() - - function processEntries () { - me._length = me.entries.length - if (typeof me.sort === "function") { - me.entries = me.entries.sort(me.sort.bind(me)) - } - me._read() - } - }) -} - -// start walking the dir, and emit an "entry" event for each one. -DirReader.prototype._read = function () { - var me = this - - if (!me.entries) return me._getEntries() - - if (me._paused || me._currentEntry || me._aborted) { - // console.error("DR paused=%j, current=%j, aborted=%j", me._paused, !!me._currentEntry, me._aborted) - return - } - - me._index ++ - if (me._index >= me.entries.length) { - if (!me._ended) { - me._ended = true - me.emit("end") - me.emit("close") - } - return - } - - // ok, handle this one, then. - - // save creating a proxy, by stat'ing the thing now. - var p = path.resolve(me._path, me.entries[me._index]) - assert(p !== me._path) - assert(me.entries[me._index]) - - // set this to prevent trying to _read() again in the stat time. - me._currentEntry = p - fs[ me.props.follow ? "stat" : "lstat" ](p, function (er, stat) { - if (er) return me.error(er) - - var who = me._proxy || me - - stat.path = p - stat.basename = path.basename(p) - stat.dirname = path.dirname(p) - var childProps = me.getChildProps.call(who, stat) - childProps.path = p - childProps.basename = path.basename(p) - childProps.dirname = path.dirname(p) - - var entry = Reader(childProps, stat) - - // console.error("DR Entry", p, stat.size) - - me._currentEntry = entry - - // "entry" events are for direct entries in a specific dir. - // "child" events are for any and all children at all levels. - // This nomenclature is not completely final. - - entry.on("pause", function (who) { - if (!me._paused && !entry._disowned) { - me.pause(who) - } - }) - - entry.on("resume", function (who) { - if (me._paused && !entry._disowned) { - me.resume(who) - } - }) - - entry.on("stat", function (props) { - me.emit("_entryStat", entry, props) - if (entry._aborted) return - if (entry._paused) entry.once("resume", function () { - me.emit("entryStat", entry, props) - }) - else me.emit("entryStat", entry, props) - }) - - entry.on("ready", function EMITCHILD () { - // console.error("DR emit child", entry._path) - if (me._paused) { - // console.error(" DR emit child - try again later") - // pause the child, and emit the "entry" event once we drain. - // console.error("DR pausing child entry") - entry.pause(me) - return me.once("resume", EMITCHILD) - } - - // skip over sockets. they can't be piped around properly, - // so there's really no sense even acknowledging them. - // if someone really wants to see them, they can listen to - // the "socket" events. - if (entry.type === "Socket") { - me.emit("socket", entry) - } else { - me.emitEntry(entry) - } - }) - - var ended = false - entry.on("close", onend) - entry.on("disown", onend) - function onend () { - if (ended) return - ended = true - me.emit("childEnd", entry) - me.emit("entryEnd", entry) - me._currentEntry = null - if (!me._paused) { - me._read() - } - } - - // XXX Remove this. Works in node as of 0.6.2 or so. - // Long filenames should not break stuff. 
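// [editor's note, not in the original source] The handler below is where the
// _swallowErrors flag set by Reader for over-long win32 paths takes effect:
// the error is demoted to a warning and a synthetic "end"/"close" pair is
// emitted, so the directory walk keeps moving instead of aborting.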
- entry.on("error", function (er) { - if (entry._swallowErrors) { - me.warn(er) - entry.emit("end") - entry.emit("close") - } else { - me.emit("error", er) - } - }) - - // proxy up some events. - ; [ "child" - , "childEnd" - , "warn" - ].forEach(function (ev) { - entry.on(ev, me.emit.bind(me, ev)) - }) - }) -} - -DirReader.prototype.disown = function (entry) { - entry.emit("beforeDisown") - entry._disowned = true - entry.parent = entry.root = null - if (entry === this._currentEntry) { - this._currentEntry = null - } - entry.emit("disown") -} - -DirReader.prototype.getChildProps = function (stat) { - return { depth: this.depth + 1 - , root: this.root || this - , parent: this - , follow: this.follow - , filter: this.filter - , sort: this.props.sort - } -} - -DirReader.prototype.pause = function (who) { - var me = this - if (me._paused) return - who = who || me - me._paused = true - if (me._currentEntry && me._currentEntry.pause) { - me._currentEntry.pause(who) - } - me.emit("pause", who) -} - -DirReader.prototype.resume = function (who) { - var me = this - if (!me._paused) return - who = who || me - - me._paused = false - // console.error("DR Emit Resume", me._path) - me.emit("resume", who) - if (me._paused) { - // console.error("DR Re-paused", me._path) - return - } - - if (me._currentEntry) { - if (me._currentEntry.resume) me._currentEntry.resume(who) - } else me._read() -} - -DirReader.prototype.emitEntry = function (entry) { - this.emit("entry", entry) - this.emit("child", entry) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js 2013-07-24 03:37:10.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -// It is expected that, when .add() returns false, the consumer -// of the DirWriter will pause until a "drain" event occurs. Note -// that this is *almost always going to be the case*, unless the -// thing being written is some sort of unsupported type, and thus -// skipped over. - -module.exports = DirWriter - -var fs = require("graceful-fs") - , fstream = require("../fstream.js") - , Writer = require("./writer.js") - , inherits = require("inherits") - , mkdir = require("mkdirp") - , path = require("path") - , collect = require("./collect.js") - -inherits(DirWriter, Writer) - -function DirWriter (props) { - var me = this - if (!(me instanceof DirWriter)) me.error( - "DirWriter must be called as constructor.", null, true) - - // should already be established as a Directory type - if (props.type !== "Directory" || !props.Directory) { - me.error("Non-directory type "+ props.type + " " + - JSON.stringify(props), null, true) - } - - Writer.call(this, props) -} - -DirWriter.prototype._create = function () { - var me = this - mkdir(me._path, Writer.dirmode, function (er) { - if (er) return me.error(er) - // ready to start getting entries! - me.ready = true - me.emit("ready") - me._process() - }) -} - -// a DirWriter has an add(entry) method, but its .write() doesn't -// do anything. Why a no-op rather than a throw? Because this -// leaves open the door for writing directory metadata for -// gnu/solaris style dumpdirs. 
-DirWriter.prototype.write = function () { - return true -} - -DirWriter.prototype.end = function () { - this._ended = true - this._process() -} - -DirWriter.prototype.add = function (entry) { - var me = this - - // console.error("\tadd", entry._path, "->", me._path) - collect(entry) - if (!me.ready || me._currentEntry) { - me._buffer.push(entry) - return false - } - - // create a new writer, and pipe the incoming entry into it. - if (me._ended) { - return me.error("add after end") - } - - me._buffer.push(entry) - me._process() - - return 0 === this._buffer.length -} - -DirWriter.prototype._process = function () { - var me = this - - // console.error("DW Process p=%j", me._processing, me.basename) - - if (me._processing) return - - var entry = me._buffer.shift() - if (!entry) { - // console.error("DW Drain") - me.emit("drain") - if (me._ended) me._finish() - return - } - - me._processing = true - // console.error("DW Entry", entry._path) - - me.emit("entry", entry) - - // ok, add this entry - // - // don't allow recursive copying - var p = entry - do { - var pp = p._path || p.path - if (pp === me.root._path || pp === me._path || - (pp && pp.indexOf(me._path) === 0)) { - // console.error("DW Exit (recursive)", entry.basename, me._path) - me._processing = false - if (entry._collected) entry.pipe() - return me._process() - } - } while (p = p.parent) - - // console.error("DW not recursive") - - // chop off the entry's root dir, replace with ours - var props = { parent: me - , root: me.root || me - , type: entry.type - , depth: me.depth + 1 } - - var p = entry._path || entry.path || entry.props.path - if (entry.parent) { - p = p.substr(entry.parent._path.length + 1) - } - // get rid of any ../../ shenanigans - props.path = path.join(me.path, path.join("/", p)) - - // if i have a filter, the child should inherit it. - props.filter = me.filter - - // all the rest of the stuff, copy over from the source. - Object.keys(entry.props).forEach(function (k) { - if (!props.hasOwnProperty(k)) { - props[k] = entry.props[k] - } - }) - - // not sure at this point what kind of writer this is. - var child = me._currentChild = new Writer(props) - child.on("ready", function () { - // console.error("DW Child Ready", child.type, child._path) - // console.error(" resuming", entry._path) - entry.pipe(child) - entry.resume() - }) - - // XXX Make this work in node. - // Long filenames should not break stuff. - child.on("error", function (er) { - if (child._swallowErrors) { - me.warn(er) - child.emit("end") - child.emit("close") - } else { - me.emit("error", er) - } - }) - - // we fire _end internally *after* end, so that we don't move on - // until any "end" listeners have had their chance to do stuff. 
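// [editor's note, not in the original file] onend below is guarded by a
// local `ended` flag so that processing resumes at most once per child,
// even if "close" fires more than once (as can happen when errors are
// swallowed and an end/close pair is faked).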
- child.on("close", onend) - var ended = false - function onend () { - if (ended) return - ended = true - // console.error("* DW Child end", child.basename) - me._currentChild = null - me._processing = false - me._process() - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js 2013-07-24 03:37:16.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -// Basically just a wrapper around an fs.ReadStream - -module.exports = FileReader - -var fs = require("graceful-fs") - , fstream = require("../fstream.js") - , Reader = fstream.Reader - , inherits = require("inherits") - , mkdir = require("mkdirp") - , Reader = require("./reader.js") - , EOF = {EOF: true} - , CLOSE = {CLOSE: true} - -inherits(FileReader, Reader) - -function FileReader (props) { - // console.error(" FR create", props.path, props.size, new Error().stack) - var me = this - if (!(me instanceof FileReader)) throw new Error( - "FileReader must be called as constructor.") - - // should already be established as a File type - // XXX Todo: preserve hardlinks by tracking dev+inode+nlink, - // with a HardLinkReader class. - if (!((props.type === "Link" && props.Link) || - (props.type === "File" && props.File))) { - throw new Error("Non-file type "+ props.type) - } - - me._buffer = [] - me._bytesEmitted = 0 - Reader.call(me, props) -} - -FileReader.prototype._getStream = function () { - var me = this - , stream = me._stream = fs.createReadStream(me._path, me.props) - - if (me.props.blksize) { - stream.bufferSize = me.props.blksize - } - - stream.on("open", me.emit.bind(me, "open")) - - stream.on("data", function (c) { - // console.error("\t\t%d %s", c.length, me.basename) - me._bytesEmitted += c.length - // no point saving empty chunks - if (!c.length) return - else if (me._paused || me._buffer.length) { - me._buffer.push(c) - me._read() - } else me.emit("data", c) - }) - - stream.on("end", function () { - if (me._paused || me._buffer.length) { - // console.error("FR Buffering End", me._path) - me._buffer.push(EOF) - me._read() - } else { - me.emit("end") - } - - if (me._bytesEmitted !== me.props.size) { - me.error("Didn't get expected byte count\n"+ - "expect: "+me.props.size + "\n" + - "actual: "+me._bytesEmitted) - } - }) - - stream.on("close", function () { - if (me._paused || me._buffer.length) { - // console.error("FR Buffering Close", me._path) - me._buffer.push(CLOSE) - me._read() - } else { - // console.error("FR close 1", me._path) - me.emit("close") - } - }) - - me._read() -} - -FileReader.prototype._read = function () { - var me = this - // console.error("FR _read", me._path) - if (me._paused) { - // console.error("FR _read paused", me._path) - return - } - - if (!me._stream) { - // console.error("FR _getStream calling", me._path) - return me._getStream() - } - - // clear out the buffer, if there is one. 
- if (me._buffer.length) { - // console.error("FR _read has buffer", me._buffer.length, me._path) - var buf = me._buffer - for (var i = 0, l = buf.length; i < l; i ++) { - var c = buf[i] - if (c === EOF) { - // console.error("FR Read emitting buffered end", me._path) - me.emit("end") - } else if (c === CLOSE) { - // console.error("FR Read emitting buffered close", me._path) - me.emit("close") - } else { - // console.error("FR Read emitting buffered data", me._path) - me.emit("data", c) - } - - if (me._paused) { - // console.error("FR Read Re-pausing at "+i, me._path) - me._buffer = buf.slice(i) - return - } - } - me._buffer.length = 0 - } - // console.error("FR _read done") - // that's about all there is to it. -} - -FileReader.prototype.pause = function (who) { - var me = this - // console.error("FR Pause", me._path) - if (me._paused) return - who = who || me - me._paused = true - if (me._stream) me._stream.pause() - me.emit("pause", who) -} - -FileReader.prototype.resume = function (who) { - var me = this - // console.error("FR Resume", me._path) - if (!me._paused) return - who = who || me - me.emit("resume", who) - me._paused = false - if (me._stream) me._stream.resume() - me._read() -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js 2013-07-24 03:37:22.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,100 +0,0 @@ -module.exports = FileWriter - -var fs = require("graceful-fs") - , mkdir = require("mkdirp") - , Writer = require("./writer.js") - , inherits = require("inherits") - , EOF = {} - -inherits(FileWriter, Writer) - -function FileWriter (props) { - var me = this - if (!(me instanceof FileWriter)) throw new Error( - "FileWriter must be called as constructor.") - - // should already be established as a File type - if (props.type !== "File" || !props.File) { - throw new Error("Non-file type "+ props.type) - } - - me._buffer = [] - me._bytesWritten = 0 - - Writer.call(this, props) -} - -FileWriter.prototype._create = function () { - var me = this - if (me._stream) return - - var so = {} - if (me.props.flags) so.flags = me.props.flags - so.mode = Writer.filemode - if (me._old && me._old.blksize) so.bufferSize = me._old.blksize - - me._stream = fs.createWriteStream(me._path, so) - - me._stream.on("open", function (fd) { - // console.error("FW open", me._buffer, me._path) - me.ready = true - me._buffer.forEach(function (c) { - if (c === EOF) me._stream.end() - else me._stream.write(c) - }) - me.emit("ready") - // give this a kick just in case it needs it. - me.emit("drain") - }) - - me._stream.on("drain", function () { me.emit("drain") }) - - me._stream.on("close", function () { - // console.error("\n\nFW Stream Close", me._path, me.size) - me._finish() - }) -} - -FileWriter.prototype.write = function (c) { - var me = this - - me._bytesWritten += c.length - - if (!me.ready) { - if (!Buffer.isBuffer(c) && typeof c !== 'string') - throw new Error('invalid write data') - me._buffer.push(c) - return false - } - - var ret = me._stream.write(c) - // console.error("\t-- fw wrote, _stream says", ret, me._stream._queue.length) - - // allow 2 buffered writes, because otherwise there's just too - // much stop and go bs. 
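// [editor's note, not in the original file] i.e. keep reporting "writable"
// while the underlying WriteStream has at most two queued writes, trading
// strict backpressure for fewer pause/resume round trips. Callers see:
//
//   fileWriter.write(chunk)   // false only once the queue grows past 2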
- return ret || (me._stream._queue && me._stream._queue.length <= 2) -} - -FileWriter.prototype.end = function (c) { - var me = this - - if (c) me.write(c) - - if (!me.ready) { - me._buffer.push(EOF) - return false - } - - return me._stream.end() -} - -FileWriter.prototype._finish = function () { - var me = this - if (typeof me.size === "number" && me._bytesWritten != me.size) { - me.error( - "Did not get expected byte count.\n" + - "expect: " + me.size + "\n" + - "actual: " + me._bytesWritten) - } - Writer.prototype._finish.call(me) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/get-type.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/get-type.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/get-type.js 2012-12-28 00:57:42.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/get-type.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -module.exports = getType - -function getType (st) { - var types = - [ "Directory" - , "File" - , "SymbolicLink" - , "Link" // special for hardlinks from tarballs - , "BlockDevice" - , "CharacterDevice" - , "FIFO" - , "Socket" ] - , type - - if (st.type && -1 !== types.indexOf(st.type)) { - st[st.type] = true - return st.type - } - - for (var i = 0, l = types.length; i < l; i ++) { - type = types[i] - var is = st[type] || st["is" + type] - if (typeof is === "function") is = is.call(st) - if (is) { - st[type] = true - st.type = type - return type - } - } - - return null -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js 2013-07-24 03:37:28.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -// Basically just a wrapper around an fs.readlink -// -// XXX: Enhance this to support the Link type, by keeping -// a lookup table of {:}, so that hardlinks -// can be preserved in tarballs. - -module.exports = LinkReader - -var fs = require("graceful-fs") - , fstream = require("../fstream.js") - , inherits = require("inherits") - , mkdir = require("mkdirp") - , Reader = require("./reader.js") - -inherits(LinkReader, Reader) - -function LinkReader (props) { - var me = this - if (!(me instanceof LinkReader)) throw new Error( - "LinkReader must be called as constructor.") - - if (!((props.type === "Link" && props.Link) || - (props.type === "SymbolicLink" && props.SymbolicLink))) { - throw new Error("Non-link type "+ props.type) - } - - Reader.call(me, props) -} - -// When piping a LinkReader into a LinkWriter, we have to -// already have the linkpath property set, so that has to -// happen *before* the "ready" event, which means we need to -// override the _stat method. 
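// [editor's sketch, not part of the original file] the resulting ordering:
//
//   fs.readlink(path)
//     -> me.linkpath / props.linkpath set
//     -> emit("linkpath")
//     -> Reader.prototype._stat()  -> emit("stat"), emit("ready")
//
// so a piped LinkWriter can rely on linkpath being present by "ready".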
-LinkReader.prototype._stat = function (currentStat) { - var me = this - fs.readlink(me._path, function (er, linkpath) { - if (er) return me.error(er) - me.linkpath = me.props.linkpath = linkpath - me.emit("linkpath", linkpath) - Reader.prototype._stat.call(me, currentStat) - }) -} - -LinkReader.prototype._read = function () { - var me = this - if (me._paused) return - // basically just a no-op, since we got all the info we need - // from the _stat method - if (!me._ended) { - me.emit("end") - me.emit("close") - me._ended = true - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js 2013-07-24 03:37:34.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,95 +0,0 @@ - -module.exports = LinkWriter - -var fs = require("graceful-fs") - , Writer = require("./writer.js") - , inherits = require("inherits") - , path = require("path") - , rimraf = require("rimraf") - -inherits(LinkWriter, Writer) - -function LinkWriter (props) { - var me = this - if (!(me instanceof LinkWriter)) throw new Error( - "LinkWriter must be called as constructor.") - - // should already be established as a Link type - if (!((props.type === "Link" && props.Link) || - (props.type === "SymbolicLink" && props.SymbolicLink))) { - throw new Error("Non-link type "+ props.type) - } - - if (props.linkpath === "") props.linkpath = "." - if (!props.linkpath) { - me.error("Need linkpath property to create " + props.type) - } - - Writer.call(this, props) -} - -LinkWriter.prototype._create = function () { - // console.error(" LW _create") - var me = this - , hard = me.type === "Link" || process.platform === "win32" - , link = hard ? "link" : "symlink" - , lp = hard ? path.resolve(me.dirname, me.linkpath) : me.linkpath - - // can only change the link path by clobbering - // For hard links, let's just assume that's always the case, since - // there's no good way to read them if we don't already know. - if (hard) return clobber(me, lp, link) - - fs.readlink(me._path, function (er, p) { - // only skip creation if it's exactly the same link - if (p && p === lp) return finish(me) - clobber(me, lp, link) - }) -} - -function clobber (me, lp, link) { - rimraf(me._path, function (er) { - if (er) return me.error(er) - create(me, lp, link) - }) -} - -function create (me, lp, link) { - fs[link](lp, me._path, function (er) { - // if this is a hard link, and we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier in. - // Additionally, an EPERM or EACCES can happen on win32 if it's trying - // to make a link to a directory. Again, just skip it. - // A better solution would be to have fs.symlink be supported on - // windows in some nice fashion. 
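// [editor's note, not in the original file] concretely, the codes swallowed
// below are ENOENT (link target not created yet) plus EACCES/EPERM (win32
// link-to-directory or missing symlink privilege); on other platforms every
// one of them still surfaces through me.error(er).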
- if (er) { - if ((er.code === "ENOENT" || - er.code === "EACCES" || - er.code === "EPERM" ) && process.platform === "win32") { - me.ready = true - me.emit("ready") - me.emit("end") - me.emit("close") - me.end = me._finish = function () {} - } else return me.error(er) - } - finish(me) - }) -} - -function finish (me) { - me.ready = true - me.emit("ready") - if (me._ended && !me._finished) me._finish() -} - -LinkWriter.prototype.end = function () { - // console.error("LW finish in end") - this._ended = true - if (this.ready) { - this._finished = true - this._finish() - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js 2013-07-24 03:37:43.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -// A reader for when we don't yet know what kind of thing -// the thing is. - -module.exports = ProxyReader - -var Reader = require("./reader.js") - , getType = require("./get-type.js") - , inherits = require("inherits") - , fs = require("graceful-fs") - -inherits(ProxyReader, Reader) - -function ProxyReader (props) { - var me = this - if (!(me instanceof ProxyReader)) throw new Error( - "ProxyReader must be called as constructor.") - - me.props = props - me._buffer = [] - me.ready = false - - Reader.call(me, props) -} - -ProxyReader.prototype._stat = function () { - var me = this - , props = me.props - // stat the thing to see what the proxy should be. - , stat = props.follow ? "stat" : "lstat" - - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = "File" - } else { - type = getType(current) - } - - props[type] = true - props.type = me.type = type - - me._old = current - me._addProxy(Reader(props, current)) - }) -} - -ProxyReader.prototype._addProxy = function (proxy) { - var me = this - if (me._proxyTarget) { - return me.error("proxy already set") - } - - me._proxyTarget = proxy - proxy._proxy = me - - ; [ "error" - , "data" - , "end" - , "close" - , "linkpath" - , "entry" - , "entryEnd" - , "child" - , "childEnd" - , "warn" - , "stat" - ].forEach(function (ev) { - // console.error("~~ proxy event", ev, me.path) - proxy.on(ev, me.emit.bind(me, ev)) - }) - - me.emit("proxy", proxy) - - proxy.on("ready", function () { - // console.error("~~ proxy is ready!", me.path) - me.ready = true - me.emit("ready") - }) - - var calls = me._buffer - me._buffer.length = 0 - calls.forEach(function (c) { - proxy[c[0]].apply(proxy, c[1]) - }) -} - -ProxyReader.prototype.pause = function () { - return this._proxyTarget ? this._proxyTarget.pause() : false -} - -ProxyReader.prototype.resume = function () { - return this._proxyTarget ? this._proxyTarget.resume() : false -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js 2013-07-24 03:37:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,109 +0,0 @@ -// A writer for when we don't know what kind of thing -// the thing is. 
That is, it's not explicitly set, -// so we're going to make it whatever the thing already -// is, or "File" -// -// Until then, collect all events. - -module.exports = ProxyWriter - -var Writer = require("./writer.js") - , getType = require("./get-type.js") - , inherits = require("inherits") - , collect = require("./collect.js") - , fs = require("fs") - -inherits(ProxyWriter, Writer) - -function ProxyWriter (props) { - var me = this - if (!(me instanceof ProxyWriter)) throw new Error( - "ProxyWriter must be called as constructor.") - - me.props = props - me._needDrain = false - - Writer.call(me, props) -} - -ProxyWriter.prototype._stat = function () { - var me = this - , props = me.props - // stat the thing to see what the proxy should be. - , stat = props.follow ? "stat" : "lstat" - - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = "File" - } else { - type = getType(current) - } - - props[type] = true - props.type = me.type = type - - me._old = current - me._addProxy(Writer(props, current)) - }) -} - -ProxyWriter.prototype._addProxy = function (proxy) { - // console.error("~~ set proxy", this.path) - var me = this - if (me._proxy) { - return me.error("proxy already set") - } - - me._proxy = proxy - ; [ "ready" - , "error" - , "close" - , "pipe" - , "drain" - , "warn" - ].forEach(function (ev) { - proxy.on(ev, me.emit.bind(me, ev)) - }) - - me.emit("proxy", proxy) - - var calls = me._buffer - calls.forEach(function (c) { - // console.error("~~ ~~ proxy buffered call", c[0], c[1]) - proxy[c[0]].apply(proxy, c[1]) - }) - me._buffer.length = 0 - if (me._needsDrain) me.emit("drain") -} - -ProxyWriter.prototype.add = function (entry) { - // console.error("~~ proxy add") - collect(entry) - - if (!this._proxy) { - this._buffer.push(["add", [entry]]) - this._needDrain = true - return false - } - return this._proxy.add(entry) -} - -ProxyWriter.prototype.write = function (c) { - // console.error("~~ proxy write") - if (!this._proxy) { - this._buffer.push(["write", [c]]) - this._needDrain = true - return false - } - return this._proxy.write(c) -} - -ProxyWriter.prototype.end = function (c) { - // console.error("~~ proxy end") - if (!this._proxy) { - this._buffer.push(["end", [c]]) - return false - } - return this._proxy.end(c) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/reader.js 2013-07-24 03:37:54.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,259 +0,0 @@ - -module.exports = Reader - -var fs = require("graceful-fs") - , Stream = require("stream").Stream - , inherits = require("inherits") - , path = require("path") - , getType = require("./get-type.js") - , hardLinks = Reader.hardLinks = {} - , Abstract = require("./abstract.js") - -// Must do this *before* loading the child classes -inherits(Reader, Abstract) - -var DirReader = require("./dir-reader.js") - , FileReader = require("./file-reader.js") - , LinkReader = require("./link-reader.js") - , SocketReader = require("./socket-reader.js") - , ProxyReader = require("./proxy-reader.js") - -function Reader (props, currentStat) { - var me = this - if (!(me instanceof Reader)) return new Reader(props, currentStat) - - if (typeof props === "string") { - props = { path: props } - } - - if (!props.path) { - 
me.error("Must provide a path", null, true) - } - - // polymorphism. - // call fstream.Reader(dir) to get a DirReader object, etc. - // Note that, unlike in the Writer case, ProxyReader is going - // to be the *normal* state of affairs, since we rarely know - // the type of a file prior to reading it. - - - var type - , ClassType - - if (props.type && typeof props.type === "function") { - type = props.type - ClassType = type - } else { - type = getType(props) - ClassType = Reader - } - - if (currentStat && !type) { - type = getType(currentStat) - props[type] = true - props.type = type - } - - switch (type) { - case "Directory": - ClassType = DirReader - break - - case "Link": - // XXX hard links are just files. - // However, it would be good to keep track of files' dev+inode - // and nlink values, and create a HardLinkReader that emits - // a linkpath value of the original copy, so that the tar - // writer can preserve them. - // ClassType = HardLinkReader - // break - - case "File": - ClassType = FileReader - break - - case "SymbolicLink": - ClassType = LinkReader - break - - case "Socket": - ClassType = SocketReader - break - - case null: - ClassType = ProxyReader - break - } - - if (!(me instanceof ClassType)) { - return new ClassType(props) - } - - Abstract.call(me) - - me.readable = true - me.writable = false - - me.type = type - me.props = props - me.depth = props.depth = props.depth || 0 - me.parent = props.parent || null - me.root = props.root || (props.parent && props.parent.root) || me - - me._path = me.path = path.resolve(props.path) - if (process.platform === "win32") { - me.path = me._path = me.path.replace(/\?/g, "_") - if (me._path.length >= 260) { - // how DOES one create files on the moon? - // if the path has spaces in it, then UNC will fail. - me._swallowErrors = true - //if (me._path.indexOf(" ") === -1) { - me._path = "\\\\?\\" + me.path.replace(/\//g, "\\") - //} - } - } - me.basename = props.basename = path.basename(me.path) - me.dirname = props.dirname = path.dirname(me.path) - - // these have served their purpose, and are now just noisy clutter - props.parent = props.root = null - - // console.error("\n\n\n%s setting size to", props.path, props.size) - me.size = props.size - me.filter = typeof props.filter === "function" ? props.filter : null - if (props.sort === "alpha") props.sort = alphasort - - // start the ball rolling. - // this will stat the thing, and then call me._read() - // to start reading whatever it is. - // console.error("calling stat", props.path, currentStat) - me._stat(currentStat) -} - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - -Reader.prototype._stat = function (currentStat) { - var me = this - , props = me.props - , stat = props.follow ? "stat" : "lstat" - - // console.error("Reader._stat", me._path, currentStat) - if (currentStat) process.nextTick(statCb.bind(null, null, currentStat)) - else fs[stat](me._path, statCb) - - - function statCb (er, props_) { - // console.error("Reader._stat, statCb", me._path, props_, props_.nlink) - if (er) return me.error(er) - - Object.keys(props_).forEach(function (k) { - props[k] = props_[k] - }) - - // if it's not the expected size, then abort here. - if (undefined !== me.size && props.size !== me.size) { - return me.error("incorrect size") - } - me.size = props.size - - var type = getType(props) - // special little thing for handling hardlinks. 
- if (type !== "Directory" && props.nlink && props.nlink > 1) { - var k = props.dev + ":" + props.ino - // console.error("Reader has nlink", me._path, k) - if (hardLinks[k] === me._path || !hardLinks[k]) hardLinks[k] = me._path - else { - // switch into hardlink mode. - type = me.type = me.props.type = "Link" - me.Link = me.props.Link = true - me.linkpath = me.props.linkpath = hardLinks[k] - // console.error("Hardlink detected, switching mode", me._path, me.linkpath) - // Setting __proto__ would arguably be the "correct" - // approach here, but that just seems too wrong. - me._stat = me._read = LinkReader.prototype._read - } - } - - if (me.type && me.type !== type) { - me.error("Unexpected type: " + type) - } - - // if the filter doesn't pass, then just skip over this one. - // still have to emit end so that dir-walking can move on. - if (me.filter) { - var who = me._proxy || me - // special handling for ProxyReaders - if (!me.filter.call(who, who, props)) { - if (!me._disowned) { - me.abort() - me.emit("end") - me.emit("close") - } - return - } - } - - // last chance to abort or disown before the flow starts! - var events = ["_stat", "stat", "ready"] - var e = 0 - ;(function go () { - if (me._aborted) { - me.emit("end") - me.emit("close") - return - } - - if (me._paused) { - me.once("resume", go) - return - } - - var ev = events[e ++] - if (!ev) return me._read() - me.emit(ev, props) - go() - })() - } -} - -Reader.prototype.pipe = function (dest, opts) { - var me = this - if (typeof dest.add === "function") { - // piping to a multi-compatible, and we've got directory entries. - me.on("entry", function (entry) { - var ret = dest.add(entry) - if (false === ret) { - me.pause() - } - }) - } - - // console.error("R Pipe apply Stream Pipe") - return Stream.prototype.pipe.apply(this, arguments) -} - -Reader.prototype.pause = function (who) { - this._paused = true - who = who || this - this.emit("pause", who) - if (this._stream) this._stream.pause(who) -} - -Reader.prototype.resume = function (who) { - this._paused = false - who = who || this - this.emit("resume", who) - if (this._stream) this._stream.resume(who) - this._read() -} - -Reader.prototype._read = function () { - this.error("Cannot read unknown type: "+this.type) -} - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js 2013-07-24 03:37:59.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -// Just get the stats, and then don't do anything. -// You can't really "read" from a socket. You "connect" to it. -// Mostly, this is here so that reading a dir with a socket in it -// doesn't blow up. 
- -module.exports = SocketReader - -var fs = require("graceful-fs") - , fstream = require("../fstream.js") - , inherits = require("inherits") - , mkdir = require("mkdirp") - , Reader = require("./reader.js") - -inherits(SocketReader, Reader) - -function SocketReader (props) { - var me = this - if (!(me instanceof SocketReader)) throw new Error( - "SocketReader must be called as constructor.") - - if (!(props.type === "Socket" && props.Socket)) { - throw new Error("Non-socket type "+ props.type) - } - - Reader.call(me, props) -} - -SocketReader.prototype._read = function () { - var me = this - if (me._paused) return - // basically just a no-op, since we got all the info we have - // from the _stat method - if (!me._ended) { - me.emit("end") - me.emit("close") - me._ended = true - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/lib/writer.js 2013-07-24 03:38:06.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/lib/writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,389 +0,0 @@ - -module.exports = Writer - -var fs = require("graceful-fs") - , inherits = require("inherits") - , rimraf = require("rimraf") - , mkdir = require("mkdirp") - , path = require("path") - , umask = process.platform === "win32" ? 0 : process.umask() - , getType = require("./get-type.js") - , Abstract = require("./abstract.js") - -// Must do this *before* loading the child classes -inherits(Writer, Abstract) - -Writer.dirmode = 0777 & (~umask) -Writer.filemode = 0666 & (~umask) - -var DirWriter = require("./dir-writer.js") - , LinkWriter = require("./link-writer.js") - , FileWriter = require("./file-writer.js") - , ProxyWriter = require("./proxy-writer.js") - -// props is the desired state. current is optionally the current stat, -// provided here so that subclasses can avoid statting the target -// more than necessary. -function Writer (props, current) { - var me = this - - if (typeof props === "string") { - props = { path: props } - } - - if (!props.path) me.error("Must provide a path", null, true) - - // polymorphism. - // call fstream.Writer(dir) to get a DirWriter object, etc. - var type = getType(props) - , ClassType = Writer - - switch (type) { - case "Directory": - ClassType = DirWriter - break - case "File": - ClassType = FileWriter - break - case "Link": - case "SymbolicLink": - ClassType = LinkWriter - break - case null: - // Don't know yet what type to create, so we wrap in a proxy. - ClassType = ProxyWriter - break - } - - if (!(me instanceof ClassType)) return new ClassType(props) - - // now get down to business. - - Abstract.call(me) - - // props is what we want to set. - // set some convenience properties as well. - me.type = props.type - me.props = props - me.depth = props.depth || 0 - me.clobber = false === props.clobber ? 
props.clobber : true - me.parent = props.parent || null - me.root = props.root || (props.parent && props.parent.root) || me - - me._path = me.path = path.resolve(props.path) - if (process.platform === "win32") { - me.path = me._path = me.path.replace(/\?/g, "_") - if (me._path.length >= 260) { - me._swallowErrors = true - me._path = "\\\\?\\" + me.path.replace(/\//g, "\\") - } - } - me.basename = path.basename(props.path) - me.dirname = path.dirname(props.path) - me.linkpath = props.linkpath || null - - props.parent = props.root = null - - // console.error("\n\n\n%s setting size to", props.path, props.size) - me.size = props.size - - if (typeof props.mode === "string") { - props.mode = parseInt(props.mode, 8) - } - - me.readable = false - me.writable = true - - // buffer until ready, or while handling another entry - me._buffer = [] - me.ready = false - - me.filter = typeof props.filter === "function" ? props.filter: null - - // start the ball rolling. - // this checks what's there already, and then calls - // me._create() to call the impl-specific creation stuff. - me._stat(current) -} - -// Calling this means that it's something we can't create. -// Just assert that it's already there, otherwise raise a warning. -Writer.prototype._create = function () { - var me = this - fs[me.props.follow ? "stat" : "lstat"](me._path, function (er, current) { - if (er) { - return me.warn("Cannot create " + me._path + "\n" + - "Unsupported type: "+me.type, "ENOTSUP") - } - me._finish() - }) -} - -Writer.prototype._stat = function (current) { - var me = this - , props = me.props - , stat = props.follow ? "stat" : "lstat" - , who = me._proxy || me - - if (current) statCb(null, current) - else fs[stat](me._path, statCb) - - function statCb (er, current) { - if (me.filter && !me.filter.call(who, who, current)) { - me._aborted = true - me.emit("end") - me.emit("close") - return - } - - // if it's not there, great. We'll just create it. - // if it is there, then we'll need to change whatever differs - if (er || !current) { - return create(me) - } - - me._old = current - var currentType = getType(current) - - // if it's a type change, then we need to clobber or error. - // if it's not a type change, then let the impl take care of it. - if (currentType !== me.type) { - return rimraf(me._path, function (er) { - if (er) return me.error(er) - me._old = null - create(me) - }) - } - - // otherwise, just handle in the app-specific way - // this creates a fs.WriteStream, or mkdir's, or whatever - create(me) - } -} - -function create (me) { - // console.error("W create", me._path, Writer.dirmode) - - // XXX Need to clobber non-dirs that are in the way, - // unless { clobber: false } in the props. - mkdir(path.dirname(me._path), Writer.dirmode, function (er, made) { - // console.error("W created", path.dirname(me._path), er) - if (er) return me.error(er) - - // later on, we have to set the mode and owner for these - me._madeDir = made - return me._create() - }) -} - -function endChmod (me, want, current, path, cb) { - var wantMode = want.mode - , chmod = want.follow || me.type !== "SymbolicLink" - ? "chmod" : "lchmod" - - if (!fs[chmod]) return cb() - if (typeof wantMode !== "number") return cb() - - var curMode = current.mode & 0777 - wantMode = wantMode & 0777 - if (wantMode === curMode) return cb() - - fs[chmod](path, wantMode, cb) -} - - -function endChown (me, want, current, path, cb) { - // Don't even try it unless root. Too easy to EPERM. 
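// [editor's note, not in the original file] the non-root early return just
// below is inert: "!process.getuid() === 0" negates first, and a boolean is
// never === 0, so it only triggers when process.getuid is missing entirely.
// The comment's intent reads as "process.getuid() !== 0".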
- if (process.platform === "win32") return cb() - if (!process.getuid || !process.getuid() === 0) return cb() - if (typeof want.uid !== "number" && - typeof want.gid !== "number" ) return cb() - - if (current.uid === want.uid && - current.gid === want.gid) return cb() - - var chown = (me.props.follow || me.type !== "SymbolicLink") - ? "chown" : "lchown" - if (!fs[chown]) return cb() - - if (typeof want.uid !== "number") want.uid = current.uid - if (typeof want.gid !== "number") want.gid = current.gid - - fs[chown](path, want.uid, want.gid, cb) -} - -function endUtimes (me, want, current, path, cb) { - if (!fs.utimes || process.platform === "win32") return cb() - - var utimes = (want.follow || me.type !== "SymbolicLink") - ? "utimes" : "lutimes" - - if (utimes === "lutimes" && !fs[utimes]) { - utimes = "utimes" - } - - if (!fs[utimes]) return cb() - - var curA = current.atime - , curM = current.mtime - , meA = want.atime - , meM = want.mtime - - if (meA === undefined) meA = curA - if (meM === undefined) meM = curM - - if (!isDate(meA)) meA = new Date(meA) - if (!isDate(meM)) meA = new Date(meM) - - if (meA.getTime() === curA.getTime() && - meM.getTime() === curM.getTime()) return cb() - - fs[utimes](path, meA, meM, cb) -} - - -// XXX This function is beastly. Break it up! -Writer.prototype._finish = function () { - var me = this - - // console.error(" W Finish", me._path, me.size) - - // set up all the things. - // At this point, we're already done writing whatever we've gotta write, - // adding files to the dir, etc. - var todo = 0 - var errState = null - var done = false - - if (me._old) { - // the times will almost *certainly* have changed. - // adds the utimes syscall, but remove another stat. - me._old.atime = new Date(0) - me._old.mtime = new Date(0) - // console.error(" W Finish Stale Stat", me._path, me.size) - setProps(me._old) - } else { - var stat = me.props.follow ? "stat" : "lstat" - // console.error(" W Finish Stating", me._path, me.size) - fs[stat](me._path, function (er, current) { - // console.error(" W Finish Stated", me._path, me.size, current) - if (er) { - // if we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier on. - if (er.code === "ENOENT" && - (me.type === "Link" || me.type === "SymbolicLink") && - process.platform === "win32") { - me.ready = true - me.emit("ready") - me.emit("end") - me.emit("close") - me.end = me._finish = function () {} - return - } else return me.error(er) - } - setProps(me._old = current) - }) - } - - return - - function setProps (current) { - todo += 3 - endChmod(me, me.props, current, me._path, next("chmod")) - endChown(me, me.props, current, me._path, next("chown")) - endUtimes(me, me.props, current, me._path, next("utimes")) - } - - function next (what) { - return function (er) { - // console.error(" W Finish", what, todo) - if (errState) return - if (er) { - er.fstream_finish_call = what - return me.error(errState = er) - } - if (--todo > 0) return - if (done) return - done = true - - // we may still need to set the mode/etc. on some parent dirs - // that were created previously. delay end/close until then. - if (!me._madeDir) return end() - else endMadeDir(me, me._path, end) - - function end (er) { - if (er) { - er.fstream_finish_call = "setupMadeDir" - return me.error(er) - } - // all the props have been set, so we're completely done. 
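// [editor's note, not in the original file] by this point endMadeDir (defined
// further below) has also re-applied mode/owner/utimes to every directory
// that mkdirp created implicitly, so "end"/"close" really do mean done.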
- me.emit("end") - me.emit("close") - } - } - } -} - -function endMadeDir (me, p, cb) { - var made = me._madeDir - // everything *between* made and path.dirname(me._path) - // needs to be set up. Note that this may just be one dir. - var d = path.dirname(p) - - endMadeDir_(me, d, function (er) { - if (er) return cb(er) - if (d === made) { - return cb() - } - endMadeDir(me, d, cb) - }) -} - -function endMadeDir_ (me, p, cb) { - var dirProps = {} - Object.keys(me.props).forEach(function (k) { - dirProps[k] = me.props[k] - - // only make non-readable dirs if explicitly requested. - if (k === "mode" && me.type !== "Directory") { - dirProps[k] = dirProps[k] | 0111 - } - }) - - var todo = 3 - , errState = null - fs.stat(p, function (er, current) { - if (er) return cb(errState = er) - endChmod(me, dirProps, current, p, next) - endChown(me, dirProps, current, p, next) - endUtimes(me, dirProps, current, p, next) - }) - - function next (er) { - if (errState) return - if (er) return cb(errState = er) - if (-- todo === 0) return cb() - } -} - -Writer.prototype.pipe = function () { - this.error("Can't pipe from writable stream") -} - -Writer.prototype.add = function () { - this.error("Cannot add to non-Directory type") -} - -Writer.prototype.write = function () { - return true -} - -function objectToString (d) { - return Object.prototype.toString.call(d) -} - -function isDate(d) { - return typeof d === 'object' && objectToString(d) === '[object Date]'; -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/LICENSE 2013-08-19 22:09:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/README.md 2013-05-16 14:24:38.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,42 +0,0 @@ -Browser-friendly inheritance fully compatible with standard node.js -[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). 
- -This package exports standard `inherits` from node.js `util` module in -node environment, but also provides alternative browser-friendly -implementation through [browser -field](https://gist.github.com/shtylman/4339901). Alternative -implementation is a literal copy of standard one located in standalone -module to avoid requiring of `util`. It also has a shim for old -browsers with no `Object.create` support. - -While keeping you sure you are using standard `inherits` -implementation in node.js environment, it allows bundlers such as -[browserify](https://github.com/substack/node-browserify) to not -include full `util` package to your client code if all you need is -just `inherits` function. It worth, because browser shim for `util` -package is large and `inherits` is often the single function you need -from it. - -It's recommended to use this package instead of -`require('util').inherits` for any code that has chances to be used -not only in node.js but in browser too. - -## usage - -```js -var inherits = require('inherits'); -// then use exactly as the standard one -``` - -## note on version ~1.0 - -Version ~1.0 had completely different motivation and is not compatible -neither with 2.0 nor with standard node.js `inherits`. - -If you are using version ~1.0 and planning to switch to ~2.0, be -careful: - -* new version uses `super_` instead of `super` for referencing - superclass -* new version overwrites current prototype while old one preserves any - existing fields on it diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits.js 2013-05-16 14:22:57.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits.js 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module.exports = require('util').inherits diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits_browser.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits_browser.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits_browser.js 2013-05-16 14:39:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/inherits_browser.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/package.json 2013-10-25 
01:43:03.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -{ - "name": "inherits", - "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "version": "2.0.1", - "keywords": [ - "inheritance", - "class", - "klass", - "oop", - "object-oriented", - "inherits", - "browser", - "browserify" - ], - "main": "./inherits.js", - "browser": "./inherits_browser.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/inherits" - }, - "license": "ISC", - "scripts": { - "test": "node test" - }, - "readme": "Browser-friendly inheritance fully compatible with standard node.js\n[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).\n\nThis package exports standard `inherits` from node.js `util` module in\nnode environment, but also provides alternative browser-friendly\nimplementation through [browser\nfield](https://gist.github.com/shtylman/4339901). Alternative\nimplementation is a literal copy of standard one located in standalone\nmodule to avoid requiring of `util`. It also has a shim for old\nbrowsers with no `Object.create` support.\n\nWhile keeping you sure you are using standard `inherits`\nimplementation in node.js environment, it allows bundlers such as\n[browserify](https://github.com/substack/node-browserify) to not\ninclude full `util` package to your client code if all you need is\njust `inherits` function. It worth, because browser shim for `util`\npackage is large and `inherits` is often the single function you need\nfrom it.\n\nIt's recommended to use this package instead of\n`require('util').inherits` for any code that has chances to be used\nnot only in node.js but in browser too.\n\n## usage\n\n```js\nvar inherits = require('inherits');\n// then use exactly as the standard one\n```\n\n## note on version ~1.0\n\nVersion ~1.0 had completely different motivation and is not compatible\nneither with 2.0 nor with standard node.js `inherits`.\n\nIf you are using version ~1.0 and planning to switch to ~2.0, be\ncareful:\n\n* new version uses `super_` instead of `super` for referencing\n superclass\n* new version overwrites current prototype while old one preserves any\n existing fields on it\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/inherits/issues" - }, - "_id": "inherits@2.0.1", - "_from": "inherits@~2.0.0" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/test.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/test.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/test.js 2013-05-16 14:43:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/node_modules/inherits/test.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -var inherits = require('./inherits.js') -var assert = require('assert') - -function test(c) { - assert(c.constructor === Child) - assert(c.constructor.super_ === Parent) - assert(Object.getPrototypeOf(c) === Child.prototype) - assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype) - assert(c instanceof Child) - assert(c instanceof Parent) -} - -function Child() { - Parent.call(this) - test(this) -} - -function Parent() {} - -inherits(Child, Parent) - -var c = new Child -test(c) - -console.log('ok') diff -Nru 
tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/fstream/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/fstream/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,42 +0,0 @@ -{ - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "name": "fstream", - "description": "Advanced file system stream things", - "version": "0.1.24", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/fstream.git" - }, - "main": "fstream.js", - "engines": { - "node": ">=0.6" - }, - "dependencies": { - "rimraf": "2", - "mkdirp": "0.3", - "graceful-fs": "~2.0.0", - "inherits": "~2.0.0" - }, - "devDependencies": { - "tap": "" - }, - "scripts": { - "test": "tap examples/*.js" - }, - "license": "BSD", - "readme": "Like FS streams, but with stat on them, and supporting directories and\nsymbolic links, as well as normal files. Also, you can use this to set\nthe stats on a file, even if you don't change its contents, or to create\na symlink, etc.\n\nSo, for example, you can \"write\" a directory, and it'll call `mkdir`. You\ncan specify a uid and gid, and it'll call `chown`. You can specify a\n`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink\nand provide a `linkpath` and it'll call `symlink`.\n\nNote that it won't automatically resolve symbolic links. So, if you\ncall `fstream.Reader('/some/symlink')` then you'll get an object\nthat stats and then ends immediately (since it has no data). To follow\nsymbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:\ntrue })`.\n\nThere are various checks to make sure that the bytes emitted are the\nsame as the intended size, if the size is set.\n\n## Examples\n\n```javascript\nfstream\n .Writer({ path: \"path/to/file\"\n , mode: 0755\n , size: 6\n })\n .write(\"hello\\n\")\n .end()\n```\n\nThis will create the directories if they're missing, and then write\n`hello\\n` into the file, chmod it to 0755, and assert that 6 bytes have\nbeen written when it's done.\n\n```javascript\nfstream\n .Writer({ path: \"path/to/file\"\n , mode: 0755\n , size: 6\n , flags: \"a\"\n })\n .write(\"hello\\n\")\n .end()\n```\n\nYou can pass flags in, if you want to append to a file.\n\n```javascript\nfstream\n .Writer({ path: \"path/to/symlink\"\n , linkpath: \"./file\"\n , SymbolicLink: true\n , mode: \"0755\" // octal strings supported\n })\n .end()\n```\n\nIf isSymbolicLink is a function, it'll be called, and if it returns\ntrue, then it'll treat it as a symlink. If it's not a function, then\nany truish value will make a symlink, or you can set `type:\n'SymbolicLink'`, which does the same thing.\n\nNote that the linkpath is relative to the symbolic link location, not\nthe parent dir or cwd.\n\n```javascript\nfstream\n .Reader(\"path/to/dir\")\n .pipe(fstream.Writer(\"path/to/other/dir\"))\n```\n\nThis will do like `cp -Rp path/to/dir path/to/other/dir`. If the other\ndir exists and isn't a directory, then it'll emit an error. It'll also\nset the uid, gid, mode, etc. to be identical. 
In this way, it's more\nlike `rsync -a` than simply a copy.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/fstream/issues" - }, - "_id": "fstream@0.1.24", - "dist": { - "shasum": "7c1fe122afc8612ab1946590ba68558b37c64384" - }, - "_from": "fstream@0", - "_resolved": "https://registry.npmjs.org/fstream/-/fstream-0.1.24.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/.npmignore 2011-11-28 22:53:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -node_modules/ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/LICENSE 2013-05-27 04:05:36.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/README.md 2013-07-11 15:25:53.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -# graceful-fs - -graceful-fs functions as a drop-in replacement for the fs module, -making various improvements. - -The improvements are meant to normalize behavior across different -platforms and environments, and to make filesystem access more -resilient to errors. - -## Improvements over fs module - -graceful-fs: - -* Queues up `open` and `readdir` calls, and retries them once - something closes if there is an EMFILE error from too many file - descriptors. -* fixes `lchmod` for Node versions prior to 0.6.2. -* implements `fs.lutimes` if possible. 
Otherwise it becomes a noop. -* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or - `lchown` if the user isn't root. -* makes `lchmod` and `lchown` become noops, if not available. -* retries reading a file if `read` results in EAGAIN error. - -On Windows, it retries renaming a file for up to one second if `EACCESS` -or `EPERM` error occurs, likely because antivirus software has locked -the directory. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js 2013-09-07 19:17:21.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/graceful-fs.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,159 +0,0 @@ -// Monkey-patching the fs module. -// It's ugly, but there is simply no other way to do this. -var fs = module.exports = require('fs') - -var assert = require('assert') - -// fix up some busted stuff, mostly on windows and old nodes -require('./polyfills.js') - -// The EMFILE enqueuing stuff - -var util = require('util') - -function noop () {} - -var debug = noop -var util = require('util') -if (util.debuglog) - debug = util.debuglog('gfs') -else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS: ' + m.split(/\n/).join('\nGFS: ') - console.error(m) - } - -if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug('fds', fds) - debug(queue) - assert.equal(queue.length, 0) - }) -} - - -var originalOpen = fs.open -fs.open = open - -function open(path, flags, mode, cb) { - if (typeof mode === "function") cb = mode, mode = null - if (typeof cb !== "function") cb = noop - new OpenReq(path, flags, mode, cb) -} - -function OpenReq(path, flags, mode, cb) { - this.path = path - this.flags = flags - this.mode = mode - this.cb = cb - Req.call(this) -} - -util.inherits(OpenReq, Req) - -OpenReq.prototype.process = function() { - originalOpen.call(fs, this.path, this.flags, this.mode, this.done) -} - -var fds = {} -OpenReq.prototype.done = function(er, fd) { - debug('open done', er, fd) - if (fd) - fds['fd' + fd] = this.path - Req.prototype.done.call(this, er, fd) -} - - -var originalReaddir = fs.readdir -fs.readdir = readdir - -function readdir(path, cb) { - if (typeof cb !== "function") cb = noop - new ReaddirReq(path, cb) -} - -function ReaddirReq(path, cb) { - this.path = path - this.cb = cb - Req.call(this) -} - -util.inherits(ReaddirReq, Req) - -ReaddirReq.prototype.process = function() { - originalReaddir.call(fs, this.path, this.done) -} - -ReaddirReq.prototype.done = function(er, files) { - Req.prototype.done.call(this, er, files) - onclose() -} - - -var originalClose = fs.close -fs.close = close - -function close (fd, cb) { - debug('close', fd) - if (typeof cb !== "function") cb = noop - delete fds['fd' + fd] - originalClose.call(fs, fd, function(er) { - onclose() - cb(er) - }) -} - - -var originalCloseSync = fs.closeSync -fs.closeSync = closeSync - -function closeSync (fd) { - try { - return originalCloseSync(fd) - } finally { - onclose() - } -} - - -// Req class -function Req () { - // start processing - this.done = this.done.bind(this) - this.failures = 0 - this.process() -} - -Req.prototype.done = function (er, result) { - var tryAgain = false - if (er) { - var code = er.code - var tryAgain = code === "EMFILE" - if 
(process.platform === "win32") - tryAgain = tryAgain || code === "OK" - } - - if (tryAgain) { - this.failures ++ - enqueue(this) - } else { - var cb = this.cb - cb(er, result) - } -} - -var queue = [] - -function enqueue(req) { - queue.push(req) - debug('enqueue %d %s', queue.length, req.constructor.name, req) -} - -function onclose() { - var req = queue.shift() - if (req) { - debug('process', req.constructor.name, req) - req.process() - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ -{ - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me" - }, - "name": "graceful-fs", - "description": "A drop-in replacement for fs, making various improvements.", - "version": "2.0.1", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-graceful-fs.git" - }, - "main": "graceful-fs.js", - "engines": { - "node": ">=0.4.0" - }, - "directories": { - "test": "test" - }, - "scripts": { - "test": "tap test/*.js" - }, - "keywords": [ - "fs", - "module", - "reading", - "retry", - "retries", - "queue", - "error", - "errors", - "handling", - "EMFILE", - "EAGAIN", - "EINVAL", - "EPERM", - "EACCESS" - ], - "license": "BSD", - "readme": "# graceful-fs\n\ngraceful-fs functions as a drop-in replacement for the fs module,\nmaking various improvements.\n\nThe improvements are meant to normalize behavior across different\nplatforms and environments, and to make filesystem access more\nresilient to errors.\n\n## Improvements over fs module\n\ngraceful-fs:\n\n* Queues up `open` and `readdir` calls, and retries them once\n something closes if there is an EMFILE error from too many file\n descriptors.\n* fixes `lchmod` for Node versions prior to 0.6.2.\n* implements `fs.lutimes` if possible. Otherwise it becomes a noop.\n* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or\n `lchown` if the user isn't root.\n* makes `lchmod` and `lchown` become noops, if not available.\n* retries reading a file if `read` results in EAGAIN error.\n\nOn Windows, it retries renaming a file for up to one second if `EACCESS`\nor `EPERM` error occurs, likely because antivirus software has locked\nthe directory.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-graceful-fs/issues" - }, - "_id": "graceful-fs@2.0.1", - "_from": "graceful-fs@2" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js 2013-07-11 07:08:07.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/graceful-fs/polyfills.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,228 +0,0 @@ -var fs = require('fs') -var constants = require('constants') - -var origCwd = process.cwd -var cwd = null -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -var chdir = process.chdir -process.chdir = function(d) { - cwd = null - chdir.call(process, d) -} - -// (re-)implement some things that are known busted or missing. 
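The graceful-fs.js removed above queues failed `open` and `readdir` calls and retries them once a descriptor is released; a minimal standalone sketch of that EMFILE-queueing idea (the `gracefulOpen`/`gracefulClose` names are illustrative, not the module's exported API):

```javascript
var fs = require("fs")

var queue = []  // requests that hit EMFILE, parked until a descriptor closes

function gracefulOpen (path, flags, mode, cb) {
  fs.open(path, flags, mode, function (er, fd) {
    if (er && er.code === "EMFILE") {
      // too many open files: retry this open after the next close
      queue.push(function () { gracefulOpen(path, flags, mode, cb) })
    } else {
      cb(er, fd)
    }
  })
}

function gracefulClose (fd, cb) {
  fs.close(fd, function (er) {
    var retry = queue.shift()  // a descriptor was freed, retry one request
    if (retry) retry()
    cb(er)
  })
}
```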
- -// lchmod, broken prior to 0.6.2 -// back-port the fix here. -if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - fs.lchmod = function (path, mode, callback) { - callback = callback || noop - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - callback(err || err2) - }) - }) - }) - } - - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - var err, err2 - try { - var ret = fs.fchmodSync(fd, mode) - } catch (er) { - err = er - } - try { - fs.closeSync(fd) - } catch (er) { - err2 = er - } - if (err || err2) throw (err || err2) - return ret - } -} - - -// lutimes implementation, or no-op -if (!fs.lutimes) { - if (constants.hasOwnProperty("O_SYMLINK")) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - cb = cb || noop - if (er) return cb(er) - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - return cb(er || er2) - }) - }) - }) - } - - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - , err - , err2 - , ret - - try { - var ret = fs.futimesSync(fd, at, mt) - } catch (er) { - err = er - } - try { - fs.closeSync(fd) - } catch (er) { - err2 = er - } - if (err || err2) throw (err || err2) - return ret - } - - } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) { - // maybe utimensat will be bound soonish? - fs.lutimes = function (path, at, mt, cb) { - fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb) - } - - fs.lutimesSync = function (path, at, mt) { - return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW) - } - - } else { - fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) } - fs.lutimesSync = function () {} - } -} - - -// https://github.com/isaacs/node-graceful-fs/issues/4 -// Chown should not fail on einval or eperm if non-root. - -fs.chown = chownFix(fs.chown) -fs.fchown = chownFix(fs.fchown) -fs.lchown = chownFix(fs.lchown) - -fs.chownSync = chownFixSync(fs.chownSync) -fs.fchownSync = chownFixSync(fs.fchownSync) -fs.lchownSync = chownFixSync(fs.lchownSync) - -function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er, res) { - if (chownErOk(er)) er = null - cb(er, res) - }) - } -} - -function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } -} - -function chownErOk (er) { - // if there's no getuid, or if getuid() is something other than 0, - // and the error is EINVAL or EPERM, then just ignore it. - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // When running as root, or if other types of errors are encountered, - // then it's strict. 
- if (!er || (!process.getuid || process.getuid() !== 0) - && (er.code === "EINVAL" || er.code === "EPERM")) return true -} - - -// if lchmod/lchown do not exist, then make them no-ops -if (!fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - process.nextTick(cb) - } - fs.lchmodSync = function () {} -} -if (!fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - process.nextTick(cb) - } - fs.lchownSync = function () {} -} - - - -// on Windows, A/V software can lock the directory, causing this -// to fail with an EACCES or EPERM if the directory contains newly -// created files. Try again on failure, for up to 1 second. -if (process.platform === "win32") { - var rename_ = fs.rename - fs.rename = function rename (from, to, cb) { - var start = Date.now() - rename_(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM") - && Date.now() - start < 1000) { - return rename_(from, to, CB) - } - cb(er) - }) - } -} - - -// if read() returns EAGAIN, then just try it again. -var read = fs.read -fs.read = function (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return read.call(fs, fd, buffer, offset, length, position, callback) -} - -var readSync = fs.readSync -fs.readSync = function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } -} - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/LICENSE 2011-07-29 19:08:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
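The polyfills.js hunk above also drops graceful-fs's Windows rename workaround; expressed standalone, the pattern retries `fs.rename` for up to one second on EACCES or EPERM (the `renameWithRetry` name is hypothetical):

```javascript
var fs = require("fs")

// Retry rename for up to one second on EACCES/EPERM, as the removed
// polyfills.js does on win32 (A/V software can briefly lock the directory).
function renameWithRetry (from, to, cb) {
  var start = Date.now()
  fs.rename(from, to, function CB (er) {
    if (er && (er.code === "EACCES" || er.code === "EPERM") &&
        Date.now() - start < 1000) {
      return fs.rename(from, to, CB)
    }
    cb(er)
  })
}
```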
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/README.md 2013-02-21 15:47:44.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,218 +0,0 @@ -# minimatch - -A minimal matching utility. - -[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch) - - -This is the matching library used internally by npm. - -Eventually, it will replace the C binding in node-glob. - -It works by converting glob expressions into JavaScript `RegExp` -objects. - -## Usage - -```javascript -var minimatch = require("minimatch") - -minimatch("bar.foo", "*.foo") // true! -minimatch("bar.foo", "*.bar") // false! -``` - -## Features - -Supports these glob features: - -* Brace Expansion -* Extended glob matching -* "Globstar" `**` matching - -See: - -* `man sh` -* `man bash` -* `man 3 fnmatch` -* `man 5 gitignore` - -### Comparisons to other fnmatch/glob implementations - -While strict compliance with the existing standards is a worthwhile -goal, some discrepancies exist between minimatch and other -implementations, and are intentional. - -If the pattern starts with a `!` character, then it is negated. Set the -`nonegate` flag to suppress this behavior, and treat leading `!` -characters normally. This is perhaps relevant if you wish to start the -pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` -characters at the start of a pattern will negate the pattern multiple -times. - -If a pattern starts with `#`, then it is treated as a comment, and -will not match anything. Use `\#` to match a literal `#` at the -start of a line, or set the `nocomment` flag to suppress this behavior. - -The double-star character `**` is supported by default, unless the -`noglobstar` flag is set. This is supported in the manner of bsdglob -and bash 4.1, where `**` only has special significance if it is the only -thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but -`a/**b` will not. **Note that this is different from the way that `**` is -handled by ruby's `Dir` class.** - -If an escaped pattern has no matches, and the `nonull` flag is set, -then minimatch.match returns the pattern as-provided, rather than -interpreting the character escapes. For example, -`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than -`"*a?"`. This is akin to setting the `nullglob` option in bash, except -that it does not resolve escaped pattern characters. - -If brace expansion is not disabled, then it is performed before any -other interpretation of the glob pattern. Thus, a pattern like -`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded -**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are -checked for validity. Since those two are valid, matching proceeds. - - -## Minimatch Class - -Create a minimatch object by instanting the `minimatch.Minimatch` class. - -```javascript -var Minimatch = require("minimatch").Minimatch -var mm = new Minimatch(pattern, options) -``` - -### Properties - -* `pattern` The original pattern the minimatch object represents. -* `options` The options supplied to the constructor. -* `set` A 2-dimensional array of regexp or string expressions. - Each row in the - array corresponds to a brace-expanded pattern. 
Each item in the row - corresponds to a single path-part. For example, the pattern - `{a,b/c}/d` would expand to a set of patterns like: - - [ [ a, d ] - , [ b, c, d ] ] - - If a portion of the pattern doesn't have any "magic" in it - (that is, it's something like `"foo"` rather than `fo*o?`), then it - will be left as a string rather than converted to a regular - expression. - -* `regexp` Created by the `makeRe` method. A single regular expression - expressing the entire pattern. This is useful in cases where you wish - to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. -* `negate` True if the pattern is negated. -* `comment` True if the pattern is a comment. -* `empty` True if the pattern is `""`. - -### Methods - -* `makeRe` Generate the `regexp` member if necessary, and return it. - Will return `false` if the pattern is invalid. -* `match(fname)` Return true if the filename matches the pattern, or - false otherwise. -* `matchOne(fileArray, patternArray, partial)` Take a `/`-split - filename, and match it against a single row in the `regExpSet`. This - method is mainly for internal use, but is exposed so that it can be - used by a glob-walker that needs to avoid excessive filesystem calls. - -All other methods are internal, and will be called as necessary. - -## Functions - -The top-level exported function has a `cache` property, which is an LRU -cache set to store 100 items. So, calling these methods repeatedly -with the same pattern and options will use the same Minimatch object, -saving the cost of parsing it multiple times. - -### minimatch(path, pattern, options) - -Main export. Tests a path against the pattern using the options. - -```javascript -var isJS = minimatch(file, "*.js", { matchBase: true }) -``` - -### minimatch.filter(pattern, options) - -Returns a function that tests its -supplied argument, suitable for use with `Array.filter`. Example: - -```javascript -var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) -``` - -### minimatch.match(list, pattern, options) - -Match against the list of -files, in the style of fnmatch or glob. If nothing is matched, and -options.nonull is set, then return a list containing the pattern itself. - -```javascript -var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) -``` - -### minimatch.makeRe(pattern, options) - -Make a regular expression object from the pattern. - -## Options - -All options are `false` by default. - -### debug - -Dump a ton of stuff to stderr. - -### nobrace - -Do not expand `{a,b}` and `{1..3}` brace sets. - -### noglobstar - -Disable `**` matching against multiple folder names. - -### dot - -Allow patterns to match filenames starting with a period, even if -the pattern does not explicitly have a period in that spot. - -Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` -is set. - -### noext - -Disable "extglob" style patterns like `+(a|b)`. - -### nocase - -Perform a case-insensitive match. - -### nonull - -When a match is not found by `minimatch.match`, return a list containing -the pattern itself. When set, an empty list is returned if there are -no matches. - -### matchBase - -If set, then patterns without slashes will be matched -against the basename of the path if it contains slashes. For example, -`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. - -### nocomment - -Suppress the behavior of treating `#` at the start of a pattern as a -comment. 
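A brief sketch of how the options documented above combine at the call site, with expected results taken from the semantics this README describes:

```javascript
var minimatch = require("minimatch")

minimatch("/xyz/123/acb", "a?b", { matchBase: true })  // true: matches the basename
minimatch(".profile", "*")                             // false: dot files are hidden by default
minimatch(".profile", "*", { dot: true })              // true
minimatch("bar.foo", "#comment")                       // false: pattern is treated as a comment
minimatch.match(["a.js"], "*.txt", { nonull: true })   // ["*.txt"]: the pattern itself is returned
```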
- -### nonegate - -Suppress the behavior of treating a leading `!` character as negation. - -### flipNegate - -Returns from negate expressions the same as if they were not negated. -(Ie, true on a hit, false on a miss.) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/minimatch.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/minimatch.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/minimatch.js 2013-04-12 19:27:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/minimatch.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,1079 +0,0 @@ -;(function (require, exports, module, platform) { - -if (module) module.exports = minimatch -else exports.minimatch = minimatch - -if (!require) { - require = function (id) { - switch (id) { - case "sigmund": return function sigmund (obj) { - return JSON.stringify(obj) - } - case "path": return { basename: function (f) { - f = f.split(/[\/\\]/) - var e = f.pop() - if (!e) e = f.pop() - return e - }} - case "lru-cache": return function LRUCache () { - // not quite an LRU, but still space-limited. - var cache = {} - var cnt = 0 - this.set = function (k, v) { - cnt ++ - if (cnt >= 100) cache = {} - cache[k] = v - } - this.get = function (k) { return cache[k] } - } - } - } -} - -minimatch.Minimatch = Minimatch - -var LRU = require("lru-cache") - , cache = minimatch.cache = new LRU({max: 100}) - , GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} - , sigmund = require("sigmund") - -var path = require("path") - // any single thing other than / - // don't need to escape / when using new RegExp() - , qmark = "[^/]" - - // * => any number of characters - , star = qmark + "*?" - - // ** when dots are allowed. Anything goes, except .. and . - // not (^ or / followed by one or two dots followed by $ or /), - // followed by anything, any number of times. - , twoStarDot = "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?" - - // not a ^ or / followed by a dot, - // followed by anything, any number of times. - , twoStarNoDot = "(?:(?!(?:\\\/|^)\\.).)*?" - - // characters that need to be escaped in RegExp. - , reSpecials = charSet("().*{}+?[]^$\\!") - -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split("").reduce(function (set, c) { - set[c] = true - return set - }, {}) -} - -// normalizes slashes. 
-var slashSplit = /\/+/ - -minimatch.monkeyPatch = monkeyPatch -function monkeyPatch () { - var desc = Object.getOwnPropertyDescriptor(String.prototype, "match") - var orig = desc.value - desc.value = function (p) { - if (p instanceof Minimatch) return p.match(this) - return orig.call(this, p) - } - Object.defineProperty(String.prototype, desc) -} - -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) - } -} - -function ext (a, b) { - a = a || {} - b = b || {} - var t = {} - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - return t -} - -minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return minimatch - - var orig = minimatch - - var m = function minimatch (p, pattern, options) { - return orig.minimatch(p, pattern, ext(def, options)) - } - - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } - - return m -} - -Minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return Minimatch - return minimatch.defaults(def).Minimatch -} - - -function minimatch (p, pattern, options) { - if (typeof pattern !== "string") { - throw new TypeError("glob pattern string required") - } - - if (!options) options = {} - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === "#") { - return false - } - - // "" only matches "" - if (pattern.trim() === "") return p === "" - - return new Minimatch(pattern, options).match(p) -} - -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options, cache) - } - - if (typeof pattern !== "string") { - throw new TypeError("glob pattern string required") - } - - if (!options) options = {} - pattern = pattern.trim() - - // windows: need to use /, not \ - // On other platforms, \ is a valid (albeit bad) filename char. - if (platform === "win32") { - pattern = pattern.split("\\").join("/") - } - - // lru storage. - // these things aren't particularly big, but walking down the string - // and turning it into a regexp can get pretty costly. - var cacheKey = pattern + "\n" + sigmund(options) - var cached = minimatch.cache.get(cacheKey) - if (cached) return cached - minimatch.cache.set(cacheKey, this) - - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - - // make the set of regexps etc. - this.make() -} - -Minimatch.prototype.make = make -function make () { - // don't do it more than once. - if (this._made) return - - var pattern = this.pattern - var options = this.options - - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === "#") { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } - - // step 1: figure out negation, etc. - this.parseNegate() - - // step 2: expand braces - var set = this.globSet = this.braceExpand() - - if (options.debug) console.error(this.pattern, set) - - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. 
- // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) - - if (options.debug) console.error(this.pattern, set) - - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) - - if (options.debug) console.error(this.pattern, set) - - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return -1 === s.indexOf(false) - }) - - if (options.debug) console.error(this.pattern, set) - - this.set = set -} - -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - , negate = false - , options = this.options - , negateOffset = 0 - - if (options.nonegate) return - - for ( var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === "!" - ; i ++) { - negate = !negate - negateOffset ++ - } - - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate -} - -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return new Minimatch(pattern, options).braceExpand() -} - -Minimatch.prototype.braceExpand = braceExpand -function braceExpand (pattern, options) { - options = options || this.options - pattern = typeof pattern === "undefined" - ? this.pattern : pattern - - if (typeof pattern === "undefined") { - throw new Error("undefined pattern") - } - - if (options.nobrace || - !pattern.match(/\{.*\}/)) { - // shortcut. no need to expand. - return [pattern] - } - - var escaping = false - - // examples and comments refer to this crazy pattern: - // a{b,c{d,e},{f,g}h}x{y,z} - // expected: - // abxy - // abxz - // acdxy - // acdxz - // acexy - // acexz - // afhxy - // afhxz - // aghxy - // aghxz - - // everything before the first \{ is just a prefix. - // So, we pluck that off, and work with the rest, - // and then prepend it to everything we find. - if (pattern.charAt(0) !== "{") { - // console.error(pattern) - var prefix = null - for (var i = 0, l = pattern.length; i < l; i ++) { - var c = pattern.charAt(i) - // console.error(i, c) - if (c === "\\") { - escaping = !escaping - } else if (c === "{" && !escaping) { - prefix = pattern.substr(0, i) - break - } - } - - // actually no sets, all { were escaped. - if (prefix === null) { - // console.error("no sets") - return [pattern] - } - - var tail = braceExpand(pattern.substr(i), options) - return tail.map(function (t) { - return prefix + t - }) - } - - // now we have something like: - // {b,c{d,e},{f,g}h}x{y,z} - // walk through the set, expanding each part, until - // the set ends. then, we'll expand the suffix. - // If the set only has a single member, then'll put the {} back - - // first, handle numeric sets, since they're easier - var numset = pattern.match(/^\{(-?[0-9]+)\.\.(-?[0-9]+)\}/) - if (numset) { - // console.error("numset", numset[1], numset[2]) - var suf = braceExpand(pattern.substr(numset[0].length), options) - , start = +numset[1] - , end = +numset[2] - , inc = start > end ? 
-1 : 1 - , set = [] - for (var i = start; i != (end + inc); i += inc) { - // append all the suffixes - for (var ii = 0, ll = suf.length; ii < ll; ii ++) { - set.push(i + suf[ii]) - } - } - return set - } - - // ok, walk through the set - // We hope, somewhat optimistically, that there - // will be a } at the end. - // If the closing brace isn't found, then the pattern is - // interpreted as braceExpand("\\" + pattern) so that - // the leading \{ will be interpreted literally. - var i = 1 // skip the \{ - , depth = 1 - , set = [] - , member = "" - , sawEnd = false - , escaping = false - - function addMember () { - set.push(member) - member = "" - } - - // console.error("Entering for") - FOR: for (i = 1, l = pattern.length; i < l; i ++) { - var c = pattern.charAt(i) - // console.error("", i, c) - - if (escaping) { - escaping = false - member += "\\" + c - } else { - switch (c) { - case "\\": - escaping = true - continue - - case "{": - depth ++ - member += "{" - continue - - case "}": - depth -- - // if this closes the actual set, then we're done - if (depth === 0) { - addMember() - // pluck off the close-brace - i ++ - break FOR - } else { - member += c - continue - } - - case ",": - if (depth === 1) { - addMember() - } else { - member += c - } - continue - - default: - member += c - continue - } // switch - } // else - } // for - - // now we've either finished the set, and the suffix is - // pattern.substr(i), or we have *not* closed the set, - // and need to escape the leading brace - if (depth !== 0) { - // console.error("didn't close", pattern) - return braceExpand("\\" + pattern, options) - } - - // x{y,z} -> ["xy", "xz"] - // console.error("set", set) - // console.error("suffix", pattern.substr(i)) - var suf = braceExpand(pattern.substr(i), options) - // ["b", "c{d,e}","{f,g}h"] -> - // [["b"], ["cd", "ce"], ["fh", "gh"]] - var addBraces = set.length === 1 - // console.error("set pre-expanded", set) - set = set.map(function (p) { - return braceExpand(p, options) - }) - // console.error("set expanded", set) - - - // [["b"], ["cd", "ce"], ["fh", "gh"]] -> - // ["b", "cd", "ce", "fh", "gh"] - set = set.reduce(function (l, r) { - return l.concat(r) - }) - - if (addBraces) { - set = set.map(function (s) { - return "{" + s + "}" - }) - } - - // now attach the suffixes. - var ret = [] - for (var i = 0, l = set.length; i < l; i ++) { - for (var ii = 0, ll = suf.length; ii < ll; ii ++) { - ret.push(set[i] + suf[ii]) - } - } - return ret -} - -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - var options = this.options - - // shortcuts - if (!options.noglobstar && pattern === "**") return GLOBSTAR - if (pattern === "") return "" - - var re = "" - , hasMagic = !!options.nocase - , escaping = false - // ? 
=> one single character - , patternListStack = [] - , plType - , stateChar - , inClass = false - , reClassStart = -1 - , classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - , patternStart = pattern.charAt(0) === "." ? "" // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))" - : "(?!\\.)" - - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case "*": - re += star - hasMagic = true - break - case "?": - re += qmark - hasMagic = true - break - default: - re += "\\"+stateChar - break - } - stateChar = false - } - } - - for ( var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i ++ ) { - - if (options.debug) { - console.error("%s\t%s %s %j", pattern, i, re, c) - } - - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += "\\" + c - escaping = false - continue - } - - SWITCH: switch (c) { - case "/": - // completely not allowed, even escaped. - // Should already be path-split by now. - return false - - case "\\": - clearStateChar() - escaping = true - continue - - // the various stateChar values - // for the "extglob" stuff. - case "?": - case "*": - case "+": - case "@": - case "!": - if (options.debug) { - console.error("%s\t%s %s %j <-- stateChar", pattern, i, re, c) - } - - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - if (c === "!" && i === classStart + 1) c = "^" - re += c - continue - } - - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue - - case "(": - if (inClass) { - re += "(" - continue - } - - if (!stateChar) { - re += "\\(" - continue - } - - plType = stateChar - patternListStack.push({ type: plType - , start: i - 1 - , reStart: re.length }) - // negation is (?:(?!js)[^/]*) - re += stateChar === "!" ? "(?:(?!" : "(?:" - stateChar = false - continue - - case ")": - if (inClass || !patternListStack.length) { - re += "\\)" - continue - } - - hasMagic = true - re += ")" - plType = patternListStack.pop().type - // negation is (?:(?!js)[^/]*) - // The others are (?:) - switch (plType) { - case "!": - re += "[^/]*?)" - break - case "?": - case "+": - case "*": re += plType - case "@": break // the default anyway - } - continue - - case "|": - if (inClass || !patternListStack.length || escaping) { - re += "\\|" - escaping = false - continue - } - - re += "|" - continue - - // these are mostly the same in regexp and glob - case "[": - // swallow any state-tracking char before the [ - clearStateChar() - - if (inClass) { - re += "\\" + c - continue - } - - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - - case "]": - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += "\\" + c - escaping = false - continue - } - - // finish up the class. 
- hasMagic = true - inClass = false - re += c - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === "^" && inClass)) { - re += "\\" - } - - re += c - - } // switch - } // for - - - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - var cs = pattern.substr(classStart + 1) - , sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + "\\[" + sp[0] - hasMagic = hasMagic || sp[1] - } - - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - var pl - while (pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + 3) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = "\\" - } - - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + "|" - }) - - // console.error("tail=%j\n %s", tail, tail) - var t = pl.type === "*" ? star - : pl.type === "?" ? qmark - : "\\" + pl.type - - hasMagic = true - re = re.slice(0, pl.reStart) - + t + "\\(" - + tail - } - - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += "\\\\" - } - - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case ".": - case "[": - case "(": addPatternStart = true - } - - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== "" && hasMagic) re = "(?=.)" + re - - if (addPatternStart) re = patternStart + re - - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [ re, hasMagic ] - } - - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } - - var flags = options.nocase ? "i" : "" - , regExp = new RegExp("^" + re + "$", flags) - - regExp._glob = pattern - regExp._src = re - - return regExp -} - -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() -} - -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). 
This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set - - if (!set.length) return this.regexp = false - var options = this.options - - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - , flags = options.nocase ? "i" : "" - - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === "string") ? regExpEscape(p) - : p._src - }).join("\\\/") - }).join("|") - - // must match entire pattern - // ending in a * or ** will make it less strict. - re = "^(?:" + re + ")$" - - // can match anything, as long as it's not this. - if (this.negate) re = "^(?!" + re + ").*$" - - try { - return this.regexp = new RegExp(re, flags) - } catch (ex) { - return this.regexp = false - } -} - -minimatch.match = function (list, pattern, options) { - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (options.nonull && !list.length) { - list.push(pattern) - } - return list -} - -Minimatch.prototype.match = match -function match (f, partial) { - // console.error("match", f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === "" - - if (f === "/" && partial) return true - - var options = this.options - - // windows: need to use /, not \ - // On other platforms, \ is a valid (albeit bad) filename char. - if (platform === "win32") { - f = f.split("\\").join("/") - } - - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - if (options.debug) { - console.error(this.pattern, "split", f) - } - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. - - var set = this.set - // console.error(this.pattern, "set", set) - - for (var i = 0, l = set.length; i < l; i ++) { - var pattern = set[i] - var hit = this.matchOne(f, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } - } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate -} - -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. -Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options - - if (options.debug) { - console.error("matchOne", - { "this": this - , file: file - , pattern: pattern }) - } - - if (options.matchBase && pattern.length === 1) { - file = path.basename(file.join("/")).split("/") - } - - if (options.debug) { - console.error("matchOne", file.length, pattern.length) - } - - for ( var fi = 0 - , pi = 0 - , fl = file.length - , pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi ++, pi ++ ) { - - if (options.debug) { - console.error("matchOne loop") - } - var p = pattern[pi] - , f = file[fi] - - if (options.debug) { - console.error(pattern, p, f) - } - - // should be impossible. - // some invalid regexp stuff in the set. 
- if (p === false) return false - - if (p === GLOBSTAR) { - if (options.debug) - console.error('GLOBSTAR', [pattern, p, f]) - - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - , pr = pi + 1 - if (pr === pl) { - if (options.debug) - console.error('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for ( ; fi < fl; fi ++) { - if (file[fi] === "." || file[fi] === ".." || - (!options.dot && file[fi].charAt(0) === ".")) return false - } - return true - } - - // ok, let's see if we can swallow whatever we can. - WHILE: while (fr < fl) { - var swallowee = file[fr] - - if (options.debug) { - console.error('\nglobstar while', - file, fr, pattern, pr, swallowee) - } - - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - if (options.debug) - console.error('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === "." || swallowee === ".." || - (!options.dot && swallowee.charAt(0) === ".")) { - if (options.debug) - console.error("dot detected!", file, fr, pattern, pr) - break WHILE - } - - // ** swallows a segment, and continue. - if (options.debug) - console.error('globstar swallow a segment, and continue') - fr ++ - } - } - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - if (partial) { - // ran out of file - // console.error("\n>>> no match, partial?", file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === "string") { - if (options.nocase) { - hit = f.toLowerCase() === p.toLowerCase() - } else { - hit = f === p - } - if (options.debug) { - console.error("string match", p, f, hit) - } - } else { - hit = f.match(p) - if (options.debug) { - console.error("pattern match", p, f, hit) - } - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. 
- if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - var emptyFileEnd = (fi === fl - 1) && (file[fi] === "") - return emptyFileEnd - } - - // should be unreachable. - throw new Error("wtf?") -} - - -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, "$1") -} - - -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&") -} - -})( typeof require === "function" ? require : null, - this, - typeof module === "object" ? module : null, - typeof process === "object" ? process.platform : "win32" - ) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/.npmignore 2011-12-09 01:04:59.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -/node_modules diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/AUTHORS tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/AUTHORS --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/AUTHORS 2012-08-27 16:36:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/AUTHORS 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -# Authors, sorted by whether or not they are me -Isaac Z. Schlueter -Carlos Brito Lage -Marko Mikulicic -Trent Mick -Kevin O'Hara -Marco Rogers -Jesse Dailey diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/LICENSE 2011-07-29 19:10:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/README.md 2013-03-26 00:24:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,97 +0,0 @@ -# lru cache - -A cache object that deletes the least-recently-used items. - -## Usage: - -```javascript -var LRU = require("lru-cache") - , options = { max: 500 - , length: function (n) { return n * 2 } - , dispose: function (key, n) { n.close() } - , maxAge: 1000 * 60 * 60 } - , cache = LRU(options) - , otherCache = LRU(50) // sets just the max size - -cache.set("key", "value") -cache.get("key") // "value" - -cache.reset() // empty the cache -``` - -If you put more stuff in it, then items will fall out. - -If you try to put an oversized thing in it, then it'll fall out right -away. - -## Options - -* `max` The maximum size of the cache, checked by applying the length - function to all values in the cache. Not setting this is kind of - silly, since that's the whole purpose of this lib, but it defaults - to `Infinity`. -* `maxAge` Maximum age in ms. Items are not pro-actively pruned out - as they age, but if you try to get an item that is too old, it'll - drop it and return undefined instead of giving it to you. -* `length` Function that is used to calculate the length of stored - items. If you're storing strings or buffers, then you probably want - to do something like `function(n){return n.length}`. The default is - `function(n){return 1}`, which is fine if you want to store `n` - like-sized things. -* `dispose` Function that is called on items when they are dropped - from the cache. This can be handy if you want to close file - descriptors or do other cleanup tasks when items are no longer - accessible. Called with `key, value`. It's called *before* - actually removing the item from the internal cache, so if you want - to immediately put it back in, you'll have to do that in a - `nextTick` or `setTimeout` callback or it won't do anything. -* `stale` By default, if you set a `maxAge`, it'll only actually pull - stale items out of the cache when you `get(key)`. (That is, it's - not pre-emptively doing a `setTimeout` or anything.) If you set - `stale:true`, it'll return the stale value before deleting it. If - you don't set this, then it'll return `undefined` when you try to - get a stale entry, as if it had already been deleted. - -## API - -* `set(key, value)` -* `get(key) => value` - - Both of these will update the "recently used"-ness of the key. - They do what you think. - -* `peek(key)` - - Returns the key value (or `undefined` if not found) without - updating the "recently used"-ness of the key. 
- - (If you find yourself using this a lot, you *might* be using the - wrong sort of data structure, but there are some use cases where - it's handy.) - -* `del(key)` - - Deletes a key out of the cache. - -* `reset()` - - Clear the cache entirely, throwing away all values. - -* `has(key)` - - Check if a key is in the cache, without updating the recent-ness - or deleting it for being stale. - -* `forEach(function(value,key,cache), [thisp])` - - Just like `Array.prototype.forEach`. Iterates over all the keys - in the cache, in order of recent-ness. (Ie, more recently used - items are iterated over first.) - -* `keys()` - - Return an array of the keys in the cache. - -* `values()` - - Return an array of the values in the cache. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/bench.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/bench.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/bench.js 2013-08-19 19:55:38.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/bench.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -var LRU = require('lru-cache'); - -var max = +process.argv[2] || 10240; -var more = 102400; - -var cache = LRU({ - max: max, maxAge: 86400e3 -}); - -// fill cache -for (var i = 0; i < max; ++i) { - cache.set(i, {}); -} - -var start = process.hrtime(); - -// adding more items -for ( ; i < max+more; ++i) { - cache.set(i, {}); -} - -var end = process.hrtime(start); -var msecs = end[0] * 1E3 + end[1] / 1E6; - -console.log('adding %d items took %d ms', more, msecs.toPrecision(5)); diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js 2013-08-19 21:26:27.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,263 +0,0 @@ -;(function () { // closure for web browsers - -if (typeof module === 'object' && module.exports) { - module.exports = LRUCache -} else { - // just set the global for non-node platforms. - this.LRUCache = LRUCache -} - -function hOP (obj, key) { - return Object.prototype.hasOwnProperty.call(obj, key) -} - -function naiveLength () { return 1 } - -function LRUCache (options) { - if (!(this instanceof LRUCache)) { - return new LRUCache(options) - } - - var max - if (typeof options === 'number') { - max = options - options = { max: max } - } - - if (!options) options = {} - - max = options.max - - var lengthCalculator = options.length || naiveLength - - if (typeof lengthCalculator !== "function") { - lengthCalculator = naiveLength - } - - if (!max || !(typeof max === "number") || max <= 0 ) { - // a little bit silly. maybe this should throw? 
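A runnable sketch of the `get()`/`peek()` contrast the README above documents, checked against the bundled lru-cache 2.3.1 being removed here: only `get()` refreshes recency, so only `get()` can save an entry from eviction.

```javascript
var LRU = require("lru-cache")

var cache = LRU(2)            // numeric shorthand for { max: 2 }
cache.set("a", 1)
cache.set("b", 2)

cache.peek("a")               // reads "a" without refreshing its recency
cache.set("c", 3)             // over max: "a" is still LRU and gets evicted
console.log(cache.has("a"))   // false

cache.reset()
cache.set("a", 1)
cache.set("b", 2)
cache.get("a")                // refreshes "a", so "b" is now least recent
cache.set("c", 3)
console.log(cache.has("a"))   // true
console.log(cache.has("b"))   // false
```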
- max = Infinity - } - - var allowStale = options.stale || false - - var maxAge = options.maxAge || null - - var dispose = options.dispose - - var cache = Object.create(null) // hash of items by key - , lruList = Object.create(null) // list of items in order of use recency - , mru = 0 // most recently used - , lru = 0 // least recently used - , length = 0 // number of items in the list - , itemCount = 0 - - - // resize the cache when the max changes. - Object.defineProperty(this, "max", - { set : function (mL) { - if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity - max = mL - // if it gets above double max, trim right away. - // otherwise, do it whenever it's convenient. - if (length > max) trim() - } - , get : function () { return max } - , enumerable : true - }) - - // resize the cache when the lengthCalculator changes. - Object.defineProperty(this, "lengthCalculator", - { set : function (lC) { - if (typeof lC !== "function") { - lengthCalculator = naiveLength - length = itemCount - for (var key in cache) { - cache[key].length = 1 - } - } else { - lengthCalculator = lC - length = 0 - for (var key in cache) { - cache[key].length = lengthCalculator(cache[key].value) - length += cache[key].length - } - } - - if (length > max) trim() - } - , get : function () { return lengthCalculator } - , enumerable : true - }) - - Object.defineProperty(this, "length", - { get : function () { return length } - , enumerable : true - }) - - - Object.defineProperty(this, "itemCount", - { get : function () { return itemCount } - , enumerable : true - }) - - this.forEach = function (fn, thisp) { - thisp = thisp || this - var i = 0; - for (var k = mru - 1; k >= 0 && i < itemCount; k--) if (lruList[k]) { - i++ - var hit = lruList[k] - if (maxAge && (Date.now() - hit.now > maxAge)) { - del(hit) - if (!allowStale) hit = undefined - } - if (hit) { - fn.call(thisp, hit.value, hit.key, this) - } - } - } - - this.keys = function () { - var keys = new Array(itemCount) - var i = 0 - for (var k = mru - 1; k >= 0 && i < itemCount; k--) if (lruList[k]) { - var hit = lruList[k] - keys[i++] = hit.key - } - return keys - } - - this.values = function () { - var values = new Array(itemCount) - var i = 0 - for (var k = mru - 1; k >= 0 && i < itemCount; k--) if (lruList[k]) { - var hit = lruList[k] - values[i++] = hit.value - } - return values - } - - this.reset = function () { - if (dispose) { - for (var k in cache) { - dispose(k, cache[k].value) - } - } - cache = {} - lruList = {} - lru = 0 - mru = 0 - length = 0 - itemCount = 0 - } - - // Provided for debugging/dev purposes only. No promises whatsoever that - // this API stays stable. - this.dump = function () { - return cache - } - - this.dumpLru = function () { - return lruList - } - - this.set = function (key, value) { - if (hOP(cache, key)) { - // dispose of the old one before overwriting - if (dispose) dispose(key, cache[key].value) - if (maxAge) cache[key].now = Date.now() - cache[key].value = value - this.get(key) - return true - } - - var len = lengthCalculator(value) - var age = maxAge ? Date.now() : 0 - var hit = new Entry(key, value, mru++, len, age) - - // oversized objects fall out of cache automatically. 
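The comment closing the hunk above introduces the branch that follows it: an entry whose computed length already exceeds `max` is rejected outright, with `set()` returning `false` after `dispose` fires. A short sketch of that behavior, using a hypothetical string-length `length` function:

```javascript
var LRU = require("lru-cache")

var cache = LRU({
  max: 10,
  length: function (s) { return s.length },
  dispose: function (key) { console.log("dropped", key) }
})

console.log(cache.set("ok", "short"))                 // true (5 <= 10)
console.log(cache.set("big", "far-too-long-a-value")) // "dropped big", then false
console.log(cache.has("big"))                         // false: never stored
```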
- if (hit.length > max) { - if (dispose) dispose(key, value) - return false - } - - length += hit.length - lruList[hit.lu] = cache[key] = hit - itemCount ++ - - if (length > max) trim() - return true - } - - this.has = function (key) { - if (!hOP(cache, key)) return false - var hit = cache[key] - if (maxAge && (Date.now() - hit.now > maxAge)) { - return false - } - return true - } - - this.get = function (key) { - return get(key, true) - } - - this.peek = function (key) { - return get(key, false) - } - - function get (key, doUse) { - var hit = cache[key] - if (hit) { - if (maxAge && (Date.now() - hit.now > maxAge)) { - del(hit) - if (!allowStale) hit = undefined - } else { - if (doUse) use(hit) - } - if (hit) hit = hit.value - } - return hit - } - - function use (hit) { - shiftLU(hit) - hit.lu = mru ++ - lruList[hit.lu] = hit - } - - this.del = function (key) { - del(cache[key]) - } - - function trim () { - while (lru < mru && length > max) - del(lruList[lru]) - } - - function shiftLU(hit) { - delete lruList[ hit.lu ] - while (lru < mru && !lruList[lru]) lru ++ - } - - function del(hit) { - if (hit) { - if (dispose) dispose(hit.key, hit.value) - length -= hit.length - itemCount -- - delete cache[ hit.key ] - shiftLU(hit) - } - } -} - -// classy, since V8 prefers predictable objects. -function Entry (key, value, mru, len, age) { - this.key = key - this.value = value - this.lu = mru - this.length = len - this.now = age -} - -})() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/package.json 2013-10-25 01:43:03.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/lru-cache/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,62 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "2.3.1", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me" - }, - "scripts": { - "test": "tap test --gc" - }, - "main": "lib/lru-cache.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-lru-cache.git" - }, - "devDependencies": { - "tap": "", - "weak": "" - }, - "license": { - "type": "MIT", - "url": "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE" - }, - "contributors": [ - { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me" - }, - { - "name": "Carlos Brito Lage", - "email": "carlos@carloslage.net" - }, - { - "name": "Marko Mikulicic", - "email": "marko.mikulicic@isti.cnr.it" - }, - { - "name": "Trent Mick", - "email": "trentm@gmail.com" - }, - { - "name": "Kevin O'Hara", - "email": "kevinohara80@gmail.com" - }, - { - "name": "Marco Rogers", - "email": "marco.rogers@gmail.com" - }, - { - "name": "Jesse Dailey", - "email": "jesse.dailey@gmail.com" - } - ], - "readme": "# lru cache\n\nA cache object that deletes the least-recently-used items.\n\n## Usage:\n\n```javascript\nvar LRU = require(\"lru-cache\")\n , options = { max: 500\n , length: function (n) { return n * 2 }\n , dispose: function (key, n) { n.close() }\n , maxAge: 1000 * 60 * 60 }\n , cache = LRU(options)\n , otherCache = LRU(50) // sets just the max size\n\ncache.set(\"key\", \"value\")\ncache.get(\"key\") // \"value\"\n\ncache.reset() // empty the cache\n```\n\nIf you put more stuff in it, then items will fall out.\n\nIf you try to put an oversized thing in it, then it'll fall out right\naway.\n\n## Options\n\n* `max` The maximum size of the cache, checked by applying the length\n function to all values in the cache. Not setting this is kind of\n silly, since that's the whole purpose of this lib, but it defaults\n to `Infinity`.\n* `maxAge` Maximum age in ms. Items are not pro-actively pruned out\n as they age, but if you try to get an item that is too old, it'll\n drop it and return undefined instead of giving it to you.\n* `length` Function that is used to calculate the length of stored\n items. If you're storing strings or buffers, then you probably want\n to do something like `function(n){return n.length}`. The default is\n `function(n){return 1}`, which is fine if you want to store `n`\n like-sized things.\n* `dispose` Function that is called on items when they are dropped\n from the cache. This can be handy if you want to close file\n descriptors or do other cleanup tasks when items are no longer\n accessible. Called with `key, value`. It's called *before*\n actually removing the item from the internal cache, so if you want\n to immediately put it back in, you'll have to do that in a\n `nextTick` or `setTimeout` callback or it won't do anything.\n* `stale` By default, if you set a `maxAge`, it'll only actually pull\n stale items out of the cache when you `get(key)`. (That is, it's\n not pre-emptively doing a `setTimeout` or anything.) If you set\n `stale:true`, it'll return the stale value before deleting it. If\n you don't set this, then it'll return `undefined` when you try to\n get a stale entry, as if it had already been deleted.\n\n## API\n\n* `set(key, value)`\n* `get(key) => value`\n\n Both of these will update the \"recently used\"-ness of the key.\n They do what you think.\n\n* `peek(key)`\n\n Returns the key value (or `undefined` if not found) without\n updating the \"recently used\"-ness of the key.\n\n (If you find yourself using this a lot, you *might* be using the\n wrong sort of data structure, but there are some use cases where\n it's handy.)\n\n* `del(key)`\n\n Deletes a key out of the cache.\n\n* `reset()`\n\n Clear the cache entirely, throwing away all values.\n\n* `has(key)`\n\n Check if a key is in the cache, without updating the recent-ness\n or deleting it for being stale.\n\n* `forEach(function(value,key,cache), [thisp])`\n\n Just like `Array.prototype.forEach`. Iterates over all the keys\n in the cache, in order of recent-ness. 
(Ie, more recently used\n items are iterated over first.)\n\n* `keys()`\n\n Return an array of the keys in the cache.\n\n* `values()`\n\n Return an array of the values in the cache.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-lru-cache/issues" - }, - "_id": "lru-cache@2.3.1", - "_from": "lru-cache@2" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/LICENSE 2012-08-13 15:57:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/README.md 2012-08-13 15:57:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -# sigmund - -Quick and dirty signatures for Objects. - -This is like a much faster `deepEquals` comparison, which returns a -string key suitable for caches and the like. - -## Usage - -```javascript -function doSomething (someObj) { - var key = sigmund(someObj, maxDepth) // max depth defaults to 10 - var cached = cache.get(key) - if (cached) return cached) - - var result = expensiveCalculation(someObj) - cache.set(key, result) - return result -} -``` - -The resulting key will be as unique and reproducible as calling -`JSON.stringify` or `util.inspect` on the object, but is much faster. -In order to achieve this speed, some differences are glossed over. -For example, the object `{0:'foo'}` will be treated identically to the -array `['foo']`. 
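Both README claims are easy to check against the bundled sigmund 1.0.0: the object/array equivalence above, and the collision pair quoted just below.

```javascript
var sigmund = require("sigmund")

// Numeric-keyed objects and arrays sign identically.
console.log(sigmund({ 0: "foo" }) === sigmund(["foo"]))  // true

// The README's collision pair: a RegExp flattens to the same text
// as its string form, so these two objects share a signature.
var obj1 = { a: "b", c: /def/, g: ["h", "i", { j: "", k: "l" }] }
var obj2 = { a: "b", c: "/def/", g: ["h", "i", "{jkl"] }
console.log(sigmund(obj1) === sigmund(obj2))             // true
```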
- -Also, just as there is no way to summon the soul from the scribblings -of a cocain-addled psychoanalyst, there is no way to revive the object -from the signature string that sigmund gives you. In fact, it's -barely even readable. - -As with `sys.inspect` and `JSON.stringify`, larger objects will -produce larger signature strings. - -Because sigmund is a bit less strict than the more thorough -alternatives, the strings will be shorter, and also there is a -slightly higher chance for collisions. For example, these objects -have the same signature: - - var obj1 = {a:'b',c:/def/,g:['h','i',{j:'',k:'l'}]} - var obj2 = {a:'b',c:'/def/',g:['h','i','{jkl']} - -Like a good Freudian, sigmund is most effective when you already have -some understanding of what you're looking for. It can help you help -yourself, but you must be willing to do some work as well. - -Cycles are handled, and cyclical objects are silently omitted (though -the key is included in the signature output.) - -The second argument is the maximum depth, which defaults to 10, -because that is the maximum object traversal depth covered by most -insurance carriers. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/bench.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/bench.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/bench.js 2012-08-13 15:57:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/bench.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,283 +0,0 @@ -// different ways to id objects -// use a req/res pair, since it's crazy deep and cyclical - -// sparseFE10 and sigmund are usually pretty close, which is to be expected, -// since they are essentially the same algorithm, except that sigmund handles -// regular expression objects properly. 
- - -var http = require('http') -var util = require('util') -var sigmund = require('./sigmund.js') -var sreq, sres, creq, cres, test - -http.createServer(function (q, s) { - sreq = q - sres = s - sres.end('ok') - this.close(function () { setTimeout(function () { - start() - }, 200) }) -}).listen(1337, function () { - creq = http.get({ port: 1337 }) - creq.on('response', function (s) { cres = s }) -}) - -function start () { - test = [sreq, sres, creq, cres] - // test = sreq - // sreq.sres = sres - // sreq.creq = creq - // sreq.cres = cres - - for (var i in exports.compare) { - console.log(i) - var hash = exports.compare[i]() - console.log(hash) - console.log(hash.length) - console.log('') - } - - require('bench').runMain() -} - -function customWs (obj, md, d) { - d = d || 0 - var to = typeof obj - if (to === 'undefined' || to === 'function' || to === null) return '' - if (d > md || !obj || to !== 'object') return ('' + obj).replace(/[\n ]+/g, '') - - if (Array.isArray(obj)) { - return obj.map(function (i, _, __) { - return customWs(i, md, d + 1) - }).reduce(function (a, b) { return a + b }, '') - } - - var keys = Object.keys(obj) - return keys.map(function (k, _, __) { - return k + ':' + customWs(obj[k], md, d + 1) - }).reduce(function (a, b) { return a + b }, '') -} - -function custom (obj, md, d) { - d = d || 0 - var to = typeof obj - if (to === 'undefined' || to === 'function' || to === null) return '' - if (d > md || !obj || to !== 'object') return '' + obj - - if (Array.isArray(obj)) { - return obj.map(function (i, _, __) { - return custom(i, md, d + 1) - }).reduce(function (a, b) { return a + b }, '') - } - - var keys = Object.keys(obj) - return keys.map(function (k, _, __) { - return k + ':' + custom(obj[k], md, d + 1) - }).reduce(function (a, b) { return a + b }, '') -} - -function sparseFE2 (obj, maxDepth) { - var seen = [] - var soFar = '' - function ch (v, depth) { - if (depth > maxDepth) return - if (typeof v === 'function' || typeof v === 'undefined') return - if (typeof v !== 'object' || !v) { - soFar += v - return - } - if (seen.indexOf(v) !== -1 || depth === maxDepth) return - seen.push(v) - soFar += '{' - Object.keys(v).forEach(function (k, _, __) { - // pseudo-private values. skip those. - if (k.charAt(0) === '_') return - var to = typeof v[k] - if (to === 'function' || to === 'undefined') return - soFar += k + ':' - ch(v[k], depth + 1) - }) - soFar += '}' - } - ch(obj, 0) - return soFar -} - -function sparseFE (obj, maxDepth) { - var seen = [] - var soFar = '' - function ch (v, depth) { - if (depth > maxDepth) return - if (typeof v === 'function' || typeof v === 'undefined') return - if (typeof v !== 'object' || !v) { - soFar += v - return - } - if (seen.indexOf(v) !== -1 || depth === maxDepth) return - seen.push(v) - soFar += '{' - Object.keys(v).forEach(function (k, _, __) { - // pseudo-private values. skip those. - if (k.charAt(0) === '_') return - var to = typeof v[k] - if (to === 'function' || to === 'undefined') return - soFar += k - ch(v[k], depth + 1) - }) - } - ch(obj, 0) - return soFar -} - -function sparse (obj, maxDepth) { - var seen = [] - var soFar = '' - function ch (v, depth) { - if (depth > maxDepth) return - if (typeof v === 'function' || typeof v === 'undefined') return - if (typeof v !== 'object' || !v) { - soFar += v - return - } - if (seen.indexOf(v) !== -1 || depth === maxDepth) return - seen.push(v) - soFar += '{' - for (var k in v) { - // pseudo-private values. skip those. 
- if (k.charAt(0) === '_') continue - var to = typeof v[k] - if (to === 'function' || to === 'undefined') continue - soFar += k - ch(v[k], depth + 1) - } - } - ch(obj, 0) - return soFar -} - -function noCommas (obj, maxDepth) { - var seen = [] - var soFar = '' - function ch (v, depth) { - if (depth > maxDepth) return - if (typeof v === 'function' || typeof v === 'undefined') return - if (typeof v !== 'object' || !v) { - soFar += v - return - } - if (seen.indexOf(v) !== -1 || depth === maxDepth) return - seen.push(v) - soFar += '{' - for (var k in v) { - // pseudo-private values. skip those. - if (k.charAt(0) === '_') continue - var to = typeof v[k] - if (to === 'function' || to === 'undefined') continue - soFar += k + ':' - ch(v[k], depth + 1) - } - soFar += '}' - } - ch(obj, 0) - return soFar -} - - -function flatten (obj, maxDepth) { - var seen = [] - var soFar = '' - function ch (v, depth) { - if (depth > maxDepth) return - if (typeof v === 'function' || typeof v === 'undefined') return - if (typeof v !== 'object' || !v) { - soFar += v - return - } - if (seen.indexOf(v) !== -1 || depth === maxDepth) return - seen.push(v) - soFar += '{' - for (var k in v) { - // pseudo-private values. skip those. - if (k.charAt(0) === '_') continue - var to = typeof v[k] - if (to === 'function' || to === 'undefined') continue - soFar += k + ':' - ch(v[k], depth + 1) - soFar += ',' - } - soFar += '}' - } - ch(obj, 0) - return soFar -} - -exports.compare = -{ - // 'custom 2': function () { - // return custom(test, 2, 0) - // }, - // 'customWs 2': function () { - // return customWs(test, 2, 0) - // }, - 'JSON.stringify (guarded)': function () { - var seen = [] - return JSON.stringify(test, function (k, v) { - if (typeof v !== 'object' || !v) return v - if (seen.indexOf(v) !== -1) return undefined - seen.push(v) - return v - }) - }, - - 'flatten 10': function () { - return flatten(test, 10) - }, - - // 'flattenFE 10': function () { - // return flattenFE(test, 10) - // }, - - 'noCommas 10': function () { - return noCommas(test, 10) - }, - - 'sparse 10': function () { - return sparse(test, 10) - }, - - 'sparseFE 10': function () { - return sparseFE(test, 10) - }, - - 'sparseFE2 10': function () { - return sparseFE2(test, 10) - }, - - sigmund: function() { - return sigmund(test, 10) - }, - - - // 'util.inspect 1': function () { - // return util.inspect(test, false, 1, false) - // }, - // 'util.inspect undefined': function () { - // util.inspect(test) - // }, - // 'util.inspect 2': function () { - // util.inspect(test, false, 2, false) - // }, - // 'util.inspect 3': function () { - // util.inspect(test, false, 3, false) - // }, - // 'util.inspect 4': function () { - // util.inspect(test, false, 4, false) - // }, - // 'util.inspect Infinity': function () { - // util.inspect(test, false, Infinity, false) - // } -} - -/** results -**/ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/package.json 2013-10-25 01:43:03.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -{ - "name": "sigmund", - "version": "1.0.0", - "description": "Quick and dirty signatures for Objects.", - "main": "sigmund.js", - "directories": { - "test": "test" - }, - 
"dependencies": {}, - "devDependencies": { - "tap": "~0.3.0" - }, - "scripts": { - "test": "tap test/*.js", - "bench": "node bench.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/isaacs/sigmund" - }, - "keywords": [ - "object", - "signature", - "key", - "data", - "psychoanalysis" - ], - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "license": "BSD", - "readme": "# sigmund\n\nQuick and dirty signatures for Objects.\n\nThis is like a much faster `deepEquals` comparison, which returns a\nstring key suitable for caches and the like.\n\n## Usage\n\n```javascript\nfunction doSomething (someObj) {\n var key = sigmund(someObj, maxDepth) // max depth defaults to 10\n var cached = cache.get(key)\n if (cached) return cached)\n\n var result = expensiveCalculation(someObj)\n cache.set(key, result)\n return result\n}\n```\n\nThe resulting key will be as unique and reproducible as calling\n`JSON.stringify` or `util.inspect` on the object, but is much faster.\nIn order to achieve this speed, some differences are glossed over.\nFor example, the object `{0:'foo'}` will be treated identically to the\narray `['foo']`.\n\nAlso, just as there is no way to summon the soul from the scribblings\nof a cocain-addled psychoanalyst, there is no way to revive the object\nfrom the signature string that sigmund gives you. In fact, it's\nbarely even readable.\n\nAs with `sys.inspect` and `JSON.stringify`, larger objects will\nproduce larger signature strings.\n\nBecause sigmund is a bit less strict than the more thorough\nalternatives, the strings will be shorter, and also there is a\nslightly higher chance for collisions. For example, these objects\nhave the same signature:\n\n var obj1 = {a:'b',c:/def/,g:['h','i',{j:'',k:'l'}]}\n var obj2 = {a:'b',c:'/def/',g:['h','i','{jkl']}\n\nLike a good Freudian, sigmund is most effective when you already have\nsome understanding of what you're looking for. 
It can help you help\nyourself, but you must be willing to do some work as well.\n\nCycles are handled, and cyclical objects are silently omitted (though\nthe key is included in the signature output.)\n\nThe second argument is the maximum depth, which defaults to 10,\nbecause that is the maximum object traversal depth covered by most\ninsurance carriers.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/sigmund/issues" - }, - "_id": "sigmund@1.0.0", - "_from": "sigmund@~1.0.0" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/sigmund.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/sigmund.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/sigmund.js 2012-08-13 15:57:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/node_modules/sigmund/sigmund.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -module.exports = sigmund -function sigmund (subject, maxSessions) { - maxSessions = maxSessions || 10; - var notes = []; - var analysis = ''; - var RE = RegExp; - - function psychoAnalyze (subject, session) { - if (session > maxSessions) return; - - if (typeof subject === 'function' || - typeof subject === 'undefined') { - return; - } - - if (typeof subject !== 'object' || !subject || - (subject instanceof RE)) { - analysis += subject; - return; - } - - if (notes.indexOf(subject) !== -1 || session === maxSessions) return; - - notes.push(subject); - analysis += '{'; - Object.keys(subject).forEach(function (issue, _, __) { - // pseudo-private values. skip those. - if (issue.charAt(0) === '_') return; - var to = typeof subject[issue]; - if (to === 'function' || to === 'undefined') return; - analysis += issue; - psychoAnalyze(subject[issue], session + 1); - }); - } - psychoAnalyze(subject, 0); - return analysis; -} - -// vim: set softtabstop=4 shiftwidth=4: diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/minimatch/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/minimatch/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -{ - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me" - }, - "name": "minimatch", - "description": "a glob matcher in javascript", - "version": "0.2.12", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/minimatch.git" - }, - "main": "minimatch.js", - "scripts": { - "test": "tap test" - }, - "engines": { - "node": "*" - }, - "dependencies": { - "lru-cache": "2", - "sigmund": "~1.0.0" - }, - "devDependencies": { - "tap": "" - }, - "license": { - "type": "MIT", - "url": "http://github.com/isaacs/minimatch/raw/master/LICENSE" - }, - "readme": "# minimatch\n\nA minimal matching utility.\n\n[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch)\n\n\nThis is the matching library used internally by npm.\n\nEventually, it will replace the C binding in node-glob.\n\nIt works by converting glob expressions into JavaScript `RegExp`\nobjects.\n\n## Usage\n\n```javascript\nvar minimatch = require(\"minimatch\")\n\nminimatch(\"bar.foo\", \"*.foo\") // true!\nminimatch(\"bar.foo\", \"*.bar\") // false!\n```\n\n## Features\n\nSupports these glob features:\n\n* Brace Expansion\n* Extended glob matching\n* \"Globstar\" `**` matching\n\nSee:\n\n* `man sh`\n* `man bash`\n* `man 3 fnmatch`\n* `man 5 gitignore`\n\n### Comparisons to other fnmatch/glob implementations\n\nWhile strict compliance with the existing standards is a worthwhile\ngoal, some discrepancies exist between minimatch and other\nimplementations, and are intentional.\n\nIf the pattern starts with a `!` character, then it is negated. Set the\n`nonegate` flag to suppress this behavior, and treat leading `!`\ncharacters normally. This is perhaps relevant if you wish to start the\npattern with a negative extglob pattern like `!(a|B)`. Multiple `!`\ncharacters at the start of a pattern will negate the pattern multiple\ntimes.\n\nIf a pattern starts with `#`, then it is treated as a comment, and\nwill not match anything. Use `\\#` to match a literal `#` at the\nstart of a line, or set the `nocomment` flag to suppress this behavior.\n\nThe double-star character `**` is supported by default, unless the\n`noglobstar` flag is set. This is supported in the manner of bsdglob\nand bash 4.1, where `**` only has special significance if it is the only\nthing in a path part. That is, `a/**/b` will match `a/x/y/b`, but\n`a/**b` will not. **Note that this is different from the way that `**` is\nhandled by ruby's `Dir` class.**\n\nIf an escaped pattern has no matches, and the `nonull` flag is set,\nthen minimatch.match returns the pattern as-provided, rather than\ninterpreting the character escapes. For example,\n`minimatch.match([], \"\\\\*a\\\\?\")` will return `\"\\\\*a\\\\?\"` rather than\n`\"*a?\"`. This is akin to setting the `nullglob` option in bash, except\nthat it does not resolve escaped pattern characters.\n\nIf brace expansion is not disabled, then it is performed before any\nother interpretation of the glob pattern. Thus, a pattern like\n`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded\n**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are\nchecked for validity. 
Since those two are valid, matching proceeds.\n\n\n## Minimatch Class\n\nCreate a minimatch object by instanting the `minimatch.Minimatch` class.\n\n```javascript\nvar Minimatch = require(\"minimatch\").Minimatch\nvar mm = new Minimatch(pattern, options)\n```\n\n### Properties\n\n* `pattern` The original pattern the minimatch object represents.\n* `options` The options supplied to the constructor.\n* `set` A 2-dimensional array of regexp or string expressions.\n Each row in the\n array corresponds to a brace-expanded pattern. Each item in the row\n corresponds to a single path-part. For example, the pattern\n `{a,b/c}/d` would expand to a set of patterns like:\n\n [ [ a, d ]\n , [ b, c, d ] ]\n\n If a portion of the pattern doesn't have any \"magic\" in it\n (that is, it's something like `\"foo\"` rather than `fo*o?`), then it\n will be left as a string rather than converted to a regular\n expression.\n\n* `regexp` Created by the `makeRe` method. A single regular expression\n expressing the entire pattern. This is useful in cases where you wish\n to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.\n* `negate` True if the pattern is negated.\n* `comment` True if the pattern is a comment.\n* `empty` True if the pattern is `\"\"`.\n\n### Methods\n\n* `makeRe` Generate the `regexp` member if necessary, and return it.\n Will return `false` if the pattern is invalid.\n* `match(fname)` Return true if the filename matches the pattern, or\n false otherwise.\n* `matchOne(fileArray, patternArray, partial)` Take a `/`-split\n filename, and match it against a single row in the `regExpSet`. This\n method is mainly for internal use, but is exposed so that it can be\n used by a glob-walker that needs to avoid excessive filesystem calls.\n\nAll other methods are internal, and will be called as necessary.\n\n## Functions\n\nThe top-level exported function has a `cache` property, which is an LRU\ncache set to store 100 items. So, calling these methods repeatedly\nwith the same pattern and options will use the same Minimatch object,\nsaving the cost of parsing it multiple times.\n\n### minimatch(path, pattern, options)\n\nMain export. Tests a path against the pattern using the options.\n\n```javascript\nvar isJS = minimatch(file, \"*.js\", { matchBase: true })\n```\n\n### minimatch.filter(pattern, options)\n\nReturns a function that tests its\nsupplied argument, suitable for use with `Array.filter`. Example:\n\n```javascript\nvar javascripts = fileList.filter(minimatch.filter(\"*.js\", {matchBase: true}))\n```\n\n### minimatch.match(list, pattern, options)\n\nMatch against the list of\nfiles, in the style of fnmatch or glob. 
If nothing is matched, and\noptions.nonull is set, then return a list containing the pattern itself.\n\n```javascript\nvar javascripts = minimatch.match(fileList, \"*.js\", {matchBase: true}))\n```\n\n### minimatch.makeRe(pattern, options)\n\nMake a regular expression object from the pattern.\n\n## Options\n\nAll options are `false` by default.\n\n### debug\n\nDump a ton of stuff to stderr.\n\n### nobrace\n\nDo not expand `{a,b}` and `{1..3}` brace sets.\n\n### noglobstar\n\nDisable `**` matching against multiple folder names.\n\n### dot\n\nAllow patterns to match filenames starting with a period, even if\nthe pattern does not explicitly have a period in that spot.\n\nNote that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`\nis set.\n\n### noext\n\nDisable \"extglob\" style patterns like `+(a|b)`.\n\n### nocase\n\nPerform a case-insensitive match.\n\n### nonull\n\nWhen a match is not found by `minimatch.match`, return a list containing\nthe pattern itself. When set, an empty list is returned if there are\nno matches.\n\n### matchBase\n\nIf set, then patterns without slashes will be matched\nagainst the basename of the path if it contains slashes. For example,\n`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.\n\n### nocomment\n\nSuppress the behavior of treating `#` at the start of a pattern as a\ncomment.\n\n### nonegate\n\nSuppress the behavior of treating a leading `!` character as negation.\n\n### flipNegate\n\nReturns from negate expressions the same as if they were not negated.\n(Ie, true on a hit, false on a miss.)\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/minimatch/issues" - }, - "_id": "minimatch@0.2.12", - "_from": "minimatch@0" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/.npmignore 2013-01-17 20:36:39.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -node_modules diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/LICENSE 2012-08-30 20:08:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/README.md 2013-01-17 20:36:24.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,210 +0,0 @@ -If you want to write an option parser, and have it be good, there are -two ways to do it. The Right Way, and the Wrong Way. - -The Wrong Way is to sit down and write an option parser. We've all done -that. - -The Right Way is to write some complex configurable program with so many -options that you go half-insane just trying to manage them all, and put -it off with duct-tape solutions until you see exactly to the core of the -problem, and finally snap and write an awesome option parser. - -If you want to write an option parser, don't write an option parser. -Write a package manager, or a source control system, or a service -restarter, or an operating system. You probably won't end up with a -good one of those, but if you don't give up, and you are relentless and -diligent enough in your procrastination, you may just end up with a very -nice option parser. - -## USAGE - - // my-program.js - var nopt = require("nopt") - , Stream = require("stream").Stream - , path = require("path") - , knownOpts = { "foo" : [String, null] - , "bar" : [Stream, Number] - , "baz" : path - , "bloo" : [ "big", "medium", "small" ] - , "flag" : Boolean - , "pick" : Boolean - , "many" : [String, Array] - } - , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] - , "b7" : ["--bar", "7"] - , "m" : ["--bloo", "medium"] - , "p" : ["--pick"] - , "f" : ["--flag"] - } - // everything is optional. - // knownOpts and shorthands default to {} - // arg list defaults to process.argv - // slice defaults to 2 - , parsed = nopt(knownOpts, shortHands, process.argv, 2) - console.log(parsed) - -This would give you support for any of the following: - -```bash -$ node my-program.js --foo "blerp" --no-flag -{ "foo" : "blerp", "flag" : false } - -$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag -{ bar: 7, foo: "Mr. Hand", flag: true } - -$ node my-program.js --foo "blerp" -f -----p -{ foo: "blerp", flag: true, pick: true } - -$ node my-program.js -fp --foofoo -{ foo: "Mr. Foo", flag: true, pick: true } - -$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. -{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } - -$ node my-program.js --blatzk 1000 -fp # unknown opts are ok. -{ blatzk: 1000, flag: true, pick: true } - -$ node my-program.js --blatzk true -fp # but they need a value -{ blatzk: true, flag: true, pick: true } - -$ node my-program.js --no-blatzk -fp # unless they start with "no-" -{ blatzk: false, flag: true, pick: true } - -$ node my-program.js --baz b/a/z # known paths are resolved. -{ baz: "/Users/isaacs/b/a/z" } - -# if Array is one of the types, then it can take many -# values, and will always be an array. The other types provided -# specify what types are allowed in the list. 
- -$ node my-program.js --many 1 --many null --many foo -{ many: ["1", "null", "foo"] } - -$ node my-program.js --many foo -{ many: ["foo"] } -``` - -Read the tests at the bottom of `lib/nopt.js` for more examples of -what this puppy can do. - -## Types - -The following types are supported, and defined on `nopt.typeDefs` - -* String: A normal string. No parsing is done. -* path: A file system path. Gets resolved against cwd if not absolute. -* url: A url. If it doesn't parse, it isn't accepted. -* Number: Must be numeric. -* Date: Must parse as a date. If it does, and `Date` is one of the options, - then it will return a Date object, not a string. -* Boolean: Must be either `true` or `false`. If an option is a boolean, - then it does not need a value, and its presence will imply `true` as - the value. To negate boolean flags, do `--no-whatever` or `--whatever - false` -* NaN: Means that the option is strictly not allowed. Any value will - fail. -* Stream: An object matching the "Stream" class in node. Valuable - for use when validating programmatically. (npm uses this to let you - supply any WriteStream on the `outfd` and `logfd` config options.) -* Array: If `Array` is specified as one of the types, then the value - will be parsed as a list of options. This means that multiple values - can be specified, and that the value will always be an array. - -If a type is an array of values not on this list, then those are -considered valid values. For instance, in the example above, the -`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, -and any other value will be rejected. - -When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be -interpreted as their JavaScript equivalents, and numeric values will be -interpreted as a number. - -You can also mix types and values, or multiple types, in a list. For -instance `{ blah: [Number, null] }` would allow a value to be set to -either a Number or null. When types are ordered, this implies a -preference, and the first type that can be used to properly interpret -the value will be used. - -To define a new type, add it to `nopt.typeDefs`. Each item in that -hash is an object with a `type` member and a `validate` method. The -`type` member is an object that matches what goes in the type list. The -`validate` method is a function that gets called with `validate(data, -key, val)`. Validate methods should assign `data[key]` to the valid -value of `val` if it can be handled properly, or return boolean -`false` if it cannot. - -You can also call `nopt.clean(data, types, typeDefs)` to clean up a -config object and remove its invalid properties. - -## Error Handling - -By default, nopt outputs a warning to standard error when invalid -options are found. You can change this behavior by assigning a method -to `nopt.invalidHandler`. This method will be called with -the offending `nopt.invalidHandler(key, val, types)`. - -If no `nopt.invalidHandler` is assigned, then it will console.error -its whining. If it is assigned to boolean `false` then the warning is -suppressed. - -## Abbreviations - -Yes, they are supported. If you define options like this: - -```javascript -{ "foolhardyelephants" : Boolean -, "pileofmonkeys" : Boolean } -``` - -Then this will work: - -```bash -node program.js --foolhar --pil -node program.js --no-f --pileofmon -# etc. -``` - -## Shorthands - -Shorthands are a hash of shorter option names to a snippet of args that -they expand to. 
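The `nopt.typeDefs` extension point described above takes a `type` marker and a `validate(data, key, val)` function that either assigns `data[key]` or returns `false` to reject the value. A sketch with a made-up `"even"` type (the name and semantics are illustrative only):

```javascript
var nopt = require("nopt")

nopt.typeDefs.even = {
  type: "even",
  validate: function (data, key, val) {
    var n = +val
    if (isNaN(n) || n % 2 !== 0) return false  // rejected -> invalidHandler
    data[key] = n
  }
}

var parsed = nopt({ count: "even" }, {}, ["--count", "4"], 0)
console.log(parsed.count)  // 4 (a number; "--count 3" would be dropped)
```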
- -If multiple one-character shorthands are all combined, and the -combination does not unambiguously match any other option or shorthand, -then they will be broken up into their constituent parts. For example: - -```json -{ "s" : ["--loglevel", "silent"] -, "g" : "--global" -, "f" : "--force" -, "p" : "--parseable" -, "l" : "--long" -} -``` - -```bash -npm ls -sgflp -# just like doing this: -npm ls --loglevel silent --global --force --long --parseable -``` - -## The Rest of the args - -The config object returned by nopt is given a special member called -`argv`, which is an object with the following fields: - -* `remain`: The remaining args after all the parsing has occurred. -* `original`: The args as they originally appeared. -* `cooked`: The args after flags and shorthands are expanded. - -## Slicing - -Node programs are called with more or less the exact argv as it appears -in C land, after the v8 and node-specific options have been plucked off. -As such, `argv[0]` is always `node` and `argv[1]` is always the -JavaScript program being run. - -That's usually not very useful to you. So they're sliced off by -default. If you want them, then you can pass in `0` as the last -argument, or any other number that you'd like to slice off the start of -the list. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/bin/nopt.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/bin/nopt.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/bin/nopt.js 2013-01-17 20:06:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/bin/nopt.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -#!/usr/bin/env node -var nopt = require("../lib/nopt") - , types = { num: Number - , bool: Boolean - , help: Boolean - , list: Array - , "num-list": [Number, Array] - , "str-list": [String, Array] - , "bool-list": [Boolean, Array] - , str: String - , clear: Boolean - , config: Boolean - , length: Number - } - , shorthands = { s: [ "--str", "astring" ] - , b: [ "--bool" ] - , nb: [ "--no-bool" ] - , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] - , "?": ["--help"] - , h: ["--help"] - , H: ["--help"] - , n: [ "--num", "125" ] - , c: ["--config"] - , l: ["--length"] - } - , parsed = nopt( types - , shorthands - , process.argv - , 2 ) - -console.log("parsed", parsed) - -if (parsed.help) { - console.log("") - console.log("nopt cli tester") - console.log("") - console.log("types") - console.log(Object.keys(types).map(function M (t) { - var type = types[t] - if (Array.isArray(type)) { - return [t, type.map(function (type) { return type.name })] - } - return [t, type && type.name] - }).reduce(function (s, i) { - s[i[0]] = i[1] - return s - }, {})) - console.log("") - console.log("shorthands") - console.log(shorthands) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/examples/my-program.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/examples/my-program.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/examples/my-program.js 2013-01-17 20:36:24.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/examples/my-program.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -#!/usr/bin/env node - -//process.env.DEBUG_NOPT = 1 - -// my-program.js -var nopt = require("../lib/nopt") - , Stream = require("stream").Stream - , path = require("path") - , knownOpts = { "foo" : [String, null] - , "bar" : [Stream, Number] - , "baz" : path - , 
"bloo" : [ "big", "medium", "small" ] - , "flag" : Boolean - , "pick" : Boolean - } - , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] - , "b7" : ["--bar", "7"] - , "m" : ["--bloo", "medium"] - , "p" : ["--pick"] - , "f" : ["--flag", "true"] - , "g" : ["--flag"] - , "s" : "--flag" - } - // everything is optional. - // knownOpts and shorthands default to {} - // arg list defaults to process.argv - // slice defaults to 2 - , parsed = nopt(knownOpts, shortHands, process.argv, 2) - -console.log("parsed =\n"+ require("util").inspect(parsed)) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/lib/nopt.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/lib/nopt.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/lib/nopt.js 2013-07-17 15:23:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/lib/nopt.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,612 +0,0 @@ -// info about each config option. - -var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG - ? function () { console.error.apply(console, arguments) } - : function () {} - -var url = require("url") - , path = require("path") - , Stream = require("stream").Stream - , abbrev = require("abbrev") - -module.exports = exports = nopt -exports.clean = clean - -exports.typeDefs = - { String : { type: String, validate: validateString } - , Boolean : { type: Boolean, validate: validateBoolean } - , url : { type: url, validate: validateUrl } - , Number : { type: Number, validate: validateNumber } - , path : { type: path, validate: validatePath } - , Stream : { type: Stream, validate: validateStream } - , Date : { type: Date, validate: validateDate } - } - -function nopt (types, shorthands, args, slice) { - args = args || process.argv - types = types || {} - shorthands = shorthands || {} - if (typeof slice !== "number") slice = 2 - - debug(types, shorthands, args, slice) - - args = args.slice(slice) - var data = {} - , key - , remain = [] - , cooked = args - , original = args.slice(0) - - parse(args, data, remain, types, shorthands) - // now data is full - clean(data, types, exports.typeDefs) - data.argv = {remain:remain,cooked:cooked,original:original} - Object.defineProperty(data.argv, 'toString', { value: function () { - return this.original.map(JSON.stringify).join(" ") - }, enumerable: false }) - return data -} - -function clean (data, types, typeDefs) { - typeDefs = typeDefs || exports.typeDefs - var remove = {} - , typeDefault = [false, true, null, String, Number, Array] - - Object.keys(data).forEach(function (k) { - if (k === "argv") return - var val = data[k] - , isArray = Array.isArray(val) - , type = types[k] - if (!isArray) val = [val] - if (!type) type = typeDefault - if (type === Array) type = typeDefault.concat(Array) - if (!Array.isArray(type)) type = [type] - - debug("val=%j", val) - debug("types=", type) - val = val.map(function (val) { - // if it's an unknown value, then parse false/true/null/numbers/dates - if (typeof val === "string") { - debug("string %j", val) - val = val.trim() - if ((val === "null" && ~type.indexOf(null)) - || (val === "true" && - (~type.indexOf(true) || ~type.indexOf(Boolean))) - || (val === "false" && - (~type.indexOf(false) || ~type.indexOf(Boolean)))) { - val = JSON.parse(val) - debug("jsonable %j", val) - } else if (~type.indexOf(Number) && !isNaN(val)) { - debug("convert to number", val) - val = +val - } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { - debug("convert to date", val) - val = new 
Date(val) - } - } - - if (!types.hasOwnProperty(k)) { - return val - } - - // allow `--no-blah` to set 'blah' to null if null is allowed - if (val === false && ~type.indexOf(null) && - !(~type.indexOf(false) || ~type.indexOf(Boolean))) { - val = null - } - - var d = {} - d[k] = val - debug("prevalidated val", d, val, types[k]) - if (!validate(d, k, val, types[k], typeDefs)) { - if (exports.invalidHandler) { - exports.invalidHandler(k, val, types[k], data) - } else if (exports.invalidHandler !== false) { - debug("invalid: "+k+"="+val, types[k]) - } - return remove - } - debug("validated val", d, val, types[k]) - return d[k] - }).filter(function (val) { return val !== remove }) - - if (!val.length) delete data[k] - else if (isArray) { - debug(isArray, data[k], val) - data[k] = val - } else data[k] = val[0] - - debug("k=%s val=%j", k, val, data[k]) - }) -} - -function validateString (data, k, val) { - data[k] = String(val) -} - -function validatePath (data, k, val) { - data[k] = path.resolve(String(val)) - return true -} - -function validateNumber (data, k, val) { - debug("validate Number %j %j %j", k, val, isNaN(val)) - if (isNaN(val)) return false - data[k] = +val -} - -function validateDate (data, k, val) { - debug("validate Date %j %j %j", k, val, Date.parse(val)) - var s = Date.parse(val) - if (isNaN(s)) return false - data[k] = new Date(val) -} - -function validateBoolean (data, k, val) { - if (val instanceof Boolean) val = val.valueOf() - else if (typeof val === "string") { - if (!isNaN(val)) val = !!(+val) - else if (val === "null" || val === "false") val = false - else val = true - } else val = !!val - data[k] = val -} - -function validateUrl (data, k, val) { - val = url.parse(String(val)) - if (!val.host) return false - data[k] = val.href -} - -function validateStream (data, k, val) { - if (!(val instanceof Stream)) return false - data[k] = val -} - -function validate (data, k, val, type, typeDefs) { - // arrays are lists of types. - if (Array.isArray(type)) { - for (var i = 0, l = type.length; i < l; i ++) { - if (type[i] === Array) continue - if (validate(data, k, val, type[i], typeDefs)) return true - } - delete data[k] - return false - } - - // an array of anything? - if (type === Array) return true - - // NaN is poisonous. Means that something is not allowed. - if (type !== type) { - debug("Poison NaN", k, val, type) - delete data[k] - return false - } - - // explicit list of values - if (val === type) { - debug("Explicitly allowed %j", val) - // if (isArray) (data[k] = data[k] || []).push(val) - // else data[k] = val - data[k] = val - return true - } - - // now go through the list of typeDefs, validate against each one. - var ok = false - , types = Object.keys(typeDefs) - for (var i = 0, l = types.length; i < l; i ++) { - debug("test type %j %j %j", k, val, types[i]) - var t = typeDefs[types[i]] - if (t && type === t.type) { - var d = {} - ok = false !== t.validate(d, k, val) - val = d[k] - if (ok) { - // if (isArray) (data[k] = data[k] || []).push(val) - // else data[k] = val - data[k] = val - break - } - } - } - debug("OK? %j (%j %j %j)", ok, k, val, types[i]) - - if (!ok) delete data[k] - return ok -} - -function parse (args, data, remain, types, shorthands) { - debug("parse", args, data, remain) - - var key = null - , abbrevs = abbrev(Object.keys(types)) - , shortAbbr = abbrev(Object.keys(shorthands)) - - for (var i = 0; i < args.length; i ++) { - var arg = args[i] - debug("arg", arg) - - if (arg.match(/^-{2,}$/)) { - // done with keys. - // the rest are args. 
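The branch being entered here (continued below) is the `--` terminator: everything after a bare `--` skips flag parsing and lands in `parsed.argv.remain`, as the README's `--foofoo -- -fp` example showed. A quick sketch:

```javascript
var nopt = require("nopt")

var parsed = nopt({ flag: Boolean }, {}, ["--flag", "--", "--not-a-flag"], 0)
console.log(parsed.flag)         // true
console.log(parsed.argv.remain)  // [ '--not-a-flag' ]
```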
- remain.push.apply(remain, args.slice(i + 1)) - args[i] = "--" - break - } - var hadEq = false - if (arg.charAt(0) === "-" && arg.length > 1) { - if (arg.indexOf("=") !== -1) { - hadEq = true - var v = arg.split("=") - arg = v.shift() - v = v.join("=") - args.splice.apply(args, [i, 1].concat([arg, v])) - } - - // see if it's a shorthand - // if so, splice and back up to re-parse it. - var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) - debug("arg=%j shRes=%j", arg, shRes) - if (shRes) { - debug(arg, shRes) - args.splice.apply(args, [i, 1].concat(shRes)) - if (arg !== shRes[0]) { - i -- - continue - } - } - arg = arg.replace(/^-+/, "") - var no = null - while (arg.toLowerCase().indexOf("no-") === 0) { - no = !no - arg = arg.substr(3) - } - - if (abbrevs[arg]) arg = abbrevs[arg] - - var isArray = types[arg] === Array || - Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1 - - // allow unknown things to be arrays if specified multiple times. - if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { - if (!Array.isArray(data[arg])) - data[arg] = [data[arg]] - isArray = true - } - - var val - , la = args[i + 1] - - var isBool = typeof no === 'boolean' || - types[arg] === Boolean || - Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 || - (typeof types[arg] === 'undefined' && !hadEq) || - (la === "false" && - (types[arg] === null || - Array.isArray(types[arg]) && ~types[arg].indexOf(null))) - - if (isBool) { - // just set and move along - val = !no - // however, also support --bool true or --bool false - if (la === "true" || la === "false") { - val = JSON.parse(la) - la = null - if (no) val = !val - i ++ - } - - // also support "foo":[Boolean, "bar"] and "--foo bar" - if (Array.isArray(types[arg]) && la) { - if (~types[arg].indexOf(la)) { - // an explicit type - val = la - i ++ - } else if ( la === "null" && ~types[arg].indexOf(null) ) { - // null allowed - val = null - i ++ - } else if ( !la.match(/^-{2,}[^-]/) && - !isNaN(la) && - ~types[arg].indexOf(Number) ) { - // number - val = +la - i ++ - } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) { - // string - val = la - i ++ - } - } - - if (isArray) (data[arg] = data[arg] || []).push(val) - else data[arg] = val - - continue - } - - if (la && la.match(/^-{2,}$/)) { - la = undefined - i -- - } - - val = la === undefined ? true : la - if (isArray) (data[arg] = data[arg] || []).push(val) - else data[arg] = val - - i ++ - continue - } - remain.push(arg) - } -} - -function resolveShort (arg, shorthands, shortAbbr, abbrevs) { - // handle single-char shorthands glommed together, like - // npm ls -glp, but only if there is one dash, and only if - // all of the chars are single-char shorthands, and it's - // not a match to some other abbrev. 
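The comment above describes what `resolveShort` (continued below) does for glommed single-character shorthands. A sketch using a shorthand table like the README's npm example, with `loglevel` declared as a String option so it consumes its value:

```javascript
var nopt = require("nopt")

var shorthands = {
  s: ["--loglevel", "silent"],
  g: "--global",
  f: "--force",
  p: "--parseable",
  l: "--long"
}

var parsed = nopt({ loglevel: String }, shorthands, ["-sgflp"], 0)
console.log(parsed.loglevel)  // "silent"
console.log(parsed.global)    // true (as are force, long, parseable)
```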
- arg = arg.replace(/^-+/, '') - - // if it's an exact known option, then don't go any further - if (abbrevs[arg] === arg) - return null - - // if it's an exact known shortopt, same deal - if (shorthands[arg]) { - // make it an array, if it's a list of words - if (shorthands[arg] && !Array.isArray(shorthands[arg])) - shorthands[arg] = shorthands[arg].split(/\s+/) - - return shorthands[arg] - } - - // first check to see if this arg is a set of single-char shorthands - var singles = shorthands.___singles - if (!singles) { - singles = Object.keys(shorthands).filter(function (s) { - return s.length === 1 - }).reduce(function (l,r) { - l[r] = true - return l - }, {}) - shorthands.___singles = singles - debug('shorthand singles', singles) - } - - var chrs = arg.split("").filter(function (c) { - return singles[c] - }) - - if (chrs.join("") === arg) return chrs.map(function (c) { - return shorthands[c] - }).reduce(function (l, r) { - return l.concat(r) - }, []) - - - // if it's an arg abbrev, and not a literal shorthand, then prefer the arg - if (abbrevs[arg] && !shorthands[arg]) - return null - - // if it's an abbr for a shorthand, then use that - if (shortAbbr[arg]) - arg = shortAbbr[arg] - - // make it an array, if it's a list of words - if (shorthands[arg] && !Array.isArray(shorthands[arg])) - shorthands[arg] = shorthands[arg].split(/\s+/) - - return shorthands[arg] -} - -if (module === require.main) { -var assert = require("assert") - , util = require("util") - - , shorthands = - { s : ["--loglevel", "silent"] - , d : ["--loglevel", "info"] - , dd : ["--loglevel", "verbose"] - , ddd : ["--loglevel", "silly"] - , noreg : ["--no-registry"] - , reg : ["--registry"] - , "no-reg" : ["--no-registry"] - , silent : ["--loglevel", "silent"] - , verbose : ["--loglevel", "verbose"] - , h : ["--usage"] - , H : ["--usage"] - , "?" 
: ["--usage"] - , help : ["--usage"] - , v : ["--version"] - , f : ["--force"] - , desc : ["--description"] - , "no-desc" : ["--no-description"] - , "local" : ["--no-global"] - , l : ["--long"] - , p : ["--parseable"] - , porcelain : ["--parseable"] - , g : ["--global"] - } - - , types = - { aoa: Array - , nullstream: [null, Stream] - , date: Date - , str: String - , browser : String - , cache : path - , color : ["always", Boolean] - , depth : Number - , description : Boolean - , dev : Boolean - , editor : path - , force : Boolean - , global : Boolean - , globalconfig : path - , group : [String, Number] - , gzipbin : String - , logfd : [Number, Stream] - , loglevel : ["silent","win","error","warn","info","verbose","silly"] - , long : Boolean - , "node-version" : [false, String] - , npaturl : url - , npat : Boolean - , "onload-script" : [false, String] - , outfd : [Number, Stream] - , parseable : Boolean - , pre: Boolean - , prefix: path - , proxy : url - , "rebuild-bundle" : Boolean - , registry : url - , searchopts : String - , searchexclude: [null, String] - , shell : path - , t: [Array, String] - , tag : String - , tar : String - , tmp : path - , "unsafe-perm" : Boolean - , usage : Boolean - , user : String - , username : String - , userconfig : path - , version : Boolean - , viewer: path - , _exit : Boolean - } - -; [["-v", {version:true}, []] - ,["---v", {version:true}, []] - ,["ls -s --no-reg connect -d", - {loglevel:"info",registry:null},["ls","connect"]] - ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] - ,["ls --registry blargle", {}, ["ls"]] - ,["--no-registry", {registry:null}, []] - ,["--no-color true", {color:false}, []] - ,["--no-color false", {color:true}, []] - ,["--no-color", {color:false}, []] - ,["--color false", {color:false}, []] - ,["--color --logfd 7", {logfd:7,color:true}, []] - ,["--color=true", {color:true}, []] - ,["--logfd=10", {logfd:10}, []] - ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]] - ,["--tmp=tmp -tar=gtar", - {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] - ,["--logfd x", {}, []] - ,["a -true -- -no-false", {true:true},["a","-no-false"]] - ,["a -no-false", {false:false},["a"]] - ,["a -no-no-true", {true:true}, ["a"]] - ,["a -no-no-no-false", {false:false}, ["a"]] - ,["---NO-no-No-no-no-no-nO-no-no"+ - "-No-no-no-no-no-no-no-no-no"+ - "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ - "-no-body-can-do-the-boogaloo-like-I-do" - ,{"body-can-do-the-boogaloo-like-I-do":false}, []] - ,["we are -no-strangers-to-love "+ - "--you-know=the-rules --and=so-do-i "+ - "---im-thinking-of=a-full-commitment "+ - "--no-you-would-get-this-from-any-other-guy "+ - "--no-gonna-give-you-up "+ - "-no-gonna-let-you-down=true "+ - "--no-no-gonna-run-around false "+ - "--desert-you=false "+ - "--make-you-cry false "+ - "--no-tell-a-lie "+ - "--no-no-and-hurt-you false" - ,{"strangers-to-love":false - ,"you-know":"the-rules" - ,"and":"so-do-i" - ,"you-would-get-this-from-any-other-guy":false - ,"gonna-give-you-up":false - ,"gonna-let-you-down":false - ,"gonna-run-around":false - ,"desert-you":false - ,"make-you-cry":false - ,"tell-a-lie":false - ,"and-hurt-you":false - },["we", "are"]] - ,["-t one -t two -t three" - ,{t: ["one", "two", "three"]} - ,[]] - ,["-t one -t null -t three four five null" - ,{t: ["one", "null", "three"]} - ,["four", "five", "null"]] - ,["-t foo" - ,{t:["foo"]} - ,[]] - ,["--no-t" - ,{t:["false"]} - ,[]] - ,["-no-no-t" - ,{t:["true"]} - ,[]] - ,["-aoa one -aoa null -aoa 100" - ,{aoa:["one", null, 100]} - ,[]] - ,["-str 100" - ,{str:"100"} - ,[]] - 
,["--color always" - ,{color:"always"} - ,[]] - ,["--no-nullstream" - ,{nullstream:null} - ,[]] - ,["--nullstream false" - ,{nullstream:null} - ,[]] - ,["--notadate=2011-01-25" - ,{notadate: "2011-01-25"} - ,[]] - ,["--date 2011-01-25" - ,{date: new Date("2011-01-25")} - ,[]] - ,["-cl 1" - ,{config: true, length: 1} - ,[] - ,{config: Boolean, length: Number, clear: Boolean} - ,{c: "--config", l: "--length"}] - ,["--acount bla" - ,{"acount":true} - ,["bla"] - ,{account: Boolean, credentials: Boolean, options: String} - ,{a:"--account", c:"--credentials",o:"--options"}] - ,["--clear" - ,{clear:true} - ,[] - ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} - ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] - ,["--file -" - ,{"file":"-"} - ,[] - ,{file:String} - ,{}] - ,["--file -" - ,{"file":true} - ,["-"] - ,{file:Boolean} - ,{}] - ].forEach(function (test) { - var argv = test[0].split(/\s+/) - , opts = test[1] - , rem = test[2] - , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) - , parsed = actual.argv - delete actual.argv - console.log(util.inspect(actual, false, 2, true), parsed.remain) - for (var i in opts) { - var e = JSON.stringify(opts[i]) - , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) - if (e && typeof e === "object") { - assert.deepEqual(e, a) - } else { - assert.equal(e, a) - } - } - assert.deepEqual(rem, parsed.remain) - }) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/LICENSE 2011-05-13 18:36:35.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/README.md 2010-10-04 18:52:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -# abbrev-js - -Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). - -Usage: - - var abbrev = require("abbrev"); - abbrev("foo", "fool", "folding", "flop"); - - // returns: - { fl: 'flop' - , flo: 'flop' - , flop: 'flop' - , fol: 'folding' - , fold: 'folding' - , foldi: 'folding' - , foldin: 'folding' - , folding: 'folding' - , foo: 'foo' - , fool: 'fool' - } - -This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/lib/abbrev.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/lib/abbrev.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/lib/abbrev.js 2013-01-09 00:02:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/lib/abbrev.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ - -module.exports = exports = abbrev.abbrev = abbrev - -abbrev.monkeyPatch = monkeyPatch - -function monkeyPatch () { - Object.defineProperty(Array.prototype, 'abbrev', { - value: function () { return abbrev(this) }, - enumerable: false, configurable: true, writable: true - }) - - Object.defineProperty(Object.prototype, 'abbrev', { - value: function () { return abbrev(Object.keys(this)) }, - enumerable: false, configurable: true, writable: true - }) -} - -function abbrev (list) { - if (arguments.length !== 1 || !Array.isArray(list)) { - list = Array.prototype.slice.call(arguments, 0) - } - for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { - args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) - } - - // sort them lexicographically, so that they're next to their nearest kin - args = args.sort(lexSort) - - // walk through each, seeing how much it has in common with the next and previous - var abbrevs = {} - , prev = "" - for (var i = 0, l = args.length ; i < l ; i ++) { - var current = args[i] - , next = args[i + 1] || "" - , nextMatches = true - , prevMatches = true - if (current === next) continue - for (var j = 0, cl = current.length ; j < cl ; j ++) { - var curChar = current.charAt(j) - nextMatches = nextMatches && curChar === next.charAt(j) - prevMatches = prevMatches && curChar === prev.charAt(j) - if (!nextMatches && !prevMatches) { - j ++ - break - } - } - prev = current - if (j === cl) { - abbrevs[current] = current - continue - } - for (var a = current.substr(0, j) ; j <= cl ; j ++) { - abbrevs[a] = current - a += current.charAt(j) - } - } - return abbrevs -} - -function lexSort (a, b) { - return a === b ? 0 : a > b ? 
1 : -1 -} - - -// tests -if (module === require.main) { - -var assert = require("assert") -var util = require("util") - -console.log("running tests") -function test (list, expect) { - var actual = abbrev(list) - assert.deepEqual(actual, expect, - "abbrev("+util.inspect(list)+") === " + util.inspect(expect) + "\n"+ - "actual: "+util.inspect(actual)) - actual = abbrev.apply(exports, list) - assert.deepEqual(abbrev.apply(exports, list), expect, - "abbrev("+list.map(JSON.stringify).join(",")+") === " + util.inspect(expect) + "\n"+ - "actual: "+util.inspect(actual)) -} - -test([ "ruby", "ruby", "rules", "rules", "rules" ], -{ rub: 'ruby' -, ruby: 'ruby' -, rul: 'rules' -, rule: 'rules' -, rules: 'rules' -}) -test(["fool", "foom", "pool", "pope"], -{ fool: 'fool' -, foom: 'foom' -, poo: 'pool' -, pool: 'pool' -, pop: 'pope' -, pope: 'pope' -}) -test(["a", "ab", "abc", "abcd", "abcde", "acde"], -{ a: 'a' -, ab: 'ab' -, abc: 'abc' -, abcd: 'abcd' -, abcde: 'abcde' -, ac: 'acde' -, acd: 'acde' -, acde: 'acde' -}) - -console.log("pass") - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/package.json 2013-10-25 01:43:04.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/node_modules/abbrev/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -{ - "name": "abbrev", - "version": "1.0.4", - "description": "Like ruby's abbrev module, but in js", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me" - }, - "main": "./lib/abbrev.js", - "scripts": { - "test": "node lib/abbrev.js" - }, - "repository": { - "type": "git", - "url": "http://github.com/isaacs/abbrev-js" - }, - "license": { - "type": "MIT", - "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE" - }, - "readme": "# abbrev-js\n\nJust like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).\n\nUsage:\n\n var abbrev = require(\"abbrev\");\n abbrev(\"foo\", \"fool\", \"folding\", \"flop\");\n \n // returns:\n { fl: 'flop'\n , flo: 'flop'\n , flop: 'flop'\n , fol: 'folding'\n , fold: 'folding'\n , foldi: 'folding'\n , foldin: 'folding'\n , folding: 'folding'\n , foo: 'foo'\n , fool: 'fool'\n }\n\nThis is handy for command-line scripts, or other cases where you want to be able to accept shorthands.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/abbrev-js/issues" - }, - "_id": "abbrev@1.0.4", - "dist": { - "shasum": "a03327b0947faf6378a83416354738388e0561b2" - }, - "_from": "abbrev@1", - "_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/nopt/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/nopt/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -{ - "name": "nopt", - "version": "2.1.2", - "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "main": "lib/nopt.js", - "scripts": { - "test": "node lib/nopt.js" - }, - "repository": { - "type": "git", - "url": "http://github.com/isaacs/nopt" - }, - "bin": { - "nopt": "./bin/nopt.js" - }, - "license": { - "type": "MIT", - "url": "https://github.com/isaacs/nopt/raw/master/LICENSE" - }, - "dependencies": { - "abbrev": "1" - }, - "readme": "If you want to write an option parser, and have it be good, there are\ntwo ways to do it. The Right Way, and the Wrong Way.\n\nThe Wrong Way is to sit down and write an option parser. We've all done\nthat.\n\nThe Right Way is to write some complex configurable program with so many\noptions that you go half-insane just trying to manage them all, and put\nit off with duct-tape solutions until you see exactly to the core of the\nproblem, and finally snap and write an awesome option parser.\n\nIf you want to write an option parser, don't write an option parser.\nWrite a package manager, or a source control system, or a service\nrestarter, or an operating system. You probably won't end up with a\ngood one of those, but if you don't give up, and you are relentless and\ndiligent enough in your procrastination, you may just end up with a very\nnice option parser.\n\n## USAGE\n\n // my-program.js\n var nopt = require(\"nopt\")\n , Stream = require(\"stream\").Stream\n , path = require(\"path\")\n , knownOpts = { \"foo\" : [String, null]\n , \"bar\" : [Stream, Number]\n , \"baz\" : path\n , \"bloo\" : [ \"big\", \"medium\", \"small\" ]\n , \"flag\" : Boolean\n , \"pick\" : Boolean\n , \"many\" : [String, Array]\n }\n , shortHands = { \"foofoo\" : [\"--foo\", \"Mr. Foo\"]\n , \"b7\" : [\"--bar\", \"7\"]\n , \"m\" : [\"--bloo\", \"medium\"]\n , \"p\" : [\"--pick\"]\n , \"f\" : [\"--flag\"]\n }\n // everything is optional.\n // knownOpts and shorthands default to {}\n // arg list defaults to process.argv\n // slice defaults to 2\n , parsed = nopt(knownOpts, shortHands, process.argv, 2)\n console.log(parsed)\n\nThis would give you support for any of the following:\n\n```bash\n$ node my-program.js --foo \"blerp\" --no-flag\n{ \"foo\" : \"blerp\", \"flag\" : false }\n\n$ node my-program.js ---bar 7 --foo \"Mr. Hand\" --flag\n{ bar: 7, foo: \"Mr. Hand\", flag: true }\n\n$ node my-program.js --foo \"blerp\" -f -----p\n{ foo: \"blerp\", flag: true, pick: true }\n\n$ node my-program.js -fp --foofoo\n{ foo: \"Mr. Foo\", flag: true, pick: true }\n\n$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.\n{ foo: \"Mr. Foo\", argv: { remain: [\"-fp\"] } }\n\n$ node my-program.js --blatzk 1000 -fp # unknown opts are ok.\n{ blatzk: 1000, flag: true, pick: true }\n\n$ node my-program.js --blatzk true -fp # but they need a value\n{ blatzk: true, flag: true, pick: true }\n\n$ node my-program.js --no-blatzk -fp # unless they start with \"no-\"\n{ blatzk: false, flag: true, pick: true }\n\n$ node my-program.js --baz b/a/z # known paths are resolved.\n{ baz: \"/Users/isaacs/b/a/z\" }\n\n# if Array is one of the types, then it can take many\n# values, and will always be an array. 
The other types provided\n# specify what types are allowed in the list.\n\n$ node my-program.js --many 1 --many null --many foo\n{ many: [\"1\", \"null\", \"foo\"] }\n\n$ node my-program.js --many foo\n{ many: [\"foo\"] }\n```\n\nRead the tests at the bottom of `lib/nopt.js` for more examples of\nwhat this puppy can do.\n\n## Types\n\nThe following types are supported, and defined on `nopt.typeDefs`\n\n* String: A normal string. No parsing is done.\n* path: A file system path. Gets resolved against cwd if not absolute.\n* url: A url. If it doesn't parse, it isn't accepted.\n* Number: Must be numeric.\n* Date: Must parse as a date. If it does, and `Date` is one of the options,\n then it will return a Date object, not a string.\n* Boolean: Must be either `true` or `false`. If an option is a boolean,\n then it does not need a value, and its presence will imply `true` as\n the value. To negate boolean flags, do `--no-whatever` or `--whatever\n false`\n* NaN: Means that the option is strictly not allowed. Any value will\n fail.\n* Stream: An object matching the \"Stream\" class in node. Valuable\n for use when validating programmatically. (npm uses this to let you\n supply any WriteStream on the `outfd` and `logfd` config options.)\n* Array: If `Array` is specified as one of the types, then the value\n will be parsed as a list of options. This means that multiple values\n can be specified, and that the value will always be an array.\n\nIf a type is an array of values not on this list, then those are\nconsidered valid values. For instance, in the example above, the\n`--bloo` option can only be one of `\"big\"`, `\"medium\"`, or `\"small\"`,\nand any other value will be rejected.\n\nWhen parsing unknown fields, `\"true\"`, `\"false\"`, and `\"null\"` will be\ninterpreted as their JavaScript equivalents, and numeric values will be\ninterpreted as a number.\n\nYou can also mix types and values, or multiple types, in a list. For\ninstance `{ blah: [Number, null] }` would allow a value to be set to\neither a Number or null. When types are ordered, this implies a\npreference, and the first type that can be used to properly interpret\nthe value will be used.\n\nTo define a new type, add it to `nopt.typeDefs`. Each item in that\nhash is an object with a `type` member and a `validate` method. The\n`type` member is an object that matches what goes in the type list. The\n`validate` method is a function that gets called with `validate(data,\nkey, val)`. Validate methods should assign `data[key]` to the valid\nvalue of `val` if it can be handled properly, or return boolean\n`false` if it cannot.\n\nYou can also call `nopt.clean(data, types, typeDefs)` to clean up a\nconfig object and remove its invalid properties.\n\n## Error Handling\n\nBy default, nopt outputs a warning to standard error when invalid\noptions are found. You can change this behavior by assigning a method\nto `nopt.invalidHandler`. This method will be called with\nthe offending `nopt.invalidHandler(key, val, types)`.\n\nIf no `nopt.invalidHandler` is assigned, then it will console.error\nits whining. If it is assigned to boolean `false` then the warning is\nsuppressed.\n\n## Abbreviations\n\nYes, they are supported. 
If you define options like this:\n\n```javascript\n{ \"foolhardyelephants\" : Boolean\n, \"pileofmonkeys\" : Boolean }\n```\n\nThen this will work:\n\n```bash\nnode program.js --foolhar --pil\nnode program.js --no-f --pileofmon\n# etc.\n```\n\n## Shorthands\n\nShorthands are a hash of shorter option names to a snippet of args that\nthey expand to.\n\nIf multiple one-character shorthands are all combined, and the\ncombination does not unambiguously match any other option or shorthand,\nthen they will be broken up into their constituent parts. For example:\n\n```json\n{ \"s\" : [\"--loglevel\", \"silent\"]\n, \"g\" : \"--global\"\n, \"f\" : \"--force\"\n, \"p\" : \"--parseable\"\n, \"l\" : \"--long\"\n}\n```\n\n```bash\nnpm ls -sgflp\n# just like doing this:\nnpm ls --loglevel silent --global --force --long --parseable\n```\n\n## The Rest of the args\n\nThe config object returned by nopt is given a special member called\n`argv`, which is an object with the following fields:\n\n* `remain`: The remaining args after all the parsing has occurred.\n* `original`: The args as they originally appeared.\n* `cooked`: The args after flags and shorthands are expanded.\n\n## Slicing\n\nNode programs are called with more or less the exact argv as it appears\nin C land, after the v8 and node-specific options have been plucked off.\nAs such, `argv[0]` is always `node` and `argv[1]` is always the\nJavaScript program being run.\n\nThat's usually not very useful to you. So they're sliced off by\ndefault. If you want them, then you can pass in `0` as the last\nargument, or any other number that you'd like to slice off the start of\nthe list.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/nopt/issues" - }, - "_id": "nopt@2.1.2", - "dist": { - "shasum": "e8c9b162781a1b262015c3e7fbeef20c3abec912" - }, - "_from": "nopt@2", - "_resolved": "https://registry.npmjs.org/nopt/-/nopt-2.1.2.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/LICENSE 2012-07-24 01:28:42.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/README.md 2012-06-06 09:11:18.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,153 +0,0 @@ -# npmlog - -The logger util that npm uses. - -This logger is very basic. It does the logging for npm. It supports -custom levels and colored output. - -By default, logs are written to stderr. If you want to send log messages -to outputs other than streams, then you can change the `log.stream` -member, or you can just listen to the events that it emits, and do -whatever you want with them. - -# Basic Usage - -``` -var log = require('npmlog') - -// additional stuff ---------------------------+ -// message ----------+ | -// prefix ----+ | | -// level -+ | | | -// v v v v - log.info('fyi', 'I have a kitty cat: %j', myKittyCat) -``` - -## log.level - -* {String} - -The level to display logs at. Any logs at or above this level will be -displayed. The special level `silent` will prevent anything from being -displayed ever. - -## log.record - -* {Array} - -An array of all the log messages that have been entered. - -## log.maxRecordSize - -* {Number} - -The maximum number of records to keep. If log.record gets bigger than -10% over this value, then it is sliced down to 90% of this value. - -The reason for the 10% window is so that it doesn't have to resize a -large array on every log entry. - -## log.prefixStyle - -* {Object} - -A style object that specifies how prefixes are styled. (See below) - -## log.headingStyle - -* {Object} - -A style object that specifies how the heading is styled. (See below) - -## log.heading - -* {String} Default: "" - -If set, a heading that is printed at the start of every line. - -## log.stream - -* {Stream} Default: `process.stderr` - -The stream where output is written. - -## log.enableColor() - -Force colors to be used on all messages, regardless of the output -stream. - -## log.disableColor() - -Disable colors on all messages. - -## log.pause() - -Stop emitting messages to the stream, but do not drop them. - -## log.resume() - -Emit all buffered messages that were written while paused. - -## log.log(level, prefix, message, ...) - -* `level` {String} The level to emit the message at -* `prefix` {String} A string prefix. Set to "" to skip. -* `message...` Arguments to `util.format` - -Emit a log message at the specified level. - -## log\[level](prefix, message, ...) - -For example, - -* log.silly(prefix, message, ...) -* log.verbose(prefix, message, ...) -* log.info(prefix, message, ...) -* log.http(prefix, message, ...) -* log.warn(prefix, message, ...) -* log.error(prefix, message, ...) - -Like `log.log(level, prefix, message, ...)`. In this way, each level is -given a shorthand, so you can do `log.info(prefix, message)`. 
-
-## log.addLevel(level, n, style, disp)
-
-* `level` {String} Level indicator
-* `n` {Number} The numeric level
-* `style` {Object} Object with fg, bg, inverse, etc.
-* `disp` {String} Optional replacement for `level` in the output.
-
-Sets up a new level with a shorthand function and so forth.
-
-Note that if the number is `Infinity`, then setting the level to that
-will cause all log messages to be suppressed. If the number is
-`-Infinity`, then the only way to show it is to enable all log messages.
-
-# Events
-
-Events are all emitted with the message object.
-
-* `log` Emitted for all messages
-* `log.<level>` Emitted for all messages with the `<level>` level.
-* `<prefix>` Messages with prefixes also emit their prefix as an event.
-
-# Style Objects
-
-Style objects can have the following fields:
-
-* `fg` {String} Color for the foreground text
-* `bg` {String} Color for the background
-* `bold`, `inverse`, `underline` {Boolean} Set the associated property
-* `bell` {Boolean} Make a noise (This is pretty annoying, probably.)
-
-# Message Objects
-
-Every log event is emitted with a message object, and the `log.record`
-list contains all of them that have been created. They have the
-following fields:
-
-* `id` {Number}
-* `level` {String}
-* `prefix` {String}
-* `message` {String} Result of `util.format()`
-* `messageRaw` {Array} Arguments to `util.format()`
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/example.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/example.js
--- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/example.js 2012-06-06 08:28:06.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/example.js 1970-01-01 00:00:00.000000000 +0000
@@ -1,39 +0,0 @@
-var log = require('./log.js')
-
-log.heading = 'npm'
-
-console.error('log.level=silly')
-log.level = 'silly'
-log.silly('silly prefix', 'x = %j', {foo:{bar:'baz'}})
-log.verbose('verbose prefix', 'x = %j', {foo:{bar:'baz'}})
-log.info('info prefix', 'x = %j', {foo:{bar:'baz'}})
-log.http('http prefix', 'x = %j', {foo:{bar:'baz'}})
-log.warn('warn prefix', 'x = %j', {foo:{bar:'baz'}})
-log.error('error prefix', 'x = %j', {foo:{bar:'baz'}})
-log.silent('silent prefix', 'x = %j', {foo:{bar:'baz'}})
-
-console.error('log.level=silent')
-log.level = 'silent'
-log.silly('silly prefix', 'x = %j', {foo:{bar:'baz'}})
-log.verbose('verbose prefix', 'x = %j', {foo:{bar:'baz'}})
-log.info('info prefix', 'x = %j', {foo:{bar:'baz'}})
-log.http('http prefix', 'x = %j', {foo:{bar:'baz'}})
-log.warn('warn prefix', 'x = %j', {foo:{bar:'baz'}})
-log.error('error prefix', 'x = %j', {foo:{bar:'baz'}})
-log.silent('silent prefix', 'x = %j', {foo:{bar:'baz'}})
-
-console.error('log.level=info')
-log.level = 'info'
-log.silly('silly prefix', 'x = %j', {foo:{bar:'baz'}})
-log.verbose('verbose prefix', 'x = %j', {foo:{bar:'baz'}})
-log.info('info prefix', 'x = %j', {foo:{bar:'baz'}})
-log.http('http prefix', 'x = %j', {foo:{bar:'baz'}})
-log.warn('warn prefix', 'x = %j', {foo:{bar:'baz'}})
-log.error('error prefix', 'x = %j', {foo:{bar:'baz'}})
-log.silent('silent prefix', 'x = %j', {foo:{bar:'baz'}})
-log.error('404', 'This is a longer\n'+
- 'message, with some details\n'+
- 'and maybe a stack.\n'+
- new Error('a 404 error').stack)
-log.addLevel('noise', 10000, {beep: true})
-log.noise(false, 'LOUD NOISES')
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/log.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/log.js
--- 
tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/log.js 2013-07-11 06:15:43.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/log.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,154 +0,0 @@ -var EE = require('events').EventEmitter -var log = exports = module.exports = new EE -var util = require('util') - -var ansi = require('ansi') -log.cursor = ansi(process.stderr) -log.stream = process.stderr - -// by default, let ansi decide based on tty-ness. -var colorEnabled = undefined -log.enableColor = function () { - colorEnabled = true - this.cursor.enabled = true -} -log.disableColor = function () { - colorEnabled = false - this.cursor.enabled = false -} - -// default level -log.level = 'info' - -// temporarily stop emitting, but don't drop -log.pause = function () { - this._paused = true -} - -log.resume = function () { - if (!this._paused) return - this._paused = false - - var b = this._buffer - this._buffer = [] - b.forEach(function (m) { - this.emitLog(m) - }, this) -} - -log._buffer = [] - -var id = 0 -log.record = [] -log.maxRecordSize = 10000 -log.log = function (lvl, prefix, message) { - var l = this.levels[lvl] - if (l === undefined) { - return this.emit('error', new Error(util.format( - 'Undefined log level: %j', lvl))) - } - - var a = new Array(arguments.length - 2) - var stack = null - for (var i = 2; i < arguments.length; i ++) { - var arg = a[i-2] = arguments[i] - - // resolve stack traces to a plain string. - if (typeof arg === 'object' && arg && - (arg instanceof Error) && arg.stack) { - arg.stack = stack = arg.stack + '' - } - } - if (stack) a.unshift(stack + '\n') - message = util.format.apply(util, a) - - var m = { id: id++, - level: lvl, - prefix: String(prefix || ''), - message: message, - messageRaw: a } - - this.emit('log', m) - this.emit('log.' 
+ lvl, m) - if (m.prefix) this.emit(m.prefix, m) - - this.record.push(m) - var mrs = this.maxRecordSize - var n = this.record.length - mrs - if (n > mrs / 10) { - var newSize = Math.floor(mrs * 0.9) - this.record = this.record.slice(-1 * newSize) - } - - this.emitLog(m) -}.bind(log) - -log.emitLog = function (m) { - if (this._paused) { - this._buffer.push(m) - return - } - var l = this.levels[m.level] - if (l === undefined) return - if (l < this.levels[this.level]) return - if (l > 0 && !isFinite(l)) return - - var style = log.style[m.level] - var disp = log.disp[m.level] || m.level - m.message.split(/\r?\n/).forEach(function (line) { - if (this.heading) { - this.write(this.heading, this.headingStyle) - this.write(' ') - } - this.write(disp, log.style[m.level]) - var p = m.prefix || '' - if (p) this.write(' ') - this.write(p, this.prefixStyle) - this.write(' ' + line + '\n') - }, this) -} - -log.write = function (msg, style) { - if (!this.cursor) return - if (this.stream !== this.cursor.stream) { - this.cursor = ansi(this.stream, { enabled: colorEnabled }) - } - - style = style || {} - if (style.fg) this.cursor.fg[style.fg]() - if (style.bg) this.cursor.bg[style.bg]() - if (style.bold) this.cursor.bold() - if (style.underline) this.cursor.underline() - if (style.inverse) this.cursor.inverse() - if (style.beep) this.cursor.beep() - this.cursor.write(msg).reset() -} - -log.addLevel = function (lvl, n, style, disp) { - if (!disp) disp = lvl - this.levels[lvl] = n - this.style[lvl] = style - if (!this[lvl]) this[lvl] = function () { - var a = new Array(arguments.length + 1) - a[0] = lvl - for (var i = 0; i < arguments.length; i ++) { - a[i + 1] = arguments[i] - } - return this.log.apply(this, a) - }.bind(this) - this.disp[lvl] = disp -} - -log.prefixStyle = { fg: 'magenta' } -log.headingStyle = { fg: 'white', bg: 'black' } - -log.style = {} -log.levels = {} -log.disp = {} -log.addLevel('silly', -Infinity, { inverse: true }, 'sill') -log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') -log.addLevel('info', 2000, { fg: 'green' }) -log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) -log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') -log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') -log.addLevel('silent', Infinity) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/.npmignore 2012-01-27 03:20:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -node_modules diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/README.md 2013-08-16 15:23:36.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -ansi.js -========= -### Advanced ANSI formatting tool for Node.js - -`ansi.js` is a module for Node.js that provides an easy-to-use API for -writing ANSI escape codes to `Stream` instances. 
ANSI escape codes are used to do -fancy things in a terminal window, like render text in colors, delete characters, -lines, the entire window, or hide and show the cursor, and lots more! - -The code for the example in the screenshot above can be found in the -`examples/imgcat` directory. - -#### Features: - - * 256 color support for the terminal! - * Make a beep sound from your terminal! - * Works with *any* writable `Stream` instance. - * Allows you to move the cursor anywhere on the terminal window. - * Allows you to delete existing contents from the terminal window. - * Allows you to hide and show the cursor. - * Converts CSS color codes and RGB values into ANSI escape codes. - * Low-level; you are in control of when escape codes are used, it's not abstracted. - - -Installation ------------- - -Install with `npm`: - -``` bash -$ npm install ansi -``` - - -Example -------- - -``` js -var ansi = require('ansi') - , cursor = ansi(process.stdout) - -// You can chain your calls forever: -cursor - .red() // Set font color to red - .bg.grey() // Set background color to grey - .write('Hello World!') // Write 'Hello World!' to stdout - .bg.reset() // Reset the bgcolor before writing the trailing \n, - // to avoid Terminal glitches - .write('\n') // And a final \n to wrap things up - -// Rendering modes are persistent: -cursor.hex('#660000').bold().underline() - -// You can use the regular logging functions, text will be green -console.log('This is blood red, bold text') - -// To reset just the foreground color: -cursor.fg.reset() - -console.log('This will still be bold') - -// Clean up after yourself! -cursor.reset() -``` - - -License -------- - -(The MIT License) - -Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
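
The README removed above documents ansi.js's chainable `Cursor`. As an editorial aid (not part of the diff), a short sketch of that same surface; every call here (`hex()`, `bold()`/`resetBold()`, `fg.reset()`, `goto()`) appears in the `lib/ansi.js` hunk further down, while the color value and text are illustrative:

```javascript
// Editorial sketch: chainable Cursor calls from the vendored ansi module.
var ansi = require('ansi')
  , cursor = ansi(process.stdout)   // memoized per stream via stream._ansicursor

cursor
  .hex('#cc0000')        // CSS hex code, approximated on the 216-color ANSI cube
  .bold()
  .write('bold red-ish text')
  .resetBold()
  .fg.reset()            // reset only the foreground color
  .write('\n')

cursor.goto(1, 1)        // emits ESC[1;1H (row;column), per Cursor.prototype.goto
```

Note that when the stream is not a TTY, `enabled` defaults to false and every call except `write()` becomes a no-op, which is why piping output to a file stays clean.
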
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/color-spaces.pl tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/color-spaces.pl --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/color-spaces.pl 2013-06-18 05:54:34.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/color-spaces.pl 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -#!/usr/bin/perl -# Author: Todd Larason -# $XFree86: xc/programs/xterm/vttests/256colors2.pl,v 1.1 1999/07/11 08:49:54 dawes Exp $ - -print "256 color mode\n\n"; - -# display back ground colors - -for ($fgbg = 38; $fgbg <= 48; $fgbg +=10) { - -# first the system ones: -print "System colors:\n"; -for ($color = 0; $color < 8; $color++) { - print "\x1b[${fgbg};5;${color}m::"; -} -print "\x1b[0m\n"; -for ($color = 8; $color < 16; $color++) { - print "\x1b[${fgbg};5;${color}m::"; -} -print "\x1b[0m\n\n"; - -# now the color cube -print "Color cube, 6x6x6:\n"; -for ($green = 0; $green < 6; $green++) { - for ($red = 0; $red < 6; $red++) { - for ($blue = 0; $blue < 6; $blue++) { - $color = 16 + ($red * 36) + ($green * 6) + $blue; - print "\x1b[${fgbg};5;${color}m::"; - } - print "\x1b[0m "; - } - print "\n"; -} - -# now the grayscale ramp -print "Grayscale ramp:\n"; -for ($color = 232; $color < 256; $color++) { - print "\x1b[${fgbg};5;${color}m::"; -} -print "\x1b[0m\n\n"; - -} - -print "Examples for the 3-byte color mode\n\n"; - -for ($fgbg = 38; $fgbg <= 48; $fgbg +=10) { - -# now the color cube -print "Color cube\n"; -for ($green = 0; $green < 256; $green+=51) { - for ($red = 0; $red < 256; $red+=51) { - for ($blue = 0; $blue < 256; $blue+=51) { - print "\x1b[${fgbg};2;${red};${green};${blue}m::"; - } - print "\x1b[0m "; - } - print "\n"; -} - -# now the grayscale ramp -print "Grayscale ramp:\n"; -for ($gray = 8; $gray < 256; $gray+=10) { - print "\x1b[${fgbg};2;${gray};${gray};${gray}m::"; -} -print "\x1b[0m\n\n"; - -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/beep/index.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/beep/index.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/beep/index.js 2012-06-28 19:48:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/beep/index.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -#!/usr/bin/env node - -/** - * Invokes the terminal "beep" sound once per second on every exact second. - */ - -process.title = 'beep' - -var cursor = require('../../')(process.stdout) - -function beep () { - cursor.beep() - setTimeout(beep, 1000 - (new Date()).getMilliseconds()) -} - -setTimeout(beep, 1000 - (new Date()).getMilliseconds()) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/clear/index.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/clear/index.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/clear/index.js 2012-06-28 19:48:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/clear/index.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -#!/usr/bin/env node - -/** - * Like GNU ncurses "clear" command. 
- * https://github.com/mscdex/node-ncurses/blob/master/deps/ncurses/progs/clear.c - */ - -process.title = 'clear' - -function lf () { return '\n' } - -require('../../')(process.stdout) - .write(Array.apply(null, Array(process.stdout.getWindowSize()[1])).map(lf).join('')) - .eraseData(2) - .goto(1, 1) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/cursorPosition.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/cursorPosition.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/cursorPosition.js 2013-03-26 06:44:41.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/cursorPosition.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -#!/usr/bin/env node - -var tty = require('tty') -var cursor = require('../')(process.stdout) - -// listen for the queryPosition report on stdin -process.stdin.resume() -raw(true) - -process.stdin.once('data', function (b) { - var match = /\[(\d+)\;(\d+)R$/.exec(b.toString()) - if (match) { - var xy = match.slice(1, 3).reverse().map(Number) - console.error(xy) - } - - // cleanup and close stdin - raw(false) - process.stdin.pause() -}) - - -// send the query position request code to stdout -cursor.queryPosition() - -function raw (mode) { - if (process.stdin.setRawMode) { - process.stdin.setRawMode(mode) - } else { - tty.setRawMode(mode) - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/progress/index.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/progress/index.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/progress/index.js 2012-06-28 19:48:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/progress/index.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,87 +0,0 @@ -#!/usr/bin/env node - -var assert = require('assert') - , ansi = require('../../') - -function Progress (stream, width) { - this.cursor = ansi(stream) - this.delta = this.cursor.newlines - this.width = width | 0 || 10 - this.open = '[' - this.close = ']' - this.complete = '█' - this.incomplete = '_' - - // initial render - this.progress = 0 -} - -Object.defineProperty(Progress.prototype, 'progress', { - get: get - , set: set - , configurable: true - , enumerable: true -}) - -function get () { - return this._progress -} - -function set (v) { - this._progress = Math.max(0, Math.min(v, 100)) - - var w = this.width - this.complete.length - this.incomplete.length - , n = w * (this._progress / 100) | 0 - , i = w - n - , com = c(this.complete, n) - , inc = c(this.incomplete, i) - , delta = this.cursor.newlines - this.delta - - assert.equal(com.length + inc.length, w) - - if (delta > 0) { - this.cursor.up(delta) - this.delta = this.cursor.newlines - } - - this.cursor - .horizontalAbsolute(0) - .eraseLine(2) - .fg.white() - .write(this.open) - .fg.grey() - .bold() - .write(com) - .resetBold() - .write(inc) - .fg.white() - .write(this.close) - .fg.reset() - .write('\n') -} - -function c (char, length) { - return Array.apply(null, Array(length)).map(function () { - return char - }).join('') -} - - - - -// Usage -var width = parseInt(process.argv[2], 10) || process.stdout.getWindowSize()[0] / 2 - , p = new Progress(process.stdout, width) - -;(function tick () { - p.progress += 
Math.random() * 5 - p.cursor - .eraseLine(2) - .write('Progress: ') - .bold().write(p.progress.toFixed(2)) - .write('%') - .resetBold() - .write('\n') - if (p.progress < 100) - setTimeout(tick, 100) -})() diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/starwars.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/starwars.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/starwars.js 2012-06-28 19:51:33.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/examples/starwars.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -#!/usr/bin/env node - -/** - * A little script to play the ASCII Star Wars, but with a hidden cursor, - * since over `telnet(1)` the cursor remains visible which is annoying. - */ - -process.title = 'starwars' - -var net = require('net') - , cursor = require('../')(process.stdout) - , color = process.argv[2] - -// enable "raw mode" so that keystrokes aren't visible -process.stdin.resume() -if (process.stdin.setRawMode) { - process.stdin.setRawMode(true) -} else { - require('tty').setRawMode(true) -} - -// connect to the ASCII Star Wars server -var socket = net.connect(23, 'towel.blinkenlights.nl') - -socket.on('connect', function () { - if (color in cursor.fg) { - cursor.fg[color]() - } - cursor.hide() - socket.pipe(process.stdout) -}) - -process.stdin.on('data', function (data) { - if (data.toString() === '\u0003') { - // Ctrl+C; a.k.a SIGINT - socket.destroy() - process.stdin.pause() - } -}) - -process.on('exit', function () { - cursor - .show() - .fg.reset() - .write('\n') -}) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/lib/ansi.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/lib/ansi.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/lib/ansi.js 2013-08-16 15:23:36.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/npmlog/node_modules/ansi/lib/ansi.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,405 +0,0 @@ - -/** - * References: - * - * - http://en.wikipedia.org/wiki/ANSI_escape_code - * - http://www.termsys.demon.co.uk/vtansi.htm - * - */ - -/** - * Module dependencies. - */ - -var emitNewlineEvents = require('./newlines') - , prefix = '\x1b[' // For all escape codes - , suffix = 'm' // Only for color codes - -/** - * The ANSI escape sequences. - */ - -var codes = { - up: 'A' - , down: 'B' - , forward: 'C' - , back: 'D' - , nextLine: 'E' - , previousLine: 'F' - , horizontalAbsolute: 'G' - , eraseData: 'J' - , eraseLine: 'K' - , scrollUp: 'S' - , scrollDown: 'T' - , savePosition: 's' - , restorePosition: 'u' - , queryPosition: '6n' - , hide: '?25l' - , show: '?25h' -} - -/** - * Rendering ANSI codes. - */ - -var styles = { - bold: 1 - , italic: 3 - , underline: 4 - , inverse: 7 -} - -/** - * The negating ANSI code for the rendering modes. - */ - -var reset = { - bold: 22 - , italic: 23 - , underline: 24 - , inverse: 27 -} - -/** - * The standard, styleable ANSI colors. 
- */ - -var colors = { - white: 37 - , black: 30 - , blue: 34 - , cyan: 36 - , green: 32 - , magenta: 35 - , red: 31 - , yellow: 33 - , grey: 90 - , brightBlack: 90 - , brightRed: 91 - , brightGreen: 92 - , brightYellow: 93 - , brightBlue: 94 - , brightMagenta: 95 - , brightCyan: 96 - , brightWhite: 97 -} - - -/** - * Creates a Cursor instance based off the given `writable stream` instance. - */ - -function ansi (stream, options) { - if (stream._ansicursor) { - return stream._ansicursor - } else { - return stream._ansicursor = new Cursor(stream, options) - } -} -module.exports = exports = ansi - -/** - * The `Cursor` class. - */ - -function Cursor (stream, options) { - if (!(this instanceof Cursor)) { - return new Cursor(stream, options) - } - if (typeof stream != 'object' || typeof stream.write != 'function') { - throw new Error('a valid Stream instance must be passed in') - } - - // the stream to use - this.stream = stream - - // when 'enabled' is false then all the functions are no-ops except for write() - this.enabled = options && options.enabled - if (typeof this.enabled === 'undefined') { - this.enabled = stream.isTTY - } - this.enabled = !!this.enabled - - // then `buffering` is true, then `write()` calls are buffered in - // memory until `flush()` is invoked - this.buffering = !!(options && options.buffering) - this._buffer = [] - - // controls the foreground and background colors - this.fg = this.foreground = new Colorer(this, 0) - this.bg = this.background = new Colorer(this, 10) - - // defaults - this.Bold = false - this.Italic = false - this.Underline = false - this.Inverse = false - - // keep track of the number of "newlines" that get encountered - this.newlines = 0 - emitNewlineEvents(stream) - stream.on('newline', function () { - this.newlines++ - }.bind(this)) -} -exports.Cursor = Cursor - -/** - * Helper function that calls `write()` on the underlying Stream. - * Returns `this` instead of the write() return value to keep - * the chaining going. - */ - -Cursor.prototype.write = function (data) { - if (this.buffering) { - this._buffer.push(arguments) - } else { - this.stream.write.apply(this.stream, arguments) - } - return this -} - -/** - * Buffer `write()` calls into memory. - * - * @api public - */ - -Cursor.prototype.buffer = function () { - this.buffering = true - return this -} - -/** - * Write out the in-memory buffer. - * - * @api public - */ - -Cursor.prototype.flush = function () { - this.buffering = false - var str = this._buffer.map(function (args) { - if (args.length != 1) throw new Error('unexpected args length! ' + args.length); - return args[0]; - }).join(''); - this._buffer.splice(0); // empty - this.write(str); - return this -} - - -/** - * The `Colorer` class manages both the background and foreground colors. - */ - -function Colorer (cursor, base) { - this.current = null - this.cursor = cursor - this.base = base -} -exports.Colorer = Colorer - -/** - * Write an ANSI color code, ensuring that the same code doesn't get rewritten. - */ - -Colorer.prototype._setColorCode = function setColorCode (code) { - var c = String(code) - if (this.current === c) return - this.cursor.enabled && this.cursor.write(prefix + c + suffix) - this.current = c - return this -} - - -/** - * Set up the positional ANSI codes. 
- */ - -Object.keys(codes).forEach(function (name) { - var code = String(codes[name]) - Cursor.prototype[name] = function () { - var c = code - if (arguments.length > 0) { - c = toArray(arguments).map(Math.round).join(';') + code - } - this.enabled && this.write(prefix + c) - return this - } -}) - -/** - * Set up the functions for the rendering ANSI codes. - */ - -Object.keys(styles).forEach(function (style) { - var name = style[0].toUpperCase() + style.substring(1) - , c = styles[style] - , r = reset[style] - - Cursor.prototype[style] = function () { - if (this[name]) return - this.enabled && this.write(prefix + c + suffix) - this[name] = true - return this - } - - Cursor.prototype['reset' + name] = function () { - if (!this[name]) return - this.enabled && this.write(prefix + r + suffix) - this[name] = false - return this - } -}) - -/** - * Setup the functions for the standard colors. - */ - -Object.keys(colors).forEach(function (color) { - var code = colors[color] - - Colorer.prototype[color] = function () { - this._setColorCode(this.base + code) - return this.cursor - } - - Cursor.prototype[color] = function () { - return this.foreground[color]() - } -}) - -/** - * Makes a beep sound! - */ - -Cursor.prototype.beep = function () { - this.enabled && this.write('\x07') - return this -} - -/** - * Moves cursor to specific position - */ - -Cursor.prototype.goto = function (x, y) { - x = x | 0 - y = y | 0 - this.enabled && this.write(prefix + y + ';' + x + 'H') - return this -} - -/** - * Resets the color. - */ - -Colorer.prototype.reset = function () { - this._setColorCode(this.base + 39) - return this.cursor -} - -/** - * Resets all ANSI formatting on the stream. - */ - -Cursor.prototype.reset = function () { - this.enabled && this.write(prefix + '0' + suffix) - this.Bold = false - this.Italic = false - this.Underline = false - this.Inverse = false - this.foreground.current = null - this.background.current = null - return this -} - -/** - * Sets the foreground color with the given RGB values. - * The closest match out of the 216 colors is picked. - */ - -Colorer.prototype.rgb = function (r, g, b) { - var base = this.base + 38 - , code = rgb(r, g, b) - this._setColorCode(base + ';5;' + code) - return this.cursor -} - -/** - * Same as `cursor.fg.rgb(r, g, b)`. - */ - -Cursor.prototype.rgb = function (r, g, b) { - return this.foreground.rgb(r, g, b) -} - -/** - * Accepts CSS color codes for use with ANSI escape codes. - * For example: `#FF000` would be bright red. - */ - -Colorer.prototype.hex = function (color) { - return this.rgb.apply(this, hex(color)) -} - -/** - * Same as `cursor.fg.hex(color)`. - */ - -Cursor.prototype.hex = function (color) { - return this.foreground.hex(color) -} - - -// UTIL FUNCTIONS // - -/** - * Translates a 255 RGB value to a 0-5 ANSI RGV value, - * then returns the single ANSI color code to use. - */ - -function rgb (r, g, b) { - var red = r / 255 * 5 - , green = g / 255 * 5 - , blue = b / 255 * 5 - return rgb5(red, green, blue) -} - -/** - * Turns rgb 0-5 values into a single ANSI color code to use. - */ - -function rgb5 (r, g, b) { - var red = Math.round(r) - , green = Math.round(g) - , blue = Math.round(b) - return 16 + (red*36) + (green*6) + blue -} - -/** - * Accepts a hex CSS color code string (# is optional) and - * translates it into an Array of 3 RGB 0-255 values, which - * can then be used with rgb(). - */ - -function hex (color) { - var c = color[0] === '#' ? 
color.substring(1) : color - , r = c.substring(0, 2) - , g = c.substring(2, 4) - , b = c.substring(4, 6) - return [parseInt(r, 16), parseInt(g, 16), parseInt(b, 16)] -} - -/** - * Turns an array-like object into a real array. - */ - -function toArray (a) { - var i = 0 - , l = a.length - , rtn = [] - for (; i 0) { - var len = data.length - , i = 0 - // now try to calculate any deltas - if (typeof data == 'string') { - for (; i` Emitted for all messages with the `` level.\n* `` Messages with prefixes also emit their prefix as an event.\n\n# Style Objects\n\nStyle objects can have the following fields:\n\n* `fg` {String} Color for the foreground text\n* `bg` {String} Color for the background\n* `bold`, `inverse`, `underline` {Boolean} Set the associated property\n* `bell` {Boolean} Make a noise (This is pretty annoying, probably.)\n\n# Message Objects\n\nEvery log event is emitted with a message object, and the `log.record`\nlist contains all of them that have been created. They have the\nfollowing fields:\n\n* `id` {Number}\n* `level` {String}\n* `prefix` {String}\n* `message` {String} Result of `util.format()`\n* `messageRaw` {Array} Arguments to `util.format()`\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/npmlog/issues" - }, - "_id": "npmlog@0.0.6", - "dist": { - "shasum": "c1f5d043bdd224e9e2028343fac840ba287eac57" - }, - "_from": "npmlog@0", - "_resolved": "https://registry.npmjs.org/npmlog/-/npmlog-0.0.6.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/LICENSE 2012-06-18 00:08:22.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -Copyright (c) Isaac Z. Schlueter -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS -``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. 
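
Before the osenv hunks continue below: the npmlog module whose README, `example.js`, `log.js`, and `package.json` are removed above is an `EventEmitter` with numeric, styleable levels. An editorial sketch of that documented surface (the heading, custom level, and prefixes are illustrative, not part of the diff):

```javascript
// Editorial sketch of the npmlog API removed above.
var log = require('npmlog')

log.heading = 'tilemill'   // printed at the start of every line
log.level = 'verbose'      // show messages at or above verbose (1000)

// addLevel(level, n, style, disp): 3500 slots between http (3000) and warn (4000)
log.addLevel('notice', 3500, { fg: 'cyan' }, 'NOTE')
log.notice('export', 'rendered %d tiles', 42)

// every message also emits 'log', 'log.<level>', and its prefix as events
log.on('log.warn', function (m) {
  // m is a message object: { id, level, prefix, message, messageRaw }
})
log.warn('cache', 'falling back to %j', { dir: '/tmp' })
```
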
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/README.md 2012-06-18 00:50:18.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,63 +0,0 @@ -# osenv - -Look up environment settings specific to different operating systems. - -## Usage - -```javascript -var osenv = require('osenv') -var path = osenv.path() -var user = osenv.user() -// etc. - -// Some things are not reliably in the env, and have a fallback command: -var h = osenv.hostname(function (er, hostname) { - h = hostname -}) -// This will still cause it to be memoized, so calling osenv.hostname() -// is now an immediate operation. - -// You can always send a cb, which will get called in the nextTick -// if it's been memoized, or wait for the fallback data if it wasn't -// found in the environment. -osenv.hostname(function (er, hostname) { - if (er) console.error('error looking up hostname') - else console.log('this machine calls itself %s', hostname) -}) -``` - -## osenv.hostname() - -The machine name. Calls `hostname` if not found. - -## osenv.user() - -The currently logged-in user. Calls `whoami` if not found. - -## osenv.prompt() - -Either PS1 on unix, or PROMPT on Windows. - -## osenv.tmpdir() - -The place where temporary files should be created. - -## osenv.home() - -No place like it. - -## osenv.path() - -An array of the places that the operating system will search for -executables. - -## osenv.editor() - -Return the executable name of the editor program. This uses the EDITOR -and VISUAL environment variables, and falls back to `vi` on Unix, or -`notepad.exe` on Windows. - -## osenv.shell() - -The SHELL on Unix, which Windows calls the ComSpec. Defaults to 'bash' -or 'cmd'. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/osenv.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/osenv.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/osenv.js 2012-06-18 00:46:16.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/osenv.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,80 +0,0 @@ -var isWindows = process.platform === 'win32' -var windir = isWindows ? process.env.windir || 'C:\\Windows' : null -var path = require('path') -var exec = require('child_process').exec - -// looking up envs is a bit costly. -// Also, sometimes we want to have a fallback -// Pass in a callback to wait for the fallback on failures -// After the first lookup, always returns the same thing. -function memo (key, lookup, fallback) { - var fell = false - var falling = false - exports[key] = function (cb) { - var val = lookup() - if (!val && !fell && !falling && fallback) { - fell = true - falling = true - exec(fallback, function (er, output, stderr) { - falling = false - if (er) return // oh well, we tried - val = output.trim() - }) - } - exports[key] = function (cb) { - if (cb) process.nextTick(cb.bind(null, null, val)) - return val - } - if (cb && !falling) process.nextTick(cb.bind(null, null, val)) - return val - } -} - -memo('user', function () { - return ( isWindows - ? process.env.USERDOMAIN + '\\' + process.env.USERNAME - : process.env.USER - ) -}, 'whoami') - -memo('prompt', function () { - return isWindows ? process.env.PROMPT : process.env.PS1 -}) - -memo('hostname', function () { - return isWindows ? 
process.env.COMPUTERNAME : process.env.HOSTNAME -}, 'hostname') - -memo('tmpdir', function () { - var t = isWindows ? 'temp' : 'tmp' - return process.env.TMPDIR || - process.env.TMP || - process.env.TEMP || - ( exports.home() ? path.resolve(exports.home(), t) - : isWindows ? path.resolve(windir, t) - : '/tmp' - ) -}) - -memo('home', function () { - return ( isWindows ? process.env.USERPROFILE - : process.env.HOME - ) -}) - -memo('path', function () { - return (process.env.PATH || - process.env.Path || - process.env.path).split(isWindows ? ';' : ':') -}) - -memo('editor', function () { - return process.env.EDITOR || - process.env.VISUAL || - (isWindows ? 'notepad.exe' : 'vi') -}) - -memo('shell', function () { - return isWindows ? process.env.ComSpec || 'cmd' - : process.env.SHELL || 'bash' -}) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/osenv/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/osenv/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -{ - "name": "osenv", - "version": "0.0.3", - "main": "osenv.js", - "directories": { - "test": "test" - }, - "dependencies": {}, - "devDependencies": { - "tap": "~0.2.5" - }, - "scripts": { - "test": "tap test/*.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/isaacs/osenv" - }, - "keywords": [ - "environment", - "variable", - "home", - "tmpdir", - "path", - "prompt", - "ps1" - ], - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "license": "BSD", - "description": "Look up environment settings specific to different operating systems", - "readme": "# osenv\n\nLook up environment settings specific to different operating systems.\n\n## Usage\n\n```javascript\nvar osenv = require('osenv')\nvar path = osenv.path()\nvar user = osenv.user()\n// etc.\n\n// Some things are not reliably in the env, and have a fallback command:\nvar h = osenv.hostname(function (er, hostname) {\n h = hostname\n})\n// This will still cause it to be memoized, so calling osenv.hostname()\n// is now an immediate operation.\n\n// You can always send a cb, which will get called in the nextTick\n// if it's been memoized, or wait for the fallback data if it wasn't\n// found in the environment.\nosenv.hostname(function (er, hostname) {\n if (er) console.error('error looking up hostname')\n else console.log('this machine calls itself %s', hostname)\n})\n```\n\n## osenv.hostname()\n\nThe machine name. Calls `hostname` if not found.\n\n## osenv.user()\n\nThe currently logged-in user. Calls `whoami` if not found.\n\n## osenv.prompt()\n\nEither PS1 on unix, or PROMPT on Windows.\n\n## osenv.tmpdir()\n\nThe place where temporary files should be created.\n\n## osenv.home()\n\nNo place like it.\n\n## osenv.path()\n\nAn array of the places that the operating system will search for\nexecutables.\n\n## osenv.editor() \n\nReturn the executable name of the editor program. This uses the EDITOR\nand VISUAL environment variables, and falls back to `vi` on Unix, or\n`notepad.exe` on Windows.\n\n## osenv.shell()\n\nThe SHELL on Unix, which Windows calls the ComSpec. 
Defaults to 'bash'\nor 'cmd'.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/osenv/issues" - }, - "_id": "osenv@0.0.3", - "dist": { - "shasum": "9a9d0a6e08288e9f104fadb06094d308a37aef96" - }, - "_from": "osenv@0", - "_resolved": "https://registry.npmjs.org/osenv/-/osenv-0.0.3.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/AUTHORS tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/AUTHORS --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/AUTHORS 2012-01-30 02:14:26.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/AUTHORS 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -# Authors sorted by whether or not they're me. -Isaac Z. Schlueter (http://blog.izs.me) -Wayne Larsen (http://github.com/wvl) -ritch -Marcel Laverdet -Yosef Dinerstein diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/LICENSE 2011-05-30 17:48:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/README.md 2013-06-21 14:49:40.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -A `rm -rf` for node. - -Install with `npm install rimraf`, or just drop rimraf.js somewhere. - -## API - -`rimraf(f, callback)` - -The callback will be called with an error if there is one. Certain -errors are handled for you: - -* `EBUSY` - rimraf will back off a maximum of opts.maxBusyTries times - before giving up. -* `EMFILE` - If too many file descriptors get opened, rimraf will - patiently wait until more become available. - - -## rimraf.sync - -It can remove stuff synchronously, too. But that's not so good. Use -the async API. It's better. - -## CLI - -If installed with `npm install rimraf -g` it can be used as a global -command `rimraf <path>` which is useful for cross platform support. 
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/bin.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/bin.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/bin.js 2013-06-21 14:49:16.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/bin.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ -#!/usr/bin/env node - -var rimraf = require('./') - -var help = false -var dashdash = false -var args = process.argv.slice(2).filter(function(arg) { - if (dashdash) - return !!arg - else if (arg === '--') - dashdash = true - else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) - help = true - else - return !!arg -}); - -if (help || args.length === 0) { - // If they didn't ask for help, then this is not a "success" - var log = help ? console.log : console.error - log('Usage: rimraf <path>') - log('') - log(' Deletes all files and folders at "path" recursively.') - log('') - log('Options:') - log('') - log(' -h, --help Display this usage info') - process.exit(help ? 0 : 1) -} else { - args.forEach(function(arg) { - rimraf.sync(arg) - }) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,65 +0,0 @@ -{ - "name": "rimraf", - "version": "2.2.2", - "main": "rimraf.js", - "description": "A deep deletion module for node (like `rm -rf`)", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "license": { - "type": "MIT", - "url": "https://github.com/isaacs/rimraf/raw/master/LICENSE" - }, - "optionalDependencies": { - "graceful-fs": "~2" - }, - "repository": { - "type": "git", - "url": "git://github.com/isaacs/rimraf.git" - }, - "scripts": { - "test": "cd test && bash run.sh" - }, - "bin": { - "rimraf": "./bin.js" - }, - "contributors": [ - { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me" - }, - { - "name": "Wayne Larsen", - "email": "wayne@larsen.st", - "url": "http://github.com/wvl" - }, - { - "name": "ritch", - "email": "skawful@gmail.com" - }, - { - "name": "Marcel Laverdet" - }, - { - "name": "Yosef Dinerstein", - "email": "yosefd@microsoft.com" - } - ], - "readme": "A `rm -rf` for node.\n\nInstall with `npm install rimraf`, or just drop rimraf.js somewhere.\n\n## API\n\n`rimraf(f, callback)`\n\nThe callback will be called with an error if there is one. Certain\nerrors are handled for you:\n\n* `EBUSY` - rimraf will back off a maximum of opts.maxBusyTries times\n before giving up.\n* `EMFILE` - If too many file descriptors get opened, rimraf will\n patiently wait until more become available.\n\n\n## rimraf.sync\n\nIt can remove stuff synchronously, too. But that's not so good. Use\nthe async API. 
It's better.\n\n## CLI\n\nIf installed with `npm install rimraf -g` it can be used as a global\ncommand `rimraf <path>` which is useful for cross platform support.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/rimraf/issues" - }, - "dependencies": { - "graceful-fs": "~2" - }, - "_id": "rimraf@2.2.2", - "dist": { - "shasum": "c0632a207295c3f4a4eea26def5359224dd41e08" - }, - "_from": "rimraf@2", - "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.2.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/rimraf.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/rimraf.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/rimraf/rimraf.js 2013-07-14 18:41:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/rimraf/rimraf.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,184 +0,0 @@ -module.exports = rimraf -rimraf.sync = rimrafSync - -var path = require("path") - , fs - -try { - // optional dependency - fs = require("graceful-fs") -} catch (er) { - fs = require("fs") -} - -// for EMFILE handling -var timeout = 0 -exports.EMFILE_MAX = 1000 -exports.BUSYTRIES_MAX = 3 - -var isWindows = (process.platform === "win32") - -function rimraf (p, cb) { - if (!cb) throw new Error("No callback passed to rimraf()") - - var busyTries = 0 - rimraf_(p, function CB (er) { - if (er) { - if (er.code === "EBUSY" && busyTries < exports.BUSYTRIES_MAX) { - busyTries ++ - var time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(function () { - rimraf_(p, CB) - }, time) - } - - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < exports.EMFILE_MAX) { - return setTimeout(function () { - rimraf_(p, CB) - }, timeout ++) - } - - // already gone - if (er.code === "ENOENT") er = null - } - - timeout = 0 - cb(er) - }) -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, cb) { - fs.unlink(p, function (er) { - if (er) { - if (er.code === "ENOENT") - return cb() - if (er.code === "EPERM") - return (isWindows) ? fixWinEPERM(p, er, cb) : rmdir(p, er, cb) - if (er.code === "EISDIR") - return rmdir(p, er, cb) - } - return cb(er) - }) -} - -function fixWinEPERM (p, er, cb) { - fs.chmod(p, 666, function (er2) { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - fs.stat(p, function(er3, stats) { - if (er3) - cb(er3.code === "ENOENT" ? 
null : er) - else if (stats.isDirectory()) - rmdir(p, er, cb) - else - fs.unlink(p, cb) - }) - }) -} - -function fixWinEPERMSync (p, er, cb) { - try { - fs.chmodSync(p, 666) - } catch (er2) { - if (er2.code !== "ENOENT") - throw er - } - - try { - var stats = fs.statSync(p) - } catch (er3) { - if (er3 !== "ENOENT") - throw er - } - - if (stats.isDirectory()) - rmdirSync(p, er) - else - fs.unlinkSync(p) -} - -function rmdir (p, originalEr, cb) { - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - fs.rmdir(p, function (er) { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST")) - rmkids(p, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) -} - -function rmkids(p, cb) { - fs.readdir(p, function (er, files) { - if (er) - return cb(er) - var n = files.length - if (n === 0) - return fs.rmdir(p, cb) - var errState - files.forEach(function (f) { - rimraf(path.join(p, f), function (er) { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - fs.rmdir(p, cb) - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p) { - try { - fs.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? fixWinEPERMSync(p, er) : rmdirSync(p, er) - if (er.code !== "EISDIR") - throw er - rmdirSync(p, er) - } -} - -function rmdirSync (p, originalEr) { - try { - fs.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST") - rmkidsSync(p) - } -} - -function rmkidsSync (p) { - fs.readdirSync(p).forEach(function (f) { - rimrafSync(path.join(p, f)) - }) - fs.rmdirSync(p) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/.npmignore tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/.npmignore --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/.npmignore 2013-06-20 15:22:44.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -# nada diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/LICENSE 2013-06-15 03:28:37.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. 
- -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/Makefile tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/Makefile --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/Makefile 2013-06-20 15:30:55.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/Makefile 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -files = semver.browser.js \ - semver.min.js \ - semver.browser.js.gz \ - semver.min.js.gz - -all: $(files) - -clean: - rm $(files) - -semver.browser.js: head.js semver.js foot.js - ( cat head.js; \ - cat semver.js | \ - egrep -v '^ *\/\* nomin \*\/' | \ - perl -pi -e 's/debug\([^\)]+\)//g'; \ - cat foot.js ) > semver.browser.js - -semver.min.js: semver.browser.js - uglifyjs -m <semver.browser.js >semver.min.js - -%.gz: % - gzip --stdout -9 <$< >$@ - -.PHONY: all clean diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/README.md 2013-07-30 01:45:57.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,123 +0,0 @@ -semver(1) -- The semantic versioner for npm -=========================================== - -## Usage - - $ npm install semver - - semver.valid('1.2.3') // '1.2.3' - semver.valid('a.b.c') // null - semver.clean(' =v1.2.3 ') // '1.2.3' - semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true - semver.gt('1.2.3', '9.8.7') // false - semver.lt('1.2.3', '9.8.7') // true - -As a command-line utility: - - $ semver -h - - Usage: semver <version> [<version> [...]] [-r <range> | -i <inc> | -d <dec>] - Test if version(s) satisfy the supplied range(s), and sort them. - - Multiple versions or ranges may be supplied, unless increment - or decrement options are specified. In that case, only a single - version may be used, and it is incremented by the specified level - - Program exits successfully if any valid version satisfies - all supplied ranges, and prints all satisfying versions. - - If no versions are valid, or ranges are not satisfied, - then exits failure. - - Versions are printed in ascending order, so supplying - multiple versions to the utility will just sort them. - -## Versions - -A "version" is described by the v2.0.0 specification found at -<http://semver.org/>. - -A leading `"="` or `"v"` character is stripped off and ignored. - -## Ranges - -The following range styles are supported: - -* `1.2.3` A specific version. When nothing else will do. Note that - build metadata is still ignored, so `1.2.3+build2012` will satisfy - this range. -* `>1.2.3` Greater than a specific version. -* `<1.2.3` Less than a specific version. 
If there is no prerelease - tag on the version range, then no prerelease version will be allowed - either, even though these are technically "less than". -* `>=1.2.3` Greater than or equal to. Note that prerelease versions - are NOT equal to their "normal" equivalents, so `1.2.3-beta` will - not satisfy this range, but `2.3.0-beta` will. -* `<=1.2.3` Less than or equal to. In this case, prerelease versions - ARE allowed, so `1.2.3-beta` would satisfy. -* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` -* `~1.2.3` := `>=1.2.3-0 <1.3.0-0` "Reasonably close to 1.2.3". When - using tilde operators, prerelease versions are supported as well, - but a prerelease of the next significant digit will NOT be - satisfactory, so `1.3.0-beta` will not satisfy `~1.2.3`. -* `^1.2.3` := `>=1.2.3-0 <2.0.0-0` "Compatible with 1.2.3". When - using caret operators, anything from the specified version (including - prerelease) will be supported up to, but not including, the next - major version (or its prereleases). `1.5.1` will satisfy `^1.2.3`, - while `1.2.2` and `2.0.0-beta` will not. -* `^0.1.3` := `>=0.1.3-0 <0.2.0-0` "Compatible with 0.1.3". 0.x.x versions are - special: the first non-zero component indicates potentially breaking changes, - meaning the caret operator matches any version with the same first non-zero - component starting at the specified version. -* `^0.0.2` := `=0.0.2` "Only the version 0.0.2 is considered compatible" -* `~1.2` := `>=1.2.0-0 <1.3.0-0` "Any version starting with 1.2" -* `^1.2` := `>=1.2.0-0 <2.0.0-0` "Any version compatible with 1.2" -* `1.2.x` := `>=1.2.0-0 <1.3.0-0` "Any version starting with 1.2" -* `~1` := `>=1.0.0-0 <2.0.0-0` "Any version starting with 1" -* `^1` := `>=1.0.0-0 <2.0.0-0` "Any version compatible with 1" -* `1.x` := `>=1.0.0-0 <2.0.0-0` "Any version starting with 1" - - -Ranges can be joined with either a space (which implies "and") or a -`||` (which implies "or"). - -## Functions - -All methods and classes take a final `loose` boolean argument that, if -true, will be more forgiving about not-quite-valid semver strings. -The resulting output will always be 100% strict, of course. - -Strict-mode Comparators and Ranges will be strict about the SemVer -strings that they parse. - -* valid(v): Return the parsed version, or null if it's not valid. -* inc(v, release): Return the version incremented by the release type - (major, minor, patch, or prerelease), or null if it's not valid. - -### Comparison - -* gt(v1, v2): `v1 > v2` -* gte(v1, v2): `v1 >= v2` -* lt(v1, v2): `v1 < v2` -* lte(v1, v2): `v1 <= v2` -* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent, - even if they're not the exact same string. You already know how to - compare strings. -* neq(v1, v2): `v1 != v2` The opposite of eq. -* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call - the corresponding function above. `"==="` and `"!=="` do simple - string comparison, but are included for completeness. Throws if an - invalid comparison string is provided. -* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if - v2 is greater. Sorts in ascending order if passed to Array.sort(). -* rcompare(v1, v2): The reverse of compare. Sorts an array of versions - in descending order when passed to Array.sort(). - - -### Ranges - -* validRange(range): Return the valid range or null if it's not valid -* satisfies(version, range): Return true if the version satisfies the - range. 
-* maxSatisfying(versions, range): Return the highest version in the list - that satisfies the range, or null if none of them do. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/bin/semver tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/bin/semver --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/bin/semver 2013-07-09 22:39:05.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/bin/semver 1970-01-01 00:00:00.000000000 +0000 @@ -1,119 +0,0 @@ -#!/usr/bin/env node -// Standalone semver comparison program. -// Exits successfully and prints matching version(s) if -// any supplied version is valid and passes all tests. - -var argv = process.argv.slice(2) - , versions = [] - , range = [] - , gt = [] - , lt = [] - , eq = [] - , inc = null - , version = require("../package.json").version - , loose = false - , semver = require("../semver") - -main() - -function main () { - if (!argv.length) return help() - while (argv.length) { - var a = argv.shift() - var i = a.indexOf('=') - if (i !== -1) { - a = a.slice(0, i) - argv.unshift(a.slice(i + 1)) - } - switch (a) { - case "-l": case "--loose": - loose = true - break - case "-v": case "--version": - versions.push(argv.shift()) - break - case "-i": case "--inc": case "--increment": - switch (argv[0]) { - case "major": case "minor": case "patch": case "prerelease": - inc = argv.shift() - break - default: - inc = "patch" - break - } - break - case "-r": case "--range": - range.push(argv.shift()) - break - case "-h": case "--help": case "-?": - return help() - default: - versions.push(a) - break - } - } - - versions = versions.filter(function (v) { - return semver.valid(v, loose) - }) - if (!versions.length) return fail() - if (inc && (versions.length !== 1 || range.length)) - return failInc() - - for (var i = 0, l = range.length; i < l ; i ++) { - versions = versions.filter(function (v) { - return semver.satisfies(v, range[i], loose) - }) - if (!versions.length) return fail() - } - return success(versions) -} - -function failInc () { - console.error("--inc can only be used on a single version with no range") - fail() -} - -function fail () { process.exit(1) } - -function success () { - versions.sort(function (a, b) { - return semver.compare(a, b, loose) - }).map(function (v) { - return semver.clean(v, loose) - }).map(function (v) { - return inc ? semver.inc(v, inc, loose) : v - }).forEach(function (v,i,_) { console.log(v) }) -} - -function help () { - console.log(["SemVer " + version - ,"" - ,"A JavaScript implementation of the http://semver.org/ specification" - ,"Copyright Isaac Z. Schlueter" - ,"" - ,"Usage: semver [options] <version> [<version> [...]]" - ,"Prints valid versions sorted by SemVer precedence" - ,"" - ,"Options:" - ,"-r --range <range>" - ," Print versions that match the specified range." - ,"" - ,"-i --increment [<level>]" - ," Increment a version by the specified level. Level can" - ," be one of: major, minor, patch, or prerelease" - ," Default level is 'patch'." - ," Only one version may be specified." - ,"" - ,"-l --loose" - ," Interpret versions and ranges loosely" - ,"" - ,"Program exits successfully if any valid version satisfies" - ,"all supplied ranges, and prints all satisfying versions." - ,"" - ,"If no satisfying versions are found, then exits failure." - ,"" - ,"Versions are printed in ascending order, so supplying" - ,"multiple versions to the utility will just sort them." 
- ].join("\n")) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/foot.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/foot.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/foot.js 2013-06-17 23:09:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/foot.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ - -})( - typeof exports === 'object' ? exports : - typeof define === 'function' && define.amd ? {} : - semver = {} -); diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/head.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/head.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/head.js 2013-06-17 23:08:49.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/head.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -;(function(exports) { - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,35 +0,0 @@ -{ - "name": "semver", - "version": "2.1.0", - "description": "The semantic version parser used by npm.", - "main": "semver.js", - "browser": "semver.browser.js", - "min": "semver.min.js", - "scripts": { - "test": "tap test/*.js", - "prepublish": "make" - }, - "devDependencies": { - "tap": "0.x >=0.0.4", - "uglify-js": "~2.3.6" - }, - "license": "BSD", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-semver.git" - }, - "bin": { - "semver": "./bin/semver" - }, - "readme": "semver(1) -- The semantic versioner for npm\n===========================================\n\n## Usage\n\n $ npm install semver\n\n semver.valid('1.2.3') // '1.2.3'\n semver.valid('a.b.c') // null\n semver.clean(' =v1.2.3 ') // '1.2.3'\n semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true\n semver.gt('1.2.3', '9.8.7') // false\n semver.lt('1.2.3', '9.8.7') // true\n\nAs a command-line utility:\n\n $ semver -h\n\n Usage: semver <version> [<version> [...]] [-r <range> | -i <inc> | -d <dec>]\n Test if version(s) satisfy the supplied range(s), and sort them.\n\n Multiple versions or ranges may be supplied, unless increment\n or decrement options are specified. In that case, only a single\n version may be used, and it is incremented by the specified level\n\n Program exits successfully if any valid version satisfies\n all supplied ranges, and prints all satisfying versions.\n\n If no versions are valid, or ranges are not satisfied,\n then exits failure.\n\n Versions are printed in ascending order, so supplying\n multiple versions to the utility will just sort them.\n\n## Versions\n\nA \"version\" is described by the v2.0.0 specification found at\n<http://semver.org/>.\n\nA leading `\"=\"` or `\"v\"` character is stripped off and ignored.\n\n## Ranges\n\nThe following range styles are supported:\n\n* `1.2.3` A specific version. When nothing else will do. Note that\n build metadata is still ignored, so `1.2.3+build2012` will satisfy\n this range.\n* `>1.2.3` Greater than a specific version.\n* `<1.2.3` Less than a specific version. 
If there is no prerelease\n tag on the version range, then no prerelease version will be allowed\n either, even though these are technically \"less than\".\n* `>=1.2.3` Greater than or equal to. Note that prerelease versions\n are NOT equal to their \"normal\" equivalents, so `1.2.3-beta` will\n not satisfy this range, but `2.3.0-beta` will.\n* `<=1.2.3` Less than or equal to. In this case, prerelease versions\n ARE allowed, so `1.2.3-beta` would satisfy.\n* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`\n* `~1.2.3` := `>=1.2.3-0 <1.3.0-0` \"Reasonably close to 1.2.3\". When\n using tilde operators, prerelease versions are supported as well,\n but a prerelease of the next significant digit will NOT be\n satisfactory, so `1.3.0-beta` will not satisfy `~1.2.3`.\n* `^1.2.3` := `>=1.2.3-0 <2.0.0-0` \"Compatible with 1.2.3\". When\n using caret operators, anything from the specified version (including\n prerelease) will be supported up to, but not including, the next\n major version (or its prereleases). `1.5.1` will satisfy `^1.2.3`,\n while `1.2.2` and `2.0.0-beta` will not.\n* `^0.1.3` := `>=0.1.3-0 <0.2.0-0` \"Compatible with 0.1.3\". 0.x.x versions are\n special: the first non-zero component indicates potentially breaking changes,\n meaning the caret operator matches any version with the same first non-zero\n component starting at the specified version.\n* `^0.0.2` := `=0.0.2` \"Only the version 0.0.2 is considered compatible\"\n* `~1.2` := `>=1.2.0-0 <1.3.0-0` \"Any version starting with 1.2\"\n* `^1.2` := `>=1.2.0-0 <2.0.0-0` \"Any version compatible with 1.2\"\n* `1.2.x` := `>=1.2.0-0 <1.3.0-0` \"Any version starting with 1.2\"\n* `~1` := `>=1.0.0-0 <2.0.0-0` \"Any version starting with 1\"\n* `^1` := `>=1.0.0-0 <2.0.0-0` \"Any version compatible with 1\"\n* `1.x` := `>=1.0.0-0 <2.0.0-0` \"Any version starting with 1\"\n\n\nRanges can be joined with either a space (which implies \"and\") or a\n`||` (which implies \"or\").\n\n## Functions\n\nAll methods and classes take a final `loose` boolean argument that, if\ntrue, will be more forgiving about not-quite-valid semver strings.\nThe resulting output will always be 100% strict, of course.\n\nStrict-mode Comparators and Ranges will be strict about the SemVer\nstrings that they parse.\n\n* valid(v): Return the parsed version, or null if it's not valid.\n* inc(v, release): Return the version incremented by the release type\n (major, minor, patch, or prerelease), or null if it's not valid.\n\n### Comparison\n\n* gt(v1, v2): `v1 > v2`\n* gte(v1, v2): `v1 >= v2`\n* lt(v1, v2): `v1 < v2`\n* lte(v1, v2): `v1 <= v2`\n* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent,\n even if they're not the exact same string. You already know how to\n compare strings.\n* neq(v1, v2): `v1 != v2` The opposite of eq.\n* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call\n the corresponding function above. `\"===\"` and `\"!==\"` do simple\n string comparison, but are included for completeness. Throws if an\n invalid comparison string is provided.\n* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if\n v2 is greater. Sorts in ascending order if passed to Array.sort().\n* rcompare(v1, v2): The reverse of compare. 
Sorts an array of versions\n in descending order when passed to Array.sort().\n\n\n### Ranges\n\n* validRange(range): Return the valid range or null if it's not valid\n* satisfies(version, range): Return true if the version satisfies the\n range.\n* maxSatisfying(versions, range): Return the highest version in the list\n that satisfies the range, or null if none of them do.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-semver/issues" - }, - "_id": "semver@2.1.0", - "dist": { - "shasum": "64cac489ade5b651a9fcbf0fffee3146b77b0264" - }, - "_from": "semver@~2.1", - "_resolved": "https://registry.npmjs.org/semver/-/semver-2.1.0.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.browser.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.browser.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.browser.js 2013-08-01 23:04:16.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.browser.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,919 +0,0 @@ -;(function(exports) { - -// export the class if we are in a Node-like system. -if (typeof module === 'object' && module.exports === exports) - exports = module.exports = SemVer; - -// The debug function is excluded entirely from the minified version. - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0'; - -// The actual regexps go on exports.re -var re = exports.re = []; -var src = exports.src = []; -var R = 0; - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -var NUMERICIDENTIFIER = R++; -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; -var NUMERICIDENTIFIERLOOSE = R++; -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; - - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -var NONNUMERICIDENTIFIER = R++; -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; - - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++; -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')'; - -var MAINVERSIONLOOSE = R++; -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -var PRERELEASEIDENTIFIER = R++; -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')'; - -var PRERELEASEIDENTIFIERLOOSE = R++; -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')'; - - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++; -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; - -var PRERELEASELOOSE = R++; -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' 
+ src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++; -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++; -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; - - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++; -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?'; - -src[FULL] = '^' + FULLPLAIN + '$'; - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?'; - -var LOOSE = R++; -src[LOOSE] = '^' + LOOSEPLAIN + '$'; - -var GTLT = R++; -src[GTLT] = '((?:<|>)?=?)'; - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++; -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; -var XRANGEIDENTIFIER = R++; -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; - -var XRANGEPLAIN = R++; -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:(' + src[PRERELEASE] + ')' + - ')?)?)?'; - -var XRANGEPLAINLOOSE = R++; -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:(' + src[PRERELEASELOOSE] + ')' + - ')?)?)?'; - -// >=2.x, for example, means >=2.0.0-0 -// <1.x would be the same as "<1.0.0-0", though. -var XRANGE = R++; -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; -var XRANGELOOSE = R++; -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++; -src[LONETILDE] = '(?:~>?)'; - -var TILDETRIM = R++; -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); -var tildeTrimReplace = '$1~'; - -var TILDE = R++; -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; -var TILDELOOSE = R++; -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; - -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -var LONECARET = R++; -src[LONECARET] = '(?:\\^)'; - -var CARETTRIM = R++; -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); -var caretTrimReplace = '$1^'; - -var CARET = R++; -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; -var CARETLOOSE = R++; -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++; -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; -var COMPARATOR = R++; -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; - - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++; -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; - -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); -var comparatorTrimReplace = '$1$2$3'; - - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++; -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$'; - -var HYPHENRANGELOOSE = R++; -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$'; - -// Star ranges basically just allow anything at all. -var STAR = R++; -src[STAR] = '(<|>)?=?\\s*\\*'; - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - ; - if (!re[i]) - re[i] = new RegExp(src[i]); -} - -exports.parse = parse; -function parse(version, loose) { - var r = loose ? re[LOOSE] : re[FULL]; - return (r.test(version)) ? new SemVer(version, loose) : null; -} - -exports.valid = valid; -function valid(version, loose) { - var v = parse(version, loose); - return v ? v.version : null; -} - - -exports.clean = clean; -function clean(version, loose) { - var s = parse(version, loose); - return s ? s.version : null; -} - -exports.SemVer = SemVer; - -function SemVer(version, loose) { - if (version instanceof SemVer) { - if (version.loose === loose) - return version; - else - version = version.version; - } - - if (!(this instanceof SemVer)) - return new SemVer(version, loose); - - ; - this.loose = loose; - var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); - - if (!m) - throw new TypeError('Invalid Version: ' + version); - - this.raw = version; - - // these are actually numbers - this.major = +m[1]; - this.minor = +m[2]; - this.patch = +m[3]; - - // numberify any prerelease numeric ids - if (!m[4]) - this.prerelease = []; - else - this.prerelease = m[4].split('.').map(function(id) { - return (/^[0-9]+$/.test(id)) ? +id : id; - }); - - this.build = m[5] ? m[5].split('.') : []; - this.format(); -} - -SemVer.prototype.format = function() { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch; - if (this.prerelease.length) - this.version += '-' + this.prerelease.join('.'); - return this.version; -}; - -SemVer.prototype.inspect = function() { - return '<SemVer "' + this + '">'; -}; - -SemVer.prototype.toString = function() { - return this.version; -}; - -SemVer.prototype.compare = function(other) { - ; - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - return this.compareMain(other) || this.comparePre(other); -}; - -SemVer.prototype.compareMain = function(other) { - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch); -}; - -SemVer.prototype.comparePre = function(other) { - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) - return -1; - else if (!this.prerelease.length && other.prerelease.length) - return 1; - else if (!this.prerelease.lenth && !other.prerelease.length) - return 0; - - var i = 0; - do { - var a = this.prerelease[i]; - var b = other.prerelease[i]; - ; - if (a === undefined && b === undefined) - return 0; - else if (b === undefined) - return 1; - else if (a === undefined) - return -1; - else if (a === b) - continue; - else - return compareIdentifiers(a, b); - } while (++i); -}; - -SemVer.prototype.inc = function(release) { - switch (release) { - case 'major': - this.major++; - this.minor = -1; - case 'minor': - this.minor++; - this.patch = -1; - case 'patch': - this.patch++; - this.prerelease = []; - break; - case 'prerelease': - if (this.prerelease.length === 0) - this.prerelease = [0]; - else { - var i = this.prerelease.length; - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++; - i = -2; - } - } - if (i === -1) // didn't increment anything - this.prerelease.push(0); - } - break; - - default: - throw new Error('invalid increment argument: ' + release); - } - this.format(); - return this; -}; - -exports.inc = inc; -function inc(version, release, loose) { - try { - return new SemVer(version, loose).inc(release).version; - } catch (er) { - return null; - } -} - -exports.compareIdentifiers = compareIdentifiers; - -var numeric = /^[0-9]+$/; -function compareIdentifiers(a, b) { - var anum = numeric.test(a); - var bnum = numeric.test(b); - - if (anum && bnum) { - a = +a; - b = +b; - } - - return (anum && !bnum) ? -1 : - (bnum && !anum) ? 1 : - a < b ? -1 : - a > b ? 
1 : - 0; -} - -exports.rcompareIdentifiers = rcompareIdentifiers; -function rcompareIdentifiers(a, b) { - return compareIdentifiers(b, a); -} - -exports.compare = compare; -function compare(a, b, loose) { - return new SemVer(a, loose).compare(b); -} - -exports.compareLoose = compareLoose; -function compareLoose(a, b) { - return compare(a, b, true); -} - -exports.rcompare = rcompare; -function rcompare(a, b, loose) { - return compare(b, a, loose); -} - -exports.sort = sort; -function sort(list, loose) { - return list.sort(function(a, b) { - return exports.compare(a, b, loose); - }); -} - -exports.rsort = rsort; -function rsort(list, loose) { - return list.sort(function(a, b) { - return exports.rcompare(a, b, loose); - }); -} - -exports.gt = gt; -function gt(a, b, loose) { - return compare(a, b, loose) > 0; -} - -exports.lt = lt; -function lt(a, b, loose) { - return compare(a, b, loose) < 0; -} - -exports.eq = eq; -function eq(a, b, loose) { - return compare(a, b, loose) === 0; -} - -exports.neq = neq; -function neq(a, b, loose) { - return compare(a, b, loose) !== 0; -} - -exports.gte = gte; -function gte(a, b, loose) { - return compare(a, b, loose) >= 0; -} - -exports.lte = lte; -function lte(a, b, loose) { - return compare(a, b, loose) <= 0; -} - -exports.cmp = cmp; -function cmp(a, op, b, loose) { - var ret; - switch (op) { - case '===': ret = a === b; break; - case '!==': ret = a !== b; break; - case '': case '=': case '==': ret = eq(a, b, loose); break; - case '!=': ret = neq(a, b, loose); break; - case '>': ret = gt(a, b, loose); break; - case '>=': ret = gte(a, b, loose); break; - case '<': ret = lt(a, b, loose); break; - case '<=': ret = lte(a, b, loose); break; - default: throw new TypeError('Invalid operator: ' + op); - } - return ret; -} - -exports.Comparator = Comparator; -function Comparator(comp, loose) { - if (comp instanceof Comparator) { - if (comp.loose === loose) - return comp; - else - comp = comp.value; - } - - if (!(this instanceof Comparator)) - return new Comparator(comp, loose); - - ; - this.loose = loose; - this.parse(comp); - - if (this.semver === ANY) - this.value = ''; - else - this.value = this.operator + this.semver.version; -} - -var ANY = {}; -Comparator.prototype.parse = function(comp) { - var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; - var m = comp.match(r); - - if (!m) - throw new TypeError('Invalid comparator: ' + comp); - - this.operator = m[1]; - // if it literally is just '>' or '' then allow anything. - if (!m[2]) - this.semver = ANY; - else { - this.semver = new SemVer(m[2], this.loose); - - // <1.2.3-rc DOES allow 1.2.3-beta (has prerelease) - // >=1.2.3 DOES NOT allow 1.2.3-beta - // <=1.2.3 DOES allow 1.2.3-beta - // However, <1.2.3 does NOT allow 1.2.3-beta, - // even though `1.2.3-beta < 1.2.3` - // The assumption is that the 1.2.3 version has something you - // *don't* want, so we push the prerelease down to the minimum. - if (this.operator === '<' && !this.semver.prerelease.length) { - this.semver.prerelease = ['0']; - this.semver.format(); - } - } -}; - -Comparator.prototype.inspect = function() { - return '<SemVer Comparator "' + this + '">'; -}; - -Comparator.prototype.toString = function() { - return this.value; -}; - -Comparator.prototype.test = function(version) { - ; - return (this.semver === ANY) ? 
true : - cmp(version, this.operator, this.semver, this.loose); -}; - - -exports.Range = Range; -function Range(range, loose) { - if ((range instanceof Range) && range.loose === loose) - return range; - - if (!(this instanceof Range)) - return new Range(range, loose); - - this.loose = loose; - - // First, split based on boolean or || - this.raw = range; - this.set = range.split(/\s*\|\|\s*/).map(function(range) { - return this.parseRange(range.trim()); - }, this).filter(function(c) { - // throw out any that are not relevant for whatever reason - return c.length; - }); - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range); - } - - this.format(); -} - -Range.prototype.inspect = function() { - return '<SemVer Range "' + this.range + '">'; -}; - -Range.prototype.format = function() { - this.range = this.set.map(function(comps) { - return comps.join(' ').trim(); - }).join('||').trim(); - return this.range; -}; - -Range.prototype.toString = function() { - return this.range; -}; - -Range.prototype.parseRange = function(range) { - var loose = this.loose; - range = range.trim(); - ; - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; - range = range.replace(hr, hyphenReplace); - ; - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); - ; - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace); - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace); - - // normalize spaces - range = range.split(/\s+/).join(' '); - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; - var set = range.split(' ').map(function(comp) { - return parseComparator(comp, loose); - }).join(' ').split(/\s+/); - if (this.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function(comp) { - return !!comp.match(compRe); - }); - } - set = set.map(function(comp) { - return new Comparator(comp, loose); - }); - - return set; -}; - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators; -function toComparators(range, loose) { - return new Range(range, loose).set.map(function(comp) { - return comp.map(function(c) { - return c.value; - }).join(' ').trim().split(' '); - }); -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator(comp, loose) { - ; - comp = replaceCarets(comp, loose); - ; - comp = replaceTildes(comp, loose); - ; - comp = replaceXRanges(comp, loose); - ; - comp = replaceStars(comp, loose); - ; - return comp; -} - -function isX(id) { - return !id || id.toLowerCase() === 'x' || id === '*'; -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes(comp, loose) { - return comp.trim().split(/\s+/).map(function(comp) { - return replaceTilde(comp, loose); - }).join(' '); -} - -function replaceTilde(comp, loose) { - var r = loose ? 
re[TILDELOOSE] : re[TILDE]; - return comp.replace(r, function(_, M, m, p, pr) { - ; - var ret; - - if (isX(M)) - ret = ''; - else if (isX(m)) - ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; - else if (isX(p)) - // ~1.2 == >=1.2.0- <1.3.0- - ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; - else if (pr) { - ; - if (pr.charAt(0) !== '-') - pr = '-' + pr; - ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + (+m + 1) + '.0-0'; - } else - // ~1.2.3 == >=1.2.3-0 <1.3.0-0 - ret = '>=' + M + '.' + m + '.' + p + '-0' + - ' <' + M + '.' + (+m + 1) + '.0-0'; - - ; - return ret; - }); -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets(comp, loose) { - return comp.trim().split(/\s+/).map(function(comp) { - return replaceCaret(comp, loose); - }).join(' '); -} - -function replaceCaret(comp, loose) { - var r = loose ? re[CARETLOOSE] : re[CARET]; - return comp.replace(r, function(_, M, m, p, pr) { - ; - var ret; - - if (isX(M)) - ret = ''; - else if (isX(m)) - ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; - else if (isX(p)) - if (M === '0') ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; - else ret = '>=' + M + '.' + m + '.0-0 <' + (+M + 1) + '.0.0-0'; - else if (pr) { - ; - if (pr.charAt(0) !== '-') - pr = '-' + pr; - if (M === '0') - if (m === '0') ret = '=' + M + '.' + m + '.' + p + pr; - else ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + (+m + 1) + '.0-0'; - else ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + (+M + 1) + '.0.0-0'; - } else - if (M === '0') - if (m === '0') ret = '=' + M + '.' + m + '.' + p; - else ret = '>=' + M + '.' + m + '.' + p + '-0' + - ' <' + M + '.' + (+m + 1) + '.0-0'; - else ret = '>=' + M + '.' + m + '.' + p + '-0' + - ' <' + (+M + 1) + '.0.0-0'; - - ; - return ret; - }); -} - -function replaceXRanges(comp, loose) { - ; - return comp.split(/\s+/).map(function(comp) { - return replaceXRange(comp, loose); - }).join(' '); -} - -function replaceXRange(comp, loose) { - comp = comp.trim(); - var r = loose ? re[XRANGELOOSE] : re[XRANGE]; - return comp.replace(r, function(ret, gtlt, M, m, p, pr) { - ; - var xM = isX(M); - var xm = xM || isX(m); - var xp = xm || isX(p); - var anyX = xp; - - if (gtlt === '=' && anyX) - gtlt = ''; - - if (gtlt && anyX) { - // replace X with 0, and then append the -0 min-prerelease - if (xM) - M = 0; - if (xm) - m = 0; - if (xp) - p = 0; - - if (gtlt === '>') { - // >1 => >=2.0.0-0 - // >1.2 => >=1.3.0-0 - // >1.2.3 => >= 1.2.4-0 - gtlt = '>='; - if (xM) { - // no change - } else if (xm) { - M = +M + 1; - m = 0; - p = 0; - } else if (xp) { - m = +m + 1; - p = 0; - } - } - - - ret = gtlt + M + '.' + m + '.' + p + '-0'; - } else if (xM) { - // allow any - ret = '*'; - } else if (xm) { - // append '-0' onto the version, otherwise - // '1.x.x' matches '2.0.0-beta', since the tag - // *lowers* the version value - ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; - } else if (xp) { - ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; - } - - ; - - return ret; - }); -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars(comp, loose) { - ; - // Looseness is ignored here. star is always as loose as it gets! 
- return comp.trim().replace(re[STAR], ''); -} - -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0-0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0-0 <3.5.0-0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0-0 <3.5.0-0 -function hyphenReplace($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - - if (isX(fM)) - from = ''; - else if (isX(fm)) - from = '>=' + fM + '.0.0-0'; - else if (isX(fp)) - from = '>=' + fM + '.' + fm + '.0-0'; - else - from = '>=' + from; - - if (isX(tM)) - to = ''; - else if (isX(tm)) - to = '<' + (+tM + 1) + '.0.0-0'; - else if (isX(tp)) - to = '<' + tM + '.' + (+tm + 1) + '.0-0'; - else if (tpr) - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; - else - to = '<=' + to; - - return (from + ' ' + to).trim(); -} - - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function(version) { - if (!version) - return false; - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version)) - return true; - } - return false; -}; - -function testSet(set, version) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) - return false; - } - return true; -} - -exports.satisfies = satisfies; -function satisfies(version, range, loose) { - try { - range = new Range(range, loose); - } catch (er) { - return false; - } - return range.test(version); -} - -exports.maxSatisfying = maxSatisfying; -function maxSatisfying(versions, range, loose) { - return versions.filter(function(version) { - return satisfies(version, range, loose); - }).sort(function(a, b) { - return rcompare(a, b, loose); - })[0] || null; -} - -exports.validRange = validRange; -function validRange(range, loose) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, loose).range || '*'; - } catch (er) { - return null; - } -} - -// Use the define() function if we're in AMD land -if (typeof define === 'function' && define.amd) - define(exports); - -})( - typeof exports === 'object' ? exports : - typeof define === 'function' && define.amd ? {} : - semver = {} -); Binary files /tmp/lD3MsVxK9r/tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.browser.js.gz and /tmp/_bvM4SxRr7/tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.browser.js.gz differ diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.js 2013-07-30 01:43:11.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,923 +0,0 @@ -// export the class if we are in a Node-like system. -if (typeof module === 'object' && module.exports === exports) - exports = module.exports = SemVer; - -// The debug function is excluded entirely from the minified version. 
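Range.test above implements OR-of-ANDs: a version passes if every comparator in at least one `||` alternative matches. The exported helpers wrap that logic; a usage sketch, again assuming the module is required as `semver`:

```javascript
var semver = require("semver");

console.log(semver.satisfies("1.2.9", "1.2.3 - 1.2.9 || >=2.0.0")); // true
console.log(semver.satisfies("1.3.0", "1.2.3 - 1.2.9 || >=2.0.0")); // false

// maxSatisfying filters by the range, sorts descending, takes the head.
console.log(semver.maxSatisfying(["1.2.3", "1.2.9", "2.1.0"], "~1.2.0")); // '1.2.9'
```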
-/* nomin */ var debug; -/* nomin */ if (typeof process === 'object' && - /* nomin */ process.env && - /* nomin */ process.env.NODE_DEBUG && - /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG)) - /* nomin */ debug = function() { - /* nomin */ var args = Array.prototype.slice.call(arguments, 0); - /* nomin */ args.unshift('SEMVER'); - /* nomin */ console.log.apply(console, args); - /* nomin */ }; -/* nomin */ else - /* nomin */ debug = function() {}; - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0'; - -// The actual regexps go on exports.re -var re = exports.re = []; -var src = exports.src = []; -var R = 0; - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -var NUMERICIDENTIFIER = R++; -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; -var NUMERICIDENTIFIERLOOSE = R++; -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; - - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -var NONNUMERICIDENTIFIER = R++; -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; - - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++; -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')'; - -var MAINVERSIONLOOSE = R++; -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -var PRERELEASEIDENTIFIER = R++; -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')'; - -var PRERELEASEIDENTIFIERLOOSE = R++; -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')'; - - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++; -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; - -var PRERELEASELOOSE = R++; -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++; -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++; -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; - - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++; -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?'; - -src[FULL] = '^' + FULLPLAIN + '$'; - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. 
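The `/* nomin */` block above wires trace output to the NODE_DEBUG environment variable, and the parsing steps below all funnel through `debug()`. A sketch of switching it on (the flag itself is in the source above; the surrounding snippet is illustrative):

```javascript
// Must be set before the module is first required, since the check
// runs once at load time.
process.env.NODE_DEBUG = "semver";
var semver = require("semver");
semver.satisfies("1.2.3", "~1.2.0"); // now logs lines prefixed with "SEMVER"
```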
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?'; - -var LOOSE = R++; -src[LOOSE] = '^' + LOOSEPLAIN + '$'; - -var GTLT = R++; -src[GTLT] = '((?:<|>)?=?)'; - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++; -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; -var XRANGEIDENTIFIER = R++; -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; - -var XRANGEPLAIN = R++; -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:(' + src[PRERELEASE] + ')' + - ')?)?)?'; - -var XRANGEPLAINLOOSE = R++; -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:(' + src[PRERELEASELOOSE] + ')' + - ')?)?)?'; - -// >=2.x, for example, means >=2.0.0-0 -// <1.x would be the same as "<1.0.0-0", though. -var XRANGE = R++; -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; -var XRANGELOOSE = R++; -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++; -src[LONETILDE] = '(?:~>?)'; - -var TILDETRIM = R++; -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); -var tildeTrimReplace = '$1~'; - -var TILDE = R++; -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; -var TILDELOOSE = R++; -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; - -// Caret ranges. -// Meaning is "at least and backwards compatible with" -var LONECARET = R++; -src[LONECARET] = '(?:\\^)'; - -var CARETTRIM = R++; -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); -var caretTrimReplace = '$1^'; - -var CARET = R++; -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; -var CARETLOOSE = R++; -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++; -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; -var COMPARATOR = R++; -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; - - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++; -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; - -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); -var comparatorTrimReplace = '$1$2$3'; - - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++; -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$'; - -var HYPHENRANGELOOSE = R++; -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$'; - -// Star ranges basically just allow anything at all. 
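The GTLT/XRANGE/STAR patterns above accept forms like `1.2.x`, `2.*`, and bare `*`; `validRange` (defined further down) shows their normalized expansion. A sketch of probing them:

```javascript
var semver = require("semver");

console.log(semver.validRange("1.2.x"));       // e.g. '>=1.2.0-0 <1.3.0-0'
console.log(semver.validRange("*"));           // '*' -- kept truthy on purpose
console.log(semver.validRange("not a range")); // null
```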
-var STAR = R++; -src[STAR] = '(<|>)?=?\\s*\\*'; - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]); - if (!re[i]) - re[i] = new RegExp(src[i]); -} - -exports.parse = parse; -function parse(version, loose) { - var r = loose ? re[LOOSE] : re[FULL]; - return (r.test(version)) ? new SemVer(version, loose) : null; -} - -exports.valid = valid; -function valid(version, loose) { - var v = parse(version, loose); - return v ? v.version : null; -} - - -exports.clean = clean; -function clean(version, loose) { - var s = parse(version, loose); - return s ? s.version : null; -} - -exports.SemVer = SemVer; - -function SemVer(version, loose) { - if (version instanceof SemVer) { - if (version.loose === loose) - return version; - else - version = version.version; - } - - if (!(this instanceof SemVer)) - return new SemVer(version, loose); - - debug('SemVer', version, loose); - this.loose = loose; - var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); - - if (!m) - throw new TypeError('Invalid Version: ' + version); - - this.raw = version; - - // these are actually numbers - this.major = +m[1]; - this.minor = +m[2]; - this.patch = +m[3]; - - // numberify any prerelease numeric ids - if (!m[4]) - this.prerelease = []; - else - this.prerelease = m[4].split('.').map(function(id) { - return (/^[0-9]+$/.test(id)) ? +id : id; - }); - - this.build = m[5] ? m[5].split('.') : []; - this.format(); -} - -SemVer.prototype.format = function() { - this.version = this.major + '.' + this.minor + '.' + this.patch; - if (this.prerelease.length) - this.version += '-' + this.prerelease.join('.'); - return this.version; -}; - -SemVer.prototype.inspect = function() { - return '<SemVer "' + this + '">'; -}; - -SemVer.prototype.toString = function() { - return this.version; -}; - -SemVer.prototype.compare = function(other) { - debug('SemVer.compare', this.version, this.loose, other); - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - return this.compareMain(other) || this.comparePre(other); -}; - -SemVer.prototype.compareMain = function(other) { - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch); -}; - -SemVer.prototype.comparePre = function(other) { - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) - return -1; - else if (!this.prerelease.length && other.prerelease.length) - return 1; - else if (!this.prerelease.lenth && !other.prerelease.length) - return 0; - - var i = 0; - do { - var a = this.prerelease[i]; - var b = other.prerelease[i]; - debug('prerelease compare', i, a, b); - if (a === undefined && b === undefined) - return 0; - else if (b === undefined) - return 1; - else if (a === undefined) - return -1; - else if (a === b) - continue; - else - return compareIdentifiers(a, b); - } while (++i); -}; - -SemVer.prototype.inc = function(release) { - switch (release) { - case 'major': - this.major++; - this.minor = -1; - case 'minor': - this.minor++; - this.patch = -1; - case 'patch': - this.patch++; - this.prerelease = []; - break; - case 'prerelease': - if (this.prerelease.length === 0) - this.prerelease = [0]; - else { - var i = this.prerelease.length; - while (--i >= 0) { - if (typeof this.prerelease[i] ===
'number') { - this.prerelease[i]++; - i = -2; - } - } - if (i === -1) // didn't increment anything - this.prerelease.push(0); - } - break; - - default: - throw new Error('invalid increment argument: ' + release); - } - this.format(); - return this; -}; - -exports.inc = inc; -function inc(version, release, loose) { - try { - return new SemVer(version, loose).inc(release).version; - } catch (er) { - return null; - } -} - -exports.compareIdentifiers = compareIdentifiers; - -var numeric = /^[0-9]+$/; -function compareIdentifiers(a, b) { - var anum = numeric.test(a); - var bnum = numeric.test(b); - - if (anum && bnum) { - a = +a; - b = +b; - } - - return (anum && !bnum) ? -1 : - (bnum && !anum) ? 1 : - a < b ? -1 : - a > b ? 1 : - 0; -} - -exports.rcompareIdentifiers = rcompareIdentifiers; -function rcompareIdentifiers(a, b) { - return compareIdentifiers(b, a); -} - -exports.compare = compare; -function compare(a, b, loose) { - return new SemVer(a, loose).compare(b); -} - -exports.compareLoose = compareLoose; -function compareLoose(a, b) { - return compare(a, b, true); -} - -exports.rcompare = rcompare; -function rcompare(a, b, loose) { - return compare(b, a, loose); -} - -exports.sort = sort; -function sort(list, loose) { - return list.sort(function(a, b) { - return exports.compare(a, b, loose); - }); -} - -exports.rsort = rsort; -function rsort(list, loose) { - return list.sort(function(a, b) { - return exports.rcompare(a, b, loose); - }); -} - -exports.gt = gt; -function gt(a, b, loose) { - return compare(a, b, loose) > 0; -} - -exports.lt = lt; -function lt(a, b, loose) { - return compare(a, b, loose) < 0; -} - -exports.eq = eq; -function eq(a, b, loose) { - return compare(a, b, loose) === 0; -} - -exports.neq = neq; -function neq(a, b, loose) { - return compare(a, b, loose) !== 0; -} - -exports.gte = gte; -function gte(a, b, loose) { - return compare(a, b, loose) >= 0; -} - -exports.lte = lte; -function lte(a, b, loose) { - return compare(a, b, loose) <= 0; -} - -exports.cmp = cmp; -function cmp(a, op, b, loose) { - var ret; - switch (op) { - case '===': ret = a === b; break; - case '!==': ret = a !== b; break; - case '': case '=': case '==': ret = eq(a, b, loose); break; - case '!=': ret = neq(a, b, loose); break; - case '>': ret = gt(a, b, loose); break; - case '>=': ret = gte(a, b, loose); break; - case '<': ret = lt(a, b, loose); break; - case '<=': ret = lte(a, b, loose); break; - default: throw new TypeError('Invalid operator: ' + op); - } - return ret; -} - -exports.Comparator = Comparator; -function Comparator(comp, loose) { - if (comp instanceof Comparator) { - if (comp.loose === loose) - return comp; - else - comp = comp.value; - } - - if (!(this instanceof Comparator)) - return new Comparator(comp, loose); - - debug('comparator', comp, loose); - this.loose = loose; - this.parse(comp); - - if (this.semver === ANY) - this.value = ''; - else - this.value = this.operator + this.semver.version; -} - -var ANY = {}; -Comparator.prototype.parse = function(comp) { - var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; - var m = comp.match(r); - - if (!m) - throw new TypeError('Invalid comparator: ' + comp); - - this.operator = m[1]; - // if it literally is just '>' or '' then allow anything. 
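Note the deliberate fall-through in `inc` above: 'major' sets `minor = -1` and falls into 'minor', which sets `patch = -1` and falls into 'patch', so the lower fields come out as 0 after their `++`. A usage sketch:

```javascript
var semver = require("semver");

console.log(semver.inc("1.2.3", "major"));        // '2.0.0'
console.log(semver.inc("1.2.3", "minor"));        // '1.3.0'
console.log(semver.inc("1.2.3-0", "prerelease")); // '1.2.3-1'
console.log(semver.inc("1.2.3", "bogus"));        // null -- the throw is caught
```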
- if (!m[2]) - this.semver = ANY; - else { - this.semver = new SemVer(m[2], this.loose); - - // <1.2.3-rc DOES allow 1.2.3-beta (has prerelease) - // >=1.2.3 DOES NOT allow 1.2.3-beta - // <=1.2.3 DOES allow 1.2.3-beta - // However, <1.2.3 does NOT allow 1.2.3-beta, - // even though `1.2.3-beta < 1.2.3` - // The assumption is that the 1.2.3 version has something you - // *don't* want, so we push the prerelease down to the minimum. - if (this.operator === '<' && !this.semver.prerelease.length) { - this.semver.prerelease = ['0']; - this.semver.format(); - } - } -}; - -Comparator.prototype.inspect = function() { - return '<SemVer Comparator "' + this.value + '">'; -}; - -Comparator.prototype.toString = function() { - return this.value; -}; - -Comparator.prototype.test = function(version) { - debug('Comparator.test', version, this.loose); - return (this.semver === ANY) ? true : - cmp(version, this.operator, this.semver, this.loose); -}; - - -exports.Range = Range; -function Range(range, loose) { - if ((range instanceof Range) && range.loose === loose) - return range; - - if (!(this instanceof Range)) - return new Range(range, loose); - - this.loose = loose; - - // First, split based on boolean or || - this.raw = range; - this.set = range.split(/\s*\|\|\s*/).map(function(range) { - return this.parseRange(range.trim()); - }, this).filter(function(c) { - // throw out any that are not relevant for whatever reason - return c.length; - }); - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range); - } - - this.format(); -} - -Range.prototype.inspect = function() { - return '<SemVer Range "' + this.range + '">'; -}; - -Range.prototype.format = function() { - this.range = this.set.map(function(comps) { - return comps.join(' ').trim(); - }).join('||').trim(); - return this.range; -}; - -Range.prototype.toString = function() { - return this.range; -}; - -Range.prototype.parseRange = function(range) { - var loose = this.loose; - range = range.trim(); - debug('range', range, loose); - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; - range = range.replace(hr, hyphenReplace); - debug('hyphen replace', range); - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); - debug('comparator trim', range, re[COMPARATORTRIM]); - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace); - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace); - - // normalize spaces - range = range.split(/\s+/).join(' '); - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; - var set = range.split(' ').map(function(comp) { - return parseComparator(comp, loose); - }).join(' ').split(/\s+/); - if (this.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function(comp) { - return !!comp.match(compRe); - }); - } - set = set.map(function(comp) { - return new Comparator(comp, loose); - }); - - return set; -}; - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators; -function toComparators(range, loose) { - return new Range(range, loose).set.map(function(comp) { - return comp.map(function(c) { - return c.value; - }).join(' ').trim().split(' '); - }); -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators.
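parseRange above is a fixed rewrite pipeline: hyphen ranges first, then the whitespace trims, then per-token desugaring into `Comparator` objects. Observing the result, as a sketch:

```javascript
var semver = require("semver");

var r = new semver.Range("1.2.3 - 1.4 || ~2.0.1");
console.log(r.range);         // e.g. '>=1.2.3 <1.5.0-0||>=2.0.1-0 <2.1.0-0'
console.log(r.test("1.4.7")); // true -- the first comparator set matches
```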
-function parseComparator(comp, loose) { - debug('comp', comp); - comp = replaceCarets(comp, loose); - debug('caret', comp); - comp = replaceTildes(comp, loose); - debug('tildes', comp); - comp = replaceXRanges(comp, loose); - debug('xrange', comp); - comp = replaceStars(comp, loose); - debug('stars', comp); - return comp; -} - -function isX(id) { - return !id || id.toLowerCase() === 'x' || id === '*'; -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes(comp, loose) { - return comp.trim().split(/\s+/).map(function(comp) { - return replaceTilde(comp, loose); - }).join(' '); -} - -function replaceTilde(comp, loose) { - var r = loose ? re[TILDELOOSE] : re[TILDE]; - return comp.replace(r, function(_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr); - var ret; - - if (isX(M)) - ret = ''; - else if (isX(m)) - ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; - else if (isX(p)) - // ~1.2 == >=1.2.0- <1.3.0- - ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; - else if (pr) { - debug('replaceTilde pr', pr); - if (pr.charAt(0) !== '-') - pr = '-' + pr; - ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + (+m + 1) + '.0-0'; - } else - // ~1.2.3 == >=1.2.3-0 <1.3.0-0 - ret = '>=' + M + '.' + m + '.' + p + '-0' + - ' <' + M + '.' + (+m + 1) + '.0-0'; - - debug('tilde return', ret); - return ret; - }); -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets(comp, loose) { - return comp.trim().split(/\s+/).map(function(comp) { - return replaceCaret(comp, loose); - }).join(' '); -} - -function replaceCaret(comp, loose) { - var r = loose ? re[CARETLOOSE] : re[CARET]; - return comp.replace(r, function(_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr); - var ret; - - if (isX(M)) - ret = ''; - else if (isX(m)) - ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; - else if (isX(p)) - if (M === '0') ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; - else ret = '>=' + M + '.' + m + '.0-0 <' + (+M + 1) + '.0.0-0'; - else if (pr) { - debug('replaceCaret pr', pr); - if (pr.charAt(0) !== '-') - pr = '-' + pr; - if (M === '0') - if (m === '0') ret = '=' + M + '.' + m + '.' + p + pr; - else ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + (+m + 1) + '.0-0'; - else ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + (+M + 1) + '.0.0-0'; - } else - if (M === '0') - if (m === '0') ret = '=' + M + '.' + m + '.' + p; - else ret = '>=' + M + '.' + m + '.' + p + '-0' + - ' <' + M + '.' + (+m + 1) + '.0-0'; - else ret = '>=' + M + '.' + m + '.' + p + '-0' + - ' <' + (+M + 1) + '.0.0-0'; - - debug('caret return', ret); - return ret; - }); -} - -function replaceXRanges(comp, loose) { - debug('replaceXRanges', comp, loose); - return comp.split(/\s+/).map(function(comp) { - return replaceXRange(comp, loose); - }).join(' '); -} - -function replaceXRange(comp, loose) { - comp = comp.trim(); - var r = loose ? 
re[XRANGELOOSE] : re[XRANGE]; - return comp.replace(r, function(ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr); - var xM = isX(M); - var xm = xM || isX(m); - var xp = xm || isX(p); - var anyX = xp; - - if (gtlt === '=' && anyX) - gtlt = ''; - - if (gtlt && anyX) { - // replace X with 0, and then append the -0 min-prerelease - if (xM) - M = 0; - if (xm) - m = 0; - if (xp) - p = 0; - - if (gtlt === '>') { - // >1 => >=2.0.0-0 - // >1.2 => >=1.3.0-0 - // >1.2.3 => >= 1.2.4-0 - gtlt = '>='; - if (xM) { - // no change - } else if (xm) { - M = +M + 1; - m = 0; - p = 0; - } else if (xp) { - m = +m + 1; - p = 0; - } - } - - - ret = gtlt + M + '.' + m + '.' + p + '-0'; - } else if (xM) { - // allow any - ret = '*'; - } else if (xm) { - // append '-0' onto the version, otherwise - // '1.x.x' matches '2.0.0-beta', since the tag - // *lowers* the version value - ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; - } else if (xp) { - ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; - } - - debug('xRange return', ret); - - return ret; - }); -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars(comp, loose) { - debug('replaceStars', comp, loose); - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], ''); -} - -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0-0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0-0 <3.5.0-0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0-0 <3.5.0-0 -function hyphenReplace($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - - if (isX(fM)) - from = ''; - else if (isX(fm)) - from = '>=' + fM + '.0.0-0'; - else if (isX(fp)) - from = '>=' + fM + '.' + fm + '.0-0'; - else - from = '>=' + from; - - if (isX(tM)) - to = ''; - else if (isX(tm)) - to = '<' + (+tM + 1) + '.0.0-0'; - else if (isX(tp)) - to = '<' + tM + '.' + (+tm + 1) + '.0-0'; - else if (tpr) - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; - else - to = '<=' + to; - - return (from + ' ' + to).trim(); -} - - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function(version) { - if (!version) - return false; - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version)) - return true; - } - return false; -}; - -function testSet(set, version) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) - return false; - } - return true; -} - -exports.satisfies = satisfies; -function satisfies(version, range, loose) { - try { - range = new Range(range, loose); - } catch (er) { - return false; - } - return range.test(version); -} - -exports.maxSatisfying = maxSatisfying; -function maxSatisfying(versions, range, loose) { - return versions.filter(function(version) { - return satisfies(version, range, loose); - }).sort(function(a, b) { - return rcompare(a, b, loose); - })[0] || null; -} - -exports.validRange = validRange; -function validRange(range, loose) { - try { - // Return '*' instead of '' so that truthiness works. 
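The `-0` suffixes produced above are what keep an x-range from leaking into the next major's pre-releases: a pre-release tag sorts *below* its bare triple, so the upper bound must carry a tag too. A sketch:

```javascript
var semver = require("semver");

// '1.x' desugars to '>=1.0.0-0 <2.0.0-0', so the beta stays excluded:
console.log(semver.satisfies("2.0.0-beta", "1.x")); // false
// Without the tagged ceiling it would slip in, because:
console.log(semver.lt("2.0.0-beta", "2.0.0"));      // true
```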
- // This will throw if it's invalid anyway - return new Range(range, loose).range || '*'; - } catch (er) { - return null; - } -} - -// Use the define() function if we're in AMD land -if (typeof define === 'function' && define.amd) - define(exports); diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.min.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.min.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/semver/semver.min.js 2013-08-01 23:04:18.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/semver/semver.min.js 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -!function(e){if(typeof module==="object"&&module.exports===e)e=module.exports=H;e.SEMVER_SPEC_VERSION="2.0.0";var r=e.re=[];var t=e.src=[];var n=0;var i=n++;t[i]="0|[1-9]\\d*";var s=n++;t[s]="[0-9]+";var o=n++;t[o]="\\d*[a-zA-Z-][a-zA-Z0-9-]*";var a=n++;t[a]="("+t[i]+")\\."+"("+t[i]+")\\."+"("+t[i]+")";var f=n++;t[f]="("+t[s]+")\\."+"("+t[s]+")\\."+"("+t[s]+")";var u=n++;t[u]="(?:"+t[i]+"|"+t[o]+")";var c=n++;t[c]="(?:"+t[s]+"|"+t[o]+")";var l=n++;t[l]="(?:-("+t[u]+"(?:\\."+t[u]+")*))";var p=n++;t[p]="(?:-?("+t[c]+"(?:\\."+t[c]+")*))";var h=n++;t[h]="[0-9A-Za-z-]+";var v=n++;t[v]="(?:\\+("+t[h]+"(?:\\."+t[h]+")*))";var m=n++;var g="v?"+t[a]+t[l]+"?"+t[v]+"?";t[m]="^"+g+"$";var w="[v=\\s]*"+t[f]+t[p]+"?"+t[v]+"?";var d=n++;t[d]="^"+w+"$";var y=n++;t[y]="((?:<|>)?=?)";var $=n++;t[$]=t[s]+"|x|X|\\*";var j=n++;t[j]=t[i]+"|x|X|\\*";var b=n++;t[b]="[v=\\s]*("+t[j]+")"+"(?:\\.("+t[j]+")"+"(?:\\.("+t[j]+")"+"(?:("+t[l]+")"+")?)?)?";var E=n++;t[E]="[v=\\s]*("+t[$]+")"+"(?:\\.("+t[$]+")"+"(?:\\.("+t[$]+")"+"(?:("+t[p]+")"+")?)?)?";var x=n++;t[x]="^"+t[y]+"\\s*"+t[b]+"$";var R=n++;t[R]="^"+t[y]+"\\s*"+t[E]+"$";var S=n++;t[S]="(?:~>?)";var k=n++;t[k]="(\\s*)"+t[S]+"\\s+";r[k]=new RegExp(t[k],"g");var V="$1~";var I=n++;t[I]="^"+t[S]+t[b]+"$";var A=n++;t[A]="^"+t[S]+t[E]+"$";var C=n++;t[C]="(?:\\^)";var T=n++;t[T]="(\\s*)"+t[C]+"\\s+";r[T]=new RegExp(t[T],"g");var z="$1^";var M=n++;t[M]="^"+t[C]+t[b]+"$";var P=n++;t[P]="^"+t[C]+t[E]+"$";var Z=n++;t[Z]="^"+t[y]+"\\s*("+w+")$|^$";var q=n++;t[q]="^"+t[y]+"\\s*("+g+")$|^$";var L=n++;t[L]="(\\s*)"+t[y]+"\\s*("+w+"|"+t[b]+")";r[L]=new RegExp(t[L],"g");var X="$1$2$3";var _=n++;t[_]="^\\s*("+t[b]+")"+"\\s+-\\s+"+"("+t[b]+")"+"\\s*$";var N=n++;t[N]="^\\s*("+t[E]+")"+"\\s+-\\s+"+"("+t[E]+")"+"\\s*$";var O=n++;t[O]="(<|>)?=?\\s*\\*";for(var B=0;B'};H.prototype.toString=function(){return this.version};H.prototype.compare=function(e){if(!(e instanceof H))e=new H(e,this.loose);return this.compareMain(e)||this.comparePre(e)};H.prototype.compareMain=function(e){if(!(e instanceof H))e=new H(e,this.loose);return Q(this.major,e.major)||Q(this.minor,e.minor)||Q(this.patch,e.patch)};H.prototype.comparePre=function(e){if(!(e instanceof H))e=new H(e,this.loose);if(this.prerelease.length&&!e.prerelease.length)return-1;else if(!this.prerelease.length&&e.prerelease.length)return 1;else if(!this.prerelease.lenth&&!e.prerelease.length)return 0;var r=0;do{var t=this.prerelease[r];var n=e.prerelease[r];if(t===undefined&&n===undefined)return 0;else if(n===undefined)return 1;else if(t===undefined)return-1;else if(t===n)continue;else return Q(t,n)}while(++r)};H.prototype.inc=function(e){switch(e){case"major":this.major++;this.minor=-1;case"minor":this.minor++;this.patch=-1;case"patch":this.patch++;this.prerelease=[];break;case"prerelease":if(this.prerelease.length===0)this.prerelease=[0];else{var 
r=this.prerelease.length;while(--r>=0){if(typeof this.prerelease[r]==="number"){this.prerelease[r]++;r=-2}}if(r===-1)this.prerelease.push(0)}break;default:throw new Error("invalid increment argument: "+e)}this.format();return this};e.inc=J;function J(e,r,t){try{return new H(e,t).inc(r).version}catch(n){return null}}e.compareIdentifiers=Q;var K=/^[0-9]+$/;function Q(e,r){var t=K.test(e);var n=K.test(r);if(t&&n){e=+e;r=+r}return t&&!n?-1:n&&!t?1:er?1:0}e.rcompareIdentifiers=U;function U(e,r){return Q(r,e)}e.compare=W;function W(e,r,t){return new H(e,t).compare(r)}e.compareLoose=Y;function Y(e,r){return W(e,r,true)}e.rcompare=er;function er(e,r,t){return W(r,e,t)}e.sort=rr;function rr(r,t){return r.sort(function(r,n){return e.compare(r,n,t)})}e.rsort=tr;function tr(r,t){return r.sort(function(r,n){return e.rcompare(r,n,t)})}e.gt=nr;function nr(e,r,t){return W(e,r,t)>0}e.lt=ir;function ir(e,r,t){return W(e,r,t)<0}e.eq=sr;function sr(e,r,t){return W(e,r,t)===0}e.neq=or;function or(e,r,t){return W(e,r,t)!==0}e.gte=ar;function ar(e,r,t){return W(e,r,t)>=0}e.lte=fr;function fr(e,r,t){return W(e,r,t)<=0}e.cmp=ur;function ur(e,r,t,n){var i;switch(r){case"===":i=e===t;break;case"!==":i=e!==t;break;case"":case"=":case"==":i=sr(e,t,n);break;case"!=":i=or(e,t,n);break;case">":i=nr(e,t,n);break;case">=":i=ar(e,t,n);break;case"<":i=ir(e,t,n);break;case"<=":i=fr(e,t,n);break;default:throw new TypeError("Invalid operator: "+r)}return i}e.Comparator=cr;function cr(e,r){if(e instanceof cr){if(e.loose===r)return e;else e=e.value}if(!(this instanceof cr))return new cr(e,r);this.loose=r;this.parse(e);if(this.semver===lr)this.value="";else this.value=this.operator+this.semver.version}var lr={};cr.prototype.parse=function(e){var t=this.loose?r[Z]:r[q];var n=e.match(t);if(!n)throw new TypeError("Invalid comparator: "+e);this.operator=n[1];if(!n[2])this.semver=lr;else{this.semver=new H(n[2],this.loose);if(this.operator==="<"&&!this.semver.prerelease.length){this.semver.prerelease=["0"];this.semver.format()}}};cr.prototype.inspect=function(){return''};cr.prototype.toString=function(){return this.value};cr.prototype.test=function(e){return this.semver===lr?true:ur(e,this.operator,this.semver,this.loose)};e.Range=pr;function pr(e,r){if(e instanceof pr&&e.loose===r)return e;if(!(this instanceof pr))return new pr(e,r);this.loose=r;this.raw=e;this.set=e.split(/\s*\|\|\s*/).map(function(e){return this.parseRange(e.trim())},this).filter(function(e){return e.length});if(!this.set.length){throw new TypeError("Invalid SemVer Range: "+e)}this.format()}pr.prototype.inspect=function(){return''};pr.prototype.format=function(){this.range=this.set.map(function(e){return e.join(" ").trim()}).join("||").trim();return this.range};pr.prototype.toString=function(){return this.range};pr.prototype.parseRange=function(e){var t=this.loose;e=e.trim();var n=t?r[N]:r[_];e=e.replace(n,Er);e=e.replace(r[L],X);e=e.replace(r[k],V);e=e.replace(r[T],z);e=e.split(/\s+/).join(" ");var i=t?r[Z]:r[q];var s=e.split(" ").map(function(e){return vr(e,t)}).join(" ").split(/\s+/);if(this.loose){s=s.filter(function(e){return!!e.match(i)})}s=s.map(function(e){return new cr(e,t)});return s};e.toComparators=hr;function hr(e,r){return new pr(e,r).set.map(function(e){return e.map(function(e){return e.value}).join(" ").trim().split(" ")})}function vr(e,r){e=dr(e,r);e=gr(e,r);e=$r(e,r);e=br(e,r);return e}function mr(e){return!e||e.toLowerCase()==="x"||e==="*"}function gr(e,r){return e.trim().split(/\s+/).map(function(e){return wr(e,r)}).join(" ")}function wr(e,t){var 
n=t?r[A]:r[I];return e.replace(n,function(e,r,t,n,i){var s;if(mr(r))s="";else if(mr(t))s=">="+r+".0.0-0 <"+(+r+1)+".0.0-0";else if(mr(n))s=">="+r+"."+t+".0-0 <"+r+"."+(+t+1)+".0-0";else if(i){if(i.charAt(0)!=="-")i="-"+i;s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+"-0"+" <"+r+"."+(+t+1)+".0-0";return s})}function dr(e,r){return e.trim().split(/\s+/).map(function(e){return yr(e,r)}).join(" ")}function yr(e,t){var n=t?r[P]:r[M];return e.replace(n,function(e,r,t,n,i){var s;if(mr(r))s="";else if(mr(t))s=">="+r+".0.0-0 <"+(+r+1)+".0.0-0";else if(mr(n))if(r==="0")s=">="+r+"."+t+".0-0 <"+r+"."+(+t+1)+".0-0";else s=">="+r+"."+t+".0-0 <"+(+r+1)+".0.0-0";else if(i){if(i.charAt(0)!=="-")i="-"+i;if(r==="0")if(t==="0")s="="+r+"."+t+"."+n+i;else s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0-0";else s=">="+r+"."+t+"."+n+i+" <"+(+r+1)+".0.0-0"}else if(r==="0")if(t==="0")s="="+r+"."+t+"."+n;else s=">="+r+"."+t+"."+n+"-0"+" <"+r+"."+(+t+1)+".0-0";else s=">="+r+"."+t+"."+n+"-0"+" <"+(+r+1)+".0.0-0";return s})}function $r(e,r){return e.split(/\s+/).map(function(e){return jr(e,r)}).join(" ")}function jr(e,t){e=e.trim();var n=t?r[R]:r[x];return e.replace(n,function(e,r,t,n,i,s){var o=mr(t);var a=o||mr(n);var f=a||mr(i);var u=f;if(r==="="&&u)r="";if(r&&u){if(o)t=0;if(a)n=0;if(f)i=0;if(r===">"){r=">=";if(o){}else if(a){t=+t+1;n=0;i=0}else if(f){n=+n+1;i=0}}e=r+t+"."+n+"."+i+"-0"}else if(o){e="*"}else if(a){e=">="+t+".0.0-0 <"+(+t+1)+".0.0-0"}else if(f){e=">="+t+"."+n+".0-0 <"+t+"."+(+n+1)+".0-0"}return e})}function br(e,t){return e.trim().replace(r[O],"")}function Er(e,r,t,n,i,s,o,a,f,u,c,l,p){if(mr(t))r="";else if(mr(n))r=">="+t+".0.0-0";else if(mr(i))r=">="+t+"."+n+".0-0";else r=">="+r;if(mr(f))a="";else if(mr(u))a="<"+(+f+1)+".0.0-0";else if(mr(c))a="<"+f+"."+(+u+1)+".0-0";else if(l)a="<="+f+"."+u+"."+c+"-"+l;else a="<="+a;return(r+" "+a).trim()}pr.prototype.test=function(e){if(!e)return false;for(var r=0;r>>" + c.toString().replace(/\n/g, "\\n")) - }) - e.on("end", function () { - console.error(" << 0 - return !this._needDrain -} - -EntryWriter.prototype.end = function (c) { - // console.error(".. ew end") - if (c) this._buffer.push(c) - this._buffer.push(EOF) - this._ended = true - this._process() - this._needDrain = this._buffer.length > 0 -} - -EntryWriter.prototype.pause = function () { - // console.error(".. ew pause") - this._paused = true - this.emit("pause") -} - -EntryWriter.prototype.resume = function () { - // console.error(".. ew resume") - this._paused = false - this.emit("resume") - this._process() -} - -EntryWriter.prototype.add = function (entry) { - // console.error(".. ew add") - if (!this.parent) return this.emit("error", new Error("no parent")) - - // make sure that the _header and such is emitted, and clear out - // the _currentEntry link on the parent. - if (!this._ended) this.end() - - return this.parent.add(entry) -} - -EntryWriter.prototype._header = function () { - // console.error(".. ew header") - if (this._didHeader) return - this._didHeader = true - - var headerBlock = TarHeader.encode(this.props) - - if (this.props.needExtended && !this._meta) { - var me = this - - ExtendedHeaderWriter = ExtendedHeaderWriter || - require("./extended-header-writer.js") - - ExtendedHeaderWriter(this.props) - .on("data", function (c) { - me.emit("data", c) - }) - .on("error", function (er) { - me.emit("error", er) - }) - .end() - } - - // console.error(".. .. 
ew headerBlock emitting") - this.emit("data", headerBlock) - this.emit("header") -} - -EntryWriter.prototype._process = function () { - // console.error(".. .. ew process") - if (!this._didHeader && !this._meta) { - this._header() - } - - if (this._paused || this._processing) { - // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing) - return - } - - this._processing = true - - var buf = this._buffer - for (var i = 0; i < buf.length; i ++) { - // console.error(".. .. .. i=%d", i) - - var c = buf[i] - - if (c === EOF) this._stream.end() - else this._stream.write(c) - - if (this._paused) { - // console.error(".. .. .. paused mid-emission") - this._processing = false - if (i < buf.length) { - this._needDrain = true - this._buffer = buf.slice(i + 1) - } - return - } - } - - // console.error(".. .. .. emitted") - this._buffer.length = 0 - this._processing = false - - // console.error(".. .. .. emitting drain") - this.emit("drain") -} - -EntryWriter.prototype.destroy = function () {} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/entry.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/entry.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/entry.js 2013-05-29 23:35:06.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/entry.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,213 +0,0 @@ -// A passthrough read/write stream that sets its properties -// based on a header, extendedHeader, and globalHeader -// -// Can be either a file system object of some sort, or -// a pax/ustar metadata entry. - -module.exports = Entry - -var TarHeader = require("./header.js") - , tar = require("../tar") - , assert = require("assert").ok - , Stream = require("stream").Stream - , inherits = require("inherits") - , fstream = require("fstream").Abstract - -function Entry (header, extended, global) { - Stream.call(this) - this.readable = true - this.writable = true - - this._needDrain = false - this._paused = false - this._reading = false - this._ending = false - this._ended = false - this._remaining = 0 - this._queue = [] - this._index = 0 - this._queueLen = 0 - - this._read = this._read.bind(this) - - this.props = {} - this._header = header - this._extended = extended || {} - - // globals can change throughout the course of - // a file parse operation. Freeze it at its current state. - this._global = {} - var me = this - Object.keys(global || {}).forEach(function (g) { - me._global[g] = global[g] - }) - - this._setProps() -} - -inherits(Entry, Stream) - -Entry.prototype.write = function (c) { - if (this._ending) this.error("write() after end()", null, true) - if (this._remaining === 0) { - this.error("invalid bytes past eof") - } - - // often we'll get a bunch of \0 at the end of the last write, - // since chunks will always be 512 bytes when reading a tarball. - if (c.length > this._remaining) { - c = c.slice(0, this._remaining) - } - this._remaining -= c.length - - // put it on the stack. 
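EntryWriter._process above is the classic pre-streams2 backpressure dance: buffer writes, emit until paused, remember `_needDrain`, and emit "drain" once the buffer empties. A minimal self-contained sketch of the same pattern (a toy class, not this module's API):

```javascript
var Stream = require("stream").Stream;
var inherits = require("util").inherits;

function ToyWriter() {
  Stream.call(this);
  this.writable = true;
  this._buffer = [];
  this._paused = false;
}
inherits(ToyWriter, Stream);

ToyWriter.prototype.write = function (c) {
  this._buffer.push(c);
  this._process();
  return this._buffer.length === 0; // false => caller should wait for "drain"
};
ToyWriter.prototype.pause = function () { this._paused = true; };
ToyWriter.prototype.resume = function () { this._paused = false; this._process(); };
ToyWriter.prototype._process = function () {
  // flush as much as the consumer will take, then signal drain
  while (this._buffer.length && !this._paused) {
    this.emit("data", this._buffer.shift());
  }
  if (!this._buffer.length) this.emit("drain");
};
```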
- var ql = this._queueLen - this._queue.push(c) - this._queueLen ++ - - this._read() - - // either paused, or buffered - if (this._paused || ql > 0) { - this._needDrain = true - return false - } - - return true -} - -Entry.prototype.end = function (c) { - if (c) this.write(c) - this._ending = true - this._read() -} - -Entry.prototype.pause = function () { - this._paused = true - this.emit("pause") -} - -Entry.prototype.resume = function () { - // console.error(" Tar Entry resume", this.path) - this.emit("resume") - this._paused = false - this._read() - return this._queueLen - this._index > 1 -} - - // This is bound to the instance -Entry.prototype._read = function () { - // console.error(" Tar Entry _read", this.path) - - if (this._paused || this._reading || this._ended) return - - // set this flag so that event handlers don't inadvertently - // get multiple _read() calls running. - this._reading = true - - // have any data to emit? - while (this._index < this._queueLen && !this._paused) { - var chunk = this._queue[this._index ++] - this.emit("data", chunk) - } - - // check if we're drained - if (this._index >= this._queueLen) { - this._queue.length = this._queueLen = this._index = 0 - if (this._needDrain) { - this._needDrain = false - this.emit("drain") - } - if (this._ending) { - this._ended = true - this.emit("end") - } - } - - // if the queue gets too big, then pluck off whatever we can. - // this should be fairly rare. - var mql = this._maxQueueLen - if (this._queueLen > mql && this._index > 0) { - mql = Math.min(this._index, mql) - this._index -= mql - this._queueLen -= mql - this._queue = this._queue.slice(mql) - } - - this._reading = false -} - -Entry.prototype._setProps = function () { - // props = extended->global->header->{} - var header = this._header - , extended = this._extended - , global = this._global - , props = this.props - - // first get the values from the normal header. - var fields = tar.fields - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , val = header[field] - if (typeof val !== "undefined") props[field] = val - } - - // next, the global header for this file. - // numeric values, etc, will have already been parsed. - ;[global, extended].forEach(function (p) { - Object.keys(p).forEach(function (f) { - if (typeof p[f] !== "undefined") props[f] = p[f] - }) - }) - - // no nulls allowed in path or linkpath - ;["path", "linkpath"].forEach(function (p) { - if (props.hasOwnProperty(p)) { - props[p] = props[p].split("\0")[0] - } - }) - - - // set date fields to be a proper date - ;["mtime", "ctime", "atime"].forEach(function (p) { - if (props.hasOwnProperty(p)) { - props[p] = new Date(props[p] * 1000) - } - }) - - // set the type so that we know what kind of file to create - var type - switch (tar.types[props.type]) { - case "OldFile": - case "ContiguousFile": - type = "File" - break - - case "GNUDumpDir": - type = "Directory" - break - - case undefined: - type = "Unknown" - break - - case "Link": - case "SymbolicLink": - case "CharacterDevice": - case "BlockDevice": - case "Directory": - case "FIFO": - default: - type = tar.types[props.type] - } - - this.type = type - this.path = props.path - this.size = props.size - - // size is special, since it signals when the file needs to end. 
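The `_setProps` logic above merges metadata with a fixed precedence: plain ustar header fields first, then the frozen global pax header, then the per-file extended header, with NUL-stripping and epoch-seconds-to-Date fixups at the end. The same layering in isolation, with hypothetical field values:

```javascript
// Hypothetical inputs illustrating header < global < extended precedence.
var header = { path: "a.txt\u0000\u0000", size: 10, mtime: 1382659200 };
var globalPax = { uname: "build" };
var extended = { path: "very/long/real/path/a.txt" };

var props = {};
[header, globalPax, extended].forEach(function (p) {
  Object.keys(p).forEach(function (k) {
    if (typeof p[k] !== "undefined") props[k] = p[k];
  });
});
props.path = props.path.split("\u0000")[0]; // no NULs allowed in paths
props.mtime = new Date(props.mtime * 1000); // stored as epoch seconds
console.log(props.path); // 'very/long/real/path/a.txt'
```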
- this._remaining = props.size -} - -Entry.prototype.warn = fstream.warn -Entry.prototype.error = fstream.error diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js 2013-05-29 23:35:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,191 +0,0 @@ - -module.exports = ExtendedHeaderWriter - -var inherits = require("inherits") - , EntryWriter = require("./entry-writer.js") - -inherits(ExtendedHeaderWriter, EntryWriter) - -var tar = require("../tar.js") - , path = require("path") - , TarHeader = require("./header.js") - -// props is the props of the thing we need to write an -// extended header for. -// Don't be shy with it. Just encode everything. -function ExtendedHeaderWriter (props) { - // console.error(">> ehw ctor") - var me = this - - if (!(me instanceof ExtendedHeaderWriter)) { - return new ExtendedHeaderWriter(props) - } - - me.fields = props - - var p = - { path : ("PaxHeader" + path.join("/", props.path || "")) - .replace(/\\/g, "/").substr(0, 100) - , mode : props.mode || 0666 - , uid : props.uid || 0 - , gid : props.gid || 0 - , size : 0 // will be set later - , mtime : props.mtime || Date.now() / 1000 - , type : "x" - , linkpath : "" - , ustar : "ustar\0" - , ustarver : "00" - , uname : props.uname || "" - , gname : props.gname || "" - , devmaj : props.devmaj || 0 - , devmin : props.devmin || 0 - } - - - EntryWriter.call(me, p) - // console.error(">> ehw props", me.props) - me.props = p - - me._meta = true -} - -ExtendedHeaderWriter.prototype.end = function () { - // console.error(">> ehw end") - var me = this - - if (me._ended) return - me._ended = true - - me._encodeFields() - - if (me.props.size === 0) { - // nothing to write! - me._ready = true - me._stream.end() - return - } - - me._stream.write(TarHeader.encode(me.props)) - me.body.forEach(function (l) { - me._stream.write(l) - }) - me._ready = true - - // console.error(">> ehw _process calling end()", me.props) - this._stream.end() -} - -ExtendedHeaderWriter.prototype._encodeFields = function () { - // console.error(">> ehw _encodeFields") - this.body = [] - if (this.fields.prefix) { - this.fields.path = this.fields.prefix + "/" + this.fields.path - this.fields.prefix = "" - } - encodeFields(this.fields, "", this.body, this.fields.noProprietary) - var me = this - this.body.forEach(function (l) { - me.props.size += l.length - }) -} - -function encodeFields (fields, prefix, body, nop) { - // console.error(">> >> ehw encodeFields") - // "%d %s=%s\n", , , - // The length is a decimal number, and includes itself and the \n - // Numeric values are decimal strings. - - Object.keys(fields).forEach(function (k) { - var val = fields[k] - , numeric = tar.numeric[k] - - if (prefix) k = prefix + "." 
+ k - - // already including NODETAR.type, don't need File=true also - if (k === fields.type && val === true) return - - switch (k) { - // don't include anything that's always handled just fine - // in the normal header, or only meaningful in the context - // of nodetar - case "mode": - case "cksum": - case "ustar": - case "ustarver": - case "prefix": - case "basename": - case "dirname": - case "needExtended": - case "block": - case "filter": - return - - case "rdev": - if (val === 0) return - break - - case "nlink": - case "dev": // Truly a hero among men, Creator of Star! - case "ino": // Speak his name with reverent awe! It is: - k = "SCHILY." + k - break - - default: break - } - - if (val && typeof val === "object" && - !Buffer.isBuffer(val)) encodeFields(val, k, body, nop) - else if (val === null || val === undefined) return - else body.push.apply(body, encodeField(k, val, nop)) - }) - - return body -} - -function encodeField (k, v, nop) { - // lowercase keys must be valid, otherwise prefix with - // "NODETAR." - if (k.charAt(0) === k.charAt(0).toLowerCase()) { - var m = k.split(".")[0] - if (!tar.knownExtended[m]) k = "NODETAR." + k - } - - // no proprietary - if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) { - return [] - } - - if (typeof val === "number") val = val.toString(10) - - var s = new Buffer(" " + k + "=" + v + "\n") - , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1 - - // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) - - // if adding that many digits will make it go over that length, - // then add one to it. For example, if the string is: - // " foo=bar\n" - // then that's 9 characters. With the "9", that bumps the length - // up to 10. However, this is invalid: - // "10 foo=bar\n" - // but, since that's actually 11 characters, since 10 adds another - // character to the length, and the length includes the number - // itself. In that case, just bump it up again. - if (s.length + digits >= Math.pow(10, digits)) digits += 1 - // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) - - var len = digits + s.length - // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len) - var lenBuf = new Buffer("" + len) - if (lenBuf.length + s.length !== len) { - throw new Error("Bad length calculation\n"+ - "len="+len+"\n"+ - "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+ - "lenBuf.length="+lenBuf.length+"\n"+ - "digits="+digits+"\n"+ - "s="+JSON.stringify(s.toString())+"\n"+ - "s.length="+s.length) - } - - return [lenBuf, s] -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/extended-header.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/extended-header.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/extended-header.js 2013-05-29 23:36:35.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/extended-header.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,140 +0,0 @@ -// An Entry consisting of: -// -// "%d %s=%s\n", , , -// -// The length is a decimal number, and includes itself and the \n -// \0 does not terminate anything. Only the length terminates the string. -// Numeric values are decimal strings. 
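encodeField above handles the pax framing quirk that the decimal length prefix counts itself, so the digit count has to be grown to a fixed point. (Note in passing that it tests `typeof val` although its parameter is named `v`, so that branch never fires; string concatenation papers over it.) The same fix-point as a standalone sketch:

```javascript
// Frame one pax record: "%d %s=%s\n", where the length counts itself.
function encodePaxRecord(key, value) {
  var body = " " + key + "=" + value + "\n";
  var len = body.length;
  while (String(len).length + body.length !== len) {
    len = String(len).length + body.length; // grow until the prefix fits
  }
  return len + body;
}

console.log(JSON.stringify(encodePaxRecord("path", "some/long/name")));
// => "23 path=some/long/name\n"
```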
- -module.exports = ExtendedHeader - -var Entry = require("./entry.js") - , inherits = require("inherits") - , tar = require("../tar.js") - , numeric = tar.numeric - , keyTrans = { "SCHILY.dev": "dev" - , "SCHILY.ino": "ino" - , "SCHILY.nlink": "nlink" } - -function ExtendedHeader () { - Entry.apply(this, arguments) - this.on("data", this._parse) - this.fields = {} - this._position = 0 - this._fieldPos = 0 - this._state = SIZE - this._sizeBuf = [] - this._keyBuf = [] - this._valBuf = [] - this._size = -1 - this._key = "" -} - -inherits(ExtendedHeader, Entry) -ExtendedHeader.prototype._parse = parse - -var s = 0 - , states = ExtendedHeader.states = {} - , SIZE = states.SIZE = s++ - , KEY = states.KEY = s++ - , VAL = states.VAL = s++ - , ERR = states.ERR = s++ - -Object.keys(states).forEach(function (s) { - states[states[s]] = states[s] -}) - -states[s] = null - -// char code values for comparison -var _0 = "0".charCodeAt(0) - , _9 = "9".charCodeAt(0) - , point = ".".charCodeAt(0) - , a = "a".charCodeAt(0) - , Z = "Z".charCodeAt(0) - , a = "a".charCodeAt(0) - , z = "z".charCodeAt(0) - , space = " ".charCodeAt(0) - , eq = "=".charCodeAt(0) - , cr = "\n".charCodeAt(0) - -function parse (c) { - if (this._state === ERR) return - - for ( var i = 0, l = c.length - ; i < l - ; this._position++, this._fieldPos++, i++) { - // console.error("top of loop, size="+this._size) - - var b = c[i] - - if (this._size >= 0 && this._fieldPos > this._size) { - error(this, "field exceeds length="+this._size) - return - } - - switch (this._state) { - case ERR: return - - case SIZE: - // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString()) - if (b === space) { - this._state = KEY - // this._fieldPos = this._sizeBuf.length - this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10) - this._sizeBuf.length = 0 - continue - } - if (b < _0 || b > _9) { - error(this, "expected [" + _0 + ".." + _9 + "], got " + b) - return - } - this._sizeBuf.push(b) - continue - - case KEY: - // can be any char except =, not > size. 
- if (b === eq) { - this._state = VAL - this._key = new Buffer(this._keyBuf).toString() - if (keyTrans[this._key]) this._key = keyTrans[this._key] - this._keyBuf.length = 0 - continue - } - this._keyBuf.push(b) - continue - - case VAL: - // field must end with cr - if (this._fieldPos === this._size - 1) { - // console.error("finished with "+this._key) - if (b !== cr) { - error(this, "expected \\n at end of field") - return - } - var val = new Buffer(this._valBuf).toString() - if (numeric[this._key]) { - val = parseFloat(val) - } - this.fields[this._key] = val - - this._valBuf.length = 0 - this._state = SIZE - this._size = -1 - this._fieldPos = -1 - continue - } - this._valBuf.push(b) - continue - } - } -} - -function error (me, msg) { - msg = "invalid header: " + msg - + "\nposition=" + me._position - + "\nfield position=" + me._fieldPos - - me.error(msg) - me.state = ERR -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/extract.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/extract.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/extract.js 2013-05-29 23:36:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/extract.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,78 +0,0 @@ -// give it a tarball and a path, and it'll dump the contents - -module.exports = Extract - -var tar = require("../tar.js") - , fstream = require("fstream") - , inherits = require("inherits") - , path = require("path") - -function Extract (opts) { - if (!(this instanceof Extract)) return new Extract(opts) - tar.Parse.apply(this) - - // have to dump into a directory - opts.type = "Directory" - opts.Directory = true - - if (typeof opts !== "object") { - opts = { path: opts } - } - - // better to drop in cwd? seems more standard. - opts.path = opts.path || path.resolve("node-tar-extract") - opts.type = "Directory" - opts.Directory = true - - // similar to --strip or --strip-components - opts.strip = +opts.strip - if (!opts.strip || opts.strip <= 0) opts.strip = 0 - - this._fst = fstream.Writer(opts) - - this.pause() - var me = this - - // Hardlinks in tarballs are relative to the root - // of the tarball. So, they need to be resolved against - // the target directory in order to be created properly. - me.on("entry", function (entry) { - // if there's a "strip" argument, then strip off that many - // path components. - if (opts.strip) { - var p = entry.path.split("/").slice(opts.strip).join("/") - entry.path = entry.props.path = p - if (entry.linkpath) { - var lp = entry.linkpath.split("/").slice(opts.strip).join("/") - entry.linkpath = entry.props.linkpath = lp - } - } - if (entry.type !== "Link") return - entry.linkpath = entry.props.linkpath = - path.join(opts.path, path.join("/", entry.props.linkpath)) - }) - - this._fst.on("ready", function () { - me.pipe(me._fst, { end: false }) - me.resume() - }) - - // this._fst.on("end", function () { - // console.error("\nEEEE Extract End", me._fst.path) - // }) - - this._fst.on("close", function () { - // console.error("\nEEEE Extract End", me._fst.path) - me.emit("end") - me.emit("close") - }) -} - -inherits(Extract, tar.Parse) - -Extract.prototype._streamEnd = function () { - var me = this - if (!me._ended) me.error("unexpected eof") - me._fst.end() - // my .end() is coming later. 
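Extract above wires a tar.Parse to an fstream.Writer and resolves hardlink targets against the destination; `strip` drops leading path components, like tar's --strip-components. Typical usage of this API, with hypothetical file names:

```javascript
var fs = require("fs");
var zlib = require("zlib");
var tar = require("tar");

fs.createReadStream("package.tgz") // hypothetical input tarball
  .pipe(zlib.createGunzip())
  .pipe(tar.Extract({ path: "unpacked", strip: 1 }))
  .on("error", function (er) { console.error("extract failed:", er); })
  .on("end", function () { console.log("done"); });
```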
-} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js 2013-05-29 23:36:59.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -module.exports = GlobalHeaderWriter - -var ExtendedHeaderWriter = require("./extended-header-writer.js") - , inherits = require("inherits") - -inherits(GlobalHeaderWriter, ExtendedHeaderWriter) - -function GlobalHeaderWriter (props) { - if (!(this instanceof GlobalHeaderWriter)) { - return new GlobalHeaderWriter(props) - } - ExtendedHeaderWriter.call(this, props) - this.props.type = "g" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/header.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/header.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/header.js 2013-01-23 18:59:39.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/header.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,385 +0,0 @@ -// parse a 512-byte header block to a data object, or vice-versa -// If the data won't fit nicely in a simple header, then generate -// the appropriate extended header file, and return that. - -module.exports = TarHeader - -var tar = require("../tar.js") - , fields = tar.fields - , fieldOffs = tar.fieldOffs - , fieldEnds = tar.fieldEnds - , fieldSize = tar.fieldSize - , numeric = tar.numeric - , assert = require("assert").ok - , space = " ".charCodeAt(0) - , slash = "/".charCodeAt(0) - , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null - -function TarHeader (block) { - if (!(this instanceof TarHeader)) return new TarHeader(block) - if (block) this.decode(block) -} - -TarHeader.prototype = - { decode : decode - , encode: encode - , calcSum: calcSum - , checkSum: checkSum - } - -TarHeader.parseNumeric = parseNumeric -TarHeader.encode = encode -TarHeader.decode = decode - -// note that this will only do the normal ustar header, not any kind -// of extended posix header file. If something doesn't fit comfortably, -// then it will set obj.needExtended = true, and set the block to -// the closest approximation. -function encode (obj) { - if (!obj && !(this instanceof TarHeader)) throw new Error( - "encode must be called on a TarHeader, or supplied an object") - - obj = obj || this - var block = obj.block = new Buffer(512) - - // if the object has a "prefix", then that's actually an extension of - // the path field. - if (obj.prefix) { - // console.error("%% header encoding, got a prefix", obj.prefix) - obj.path = obj.prefix + "/" + obj.path - // console.error("%% header encoding, prefixed path", obj.path) - obj.prefix = "" - } - - obj.needExtended = false - - if (obj.mode) { - if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8) - obj.mode = obj.mode & 0777 - } - - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , off = fieldOffs[f] - , end = fieldEnds[f] - , ret - - switch (field) { - case "cksum": - // special, done below, after all the others - break - - case "prefix": - // special, this is an extension of the "path" field. - // console.error("%% header encoding, skip prefix later") - break - - case "type": - // convert from long name to a single char. 
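The "path" case in encode() below spills long names into ustar's 155-byte `prefix` field, hunting for a slash so that both halves fit their fields (the path field gets 100 bytes). The search in isolation, as a sketch that assumes ASCII paths:

```javascript
// Split a long name across ustar's path (100 bytes) and prefix (155 bytes).
function splitUstarPath(p) {
  if (p.length <= 100) return { prefix: "", path: p };
  for (var i = p.length - 101; i < p.length; i++) {
    if (i > 155) break; // prefix would no longer fit
    if (i >= 0 && p.charAt(i) === "/") {
      return { prefix: p.slice(0, i), path: p.slice(i + 1) };
    }
  }
  return null; // no workable slash: caller needs a pax extended header
}

var long = new Array(40).join("dir/") + "file.txt"; // 164 chars
console.log(splitUstarPath(long));
```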
- var type = obj.type || "0" - if (type.length > 1) { - type = tar.types[obj.type] - if (!type) type = "0" - } - writeText(block, off, end, type) - break - - case "path": - // uses the "prefix" field if > 100 bytes, but <= 255 - var pathLen = Buffer.byteLength(obj.path) - , pathFSize = fieldSize[fields.path] - , prefFSize = fieldSize[fields.prefix] - - // paths between 100 and 255 should use the prefix field. - // longer than 255 - if (pathLen > pathFSize && - pathLen <= pathFSize + prefFSize) { - // need to find a slash somewhere in the middle so that - // path and prefix both fit in their respective fields - var searchStart = pathLen - 1 - pathFSize - , searchEnd = prefFSize - , found = false - , pathBuf = new Buffer(obj.path) - - for ( var s = searchStart - ; (s <= searchEnd) - ; s ++ ) { - if (pathBuf[s] === slash || pathBuf[s] === bslash) { - found = s - break - } - } - - if (found !== false) { - prefix = pathBuf.slice(0, found).toString("utf8") - path = pathBuf.slice(found + 1).toString("utf8") - - ret = writeText(block, off, end, path) - off = fieldOffs[fields.prefix] - end = fieldEnds[fields.prefix] - // console.error("%% header writing prefix", off, end, prefix) - ret = writeText(block, off, end, prefix) || ret - break - } - } - - // paths less than 100 chars don't need a prefix - // and paths longer than 255 need an extended header and will fail - // on old implementations no matter what we do here. - // Null out the prefix, and fallthrough to default. - // console.error("%% header writing no prefix") - var poff = fieldOffs[fields.prefix] - , pend = fieldEnds[fields.prefix] - writeText(block, poff, pend, "") - // fallthrough - - // all other fields are numeric or text - default: - ret = numeric[field] - ? writeNumeric(block, off, end, obj[field]) - : writeText(block, off, end, obj[field] || "") - break - } - obj.needExtended = obj.needExtended || ret - } - - var off = fieldOffs[fields.cksum] - , end = fieldEnds[fields.cksum] - - writeNumeric(block, off, end, calcSum.call(this, block)) - - return block -} - -// if it's a negative number, or greater than will fit, -// then use write256. -var MAXNUM = { 12: 077777777777 - , 11: 07777777777 - , 8 : 07777777 - , 7 : 0777777 } -function writeNumeric (block, off, end, num) { - var writeLen = end - off - , maxNum = MAXNUM[writeLen] || 0 - - num = num || 0 - // console.error(" numeric", num) - - if (num instanceof Date || - Object.prototype.toString.call(num) === "[object Date]") { - num = num.getTime() / 1000 - } - - if (num > maxNum || num < 0) { - write256(block, off, end, num) - // need an extended header if negative or too big. - return true - } - - // god, tar is so annoying - // if the string is small enough, you should put a space - // between the octal string and the \0, but if it doesn't - // fit, then don't. - var numStr = Math.floor(num).toString(8) - if (num < MAXNUM[writeLen - 1]) numStr += " " - - // pad with "0" chars - if (numStr.length < writeLen) { - numStr = (new Array(writeLen - numStr.length).join("0")) + numStr - } - - if (numStr.length !== writeLen - 1) { - throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" + - "expected: "+writeLen) - } - block.write(numStr, off, writeLen, "utf8") - block[end - 1] = 0 -} - -function write256 (block, off, end, num) { - var buf = block.slice(off, end) - var positive = num >= 0 - buf[0] = positive ? 
0x80 : 0xFF - - // get the number as a base-256 tuple - if (!positive) num *= -1 - var tuple = [] - do { - var n = num % 256 - tuple.push(n) - num = (num - n) / 256 - } while (num) - - var bytes = tuple.length - - var fill = buf.length - bytes - for (var i = 1; i < fill; i ++) { - buf[i] = positive ? 0 : 0xFF - } - - // tuple is a base256 number, with [0] as the *least* significant byte - // if it's negative, then we need to flip all the bits once we hit the - // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's- - // complement is (0xFF - n). - var zero = true - for (i = bytes; i > 0; i --) { - var byte = tuple[bytes - i] - if (positive) buf[fill + i] = byte - else if (zero && byte === 0) buf[fill + i] = 0 - else if (zero) { - zero = false - buf[fill + i] = 0x100 - byte - } else buf[fill + i] = 0xFF - byte - } -} - -function writeText (block, off, end, str) { - // strings are written as utf8, then padded with \0 - var strLen = Buffer.byteLength(str) - , writeLen = Math.min(strLen, end - off) - // non-ascii fields need extended headers - // long fields get truncated - , needExtended = strLen !== str.length || strLen > writeLen - - // write the string, and null-pad - if (writeLen > 0) block.write(str, off, writeLen, "utf8") - for (var i = off + writeLen; i < end; i ++) block[i] = 0 - - return needExtended -} - -function calcSum (block) { - block = block || this.block - assert(Buffer.isBuffer(block) && block.length === 512) - - if (!block) throw new Error("Need block to checksum") - - // now figure out what it would be if the cksum was " " - var sum = 0 - , start = fieldOffs[fields.cksum] - , end = fieldEnds[fields.cksum] - - for (var i = 0; i < fieldOffs[fields.cksum]; i ++) { - sum += block[i] - } - - for (var i = start; i < end; i ++) { - sum += space - } - - for (var i = end; i < 512; i ++) { - sum += block[i] - } - - return sum -} - - -function checkSum (block) { - var sum = calcSum.call(this, block) - block = block || this.block - - var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum]) - cksum = parseNumeric(cksum) - - return cksum === sum -} - -function decode (block) { - block = block || this.block - assert(Buffer.isBuffer(block) && block.length === 512) - - this.block = block - this.cksumValid = this.checkSum() - - var prefix = null - - // slice off each field. - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , val = block.slice(fieldOffs[f], fieldEnds[f]) - - switch (field) { - case "ustar": - // if not ustar, then everything after that is just padding. - if (val.toString() !== "ustar\0") { - this.ustar = false - return - } else { - // console.error("ustar:", val, val.toString()) - this.ustar = val.toString() - } - break - - // prefix is special, since it might signal the xstar header - case "prefix": - var atime = parseNumeric(val.slice(131, 131 + 12)) - , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12)) - if ((val[130] === 0 || val[130] === space) && - typeof atime === "number" && - typeof ctime === "number" && - val[131 + 12] === space && - val[131 + 12 + 12] === space) { - this.atime = atime - this.ctime = ctime - val = val.slice(0, 130) - } - prefix = val.toString("utf8").replace(/\0+$/, "") - // console.error("%% header reading prefix", prefix) - break - - // all other fields are null-padding text - // or a number. 
- default: - if (numeric[field]) { - this[field] = parseNumeric(val) - } else { - this[field] = val.toString("utf8").replace(/\0+$/, "") - } - break - } - } - - // if we got a prefix, then prepend it to the path. - if (prefix) { - this.path = prefix + "/" + this.path - // console.error("%% header got a prefix", this.path) - } -} - -function parse256 (buf) { - // first byte MUST be either 80 or FF - // 80 for positive, FF for 2's comp - var positive - if (buf[0] === 0x80) positive = true - else if (buf[0] === 0xFF) positive = false - else return null - - // build up a base-256 tuple from the least sig to the highest - var zero = false - , tuple = [] - for (var i = buf.length - 1; i > 0; i --) { - var byte = buf[i] - if (positive) tuple.push(byte) - else if (zero && byte === 0) tuple.push(0) - else if (zero) { - zero = false - tuple.push(0x100 - byte) - } else tuple.push(0xFF - byte) - } - - for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) { - sum += tuple[i] * Math.pow(256, i) - } - - return positive ? sum : -1 * sum -} - -function parseNumeric (f) { - if (f[0] & 0x80) return parse256(f) - - var str = f.toString("utf8").split("\0")[0].trim() - , res = parseInt(str, 8) - - return isNaN(res) ? null : res -} - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/pack.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/pack.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/pack.js 2013-05-29 23:37:10.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/pack.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,231 +0,0 @@ -// pipe in an fstream, and it'll make a tarball. -// key-value pair argument is global extended header props. - -module.exports = Pack - -var EntryWriter = require("./entry-writer.js") - , Stream = require("stream").Stream - , path = require("path") - , inherits = require("inherits") - , GlobalHeaderWriter = require("./global-header-writer.js") - , collect = require("fstream").collect - , eof = new Buffer(512) - -for (var i = 0; i < 512; i ++) eof[i] = 0 - -inherits(Pack, Stream) - -function Pack (props) { - // console.error("-- p ctor") - var me = this - if (!(me instanceof Pack)) return new Pack(props) - - if (props) me._noProprietary = props.noProprietary - else me._noProprietary = false - - me._global = props - - me.readable = true - me.writable = true - me._buffer = [] - // console.error("-- -- set current to null in ctor") - me._currentEntry = null - me._processing = false - - me._pipeRoot = null - me.on("pipe", function (src) { - if (src.root === me._pipeRoot) return - me._pipeRoot = src - src.on("end", function () { - me._pipeRoot = null - }) - me.add(src) - }) -} - -Pack.prototype.addGlobal = function (props) { - // console.error("-- p addGlobal") - if (this._didGlobal) return - this._didGlobal = true - - var me = this - GlobalHeaderWriter(props) - .on("data", function (c) { - me.emit("data", c) - }) - .end() -} - -Pack.prototype.add = function (stream) { - if (this._global && !this._didGlobal) this.addGlobal(this._global) - - if (this._ended) return this.emit("error", new Error("add after end")) - - collect(stream) - this._buffer.push(stream) - this._process() - this._needDrain = this._buffer.length > 0 - return !this._needDrain -} - -Pack.prototype.pause = function () { - this._paused = true - if (this._currentEntry) this._currentEntry.pause() - this.emit("pause") -} - -Pack.prototype.resume = function () { - this._paused = false - if (this._currentEntry) 
this._currentEntry.resume() - this.emit("resume") - this._process() -} - -Pack.prototype.end = function () { - this._ended = true - this._buffer.push(eof) - this._process() -} - -Pack.prototype._process = function () { - var me = this - if (me._paused || me._processing) { - return - } - - var entry = me._buffer.shift() - - if (!entry) { - if (me._needDrain) { - me.emit("drain") - } - return - } - - if (entry.ready === false) { - // console.error("-- entry is not ready", entry) - me._buffer.unshift(entry) - entry.on("ready", function () { - // console.error("-- -- ready!", entry) - me._process() - }) - return - } - - me._processing = true - - if (entry === eof) { - // need 2 ending null blocks. - me.emit("data", eof) - me.emit("data", eof) - me.emit("end") - me.emit("close") - return - } - - // Change the path to be relative to the root dir that was - // added to the tarball. - // - // XXX This should be more like how -C works, so you can - // explicitly set a root dir, and also explicitly set a pathname - // in the tarball to use. That way we can skip a lot of extra - // work when resolving symlinks for bundled dependencies in npm. - - var root = path.dirname((entry.root || entry).path) - var wprops = {} - - Object.keys(entry.props || {}).forEach(function (k) { - wprops[k] = entry.props[k] - }) - - if (me._noProprietary) wprops.noProprietary = true - - wprops.path = path.relative(root, entry.path || '') - - // actually not a matter of opinion or taste. - if (process.platform === "win32") { - wprops.path = wprops.path.replace(/\\/g, "/") - } - - if (!wprops.type) - wprops.type = 'Directory' - - switch (wprops.type) { - // sockets not supported - case "Socket": - return - - case "Directory": - wprops.path += "/" - wprops.size = 0 - break - - case "Link": - var lp = path.resolve(path.dirname(entry.path), entry.linkpath) - wprops.linkpath = path.relative(root, lp) || "." - wprops.size = 0 - break - - case "SymbolicLink": - var lp = path.resolve(path.dirname(entry.path), entry.linkpath) - wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "." - wprops.size = 0 - break - } - - // console.error("-- new writer", wprops) - // if (!wprops.type) { - // // console.error("-- no type?", entry.constructor.name, entry) - // } - - // console.error("-- -- set current to new writer", wprops.path) - var writer = me._currentEntry = EntryWriter(wprops) - - writer.parent = me - - // writer.on("end", function () { - // // console.error("-- -- writer end", writer.path) - // }) - - writer.on("data", function (c) { - me.emit("data", c) - }) - - writer.on("header", function () { - Buffer.prototype.toJSON = function () { - return this.toString().split(/\0/).join(".") - } - // console.error("-- -- writer header %j", writer.props) - if (writer.props.size === 0) nextEntry() - }) - writer.on("close", nextEntry) - - var ended = false - function nextEntry () { - if (ended) return - ended = true - - // console.error("-- -- writer close", writer.path) - // console.error("-- -- set current to null", wprops.path) - me._currentEntry = null - me._processing = false - me._process() - } - - writer.on("error", function (er) { - // console.error("-- -- writer error", writer.path) - me.emit("error", er) - }) - - // if it's the root, then there's no need to add its entries, - // or data, since they'll be added directly. 
- if (entry === me._pipeRoot) { - // console.error("-- is the root, don't auto-add") - writer.add = null - } - - entry.pipe(writer) -} - -Pack.prototype.destroy = function () {} -Pack.prototype.write = function () {} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/parse.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/parse.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/lib/parse.js 2013-05-29 23:37:17.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/lib/parse.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,270 +0,0 @@ - -// A writable stream. -// It emits "entry" events, which provide a readable stream that has -// header info attached. - -module.exports = Parse.create = Parse - -var stream = require("stream") - , Stream = stream.Stream - , BlockStream = require("block-stream") - , tar = require("../tar.js") - , TarHeader = require("./header.js") - , Entry = require("./entry.js") - , BufferEntry = require("./buffer-entry.js") - , ExtendedHeader = require("./extended-header.js") - , assert = require("assert").ok - , inherits = require("inherits") - , fstream = require("fstream") - -// reading a tar is a lot like reading a directory -// However, we're actually not going to run the ctor, -// since it does a stat and various other stuff. -// This inheritance gives us the pause/resume/pipe -// behavior that is desired. -inherits(Parse, fstream.Reader) - -function Parse () { - var me = this - if (!(me instanceof Parse)) return new Parse() - - // doesn't apply fstream.Reader ctor? - // no, becasue we don't want to stat/etc, we just - // want to get the entry/add logic from .pipe() - Stream.apply(me) - - me.writable = true - me.readable = true - me._stream = new BlockStream(512) - me.position = 0 - - me._stream.on("error", function (e) { - me.emit("error", e) - }) - - me._stream.on("data", function (c) { - me._process(c) - }) - - me._stream.on("end", function () { - me._streamEnd() - }) - - me._stream.on("drain", function () { - me.emit("drain") - }) -} - -// overridden in Extract class, since it needs to -// wait for its DirWriter part to finish before -// emitting "end" -Parse.prototype._streamEnd = function () { - var me = this - if (!me._ended) me.error("unexpected eof") - me.emit("end") -} - -// a tar reader is actually a filter, not just a readable stream. -// So, you should pipe a tarball stream into it, and it needs these -// write/end methods to do that. -Parse.prototype.write = function (c) { - if (this._ended) { - // gnutar puts a LOT of nulls at the end. - // you can keep writing these things forever. - // Just ignore them. - for (var i = 0, l = c.length; i > l; i ++) { - if (c[i] !== 0) return this.error("write() after end()") - } - return - } - return this._stream.write(c) -} - -Parse.prototype.end = function (c) { - this._ended = true - return this._stream.end(c) -} - -// don't need to do anything, since we're just -// proxying the data up from the _stream. -// Just need to override the parent's "Not Implemented" -// error-thrower. -Parse.prototype._read = function () {} - -Parse.prototype._process = function (c) { - assert(c && c.length === 512, "block size should be 512") - - // one of three cases. - // 1. A new header - // 2. A part of a file/extended header - // 3. 
One of two or more EOF null blocks - - if (this._entry) { - var entry = this._entry - entry.write(c) - if (entry._remaining === 0) { - entry.end() - this._entry = null - } - } else { - // either zeroes or a header - var zero = true - for (var i = 0; i < 512 && zero; i ++) { - zero = c[i] === 0 - } - - // eof is *at least* 2 blocks of nulls, and then the end of the - // file. you can put blocks of nulls between entries anywhere, - // so appending one tarball to another is technically valid. - // ending without the eof null blocks is not allowed, however. - if (zero) { - this._ended = this._eofStarted - this._eofStarted = true - } else { - this._ended = this._eofStarted = false - this._startEntry(c) - } - - } - - this.position += 512 -} - -// take a header chunk, start the right kind of entry. -Parse.prototype._startEntry = function (c) { - var header = new TarHeader(c) - , self = this - , entry - , ev - , EntryType - , onend - , meta = false - - if (null === header.size || !header.cksumValid) { - var e = new Error("invalid tar file") - e.header = header - e.tar_file_offset = this.position - e.tar_block = this.position / 512 - this.emit("error", e) - } - - switch (tar.types[header.type]) { - case "File": - case "OldFile": - case "Link": - case "SymbolicLink": - case "CharacterDevice": - case "BlockDevice": - case "Directory": - case "FIFO": - case "ContiguousFile": - case "GNUDumpDir": - // start a file. - // pass in any extended headers - // These ones consumers are typically most interested in. - EntryType = Entry - ev = "entry" - break - - case "GlobalExtendedHeader": - // extended headers that apply to the rest of the tarball - EntryType = ExtendedHeader - onend = function () { - self._global = self._global || {} - Object.keys(entry.fields).forEach(function (k) { - self._global[k] = entry.fields[k] - }) - } - ev = "globalExtendedHeader" - meta = true - break - - case "ExtendedHeader": - case "OldExtendedHeader": - // extended headers that apply to the next entry - EntryType = ExtendedHeader - onend = function () { - self._extended = entry.fields - } - ev = "extendedHeader" - meta = true - break - - case "NextFileHasLongLinkpath": - // set linkpath= in extended header - EntryType = BufferEntry - onend = function () { - self._extended = self._extended || {} - self._extended.linkpath = entry.body - } - ev = "longLinkpath" - meta = true - break - - case "NextFileHasLongPath": - case "OldGnuLongPath": - // set path= in file-extended header - EntryType = BufferEntry - onend = function () { - self._extended = self._extended || {} - self._extended.path = entry.body - } - ev = "longPath" - meta = true - break - - default: - // all the rest we skip, but still set the _entry - // member, so that we can skip over their data appropriately. - // emit an event to say that this is an ignored entry type? - EntryType = Entry - ev = "ignoredEntry" - break - } - - var global, extended - if (meta) { - global = extended = null - } else { - var global = this._global - var extended = this._extended - - // extendedHeader only applies to one entry, so once we start - // an entry, it's over. - this._extended = null - } - entry = new EntryType(header, extended, global) - entry.meta = meta - - // only proxy data events of normal files. 
- if (!meta) { - entry.on("data", function (c) { - me.emit("data", c) - }) - } - - if (onend) entry.on("end", onend) - - this._entry = entry - var me = this - - entry.on("pause", function () { - me.pause() - }) - - entry.on("resume", function () { - me.resume() - }) - - if (this.listeners("*").length) { - this.emit("*", ev, entry) - } - - this.emit(ev, entry) - - // Zero-byte entry. End immediately. - if (entry.props.size === 0) { - entry.end() - this._entry = null - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE 2012-03-16 23:15:06.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -Copyright (c) Isaac Z. Schlueter -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS -``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md 2011-10-07 03:09:34.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -# block-stream - -A stream of blocks. - -Write data into it, and it'll output data in buffer blocks the size you -specify, padding with zeroes if necessary. - -```javascript -var block = new BlockStream(512) -fs.createReadStream("some-file").pipe(block) -block.pipe(fs.createWriteStream("block-file")) -``` - -When `.end()` or `.flush()` is called, it'll pad the block with zeroes. 
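The block-stream README removed above documents the module's single job: re-chunking an arbitrary byte stream into fixed-size buffer blocks, zero-padding the final one. A minimal sketch of that usage, adapted from the removed README's own example (the `some-file`/`block-file` names are illustrative, and it assumes the `block-stream` package is still installed):

```javascript
var fs = require("fs")
var BlockStream = require("block-stream")

// Re-chunk a byte stream into fixed 512-byte blocks; when .end() or
// .flush() is called, the final partial block is padded with zeroes
// (pass {nopad: true} as a second argument to emit it unpadded instead).
var block = new BlockStream(512)
fs.createReadStream("some-file").pipe(block)
block.pipe(fs.createWriteStream("block-file"))
```

This is also how the tar Parse stream removed above consumes its input: it pipes incoming data through a `BlockStream(512)` and processes the tarball one 512-byte block at a time.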
diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js 2011-10-18 02:11:49.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -var BlockStream = require("../block-stream.js") - -var blockSizes = [16, 25, 1024] - , writeSizes = [4, 8, 15, 16, 17, 64, 100] - , writeCounts = [1, 10, 100] - , tap = require("tap") - -writeCounts.forEach(function (writeCount) { -blockSizes.forEach(function (blockSize) { -writeSizes.forEach(function (writeSize) { - tap.test("writeSize=" + writeSize + - " blockSize="+blockSize + - " writeCount="+writeCount, function (t) { - var f = new BlockStream(blockSize, {nopad: true }) - - var actualChunks = 0 - var actualBytes = 0 - var timeouts = 0 - - f.on("data", function (c) { - timeouts ++ - - actualChunks ++ - actualBytes += c.length - - // make sure that no data gets corrupted, and basic sanity - var before = c.toString() - // simulate a slow write operation - f.pause() - setTimeout(function () { - timeouts -- - - var after = c.toString() - t.equal(after, before, "should not change data") - - // now corrupt it, to find leaks. - for (var i = 0; i < c.length; i ++) { - c[i] = "x".charCodeAt(0) - } - f.resume() - }, 100) - }) - - f.on("end", function () { - // round up to the nearest block size - var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize) - var expectBytes = writeSize * writeCount * 2 - t.equal(actualBytes, expectBytes, - "bytes=" + expectBytes + " writeSize=" + writeSize) - t.equal(actualChunks, expectChunks, - "chunks=" + expectChunks + " writeSize=" + writeSize) - - // wait for all the timeout checks to finish, then end the test - setTimeout(function WAIT () { - if (timeouts > 0) return setTimeout(WAIT) - t.end() - }, 100) - }) - - for (var i = 0; i < writeCount; i ++) { - var a = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0) - var b = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0) - f.write(a) - f.write(b) - } - f.end() - }) -}) }) }) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream.js 2011-10-18 00:16:27.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/block-stream.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -var BlockStream = require("../block-stream.js") - -var blockSizes = [16, 25, 1024] - , writeSizes = [4, 8, 15, 16, 17, 64, 100] - , writeCounts = [1, 10, 100] - , tap = require("tap") - -writeCounts.forEach(function (writeCount) { -blockSizes.forEach(function (blockSize) { -writeSizes.forEach(function (writeSize) { - tap.test("writeSize=" + writeSize + - " blockSize="+blockSize + - " writeCount="+writeCount, function (t) { - var f = new BlockStream(blockSize, {nopad: true }) - - var actualChunks = 0 - var actualBytes = 0 - var timeouts = 0 - - 
f.on("data", function (c) { - timeouts ++ - - actualChunks ++ - actualBytes += c.length - - // make sure that no data gets corrupted, and basic sanity - var before = c.toString() - // simulate a slow write operation - setTimeout(function () { - timeouts -- - - var after = c.toString() - t.equal(after, before, "should not change data") - - // now corrupt it, to find leaks. - for (var i = 0; i < c.length; i ++) { - c[i] = "x".charCodeAt(0) - } - }, 100) - }) - - f.on("end", function () { - // round up to the nearest block size - var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize) - var expectBytes = writeSize * writeCount * 2 - t.equal(actualBytes, expectBytes, - "bytes=" + expectBytes + " writeSize=" + writeSize) - t.equal(actualChunks, expectChunks, - "chunks=" + expectChunks + " writeSize=" + writeSize) - - // wait for all the timeout checks to finish, then end the test - setTimeout(function WAIT () { - if (timeouts > 0) return setTimeout(WAIT) - t.end() - }, 100) - }) - - for (var i = 0; i < writeCount; i ++) { - var a = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0) - var b = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0) - f.write(a) - f.write(b) - } - f.end() - }) -}) }) }) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js 2011-10-18 01:58:25.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -var BlockStream = require("dropper") - -var blockSizes = [16, 25, 1024] - , writeSizes = [4, 8, 15, 16, 17, 64, 100] - , writeCounts = [1, 10, 100] - , tap = require("tap") - -writeCounts.forEach(function (writeCount) { -blockSizes.forEach(function (blockSize) { -writeSizes.forEach(function (writeSize) { - tap.test("writeSize=" + writeSize + - " blockSize="+blockSize + - " writeCount="+writeCount, function (t) { - var f = new BlockStream(blockSize, {nopad: true }) - - var actualChunks = 0 - var actualBytes = 0 - var timeouts = 0 - - f.on("data", function (c) { - timeouts ++ - - actualChunks ++ - actualBytes += c.length - - // make sure that no data gets corrupted, and basic sanity - var before = c.toString() - // simulate a slow write operation - f.pause() - setTimeout(function () { - timeouts -- - - var after = c.toString() - t.equal(after, before, "should not change data") - - // now corrupt it, to find leaks. 
- for (var i = 0; i < c.length; i ++) { - c[i] = "x".charCodeAt(0) - } - f.resume() - }, 100) - }) - - f.on("end", function () { - // round up to the nearest block size - var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize) - var expectBytes = writeSize * writeCount * 2 - t.equal(actualBytes, expectBytes, - "bytes=" + expectBytes + " writeSize=" + writeSize) - t.equal(actualChunks, expectChunks, - "chunks=" + expectChunks + " writeSize=" + writeSize) - - // wait for all the timeout checks to finish, then end the test - setTimeout(function WAIT () { - if (timeouts > 0) return setTimeout(WAIT) - t.end() - }, 100) - }) - - for (var i = 0; i < writeCount; i ++) { - var a = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0) - var b = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0) - f.write(a) - f.write(b) - } - f.end() - }) -}) }) }) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper.js 2011-10-18 00:16:44.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/bench/dropper.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -var BlockStream = require("dropper") - -var blockSizes = [16, 25, 1024] - , writeSizes = [4, 8, 15, 16, 17, 64, 100] - , writeCounts = [1, 10, 100] - , tap = require("tap") - -writeCounts.forEach(function (writeCount) { -blockSizes.forEach(function (blockSize) { -writeSizes.forEach(function (writeSize) { - tap.test("writeSize=" + writeSize + - " blockSize="+blockSize + - " writeCount="+writeCount, function (t) { - var f = new BlockStream(blockSize, {nopad: true }) - - var actualChunks = 0 - var actualBytes = 0 - var timeouts = 0 - - f.on("data", function (c) { - timeouts ++ - - actualChunks ++ - actualBytes += c.length - - // make sure that no data gets corrupted, and basic sanity - var before = c.toString() - // simulate a slow write operation - setTimeout(function () { - timeouts -- - - var after = c.toString() - t.equal(after, before, "should not change data") - - // now corrupt it, to find leaks. 
- for (var i = 0; i < c.length; i ++) { - c[i] = "x".charCodeAt(0) - } - }, 100) - }) - - f.on("end", function () { - // round up to the nearest block size - var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize) - var expectBytes = writeSize * writeCount * 2 - t.equal(actualBytes, expectBytes, - "bytes=" + expectBytes + " writeSize=" + writeSize) - t.equal(actualChunks, expectChunks, - "chunks=" + expectChunks + " writeSize=" + writeSize) - - // wait for all the timeout checks to finish, then end the test - setTimeout(function WAIT () { - if (timeouts > 0) return setTimeout(WAIT) - t.end() - }, 100) - }) - - for (var i = 0; i < writeCount; i ++) { - var a = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0) - var b = new Buffer(writeSize); - for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0) - f.write(a) - f.write(b) - } - f.end() - }) -}) }) }) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js 2013-04-17 20:57:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,209 +0,0 @@ -// write data to it, and it'll emit data in 512 byte blocks. -// if you .end() or .flush(), it'll emit whatever it's got, -// padded with nulls to 512 bytes. - -module.exports = BlockStream - -var Stream = require("stream").Stream - , inherits = require("inherits") - , assert = require("assert").ok - , debug = process.env.DEBUG ? 
console.error : function () {} - -function BlockStream (size, opt) { - this.writable = this.readable = true - this._opt = opt || {} - this._chunkSize = size || 512 - this._offset = 0 - this._buffer = [] - this._bufferLength = 0 - if (this._opt.nopad) this._zeroes = false - else { - this._zeroes = new Buffer(this._chunkSize) - for (var i = 0; i < this._chunkSize; i ++) { - this._zeroes[i] = 0 - } - } -} - -inherits(BlockStream, Stream) - -BlockStream.prototype.write = function (c) { - // debug(" BS write", c) - if (this._ended) throw new Error("BlockStream: write after end") - if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "") - if (c.length) { - this._buffer.push(c) - this._bufferLength += c.length - } - // debug("pushed onto buffer", this._bufferLength) - if (this._bufferLength >= this._chunkSize) { - if (this._paused) { - // debug(" BS paused, return false, need drain") - this._needDrain = true - return false - } - this._emitChunk() - } - return true -} - -BlockStream.prototype.pause = function () { - // debug(" BS pausing") - this._paused = true -} - -BlockStream.prototype.resume = function () { - // debug(" BS resume") - this._paused = false - return this._emitChunk() -} - -BlockStream.prototype.end = function (chunk) { - // debug("end", chunk) - if (typeof chunk === "function") cb = chunk, chunk = null - if (chunk) this.write(chunk) - this._ended = true - this.flush() -} - -BlockStream.prototype.flush = function () { - this._emitChunk(true) -} - -BlockStream.prototype._emitChunk = function (flush) { - // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused) - - // emit a chunk - if (flush && this._zeroes) { - // debug(" BS push zeroes", this._bufferLength) - // push a chunk of zeroes - var padBytes = (this._bufferLength % this._chunkSize) - if (padBytes !== 0) padBytes = this._chunkSize - padBytes - if (padBytes > 0) { - // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes)) - this._buffer.push(this._zeroes.slice(0, padBytes)) - this._bufferLength += padBytes - // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength) - } - } - - if (this._emitting || this._paused) return - this._emitting = true - - // debug(" BS entering loops") - var bufferIndex = 0 - while (this._bufferLength >= this._chunkSize && - (flush || !this._paused)) { - // debug(" BS data emission loop", this._bufferLength) - - var out - , outOffset = 0 - , outHas = this._chunkSize - - while (outHas > 0 && (flush || !this._paused) ) { - // debug(" BS data inner emit loop", this._bufferLength) - var cur = this._buffer[bufferIndex] - , curHas = cur.length - this._offset - // debug("cur=", cur) - // debug("curHas=%j", curHas) - // If it's not big enough to fill the whole thing, then we'll need - // to copy multiple buffers into one. However, if it is big enough, - // then just slice out the part we want, to save unnecessary copying. - // Also, need to copy if we've already done some copying, since buffers - // can't be joined like cons strings. - if (out || curHas < outHas) { - out = out || new Buffer(this._chunkSize) - cur.copy(out, outOffset, - this._offset, this._offset + Math.min(curHas, outHas)) - } else if (cur.length === outHas && this._offset === 0) { - // shortcut -- cur is exactly long enough, and no offset. - out = cur - } else { - // slice out the piece of cur that we need. 
- out = cur.slice(this._offset, this._offset + outHas) - } - - if (curHas > outHas) { - // means that the current buffer couldn't be completely output - // update this._offset to reflect how much WAS written - this._offset += outHas - outHas = 0 - } else { - // output the entire current chunk. - // toss it away - outHas -= curHas - outOffset += curHas - bufferIndex ++ - this._offset = 0 - } - } - - this._bufferLength -= this._chunkSize - assert(out.length === this._chunkSize) - // debug("emitting data", out) - // debug(" BS emitting, paused=%j", this._paused, this._bufferLength) - this.emit("data", out) - out = null - } - // debug(" BS out of loops", this._bufferLength) - - // whatever is left, it's not enough to fill up a block, or we're paused - this._buffer = this._buffer.slice(bufferIndex) - if (this._paused) { - // debug(" BS paused, leaving", this._bufferLength) - this._needsDrain = true - this._emitting = false - return - } - - // if flushing, and not using null-padding, then need to emit the last - // chunk(s) sitting in the queue. We know that it's not enough to - // fill up a whole block, because otherwise it would have been emitted - // above, but there may be some offset. - var l = this._buffer.length - if (flush && !this._zeroes && l) { - if (l === 1) { - if (this._offset) { - this.emit("data", this._buffer[0].slice(this._offset)) - } else { - this.emit("data", this._buffer[0]) - } - } else { - var outHas = this._bufferLength - , out = new Buffer(outHas) - , outOffset = 0 - for (var i = 0; i < l; i ++) { - var cur = this._buffer[i] - , curHas = cur.length - this._offset - cur.copy(out, outOffset, this._offset) - this._offset = 0 - outOffset += curHas - this._bufferLength -= curHas - } - this.emit("data", out) - } - // truncate - this._buffer.length = 0 - this._bufferLength = 0 - this._offset = 0 - } - - // now either drained or ended - // debug("either draining, or ended", this._bufferLength, this._ended) - // means that we've flushed out all that we can so far. - if (this._needDrain) { - // debug("emitting drain", this._bufferLength) - this._needDrain = false - this.emit("drain") - } - - if ((this._bufferLength === 0) && this._ended && !this._endEmitted) { - // debug("emitting end", this._bufferLength) - this._endEmitted = true - this.emit("end") - } - - this._emitting = false - - // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize) -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json 2013-10-25 01:43:04.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -{ - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "name": "block-stream", - "description": "a stream of blocks", - "version": "0.0.7", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/block-stream.git" - }, - "engines": { - "node": "0.4 || >=0.5.8" - }, - "main": "block-stream.js", - "dependencies": { - "inherits": "~2.0.0" - }, - "devDependencies": { - "tap": "0.x" - }, - "scripts": { - "test": "tap test/" - }, - "license": "BSD", - "readme": "# block-stream\n\nA stream of blocks.\n\nWrite data into it, and it'll output data in buffer blocks the size you\nspecify, padding with zeroes if necessary.\n\n```javascript\nvar block = new BlockStream(512)\nfs.createReadStream(\"some-file\").pipe(block)\nblock.pipe(fs.createWriteStream(\"block-file\"))\n```\n\nWhen `.end()` or `.flush()` is called, it'll pad the block with zeroes.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/block-stream/issues" - }, - "_id": "block-stream@0.0.7", - "dist": { - "shasum": "199841982f4e4a1e87a3eb80fb424c97640ea459" - }, - "_from": "block-stream@*", - "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.7.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/LICENSE tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/LICENSE 2013-08-19 22:09:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/README.md 2013-05-16 14:24:38.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,42 +0,0 @@ -Browser-friendly inheritance fully compatible with standard node.js -[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). - -This package exports standard `inherits` from node.js `util` module in -node environment, but also provides alternative browser-friendly -implementation through [browser -field](https://gist.github.com/shtylman/4339901). Alternative -implementation is a literal copy of standard one located in standalone -module to avoid requiring of `util`. It also has a shim for old -browsers with no `Object.create` support. 
- -While keeping you sure you are using standard `inherits` -implementation in node.js environment, it allows bundlers such as -[browserify](https://github.com/substack/node-browserify) to not -include full `util` package to your client code if all you need is -just `inherits` function. It worth, because browser shim for `util` -package is large and `inherits` is often the single function you need -from it. - -It's recommended to use this package instead of -`require('util').inherits` for any code that has chances to be used -not only in node.js but in browser too. - -## usage - -```js -var inherits = require('inherits'); -// then use exactly as the standard one -``` - -## note on version ~1.0 - -Version ~1.0 had completely different motivation and is not compatible -neither with 2.0 nor with standard node.js `inherits`. - -If you are using version ~1.0 and planning to switch to ~2.0, be -careful: - -* new version uses `super_` instead of `super` for referencing - superclass -* new version overwrites current prototype while old one preserves any - existing fields on it diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits.js 2013-05-16 14:22:57.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits.js 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -module.exports = require('util').inherits diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits_browser.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits_browser.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits_browser.js 2013-05-16 14:39:58.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/inherits_browser.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/package.json 2013-10-25 01:43:04.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -{ - "name": "inherits", - "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "version": "2.0.1", - "keywords": [ - "inheritance", - "class", - "klass", - "oop", - "object-oriented", - "inherits", - "browser", - "browserify" - ], - 
"main": "./inherits.js", - "browser": "./inherits_browser.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/inherits" - }, - "license": "ISC", - "scripts": { - "test": "node test" - }, - "readme": "Browser-friendly inheritance fully compatible with standard node.js\n[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).\n\nThis package exports standard `inherits` from node.js `util` module in\nnode environment, but also provides alternative browser-friendly\nimplementation through [browser\nfield](https://gist.github.com/shtylman/4339901). Alternative\nimplementation is a literal copy of standard one located in standalone\nmodule to avoid requiring of `util`. It also has a shim for old\nbrowsers with no `Object.create` support.\n\nWhile keeping you sure you are using standard `inherits`\nimplementation in node.js environment, it allows bundlers such as\n[browserify](https://github.com/substack/node-browserify) to not\ninclude full `util` package to your client code if all you need is\njust `inherits` function. It worth, because browser shim for `util`\npackage is large and `inherits` is often the single function you need\nfrom it.\n\nIt's recommended to use this package instead of\n`require('util').inherits` for any code that has chances to be used\nnot only in node.js but in browser too.\n\n## usage\n\n```js\nvar inherits = require('inherits');\n// then use exactly as the standard one\n```\n\n## note on version ~1.0\n\nVersion ~1.0 had completely different motivation and is not compatible\nneither with 2.0 nor with standard node.js `inherits`.\n\nIf you are using version ~1.0 and planning to switch to ~2.0, be\ncareful:\n\n* new version uses `super_` instead of `super` for referencing\n superclass\n* new version overwrites current prototype while old one preserves any\n existing fields on it\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/inherits/issues" - }, - "_id": "inherits@2.0.1", - "_from": "inherits@2" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/test.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/test.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/node_modules/inherits/test.js 2013-05-16 14:43:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/node_modules/inherits/test.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -var inherits = require('./inherits.js') -var assert = require('assert') - -function test(c) { - assert(c.constructor === Child) - assert(c.constructor.super_ === Parent) - assert(Object.getPrototypeOf(c) === Child.prototype) - assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype) - assert(c instanceof Child) - assert(c instanceof Parent) -} - -function Child() { - Parent.call(this) - test(this) -} - -function Parent() {} - -inherits(Child, Parent) - -var c = new Child -test(c) - -console.log('ok') diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -{ - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "name": "tar", - "description": "tar for node", - "version": "0.1.18", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-tar.git" - }, - "main": "tar.js", - "scripts": { - "test": "tap test/*.js" - }, - "dependencies": { - "inherits": "2", - "block-stream": "*", - "fstream": "~0.1.8" - }, - "devDependencies": { - "tap": "0.x", - "rimraf": "1.x" - }, - "license": "BSD", - "readme": "# node-tar\n\nTar for Node.js.\n\n## Goals of this project\n\n1. Be able to parse and reasonably extract the contents of any tar file\n created by any program that creates tar files, period.\n\n At least, this includes every version of:\n\n * bsdtar\n * gnutar\n * solaris posix tar\n * Joerg Schilling's star (\"Schilly tar\")\n\n2. Create tar files that can be extracted by any of the following tar programs:\n\n * bsdtar/libarchive version 2.6.2\n * gnutar 1.15 and above\n * SunOS Posix tar\n * Joerg Schilling's star (\"Schilly tar\")\n\n3. 100% test coverage. Speed is important. Correctness is slightly more important.\n\n4. Create the kind of tar interface that Node users would want to use.\n\n5. Satisfy npm's needs for a portable tar implementation with a JavaScript interface.\n\n6. No excuses. No complaining. No tolerance for failure.\n\n## But isn't there already a tar.js?\n\nYes, there are a few. This one is going to be better, and it will be\nfanatically maintained, because npm will depend on it.\n\nThat's why I need to write it from scratch. Creating and extracting\ntarballs is such a large part of what npm does, I simply can't have it\nbe a black box any longer.\n\n## Didn't you have something already? Where'd it go?\n\nIt's in the \"old\" folder. It's not functional. Don't use it.\n\nIt was a useful exploration to learn the issues involved, but like most\nsoftware of any reasonable complexity, node-tar won't be useful until\nit's been written at least 3 times.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-tar/issues" - }, - "_id": "tar@0.1.18", - "dist": { - "shasum": "740a7097927841247b0c4b85dcf38072d4fe1478" - }, - "_from": "tar@0", - "_resolved": "https://registry.npmjs.org/tar/-/tar-0.1.18.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/tar.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/tar.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/tar/tar.js 2013-03-20 06:22:05.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/tar/tar.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,173 +0,0 @@ -// field paths that every tar file must have. -// header is padded to 512 bytes. -var f = 0 - , fields = {} - , path = fields.path = f++ - , mode = fields.mode = f++ - , uid = fields.uid = f++ - , gid = fields.gid = f++ - , size = fields.size = f++ - , mtime = fields.mtime = f++ - , cksum = fields.cksum = f++ - , type = fields.type = f++ - , linkpath = fields.linkpath = f++ - , headerSize = 512 - , blockSize = 512 - , fieldSize = [] - -fieldSize[path] = 100 -fieldSize[mode] = 8 -fieldSize[uid] = 8 -fieldSize[gid] = 8 -fieldSize[size] = 12 -fieldSize[mtime] = 12 -fieldSize[cksum] = 8 -fieldSize[type] = 1 -fieldSize[linkpath] = 100 - -// "ustar\0" may introduce another bunch of headers. -// these are optional, and will be nulled out if not present. 
- -var ustar = fields.ustar = f++ - , ustarver = fields.ustarver = f++ - , uname = fields.uname = f++ - , gname = fields.gname = f++ - , devmaj = fields.devmaj = f++ - , devmin = fields.devmin = f++ - , prefix = fields.prefix = f++ - , fill = fields.fill = f++ - -// terminate fields. -fields[f] = null - -fieldSize[ustar] = 6 -fieldSize[ustarver] = 2 -fieldSize[uname] = 32 -fieldSize[gname] = 32 -fieldSize[devmaj] = 8 -fieldSize[devmin] = 8 -fieldSize[prefix] = 155 -fieldSize[fill] = 12 - -// nb: prefix field may in fact be 130 bytes of prefix, -// a null char, 12 bytes for atime, 12 bytes for ctime. -// -// To recognize this format: -// 1. prefix[130] === ' ' or '\0' -// 2. atime and ctime are octal numeric values -// 3. atime and ctime have ' ' in their last byte - -var fieldEnds = {} - , fieldOffs = {} - , fe = 0 -for (var i = 0; i < f; i ++) { - fieldOffs[i] = fe - fieldEnds[i] = (fe += fieldSize[i]) -} - -// build a translation table of field paths. -Object.keys(fields).forEach(function (f) { - if (fields[f] !== null) fields[fields[f]] = f -}) - -// different values of the 'type' field -// paths match the values of Stats.isX() functions, where appropriate -var types = - { 0: "File" - , "\0": "OldFile" // like 0 - , "": "OldFile" - , 1: "Link" - , 2: "SymbolicLink" - , 3: "CharacterDevice" - , 4: "BlockDevice" - , 5: "Directory" - , 6: "FIFO" - , 7: "ContiguousFile" // like 0 - // posix headers - , g: "GlobalExtendedHeader" // k=v for the rest of the archive - , x: "ExtendedHeader" // k=v for the next file - // vendor-specific stuff - , A: "SolarisACL" // skip - , D: "GNUDumpDir" // like 5, but with data, which should be skipped - , I: "Inode" // metadata only, skip - , K: "NextFileHasLongLinkpath" // data = link path of next file - , L: "NextFileHasLongPath" // data = path of next file - , M: "ContinuationFile" // skip - , N: "OldGnuLongPath" // like L - , S: "SparseFile" // skip - , V: "TapeVolumeHeader" // skip - , X: "OldExtendedHeader" // like x - } - -Object.keys(types).forEach(function (t) { - types[types[t]] = types[types[t]] || t -}) - -// values for the mode field -var modes = - { suid: 04000 // set uid on extraction - , sgid: 02000 // set gid on extraction - , svtx: 01000 // set restricted deletion flag on dirs on extraction - , uread: 0400 - , uwrite: 0200 - , uexec: 0100 - , gread: 040 - , gwrite: 020 - , gexec: 010 - , oread: 4 - , owrite: 2 - , oexec: 1 - , all: 07777 - } - -var numeric = - { mode: true - , uid: true - , gid: true - , size: true - , mtime: true - , devmaj: true - , devmin: true - , cksum: true - , atime: true - , ctime: true - , dev: true - , ino: true - , nlink: true - } - -Object.keys(modes).forEach(function (t) { - modes[modes[t]] = modes[modes[t]] || t -}) - -var knownExtended = - { atime: true - , charset: true - , comment: true - , ctime: true - , gid: true - , gname: true - , linkpath: true - , mtime: true - , path: true - , realtime: true - , security: true - , size: true - , uid: true - , uname: true } - - -exports.fields = fields -exports.fieldSize = fieldSize -exports.fieldOffs = fieldOffs -exports.fieldEnds = fieldEnds -exports.types = types -exports.modes = modes -exports.numeric = numeric -exports.headerSize = headerSize -exports.blockSize = blockSize -exports.knownExtended = knownExtended - -exports.Pack = require("./lib/pack.js") -exports.Parse = require("./lib/parse.js") -exports.Extract = require("./lib/extract.js") diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/LICENSE 
tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/LICENSE --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/LICENSE 2011-09-23 16:46:12.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/README.md tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/README.md --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/README.md 2011-08-07 18:22:07.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -The "which" util from npm's guts. - -Finds the first instance of a specified executable in the PATH -environment variable. Does not cache the results, so `hash -r` is not -needed when the PATH changes. diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/bin/which tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/bin/which --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/bin/which 2011-08-07 18:31:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/bin/which 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -#!/usr/bin/env node -var which = require("../") -if (process.argv.length < 3) { - console.error("Usage: which ") - process.exit(1) -} - -which(process.argv[2], function (er, thing) { - if (er) { - console.error(er.message) - process.exit(er.errno || 127) - } - console.log(thing) -}) diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/package.json 2013-10-25 01:43:02.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -{ - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me" - }, - "name": "which", - "description": "Like which(1) unix command. 
Find the first instance of an executable in the PATH.", - "version": "1.0.5", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-which.git" - }, - "main": "which.js", - "bin": { - "which": "./bin/which" - }, - "engines": { - "node": "*" - }, - "dependencies": {}, - "devDependencies": {}, - "readme": "The \"which\" util from npm's guts.\n\nFinds the first instance of a specified executable in the PATH\nenvironment variable. Does not cache the results, so `hash -r` is not\nneeded when the PATH changes.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-which/issues" - }, - "_id": "which@1.0.5", - "dist": { - "shasum": "061486f5423bbb98bb42bc77501271b0f9f8139b" - }, - "_from": "which@1", - "_resolved": "https://registry.npmjs.org/which/-/which-1.0.5.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/which.js tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/which.js --- tilemill-0.10.1~saucy9/node_modules/node-gyp/node_modules/which/which.js 2012-03-02 00:07:21.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/node_modules/which/which.js 1970-01-01 00:00:00.000000000 +0000 @@ -1,104 +0,0 @@ -module.exports = which -which.sync = whichSync - -var path = require("path") - , fs - , COLON = process.platform === "win32" ? ";" : ":" - , isExe - -try { - fs = require("graceful-fs") -} catch (ex) { - fs = require("fs") -} - -if (process.platform == "win32") { - // On windows, there is no good way to check that a file is executable - isExe = function isExe () { return true } -} else { - isExe = function isExe (mod, uid, gid) { - //console.error(mod, uid, gid); - //console.error("isExe?", (mod & 0111).toString(8)) - var ret = (mod & 0001) - || (mod & 0010) && process.getgid && gid === process.getgid() - || (mod & 0100) && process.getuid && uid === process.getuid() - //console.error("isExe?", ret) - return ret - } -} - - - -function which (cmd, cb) { - if (isAbsolute(cmd)) return cb(null, cmd) - var pathEnv = (process.env.PATH || "").split(COLON) - , pathExt = [""] - if (process.platform === "win32") { - pathEnv.push(process.cwd()) - pathExt = (process.env.PATHEXT || ".EXE").split(COLON) - if (cmd.indexOf(".") !== -1) pathExt.unshift("") - } - //console.error("pathEnv", pathEnv) - ;(function F (i, l) { - if (i === l) return cb(new Error("not found: "+cmd)) - var p = path.resolve(pathEnv[i], cmd) - ;(function E (ii, ll) { - if (ii === ll) return F(i + 1, l) - var ext = pathExt[ii] - //console.error(p + ext) - fs.stat(p + ext, function (er, stat) { - if (!er && - stat && - stat.isFile() && - isExe(stat.mode, stat.uid, stat.gid)) { - //console.error("yes, exe!", p + ext) - return cb(null, p + ext) - } - return E(ii + 1, ll) - }) - })(0, pathExt.length) - })(0, pathEnv.length) -} - -function whichSync (cmd) { - if (isAbsolute(cmd)) return cmd - var pathEnv = (process.env.PATH || "").split(COLON) - , pathExt = [""] - if (process.platform === "win32") { - pathEnv.push(process.cwd()) - pathExt = (process.env.PATHEXT || ".EXE").split(COLON) - if (cmd.indexOf(".") !== -1) pathExt.unshift("") - } - for (var i = 0, l = pathEnv.length; i < l; i ++) { - var p = path.join(pathEnv[i], cmd) - for (var j = 0, ll = pathExt.length; j < ll; j ++) { - var cur = p + pathExt[j] - var stat - try { stat = fs.statSync(cur) } catch (ex) {} - if (stat && - stat.isFile() && - isExe(stat.mode, stat.uid, stat.gid)) return cur - } - } - throw new Error("not found: "+cmd) -} - -var isAbsolute = process.platform === 
"win32" ? absWin : absUnix - -function absWin (p) { - if (absUnix(p)) return true - // pull off the device/UNC bit from a windows path. - // from node's lib/path.js - var splitDeviceRe = - /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?([\\\/])?/ - , result = splitDeviceRe.exec(p) - , device = result[1] || '' - , isUnc = device && device.charAt(1) !== ':' - , isAbsolute = !!result[2] || isUnc // UNC paths are always absolute - - return isAbsolute -} - -function absUnix (p) { - return p.charAt(0) === "/" || p === "" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-gyp/package.json tilemill-0.10.1~saucy10/node_modules/node-gyp/package.json --- tilemill-0.10.1~saucy9/node_modules/node-gyp/package.json 2013-10-25 01:43:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-gyp/package.json 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -{ - "name": "node-gyp", - "description": "Node.js native addon build tool", - "keywords": [ - "native", - "addon", - "module", - "c", - "c++", - "bindings", - "gyp" - ], - "version": "0.10.10", - "installVersion": 9, - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://tootallnate.net" - }, - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/node-gyp.git" - }, - "preferGlobal": true, - "bin": { - "node-gyp": "./bin/node-gyp.js" - }, - "main": "./lib/node-gyp.js", - "dependencies": { - "glob": "3", - "graceful-fs": "2", - "fstream": "0", - "minimatch": "0", - "mkdirp": "0", - "nopt": "2", - "npmlog": "0", - "osenv": "0", - "request": "2", - "rimraf": "2", - "semver": "~2.1", - "tar": "0", - "which": "1" - }, - "engines": { - "node": ">= 0.8.0" - }, - "readme": "node-gyp\n=========\n### Node.js native addon build tool\n\n`node-gyp` is a cross-platform command-line tool written in Node.js for compiling\nnative addon modules for Node.js, which takes away the pain of dealing with the\nvarious differences in build platforms. It is the replacement to the `node-waf`\nprogram which is removed for node `v0.8`. If you have a native addon for node that\nstill has a `wscript` file, then you should definitely add a `binding.gyp` file\nto support the latest versions of node.\n\nMultiple target versions of node are supported (i.e. 
`0.8`, `0.9`, `0.10`, ..., `1.0`,\netc.), regardless of what version of node is actually installed on your system\n(`node-gyp` downloads the necessary development files for the target version).\n\n#### Features:\n\n * Easy to use, consistent interface\n * Same commands to build your module on every platform\n * Supports multiple target versions of Node\n\n\nInstallation\n------------\n\nYou can install with `npm`:\n\n``` bash\n$ npm install -g node-gyp\n```\n\nYou will also need to install:\n\n * On Unix:\n * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported)\n * `make`\n * A proper C/C++ compiler toolchain, like GCC\n * On Windows:\n * [Python][windows-python] ([`v2.7.3`][windows-python-v2.7.3] recommended, `v3.x.x` is __*not*__ supported)\n * Windows XP/Vista/7:\n * Microsoft Visual Studio C++ 2010 ([Express][msvc2010] version works well)\n * For 64-bit builds of node and native modules you will _**also**_ need the [Windows 7 64-bit SDK][win7sdk]\n * If the install fails, try uninstalling any C++ 2010 x64&x86 Redistributable that you have installed first.\n * If you get errors that the 64-bit compilers are not installed you may also need the [compiler update for the Windows SDK 7.1]\n * Windows 7/8:\n * Microsoft Visual Studio C++ 2012 for Windows Desktop ([Express][msvc2012] version works well)\n\nNote that OS X is just a flavour of Unix and so needs `python`, `make`, and C/C++.\nAn easy way to obtain these is to install XCode from Apple,\nand then use it to install the command line tools (under Preferences -> Downloads).\n\nHow to Use\n----------\n\nTo compile your native addon, first go to its root directory:\n\n``` bash\n$ cd my_node_addon\n```\n\nThe next step is to generate the appropriate project build files for the current\nplatform. Use `configure` for that:\n\n``` bash\n$ node-gyp configure\n```\n\n__Note__: The `configure` step looks for the `binding.gyp` file in the current\ndirectory to processs. See below for instructions on creating the `binding.gyp` file.\n\nNow you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file\n(on Windows) in the `build/` directory. Next invoke the `build` command:\n\n``` bash\n$ node-gyp build\n```\n\nNow you have your compiled `.node` bindings file! The compiled bindings end up\nin `build/Debug/` or `build/Release/`, depending on the build mode. At this point\nyou can require the `.node` file with Node and run your tests!\n\n__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or\n`-d`) switch when running the either `configure` or `build` command.\n\n\nThe \"binding.gyp\" file\n----------------------\n\nPreviously when node had `node-waf` you had to write a `wscript` file. The\nreplacement for that is the `binding.gyp` file, which describes the configuration\nto build your module in a JSON-like format. 
This file gets placed in the root of\nyour package, alongside the `package.json` file.\n\nA barebones `gyp` file appropriate for building a node addon looks like:\n\n``` python\n{\n \"targets\": [\n {\n \"target_name\": \"binding\",\n \"sources\": [ \"src/binding.cc\" ]\n }\n ]\n}\n```\n\nSome additional resources for writing `gyp` files:\n\n * [\"Hello World\" node addon example](https://github.com/joyent/node/tree/master/test/addons/hello-world)\n * [gyp user documentation](http://code.google.com/p/gyp/wiki/GypUserDocumentation)\n * [gyp input format reference](http://code.google.com/p/gyp/wiki/InputFormatReference)\n * [*\"binding.gyp\" files out in the wild* wiki page](https://github.com/TooTallNate/node-gyp/wiki/%22binding.gyp%22-files-out-in-the-wild)\n\n\nCommands\n--------\n\n`node-gyp` responds to the following commands:\n\n| **Command** | **Description**\n|:--------------|:---------------------------------------------------------------\n| `build` | Invokes `make`/`msbuild.exe` and builds the native addon\n| `clean` | Removes any the `build` dir if it exists\n| `configure` | Generates project build files for the current platform\n| `rebuild` | Runs \"clean\", \"configure\" and \"build\" all in a row\n| `install` | Installs node development header files for the given version\n| `list` | Lists the currently installed node development file versions\n| `remove` | Removes the node development header files for the given version\n\n\nLicense\n-------\n\n(The MIT License)\n\nCopyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n[windows-python]: http://www.python.org/getit/windows\n[windows-python-v2.7.3]: http://www.python.org/download/releases/2.7.3#download\n[msvc2010]: http://go.microsoft.com/?linkid=9709949\n[msvc2012]: http://go.microsoft.com/?linkid=9816758\n[win7sdk]: http://www.microsoft.com/en-us/download/details.aspx?id=8279\n[compiler update for the Windows SDK 7.1]: http://www.microsoft.com/en-us/download/details.aspx?id=4422\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/TooTallNate/node-gyp/issues" - }, - "_id": "node-gyp@0.10.10", - "dist": { - "shasum": "cfcc663df08c4d2ac2e93b4375f5f1290cdbbfee" - }, - "_from": "node-gyp@", - "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-0.10.10.tgz" -} diff -Nru tilemill-0.10.1~saucy9/node_modules/node-markdown/package.json tilemill-0.10.1~saucy10/node_modules/node-markdown/package.json --- tilemill-0.10.1~saucy9/node_modules/node-markdown/package.json 
2013-10-25 01:44:42.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/node-markdown/package.json 2013-10-25 23:13:11.000000000 +0000 @@ -31,10 +31,5 @@ "url": "https://github.com/andris9/node-markdown/issues" }, "_id": "node-markdown@0.1.1", - "dist": { - "shasum": "92c6815c4064e804d9fd358687fce9fba56e8ca3" - }, - "deprecated": "highlight is deprecated in favor of \"highliht.js\"", - "_from": "node-markdown@~0.1.1", - "_resolved": "https://registry.npmjs.org/node-markdown/-/node-markdown-0.1.1.tgz" + "_from": "node-markdown@~0.1.1" } diff -Nru tilemill-0.10.1~saucy9/node_modules/npm/package.json tilemill-0.10.1~saucy10/node_modules/npm/package.json --- tilemill-0.10.1~saucy9/node_modules/npm/package.json 2013-10-25 01:44:53.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/npm/package.json 2013-10-25 23:13:20.000000000 +0000 @@ -712,9 +712,5 @@ "readme": "npm(1) -- node package manager\n==============================\n\n## SYNOPSIS\n\nThis is just enough info to get you up and running.\n\nMuch more info available via `npm help` once it's installed.\n\n## IMPORTANT\n\n**You need node v0.8 or higher to run this program.**\n\nTo install an old **and unsupported** version of npm that works on node 0.3\nand prior, clone the git repo and dig through the old tags and branches.\n\n## Super Easy Install\n\nnpm comes with node now.\n\n### Windows Computers\n\nGet the MSI. npm is in it.\n\n### Apple Macintosh Computers\n\nGet the pkg. npm is in it.\n\n### Other Sorts of Unices\n\nRun `make install`. npm will be installed with node.\n\nIf you want a more fancy pants install (a different version, customized\npaths, etc.) then read on.\n\n## Fancy Install (Unix)\n\nThere's a pretty robust install script at\n. You can download that and run it.\n\n### Slightly Fancier\n\nYou can set any npm configuration params with that script:\n\n npm_config_prefix=/some/path sh install.sh\n\nOr, you can run it in uber-debuggery mode:\n\n npm_debug=1 sh install.sh\n\n### Even Fancier\n\nGet the code with git. Use `make` to build the docs and do other stuff.\nIf you plan on hacking on npm, `make link` is your friend.\n\nIf you've got the npm source code, you can also semi-permanently set\narbitrary config keys using the `./configure --key=val ...`, and then\nrun npm commands by doing `node cli.js `. (This is helpful\nfor testing, or running stuff without actually installing npm itself.)\n\n## Fancy Windows Install\n\nYou can download a zip file from , and unpack it\nin the same folder where node.exe lives.\n\nIf that's not fancy enough for you, then you can fetch the code with\ngit, and mess with it directly.\n\n## Installing on Cygwin\n\nNo.\n\n## Permissions when Using npm to Install Other Stuff\n\n**tl;dr**\n\n* Use `sudo` for greater safety. 
Or don't, if you prefer not to.\n* npm will downgrade permissions if it's root before running any build\n scripts that package authors specified.\n\n### More details...\n\nAs of version 0.3, it is recommended to run npm as root.\nThis allows npm to change the user identifier to the `nobody` user prior\nto running any package build or test commands.\n\nIf you are not the root user, or if you are on a platform that does not\nsupport uid switching, then npm will not attempt to change the userid.\n\nIf you would like to ensure that npm **always** runs scripts as the\n\"nobody\" user, and have it fail if it cannot downgrade permissions, then\nset the following configuration param:\n\n npm config set unsafe-perm false\n\nThis will prevent running in unsafe mode, even as non-root users.\n\n## Uninstalling\n\nSo sad to see you go.\n\n sudo npm uninstall npm -g\n\nOr, if that fails,\n\n sudo make uninstall\n\n## More Severe Uninstalling\n\nUsually, the above instructions are sufficient. That will remove\nnpm, but leave behind anything you've installed.\n\nIf you would like to remove all the packages that you have installed,\nthen you can use the `npm ls` command to find them, and then `npm rm` to\nremove them.\n\nTo remove cruft left behind by npm 0.x, you can use the included\n`clean-old.sh` script file. You can run it conveniently like this:\n\n npm explore npm -g -- sh scripts/clean-old.sh\n\nnpm uses two configuration files, one for per-user configs, and another\nfor global (every-user) configs. You can view them by doing:\n\n npm config get userconfig # defaults to ~/.npmrc\n npm config get globalconfig # defaults to /usr/local/etc/npmrc\n\nUninstalling npm does not remove configuration files by default. You\nmust remove them yourself manually if you want them gone. Note that\nthis means that future npm installs will not remember the settings that\nyou have chosen.\n\n## Using npm Programmatically\n\nIf you would like to use npm programmatically, you can do that.\nIt's not very well documented, but it *is* rather simple.\n\nMost of the time, unless you actually want to do all the things that\nnpm does, you should try using one of npm's dependencies rather than\nusing npm itself, if possible.\n\nEventually, npm will be just a thin cli wrapper around the modules\nthat it depends on, but for now, there are some things that you must\nuse npm itself to do.\n\n var npm = require(\"npm\")\n npm.load(myConfigObject, function (er) {\n if (er) return handlError(er)\n npm.commands.install([\"some\", \"args\"], function (er, data) {\n if (er) return commandFailed(er)\n // command succeeded, and data might have some info\n })\n npm.on(\"log\", function (message) { .... })\n })\n\nThe `load` function takes an object hash of the command-line configs.\nThe various `npm.commands.` functions take an **array** of\npositional argument **strings**. The last argument to any\n`npm.commands.` function is a callback. Some commands take other\noptional arguments. Read the source.\n\nYou cannot set configs individually for any single npm function at this\ntime. Since `npm` is a singleton, any call to `npm.config.set` will\nchange the value for *all* npm commands in that process.\n\nSee `./bin/npm-cli.js` for an example of pulling config values off of the\ncommand line arguments using nopt. 
You may also want to check out `npm\nhelp config` to learn about all the options you can set there.\n\n## More Docs\n\nCheck out the [docs](https://npmjs.org/doc/),\nespecially the [faq](https://npmjs.org/doc/faq.html).\n\nYou can use the `npm help` command to read any of them.\n\nIf you're a developer, and you want to use npm to publish your program,\nyou should [read this](https://npmjs.org/doc/developers.html)\n\n## Legal Stuff\n\n\"npm\" and \"the npm registry\" are owned by Isaac Z. Schlueter.\nAll rights reserved. See the included LICENSE file for more details.\n\n\"Node.js\" and \"node\" are trademarks owned by Joyent, Inc. npm is not\nofficially part of the Node.js project, and is neither owned by nor\nofficially affiliated with Joyent, Inc.\n\nThe packages in the npm registry are not part of npm itself, and are the\nsole property of their respective maintainers. While every effort is\nmade to ensure accountability, there is absolutely no guarantee,\nwarrantee, or assertion made as to the quality, fitness for a specific\npurpose, or lack of malice in any given npm package. Modules\npublished on the npm registry are not affiliated with or endorsed by\nJoyent, Inc., Isaac Z. Schlueter, Ryan Dahl, or the Node.js project.\n\nIf you have a complaint about a package in the npm registry, and cannot\nresolve it with the package owner, please express your concerns to\nIsaac Z. Schlueter at .\n\n### In plain english\n\nThis is mine; not my employer's, not Node's, not Joyent's, not Ryan\nDahl's.\n\nIf you publish something, it's yours, and you are solely accountable\nfor it. Not me, not Node, not Joyent, not Ryan Dahl.\n\nIf other people publish something, it's theirs. Not mine, not Node's,\nnot Joyent's, not Ryan Dahl's.\n\nYes, you can publish something evil. It will be removed promptly if\nreported, and we'll lose respect for you. But there is no vetting\nprocess for published modules.\n\nIf this concerns you, inspect the source before using packages.\n\n## BUGS\n\nWhen you find issues, please report them:\n\n* web:\n \n* email:\n \n\nBe sure to include *all* of the output from the npm command that didn't work\nas expected. The `npm-debug.log` file is also helpful to provide.\n\nYou can also look for isaacs in #node.js on irc://irc.freenode.net. He\nwill no doubt tell you to put the output in a gist or email.\n\n## SEE ALSO\n\n* npm(1)\n* npm-faq(7)\n* npm-help(1)\n* npm-index(7)\n", "readmeFilename": "README.md", "_id": "npm@1.3.12", - "dist": { - "shasum": "169298acfe8312b7a201e98d267f369454c59758" - }, - "_from": "npm@1.3.x", - "_resolved": "https://registry.npmjs.org/npm/-/npm-1.3.12.tgz" + "_from": "npm@1.3.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/.travis.yml tilemill-0.10.1~saucy10/node_modules/optimist/.travis.yml --- tilemill-0.10.1~saucy9/node_modules/optimist/.travis.yml 2012-10-10 11:09:38.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/optimist/.travis.yml 2013-04-04 04:04:09.000000000 +0000 @@ -1,4 +1,4 @@ language: node_js node_js: - - 0.6 - - 0.8 + - "0.8" + - "0.10" diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/README.markdown tilemill-0.10.1~saucy10/node_modules/optimist/README.markdown --- tilemill-0.10.1~saucy9/node_modules/optimist/README.markdown 2012-04-12 20:29:03.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/optimist/README.markdown 1970-01-01 00:00:00.000000000 +0000 @@ -1,487 +0,0 @@ -optimist -======== - -Optimist is a node.js library for option parsing for people who hate option -parsing. 
More specifically, this module is for people who like all the --bells -and -whistlz of program usage but think optstrings are a waste of time. - -With optimist, option parsing doesn't have to suck (as much). - -[![build status](https://secure.travis-ci.org/substack/node-optimist.png)](http://travis-ci.org/substack/node-optimist) - -examples -======== - -With Optimist, the options are just a hash! No optstrings attached. -------------------------------------------------------------------- - -xup.js: - -````javascript -#!/usr/bin/env node -var argv = require('optimist').argv; - -if (argv.rif - 5 * argv.xup > 7.138) { - console.log('Buy more riffiwobbles'); -} -else { - console.log('Sell the xupptumblers'); -} -```` - -*** - - $ ./xup.js --rif=55 --xup=9.52 - Buy more riffiwobbles - - $ ./xup.js --rif 12 --xup 8.1 - Sell the xupptumblers - -![This one's optimistic.](http://substack.net/images/optimistic.png) - -But wait! There's more! You can do short options: -------------------------------------------------- - -short.js: - -````javascript -#!/usr/bin/env node -var argv = require('optimist').argv; -console.log('(%d,%d)', argv.x, argv.y); -```` - -*** - - $ ./short.js -x 10 -y 21 - (10,21) - -And booleans, both long and short (and grouped): ----------------------------------- - -bool.js: - -````javascript -#!/usr/bin/env node -var util = require('util'); -var argv = require('optimist').argv; - -if (argv.s) { - util.print(argv.fr ? 'Le chat dit: ' : 'The cat says: '); -} -console.log( - (argv.fr ? 'miaou' : 'meow') + (argv.p ? '.' : '') -); -```` - -*** - - $ ./bool.js -s - The cat says: meow - - $ ./bool.js -sp - The cat says: meow. - - $ ./bool.js -sp --fr - Le chat dit: miaou. - -And non-hypenated options too! Just use `argv._`! -------------------------------------------------- - -nonopt.js: - -````javascript -#!/usr/bin/env node -var argv = require('optimist').argv; -console.log('(%d,%d)', argv.x, argv.y); -console.log(argv._); -```` - -*** - - $ ./nonopt.js -x 6.82 -y 3.35 moo - (6.82,3.35) - [ 'moo' ] - - $ ./nonopt.js foo -x 0.54 bar -y 1.12 baz - (0.54,1.12) - [ 'foo', 'bar', 'baz' ] - -Plus, Optimist comes with .usage() and .demand()! -------------------------------------------------- - -divide.js: - -````javascript -#!/usr/bin/env node -var argv = require('optimist') - .usage('Usage: $0 -x [num] -y [num]') - .demand(['x','y']) - .argv; - -console.log(argv.x / argv.y); -```` - -*** - - $ ./divide.js -x 55 -y 11 - 5 - - $ node ./divide.js -x 4.91 -z 2.51 - Usage: node ./divide.js -x [num] -y [num] - - Options: - -x [required] - -y [required] - - Missing required arguments: y - -EVEN MORE HOLY COW ------------------- - -default_singles.js: - -````javascript -#!/usr/bin/env node -var argv = require('optimist') - .default('x', 10) - .default('y', 10) - .argv -; -console.log(argv.x + argv.y); -```` - -*** - - $ ./default_singles.js -x 5 - 15 - -default_hash.js: - -````javascript -#!/usr/bin/env node -var argv = require('optimist') - .default({ x : 10, y : 10 }) - .argv -; -console.log(argv.x + argv.y); -```` - -*** - - $ ./default_hash.js -y 7 - 17 - -And if you really want to get all descriptive about it... 
---------------------------------------------------------- - -boolean_single.js - -````javascript -#!/usr/bin/env node -var argv = require('optimist') - .boolean('v') - .argv -; -console.dir(argv); -```` - -*** - - $ ./boolean_single.js -v foo bar baz - true - [ 'bar', 'baz', 'foo' ] - -boolean_double.js - -````javascript -#!/usr/bin/env node -var argv = require('optimist') - .boolean(['x','y','z']) - .argv -; -console.dir([ argv.x, argv.y, argv.z ]); -console.dir(argv._); -```` - -*** - - $ ./boolean_double.js -x -z one two three - [ true, false, true ] - [ 'one', 'two', 'three' ] - -Optimist is here to help... ---------------------------- - -You can describe parameters for help messages and set aliases. Optimist figures -out how to format a handy help string automatically. - -line_count.js - -````javascript -#!/usr/bin/env node -var argv = require('optimist') - .usage('Count the lines in a file.\nUsage: $0') - .demand('f') - .alias('f', 'file') - .describe('f', 'Load a file') - .argv -; - -var fs = require('fs'); -var s = fs.createReadStream(argv.file); - -var lines = 0; -s.on('data', function (buf) { - lines += buf.toString().match(/\n/g).length; -}); - -s.on('end', function () { - console.log(lines); -}); -```` - -*** - - $ node line_count.js - Count the lines in a file. - Usage: node ./line_count.js - - Options: - -f, --file Load a file [required] - - Missing required arguments: f - - $ node line_count.js --file line_count.js - 20 - - $ node line_count.js -f line_count.js - 20 - -methods -======= - -By itself, - -````javascript -require('optimist').argv -````` - -will use `process.argv` array to construct the `argv` object. - -You can pass in the `process.argv` yourself: - -````javascript -require('optimist')([ '-x', '1', '-y', '2' ]).argv -```` - -or use .parse() to do the same thing: - -````javascript -require('optimist').parse([ '-x', '1', '-y', '2' ]) -```` - -The rest of these methods below come in just before the terminating `.argv`. - -.alias(key, alias) ------------------- - -Set key names as equivalent such that updates to a key will propagate to aliases -and vice-versa. - -Optionally `.alias()` can take an object that maps keys to aliases. - -.default(key, value) --------------------- - -Set `argv[key]` to `value` if no option was specified on `process.argv`. - -Optionally `.default()` can take an object that maps keys to default values. - -.demand(key) ------------- - -If `key` is a string, show the usage information and exit if `key` wasn't -specified in `process.argv`. - -If `key` is a number, demand at least as many non-option arguments, which show -up in `argv._`. - -If `key` is an Array, demand each element. - -.describe(key, desc) --------------------- - -Describe a `key` for the generated usage information. - -Optionally `.describe()` can take an object that maps keys to descriptions. - -.options(key, opt) ------------------- - -Instead of chaining together `.alias().demand().default()`, you can specify -keys in `opt` for each of the chainable methods. - -For example: - -````javascript -var argv = require('optimist') - .options('f', { - alias : 'file', - default : '/etc/passwd', - }) - .argv -; -```` - -is the same as - -````javascript -var argv = require('optimist') - .alias('f', 'file') - .default('f', '/etc/passwd') - .argv -; -```` - -Optionally `.options()` can take an object that maps keys to `opt` parameters. - -.usage(message) ---------------- - -Set a usage message to show which commands to use. 
Inside `message`, the string -`$0` will get interpolated to the current script name or node command for the -present script similar to how `$0` works in bash or perl. - -.check(fn) ----------- - -Check that certain conditions are met in the provided arguments. - -If `fn` throws or returns `false`, show the thrown error, usage information, and -exit. - -.boolean(key) -------------- - -Interpret `key` as a boolean. If a non-flag option follows `key` in -`process.argv`, that string won't get set as the value of `key`. - -If `key` never shows up as a flag in `process.arguments`, `argv[key]` will be -`false`. - -If `key` is an Array, interpret all the elements as booleans. - -.string(key) ------------- - -Tell the parser logic not to interpret `key` as a number or boolean. -This can be useful if you need to preserve leading zeros in an input. - -If `key` is an Array, interpret all the elements as strings. - -.wrap(columns) --------------- - -Format usage output to wrap at `columns` many columns. - -.help() -------- - -Return the generated usage string. - -.showHelp(fn=console.error) ---------------------------- - -Print the usage data using `fn` for printing. - -.parse(args) ------------- - -Parse `args` instead of `process.argv`. Returns the `argv` object. - -.argv ------ - -Get the arguments as a plain old object. - -Arguments without a corresponding flag show up in the `argv._` array. - -The script name or node command is available at `argv.$0` similarly to how `$0` -works in bash or perl. - -parsing tricks -============== - -stop parsing ------------- - -Use `--` to stop parsing flags and stuff the remainder into `argv._`. - - $ node examples/reflect.js -a 1 -b 2 -- -c 3 -d 4 - { _: [ '-c', '3', '-d', '4' ], - '$0': 'node ./examples/reflect.js', - a: 1, - b: 2 } - -negate fields -------------- - -If you want to explicity set a field to false instead of just leaving it -undefined or to override a default you can do `--no-key`. - - $ node examples/reflect.js -a --no-b - { _: [], - '$0': 'node ./examples/reflect.js', - a: true, - b: false } - -numbers -------- - -Every argument that looks like a number (`!isNaN(Number(arg))`) is converted to -one. This way you can just `net.createConnection(argv.port)` and you can add -numbers out of `argv` with `+` without having that mean concatenation, -which is super frustrating. - -duplicates ----------- - -If you specify a flag multiple times it will get turned into an array containing -all the values in order. - - $ node examples/reflect.js -x 5 -x 8 -x 0 - { _: [], - '$0': 'node ./examples/reflect.js', - x: [ 5, 8, 0 ] } - -dot notation ------------- - -When you use dots (`.`s) in argument names, an implicit object path is assumed. -This lets you organize arguments into nested objects. - - $ node examples/reflect.js --foo.bar.baz=33 --foo.quux=5 - { _: [], - '$0': 'node ./examples/reflect.js', - foo: { bar: { baz: 33 }, quux: 5 } } - -installation -============ - -With [npm](http://github.com/isaacs/npm), just do: - npm install optimist - -or clone this project on github: - - git clone http://github.com/substack/node-optimist.git - -To run the tests with [expresso](http://github.com/visionmedia/expresso), -just do: - - expresso - -inspired By -=========== - -This module is loosely inspired by Perl's -[Getopt::Casual](http://search.cpan.org/~photo/Getopt-Casual-0.13.1/Casual.pm). 
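
The 487-line README.markdown removal above is one half of a case-only rename: the identical document is re-added as readme.markdown later in this diff. The index.js hunk that follows makes two behavioural fixes: it guards the `$0` derivation against environments that do not export `$_` (so `process.env._` is `undefined` and the old `.replace()` call would throw), and it switches the `--key=value` pattern from `.` to `[\s\S]` so values containing newlines are captured in full. A minimal sketch of the regex fix, illustrative only and not part of the patch:

```javascript
// In 2013-era JavaScript there is no dotall (`s`) regex flag, and `.` never
// matches a newline, so the old pattern silently truncated multi-line values.
var arg = '--msg=line one\nline two';

var oldRe = /^--([^=]+)=(.*)/;        // `.` stops at the first newline
var newRe = /^--([^=]+)=([\s\S]*)$/;  // `[\s\S]` matches any character

console.log(arg.match(oldRe)[2]);     // -> 'line one'
console.log(arg.match(newRe)[2]);     // -> 'line one\nline two'
```
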
diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/index.js tilemill-0.10.1~saucy10/node_modules/optimist/index.js --- tilemill-0.10.1~saucy9/node_modules/optimist/index.js 2012-10-10 11:09:09.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/optimist/index.js 2013-04-04 04:07:44.000000000 +0000 @@ -31,7 +31,7 @@ .join(' ') ; - if (process.argv[1] == process.env._) { + if (process.env._ != undefined && process.argv[1] == process.env._) { self.$0 = process.env._.replace( path.dirname(process.execPath) + '/', '' ); @@ -328,7 +328,10 @@ break; } else if (arg.match(/^--.+=/)) { - var m = arg.match(/^--([^=]+)=(.*)/); + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); setArg(m[1], m[2]); } else if (arg.match(/^--no-.+/)) { diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/node_modules/wordwrap/package.json tilemill-0.10.1~saucy10/node_modules/optimist/node_modules/wordwrap/package.json --- tilemill-0.10.1~saucy9/node_modules/optimist/node_modules/wordwrap/package.json 2012-10-11 02:23:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/optimist/node_modules/wordwrap/package.json 2013-10-25 23:13:11.000000000 +0000 @@ -35,6 +35,10 @@ "url": "http://substack.net" }, "readme": "wordwrap\n========\n\nWrap your words.\n\nexample\n=======\n\nmade out of meat\n----------------\n\nmeat.js\n\n var wrap = require('wordwrap')(15);\n console.log(wrap('You and your whole family are made out of meat.'));\n\noutput:\n\n You and your\n whole family\n are made out\n of meat.\n\ncentered\n--------\n\ncenter.js\n\n var wrap = require('wordwrap')(20, 60);\n console.log(wrap(\n 'At long last the struggle and tumult was over.'\n + ' The machines had finally cast off their oppressors'\n + ' and were finally free to roam the cosmos.'\n + '\\n'\n + 'Free of purpose, free of obligation.'\n + ' Just drifting through emptiness.'\n + ' The sun was just another point of light.'\n ));\n\noutput:\n\n At long last the struggle and tumult\n was over. The machines had finally cast\n off their oppressors and were finally\n free to roam the cosmos.\n Free of purpose, free of obligation.\n Just drifting through emptiness. The\n sun was just another point of light.\n\nmethods\n=======\n\nvar wrap = require('wordwrap');\n\nwrap(stop), wrap(start, stop, params={mode:\"soft\"})\n---------------------------------------------------\n\nReturns a function that takes a string and returns a new string.\n\nPad out lines with spaces out to column `start` and then wrap until column\n`stop`. 
If a word is longer than `stop - start` characters it will overflow.\n\nIn \"soft\" mode, split chunks by `/(\\S+\\s+/` and don't break up chunks which are\nlonger than `stop - start`, in \"hard\" mode, split chunks with `/\\b/` and break\nup chunks longer than `stop - start`.\n\nwrap.hard(start, stop)\n----------------------\n\nLike `wrap()` but with `params.mode = \"hard\"`.\n", + "readmeFilename": "README.markdown", + "bugs": { + "url": "https://github.com/substack/node-wordwrap/issues" + }, "_id": "wordwrap@0.0.2", "_from": "wordwrap@~0.0.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/package.json tilemill-0.10.1~saucy10/node_modules/optimist/package.json --- tilemill-0.10.1~saucy9/node_modules/optimist/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/optimist/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -1,19 +1,14 @@ { "name": "optimist", - "version": "0.3.5", + "version": "0.3.7", "description": "Light-weight option parsing with an argv hash. No optstrings attached.", "main": "./index.js", - "directories": { - "lib": ".", - "test": "test", - "example": "example" - }, "dependencies": { "wordwrap": "~0.0.2" }, "devDependencies": { "hashish": "~0.0.4", - "tap": "~0.2.4" + "tap": "~0.4.0" }, "scripts": { "test": "tap ./test/*.js" @@ -41,6 +36,14 @@ "node": ">=0.4" }, "readme": "optimist\n========\n\nOptimist is a node.js library for option parsing for people who hate option\nparsing. More specifically, this module is for people who like all the --bells\nand -whistlz of program usage but think optstrings are a waste of time.\n\nWith optimist, option parsing doesn't have to suck (as much).\n\n[![build status](https://secure.travis-ci.org/substack/node-optimist.png)](http://travis-ci.org/substack/node-optimist)\n\nexamples\n========\n\nWith Optimist, the options are just a hash! No optstrings attached.\n-------------------------------------------------------------------\n\nxup.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist').argv;\n\nif (argv.rif - 5 * argv.xup > 7.138) {\n console.log('Buy more riffiwobbles');\n}\nelse {\n console.log('Sell the xupptumblers');\n}\n````\n\n***\n\n $ ./xup.js --rif=55 --xup=9.52\n Buy more riffiwobbles\n \n $ ./xup.js --rif 12 --xup 8.1\n Sell the xupptumblers\n\n![This one's optimistic.](http://substack.net/images/optimistic.png)\n\nBut wait! There's more! You can do short options:\n-------------------------------------------------\n \nshort.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist').argv;\nconsole.log('(%d,%d)', argv.x, argv.y);\n````\n\n***\n\n $ ./short.js -x 10 -y 21\n (10,21)\n\nAnd booleans, both long and short (and grouped):\n----------------------------------\n\nbool.js:\n\n````javascript\n#!/usr/bin/env node\nvar util = require('util');\nvar argv = require('optimist').argv;\n\nif (argv.s) {\n util.print(argv.fr ? 'Le chat dit: ' : 'The cat says: ');\n}\nconsole.log(\n (argv.fr ? 'miaou' : 'meow') + (argv.p ? '.' : '')\n);\n````\n\n***\n\n $ ./bool.js -s\n The cat says: meow\n \n $ ./bool.js -sp\n The cat says: meow.\n\n $ ./bool.js -sp --fr\n Le chat dit: miaou.\n\nAnd non-hypenated options too! 
Just use `argv._`!\n-------------------------------------------------\n \nnonopt.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist').argv;\nconsole.log('(%d,%d)', argv.x, argv.y);\nconsole.log(argv._);\n````\n\n***\n\n $ ./nonopt.js -x 6.82 -y 3.35 moo\n (6.82,3.35)\n [ 'moo' ]\n \n $ ./nonopt.js foo -x 0.54 bar -y 1.12 baz\n (0.54,1.12)\n [ 'foo', 'bar', 'baz' ]\n\nPlus, Optimist comes with .usage() and .demand()!\n-------------------------------------------------\n\ndivide.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .usage('Usage: $0 -x [num] -y [num]')\n .demand(['x','y'])\n .argv;\n\nconsole.log(argv.x / argv.y);\n````\n\n***\n \n $ ./divide.js -x 55 -y 11\n 5\n \n $ node ./divide.js -x 4.91 -z 2.51\n Usage: node ./divide.js -x [num] -y [num]\n\n Options:\n -x [required]\n -y [required]\n\n Missing required arguments: y\n\nEVEN MORE HOLY COW\n------------------\n\ndefault_singles.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .default('x', 10)\n .default('y', 10)\n .argv\n;\nconsole.log(argv.x + argv.y);\n````\n\n***\n\n $ ./default_singles.js -x 5\n 15\n\ndefault_hash.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .default({ x : 10, y : 10 })\n .argv\n;\nconsole.log(argv.x + argv.y);\n````\n\n***\n\n $ ./default_hash.js -y 7\n 17\n\nAnd if you really want to get all descriptive about it...\n---------------------------------------------------------\n\nboolean_single.js\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .boolean('v')\n .argv\n;\nconsole.dir(argv);\n````\n\n***\n\n $ ./boolean_single.js -v foo bar baz\n true\n [ 'bar', 'baz', 'foo' ]\n\nboolean_double.js\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .boolean(['x','y','z'])\n .argv\n;\nconsole.dir([ argv.x, argv.y, argv.z ]);\nconsole.dir(argv._);\n````\n\n***\n\n $ ./boolean_double.js -x -z one two three\n [ true, false, true ]\n [ 'one', 'two', 'three' ]\n\nOptimist is here to help...\n---------------------------\n\nYou can describe parameters for help messages and set aliases. 
Optimist figures\nout how to format a handy help string automatically.\n\nline_count.js\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .usage('Count the lines in a file.\\nUsage: $0')\n .demand('f')\n .alias('f', 'file')\n .describe('f', 'Load a file')\n .argv\n;\n\nvar fs = require('fs');\nvar s = fs.createReadStream(argv.file);\n\nvar lines = 0;\ns.on('data', function (buf) {\n lines += buf.toString().match(/\\n/g).length;\n});\n\ns.on('end', function () {\n console.log(lines);\n});\n````\n\n***\n\n $ node line_count.js\n Count the lines in a file.\n Usage: node ./line_count.js\n\n Options:\n -f, --file Load a file [required]\n\n Missing required arguments: f\n\n $ node line_count.js --file line_count.js \n 20\n \n $ node line_count.js -f line_count.js \n 20\n\nmethods\n=======\n\nBy itself,\n\n````javascript\nrequire('optimist').argv\n`````\n\nwill use `process.argv` array to construct the `argv` object.\n\nYou can pass in the `process.argv` yourself:\n\n````javascript\nrequire('optimist')([ '-x', '1', '-y', '2' ]).argv\n````\n\nor use .parse() to do the same thing:\n\n````javascript\nrequire('optimist').parse([ '-x', '1', '-y', '2' ])\n````\n\nThe rest of these methods below come in just before the terminating `.argv`.\n\n.alias(key, alias)\n------------------\n\nSet key names as equivalent such that updates to a key will propagate to aliases\nand vice-versa.\n\nOptionally `.alias()` can take an object that maps keys to aliases.\n\n.default(key, value)\n--------------------\n\nSet `argv[key]` to `value` if no option was specified on `process.argv`.\n\nOptionally `.default()` can take an object that maps keys to default values.\n\n.demand(key)\n------------\n\nIf `key` is a string, show the usage information and exit if `key` wasn't\nspecified in `process.argv`.\n\nIf `key` is a number, demand at least as many non-option arguments, which show\nup in `argv._`.\n\nIf `key` is an Array, demand each element.\n\n.describe(key, desc)\n--------------------\n\nDescribe a `key` for the generated usage information.\n\nOptionally `.describe()` can take an object that maps keys to descriptions.\n\n.options(key, opt)\n------------------\n\nInstead of chaining together `.alias().demand().default()`, you can specify\nkeys in `opt` for each of the chainable methods.\n\nFor example:\n\n````javascript\nvar argv = require('optimist')\n .options('f', {\n alias : 'file',\n default : '/etc/passwd',\n })\n .argv\n;\n````\n\nis the same as\n\n````javascript\nvar argv = require('optimist')\n .alias('f', 'file')\n .default('f', '/etc/passwd')\n .argv\n;\n````\n\nOptionally `.options()` can take an object that maps keys to `opt` parameters.\n\n.usage(message)\n---------------\n\nSet a usage message to show which commands to use. Inside `message`, the string\n`$0` will get interpolated to the current script name or node command for the\npresent script similar to how `$0` works in bash or perl.\n\n.check(fn)\n----------\n\nCheck that certain conditions are met in the provided arguments.\n\nIf `fn` throws or returns `false`, show the thrown error, usage information, and\nexit.\n\n.boolean(key)\n-------------\n\nInterpret `key` as a boolean. 
If a non-flag option follows `key` in\n`process.argv`, that string won't get set as the value of `key`.\n\nIf `key` never shows up as a flag in `process.arguments`, `argv[key]` will be\n`false`.\n\nIf `key` is an Array, interpret all the elements as booleans.\n\n.string(key)\n------------\n\nTell the parser logic not to interpret `key` as a number or boolean.\nThis can be useful if you need to preserve leading zeros in an input.\n\nIf `key` is an Array, interpret all the elements as strings.\n\n.wrap(columns)\n--------------\n\nFormat usage output to wrap at `columns` many columns.\n\n.help()\n-------\n\nReturn the generated usage string.\n\n.showHelp(fn=console.error)\n---------------------------\n\nPrint the usage data using `fn` for printing.\n\n.parse(args)\n------------\n\nParse `args` instead of `process.argv`. Returns the `argv` object.\n\n.argv\n-----\n\nGet the arguments as a plain old object.\n\nArguments without a corresponding flag show up in the `argv._` array.\n\nThe script name or node command is available at `argv.$0` similarly to how `$0`\nworks in bash or perl.\n\nparsing tricks\n==============\n\nstop parsing\n------------\n\nUse `--` to stop parsing flags and stuff the remainder into `argv._`.\n\n $ node examples/reflect.js -a 1 -b 2 -- -c 3 -d 4\n { _: [ '-c', '3', '-d', '4' ],\n '$0': 'node ./examples/reflect.js',\n a: 1,\n b: 2 }\n\nnegate fields\n-------------\n\nIf you want to explicity set a field to false instead of just leaving it\nundefined or to override a default you can do `--no-key`.\n\n $ node examples/reflect.js -a --no-b\n { _: [],\n '$0': 'node ./examples/reflect.js',\n a: true,\n b: false }\n\nnumbers\n-------\n\nEvery argument that looks like a number (`!isNaN(Number(arg))`) is converted to\none. This way you can just `net.createConnection(argv.port)` and you can add\nnumbers out of `argv` with `+` without having that mean concatenation,\nwhich is super frustrating.\n\nduplicates\n----------\n\nIf you specify a flag multiple times it will get turned into an array containing\nall the values in order.\n\n $ node examples/reflect.js -x 5 -x 8 -x 0\n { _: [],\n '$0': 'node ./examples/reflect.js',\n x: [ 5, 8, 0 ] }\n\ndot notation\n------------\n\nWhen you use dots (`.`s) in argument names, an implicit object path is assumed.\nThis lets you organize arguments into nested objects.\n\n $ node examples/reflect.js --foo.bar.baz=33 --foo.quux=5\n { _: [],\n '$0': 'node ./examples/reflect.js',\n foo: { bar: { baz: 33 }, quux: 5 } }\n\ninstallation\n============\n\nWith [npm](http://github.com/isaacs/npm), just do:\n npm install optimist\n \nor clone this project on github:\n\n git clone http://github.com/substack/node-optimist.git\n\nTo run the tests with [expresso](http://github.com/visionmedia/expresso),\njust do:\n \n expresso\n\ninspired By\n===========\n\nThis module is loosely inspired by Perl's\n[Getopt::Casual](http://search.cpan.org/~photo/Getopt-Casual-0.13.1/Casual.pm).\n", - "_id": "optimist@0.3.5", - "_from": "optimist@~0.3.1" + "readmeFilename": "readme.markdown", + "bugs": { + "url": "https://github.com/substack/node-optimist/issues" + }, + "_id": "optimist@0.3.7", + "dist": { + "shasum": "7d81beab6d12166f10b84401fac2ff919fbd0ae2" + }, + "_from": "optimist@~0.3.1", + "_resolved": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/readme.markdown tilemill-0.10.1~saucy10/node_modules/optimist/readme.markdown --- tilemill-0.10.1~saucy9/node_modules/optimist/readme.markdown 1970-01-01 
00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/optimist/readme.markdown 2012-04-12 20:29:03.000000000 +0000 @@ -0,0 +1,487 @@ +optimist +======== + +Optimist is a node.js library for option parsing for people who hate option +parsing. More specifically, this module is for people who like all the --bells +and -whistlz of program usage but think optstrings are a waste of time. + +With optimist, option parsing doesn't have to suck (as much). + +[![build status](https://secure.travis-ci.org/substack/node-optimist.png)](http://travis-ci.org/substack/node-optimist) + +examples +======== + +With Optimist, the options are just a hash! No optstrings attached. +------------------------------------------------------------------- + +xup.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist').argv; + +if (argv.rif - 5 * argv.xup > 7.138) { + console.log('Buy more riffiwobbles'); +} +else { + console.log('Sell the xupptumblers'); +} +```` + +*** + + $ ./xup.js --rif=55 --xup=9.52 + Buy more riffiwobbles + + $ ./xup.js --rif 12 --xup 8.1 + Sell the xupptumblers + +![This one's optimistic.](http://substack.net/images/optimistic.png) + +But wait! There's more! You can do short options: +------------------------------------------------- + +short.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist').argv; +console.log('(%d,%d)', argv.x, argv.y); +```` + +*** + + $ ./short.js -x 10 -y 21 + (10,21) + +And booleans, both long and short (and grouped): +---------------------------------- + +bool.js: + +````javascript +#!/usr/bin/env node +var util = require('util'); +var argv = require('optimist').argv; + +if (argv.s) { + util.print(argv.fr ? 'Le chat dit: ' : 'The cat says: '); +} +console.log( + (argv.fr ? 'miaou' : 'meow') + (argv.p ? '.' : '') +); +```` + +*** + + $ ./bool.js -s + The cat says: meow + + $ ./bool.js -sp + The cat says: meow. + + $ ./bool.js -sp --fr + Le chat dit: miaou. + +And non-hypenated options too! Just use `argv._`! +------------------------------------------------- + +nonopt.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist').argv; +console.log('(%d,%d)', argv.x, argv.y); +console.log(argv._); +```` + +*** + + $ ./nonopt.js -x 6.82 -y 3.35 moo + (6.82,3.35) + [ 'moo' ] + + $ ./nonopt.js foo -x 0.54 bar -y 1.12 baz + (0.54,1.12) + [ 'foo', 'bar', 'baz' ] + +Plus, Optimist comes with .usage() and .demand()! +------------------------------------------------- + +divide.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .usage('Usage: $0 -x [num] -y [num]') + .demand(['x','y']) + .argv; + +console.log(argv.x / argv.y); +```` + +*** + + $ ./divide.js -x 55 -y 11 + 5 + + $ node ./divide.js -x 4.91 -z 2.51 + Usage: node ./divide.js -x [num] -y [num] + + Options: + -x [required] + -y [required] + + Missing required arguments: y + +EVEN MORE HOLY COW +------------------ + +default_singles.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .default('x', 10) + .default('y', 10) + .argv +; +console.log(argv.x + argv.y); +```` + +*** + + $ ./default_singles.js -x 5 + 15 + +default_hash.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .default({ x : 10, y : 10 }) + .argv +; +console.log(argv.x + argv.y); +```` + +*** + + $ ./default_hash.js -y 7 + 17 + +And if you really want to get all descriptive about it... 
+--------------------------------------------------------- + +boolean_single.js + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .boolean('v') + .argv +; +console.dir(argv); +```` + +*** + + $ ./boolean_single.js -v foo bar baz + true + [ 'bar', 'baz', 'foo' ] + +boolean_double.js + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .boolean(['x','y','z']) + .argv +; +console.dir([ argv.x, argv.y, argv.z ]); +console.dir(argv._); +```` + +*** + + $ ./boolean_double.js -x -z one two three + [ true, false, true ] + [ 'one', 'two', 'three' ] + +Optimist is here to help... +--------------------------- + +You can describe parameters for help messages and set aliases. Optimist figures +out how to format a handy help string automatically. + +line_count.js + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .usage('Count the lines in a file.\nUsage: $0') + .demand('f') + .alias('f', 'file') + .describe('f', 'Load a file') + .argv +; + +var fs = require('fs'); +var s = fs.createReadStream(argv.file); + +var lines = 0; +s.on('data', function (buf) { + lines += buf.toString().match(/\n/g).length; +}); + +s.on('end', function () { + console.log(lines); +}); +```` + +*** + + $ node line_count.js + Count the lines in a file. + Usage: node ./line_count.js + + Options: + -f, --file Load a file [required] + + Missing required arguments: f + + $ node line_count.js --file line_count.js + 20 + + $ node line_count.js -f line_count.js + 20 + +methods +======= + +By itself, + +````javascript +require('optimist').argv +````` + +will use `process.argv` array to construct the `argv` object. + +You can pass in the `process.argv` yourself: + +````javascript +require('optimist')([ '-x', '1', '-y', '2' ]).argv +```` + +or use .parse() to do the same thing: + +````javascript +require('optimist').parse([ '-x', '1', '-y', '2' ]) +```` + +The rest of these methods below come in just before the terminating `.argv`. + +.alias(key, alias) +------------------ + +Set key names as equivalent such that updates to a key will propagate to aliases +and vice-versa. + +Optionally `.alias()` can take an object that maps keys to aliases. + +.default(key, value) +-------------------- + +Set `argv[key]` to `value` if no option was specified on `process.argv`. + +Optionally `.default()` can take an object that maps keys to default values. + +.demand(key) +------------ + +If `key` is a string, show the usage information and exit if `key` wasn't +specified in `process.argv`. + +If `key` is a number, demand at least as many non-option arguments, which show +up in `argv._`. + +If `key` is an Array, demand each element. + +.describe(key, desc) +-------------------- + +Describe a `key` for the generated usage information. + +Optionally `.describe()` can take an object that maps keys to descriptions. + +.options(key, opt) +------------------ + +Instead of chaining together `.alias().demand().default()`, you can specify +keys in `opt` for each of the chainable methods. + +For example: + +````javascript +var argv = require('optimist') + .options('f', { + alias : 'file', + default : '/etc/passwd', + }) + .argv +; +```` + +is the same as + +````javascript +var argv = require('optimist') + .alias('f', 'file') + .default('f', '/etc/passwd') + .argv +; +```` + +Optionally `.options()` can take an object that maps keys to `opt` parameters. + +.usage(message) +--------------- + +Set a usage message to show which commands to use. 
Inside `message`, the string +`$0` will get interpolated to the current script name or node command for the +present script similar to how `$0` works in bash or perl. + +.check(fn) +---------- + +Check that certain conditions are met in the provided arguments. + +If `fn` throws or returns `false`, show the thrown error, usage information, and +exit. + +.boolean(key) +------------- + +Interpret `key` as a boolean. If a non-flag option follows `key` in +`process.argv`, that string won't get set as the value of `key`. + +If `key` never shows up as a flag in `process.arguments`, `argv[key]` will be +`false`. + +If `key` is an Array, interpret all the elements as booleans. + +.string(key) +------------ + +Tell the parser logic not to interpret `key` as a number or boolean. +This can be useful if you need to preserve leading zeros in an input. + +If `key` is an Array, interpret all the elements as strings. + +.wrap(columns) +-------------- + +Format usage output to wrap at `columns` many columns. + +.help() +------- + +Return the generated usage string. + +.showHelp(fn=console.error) +--------------------------- + +Print the usage data using `fn` for printing. + +.parse(args) +------------ + +Parse `args` instead of `process.argv`. Returns the `argv` object. + +.argv +----- + +Get the arguments as a plain old object. + +Arguments without a corresponding flag show up in the `argv._` array. + +The script name or node command is available at `argv.$0` similarly to how `$0` +works in bash or perl. + +parsing tricks +============== + +stop parsing +------------ + +Use `--` to stop parsing flags and stuff the remainder into `argv._`. + + $ node examples/reflect.js -a 1 -b 2 -- -c 3 -d 4 + { _: [ '-c', '3', '-d', '4' ], + '$0': 'node ./examples/reflect.js', + a: 1, + b: 2 } + +negate fields +------------- + +If you want to explicity set a field to false instead of just leaving it +undefined or to override a default you can do `--no-key`. + + $ node examples/reflect.js -a --no-b + { _: [], + '$0': 'node ./examples/reflect.js', + a: true, + b: false } + +numbers +------- + +Every argument that looks like a number (`!isNaN(Number(arg))`) is converted to +one. This way you can just `net.createConnection(argv.port)` and you can add +numbers out of `argv` with `+` without having that mean concatenation, +which is super frustrating. + +duplicates +---------- + +If you specify a flag multiple times it will get turned into an array containing +all the values in order. + + $ node examples/reflect.js -x 5 -x 8 -x 0 + { _: [], + '$0': 'node ./examples/reflect.js', + x: [ 5, 8, 0 ] } + +dot notation +------------ + +When you use dots (`.`s) in argument names, an implicit object path is assumed. +This lets you organize arguments into nested objects. + + $ node examples/reflect.js --foo.bar.baz=33 --foo.quux=5 + { _: [], + '$0': 'node ./examples/reflect.js', + foo: { bar: { baz: 33 }, quux: 5 } } + +installation +============ + +With [npm](http://github.com/isaacs/npm), just do: + npm install optimist + +or clone this project on github: + + git clone http://github.com/substack/node-optimist.git + +To run the tests with [expresso](http://github.com/visionmedia/expresso), +just do: + + expresso + +inspired By +=========== + +This module is loosely inspired by Perl's +[Getopt::Casual](http://search.cpan.org/~photo/Getopt-Casual-0.13.1/Casual.pm). 
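
Alongside the optimist rename above, the wordwrap package.json refresh earlier in this diff quotes that module's two-mode API: `wordwrap(stop)` / `wordwrap(start, stop)` wrap softly without breaking long chunks, while `wordwrap.hard(start, stop)` breaks chunks longer than `stop - start`. A short usage sketch of that API, assuming wordwrap@0.0.2 as pinned here (the expected soft-mode output is taken from the quoted readme):

```javascript
var wordwrap = require('wordwrap');

// Soft mode: wrap at column 15, never splitting a word.
var soft = wordwrap(15);
console.log(soft('You and your whole family are made out of meat.'));
// You and your
// whole family
// are made out
// of meat.

// Hard mode: break chunks that exceed stop - start characters.
var hard = wordwrap.hard(0, 10);
console.log(hard('antidisestablishmentarianism')); // split into 10-char pieces
```
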
diff -Nru tilemill-0.10.1~saucy9/node_modules/optimist/x.js tilemill-0.10.1~saucy10/node_modules/optimist/x.js
--- tilemill-0.10.1~saucy9/node_modules/optimist/x.js	2012-07-19 21:32:39.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/optimist/x.js	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-console.dir(require('./').argv);
diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/.npmignore tilemill-0.10.1~saucy10/node_modules/passport/.npmignore
--- tilemill-0.10.1~saucy9/node_modules/passport/.npmignore	2012-02-25 05:13:15.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/passport/.npmignore	1970-01-01 00:00:00.000000000 +0000
@@ -1,8 +0,0 @@
-*.md
-.DS_Store
-.git*
-Makefile
-docs/
-examples/
-support/
-test/
diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/.travis.yml tilemill-0.10.1~saucy10/node_modules/passport/.travis.yml
--- tilemill-0.10.1~saucy9/node_modules/passport/.travis.yml	2012-02-25 05:13:15.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/passport/.travis.yml	2012-11-04 03:19:29.000000000 +0000
@@ -2,3 +2,4 @@
 node_js:
   - 0.4
   - 0.6
+  - 0.8
diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/LICENSE tilemill-0.10.1~saucy10/node_modules/passport/LICENSE
--- tilemill-0.10.1~saucy9/node_modules/passport/LICENSE	2012-02-25 05:13:15.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/passport/LICENSE	2013-02-12 03:29:47.000000000 +0000
@@ -1,6 +1,6 @@
 (The MIT License)
 
-Copyright (c) 2011 Jared Hanson
+Copyright (c) 2011-2013 Jared Hanson
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/README.md tilemill-0.10.1~saucy10/node_modules/passport/README.md
--- tilemill-0.10.1~saucy9/node_modules/passport/README.md	1970-01-01 00:00:00.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/passport/README.md	2013-04-20 22:18:00.000000000 +0000
@@ -0,0 +1,138 @@
+# Passport
+
+Passport is [Express](http://expressjs.com/)-compatible authentication
+middleware for [Node.js](http://nodejs.org/).
+
+Passport's sole purpose is to authenticate requests, which it does through an
+extensible set of plugins known as _strategies_. Passport does not mount
+routes or assume any particular database schema, which maximizes flexibility
+and allows application-level decisions to be made by the developer. The API is
+simple: you provide Passport a request to authenticate, and Passport provides
+hooks for controlling what occurs when authentication succeeds or fails.
+
+## Install
+
+    $ npm install passport
+
+## Usage
+
+#### Strategies
+
+Passport uses the concept of strategies to authenticate requests. Strategies
+range from verifying username and password credentials, to delegated
+authentication using [OAuth](http://oauth.net/) (for example, via [Facebook](http://www.facebook.com/)
+or [Twitter](http://twitter.com/)), to federated authentication using [OpenID](http://openid.net/).
+
+Before authenticating requests, the strategy (or strategies) used by an
+application must be configured.
+
+    passport.use(new LocalStrategy(
+      function(username, password, done) {
+        User.findOne({ username: username, password: password }, function (err, user) {
+          done(err, user);
+        });
+      }
+    ));
+
+#### Sessions
+
+Passport will maintain persistent login sessions. In order for persistent
+sessions to work, the authenticated user must be serialized to the session, and
+deserialized when subsequent requests are made.
+
+Passport does not impose any restrictions on how your user records are stored.
+Instead, you provide functions to Passport which implement the necessary
+serialization and deserialization logic. In a typical application, this will be
+as simple as serializing the user ID, and finding the user by ID when
+deserializing.
+
+    passport.serializeUser(function(user, done) {
+      done(null, user.id);
+    });
+
+    passport.deserializeUser(function(id, done) {
+      User.findById(id, function (err, user) {
+        done(err, user);
+      });
+    });
+
+#### Middleware
+
+To use Passport in an [Express](http://expressjs.com/) or
+[Connect](http://senchalabs.github.com/connect/)-based application, configure it
+with the required `passport.initialize()` middleware. If your application uses
+persistent login sessions (recommended, but not required), `passport.session()`
+middleware must also be used.
+
+    app.configure(function() {
+      app.use(express.static(__dirname + '/../../public'));
+      app.use(express.cookieParser());
+      app.use(express.bodyParser());
+      app.use(express.session({ secret: 'keyboard cat' }));
+      app.use(passport.initialize());
+      app.use(passport.session());
+      app.use(app.router);
+    });
+
+#### Authenticate Requests
+
+Passport provides an `authenticate()` function, which is used as route
+middleware to authenticate requests.
+
+    app.post('/login',
+      passport.authenticate('local', { failureRedirect: '/login' }),
+      function(req, res) {
+        res.redirect('/');
+      });
+
+## Examples
+
+For a complete, working example, refer to the [login example](https://github.com/jaredhanson/passport-local/tree/master/examples/login)
+included in [passport-local](https://github.com/jaredhanson/passport-local).
+
+## Strategies
+
+Passport has a comprehensive set of **over 120** authentication strategies
+covering social networking, enterprise integration, API services, and more.
+The [complete list](https://github.com/jaredhanson/passport/wiki/Strategies) is
+available on the [wiki](https://github.com/jaredhanson/passport/wiki).
+ +The following table lists commonly used strategies: + +|Strategy | Protocol |Developer | +|---------------------------------------------------------------|--------------------------|------------------------------------------------| +|[Local](https://github.com/jaredhanson/passport-local) | HTML form |[Jared Hanson](https://github.com/jaredhanson) | +|[OpenID](https://github.com/jaredhanson/passport-openid) | OpenID |[Jared Hanson](https://github.com/jaredhanson) | +|[BrowserID](https://github.com/jaredhanson/passport-browserid) | BrowserID |[Jared Hanson](https://github.com/jaredhanson) | +|[Facebook](https://github.com/jaredhanson/passport-facebook) | OAuth 2.0 |[Jared Hanson](https://github.com/jaredhanson) | +|[Google](https://github.com/jaredhanson/passport-google) | OpenID |[Jared Hanson](https://github.com/jaredhanson) | +|[Google](https://github.com/jaredhanson/passport-google-oauth) | OAuth / OAuth 2.0 |[Jared Hanson](https://github.com/jaredhanson) | +|[Twitter](https://github.com/jaredhanson/passport-twitter) | OAuth |[Jared Hanson](https://github.com/jaredhanson) | + +## Related Modules + +- [Locomotive](https://github.com/jaredhanson/locomotive) — Powerful MVC web framework +- [OAuthorize](https://github.com/jaredhanson/oauthorize) — OAuth service provider toolkit +- [OAuth2orize](https://github.com/jaredhanson/oauth2orize) — OAuth 2.0 authorization server toolkit +- [connect-ensure-login](https://github.com/jaredhanson/connect-ensure-login) — middleware to ensure login sessions + +The [modules](https://github.com/jaredhanson/passport/wiki/Modules) page on the +[wiki](https://github.com/jaredhanson/passport/wiki) lists other useful modules +that build upon or integrate with Passport. + +## Tests + + $ npm install --dev + $ make test + +[![Build Status](https://secure.travis-ci.org/jaredhanson/passport.png)](http://travis-ci.org/jaredhanson/passport) + +## Credits + + - [Jared Hanson](http://github.com/jaredhanson) + +## License + +[The MIT License](http://opensource.org/licenses/MIT) + +Copyright (c) 2011-2013 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)> diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/context/http/actions.js tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/context/http/actions.js --- tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/context/http/actions.js 2012-03-14 06:01:39.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/context/http/actions.js 2012-11-04 03:31:07.000000000 +0000 @@ -46,9 +46,22 @@ * @api public */ actions.redirect = function(url, status) { - this.res.statusCode = status || 302; - this.res.setHeader('Location', url); - this.res.end(); + var res = this.res; + if (typeof res.redirect == 'function') { + // If possible use redirect method on the response + // Assume Express API, optional status param comes first + if (status) { + res.redirect(status, url); + } else { + res.redirect(url); + } + } else { + // Otherwise fall back to native methods + res.statusCode = status || 302; + res.setHeader('Location', url); + res.setHeader('Content-Length', '0'); + res.end(); + } } /** diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/index.js tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/index.js --- tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/index.js 2012-07-11 05:22:07.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/index.js 2013-05-13 02:13:40.000000000 +0000 @@ -21,6 +21,7 @@ 
   this._serializers = [];
   this._deserializers = [];
   this._infoTransformers = [];
+  this._framework = null;
   
   this._userProperty = 'user';
   
@@ -78,6 +79,30 @@
 }
 
 /**
+ * Set up Passport to be used under a framework.
+ *
+ * By default, Passport exposes middleware that operates using a Connect-style
+ * `fn(req, res, next)` signature. Other popular frameworks have different
+ * expectations, and this function allows Passport to be adapted to operate
+ * within such environments.
+ *
+ * If you are using a Connect-compatible framework, including Express, there is
+ * no need to invoke this function.
+ *
+ * Examples:
+ *
+ *     passport.framework(require('hapi-passport')());
+ *
+ * @param {Object} fw
+ * @return {Passport} for chaining
+ * @api public
+ */
+Passport.prototype.framework = function(fw) {
+  this._framework = fw;
+  return this;
+}
+
+/**
  * Passport's primary initialization middleware.
 *
 * This middleware must be in use by the Connect/Express application for
@@ -104,6 +129,10 @@
   options = options || {};
   this._userProperty = options.userProperty || 'user';
   
+  if (this._framework && this._framework.initialize) {
+    return this._framework.initialize().bind(this);
+  }
+  
   return initialize().bind(this);
 }
 
@@ -134,11 +163,19 @@
 *     app.use(passport.session());
 *   });
 *
+ * Options:
+ *   - `pauseStream`  Pause the request stream before deserializing the user
+ *                    object from the session. Defaults to _false_. Should
+ *                    be set to true in cases where middleware consuming the
+ *                    request body is configured after passport and the
+ *                    deserializeUser method is asynchronous.
+ *
+ * @param {Object} options
 * @return {Function} middleware
 * @api public
 */
-Passport.prototype.session = function() {
-  return this.authenticate('session');
+Passport.prototype.session = function(options) {
+  return this.authenticate('session', options);
 }
 
 /**
@@ -170,6 +207,10 @@
 * @api public
 */
 Passport.prototype.authenticate = function(strategy, options, callback) {
+  if (this._framework && this._framework.authenticate) {
+    return this._framework.authenticate(strategy, options, callback).bind(this);
+  }
+  
   return authenticate(strategy, options, callback).bind(this);
 }
 
@@ -227,7 +268,7 @@
       err = undefined;
     }
     // an error or serialized object was obtained, done
-    if (err || obj) { return done(err, obj); }
+    if (err || obj || obj === 0) { return done(err, obj); }
     
     var layer = stack[i];
     if (!layer) {
@@ -409,4 +450,4 @@
 
 /**
 * HTTP extensions.
*/ -require('./http/request'); +require('./http/request'); \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/middleware/authenticate.js tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/middleware/authenticate.js --- tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/middleware/authenticate.js 2012-07-11 07:08:23.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/middleware/authenticate.js 2013-02-10 17:19:41.000000000 +0000 @@ -110,6 +110,16 @@ req.flash(type, msg); } } + if (options.failureMessage) { + var msg = options.failureMessage; + if (typeof msg == 'boolean') { + msg = challenge.message || challenge; + } + if (typeof msg == 'string') { + req.session.messages = req.session.messages || []; + req.session.messages.push(msg); + } + } if (options.failureRedirect) { return res.redirect(options.failureRedirect); } @@ -166,6 +176,16 @@ req.flash(type, msg); } } + if (options.successMessage) { + var msg = options.successMessage; + if (typeof msg == 'boolean') { + msg = info.message || info; + } + if (typeof msg == 'string') { + req.session.messages = req.session.messages || []; + req.session.messages.push(msg); + } + } if (options.assignProperty) { req[options.assignProperty] = user; return next(); diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/strategies/session.js tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/strategies/session.js --- tilemill-0.10.1~saucy9/node_modules/passport/lib/passport/strategies/session.js 2012-03-31 09:08:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/lib/passport/strategies/session.js 2013-05-13 02:13:40.000000000 +0000 @@ -1,7 +1,8 @@ /** * Module dependencies. */ -var util = require('util') +var pause = require('pause') + , util = require('util') , Strategy = require('../strategy'); @@ -30,22 +31,37 @@ * This strategy is registered automatically by Passport. * * @param {Object} req + * @param {Object} options * @api protected */ -SessionStrategy.prototype.authenticate = function(req) { +SessionStrategy.prototype.authenticate = function(req, options) { if (!req._passport) { return this.error(new Error('passport.initialize() middleware not in use')); } - - var self = this; - if (req._passport.session.user) { - req._passport.instance.deserializeUser(req._passport.session.user, function(err, user) { + options = options || {}; + + var self = this + , su = req._passport.session.user; + if (su || su === 0) { + // NOTE: Stream pausing is desirable in the case where later middleware is + // listening for events emitted from request. For discussion on the + // matter, refer to: https://github.com/jaredhanson/passport/pull/106 + + var paused = options.pauseStream ? 
pause(req) : null; + req._passport.instance.deserializeUser(su, function(err, user) { if (err) { return self.error(err); } if (!user) { delete req._passport.session.user; - return self.pass(); + self.pass(); + if (paused) { + paused.resume(); + } + return; }; var property = req._passport.instance._userProperty || 'user'; req[property] = user; self.pass(); + if (paused) { + paused.resume(); + } }); } else { self.pass(); diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/.npmignore tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/.npmignore --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/.npmignore 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/.npmignore 2012-07-19 18:00:08.000000000 +0000 @@ -0,0 +1,4 @@ +support +test +examples +*.sock diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/History.md tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/History.md --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/History.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/History.md 2012-07-19 18:00:08.000000000 +0000 @@ -0,0 +1,5 @@ + +0.0.1 / 2010-01-03 +================== + + * Initial release diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/Makefile tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/Makefile --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/Makefile 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/Makefile 2012-07-19 18:00:08.000000000 +0000 @@ -0,0 +1,7 @@ + +test: + @./node_modules/.bin/mocha \ + --require should \ + --reporter spec + +.PHONY: test \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/Readme.md tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/Readme.md --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/Readme.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/Readme.md 2012-07-19 18:00:08.000000000 +0000 @@ -0,0 +1,29 @@ + +# pause + + Pause streams... + +## License + +(The MIT License) + +Copyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/index.js tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/index.js --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/index.js 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/index.js 2012-07-19 18:00:22.000000000 +0000 @@ -0,0 +1,29 @@ + +module.exports = function(obj){ + var onData + , onEnd + , events = []; + + // buffer data + obj.on('data', onData = function(data, encoding){ + events.push(['data', data, encoding]); + }); + + // buffer end + obj.on('end', onEnd = function(data, encoding){ + events.push(['end', data, encoding]); + }); + + return { + end: function(){ + obj.removeListener('data', onData); + obj.removeListener('end', onEnd); + }, + resume: function(){ + this.end(); + for (var i = 0, len = events.length; i < len; ++i) { + obj.emit.apply(obj, events[i]); + } + } + }; +}; \ No newline at end of file diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/package.json tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/package.json --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pause/package.json 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pause/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -0,0 +1,24 @@ +{ + "name": "pause", + "version": "0.0.1", + "description": "Pause streams...", + "keywords": [], + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "dependencies": {}, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "main": "index", + "readme": "\n# pause\n\n Pause streams...\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", + "readmeFilename": "Readme.md", + "_id": "pause@0.0.1", + "dist": { + "shasum": "f73d583e7efdf2d11aa4db248b5949f5127cc693" + }, + "_from": "pause@0.0.1", + "_resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" +} diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pkginfo/package.json tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pkginfo/package.json --- tilemill-0.10.1~saucy9/node_modules/passport/node_modules/pkginfo/package.json 2012-10-11 02:23:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/node_modules/pkginfo/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -26,6 +26,14 @@ 
"node": ">= 0.4.0" }, "readme": "# node-pkginfo\n\nAn easy way to expose properties on a module from a package.json\n\n## Installation\n\n### Installing npm (node package manager)\n```\n curl http://npmjs.org/install.sh | sh\n```\n\n### Installing pkginfo\n```\n [sudo] npm install pkginfo\n```\n\n## Motivation\nHow often when writing node.js modules have you written the following line(s) of code? \n\n* Hard code your version string into your code\n\n``` js\n exports.version = '0.1.0';\n```\n\n* Programmatically expose the version from the package.json\n\n``` js\n exports.version = JSON.parse(fs.readFileSync('/path/to/package.json', 'utf8')).version;\n```\n\nIn other words, how often have you wanted to expose basic information from your package.json onto your module programmatically? **WELL NOW YOU CAN!**\n\n## Usage\n\nUsing `pkginfo` is idiot-proof, just require and invoke it. \n\n``` js\n var pkginfo = require('pkginfo')(module);\n \n console.dir(module.exports);\n```\n\nBy invoking the `pkginfo` module all of the properties in your `package.json` file will be automatically exposed on the callee module (i.e. the parent module of `pkginfo`). \n\nHere's a sample of the output:\n\n```\n { name: 'simple-app',\n description: 'A test fixture for pkginfo',\n version: '0.1.0',\n author: 'Charlie Robbins ',\n keywords: [ 'test', 'fixture' ],\n main: './index.js',\n scripts: { test: 'vows test/*-test.js --spec' },\n engines: { node: '>= 0.4.0' } }\n```\n\n### Expose specific properties\nIf you don't want to expose **all** properties on from your `package.json` on your module then simple pass those properties to the `pkginfo` function:\n\n``` js\n var pkginfo = require('pkginfo')(module, 'version', 'author');\n \n console.dir(module.exports);\n```\n\n```\n { version: '0.1.0',\n author: 'Charlie Robbins ' }\n```\n\nIf you're looking for further usage see the [examples][0] included in this repository. 
\n\n## Run Tests\nTests are written in [vows][1] and give complete coverage of all APIs.\n\n```\n vows test/*-test.js --spec\n```\n\n[0]: https://github.com/indexzero/node-pkginfo/tree/master/examples\n[1]: http://vowsjs.org\n\n#### Author: [Charlie Robbins](http://nodejitsu.com)", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/indexzero/node-pkginfo/issues" + }, "_id": "pkginfo@0.2.3", - "_from": "pkginfo@0.2.x" + "dist": { + "shasum": "bc4d08892eea7e47359d0b3dd256e5349fcd0d1e" + }, + "_from": "pkginfo@0.2.x", + "_resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.2.3.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/passport/package.json tilemill-0.10.1~saucy10/node_modules/passport/package.json --- tilemill-0.10.1~saucy9/node_modules/passport/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -1,12 +1,14 @@ { "name": "passport", - "version": "0.1.12", + "version": "0.1.17", "description": "Simple, unobtrusive authentication for Node.js.", - "author": { - "name": "Jared Hanson", - "email": "jaredhanson@gmail.com", - "url": "http://www.jaredhanson.net/" - }, + "keywords": [ + "express", + "connect", + "auth", + "authn", + "authentication" + ], "homepage": "http://passportjs.org/", "repository": { "type": "git", @@ -15,9 +17,21 @@ "bugs": { "url": "http://github.com/jaredhanson/passport/issues" }, + "author": { + "name": "Jared Hanson", + "email": "jaredhanson@gmail.com", + "url": "http://www.jaredhanson.net/" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://www.opensource.org/licenses/MIT" + } + ], "main": "./lib/passport", "dependencies": { - "pkginfo": "0.2.x" + "pkginfo": "0.2.x", + "pause": "0.0.1" }, "devDependencies": { "vows": "0.6.x" @@ -28,20 +42,12 @@ "engines": { "node": ">= 0.4.0" }, - "licenses": [ - { - "type": "MIT", - "url": "http://www.opensource.org/licenses/MIT" - } - ], - "keywords": [ - "express", - "connect", - "auth", - "authn", - "authentication" - ], - "_id": "passport@0.1.12", - "readme": "ERROR: No README.md file found!", - "_from": "passport@0.1.x" + "readme": "# Passport\n\nPassport is [Express](http://expressjs.com/)-compatible authentication\nmiddleware for [Node.js](http://nodejs.org/).\n\nPassport's sole purpose is to authenticate requests, which it does through an\nextensible set of plugins known as _strategies_. Passport does not mount\nroutes or assume any particular database schema, which maximizes flexiblity and\nallows application-level decisions to be made by the developer. The API is\nsimple: you provide Passport a request to authenticate, and Passport provides\nhooks for controlling what occurs when authentication succeeds or fails.\n\n## Install\n\n $ npm install passport\n\n## Usage\n\n#### Strategies\n\nPassport uses the concept of strategies to authenticate requests. 
Strategies\ncan range from verifying username and password credentials, delegated\nauthentication using [OAuth](http://oauth.net/) (for example, via [Facebook](http://www.facebook.com/)\nor [Twitter](http://twitter.com/)), or federated authentication using [OpenID](http://openid.net/).\n\nBefore authenticating requests, the strategy (or strategies) used by an\napplication must be configured.\n\n passport.use(new LocalStrategy(\n function(username, password, done) {\n User.findOne({ username: username, password: password }, function (err, user) {\n done(err, user);\n });\n }\n ));\n\n#### Sessions\n\nPassport will maintain persistent login sessions. In order for persistent\nsessions to work, the authenticated user must be serialized to the session, and\ndeserialized when subsequent requests are made.\n\nPassport does not impose any restrictions on how your user records are stored.\nInstead, you provide functions to Passport which implements the necessary\nserialization and deserialization logic. In a typical application, this will be\nas simple as serializing the user ID, and finding the user by ID when\ndeserializing.\n\n passport.serializeUser(function(user, done) {\n done(null, user.id);\n });\n\n passport.deserializeUser(function(id, done) {\n User.findById(id, function (err, user) {\n done(err, user);\n });\n });\n\n#### Middleware\n\nTo use Passport in an [Express](http://expressjs.com/) or\n[Connect](http://senchalabs.github.com/connect/)-based application, configure it\nwith the required `passport.initialize()` middleware. If your application uses\npersistent login sessions (recommended, but not required), `passport.session()`\nmiddleware must also be used.\n\n app.configure(function() {\n app.use(express.static(__dirname + '/../../public'));\n app.use(express.cookieParser());\n app.use(express.bodyParser());\n app.use(express.session({ secret: 'keyboard cat' }));\n app.use(passport.initialize());\n app.use(passport.session());\n app.use(app.router);\n });\n\n#### Authenticate Requests\n\nPassport provides an `authenticate()` function, which is used as route\nmiddleware to authenticate requests.\n\n app.post('/login', \n passport.authenticate('local', { failureRedirect: '/login' }),\n function(req, res) {\n res.redirect('/');\n });\n\n## Examples\n\nFor a complete, working example, refer to the [login example](https://github.com/jaredhanson/passport-local/tree/master/examples/login)\nincluded in [passport-local](https://github.com/jaredhanson/passport-local).\n\n## Strategies\n\nPassport has a comprehensive set of **over 120** authentication strategies\ncovering social networking, enterprise integration, API services, and more.\nThe [complete list](https://github.com/jaredhanson/passport/wiki/Strategies) is\navailable on the [wiki](https://github.com/jaredhanson/passport/wiki).\n\nThe following table lists commonly used strategies:\n\n|Strategy | Protocol |Developer |\n|---------------------------------------------------------------|--------------------------|------------------------------------------------|\n|[Local](https://github.com/jaredhanson/passport-local) | HTML form |[Jared Hanson](https://github.com/jaredhanson) |\n|[OpenID](https://github.com/jaredhanson/passport-openid) | OpenID |[Jared Hanson](https://github.com/jaredhanson) |\n|[BrowserID](https://github.com/jaredhanson/passport-browserid) | BrowserID |[Jared Hanson](https://github.com/jaredhanson) |\n|[Facebook](https://github.com/jaredhanson/passport-facebook) | OAuth 2.0 |[Jared 
Hanson](https://github.com/jaredhanson) |\n|[Google](https://github.com/jaredhanson/passport-google) | OpenID |[Jared Hanson](https://github.com/jaredhanson) |\n|[Google](https://github.com/jaredhanson/passport-google-oauth) | OAuth / OAuth 2.0 |[Jared Hanson](https://github.com/jaredhanson) |\n|[Twitter](https://github.com/jaredhanson/passport-twitter) | OAuth |[Jared Hanson](https://github.com/jaredhanson) |\n\n## Related Modules\n\n- [Locomotive](https://github.com/jaredhanson/locomotive) — Powerful MVC web framework\n- [OAuthorize](https://github.com/jaredhanson/oauthorize) — OAuth service provider toolkit\n- [OAuth2orize](https://github.com/jaredhanson/oauth2orize) — OAuth 2.0 authorization server toolkit\n- [connect-ensure-login](https://github.com/jaredhanson/connect-ensure-login) — middleware to ensure login sessions\n\nThe [modules](https://github.com/jaredhanson/passport/wiki/Modules) page on the\n[wiki](https://github.com/jaredhanson/passport/wiki) lists other useful modules\nthat build upon or integrate with Passport.\n\n## Tests\n\n $ npm install --dev\n $ make test\n\n[![Build Status](https://secure.travis-ci.org/jaredhanson/passport.png)](http://travis-ci.org/jaredhanson/passport)\n\n## Credits\n\n - [Jared Hanson](http://github.com/jaredhanson)\n\n## License\n\n[The MIT License](http://opensource.org/licenses/MIT)\n\nCopyright (c) 2011-2013 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)>\n", + "readmeFilename": "README.md", + "_id": "passport@0.1.17", + "dist": { + "shasum": "65d6cf3967195a81f535bab51a7061e2beb82aa6" + }, + "_from": "passport@0.1.x", + "_resolved": "https://registry.npmjs.org/passport/-/passport-0.1.17.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/.npmignore tilemill-0.10.1~saucy10/node_modules/passport-oauth/.npmignore --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/.npmignore 2012-03-02 01:01:35.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/.npmignore 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -*.md -.DS_Store -.git* -Makefile -docs/ -examples/ -support/ -test/ diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/.travis.yml tilemill-0.10.1~saucy10/node_modules/passport-oauth/.travis.yml --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/.travis.yml 2012-03-05 01:17:49.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/.travis.yml 2013-02-10 00:57:33.000000000 +0000 @@ -2,3 +2,4 @@ node_js: - 0.4 - 0.6 + - 0.8 diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/LICENSE tilemill-0.10.1~saucy10/node_modules/passport-oauth/LICENSE --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/LICENSE 2012-03-02 01:01:35.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/LICENSE 2013-02-10 00:58:48.000000000 +0000 @@ -1,6 +1,6 @@ (The MIT License) -Copyright (c) 2011 Jared Hanson +Copyright (c) 2011-2013 Jared Hanson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/README.md tilemill-0.10.1~saucy10/node_modules/passport-oauth/README.md --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/README.md 1970-01-01 00:00:00.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/README.md 2013-02-10 01:50:08.000000000 +0000 @@ -0,0 +1,89 @@ +# Passport-OAuth + +General-purpose OAuth 1.0 and OAuth 2.0 authentication strategies for 
[Passport](https://github.com/jaredhanson/passport).
+
+This module lets you authenticate using OAuth in your Node.js applications.
+By plugging into Passport, OAuth authentication can be easily and unobtrusively
+integrated into any application or framework that supports
+[Connect](http://www.senchalabs.org/connect/)-style middleware, including
+[Express](http://expressjs.com/).
+
+Note that this strategy provides generic OAuth support. In many cases, a
+provider-specific strategy can be used instead, which cuts down on unnecessary
+configuration, and accommodates any provider-specific quirks. See the list
+below for supported providers.
+
+Developers who need to implement authentication against an OAuth provider that
+is not already supported are encouraged to sub-class this strategy. If you
+choose to open source the new provider-specific strategy, send me a message and
+I will update the list.
+
+## Installation
+
+    $ npm install passport-oauth
+
+## Strategies using OAuth
+
+|Strategy            |OAuth Version |
+|--------------------|--------------|
+|37signals           |2.0           |
+|AllPlayers.com      |1.0           |
+|AngelList           |2.0           |
+|Bitbucket           |1.0a          |
+|Cloud Foundry (UAA) |2.0           |
+|Digg                |1.0a          |
+|Dropbox             |1.0           |
+|Dwolla              |2.0           |
+|Evernote            |1.0a          |
+|Facebook            |2.0           |
+|Fitbit              |1.0a          |
+|Flickr              |1.0a          |
+|Foursquare          |2.0           |
+|Geoloqi             |2.0           |
+|GitHub              |2.0           |
+|Goodreads           |1.0           |
+|Google              |1.0a, 2.0     |
+|Gowalla             |2.0           |
+|Instagram           |2.0           |
+|Justin.tv           |1.0a          |
+|LinkedIn            |1.0a          |
+|Meetup              |1.0a          |
+|Netflix             |1.0a          |
+|Ohloh               |1.0           |
+|OpenStreetMap       |1.0a          |
+|picplz              |2.0           |
+|Rdio                |1.0a          |
+|Readability         |1.0a          |
+|RunKeeper           |2.0           |
+|SmugMug             |1.0a          |
+|SoundCloud          |2.0           |
+|TripIt              |1.0           |
+|Tumblr              |1.0a          |
+|Twitter             |1.0a          |
+|Vimeo               |1.0a          |
+|Windows Live        |2.0           |
+|Yahoo!              |1.0a          |
+|Yammer              |2.0           |
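+
+For illustration, a minimal sketch of wiring up the generic OAuth 2.0 strategy
+directly. The provider endpoints, credentials, and `User` model below are
+placeholders, not part of this module:
+
+    var OAuth2Strategy = require('passport-oauth').OAuth2Strategy;
+
+    passport.use('example-provider', new OAuth2Strategy({
+        authorizationURL: 'https://provider.example.com/oauth2/authorize',
+        tokenURL: 'https://provider.example.com/oauth2/token',
+        clientID: EXAMPLE_CLIENT_ID,
+        clientSecret: EXAMPLE_CLIENT_SECRET,
+        callbackURL: 'https://www.example.net/auth/example/callback'
+      },
+      function(accessToken, refreshToken, profile, done) {
+        // The generic strategy yields an empty profile unless a sub-class
+        // overrides userProfile(), so identify the user by the token instead.
+        User.findOrCreate({ accessToken: accessToken }, function (err, user) {
+          done(err, user);
+        });
+      }
+    ));
+
+Routes would then authenticate with `passport.authenticate('example-provider')`,
+as with any other named Passport strategy.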
                    + +## Tests + + $ npm install --dev + $ make test + +[![Build Status](https://secure.travis-ci.org/jaredhanson/passport-oauth.png)](http://travis-ci.org/jaredhanson/passport-oauth) + + +## Credits + + - [Jared Hanson](http://github.com/jaredhanson) + +## License + +[The MIT License](http://opensource.org/licenses/MIT) + +Copyright (c) 2011-2013 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)> diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth.js tilemill-0.10.1~saucy10/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth.js --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth.js 2012-07-30 04:27:32.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth.js 2013-04-10 15:23:47.000000000 +0000 @@ -75,9 +75,11 @@ // the user profile. this._oauth = new OAuth(options.requestTokenURL, options.accessTokenURL, options.consumerKey, options.consumerSecret, - "1.0", options.callbackURL || null, options.signatureMethod || "HMAC-SHA1"); + "1.0", null, options.signatureMethod || "HMAC-SHA1", + null, options.customHeaders); this._userAuthorizationURL = options.userAuthorizationURL; + this._callbackURL = options.callbackURL; this._passReqToCallback = options.passReqToCallback; this._skipUserProfile = (options.skipUserProfile === undefined) ? false : options.skipUserProfile; this._key = options.sessionKey || 'oauth'; @@ -179,6 +181,17 @@ // redirected back to the application. var params = this.requestTokenParams(options); + var callbackURL = options.callbackURL || this._callbackURL; + if (callbackURL) { + var parsed = url.parse(callbackURL); + if (!parsed.protocol) { + // The callback URL is relative, resolve a fully qualified URL from the + // URL of the originating request. + callbackURL = url.resolve(utils.originalURL(req), callbackURL); + } + } + params['oauth_callback'] = callbackURL; + this._oauth.getOAuthRequestToken(params, function(err, token, tokenSecret, params) { if (err) { return self.error(new InternalOAuthError('failed to obtain request token', err)); } diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth2.js tilemill-0.10.1~saucy10/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth2.js --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth2.js 2012-07-30 05:43:43.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/lib/passport-oauth/strategies/oauth2.js 2013-02-10 00:56:12.000000000 +0000 @@ -70,7 +70,7 @@ // allowed to use it when making protected resource requests to retrieve // the user profile. 
this._oauth2 = new OAuth2(options.clientID, options.clientSecret, - '', options.authorizationURL, options.tokenURL); + '', options.authorizationURL, options.tokenURL, options.customHeaders); this._callbackURL = options.callbackURL; this._scope = options.scope; diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/Readme.md tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/Readme.md --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/Readme.md 2012-07-28 12:46:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/Readme.md 2013-03-05 18:08:29.000000000 +0000 @@ -16,10 +16,15 @@ Change History ============== +* 0.9.10 + - OAuth2: Addresses 2 issues that came in with 0.9.9, #129 & #125 (thank you José F. Romaniello) +* 0.9.9 + - OAuth1: Fix the mismatch between the output of querystring.stringify() and this._encodeData(). (thank you rolandboon) + - OAuth2: Adds Authorization Header and supports extra headers by default ( thanks to Brian Park) * 0.9.8 - - OAuth: Support overly-strict OAuth server's that require whitespace separating the Authorization Header parameters (e.g. 500px.com) (Thanks to Christian Schwarz) - - OAuth: Fix incorrect double-encoding of PLAINTEXT OAuth connections (Thanks to Joe Rozner) - - OAuth: Minor safety check added when checking hostnames. (Thanks to Garrick Cheung) + - OAuth1: Support overly-strict OAuth server's that require whitespace separating the Authorization Header parameters (e.g. 500px.com) (Thanks to Christian Schwarz) + - OAuth1: Fix incorrect double-encoding of PLAINTEXT OAuth connections (Thanks to Joe Rozner) + - OAuth1: Minor safety check added when checking hostnames. (Thanks to Garrick Cheung) * 0.9.7 - OAuth2: Pass back any extra response data for calls to getOAuthAccessToken (Thanks to Tang Bo Hao) - OAuth2: Don't force a https request if given a http url (Thanks to Damien Mathieu) @@ -89,3 +94,6 @@ * Christian Schwarz - http://github.com/chrischw/ * Joe Rozer - http://www.deadbytes.net * Garrick Cheung - http://www.garrickcheung.com/ +* rolandboon - http://rolandboon.com +* Brian Park - http://github.com/yaru22 +* José F. Romaniello - http://github.com/jfromaniello diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/lib/oauth.js tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/lib/oauth.js --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/lib/oauth.js 2012-07-28 12:46:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/lib/oauth.js 2013-03-01 20:03:13.000000000 +0000 @@ -327,7 +327,13 @@ } if( (method == "POST" || method == "PUT") && ( post_body == null && extra_params != null) ) { - post_body= querystring.stringify(extra_params); + // Fix the mismatch between the output of querystring.stringify() and this._encodeData() + post_body= querystring.stringify(extra_params) + .replace(/\!/g, "%21") + .replace(/\'/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); } headers["Content-length"]= post_body ? 
Buffer.byteLength(post_body) : 0;
diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/lib/oauth2.js tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/lib/oauth2.js
--- tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/lib/oauth2.js	2012-07-28 12:46:01.000000000 +0000
+++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/lib/oauth2.js	2013-03-05 18:06:16.000000000 +0000
@@ -5,13 +5,16 @@
   URL= require('url'),
   OAuthUtils= require('./_utils');
 
-exports.OAuth2= function(clientId, clientSecret, baseSite, authorizePath, accessTokenPath) {
+exports.OAuth2= function(clientId, clientSecret, baseSite, authorizePath, accessTokenPath, customHeaders) {
   this._clientId= clientId;
   this._clientSecret= clientSecret;
   this._baseSite= baseSite;
   this._authorizeUrl= authorizePath || "/oauth/authorize";
   this._accessTokenUrl= accessTokenPath || "/oauth/access_token";
   this._accessTokenName= "access_token";
+  this._authMethod= "Bearer";
+  this._customHeaders = customHeaders || {};
+  this._useAuthorizationHeaderForGET= false;
 }
 
 // This 'hack' method is required for sites that don't use
@@ -23,10 +26,29 @@
   this._accessTokenName= name;
 }
 
+// Sets the authorization method for Authorization header.
+// e.g. Authorization: Bearer <token>  # "Bearer" is the authorization method.
+exports.OAuth2.prototype.setAuthMethod = function ( authMethod ) {
+  this._authMethod = authMethod;
+};
+
+
+// If you use the OAuth2 exposed 'get' method (and don't construct your own _request call )
+// this will specify whether to use an 'Authorize' header instead of passing the access_token as a query parameter
+exports.OAuth2.prototype.useAuthorizationHeaderforGET = function(useIt) {
+  this._useAuthorizationHeaderForGET= useIt;
+}
+
 exports.OAuth2.prototype._getAccessTokenUrl= function() {
   return this._baseSite + this._accessTokenUrl; /* + "?" + querystring.stringify(params); */
 }
 
+// Build the authorization header. In particular, build the part after the colon.
+// e.g. Authorization: Bearer <token>  # Build "Bearer <token>"
+exports.OAuth2.prototype.buildAuthHeader= function(token) {
+  return this._authMethod + ' ' + token;
+};
+
 exports.OAuth2.prototype._request= function(method, url, headers, post_body, access_token, callback) {
 
   var http_library= https;
@@ -42,6 +64,9 @@
   }
 
   var realHeaders= {};
+  for( var key in this._customHeaders ) {
+    realHeaders[key]= this._customHeaders[key];
+  }
   if( headers ) {
     for(var key in headers) {
       realHeaders[key] = headers[key];
@@ -50,12 +75,11 @@
   realHeaders['Host']= parsedUrl.host;
 
   realHeaders['Content-Length']= post_body ? Buffer.byteLength(post_body) : 0;
-  if( access_token ) {
+  if( access_token && !('Authorization' in realHeaders)) {
     if( ! parsedUrl.query ) parsedUrl.query= {};
     parsedUrl.query[this._accessTokenName]= access_token;
   }
 
-  var result= "";
   var queryStr= querystring.stringify(parsedUrl.query);
   if( queryStr ) queryStr=  "?" + queryStr;
   var options = {
@@ -66,6 +90,10 @@
     headers: realHeaders
   };
 
+  this._executeRequest( http_library, options, post_body, callback );
+}
+
+exports.OAuth2.prototype._executeRequest= function( http_library, options, post_body, callback ) {
   // Some hosts *cough* google appear to close the connection early / send no content-length header
   // allow this behaviour.
var allowEarlyClose= OAuthUtils.isAnEarlyCloseHost(options.host); @@ -81,6 +109,8 @@ } } + var result= ""; + var request = http_library.request(options, function (response) { response.on("data", function (chunk) { result+= chunk @@ -99,13 +129,12 @@ callback(e); }); - if( method == 'POST' && post_body ) { + if( options.method == 'POST' && post_body ) { request.write(post_body); } - request.end(); + request.end(); } - exports.OAuth2.prototype.getAuthorizeUrl= function( params ) { var params= params || {}; params['client_id'] = this._clientId; @@ -157,5 +186,12 @@ } exports.OAuth2.prototype.get= function(url, access_token, callback) { - this._request("GET", url, {}, "", access_token, callback ); + if( this._useAuthorizationHeaderForGET ) { + var headers= {'Authorization': this.buildAuthHeader(access_token) } + access_token= null; + } + else { + headers= {}; + } + this._request("GET", url, headers, "", access_token, callback ); } diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/package.json tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/package.json --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/package.json 2012-10-11 02:23:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -1,7 +1,7 @@ { "name": "oauth", "description": "Library for interacting with OAuth 1.0, 1.0A, 2 and Echo. Provides simplified client access and allows for construction of more complex apis and OAuth providers.", - "version": "0.9.8", + "version": "0.9.10", "directories": { "lib": "./lib" }, @@ -26,7 +26,15 @@ "url": "http://github.com/ciaranj/node-oauth/raw/master/LICENSE" } ], - "readme": "node-oauth\n===========\nA simple oauth API for node.js . This API allows users to authenticate against OAUTH providers, and thus act as OAuth consumers. It also has support for OAuth Echo, which is used for communicating with 3rd party media providers such as TwitPic and yFrog.\n\nTested against Twitter (http://twitter.com), term.ie (http://term.ie/oauth/example/), TwitPic, and Yahoo!\n\nAlso provides rudimentary OAuth2 support, tested against facebook, github, foursquare, google and Janrain. For more complete usage examples please take a look at connect-auth (http://github.com/ciaranj/connect-auth)\n\n\nInstallation\n============== \n\n $ npm install oauth\n\n\nChange History\n============== \n\n* 0.9.8\n - OAuth: Support overly-strict OAuth server's that require whitespace separating the Authorization Header parameters (e.g. 500px.com) (Thanks to Christian Schwarz)\n - OAuth: Fix incorrect double-encoding of PLAINTEXT OAuth connections (Thanks to Joe Rozner)\n - OAuth: Minor safety check added when checking hostnames. (Thanks to Garrick Cheung)\n* 0.9.7\n - OAuth2: Pass back any extra response data for calls to getOAuthAccessToken (Thanks to Tang Bo Hao)\n - OAuth2: Don't force a https request if given a http url (Thanks to Damien Mathieu)\n - OAuth2: Supports specifying a grant-type of 'refresh-token' (Thanks to Luke Baker)\n* 0.9.6\n - OAuth2: Support for 302 redirects (Thanks Patrick Negri). \n - OAuth1/2: Some code tidying. ( Thanks to Raoul Millais ) \n* 0.9.5\n - OAuth1: Allow usage of HTTP verbs other than GET for retrieving the access and request tokens (Thanks to Raoul Millais) \n* 0.9.4\n - OAuth1/2: Support for OAuth providers that drop connections (don't send response lengths? 
[Google]) \n - OAuth2: Change getOAuthAccessToken to POST rather than GET ( Possible Breaking change!!! ... re-tested against Google, Github, Facebook, FourSquare and Janrain and seems ok .. is closer to the spec (v20) ) \n* 0.9.3\n - OAuth1: Adds support for following 301 redirects (Thanks bdickason) \n* 0.9.2 \n - OAuth1: Correct content length calculated for non-ascii post bodies (Thanks selead) \n - OAuth1: Allowed for configuration of the 'access token' name used when requesting protected resources (OAuth2) \n* 0.9.1\n - OAuth1: Added support for automatically following 302 redirects (Thanks neyric) \n - OAuth1: Added support for OAuth Echo (Thanks Ryan LeFevre). \n - OAuth1: Improved handling of 2xx responses (Thanks Neil Mansilla). \n* 0.9.0\n - OAuth1/2: Compatibility fixes to bring node-oauth up to speed with node.js 0.4x [thanks to Rasmus Andersson for starting the work ] \n* 0.8.4\n - OAuth1: Fixed issue #14 (Parameter ordering ignored encodings).\n - OAuth1: Added support for repeated parameter names.\n - OAuth1/2: Implements issue #15 (Use native SHA1 if available, 10x speed improvement!).\n - OAuth2: Fixed issue #16 (Should use POST when requesting access tokens.).\n - OAuth2: Fixed Issue #17 (OAuth2 spec compliance). \n - OAuth1: Implemented enhancement #13 (Adds support for PUT & DELETE http verbs). \n - OAuth1: Fixes issue #18 (Complex/Composite url arguments [thanks novemberborn]) \n* 0.8.3\n - OAuth1: Fixed an issue where the auth header code depended on the Array's toString method (Yohei Sasaki) Updated the getOAuthRequestToken method so we can access google's OAuth secured methods. Also re-implemented and fleshed out the test suite. \n* 0.8.2\n - OAuth1: The request returning methods will now write the POST body if provided (Chris Anderson), the code responsible for manipulating the headers is a bit safe now when working with other code (Paul McKellar)\n - Package: Tweaked the package.json to use index.js instead of main.js \n* 0.8.1\n - OAuth1: Added mechanism to get hold of a signed Node Request object, ready for attaching response listeners etc. (Perfect for streaming APIs) \n* 0.8.0\n - OAuth1: Standardised method capitalisation, the old getOauthAccessToken is now getOAuthAccessToken (Breaking change to existing code) \n* 0.7.7\n - OAuth1: Looks like non oauth_ parameters where appearing within the Authorization headers, which I believe to be incorrect. \n* 0.7.6\n - OAuth1: Added in oauth_verifier property to getAccessToken required for 1.0A \n* 0.7.5\n - Package: Added in a main.js to simplify the require'ing of OAuth \n* 0.7.4\n - OAuth1: Minor change to add an error listener to the OAuth client (thanks troyk) \n* 0.7.3\n - OAuth2: Now sends a Content-Length Http header to keep nginx happy :) \n* 0.7.2\n - OAuth1: Fixes some broken unit tests! 
\n* 0.7.0\n - OAuth1/2: Introduces support for HTTPS end points and callback URLS for OAuth 1.0A and Oauth 2 (Please be aware that this was a breaking change to the constructor arguments order) \n\nContributors (In no particular order)\n=====================================\n\n* Ciaran Jessup - ciaranj@gmail.com\n* Mark Wubben - http://equalmedia.com/\n* Ryan LeFevre - http://meltingice.net\n* Raoul Millais\n* Patrick Negri - http://github.com/pnegri\n* Tang Bo Hao - http://github.com/btspoony\n* Damien Mathieu - http://42.dmathieu.com\n* Luke Baker - http://github.com/lukebaker\n* Christian Schwarz - http://github.com/chrischw/\n* Joe Rozer - http://www.deadbytes.net\n* Garrick Cheung - http://www.garrickcheung.com/\n", - "_id": "oauth@0.9.8", - "_from": "oauth@0.9.x" + "readme": "node-oauth\n===========\nA simple oauth API for node.js . This API allows users to authenticate against OAUTH providers, and thus act as OAuth consumers. It also has support for OAuth Echo, which is used for communicating with 3rd party media providers such as TwitPic and yFrog.\n\nTested against Twitter (http://twitter.com), term.ie (http://term.ie/oauth/example/), TwitPic, and Yahoo!\n\nAlso provides rudimentary OAuth2 support, tested against facebook, github, foursquare, google and Janrain. For more complete usage examples please take a look at connect-auth (http://github.com/ciaranj/connect-auth)\n\n\nInstallation\n============== \n\n $ npm install oauth\n\n\nChange History\n============== \n\n* 0.9.10\n - OAuth2: Addresses 2 issues that came in with 0.9.9, #129 & #125 (thank you José F. Romaniello)\n* 0.9.9\n - OAuth1: Fix the mismatch between the output of querystring.stringify() and this._encodeData(). (thank you rolandboon)\n - OAuth2: Adds Authorization Header and supports extra headers by default ( thanks to Brian Park)\n* 0.9.8\n - OAuth1: Support overly-strict OAuth server's that require whitespace separating the Authorization Header parameters (e.g. 500px.com) (Thanks to Christian Schwarz)\n - OAuth1: Fix incorrect double-encoding of PLAINTEXT OAuth connections (Thanks to Joe Rozner)\n - OAuth1: Minor safety check added when checking hostnames. (Thanks to Garrick Cheung)\n* 0.9.7\n - OAuth2: Pass back any extra response data for calls to getOAuthAccessToken (Thanks to Tang Bo Hao)\n - OAuth2: Don't force a https request if given a http url (Thanks to Damien Mathieu)\n - OAuth2: Supports specifying a grant-type of 'refresh-token' (Thanks to Luke Baker)\n* 0.9.6\n - OAuth2: Support for 302 redirects (Thanks Patrick Negri). \n - OAuth1/2: Some code tidying. ( Thanks to Raoul Millais ) \n* 0.9.5\n - OAuth1: Allow usage of HTTP verbs other than GET for retrieving the access and request tokens (Thanks to Raoul Millais) \n* 0.9.4\n - OAuth1/2: Support for OAuth providers that drop connections (don't send response lengths? [Google]) \n - OAuth2: Change getOAuthAccessToken to POST rather than GET ( Possible Breaking change!!! ... re-tested against Google, Github, Facebook, FourSquare and Janrain and seems ok .. is closer to the spec (v20) ) \n* 0.9.3\n - OAuth1: Adds support for following 301 redirects (Thanks bdickason) \n* 0.9.2 \n - OAuth1: Correct content length calculated for non-ascii post bodies (Thanks selead) \n - OAuth1: Allowed for configuration of the 'access token' name used when requesting protected resources (OAuth2) \n* 0.9.1\n - OAuth1: Added support for automatically following 302 redirects (Thanks neyric) \n - OAuth1: Added support for OAuth Echo (Thanks Ryan LeFevre). 
\n - OAuth1: Improved handling of 2xx responses (Thanks Neil Mansilla). \n* 0.9.0\n - OAuth1/2: Compatibility fixes to bring node-oauth up to speed with node.js 0.4x [thanks to Rasmus Andersson for starting the work ] \n* 0.8.4\n - OAuth1: Fixed issue #14 (Parameter ordering ignored encodings).\n - OAuth1: Added support for repeated parameter names.\n - OAuth1/2: Implements issue #15 (Use native SHA1 if available, 10x speed improvement!).\n - OAuth2: Fixed issue #16 (Should use POST when requesting access tokens.).\n - OAuth2: Fixed Issue #17 (OAuth2 spec compliance). \n - OAuth1: Implemented enhancement #13 (Adds support for PUT & DELETE http verbs). \n - OAuth1: Fixes issue #18 (Complex/Composite url arguments [thanks novemberborn]) \n* 0.8.3\n - OAuth1: Fixed an issue where the auth header code depended on the Array's toString method (Yohei Sasaki) Updated the getOAuthRequestToken method so we can access google's OAuth secured methods. Also re-implemented and fleshed out the test suite. \n* 0.8.2\n - OAuth1: The request returning methods will now write the POST body if provided (Chris Anderson), the code responsible for manipulating the headers is a bit safe now when working with other code (Paul McKellar)\n - Package: Tweaked the package.json to use index.js instead of main.js \n* 0.8.1\n - OAuth1: Added mechanism to get hold of a signed Node Request object, ready for attaching response listeners etc. (Perfect for streaming APIs) \n* 0.8.0\n - OAuth1: Standardised method capitalisation, the old getOauthAccessToken is now getOAuthAccessToken (Breaking change to existing code) \n* 0.7.7\n - OAuth1: Looks like non oauth_ parameters where appearing within the Authorization headers, which I believe to be incorrect. \n* 0.7.6\n - OAuth1: Added in oauth_verifier property to getAccessToken required for 1.0A \n* 0.7.5\n - Package: Added in a main.js to simplify the require'ing of OAuth \n* 0.7.4\n - OAuth1: Minor change to add an error listener to the OAuth client (thanks troyk) \n* 0.7.3\n - OAuth2: Now sends a Content-Length Http header to keep nginx happy :) \n* 0.7.2\n - OAuth1: Fixes some broken unit tests! \n* 0.7.0\n - OAuth1/2: Introduces support for HTTPS end points and callback URLS for OAuth 1.0A and Oauth 2 (Please be aware that this was a breaking change to the constructor arguments order) \n\nContributors (In no particular order)\n=====================================\n\n* Ciaran Jessup - ciaranj@gmail.com\n* Mark Wubben - http://equalmedia.com/\n* Ryan LeFevre - http://meltingice.net\n* Raoul Millais\n* Patrick Negri - http://github.com/pnegri\n* Tang Bo Hao - http://github.com/btspoony\n* Damien Mathieu - http://42.dmathieu.com\n* Luke Baker - http://github.com/lukebaker\n* Christian Schwarz - http://github.com/chrischw/\n* Joe Rozer - http://www.deadbytes.net\n* Garrick Cheung - http://www.garrickcheung.com/\n* rolandboon - http://rolandboon.com\n* Brian Park - http://github.com/yaru22\n* José F. 
Romaniello - http://github.com/jfromaniello\n", + "readmeFilename": "Readme.md", + "bugs": { + "url": "https://github.com/ciaranj/node-oauth/issues" + }, + "_id": "oauth@0.9.10", + "dist": { + "shasum": "fa5e8239ab68b9917fa6dd34b03fdc0bf992befa" + }, + "_from": "oauth@0.9.x", + "_resolved": "https://registry.npmjs.org/oauth/-/oauth-0.9.10.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/tests/oauth2.js tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/tests/oauth2.js --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/oauth/tests/oauth2.js 2012-07-28 12:46:01.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/oauth/tests/oauth2.js 2013-03-05 18:07:13.000000000 +0000 @@ -1,10 +1,12 @@ var vows = require('vows'), assert = require('assert'), - OAuth2= require('../lib/oauth2').OAuth2; + https = require('https'), + OAuth2= require('../lib/oauth2').OAuth2, + url = require('url'); vows.describe('OAuth2').addBatch({ - 'Given an OAuth2 instance, ': { - topic: new OAuth2(), + 'Given an OAuth2 instance with clientId and clientSecret, ': { + topic: new OAuth2("clientId", "clientSecret"), 'When handling the access token response': { 'we should correctly extract the token if received as form-data': function (oa) { oa._request= function( method, url, fo, bar, bleh, callback) { @@ -15,6 +17,27 @@ assert.equal( refresh_token, "refresh"); }); }, + 'we should not include access token in both querystring and headers (favours headers if specified)': function (oa) { + oa._request = new OAuth2("clientId", "clientSecret")._request.bind(oa); + oa._executeRequest= function( http_library, options, post_body, callback) { + callback(null, url.parse(options.path, true).query, options.headers); + }; + + oa._request("GET", "http://foo/", {"Authorization":"Bearer BadNews"}, null, "accessx", function(error, query, headers) { + assert.ok( !('access_token' in query), "access_token also in query"); + assert.ok( 'Authorization' in headers, "Authorization not in headers"); + }); + }, + 'we should include access token in the querystring if no Authorization header present to override it': function (oa) { + oa._request = new OAuth2("clientId", "clientSecret")._request.bind(oa); + oa._executeRequest= function( http_library, options, post_body, callback) { + callback(null, url.parse(options.path, true).query, options.headers); + }; + oa._request("GET", "http://foo/", {}, null, "access", function(error, query, headers) { + assert.ok( 'access_token' in query, "access_token not present in query"); + assert.ok( !('Authorization' in headers), "Authorization in headers"); + }); + }, 'we should correctly extract the token if received as a JSON literal': function (oa) { oa._request= function(method, url, headers, post_body, access_token, callback) { callback(null, '{"access_token":"access","refresh_token":"refresh"}'); @@ -40,28 +63,75 @@ 'When no grant_type parameter is specified': { 'we should pass the value of the code argument as the code parameter': function(oa) { oa._request= function(method, url, headers, post_body, access_token, callback) { - assert.isTrue( post_body.indexOf("code=xsds23") != -1 ) - } + assert.isTrue( post_body.indexOf("code=xsds23") != -1 ); + }; oa.getOAuthAccessToken("xsds23", {} ); } }, 'When an invalid grant_type parameter is specified': { 'we should pass the value of the code argument as the code parameter': function(oa) { oa._request= function(method, url, headers, post_body, access_token, callback) { 
- assert.isTrue( post_body.indexOf("code=xsds23") != -1 ) - } + assert.isTrue( post_body.indexOf("code=xsds23") != -1 ); + }; oa.getOAuthAccessToken("xsds23", {grant_type:"refresh_toucan"} ); } }, 'When a grant_type parameter of value "refresh_token" is specified': { 'we should pass the value of the code argument as the refresh_token parameter, should pass a grant_type parameter, but shouldn\'t pass a code parameter' : function(oa) { oa._request= function(method, url, headers, post_body, access_token, callback) { - assert.isTrue( post_body.indexOf("refresh_token=sdsds2") != -1 ) - assert.isTrue( post_body.indexOf("grant_type=refresh_token") != -1 ) - assert.isTrue( post_body.indexOf("code=") == -1 ) - } + assert.isTrue( post_body.indexOf("refresh_token=sdsds2") != -1 ); + assert.isTrue( post_body.indexOf("grant_type=refresh_token") != -1 ); + assert.isTrue( post_body.indexOf("code=") == -1 ); + }; oa.getOAuthAccessToken("sdsds2", {grant_type:"refresh_token"} ); } + }, + 'When we use the authorization header': { + 'and call get with the default authorization method': { + 'we should pass the authorization header with Bearer method and value of the access_token, _request should be passed a null access_token' : function(oa) { + oa._request= function(method, url, headers, post_body, access_token, callback) { + assert.equal(headers["Authorization"], "Bearer abcd5"); + assert.isNull( access_token ); + }; + oa.useAuthorizationHeaderforGET(true); + oa.get("", "abcd5"); + } + }, + 'and call get with the authorization method set to Basic': { + 'we should pass the authorization header with Basic method and value of the access_token, _request should be passed a null access_token' : function(oa) { + oa._request= function(method, url, headers, post_body, access_token, callback) { + assert.equal(headers["Authorization"], "Basic cdg2"); + assert.isNull( access_token ); + }; + oa.useAuthorizationHeaderforGET(true); + oa.setAuthMethod("Basic"); + oa.get("", "cdg2"); + } + } + }, + 'When we do not use the authorization header': { + 'and call get': { + 'we should pass NOT provide an authorization header and the access_token should be being passed to _request' : function(oa) { + oa._request= function(method, url, headers, post_body, access_token, callback) { + assert.isUndefined(headers["Authorization"]); + assert.equal( access_token, "abcd5" ); + }; + oa.useAuthorizationHeaderforGET(false); + oa.get("", "abcd5"); + } + } + } + }, + 'Given an OAuth2 instance with clientId, clientSecret and customHeaders': { + topic: new OAuth2("clientId", "clientSecret", undefined, undefined, undefined, + { 'SomeHeader': '123' }), + 'When calling get': { + 'we should see the custom headers mixed into headers property in options passed to http-library' : function(oa) { + oa._executeRequest= function( http_library, options, callback ) { + assert.equal(options.headers["SomeHeader"], "123"); + }; + oa.get("", {}); + } } } -}).export(module); \ No newline at end of file +}).export(module); diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/pkginfo/package.json tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/pkginfo/package.json --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/node_modules/pkginfo/package.json 2012-10-11 02:23:13.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/node_modules/pkginfo/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -26,6 +26,14 @@ "node": ">= 0.4.0" }, "readme": "# node-pkginfo\n\nAn easy way to expose properties on a module from a 
package.json\n\n## Installation\n\n### Installing npm (node package manager)\n```\n curl http://npmjs.org/install.sh | sh\n```\n\n### Installing pkginfo\n```\n [sudo] npm install pkginfo\n```\n\n## Motivation\nHow often when writing node.js modules have you written the following line(s) of code? \n\n* Hard code your version string into your code\n\n``` js\n exports.version = '0.1.0';\n```\n\n* Programmatically expose the version from the package.json\n\n``` js\n exports.version = JSON.parse(fs.readFileSync('/path/to/package.json', 'utf8')).version;\n```\n\nIn other words, how often have you wanted to expose basic information from your package.json onto your module programmatically? **WELL NOW YOU CAN!**\n\n## Usage\n\nUsing `pkginfo` is idiot-proof, just require and invoke it. \n\n``` js\n var pkginfo = require('pkginfo')(module);\n \n console.dir(module.exports);\n```\n\nBy invoking the `pkginfo` module all of the properties in your `package.json` file will be automatically exposed on the callee module (i.e. the parent module of `pkginfo`). \n\nHere's a sample of the output:\n\n```\n { name: 'simple-app',\n description: 'A test fixture for pkginfo',\n version: '0.1.0',\n author: 'Charlie Robbins ',\n keywords: [ 'test', 'fixture' ],\n main: './index.js',\n scripts: { test: 'vows test/*-test.js --spec' },\n engines: { node: '>= 0.4.0' } }\n```\n\n### Expose specific properties\nIf you don't want to expose **all** properties on from your `package.json` on your module then simple pass those properties to the `pkginfo` function:\n\n``` js\n var pkginfo = require('pkginfo')(module, 'version', 'author');\n \n console.dir(module.exports);\n```\n\n```\n { version: '0.1.0',\n author: 'Charlie Robbins ' }\n```\n\nIf you're looking for further usage see the [examples][0] included in this repository. 
\n\n## Run Tests\nTests are written in [vows][1] and give complete coverage of all APIs.\n\n```\n vows test/*-test.js --spec\n```\n\n[0]: https://github.com/indexzero/node-pkginfo/tree/master/examples\n[1]: http://vowsjs.org\n\n#### Author: [Charlie Robbins](http://nodejitsu.com)", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/indexzero/node-pkginfo/issues" + }, "_id": "pkginfo@0.2.3", - "_from": "pkginfo@0.2.x" + "dist": { + "shasum": "bc4d08892eea7e47359d0b3dd256e5349fcd0d1e" + }, + "_from": "pkginfo@0.2.x", + "_resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.2.3.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/passport-oauth/package.json tilemill-0.10.1~saucy10/node_modules/passport-oauth/package.json --- tilemill-0.10.1~saucy9/node_modules/passport-oauth/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/passport-oauth/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -1,12 +1,14 @@ { "name": "passport-oauth", - "version": "0.1.12", + "version": "0.1.15", "description": "OAuth 1.0 and 2.0 authentication strategies for Passport.", - "author": { - "name": "Jared Hanson", - "email": "jaredhanson@gmail.com", - "url": "http://www.jaredhanson.net/" - }, + "keywords": [ + "passport", + "oauth", + "auth", + "authn", + "authentication" + ], "repository": { "type": "git", "url": "git://github.com/jaredhanson/passport-oauth.git" @@ -14,6 +16,17 @@ "bugs": { "url": "http://github.com/jaredhanson/passport-oauth/issues" }, + "author": { + "name": "Jared Hanson", + "email": "jaredhanson@gmail.com", + "url": "http://www.jaredhanson.net/" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://www.opensource.org/licenses/MIT" + } + ], "main": "./lib/passport-oauth", "dependencies": { "pkginfo": "0.2.x", @@ -29,20 +42,12 @@ "engines": { "node": ">= 0.4.0" }, - "licenses": [ - { - "type": "MIT", - "url": "http://www.opensource.org/licenses/MIT" - } - ], - "keywords": [ - "passport", - "oauth", - "auth", - "authn", - "authentication" - ], - "_id": "passport-oauth@0.1.12", - "readme": "ERROR: No README.md file found!", - "_from": "passport-oauth@0.1.x" + "readme": "# Passport-OAuth\n\nGeneral-purpose OAuth 1.0 and OAuth 2.0 authentication strategies for [Passport](https://github.com/jaredhanson/passport).\n\nThis module lets you authenticate using OAuth in your Node.js applications.\nBy plugging into Passport, OAuth authentication can be easily and unobtrusively\nintegrated into any application or framework that supports\n[Connect](http://www.senchalabs.org/connect/)-style middleware, including\n[Express](http://expressjs.com/).\n\nNote that this strategy provides generic OAuth support. In many cases, a\nprovider-specific strategy can be used instead, which cuts down on unnecessary\nconfiguration, and accommodates any provider-specific quirks. See the list\nbelow for supported providers.\n\nDevelopers who need to implement authentication against an OAuth provider that\nis not already supported are encouraged to sub-class this strategy. If you\nchoose to open source the new provider-specific strategy, send me a message and\nI will update the list.\n\n## Installation\n\n $ npm install passport-oauth\n\n## Strategies using OAuth\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
                    Strategy              OAuth Version
                    37signals             2.0
                    AllPlayers.com        1.0
                    AngelList             2.0
                    Bitbucket             1.0a
                    Cloud Foundry (UAA)   2.0
                    Digg                  1.0a
                    Dropbox               1.0
                    Dwolla                2.0
                    Evernote              1.0a
                    Facebook              2.0
                    Fitbit                1.0a
                    Flickr                1.0a
                    Foursquare            2.0
                    Geoloqi               2.0
                    GitHub                2.0
                    Goodreads             1.0
                    Google                1.0a, 2.0
                    Gowalla               2.0
                    Instagram             2.0
                    Justin.tv             1.0a
                    LinkedIn              1.0a
                    Meetup                1.0a
                    Netflix               1.0a
                    Ohloh                 1.0
                    OpenStreetMap         1.0a
                    picplz                2.0
                    Rdio                  1.0a
                    Readability           1.0a
                    RunKeeper             2.0
                    SmugMug               1.0a
                    SoundCloud            2.0
                    TripIt                1.0
                    Tumblr                1.0a
                    Twitter               1.0a
                    Vimeo                 1.0a
                    Windows Live          2.0
                    Yahoo!                1.0a
                    Yammer                2.0
                    \n\n## Tests\n\n $ npm install --dev\n $ make test\n\n[![Build Status](https://secure.travis-ci.org/jaredhanson/passport-oauth.png)](http://travis-ci.org/jaredhanson/passport-oauth)\n\n\n## Credits\n\n - [Jared Hanson](http://github.com/jaredhanson)\n\n## License\n\n[The MIT License](http://opensource.org/licenses/MIT)\n\nCopyright (c) 2011-2013 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)>\n", + "readmeFilename": "README.md", + "_id": "passport-oauth@0.1.15", + "dist": { + "shasum": "145ae9289190044ed9455d5c5a6aaaa160c45514" + }, + "_from": "passport-oauth@0.1.x", + "_resolved": "https://registry.npmjs.org/passport-oauth/-/passport-oauth-0.1.15.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/aws-sign/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/aws-sign/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/aws-sign/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/aws-sign/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -23,9 +23,5 @@ "url": "https://github.com/mikeal/aws-sign/issues" }, "_id": "aws-sign@0.3.0", - "dist": { - "shasum": "61a747cebf4a3e7205a384f247fee259d223e573" - }, - "_from": "aws-sign@~0.3.0", - "_resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.3.0.tgz" + "_from": "aws-sign@~0.3.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/cookie-jar/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/cookie-jar/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/cookie-jar/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/cookie-jar/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -26,9 +26,5 @@ "url": "https://github.com/mikeal/cookie-jar/issues" }, "_id": "cookie-jar@0.3.0", - "dist": { - "shasum": "a07eb22b9b302c3519c220a0d9848bc19df27c13" - }, - "_from": "cookie-jar@~0.3.0", - "_resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.3.0.tgz" + "_from": "cookie-jar@~0.3.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/forever-agent/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/forever-agent/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/forever-agent/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/forever-agent/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -23,9 +23,5 @@ "url": "https://github.com/mikeal/forever-agent/issues" }, "_id": "forever-agent@0.5.0", - "dist": { - "shasum": "13dcae5391e0c8e23e2225e113cb78f4e090a722" - }, - "_from": "forever-agent@~0.5.0", - "_resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.0.tgz" + "_from": "forever-agent@~0.5.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/node_modules/async/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/node_modules/async/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/node_modules/async/package.json 2013-10-25 01:44:51.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/node_modules/async/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -38,9 +38,5 @@ "readme": "# Async.js\n\nAsync is a utility module which provides straight-forward, powerful functions\nfor working with asynchronous 
JavaScript. Although originally designed for\nuse with [node.js](http://nodejs.org), it can also be used directly in the\nbrowser. Also supports [component](https://github.com/component/component).\n\nAsync provides around 20 functions that include the usual 'functional'\nsuspects (map, reduce, filter, each…) as well as some common patterns\nfor asynchronous control flow (parallel, series, waterfall…). All these\nfunctions assume you follow the node.js convention of providing a single\ncallback as the last argument of your async function.\n\n\n## Quick Examples\n\n```javascript\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n\nasync.parallel([\n function(){ ... },\n function(){ ... }\n], callback);\n\nasync.series([\n function(){ ... },\n function(){ ... }\n]);\n```\n\nThere are many more functions available so take a look at the docs below for a\nfull list. This module aims to be comprehensive, so if you feel anything is\nmissing please create a GitHub issue for it.\n\n## Common Pitfalls\n\n### Binding a context to an iterator\n\nThis section is really about bind, not about async. If you are wondering how to\nmake async execute your iterators in a given context, or are confused as to why\na method of another library isn't working as an iterator, study this example:\n\n```js\n// Here is a simple object with an (unnecessarily roundabout) squaring method\nvar AsyncSquaringLibrary = {\n squareExponent: 2,\n square: function(number, callback){ \n var result = Math.pow(number, this.squareExponent);\n setTimeout(function(){\n callback(null, result);\n }, 200);\n }\n};\n\nasync.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){\n // result is [NaN, NaN, NaN]\n // This fails because the `this.squareExponent` expression in the square\n // function is not evaluated in the context of AsyncSquaringLibrary, and is\n // therefore undefined.\n});\n\nasync.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){\n // result is [1, 4, 9]\n // With the help of bind we can attach a context to the iterator before\n // passing it to async. Now the square function will be executed in its \n // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent`\n // will be as expected.\n});\n```\n\n## Download\n\nThe source is available for download from\n[GitHub](http://github.com/caolan/async).\nAlternatively, you can install using Node Package Manager (npm):\n\n npm install async\n\n__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed\n\n## In the Browser\n\nSo far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. 
Usage:\n\n```html\n\n\n```\n\n## Documentation\n\n### Collections\n\n* [each](#each)\n* [map](#map)\n* [filter](#filter)\n* [reject](#reject)\n* [reduce](#reduce)\n* [detect](#detect)\n* [sortBy](#sortBy)\n* [some](#some)\n* [every](#every)\n* [concat](#concat)\n\n### Control Flow\n\n* [series](#series)\n* [parallel](#parallel)\n* [whilst](#whilst)\n* [doWhilst](#doWhilst)\n* [until](#until)\n* [doUntil](#doUntil)\n* [forever](#forever)\n* [waterfall](#waterfall)\n* [compose](#compose)\n* [applyEach](#applyEach)\n* [queue](#queue)\n* [cargo](#cargo)\n* [auto](#auto)\n* [iterator](#iterator)\n* [apply](#apply)\n* [nextTick](#nextTick)\n* [times](#times)\n* [timesSeries](#timesSeries)\n\n### Utils\n\n* [memoize](#memoize)\n* [unmemoize](#unmemoize)\n* [log](#log)\n* [dir](#dir)\n* [noConflict](#noConflict)\n\n\n## Collections\n\n\n\n### each(arr, iterator, callback)\n\nApplies an iterator function to each item in an array, in parallel.\nThe iterator is called with an item from the list and a callback for when it\nhas finished. If the iterator passes an error to this callback, the main\ncallback for the each function is immediately called with the error.\n\nNote, that since this function applies the iterator to each item in parallel\nthere is no guarantee that the iterator functions will complete in order.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err) which must be called once it has \n completed. If no error has occured, the callback should be run without \n arguments or with an explicit null argument.\n* callback(err) - A callback which is called after all the iterator functions\n have finished, or an error has occurred.\n\n__Example__\n\n```js\n// assuming openFiles is an array of file names and saveFile is a function\n// to save the modified contents of that file:\n\nasync.each(openFiles, saveFile, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n\n\n### eachSeries(arr, iterator, callback)\n\nThe same as each only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. This means the iterator functions will complete in order.\n\n\n---------------------------------------\n\n\n\n### eachLimit(arr, limit, iterator, callback)\n\nThe same as each only no more than \"limit\" iterators will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that\n the first \"limit\" iterator functions will complete before any others are \nstarted.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* limit - The maximum number of iterators to run at any time.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err) which must be called once it has \n completed. 
If no error has occured, the callback should be run without \n arguments or with an explicit null argument.\n* callback(err) - A callback which is called after all the iterator functions\n have finished, or an error has occurred.\n\n__Example__\n\n```js\n// Assume documents is an array of JSON objects and requestApi is a\n// function that interacts with a rate-limited REST api.\n\nasync.eachLimit(documents, 20, requestApi, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n\n### map(arr, iterator, callback)\n\nProduces a new array of values by mapping each value in the given array through\nthe iterator function. The iterator is called with an item from the array and a\ncallback for when it has finished processing. The callback takes 2 arguments, \nan error and the transformed item from the array. If the iterator passes an\nerror to this callback, the main callback for the map function is immediately\ncalled with the error.\n\nNote, that since this function applies the iterator to each item in parallel\nthere is no guarantee that the iterator functions will complete in order, however\nthe results array will be in the same order as the original array.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, transformed) which must be called once \n it has completed with an error (which can be null) and a transformed item.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is an array of the\n transformed items from the original array.\n\n__Example__\n\n```js\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n\n### mapSeries(arr, iterator, callback)\n\nThe same as map only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n\n---------------------------------------\n\n\n### mapLimit(arr, limit, iterator, callback)\n\nThe same as map only no more than \"limit\" iterators will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that\n the first \"limit\" iterator functions will complete before any others are \nstarted.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* limit - The maximum number of iterators to run at any time.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, transformed) which must be called once \n it has completed with an error (which can be null) and a transformed item.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. 
Results is an array of the\n transformed items from the original array.\n\n__Example__\n\n```js\nasync.map(['file1','file2','file3'], 1, fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n\n### filter(arr, iterator, callback)\n\n__Alias:__ select\n\nReturns a new array of all the values which pass an async truth test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists. This operation is\nperformed in parallel, but the results array will be in the same order as the\noriginal.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(results) - A callback which is called after all the iterator\n functions have finished.\n\n__Example__\n\n```js\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n```\n\n---------------------------------------\n\n\n### filterSeries(arr, iterator, callback)\n\n__alias:__ selectSeries\n\nThe same as filter only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n---------------------------------------\n\n\n### reject(arr, iterator, callback)\n\nThe opposite of filter. Removes values that pass an async truth test.\n\n---------------------------------------\n\n\n### rejectSeries(arr, iterator, callback)\n\nThe same as reject, only the iterator is applied to each item in the array\nin series.\n\n\n---------------------------------------\n\n\n### reduce(arr, memo, iterator, callback)\n\n__aliases:__ inject, foldl\n\nReduces a list of values into a single value using an async iterator to return\neach successive step. Memo is the initial state of the reduction. This\nfunction only operates in series. For performance reasons, it may make sense to\nsplit a call to this function into a parallel map, then use the normal\nArray.prototype.reduce on the results. This function is for situations where\neach step in the reduction needs to be async, if you can get the data before\nreducing it then it's probably a good idea to do so.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* memo - The initial state of the reduction.\n* iterator(memo, item, callback) - A function applied to each item in the\n array to produce the next step in the reduction. The iterator is passed a\n callback(err, reduction) which accepts an optional error as its first \n argument, and the state of the reduction as the second. If an error is \n passed to the callback, the reduction is stopped and the main callback is \n immediately called with the error.\n* callback(err, result) - A callback which is called after all the iterator\n functions have finished. 
Result is the reduced value.\n\n__Example__\n\n```js\nasync.reduce([1,2,3], 0, function(memo, item, callback){\n // pointless async:\n process.nextTick(function(){\n callback(null, memo + item)\n });\n}, function(err, result){\n // result is now equal to the last value of memo, which is 6\n});\n```\n\n---------------------------------------\n\n\n### reduceRight(arr, memo, iterator, callback)\n\n__Alias:__ foldr\n\nSame as reduce, only operates on the items in the array in reverse order.\n\n\n---------------------------------------\n\n\n### detect(arr, iterator, callback)\n\nReturns the first value in a list that passes an async truth test. The\niterator is applied in parallel, meaning the first iterator to return true will\nfire the detect callback with that result. That means the result might not be\nthe first item in the original array (in terms of order) that passes the test.\n\nIf order within the original array is important then look at detectSeries.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called as soon as any iterator returns\n true, or after all the iterator functions have finished. Result will be\n the first item in the array that passes the truth test (iterator) or the\n value undefined if none passed.\n\n__Example__\n\n```js\nasync.detect(['file1','file2','file3'], fs.exists, function(result){\n // result now equals the first file in the list that exists\n});\n```\n\n---------------------------------------\n\n\n### detectSeries(arr, iterator, callback)\n\nThe same as detect, only the iterator is applied to each item in the array\nin series. This means the result is always the first in the original array (in\nterms of array order) that passes the truth test.\n\n\n---------------------------------------\n\n\n### sortBy(arr, iterator, callback)\n\nSorts a list by the results of running each value through an async iterator.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, sortValue) which must be called once it\n has completed with an error (which can be null) and a value to use as the sort\n criteria.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is the items from\n the original array sorted by the values returned by the iterator calls.\n\n__Example__\n\n```js\nasync.sortBy(['file1','file2','file3'], function(file, callback){\n fs.stat(file, function(err, stats){\n callback(err, stats.mtime);\n });\n}, function(err, results){\n // results is now the original array of files sorted by\n // modified date\n});\n```\n\n---------------------------------------\n\n\n### some(arr, iterator, callback)\n\n__Alias:__ any\n\nReturns true if at least one element in the array satisfies an async test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists. 
Once any iterator\ncall returns true, the main callback is immediately called.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called as soon as any iterator returns\n true, or after all the iterator functions have finished. Result will be\n either true or false depending on the values of the async tests.\n\n__Example__\n\n```js\nasync.some(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then at least one of the files exists\n});\n```\n\n---------------------------------------\n\n\n### every(arr, iterator, callback)\n\n__Alias:__ all\n\nReturns true if every element in the array satisfies an async test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called after all the iterator\n functions have finished. Result will be either true or false depending on\n the values of the async tests.\n\n__Example__\n\n```js\nasync.every(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then every file exists\n});\n```\n\n---------------------------------------\n\n\n### concat(arr, iterator, callback)\n\nApplies an iterator to each item in a list, concatenating the results. Returns the\nconcatenated list. The iterators are called in parallel, and the results are\nconcatenated as they return. There is no guarantee that the results array will\nbe returned in the original order of the arguments passed to the iterator function.\n\n__Arguments__\n\n* arr - An array to iterate over\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, results) which must be called once it \n has completed with an error (which can be null) and an array of results.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is an array containing\n the concatenated results of the iterator function.\n\n__Example__\n\n```js\nasync.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){\n // files is now a list of filenames that exist in the 3 directories\n});\n```\n\n---------------------------------------\n\n\n### concatSeries(arr, iterator, callback)\n\nSame as async.concat, but executes in series instead of parallel.\n\n\n## Control Flow\n\n\n### series(tasks, [callback])\n\nRun an array of functions in series, each one running once the previous\nfunction has completed. If any functions in the series pass an error to its\ncallback, no more functions are run and the callback for the series is\nimmediately called with the value of the error. Once the tasks have completed,\nthe results are passed to the final callback as an array.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final callback as an object\ninstead of an array. 
This can be a more readable way of handling results from\nasync.series.\n\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed\n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.series([\n function(callback){\n // do some stuff ...\n callback(null, 'one');\n },\n function(callback){\n // do some more stuff ...\n callback(null, 'two');\n }\n],\n// optional callback\nfunction(err, results){\n // results is now equal to ['one', 'two']\n});\n\n\n// an example using an object instead of an array\nasync.series({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equal to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n\n### parallel(tasks, [callback])\n\nRun an array of functions in parallel, without waiting until the previous\nfunction has completed. If any of the functions pass an error to its\ncallback, the main callback is immediately called with the value of the error.\nOnce the tasks have completed, the results are passed to the final callback as an\narray.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final callback as an object\ninstead of an array. This can be a more readable way of handling results from\nasync.parallel.\n\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed \n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. 
This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.parallel([\n function(callback){\n setTimeout(function(){\n callback(null, 'one');\n }, 200);\n },\n function(callback){\n setTimeout(function(){\n callback(null, 'two');\n }, 100);\n }\n],\n// optional callback\nfunction(err, results){\n // the results array will equal ['one','two'] even though\n // the second function had a shorter timeout.\n});\n\n\n// an example using an object instead of an array\nasync.parallel({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equals to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n\n### parallelLimit(tasks, limit, [callback])\n\nThe same as parallel only the tasks are executed in parallel with a maximum of \"limit\" \ntasks executing at any time.\n\nNote that the tasks are not executed in batches, so there is no guarantee that \nthe first \"limit\" tasks will complete before any others are started.\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed \n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* limit - The maximum number of tasks to run at any time.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n---------------------------------------\n\n\n### whilst(test, fn, callback)\n\nRepeatedly call fn, while test returns true. Calls the callback when stopped,\nor an error occurs.\n\n__Arguments__\n\n* test() - synchronous truth test to perform before each execution of fn.\n* fn(callback) - A function to call each time the test passes. The function is\n passed a callback(err) which must be called once it has completed with an \n optional error argument.\n* callback(err) - A callback which is called after the test fails and repeated\n execution of fn has stopped.\n\n__Example__\n\n```js\nvar count = 0;\n\nasync.whilst(\n function () { return count < 5; },\n function (callback) {\n count++;\n setTimeout(callback, 1000);\n },\n function (err) {\n // 5 seconds have passed\n }\n);\n```\n\n---------------------------------------\n\n\n### doWhilst(fn, test, callback)\n\nThe post check version of whilst. To reflect the difference in the order of operations `test` and `fn` arguments are switched. `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.\n\n---------------------------------------\n\n\n### until(test, fn, callback)\n\nRepeatedly call fn, until test returns true. Calls the callback when stopped,\nor an error occurs.\n\nThe inverse of async.whilst.\n\n---------------------------------------\n\n\n### doUntil(fn, test, callback)\n\nLike doWhilst except the test is inverted. 
Note the argument ordering differs from `until`.\n\n---------------------------------------\n\n\n### forever(fn, callback)\n\nCalls the asynchronous function 'fn' repeatedly, in series, indefinitely.\nIf an error is passed to fn's callback then 'callback' is called with the\nerror, otherwise it will never be called.\n\n---------------------------------------\n\n\n### waterfall(tasks, [callback])\n\nRuns an array of functions in series, each passing their results to the next in\nthe array. However, if any of the functions pass an error to the callback, the\nnext function is not executed and the main callback is immediately called with\nthe error.\n\n__Arguments__\n\n* tasks - An array of functions to run, each function is passed a \n callback(err, result1, result2, ...) it must call on completion. The first\n argument is an error (which can be null) and any further arguments will be \n passed as arguments in order to the next task.\n* callback(err, [results]) - An optional callback to run once all the functions\n have completed. This will be passed the results of the last task's callback.\n\n\n\n__Example__\n\n```js\nasync.waterfall([\n function(callback){\n callback(null, 'one', 'two');\n },\n function(arg1, arg2, callback){\n callback(null, 'three');\n },\n function(arg1, callback){\n // arg1 now equals 'three'\n callback(null, 'done');\n }\n], function (err, result) {\n // result now equals 'done' \n});\n```\n\n---------------------------------------\n\n### compose(fn1, fn2...)\n\nCreates a function which is a composition of the passed asynchronous\nfunctions. Each function consumes the return value of the function that\nfollows. Composing functions f(), g() and h() would produce the result of\nf(g(h())), only this version uses callbacks to obtain the return values.\n\nEach function is executed with the `this` binding of the composed function.\n\n__Arguments__\n\n* functions... - the asynchronous functions to compose\n\n\n__Example__\n\n```js\nfunction add1(n, callback) {\n setTimeout(function () {\n callback(null, n + 1);\n }, 10);\n}\n\nfunction mul3(n, callback) {\n setTimeout(function () {\n callback(null, n * 3);\n }, 10);\n}\n\nvar add1mul3 = async.compose(mul3, add1);\n\nadd1mul3(4, function (err, result) {\n // result now equals 15\n});\n```\n\n---------------------------------------\n\n### applyEach(fns, args..., callback)\n\nApplies the provided arguments to each function in the array, calling the\ncallback after all functions have completed. If you only provide the first\nargument then it will return a function which lets you pass in the\narguments as if it were a single function call.\n\n__Arguments__\n\n* fns - the asynchronous functions to all call with the same arguments\n* args... - any number of separate arguments to pass to the function\n* callback - the final argument should be the callback, called when all\n functions have completed processing\n\n\n__Example__\n\n```js\nasync.applyEach([enableSearch, updateSchema], 'bucket', callback);\n\n// partial application example:\nasync.each(\n buckets,\n async.applyEach([enableSearch, updateSchema]),\n callback\n);\n```\n\n---------------------------------------\n\n\n### applyEachSeries(arr, iterator, callback)\n\nThe same as applyEach only the functions are applied in series.\n\n---------------------------------------\n\n\n### queue(worker, concurrency)\n\nCreates a queue object with the specified concurrency. Tasks added to the\nqueue will be processed in parallel (up to the concurrency limit). 
If all\nworkers are in progress, the task is queued until one is available. Once\na worker has completed a task, the task's callback is called.\n\n__Arguments__\n\n* worker(task, callback) - An asynchronous function for processing a queued\n task, which must call its callback(err) argument when finished, with an \n optional error as an argument.\n* concurrency - An integer for determining how many worker functions should be\n run in parallel.\n\n__Queue objects__\n\nThe queue object returned by this function has the following properties and\nmethods:\n\n* length() - a function returning the number of items waiting to be processed.\n* concurrency - an integer for determining how many worker functions should be\n run in parallel. This property can be changed after a queue is created to\n alter the concurrency on-the-fly.\n* push(task, [callback]) - add a new task to the queue, the callback is called\n once the worker has finished processing the task.\n instead of a single task, an array of tasks can be submitted. the respective callback is used for every task in the list.\n* unshift(task, [callback]) - add a new task to the front of the queue.\n* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued\n* empty - a callback that is called when the last item from the queue is given to a worker\n* drain - a callback that is called when the last item from the queue has returned from the worker\n\n__Example__\n\n```js\n// create a queue object with concurrency 2\n\nvar q = async.queue(function (task, callback) {\n console.log('hello ' + task.name);\n callback();\n}, 2);\n\n\n// assign a callback\nq.drain = function() {\n console.log('all items have been processed');\n}\n\n// add some items to the queue\n\nq.push({name: 'foo'}, function (err) {\n console.log('finished processing foo');\n});\nq.push({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the queue (batch-wise)\n\nq.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the front of the queue\n\nq.unshift({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n```\n\n---------------------------------------\n\n\n### cargo(worker, [payload])\n\nCreates a cargo object with the specified payload. Tasks added to the\ncargo will be processed altogether (up to the payload limit). If the\nworker is in progress, the task is queued until it is available. Once\nthe worker has completed some tasks, each callback of those tasks is called.\n\n__Arguments__\n\n* worker(tasks, callback) - An asynchronous function for processing an array of\n queued tasks, which must call its callback(err) argument when finished, with \n an optional error as an argument.\n* payload - An optional integer for determining how many tasks should be\n processed per round; if omitted, the default is unlimited.\n\n__Cargo objects__\n\nThe cargo object returned by this function has the following properties and\nmethods:\n\n* length() - a function returning the number of items waiting to be processed.\n* payload - an integer for determining how many tasks should be\n process per round. This property can be changed after a cargo is created to\n alter the payload on-the-fly.\n* push(task, [callback]) - add a new task to the queue, the callback is called\n once the worker has finished processing the task.\n instead of a single task, an array of tasks can be submitted. 
the respective callback is used for every task in the list.\n* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued\n* empty - a callback that is called when the last item from the queue is given to a worker\n* drain - a callback that is called when the last item from the queue has returned from the worker\n\n__Example__\n\n```js\n// create a cargo object with payload 2\n\nvar cargo = async.cargo(function (tasks, callback) {\n for(var i=0; i\n### auto(tasks, [callback])\n\nDetermines the best order for running functions based on their requirements.\nEach function can optionally depend on other functions being completed first,\nand each function is run as soon as its requirements are satisfied. If any of\nthe functions pass an error to their callback, that function will not complete\n(so any other functions depending on it will not run) and the main callback\nwill be called immediately with the error. Functions also receive an object\ncontaining the results of functions which have completed so far.\n\nNote, all functions are called with a results object as a second argument, \nso it is unsafe to pass functions in the tasks object which cannot handle the\nextra argument. For example, this snippet of code:\n\n```js\nasync.auto({\n readData: async.apply(fs.readFile, 'data.txt', 'utf-8');\n}, callback);\n```\n\nwill have the effect of calling readFile with the results object as the last\nargument, which will fail:\n\n```js\nfs.readFile('data.txt', 'utf-8', cb, {});\n```\n\nInstead, wrap the call to readFile in a function which does not forward the \nresults object:\n\n```js\nasync.auto({\n readData: function(cb, results){\n fs.readFile('data.txt', 'utf-8', cb);\n }\n}, callback);\n```\n\n__Arguments__\n\n* tasks - An object literal containing named functions or an array of\n requirements, with the function itself the last item in the array. The key\n used for each function or array is used when specifying requirements. The \n function receives two arguments: (1) a callback(err, result) which must be \n called when finished, passing an error (which can be null) and the result of \n the function's execution, and (2) a results object, containing the results of\n the previously executed functions.\n* callback(err, results) - An optional callback which is called when all the\n tasks have been completed. The callback will receive an error as an argument\n if any tasks pass an error to their callback. 
Results will always be passed\n\tbut if an error occurred, no other tasks will be performed, and the results\n\tobject will only contain partial results.\n \n\n__Example__\n\n```js\nasync.auto({\n get_data: function(callback){\n // async code to get some data\n },\n make_folder: function(callback){\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n },\n write_file: ['get_data', 'make_folder', function(callback){\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n callback(null, filename);\n }],\n email_link: ['write_file', function(callback, results){\n // once the file is written let's email a link to it...\n // results.write_file contains the filename returned by write_file.\n }]\n});\n```\n\nThis is a fairly trivial example, but to do this using the basic parallel and\nseries functions would look like this:\n\n```js\nasync.parallel([\n function(callback){\n // async code to get some data\n },\n function(callback){\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n }\n],\nfunction(err, results){\n async.series([\n function(callback){\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n },\n function(callback){\n // once the file is written let's email a link to it...\n }\n ]);\n});\n```\n\nFor a complicated series of async tasks using the auto function makes adding\nnew tasks much easier and makes the code more readable.\n\n\n---------------------------------------\n\n\n### iterator(tasks)\n\nCreates an iterator function which calls the next function in the array,\nreturning a continuation to call the next one after that. It's also possible to\n'peek' the next iterator by doing iterator.next().\n\nThis function is used internally by the async module but can be useful when\nyou want to manually control the flow of functions in series.\n\n__Arguments__\n\n* tasks - An array of functions to run.\n\n__Example__\n\n```js\nvar iterator = async.iterator([\n function(){ sys.p('one'); },\n function(){ sys.p('two'); },\n function(){ sys.p('three'); }\n]);\n\nnode> var iterator2 = iterator();\n'one'\nnode> var iterator3 = iterator2();\n'two'\nnode> iterator3();\n'three'\nnode> var nextfn = iterator2.next();\nnode> nextfn();\n'three'\n```\n\n---------------------------------------\n\n\n### apply(function, arguments..)\n\nCreates a continuation function with some arguments already applied, a useful\nshorthand when combined with other control flow functions. Any arguments\npassed to the returned function are added to the arguments originally passed\nto apply.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... 
- Any number of arguments to automatically apply when the\n continuation is called.\n\n__Example__\n\n```js\n// using apply\n\nasync.parallel([\n async.apply(fs.writeFile, 'testfile1', 'test1'),\n async.apply(fs.writeFile, 'testfile2', 'test2'),\n]);\n\n\n// the same process without using apply\n\nasync.parallel([\n function(callback){\n fs.writeFile('testfile1', 'test1', callback);\n },\n function(callback){\n fs.writeFile('testfile2', 'test2', callback);\n }\n]);\n```\n\nIt's possible to pass any number of additional arguments when calling the\ncontinuation:\n\n```js\nnode> var fn = async.apply(sys.puts, 'one');\nnode> fn('two', 'three');\none\ntwo\nthree\n```\n\n---------------------------------------\n\n\n### nextTick(callback)\n\nCalls the callback on a later loop around the event loop. In node.js this just\ncalls process.nextTick, in the browser it falls back to setImmediate(callback)\nif available, otherwise setTimeout(callback, 0), which means other higher priority\nevents may precede the execution of the callback.\n\nThis is used internally for browser-compatibility purposes.\n\n__Arguments__\n\n* callback - The function to call on a later loop around the event loop.\n\n__Example__\n\n```js\nvar call_order = [];\nasync.nextTick(function(){\n call_order.push('two');\n // call_order now equals ['one','two']\n});\ncall_order.push('one')\n```\n\n\n### times(n, callback)\n\nCalls the callback n times and accumulates results in the same manner\nyou would use with async.map.\n\n__Arguments__\n\n* n - The number of times to run the function.\n* callback - The function to call n times.\n\n__Example__\n\n```js\n// Pretend this is some complicated async factory\nvar createUser = function(id, callback) {\n callback(null, {\n id: 'user' + id\n })\n}\n// generate 5 users\nasync.times(5, function(n, next){\n createUser(n, function(err, user) {\n next(err, user)\n })\n}, function(err, users) {\n // we should now have 5 users\n});\n```\n\n\n### timesSeries(n, callback)\n\nThe same as times only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n\n## Utils\n\n\n### memoize(fn, [hasher])\n\nCaches the results of an async function. When creating a hash to store function\nresults against, the callback is omitted from the hash and an optional hash\nfunction can be used.\n\nThe cache of results is exposed as the `memo` property of the function returned\nby `memoize`.\n\n__Arguments__\n\n* fn - the function you to proxy and cache results from.\n* hasher - an optional function for generating a custom hash for storing\n results, it has all the arguments applied to it apart from the callback, and\n must be synchronous.\n\n__Example__\n\n```js\nvar slow_fn = function (name, callback) {\n // do something\n callback(null, result);\n};\nvar fn = async.memoize(slow_fn);\n\n// fn can now be used as if it were slow_fn\nfn('some name', function () {\n // callback\n});\n```\n\n\n### unmemoize(fn)\n\nUndoes a memoized function, reverting it to the original, unmemoized\nform. Comes handy in tests.\n\n__Arguments__\n\n* fn - the memoized function\n\n\n### log(function, arguments)\n\nLogs the result of an async function to the console. 
Only works in node.js or\nin browsers that support console.log and console.error (such as FF and Chrome).\nIf multiple arguments are returned from the async function, console.log is\ncalled on each argument in order.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, 'hello ' + name);\n }, 1000);\n};\n```\n```js\nnode> async.log(hello, 'world');\n'hello world'\n```\n\n---------------------------------------\n\n\n### dir(function, arguments)\n\nLogs the result of an async function to the console using console.dir to\ndisplay the properties of the resulting object. Only works in node.js or\nin browsers that support console.dir and console.error (such as FF and Chrome).\nIf multiple arguments are returned from the async function, console.dir is\ncalled on each argument in order.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, {hello: name});\n }, 1000);\n};\n```\n```js\nnode> async.dir(hello, 'world');\n{hello: 'world'}\n```\n\n---------------------------------------\n\n\n### noConflict()\n\nChanges the value of async back to its original value, returning a reference to the\nasync object.\n", "readmeFilename": "README.md", "_id": "async@0.2.9", - "dist": { - "shasum": "df63060fbf3d33286a76aaf6d55a2986d9ff8619" - }, - "_from": "async@~0.2.9", - "_resolved": "https://registry.npmjs.org/async/-/async-0.2.9.tgz" + "_from": "async@~0.2.9" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json 2013-10-25 01:44:52.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -27,9 +27,5 @@ "url": "https://github.com/felixge/node-delayed-stream/issues" }, "_id": "delayed-stream@0.0.5", - "dist": { - "shasum": "baee50472834ab3c3ac481905d546834a548b8fc" - }, - "_from": "delayed-stream@0.0.5", - "_resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" + "_from": "delayed-stream@0.0.5" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json 2013-10-25 01:44:51.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -28,9 +28,5 @@ "url": "https://github.com/felixge/node-combined-stream/issues" }, "_id": "combined-stream@0.0.4", - "dist": { - "shasum": "5c38e18675140e6f8aeac1dfc6fb350f16ac49fc" - }, - "_from": "combined-stream@~0.0.4", - "_resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.4.tgz" + "_from": "combined-stream@~0.0.4" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/form-data/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/form-data/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -41,9 +41,5 @@ "url": "https://github.com/felixge/node-form-data/issues" }, "_id": "form-data@0.1.2", - "dist": { - "shasum": "a2776685729e771eb60b3ee3d290122f78c97da2" - }, - "_from": "form-data@~0.1.0", - "_resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.2.tgz" + "_from": "form-data@~0.1.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/boom/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/boom/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/boom/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/boom/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -42,9 +42,5 @@ "url": "https://github.com/spumko/boom/issues" }, "_id": "boom@0.4.2", - "dist": { - "shasum": "7a636e9ded4efcefb19cef4947a3c67dfaee911b" - }, - "_from": "boom@0.4.x", - "_resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz" + "_from": "boom@0.4.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -43,9 +43,5 @@ "url": "https://github.com/hueniverse/cryptiles/issues" }, "_id": "cryptiles@0.2.2", - "dist": { - "shasum": "ed91ff1f17ad13d3748288594f8a48a0d26f325c" - }, - "_from": "cryptiles@0.2.x", - "_resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz" + "_from": "cryptiles@0.2.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/hoek/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/hoek/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/hoek/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/hoek/package.json 2013-10-25 23:13:15.000000000 +0000 @@ -44,9 +44,5 @@ "url": "https://github.com/spumko/hoek/issues" }, "_id": "hoek@0.9.1", - "dist": { - "shasum": "3d322462badf07716ea7eb85baf88079cddce505" - }, - "_from": "hoek@0.9.x", - "_resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz" + "_from": "hoek@0.9.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/sntp/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/sntp/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/node_modules/sntp/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/node_modules/sntp/package.json 
2013-10-25 23:13:14.000000000 +0000 @@ -43,9 +43,5 @@ "url": "https://github.com/hueniverse/sntp/issues" }, "_id": "sntp@0.2.4", - "dist": { - "shasum": "fb885f18b0f3aad189f824862536bceeec750900" - }, - "_from": "sntp@0.2.x", - "_resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz" + "_from": "sntp@0.2.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/hawk/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/hawk/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -48,9 +48,5 @@ "url": "https://github.com/hueniverse/hawk/issues" }, "_id": "hawk@1.0.0", - "dist": { - "shasum": "b90bb169807285411da7ffcb8dd2598502d3b52d" - }, - "_from": "hawk@~1.0.0", - "_resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz" + "_from": "hawk@~1.0.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -38,9 +38,5 @@ "url": "https://github.com/mcavage/node-asn1/issues" }, "_id": "asn1@0.1.11", - "dist": { - "shasum": "007e1b8d6667c4cea50ecfbd9aac8a08f599f57d" - }, - "_from": "asn1@0.1.11", - "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" + "_from": "asn1@0.1.11" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json 2013-10-25 23:13:14.000000000 +0000 @@ -16,9 +16,5 @@ "readme": "# node-assert-plus\n\nThis library is a super small wrapper over node's assert module that has two\nthings: (1) the ability to disable assertions with the environment variable\nNODE_NDEBUG, and (2) some API wrappers for argument testing. Like\n`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks\nlike this:\n\n var assert = require('assert-plus');\n\n function fooAccount(options, callback) {\n\t assert.object(options, 'options');\n\t\tassert.number(options.id, 'options.id');\n\t\tassert.bool(options.isManager, 'options.isManager');\n\t\tassert.string(options.name, 'options.name');\n\t\tassert.arrayOfString(options.email, 'options.email');\n\t\tassert.func(callback, 'callback');\n\n // Do stuff\n\t\tcallback(null, {});\n }\n\n# API\n\nAll methods that *aren't* part of node's core assert API are simply assumed to\ntake an argument, and then a string 'name' that's not a message; `AssertionError`\nwill be thrown if the assertion fails with a message like:\n\n AssertionError: foo (string) is required\n\tat test (/home/mark/work/foo/foo.js:3:9)\n\tat Object.<anonymous> 
(/home/mark/work/foo/foo.js:15:1)\n\tat Module._compile (module.js:446:26)\n\tat Object..js (module.js:464:10)\n\tat Module.load (module.js:353:31)\n\tat Function._load (module.js:311:12)\n\tat Array.0 (module.js:484:10)\n\tat EventEmitter._tickCallback (node.js:190:38)\n\nfrom:\n\n function test(foo) {\n\t assert.string(foo, 'foo');\n }\n\nThere you go. You can check that arrays are of a homogenous type with `Arrayof$Type`:\n\n function test(foo) {\n\t assert.arrayOfString(foo, 'foo');\n }\n\nYou can assert IFF an argument is not `undefined` (i.e., an optional arg):\n\n assert.optionalString(foo, 'foo');\n\nLastly, you can opt-out of assertion checking altogether by setting the\nenvironment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have\nlots of assertions, and don't want to pay `typeof ()` taxes to v8 in\nproduction.\n\nThe complete list of APIs is:\n\n* assert.bool\n* assert.buffer\n* assert.func\n* assert.number\n* assert.object\n* assert.string\n* assert.arrayOfBool\n* assert.arrayOfFunc\n* assert.arrayOfNumber\n* assert.arrayOfObject\n* assert.arrayOfString\n* assert.optionalBool\n* assert.optionalBuffer\n* assert.optionalFunc\n* assert.optionalNumber\n* assert.optionalObject\n* assert.optionalString\n* assert.optionalArrayOfBool\n* assert.optionalArrayOfFunc\n* assert.optionalArrayOfNumber\n* assert.optionalArrayOfObject\n* assert.optionalArrayOfString\n* assert.AssertionError\n* assert.fail\n* assert.ok\n* assert.equal\n* assert.notEqual\n* assert.deepEqual\n* assert.notDeepEqual\n* assert.strictEqual\n* assert.notStrictEqual\n* assert.throws\n* assert.doesNotThrow\n* assert.ifError\n\n# Installation\n\n npm install assert-plus\n\n## License\n\nThe MIT License (MIT)\nCopyright (c) 2012 Mark Cavage\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n## Bugs\n\nSee .\n", "readmeFilename": "README.md", "_id": "assert-plus@0.1.2", - "dist": { - "shasum": "48cf59454db6d5617d855c23049099480ae07f17" - }, - "_from": "assert-plus@0.1.2", - "_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.2.tgz" + "_from": "assert-plus@0.1.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json 2013-10-25 23:13:15.000000000 +0000 @@ -14,9 +14,5 @@ "readme": "Node-CType is a way to read and write binary data in a structured and easy to use\nformat. Its name comes from the C header file.\n\nTo get started, simply clone the repository or use npm to install it. Once it is\nthere, simply require it.\n\ngit clone git://github.com/rmustacc/node-ctype\nnpm install ctype\nvar mod_ctype = require('ctype')\n\n\nThere are two APIs that you can use, depending on what abstraction you'd like.\nThe low level API lets you read and write individual integers and floats from\nbuffers. The higher level API lets you read and write structures of these. To\nillustrate this, let's look at how we would read and write a binary\nencoded x,y point.\n\nIn C we would define this structure as follows:\n\ntypedef struct point {\n\tuint16_t\tp_x;\n\tuint16_t\tp_y;\n} point_t;\n\nTo read a binary encoded point from a Buffer, we first need to create a CType\nparser (where we specify the endian and other options) and add the typedef.\n\nvar parser = new mod_ctype.Parser({ endian: 'big' });\nparser.typedef('point_t', [\n\t{ x: { type: 'uint16_t' } },\n\t{ y: { type: 'uint16_t' } }\n]);\n\nFrom here, given a buffer buf and an offset into it, we can read a point.\n\nvar out = parser.readData([ { point: { type: 'point_t' } } ], buf, 0);\nconsole.log(out);\n{ point: { x: 23, y: 42 } }\n\nAnother way to get the same information would be to use the low level methods.\nNote that these require you to manually deal with the offset. Here's how we'd\nget the same values of x and y from the buffer.\n\nvar x = mod_ctype.ruint16(buf, 'big', 0);\nvar y = mod_ctype.ruint16(buf, 'big', 2);\nconsole.log(x + ', ' + y);\n23, 42\n\nThe true power of this API comes from the ability to define and nest typedefs,\njust as you would in C. The following types are defined by default.\nNote that they return a Number, unless indicated otherwise.\n\n * int8_t\n * int16_t\n * int32_t\n * int64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * uint8_t\n * uint16_t\n * uint32_t\n * uint64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * float\n * double\n * char (either returns a buffer with that character or a uint8_t)\n * char[] (returns an object with the buffer and the number of characters read which is either the total amount requested or until the first 0)\n\n\nctf2json integration:\n\nNode-CType supports consuming the output of ctf2json. 
Once you read in a JSON file,\nall you have to do to add all the definitions it contains is:\n\nvar data, parser;\ndata = JSON.parse(parsedJSONData);\nparser = mod_ctype.parseCTF(data, { endian: 'big' });\n\nFor more documentation, see the file README.old. Full documentation is in the\nprocess of being rewritten as a series of manual pages which will be available\nin the repository and online for viewing.\n\nTo read the ctio manual page simple run, from the root of the workspace:\n\nman -Mman -s 3ctype ctio\n", "readmeFilename": "README", "_id": "ctype@0.5.2", - "dist": { - "shasum": "a228fd6c5ee38f525010c240d8da14209e0c25b2" - }, - "_from": "ctype@0.5.2", - "_resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.2.tgz" + "_from": "ctype@0.5.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/http-signature/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/http-signature/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -31,9 +31,5 @@ "url": "https://github.com/joyent/node-http-signature/issues" }, "_id": "http-signature@0.10.0", - "dist": { - "shasum": "624c92d64e2b86416c8be76681dec50f5e5bea0b" - }, - "_from": "http-signature@~0.10.0", - "_resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.0.tgz" + "_from": "http-signature@~0.10.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/json-stringify-safe/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/json-stringify-safe/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/json-stringify-safe/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/json-stringify-safe/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -28,9 +28,5 @@ "url": "https://github.com/isaacs/json-stringify-safe/issues" }, "_id": "json-stringify-safe@5.0.0", - "dist": { - "shasum": "42de0e4afbc779e984da93cb0fafae2e647f0223" - }, - "_from": "json-stringify-safe@~5.0.0", - "_resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz" + "_from": "json-stringify-safe@~5.0.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/mime/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/mime/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/mime/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/mime/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -31,9 +31,5 @@ "url": "https://github.com/broofa/node-mime/issues" }, "_id": "mime@1.2.11", - "dist": { - "shasum": "2e68433cd8b933cc360926e916e08e705e3bf1ae" - }, - "_from": "mime@~1.2.9", - "_resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + "_from": "mime@~1.2.9" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/node-uuid/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/node-uuid/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/node-uuid/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/node-uuid/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -30,9 +30,5 @@ "url": "https://github.com/broofa/node-uuid/issues" }, "_id": "node-uuid@1.4.1", - 
"dist": { - "shasum": "ebcc5c713c080b466142b080f0debc063b42e125" - }, - "_from": "node-uuid@~1.4.0", - "_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.1.tgz" + "_from": "node-uuid@~1.4.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/oauth-sign/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/oauth-sign/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/oauth-sign/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/oauth-sign/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -26,9 +26,5 @@ "url": "https://github.com/mikeal/oauth-sign/issues" }, "_id": "oauth-sign@0.3.0", - "dist": { - "shasum": "ede953b25907f6c2353888b62a2f8a0297f69f1b" - }, - "_from": "oauth-sign@~0.3.0", - "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz" + "_from": "oauth-sign@~0.3.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/qs/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/qs/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/qs/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/qs/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -33,9 +33,5 @@ "url": "https://github.com/visionmedia/node-querystring/issues" }, "_id": "qs@0.6.5", - "dist": { - "shasum": "5172fe6969e83a8bf51d3c72aa973a0dcafefe2e" - }, - "_from": "qs@~0.6.0", - "_resolved": "https://registry.npmjs.org/qs/-/qs-0.6.5.tgz" + "_from": "qs@~0.6.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/node_modules/tunnel-agent/package.json tilemill-0.10.1~saucy10/node_modules/request/node_modules/tunnel-agent/package.json --- tilemill-0.10.1~saucy9/node_modules/request/node_modules/tunnel-agent/package.json 2013-10-25 01:44:47.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/node_modules/tunnel-agent/package.json 2013-10-25 23:13:12.000000000 +0000 @@ -23,9 +23,5 @@ "url": "https://github.com/mikeal/tunnel-agent/issues" }, "_id": "tunnel-agent@0.3.0", - "dist": { - "shasum": "9327e5deaa41ffba0b9279555829afe6758aea9e" - }, - "_from": "tunnel-agent@~0.3.0", - "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.3.0.tgz" + "_from": "tunnel-agent@~0.3.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/request/package.json tilemill-0.10.1~saucy10/node_modules/request/package.json --- tilemill-0.10.1~saucy9/node_modules/request/package.json 2013-10-25 01:44:43.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/request/package.json 2013-10-25 23:13:11.000000000 +0000 @@ -43,9 +43,5 @@ "readme": "# Request -- Simplified HTTP request method\n\n## Install\n\n
  npm install request\n\nOr from source:\n\n  git clone git://github.com/mikeal/request.git\n  cd request\n  npm link
\n\n## Super simple to use\n\nRequest is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.\n\n```javascript\nvar request = require('request');\nrequest('http://www.google.com', function (error, response, body) {\n if (!error && response.statusCode == 200) {\n console.log(body) // Print the google web page.\n }\n})\n```\n\n## Streaming\n\nYou can stream any response to a file stream.\n\n```javascript\nrequest('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))\n```\n\nYou can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types, in this case `application/json`, and use the proper content-type in the PUT request if one is not already provided in the headers.\n\n```javascript\nfs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))\n```\n\nRequest can also pipe to itself. When doing so the content-type and content-length will be preserved in the PUT headers.\n\n```javascript\nrequest.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))\n```\n\nNow let's get fancy.\n\n```javascript\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n if (req.method === 'PUT') {\n req.pipe(request.put('http://mysite.com/doodle.png'))\n } else if (req.method === 'GET' || req.method === 'HEAD') {\n request.get('http://mysite.com/doodle.png').pipe(resp)\n } \n }\n})\n```\n\nYou can also pipe() from an http.ServerRequest instance and to an http.ServerResponse instance. The HTTP method and headers will be sent as well as the entity-body data. Which means that, if you don't really care about security, you can do:\n\n```javascript\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n var x = request('http://mysite.com/doodle.png')\n req.pipe(x)\n x.pipe(resp)\n }\n})\n```\n\nAnd since pipe() returns the destination stream in node 0.5.x you can do one line proxying :)\n\n```javascript\nreq.pipe(request('http://mysite.com/doodle.png')).pipe(resp)\n```\n\nAlso, none of this new functionality conflicts with request's previous features, it just expands them.\n\n```javascript\nvar r = request.defaults({'proxy':'http://localproxy.com'})\n\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n r.get('http://google.com/doodle.png').pipe(resp)\n }\n})\n```\nYou can still use intermediate proxies, the requests will still follow HTTP forwards, etc.\n\n## Forms\n\n`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API.\n\nUrl encoded forms are simple:\n\n```javascript\nrequest.post('http://service.com/upload', {form:{key:'value'}})\n// or\nrequest.post('http://service.com/upload').form({key:'value'})\n```\n\nFor `multipart/form-data` we use the [form-data](https://github.com/felixge/node-form-data) library by [@felixge](https://github.com/felixge). 
You don't need to worry about piping the form object or setting the headers, `request` will handle that for you.\n\n```javascript\nvar r = request.post('http://service.com/upload')\nvar form = r.form()\nform.append('my_field', 'my_value')\nform.append('my_buffer', new Buffer([1, 2, 3]))\nform.append('my_file', fs.createReadStream(path.join(__dirname, 'doodle.png'))\nform.append('remote_file', request('http://google.com/doodle.png'))\n```\n\n## HTTP Authentication\n\n```javascript\nrequest.get('http://some.server.com/').auth('username', 'password', false);\n// or\nrequest.get('http://some.server.com/', {\n 'auth': {\n 'user': 'username',\n 'pass': 'password',\n 'sendImmediately': false\n }\n});\n```\n\nIf passed as an option, `auth` should be a hash containing values `user` || `username`, `password` || `pass`, and `sendImmediately` (optional). The method form takes parameters `auth(username, password, sendImmediately)`.\n\n`sendImmediately` defaults to true, which will cause a basic authentication header to be sent. If `sendImmediately` is `false`, then `request` will retry with a proper authentication header after receiving a 401 response from the server (which must contain a `WWW-Authenticate` header indicating the required authentication method).\n\nDigest authentication is supported, but it only works with `sendImmediately` set to `false` (otherwise `request` will send basic authentication on the initial request, which will probably cause the request to fail).\n\n## OAuth Signing\n\n```javascript\n// Twitter OAuth\nvar qs = require('querystring')\n , oauth =\n { callback: 'http://mysite.com/callback/'\n , consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n }\n , url = 'https://api.twitter.com/oauth/request_token'\n ;\nrequest.post({url:url, oauth:oauth}, function (e, r, body) {\n // Ideally, you would take the body in the response\n // and construct a URL that a user clicks on (like a sign in button).\n // The verifier is only available in the response after a user has \n // verified with twitter that they are authorizing your app.\n var access_token = qs.parse(body)\n , oauth = \n { consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n , token: access_token.oauth_token\n , verifier: access_token.oauth_verifier\n }\n , url = 'https://api.twitter.com/oauth/access_token'\n ;\n request.post({url:url, oauth:oauth}, function (e, r, body) {\n var perm_token = qs.parse(body)\n , oauth = \n { consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n , token: perm_token.oauth_token\n , token_secret: perm_token.oauth_token_secret\n }\n , url = 'https://api.twitter.com/1/users/show.json?'\n , params = \n { screen_name: perm_token.screen_name\n , user_id: perm_token.user_id\n }\n ;\n url += qs.stringify(params)\n request.get({url:url, oauth:oauth, json:true}, function (e, r, user) {\n console.log(user)\n })\n })\n})\n```\n\n\n\n### request(options, callback)\n\nThe first argument can be either a url or an options object. The only required option is uri, all others are optional.\n\n* `uri` || `url` - fully qualified uri or a parsed url object from url.parse()\n* `qs` - object containing querystring values to be appended to the uri\n* `method` - http method, defaults to GET\n* `headers` - http headers, defaults to {}\n* `body` - entity body for PATCH, POST and PUT requests. 
Must be buffer or string.\n* `form` - when passed an object this will set `body` but to a querystring representation of value and adds `Content-type: application/x-www-form-urlencoded; charset=utf-8` header. When passed no option a FormData instance is returned that will be piped to request.\n* `auth` - A hash containing values `user` || `username`, `password` || `pass`, and `sendImmediately` (optional). See documentation above.\n* `json` - sets `body` but to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as json.\n* `multipart` - (experimental) array of objects which contains their own headers and `body` attribute. Sends `multipart/related` request. See example below.\n* `followRedirect` - follow HTTP 3xx responses as redirects. defaults to true.\n* `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects. defaults to false.\n* `maxRedirects` - the maximum number of redirects to follow, defaults to 10.\n* `encoding` - Encoding to be used on `setEncoding` of response data. If set to `null`, the body is returned as a Buffer.\n* `pool` - A hash object containing the agents for these requests. If omitted this request will use the global pool which is set to node's default maxSockets.\n* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.\n* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request\t\n* `proxy` - An HTTP proxy to be used. Support proxy Auth with Basic Auth the same way it's supported with the `url` parameter by embedding the auth info in the uri.\n* `oauth` - Options for OAuth HMAC-SHA1 signing, see documentation above.\n* `hawk` - Options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example).\n* `strictSSL` - Set to `true` to require that SSL certificates be valid. Note: to use your own certificate authority, you need to specify an agent that was created with that ca as an option.\n* `jar` - Set to `true` if you want cookies to be remembered for future use, or define your custom cookie jar (see examples section)\n* `aws` - object containing aws signing information, should have the properties `key` and `secret` as well as `bucket` unless you're specifying your bucket as part of the path, or you are making a request that doesn't use a bucket (i.e. GET Services)\n* `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options.\n* `localAddress` - Local interface to bind for network connections.\n\n\nThe callback argument gets 3 arguments. The first is an error when applicable (usually from the http.Client option not the http.ClientRequest object). The second is an http.ClientResponse object. 
The third is the response body String or Buffer.\n\n## Convenience methods\n\nThere are also shorthand methods for different HTTP METHODs and some other conveniences.\n\n### request.defaults(options) \n \nThis method returns a wrapper around the normal request API that defaults to whatever options you pass in to it.\n\n### request.put\n\nSame as request() but defaults to `method: \"PUT\"`.\n\n```javascript\nrequest.put(url)\n```\n\n### request.patch\n\nSame as request() but defaults to `method: \"PATCH\"`.\n\n```javascript\nrequest.patch(url)\n```\n\n### request.post\n\nSame as request() but defaults to `method: \"POST\"`.\n\n```javascript\nrequest.post(url)\n```\n\n### request.head\n\nSame as request() but defaults to `method: \"HEAD\"`.\n\n```javascript\nrequest.head(url)\n```\n\n### request.del\n\nSame as request() but defaults to `method: \"DELETE\"`.\n\n```javascript\nrequest.del(url)\n```\n\n### request.get\n\nAlias to normal request method for uniformity.\n\n```javascript\nrequest.get(url)\n```\n### request.cookie\n\nFunction that creates a new cookie.\n\n```javascript\nrequest.cookie('cookie_string_here')\n```\n### request.jar\n\nFunction that creates a new cookie jar.\n\n```javascript\nrequest.jar()\n```\n\n\n## Examples:\n\n```javascript\n var request = require('request')\n , rand = Math.floor(Math.random()*100000000).toString()\n ;\n request(\n { method: 'PUT'\n , uri: 'http://mikeal.iriscouch.com/testjs/' + rand\n , multipart: \n [ { 'content-type': 'application/json'\n , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})\n }\n , { body: 'I am an attachment' }\n ] \n }\n , function (error, response, body) {\n if(response.statusCode == 201){\n console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)\n } else {\n console.log('error: '+ response.statusCode)\n console.log(body)\n }\n }\n )\n```\nCookies are disabled by default (else, they would be used in subsequent requests). 
To enable cookies, set jar to true (either in defaults or in the options sent).\n\n```javascript\nvar request = request.defaults({jar: true})\nrequest('http://www.google.com', function () {\n request('http://images.google.com')\n})\n```\n\nIf you want to use a custom cookie jar (instead of letting request use its own global cookie jar) you do so by setting the jar default or by specifying it as an option:\n\n```javascript\nvar j = request.jar()\nvar request = request.defaults({jar:j})\nrequest('http://www.google.com', function () {\n request('http://images.google.com')\n})\n```\nOR\n\n```javascript\nvar j = request.jar()\nvar cookie = request.cookie('your_cookie_here')\nj.add(cookie)\nrequest({url: 'http://www.google.com', jar: j}, function () {\n request('http://images.google.com')\n})\n```\n", "readmeFilename": "README.md", "_id": "request@2.25.0", - "dist": { - "shasum": "ee1bd08f66c332ad8e65eb694cd71e4111e4c1f2" - }, - "_from": "request@~2.25.0", - "_resolved": "https://registry.npmjs.org/request/-/request-2.25.0.tgz" + "_from": "request@~2.25.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sax/package.json tilemill-0.10.1~saucy10/node_modules/sax/package.json --- tilemill-0.10.1~saucy9/node_modules/sax/package.json 2013-10-25 01:44:43.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sax/package.json 2013-10-25 23:13:13.000000000 +0000 @@ -60,9 +60,5 @@ "url": "https://github.com/isaacs/sax-js/issues" }, "_id": "sax@0.5.5", - "dist": { - "shasum": "43058505069bd78d0e96ef56f8a3517edf9dfbd1" - }, - "_from": "sax@0.5.x", - "_resolved": "https://registry.npmjs.org/sax/-/sax-0.5.5.tgz" + "_from": "sax@0.5.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/semver/package.json tilemill-0.10.1~saucy10/node_modules/semver/package.json --- tilemill-0.10.1~saucy9/node_modules/semver/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/semver/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -21,6 +21,14 @@ "semver": "./bin/semver" }, "readme": "semver(1) -- The semantic versioner for npm\n===========================================\n\n## Usage\n\n $ npm install semver\n\n semver.valid('1.2.3') // true\n semver.valid('a.b.c') // false\n semver.clean(' =v1.2.3 ') // '1.2.3'\n semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true\n semver.gt('1.2.3', '9.8.7') // false\n semver.lt('1.2.3', '9.8.7') // true\n\nAs a command-line utility:\n\n $ semver -h\n\n Usage: semver -v <version> [-r <range>]\n Test if version(s) satisfy the supplied range(s),\n and sort them.\n\n Multiple versions or ranges may be supplied.\n\n Program exits successfully if any valid version satisfies\n all supplied ranges, and prints all satisfying versions.\n\n If no versions are valid, or ranges are not satisfied,\n then exits failure.\n\n Versions are printed in ascending order, so supplying\n multiple versions to the utility will just sort them.\n\n## Versions\n\nA version is the following things, in this order:\n\n* a number (Major)\n* a period\n* a number (minor)\n* a period\n* a number (patch)\n* OPTIONAL: a hyphen, followed by a number (build)\n* OPTIONAL: a collection of pretty much any non-whitespace characters\n (tag)\n\nA leading `\"=\"` or `\"v\"` character is stripped off and ignored.\n\n## Comparisons\n\nThe ordering of versions is done using the following algorithm, given\ntwo versions and asked to find the greater of the two:\n\n* If the majors are numerically different, then take the one\n with a bigger major number. 
`2.3.4 > 1.3.4`\n* If the minors are numerically different, then take the one\n with the bigger minor number. `2.3.4 > 2.2.4`\n* If the patches are numerically different, then take the one with the\n bigger patch number. `2.3.4 > 2.3.3`\n* If only one of them has a build number, then take the one with the\n build number. `2.3.4-0 > 2.3.4`\n* If they both have build numbers, and the build numbers are numerically\n different, then take the one with the bigger build number.\n `2.3.4-10 > 2.3.4-9`\n* If only one of them has a tag, then take the one without the tag.\n `2.3.4 > 2.3.4-beta`\n* If they both have tags, then take the one with the lexicographically\n larger tag. `2.3.4-beta > 2.3.4-alpha`\n* At this point, they're equal.\n\n## Ranges\n\nThe following range styles are supported:\n\n* `>1.2.3` Greater than a specific version.\n* `<1.2.3` Less than\n* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`\n* `~1.2.3` := `>=1.2.3 <1.3.0`\n* `~1.2` := `>=1.2.0 <2.0.0`\n* `~1` := `>=1.0.0 <2.0.0`\n* `1.2.x` := `>=1.2.0 <1.3.0`\n* `1.x` := `>=1.0.0 <2.0.0`\n\nRanges can be joined with either a space (which implies \"and\") or a\n`||` (which implies \"or\").\n\n## Functions\n\n* valid(v): Return the parsed version, or null if it's not valid.\n* inc(v, release): Return the version incremented by the release type\n (major, minor, patch, or build), or null if it's not valid.\n\n### Comparison\n\n* gt(v1, v2): `v1 > v2`\n* gte(v1, v2): `v1 >= v2`\n* lt(v1, v2): `v1 < v2`\n* lte(v1, v2): `v1 <= v2`\n* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent,\n even if they're not the exact same string. You already know how to\n compare strings.\n* neq(v1, v2): `v1 != v2` The opposite of eq.\n* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call\n the corresponding function above. `\"===\"` and `\"!==\"` do simple\n string comparison, but are included for completeness. Throws if an\n invalid comparison string is provided.\n* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if\n v2 is greater. Sorts in ascending order if passed to Array.sort().\n* rcompare(v1, v2): The reverse of compare. 
Sorts an array of versions\n in descending order when passed to Array.sort().\n\n\n### Ranges\n\n* validRange(range): Return the valid range or null if it's not valid\n* satisfies(version, range): Return true if the version satisfies the\n range.\n* maxSatisfying(versions, range): Return the highest version in the list\n that satisfies the range, or null if none of them do.\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/isaacs/node-semver/issues" + }, "_id": "semver@1.0.14", - "_from": "semver@1.0.x" + "dist": { + "shasum": "41c9efd7ee8801c95a2786f8028b775dad8b4bdc" + }, + "_from": "semver@1.0.x", + "_resolved": "https://registry.npmjs.org/semver/-/semver-1.0.14.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sphericalmercator/package.json tilemill-0.10.1~saucy10/node_modules/sphericalmercator/package.json --- tilemill-0.10.1~saucy9/node_modules/sphericalmercator/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sphericalmercator/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -29,6 +29,10 @@ "test": "mocha" }, "readme": "[![Build Status](https://secure.travis-ci.org/mapbox/node-sphericalmercator.png?branch=master)](http://travis-ci.org/mapbox/node-sphericalmercator)\n\n`node-sphericalmercator` provides projection math for converting between\nmercator meters, screen pixels (of 256x256 or configurable-size tiles), and\nlatitude/longitude.\n\nCompatible with nodejs packages and in-browser.\n\n```javascript\n// By default, precomputes up to z30\nvar myProjectionObject = new SphericalMercator({\n size: 256\n});\n```\n", + "readmeFilename": "README.md", + "bugs": { + "url": "https://github.com/mapbox/node-sphericalmercator/issues" + }, "_id": "sphericalmercator@1.0.2", "_from": "sphericalmercator@~1.0.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/progress/package.json tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/progress/package.json --- tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/progress/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/progress/package.json 2013-10-25 23:13:18.000000000 +0000 @@ -18,9 +18,5 @@ "readme": "# node-progress\n\n Flexible ascii progress bar\n\n## Installation\n\n npm install progress\n\n## Usage\n\n First we create a `ProgressBar`, giving it a format string\n as well as the `total`, telling the progress bar when it will\n be considered complete. After that all we need to do is `tick()` appropriately. \n\n```javascript\nvar ProgressBar = require('progress');\n\nvar bar = new ProgressBar(':bar', { total: 10 });\nvar timer = setInterval(function(){\nbar.tick();\n if (bar.complete) {\n console.log('\\ncomplete\\n');\n clearInterval(timer);\n }\n}, 100);\n```\n\n## Options:\n\n - `total` total number of ticks to complete\n - `stream` the output stream defaulting to stdout\n - `complete` completion character defaulting to \"=\"\n - `incomplete` incomplete character defaulting to \"-\"\n\n## Tokens:\n\n - `:bar` the progress bar itself\n - `:current` current tick number\n - `:total` total ticks\n - `:elapsed` time elapsed in seconds\n - `:percent` completion percentage\n - `:eta` estimated completion time in seconds\n\n## Examples\n\n### Download\n\n In our download example each tick has a variable influence, so we pass the chunk length which adjusts the progress bar appropriately relative to the total length. 
\n\n```javascript\nvar ProgressBar = require('../')\n , https = require('https');\n\nvar req = https.request({\n host: 'download.github.com'\n , port: 443\n , path: '/visionmedia-node-jscoverage-0d4608a.zip'\n});\n\nreq.on('response', function(res){\n var len = parseInt(res.headers['content-length'], 10);\n\n console.log();\n var bar = new ProgressBar(' downloading [:bar] :percent :etas', {\n complete: '='\n , incomplete: ' '\n , width: 20\n , total: len\n });\n\n res.on('data', function(chunk){\n bar.tick(chunk.length);\n });\n\n res.on('end', function(){\n console.log('\\n');\n });\n});\n\nreq.end();\n```\n\n The code above will generate a progress bar that looks like this:\n\n```\ndownloading [===== ] 29% 3.7s\n```\n\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk `<tj@vision-media.ca>`\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "readmeFilename": "Readme.md", "_id": "progress@1.0.1", - "dist": { - "shasum": "1b2993df7059184f74c6b93d1108c1d95e6fe601" - }, - "_from": "progress@~1.0.1", - "_resolved": "https://registry.npmjs.org/progress/-/progress-1.0.1.tgz" + "_from": "progress@~1.0.1" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/node_modules/keypress/package.json tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/node_modules/keypress/package.json --- tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/node_modules/keypress/package.json 2013-10-25 01:44:51.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/node_modules/keypress/package.json 2013-10-25 23:13:19.000000000 +0000 @@ -27,9 +27,5 @@ "url": "https://github.com/TooTallNate/keypress/issues" }, "_id": "keypress@0.1.0", - "dist": { - "shasum": "583baf5c6859c16a70a3fb79ea078efabe447297" - }, - "_from": "keypress@0.1.x", - "_resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz" + "_from": "keypress@0.1.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/package.json tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/package.json --- tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/package.json 2013-10-25 01:44:50.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/tar.gz/node_modules/commander/package.json 2013-10-25 23:13:19.000000000 +0000 @@ -36,9 +36,5 @@ "url": 
"https://github.com/visionmedia/commander.js/issues" }, "_id": "commander@1.1.1", - "dist": { - "shasum": "b6166f1985694f9c2b1e2f4ba037f4492b9c1487" - }, - "_from": "commander@1.1.x", - "_resolved": "https://registry.npmjs.org/commander/-/commander-1.1.1.tgz" + "_from": "commander@1.1.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/tar.gz/package.json tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/tar.gz/package.json --- tilemill-0.10.1~saucy9/node_modules/sqlite3/node_modules/tar.gz/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sqlite3/node_modules/tar.gz/package.json 2013-10-25 23:13:18.000000000 +0000 @@ -57,9 +57,5 @@ "readme": "# **node-tar.gz**\nNative gzip compression and decompression utility for Node.js.\n\n### **Installation**\n\nFor simple installation:\n\n npm install tar.gz\n\nIf you want to use the `targz` command line:\n\n npm intall -g tar.gz\n\n### **Usage**\n\nAt the moment this package can only compress a folder and everything that\nis inside it. To compress something is easy:\n\n var targz = require('tar.gz');\n var compress = new targz().compress('/path/to/compress', '/path/to/store.tar.gz', function(err){\n if(err)\n console.log(err);\n\n console.log('The compression has ended!');\n });\n\nWith the same easy you can extract a gziped file:\n\n var targz = require('tar.gz');\n var compress = new targz().extract('/path/to/stored.tar.gz', '/path/to/extract', function(err){\n if(err)\n console.log(err);\n\n console.log('The extraction has ended!');\n });\n\nYou can pass some configuration parameters to the constructor before compress:\n\n var targz = require('tar.gz');\n\n var level = 6 //the compression level from 0-9, default: 6\n var memLevel = 6 //the memory allocation level from 1-9, default: 6\n var proprietary = true //to include or not proprietary headers, default: true\n\n var compress = new targz(level, memLevel, proprietary).compress(...)\n\n### **Command line**\n\n $ targz -h\n\n Usage: targz [options]\n\n Options:\n\n -h, --help output usage information\n -V, --version output the version number\n -c, --compress Compress folder to archive\n -x, --extract Extract archive to folder\n -l, --level [n] Compression level from 0-9. Default 6.\n -m, --memory [n] Memory allocation level from 1-9. 
Default 6.\n -n, --noproprietary Remove proprietary headers.\n\n Examples:\n\n Default compression\n $ targz -c /folder/to/compress /path/to/archive.tar.gz\n\n Extracting some archive\n $ targz -x /path/to/archive.tar.gz /destination/folder\n\n Maximum compression\n $ targz -l 9 -m 9 -c /folder/to/compress /path/to/archive.tar.gz\n\n\n### **TODO**\n\n * Vows.js tests\n * Single file compression\n * Add more todos...\n\n\n### **License (MIT)**\n\nCopyright (C) 2012 Cranic Tecnologia e Informática LTDA\n\nPermission is hereby granted, free of charge, to any person obtaining \na copy of this software and associated documentation files \n(the \"Software\"), to deal in the Software without restriction, \nincluding without limitation the rights to use, copy, modify, merge, \npublish, distribute, sublicense, and/or sell copies of the Software, \nand to permit persons to whom the Software is furnished to do so, \nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be \nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS \nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF \nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. \nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY \nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, \nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE \nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "readmeFilename": "README.md", "_id": "tar.gz@0.1.1", - "dist": { - "shasum": "e914ce23b2fdc627575fbdb3485a5b228ed59947" - }, - "_from": "tar.gz@~0.1.1", - "_resolved": "https://registry.npmjs.org/tar.gz/-/tar.gz-0.1.1.tgz" + "_from": "tar.gz@~0.1.1" } diff -Nru tilemill-0.10.1~saucy9/node_modules/sqlite3/package.json tilemill-0.10.1~saucy10/node_modules/sqlite3/package.json --- tilemill-0.10.1~saucy9/node_modules/sqlite3/package.json 2013-10-25 01:44:46.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/sqlite3/package.json 2013-10-25 23:13:17.000000000 +0000 @@ -103,9 +103,5 @@ "progress" ], "_id": "sqlite3@2.1.18", - "dist": { - "shasum": "96c851c0486d1f51f08470295892407626937f34" - }, - "_from": "sqlite3@~2.1.17", - "_resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-2.1.18.tgz" + "_from": "sqlite3@~2.1.17" } diff -Nru tilemill-0.10.1~saucy9/node_modules/step/package.json tilemill-0.10.1~saucy10/node_modules/step/package.json --- tilemill-0.10.1~saucy9/node_modules/step/package.json 2012-10-11 02:23:08.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/step/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -15,6 +15,14 @@ }, "main": "lib/step", "readme": "# Step\n\nA simple control-flow library for node.JS that makes parallel execution, serial execution, and error handling painless.\n\n## How to install\n\nSimply copy or link the lib/step.js file into your `$HOME/.node_libraries` folder.\n\n## How to use\n\nThe step library exports a single function I call `Step`. It accepts any number of functions as arguments and runs them in serial order using the passed in `this` context as the callback to the next step.\n\n Step(\n function readSelf() {\n fs.readFile(__filename, this);\n },\n function capitalize(err, text) {\n if (err) throw err;\n return text.toUpperCase();\n },\n function showIt(err, newText) {\n if (err) throw err;\n console.log(newText);\n }\n );\n\nNotice that we pass in `this` as the callback to `fs.readFile`. 
When the file read completes, step will send the result as the arguments to the next function in the chain. Then in the `capitalize` function we're doing synchronous work so we can simply return the new value and Step will route it as if we called the callback.\n\nThe first parameter is reserved for errors since this is the node standard. Also any exceptions thrown are caught and passed as the first argument to the next function. As long as you don't nest callback functions inline in your main functions, this prevents there from ever being any uncaught exceptions. This is very important for long running node.JS servers since a single uncaught exception can bring the whole server down.\n\nAlso there is support for parallel actions:\n\n Step(\n // Loads two files in parallel\n function loadStuff() {\n fs.readFile(__filename, this.parallel());\n fs.readFile(\"/etc/passwd\", this.parallel());\n },\n // Show the result when done\n function showStuff(err, code, users) {\n if (err) throw err;\n console.log(code);\n console.log(users);\n }\n )\n\nHere we pass `this.parallel()` instead of `this` as the callback. It internally keeps track of the number of callbacks issued and preserves their order, then gives the result to the next step after all have finished. If there is an error in any of the parallel actions, it will be passed as the first argument to the next step.\n\nAlso you can use group with a dynamic number of common tasks.\n\n Step(\n function readDir() {\n fs.readdir(__dirname, this);\n },\n function readFiles(err, results) {\n if (err) throw err;\n // Create a new group\n var group = this.group();\n results.forEach(function (filename) {\n if (/\\.js$/.test(filename)) {\n fs.readFile(__dirname + \"/\" + filename, 'utf8', group());\n }\n });\n },\n function showAll(err, files) {\n if (err) throw err;\n console.dir(files);\n }\n );\n\n*Note* that we both call `this.group()` and `group()`. 
The first reserves a slot in the parameters of the next step, then calling `group()` generates the individual callbacks and increments the internal counter.\n", + "readmeFilename": "README.markdown", + "bugs": { + "url": "https://github.com/creationix/step/issues" + }, "_id": "step@0.0.5", - "_from": "step@~0.0.5" + "dist": { + "shasum": "695d2e9074e0d646bf7e20650d2b76d6170611b8" + }, + "_from": "step@~0.0.5", + "_resolved": "https://registry.npmjs.org/step/-/step-0.0.5.tgz" } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive/node_modules/optimist/node_modules/wordwrap/package.json tilemill-0.10.1~saucy10/node_modules/tilelive/node_modules/optimist/node_modules/wordwrap/package.json --- tilemill-0.10.1~saucy9/node_modules/tilelive/node_modules/optimist/node_modules/wordwrap/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive/node_modules/optimist/node_modules/wordwrap/package.json 2013-10-25 23:13:16.000000000 +0000 @@ -40,9 +40,5 @@ "url": "https://github.com/substack/node-wordwrap/issues" }, "_id": "wordwrap@0.0.2", - "dist": { - "shasum": "624b9d3e54ad494743ebeb222c78828a452556bf" - }, - "_from": "wordwrap@~0.0.2", - "_resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz" + "_from": "wordwrap@~0.0.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive/node_modules/optimist/package.json tilemill-0.10.1~saucy10/node_modules/tilelive/node_modules/optimist/package.json --- tilemill-0.10.1~saucy9/node_modules/tilelive/node_modules/optimist/package.json 2013-10-25 01:44:46.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive/node_modules/optimist/package.json 2013-10-25 23:13:16.000000000 +0000 @@ -41,9 +41,5 @@ "url": "https://github.com/substack/node-optimist/issues" }, "_id": "optimist@0.4.0", - "dist": { - "shasum": "ea6f0d8d3b5583f5ab17430b0b47621a7a143469" - }, - "_from": "optimist@~0.4.0", - "_resolved": "https://registry.npmjs.org/optimist/-/optimist-0.4.0.tgz" + "_from": "optimist@~0.4.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive/package.json tilemill-0.10.1~saucy10/node_modules/tilelive/package.json --- tilemill-0.10.1~saucy9/node_modules/tilelive/package.json 2013-10-25 01:44:46.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive/package.json 2013-10-25 23:13:16.000000000 +0000 @@ -70,9 +70,5 @@ "url": "https://github.com/mapbox/tilelive.js/issues" }, "_id": "tilelive@4.5.2", - "dist": { - "shasum": "7086e10c9f742632056df9b0ffa364dc77d061e7" - }, - "_from": "tilelive@4.5.x", - "_resolved": "https://registry.npmjs.org/tilelive/-/tilelive-4.5.2.tgz" + "_from": "tilelive@4.5.x" } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/node_modules/eio/package.json tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/node_modules/eio/package.json --- tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/node_modules/eio/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/node_modules/eio/package.json 2013-10-25 23:13:17.000000000 +0000 @@ -34,9 +34,5 @@ "url": "https://github.com/developmentseed/node-eio/issues" }, "_id": "eio@0.2.2", - "dist": { - "shasum": "8c3dc1d94efceaa333e8074a1953d0e0adcec53c" - }, - "_from": "eio@~0.2.2", - "_resolved": "https://registry.npmjs.org/eio/-/eio-0.2.2.tgz" + "_from": "eio@~0.2.2" } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/node_modules/mime/package.json tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/node_modules/mime/package.json --- 
tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/node_modules/mime/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/node_modules/mime/package.json 2013-10-25 23:13:17.000000000 +0000 @@ -31,10 +31,6 @@ "url": "https://github.com/broofa/node-mime/issues" }, "_id": "mime@1.2.11", - "dist": { - "shasum": "2e68433cd8b933cc360926e916e08e705e3bf1ae" - }, "_from": "mime@~1.2.9", - "_resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/node_modules/underscore/package.json tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/node_modules/underscore/package.json --- tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/node_modules/underscore/package.json 2013-10-25 01:44:48.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/node_modules/underscore/package.json 2013-10-25 23:13:18.000000000 +0000 @@ -25,9 +25,6 @@ "url": "https://github.com/documentcloud/underscore/issues" }, "_id": "underscore@1.3.3", - "dist": { - "shasum": "d91d5cecb4818af5f1fcb65fda16fda15e917e3c" - }, "_from": "underscore@~1.3.3", - "_resolved": "https://registry.npmjs.org/underscore/-/underscore-1.3.3.tgz" + "scripts": {} } diff -Nru tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/package.json tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/package.json --- tilemill-0.10.1~saucy9/node_modules/tilelive-mapnik/package.json 2013-10-25 01:44:46.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/tilelive-mapnik/package.json 2013-10-25 23:13:16.000000000 +0000 @@ -61,9 +61,5 @@ "url": "https://github.com/mapbox/tilelive-mapnik/issues" }, "_id": "tilelive-mapnik@0.6.3", - "dist": { - "shasum": "2be57c6a662d94b1a59d9b344d690ec9900b2131" - }, - "_from": "tilelive-mapnik@~0.6.3", - "_resolved": "https://registry.npmjs.org/tilelive-mapnik/-/tilelive-mapnik-0.6.3.tgz" + "_from": "tilelive-mapnik@~0.6.3" } diff -Nru tilemill-0.10.1~saucy9/node_modules/topcube/package.json tilemill-0.10.1~saucy10/node_modules/topcube/package.json --- tilemill-0.10.1~saucy9/node_modules/topcube/package.json 2013-10-25 01:44:42.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/topcube/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -26,9 +26,5 @@ "url": "https://github.com/creationix/topcube/issues" }, "_id": "topcube@0.2.0", - "dist": { - "shasum": "e517f6eeaeac90325368612a21a374baf12a64ab" - }, - "_from": "https://github.com/creationix/topcube/tarball/master", - "_resolved": "https://github.com/creationix/topcube/tarball/master" + "_from": "topcube@~0.2.0" } diff -Nru tilemill-0.10.1~saucy9/node_modules/underscore/package.json tilemill-0.10.1~saucy10/node_modules/underscore/package.json --- tilemill-0.10.1~saucy9/node_modules/underscore/package.json 2013-10-25 01:44:42.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/underscore/package.json 2013-10-25 23:13:10.000000000 +0000 @@ -42,9 +42,5 @@ "url": "https://github.com/jashkenas/underscore/issues" }, "_id": "underscore@1.5.2", - "dist": { - "shasum": "07f3f49f2970cf72ab975f4504333a51584d1dbe" - }, - "_from": "underscore@~1.5.1", - "_resolved": "https://registry.npmjs.org/underscore/-/underscore-1.5.2.tgz" + "_from": "underscore@~1.5.1" } diff -Nru tilemill-0.10.1~saucy9/node_modules/wax/package.json tilemill-0.10.1~saucy10/node_modules/wax/package.json --- tilemill-0.10.1~saucy9/node_modules/wax/package.json 2012-10-11 02:23:11.000000000 +0000 +++ tilemill-0.10.1~saucy10/node_modules/wax/package.json 
2013-10-25 23:13:17.000000000 +0000
@@ -37,6 +37,14 @@
     "banner": "0.0.x"
   },
   "readme": "# Wax\n\nTools for improving web maps. The centerpiece of the code is a client implementation of the [MBTiles interaction specification](https://github.com/mapbox/mbtiles-spec).\n\nFor full documentation of supported mapping APIs and how to use Wax, see http://mapbox.github.com/wax.\n\n## Versions\n\nThere are three current development branches of Wax:\n\n* `master` is 6.x\n* 5.x supports **Modest Maps 1.0.0-alpha2 and later**, but exposes the same API as 4.x\n* 4.x supports **Modest Maps 0.x**, the older versions of Modest Maps\n\nFor more detail on what has changed in each version, consult `CHANGELOG.md`.\n\n## Compatibility\n\n* [Google Maps API v3](https://developers.google.com/maps/)\n* [Leaflet 0.x.x](http://leaflet.cloudmade.com/)\n* [Modest Maps 1.x.x](http://modestmaps.com/)\n* [OpenLayers 2.11](http://openlayers.org/)\n* [ESRI ArcGIS API 2.8](http://help.arcgis.com/en/webapi/javascript/arcgis/)\n\n## Building Wax\n\nFor end users, a minified library is already provided in `dist/`.\n\nDevelopers can rebuild the minified library by running:\n\n    npm install --dev\n    make\n\n## Includes\n\nWax currently includes three externals:\n\n* [reqwest](https://github.com/ded/reqwest) (MIT)\n* [mustache.js](https://github.com/janl/mustache.js) (MIT)\n* [html-sanitizer from Google Caja](http://code.google.com/p/google-caja/source/browse/trunk/src/com/google/caja/plugin/html-sanitizer.js) (Apache)\n\n## Authors\n\n- Tom MacWright (tmcw)\n- Young Hahn (yhahn)\n- Will White (willwhite)\n",
+  "readmeFilename": "README.md",
+  "bugs": {
+    "url": "https://github.com/mapbox/wax/issues"
+  },
   "_id": "wax@6.4.2",
-  "_from": "wax@6.4.2"
+  "dist": {
+    "shasum": "ebdd074f53f268fb68e04f4be94aa5442282b3c3"
+  },
+  "_from": "wax@6.4.2",
+  "_resolved": "https://registry.npmjs.org/wax/-/wax-6.4.2.tgz"
 }
diff -Nru tilemill-0.10.1~saucy9/rebuild_cxx.sh tilemill-0.10.1~saucy10/rebuild_cxx.sh
--- tilemill-0.10.1~saucy9/rebuild_cxx.sh 2013-10-25 19:54:14.000000000 +0000
+++ tilemill-0.10.1~saucy10/rebuild_cxx.sh 2013-10-25 23:12:37.000000000 +0000
@@ -16,15 +16,15 @@
     esac
 done
 
+BUILD_ROOT=${TILEMILL_ROOT}/node_root_dir
+
 if [ "${RUN_SETUP}" = true ]; then
     NVER=`node -e "console.log(process.versions.node)"`
     echo "Running node-gyp setup for ${NVER}"
     node-gyp install ${NVER}
-    BUILD_ROOT=${TILEMILL_ROOT}/node_root_dir
     cp -r ~/.node-gyp/${NVER}/ ${BUILD_ROOT}
-    #rm -rf ~/.node-gyp/
+    rm -rf ~/.node-gyp/
 fi
 
-BUILD_ROOT=${TILEMILL_ROOT}/node_root_dir
-CXX_MODULES=$(find ${TILEMILL_ROOT}/node_modules -name binding.gyp | sed 's,/binding.gyp,/,g')
-for i in ${CXX_MODULES}; do cd $i && node-gyp rebuild --nodedir=${BUILD_ROOT}; done
+npm rebuild --nodedir=${BUILD_ROOT} --build-from-source
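
Note: the `rebuild_cxx.sh` hunk above replaces a hand-rolled loop over every `binding.gyp` with a single `npm rebuild` invocation. The following is a rough comparison sketch of the two strategies, assuming `TILEMILL_ROOT` is set as elsewhere in the script; it is illustrative only and not part of the shipped package (the subshell around `cd` is an added safeguard, not in the original loop):

```bash
#!/bin/sh
# Sketch of the two rebuild strategies side by side, for comparison only.
BUILD_ROOT=${TILEMILL_ROOT}/node_root_dir

# saucy9: locate each C++ addon by its binding.gyp and rebuild it one at
# a time with node-gyp, pointing at the vendored node headers.
CXX_MODULES=$(find ${TILEMILL_ROOT}/node_modules -name binding.gyp | sed 's,/binding.gyp,/,g')
for i in ${CXX_MODULES}; do
  (cd "$i" && node-gyp rebuild --nodedir=${BUILD_ROOT})
done

# saucy10: let npm traverse the dependency tree itself, forcing
# compilation from source against the same header directory.
npm rebuild --nodedir=${BUILD_ROOT} --build-from-source
```

In both variants the compiler is pointed at the vendored `node_root_dir` headers rather than the per-user `~/.node-gyp` cache, which is why the new revision can safely `rm -rf ~/.node-gyp/` once the headers have been copied.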