mirror of
https://github.com/musix-org/musix-oss
synced 2025-06-17 04:26:00 +00:00
opus
This commit is contained in:
51
node_modules/node-pre-gyp/lib/build.js
generated
vendored
Normal file
51
node_modules/node-pre-gyp/lib/build.js
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = build;
|
||||
|
||||
exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp';
|
||||
|
||||
var napi = require('./util/napi.js');
|
||||
var compile = require('./util/compile.js');
|
||||
var handle_gyp_opts = require('./util/handle_gyp_opts.js');
|
||||
var configure = require('./configure.js');
|
||||
|
||||
// Dispatch `node-gyp build` (or nw-gyp) using the already-resolved options.
// Unparsed arguments are forwarded after a `--` separator; when a
// napi_build_version is active the napi build directory is swapped in
// before compiling and always swapped back out afterwards.
function do_build(gyp,argv,callback) {
  handle_gyp_opts(gyp,argv,function(err,result) {
    var gyp_args = ['build'].concat(result.gyp).concat(result.pre);
    if (result.unparsed.length > 0) {
      gyp_args = gyp_args.concat(['--']).concat(result.unparsed);
    }
    if (!err && result.opts.napi_build_version) {
      napi.swap_build_dir_in(result.opts.napi_build_version);
    }
    compile.run_gyp(gyp_args, result.opts, function(err) {
      // Restore the original build dir even when the compile failed.
      if (result.opts.napi_build_version) {
        napi.swap_build_dir_out(result.opts.napi_build_version);
      }
      return callback(err);
    });
  });
}
|
||||
|
||||
// Entry point for `node-pre-gyp build` / `node-pre-gyp rebuild`.
//
// Plain `build` maps to `node-gyp configure build` so no clean is
// triggered and we avoid the cost of a full recompile. `rebuild` maps
// to node-gyp's clean + configure + build (full recompile).
function build(gyp, argv, callback) {
  if (argv.length && (argv.indexOf('rebuild') > -1)) {
    argv.shift(); // drop the `rebuild` token itself
    compile.run_gyp(['clean'], {}, function(err) {
      if (err) return callback(err);
      configure(gyp, argv, function(err) {
        if (err) return callback(err);
        return do_build(gyp, argv, callback);
      });
    });
  } else {
    return do_build(gyp, argv, callback);
  }
}
|
32
node_modules/node-pre-gyp/lib/clean.js
generated
vendored
Normal file
32
node_modules/node-pre-gyp/lib/clean.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = clean;
|
||||
|
||||
exports.usage = 'Removes the entire folder containing the compiled .node module';
|
||||
|
||||
var fs = require('fs');
|
||||
var rm = require('rimraf');
|
||||
var exists = require('fs').exists || require('path').exists;
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var path = require('path');
|
||||
|
||||
// Remove the directory holding the compiled .node module.
//
// Safety guards: refuses to delete when module_path is empty, or when it
// resolves to the current working directory (which would wipe the
// project itself).
//
// Fix: the second guard previously reported "module_path is not set",
// which is wrong — the path IS set, it just resolves to the cwd. The
// message now states the actual reason.
function clean (gyp, argv, callback) {
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  var to_delete = opts.module_path;
  if (!to_delete) {
    return callback(new Error("module_path is empty, refusing to delete"));
  } else if (path.normalize(to_delete) == path.normalize(process.cwd())) {
    return callback(new Error("module_path resolves to the current working directory, refusing to delete"));
  } else {
    exists(to_delete, function(found) {
      if (found) {
        if (!gyp.opts.silent_clean) console.log('['+package_json.name+'] Removing "%s"', to_delete);
        return rm(to_delete, callback);
      }
      // Nothing on disk: already clean.
      return callback();
    });
  }
}
|
52
node_modules/node-pre-gyp/lib/configure.js
generated
vendored
Normal file
52
node_modules/node-pre-gyp/lib/configure.js
generated
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = configure;
|
||||
|
||||
exports.usage = 'Attempts to configure node-gyp or nw-gyp build';
|
||||
|
||||
var napi = require('./util/napi.js');
|
||||
var compile = require('./util/compile.js');
|
||||
var handle_gyp_opts = require('./util/handle_gyp_opts.js');
|
||||
|
||||
// Run `node-gyp configure` (or nw-gyp) with merged options.
//
// Select node-gyp flags set via npm config are forwarded, and any
// unparsed arguments are passed through after a `--` separator. When the
// user passes --ensure=false a separate `node-gyp install` step runs
// first, since only `install` honors that flag.
function configure(gyp, argv, callback) {
  handle_gyp_opts(gyp, argv, function(err, result) {
    var final_args = result.gyp.concat(result.pre);
    // Pull select node-gyp configure options out of the npm environ.
    var known_gyp_args = ['dist-url','python','nodedir','msvs_version'];
    known_gyp_args.forEach(function(key) {
      var val = gyp.opts[key] || gyp.opts[key.replace('-','_')];
      if (val) {
        final_args.push('--' + key + '=' + val);
      }
    });
    // Forward anything node-pre-gyp itself did not parse after `--`.
    var append_unparsed = function(args) {
      if (result.unparsed.length > 0) {
        args = args.concat(['--']).concat(result.unparsed);
      }
      return args;
    };
    if (gyp.opts.ensure === false) {
      // --ensure=false tells node-gyp to re-install node development
      // headers, but only `node-gyp install` respects it, so run install
      // as an explicit first step.
      var install_args = final_args.concat(['install','--ensure=false']);
      compile.run_gyp(install_args, result.opts, function(err) {
        if (err) return callback(err);
        final_args = append_unparsed(final_args);
        compile.run_gyp(['configure'].concat(final_args), result.opts, function(err) {
          return callback(err);
        });
      });
    } else {
      final_args = append_unparsed(final_args);
      compile.run_gyp(['configure'].concat(final_args), result.opts, function(err) {
        if (!err && result.opts.napi_build_version) {
          napi.swap_build_dir_out(result.opts.napi_build_version);
        }
        return callback(err);
      });
    }
  });
}
|
40
node_modules/node-pre-gyp/lib/info.js
generated
vendored
Normal file
40
node_modules/node-pre-gyp/lib/info.js
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = unpublish;
|
||||
|
||||
exports.usage = 'Lists all published binaries (requires aws-sdk)';
|
||||
|
||||
var fs = require('fs');
|
||||
var log = require('npmlog');
|
||||
var versioning = require('./util/versioning.js');
|
||||
var s3_setup = require('./util/s3_setup.js');
|
||||
var config = require('rc')("node_pre_gyp",{acl:"public-read"});
|
||||
|
||||
// Print the S3 key of every binary published under this package's
// prefix (implements the `info` command).
// NOTE(review): the local name `unpublish` looks like a copy-paste from
// unpublish.js — the exported usage string is authoritative; confirm
// before renaming since the export statement references this name.
function unpublish(gyp, argv, callback) {
  var AWS = require("aws-sdk");
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var opts = versioning.evaluate(package_json, gyp.opts);
  s3_setup.detect(opts.hosted_path, config);
  AWS.config.update(config);
  var s3 = new AWS.S3();
  var s3_opts = {
    Bucket: config.bucket,
    Prefix: config.prefix
  };
  s3.listObjects(s3_opts, function(err, meta) {
    if (err && err.code == 'NotFound') {
      return callback(new Error('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix));
    }
    if (err) {
      return callback(err);
    }
    log.verbose(JSON.stringify(meta,null,1));
    if (meta && meta.Contents) {
      meta.Contents.forEach(function(obj) {
        console.log(obj.Key);
      });
    } else {
      console.error('['+package_json.name+'] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix );
    }
    return callback();
  });
}
|
285
node_modules/node-pre-gyp/lib/install.js
generated
vendored
Normal file
285
node_modules/node-pre-gyp/lib/install.js
generated
vendored
Normal file
@ -0,0 +1,285 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = install;
|
||||
|
||||
exports.usage = 'Attempts to install pre-built binary for module';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var log = require('npmlog');
|
||||
var existsAsync = fs.exists || path.exists;
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var mkdirp = require('mkdirp');
|
||||
|
||||
// Best-effort read of our own package.json so the download User-Agent
// can report the node-pre-gyp version; stays 'unknown' on any failure.
var npgVersion = 'unknown';
try {
  var ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8');
  npgVersion = JSON.parse(ownPackageJSON).version;
} catch (e) {
  // deliberately ignored: the version string is informational only
}

// Select an HTTP client at load time: prefer `request` when it is
// installed, otherwise fall back to `needle`.
var http_get = {
  impl: undefined,
  type: undefined
};

try {
  http_get.impl = require('request');
  http_get.type = 'request';
  log.warn("Using request for node-pre-gyp https download");
} catch (e) {
  http_get.impl = require('needle');
  http_get.type = 'needle';
  log.warn("Using needle for node-pre-gyp https download");
}
|
||||
|
||||
// Start a GET of `uri` with the selected http client, honoring custom CA
// settings (opts.cafile / opts.ca) and any proxy configured via opts or
// the conventional environment variables.
// Yields (err, req) where `req` is the in-flight request stream.
function download(uri, opts, callback) {
  log.http('GET', uri);

  var req = null;

  // Advertise the npm that invoked us, or the bare node version.
  var envVersionInfo = process.env.npm_config_user_agent ||
    'node ' + process.version;

  var requestOpts = {
    uri: uri.replace('+','%2B'), // '+' must be percent-encoded in the URL
    headers: {
      'User-Agent': 'node-pre-gyp (v' + npgVersion + ', ' + envVersionInfo + ')'
    },
    follow_max: 10,
  };

  if (opts.cafile) {
    try {
      requestOpts.ca = fs.readFileSync(opts.cafile);
    } catch (e) {
      return callback(e);
    }
  } else if (opts.ca) {
    requestOpts.ca = opts.ca;
  }

  var proxyUrl = opts.proxy ||
    process.env.http_proxy ||
    process.env.HTTP_PROXY ||
    process.env.npm_config_proxy;
  if (proxyUrl) {
    if (/^https?:\/\//i.test(proxyUrl)) {
      log.verbose('download', 'using proxy url: "%s"', proxyUrl);
      requestOpts.proxy = proxyUrl;
    } else {
      log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl);
    }
  }

  try {
    req = http_get.impl.get(requestOpts.uri, requestOpts);
  } catch (e) {
    return callback(e);
  }
  if (req) {
    req.on('response', function (res) {
      log.http(res.statusCode, uri);
    });
  }
  return callback(null, req);
}
|
||||
|
||||
// Stream the remote tarball at `from` and unpack it into directory `to`.
// The hasResponse flag guards against double-callbacks: whichever of
// error / close / response fires first wins.
function place_binary(from, to, opts, callback) {
  download(from, opts, function(err, req) {
    if (err) return callback(err);
    if (!req) return callback(new Error("empty req"));
    var badDownload = false;
    var hasResponse = false;

    // Invoked once the tar stream has been fully consumed.
    function afterExtract(err, extractCount) {
      if (err) return callback(err);
      if (badDownload) return callback(new Error("bad download"));
      if (extractCount === 0) {
        return callback(new Error('There was a fatal problem while downloading/extracting the tarball'));
      }
      log.info('tarball', 'done parsing tarball');
      callback();
    }

    // `request` emits 'error'; `needle` emits 'err'. Same handling for both.
    ['error', 'err'].forEach(function(eventName) {
      req.on(eventName, function(err) {
        badDownload = true;
        if (!hasResponse) {
          hasResponse = true;
          return callback(err);
        }
      });
    });

    req.on('close', function () {
      if (!hasResponse) {
        hasResponse = true;
        return callback(new Error('Connection closed while downloading tarball file'));
      }
    });

    req.on('response', function(res) {
      // needle follows redirects itself; skip the intermediate responses.
      if (http_get.type === 'needle' && res.headers.hasOwnProperty('location') && res.headers.location !== '') {
        return;
      }
      if (hasResponse) {
        return;
      }
      hasResponse = true;
      if (res.statusCode !== 200) {
        badDownload = true;
        var statusErr = new Error(res.statusCode + ' status code downloading tarball ' + from);
        statusErr.statusCode = res.statusCode;
        return callback(statusErr);
      }
      // Gunzip + untar straight off the response stream.
      req.pipe(extract(to, afterExtract));
    });
  });
}
|
||||
|
||||
// Unpack a tarball that already exists on disk (file:// install source).
function extract_from_local(from, to, callback) {
  if (!fs.existsSync(from)) {
    return callback(new Error('Cannot find file ' + from));
  }
  log.info('Found local file to extract from ' + from);

  // Invoked once the tar stream has been fully consumed.
  function afterExtract(err, extractCount) {
    if (err) return callback(err);
    if (extractCount === 0) {
      return callback(new Error('There was a fatal problem while extracting the tarball'));
    }
    log.info('tarball', 'done parsing tarball');
    callback();
  }

  fs.createReadStream(from).pipe(extract(to, afterExtract));
}
|
||||
|
||||
// Build a gunzip+untar writable stream targeting directory `to`.
// Counts unpacked entries so callers can detect an empty tarball, and
// invokes callback(err, extractCount) once the stream closes.
function extract(to, callback) {
  var extractCount = 0;

  function onEntry(entry) {
    log.info('install','unpacking ' + entry.path);
    extractCount++;
  }

  var tar = require('tar');
  return tar.extract({
    cwd: to,
    strip: 1,        // drop the single top-level directory in the tarball
    onentry: onEntry
  }).on('close', function(err) {
    callback(err, extractCount);
  }).on('error', callback);
}
|
||||
|
||||
|
||||
// Queue a full source compile by appending a `build rebuild` command to
// the gyp todo list, then signal completion on the next tick.
function do_build(gyp, argv, callback) {
  gyp.todo.push({ name: 'build', args: ['rebuild'].concat(argv) });
  process.nextTick(callback);
}
|
||||
|
||||
// Explain why the pre-built binary could not be used before falling
// back to a source compile with node-gyp.
function print_fallback_error(err, opts, package_json) {
  var fallback_message = ' (falling back to source compile with node-gyp)';
  var full_message = '';
  if (err.statusCode !== undefined) {
    // An HTTP response came back but the download failed: the remote
    // binary is simply not published, so surface the lookup details to
    // help the user/developer debug why.
    full_message = "Pre-built binaries not found for " + package_json.name + "@" + package_json.version;
    full_message += " and " + opts.runtime + "@" + (opts.target || process.versions.node) + " (" + opts.node_abi + " ABI, " + opts.libc + ")";
    full_message += fallback_message;
    log.warn("Tried to download(" + err.statusCode + "): " + opts.hosted_tarball);
    log.warn(full_message);
    log.http(err.message);
  } else {
    // No statusCode: something unexpected prevented an HTTP response,
    // so report the exact error instead.
    full_message = "Pre-built binaries not installable for " + package_json.name + "@" + package_json.version;
    full_message += " and " + opts.runtime + "@" + (opts.target || process.versions.node) + " (" + opts.node_abi + " ABI, " + opts.libc + ")";
    full_message += fallback_message;
    log.warn(full_message);
    log.warn("Hit error " + err.message);
  }
}
|
||||
|
||||
// Entry point for `node-pre-gyp install`: use the pre-built binary when
// available, optionally falling back to a source compile on failure.
function install(gyp, argv, callback) {
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source;
  var update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary;

  // --build-from-source may be boolean, the string 'true', or scoped to
  // this package's name.
  var should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true');
  if (should_do_source_build) {
    log.info('build','requesting source compile');
    return do_build(gyp, argv, callback);
  }

  var fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build;
  var should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true');
  // npm can override the fallback with `--fallback-to-build false`.
  if (process.env.npm_config_argv) {
    var cooked = JSON.parse(process.env.npm_config_argv).cooked;
    var match = cooked.indexOf("--fallback-to-build");
    if (match > -1 && cooked.length > match && cooked[match+1] == "false") {
      should_do_fallback_build = false;
      log.info('install','Build fallback disabled via npm flag: --fallback-to-build=false');
    }
  }

  var opts;
  try {
    opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  } catch (err) {
    return callback(err);
  }

  opts.ca = gyp.opts.ca;
  opts.cafile = gyp.opts.cafile;

  var from = opts.hosted_tarball;
  var to = opts.module_path;
  var binary_module = path.join(to, opts.module_name + '.node');

  // Shared completion handler for both the remote and local paths.
  function after_place(err) {
    if (err && should_do_fallback_build) {
      print_fallback_error(err, opts, package_json);
      return do_build(gyp, argv, callback);
    }
    if (err) {
      return callback(err);
    }
    console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed via remote');
    return callback();
  }

  existsAsync(binary_module, function(found) {
    if (found && !update_binary) {
      console.log('['+package_json.name+'] Success: "' + binary_module + '" already installed');
      console.log('Pass --update-binary to reinstall or --build-from-source to recompile');
      return callback();
    }
    if (!update_binary) log.info('check','checked for "' + binary_module + '" (not found)');
    mkdirp(to, function(err) {
      if (err) {
        return after_place(err);
      }
      // file:// sources are extracted locally instead of downloaded.
      var fileName = from.startsWith('file://') && from.replace(/^file:\/\//, '');
      if (fileName) {
        extract_from_local(fileName, to, after_place);
      } else {
        place_binary(from, to, opts, after_place);
      }
    });
  });
}
|
203
node_modules/node-pre-gyp/lib/node-pre-gyp.js
generated
vendored
Normal file
203
node_modules/node-pre-gyp/lib/node-pre-gyp.js
generated
vendored
Normal file
@ -0,0 +1,203 @@
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = exports;
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var nopt = require('nopt');
|
||||
var log = require('npmlog');
|
||||
log.disableProgress();
|
||||
var napi = require('./util/napi.js');
|
||||
|
||||
var EE = require('events').EventEmitter;
|
||||
var inherits = require('util').inherits;
|
||||
// Every sub-command exposed on the CLI; each name maps to ./lib/<name>.js.
var commands = [
  'clean',
  'install',
  'reinstall',
  'build',
  'rebuild',
  'package',
  'testpackage',
  'publish',
  'unpublish',
  'info',
  'testbinary',
  'reveal',
  'configure'
];
// Command-name aliases (currently none are defined).
var aliases = {};

// differentiate node-pre-gyp's logs from npm's
log.heading = 'node-pre-gyp';

exports.find = require('./pre-binding').find;
|
||||
|
||||
// CLI driver object. Wires one method per sub-command onto
// this.commands; each method lazily requires and invokes the matching
// ./lib/<command>.js module.
function Run() {
  var self = this;

  this.commands = {};

  commands.forEach(function (name) {
    self.commands[name] = function (argv, callback) {
      log.verbose('command', name, argv);
      return require('./' + name)(self, argv, callback);
    };
  });
}
inherits(Run, EE);
exports.Run = Run;
var proto = Run.prototype;
|
||||
|
||||
/**
 * Export the contents of the package.json.
 */
proto.package = require('../package.json');

/**
 * nopt configuration definitions
 */
proto.configDefs = {
  help: Boolean,     // everywhere
  arch: String,      // 'configure'
  debug: Boolean,    // 'build'
  directory: String, // bin
  proxy: String,     // 'install'
  loglevel: String,  // everywhere
};

/**
 * nopt shorthands
 */
proto.shorthands = {
  release: '--no-debug',
  C: '--directory',
  debug: '--debug',
  j: '--jobs',
  silent: '--loglevel=silent',
  silly: '--loglevel=silly',
  verbose: '--loglevel=verbose',
};

/**
 * expose the command aliases for the bin file to use.
 */
proto.aliases = aliases;
|
||||
|
||||
/**
 * Parses the given argv array and sets the 'opts', 'argv' and
 * 'todo' (command list) properties on this Run instance.
 */
proto.parseArgv = function parseOpts (argv) {
  this.opts = nopt(this.configDefs, this.shorthands, argv);
  this.argv = this.opts.argv.remain.slice();
  var commands = this.todo = [];

  // Copy of the argv array with command aliases resolved.
  argv = this.argv.map(function (arg) {
    if (arg in this.aliases) {
      arg = this.aliases[arg];
    }
    return arg;
  }, this);

  // Turn the mapped args into "command" objects ({name, args}): each
  // recognized command consumes the tokens that preceded it as the
  // previous command's args.
  argv.slice().forEach(function (arg) {
    if (arg in this.commands) {
      var args = argv.splice(0, argv.indexOf(arg));
      argv.shift();
      if (commands.length > 0) {
        commands[commands.length - 1].args = args;
      }
      commands.push({ name: arg, args: [] });
    }
  }, this);
  // Whatever tokens remain belong to the last command.
  if (commands.length > 0) {
    commands[commands.length - 1].args = argv.splice(0);
  }

  // Expand command entries for multiple napi builds.
  var dir = this.opts.directory;
  if (dir == null) dir = process.cwd();
  var package_json = JSON.parse(fs.readFileSync(path.join(dir,'package.json')));

  this.todo = napi.expand_commands (package_json, this.opts, commands);

  // Inherit npm_config_* environment variables from npm.
  var npm_config_prefix = 'npm_config_';
  Object.keys(process.env).forEach(function (name) {
    if (name.indexOf(npm_config_prefix) !== 0) return;
    var val = process.env[name];
    if (name === npm_config_prefix + 'loglevel') {
      log.level = val;
    } else {
      // Add the user-defined option to the config.
      name = name.substring(npm_config_prefix.length);
      // Do not let npm's argv clobber args already present — avoids the
      // problem of `npm test` calling scripts that run their own unique
      // npm install commands.
      if (name === 'argv') {
        if (this.opts.argv &&
             this.opts.argv.remain &&
             this.opts.argv.remain.length) {
          // keep the existing argv
        } else {
          this.opts[name] = val;
        }
      } else {
        this.opts[name] = val;
      }
    }
  }, this);

  if (this.opts.loglevel) {
    log.level = this.opts.loglevel;
  }
  log.resume();
};
|
||||
|
||||
/**
 * Returns the usage instructions for node-pre-gyp.
 */
proto.usage = function usage () {
  var commandHelp = commands.map(function (c) {
    return ' - ' + c + ' - ' + require('./' + c).usage;
  }).join('\n');
  return [
    '',
    ' Usage: node-pre-gyp <command> [options]',
    '',
    ' where <command> is one of:',
    commandHelp,
    '',
    'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
    'node@' + process.versions.node
  ].join('\n');
};
|
||||
|
||||
/**
 * Version number getter — reads straight from the loaded package.json.
 */
Object.defineProperty(proto, 'version', {
  enumerable: true,
  get: function () {
    return this.package.version;
  }
});
|
||||
|
56
node_modules/node-pre-gyp/lib/package.js
generated
vendored
Normal file
56
node_modules/node-pre-gyp/lib/package.js
generated
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = _package;
|
||||
|
||||
exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var log = require('npmlog');
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var write = require('fs').createWriteStream;
|
||||
var existsAsync = fs.exists || path.exists;
|
||||
var mkdirp = require('mkdirp');
|
||||
var tar = require('tar');
|
||||
|
||||
// Stage a gzipped tarball of the compiled binary (plus its enclosing
// directory) at opts.staged_tarball, ready for `node-pre-gyp publish`.
function _package(gyp, argv, callback) {
  var packlist = require('npm-packlist');
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  var from = opts.module_path;
  var binary_module = path.join(from, opts.module_name + '.node');
  existsAsync(binary_module, function(found) {
    if (!found) {
      return callback(new Error("Cannot package because " + binary_module + " missing: run `node-pre-gyp rebuild` first"));
    }
    var tarball = opts.staged_tarball;
    // Log every entry as it is added; returning true keeps the entry.
    var filter_func = function(entry) {
      log.info('package','packing ' + entry.path);
      return true;
    };
    mkdirp(path.dirname(tarball), function(err) {
      if (err) return callback(err);
      packlist({ path: from }).then(function(files) {
        // Prefix each file with the module's base directory so the
        // tarball unpacks into an enclosing folder.
        var base = path.basename(from);
        files = files.map(function(file) {
          return path.join(base, file);
        });
        tar.create({
          portable: true,
          gzip: true,
          onentry: filter_func,
          file: tarball,
          cwd: path.dirname(from)
        }, files, function(err) {
          if (err) console.error('['+package_json.name+'] ' + err.message);
          else log.info('package','Binary staged at "' + tarball + '"');
          return callback(err);
        });
      }, callback);
    });
  });
}
|
30
node_modules/node-pre-gyp/lib/pre-binding.js
generated
vendored
Normal file
30
node_modules/node-pre-gyp/lib/pre-binding.js
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
|
||||
var versioning = require('../lib/util/versioning.js');
|
||||
var napi = require('../lib/util/napi.js');
|
||||
var existsSync = require('fs').existsSync || require('path').existsSync;
|
||||
var path = require('path');
|
||||
|
||||
module.exports = exports;
|
||||
|
||||
exports.usage = 'Finds the require path for the node-pre-gyp installed module';
|
||||
|
||||
exports.validate = function(package_json,opts) {
|
||||
versioning.validate_config(package_json,opts);
|
||||
};
|
||||
|
||||
exports.find = function(package_json_path,opts) {
|
||||
if (!existsSync(package_json_path)) {
|
||||
throw new Error("package.json does not exist at " + package_json_path);
|
||||
}
|
||||
var package_json = require(package_json_path);
|
||||
versioning.validate_config(package_json,opts);
|
||||
var napi_build_version;
|
||||
if (napi.get_napi_build_versions (package_json, opts)) {
|
||||
napi_build_version = napi.get_best_napi_build_version(package_json, opts);
|
||||
}
|
||||
opts = opts || {};
|
||||
if (!opts.module_root) opts.module_root = path.dirname(package_json_path);
|
||||
var meta = versioning.evaluate(package_json,opts,napi_build_version);
|
||||
return meta.module;
|
||||
};
|
79
node_modules/node-pre-gyp/lib/publish.js
generated
vendored
Normal file
79
node_modules/node-pre-gyp/lib/publish.js
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = publish;
|
||||
|
||||
exports.usage = 'Publishes pre-built binary (requires aws-sdk)';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var log = require('npmlog');
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var s3_setup = require('./util/s3_setup.js');
|
||||
var existsAsync = fs.exists || path.exists;
|
||||
var url = require('url');
|
||||
var config = require('rc')("node_pre_gyp",{acl:"public-read"});
|
||||
|
||||
// Upload the staged tarball to S3, refusing to overwrite an object that
// already exists for this version.
function publish(gyp, argv, callback) {
  var AWS = require("aws-sdk");
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  var tarball = opts.staged_tarball;
  existsAsync(tarball, function(found) {
    if (!found) {
      return callback(new Error("Cannot publish because " + tarball + " missing: run `node-pre-gyp package` first"));
    }
    log.info('publish', 'Detecting s3 credentials');
    s3_setup.detect(opts.hosted_path, config);
    var key_name = url.resolve(config.prefix, opts.package_name);
    log.info('publish', 'Authenticating with s3');
    AWS.config.update(config);
    var s3 = new AWS.S3();
    var s3_opts = {
      Bucket: config.bucket,
      Key: key_name
    };
    var remote_package = 'https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key;
    log.info('publish', 'Checking for existing binary at ' + remote_package);
    s3.headObject(s3_opts, function(err, meta) {
      if (meta) log.info('publish', JSON.stringify(meta));
      if (err && err.code == 'NotFound') {
        // Safe to publish: no object exists at this key yet.
        log.info('publish', 'Preparing to put object');
        var s3_put = new AWS.S3();
        var s3_put_opts = {
          ACL: config.acl,
          Body: fs.createReadStream(tarball),
          Bucket: config.bucket,
          Key: key_name
        };
        log.info('publish', 'Putting object');
        try {
          s3_put.putObject(s3_put_opts, function(err, resp) {
            log.info('publish', 'returned from putting object');
            if (err) {
              log.info('publish', 's3 putObject error: "' + err + '"');
              return callback(err);
            }
            if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
            log.info('publish', 'successfully put object');
            console.log('['+package_json.name+'] published to ' + remote_package);
            return callback();
          });
        } catch (err) {
          log.info('publish', 's3 putObject error: "' + err + '"');
          return callback(err);
        }
      } else if (err) {
        log.info('publish', 's3 headObject error: "' + err + '"');
        return callback(err);
      } else {
        // The object already exists: publishing over it is forbidden.
        log.error('publish','Cannot publish over existing version');
        log.error('publish',"Update the 'version' field in package.json and try again");
        log.error('publish','If the previous version was published in error see:');
        log.error('publish','\t node-pre-gyp unpublish');
        return callback(new Error('Failed publishing to ' + remote_package));
      }
    });
  });
}
|
21
node_modules/node-pre-gyp/lib/rebuild.js
generated
vendored
Normal file
21
node_modules/node-pre-gyp/lib/rebuild.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = rebuild;
|
||||
|
||||
exports.usage = 'Runs "clean" and "build" at once';
|
||||
|
||||
var fs = require('fs');
|
||||
var napi = require('./util/napi.js');
|
||||
|
||||
// Queue `clean` followed by `build rebuild`, expanded per napi build.
function rebuild (gyp, argv, callback) {
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var commands = napi.expand_commands(package_json, gyp.opts, [
    { name: 'clean', args: [] },
    { name: 'build', args: ['rebuild'] }
  ]);
  // Unshift in reverse so the queued order matches the list order.
  for (var i = commands.length - 1; i >= 0; i--) {
    gyp.todo.unshift(commands[i]);
  }
  process.nextTick(callback);
}
|
20
node_modules/node-pre-gyp/lib/reinstall.js
generated
vendored
Normal file
20
node_modules/node-pre-gyp/lib/reinstall.js
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = rebuild;
|
||||
|
||||
exports.usage = 'Runs "clean" and "install" at once';
|
||||
|
||||
var fs = require('fs');
|
||||
var napi = require('./util/napi.js');
|
||||
|
||||
// Queue a "clean" followed by an "install", forwarding the best
// matching N-API build version to install when one applies.
function rebuild (gyp, argv, callback) {
  var pkg = JSON.parse(fs.readFileSync('./package.json'));
  var best = napi.get_best_napi_build_version(pkg, gyp.opts);
  // only tag the install step when a usable napi version was found
  var installArgs = (best == null) ? [] : [ napi.get_command_arg(best) ];
  gyp.todo.unshift(
    { name: 'clean', args: [] },
    { name: 'install', args: installArgs }
  );
  process.nextTick(callback);
}
|
33
node_modules/node-pre-gyp/lib/reveal.js
generated
vendored
Normal file
33
node_modules/node-pre-gyp/lib/reveal.js
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = reveal;
|
||||
|
||||
exports.usage = 'Reveals data on the versioned binary';
|
||||
|
||||
var fs = require('fs');
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
|
||||
// JSON.stringify replacer: normalize Windows backslashes to forward
// slashes on string-like values; pass every other value through as-is.
function unix_paths(key, val) {
  if (val && val.replace) {
    return val.replace(/\\/g, '/');
  }
  return val;
}
|
||||
|
||||
// Print versioning data for the configured binary. When the trailing
// CLI arg names a known option, print just that value; otherwise dump
// every option as JSON with Windows paths normalized.
function reveal(gyp, argv, callback) {
  var pkg = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(pkg, gyp.opts, napi_build_version);
  // a second CLI arg may select a single option to reveal
  //console.log(JSON.stringify(gyp.opts,null,1))
  var remain = gyp.opts.argv.remain[gyp.opts.argv.remain.length - 1];
  var matched = Boolean(remain && opts.hasOwnProperty(remain));
  if (matched) {
    console.log(opts[remain].replace(/\\/g, '/'));
  } else {
    // no single option requested: emit everything as json
    console.log(JSON.stringify(opts, unix_paths, 2));
  }
  return callback();
}
|
81
node_modules/node-pre-gyp/lib/testbinary.js
generated
vendored
Normal file
81
node_modules/node-pre-gyp/lib/testbinary.js
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = testbinary;
|
||||
|
||||
exports.usage = 'Tests that the binary.node can be required';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var log = require('npmlog');
|
||||
var cp = require('child_process');
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var path = require('path');
|
||||
|
||||
/**
 * Validates that the built binary.node can actually be require()'d.
 *
 * - Unsupported runtimes (e.g. electron) are skipped outright.
 * - Validation is skipped when host platform/arch differ from target.
 * - node-webkit targets launch the nw-pre-gyp app under `nw`;
 *   everything else evaluates a require() in a child node process.
 * Calls back with no args on success, or (err[, {stdout,stderr}]).
 */
function testbinary(gyp, argv, callback) {
  var args = [];
  var options = {};
  var shell_cmd = process.execPath;
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  // skip validation for runtimes we don't explicitly support (like electron)
  if (opts.runtime &&
      opts.runtime !== 'node-webkit' &&
      opts.runtime !== 'node') {
    return callback();
  }
  var nw = (opts.runtime && opts.runtime === 'node-webkit');
  // ensure on windows that / are used for require path
  var binary_module = opts.module.replace(/\\/g, '/');
  if ((process.arch != opts.target_arch) ||
      (process.platform != opts.target_platform)) {
    var msg = "skipping validation since host platform/arch (";
    msg += process.platform+'/'+process.arch+")";
    msg += " does not match target (";
    msg += opts.target_platform+'/'+opts.target_arch+")";
    log.info('validate', msg);
    return callback();
  }
  if (nw) {
    options.timeout = 5000;
    if (process.platform === 'darwin') {
      shell_cmd = 'node-webkit';
    } else if (process.platform === 'win32') {
      shell_cmd = 'nw.exe';
    } else {
      shell_cmd = 'nw';
    }
    var modulePath = path.resolve(binary_module);
    var appDir = path.join(__dirname, 'util', 'nw-pre-gyp');
    args.push(appDir);
    args.push(modulePath);
    log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
    cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) {
      // check for normal timeout for node-webkit
      if (err) {
        if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) {
          return callback();
        }
        var stderrLog = stderr.toString();
        log.info('stderr', stderrLog);
        // a lone Xlib RANDR complaint is noise, not a failure
        if( /^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog) ){
          log.info('RANDR', 'stderr contains only RANDR error, ignored');
          return callback();
        }
        return callback(err);
      }
      return callback();
    });
    return;
  }
  args.push('--eval');
  // BUG FIX: escape single quotes so a quote in the module path cannot
  // break the generated --eval expression (the previous replacement
  // '\'' was a no-op — it replaced ' with ').
  args.push("require('" + binary_module.replace(/'/g, "\\'") + "')");
  log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
  cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) {
    if (err) {
      return callback(err, { stdout:stdout, stderr:stderr});
    }
    return callback();
  });
}
|
55
node_modules/node-pre-gyp/lib/testpackage.js
generated
vendored
Normal file
55
node_modules/node-pre-gyp/lib/testpackage.js
generated
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = testpackage;
|
||||
|
||||
exports.usage = 'Tests that the staged package is valid';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var log = require('npmlog');
|
||||
var existsAsync = fs.exists || path.exists;
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var testbinary = require('./testbinary.js');
|
||||
var tar = require('tar');
|
||||
var mkdirp = require('mkdirp');
|
||||
|
||||
// Validate the staged tarball: unpack it into module_path and then run
// `testbinary` against the unpacked binary.
function testpackage(gyp, argv, callback) {
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  var tarball = opts.staged_tarball;
  existsAsync(tarball, function(found) {
    if (!found) {
      return callback(new Error("Cannot test package because " + tarball + " missing: run `node-pre-gyp package` first"));
    }
    var to = opts.module_path;

    // log each entry as it is unpacked
    function onentry(entry) {
      log.info('install','unpacking [' + entry.path + ']');
    }

    // once unpacked, try requiring the binary
    function after_extract() {
      testbinary(gyp, argv, function(err) {
        if (err) return callback(err);
        console.log('['+package_json.name+'] Package appears valid');
        return callback();
      });
    }

    mkdirp(to, function(err) {
      if (err) return callback(err);
      tar.extract({
        file: tarball,
        cwd: to,
        strip: 1,
        onentry: onentry
      }).then(after_extract, callback);
    });
  });
}
|
43
node_modules/node-pre-gyp/lib/unpublish.js
generated
vendored
Normal file
43
node_modules/node-pre-gyp/lib/unpublish.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = unpublish;
|
||||
|
||||
exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)';
|
||||
|
||||
var fs = require('fs');
|
||||
var log = require('npmlog');
|
||||
var versioning = require('./util/versioning.js');
|
||||
var napi = require('./util/napi.js');
|
||||
var s3_setup = require('./util/s3_setup.js');
|
||||
var url = require('url');
|
||||
var config = require('rc')("node_pre_gyp",{acl:"public-read"});
|
||||
|
||||
// Remove a previously published binary from S3. A missing object is
// reported and treated as success.
function unpublish(gyp, argv, callback) {
  var AWS = require("aws-sdk");
  var package_json = JSON.parse(fs.readFileSync('./package.json'));
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  s3_setup.detect(opts.hosted_path, config);
  AWS.config.update(config);
  var s3 = new AWS.S3();
  var s3_opts = {
    Bucket: config.bucket,
    Key: url.resolve(config.prefix, opts.package_name)
  };
  s3.headObject(s3_opts, function(err, meta) {
    if (err && err.code == 'NotFound') {
      // nothing to remove
      console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
      return callback();
    }
    if (err) return callback(err);
    log.info('unpublish', JSON.stringify(meta));
    s3.deleteObject(s3_opts, function(err, resp) {
      if (err) return callback(err);
      log.info(JSON.stringify(resp));
      console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
      return callback();
    });
  });
}
|
2070
node_modules/node-pre-gyp/lib/util/abi_crosswalk.json
generated
vendored
Normal file
2070
node_modules/node-pre-gyp/lib/util/abi_crosswalk.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
87
node_modules/node-pre-gyp/lib/util/compile.js
generated
vendored
Normal file
87
node_modules/node-pre-gyp/lib/util/compile.js
generated
vendored
Normal file
@ -0,0 +1,87 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports;
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var win = process.platform == 'win32';
|
||||
var existsSync = fs.existsSync || path.existsSync;
|
||||
var cp = require('child_process');
|
||||
|
||||
// try to build up the complete path to node-gyp
|
||||
/* priority:
|
||||
- node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887)
|
||||
- node-gyp on NODE_PATH
|
||||
- node-gyp inside npm on NODE_PATH (ignore on iojs)
|
||||
- node-gyp inside npm beside node exe
|
||||
*/
|
||||
/* Locate the node-gyp entry script. Priority:
   - ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887)
   - node-gyp resolvable on NODE_PATH
   - node-gyp bundled inside npm on NODE_PATH (skipped on iojs)
   - node-gyp bundled inside npm installed beside the node executable
   Returns undefined when no candidate exists on disk. */
function which_node_gyp() {
  var candidate;
  if (process.env.npm_config_node_gyp) {
    try {
      candidate = process.env.npm_config_node_gyp;
      if (existsSync(candidate)) return candidate;
    } catch (err) { }
  }
  try {
    var node_gyp_main = require.resolve('node-gyp');
    candidate = path.join(
      path.dirname(path.dirname(node_gyp_main)),
      'bin/node-gyp.js');
    if (existsSync(candidate)) return candidate;
  } catch (err) { }
  if (process.execPath.indexOf('iojs') === -1) {
    try {
      var npm_main = require.resolve('npm');
      candidate = path.join(
        path.dirname(path.dirname(npm_main)),
        'node_modules/node-gyp/bin/node-gyp.js');
      if (existsSync(candidate)) return candidate;
    } catch (err) { }
  }
  var npm_base = path.join(
    path.dirname(path.dirname(process.execPath)),
    'lib/node_modules/npm/');
  candidate = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js');
  if (existsSync(candidate)) return candidate;
}
|
||||
|
||||
// Spawn node-gyp (or nw-gyp for the node-webkit runtime) with the given
// args, inheriting stdio. Calls back exactly once: with an Error on
// spawn failure or non-zero exit, otherwise with (null, opts).
module.exports.run_gyp = function(args,opts,callback) {
  var shell_cmd = '';
  var cmd_args = [];
  if (opts.runtime && opts.runtime == 'node-webkit') {
    shell_cmd = 'nw-gyp';
    if (win) shell_cmd += '.cmd';
  } else {
    var node_gyp_path = which_node_gyp();
    if (node_gyp_path) {
      // run the resolved node-gyp script under the current node
      shell_cmd = process.execPath;
      cmd_args.push(node_gyp_path);
    } else {
      // fall back to a node-gyp expected to be on the PATH
      shell_cmd = 'node-gyp';
      if (win) shell_cmd += '.cmd';
    }
  }
  var final_args = cmd_args.concat(args);
  var cmd = cp.spawn(shell_cmd, final_args, {cwd: undefined, env: process.env, stdio: [ 0, 1, 2]});
  // BUG FIX: 'error' and 'close' can both fire for the same child (e.g.
  // a spawn failure); guard so the callback is only ever invoked once.
  var done = false;
  function finish(err) {
    if (done) return;
    done = true;
    if (err) return callback(err);
    callback(null,opts);
  }
  cmd.on('error', function (err) {
    if (err) {
      return finish(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ")"));
    }
    finish(null);
  });
  cmd.on('close', function (code) {
    if (code && code !== 0) {
      return finish(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ")"));
    }
    finish(null);
  });
};
|
103
node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js
generated
vendored
Normal file
103
node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js
generated
vendored
Normal file
@ -0,0 +1,103 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports = handle_gyp_opts;
|
||||
|
||||
var fs = require('fs');
|
||||
var versioning = require('./versioning.js');
|
||||
var napi = require('./napi.js');
|
||||
|
||||
/*
|
||||
|
||||
Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp.
|
||||
|
||||
We massage the args and options slightly to account for differences in what commands mean between
|
||||
node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below)
|
||||
|
||||
Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether
|
||||
node-pre-gyp is called directory, or if it is called in a `run-script` phase of npm.
|
||||
|
||||
We also try to preserve any command line options that might have been passed to npm or node-pre-gyp.
|
||||
But this is fairly difficult without passing way to much through. For example `gyp.opts` contains all
|
||||
the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have
|
||||
to be very selective about what we pass through.
|
||||
|
||||
For example:
|
||||
|
||||
`npm install --build-from-source` will give:
|
||||
|
||||
argv == [ 'rebuild' ]
|
||||
gyp.opts.argv == { remain: [ 'install' ],
|
||||
cooked: [ 'install', '--fallback-to-build' ],
|
||||
original: [ 'install', '--fallback-to-build' ] }
|
||||
|
||||
`./bin/node-pre-gyp build` will give:
|
||||
|
||||
argv == []
|
||||
gyp.opts.argv == { remain: [ 'build' ],
|
||||
cooked: [ 'build' ],
|
||||
original: [ '-C', 'test/app1', 'build' ] }
|
||||
|
||||
*/
|
||||
|
||||
// select set of node-pre-gyp versioning info
|
||||
// to share with node-gyp
|
||||
var share_with_node_gyp = [
|
||||
'module',
|
||||
'module_name',
|
||||
'module_path',
|
||||
'napi_version',
|
||||
'node_abi_napi',
|
||||
'napi_build_version',
|
||||
'node_napi_label'
|
||||
];
|
||||
|
||||
// Gather node-pre-gyp versioning info plus pass-through CLI options and
// hand them to node-gyp. Calls back once with (err) when a required
// versioning option is missing, otherwise with (null, {opts, gyp, pre,
// unparsed}).
function handle_gyp_opts(gyp, argv, callback) {

  // Collect node-pre-gyp specific variables to pass to node-gyp
  var node_pre_gyp_options = [];
  // generate custom node-pre-gyp versioning info
  var napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  var opts = versioning.evaluate(JSON.parse(fs.readFileSync('./package.json')), gyp.opts, napi_build_version);
  // BUG FIX: this used `return callback(err)` inside a forEach, which
  // does not stop iteration and allowed the callback to fire twice
  // (once with the error, then again with a result). A plain loop lets
  // the early return actually end the function.
  for (var i = 0; i < share_with_node_gyp.length; i++) {
    var key = share_with_node_gyp[i];
    var val = opts[key];
    if (val) {
      node_pre_gyp_options.push('--' + key + '=' + val);
    } else if (key === 'napi_build_version') {
      node_pre_gyp_options.push('--' + key + '=0');
    } else if (key !== 'napi_version' && key !== 'node_abi_napi') {
      // napi_version / node_abi_napi are legitimately absent pre-N-API
      return callback(new Error("Option " + key + " required but not found by node-pre-gyp"));
    }
  }

  // Collect options that follow the special -- which disables nopt parsing
  var unparsed_options = [];
  var double_hyphen_found = false;
  gyp.opts.argv.original.forEach(function(opt) {
    if (double_hyphen_found) {
      unparsed_options.push(opt);
    }
    if (opt == '--') {
      double_hyphen_found = true;
    }
  });

  // We try respect and pass through remaining command
  // line options (like --foo=bar) to node-gyp
  var cooked = gyp.opts.argv.cooked;
  var node_gyp_options = [];
  cooked.forEach(function(value) {
    if (value.length > 2 && value.slice(0,2) == '--') {
      var flag = value.slice(2);
      var flag_val = cooked[cooked.indexOf(value)+1];
      if (flag_val && flag_val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar']
        node_gyp_options.push('--' + flag + '=' + flag_val);
      } else { // pass through --foo
        node_gyp_options.push(value);
      }
    }
  });

  var result = {'opts':opts,'gyp':node_gyp_options,'pre':node_pre_gyp_options,'unparsed':unparsed_options};
  return callback(null,result);
}
|
205
node_modules/node-pre-gyp/lib/util/napi.js
generated
vendored
Normal file
205
node_modules/node-pre-gyp/lib/util/napi.js
generated
vendored
Normal file
@ -0,0 +1,205 @@
|
||||
"use strict";
|
||||
|
||||
var fs = require('fs');
|
||||
|
||||
module.exports = exports;
|
||||
|
||||
// Numeric [major, minor, patch] of the running node with any
// pre-release suffix stripped (e.g. 'v9.3.0-pre' -> [9, 3, 0]).
var versionArray = process.version
  .substr(1)
  .replace(/-.*$/, '')
  .split('.')
  .map(Number);

// Commands that are repeated once per target N-API build version.
var napi_multiple_commands = [
  'build',
  'clean',
  'configure',
  'package',
  'publish',
  'reveal',
  'testbinary',
  'testpackage',
  'unpublish'
];

// Prefix used to carry the napi build version through command args.
var napi_build_version_tag = 'napi_build_version=';
|
||||
|
||||
// Returns the non-zero numeric N-API version, or undefined when N-API
// is not supported. (`target` may be undefined; honoring it correctly
// would need an updated cross-walk.)
module.exports.get_napi_version = function(target) {
  var version = process.versions.napi; // absent on pre-N-API nodes
  if (version) return version;
  // historical back-fill — this mapping never needs updating
  if (versionArray[0] === 9 && versionArray[1] >= 3) return 2; // 9.3.0+
  if (versionArray[0] === 8) return 1; // 8.0.0+
  return undefined;
};

// Same value as get_napi_version, stringified; '' when unsupported.
module.exports.get_napi_version_as_string = function(target) {
  var version = module.exports.get_napi_version(target);
  return version ? '' + version : '';
};
|
||||
|
||||
// Validate the `binary` stanza of package.json against the napi
// configuration. Throws an Error describing the first inconsistency
// found; returns undefined when the configuration is coherent.
module.exports.validate_package_json = function(package_json, opts) { // throws Error

    var binary = package_json.binary;
    // do the substitution placeholders appear where they are required?
    var module_path_ok = pathOK(binary.module_path);
    var remote_path_ok = pathOK(binary.remote_path);
    var package_name_ok = pathOK(binary.package_name);
    // versions buildable on this node (warnings logged) vs. the raw list
    var napi_build_versions = module.exports.get_napi_build_versions(package_json,opts,true);
    var napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json);

    // every declared napi version must be a positive integer
    if (napi_build_versions) {
        napi_build_versions.forEach(function(napi_build_version){
            if (!(parseInt(napi_build_version,10) === napi_build_version && napi_build_version > 0)) {
                throw new Error("All values specified in napi_versions must be positive integers.");
            }
        });
    }

    // napi builds need the placeholder so per-version artifacts don't collide
    if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) {
        throw new Error("When napi_versions is specified; module_path and either remote_path or " +
            "package_name must contain the substitution string '{napi_build_version}`.");
    }

    // conversely, the placeholder is meaningless without napi_versions
    if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) {
        throw new Error("When the substitution string '{napi_build_version}` is specified in " +
            "module_path, remote_path, or package_name; napi_versions must also be specified.");
    }

    // napi-only package that this node cannot build any version of
    if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) &&
        module.exports.build_napi_only(package_json)) {
        throw new Error(
            'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' +
            'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' +
            'This Node instance cannot run this module.');
    }

    // napi versions declared but none buildable here, with no non-napi fallback
    if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) {
        throw new Error(
            'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' +
            'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' +
            'This Node instance cannot run this module.');
    }

};
|
||||
|
||||
// True when a binary path template contains one of the napi
// substitution placeholders; falsy inputs fall straight through.
function pathOK (path) {
  if (!path) return path;
  return path.indexOf('{napi_build_version}') !== -1 ||
         path.indexOf('{node_napi_label}') !== -1;
}
|
||||
|
||||
// Expand a command list for napi builds: commands listed in
// napi_multiple_commands are repeated once per buildable N-API
// version; `install` instead targets only the single best version;
// everything else passes through unchanged.
module.exports.expand_commands = function(package_json, opts, commands) {
  var result = [];
  var build_versions = module.exports.get_napi_build_versions(package_json, opts);
  commands.forEach(function(command) {
    if (build_versions && command.name === 'install') {
      // install only ever targets the best matching version
      var best = module.exports.get_best_napi_build_version(package_json, opts);
      var args = best ? [ napi_build_version_tag + best ] : [];
      result.push({ name: command.name, args: args });
    } else if (build_versions && napi_multiple_commands.indexOf(command.name) !== -1) {
      // fan the command out, once per buildable napi version
      build_versions.forEach(function(v) {
        result.push({ name: command.name, args: command.args.concat(napi_build_version_tag + v) });
      });
    } else {
      result.push(command);
    }
  });
  return result;
};
|
||||
|
||||
// List the napi versions from package.json that this node can build,
// de-duplicated, honoring --build-latest-napi-version-only. Returns
// undefined when none apply. `opts` may be undefined; when `warnings`
// is set, unbuildable versions are logged.
module.exports.get_napi_build_versions = function(package_json, opts, warnings) {
  var log = require('npmlog');
  var buildable = [];
  var supported = module.exports.get_napi_version(opts ? opts.target : undefined);
  // remove duplicates, verify each napi version can actually be built
  if (package_json.binary && package_json.binary.napi_versions) {
    package_json.binary.napi_versions.forEach(function(napi_version) {
      if (buildable.indexOf(napi_version) !== -1) return; // duplicate
      if (supported && napi_version <= supported) {
        buildable.push(napi_version);
      } else if (warnings && supported) {
        log.info('This Node instance does not support builds for N-API version', napi_version);
      }
    });
  }
  if (opts && opts["build-latest-napi-version-only"]) {
    // collapse to just the highest buildable version
    var latest = 0;
    buildable.forEach(function(napi_version) {
      if (napi_version > latest) latest = napi_version;
    });
    buildable = latest ? [ latest ] : [];
  }
  return buildable.length ? buildable : undefined;
};
|
||||
|
||||
// De-duplicated napi_versions straight from package.json, with no
// check against what this node can build; undefined when unset/empty.
module.exports.get_napi_build_versions_raw = function(package_json) {
  var binary = package_json.binary;
  if (!binary || !binary.napi_versions) return undefined;
  var unique = [];
  binary.napi_versions.forEach(function(napi_version) {
    if (unique.indexOf(napi_version) === -1) {
      unique.push(napi_version);
    }
  });
  return unique.length ? unique : undefined;
};
|
||||
|
||||
// Render a napi build version as a command-line arg token.
module.exports.get_command_arg = function(napi_build_version) {
  return napi_build_version_tag + napi_build_version;
};

// Parse the napi build version back out of command args; undefined
// when no tagged arg is present.
module.exports.get_napi_build_version_from_command_args = function(command_args) {
  var found;
  command_args.some(function(arg) {
    if (arg.indexOf(napi_build_version_tag) === 0) {
      found = parseInt(arg.substr(napi_build_version_tag.length), 10);
      return true;
    }
    return false;
  });
  return found;
};

// Move ./build aside into its per-napi-version directory, replacing
// any stale copy first. No-op when no napi version is given.
module.exports.swap_build_dir_out = function(napi_build_version) {
  if (!napi_build_version) return;
  var rm = require('rimraf');
  rm.sync(module.exports.get_build_dir(napi_build_version));
  fs.renameSync('build', module.exports.get_build_dir(napi_build_version));
};

// Restore a per-napi-version build directory back to ./build,
// removing any stale ./build first. No-op when no napi version given.
module.exports.swap_build_dir_in = function(napi_build_version) {
  if (!napi_build_version) return;
  var rm = require('rimraf');
  rm.sync('build');
  fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build');
};

// Name of the temporary build dir for one napi version.
module.exports.get_build_dir = function(napi_build_version) {
  return 'build-tmp-napi-v' + napi_build_version;
};
|
||||
|
||||
// Highest napi version both offered by package.json and supported by
// this node; undefined when there is no overlap.
module.exports.get_best_napi_build_version = function(package_json, opts) {
  var candidates = module.exports.get_napi_build_versions(package_json, opts);
  if (!candidates) return undefined;
  var ours = module.exports.get_napi_version(opts ? opts.target : undefined);
  var best = 0;
  candidates.forEach(function(napi_build_version) {
    if (napi_build_version > best && napi_build_version <= ours) {
      best = napi_build_version;
    }
  });
  return best === 0 ? undefined : best;
};

// True when the package can only be built against N-API (its
// package_name carries no {node_napi_label} substitution).
module.exports.build_napi_only = function(package_json) {
  var binary = package_json.binary;
  return binary && binary.package_name &&
      binary.package_name.indexOf('{node_napi_label}') === -1;
};
|
26
node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html
generated
vendored
Normal file
26
node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
<!doctype html>
<!-- Minimal nw.js app used by testbinary to validate a node-webkit
     build: it require()s the module folder given as the first CLI
     argument and exits 0 on success, 1 on failure. -->
<html>
<head>
<meta charset="utf-8">
<title>Node-webkit-based module test</title>
<script>
// Invoked from <body onload>. Exits the process with 0/1 so the
// parent testbinary process can read success/failure from the code.
function nwModuleTest(){
   var util = require('util');
   var moduleFolder = require('nw.gui').App.argv[0];
   try {
      require(moduleFolder);
   } catch(e) {
      // NOTE(review): logging appears intentionally skipped on win32 —
      // presumably no usable console there; confirm before changing.
      if( process.platform !== 'win32' ){
        util.log('nw-pre-gyp error:');
        util.log(e.stack);
      }
      process.exit(1);
   }
   process.exit(0);
}
</script>
</head>
<body onload="nwModuleTest()">
<h1>Node-webkit-based module test</h1>
</body>
</html>
|
9
node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json
generated
vendored
Normal file
9
node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
{
|
||||
"main": "index.html",
|
||||
"name": "nw-pre-gyp-module-test",
|
||||
"description": "Node-webkit-based module test.",
|
||||
"version": "0.0.1",
|
||||
"window": {
|
||||
"show": false
|
||||
}
|
||||
}
|
27
node_modules/node-pre-gyp/lib/util/s3_setup.js
generated
vendored
Normal file
27
node_modules/node-pre-gyp/lib/util/s3_setup.js
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports;
|
||||
|
||||
var url = require('url');
|
||||
|
||||
// Matches S3 virtual-hosted-style hostnames such as
// "bucket.s3.amazonaws.com" or "bucket.s3-us-west-2.amazonaws.com".
// BUG FIX: this pattern is a string handed to String#match, so the
// dots must be double-escaped ("\\.") — the previous single "\."
// collapsed to a bare "." that matched any character, making the
// hostname check overly permissive. Valid hostnames match unchanged.
var URI_REGEX="^(.*)\\.(s3(?:-.*)?)\\.amazonaws\\.com$";

// Derive prefix/bucket/region from an S3 hosted_path URL, filling in
// only the config fields the user's rc config did not already set.
module.exports.detect = function(to,config) {
  var uri = url.parse(to);
  var hostname_matches = uri.hostname.match(URI_REGEX);
  // prefix is the pathname minus its leading slash ('' for root)
  config.prefix = (!uri.pathname || uri.pathname == '/') ? '' : uri.pathname.replace('/','');
  if(!hostname_matches) {
    return;
  }
  if (!config.bucket) {
    config.bucket = hostname_matches[1];
  }
  if (!config.region) {
    var s3_domain = hostname_matches[2];
    if (s3_domain.slice(0,3) == 's3-' &&
        s3_domain.length >= 3) {
      // it appears the region is explicit in the url
      config.region = s3_domain.replace('s3-','');
    }
  }
};
|
331
node_modules/node-pre-gyp/lib/util/versioning.js
generated
vendored
Normal file
331
node_modules/node-pre-gyp/lib/util/versioning.js
generated
vendored
Normal file
@ -0,0 +1,331 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = exports;
|
||||
|
||||
var path = require('path');
|
||||
var semver = require('semver');
|
||||
var url = require('url');
|
||||
var detect_libc = require('detect-libc');
|
||||
var napi = require('./napi.js');
|
||||
|
||||
// This is used for unit testing to provide a fake ABI crosswalk that
// emulates one that is not updated for the current version.
var abi_crosswalk = process.env.NODE_PRE_GYP_ABI_CROSSWALK ?
    require(process.env.NODE_PRE_GYP_ABI_CROSSWALK) :
    require('./abi_crosswalk.json');

// First crosswalk entry encountered for each major version line.
var major_versions = {};
Object.keys(abi_crosswalk).forEach(function(v) {
  var major = v.split('.')[0];
  if (!major_versions[major]) {
    major_versions[major] = v;
  }
});
|
||||
|
||||
// ABI tag for electron targets: "<runtime>-v<major>.<minor>".
// Electron guarantees that patch version updates won't break native
// modules, so the patch number is omitted.
function get_electron_abi(runtime, target_version) {
  if (!runtime) {
    throw new Error("get_electron_abi requires valid runtime arg");
  }
  if (typeof target_version === 'undefined') {
    // erroneous CLI call
    throw new Error("Empty target version is not supported if electron is the target.");
  }
  var sem_ver = semver.parse(target_version);
  return [runtime, '-v', sem_ver.major, '.', sem_ver.minor].join('');
}
module.exports.get_electron_abi = get_electron_abi;
|
||||
|
||||
// ABI tag for node-webkit targets: "<runtime>-v<target_version>".
function get_node_webkit_abi(runtime, target_version) {
  if (!runtime) {
    throw new Error("get_node_webkit_abi requires valid runtime arg");
  }
  if (target_version === undefined) {
    // erroneous CLI call
    throw new Error("Empty target version is not supported if node-webkit is the target.");
  }
  return [runtime, target_version].join('-v');
}
|
||||
module.exports.get_node_webkit_abi = get_node_webkit_abi;
|
||||
|
||||
// Compute the running-node ABI tag from a process.versions-like
// object: exact node version for unstable odd 0.x series, otherwise
// the modules (ABI) number, falling back to the v8 major.minor.
function get_node_abi(runtime, versions) {
  if (!runtime) {
    throw new Error("get_node_abi requires valid runtime arg");
  }
  if (!versions) {
    throw new Error("get_node_abi requires valid process.versions object");
  }
  var sem_ver = semver.parse(versions.node);
  if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series
    // unstable line — pin to the exact node version
    // https://github.com/mapbox/node-pre-gyp/issues/124
    return runtime+'-v'+versions.node;
  }
  // process.versions.modules added in >= v0.10.4 and v0.11.7
  // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e
  if (versions.modules) {
    return runtime+'-v' + (+versions.modules);
  }
  return 'v8-' + versions.v8.split('.').slice(0,2).join('.');
}
module.exports.get_node_abi = get_node_abi;
|
||||
|
||||
/**
 * Resolve the ABI tag for a runtime/target-version pair.
 *
 * Dispatches to get_node_webkit_abi / get_electron_abi for those runtimes.
 * For 'node': with no target_version, uses the running process's versions;
 * with a target_version, looks it up in abi_crosswalk.json, and when there
 * is no exact entry, guesses an ABI-compatible earlier release (rationale
 * in the long comment below).
 *
 * @param {string} runtime - 'node', 'node-webkit' or 'electron'
 * @param {string} [target_version] - version to target; defaults to the running process
 * @returns {string} ABI tag, e.g. 'node-v72'
 * @throws {Error} on unknown runtimes or versions that cannot be resolved
 */
function get_runtime_abi(runtime, target_version) {
    if (!runtime) {
        throw new Error("get_runtime_abi requires valid runtime arg");
    }
    if (runtime === 'node-webkit') {
        return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']);
    } else if (runtime === 'electron') {
        return get_electron_abi(runtime, target_version || process.versions.electron);
    } else {
        if (runtime != 'node') {
            throw new Error("Unknown Runtime: '" + runtime + "'");
        }
        if (!target_version) {
            // No cross-compile target: the running process's ABI applies.
            return get_node_abi(runtime,process.versions);
        } else {
            var cross_obj;
            // abi_crosswalk generated with ./scripts/abi_crosswalk.js
            if (abi_crosswalk[target_version]) {
                cross_obj = abi_crosswalk[target_version];
            } else {
                var target_parts = target_version.split('.').map(function(i) { return +i; });
                if (target_parts.length != 3) { // parse failed
                    throw new Error("Unknown target version: " + target_version);
                }
                /*
                The below code tries to infer the last known ABI compatible version
                that we have recorded in the abi_crosswalk.json when an exact match
                is not possible. The reasons for this to exist are complicated:

                   - We support passing --target to be able to allow developers to package binaries for versions of node
                     that are not the same one as they are running. This might also be used in combination with the
                     --target_arch or --target_platform flags to also package binaries for alternative platforms
                   - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node
                     version that is running in memory
                   - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look
                     this info up for all versions
                   - But we cannot easily predict what the future ABI will be for released versions
                   - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly
                     by being fully available at install time.
                   - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release
                     need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if
                     you want the `--target` flag to keep working for the latest version
                   - Which is impractical ^^
                   - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding.

                In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that
                only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be
                ABI compatible with v0.10.33).

                TODO: use semver module instead of custom version parsing
                */
                var major = target_parts[0];
                var minor = target_parts[1];
                var patch = target_parts[2];
                // io.js: yeah if node.js ever releases 1.x this will break
                // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616
                if (major === 1) {
                    // look for last release that is the same major version
                    // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0
                    // NOTE(review): this walk decrements minor AND patch together
                    // each iteration (e.g. 1.8.4 probes 1.7.3, 1.6.2, ...), so it
                    // can skip over crosswalk entries on the way down — presumably
                    // acceptable because any same-major entry is ABI compatible;
                    // confirm against abi_crosswalk.json coverage before changing.
                    while (true) {
                        if (minor > 0) --minor;
                        if (patch > 0) --patch;
                        var new_iojs_target = '' + major + '.' + minor + '.' + patch;
                        if (abi_crosswalk[new_iojs_target]) {
                            cross_obj = abi_crosswalk[new_iojs_target];
                            console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
                            console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target');
                            break;
                        }
                        if (minor === 0 && patch === 0) {
                            break;
                        }
                    }
                } else if (major >= 2) {
                    // look for last release that is the same major version
                    // (major_versions maps a major number to its newest known
                    // release in the crosswalk; defined above, outside this view)
                    if (major_versions[major]) {
                        cross_obj = abi_crosswalk[major_versions[major]];
                        console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
                        console.log('Warning: but node-pre-gyp successfully choose ' + major_versions[major] + ' as ABI compatible target');
                    }
                } else if (major === 0) { // node.js
                    if (target_parts[1] % 2 === 0) { // for stable/even node.js series
                        // look for the last release that is the same minor release
                        // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0
                        while (--patch > 0) {
                            var new_node_target = '' + major + '.' + minor + '.' + patch;
                            if (abi_crosswalk[new_node_target]) {
                                cross_obj = abi_crosswalk[new_node_target];
                                console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
                                console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target');
                                break;
                            }
                        }
                    }
                }
            }
            if (!cross_obj) {
                throw new Error("Unsupported target version: " + target_version);
            }
            // emulate process.versions
            var versions_obj = {
                node: target_version,
                v8: cross_obj.v8+'.0',
                // abi_crosswalk uses 1 for node versions lacking process.versions.modules
                // process.versions.modules added in >= v0.10.4 and v0.11.7
                modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined
            };
            return get_node_abi(runtime, versions_obj);
        }
    }
}
|
||||
// Exported for direct use and for unit tests.
module.exports.get_runtime_abi = get_runtime_abi;
|
||||
|
||||
// Properties that must be present (and non-empty) under package.json `binary`.
var required_parameters = [
    'module_name',
    'module_path',
    'host'
];

/**
 * Validate that a package.json is node-pre-gyp ready.
 *
 * Checks for the required top-level properties (main, name, version, binary)
 * and the required `binary.*` sub-properties, enforces an https host, and
 * finally delegates N-API-specific checks to napi.validate_package_json.
 *
 * @param {object} package_json - parsed package.json contents
 * @param {object} opts - command options, forwarded to the napi validator
 * @throws {Error} listing all missing properties, or on an http:// host
 */
function validate_config(package_json,opts) {
    var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n';
    var missing = [];
    if (!package_json.main) {
        missing.push('main');
    }
    if (!package_json.version) {
        missing.push('version');
    }
    if (!package_json.name) {
        missing.push('name');
    }
    if (!package_json.binary) {
        missing.push('binary');
    }
    var o = package_json.binary;
    // A bare 'binary' entry is replaced by the specific 'binary.*' entries
    // reported below. The previous code used `missing.pop('binary')`, but
    // Array.prototype.pop ignores its argument and removes the LAST element —
    // it only worked because 'binary' happened to be pushed last. Remove it
    // by index so the intent is explicit and order-independent.
    var binary_idx = missing.indexOf('binary');
    if (binary_idx > -1) {
        missing.splice(binary_idx, 1);
    }
    required_parameters.forEach(function(p) {
        if (!o || o[p] === undefined || o[p] === "") {
            missing.push('binary.' + p);
        }
    });
    if (missing.length >= 1) {
        throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n'));
    }
    if (o) {
        // enforce https over http
        // NOTE(review): url.parse is Node's legacy parser; kept deliberately —
        // confirm WHATWG `new URL()` semantics before migrating.
        var protocol = url.parse(o.host).protocol;
        if (protocol === 'http:') {
            throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted");
        }
    }
    // N-API-specific validation (e.g. binary.napi_versions shape).
    napi.validate_package_json(package_json,opts);
}
|
||||
|
||||
// Exported for direct use and for unit tests.
module.exports.validate_config = validate_config;
|
||||
|
||||
/**
 * Expand '{key}' placeholders in a template string with values from opts.
 * Every occurrence of every key is replaced; keys absent from the template
 * are ignored.
 *
 * @param {string} template - string containing zero or more '{key}' tokens
 * @param {object} opts - replacement values, keyed by placeholder name
 * @returns {string} the expanded template
 */
function eval_template(template,opts) {
    Object.keys(opts).forEach(function(key) {
        var pattern = '{'+key+'}';
        // split/join replaces every occurrence in one pass. Unlike the
        // previous `while (indexOf) replace()` loop it cannot spin forever
        // when a substituted value contains its own placeholder, and it does
        // not interpret `$`-replacement patterns (e.g. '$&') in the value.
        template = template.split(pattern).join(opts[key]);
    });
    return template;
}
|
||||
|
||||
// url.resolve needs exactly one trailing slash to behave correctly:
// a doubled slash breaks requests, and a missing one prevents proper joining.
function fix_slashes(pathname) {
    // Append '/' only when it is not already the final character.
    return pathname.slice(-1) === '/' ? pathname : pathname + '/';
}
|
||||
|
||||
// Collapse '//' to '/' throughout. path.normalize is unsuitable here
// because it would convert forward slashes to backslashes on Windows.
function drop_double_slashes(pathname) {
    // split/join performs the same left-to-right, non-overlapping scan as
    // the equivalent global-regex replace.
    return pathname.split('//').join('/');
}
|
||||
|
||||
/**
 * Detect which runtime the current process is, based on a
 * process.versions-like object.
 *
 * @param {object} versions - process.versions (or an emulation of it)
 * @returns {string} 'node-webkit', 'electron', or 'node' (the default)
 */
function get_process_runtime(versions) {
    if (versions['node-webkit']) {
        return 'node-webkit';
    }
    if (versions.electron) {
        return 'electron';
    }
    return 'node';
}
|
||||
|
||||
// Exported for direct use and for unit tests.
module.exports.get_process_runtime = get_process_runtime;
|
||||
|
||||
// Default tarball-name template; '{…}' placeholders are expanded by
// eval_template() from the opts computed in module.exports.evaluate.
var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
// By default binaries are published at the host root (no remote sub-path).
var default_remote_path = '';
|
||||
|
||||
/**
 * Compute the full set of versioning/packaging options for a module.
 *
 * Validates package_json, then derives an `opts` object combining the
 * package's semver components, the target runtime/ABI, platform/arch, and
 * the templated host, module_path, package_name, and tarball locations.
 *
 * @param {object} package_json - parsed, node-pre-gyp-ready package.json
 * @param {object} [options] - CLI/env options (target, target_arch, debug, ...)
 * @param {string|number} [napi_build_version] - N-API build version, if building per-N-API
 * @returns {object} opts - fully resolved versioning parameters
 * @throws {Error} via validate_config when package.json is not ready
 */
module.exports.evaluate = function(package_json,options,napi_build_version) {
    options = options || {};
    validate_config(package_json,options); // options is a suitable substitute for opts in this case
    var v = package_json.version;
    var module_version = semver.parse(v);
    // Runtime can be forced via --runtime, else detected from the running process.
    var runtime = options.runtime || get_process_runtime(process.versions);
    var opts = {
        name: package_json.name,
        // NOTE(review): Boolean(...) is redundant before `?:` (truthiness is
        // already applied); left as-is to keep behavior byte-identical.
        configuration: Boolean(options.debug) ? 'Debug' : 'Release',
        debug: options.debug,
        module_name: package_json.binary.module_name,
        version: module_version.version,
        prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '',
        build: module_version.build.length ? module_version.build.join('.') : '',
        major: module_version.major,
        minor: module_version.minor,
        patch: module_version.patch,
        runtime: runtime,
        node_abi: get_runtime_abi(runtime,options.target),
        // 'napi' when the target supports N-API, else the runtime ABI tag.
        node_abi_napi: napi.get_napi_version(options.target) ? 'napi' : get_runtime_abi(runtime,options.target),
        napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported
        napi_build_version: napi_build_version || '',
        node_napi_label: napi_build_version ? 'napi-v' + napi_build_version : get_runtime_abi(runtime,options.target),
        target: options.target || '',
        platform: options.target_platform || process.platform,
        target_platform: options.target_platform || process.platform,
        arch: options.target_arch || process.arch,
        target_arch: options.target_arch || process.arch,
        libc: options.target_libc || detect_libc.family || 'unknown',
        module_main: package_json.main,
        toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119
    };
    // support host mirror with npm config `--{module_name}_binary_host_mirror`
    // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25
    // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
    var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host;
    opts.host = fix_slashes(eval_template(host,opts));
    opts.module_path = eval_template(package_json.binary.module_path,opts);
    // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably
    if (options.module_root) {
        // resolve relative to known module root: works for pre-binding require
        opts.module_path = path.join(options.module_root,opts.module_path);
    } else {
        // resolve relative to current working directory: works for node-pre-gyp commands
        opts.module_path = path.resolve(opts.module_path);
    }
    opts.module = path.join(opts.module_path,opts.module_name + '.node');
    opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path;
    var package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name;
    opts.package_name = eval_template(package_name,opts);
    opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name);
    opts.hosted_path = url.resolve(opts.host,opts.remote_path);
    opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name);
    return opts;
};
|
Reference in New Issue
Block a user