mirror of https://github.com/musix-org/musix-oss synced 2025-07-01 20:13:38 +00:00
This commit is contained in:
MatteZ02
2020-03-03 22:30:50 +02:00
parent edfcc6f474
commit 30022c7634
11800 changed files with 1984416 additions and 1 deletion

1074
node_modules/grpc/src/client.js generated vendored Normal file

File diff suppressed because it is too large

1444
node_modules/grpc/src/client_interceptors.js generated vendored Normal file

File diff suppressed because it is too large

356
node_modules/grpc/src/common.js generated vendored Normal file

@@ -0,0 +1,356 @@
/**
* @license
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
'use strict';
var constants = require('./constants');
/**
* Wrap a function to pass null-like values through without calling it. If no
* function is given, just uses the identity.
* @private
* @param {?function} func The function to wrap
* @return {function} The wrapped function
*/
exports.wrapIgnoreNull = function wrapIgnoreNull(func) {
if (!func) {
return x => x;
}
return function(arg) {
if (arg === null || arg === undefined) {
return null;
}
return func(arg);
};
};
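/* A minimal usage sketch (illustrative only, not part of the module): wrapping
 * a parser so that null and undefined inputs pass straight through.
 *
 *   var parse = exports.wrapIgnoreNull(JSON.parse);
 *   parse('{"a":1}'); // => {a: 1}
 *   parse(null);      // => null; JSON.parse is never called
 */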
/**
* The logger object for the gRPC module. Defaults to console.
* @private
*/
exports.logger = console;
/**
* The current logging verbosity. 0 corresponds to logging everything
* @private
*/
exports.logVerbosity = 0;
/**
* Log a message if the severity is at least as high as the current verbosity
* @private
* @param {Number} severity A value of the grpc.logVerbosity map
* @param {String} message The message to log
*/
exports.log = function log(severity, message) {
if (severity >= exports.logVerbosity) {
exports.logger.error(message);
}
};
/**
* Default options for loading proto files into gRPC
* @alias grpc~defaultLoadOptions
*/
exports.defaultGrpcOptions = {
convertFieldsToCamelCase: false,
binaryAsBase64: false,
longsAsStrings: true,
enumsAsStrings: true,
deprecatedArgumentOrder: false
};
/**
* Create an Error object from a status object
* @param {grpc~StatusObject} status The status object
* @return {Error} The resulting Error
*/
exports.createStatusError = function(status) {
let inverted = Object.keys(constants.status)
.reduce((acc, key) => {
acc[constants.status[key]] = key;
return acc;
}, {});
let statusName = inverted[status.code];
let message = `${status.code} ${statusName}: ${status.details}`;
let error = new Error(message);
error.code = status.code;
error.metadata = status.metadata;
error.details = status.details;
return error;
};
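/* Sketch of the resulting error, assuming a status object like the ones
 * delivered when a call finishes:
 *
 *   var status = {
 *     code: constants.status.NOT_FOUND, // 5
 *     details: 'no such entity',
 *     metadata: null
 *   };
 *   var error = exports.createStatusError(status);
 *   // error.message === '5 NOT_FOUND: no such entity'
 *   // error.code === 5, error.details === 'no such entity'
 */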
/**
* Get a method's type from its definition
* @param {grpc~MethodDefinition} method_definition
* @return {number}
*/
exports.getMethodType = function(method_definition) {
if (method_definition.requestStream) {
if (method_definition.responseStream) {
return constants.methodTypes.BIDI_STREAMING;
} else {
return constants.methodTypes.CLIENT_STREAMING;
}
} else {
if (method_definition.responseStream) {
return constants.methodTypes.SERVER_STREAMING;
} else {
return constants.methodTypes.UNARY;
}
}
};
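/* For illustration, a definition with a unary request and a streaming response
 * is classified as server streaming:
 *
 *   exports.getMethodType({requestStream: false, responseStream: true});
 *   // => constants.methodTypes.SERVER_STREAMING (2)
 */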
/**
* Iterate over a collection of items, and run the given handler.
* Return the results as a flattened array of values.
*
* @private
*
* @param {Array} collection Array of items to process
* @param {Function} handler The function to call on each element in the array
* @return {Array} A flattened array of results.
*/
exports.flatMap = function(collection, handler) {
const mapped = collection.map(handler);
return mapped.reduce((acc, curr) => acc.concat(curr), []);
}
/**
* Given an array of property names and an array of values,
* combine the two into an object map.
* Equivalent to _.zipObject.
*
* @private
*
* @param props {Array<String>} Array of property names
* @param values {Array} Array of property values
* @return {Object} An object with the combined values
*/
exports.zipObject = function(props, values) {
return props.reduce((acc, curr, idx) => {
return Object.assign(acc, { [curr]: values[idx] });
}, {});
}
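/* Quick sketch of the two private helpers above:
 *
 *   exports.flatMap([1, 2], x => [x, x * 10]); // => [1, 10, 2, 20]
 *   exports.zipObject(['a', 'b'], [1, 2]);     // => {a: 1, b: 2}
 */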
// JSDoc definitions that are used in multiple other modules
/**
* Represents the status of a completed request. If `code` is
* {@link grpc.status}.OK, then the request has completed successfully.
* Otherwise, the request has failed and `details` will contain a description of
* the error. Either way, `metadata` contains the trailing response metadata
* sent by the server when it finishes processing the call.
* @typedef {object} grpc~StatusObject
* @property {number} code The error code, a key of {@link grpc.status}
* @property {string} details Human-readable description of the status
* @property {grpc.Metadata} metadata Trailing metadata sent with the status,
* if applicable
*/
/**
* Describes how a request has failed. The member `message` will be the same as
* `details` in {@link grpc~StatusObject}, and `code` and `metadata` are the
* same as in that object.
* @typedef {Error} grpc~ServiceError
* @property {number} code The error code, a key of {@link grpc.status} that is
* not `grpc.status.OK`
* @property {grpc.Metadata} metadata Trailing metadata sent with the status,
* if applicable
*/
/**
* The EventEmitter class in the event standard module
* @external EventEmitter
* @see https://nodejs.org/api/events.html#events_class_eventemitter
*/
/**
* The Readable class in the stream standard module
* @external Readable
* @see https://nodejs.org/api/stream.html#stream_readable_streams
*/
/**
* The Writable class in the stream standard module
* @external Writable
* @see https://nodejs.org/api/stream.html#stream_writable_streams
*/
/**
* The Duplex class in the stream standard module
* @external Duplex
* @see https://nodejs.org/api/stream.html#stream_class_stream_duplex
*/
/**
* A serialization function
* @callback grpc~serialize
* @param {*} value The value to serialize
* @return {Buffer} The value serialized as a byte sequence
*/
/**
* A deserialization function
* @callback grpc~deserialize
* @param {Buffer} data The byte sequence to deserialize
* @return {*} The data deserialized as a value
*/
/**
* The deadline of an operation. If it is a date, the deadline is reached at
* the date and time specified. If it is a finite number, it is treated as
* a number of milliseconds since the Unix Epoch. If it is Infinity, the
* deadline will never be reached. If it is -Infinity, the deadline has already
* passed.
* @typedef {(number|Date)} grpc~Deadline
*/
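/* For example, a deadline five seconds from now can be written either way:
 *
 *   var deadline = new Date(Date.now() + 5000); // as a Date
 *   var deadline_ms = Date.now() + 5000;        // as milliseconds since the epoch
 */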
/**
* An object that completely defines a service method signature.
* @typedef {Object} grpc~MethodDefinition
* @property {string} path The method's URL path
* @property {boolean} requestStream Indicates whether the method accepts
* a stream of requests
* @property {boolean} responseStream Indicates whether the method returns
* a stream of responses
* @property {grpc~serialize} requestSerialize Serialization
* function for request values
* @property {grpc~serialize} responseSerialize Serialization
* function for response values
* @property {grpc~deserialize} requestDeserialize Deserialization
* function for request data
* @property {grpc~deserialize} responseDeserialize Deserialization
* function for response data
*/
/**
* @function MetadataListener
* @param {grpc.Metadata} metadata The response metadata.
* @param {function} next Passes metadata to the next interceptor.
*/
/**
* @function MessageListener
* @param {jspb.Message} message The response message.
* @param {function} next Passes a message to the next interceptor.
*/
/**
* @function StatusListener
* @param {grpc~StatusObject} status The response status.
* @param {function} next Passes a status to the next interceptor.
*/
/**
* A set of interceptor functions triggered by responses
* @typedef {object} grpc~Listener
* @property {MetadataListener=} onReceiveMetadata A function triggered by
* response metadata.
* @property {MessageListener=} onReceiveMessage A function triggered by a
* response message.
* @property {StatusListener=} onReceiveStatus A function triggered by a
* response status.
*/
/**
* @function MetadataRequester
* @param {grpc.Metadata} metadata The request metadata.
* @param {grpc~Listener} listener A listener wired to the previous layers
* in the interceptor stack.
* @param {function} next Passes metadata and a listener to the next
* interceptor.
*/
/**
* @function MessageRequester
* @param {jspb.Message} message The request message.
* @param {function} next Passes a message to the next interceptor.
*/
/**
* @function CloseRequester
* @param {function} next Calls the next interceptor.
*/
/**
* @function CancelRequester
* @param {function} next Calls the next interceptor.
*/
/**
* @function GetPeerRequester
* @param {function} next Calls the next interceptor.
* @return {string}
*/
/**
* @typedef {object} grpc~Requester
* @param {MetadataRequester=} start A function triggered when the call begins.
* @param {MessageRequester=} sendMessage A function triggered by the request
* message.
* @param {CloseRequester=} halfClose A function triggered when the client
* closes the call.
* @param {CancelRequester=} cancel A function triggered when the call is
* cancelled.
* @param {GetPeerRequester=} getPeer A function triggered when the endpoint is
* requested.
*/
/**
* An object that completely defines a service.
* @typedef {Object.<string, grpc~MethodDefinition>} grpc~ServiceDefinition
*/
/**
* An object that defines a protobuf type
* @typedef {object} grpc~ProtobufTypeDefinition
* @param {string} format The format of the type definition object
* @param {*} type The type definition object
* @param {Buffer[]} fileDescriptorProtos Binary protobuf file
* descriptors for all files loaded to construct this type
*/
/**
* An object that defines a package hierarchy with multiple services
* @typedef {Object.<string, grpc~ServiceDefinition|grpc~ProtobufTypeDefinition>} grpc~PackageDefinition
*/
/**
* A function for dynamically assigning an interceptor to a call.
* @function InterceptorProvider
* @param {grpc~MethodDefinition} method_definition The method to provide
* an interceptor for.
* @return {Interceptor|null} The interceptor to provide or nothing
*/
/**
* A function which can modify call options and produce methods to intercept
* RPC operations.
* @function Interceptor
* @param {object} options The grpc call options
* @param {NextCall} nextCall
* @return {InterceptingCall}
*/
/**
* A function which produces the next InterceptingCall.
* @function NextCall
* @param {object} options The grpc call options
* @return {InterceptingCall|null}
*/

266
node_modules/grpc/src/constants.js generated vendored Normal file

@@ -0,0 +1,266 @@
/**
* @license
* Copyright 2017 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/* The comments about status codes are copied verbatim (with some formatting
* modifications) from include/grpc/impl/codegen/status.h, for the purpose of
* including them in generated documentation.
*/
/**
* Enum of status codes that gRPC can return
* @memberof grpc
* @alias grpc.status
* @readonly
* @enum {number}
*/
exports.status = {
/** Not an error; returned on success */
OK: 0,
/** The operation was cancelled (typically by the caller). */
CANCELLED: 1,
/**
* Unknown error. An example of where this error may be returned is
* if a status value received from another address space belongs to
* an error-space that is not known in this address space. Also
* errors raised by APIs that do not return enough error information
* may be converted to this error.
*/
UNKNOWN: 2,
/**
* Client specified an invalid argument. Note that this differs
* from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
* that are problematic regardless of the state of the system
* (e.g., a malformed file name).
*/
INVALID_ARGUMENT: 3,
/**
* Deadline expired before operation could complete. For operations
* that change the state of the system, this error may be returned
* even if the operation has completed successfully. For example, a
* successful response from a server could have been delayed long
* enough for the deadline to expire.
*/
DEADLINE_EXCEEDED: 4,
/** Some requested entity (e.g., file or directory) was not found. */
NOT_FOUND: 5,
/**
* Some entity that we attempted to create (e.g., file or directory)
* already exists.
*/
ALREADY_EXISTS: 6,
/**
* The caller does not have permission to execute the specified
* operation. PERMISSION_DENIED must not be used for rejections
* caused by exhausting some resource (use RESOURCE_EXHAUSTED
* instead for those errors). PERMISSION_DENIED must not be
* used if the caller can not be identified (use UNAUTHENTICATED
* instead for those errors).
*/
PERMISSION_DENIED: 7,
/**
* Some resource has been exhausted, perhaps a per-user quota, or
* perhaps the entire file system is out of space.
*/
RESOURCE_EXHAUSTED: 8,
/**
* Operation was rejected because the system is not in a state
* required for the operation's execution. For example, directory
* to be deleted may be non-empty, an rmdir operation is applied to
* a non-directory, etc.
*
* A litmus test that may help a service implementor in deciding
* between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
*
* - Use UNAVAILABLE if the client can retry just the failing call.
* - Use ABORTED if the client should retry at a higher-level
* (e.g., restarting a read-modify-write sequence).
* - Use FAILED_PRECONDITION if the client should not retry until
* the system state has been explicitly fixed. E.g., if an "rmdir"
* fails because the directory is non-empty, FAILED_PRECONDITION
* should be returned since the client should not retry unless
* they have first fixed up the directory by deleting files from it.
* - Use FAILED_PRECONDITION if the client performs conditional
* REST Get/Update/Delete on a resource and the resource on the
* server does not match the condition. E.g., conflicting
* read-modify-write on the same resource.
*/
FAILED_PRECONDITION: 9,
/**
* The operation was aborted, typically due to a concurrency issue
* like sequencer check failures, transaction aborts, etc.
*
* See litmus test above for deciding between FAILED_PRECONDITION,
* ABORTED, and UNAVAILABLE.
*/
ABORTED: 10,
/**
* Operation was attempted past the valid range. E.g., seeking or
* reading past end of file.
*
* Unlike INVALID_ARGUMENT, this error indicates a problem that may
* be fixed if the system state changes. For example, a 32-bit file
* system will generate INVALID_ARGUMENT if asked to read at an
* offset that is not in the range [0,2^32-1], but it will generate
* OUT_OF_RANGE if asked to read from an offset past the current
* file size.
*
* There is a fair bit of overlap between FAILED_PRECONDITION and
* OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific
* error) when it applies so that callers who are iterating through
* a space can easily look for an OUT_OF_RANGE error to detect when
* they are done.
*/
OUT_OF_RANGE: 11,
/** Operation is not implemented or not supported/enabled in this service. */
UNIMPLEMENTED: 12,
/**
* Internal errors. Means some invariants expected by the underlying
* system have been broken. If you see one of these errors,
* something is very broken.
*/
INTERNAL: 13,
/**
* The service is currently unavailable. This is most likely a
* transient condition and may be corrected by retrying with
* a backoff.
*
* See litmus test above for deciding between FAILED_PRECONDITION,
* ABORTED, and UNAVAILABLE.
*/
UNAVAILABLE: 14,
/** Unrecoverable data loss or corruption. */
DATA_LOSS: 15,
/**
* The request does not have valid authentication credentials for the
* operation.
*/
UNAUTHENTICATED: 16
};
/* The comments about propagation bit flags are copied from
* include/grpc/impl/codegen/propagation_bits.h for the purpose of including
* them in generated documentation.
*/
/**
* Propagation flags: these can be bitwise or-ed to form the propagation option
* for calls.
*
* Users are encouraged to write propagation masks as deltas from the default.
* i.e. write `grpc.propagate.DEFAULTS & ~grpc.propagate.DEADLINE` to disable
* deadline propagation.
* @memberof grpc
* @alias grpc.propagate
* @enum {number}
*/
exports.propagate = {
DEADLINE: 1,
CENSUS_STATS_CONTEXT: 2,
CENSUS_TRACING_CONTEXT: 4,
CANCELLATION: 8,
DEFAULTS: 65535
};
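/* Following the note above, a propagation mask is written as a delta from the
 * defaults; for instance, to propagate everything except the deadline:
 *
 *   var propagate_mask = exports.propagate.DEFAULTS & ~exports.propagate.DEADLINE;
 */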
/* Many of the following comments are copied from
* include/grpc/impl/codegen/grpc_types.h
*/
/**
* Call error constants. Call errors almost always indicate bugs in the gRPC
* library, and these error codes are mainly useful for finding those bugs.
* @memberof grpc
* @readonly
* @enum {number}
*/
const callError = {
OK: 0,
ERROR: 1,
NOT_ON_SERVER: 2,
NOT_ON_CLIENT: 3,
ALREADY_INVOKED: 5,
NOT_INVOKED: 6,
ALREADY_FINISHED: 7,
TOO_MANY_OPERATIONS: 8,
INVALID_FLAGS: 9,
INVALID_METADATA: 10,
INVALID_MESSAGE: 11,
NOT_SERVER_COMPLETION_QUEUE: 12,
BATCH_TOO_BIG: 13,
PAYLOAD_TYPE_MISMATCH: 14
};
exports.callError = callError;
/**
* Write flags: these can be bitwise or-ed to form write options that modify
* how data is written.
* @memberof grpc
* @alias grpc.writeFlags
* @readonly
* @enum {number}
*/
exports.writeFlags = {
/**
* Hint that the write may be buffered and need not go out on the wire
* immediately. GRPC is free to buffer the message until the next non-buffered
* write, or until writes_done, but it need not buffer completely or at all.
*/
BUFFER_HINT: 1,
/**
* Force compression to be disabled for a particular write
*/
NO_COMPRESS: 2
};
/**
* @memberof grpc
* @alias grpc.logVerbosity
* @readonly
* @enum {number}
*/
exports.logVerbosity = {
DEBUG: 0,
INFO: 1,
ERROR: 2
};
/**
* Method types: the supported RPC types
* @memberof grpc
* @alias grpc.methodTypes
* @readonly
* @enum {number}
*/
exports.methodTypes = {
UNARY: 0,
CLIENT_STREAMING: 1,
SERVER_STREAMING: 2,
BIDI_STREAMING: 3
};
/**
* Connectivity state values
* @memberof grpc
* @alias grpc.connectivityState
* @readonly
* @enum {number}
*/
exports.connectivityState = {
IDLE: 0,
CONNECTING: 1,
READY: 2,
TRANSIENT_FAILURE: 3,
SHUTDOWN: 4
};

276
node_modules/grpc/src/credentials.js generated vendored Normal file

@@ -0,0 +1,276 @@
/**
* @license
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* Credentials module
*
* This module contains factory methods for two different credential types:
* CallCredentials and ChannelCredentials. ChannelCredentials are things like
* SSL credentials that can be used to secure a connection, and are used to
* construct a Client object. CallCredentials generally modify metadata, so they
* can be attached to an individual method call.
*
* CallCredentials can be composed with other CallCredentials to create
* CallCredentials. ChannelCredentials can be composed with CallCredentials
* to create ChannelCredentials. No combined credential can have more than
* one ChannelCredentials.
*
* For example, to create a client secured with SSL that uses Google
* default application credentials to authenticate:
*
* @example
* var channel_creds = credentials.createSsl(root_certs);
* (new GoogleAuth()).getApplicationDefault(function(err, credential) {
* var call_creds = credentials.createFromGoogleCredential(credential);
* var combined_creds = credentials.combineChannelCredentials(
* channel_creds, call_creds);
* var client = new Client(address, combined_creds);
* });
*
* @namespace grpc.credentials
*/
'use strict';
var grpc = require('./grpc_extension');
/**
* This cannot be constructed directly. Instead, instances of this class should
* be created using the factory functions in {@link grpc.credentials}
* @constructor grpc.credentials~CallCredentials
*/
var CallCredentials = grpc.CallCredentials;
/**
* This cannot be constructed directly. Instead, instances of this class should
* be created using the factory functions in {@link grpc.credentials}
* @constructor grpc.credentials~ChannelCredentials
*/
var ChannelCredentials = grpc.ChannelCredentials;
var Metadata = require('./metadata.js');
var common = require('./common.js');
var constants = require('./constants');
/**
* @external GoogleCredential
* @see https://github.com/google/google-auth-library-nodejs
*/
const PEM_CERT_HEADER = "-----BEGIN CERTIFICATE-----";
const PEM_CERT_FOOTER = "-----END CERTIFICATE-----";
function wrapCheckServerIdentityCallback(callback) {
return function(hostname, cert) {
// Parse cert from pem to a version that matches the tls.checkServerIdentity
// format.
// https://nodejs.org/api/tls.html#tls_tls_checkserveridentity_hostname_cert
var pemHeaderIndex = cert.indexOf(PEM_CERT_HEADER);
if (pemHeaderIndex === -1) {
return new Error("Unable to parse certificate PEM.");
}
cert = cert.substring(pemHeaderIndex);
var pemFooterIndex = cert.indexOf(PEM_CERT_FOOTER);
if (pemFooterIndex === -1) {
return new Error("Unable to parse certificate PEM.");
}
cert = cert.substring(PEM_CERT_HEADER.length, pemFooterIndex);
var rawBuffer = Buffer.from(cert.replace("\n", "").replace(" ", ""), "base64");
return callback(hostname, { raw: rawBuffer });
}
}
/**
* Create an SSL Credentials object. If using a client-side certificate, both
* the second and third arguments must be passed. Additional peer verification
* options can be passed in the fourth argument as described below.
* @memberof grpc.credentials
* @alias grpc.credentials.createSsl
* @kind function
* @param {Buffer=} root_certs The root certificate data
* @param {Buffer=} private_key The client certificate private key, if
* applicable
* @param {Buffer=} cert_chain The client certificate cert chain, if applicable
* @param {Function} verify_options.checkServerIdentity Optional callback
* receiving the expected hostname and peer certificate for additional
* verification. The callback should return an Error if verification
* fails and otherwise return undefined.
* @return {grpc.credentials~ChannelCredentials} The SSL Credentials object
*/
exports.createSsl = function(root_certs, private_key, cert_chain, verify_options) {
// The checkServerIdentity callback from gRPC core will receive the cert as a PEM.
// To better match the checkServerIdentity callback of Node, we wrap the callback
// to decode the PEM and populate a cert object.
if (verify_options && verify_options.checkServerIdentity) {
if (typeof verify_options.checkServerIdentity !== 'function') {
throw new TypeError("Value of checkServerIdentity must be a function.");
}
// Make a shallow clone of verify_options so our modification of the callback
// isn't reflected to the caller
var updated_verify_options = Object.assign({}, verify_options);
updated_verify_options.checkServerIdentity = wrapCheckServerIdentityCallback(
verify_options.checkServerIdentity);
arguments[3] = updated_verify_options;
}
return ChannelCredentials.createSsl.apply(this, arguments);
}
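/* Usage sketch; the certificate file names below are placeholders, not files
 * shipped with this module:
 *
 *   var fs = require('fs');
 *   var ssl_creds = exports.createSsl(fs.readFileSync('ca.pem'));
 *   // With a client certificate, pass the key and cert chain as well:
 *   //   exports.createSsl(fs.readFileSync('ca.pem'),
 *   //                     fs.readFileSync('client.key'),
 *   //                     fs.readFileSync('client.pem'));
 */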
/**
* @callback grpc.credentials~metadataCallback
* @param {Error} error The error, if getting metadata failed
* @param {grpc.Metadata} metadata The metadata
*/
/**
* @callback grpc.credentials~generateMetadata
* @param {Object} params Parameters that can modify metadata generation
* @param {string} params.service_url The URL of the service that the call is
* going to
* @param {grpc.credentials~metadataCallback} callback
*/
/**
* Create a gRPC credentials object from a metadata generation function. This
* function gets the service URL and a callback as parameters. The error
* passed to the callback can optionally have a 'code' value attached to it,
* which corresponds to a status code that this library uses.
* @memberof grpc.credentials
* @alias grpc.credentials.createFromMetadataGenerator
* @param {grpc.credentials~generateMetadata} metadata_generator The function
* that generates metadata
* @return {grpc.credentials~CallCredentials} The credentials object
*/
exports.createFromMetadataGenerator = function(metadata_generator) {
return CallCredentials.createFromPlugin(function(service_url, cb_data,
callback) {
metadata_generator({service_url: service_url}, function(error, metadata) {
var code = constants.status.OK;
var message = '';
if (error) {
message = error.message;
if (error.hasOwnProperty('code') && Number.isFinite(error.code)) {
code = error.code;
} else {
code = constants.status.UNAUTHENTICATED;
}
if (!metadata) {
metadata = new Metadata();
}
}
callback(code, message, metadata._getCoreRepresentation(), cb_data);
});
});
};
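/* Sketch of a metadata generator that attaches a bearer token. fetchToken is a
 * hypothetical application-supplied function, not part of this module:
 *
 *   var call_creds = exports.createFromMetadataGenerator(function(params, callback) {
 *     fetchToken(params.service_url, function(err, token) {
 *       if (err) {
 *         callback(err);
 *         return;
 *       }
 *       var metadata = new Metadata();
 *       metadata.add('authorization', 'Bearer ' + token);
 *       callback(null, metadata);
 *     });
 *   });
 */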
function getAuthorizationHeaderFromGoogleCredential(google_credential, url, callback) {
// google-auth-library pre-v2.0.0 does not have getRequestHeaders
// but has getRequestMetadata, which is deprecated in v2.0.0
if (typeof google_credential.getRequestHeaders === 'function') {
google_credential.getRequestHeaders(url)
.then(function(header) {
callback(null, header.Authorization);
})
.catch(function(err) {
callback(err);
return;
});
} else {
google_credential.getRequestMetadata(url, function(err, header) {
if (err) {
callback(err);
return;
}
callback(null, header.Authorization);
});
}
}
/**
* Create a gRPC credential from a Google credential object.
* @memberof grpc.credentials
* @alias grpc.credentials.createFromGoogleCredential
* @param {external:GoogleCredential} google_credential The Google credential
* object to use
* @return {grpc.credentials~CallCredentials} The resulting credentials object
*/
exports.createFromGoogleCredential = function(google_credential) {
return exports.createFromMetadataGenerator(function(auth_context, callback) {
var service_url = auth_context.service_url;
getAuthorizationHeaderFromGoogleCredential(google_credential, service_url,
function(err, authHeader) {
if (err) {
common.log(constants.logVerbosity.INFO, 'Auth error: ' + err);
callback(err);
return;
}
var metadata = new Metadata();
metadata.add('authorization', authHeader);
callback(null, metadata);
});
});
};
/**
* Combine a ChannelCredentials with any number of CallCredentials into a single
* ChannelCredentials object.
* @memberof grpc.credentials
* @alias grpc.credentials.combineChannelCredentials
* @param {grpc.credentials~ChannelCredentials} channel_credential The ChannelCredentials to
* start with
* @param {...grpc.credentials~CallCredentials} credentials The CallCredentials to compose
* @return {grpc.credentials~ChannelCredentials} A credentials object that combines all of the
* input credentials
*/
exports.combineChannelCredentials = function(channel_credential) {
var current = channel_credential;
for (var i = 1; i < arguments.length; i++) {
current = current.compose(arguments[i]);
}
return current;
};
/**
* Combine any number of CallCredentials into a single CallCredentials object
* @memberof grpc.credentials
* @alias grpc.credentials.combineCallCredentials
* @param {...grpc.credentials~CallCredentials} credentials The CallCredentials to compose
* @return {grpc.credentials~CallCredentials} A credentials object that combines all of the input
* credentials
*/
exports.combineCallCredentials = function() {
var current = arguments[0];
for (var i = 1; i < arguments.length; i++) {
current = current.compose(arguments[i]);
}
return current;
};
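/* Sketch of composing credentials, assuming ssl_creds and call_creds were
 * created with createSsl and createFromMetadataGenerator above:
 *
 *   var channel_creds = exports.combineChannelCredentials(ssl_creds, call_creds);
 *   // channel_creds can then be passed to a Client constructor.
 */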
/**
* Create an insecure credentials object. This is used to create a channel that
* does not use SSL. This cannot be composed with anything.
* @memberof grpc.credentials
* @alias grpc.credentials.createInsecure
* @kind function
* @return {grpc.credentials~ChannelCredentials} The insecure credentials object
*/
exports.createInsecure = ChannelCredentials.createInsecure;

62
node_modules/grpc/src/grpc_extension.js generated vendored Normal file

@@ -0,0 +1,62 @@
/**
* @license
* Copyright 2016 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* @module
* @private
*/
'use strict';
var binary = require('node-pre-gyp/lib/pre-binding');
var path = require('path');
var binding_path =
binary.find(path.resolve(path.join(__dirname, '../package.json')));
var binding;
try {
binding = require(binding_path);
} catch (e) {
let fs = require('fs');
let searchPath = path.dirname(path.dirname(binding_path));
let searchName = path.basename(path.dirname(binding_path));
let foundNames;
try {
foundNames = fs.readdirSync(searchPath);
} catch (readDirError) {
let message = `The gRPC binary module was not installed. This may be fixed by running "npm rebuild"
Original error: ${e.message}`;
let error = new Error(message);
error.code = e.code;
throw error;
}
if (foundNames.indexOf(searchName) === -1) {
let message = `Failed to load gRPC binary module because it was not installed for the current system
Expected directory: ${searchName}
Found: [${foundNames.join(', ')}]
This problem can often be fixed by running "npm rebuild" on the current system
Original error: ${e.message}`;
let error = new Error(message);
error.code = e.code;
throw error;
} else {
e.message = `Failed to load ${binding_path}. ${e.message}`;
throw e;
}
}
module.exports = binding;

241
node_modules/grpc/src/metadata.js generated vendored Normal file

@@ -0,0 +1,241 @@
/**
* @license
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
'use strict';
var clone = require('lodash.clone');
var grpc = require('./grpc_extension');
const IDEMPOTENT_REQUEST_FLAG = 0x10;
const WAIT_FOR_READY_FLAG = 0x20;
const CACHEABLE_REQUEST_FLAG = 0x40;
const WAIT_FOR_READY_EXPLICITLY_SET_FLAG = 0x80;
const CORKED_FLAG = 0x100;
/**
* Class for storing metadata. Keys are normalized to lowercase ASCII.
* @memberof grpc
* @constructor
* @param {Object=} options Boolean options for the beginning of the call.
* These options only have any effect when passed at the beginning of
* a client request.
* @param {boolean=} [options.idempotentRequest=false] Signal that the request
* is idempotent
* @param {boolean=} [options.waitForReady=true] Signal that the call should
* not return UNAVAILABLE before it has started.
* @param {boolean=} [options.cacheableRequest=false] Signal that the call is
* cacheable. GRPC is free to use GET verb.
* @param {boolean=} [options.corked=false] Signal that the initial metadata
* should be corked.
* @example
* var metadata = new metadata_module.Metadata();
* metadata.set('key1', 'value1');
* metadata.add('key1', 'value2');
* metadata.get('key1') // returns ['value1', 'value2']
*/
function Metadata(options) {
this._internal_repr = {};
this.setOptions(options);
}
function normalizeKey(key) {
key = key.toLowerCase();
if (grpc.metadataKeyIsLegal(key)) {
return key;
} else {
throw new Error('Metadata key "' + key + '" contains illegal characters');
}
}
function validate(key, value) {
if (grpc.metadataKeyIsBinary(key)) {
if (!(value instanceof Buffer)) {
throw new Error('keys that end with \'-bin\' must have Buffer values');
}
} else {
if (typeof value !== 'string') {
throw new Error(
'keys that don\'t end with \'-bin\' must have String values');
}
if (!grpc.metadataNonbinValueIsLegal(value)) {
throw new Error('Metadata string value "' + value +
'" contains illegal characters');
}
}
}
/**
* Sets the given value for the given key, replacing any other values associated
* with that key. Normalizes the key.
* @param {String} key The key to set
* @param {String|Buffer} value The value to set. Must be a buffer if and only
* if the normalized key ends with '-bin'
*/
Metadata.prototype.set = function(key, value) {
key = normalizeKey(key);
validate(key, value);
this._internal_repr[key] = [value];
};
/**
* Adds the given value for the given key. Normalizes the key.
* @param {String} key The key to add to.
* @param {String|Buffer} value The value to add. Must be a buffer if and only
* if the normalized key ends with '-bin'
*/
Metadata.prototype.add = function(key, value) {
key = normalizeKey(key);
validate(key, value);
if (!this._internal_repr[key]) {
this._internal_repr[key] = [];
}
this._internal_repr[key].push(value);
};
/**
* Remove the given key and any associated values. Normalizes the key.
* @param {String} key The key to remove
*/
Metadata.prototype.remove = function(key) {
key = normalizeKey(key);
if (Object.prototype.hasOwnProperty.call(this._internal_repr, key)) {
delete this._internal_repr[key];
}
};
/**
* Gets a list of all values associated with the key. Normalizes the key.
* @param {String} key The key to get
* @return {Array.<String|Buffer>} The values associated with that key
*/
Metadata.prototype.get = function(key) {
key = normalizeKey(key);
if (Object.prototype.hasOwnProperty.call(this._internal_repr, key)) {
return this._internal_repr[key];
} else {
return [];
}
};
/**
* Get a map of each key to a single associated value. This reflects the most
* common way that people will want to see metadata.
* @return {Object.<String,String|Buffer>} A key/value mapping of the metadata
*/
Metadata.prototype.getMap = function() {
var result = {};
Object.keys(this._internal_repr).forEach(key => {
const values = this._internal_repr[key];
if(values.length > 0) {
result[key] = values[0];
}
});
return result;
};
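/* Sketch of how getMap flattens multi-valued keys (only the first value for
 * each key is kept):
 *
 *   var metadata = new Metadata();
 *   metadata.set('key1', 'value1');
 *   metadata.add('key2', 'value2a');
 *   metadata.add('key2', 'value2b');
 *   metadata.getMap(); // => {key1: 'value1', key2: 'value2a'}
 */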
/**
* Clone the metadata object.
* @return {grpc.Metadata} The new cloned object
*/
Metadata.prototype.clone = function() {
var copy = new Metadata();
Object.keys(this._internal_repr).forEach(key => {
const value = this._internal_repr[key];
copy._internal_repr[key] = clone(value);
});
copy.flags = this.flags;
return copy;
};
/**
* Set options on the metadata object
* @param {Object} options Boolean options for the beginning of the call.
* These options only have any effect when passed at the beginning of
* a client request.
* @param {boolean=} [options.idempotentRequest=false] Signal that the request
* is idempotent
* @param {boolean=} [options.waitForReady=true] Signal that the call should
* not return UNAVAILABLE before it has started.
* @param {boolean=} [options.cacheableRequest=false] Signal that the call is
* cacheable. GRPC is free to use GET verb.
* @param {boolean=} [options.corked=false] Signal that the initial metadata
* should be corked.
*/
Metadata.prototype.setOptions = function(options) {
let flags = 0;
if (options) {
if (options.idempotentRequest) {
flags |= IDEMPOTENT_REQUEST_FLAG;
}
if (options.hasOwnProperty('waitForReady')) {
flags |= WAIT_FOR_READY_EXPLICITLY_SET_FLAG;
if (options.waitForReady) {
flags |= WAIT_FOR_READY_FLAG;
}
}
if (options.cacheableRequest) {
flags |= CACHEABLE_REQUEST_FLAG;
}
if (options.corked) {
flags |= CORKED_FLAG;
}
}
this.flags = flags;
}
/**
* Metadata representation as passed to and from the native addon
* @typedef {object} grpc~CoreMetadata
* @param {Object.<String, Array.<String|Buffer>>} metadata The metadata
* @param {number} flags Metadata flags
*/
/**
* Gets the metadata in the format used by internal code. Intended for internal
* use only. API stability is not guaranteed.
* @private
* @return {grpc~CoreMetadata} The metadata
*/
Metadata.prototype._getCoreRepresentation = function() {
return {
metadata: this._internal_repr,
flags: this.flags
};
};
/**
* Creates a Metadata object from a metadata map in the internal format.
* Intended for internal use only. API stability is not guaranteed.
* @private
* @param {grpc~CoreMetadata} metadata The metadata object from core
* @return {Metadata} The new Metadata object
*/
Metadata._fromCoreRepresentation = function(metadata) {
var newMetadata = new Metadata();
if (metadata) {
Object.keys(metadata.metadata).forEach(key => {
const value = metadata.metadata[key];
newMetadata._internal_repr[key] = clone(value);
});
}
newMetadata.flags = metadata.flags;
return newMetadata;
};
module.exports = Metadata;

173
node_modules/grpc/src/protobuf_js_5_common.js generated vendored Normal file

@@ -0,0 +1,173 @@
/**
* @license
* Copyright 2017 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* @module
* @private
*/
'use strict';
var camelCase = require('lodash.camelcase');
var client = require('./client');
var common = require('./common');
/**
* Get a function that deserializes a specific type of protobuf.
* @param {function()} cls The constructor of the message type to deserialize
* @param {Object=} options Deserialization options
* @param {bool=} [options.binaryAsBase64=false] Deserialize bytes fields as
*     base64 strings instead of Buffers
* @param {bool=} [options.longsAsStrings=true] Deserialize long values as
*     strings instead of objects
* @return {function(Buffer):cls} The deserialization function
*/
exports.deserializeCls = function deserializeCls(cls, options) {
/**
* Deserialize a buffer to a message object
* @param {Buffer} arg_buf The buffer to deserialize
* @return {cls} The resulting object
*/
return function deserialize(arg_buf) {
// Convert to a native object with binary fields as Buffers (first argument)
// and longs as strings (second argument)
return cls.decode(arg_buf).toRaw(options.binaryAsBase64,
options.longsAsStrings);
};
};
var deserializeCls = exports.deserializeCls;
/**
* Get a function that serializes objects to a buffer by protobuf class.
* @param {function()} Cls The constructor of the message type to serialize
* @return {function(Cls):Buffer} The serialization function
*/
exports.serializeCls = function serializeCls(Cls) {
/**
* Serialize an object to a Buffer
* @param {Object} arg The object to serialize
* @return {Buffer} The serialized object
*/
return function serialize(arg) {
return Buffer.from(new Cls(arg).encode().toBuffer());
};
};
var serializeCls = exports.serializeCls;
/**
* Get the fully qualified (dotted) name of a ProtoBuf.Reflect value.
* @param {ProtoBuf.Reflect.Namespace} value The value to get the name of
* @return {string} The fully qualified name of the value
*/
exports.fullyQualifiedName = function fullyQualifiedName(value) {
if (value === null || value === undefined) {
return '';
}
var name = value.name;
var parent_name = fullyQualifiedName(value.parent);
if (parent_name !== '') {
name = parent_name + '.' + name;
}
return name;
};
var fullyQualifiedName = exports.fullyQualifiedName;
/**
* Return a map from method names to method attributes for the service.
* @param {ProtoBuf.Reflect.Service} service The service to get attributes for
* @param {Object=} options Options to apply to these attributes
* @return {Object} The attributes map
*/
exports.getProtobufServiceAttrs = function getProtobufServiceAttrs(service,
options) {
var prefix = '/' + fullyQualifiedName(service) + '/';
var binaryAsBase64, longsAsStrings;
if (options) {
binaryAsBase64 = options.binaryAsBase64;
longsAsStrings = options.longsAsStrings;
}
/* This slightly awkward construction is used to make sure we only use
lodash@3.10.1-compatible functions. A previous version used
_.fromPairs, which would be cleaner, but was introduced in lodash
version 4 */
return common.zipObject(service.children.map(function(method) {
return camelCase(method.name);
}), service.children.map(function(method) {
return {
originalName: method.name,
path: prefix + method.name,
requestStream: method.requestStream,
responseStream: method.responseStream,
requestType: method.resolvedRequestType,
responseType: method.resolvedResponseType,
requestSerialize: serializeCls(method.resolvedRequestType.build()),
requestDeserialize: deserializeCls(method.resolvedRequestType.build(),
options),
responseSerialize: serializeCls(method.resolvedResponseType.build()),
responseDeserialize: deserializeCls(method.resolvedResponseType.build(),
options)
};
}));
};
var getProtobufServiceAttrs = exports.getProtobufServiceAttrs;
/**
* Load a gRPC object from an existing ProtoBuf.Reflect object.
* @param {ProtoBuf.Reflect.Namespace} value The ProtoBuf object to load.
* @param {Object=} options Options to apply to the loaded object
* @return {Object<string, *>} The resulting gRPC object
*/
exports.loadObject = function loadObject(value, options) {
var result = {};
if (!value) {
return value;
}
if (value.hasOwnProperty('ns')) {
return loadObject(value.ns, options);
}
if (value.className === 'Namespace') {
Object.keys(value.children).forEach(key => {
const child = value.children[key];
result[child.name] = loadObject(child, options);
});
return result;
} else if (value.className === 'Service') {
return client.makeClientConstructor(getProtobufServiceAttrs(value, options),
options);
} else if (value.className === 'Message' || value.className === 'Enum') {
return value.build();
} else {
return value;
}
};
/**
* The primary purpose of this method is to distinguish between reflection
* objects from different versions of ProtoBuf.js. This is just a heuristic,
* checking for properties that are (currently) specific to this version of
* ProtoBuf.js
* @param {Object} obj The object to check
* @return {boolean} Whether the object appears to be a Protobuf.js 5
* ReflectionObject
*/
exports.isProbablyProtobufJs5 = function isProbablyProtobufJs5(obj) {
return Array.isArray(obj.children) && (typeof obj.build === 'function');
};

164
node_modules/grpc/src/protobuf_js_6_common.js generated vendored Normal file

@@ -0,0 +1,164 @@
/**
* @license
* Copyright 2017 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* @module
* @private
*/
'use strict';
var camelCase = require('lodash.camelcase');
var client = require('./client');
var common = require('./common');
/**
* Get a function that deserializes a specific type of protobuf.
* @param {function()} cls The constructor of the message type to deserialize
* @param {Object=} options Deserialization options
* @param {bool=} [options.binaryAsBase64=false] Deserialize bytes fields as
*     base64 strings instead of Buffers
* @param {bool=} [options.longsAsStrings=true] Deserialize long values as
*     strings instead of objects
* @param {bool=} [options.enumsAsStrings=true] Deserialize enum values as
*     strings instead of numbers
* @return {function(Buffer):cls} The deserialization function
*/
exports.deserializeCls = function deserializeCls(cls, options) {
var conversion_options = {
defaults: true,
bytes: options.binaryAsBase64 ? String : Buffer,
longs: options.longsAsStrings ? String : null,
enums: options.enumsAsStrings ? String : null,
oneofs: true
};
/**
* Deserialize a buffer to a message object
* @param {Buffer} arg_buf The buffer to deserialize
* @return {cls} The resulting object
*/
return function deserialize(arg_buf) {
return cls.toObject(cls.decode(arg_buf), conversion_options);
};
};
var deserializeCls = exports.deserializeCls;
/**
* Get a function that serializes objects to a buffer by protobuf class.
* @param {function()} Cls The constructor of the message type to serialize
* @return {function(Cls):Buffer} The serialization function
*/
exports.serializeCls = function serializeCls(cls) {
/**
* Serialize an object to a Buffer
* @param {Object} arg The object to serialize
* @return {Buffer} The serialized object
*/
return function serialize(arg) {
var message = cls.fromObject(arg);
return cls.encode(message).finish();
};
};
var serializeCls = exports.serializeCls;
/**
* Get the fully qualified (dotted) name of a ProtoBuf.Reflect value.
* @param {ProtoBuf.ReflectionObject} value The value to get the name of
* @return {string} The fully qualified name of the value
*/
exports.fullyQualifiedName = function fullyQualifiedName(value) {
if (value === null || value === undefined) {
return '';
}
var name = value.name;
var parent_fqn = fullyQualifiedName(value.parent);
if (parent_fqn !== '') {
name = parent_fqn + '.' + name;
}
return name;
};
var fullyQualifiedName = exports.fullyQualifiedName;
/**
* Return a map from method names to method attributes for the service.
* @param {ProtoBuf.Service} service The service to get attributes for
* @param {Object=} options Options to apply to these attributes
* @return {Object} The attributes map
*/
exports.getProtobufServiceAttrs = function getProtobufServiceAttrs(service,
options) {
var prefix = '/' + fullyQualifiedName(service) + '/';
service.resolveAll();
return common.zipObject(service.methodsArray.map(function(method) {
return camelCase(method.name);
}), service.methodsArray.map(function(method) {
return {
originalName: method.name,
path: prefix + method.name,
requestStream: !!method.requestStream,
responseStream: !!method.responseStream,
requestType: method.resolvedRequestType,
responseType: method.resolvedResponseType,
requestSerialize: serializeCls(method.resolvedRequestType),
requestDeserialize: deserializeCls(method.resolvedRequestType, options),
responseSerialize: serializeCls(method.resolvedResponseType),
responseDeserialize: deserializeCls(method.resolvedResponseType, options)
};
}));
};
var getProtobufServiceAttrs = exports.getProtobufServiceAttrs;
exports.loadObject = function loadObject(value, options) {
var result = {};
if (!value) {
return value;
}
if (value.hasOwnProperty('methods')) {
// It's a service object
var service_attrs = getProtobufServiceAttrs(value, options);
return client.makeClientConstructor(service_attrs);
}
if (value.hasOwnProperty('nested')) {
// It's a namespace or root object
if (value.nested !== null && value.nested !== undefined) {
var values = Object.keys(value.nested).map(key => value.nested[key]);
values.forEach(nested => {
result[nested.name] = loadObject(nested, options);
});
}
return result;
}
// Otherwise, it's not something we need to change
return value;
};
/**
* The primary purpose of this method is to distinguish between reflection
* objects from different versions of ProtoBuf.js. This is just a heuristic,
* checking for properties that are (currently) specific to this version of
* ProtoBuf.js
* @param {Object} obj The object to check
* @return {boolean} Whether the object appears to be a Protobuf.js 6
* ReflectionObject
*/
exports.isProbablyProtobufJs6 = function isProbablyProtobufJs6(obj) {
return (typeof obj.root === 'object') && (typeof obj.resolve === 'function');
};

1001
node_modules/grpc/src/server.js generated vendored Normal file

File diff suppressed because it is too large