mirror of https://github.com/musix-org/musix-oss, synced 2024-12-23 22:33:17 +00:00
/*! firebase-admin v8.9.2 */
"use strict";
/*!
 * Copyright 2018 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Object.defineProperty(exports, "__esModule", { value: true });
var deep_copy_1 = require("../utils/deep-copy");
var utils = require("../utils");
var validator = require("../utils/validator");
var error_1 = require("../utils/error");
/**
 * @param {any} obj The object to check for a number field within.
 * @param {string} key The entry key.
 * @return {number} The corresponding number if available. Otherwise, NaN.
 */
function getNumberField(obj, key) {
    if (typeof obj[key] !== 'undefined' && obj[key] !== null) {
        return parseInt(obj[key].toString(), 10);
    }
    return NaN;
}
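// For illustration only (not part of the library): getNumberField coerces a field
// to an integer and falls back to NaN when the field is absent or null, e.g.
//   getNumberField({ rounds: '8' }, 'rounds');   // 8
//   getNumberField({ rounds: null }, 'rounds');  // NaN
//   getNumberField({}, 'rounds');                // NaN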
/**
 * Converts a UserImportRecord to an UploadAccountUser object. Throws an error when invalid
 * fields are provided.
 * @param {UserImportRecord} user The UserImportRecord to convert to an UploadAccountUser.
 * @param {ValidatorFunction=} userValidator The user validator function.
 * @return {UploadAccountUser} The corresponding UploadAccountUser to return.
 */
function populateUploadAccountUser(user, userValidator) {
    var result = {
        localId: user.uid,
        email: user.email,
        emailVerified: user.emailVerified,
        displayName: user.displayName,
        disabled: user.disabled,
        photoUrl: user.photoURL,
        phoneNumber: user.phoneNumber,
        providerUserInfo: [],
        tenantId: user.tenantId,
        customAttributes: user.customClaims && JSON.stringify(user.customClaims),
    };
    if (typeof user.passwordHash !== 'undefined') {
        if (!validator.isBuffer(user.passwordHash)) {
            throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_PASSWORD_HASH);
        }
        result.passwordHash = utils.toWebSafeBase64(user.passwordHash);
    }
    if (typeof user.passwordSalt !== 'undefined') {
        if (!validator.isBuffer(user.passwordSalt)) {
            throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_PASSWORD_SALT);
        }
        result.salt = utils.toWebSafeBase64(user.passwordSalt);
    }
    if (validator.isNonNullObject(user.metadata)) {
        if (validator.isNonEmptyString(user.metadata.creationTime)) {
            result.createdAt = new Date(user.metadata.creationTime).getTime();
        }
        if (validator.isNonEmptyString(user.metadata.lastSignInTime)) {
            result.lastLoginAt = new Date(user.metadata.lastSignInTime).getTime();
        }
    }
    if (validator.isArray(user.providerData)) {
        user.providerData.forEach(function (providerData) {
            result.providerUserInfo.push({
                providerId: providerData.providerId,
                rawId: providerData.uid,
                email: providerData.email,
                displayName: providerData.displayName,
                photoUrl: providerData.photoURL,
            });
        });
    }
    // Remove blank fields.
    var key;
    for (key in result) {
        if (typeof result[key] === 'undefined') {
            delete result[key];
        }
    }
    if (result.providerUserInfo.length === 0) {
        delete result.providerUserInfo;
    }
    // Validate the constructed individual user request. This will throw if an error
    // is detected.
    if (typeof userValidator === 'function') {
        userValidator(result);
    }
    return result;
}
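// For illustration only (not part of the library; values are hypothetical): given a
// UserImportRecord such as
//   { uid: 'user1', email: 'user1@example.com', emailVerified: true }
// populateUploadAccountUser returns the backend-ready UploadAccountUser
//   { localId: 'user1', email: 'user1@example.com', emailVerified: true }
// with undefined fields and the empty providerUserInfo array stripped out.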
/**
 * Class that provides a helper for building/validating uploadAccount requests and
 * UserImportResult responses.
 */
var UserImportBuilder = /** @class */ (function () {
    /**
     * @param {UserImportRecord[]} users The list of user records to import.
     * @param {UserImportOptions=} options The import options, which include hashing
     *     algorithm details.
     * @param {ValidatorFunction=} userRequestValidator The user request validator function.
     * @constructor
     */
    function UserImportBuilder(users, options, userRequestValidator) {
        this.requiresHashOptions = false;
        this.validatedUsers = [];
        this.userImportResultErrors = [];
        this.indexMap = {};
        this.validatedUsers = this.populateUsers(users, userRequestValidator);
        this.validatedOptions = this.populateOptions(options, this.requiresHashOptions);
    }
    /**
     * Returns the corresponding constructed uploadAccount request.
     * @return {UploadAccountRequest} The constructed uploadAccount request.
     */
    UserImportBuilder.prototype.buildRequest = function () {
        var users = this.validatedUsers.map(function (user) {
            return deep_copy_1.deepCopy(user);
        });
        return deep_copy_1.deepExtend({ users: users }, deep_copy_1.deepCopy(this.validatedOptions));
    };
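    // For illustration only (not part of the library): buildRequest merges the validated
    // users with the validated hash options. For example, a builder constructed with a
    // single password-less user and no options yields
    //   { users: [{ localId: 'user1', email: 'user1@example.com' }] }
    // ready to be sent to the uploadAccount backend endpoint.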
    /**
     * Populates the UserImportResult using the client-side detected errors and the
     * server-side returned errors.
     * @param {Array<{index: number, message: string}>} failedUploads The failed uploads
     *     returned by the server.
     * @return {UserImportResult} The user import result based on the returned failed
     *     uploadAccount response.
     */
    UserImportBuilder.prototype.buildResponse = function (failedUploads) {
        var _this = this;
        // Initialize user import result.
        var importResult = {
            successCount: this.validatedUsers.length,
            failureCount: this.userImportResultErrors.length,
            errors: deep_copy_1.deepCopy(this.userImportResultErrors),
        };
        importResult.failureCount += failedUploads.length;
        importResult.successCount -= failedUploads.length;
        failedUploads.forEach(function (failedUpload) {
            importResult.errors.push({
                // Map the backend request index to the original developer-provided array index.
                index: _this.indexMap[failedUpload.index],
                error: new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_USER_IMPORT, failedUpload.message),
            });
        });
        // Sort errors by index.
        importResult.errors.sort(function (a, b) {
            return a.index - b.index;
        });
        // Return sorted result.
        return importResult;
    };
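    // For illustration only (not part of the library): with three users passing
    // client-side validation, one record rejected client-side, and one server-side
    // failure reported by the backend, buildResponse produces
    //   { successCount: 2, failureCount: 2, errors: [{ index: ..., error: ... }, ...] }
    // where each error index refers to the developer's original users array.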
    /**
     * Validates and returns the hashing options of the uploadAccount request.
     * Throws an error whenever invalid or missing options are detected.
     * @param {UserImportOptions} options The UserImportOptions.
     * @param {boolean} requiresHashOptions Whether to require hash options.
     * @return {UploadAccountOptions} The populated UploadAccount options.
     */
    UserImportBuilder.prototype.populateOptions = function (options, requiresHashOptions) {
        var populatedOptions;
        if (!requiresHashOptions) {
            return {};
        }
        if (!validator.isNonNullObject(options)) {
            throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_ARGUMENT, '"UserImportOptions" are required when importing users with passwords.');
        }
        if (!validator.isNonNullObject(options.hash)) {
            throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.MISSING_HASH_ALGORITHM, "\"hash.algorithm\" is missing from the provided \"UserImportOptions\".");
        }
        if (typeof options.hash.algorithm === 'undefined' ||
            !validator.isNonEmptyString(options.hash.algorithm)) {
            throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_ALGORITHM, "\"hash.algorithm\" must be a string matching the list of supported algorithms.");
        }
        var rounds;
        switch (options.hash.algorithm) {
            case 'HMAC_SHA512':
            case 'HMAC_SHA256':
            case 'HMAC_SHA1':
            case 'HMAC_MD5':
                if (!validator.isBuffer(options.hash.key)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_KEY, "A non-empty \"hash.key\" byte buffer must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                populatedOptions = {
                    hashAlgorithm: options.hash.algorithm,
                    signerKey: utils.toWebSafeBase64(options.hash.key),
                };
                break;
            case 'MD5':
            case 'SHA1':
            case 'SHA256':
            case 'SHA512':
                // MD5 is [0,8192] but SHA1, SHA256, and SHA512 are [1,8192]
                rounds = getNumberField(options.hash, 'rounds');
                var minRounds = options.hash.algorithm === 'MD5' ? 0 : 1;
                if (isNaN(rounds) || rounds < minRounds || rounds > 8192) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_ROUNDS, "A valid \"hash.rounds\" number between " + minRounds + " and 8192 must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                populatedOptions = {
                    hashAlgorithm: options.hash.algorithm,
                    rounds: rounds,
                };
                break;
            case 'PBKDF_SHA1':
            case 'PBKDF2_SHA256':
                rounds = getNumberField(options.hash, 'rounds');
                if (isNaN(rounds) || rounds < 0 || rounds > 120000) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_ROUNDS, "A valid \"hash.rounds\" number between 0 and 120000 must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                populatedOptions = {
                    hashAlgorithm: options.hash.algorithm,
                    rounds: rounds,
                };
                break;
            case 'SCRYPT':
                if (!validator.isBuffer(options.hash.key)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_KEY, "A \"hash.key\" byte buffer must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                rounds = getNumberField(options.hash, 'rounds');
                if (isNaN(rounds) || rounds <= 0 || rounds > 8) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_ROUNDS, "A valid \"hash.rounds\" number between 1 and 8 must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                var memoryCost = getNumberField(options.hash, 'memoryCost');
                if (isNaN(memoryCost) || memoryCost <= 0 || memoryCost > 14) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_MEMORY_COST, "A valid \"hash.memoryCost\" number between 1 and 14 must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                if (typeof options.hash.saltSeparator !== 'undefined' &&
                    !validator.isBuffer(options.hash.saltSeparator)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_SALT_SEPARATOR, "\"hash.saltSeparator\" must be a byte buffer.");
                }
                populatedOptions = {
                    hashAlgorithm: options.hash.algorithm,
                    signerKey: utils.toWebSafeBase64(options.hash.key),
                    rounds: rounds,
                    memoryCost: memoryCost,
                    saltSeparator: utils.toWebSafeBase64(options.hash.saltSeparator || Buffer.from('')),
                };
                break;
            case 'BCRYPT':
                populatedOptions = {
                    hashAlgorithm: options.hash.algorithm,
                };
                break;
            case 'STANDARD_SCRYPT':
                var cpuMemCost = getNumberField(options.hash, 'memoryCost');
                if (isNaN(cpuMemCost)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_MEMORY_COST, "A valid \"hash.memoryCost\" number must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                var parallelization = getNumberField(options.hash, 'parallelization');
                if (isNaN(parallelization)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_PARALLELIZATION, "A valid \"hash.parallelization\" number must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                var blockSize = getNumberField(options.hash, 'blockSize');
                if (isNaN(blockSize)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_BLOCK_SIZE, "A valid \"hash.blockSize\" number must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                var dkLen = getNumberField(options.hash, 'derivedKeyLength');
                if (isNaN(dkLen)) {
                    throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_DERIVED_KEY_LENGTH, "A valid \"hash.derivedKeyLength\" number must be provided for " +
                        ("hash algorithm " + options.hash.algorithm + "."));
                }
                populatedOptions = {
                    hashAlgorithm: options.hash.algorithm,
                    cpuMemCost: cpuMemCost,
                    parallelization: parallelization,
                    blockSize: blockSize,
                    dkLen: dkLen,
                };
                break;
            default:
                throw new error_1.FirebaseAuthError(error_1.AuthClientErrorCode.INVALID_HASH_ALGORITHM, "Unsupported hash algorithm provider \"" + options.hash.algorithm + "\".");
        }
        return populatedOptions;
    };
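    // For illustration only (not part of the library; buffer contents are hypothetical):
    // a developer-provided
    //   { hash: { algorithm: 'SCRYPT', key: Buffer.from('signer-key'), rounds: 8,
    //             memoryCost: 14, saltSeparator: Buffer.from('Bw==', 'base64') } }
    // is validated above and flattened into
    //   { hashAlgorithm: 'SCRYPT', signerKey: ..., rounds: 8, memoryCost: 14, saltSeparator: ... }
    // with the key and salt separator re-encoded as web-safe base64 strings.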
    /**
     * Validates and returns the users list of the uploadAccount request.
     * Whenever a user with an error is detected, the error is cached and will later be
     * merged into the user import result. This allows the processing of valid users without
     * failing early on the first error detected.
     * @param {UserImportRecord[]} users The UserImportRecords to convert to UploadAccountUser
     *     objects.
     * @param {ValidatorFunction=} userValidator The user validator function.
     * @return {UploadAccountUser[]} The populated uploadAccount users.
     */
    UserImportBuilder.prototype.populateUsers = function (users, userValidator) {
        var _this = this;
        var populatedUsers = [];
        users.forEach(function (user, index) {
            try {
                var result = populateUploadAccountUser(user, userValidator);
                if (typeof result.passwordHash !== 'undefined') {
                    _this.requiresHashOptions = true;
                }
                // Only users that pass client screening will be passed to the backend for processing.
                populatedUsers.push(result);
                // Map the user's index in the backend request to its index in the original
                // developer-provided array.
                _this.indexMap[populatedUsers.length - 1] = index;
            }
            catch (error) {
                // Save the client-side error with its index in the developer-provided array.
                _this.userImportResultErrors.push({
                    index: index,
                    error: error,
                });
            }
        });
        return populatedUsers;
    };
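    // For illustration only (not part of the library): if users[1] fails client-side
    // validation, only users[0] and users[2] are sent to the backend, and indexMap
    // becomes { 0: 0, 1: 2 } so that server-side failures can be reported against the
    // developer's original array positions.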
    return UserImportBuilder;
}());
exports.UserImportBuilder = UserImportBuilder;
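// For illustration only (not part of the library): a minimal usage sketch, with the
// surrounding variable names (users, options, userRequestValidator, failedUploads) assumed:
//   var builder = new UserImportBuilder(users, options, userRequestValidator);
//   var request = builder.buildRequest();               // payload for uploadAccount
//   // ...send the request to the backend, collect failedUploads: [{ index, message }, ...]
//   var result = builder.buildResponse(failedUploads);  // { successCount, failureCount, errors }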