/// <reference types="node" />
import { BodyResponseCallback, DecorateRequestOptions, GetConfig, Metadata, ServiceObject } from '@google-cloud/common';
import { Writable, Readable } from 'stream';
import * as http from 'http';
import { Storage } from './storage';
import { Bucket } from './bucket';
import { Acl } from './acl';
import { ResponseBody, Duplexify } from '@google-cloud/common/build/src/util';
export declare type GetExpirationDateResponse = [Date];
export interface GetExpirationDateCallback {
    (err: Error | null, expirationDate?: Date | null, apiResponse?: Metadata): void;
}
export interface GetSignedUrlConfig {
    action: 'read' | 'write' | 'delete' | 'resumable';
    version?: 'v2' | 'v4';
    cname?: string;
    contentMd5?: string;
    contentType?: string;
    expires: string | number | Date;
    extensionHeaders?: http.OutgoingHttpHeaders;
    promptSaveAs?: string;
    responseDisposition?: string;
    responseType?: string;
}
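/*
 * Illustrative sketch (not part of the original declarations): a
 * GetSignedUrlConfig that authorizes an upload for the next 15 minutes, as
 * accepted by File#getSignedUrl declared below. All names and values are
 * placeholders.
 *
 *   const config: GetSignedUrlConfig = {
 *     version: 'v4',
 *     action: 'write',
 *     expires: Date.now() + 15 * 60 * 1000, // 15 minutes from now
 *     contentType: 'application/octet-stream',
 *   };
 */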
export declare enum ActionToHTTPMethod {
    read = "GET",
    write = "PUT",
    delete = "DELETE",
    resumable = "POST"
}
export declare type GetSignedUrlResponse = [string];
export interface GetSignedUrlCallback {
    (err: Error | null, url?: string): void;
}
export interface PolicyDocument {
    string: string;
    base64: string;
    signature: string;
}
export declare type GetSignedPolicyResponse = [PolicyDocument];
export interface GetSignedPolicyCallback {
    (err: Error | null, policy?: PolicyDocument): void;
}
export interface GetSignedPolicyOptions {
    equals?: string[] | string[][];
    expires: string | number | Date;
    startsWith?: string[] | string[][];
    acl?: string;
    successRedirect?: string;
    successStatus?: string;
    contentLengthRange?: {
        min?: number;
        max?: number;
    };
}
export interface GetFileMetadataOptions {
    userProject?: string;
}
export declare type GetFileMetadataResponse = [Metadata, Metadata];
export interface GetFileMetadataCallback {
    (err: Error | null, metadata?: Metadata, apiResponse?: Metadata): void;
}
export interface GetFileOptions extends GetConfig {
    userProject?: string;
}
export declare type GetFileResponse = [File, Metadata];
export interface GetFileCallback {
    (err: Error | null, file?: File, apiResponse?: Metadata): void;
}
export interface FileExistsOptions {
    userProject?: string;
}
export declare type FileExistsResponse = [boolean];
export interface FileExistsCallback {
    (err: Error | null, exists?: boolean): void;
}
export interface DeleteFileOptions {
    userProject?: string;
}
export declare type DeleteFileResponse = [Metadata];
export interface DeleteFileCallback {
    (err: Error | null, apiResponse?: Metadata): void;
}
export declare type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead';
export interface CreateResumableUploadOptions {
    configPath?: string;
    metadata?: Metadata;
    origin?: string;
    offset?: number;
    predefinedAcl?: PredefinedAcl;
    private?: boolean;
    public?: boolean;
    uri?: string;
    userProject?: string;
}
export declare type CreateResumableUploadResponse = [string];
export interface CreateResumableUploadCallback {
    (err: Error | null, uri?: string): void;
}
export interface CreateWriteStreamOptions extends CreateResumableUploadOptions {
    contentType?: string;
    gzip?: string | boolean;
    resumable?: boolean;
    validation?: string | boolean;
}
export interface MakeFilePrivateOptions {
    strict?: boolean;
    userProject?: string;
}
export declare type MakeFilePrivateResponse = [Metadata];
export interface MakeFilePrivateCallback extends SetFileMetadataCallback {
}
export interface IsPublicCallback {
    (err: Error | null, resp?: boolean): void;
}
export declare type IsPublicResponse = [boolean];
export declare type MakeFilePublicResponse = [Metadata];
export interface MakeFilePublicCallback {
    (err?: Error | null, apiResponse?: Metadata): void;
}
export declare type MoveResponse = [Metadata];
export interface MoveCallback {
    (err: Error | null, destinationFile?: File | null, apiResponse?: Metadata): void;
}
export interface MoveOptions {
    userProject?: string;
}
export declare type RotateEncryptionKeyOptions = string | Buffer | EncryptionKeyOptions;
export interface EncryptionKeyOptions {
    encryptionKey?: string | Buffer;
    kmsKeyName?: string;
}
export interface RotateEncryptionKeyCallback extends CopyCallback {
}
export declare type RotateEncryptionKeyResponse = CopyResponse;
export interface FileOptions {
    encryptionKey?: string | Buffer;
    generation?: number | string;
    kmsKeyName?: string;
    userProject?: string;
}
export interface CopyOptions {
    destinationKmsKeyName?: string;
    keepAcl?: string;
    predefinedAcl?: string;
    token?: string;
    userProject?: string;
}
export declare type CopyResponse = [File, Metadata];
export interface CopyCallback {
    (err: Error | null, file?: File | null, apiResponse?: Metadata): void;
}
export declare type DownloadResponse = [Buffer];
export declare type DownloadCallback = (err: RequestError | null, contents: Buffer) => void;
export interface DownloadOptions extends CreateReadStreamOptions {
    destination?: string;
}
export interface CreateReadStreamOptions {
    userProject?: string;
    validation?: 'md5' | 'crc32c' | false | true;
    start?: number;
    end?: number;
    decompress?: boolean;
}
export interface SaveOptions extends CreateWriteStreamOptions {
}
export interface SaveCallback {
    (err?: Error | null): void;
}
export interface SetFileMetadataOptions {
    userProject?: string;
}
export interface SetFileMetadataCallback {
    (err?: Error | null, apiResponse?: Metadata): void;
}
export declare type SetFileMetadataResponse = [Metadata];
export declare type SetStorageClassResponse = [Metadata];
export interface SetStorageClassOptions {
    userProject?: string;
}
export interface SetStorageClassCallback {
    (err?: Error | null, apiResponse?: Metadata): void;
}
declare class RequestError extends Error {
    code?: string;
    errors?: Error[];
}
/**
 * A File object is created from your {@link Bucket} object using
 * {@link Bucket#file}.
 *
 * @class
 */
declare class File extends ServiceObject<File> {
    /**
     * Cloud Storage uses access control lists (ACLs) to manage object and
     * bucket access. ACLs are the mechanism you use to share objects with other
     * users and allow other users to access your buckets and objects.
     *
     * An ACL consists of one or more entries, where each entry grants permissions
     * to an entity. Permissions define the actions that can be performed against
     * an object or bucket (for example, `READ` or `WRITE`); the entity defines
     * who the permission applies to (for example, a specific user or group of
     * users).
     *
     * The `acl` object on a File instance provides methods to get you a list of
     * the ACLs defined on your bucket, as well as set, update, and delete them.
     *
     * @see [About Access Control lists]{@link http://goo.gl/6qBBPO}
     *
     * @name File#acl
     * @mixes Acl
     *
     * @example
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const myBucket = storage.bucket('my-bucket');
     *
     * const file = myBucket.file('my-file');
     * //-
     * // Make a file publicly readable.
     * //-
     * const options = {
     *   entity: 'allUsers',
     *   role: storage.acl.READER_ROLE
     * };
     *
     * file.acl.add(options, function(err, aclObject) {});
     *
     * //-
     * // If the callback is omitted, we'll return a Promise.
     * //-
     * file.acl.add(options).then(function(data) {
     *   const aclObject = data[0];
     *   const apiResponse = data[1];
     * });
     */
    acl: Acl;
    bucket: Bucket;
    storage: Storage;
    kmsKeyName?: string;
    userProject?: string;
    name: string;
    generation?: number;
    parent: Bucket;
    private encryptionKey?;
    private encryptionKeyBase64?;
    private encryptionKeyHash?;
    private encryptionKeyInterceptor?;
    /**
     * @typedef {object} FileOptions Options passed to the File constructor.
     * @property {string} [encryptionKey] A custom encryption key.
     * @property {number} [generation] Generation to scope the file to.
     * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this
     *     object, if the object is encrypted by such a key. Limited availability;
     *     usable only by enabled projects.
     * @property {string} [userProject] The ID of the project which will be
     *     billed for all requests made from this File object.
     */
    /**
     * Constructs a file object.
     *
     * @param {Bucket} bucket The Bucket instance this file is
     *     attached to.
     * @param {string} name The name of the remote file.
     * @param {FileOptions} [options] Configuration options.
     * @example
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const myBucket = storage.bucket('my-bucket');
     *
     * const file = myBucket.file('my-file');
     */
    constructor(bucket: Bucket, name: string, options?: FileOptions);
    copy(destination: string | Bucket | File, options?: CopyOptions): Promise<CopyResponse>;
    copy(destination: string | Bucket | File, callback: CopyCallback): void;
    copy(destination: string | Bucket | File, options: CopyOptions, callback: CopyCallback): void;
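    /*
     * Illustrative usage of copy() (a sketch based on the signatures above,
     * not taken from the original source; destination names are placeholders):
     *
     *   // Copy within the same bucket...
     *   const [copiedFile] = await file.copy('backup/my-file');
     *   // ...or into another bucket.
     *   const [fileInOtherBucket] = await file.copy(storage.bucket('other-bucket'));
     */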
    /**
     * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.
     * @property {string} [userProject] The ID of the project which will be
     *     billed for the request.
     * @property {string|boolean} [validation] Possible values: `"md5"`,
     *     `"crc32c"`, or `false`. By default, data integrity is validated with a
     *     CRC32c checksum. You may use MD5 if preferred, but that hash is not
     *     supported for composite objects. An error will be raised if MD5 is
     *     specified but is not available. You may also choose to skip validation
     *     completely; however, this is **not recommended**.
     * @property {number} [start] A byte offset to begin the file's download
     *     from. Default is 0. NOTE: Byte ranges are inclusive; that is,
     *     `options.start = 0` and `options.end = 999` represent the first 1000
     *     bytes in a file or object. NOTE: when specifying a byte range, data
     *     integrity is not available.
     * @property {number} [end] A byte offset to stop reading the file at.
     *     NOTE: Byte ranges are inclusive; that is, `options.start = 0` and
     *     `options.end = 999` represent the first 1000 bytes in a file or object.
     *     NOTE: when specifying a byte range, data integrity is not available.
     * @property {boolean} [decompress=true] Whether to automatically decompress
     *     the received data. Defaults to `true`; set to `false` to disable
     *     decompression. Applicable in cases where the data was uploaded with
     *     the `gzip: true` option. See {@link File#createWriteStream}.
     */
    /**
     * Create a readable stream to read the contents of the remote file. It can be
     * piped to a writable stream or listened to for 'data' events to read a
     * file's contents.
     *
     * In the unlikely event there is a mismatch between what you downloaded and
     * the version in your Bucket, your error handler will receive an error with
     * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best
     * recourse is to try downloading the file again.
     *
     * For faster crc32c computation, you must manually install
     * [`fast-crc32c`](http://www.gitnpm.com/fast-crc32c):
     *
     *     $ npm install --save fast-crc32c
     *
     * NOTE: Readable streams will emit the `end` event when the file is fully
     * downloaded.
     *
     * @param {CreateReadStreamOptions} [options] Configuration options.
     * @returns {ReadableStream}
     *
     * @example
     * //-
     * // <h4>Downloading a File</h4>
     * //
     * // The example below demonstrates how we can reference a remote file, then
     * // pipe its contents to a local file. This is effectively creating a local
     * // backup of your remote data.
     * //-
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('my-bucket');
     *
     * const fs = require('fs');
     * const remoteFile = bucket.file('image.png');
     * const localFilename = '/Users/stephen/Photos/image.png';
     *
     * remoteFile.createReadStream()
     *   .on('error', function(err) {})
     *   .on('response', function(response) {
     *     // Server connected and responded with the specified status and headers.
     *   })
     *   .on('end', function() {
     *     // The file is fully downloaded.
     *   })
     *   .pipe(fs.createWriteStream(localFilename));
     *
     * //-
     * // To limit the downloaded data to only a byte range, pass an options
     * // object.
     * //-
     * const logFile = myBucket.file('access_log');
     * logFile.createReadStream({
     *     start: 10000,
     *     end: 20000
     *   })
     *   .on('error', function(err) {})
     *   .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
     *
     * //-
     * // To read a tail byte range, specify only `options.end` as a negative
     * // number.
     * //-
     * const logFile = myBucket.file('access_log');
     * logFile.createReadStream({
     *     end: -100
     *   })
     *   .on('error', function(err) {})
     *   .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
     */
    createReadStream(options?: CreateReadStreamOptions): Readable;
    createResumableUpload(options?: CreateResumableUploadOptions): Promise<CreateResumableUploadResponse>;
    createResumableUpload(options: CreateResumableUploadOptions, callback: CreateResumableUploadCallback): void;
    createResumableUpload(callback: CreateResumableUploadCallback): void;
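    /*
     * Illustrative usage of createResumableUpload() (a sketch based on the
     * signatures above, not taken from the original source; the origin value
     * is a placeholder):
     *
     *   // Get a resumable upload URI that another client can use to upload
     *   // this file's contents.
     *   const [uri] = await file.createResumableUpload({
     *     origin: 'https://example.com',
     *   });
     */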
    /**
     * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().
     * @property {string} [configPath] **This only applies to resumable
     *     uploads.** A full JSON file path to use with `gcs-resumable-upload`.
     *     This maps to the [configstore option by the same
     *     name](https://github.com/yeoman/configstore/tree/0df1ec950d952b1f0dfb39ce22af8e505dffc71a#configpath).
     * @property {string} [contentType] Alias for
     *     `options.metadata.contentType`. If set to `auto`, the file name is used
     *     to determine the contentType.
     * @property {string|boolean} [gzip] If true, automatically gzip the file.
     *     If set to `auto`, the contentType is used to determine if the file
     *     should be gzipped. This will set `options.metadata.contentEncoding` to
     *     `gzip` if necessary.
     * @property {object} [metadata] See the examples below or
     *     [Objects: insert request
     *     body](https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON)
     *     for more details.
     * @property {number} [offset] The starting byte of the upload stream, for
     *     resuming an interrupted upload. Defaults to 0.
     * @property {string} [predefinedAcl] Apply a predefined set of access
     *     controls to this object.
     *
     *     Acceptable values are:
     *     - **`authenticatedRead`** - Object owner gets `OWNER` access, and
     *       `allAuthenticatedUsers` get `READER` access.
     *
     *     - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and
     *       project team owners get `OWNER` access.
     *
     *     - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project
     *       team owners get `READER` access.
     *
     *     - **`private`** - Object owner gets `OWNER` access.
     *
     *     - **`projectPrivate`** - Object owner gets `OWNER` access, and project
     *       team members get access according to their roles.
     *
     *     - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`
     *       get `READER` access.
     * @property {boolean} [private] Make the uploaded file private. (Alias for
     *     `options.predefinedAcl = 'private'`)
     * @property {boolean} [public] Make the uploaded file public. (Alias for
     *     `options.predefinedAcl = 'publicRead'`)
     * @property {boolean} [resumable] Force a resumable upload. NOTE: When
     *     working with streams, the file format and size are unknown until the
     *     stream is completely consumed. Because of this, it's best to be
     *     explicit about what makes sense given your input.
     * @property {string} [uri] The URI for an already-created resumable
     *     upload. See {@link File#createResumableUpload}.
     * @property {string} [userProject] The ID of the project which will be
     *     billed for the request.
     * @property {string|boolean} [validation] Possible values: `"md5"`,
     *     `"crc32c"`, or `false`. By default, data integrity is validated with a
     *     CRC32c checksum. You may use MD5 if preferred, but that hash is not
     *     supported for composite objects. An error will be raised if MD5 is
     *     specified but is not available. You may also choose to skip validation
     *     completely; however, this is **not recommended**.
     */
    /**
     * Create a writable stream to overwrite the contents of the file in your
     * bucket.
     *
     * A File object can also be used to create files for the first time.
     *
     * Resumable uploads are automatically enabled and must be shut off explicitly
     * by setting `options.resumable` to `false`.
     *
     * Resumable uploads require write access to the $HOME directory. Through
     * [`config-store`](http://www.gitnpm.com/configstore), some metadata is
     * stored. By default, if the directory is not writable, we will fall back to
     * a simple upload. However, if you explicitly request a resumable upload, and
     * we cannot write to the config directory, we will return a
     * `ResumableUploadError`.
     *
     * <p class="notice">
     *   There is some overhead when using a resumable upload that can cause
     *   noticeable performance degradation while uploading a series of small
     *   files. When uploading files less than 10MB, it is recommended that the
     *   resumable feature is disabled.
     * </p>
     *
     * For faster crc32c computation, you must manually install
     * [`fast-crc32c`](http://www.gitnpm.com/fast-crc32c):
     *
     *     $ npm install --save fast-crc32c
     *
     * NOTE: Writable streams will emit the `finish` event when the file is fully
     * uploaded.
     *
     * @see [Upload Options (Simple or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload}
     * @see [Objects: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert}
     *
     * @param {CreateWriteStreamOptions} [options] Configuration options.
     * @returns {WritableStream}
     *
     * @example
     * const fs = require('fs');
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const myBucket = storage.bucket('my-bucket');
     *
     * const file = myBucket.file('my-file');
     *
     * //-
     * // <h4>Uploading a File</h4>
     * //
     * // Now, consider a case where we want to upload a file to your bucket. You
     * // have the option of using {@link Bucket#upload}, but that is just
     * // a convenience method which will do the following.
     * //-
     * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
     *   .pipe(file.createWriteStream())
     *   .on('error', function(err) {})
     *   .on('finish', function() {
     *     // The file upload is complete.
     *   });
     *
     * //-
     * // <h4>Uploading a File with gzip compression</h4>
     * //-
     * fs.createReadStream('/Users/stephen/site/index.html')
     *   .pipe(file.createWriteStream({ gzip: true }))
     *   .on('error', function(err) {})
     *   .on('finish', function() {
     *     // The file upload is complete.
     *   });
     *
     * //-
     * // Downloading the file with `createReadStream` will automatically decode
     * // the file.
     * //-
     *
     * //-
     * // <h4>Uploading a File with Metadata</h4>
     * //
     * // One last case you may run into is when you want to upload a file to your
     * // bucket and set its metadata at the same time. Like above, you can use
     * // {@link Bucket#upload} to do this, which is just a wrapper around
     * // the following.
     * //-
     * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
     *   .pipe(file.createWriteStream({
     *     metadata: {
     *       contentType: 'image/jpeg',
     *       metadata: {
     *         custom: 'metadata'
     *       }
     *     }
     *   }))
     *   .on('error', function(err) {})
     *   .on('finish', function() {
     *     // The file upload is complete.
     *   });
     */
    createWriteStream(options?: CreateWriteStreamOptions): Writable;
    /**
     * Delete the cache for a failed resumable upload.
     *
     * Resumable uploads cache a config file so that an interrupted upload can
     * be restarted. In certain scenarios the resumable upload will not work,
     * and the cached file has to be deleted before the same file can be
     * uploaded again.
     *
     * Following are some of those scenarios.
     *
     * The resumable upload failed even though the file was successfully saved
     * to Google Cloud Storage, and the resumable cache needs to be cleaned up
     * in order to update the same file.
     *
     * The resumable upload failed due to a precondition (i.e. the generation
     * number did not match), and you want to upload the same file with a new
     * generation number.
     *
     * @example
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const myBucket = storage.bucket('my-bucket');
     *
     * const file = myBucket.file('my-file', { generation: 0 });
     * const contents = 'This is the contents of the file.';
     *
     * file.save(contents, function(err) {
     *   if (err) {
     *     file.deleteResumableCache();
     *   }
     * });
     */
    deleteResumableCache(): void;
    download(options?: DownloadOptions): Promise<DownloadResponse>;
    download(options: DownloadOptions, callback: DownloadCallback): void;
    download(callback: DownloadCallback): void;
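    /*
     * Illustrative usage of download() (a sketch based on the signatures above,
     * not taken from the original source; the local path is a placeholder):
     *
     *   // Download into memory...
     *   const [contents] = await file.download();
     *   // ...or straight to disk.
     *   await file.download({ destination: '/tmp/my-file.txt' });
     */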
    /**
     * The Storage API allows you to use a custom key for server-side encryption.
     *
     * @see [Customer-supplied Encryption Keys]{@link https://cloud.google.com/storage/docs/encryption#customer-supplied}
     *
     * @param {string|buffer} encryptionKey An AES-256 encryption key.
     * @returns {File}
     *
     * @example
     * const crypto = require('crypto');
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const myBucket = storage.bucket('my-bucket');
     *
     * const encryptionKey = crypto.randomBytes(32);
     *
     * const fileWithCustomEncryption = myBucket.file('my-file');
     * fileWithCustomEncryption.setEncryptionKey(encryptionKey);
     *
     * const fileWithoutCustomEncryption = myBucket.file('my-file');
     *
     * fileWithCustomEncryption.save('data', function(err) {
     *   // Try to download with the File object that hasn't had
     *   // `setEncryptionKey()` called:
     *   fileWithoutCustomEncryption.download(function(err) {
     *     // We will receive an error:
     *     //   err.message === 'Bad Request'
     *
     *     // Try again with the File object we called `setEncryptionKey()` on:
     *     fileWithCustomEncryption.download(function(err, contents) {
     *       // contents.toString() === 'data'
     *     });
     *   });
     * });
     *
     * @example <caption>include:samples/encryption.js</caption>
     * region_tag:storage_upload_encrypted_file
     * Example of uploading an encrypted file:
     *
     * @example <caption>include:samples/encryption.js</caption>
     * region_tag:storage_download_encrypted_file
     * Example of downloading an encrypted file:
     */
    setEncryptionKey(encryptionKey: string | Buffer): this;
    getExpirationDate(): Promise<GetExpirationDateResponse>;
    getExpirationDate(callback: GetExpirationDateCallback): void;
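    /*
     * Illustrative usage of getExpirationDate() (a sketch based on the
     * signatures above, not taken from the original source):
     *
     *   const [expirationDate] = await file.getExpirationDate();
     */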
    getSignedPolicy(options: GetSignedPolicyOptions): Promise<GetSignedPolicyResponse>;
    getSignedPolicy(options: GetSignedPolicyOptions, callback: GetSignedPolicyCallback): void;
    getSignedPolicy(callback: GetSignedPolicyCallback): void;
    getSignedUrl(cfg: GetSignedUrlConfig): Promise<GetSignedUrlResponse>;
    getSignedUrl(cfg: GetSignedUrlConfig, callback: GetSignedUrlCallback): void;
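    /*
     * Illustrative usage of getSignedUrl() (a sketch based on GetSignedUrlConfig
     * above, not taken from the original source):
     *
     *   const [url] = await file.getSignedUrl({
     *     version: 'v4',
     *     action: 'read',
     *     expires: Date.now() + 15 * 60 * 1000, // 15 minutes
     *   });
     */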
    private getSignedUrlV2;
    private getSignedUrlV4;
    isPublic(): Promise<IsPublicResponse>;
    isPublic(callback: IsPublicCallback): void;
    makePrivate(options?: MakeFilePrivateOptions): Promise<MakeFilePrivateResponse>;
    makePrivate(callback: MakeFilePrivateCallback): void;
    makePrivate(options: MakeFilePrivateOptions, callback: MakeFilePrivateCallback): void;
    makePublic(): Promise<MakeFilePublicResponse>;
    makePublic(callback: MakeFilePublicCallback): void;
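    /*
     * Illustrative usage of makePublic() and isPublic() (a sketch based on the
     * signatures above, not taken from the original source):
     *
     *   await file.makePublic();
     *   const [isPublic] = await file.isPublic();
     *   // isPublic === true
     */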
    move(destination: string | Bucket | File, options?: MoveOptions): Promise<MoveResponse>;
    move(destination: string | Bucket | File, callback: MoveCallback): void;
    move(destination: string | Bucket | File, options: MoveOptions, callback: MoveCallback): void;
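    /*
     * Illustrative usage of move() (a sketch based on the signatures above,
     * not taken from the original source; destination names are placeholders):
     *
     *   // Rename within the same bucket...
     *   await file.move('archive/my-file');
     *   // ...or move to another bucket.
     *   await file.move(storage.bucket('other-bucket'));
     */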
    request(reqOpts: DecorateRequestOptions): Promise<[ResponseBody, Metadata]>;
    request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void;
    rotateEncryptionKey(options?: RotateEncryptionKeyOptions): Promise<RotateEncryptionKeyResponse>;
    rotateEncryptionKey(callback: RotateEncryptionKeyCallback): void;
    rotateEncryptionKey(options: RotateEncryptionKeyOptions, callback: RotateEncryptionKeyCallback): void;
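    /*
     * Illustrative usage of rotateEncryptionKey() (a sketch based on the
     * signatures above, not taken from the original source). Assumes the file
     * was previously encrypted with a key passed to setEncryptionKey().
     *
     *   const crypto = require('crypto');
     *   const newKey = crypto.randomBytes(32);
     *   await file.rotateEncryptionKey({ encryptionKey: newKey });
     */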
    save(data: any, options?: SaveOptions): Promise<void>;
    save(data: any, callback: SaveCallback): void;
    save(data: any, options: SaveOptions, callback: SaveCallback): void;
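    /*
     * Illustrative usage of save() (a sketch based on the signatures above,
     * not taken from the original source):
     *
     *   // Write a small payload in one shot; disabling resumable uploads is
     *   // reasonable for payloads well under 10MB (see createWriteStream above).
     *   await file.save('This is the contents of the file.', { resumable: false });
     */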
    setStorageClass(storageClass: string, options?: SetStorageClassOptions): Promise<SetStorageClassResponse>;
    setStorageClass(storageClass: string, options: SetStorageClassOptions, callback: SetStorageClassCallback): void;
    setStorageClass(storageClass: string, callback?: SetStorageClassCallback): void;
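    /*
     * Illustrative usage of setStorageClass() (a sketch based on the signatures
     * above, not taken from the original source; 'coldline' stands in for any
     * storage class accepted by the API):
     *
     *   const [metadata] = await file.setStorageClass('coldline');
     */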
    /**
     * Set a user project to be billed for all requests made from this File
     * object.
     *
     * @param {string} userProject The user project.
     *
     * @example
     * const {Storage} = require('@google-cloud/storage');
     * const storage = new Storage();
     * const bucket = storage.bucket('albums');
     * const file = bucket.file('my-file');
     *
     * file.setUserProject('grape-spaceship-123');
     */
    setUserProject(userProject: string): void;
    /**
     * This creates a gcs-resumable-upload upload stream.
     *
     * @see [gcs-resumable-upload]{@link https://github.com/stephenplusplus/gcs-resumable-upload}
     *
     * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.
     * @param {object=} options - Configuration object.
     *
     * @private
     */
    startResumableUpload_(dup: Duplexify, options: CreateResumableUploadOptions): void;
    /**
     * Takes a readable stream and pipes it to a remote file. Unlike
     * `startResumableUpload_`, which uses the resumable upload technique, this
     * method uses a simple upload (all or nothing).
     *
     * @param {Duplexify} dup - Duplexify stream of data to pipe to the file.
     * @param {object=} options - Configuration object.
     *
     * @private
     */
    startSimpleUpload_(dup: Duplexify, options?: CreateResumableUploadOptions): void;
    /**
     * Create canonical headers for signing a v4 URL.
     *
     * V4 signing requires that header names first be lowercased and then
     * sorted. The canonical headers part of the request is then constructed
     * as:
     *     <lowercasedHeaderName> + ":" + Trim(<value>) + "\n"
     *     ..
     *     <lowercasedHeaderName> + ":" + Trim(<value>) + "\n"
     *
     * @param headers
     * @private
     */
    private getCanonicalHeaders;
}
export declare function emitWarning(): void;
/**
 * Reference to the {@link File} class.
 * @name module:@google-cloud/storage.File
 * @see File
 */
export { File };