diff --git a/README.md b/README.md
index 74a420023..f457077a0 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,7 @@
 # oss-client

 [![NPM version][npm-image]][npm-url]
+[![Node.js CI](https://github.com/node-modules/oss-client/actions/workflows/nodejs.yml/badge.svg)](https://github.com/node-modules/oss-client/actions/workflows/nodejs.yml)
 [![coverage][cov-image]][cov-url]

 [npm-image]: https://img.shields.io/npm/v/oss-client.svg?style=flat-square
@@ -8,7 +9,7 @@
 [cov-image]: http://codecov.io/github/node-modules/oss-client/coverage.svg?branch=master
 [cov-url]: http://codecov.io/github/node-modules/oss-client?branch=master

-Aliyun OSS(Object Storage Service) Node.js Client.
+Alibaba Cloud OSS (Object Storage Service) Node.js client.

 ## Install

@@ -39,12 +40,11 @@ All operation use es7 async/await to implement. All api is async function.
   - [Data Regions](#data-regions)
   - [Create Account](#create-account)
   - [Create A Bucket Instance](#create-a-bucket-instance)
-    - [oss(options)](#ossoptions)
+    - [new OSSObject(options)](#new-ossobjectoptions)
   - [Object Operations](#object-operations)
     - [.put(name, file\[, options\])](#putname-file-options)
     - [.putStream(name, stream\[, options\])](#putstreamname-stream-options)
     - [.append(name, file\[, options\])](#appendname-file-options)
-    - [.getObjectUrl(name\[, baseUrl\])](#getobjecturlname-baseurl)
     - [.generateObjectUrl(name\[, baseUrl\])](#generateobjecturlname-baseurl)
     - [.head(name\[, options\])](#headname-options)
     - [.getObjectMeta(name\[, options\])](#getobjectmetaname-options)
@@ -64,42 +64,11 @@ All operation use es7 async/await to implement. All api is async function.
     - [.restore(name\[, options\])](#restorename-options)
     - [.putSymlink(name, targetName\[, options\])](#putsymlinkname-targetname-options)
     - [.getSymlink(name\[, options\])](#getsymlinkname-options)
-    - [.initMultipartUpload(name\[, options\])](#initmultipartuploadname-options)
-    - [.uploadPart(name, uploadId, partNo, file, start, end\[, options\])](#uploadpartname-uploadid-partno-file-start-end-options)
-    - [.uploadPartCopy(name, uploadId, partNo, range, sourceData\[, options\])](#uploadpartcopyname-uploadid-partno-range-sourcedata-options)
-    - [.completeMultipartUpload(name, uploadId, parts\[, options\])](#completemultipartuploadname-uploadid-parts-options)
-    - [.multipartUpload(name, file\[, options\])](#multipartuploadname-file-options)
-    - [.multipartUploadCopy(name, sourceData\[, options\])](#multipartuploadcopyname-sourcedata-options)
-    - [.listParts(name, uploadId\[, query, options\])](#listpartsname-uploadid-query-options)
-    - [.listUploads(query\[, options\])](#listuploadsquery-options)
-    - [.abortMultipartUpload(name, uploadId\[, options\])](#abortmultipartuploadname-uploadid-options)
     - [.calculatePostSignature(policy)](#calculatepostsignaturepolicy)
     - [.getObjectTagging(name\[, options\])](#getobjecttaggingname-options)
     - [.putObjectTagging(name, tag\[, options\])](#putobjecttaggingname-tag-options)
     - [.deleteObjectTagging(name\[, options\])](#deleteobjecttaggingname-options)
     - [.processObjectSave(sourceObject, targetObject, process\[, targetBucket\])](#processobjectsavesourceobject-targetobject-process-targetbucket)
-  - [RTMP Operations](#rtmp-operations)
-    - [.putChannel(id, conf\[, options\])](#putchannelid-conf-options)
-    - [.getChannel(id\[, options\])](#getchannelid-options)
-    - [.deleteChannel(id\[, options\])](#deletechannelid-options)
-    - [.putChannelStatus(id, status\[, options\])](#putchannelstatusid-status-options)
-    - [.getChannelStatus(id\[, 
options\])](#getchannelstatusid-options) - - [.listChannels(query\[, options\])](#listchannelsquery-options) - - [.getChannelHistory(id\[, options\])](#getchannelhistoryid-options) - - [.createVod(id, name, time\[, options\])](#createvodid-name-time-options) - - [.getRtmpUrl(channelId\[, options\])](#getrtmpurlchannelid-options) - - [Create A Image Service Instance](#create-a-image-service-instance) - - [oss.ImageClient(options)](#ossimageclientoptions) - - [Image Operations](#image-operations) - - [imgClient.get(name, file\[, options\])](#imgclientgetname-file-options) - - [imgClient.getStream(name\[, options\])](#imgclientgetstreamname-options) - - [imgClient.getExif(name\[, options\])](#imgclientgetexifname-options) - - [imgClient.getInfo(name\[, options\])](#imgclientgetinfoname-options) - - [imgClient.putStyle(name, style\[, options\])](#imgclientputstylename-style-options) - - [imgClient.getStyle(name\[, options\])](#imgclientgetstylename-options) - - [imgClient.listStyle(\[options\])](#imgclientliststyleoptions) - - [imgClient.deleteStyle(name\[, options\])](#imgclientdeletestylename-options) - - [imgClient.signatureUrl(name)](#imgclientsignatureurlname) - [Known Errors](#known-errors) - [Contributors](#contributors) @@ -120,6 +89,8 @@ npm install oss-client 2. for example: +Commonjs + ```js const { OSSObject } = require('oss-client'); const ossObject = new OSSObject({ @@ -131,6 +102,20 @@ const ossObject = new OSSObject({ }); ``` +TypeScript and ESM + +```ts +import { OSSObject } from 'oss-client'; + +const ossObject = new OSSObject({ + region: '', + endpoint: '', + accessKeyId: '', + accessKeySecret: '', + bucket: '' +}); +``` + ## Data Regions [OSS current data regions](https://help.aliyun.com/document_detail/31837.html). @@ -145,7 +130,7 @@ After account created, you can create the OSS instance and get the `accessKeyId` Each OSS instance required `accessKeyId`, `accessKeySecret` and `bucket`. -## oss(options) +## new OSSObject(options) Create a Bucket store instance. @@ -154,7 +139,6 @@ options: - accessKeyId {String} access key you create on aliyun console website - accessKeySecret {String} access secret you create - [bucket] {String} the default bucket you want to access - If you don't have any bucket, please use `putBucket()` create one first. - [endpoint] {String} oss region domain. It takes priority over `region`. Set as extranet domain name, intranet domain name, accelerated domain name, etc. according to different needs. please see [endpoints](https://www.alibabacloud.com/help/doc-detail/31837.htm) - [region] {String} the bucket data region location, please see [Data Regions](#data-regions), default is `oss-cn-hangzhou`. @@ -451,18 +435,6 @@ object = await store.append('ossdemo/buffer', Buffer.from('bar'), { }); ``` -### .getObjectUrl(name[, baseUrl]) - -Get the Object url. -If provide `baseUrl`, will use `baseUrl` instead the default `endpoint`. - -e.g.: - -```js -const cdnUrl = store.getObjectUrl('foo/bar.jpg', 'https://mycdn.domian.com'); -// cdnUrl should be `https://mycdn.domian.com/foo/bar.jpg` -``` - ### .generateObjectUrl(name[, baseUrl]) Get the Object url. @@ -1499,1551 +1471,114 @@ const result = await store.getSymlink('ossdemo.txt', { versionId }) console.log(result.targetName) ``` -### .initMultipartUpload(name[, options]) - -Before transmitting data in the Multipart Upload mode, -you must call the Initiate Multipart Upload interface to notify the OSS to initiate a Multipart Upload event. 
-The Initiate Multipart Upload interface returns a globally unique Upload ID created by the OSS server to identify this Multipart Upload event. - -parameters: - -- name {String} object name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [mime] Mime file type e.g.: application/octet-stream - - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string - - [headers] {Object} extra headers - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - [x-oss-server-side-encryption] - Specify the server-side encryption algorithm used to upload each part of this object,Type: string, Valid value: AES256 `x-oss-server-side-encryption: AES256` - - See more: [InitiateMultipartUpload](https://help.aliyun.com/document_detail/31992.html?#title-wh0-a2h-rur) - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - [x-oss-server-side-encryption] if set request header x-oss-server-side-encryption, will return - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name {String} object name store on OSS -- uploadId {String} upload id, use for uploadPart, completeMultipart - -example: - -```js - const result = await store.initMultipartUpload('object'); - console.log(result); -``` - -### .uploadPart(name, uploadId, partNo, file, start, end[, options]) +### .calculatePostSignature(policy) -After initiating a Multipart Upload event, you can upload data in parts based on the specified object name and Upload ID. +get postObject params parameters: -- name {String} object name -- uploadId {String} get by initMultipartUpload api -- partNo {Number} range is 1-10000, If this range is exceeded, OSS returns the InvalidArgument's error code. -- file {String} is FileName, the whole file
- Multipart Upload requires that the size of any Part other than the last Part is greater than 100KB. -- start {Number} part start bytes e.g: 102400 -- end {Number} part end bytes e.g: 204800 -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" +- policy {JSON or Object} policy must contain expiration and conditions. -example: +Success will return postObject Api params. -```js - const name = 'object'; - const result = await store.initMultipartUpload(name); - const uploadId = result.uploadId; - const file; //the data you want to upload, is a File or FileName(only in node) - //if file part is 10 - const partSize = 100 * 1024; - const fileSize = 10 * partSize;//you need to calculate - const dones = []; - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const part = await store.uploadPart(name, uploadId, i, file, start, end); - dones.push({ - number: i, - etag: part.etag - }); - console.log(part); - } +Object: - //end need to call completeMultipartUpload api -``` +- OSSAccessKeyId {String} +- Signature {String} +- policy {Object} response info -### .uploadPartCopy(name, uploadId, partNo, range, sourceData[, options]) +### .getObjectTagging(name[, options]) -Using Upload Part Copy, you can copy data from an existing object and upload a part of the data. -When copying a file larger than 1 GB, you must use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. +Obtains the tags of an object. parameters: -- name {String} object name -- uploadId {String} get by initMultipartUpload api -- partNo {Number} range is 1-10000, If this range is exceeded, OSS returns the InvalidArgument's error code. -- range {String} Multipart Upload requires that the size of any Part other than the last Part is greater than 100KB, range value like `0-102400` -- sourceData {Object} - - sourceKey {String} the source object name - - sourceBucketName {String} the source bucket name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout +- name {String} the object name +- [options] {Object} optional args - [versionId] {String} the version id of history object - - [headers] {Object} The following request header is used for the source objects specified by x-oss-copy-source. - - [x-oss-copy-source-if-match] default none
-      If the ETag value of the source object is equal to the ETag value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message.
-    - [x-oss-copy-source-if-none-match] default none
-      If the ETag value of the source object is not equal to the ETag value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message.
-    - [x-oss-copy-source-if-unmodified-since] default none
- If the time specified by the received parameter is the same as or later than the modification time of the file, the system transfers the file normally, and returns 200 OK; otherwise, the system returns 412 Precondition Failed. - - [x-oss-copy-source-if-modified-since] default none
- If the source object has been modified since the time specified by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" - -example: - -```js - const name = 'object'; - const result = await store.initMultipartUpload(name); - - const partSize = 100 * 1024;//100kb - //if file part is 10 - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const range = start + '-' + (end - 1); - const part = await store.uploadPartCopy(name, result.uploadId, i, range, { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }); - console.log(part); - } - - //end need complete api -``` -- use history object to uploadPartCopy +Success will return the channel information. -```js - const versionId = 'object versionId'; - const name = 'object'; - const result = await store.initMultipartUpload(name); - const partSize = 100 * 1024;//100kb - //if file part is 10 - for (let i = 1; i <= 10; i++) { - const start = partSize * (i -1); - const end = Math.min(start + partSize, fileSize); - const range = start + '-' + (end - 1); - const part = await store.uploadPartCopy(name, result.uploadId, i, range, { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }, { - versionId - }); - console.log(part); - } +object: - //end need complete api -``` +- tag {Object} the tag of object +- res {Object} response info -### .completeMultipartUpload(name, uploadId, parts[, options]) +### .putObjectTagging(name, tag[, options]) -After uploading all data parts, you must call the Complete Multipart Upload API to complete Multipart Upload for the entire file. +Configures or updates the tags of an object. parameters: -- name {String} object name -- uploadId {String} get by initMultipartUpload api -- parts {Array} more part {Object} from uploadPartCopy, , each in the structure: - - number {Number} partNo - - etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64,detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
-    - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL.
-    - [host] {String} The host header value for initiating callback requests.
-    - body {String} The value of the request body when a callback is initiated, for example, key=${key}&etag=${etag}&my_var=${x:my_var}.
-    - [contentType] {String} The Content-Type of the callback requests initiated. It supports application/x-www-form-urlencoded and application/json, with the former as the default value.
-    - [customValue] {Object} Custom parameters are a map of key-value pairs
-      e.g.:
-
-      ```js
-      var customValue = {var1: 'value1', var2: 'value2'}
-      ```
-
-  - [headers] {Object} extra headers, detail see [CompleteMultipartUpload](https://help.aliyun.com/document_detail/31995.html?#title-nan-5y3-rjd)
-
-Success will return:
-
-- res {Object} response info, including
-  - status {Number} response status
-  - headers {Object} response headers
-  - size {Number} response size
-  - rt {Number} request total use time (ms)
-- bucket {String} bucket name
-- name {String} object name store on OSS
-- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE"
-- data {Object} callback server response data, parsed by the sdk with JSON.parse()
-
-example:
-
-```js
-  //init multipart
-  const name = 'object';
-  const result = await store.initMultipartUpload(name);
-
-  //upload part
-  const partSize = 100 * 1024; //100KB
-  const file = Buffer.alloc(10 * partSize); //placeholder: the data you want to upload, this example size is 10 * 100 * 1024
-  const fileSize = file.length; //you need to calculate the real size
-  const done = [];
-  //the file has 10 parts
-  for (let i = 1; i <= 10; i++) {
-    const start = partSize * (i - 1);
-    const end = Math.min(start + partSize, fileSize);
-    const data = file.slice(start, end);
-    const part = await store.uploadPart(name, result.uploadId, i, data, 0, data.length);
-    console.log(part);
-    done.push({
-      number: i,
-      etag: part.res.headers.etag
-    });
-  }
-
-  //complete
-  const completeData = await store.completeMultipartUpload(name, result.uploadId, done);
-  console.log(completeData);
-```
-
-### .multipartUpload(name, file[, options])
-
-Upload file with [OSS multipart][oss-multipart].
-This function combines initMultipartUpload, uploadPart and completeMultipartUpload.
-When you use the multipartUpload api, if you encounter a ConnectionTimeoutError, you should handle it in your business code. To resolve a ConnectionTimeoutError, you can decrease `partSize`, increase `timeout`, retry the request,
-or give tips in your business code;
-
-parameters:
-
-- name {String} object name
-- file {String|Buffer} file path or content buffer
-- [options] {Object} optional args
-  - [parallel] {Number} the number of parts to be uploaded in parallel
-  - [partSize] {Number} the suggested size for each part, default `1024 * 1024` (1MB), minimum `100 * 1024` (100KB)
-  - [progress] {Function} function | async | Promise, the progress callback called after each
-    successful upload of one part, it will be given three parameters:
-    (percentage {Number}, checkpoint {Object}, res {Object})
-  - [checkpoint] {Object} the checkpoint to resume upload, if this is
-    provided, it will continue the upload from where interrupted,
-    otherwise a new multipart upload will be created.
-    - file {File} The file object selected by the user, if the browser is restarted, it needs the user to manually trigger the settings
-    - name {String} object key
-    - fileSize {Number} file size
-    - partSize {Number} part size
-    - uploadId {String} upload id
-    - doneParts {Array} An array of parts that have been completed, each in the structure:
-      - number {Number} part number
-      - etag {String} part etag
-  - [meta] {Object} user meta, will send with `x-oss-meta-` prefix string
-  - [mime] {String} custom mime, will send with `Content-Type` entity header
-  - [callback] {Object} The callback parameter is composed of a JSON string encoded in Base64, detail [see](https://www.alibabacloud.com/help/doc-detail/31989.htm)
-    - url {String} After a file is uploaded successfully, the OSS sends a callback request to this URL.
-    - [host] {String} The host header value for initiating callback requests.
-    - body {String} The value of the request body when a callback is initiated, for example, key=${key}&etag=${etag}&my_var=${x:my_var}.
-    - [contentType] {String} The Content-Type of the callback requests initiated. It supports application/x-www-form-urlencoded and application/json, with the former as the default value.
-    - [customValue] {Object} Custom parameters are a map of key-value pairs
- e.g.: - - ```js - var customValue = {var1: 'value1', var2: 'value2'} - ``` - - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - [timeout] {Number} Milliseconds before a request is considered to be timed out - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" -- data {Object} callback server response data, sdk use JSON.parse() return - -example: - -- Upload using multipart - -```js -const result = await store.multipartUpload('object', '/tmp/file'); -let savedCpt; -console.log(result); - -const result = await store.multipartUpload('object', '/tmp/file', { - parallel: 4, - partSize: 1024 * 1024, - progress: function (p, cpt, res) { - console.log(p); - savedCpt = cpt; - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -const result = await store.multipartUpload('object', '/tmp/file', { - checkpoint: savedCpt, - progress: function (p, cpt, res) { //progress is generator - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -``` - -- multipartUpload progress example - -```js - -//async function -async function asyncProgress(p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); -} - -const result1 = await store.multipartUpload('object', '/tmp/file', { - progress: asyncProgress -}); - -//function -function progress(p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); -} - -const result2 = await store.multipartUpload('object', '/tmp/file', { - progress: progress -}); - -``` - -- multipartUpload with abort + - [versionId] {String} the version id of history object -```js +Success will return the channel information. 
-//start upload -let abortCheckpoint; -store.multipartUpload('object', '/tmp/file', { - progress: function (p, cpt, res) { - abortCheckpoint = cpt; - } -}).then(res => { - // do something -}.catch(err => { - //if abort will catch abort event - if (err.name === 'abort') { - // handle abort - console.log('error: ', err.message) - } -})) +object: -// abort -store.abortMultipartUpload(abortCheckpoint.name, abortCheckpoint.uploadId) +- status {Number} response status +- res {Object} response info -``` +### .processObjectSave(sourceObject, targetObject, process[, targetBucket]) -- multipartUpload with cancel +Persistency indicates that images are asynchronously stored in the specified Bucket -```js +parameters: -//start upload -try { - const result = await store.multipartUpload('object', '/tmp/file', { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } - }); -} catch (err) { - //if cancel will catch cancel event - if (store.isCancel()) { - //do something - } -} +- sourceObject {String} source object name +- targetObject {String} target object name +- process {String} process string +- [targetBucket] {String} target bucket -//the other event to cancel, for example: click event -//to cancel upload must use the same client instance -store.cancel(); +Success will return the channel information. -``` +object: -- multipartUpload with capture `ConnectionTimeoutError` error +- status {Number} response status +- res {Object} response info ```js +const sourceObject = 'a.png' +const targetObject = 'b.png' +const process = 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00' -//start upload -try { - const result = await store.multipartUpload('object', '/tmp/file', { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } - }); -} catch (err) { - if (err.code === 'ConnectionTimeoutError') { - console.log("Woops,Woops ,timeout error!!!"); - // do ConnectionTimeoutError operation - } -} - +await this.store.processObjectSave(sourceObject, targetObject, process); ``` -### .multipartUploadCopy(name, sourceData[, options]) - -Copy file with [OSS multipart][oss-multipart].
-This function combines head, initMultipartUpload, uploadPartCopy and completeMultipartUpload, as sketched below.
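-
-A minimal sketch of that sequence (assuming `store` is a client instance; the object and bucket names are placeholders):
-
-```js
-// multipartUploadCopy drives the whole copy for you, roughly:
-// 1. head() the source object to learn its size
-// 2. initMultipartUpload() for the target name
-// 3. uploadPartCopy() once per source range
-// 4. completeMultipartUpload() with the collected part etags
-const result = await store.multipartUploadCopy('target-object', {
-  sourceKey: 'source-object',
-  sourceBucketName: 'source-bucket'
-});
-console.log(result.etag);
-```
-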
-When copying a file larger than 1 GB, you should use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. +## Known Errors -parameters: +Each error return by OSS server will contains these properties: -- name {String} object name -- file {String|File} file path or HTML5 Web File -- [options] {Object} optional args - - [timeout] {Number} Milliseconds before a request is considered to be timed out - - [parallel] {Number} the number of parts to be uploaded in parallel - - [partSize] {Number} the suggested size for each part, defalut `1024 * 1024`(1MB), minimum `100 * 1024`(100KB) - - [versionId] {String} the version id of history object - - [progress] {Function} function | async | Promise, the progress callback called after each - successful upload of one part, it will be given three parameters: - (percentage {Number}, checkpoint {Object}, res {Object}) - - [checkpoint] {Object} the checkpoint to resume upload, if this is - provided, it will continue the upload from where interrupted, - otherwise a new multipart upload will be created. - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'Cache-Control' cache control for download, e.g.: `Cache-Control: public, no-cache` - - 'Content-Disposition' object name for download, e.g.: `Content-Disposition: somename` - - 'Content-Encoding' object content encoding for download, e.g.: `Content-Encoding: gzip` - - 'Expires' expires time for download, an absolute date and time. e.g.: `Tue, 08 Dec 2020 13:49:43 GMT` - - [copyheaders] {Object} only uploadPartCopy api used, detail [see](https://www.alibabacloud.com/help/doc-detail/31994.htm) - - [x-oss-copy-source-if-match] only uploadPartCopy api used, default none
-      If the ETag value of the source object is equal to the ETag value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message.
-    - [x-oss-copy-source-if-none-match] only uploadPartCopy api used, default none
-      If the ETag value of the source object is not equal to the ETag value provided by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message.
-    - [x-oss-copy-source-if-unmodified-since] only uploadPartCopy api used, default none
- If the time specified by the received parameter is the same as or later than the modification time of the file, the system transfers the file normally, and returns 200 OK; otherwise, the system returns 412 Precondition Failed. - - [x-oss-copy-source-if-modified-since] only uploadPartCopy api used, default none
- If the source object has been modified since the time specified by the user, the system performs the Copy Object operation; otherwise, the system returns the 412 Precondition Failed message. +- name {String} error name +- message {String} error message +- requestId {String} uuid for this request, if you meet some unhandled problem, + you can send this request id to OSS engineer to find out what's happend. +- hostId {String} OSS cluster name for this request -Success will return: +The following table lists the OSS error codes: -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- bucket {String} bucket name -- name name {String} object name store on OSS -- etag {String} object etag contains ", e.g.: "5B3C1A2E053D763E1B002CC607C5A0FE" - -example: - -- Copy using multipart - -```js -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}); -let savedCpt; -console.log(result); - -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}, { - parallel: 4, - partSize: 1024 * 1024, - progress: function (p, cpt, res) { - console.log(p); - savedCpt = cpt; - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -console.log(result); - -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}, { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } -}); - -console.log(result); - -``` - -- multipartUploadCopy with abort - -```js - -//start upload -let abortCheckpoint; -store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }, { - progress: function (p, cpt, res) { - abortCheckpoint = cpt; - } -}).then(res => { - // do something -}.catch(err => { - //if abort will catch abort event - if (err.name === 'abort') { - // handle abort - console.log('error: ', err.message) - } -})) - -//the other event to abort, for example: click event -//to abort upload must use the same client instance -store.abortMultipartUpload(abortCheckpoint.name, abortCheckpoint.uploadId) - -``` - -- multipartUploadCopy with cancel - -```js - -//start upload -try { - const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' - }, { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - } - }); -} catch (err) { - //if cancel will catch cancel event - if (store.isCancel()) { - //do something - } -} - -//the other event to cancel, for example: click event -//to cancel upload must use the same client instance -store.cancel(); - -``` - -- multipartUploadCopy with versionId - -```js - -const versionId = 'object versionId' -//start upload -const result = await store.multipartUploadCopy('object', { - sourceKey: 'sourceKey', - sourceBucketName: 'sourceBucketName' -}, { - checkpoint: savedCpt, - progress: function (p, cpt, res) { - console.log(p); - console.log(cpt); - console.log(res.headers['x-oss-request-id']); - }, - versionId -}); - -``` - -### .listParts(name, uploadId[, query, options]) - -The ListParts command can be used to list all successfully uploaded parts mapped 
to a specific upload ID, i.e.: those not completed and not -aborted. - -parameters: - -- name {String} object key -- uploadId {String} upload ID from initMultipartUpload api -- [query] {Object} query parameters - - [max-parts] {Number} The maximum part number in the response of the OSS. default value: 1000. - - [part-number-marker] {Number} Starting position of a specific list. A part is listed only when the part number is greater than the value of this parameter. - - [encoding-type] {String} Specify the encoding of the returned content and the encoding type. Optional value: url -- [options] {Object} optional args - - [timeout] {Number} the operation timeout - -Success will return: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- uploadId {String} upload ID -- bucket {String} Specify the bucket name. -- name {String} object name -- PartNumberMarker {Number} Starting position of the part numbers in the listing result. -- nextPartNumberMarker {Number} If not all results are returned this time, the response request includes the NextPartNumberMarker element to indicate the value of PartNumberMarker in the next request. -- maxParts {Number} upload ID -- isTruncated {Boolean} Whether the returned result list for List Parts is truncated. The “true” indicates that not all results are returned; “false” indicates that all results are returned. -- parts {Array} The container that saves part information, each in the structure: - - PartNumber {Number} Part number. - - LastModified {Date} Time when a part is uploaded. - - ETag {String} ETag value in the content of the uploaded part. - - Size {Number} Size of the uploaded part. - -example: - -- List uploaded part - -```js - -const result = await store.listParts('objcet', 'uploadId', { - 'max-parts': 1000 -}); -console.log(result); -``` - -### .listUploads(query[, options]) - -List on-going multipart uploads, i.e.: those not completed and not -aborted. - -parameters: - -- query {Object} query parameters - - [prefix] {String} the object key prefix - - [max-uploads] {Number} the max uploads to return - - [key-marker] {String} the object key marker, if `upload-id-marker` - is not provided, return uploads with `key > marker`, otherwise - return uploads with `key >= marker && uploadId > id-marker` - - [upload-id-marker] {String} the upload id marker, must be used - **WITH** `key-marker` -- [options] {Object} optional args - - [timeout] {Number} the operation timeout - -example: - -- List on-going multipart uploads - -```js - -const result = await store.listUploads({ - 'max-uploads': 100, - 'key-marker': 'my-object', - 'upload-id-marker': 'upload-id' -}); -console.log(result); -``` - -### .abortMultipartUpload(name, uploadId[, options]) - -Abort a multipart upload for object. - -parameters: - -- name {String} the object name -- uploadId {String} the upload id -- [options] {Object} optional args - - [timeout] {Number} the operation timeout - -example: - -- Abort a multipart upload - -```js -const result = await store.abortMultipartUpload('object', 'upload-id'); -console.log(result); -``` - -### .calculatePostSignature(policy) - -get postObject params - -parameters: - -- policy {JSON or Object} policy must contain expiration and conditions. - -Success will return postObject Api params. 
- -Object: - -- OSSAccessKeyId {String} -- Signature {String} -- policy {Object} response info - -### .getObjectTagging(name[, options]) - -Obtains the tags of an object. - -parameters: - -- name {String} the object name -- [options] {Object} optional args - - [versionId] {String} the version id of history object - -Success will return the channel information. - -object: - -- tag {Object} the tag of object -- res {Object} response info - -### .putObjectTagging(name, tag[, options]) - -Configures or updates the tags of an object. - -parameters: - -- name {String} the object name -- tag {Object} tag, eg. `{var1: value1,var2:value2}` -- [options] {Object} optional args - - [versionId] {String} the version id of history object - -Success will return the channel information. - -object: - -- status {Number} response status -- res {Object} response info - -### .deleteObjectTagging(name[, options]) - -Deletes the tag of a specified object. - -parameters: - -- name {String} the object name -- tag {Object} tag, eg. `{var1: value1,var2:value2}` -- [options] {Object} optional args - - [versionId] {String} the version id of history object - -Success will return the channel information. - -object: - -- status {Number} response status -- res {Object} response info - -### .processObjectSave(sourceObject, targetObject, process[, targetBucket]) - -Persistency indicates that images are asynchronously stored in the specified Bucket - -parameters: - -- sourceObject {String} source object name -- targetObject {String} target object name -- process {String} process string -- [targetBucket] {String} target bucket - -Success will return the channel information. - -object: - -- status {Number} response status -- res {Object} response info - -```js -const sourceObject = 'a.png' -const targetObject = 'b.png' -const process = 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00' - -await this.store.processObjectSave(sourceObject, targetObject, process); -``` - -## RTMP Operations - -All operations function is [async], except `getRtmpUrl`. - -async function format: `async functionName(...)`. - -### .putChannel(id, conf[, options]) - -Create a live channel. - -parameters: - -- id {String} the channel id -- conf {Object} the channel config - - [Description] {String} the channel description - - [Status] {String} the channel status: 'enabled' or 'disabled' - - [Target] {Object} - - [Type] {String} the data type for the channel, only 'HLS' is supported now - - [FragDuration] {Number} duration of a 'ts' segment - - [FragCount] {Number} the number of 'ts' segments in a 'm3u8' - - [PlaylistName] {String} the 'm3u8' name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel information. - -object: - -- publishUrls {Array} the publish urls -- playUrls {Array} the play urls -- res {Object} response info - -example: - -- Create a live channel - -```js -const cid = 'my-channel'; -const conf = { - Description: 'this is channel 1', - Status: 'enabled', - Target: { - Type: 'HLS', - FragDuration: '10', - FragCount: '5', - PlaylistName: 'playlist.m3u8' - } -}; - -const r = await this.store.putChannel(cid, conf); -console.log(r); -``` - -### .getChannel(id[, options]) - -Get live channel info. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel information. 
- -object: - -- data {Object} channel info, same as conf in [.putChannel](#putchannelid-conf-options) -- res {Object} response info - -example: - -- Get live channel info - -```js -const cid = 'my-channel'; - -const r = await this.store.getChannel(cid); -console.log(r); -``` - -### .deleteChannel(id[, options]) - -Delete a live channel. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the response infomation. - -object: - -- res {Object} response info - -example: - -- Delete a live channel - -```js -const cid = 'my-channel'; - -const r = await this.store.deleteChannel(cid); -console.log(r); -``` - -### .putChannelStatus(id, status[, options]) - -Change the live channel status. - -parameters: - -- id {String} the channel id -- status {String} the status: 'enabled' or 'disabled' -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the response information. - -object: - -- res {Object} response info - -example: - -- Disable a live channel - -```js -const cid = 'my-channel'; - -const r = await this.store.putChannelStatus(cid, 'disabled'); -console.log(r); -``` - -### .getChannelStatus(id[, options]) - -Get the live channel status. - -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel status information. - -object: - -- data {Object} - - Status {String} the channel status: 'Live' or 'Idle' - - [ConnectedTime] {String} the connected time of rtmp pushing - - [RemoteAddr] {String} the remote addr of rtmp pushing - - [Video] {Object} the video parameters (Width/Height/FrameRate/Bandwidth/Codec) - - [Audio] {Object} the audio parameters (Bandwidth/SampleRate/Codec) -- res {Object} response info - -example: - -- Get a live channel status - -```js -const cid = 'my-channel'; - -const r = await this.store.getChannelStatus(cid); -console.log(r); - -// { Status: 'Live', -// ConnectedTime: '2016-04-12T11:51:03.000Z', -// RemoteAddr: '42.120.74.98:53931', -// Video: -// { Width: '672', -// Height: '378', -// FrameRate: '29', -// Bandwidth: '60951', -// Codec: 'H264' }, -// Audio: { Bandwidth: '5959', SampleRate: '22050', Codec: 'AAC' } -// } -``` - -### .listChannels(query[, options]) - -List channels. - -parameters: - -- query {Object} parameters for list - - prefix {String}: the channel id prefix (returns channels with this prefix) - - marker {String}: the channle id marker (returns channels after this id) - - max-keys {Number}: max number of channels to return -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the channel list. - -object: - -- channels {Array} the channels, each in the structure: - - Name {String} the channel id - - Description {String} the channel description - - Status {String} the channel status - - LastModified {String} the last modification time of the channel - - PublishUrls {Array} the publish urls for the channel - - PlayUrls {Array} the play urls for the channel -- nextMarker: result.data.NextMarker || null, -- isTruncated: result.data.IsTruncated === 'true' -- res {Object} response info - -example: - -- List live channels - -```js -const r = await this.store.listChannels({ - prefix: 'my-channel', - 'max-keys': 3 -}); -console.log(r); -``` - -### .getChannelHistory(id[, options]) - -Get the live channel history. 
- -parameters: - -- id {String} the channel id -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the history information. - -object: - -- records {Object} the pushing records, each in the structure: - - StartTime {String} the start time - - EndTime {String} the end time - - RemoteAddr {String} the remote addr -- res {Object} response info - -example: - -- Get the live channel history - -```js -const cid = 'my-channel'; - -const r = await this.store.getChannelHistory(cid); -console.log(r); -``` - -### .createVod(id, name, time[, options]) - -Create a VOD playlist for the channel. - -parameters: - -- id {String} the channel id -- name {String} the playlist name -- time {Object} the duration time - - startTime {Number} the start time in epoch seconds - - endTime {Number} the end time in epoch seconds -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the response information. - -object: - -- res {Object} response info - -example: - -- Create a vod playlist of a live channel - -```js -const cid = 'my-channel'; - -const r = await this.store.createVod(cid, 're-play', { - startTime: 1460464870, - endTime: 1460465877 -}); -console.log(r); -``` - -### .getRtmpUrl(channelId[, options]) - -Get signatured rtmp url for publishing. - -parameters: - -- channelId {String} the channel id -- [options] {Object} optional parameters - - [expires] {Number} the expire time in seconds of the url - - [params] {Object} the additional paramters for url, e.g.: {playlistName: 'play.m3u8'} - - [timeout] {Number} the operation timeout - -Success will return the rtmp url. - -example: - -- Get a rtmp url. - -```js -const cid = 'my-channel'; - -const url = this.store.getRtmpUrl(this.cid, { - params: { - playlistName: 'play.m3u8' - }, - expires: 3600 -}); -console.log(url); -// rtmp://ossliveshow.oss-cn-hangzhou.aliyuncs.com/live/tl-channel?OSSAccessKeyId=T0cqQWBk2ThfRS6m&Expires=1460466188&Signature=%2BnzTtpyxUWDuQn924jdS6b51vT8%3D -``` - -## Create A Image Service Instance - -Each Image Service instance required `accessKeyId`, `accessKeySecret`, `bucket` and `imageHost`. - -### oss.ImageClient(options) - -Create a Image service instance. - -options: - -- imageHost {String} your image service domain that binding to a OSS bucket -- accessKeyId {String} access key you create on aliyun console website -- accessKeySecret {String} access secret you create -- bucket {String} the default bucket you want to access - If you don't have any bucket, please use `putBucket()` create one first. -- [region] {String} the bucket data region location, please see [Data Regions](#data-regions), - default is `oss-cn-hangzhou` - Current available: `oss-cn-hangzhou`, `oss-cn-qingdao`, `oss-cn-beijing`, `oss-cn-hongkong` and `oss-cn-shenzhen` -- [internal] {Boolean} access OSS with aliyun internal network or not, default is `false` - If your servers are running on aliyun too, you can set `true` to save lot of money. -- [timeout] {String|Number} instance level timeout for all operations, default is `60s` - -example: - -```js -const { OSSImage } = require('oss-client'); - -const imgClient = new OSSImage({ - accessKeyId: 'your access key', - accessKeySecret: 'your access secret', - bucket: 'my_image_bucket' - imageHost: 'thumbnail.myimageservice.com' -}); -``` - -## Image Operations - -All operations function is [async], except `imgClient.signatureUrl`. - -async function format: `async functionName(...)`. 
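-
-For example (a minimal sketch; `imgClient` is an instance created as in [oss.ImageClient(options)](#ossimageclientoptions), and the object and style names are placeholders):
-
-```js
-// every image operation except signatureUrl returns a Promise, so await it
-const { data } = await imgClient.getInfo('demo.jpg');
-console.log(data.ImageWidth, data.ImageHeight);
-
-// signatureUrl is synchronous and returns the signed url string directly
-const url = imgClient.signatureUrl('demo.jpg@200w_200h');
-```
-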
- -### imgClient.get(name, file[, options]) - -Get an image from the image channel. - -parameters: - -- name {String} image object name with operation style store on OSS -- [file] {String|WriteStream} file path or WriteStream instance to store the image - If `file` is null or ignore this parameter, function will return info contains `content` property. -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [headers] {Object} extra headers, detail see [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616.html) - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the info contains response. - -object: - -- [content] {Buffer} file content buffer if `file` parameter is null or ignore -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -If object not exists, will throw NoSuchKeyError. - -example: - -- Get an exists image with a style and store it to the local file - -```js -const imagepath = '/home/ossdemo/demo.jpg'; -await imgClient.get('ossdemo/demo.jpg@200w_200h', filepath); -``` - -_ Store image to a writestream - -```js -await imgClient.get('ossdemo/demo.jpg@200w_200h', somestream); -``` - -- Get an image content buffer - -```js -const result = await imgClient.get('ossdemo/demo.jpg@200w_200h'); -console.log(Buffer.isBuffer(result.content)); -``` - -- Get a not exists object or a not image object - -```js -const imagepath = '/home/ossdemo/demo.jpg'; -await imgClient.get('ossdemo/not-exists-demo.jpg@200w_200h', filepath); -// will throw NoSuchKeyError -``` - -### imgClient.getStream(name[, options]) - -Get an image read stream. - -parameters: - -- name {String} image object name with operation style store on OSS -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - - [headers] {Object} extra headers - - 'If-Modified-Since' object modified after this time will return 200 and object meta, - otherwise return 304 not modified - - 'If-Unmodified-Since' object modified before this time will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-Match' object etag equal this will return 200 and object meta, - otherwise throw PreconditionFailedError - - 'If-None-Match' object etag not equal this will return 200 and object meta, - otherwise return 304 not modified - -Success will return the stream instance and response info. - -object: - -- stream {ReadStream} readable stream instance - if response status is not 200, stream will be `null`. -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) - -If object not exists, will throw NoSuchKeyError. 
- -example: - -- Get an exists image object stream - -```js -const result = await imgClient.getStream('ossdemo/demo.jpg@200w_200h'); -result.stream.pipe(fs.createWriteStream('some demo.jpg')); -``` - -### imgClient.getExif(name[, options]) - -Get a image exif info by image object name from the image channel. - -parameters: - -- name {String} image object name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the info contains response. - -object: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- data {Object} image exif object - -If object don't have exif, will throw 400 BadRequest. - -example: - -```js -const result = await imgClient.getExif('demo.jpg'); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/json", -// content - length: "148", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 11:06:32 GMT", -// "last-modified": "Mon, 30 Mar 2015 10:46:35 GMT" -// }, -// size: 148, -// aborted: false, -// rt: 461, -// keepAliveSocket: false -// }, -// data: { -// FileSize: 343683, -// ImageHeight: 1200, -// ImageWidth: 1600, -// Orientation: 1 -// } -// } - -``` - -### imgClient.getInfo(name[, options]) - -Get a image info and exif info by image object name from the image channel. - -parameters: - -- name {String} image object name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the info contains response. - -object: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- data {Object} image exif object - -example: - -```js -const result = await imgClient.getInfo('demo.jpg'); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/json", -// content - length: "148", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 11:06:32 GMT", -// "last-modified": "Mon, 30 Mar 2015 10:46:35 GMT" -// }, -// size: 148, -// aborted: false, -// rt: 461, -// keepAliveSocket: false -// }, -// data: { -// FileSize: 343683, -// Format: "jpg", -// ImageHeight: 1200, -// ImageWidth: 1600, -// Orientation: 1 -// } -// } - -``` - -### imgClient.putStyle(name, style[, options]) - -// TODO - -### imgClient.getStyle(name[, options]) - -Get a style by name from the image channel. - -parameters: - -- name {String} image style name -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the info contains response. 
- -object: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- data {Object} styles object - - Name {String} style name - - Content {String} style content - - CreateTime {String} style create time - - LastModifyTime {String} style last modify time - -example: - -```js -const result = await imgClient.getStyle('400'); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/xml", -// content - length: "234", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 10:58:20 GMT" -// }, -// size: 234, -// aborted: false, -// rt: 398, -// keepAliveSocket: false -// }, -// data: { -// Name: "400", -// Content: "400w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:34:21 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:34:21 GMT" -// } -// } -``` - -### imgClient.listStyle([options]) - -Get all styles from the image channel. - -parameters: - -- [options] {Object} optional parameters - - [timeout] {Number} the operation timeout - -Success will return the info contains response. - -object: - -- res {Object} response info, including - - status {Number} response status - - headers {Object} response headers - - size {Number} response size - - rt {Number} request total use time (ms) -- data {Array} styles array, a style object: - - Name {String} style name - - Content {String} style content - - CreateTime {String} style create time - - LastModifyTime {String} style last modify time - -example: - -```js -const result = await imgClient.listStyle(); -// resut: -// { -// res: { -// status: 200, -// statusCode: 200, -// headers: { -// server: "Tengine", -// content - type: "application/xml", -// content - length: "913", -// connection: "keep-alive", -// date: "Tue, 31 Mar 2015 10:47:32 GMT" -// }, -// size: 913, -// aborted: false, -// rt: 1911, -// keepAliveSocket: false -// }, -// data: [{ -// Name: "200-200", -// Content: "0e_200w_200h_0c_0i_0o_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:28:08 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:28:08 GMT" -// }, { -// Name: "800", -// Content: "800w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:29:15 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:29:15 GMT" -// }, { -// Name: "400", -// Content: "400w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:34:21 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:34:21 GMT" -// }, { -// Name: "600", -// Content: "600w_90Q_1x.jpg", -// CreateTime: "Thu, 19 Mar 2015 08:35:02 GMT", -// LastModifyTime: "Thu, 19 Mar 2015 08:35:02 GMT" -// }] -// } -``` - -### imgClient.deleteStyle(name[, options]) - -// TODO - -### imgClient.signatureUrl(name) - -Create a signature url for directly download. - -parameters: - -- name {String} image object name with operation style store on OSS -- [options] {Object} optional parameters - - [expires] {Number} after expires seconds, the url will become invalid, default is `1800` - - [timeout] {Number} the operation timeout - -Success will return full signature url. 
- -example: - -```js -const url = imgClient.signatureUrl(' -'); -// http://thumbnail.myimageservice.com/demo.jpg@200w_200h?OSSAccessKeyId=uZxyLARzYZtGwHKY&Expires=1427803849&Signature=JSPRe06%2FjQpQSj5zlx2ld1V%2B35I%3D -``` - -## Known Errors - -Each error return by OSS server will contains these properties: - -- name {String} error name -- message {String} error message -- requestId {String} uuid for this request, if you meet some unhandled problem, - you can send this request id to OSS engineer to find out what's happend. -- hostId {String} OSS cluster name for this request - -The following table lists the OSS error codes: - -[More code info](https://help.aliyun.com/knowledge_detail/32005.html) +[More code info](https://help.aliyun.com/knowledge_detail/32005.html) code | status | message | message in Chinese --- | --- | --- | --- @@ -3094,8 +1629,6 @@ RequestError | -1 | network error | 网络出现中断或异常 ConnectionTimeoutError | -2 | request connect timeout | 请求连接超时 SecurityTokenExpired | 403 | sts Security Token Expired | sts Security Token 超时失效 -[oss-multipart]: https://help.aliyun.com/document_detail/31992.html - ## Contributors diff --git a/index.d.ts b/index.d.ts deleted file mode 100644 index 0fca45132..000000000 --- a/index.d.ts +++ /dev/null @@ -1,1082 +0,0 @@ -// Forked from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/ali-oss/index.d.ts - -import { Readable, Writable } from 'stream'; -import { - IObjectSimple, - GetObjectOptions, - ListObjectsQuery, - RequestOptions, - ListObjectResult, - PutObjectOptions, - PutObjectResult, - NormalSuccessResponse, - HeadObjectOptions, - HeadObjectResult, - GetObjectResult, - GetStreamOptions, - GetStreamResult, - CopyObjectOptions, - CopyAndPutMetaResult, - StorageType, - OwnerType, - UserMeta, - ObjectCallback, -} from 'oss-interface'; - -export * from 'oss-interface'; - -export interface ClientOptions { - /** access secret you create */ - accessKeyId: string; - /** access secret you create */ - accessKeySecret: string; - /** used by temporary authorization */ - stsToken?: string | undefined; - /** the default bucket you want to access If you don't have any bucket, please use putBucket() create one first. */ - bucket?: string | undefined; - /** oss region domain. It takes priority over region. */ - endpoint?: string | undefined; - /** the bucket data region location, please see Data Regions, default is oss-cn-hangzhou. */ - region?: string | undefined; - /** access OSS with aliyun internal network or not, default is false. If your servers are running on aliyun too, you can set true to save lot of money. */ - internal?: boolean | undefined; - /** instruct OSS client to use HTTPS (secure: true) or HTTP (secure: false) protocol. */ - secure?: boolean | undefined; - /** instance level timeout for all operations, default is 60s */ - timeout?: string | number | undefined; - /** use custom domain name */ - cname?: boolean | undefined; - /** use time (ms) of refresh STSToken interval it should be less than sts info expire interval, default is 300000ms(5min) when sts info expires. */ - refreshSTSTokenInterval?: number; - /** used by auto set stsToken、accessKeyId、accessKeySecret when sts info expires. 

The following table lists the OSS error codes:

-
-[More code info](https://help.aliyun.com/knowledge_detail/32005.html)
+[More code info](https://help.aliyun.com/knowledge_detail/32005.html)

code | status | message | message in Chinese
--- | --- | --- | ---
@@ -3094,8 +1629,6 @@ RequestError | -1 | network error | 网络出现中断或异常
ConnectionTimeoutError | -2 | request connect timeout | 请求连接超时
SecurityTokenExpired | 403 | sts Security Token Expired | sts Security Token 超时失效

-[oss-multipart]: https://help.aliyun.com/document_detail/31992.html
-
## Contributors

diff --git a/index.d.ts b/index.d.ts
deleted file mode 100644
index 0fca45132..000000000
--- a/index.d.ts
+++ /dev/null
@@ -1,1082 +0,0 @@
-// Forked from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/ali-oss/index.d.ts
-
-import { Readable, Writable } from 'stream';
-import {
-  IObjectSimple,
-  GetObjectOptions,
-  ListObjectsQuery,
-  RequestOptions,
-  ListObjectResult,
-  PutObjectOptions,
-  PutObjectResult,
-  NormalSuccessResponse,
-  HeadObjectOptions,
-  HeadObjectResult,
-  GetObjectResult,
-  GetStreamOptions,
-  GetStreamResult,
-  CopyObjectOptions,
-  CopyAndPutMetaResult,
-  StorageType,
-  OwnerType,
-  UserMeta,
-  ObjectCallback,
-} from 'oss-interface';
-
-export * from 'oss-interface';
-
-export interface ClientOptions {
-  /** access key id you create on aliyun console website */
-  accessKeyId: string;
-  /** access secret you create */
-  accessKeySecret: string;
-  /** used by temporary authorization */
-  stsToken?: string | undefined;
-  /** the default bucket you want to access. If you don't have any bucket, please use putBucket() to create one first. */
-  bucket?: string | undefined;
-  /** oss region domain. It takes priority over region. */
-  endpoint?: string | undefined;
-  /** the bucket data region location, please see Data Regions, default is oss-cn-hangzhou. */
-  region?: string | undefined;
-  /** access OSS with aliyun internal network or not, default is false. If your servers are running on aliyun too, you can set true to save a lot of money. */
-  internal?: boolean | undefined;
-  /** instruct OSS client to use HTTPS (secure: true) or HTTP (secure: false) protocol. */
-  secure?: boolean | undefined;
-  /** instance level timeout for all operations, default is 60s */
-  timeout?: string | number | undefined;
-  /** use custom domain name */
-  cname?: boolean | undefined;
-  /** interval (ms) for refreshing the STS token; it should be less than the STS info expire interval, default is 300000 ms (5 min) */
-  refreshSTSTokenInterval?: number;
-  /** used to automatically set stsToken, accessKeyId and accessKeySecret when the STS info expires; the return value must be an object that contains stsToken, accessKeyId and accessKeySecret */
-  refreshSTSToken?: () => Promise<{ accessKeyId: string, accessKeySecret: string, stsToken: string }>;
-}
-
-/**
- * Generate STS Authorization
- */
-export class STS {
-  constructor(options: STSOptions);
-
-  assumeRole(
-    roleArn: string,
-    /**
-     * RAM Policy config object or valid JSON string
-     */
-    policy?: object | string, // TODO: RAM policy type
-    expirationSeconds?: number,
-    session?: string,
-    options?: {
-      timeout: number;
-      /**
-       * ctx param in urllib's request param
-       */
-      ctx: any;
-    },
-  ): Promise<{ credentials: Credentials }>;
-}
-
-export interface Credentials {
-  /**
-   * STS access key id.
-   */
-  AccessKeyId: string;
-
-  /**
-   * STS access key secret.
-   */
-  AccessKeySecret: string;
-
-  /**
-   * STS token.
-   */
-  SecurityToken: string;
-
-  /**
-   * STS expiration UTC time in ISO format.
-   */
-  Expiration: string;
-}
-
-export interface STSOptions {
-  /**
-   * Access key id.
-   */
-  accessKeyId: string;
-
-  /**
-   * Access key secret.
-   */
-  accessKeySecret: string;
-}
-
-export interface Bucket {
-  name: string;
-  region: string;
-  creationDate: string;
-  StorageClass: StorageType;
-}
-
-export type ACLType = 'public-read-write' | 'public-read' | 'private';
-
-export type HTTPMethods = 'GET' | 'POST' | 'DELETE' | 'PUT';
-
-export type RedundancyType = 'LRS' | 'ZRS';
-
-export type RuleStatusType = 'Enabled' | 'Disabled';
-
-export interface LifecycleRule {
-  /** rule id, if not set, OSS will auto create it with random string. */
-  id?: string | undefined;
-  /** store prefix */
-  prefix: string;
-  /** rule status, allow values: Enabled or Disabled */
-  status: RuleStatusType;
-  /** expire after the days */
-  days?: number | string | undefined;
-  /** expire date, e.g.: 2022-10-11T00:00:00.000Z. Only set one of date and days. */
-  date: string;
-}
-
-export interface CORSRule {
-  /** configure for Access-Control-Allow-Origin header */
-  allowedOrigin: string | string[];
-  /** configure for Access-Control-Allow-Methods header */
-  allowedMethod: string | string[];
-  /** configure for Access-Control-Allow-Headers header */
-  allowedHeader?: string | string[] | undefined;
-  /** configure for Access-Control-Expose-Headers header */
-  exposeHeader?: string | string[] | undefined;
-  /** configure for Access-Control-Max-Age header */
-  maxAgeSeconds?: string | string[] | undefined;
-}
-
-export interface BucketPolicy {
-  Version: string;
-  Statement: Array<{
-    Action: string[];
-    Effect: 'Allow' | 'Deny';
-    Principal: string[];
-    Resource: string[];
-  }>;
-}
-
-export interface Checkpoint {
-  /** The file object selected by the user; if the browser is restarted, the user needs to manually trigger the settings again */
-  file: any;
-  /** object key */
-  name: string;
-  fileSize: number;
-  partSize: number;
-  uploadId: string;
-  doneParts: Array<{ number: number; etag: string }>;
-}
-
-export interface ObjectPart {
-  PartNumber: number;
-  /** {Date} Time when a part is uploaded.
*/ - LastModified: any; - ETag: string; - size: number; -} - -export interface Upload { - name: string; - uploadId: string; - initiated: any; -} - -export interface Channel { - Name: string; - Description: string; - Status: string; - LastModified: string; - PublishUrls: string[]; - PlayUrls: string[]; -} - -export interface ChannelHistory { - StartTime: string; - EndTime: string; - /** the remote addr */ - RemoteAddr: string; -} - -// parameters type -export interface ListBucketsQueryType { - /** search buckets using prefix key */ - prefix?: string | undefined; - /** search start from marker, including marker key */ - marker?: string | undefined; - /** max buckets, default is 100, limit to 1000 */ - 'max-keys'?: string | number | undefined; -} - -export interface PutBucketOptions { - acl: ACLType; - dataRedundancyType: RedundancyType; - timeout: number; - storageClass: StorageType; -} - -export interface PutBucketWebsiteConfig { - /** default page, e.g.: index.html */ - index: string; - /** error page, e.g.: 'error.html' */ - error?: string | undefined; -} - -export interface ListV2ObjectsQuery { - /** search object using prefix key */ - prefix?: string; - /** search start from token, including token key */ - 'continuation-token'?: string; - /** only search current dir, not including subdir */ - delimiter?: string | number; - /** max objects, default is 100, limit to 1000 */ - 'max-keys'?: string; - /** - * The name of the object from which the list operation begins. - * If this parameter is specified, objects whose names are alphabetically greater than the start-after parameter value are returned. - */ - 'start-after'?: string; - /** Specifies whether to include the information about object owners in the response. */ - 'fetch-owner'?: boolean; - /** Specifies that the object names in the response are URL-encoded. 
*/ - 'encoding-type'?: 'url' | ''; -} - -export interface PutStreamOptions { - /** the stream length, chunked encoding will be used if absent */ - contentLength?: number | undefined; - /** the operation timeout */ - timeout: number; - /** custom mime, will send with Content-Type entity header */ - mime: string; - meta: UserMeta; - callback: ObjectCallback; - headers?: object | undefined; -} - -export interface AppendObjectOptions { - /** specify the position which is the content length of the latest object */ - position?: string | undefined; - /** the operation timeout */ - timeout?: number | undefined; - /** custom mime, will send with Content-Type entity header */ - mime?: string | undefined; - meta?: UserMeta | undefined; - headers?: object | undefined; -} - -export interface AppendObjectResult { - name: string; - /** the url of oss */ - url: string; - res: NormalSuccessResponse; - /** the next position */ - nextAppendPosition: string; -} - -export interface DeleteMultiOptions { - /** quite mode or verbose mode, default is false */ - quiet?: boolean | undefined; - timeout?: number | undefined; -} - -export interface DeleteMultiResult { - /** deleted object names list */ - deleted?: string[] | undefined; - res: NormalSuccessResponse; -} - -export interface ResponseHeaderType { - 'content-type'?: string | undefined; - 'content-disposition'?: string | undefined; - 'cache-control'?: string | undefined; -} - -export interface SignatureUrlOptions { - /** after expires seconds, the url will become invalid, default is 1800 */ - expires?: number | undefined; - /** the HTTP method, default is 'GET' */ - method?: HTTPMethods | undefined; - /** set the request content type */ - 'Content-Type'?: string | undefined; - /** image process params, will send with x-oss-process e.g.: {process: 'image/resize,w_200'} */ - process?: string | undefined; - /** traffic limit, range: 819200~838860800 */ - trafficLimit?: number | undefined; - /** additional signature parameters in url */ - subResource?: object | undefined; - /** set the response headers for download */ - response?: ResponseHeaderType | undefined; - /** set the callback for the operation */ - callback?: ObjectCallback | undefined; -} - -export interface GetACLResult { - acl: ACLType; - res: NormalSuccessResponse; -} - -export interface InitMultipartUploadOptions { - timeout?: number | undefined; - /** Mime file type */ - mime?: string | undefined; - meta?: UserMeta | undefined; - headers?: object | undefined; -} - -export interface InitMultipartUploadResult { - res: { status: number; headers: object; size: number; rt: number }; - /** bucket name */ - bucket: string; - /** object name store on OSS */ - name: string; - /** upload id, use for uploadPart, completeMultipart */ - uploadId: string; -} - -export interface UploadPartResult { - name: string; - etag: string; - res: NormalSuccessResponse; -} - -export interface CompleteMultipartUploadOptions { - timeout?: number | undefined; - callback?: ObjectCallback | undefined; - headers?: object | undefined; -} - -export interface CompleteMultipartUploadResult { - bucket: string; - name: string; - etag: string; - data: object; - res: NormalSuccessResponse; -} - -export interface MultipartUploadOptions { - /** the number of parts to be uploaded in parallel */ - parallel?: number | undefined; - /** the suggested size for each part */ - partSize?: number | undefined; - /** the progress callback called after each successful upload of one part */ - progress?: ((...args: any[]) => any) | undefined; - /** the 
checkpoint to resume upload, if this is provided, it will continue the upload from where interrupted, otherwise a new multipart upload will be created. */ - checkpoint?: Checkpoint | undefined; - meta?: UserMeta | undefined; - mime?: string | undefined; - callback?: ObjectCallback | undefined; - headers?: object | undefined; - timeout?: number | undefined; - /** {Object} only uploadPartCopy api used, detail */ - copyheaders?: object | undefined; -} - -export interface MultipartUploadResult { - bucket: string; - name: string; - etag: string; - data: object; - res: NormalSuccessResponse; -} - -export interface MultipartUploadCopyResult { - bucket: string; - name: string; - etag: string; - res: NormalSuccessResponse; -} - -export interface MultipartUploadCopySourceData { - /** the source object name */ - sourceKey: string; - /** sourceData. the source bucket name */ - sourceBucketName: string; - /** data copy start byte offset, e.g: 0 */ - startOffset: number; - /** data copy end byte offset, e.g: 102400 */ - endOffset: number; -} - -export interface ListPartsQuery { - /** The maximum part number in the response of the OSS. default value: 1000. */ - 'max-parts': number; - /** Starting position of a specific list. A part is listed only when the part number is greater than the value of this parameter. */ - 'part-number-marker': number; - /** Specify the encoding of the returned content and the encoding type. Optional value: url */ - 'encoding-type': string; -} - -export interface ListPartsResult { - uploadId: string; - bucket: string; - name: string; - PartNumberMarker: number; - nextPartNumberMarker: number; - maxParts: number; - isTruncated: boolean; - parts: ObjectPart[]; - res: NormalSuccessResponse; -} - -export interface ListUploadsQuery { - prefix?: string | undefined; - 'max-uploads'?: number | undefined; - 'key-marker'?: string | undefined; - 'upload-id-marker'?: string | undefined; -} - -export interface ListUploadsResult { - res: NormalSuccessResponse; - bucket: string; - nextKeyMarker: any; - nextUploadIdMarker: any; - isTruncated: boolean; - uploads: Upload[]; -} - -export interface PutChannelConf { - Description?: string | undefined; - Status?: string | undefined; - Target?: { - Type: string; - FragDuration: number; - FragCount: number; - PlaylistName: string; - } | undefined; -} - -export interface PutChannelResult { - publishUrls: string[]; - playUrls: string[]; - res: NormalSuccessResponse; -} - -export interface GetChannelResult { - Status: string; - ConnectedTime?: string | undefined; - RemoteAddr?: string | undefined; - Video?: object | undefined; - Audio?: object | undefined; - res: NormalSuccessResponse; -} - -export interface ListChannelsQuery { - /** the channel id prefix (returns channels with this prefix) */ - prefix: string; - /** the channel id marker (returns channels after this id) */ - marker: string; - /** max number of channels to return */ - 'max-keys ': number; -} - -export interface ListChannelsResult { - channels: Channel[]; - nextMarker: string | null; - isTruncated: boolean; - res: NormalSuccessResponse; -} - -export interface ChannelHistoryResult { - records: ChannelHistory; - res: NormalSuccessResponse; -} - -export interface GetRtmpUrlOptions { - /** the expire time in seconds of the url */ - expires?: number | undefined; - /** the additional parameters for url, e.g.: {playlistName: 'play.m3u8'} */ - params?: object | undefined; - /** the operation timeout */ - timeout?: number | undefined; -} - -export interface GetBucketPolicyResult { - policy: 
BucketPolicy | null; - status: number; - res: NormalSuccessResponse; -} - -export interface PostObjectParams { - policy: string; - OSSAccessKeyId: string; - Signature: string; -} - -// cluster -export interface ClusterType { - host: string; - accessKeyId: string; - accessKeySecret: string; -} - -export interface ClusterOptions { - clusters: ClusterType[]; - schedule?: string | undefined; -} - -export class ClusterClient { - constructor(options: ClusterOptions); - - list(query: ListObjectsQuery | null, options: RequestOptions): Promise; - - /** - * @since 6.12.0 - */ - listV2(query: ListV2ObjectsQuery | null, options: RequestOptions): Promise; - - put(name: string, file: any, options?: PutObjectOptions): Promise; - - putStream( - name: string, - stream: any, - options?: PutStreamOptions, - ): Promise<{ name: string; res: NormalSuccessResponse }>; - - head(name: string, options?: HeadObjectOptions): Promise; - - get(name: string, file?: any, options?: GetObjectOptions): Promise; - - getStream(name?: string, options?: GetStreamOptions): Promise; - - delete(name: string, options?: RequestOptions): Promise; - - copy(name: string, sourceName: string, options?: CopyObjectOptions): Promise; - - putMeta(name: string, meta: UserMeta, options: RequestOptions): Promise; - - deleteMulti(names: string[], options?: DeleteMultiOptions): Promise; - - signatureUrl(name: string, options?: SignatureUrlOptions): string; - - asyncSignatureUrl(name: string, options?: SignatureUrlOptions): Promise; - - putACL(name: string, acl: ACLType, options?: RequestOptions): Promise; - - restore(name: string, options?: RequestOptions): Promise; -} - -// image -export interface ImageClientOptions { - /** your image service domain that binding to a OSS bucket */ - imageHost: string; - /** access key you create on aliyun console website */ - accessKeyId: string; - /** access secret you create */ - accessKeySecret: string; - /** the default bucket you want to access If you don't have any bucket, please use putBucket() create one first. */ - bucket: string; - /** the bucket data region location, please see Data Regions, default is oss-cn-hangzhou */ - region?: string | undefined; - /** access OSS with aliyun internal network or not, default is false If your servers are running on aliyun too, you can set true to save lot of money. */ - internal?: boolean | undefined; - /** instance level timeout for all operations, default is 60s */ - timeout?: string | number | undefined; -} - -export interface ImageGetOptions { - timeout?: number | undefined; - headers?: object | undefined; -} - -export interface StyleData { - /** style name */ - Name: string; - /** style content */ - Content: string; - /** style create time */ - CreateTime: string; - /** style last modify time */ - LastModifyTime: string; -} - -export class ImageClient { - constructor(options: ImageClientOptions); - - /** - * Get an image from the image channel. - */ - get(name: string, file?: any, options?: ImageGetOptions): Promise<{ content: any; res: NormalSuccessResponse }>; - - /** - * Get an image read stream. - */ - getStream(name: string, options?: ImageGetOptions): Promise<{ stream: any; res: NormalSuccessResponse }>; - - /** - * Get a image exif info by image object name from the image channel. - */ - getExif(name: string, options?: RequestOptions): Promise<{ data: object; res: NormalSuccessResponse }>; - - /** - * Get a image info and exif info by image object name from the image channel. 
- */ - getInfo(name: string, options?: RequestOptions): Promise<{ data: object; res: NormalSuccessResponse }>; - - /** - * todo - */ - putStyle( - name: string, - style: string, - options?: RequestOptions, - ): Promise<{ data: object; res: NormalSuccessResponse }>; - - /** - * Get a style by name from the image channel. - */ - getStyle(name: string, options?: RequestOptions): Promise<{ data: StyleData; res: NormalSuccessResponse }>; - - /** - * Get all styles from the image channel. - */ - listStyle(options?: RequestOptions): Promise; - - /** - * todo - */ - deleteStyle(styleName: string, options?: RequestOptions): Promise; - - /** - * Create a signature url for directly download. - */ - signatureUrl(name: string, options?: { expires?: string | undefined; timeout?: string | undefined }): string; - - /** - * Basically the same as signatureUrl, if refreshSTSToken is configured asyncSignatureUrl will refresh stsToken - */ - asyncSignatureUrl(name: string, options?: SignatureUrlOptions): Promise; -} - -// base Client -export class Client implements IObjectSimple { - constructor(options: ClientOptions); - - /******************************************* the bucket operations *************************************************/ - - // base operators - /** - * List buckets in this account. - */ - listBuckets(query: ListBucketsQueryType | null, options?: RequestOptions): Promise; - - /** - * Create a new bucket. - */ - putBucket( - name: string, - options?: PutBucketOptions, - ): Promise<{ bucket: string; res: NormalSuccessResponse }>; - - /** - * Use the bucket. - */ - useBucket(name: string): void; - - /** - * Delete an empty bucket. - */ - deleteBucket(name: string, options?: RequestOptions): Promise; - - /** - * Get bucket information,include CreationDate、ExtranetEndpoint、IntranetEndpoint、Location、Name、StorageClass、 Owner、AccessControlList - */ - getBucketInfo(name: string): Promise; - - /** - * Get bucket location - */ - getBucketLocation(name: string): Promise; - - // ACL operations - /** - * Update the bucket ACL. - */ - putBucketACL(name: string, acl: ACLType, options?: RequestOptions): Promise; - - /** - * Get the bucket ACL. - * acl - acl settings string - */ - getBucketACL(name: string, options?: RequestOptions): Promise<{ acl: string; res: NormalSuccessResponse }>; - - // logging operations - /** - * Update the bucket logging settings. Log file will create every one hour and name format: -YYYY-mm-DD-HH-MM-SS-UniqueString. - */ - putBucketLogging(name: string, prefix?: string, options?: RequestOptions): Promise; - - /** - * Get the bucket logging settings. - */ - getBucketLogging( - name: string, - options?: RequestOptions, - ): Promise<{ enable: boolean; prefix: string | null; res: NormalSuccessResponse }>; - - /** - * Delete the bucket logging settings. - */ - deleteBucketLogging(name: string, options?: RequestOptions): Promise; - - // Website operations - /** - * Set the bucket as a static website. - */ - putBucketWebsite(name: string, config: PutBucketWebsiteConfig): Promise; - - /** - * Get the bucket website config. - */ - getBucketWebsite( - name: string, - options?: RequestOptions, - ): Promise<{ index: string; error: string; res: NormalSuccessResponse }>; - - /** - * Delete the bucket website config. - */ - deleteBucketWebsite(name: string, options?: RequestOptions): Promise; - - // referer operations - /** - * Set the bucket request Referer white list. 
- */ - putBucketReferer( - name: string, - allowEmpty: boolean, - referers: string[], - options?: RequestOptions, - ): Promise; - - /** - * Get the bucket request Referer white list. - */ - getBucketReferer( - name: string, - options?: RequestOptions, - ): Promise<{ allowEmpty: boolean; referers: string[]; res: NormalSuccessResponse }>; - - /** - * Delete the bucket request Referer white list. - */ - deleteBucketReferer(name: string, options?: RequestOptions): Promise; - - // lifecycle operations - /** - * Set the bucket object lifecycle. - */ - putBucketLifecycle( - name: string, - rules: LifecycleRule[], - options?: RequestOptions, - ): Promise; - - /** - * Get the bucket object lifecycle. - */ - getBucketLifecycle( - name: string, - options?: RequestOptions, - ): Promise<{ rules: LifecycleRule[]; res: NormalSuccessResponse }>; - - /** - * Delete the bucket object lifecycle. - */ - deleteBucketLifecycle(name: string, options?: RequestOptions): Promise; - - // CORS operations - /** - * Set CORS rules of the bucket object - */ - putBucketCORS( - name: string, - rules: CORSRule[], - options?: RequestOptions, - ): Promise; - - /** - * Get CORS rules of the bucket object. - */ - getBucketCORS(name: string): Promise<{ rules: CORSRule[]; res: NormalSuccessResponse }>; - - /** - * Delete CORS rules of the bucket object. - */ - deleteBucketCORS(name: string): Promise; - - // policy operations - /** - * Adds or modify policy for a bucket. - */ - putBucketPolicy( - name: string, - policy: BucketPolicy, - options?: RequestOptions - ): Promise<{ - status: number, - res: NormalSuccessResponse, - }>; - - /** - * Obtains the policy for a bucket. - */ - getBucketPolicy(name: string, options?: RequestOptions): Promise; - - /** - * Deletes the policy added for a bucket. - */ - deleteBucketPolicy( - name: string, - options?: RequestOptions - ): Promise<{ - status: number, - res: NormalSuccessResponse, - }>; - - /********************************************************** Object operations ********************************************/ - /** - * List objects in the bucket. - */ - list(query: ListObjectsQuery | null, options?: RequestOptions): Promise; - - /** - * Add an object to the bucket. - */ - put(name: string, file: string | Buffer | Uint8Array | Readable, options?: PutObjectOptions): Promise; - - /** - * Add a stream object to the bucket. - */ - putStream( - name: string, - stream: any, - options?: PutStreamOptions, - ): Promise<{ name: string; res: NormalSuccessResponse }>; - - /** - * Append an object to the bucket, it's almost same as put, but it can add content to existing object rather than override it. - */ - append(name: string, file: any, options?: AppendObjectOptions): Promise; - - /** - * Get the Object url. If provide baseUrl, will use baseUrl instead the default endpoint. - */ - getObjectUrl(name: string, baseUrl?: string): string; - - /** - * Get the Object url. If provide baseUrl, will use baseUrl instead the default bucket and endpoint. Suggest use generateObjectUrl instead of getObjectUrl. - */ - generateObjectUrl(name: string, baseUrl?: string): string; - - /** - * Head an object and get the meta info. - */ - head(name: string, options?: HeadObjectOptions): Promise; - - /** - * Get an object from the bucket. - */ - get(name: string, options?: GetObjectOptions): Promise; - get(name: string, file: string | Writable, options?: GetObjectOptions): Promise; - - /** - * Get an object read stream. 
- */ - getStream(name?: string, options?: GetStreamOptions): Promise; - - /** - * Delete an object from the bucket. - */ - delete(name: string, options?: RequestOptions): Promise; - - /** - * Copy an object from sourceName to name. - */ - copy(name: string, sourceName: string, options?: CopyObjectOptions): Promise; - copy(name: string, sourceName: string, sourceBucket: string, options?: CopyObjectOptions): Promise; - - /** - * Set an exists object meta. - */ - putMeta(name: string, meta: UserMeta, options: RequestOptions): Promise; - - /** - * Delete multi objects in one request. - */ - deleteMulti(names: string[], options?: DeleteMultiOptions): Promise; - - /** - * Create a signature url for download or upload object. When you put object with signatureUrl ,you need to pass Content-Type.Please look at the example. - */ - signatureUrl(name: string, options?: SignatureUrlOptions): string; - - /** - * Basically the same as signatureUrl, if refreshSTSToken is configured asyncSignatureUrl will refresh stsToken - */ - asyncSignatureUrl(name: string, options?: SignatureUrlOptions): Promise; - - /** - * Set object's ACL. - */ - putACL(name: string, acl: ACLType, options?: RequestOptions): Promise; - - /** - * Get object's ACL. - */ - getACL(name: string, options?: RequestOptions): Promise; - - /** - * Restore Object. - */ - restore(name: string, options?: RequestOptions): Promise; - - /** - * multi upload - */ - initMultipartUpload(name: string, options?: InitMultipartUploadOptions): Promise; - - /** - * After initiating a Multipart Upload event, you can upload data in parts based on the specified object name and Upload ID. - */ - uploadPart( - name: string, - uploadId: string, - partNo: number, - file: any, - start: number, - end: number, - options?: RequestOptions, - ): Promise; - - /** - * Using Upload Part Copy, you can copy data from an existing object and upload a part of the data. - * When copying a file larger than 1 GB, you must use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. - */ - uploadPartCopy( - name: string, - uploadId: string, - partNo: number, - range: string, - sourceData: { sourceKey: string; sourceBucketName: string }, - options: { timeout?: number | undefined; headers?: object | undefined }, - ): Promise; - - /** - * After uploading all data parts, you must call the Complete Multipart Upload API to complete Multipart Upload for the entire file. - */ - completeMultipartUpload( - name: string, - uploadId: string, - parts: Array<{ number: number; etag: string }>, - options?: CompleteMultipartUploadOptions, - ): Promise; - - /** - * Upload file with OSS multipart. - */ - multipartUpload(name: string, file: any, options: MultipartUploadOptions): Promise; - - /** - * Copy file with OSS multipart. - * this function contains head, initMultipartUpload, uploadPartCopy, completeMultipartUpload. - * When copying a file larger than 1 GB, you should use the Upload Part Copy method. If you want to copy a file smaller than 1 GB, see Copy Object. - */ - multipartUploadCopy( - name: string, - sourceData: MultipartUploadCopySourceData, - options?: MultipartUploadOptions, - ): Promise; - - /** - * The ListParts command can be used to list all successfully uploaded parts mapped to a specific upload ID, i.e.: those not completed and not aborted. - */ - listParts( - name: string, - uploadId: string, - query?: ListPartsQuery, - options?: RequestOptions, - ): Promise; - - /** - * List on-going multipart uploads, i.e.: those not completed and not aborted. 
- */ - listUploads(query: ListUploadsQuery, options?: RequestOptions): Promise; - - /** - * Abort a multipart upload for object. - */ - abortMultipartUpload( - name: string, - uploadId: string, - options?: RequestOptions, - ): Promise; - - /** - * get postObject params. - */ - calculatePostSignature( - /** - * policy config object or JSON string - */ - policy: object | string - ): PostObjectParams; - - /************************************************ RTMP Operations *************************************************************/ - /** - * Create a live channel. - */ - putChannel(id: string, conf: PutChannelConf, options?: RequestOptions): Promise; - - /** - * Get live channel info. - */ - getChannel( - id: string, - options?: RequestOptions, - ): Promise<{ data: PutChannelConf; res: NormalSuccessResponse }>; - - /** - * Delete a live channel. - */ - deleteChannel(id: string, options?: RequestOptions): Promise; - - /** - * Change the live channel status. - */ - putChannelStatus(id: string, status?: string, options?: RequestOptions): Promise; - - /** - * Get the live channel status. - */ - getChannelStatus(id: string, options?: RequestOptions): Promise; - - /** - * List channels. - */ - listChannels(query: ListChannelsQuery, options?: RequestOptions): Promise; - - /** - * Get the live channel history. - */ - getChannelHistory(id: string, options?: RequestOptions): Promise; - - /** - * Create a VOD playlist for the channel. - */ - createVod( - id: string, - name: string, - time: { startTime: number; endTime: number }, - options?: RequestOptions, - ): Promise; - - /** - * Get signatured rtmp url for publishing. - */ - getRtmpUrl(channelId?: string, options?: GetRtmpUrlOptions): string; -} diff --git a/index.test-d.ts b/index.test-d.ts deleted file mode 100644 index ea9883aec..000000000 --- a/index.test-d.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { expectType } from 'tsd'; -import { Writable, Readable } from 'stream'; -import { - GetObjectOptions, - IObjectSimple, - SignatureUrlOptions, - ListObjectsQuery, - RequestOptions, - ListObjectResult, - PutObjectOptions, - PutObjectResult, - NormalSuccessResponse, - HeadObjectOptions, - HeadObjectResult, - GetObjectResult, - GetStreamOptions, - GetStreamResult, - CopyObjectOptions, - CopyAndPutMetaResult, - Client, - ImageClient, - ClusterClient, -} from '.'; - -const getObjectOptions = {} as GetObjectOptions; -expectType(getObjectOptions.process); - -class SimpleClient implements IObjectSimple { - async list(query: ListObjectsQuery | null, options: RequestOptions): Promise { - console.log(query, options); - return {} as any; - } - async put(name: string, file: string | Buffer | Uint8Array | Readable, options?: PutObjectOptions): Promise { - console.log(name, file, options); - return {} as any; - } - async head(name: string, options?: HeadObjectOptions): Promise { - console.log(name, options); - return {} as any; - } - - async get(name: string, options?: GetObjectOptions): Promise; - async get(name: string, file: string | Writable, options?: GetObjectOptions): Promise; - async get(name: string, file?: string | Writable | GetObjectOptions, options?: GetObjectOptions): Promise { - console.log(name, file, options); - return {} as any; - } - async getStream(name?: string, options?: GetStreamOptions): Promise { - console.log(name, options); - return {} as any; - } - async delete(name: string, options?: RequestOptions): Promise { - console.log(name, options); - return {} as any; - } - - async copy(name: string, sourceName: string, options?: 
CopyObjectOptions): Promise; - async copy(name: string, sourceName: string, sourceBucket: string, options?: CopyObjectOptions): Promise; - async copy(name: string, sourceName: string, sourceBucket?: string | CopyObjectOptions, options?: CopyObjectOptions): Promise { - console.log(name, sourceName, sourceBucket, options); - return {} as any; - } - - async asyncSignatureUrl(name: string, options?: SignatureUrlOptions) { - console.log(name, options); - return ''; - } -} - -const simpleClient = new SimpleClient(); -expectType>(simpleClient.get('foo')); -expectType>(simpleClient.get('foo', { timeout: 10 })); -expectType>(simpleClient.get('foo', 'file.path')); - -const ossClient = {} as Client; -expectType>(ossClient.get('foo')); -expectType>(ossClient.list({ 'max-keys': 100 })); - -const clusterClient = {} as ClusterClient; -expectType>(clusterClient.get('foo')); - -const imageClient = {} as ImageClient; -expectType>(imageClient.get('foo')); - -const bytes = {} as Uint8Array; -expectType>(simpleClient.put('foo', bytes)); diff --git a/lib/client.js b/lib/client.js deleted file mode 100644 index 12399efb6..000000000 --- a/lib/client.js +++ /dev/null @@ -1,353 +0,0 @@ -const debug = require('util').debuglog('oss-client:client'); -const sendToWormhole = require('stream-wormhole'); -const xml = require('xml2js'); -const merge = require('merge-descriptors'); -const utility = require('utility'); -const urllib = require('urllib'); -const pkg = require('../package.json'); -const signUtils = require('./common/signUtils'); -const _initOptions = require('./common/client/initOptions'); -const { createRequest } = require('./common/utils/createRequest'); -const { encoder } = require('./common/utils/encoder'); -const { getReqUrl } = require('./common/client/getReqUrl'); -const { setSTSToken } = require('./common/utils/setSTSToken'); -const { retry } = require('./common/utils/retry'); -const { isFunction } = require('./common/utils/isFunction'); - -function Client(options, ctx) { - if (!(this instanceof Client)) { - return new Client(options, ctx); - } - - if (options && options.inited) { - this.options = options; - } else { - this.options = Client.initOptions(options); - } - - // support custom agent and urllib client - if (this.options.urllib) { - this.urllib = this.options.urllib; - } else { - this.urllib = urllib; - } - this.ctx = ctx; - this.userAgent = this._getUserAgent(); - this.stsTokenFreshTime = new Date(); -} - -/** - * Expose `Client` - */ - -module.exports = Client; - -Client.initOptions = function initOptions(options) { - return _initOptions(options); -}; - -/** - * prototype - */ - -const proto = Client.prototype; - -/** - * Object operations - */ -merge(proto, require('./common/object')); -merge(proto, require('./object')); -merge(proto, require('./common/image')); -/** - * Bucket operations - */ -merge(proto, require('./common/bucket')); -merge(proto, require('./bucket')); -// multipart upload -merge(proto, require('./managed-upload')); -/** - * RTMP operations - */ -merge(proto, require('./rtmp')); - -/** - * common multipart-copy - */ -merge(proto, require('./common/multipart-copy')); -/** - * Common module parallel - */ -merge(proto, require('./common/parallel')); -/** - * Multipart operations - */ -merge(proto, require('./common/multipart')); -/** - * ImageClient class - */ -Client.ImageClient = require('./image')(Client); -/** - * Cluster Client class - */ -Client.ClusterClient = require('./cluster')(Client); - -/** - * STS Client class - */ -Client.STS = require('./sts'); - -/** - * get 
OSS signature - * @param {String} stringToSign stringToSign - * @return {String} the signature - */ -proto.signature = function signature(stringToSign) { - debug('authorization stringToSign: %s', stringToSign); - - return signUtils.computeSignature(this.options.accessKeySecret, stringToSign, this.options.headerEncoding); -}; - -proto._getReqUrl = getReqUrl; - -/** - * get author header - * - * "Authorization: OSS " + Access Key Id + ":" + Signature - * - * Signature = base64(hmac-sha1(Access Key Secret + "\n" - * + VERB + "\n" - * + CONTENT-MD5 + "\n" - * + CONTENT-TYPE + "\n" - * + DATE + "\n" - * + CanonicalizedOSSHeaders - * + CanonicalizedResource)) - * - * @param {String} method - * @param {String} resource - * @param {Object} header - * @return {String} - * - * @private - */ - -proto.authorization = function authorization(method, resource, subres, headers) { - const stringToSign = signUtils.buildCanonicalString(method.toUpperCase(), resource, { - headers, - parameters: subres, - }); - - return signUtils.authorization( - this.options.accessKeyId, - this.options.accessKeySecret, - stringToSign, - this.options.headerEncoding - ); -}; - -/** - * request oss server - * @param {Object} params - * - {String} object - * - {String} bucket - * - {Object} [headers] - * - {Object} [query] - * - {Buffer} [content] - * - {Stream} [stream] - * - {Stream} [writeStream] - * - {String} [mime] - * - {Boolean} [xmlResponse] - * - {Boolean} [customResponse] - * - {Number} [timeout] - * - {Object} [ctx] request context, default is `this.ctx` - * - * @private - */ - -proto.request = async function(params) { - if (this.options.retryMax) { - return await retry(request.bind(this), this.options.retryMax, { - errorHandler: err => { - const _errHandle = _err => { - if (params.stream) return false; - const statusErr = [ -1, -2 ].includes(_err.status); - const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); - return statusErr && requestErrorRetryHandle(_err); - }; - if (_errHandle(err)) return true; - return false; - }, - })(params); - } - return await request.call(this, params); - -}; - -async function request(params) { - if (this.options.stsToken && isFunction(this.options.refreshSTSToken)) { - await setSTSToken.call(this); - } - const reqParams = createRequest.call(this, params); - let result; - let reqErr; - // try ctx.httpclient first - const urllib = reqParams.params.ctx?.httpclient ?? reqParams.params.ctx?.urllib ?? 
this.urllib; - try { - result = await urllib.request(reqParams.url, reqParams.params); - debug('response %s %s, got %s, headers: %j', params.method, reqParams.url, result.status, result.headers); - } catch (err) { - reqErr = err; - } - let err; - if (result && params.successStatuses && params.successStatuses.indexOf(result.status) === -1) { - err = await this.requestError(result); - err.params = params; - } else if (reqErr) { - err = await this.requestError(reqErr); - } - - if (err) { - if (params.customResponse && result && result.res) { - // consume the response stream - await sendToWormhole(result.res); - } - - if (err.name === 'ResponseTimeoutError') { - err.message = `${err.message.split(',')[0]}, please increase the timeout or use multipartDownload.`; - } - throw err; - } - - if (params.xmlResponse) { - result.data = await this.parseXML(result.data); - } - return result; -} - -proto._getResource = function _getResource(params) { - let resource = '/'; - if (params.bucket) resource += `${params.bucket}/`; - if (params.object) resource += encoder(params.object, this.options.headerEncoding); - - return resource; -}; - -proto._escape = function _escape(name) { - return utility.encodeURIComponent(name).replace(/%2F/g, '/'); -}; - -/* - * Get User-Agent for node.js - * @example - * oss-client/1.0.0 Node.js/5.3.0 (darwin; arm64) - */ - -proto._getUserAgent = function _getUserAgent() { - const sdk = `${pkg.name}/${pkg.version}`; - const platform = `Node.js/${process.version.slice(1)} (${process.platform}; ${process.arch})`; - return `${sdk} ${platform}`; -}; - -/** - * thunkify xml.parseString - * @param {String|Buffer} str - * - * @private - */ - -proto.parseXML = function parseXMLThunk(str) { - return new Promise((resolve, reject) => { - if (Buffer.isBuffer(str)) { - str = str.toString(); - } - xml.parseString( - str, - { - explicitRoot: false, - explicitArray: false, - }, - (err, result) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); - }); -}; - -/** - * generater a request error with request response - * @param {Object} result - * - * @private - */ - -proto.requestError = async function requestError(result) { - let err = null; - if (result.name === 'ResponseTimeoutError') { - err = new Error(result.message); - err.name = result.name; - } else if (!result.data || !result.data.length) { - if (result.status === -1 || result.status === -2) { - // -1 is net error , -2 is timeout - err = new Error(result.message); - err.name = result.name; - err.status = result.status; - // try to use raw code first - err.code = result.code || result.name; - } else { - // HEAD not exists resource - if (result.status === 404) { - err = new Error('Object not exists'); - err.name = 'NoSuchKeyError'; - err.status = 404; - err.code = 'NoSuchKey'; - } else if (result.status === 412) { - err = new Error('Pre condition failed'); - err.name = 'PreconditionFailedError'; - err.status = 412; - err.code = 'PreconditionFailed'; - } else { - err = new Error(`Unknow error, status: ${result.status}, raw error: ${result}`); - err.name = 'UnknownError'; - err.status = result.status; - } - err.requestId = result.headers?.['x-oss-request-id']; - err.host = ''; - } - } else { - const message = String(result.data); - debug('request response error data: %s', message); - - let info; - try { - info = (await this.parseXML(message)) || {}; - } catch (error) { - debug(message); - error.message += `\nraw xml: ${message}`; - error.status = result.status; - error.requestId = result.headers['x-oss-request-id']; - 
return error; - } - - let msg = info.Message || `unknow request error, status: ${result.status}`; - if (info.Condition) { - msg += ` (condition: ${info.Condition})`; - } - err = new Error(msg); - err.name = info.Code ? `${info.Code}Error` : 'UnknownError'; - err.status = result.status; - err.code = info.Code; - err.requestId = info.RequestId; - err.hostId = info.HostId; - } - - debug('generate error %j', err); - return err; -}; - -proto.setSLDEnabled = function setSLDEnabled(enable) { - this.options.sldEnable = !!enable; - return this; -}; diff --git a/lib/common/bucket/abortBucketWorm.js b/lib/common/bucket/abortBucketWorm.js deleted file mode 100644 index 413d4fd0e..000000000 --- a/lib/common/bucket/abortBucketWorm.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -async function abortBucketWorm(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'worm', options); - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -} - -exports.abortBucketWorm = abortBucketWorm; diff --git a/lib/common/bucket/completeBucketWorm.js b/lib/common/bucket/completeBucketWorm.js deleted file mode 100644 index 443f7b154..000000000 --- a/lib/common/bucket/completeBucketWorm.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -async function completeBucketWorm(name, wormId, options) { - checkBucketName(name); - const params = this._bucketRequestParams('POST', name, { wormId }, options); - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -} - -exports.completeBucketWorm = completeBucketWorm; diff --git a/lib/common/bucket/deleteBucketEncryption.js b/lib/common/bucket/deleteBucketEncryption.js deleted file mode 100644 index d74a86298..000000000 --- a/lib/common/bucket/deleteBucketEncryption.js +++ /dev/null @@ -1,19 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -/** - * deleteBucketEncryption - * @param {String} bucketName - bucket name - */ -proto.deleteBucketEncryption = async function deleteBucketEncryption(bucketName) { - _checkBucketName(bucketName); - const params = this._bucketRequestParams('DELETE', bucketName, 'encryption'); - params.successStatuses = [ 204 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketInventory.js b/lib/common/bucket/deleteBucketInventory.js deleted file mode 100644 index d2cb362e8..000000000 --- a/lib/common/bucket/deleteBucketInventory.js +++ /dev/null @@ -1,20 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -/** - * deleteBucketInventory - * @param {String} bucketName - bucket name - * @param {String} inventoryId - inventory id - * @param {Object} options - options - */ -async function deleteBucketInventory(bucketName, inventoryId, options = {}) { - const subres = Object.assign({ inventory: '', inventoryId }, options.subres); - checkBucketName(bucketName); - const params = this._bucketRequestParams('DELETE', bucketName, subres, options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -} -exports.deleteBucketInventory = deleteBucketInventory; diff --git a/lib/common/bucket/deleteBucketLifecycle.js 
b/lib/common/bucket/deleteBucketLifecycle.js deleted file mode 100644 index 8d96683b0..000000000 --- a/lib/common/bucket/deleteBucketLifecycle.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -proto.deleteBucketLifecycle = async function deleteBucketLifecycle(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'lifecycle', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketPolicy.js b/lib/common/bucket/deleteBucketPolicy.js deleted file mode 100644 index ecd93c419..000000000 --- a/lib/common/bucket/deleteBucketPolicy.js +++ /dev/null @@ -1,21 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * deleteBucketPolicy - * @param {String} bucketName - bucket name - * @param {Object} options - */ - -proto.deleteBucketPolicy = async function deleteBucketPolicy(bucketName, options = {}) { - _checkBucketName(bucketName); - - const params = this._bucketRequestParams('DELETE', bucketName, 'policy', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketTags.js b/lib/common/bucket/deleteBucketTags.js deleted file mode 100644 index 657fffed9..000000000 --- a/lib/common/bucket/deleteBucketTags.js +++ /dev/null @@ -1,21 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * deleteBucketTags - * @param {String} name - bucket name - * @param {Object} options - */ - -proto.deleteBucketTags = async function deleteBucketTags(name, options = {}) { - _checkBucketName(name); - - const params = this._bucketRequestParams('DELETE', name, 'tagging', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/deleteBucketWebsite.js b/lib/common/bucket/deleteBucketWebsite.js deleted file mode 100644 index e8ea12fb3..000000000 --- a/lib/common/bucket/deleteBucketWebsite.js +++ /dev/null @@ -1,13 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; - -proto.deleteBucketWebsite = async function deleteBucketWebsite(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('DELETE', name, 'website', options); - params.successStatuses = [ 204 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/bucket/extendBucketWorm.js b/lib/common/bucket/extendBucketWorm.js deleted file mode 100644 index ce1a89d3e..000000000 --- a/lib/common/bucket/extendBucketWorm.js +++ /dev/null @@ -1,22 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -async function extendBucketWorm(name, wormId, days, options) { - checkBucketName(name); - const params = this._bucketRequestParams('POST', name, { wormExtend: '', wormId }, options); - const paramlXMLObJ = { - ExtendWormConfiguration: { - RetentionPeriodInDays: days, - }, - }; - params.mime = 'xml'; - params.content = obj2xml(paramlXMLObJ, { headers: true }); - params.successStatuses = [ 200 ]; - const 
result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -} - -exports.extendBucketWorm = extendBucketWorm; diff --git a/lib/common/bucket/getBucketEncryption.js b/lib/common/bucket/getBucketEncryption.js deleted file mode 100644 index 3ac646254..000000000 --- a/lib/common/bucket/getBucketEncryption.js +++ /dev/null @@ -1,21 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketEncryption - * @param {String} bucketName - bucket name - */ - -proto.getBucketEncryption = async function getBucketEncryption(bucketName) { - _checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, 'encryption'); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const encryption = result.data.ApplyServerSideEncryptionByDefault; - return { - encryption, - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketInventory.js b/lib/common/bucket/getBucketInventory.js deleted file mode 100644 index 55c9ad0ab..000000000 --- a/lib/common/bucket/getBucketInventory.js +++ /dev/null @@ -1,23 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { formatInventoryConfig } = require('../utils/formatInventoryConfig'); - -/** - * getBucketInventory - * @param {String} bucketName - bucket name - * @param {String} inventoryId - inventory id - * @param {Object} options - options - */ -async function getBucketInventory(bucketName, inventoryId, options = {}) { - const subres = Object.assign({ inventory: '', inventoryId }, options.subres); - checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, subres, options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - inventory: formatInventoryConfig(result.data), - }; -} -exports.getBucketInventory = getBucketInventory; diff --git a/lib/common/bucket/getBucketLifecycle.js b/lib/common/bucket/getBucketLifecycle.js deleted file mode 100644 index 4293acefb..000000000 --- a/lib/common/bucket/getBucketLifecycle.js +++ /dev/null @@ -1,33 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { formatObjKey } = require('../utils/formatObjKey'); - -const proto = exports; - -proto.getBucketLifecycle = async function getBucketLifecycle(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'lifecycle', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - let rules = result.data.Rule || null; - if (rules) { - if (!Array.isArray(rules)) { - rules = [ rules ]; - } - rules = rules.map(_ => { - if (_.ID) { - _.id = _.ID; - delete _.ID; - } - if (_.Tag && !Array.isArray(_.Tag)) { - _.Tag = [ _.Tag ]; - } - return formatObjKey(_, 'firstLowerCase'); - }); - } - return { - rules, - res: result.res, - }; -}; - diff --git a/lib/common/bucket/getBucketPolicy.js b/lib/common/bucket/getBucketPolicy.js deleted file mode 100644 index 03375c74f..000000000 --- a/lib/common/bucket/getBucketPolicy.js +++ /dev/null @@ -1,28 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketPolicy - * @param {String} bucketName - bucket name - * @param {Object} options - */ 
- -proto.getBucketPolicy = async function getBucketPolicy(bucketName, options = {}) { - _checkBucketName(bucketName); - - const params = this._bucketRequestParams('GET', bucketName, 'policy', options); - - const result = await this.request(params); - params.successStatuses = [ 200 ]; - let policy = null; - - if (result.res.status === 200) { - policy = JSON.parse(result.res.data.toString()); - } - - return { - policy, - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketRequestPayment.js b/lib/common/bucket/getBucketRequestPayment.js deleted file mode 100644 index 189bfac84..000000000 --- a/lib/common/bucket/getBucketRequestPayment.js +++ /dev/null @@ -1,25 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketRequestPayment - * @param {String} bucketName - bucket name - * @param {Object} options - */ - -proto.getBucketRequestPayment = async function getBucketRequestPayment(bucketName, options) { - options = options || {}; - - _checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, 'requestPayment', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - - const result = await this.request(params); - - return { - status: result.status, - res: result.res, - payer: result.data.Payer, - }; -}; diff --git a/lib/common/bucket/getBucketStat.d.ts b/lib/common/bucket/getBucketStat.d.ts deleted file mode 100644 index 0e71a3e77..000000000 --- a/lib/common/bucket/getBucketStat.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -declare type bucketStatRes = { - Storage: string; - ObjectCount: string; - MultipartUploadCount: string; - LiveChannelCount: string; - LastModifiedTime: string; - StandardStorage: string; - StandardObjectCount: string; - InfrequentAccessStorage: string; - InfrequentAccessRealStorage: string; - InfrequentAccessObjectCount: string; - ArchiveStorage: string; - ArchiveRealStorage: string; - ArchiveObjectCount: string; - ColdArchiveStorage: string; - ColdArchiveRealStorage: string; - ColdArchiveObjectCount: string; -}; -export declare function getBucketStat(this: any, name: string, options: {}): Promise<{ - res: any; - stat: bucketStatRes; -}>; -export {}; diff --git a/lib/common/bucket/getBucketStat.js b/lib/common/bucket/getBucketStat.js deleted file mode 100644 index 7d14e3429..000000000 --- a/lib/common/bucket/getBucketStat.js +++ /dev/null @@ -1,16 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); - -async function getBucketStat(name, options) { - name = name || this.options.bucket; - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'stat', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - return { - res: result.res, - stat: result.data, - }; -} - -exports.getBucketStat = getBucketStat; diff --git a/lib/common/bucket/getBucketTags.js b/lib/common/bucket/getBucketTags.js deleted file mode 100644 index 23aa5245e..000000000 --- a/lib/common/bucket/getBucketTags.js +++ /dev/null @@ -1,24 +0,0 @@ -const proto = exports; -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { formatTag } = require('../utils/formatTag'); -/** - * getBucketTags - * @param {String} name - bucket name - * @param {Object} options - * @return {Object} - */ - -proto.getBucketTags = async function getBucketTags(name, options = {}) { - _checkBucketName(name); - const params = 
this._bucketRequestParams('GET', name, 'tagging', options); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - const Tagging = await this.parseXML(result.data); - - - return { - status: result.status, - res: result.res, - tag: formatTag(Tagging), - }; -}; diff --git a/lib/common/bucket/getBucketVersioning.js b/lib/common/bucket/getBucketVersioning.js deleted file mode 100644 index 5c40780b7..000000000 --- a/lib/common/bucket/getBucketVersioning.js +++ /dev/null @@ -1,22 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); - -const proto = exports; -/** - * getBucketVersioning - * @param {String} bucketName - bucket name - */ - -proto.getBucketVersioning = async function getBucketVersioning(bucketName, options) { - _checkBucketName(bucketName); - const params = this._bucketRequestParams('GET', bucketName, 'versioning', options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - - const versionStatus = result.data.Status; - return { - status: result.status, - versionStatus, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketWebsite.js b/lib/common/bucket/getBucketWebsite.js deleted file mode 100644 index 5869ee9d4..000000000 --- a/lib/common/bucket/getBucketWebsite.js +++ /dev/null @@ -1,28 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { isObject } = require('../utils/isObject'); - -const proto = exports; - -proto.getBucketWebsite = async function getBucketWebsite(name, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'website', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - let routingRules = []; - if (result.data.RoutingRules && result.data.RoutingRules.RoutingRule) { - if (isObject(result.data.RoutingRules.RoutingRule)) { - routingRules = [ result.data.RoutingRules.RoutingRule ]; - } else { - routingRules = result.data.RoutingRules.RoutingRule; - } - } - return { - index: (result.data.IndexDocument && result.data.IndexDocument.Suffix) || '', - supportSubDir: (result.data.IndexDocument && result.data.IndexDocument.SupportSubDir) || 'false', - type: (result.data.IndexDocument && result.data.IndexDocument.Type), - routingRules, - error: (result.data.ErrorDocument && result.data.ErrorDocument.Key) || null, - res: result.res, - }; -}; diff --git a/lib/common/bucket/getBucketWorm.js b/lib/common/bucket/getBucketWorm.js deleted file mode 100644 index 504b2798a..000000000 --- a/lib/common/bucket/getBucketWorm.js +++ /dev/null @@ -1,19 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { dataFix } = require('../utils/dataFix'); - -async function getBucketWorm(name, options) { - checkBucketName(name); - const params = this._bucketRequestParams('GET', name, 'worm', options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - dataFix(result.data, { - lowerFirst: true, - rename: { - RetentionPeriodInDays: 'days', - }, - }); - return Object.assign(Object.assign({}, result.data), { res: result.res, status: result.status }); -} - -exports.getBucketWorm = getBucketWorm; diff --git a/lib/common/bucket/index.js b/lib/common/bucket/index.js deleted file mode 100644 index 3e394e079..000000000 --- a/lib/common/bucket/index.js +++ /dev/null @@ -1,34 +0,0 @@ -const merge = require('merge-descriptors'); - 
-const proto = exports; - -merge(proto, require('./getBucketRequestPayment')); -merge(proto, require('./putBucketRequestPayment')); -merge(proto, require('./putBucketEncryption')); -merge(proto, require('./getBucketEncryption')); -merge(proto, require('./deleteBucketEncryption')); -merge(proto, require('./getBucketTags')); -merge(proto, require('./putBucketTags')); -merge(proto, require('./deleteBucketTags')); -merge(proto, require('./putBucket')); -merge(proto, require('./getBucketWebsite')); -merge(proto, require('./putBucketWebsite')); -merge(proto, require('./deleteBucketWebsite')); -merge(proto, require('./getBucketLifecycle')); -merge(proto, require('./putBucketLifecycle')); -merge(proto, require('./deleteBucketLifecycle')); -merge(proto, require('./getBucketPolicy')); -merge(proto, require('./putBucketPolicy')); -merge(proto, require('./deleteBucketPolicy')); -merge(proto, require('./getBucketVersioning')); -merge(proto, require('./putBucketVersioning')); -merge(proto, require('./getBucketInventory')); -merge(proto, require('./deleteBucketInventory')); -merge(proto, require('./listBucketInventory')); -merge(proto, require('./putBucketInventory')); -merge(proto, require('./abortBucketWorm')); -merge(proto, require('./completeBucketWorm')); -merge(proto, require('./extendBucketWorm')); -merge(proto, require('./getBucketWorm')); -merge(proto, require('./initiateBucketWorm')); -merge(proto, require('./getBucketStat')); diff --git a/lib/common/bucket/initiateBucketWorm.d.ts b/lib/common/bucket/initiateBucketWorm.d.ts deleted file mode 100644 index c56703192..000000000 --- a/lib/common/bucket/initiateBucketWorm.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -export declare function initiateBucketWorm(this: any, name: string, days: string, options: any): Promise<{ - res: any; - wormId: any; - status: any; -}>; diff --git a/lib/common/bucket/initiateBucketWorm.js b/lib/common/bucket/initiateBucketWorm.js deleted file mode 100644 index 3fd66e25d..000000000 --- a/lib/common/bucket/initiateBucketWorm.js +++ /dev/null @@ -1,23 +0,0 @@ -const { obj2xml } = require('../utils/obj2xml'); -const { checkBucketName } = require('../utils/checkBucketName'); - -async function initiateBucketWorm(name, days, options) { - checkBucketName(name); - const params = this._bucketRequestParams('POST', name, 'worm', options); - const paramlXMLObJ = { - InitiateWormConfiguration: { - RetentionPeriodInDays: days, - }, - }; - params.mime = 'xml'; - params.content = obj2xml(paramlXMLObJ, { headers: true }); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - wormId: result.res.headers['x-oss-worm-id'], - status: result.status, - }; -} - -exports.initiateBucketWorm = initiateBucketWorm; diff --git a/lib/common/bucket/listBucketInventory.js b/lib/common/bucket/listBucketInventory.js deleted file mode 100644 index fc2c26031..000000000 --- a/lib/common/bucket/listBucketInventory.js +++ /dev/null @@ -1,26 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { formatInventoryConfig } = require('../utils/formatInventoryConfig'); - -/** - * listBucketInventory - * @param {String} bucketName - bucket name - * @param {Object} options - options - */ -async function listBucketInventory(bucketName, options = {}) { - const { continuationToken } = options; - const subres = Object.assign({ inventory: '' }, continuationToken && { 'continuation-token': continuationToken }, options.subres); - checkBucketName(bucketName); - const params = 
this._bucketRequestParams('GET', bucketName, subres, options); - params.successStatuses = [ 200 ]; - params.xmlResponse = true; - const result = await this.request(params); - const { data, res, status } = result; - return { - isTruncated: data.IsTruncated === 'true', - nextContinuationToken: data.NextContinuationToken, - inventoryList: formatInventoryConfig(data.InventoryConfiguration, true), - status, - res, - }; -} -exports.listBucketInventory = listBucketInventory; diff --git a/lib/common/bucket/putBucket.js b/lib/common/bucket/putBucket.js deleted file mode 100644 index 6282ac2b7..000000000 --- a/lib/common/bucket/putBucket.js +++ /dev/null @@ -1,31 +0,0 @@ -const proto = exports; -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -proto.putBucket = async function putBucket(name, options = {}) { - _checkBucketName(name, true); - const params = this._bucketRequestParams('PUT', name, '', options); - - const CreateBucketConfiguration = {}; - const paramlXMLObJ = { - CreateBucketConfiguration, - }; - - const storageClass = options.StorageClass || options.storageClass; - const dataRedundancyType = options.DataRedundancyType || options.dataRedundancyType; - if (storageClass || dataRedundancyType) { - storageClass && (CreateBucketConfiguration.StorageClass = storageClass); - dataRedundancyType && (CreateBucketConfiguration.DataRedundancyType = dataRedundancyType); - params.mime = 'xml'; - params.content = obj2xml(paramlXMLObJ, { headers: true }); - } - const { acl, headers = {} } = options; - acl && (headers['x-oss-acl'] = acl); - params.headers = headers; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - bucket: (result.headers.location && result.headers.location.substring(1)) || null, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketEncryption.js b/lib/common/bucket/putBucketEncryption.js deleted file mode 100644 index 4a0a0aba9..000000000 --- a/lib/common/bucket/putBucketEncryption.js +++ /dev/null @@ -1,35 +0,0 @@ -const proto = exports; -const { checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -/** - * putBucketEncryption - * @param {String} bucketName - bucket name - * @param {Object} options - options - */ -proto.putBucketEncryption = async function putBucketEncryption(bucketName, options) { - options = options || {}; - checkBucketName(bucketName); - const params = this._bucketRequestParams('PUT', bucketName, 'encryption', options); - params.successStatuses = [ 200 ]; - const paramXMLObj = { - ServerSideEncryptionRule: { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: options.SSEAlgorithm, - }, - }, - }; - if (options.KMSMasterKeyID !== undefined) { - paramXMLObj.ServerSideEncryptionRule.ApplyServerSideEncryptionByDefault.KMSMasterKeyID = options.KMSMasterKeyID; - } - const paramXML = obj2xml(paramXMLObj, { - headers: true, - }); - params.mime = 'xml'; - params.content = paramXML; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketInventory.d.ts b/lib/common/bucket/putBucketInventory.d.ts deleted file mode 100644 index 006faca52..000000000 --- a/lib/common/bucket/putBucketInventory.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -declare type Field = 'Size | LastModifiedDate | ETag | StorageClass | IsMultipartUploaded | EncryptionStatus'; -interface Inventory { - id: string; - isEnabled: 
true | false; - prefix?: string; - OSSBucketDestination: { - format: 'CSV'; - accountId: string; - rolename: string; - bucket: string; - prefix?: string; - encryption?: { - 'SSE-OSS': ''; - } | { - 'SSE-KMS': { - keyId: string; - }; - }; - }; - frequency: 'Daily' | 'Weekly'; - includedObjectVersions: 'Current' | 'All'; - optionalFields?: { - field?: Field[]; - }; -} -/** - * putBucketInventory - * @param {String} bucketName - bucket name - * @param {Inventory} inventory - * @param {Object} options - */ -export declare function putBucketInventory(this: any, bucketName: string, inventory: Inventory, options?: any): Promise<{ - status: any; - res: any; -}>; -export {}; diff --git a/lib/common/bucket/putBucketInventory.js b/lib/common/bucket/putBucketInventory.js deleted file mode 100644 index c98c75b44..000000000 --- a/lib/common/bucket/putBucketInventory.js +++ /dev/null @@ -1,56 +0,0 @@ -const { checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -/** - * putBucketInventory - * @param {String} bucketName - bucket name - * @param {Inventory} inventory - inventory - * @param {Object} options - options - */ -async function putBucketInventory(bucketName, inventory, options = {}) { - const subres = Object.assign({ inventory: '', inventoryId: inventory.id }, options.subres); - checkBucketName(bucketName); - const { OSSBucketDestination, optionalFields, includedObjectVersions } = inventory; - const destinationBucketPrefix = 'acs:oss:::'; - const rolePrefix = `acs:ram::${OSSBucketDestination.accountId}:role/`; - const paramXMLObj = { - InventoryConfiguration: { - Id: inventory.id, - IsEnabled: inventory.isEnabled, - Filter: { - Prefix: inventory.prefix || '', - }, - Destination: { - OSSBucketDestination: { - Format: OSSBucketDestination.format, - AccountId: OSSBucketDestination.accountId, - RoleArn: `${rolePrefix}${OSSBucketDestination.rolename}`, - Bucket: `${destinationBucketPrefix}${OSSBucketDestination.bucket}`, - Prefix: OSSBucketDestination.prefix || '', - Encryption: OSSBucketDestination.encryption || '', - }, - }, - Schedule: { - Frequency: inventory.frequency, - }, - IncludedObjectVersions: includedObjectVersions, - OptionalFields: { - Field: (optionalFields === null || optionalFields === void 0 ? 
void 0 : optionalFields.field) || [], - }, - }, - }; - const paramXML = obj2xml(paramXMLObj, { - headers: true, - firstUpperCase: true, - }); - const params = this._bucketRequestParams('PUT', bucketName, subres, options); - params.successStatuses = [ 200 ]; - params.mime = 'xml'; - params.content = paramXML; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -} -exports.putBucketInventory = putBucketInventory; diff --git a/lib/common/bucket/putBucketLifecycle.js b/lib/common/bucket/putBucketLifecycle.js deleted file mode 100644 index dc973a12f..000000000 --- a/lib/common/bucket/putBucketLifecycle.js +++ /dev/null @@ -1,125 +0,0 @@ -/* eslint-disable no-use-before-define */ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { deepCopy } = require('../utils/deepCopy'); -const { isObject } = require('../utils/isObject'); -const { obj2xml } = require('../utils/obj2xml'); -const { checkObjectTag } = require('../utils/checkObjectTag'); -const { getStrBytesCount } = require('../utils/getStrBytesCount'); - -const proto = exports; - - -proto.putBucketLifecycle = async function putBucketLifecycle(name, rules, options) { - _checkBucketName(name); - - if (!Array.isArray(rules)) { - throw new Error('rules must be Array'); - } - - const params = this._bucketRequestParams('PUT', name, 'lifecycle', options); - const Rule = []; - const paramXMLObj = { - LifecycleConfiguration: { - Rule, - }, - }; - - rules.forEach(_ => { - defaultDaysAndDate2Expiration(_); // todo delete, 兼容旧版本 - checkRule(_); - if (_.id) { - _.ID = _.id; - delete _.id; - } - Rule.push(_); - }); - - const paramXML = obj2xml(paramXMLObj, { - headers: true, - firstUpperCase: true, - }); - - params.content = paramXML; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; - -// todo delete, 兼容旧版本 -function defaultDaysAndDate2Expiration(obj) { - if (obj.days) { - obj.expiration = { - days: obj.days, - }; - } - if (obj.date) { - obj.expiration = { - createdBeforeDate: obj.date, - }; - } -} - -function checkDaysAndDate(obj, key) { - const { days, createdBeforeDate } = obj; - if (!days && !createdBeforeDate) { - throw new Error(`${key} must includes days or createdBeforeDate`); - } else if (days && !/^[1-9][0-9]*$/.test(days)) { - throw new Error('days must be a positive integer'); - } else if (createdBeforeDate && !/\d{4}-\d{2}-\d{2}T00:00:00.000Z/.test(createdBeforeDate)) { - throw new Error('createdBeforeDate must be date and conform to iso8601 format'); - } -} - -function handleCheckTag(tag) { - if (!Array.isArray(tag) && !isObject(tag)) { - throw new Error('tag must be Object or Array'); - } - tag = isObject(tag) ? 
[ tag ] : tag; - const tagObj = {}; - const tagClone = deepCopy(tag); - tagClone.forEach(v => { - tagObj[v.key] = v.value; - }); - - checkObjectTag(tagObj); -} - -function checkRule(rule) { - if (rule.id && getStrBytesCount(rule.id) > 255) throw new Error('ID is composed of 255 bytes at most'); - - if (rule.prefix === undefined) throw new Error('Rule must includes prefix'); - - if (![ 'Enabled', 'Disabled' ].includes(rule.status)) throw new Error('Status must be Enabled or Disabled'); - - if (rule.transition) { - if (![ 'IA', 'Archive' ].includes(rule.transition.storageClass)) throw new Error('StorageClass must be IA or Archive'); - checkDaysAndDate(rule.transition, 'Transition'); - } - - if (rule.expiration) { - if (!rule.expiration.expiredObjectDeleteMarker) { - checkDaysAndDate(rule.expiration, 'Expiration'); - } else if (rule.expiration.days || rule.expiration.createdBeforeDate) { - throw new Error('expiredObjectDeleteMarker cannot be used with days or createdBeforeDate'); - } - } - - if (rule.abortMultipartUpload) { - checkDaysAndDate(rule.abortMultipartUpload, 'AbortMultipartUpload'); - } - - if (!rule.expiration && !rule.abortMultipartUpload && !rule.transition && !rule.noncurrentVersionTransition) { - throw new Error('Rule must includes expiration or abortMultipartUpload or transition or noncurrentVersionTransition'); - } - - if (rule.tag) { - if (rule.abortMultipartUpload) { - throw new Error('Tag cannot be used with abortMultipartUpload'); - } - handleCheckTag(rule.tag); - } -} - diff --git a/lib/common/bucket/putBucketPolicy.js b/lib/common/bucket/putBucketPolicy.js deleted file mode 100644 index eb40fe076..000000000 --- a/lib/common/bucket/putBucketPolicy.js +++ /dev/null @@ -1,27 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { policy2Str } = require('../utils/policy2Str'); -const { isObject } = require('../utils/isObject'); - -const proto = exports; -/** - * putBucketPolicy - * @param {String} bucketName - bucket name - * @param {Object} policy - bucket policy - * @param {Object} options - */ - -proto.putBucketPolicy = async function putBucketPolicy(bucketName, policy, options = {}) { - _checkBucketName(bucketName); - - if (!isObject(policy)) { - throw new Error('policy is not Object'); - } - const params = this._bucketRequestParams('PUT', bucketName, 'policy', options); - params.content = policy2Str(policy); - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketRequestPayment.js b/lib/common/bucket/putBucketRequestPayment.js deleted file mode 100644 index 0262ae3e4..000000000 --- a/lib/common/bucket/putBucketRequestPayment.js +++ /dev/null @@ -1,49 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; -/** - * putBucketRequestPayment - * @param {String} bucketName - * @param {String} payer - * @param {Object} options - */ -const payerAll = [ 'BucketOwner', 'Requester' ]; - -proto.putBucketRequestPayment = async function putBucketRequestPayment( - bucketName, - payer, - options -) { - options = options || {}; - if (!payer || payerAll.indexOf(payer) < 0) { - throw new Error('payer must be BucketOwner or Requester'); - } - - _checkBucketName(bucketName); - const params = this._bucketRequestParams( - 'PUT', - bucketName, - 'requestPayment', - options - ); - params.successStatuses = [ 200 
]; - - const paramXMLObj = { - RequestPaymentConfiguration: { - Payer: payer, - }, - }; - const paramXML = obj2xml(paramXMLObj, { - headers: true, - }); - - params.mime = 'xml'; - params.content = paramXML; - - const result = await this.request(params); - return { - status: result.status, - res: result.res, - }; -}; diff --git a/lib/common/bucket/putBucketTags.js b/lib/common/bucket/putBucketTags.js deleted file mode 100644 index 3833aad5d..000000000 --- a/lib/common/bucket/putBucketTags.js +++ /dev/null @@ -1,39 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); -const { checkBucketTag } = require('../utils/checkBucketTag'); - -const proto = exports; -/** - * putBucketTags - * @param {String} name - bucket name - * @param {Object} tag - bucket tag, eg: `{a: "1", b: "2"}` - * @param {Object} options - */ - -proto.putBucketTags = async function putBucketTags(name, tag, options = {}) { - _checkBucketName(name); - checkBucketTag(tag); - const params = this._bucketRequestParams('PUT', name, 'tagging', options); - params.successStatuses = [ 200 ]; - tag = Object.keys(tag).map(key => ({ - Key: key, - Value: tag[key], - })); - - const paramXMLObj = { - Tagging: { - TagSet: { - Tag: tag, - }, - }, - }; - - params.mime = 'xml'; - params.content = obj2xml(paramXMLObj); - - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -}; diff --git a/lib/common/bucket/putBucketVersioning.js b/lib/common/bucket/putBucketVersioning.js deleted file mode 100644 index fa19a8397..000000000 --- a/lib/common/bucket/putBucketVersioning.js +++ /dev/null @@ -1,35 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; -/** - * putBucketVersioning - * @param {String} name - bucket name - * @param {String} status - * @param {Object} options - */ - -proto.putBucketVersioning = async function putBucketVersioning(name, status, options = {}) { - _checkBucketName(name); - if (![ 'Enabled', 'Suspended' ].includes(status)) { - throw new Error('status must be Enabled or Suspended'); - } - const params = this._bucketRequestParams('PUT', name, 'versioning', options); - - const paramXMLObj = { - VersioningConfiguration: { - Status: status, - }, - }; - - params.mime = 'xml'; - params.content = obj2xml(paramXMLObj, { - headers: true, - }); - - const result = await this.request(params); - return { - res: result.res, - status: result.status, - }; -}; diff --git a/lib/common/bucket/putBucketWebsite.js b/lib/common/bucket/putBucketWebsite.js deleted file mode 100644 index 93445216b..000000000 --- a/lib/common/bucket/putBucketWebsite.js +++ /dev/null @@ -1,49 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const { obj2xml } = require('../utils/obj2xml'); - -const proto = exports; -proto.putBucketWebsite = async function putBucketWebsite(name, config = {}, options) { - _checkBucketName(name); - const params = this._bucketRequestParams('PUT', name, 'website', options); - const IndexDocument = { - Suffix: config.index || 'index.html', - }; - const WebsiteConfiguration = { - IndexDocument, - }; - let website = { - WebsiteConfiguration, - }; - - if (config.supportSubDir) { - IndexDocument.SupportSubDir = config.supportSubDir; - } - - if (config.type) { - IndexDocument.Type = config.type; - } - - if (config.error) { - WebsiteConfiguration.ErrorDocument 
= { - Key: config.error, - }; - } - - if (config.routingRules !== undefined) { - if (!Array.isArray(config.routingRules)) { - throw new Error('RoutingRules must be Array'); - } - WebsiteConfiguration.RoutingRules = { - RoutingRule: config.routingRules, - }; - } - - website = obj2xml(website); - params.content = website; - params.mime = 'xml'; - params.successStatuses = [ 200 ]; - const result = await this.request(params); - return { - res: result.res, - }; -}; diff --git a/lib/common/client/getReqUrl.js b/lib/common/client/getReqUrl.js deleted file mode 100644 index c43969265..000000000 --- a/lib/common/client/getReqUrl.js +++ /dev/null @@ -1,45 +0,0 @@ -const copy = require('copy-to'); -const { format: urlformat } = require('url'); -const merge = require('merge-descriptors'); -const isIP_1 = require('../utils/isIP'); -const { checkConfigValid } = require('../utils/checkConfigValid'); - -function getReqUrl(params) { - const ep = {}; - const isCname = this.options.cname; - checkConfigValid(this.options.endpoint, 'endpoint'); - copy(this.options.endpoint, false).to(ep); - if (params.bucket && !isCname && !isIP_1.isIP(ep.hostname) && !this.options.sldEnable) { - ep.host = `${params.bucket}.${ep.host}`; - } - let resourcePath = '/'; - if (params.bucket && (this.options.sldEnable)) { - resourcePath += `${params.bucket}/`; - } - if (params.object) { - // Preserve '/' in result url - resourcePath += this._escape(params.object).replace(/\+/g, '%2B'); - } - ep.pathname = resourcePath; - const query = {}; - if (params.query) { - merge(query, params.query); - } - if (params.subres) { - let subresAsQuery = {}; - if (typeof params.subres === 'string') { - subresAsQuery[params.subres] = ''; - } else if (Array.isArray(params.subres)) { - params.subres.forEach(k => { - subresAsQuery[k] = ''; - }); - } else { - subresAsQuery = params.subres; - } - merge(query, subresAsQuery); - } - ep.query = query; - return urlformat(ep); -} - -exports.getReqUrl = getReqUrl; diff --git a/lib/common/image/index.js b/lib/common/image/index.js deleted file mode 100644 index 1e319c289..000000000 --- a/lib/common/image/index.js +++ /dev/null @@ -1,5 +0,0 @@ -const merge = require('merge-descriptors'); - -const proto = exports; - -merge(proto, require('./processObjectSave')); diff --git a/lib/common/image/processObjectSave.js b/lib/common/image/processObjectSave.js deleted file mode 100644 index 904cb637b..000000000 --- a/lib/common/image/processObjectSave.js +++ /dev/null @@ -1,35 +0,0 @@ -const { checkBucketName: _checkBucketName } = require('../utils/checkBucketName'); -const querystring = require('querystring'); -const { base64encode } = require('utility'); - -const proto = exports; - -proto.processObjectSave = async function processObjectSave(sourceObject, targetObject, process, targetBucket) { - targetObject = this.#objectName(targetObject); - const params = this.#objectRequestParams('POST', sourceObject, { - subres: 'x-oss-process', - }); - - const bucketParam = targetBucket ? 
`,b_${base64encode(targetBucket)}` : ''; - targetObject = base64encode(targetObject); - - const content = { - 'x-oss-process': `${process}|sys/saveas,o_${targetObject}${bucketParam}`, - }; - params.content = querystring.stringify(content); - - const result = await this.request(params); - return { - res: result.res, - status: result.res.status, - }; -}; - -function checkArgs(name, key) { - if (!name) { - throw new Error(`${key} is required`); - } - if (typeof name !== 'string') { - throw new Error(`${key} must be String`); - } -} diff --git a/lib/common/multipart-copy.js b/lib/common/multipart-copy.js deleted file mode 100644 index c630b2d6d..000000000 --- a/lib/common/multipart-copy.js +++ /dev/null @@ -1,233 +0,0 @@ -/* eslint-disable no-async-promise-executor */ - -const debug = require('util').debuglog('oss-client:multipart-copy'); -const copy = require('copy-to'); - -const proto = exports; - - -/** - * Upload a part copy in a multipart from the source bucket/object - * used with initMultipartUpload and completeMultipartUpload. - * @param {String} name copy object name - * @param {String} uploadId the upload id - * @param {Number} partNo the part number - * @param {String} range like 0-102400 part size need to copy - * @param {Object} sourceData - * {String} sourceData.sourceKey the source object name - * {String} sourceData.sourceBucketName the source bucket name - * @param {Object} options - */ -/* eslint max-len: [0] */ -proto.uploadPartCopy = async function uploadPartCopy(name, uploadId, partNo, range, sourceData, options = {}) { - options.headers = options.headers || {}; - const versionId = options.versionId || (options.subres && options.subres.versionId) || null; - let copySource; - if (versionId) { - copySource = `/${sourceData.sourceBucketName}/${encodeURIComponent(sourceData.sourceKey)}?versionId=${versionId}`; - } else { - copySource = `/${sourceData.sourceBucketName}/${encodeURIComponent(sourceData.sourceKey)}`; - } - - options.headers['x-oss-copy-source'] = copySource; - if (range) { - options.headers['x-oss-copy-source-range'] = `bytes=${range}`; - } - - options.subres = { - partNumber: partNo, - uploadId, - }; - const params = this._objectRequestParams('PUT', name, options); - params.mime = options.mime; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - name, - etag: result.res.headers.etag, - res: result.res, - }; -}; - -/** - * @param {String} name copy object name - * @param {Object} sourceData - * {String} sourceData.sourceKey the source object name - * {String} sourceData.sourceBucketName the source bucket name - * {Number} sourceData.startOffset data copy start byte offset, e.g: 0 - * {Number} sourceData.endOffset data copy end byte offset, e.g: 102400 - * @param {Object} options - * {Number} options.partSize - */ -proto.multipartUploadCopy = async function multipartUploadCopy(name, sourceData, options = {}) { - this.resetCancelFlag(); - const { versionId = null } = options; - const metaOpt = { - versionId, - }; - const objectMeta = await this._getObjectMeta(sourceData.sourceBucketName, sourceData.sourceKey, metaOpt); - const fileSize = objectMeta.res.headers['content-length']; - sourceData.startOffset = sourceData.startOffset || 0; - sourceData.endOffset = sourceData.endOffset || fileSize; - - if (options.checkpoint && options.checkpoint.uploadId) { - return await this._resumeMultipartCopy(options.checkpoint, sourceData, options); - } - - const minPartSize = 100 * 1024; - - const copySize = sourceData.endOffset - 
sourceData.startOffset; - if (copySize < minPartSize) { - throw new Error(`copySize must not be smaller than ${minPartSize}`); - } - - if (options.partSize && options.partSize < minPartSize) { - throw new Error(`partSize must not be smaller than ${minPartSize}`); - } - - const init = await this.initMultipartUpload(name, options); - const { uploadId } = init; - const partSize = this._getPartSize(copySize, options.partSize); - - const checkpoint = { - name, - copySize, - partSize, - uploadId, - doneParts: [], - }; - - if (options && options.progress) { - await options.progress(0, checkpoint, init.res); - } - - return await this._resumeMultipartCopy(checkpoint, sourceData, options); -}; - -/* - * Resume multipart copy from checkpoint. The checkpoint will be - * updated after each successful part copy. - * @param {Object} checkpoint the checkpoint - * @param {Object} options - */ -proto._resumeMultipartCopy = async function _resumeMultipartCopy(checkpoint, sourceData, options) { - if (this.isCancel()) { - throw this._makeCancelEvent(); - } - const { versionId = null } = options; - const metaOpt = { - versionId, - }; - const { - copySize, partSize, uploadId, doneParts, name, - } = checkpoint; - - const partOffs = this._divideMultipartCopyParts(copySize, partSize, sourceData.startOffset); - const numParts = partOffs.length; - - const uploadPartCopyOptions = { - headers: {}, - }; - - if (options.copyheaders) { - copy(options.copyheaders).to(uploadPartCopyOptions.headers); - } - if (versionId) { - copy(metaOpt).to(uploadPartCopyOptions); - } - - const uploadPartJob = function uploadPartJob(self, partNo, source) { - return new Promise(async (resolve, reject) => { - try { - if (!self.isCancel()) { - const pi = partOffs[partNo - 1]; - const range = `${pi.start}-${pi.end - 1}`; - - let result; - try { - result = await self.uploadPartCopy(name, uploadId, partNo, range, source, uploadPartCopyOptions); - } catch (error) { - if (error.status === 404) { - throw self._makeAbortEvent(); - } - throw error; - } - if (!self.isCancel()) { - debug(`content-range ${result.res.headers['content-range']}`); - doneParts.push({ - number: partNo, - etag: result.res.headers.etag, - }); - checkpoint.doneParts = doneParts; - - if (options && options.progress) { - await options.progress(doneParts.length / numParts, checkpoint, result.res); - } - } - } - resolve(); - } catch (err) { - err.partNum = partNo; - reject(err); - } - }); - }; - - const all = Array.from(new Array(numParts), (x, i) => i + 1); - const done = doneParts.map(p => p.number); - const todo = all.filter(p => done.indexOf(p) < 0); - const defaultParallel = 5; - const parallel = options.parallel || defaultParallel; - - // upload in parallel - const errors = await this._parallelNode(todo, parallel, uploadPartJob, sourceData); - - const abortEvent = errors.find(err => err.name === 'abort'); - if (abortEvent) throw abortEvent; - - if (this.isCancel()) { - throw this._makeCancelEvent(); - } - - // check errors after all jobs are completed - if (errors && errors.length > 0) { - const err = errors[0]; - err.message = `Failed to copy some parts with error: ${err.toString()} part_num: ${err.partNum}`; - throw err; - } - - return await this.completeMultipartUpload(name, uploadId, doneParts, options); -}; - -proto._divideMultipartCopyParts = function _divideMultipartCopyParts(fileSize, partSize, startOffset) { - const numParts = Math.ceil(fileSize / partSize); - - const partOffs = []; - for (let i = 0; i < numParts; i++) { - const start = (partSize * i) + startOffset; - 
const end = Math.min(start + partSize, fileSize + startOffset); - - partOffs.push({ - start, - end, - }); - } - - return partOffs; -}; - -/** - * Get Object Meta - * @param {String} bucket bucket name - * @param {String} name object name - * @param {Object} options options - */ -proto._getObjectMeta = async function _getObjectMeta(bucket, name, options) { - const currentBucket = this.getBucket(); - this.setBucket(bucket); - const data = await this.head(name, options); - this.setBucket(currentBucket); - return data; -}; diff --git a/lib/common/multipart.js b/lib/common/multipart.js deleted file mode 100644 index 8ceb8f443..000000000 --- a/lib/common/multipart.js +++ /dev/null @@ -1,270 +0,0 @@ -const copy = require('copy-to'); -const callback = require('./callback'); -const { deepCopyWith } = require('./utils/deepCopy'); -const { omit } = require('./utils/omit'); - -const proto = exports; - -/** - * List the on-going multipart uploads - * https://help.aliyun.com/document_detail/31997.html - * @param {Object} query query - * @param {Object} options options - * @return {Array} the multipart uploads - */ -proto.listUploads = async function listUploads(query, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.subres = 'uploads'; - const params = this._objectRequestParams('GET', '', opt); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - let uploads = result.data.Upload || []; - if (!Array.isArray(uploads)) { - uploads = [ uploads ]; - } - uploads = uploads.map(up => ({ - name: up.Key, - uploadId: up.UploadId, - initiated: up.Initiated, - })); - - return { - res: result.res, - uploads, - bucket: result.data.Bucket, - nextKeyMarker: result.data.NextKeyMarker, - nextUploadIdMarker: result.data.NextUploadIdMarker, - isTruncated: result.data.IsTruncated === 'true', - }; -}; - -/** - * List the done uploadPart parts - * @param {String} name object name - * @param {String} uploadId multipart upload id - * @param {Object} query query - * {Number} query.max-parts The maximum part number in the response of the OSS. Default value: 1000 - * {Number} query.part-number-marker Starting position of a specific list. - * {String} query.encoding-type Specify the encoding of the returned content and the encoding type. 
- * @param {Object} options options - * @return {Object} result - */ -proto.listParts = async function listParts(name, uploadId, query, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.subres = { - uploadId, - }; - const params = this._objectRequestParams('GET', name, opt); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - uploadId: result.data.UploadId, - bucket: result.data.Bucket, - name: result.data.Key, - partNumberMarker: result.data.PartNumberMarker, - nextPartNumberMarker: result.data.NextPartNumberMarker, - maxParts: result.data.MaxParts, - isTruncated: result.data.IsTruncated, - parts: result.data.Part || [], - }; -}; - -/** - * Abort a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Object} options options - */ -proto.abortMultipartUpload = async function abortMultipartUpload(name, uploadId, options) { - this._stop(); - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.subres = { uploadId }; - const params = this._objectRequestParams('DELETE', name, opt); - params.successStatuses = [ 204 ]; - - const result = await this.request(params); - return { - res: result.res, - }; -}; - -/** - * Initiate a multipart upload transaction - * @param {String} name the object name - * @param {Object} options options - * @return {String} upload id - */ -proto.initMultipartUpload = async function initMultipartUpload(name, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.headers = opt.headers || {}; - this._convertMetaToHeaders(options.meta, opt.headers); - - opt.subres = 'uploads'; - const params = this._objectRequestParams('POST', name, opt); - params.mime = options.mime; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - bucket: result.data.Bucket, - name: result.data.Key, - uploadId: result.data.UploadId, - }; -}; - -/** - * Upload a part in a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Integer} partNo the part number - * @param {File} file upload File, whole File - * @param {Integer} start part start bytes e.g: 102400 - * @param {Integer} end part end bytes e.g: 204800 - * @param {Object} options options - */ -proto.uploadPart = async function uploadPart(name, uploadId, partNo, file, start, end, options) { - const data = { - size: end - start, - }; - const isBrowserEnv = process && process.browser; - isBrowserEnv - ? 
(data.content = await this._createBuffer(file, start, end)) - : (data.stream = await this._createStream(file, start, end)); - return await this._uploadPart(name, uploadId, partNo, data, options); -}; - -/** - * Complete a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Array} parts the uploaded parts, each in the structure: - * {Integer} number partNo - * {String} etag part etag uploadPartCopy result.res.header.etag - * @param {Object} options - * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 - * {String} options.callback.url the OSS sends a callback request to this URL - * {String} options.callback.host The host header value for initiating callback requests - * {String} options.callback.body The value of the request body when a callback is initiated - * {String} options.callback.contentType The Content-Type of the callback requests initiatiated - * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: - * customValue = { - * key1: 'value1', - * key2: 'value2' - * } - */ -proto.completeMultipartUpload = async function completeMultipartUpload(name, uploadId, parts, options) { - const completeParts = parts - .concat() - .sort((a, b) => a.number - b.number) - .filter((item, index, arr) => !index || item.number !== arr[index - 1].number); - let xml = '\n\n'; - for (let i = 0; i < completeParts.length; i++) { - const p = completeParts[i]; - xml += '\n'; - xml += `${p.number}\n`; - xml += `${p.etag}\n`; - xml += '\n'; - } - xml += ''; - - options = options || {}; - let opt = {}; - opt = deepCopyWith(options, _ => { - if (Buffer.isBuffer(_)) return null; - }); - opt.subres = { uploadId }; - opt.headers = omit(opt.headers, [ 'x-oss-server-side-encryption', 'x-oss-storage-class' ]); - - const params = this._objectRequestParams('POST', name, opt); - callback.encodeCallback(params, opt); - params.mime = 'xml'; - params.content = xml; - - if (!(params.headers && params.headers['x-oss-callback'])) { - params.xmlResponse = true; - } - params.successStatuses = [ 200 ]; - const result = await this.request(params); - - if (options.progress) { - await options.progress(1, null, result.res); - } - - const ret = { - res: result.res, - bucket: params.bucket, - name, - etag: result.res.headers.etag, - data: result.data, - }; - - if (params.headers && params.headers['x-oss-callback']) { - ret.data = JSON.parse(result.data.toString()); - } - - return ret; -}; - -/** - * Upload a part in a multipart upload transaction - * @param {String} name the object name - * @param {String} uploadId the upload id - * @param {Integer} partNo the part number - * @param {Object} data the body data - * @param {Object} options options - */ -proto._uploadPart = async function _uploadPart(name, uploadId, partNo, data, options) { - options = options || {}; - const opt = {}; - copy(options).to(opt); - opt.headers = { - 'Content-Length': data.size, - }; - - opt.subres = { - partNumber: partNo, - uploadId, - }; - const params = this._objectRequestParams('PUT', name, opt); - params.mime = opt.mime; - const isBrowserEnv = process && process.browser; - isBrowserEnv ? 
(params.content = data.content) : (params.stream = data.stream); - params.successStatuses = [ 200 ]; - params.disabledMD5 = options.disabledMD5; - - const result = await this.request(params); - - if (!result.res.headers.etag) { - throw new Error( - 'Please set the etag of expose-headers in OSS \n https://help.aliyun.com/document_detail/32069.html' - ); - } - if (data.stream) { - data.stream = null; - params.stream = null; - } - return { - name, - etag: result.res.headers.etag, - res: result.res, - }; -}; diff --git a/lib/common/parallel.js b/lib/common/parallel.js deleted file mode 100644 index 1b249f381..000000000 --- a/lib/common/parallel.js +++ /dev/null @@ -1,173 +0,0 @@ -const proto = exports; - -proto._parallelNode = async function _parallelNode(todo, parallel, fn, sourceData) { - const that = this; - // upload in parallel - const jobErr = []; - let jobs = []; - const tempBatch = todo.length / parallel; - const remainder = todo.length % parallel; - const batch = remainder === 0 ? tempBatch : ((todo.length - remainder) / parallel) + 1; - let taskIndex = 1; - for (let i = 0; i < todo.length; i++) { - if (that.isCancel()) { - break; - } - - if (sourceData) { - jobs.push(fn(that, todo[i], sourceData)); - } else { - jobs.push(fn(that, todo[i])); - } - - if (jobs.length === parallel || (taskIndex === batch && i === (todo.length - 1))) { - try { - taskIndex += 1; - /* eslint no-await-in-loop: [0] */ - await Promise.all(jobs); - } catch (err) { - jobErr.push(err); - } - jobs = []; - } - } - - return jobErr; -}; - -proto._parallel = function _parallel(todo, parallel, jobPromise) { - const that = this; - return new Promise(resolve => { - const _jobErr = []; - if (parallel <= 0 || !todo) { - resolve(_jobErr); - return; - } - - function onlyOnce(fn) { - return function(...args) { - if (fn === null) throw new Error('Callback was already called.'); - const callFn = fn; - fn = null; - callFn.apply(this, args); - }; - } - - function createArrayIterator(coll) { - let i = -1; - const len = coll.length; - return function next() { - return (++i < len && !that.isCancel()) ? 
{ value: coll[i], key: i } : null; - }; - } - - const nextElem = createArrayIterator(todo); - let done = false; - let running = 0; - let looping = false; - - function iterateeCallback(err) { - running -= 1; - if (err) { - done = true; - _jobErr.push(err); - resolve(_jobErr); - } else if (done && running <= 0) { - done = true; - resolve(_jobErr); - } else if (!looping) { - /* eslint no-use-before-define: [0] */ - if (that.isCancel()) { - resolve(_jobErr); - } else { - replenish(); - } - } - } - - function iteratee(value, callback) { - jobPromise(value).then(result => { - callback(null, result); - }).catch(err => { - callback(err); - }); - } - - function replenish() { - looping = true; - while (running < parallel && !done && !that.isCancel()) { - const elem = nextElem(); - if (elem === null || _jobErr.length > 0) { - done = true; - if (running <= 0) { - resolve(_jobErr); - } - return; - } - running += 1; - iteratee(elem.value, onlyOnce(iterateeCallback)); - } - looping = false; - } - - replenish(); - }); -}; - -/** - * cancel operation, now can use with multipartUpload - * @param {Object} abort - * {String} anort.name object key - * {String} anort.uploadId upload id - * {String} anort.options timeout - */ -proto.cancel = function cancel(abort) { - this.options.cancelFlag = true; - - if (Array.isArray(this.multipartUploadStreams)) { - this.multipartUploadStreams.forEach(_ => { - if (_.destroyed === false) { - const err = { - name: 'cancel', - message: 'cancel', - }; - _.destroy(err); - } - }); - } - this.multipartUploadStreams = []; - if (abort) { - this.abortMultipartUpload(abort.name, abort.uploadId, abort.options); - } -}; - -proto.isCancel = function isCancel() { - return this.options.cancelFlag; -}; - -proto.resetCancelFlag = function resetCancelFlag() { - this.options.cancelFlag = false; -}; - -proto._stop = function _stop() { - this.options.cancelFlag = true; -}; - -// cancel is not error , so create an object -proto._makeCancelEvent = function _makeCancelEvent() { - const cancelEvent = { - status: 0, - name: 'cancel', - }; - return cancelEvent; -}; - -// abort is not error , so create an object -proto._makeAbortEvent = function _makeAbortEvent() { - const abortEvent = { - status: 0, - name: 'abort', - message: 'upload task has been abort', - }; - return abortEvent; -}; diff --git a/lib/common/utils/checkBucketTag.js b/lib/common/utils/checkBucketTag.js deleted file mode 100644 index 773bebc5e..000000000 --- a/lib/common/utils/checkBucketTag.js +++ /dev/null @@ -1,48 +0,0 @@ -const { checkValid } = require('./checkValid'); -const { isObject } = require('./isObject'); - -const commonRules = [ - { - validator: value => { - if (typeof value !== 'string') { - throw new Error('the key and value of the tag must be String'); - } - }, - }, -]; -const rules = { - key: [ - ...commonRules, - { - pattern: /^.{1,64}$/, - msg: 'tag key can be a maximum of 64 bytes in length', - }, - { - pattern: /^(?!https*:\/\/|Aliyun)/, - msg: 'tag key can not startsWith: http://, https://, Aliyun', - }, - ], - value: [ - ...commonRules, - { - pattern: /^.{0,128}$/, - msg: 'tag value can be a maximum of 128 bytes in length', - }, - ], -}; - -exports.checkBucketTag = tag => { - if (!isObject(tag)) { - throw new Error('bucket tag must be Object'); - } - const entries = Object.entries(tag); - if (entries.length > 20) { - throw new Error('maximum of 20 tags for a bucket'); - } - const rulesIndexKey = [ 'key', 'value' ]; - entries.forEach(keyValue => { - keyValue.forEach((item, index) => { - checkValid(item, 
rules[rulesIndexKey[index]]); - }); - }); -}; diff --git a/lib/common/utils/checkConfigValid.d.ts b/lib/common/utils/checkConfigValid.d.ts deleted file mode 100644 index d1bcb3fc0..000000000 --- a/lib/common/utils/checkConfigValid.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const checkConfigValid: (conf: any, key: 'endpoint' | 'region') => void; diff --git a/lib/common/utils/checkConfigValid.js b/lib/common/utils/checkConfigValid.js deleted file mode 100644 index 684cb1525..000000000 --- a/lib/common/utils/checkConfigValid.js +++ /dev/null @@ -1,27 +0,0 @@ -const checkConfigMap = { - endpoint: checkEndpoint, - region: /^[a-zA-Z0-9\-_]+$/, -}; - -function checkEndpoint(endpoint) { - if (typeof endpoint === 'string') { - return /^[a-zA-Z0-9._:/-]+$/.test(endpoint); - } else if (endpoint.host) { - return /^[a-zA-Z0-9._:/-]+$/.test(endpoint.host); - } - return false; -} - -exports.checkConfigValid = (conf, key) => { - if (checkConfigMap[key]) { - let isConfigValid = true; - if (checkConfigMap[key] instanceof Function) { - isConfigValid = checkConfigMap[key](conf); - } else { - isConfigValid = checkConfigMap[key].test(conf); - } - if (!isConfigValid) { - throw new Error(`The ${key} must be conform to the specifications`); - } - } -}; diff --git a/lib/common/utils/checkObjectTag.js b/lib/common/utils/checkObjectTag.js deleted file mode 100644 index 5fd30b911..000000000 --- a/lib/common/utils/checkObjectTag.js +++ /dev/null @@ -1,51 +0,0 @@ -const { checkValid } = require('./checkValid'); -const { isObject } = require('./isObject'); - -const commonRules = [ - { - validator: value => { - if (typeof value !== 'string') { - throw new Error('the key and value of the tag must be String'); - } - }, - }, - { - pattern: /^[a-zA-Z0-9 +-=._:/]+$/, - msg: 'tag can contain letters, numbers, spaces, and the following symbols: plus sign (+), hyphen (-), equal sign (=), period (.), underscore (_), colon (:), and forward slash (/)', - }, -]; - -const rules = { - key: [ - ...commonRules, - { - pattern: /^.{1,128}$/, - msg: 'tag key can be a maximum of 128 bytes in length', - }, - ], - value: [ - ...commonRules, - { - pattern: /^.{0,256}$/, - msg: 'tag value can be a maximum of 256 bytes in length', - }, - ], -}; - -function checkObjectTag(tag) { - if (!isObject(tag)) { - throw new Error('tag must be Object'); - } - const entries = Object.entries(tag); - if (entries.length > 10) { - throw new Error('maximum of 10 tags for a object'); - } - const rulesIndexKey = [ 'key', 'value' ]; - entries.forEach(keyValue => { - keyValue.forEach((item, index) => { - checkValid(item, rules[rulesIndexKey[index]]); - }); - }); -} - -exports.checkObjectTag = checkObjectTag; diff --git a/lib/common/utils/checkValid.js b/lib/common/utils/checkValid.js deleted file mode 100644 index 64a8497d4..000000000 --- a/lib/common/utils/checkValid.js +++ /dev/null @@ -1,11 +0,0 @@ -function checkValid(_value, _rules) { - _rules.forEach(rule => { - if (rule.validator) { - rule.validator(_value); - } else if (rule.pattern && !rule.pattern.test(_value)) { - throw new Error(rule.msg); - } - }); -} - -exports.checkValid = checkValid; diff --git a/lib/common/utils/createRequest.d.ts b/lib/common/utils/createRequest.d.ts deleted file mode 100644 index 666438c19..000000000 --- a/lib/common/utils/createRequest.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -interface ReqParams { - [propName: string]: any; -} -export declare function createRequest(this: any, params: any): { - url: any; - params: ReqParams; -}; -export {}; diff --git 
a/lib/common/utils/createRequest.js b/lib/common/utils/createRequest.js deleted file mode 100644 index e16be1381..000000000 --- a/lib/common/utils/createRequest.js +++ /dev/null @@ -1,92 +0,0 @@ -const debug = require('util').debuglog('oss-client:createRequest'); -const crypto = require('crypto'); -const mime = require('mime'); -const dateFormat = require('dateformat'); -const copy = require('copy-to'); -const path = require('path'); -const { encoder } = require('./encoder'); -const { isIP } = require('./isIP'); -const { setRegion } = require('./setRegion'); -const { getReqUrl } = require('../client/getReqUrl'); - -function getHeader(headers, name) { - return headers[name] || headers[name.toLowerCase()]; -} - -function delHeader(headers, name) { - delete headers[name]; - delete headers[name.toLowerCase()]; -} - -function createRequest(params) { - let date = new Date(); - if (this.options.amendTimeSkewed) { - date = +new Date() + this.options.amendTimeSkewed; - } - const headers = { - 'x-oss-date': dateFormat(new Date(), "UTC:ddd, dd mmm yyyy HH:MM:ss 'GMT'"), - }; - headers['User-Agent'] = this.userAgent; - if (this.options.isRequestPay) { - Object.assign(headers, { 'x-oss-request-payer': 'requester' }); - } - if (this.options.stsToken) { - headers['x-oss-security-token'] = this.options.stsToken; - } - copy(params.headers).to(headers); - if (!getHeader(headers, 'Content-Type')) { - if (params.mime && params.mime.indexOf('/') > 0) { - headers['Content-Type'] = params.mime; - } else { - headers['Content-Type'] = mime.getType(params.mime || path.extname(params.object || '')); - } - } - if (!getHeader(headers, 'Content-Type')) { - delHeader(headers, 'Content-Type'); - } - if (params.content) { - if (!params.disabledMD5) { - if (!params.headers || !params.headers['Content-MD5']) { - headers['Content-MD5'] = crypto.createHash('md5').update(Buffer.from(params.content, 'utf8')).digest('base64'); - } else { - headers['Content-MD5'] = params.headers['Content-MD5']; - } - } - if (!headers['Content-Length']) { - headers['Content-Length'] = params.content.length; - } - } - const { hasOwnProperty } = Object.prototype; - for (const k in headers) { - if (headers[k] && hasOwnProperty.call(headers, k)) { - headers[k] = encoder(String(headers[k]), this.options.headerEncoding); - } - } - const authResource = this._getResource(params); - headers.authorization = this.authorization(params.method, authResource, params.subres, headers, this.options.headerEncoding); - // const url = this._getReqUrl(params); - if (isIP(this.options.endpoint.hostname)) { - const { region, internal, secure } = this.options; - const hostInfo = setRegion(region, internal, secure); - headers.host = `${params.bucket}.${hostInfo.host}`; - } - const url = getReqUrl.bind(this)(params); - debug('request %s %s, with headers %j, !!stream: %s', params.method, url, headers, !!params.stream); - const timeout = params.timeout || this.options.timeout; - const reqParams = { - method: params.method, - content: params.content, - stream: params.stream, - headers, - timeout, - writeStream: params.writeStream, - customResponse: params.customResponse, - timing: true, - ctx: params.ctx || this.ctx, - }; - return { - url, - params: reqParams, - }; -} -exports.createRequest = createRequest; diff --git a/lib/common/utils/dataFix.d.ts b/lib/common/utils/dataFix.d.ts deleted file mode 100644 index da522adc2..000000000 --- a/lib/common/utils/dataFix.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -interface Rename { - [propName: string]: string; -} -interface Config { - 
lowerFirst?: boolean; - rename?: Rename; - remove?: string[]; - camel?: string[]; - bool?: string[]; -} -export declare function dataFix(o: object, conf: Config, finalKill?: Function): typeof dataFix | undefined; -export {}; diff --git a/lib/common/utils/dataFix.js b/lib/common/utils/dataFix.js deleted file mode 100644 index b9f4b9541..000000000 --- a/lib/common/utils/dataFix.js +++ /dev/null @@ -1,58 +0,0 @@ -'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -exports.dataFix = void 0; -const isObject_1 = require('./isObject'); -const TRUE = [ 'true', 'TRUE', '1', 1 ]; -const FALSE = [ 'false', 'FALSE', '0', 0 ]; -function dataFix(o, conf, finalKill) { - if (!isObject_1.isObject(o)) { return; } - const { remove = [], rename = {}, camel = [], bool = [], lowerFirst = false } = conf; - // 删除不需要的数据 - remove.forEach(v => delete o[v]); - // 重命名 - Object.entries(rename).forEach(v => { - if (!o[v[0]]) { return; } - if (o[v[1]]) { return; } - o[v[1]] = o[v[0]]; - delete o[v[0]]; - }); - // 驼峰化 - camel.forEach(v => { - if (!o[v]) { return; } - const afterKey = v - .replace(/^(.)/, $0 => $0.toLowerCase()) - .replace(/-(\w)/g, (_, $1) => $1.toUpperCase()); - if (o[afterKey]) { return; } - o[afterKey] = o[v]; - // todo 暂时兼容以前数据,不做删除 - // delete o[v]; - }); - // 转换值为布尔值 - bool.forEach(v => { - o[v] = fixBool(o[v]); - }); - // finalKill - if (typeof finalKill === 'function') { - finalKill(o); - } - // 首字母转小写 - fixLowerFirst(o, lowerFirst); - return dataFix; -} -exports.dataFix = dataFix; -function fixBool(value) { - if (!value) { return false; } - if (TRUE.includes(value)) { return true; } - return FALSE.includes(value) ? false : value; -} -function fixLowerFirst(o, lowerFirst) { - if (lowerFirst) { - Object.keys(o).forEach(key => { - const lowerK = key.replace(/^\w/, match => match.toLowerCase()); - if (typeof o[lowerK] === 'undefined') { - o[lowerK] = o[key]; - delete o[key]; - } - }); - } -} diff --git a/lib/common/utils/dataFix.ts b/lib/common/utils/dataFix.ts deleted file mode 100644 index 8c1b73342..000000000 --- a/lib/common/utils/dataFix.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { isObject } from './isObject'; - -interface Rename { - [propName: string]: string; -} - -interface Config { - lowerFirst?: boolean; - rename?: Rename; - remove?: string[]; - camel?: string[]; - bool?: string[]; -} - -const TRUE = ['true', 'TRUE', '1', 1]; -const FALSE = ['false', 'FALSE', '0', 0]; - -export function dataFix(o: object, conf: Config, finalKill?: Function) { - if (!isObject(o)) return; - - const { - remove = [], - rename = {}, - camel = [], - bool = [], - lowerFirst = false, - } = conf; - - // 删除不需要的数据 - remove.forEach(v => delete o[v]); - - // 重命名 - Object.entries(rename).forEach(v => { - if (!o[v[0]]) return; - if (o[v[1]]) return; - o[v[1]] = o[v[0]]; - delete o[v[0]]; - }); - - // 驼峰化 - camel.forEach(v => { - if (!o[v]) return; - const afterKey = v - .replace(/^(.)/, $0 => $0.toLowerCase()) - .replace(/-(\w)/g, (_, $1) => $1.toUpperCase()); - if (o[afterKey]) return; - o[afterKey] = o[v]; - // todo 暂时兼容以前数据,不做删除 - // delete o[v]; - }); - - // 转换值为布尔值 - bool.forEach(v => { - o[v] = fixBool(o[v]); - }); - - // finalKill - if (typeof finalKill === 'function') { - finalKill(o); - } - - // 首字母转小写 - fixLowerFirst(o, lowerFirst); - - return dataFix; -} - -function fixBool(value) { - if (!value) return false; - - if (TRUE.includes(value)) return true; - - return FALSE.includes(value) ? 
false : value; -} - -function fixLowerFirst(o, lowerFirst) { - if (lowerFirst) { - Object.keys(o).forEach(key => { - const lowerK = key.replace(/^\w/, match => match.toLowerCase()); - if (typeof o[lowerK] === 'undefined') { - o[lowerK] = o[key]; - delete o[key]; - } - }); - } -} diff --git a/lib/common/utils/deepCopy.js b/lib/common/utils/deepCopy.js deleted file mode 100644 index 873beaa94..000000000 --- a/lib/common/utils/deepCopy.js +++ /dev/null @@ -1,36 +0,0 @@ -exports.deepCopy = obj => { - if (obj === null || typeof obj !== 'object') { - return obj; - } - if (Buffer.isBuffer(obj)) { - return obj.slice(); - } - const copy = Array.isArray(obj) ? [] : {}; - Object.keys(obj).forEach(key => { - copy[key] = exports.deepCopy(obj[key]); - }); - return copy; -}; - -exports.deepCopyWith = (obj, customizer) => { - function deepCopyWithHelper(value, innerKey, innerObject) { - const result = customizer(value, innerKey, innerObject); - if (result !== undefined) { return result; } - if (value === null || typeof value !== 'object') { - return value; - } - if (Buffer.isBuffer(value)) { - return value.slice(); - } - const copy = Array.isArray(value) ? [] : {}; - Object.keys(value).forEach(k => { - copy[k] = deepCopyWithHelper(value[k], k, value); - }); - return copy; - } - if (customizer) { - return deepCopyWithHelper(obj, '', null); - } - - return exports.deepCopy(obj); -}; diff --git a/lib/common/utils/encoder.js b/lib/common/utils/encoder.js deleted file mode 100644 index abe27eef1..000000000 --- a/lib/common/utils/encoder.js +++ /dev/null @@ -1,6 +0,0 @@ -function encoder(str, encoding = 'utf-8') { - if (encoding === 'utf-8') { return str; } - return Buffer.from(str).toString('latin1'); -} - -exports.encoder = encoder; diff --git a/lib/common/utils/formatInventoryConfig.d.ts b/lib/common/utils/formatInventoryConfig.d.ts deleted file mode 100644 index 2210338ae..000000000 --- a/lib/common/utils/formatInventoryConfig.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function formatInventoryConfig(inventoryConfig: any, toArray?: boolean): any; diff --git a/lib/common/utils/formatInventoryConfig.js b/lib/common/utils/formatInventoryConfig.js deleted file mode 100644 index 04f2342fb..000000000 --- a/lib/common/utils/formatInventoryConfig.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -exports.formatInventoryConfig = void 0; -const dataFix_1 = require('../utils/dataFix'); -const isObject_1 = require('../utils/isObject'); -const formatObjKey_1 = require('../utils/formatObjKey'); -function formatInventoryConfig(inventoryConfig, toArray = false) { - if (toArray && isObject_1.isObject(inventoryConfig)) { inventoryConfig = [ inventoryConfig ]; } - if (Array.isArray(inventoryConfig)) { - inventoryConfig = inventoryConfig.map(formatFn); - } else { - inventoryConfig = formatFn(inventoryConfig); - } - return inventoryConfig; -} -exports.formatInventoryConfig = formatInventoryConfig; -function formatFn(_) { - dataFix_1.dataFix(_, { bool: [ 'IsEnabled' ] }, conf => { - let _a, - _b; - // prefix - conf.prefix = conf.Filter.Prefix; - delete conf.Filter; - // OSSBucketDestination - conf.OSSBucketDestination = conf.Destination.OSSBucketDestination; - // OSSBucketDestination.rolename - conf.OSSBucketDestination.rolename = conf.OSSBucketDestination.RoleArn.replace(/.*\//, ''); - delete conf.OSSBucketDestination.RoleArn; - // OSSBucketDestination.bucket - conf.OSSBucketDestination.bucket = conf.OSSBucketDestination.Bucket.replace(/.*:::/, ''); - delete 
conf.OSSBucketDestination.Bucket; - delete conf.Destination; - // frequency - conf.frequency = conf.Schedule.Frequency; - delete conf.Schedule.Frequency; - // optionalFields - if (((_a = conf === null || conf === void 0 ? void 0 : conf.OptionalFields) === null || _a === void 0 ? void 0 : _a.Field) && !Array.isArray((_b = conf.OptionalFields) === null || _b === void 0 ? void 0 : _b.Field)) { conf.OptionalFields.Field = [ conf.OptionalFields.Field ]; } - }); - // firstLowerCase - _ = formatObjKey_1.formatObjKey(_, 'firstLowerCase', { exclude: [ 'OSSBucketDestination', 'SSE-OSS', 'SSE-KMS' ] }); - return _; -} diff --git a/lib/common/utils/formatInventoryConfig.ts b/lib/common/utils/formatInventoryConfig.ts deleted file mode 100644 index 827cdd9bf..000000000 --- a/lib/common/utils/formatInventoryConfig.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { dataFix } from '../utils/dataFix'; -import { isObject } from '../utils/isObject'; -import { formatObjKey } from '../utils/formatObjKey'; - -export function formatInventoryConfig(inventoryConfig, toArray = false) { - if (toArray && isObject(inventoryConfig)) inventoryConfig = [inventoryConfig]; - - if (Array.isArray(inventoryConfig)) { - inventoryConfig = inventoryConfig.map(formatFn); - } else { - inventoryConfig = formatFn(inventoryConfig); - } - return inventoryConfig; -} - -function formatFn(_) { - dataFix(_, { bool: ['IsEnabled'] }, conf => { - // prefix - conf.prefix = conf.Filter.Prefix; - delete conf.Filter; - // OSSBucketDestination - conf.OSSBucketDestination = conf.Destination.OSSBucketDestination; - // OSSBucketDestination.rolename - conf.OSSBucketDestination.rolename = conf.OSSBucketDestination.RoleArn.replace(/.*\//, ''); - delete conf.OSSBucketDestination.RoleArn; - // OSSBucketDestination.bucket - conf.OSSBucketDestination.bucket = conf.OSSBucketDestination.Bucket.replace(/.*:::/, ''); - delete conf.OSSBucketDestination.Bucket; - delete conf.Destination; - // frequency - conf.frequency = conf.Schedule.Frequency; - delete conf.Schedule.Frequency; - // optionalFields - if (conf?.OptionalFields?.Field && !Array.isArray(conf.OptionalFields?.Field)) conf.OptionalFields.Field = [conf.OptionalFields.Field]; - }); - // firstLowerCase - _ = formatObjKey(_, 'firstLowerCase', { exclude: ['OSSBucketDestination', 'SSE-OSS', 'SSE-KMS'] }); - return _; -} diff --git a/lib/common/utils/formatObjKey.d.ts b/lib/common/utils/formatObjKey.d.ts deleted file mode 100644 index 4e370577e..000000000 --- a/lib/common/utils/formatObjKey.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -interface Config { - exclude?: string[]; -} -declare type FormatObjKeyType = 'firstUpperCase' | 'firstLowerCase'; -export declare function formatObjKey(obj: any, type: FormatObjKeyType, options?: Config): any; -export {}; diff --git a/lib/common/utils/formatObjKey.js b/lib/common/utils/formatObjKey.js deleted file mode 100644 index 05d04b73c..000000000 --- a/lib/common/utils/formatObjKey.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -exports.formatObjKey = void 0; -function formatObjKey(obj, type, options) { - if (obj === null || typeof obj !== 'object') { - return obj; - } - let o; - if (Array.isArray(obj)) { - o = []; - for (let i = 0; i < obj.length; i++) { - o.push(formatObjKey(obj[i], type, options)); - } - } else { - o = {}; - Object.keys(obj).forEach(key => { - o[handelFormat(key, type, options)] = formatObjKey(obj[key], type, options); - }); - } - return o; -} -exports.formatObjKey = formatObjKey; -function 
handelFormat(key, type, options) { - let _a; - if (options && ((_a = options.exclude) === null || _a === void 0 ? void 0 : _a.includes(key))) { return key; } - if (type === 'firstUpperCase') { - key = key.replace(/^./, _ => _.toUpperCase()); - } else if (type === 'firstLowerCase') { - key = key.replace(/^./, _ => _.toLowerCase()); - } - return key; -} diff --git a/lib/common/utils/formatObjKey.ts b/lib/common/utils/formatObjKey.ts deleted file mode 100644 index ac3db57a9..000000000 --- a/lib/common/utils/formatObjKey.ts +++ /dev/null @@ -1,36 +0,0 @@ - -interface Config { - exclude?: string[]; -} - -type FormatObjKeyType = 'firstUpperCase' | 'firstLowerCase'; - -export function formatObjKey(obj: any, type: FormatObjKeyType, options?: Config) { - if (obj === null || typeof obj !== 'object') { - return obj; - } - - let o:any; - if (Array.isArray(obj)) { - o = []; - for (let i = 0; i < obj.length; i++) { - o.push(formatObjKey(obj[i], type, options)); - } - } else { - o = {}; - Object.keys(obj).forEach((key) => { - o[handelFormat(key, type, options)] = formatObjKey(obj[key], type, options); - }); - } - return o; -} - -function handelFormat(key: string, type: FormatObjKeyType, options?: Config) { - if (options && options.exclude?.includes(key)) return key; - if (type === 'firstUpperCase') { - key = key.replace(/^./, (_: string) => _.toUpperCase()); - } else if (type === 'firstLowerCase') { - key = key.replace(/^./, (_: string) => _.toLowerCase()); - } - return key; -} diff --git a/lib/common/utils/formatTag.js b/lib/common/utils/formatTag.js deleted file mode 100644 index 3234e53d1..000000000 --- a/lib/common/utils/formatTag.js +++ /dev/null @@ -1,19 +0,0 @@ -const { isObject } = require('./isObject'); - -function formatTag(obj) { - if (obj.Tagging !== undefined) { - obj = obj.Tagging.TagSet.Tag; - } else if (obj.TagSet !== undefined) { - obj = obj.TagSet.Tag; - } else if (obj.Tag !== undefined) { - obj = obj.Tag; - } - obj = obj && isObject(obj) ? 
[ obj ] : obj || []; - const tag = {}; - obj.forEach(item => { - tag[item.Key] = item.Value; - }); - return tag; -} - -exports.formatTag = formatTag; diff --git a/lib/common/utils/getStrBytesCount.js b/lib/common/utils/getStrBytesCount.js deleted file mode 100644 index 578db7586..000000000 --- a/lib/common/utils/getStrBytesCount.js +++ /dev/null @@ -1,14 +0,0 @@ -function getStrBytesCount(str) { - let bytesCount = 0; - for (let i = 0; i < str.length; i++) { - const c = str.charAt(i); - if (/^[\u00-\uff]$/.test(c)) { - bytesCount += 1; - } else { - bytesCount += 2; - } - } - return bytesCount; -} - -exports.getStrBytesCount = getStrBytesCount; diff --git a/lib/common/utils/isFunction.js b/lib/common/utils/isFunction.js deleted file mode 100644 index 78b531220..000000000 --- a/lib/common/utils/isFunction.js +++ /dev/null @@ -1,3 +0,0 @@ -exports.isFunction = v => { - return typeof v === 'function'; -}; diff --git a/lib/common/utils/isObject.d.ts b/lib/common/utils/isObject.d.ts deleted file mode 100644 index ef86365a9..000000000 --- a/lib/common/utils/isObject.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const isObject: (obj: any) => boolean; diff --git a/lib/common/utils/isObject.js b/lib/common/utils/isObject.js deleted file mode 100644 index 57ca615e0..000000000 --- a/lib/common/utils/isObject.js +++ /dev/null @@ -1,3 +0,0 @@ -exports.isObject = obj => { - return Object.prototype.toString.call(obj) === '[object Object]'; -}; diff --git a/lib/common/utils/omit.d.ts b/lib/common/utils/omit.d.ts deleted file mode 100644 index b4568cff6..000000000 --- a/lib/common/utils/omit.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function omit(originalObject: {}, keysToOmit: string[]): {}; diff --git a/lib/common/utils/omit.js b/lib/common/utils/omit.js deleted file mode 100644 index c74cadab8..000000000 --- a/lib/common/utils/omit.js +++ /dev/null @@ -1,9 +0,0 @@ -function omit(originalObject, keysToOmit) { - const cloneObject = Object.assign({}, originalObject); - for (const path of keysToOmit) { - delete cloneObject[path]; - } - return cloneObject; -} - -exports.omit = omit; diff --git a/lib/common/utils/retry.d.ts b/lib/common/utils/retry.d.ts deleted file mode 100644 index 27eff2577..000000000 --- a/lib/common/utils/retry.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function retry(this: any, func: Function, retryMax: number, config?: any): (...arg: any[]) => Promise; diff --git a/lib/common/utils/retry.js b/lib/common/utils/retry.js deleted file mode 100644 index f13af381e..000000000 --- a/lib/common/utils/retry.js +++ /dev/null @@ -1,27 +0,0 @@ -function retry(func, retryMax, config = {}) { - let retryNum = 0; - const { retryDelay = 500, errorHandler = () => true } = config; - const funcR = (...arg) => { - return new Promise((resolve, reject) => { - func(...arg) - .then(result => { - retryNum = 0; - resolve(result); - }) - .catch(err => { - if (retryNum < retryMax && errorHandler(err)) { - retryNum++; - setTimeout(() => { - resolve(funcR(...arg)); - }, retryDelay); - } else { - retryNum = 0; - reject(err); - } - }); - }); - }; - return funcR; -} - -exports.retry = retry; diff --git a/lib/common/utils/setRegion.d.ts b/lib/common/utils/setRegion.d.ts deleted file mode 100644 index 89ea7d062..000000000 --- a/lib/common/utils/setRegion.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import urlutil from 'url'; -export declare function setRegion(region: string, internal?: boolean, secure?: boolean): urlutil.UrlWithStringQuery; diff --git a/lib/common/utils/setRegion.js b/lib/common/utils/setRegion.js 
deleted file mode 100644 index 2a7eefa2f..000000000 --- a/lib/common/utils/setRegion.js +++ /dev/null @@ -1,16 +0,0 @@ -const { parse } = require('url'); -const { checkConfigValid } = require('./checkConfigValid'); - -function setRegion(region, internal = false, secure = false) { - checkConfigValid(region, 'region'); - const protocol = secure ? 'https://' : 'http://'; - let suffix = internal ? '-internal.aliyuncs.com' : '.aliyuncs.com'; - const prefix = 'vpc100-oss-cn-'; - // aliyun VPC region: https://help.aliyun.com/knowledge_detail/38740.html - if (region.substr(0, prefix.length) === prefix) { - suffix = '.aliyuncs.com'; - } - return parse(protocol + region + suffix); -} - -exports.setRegion = setRegion; diff --git a/lib/common/utils/setSTSToken.d.ts b/lib/common/utils/setSTSToken.d.ts deleted file mode 100644 index 210d77554..000000000 --- a/lib/common/utils/setSTSToken.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function setSTSToken(this: any): Promise; -export declare function checkCredentials(obj: any): void; diff --git a/lib/common/utils/setSTSToken.js b/lib/common/utils/setSTSToken.js deleted file mode 100644 index 6a3c5f9e1..000000000 --- a/lib/common/utils/setSTSToken.js +++ /dev/null @@ -1,35 +0,0 @@ -const { formatObjKey } = require('./formatObjKey'); - -async function setSTSToken() { - if (!this.options) { this.options = {}; } - const now = new Date(); - if (this.stsTokenFreshTime) { - if (+now - this.stsTokenFreshTime >= this.options.refreshSTSTokenInterval) { - this.stsTokenFreshTime = now; - let credentials = await this.options.refreshSTSToken(); - credentials = formatObjKey(credentials, 'firstLowerCase'); - if (credentials.securityToken) { - credentials.stsToken = credentials.securityToken; - } - checkCredentials(credentials); - Object.assign(this.options, credentials); - } - } else { - this.stsTokenFreshTime = now; - } - return null; -} - -exports.setSTSToken = setSTSToken; - -function checkCredentials(obj) { - const stsTokenKey = [ 'accessKeySecret', 'accessKeyId', 'stsToken' ]; - const objKeys = Object.keys(obj); - stsTokenKey.forEach(_ => { - if (!objKeys.find(key => key === _)) { - throw Error(`refreshSTSToken must return contains ${_}`); - } - }); -} - -exports.checkCredentials = checkCredentials; diff --git a/lib/image.js b/lib/image.js deleted file mode 100644 index 6829e8210..000000000 --- a/lib/image.js +++ /dev/null @@ -1,125 +0,0 @@ -module.exports = function(OssClient) { - function ImageClient(options) { - if (!(this instanceof ImageClient)) { - return new ImageClient(options); - } - if (!options.bucket) { - throw new Error('require bucket for image service instance'); - } - if (!options.imageHost) { - throw new Error('require imageHost for image service instance'); - } - - options.endpoint = options.imageHost; - this.ossClient = new OssClient(options); - this.ossClient.options.imageHost = options.imageHost; - } - - /** - * Image operations - */ - - ImageClient.prototype.get = async function get(name, file, options) { - return await this.ossClient.get(name, file, options); - }; - - ImageClient.prototype.getStream = async function getStream(name, options) { - return await this.ossClient.getStream(name, options); - }; - - ImageClient.prototype.getExif = async function getExif(name, options) { - const params = this.ossClient._objectRequestParams('GET', `${name}@exif`, options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: 
result.data, - }; - }; - - ImageClient.prototype.getInfo = async function getInfo(name, options) { - const params = this.ossClient._objectRequestParams('GET', `${name}@infoexif`, options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.putStyle = async function putStyle(styleName, style, options) { - const params = this.ossClient._objectRequestParams('PUT', `/?style&styleName=${styleName}`, options); - params.successStatuses = [ 200 ]; - params.content = `${'<?xml version="1.0" encoding="UTF-8"?>\n' + - '<Style><Content>'}${style}</Content></Style>`; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.getStyle = async function getStyle(styleName, options) { - const params = this.ossClient._objectRequestParams('GET', `/?style&styleName=${styleName}`, options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data, - }; - }; - - ImageClient.prototype.listStyle = async function listStyle(options) { - const params = this.ossClient._objectRequestParams('GET', '/?style', options); - params.successStatuses = [ 200 ]; - - let result = await this.ossClient.request(params); - result = await this._parseResponse(result); - return { - res: result.res, - data: result.data.Style, - }; - }; - - ImageClient.prototype.deleteStyle = async function deleteStyle(styleName, options) { - const params = this.ossClient._objectRequestParams('DELETE', `/?style&styleName=${styleName}`, options); - params.successStatuses = [ 204 ]; - - const result = await this.ossClient.request(params); - return { - res: result.res, - }; - }; - - ImageClient.prototype.signatureUrl = function signatureUrl(name) { - return this.ossClient.signatureUrl(name); - }; - - ImageClient.prototype._parseResponse = async function _parseResponse(result) { - const str = result.data.toString(); - const type = result.res.headers['content-type']; - - if (type === 'application/json') { - const data = JSON.parse(str); - result.data = {}; - if (data) { - Object.keys(data).forEach(key => { - result.data[key] = parseFloat(data[key].value, 10) || data[key].value; - }); - } - } else if (type === 'application/xml') { - result.data = await this.ossClient.parseXML(str); - } - return result; - }; - - return ImageClient; -}; diff --git a/lib/index.js b/lib/index.js deleted file mode 100644 index 23dc982fd..000000000 --- a/lib/index.js +++ /dev/null @@ -1,4 +0,0 @@ -const Client = require('./client'); - -module.exports = Client; -module.exports.Client = Client; diff --git a/lib/managed-upload.js b/lib/managed-upload.js deleted file mode 100644 index b7c478d3c..000000000 --- a/lib/managed-upload.js +++ /dev/null @@ -1,367 +0,0 @@ -const fs = require('fs'); -const { stat: statFile } = require('fs/promises'); -const util = require('util'); -const path = require('path'); -const mime = require('mime'); -const { isReadable } = require('is-type-of'); -const { retry } = require('./common/utils/retry'); - -const proto = exports; - -/** - * Multipart operations - */ - -/** - * Upload a file to OSS using multipart uploads - * @param {String} name name - * @param {String|File|Buffer} file file - * @param {Object} options - * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64
- * {String} options.callback.url the OSS sends a callback request to this URL - * {String} options.callback.host The host header value for initiating callback requests - * {String} options.callback.body The value of the request body when a callback is initiated - * {String} options.callback.contentType The Content-Type of the callback requests initiatiated - * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: - * customValue = { - * key1: 'value1', - * key2: 'value2' - * } - */ -proto.multipartUpload = async function multipartUpload(name, file, options) { - this.resetCancelFlag(); - options = options || {}; - if (options.checkpoint && options.checkpoint.uploadId) { - return await this._resumeMultipart(options.checkpoint, options); - } - - const minPartSize = 100 * 1024; - if (!options.mime) { - if (Buffer.isBuffer(file)) { - options.mime = ''; - } else { - options.mime = mime.getType(path.extname(file)); - } - } - options.headers = options.headers || {}; - this._convertMetaToHeaders(options.meta, options.headers); - - const fileSize = await this._getFileSize(file); - if (fileSize < minPartSize) { - options.contentLength = fileSize; - const result = await this.put(name, file, options); - if (options && options.progress) { - await options.progress(1); - } - - const ret = { - res: result.res, - bucket: this.options.bucket, - name, - etag: result.res.headers.etag, - }; - - if ((options.headers && options.headers['x-oss-callback']) || options.callback) { - ret.data = result.data; - } - - return ret; - } - - if (options.partSize && !(parseInt(options.partSize, 10) === options.partSize)) { - throw new Error('partSize must be int number'); - } - - if (options.partSize && options.partSize < minPartSize) { - throw new Error(`partSize must not be smaller than ${minPartSize}`); - } - - const initResult = await this.initMultipartUpload(name, options); - const { uploadId } = initResult; - const partSize = this._getPartSize(fileSize, options.partSize); - - const checkpoint = { - file, - name, - fileSize, - partSize, - uploadId, - doneParts: [], - }; - - if (options && options.progress) { - await options.progress(0, checkpoint, initResult.res); - } - - return await this._resumeMultipart(checkpoint, options); -}; - -/** - * Resume multipart upload from checkpoint. The checkpoint will be - * updated after each successful part upload. 
- * @param {Object} checkpoint the checkpoint - * @param {Object} options options - */ -proto._resumeMultipart = async function _resumeMultipart(checkpoint, options) { - const that = this; - if (this.isCancel()) { - throw this._makeCancelEvent(); - } - const { file, fileSize, partSize, uploadId, doneParts, name } = checkpoint; - - const partOffs = this._divideParts(fileSize, partSize); - const numParts = partOffs.length; - let uploadPartJob = retry( - (self, partNo) => { - // eslint-disable-next-line no-async-promise-executor - return new Promise(async (resolve, reject) => { - try { - if (!self.isCancel()) { - const pi = partOffs[partNo - 1]; - const stream = await self._createStream(file, pi.start, pi.end); - const data = { - stream, - size: pi.end - pi.start, - }; - - if (Array.isArray(self.multipartUploadStreams)) { - self.multipartUploadStreams.push(data.stream); - } else { - self.multipartUploadStreams = [ data.stream ]; - } - - const removeStreamFromMultipartUploadStreams = function() { - if (!stream.destroyed) { - stream.destroy(); - } - const index = self.multipartUploadStreams.indexOf(stream); - if (index !== -1) { - self.multipartUploadStreams.splice(index, 1); - } - }; - - stream.on('close', removeStreamFromMultipartUploadStreams); - stream.on('error', removeStreamFromMultipartUploadStreams); - - let result; - try { - result = await self._uploadPart(name, uploadId, partNo, data, { - timeout: options.timeout, - }); - } catch (error) { - removeStreamFromMultipartUploadStreams(); - if (error.status === 404) { - throw self._makeAbortEvent(); - } - throw error; - } - if (!self.isCancel()) { - doneParts.push({ - number: partNo, - etag: result.res.headers.etag, - }); - checkpoint.doneParts = doneParts; - - if (options.progress) { - await options.progress(doneParts.length / (numParts + 1), checkpoint, result.res); - } - } - } - resolve(); - } catch (err) { - err.partNum = partNo; - reject(err); - } - }); - }, - this.options.retryMax, - { - errorHandler: err => { - const _errHandle = _err => { - const statusErr = [ -1, -2 ].includes(_err.status); - const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); - return statusErr && requestErrorRetryHandle(_err); - }; - return !!_errHandle(err); - }, - } - ); - - const all = Array.from(new Array(numParts), (x, i) => i + 1); - const done = doneParts.map(p => p.number); - const todo = all.filter(p => done.indexOf(p) < 0); - - const defaultParallel = 5; - const parallel = options.parallel || defaultParallel; - - // upload in parallel - const jobErr = await this._parallel( - todo, - parallel, - value => new Promise((resolve, reject) => { - uploadPartJob(that, value) - .then(() => { - resolve(); - }) - .catch(reject); - }) - ); - - const abortEvent = jobErr.find(err => err.name === 'abort'); - if (abortEvent) throw abortEvent; - - if (this.isCancel()) { - uploadPartJob = null; - throw this._makeCancelEvent(); - } - - if (jobErr && jobErr.length > 0) { - jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${ - jobErr[0].partNum - }`; - throw jobErr[0]; - } - - return await this.completeMultipartUpload(name, uploadId, doneParts, options); -}; - -/** - * Get file size - * @param {Object} file file - */ -proto._getFileSize = async function _getFileSize(file) { - if (Buffer.isBuffer(file)) { - return file.length; - } else if (typeof file === 'string') { - const stat = await statFile(file); - return stat.size; - } - - throw new Error('_getFileSize requires Buffer/File/String.'); -}; - 
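// ---------------------------------------------------------------------------
// Editor's note: a minimal, hedged usage sketch of the multipartUpload() API
// removed above -- this snippet is not part of the original diff. `store`
// stands for a hypothetical v1-style client instance, and the object key and
// local file path are placeholders. The option names come straight from the
// deleted implementation: partSize must be an integer of at least 100 KB,
// parallelism defaults to 5, and the checkpoint handed to `progress` can be
// fed back through options.checkpoint to resume an interrupted upload.
async function uploadWithResume(store) {
  let savedCheckpoint;
  const result = await store.multipartUpload('ossdemo/big-file.bin', './big-file.bin', {
    partSize: 1024 * 1024, // integer >= 100 KB; both checks are enforced above
    parallel: 5, // matches defaultParallel in _resumeMultipart
    async progress(percent, checkpoint) {
      savedCheckpoint = checkpoint; // persist this to survive a crash
    },
  });
  // To resume after a failure, pass the saved checkpoint back in:
  // await store.multipartUpload('ossdemo/big-file.bin', './big-file.bin', { checkpoint: savedCheckpoint });
  return result;
}
// ---------------------------------------------------------------------------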
-/* - * Readable stream for Web File - */ -const { Readable } = require('stream'); - -function WebFileReadStream(file, options) { - if (!(this instanceof WebFileReadStream)) { - return new WebFileReadStream(file, options); - } - - Readable.call(this, options); - - this.file = file; - // this.reader = new FileReader(); - this.start = 0; - this.finish = false; - this.fileBuffer = null; -} -util.inherits(WebFileReadStream, Readable); - -WebFileReadStream.prototype.readFileAndPush = function readFileAndPush(size) { - if (this.fileBuffer) { - let pushRet = true; - while (pushRet && this.fileBuffer && this.start < this.fileBuffer.length) { - const { start } = this; - let end = start + size; - end = end > this.fileBuffer.length ? this.fileBuffer.length : end; - this.start = end; - pushRet = this.push(this.fileBuffer.slice(start, end)); - } - } -}; - -WebFileReadStream.prototype._read = function _read(size) { - if ( - (this.file && this.start >= this.file.size) || - (this.fileBuffer && this.start >= this.fileBuffer.length) || - this.finish || - (this.start === 0 && !this.file) - ) { - if (!this.finish) { - this.fileBuffer = null; - this.finish = true; - } - this.push(null); - return; - } - - const defaultReadSize = 16 * 1024; - size = size || defaultReadSize; - - const that = this; - this.reader.onload = function(e) { - that.fileBuffer = Buffer.from(new Uint8Array(e.target.result)); - that.file = null; - that.readFileAndPush(size); - }; - this.reader.onerror = function onload(e) { - const error = e.srcElement && e.srcElement.error; - if (error) { - throw error; - } - throw e; - }; - - if (this.start === 0) { - this.reader.readAsArrayBuffer(this.file); - } else { - this.readFileAndPush(size); - } -}; - -proto._createStream = function _createStream(file, start, end) { - if (isReadable(file)) { - return file; - } else if (Buffer.isBuffer(file)) { - const iterable = file.subarray(start, end); - // we can't use Readable.from() since it is only support in Node v10 - return new Readable({ - read() { - this.push(iterable); - this.push(null); - }, - }); - } else if (typeof file === 'string') { - return fs.createReadStream(file, { - start, - end: end - 1, - }); - } - throw new Error('_createStream requires Buffer/File/String.'); -}; - -proto._getPartSize = function _getPartSize(fileSize, partSize) { - const maxNumParts = 10 * 1000; - const defaultPartSize = 1 * 1024 * 1024; - - if (!partSize) partSize = defaultPartSize; - const safeSize = Math.ceil(fileSize / maxNumParts); - - if (partSize < safeSize) { - partSize = safeSize; - console.warn( - `partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000` - ); - } - return partSize; -}; - -proto._divideParts = function _divideParts(fileSize, partSize) { - const numParts = Math.ceil(fileSize / partSize); - - const partOffs = []; - for (let i = 0; i < numParts; i++) { - const start = partSize * i; - const end = Math.min(start + partSize, fileSize); - - partOffs.push({ - start, - end, - }); - } - - return partOffs; -}; diff --git a/lib/rtmp.js b/lib/rtmp.js deleted file mode 100644 index 53c6eed2d..000000000 --- a/lib/rtmp.js +++ /dev/null @@ -1,281 +0,0 @@ -const jstoxml = require('jstoxml'); -const utility = require('utility'); -const copy = require('copy-to'); -const urlutil = require('url'); - -const proto = exports; - -/** - * RTMP operations - */ - -/** - * Create a live channel - * @param {String} id the channel id - * @param {Object} conf the channel configuration - * @param {Object} options 
options - * @return {Object} result - */ -proto.putChannel = async function putChannel(id, conf, options) { - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('PUT', id, options); - params.xmlResponse = true; - params.content = jstoxml.toXML({ - LiveChannelConfiguration: conf, - }); - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - let publishUrls = result.data.PublishUrls.Url; - if (!Array.isArray(publishUrls)) { - publishUrls = [ publishUrls ]; - } - let playUrls = result.data.PlayUrls.Url; - if (!Array.isArray(playUrls)) { - playUrls = [ playUrls ]; - } - - return { - publishUrls, - playUrls, - res: result.res, - }; -}; - -/** - * Get the channel info - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.getChannel = async function getChannel(id, options) { - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('GET', id, options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - data: result.data, - res: result.res, - }; -}; - -/** - * Delete the channel - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.deleteChannel = async function deleteChannel(id, options) { - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('DELETE', id, options); - params.successStatuses = [ 204 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -/** - * Set the channel status - * @param {String} id the channel id - * @param {String} status the channel status - * @param {Object} options options - * @return {Object} result - */ -proto.putChannelStatus = async function putChannelStatus(id, status, options) { - options = options || {}; - options.subres = { - live: null, - status, - }; - - const params = this._objectRequestParams('PUT', id, options); - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -/** - * Get the channel status - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.getChannelStatus = async function getChannelStatus(id, options) { - options = options || {}; - options.subres = { - live: null, - comp: 'stat', - }; - - const params = this._objectRequestParams('GET', id, options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - data: result.data, - res: result.res, - }; -}; - -/** - * List the channels - * @param {Object} query the query parameters - * filter options: - * - prefix {String}: the channel id prefix (returns channels with this prefix) - * - marker {String}: the channle id marker (returns channels after this id) - * - max-keys {Number}: max number of channels to return - * @param {Object} options options - * @return {Object} result - */ -proto.listChannels = async function listChannels(query, options) { - // prefix, marker, max-keys - - options = options || {}; - options.subres = 'live'; - - const params = this._objectRequestParams('GET', '', options); - params.query = query; - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - let channels = result.data.LiveChannel || []; - if (!Array.isArray(channels)) { 
- channels = [ channels ]; - } - - channels = channels.map(x => { - x.PublishUrls = x.PublishUrls.Url; - if (!Array.isArray(x.PublishUrls)) { - x.PublishUrls = [ x.PublishUrls ]; - } - x.PlayUrls = x.PlayUrls.Url; - if (!Array.isArray(x.PlayUrls)) { - x.PlayUrls = [ x.PlayUrls ]; - } - - return x; - }); - - return { - channels, - nextMarker: result.data.NextMarker || null, - isTruncated: result.data.IsTruncated === 'true', - res: result.res, - }; -}; - -/** - * Get the channel history - * @param {String} id the channel id - * @param {Object} options options - * @return {Object} result - */ -proto.getChannelHistory = async function getChannelHistory(id, options) { - options = options || {}; - options.subres = { - live: null, - comp: 'history', - }; - - const params = this._objectRequestParams('GET', id, options); - params.xmlResponse = true; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - let records = result.data.LiveRecord || []; - if (!Array.isArray(records)) { - records = [ records ]; - } - return { - records, - res: result.res, - }; -}; - -/** - * Create vod playlist - * @param {String} id the channel id - * @param {String} name the playlist name - * @param {Object} time the begin and end time - * time: - * - startTime {Number}: the begin time in epoch seconds - * - endTime {Number}: the end time in epoch seconds - * @param {Object} options options - * @return {Object} result - */ -proto.createVod = async function createVod(id, name, time, options) { - options = options || {}; - options.subres = { - vod: null, - }; - copy(time).to(options.subres); - - const params = this._objectRequestParams('POST', `${id}/${name}`, options); - params.query = time; - params.successStatuses = [ 200 ]; - - const result = await this.request(params); - - return { - res: result.res, - }; -}; - -/** - * Get RTMP Url - * @param {String} channelId the channel id - * @param {Object} options options - * options: - * - expires {Number}: expire time in seconds - * - params {Object}: the parameters such as 'playlistName' - * @return {String} the RTMP url - */ -proto.getRtmpUrl = function(channelId, options) { - options = options || {}; - const expires = utility.timestamp() + (options.expires || 1800); - const res = { - bucket: this.options.bucket, - object: this._objectName(`live/${channelId}`), - }; - const resource = `/${res.bucket}/${channelId}`; - - options.params = options.params || {}; - const query = Object.keys(options.params).sort().map(x => `${x}:${options.params[x]}\n`) - .join(''); - - const stringToSign = `${expires}\n${query}${resource}`; - const signature = this.signature(stringToSign); - - const url = urlutil.parse(this._getReqUrl(res)); - url.protocol = 'rtmp:'; - url.query = { - OSSAccessKeyId: this.options.accessKeyId, - Expires: expires, - Signature: signature, - }; - copy(options.params).to(url.query); - - return url.format(); -}; diff --git a/package.json b/package.json index 7e4e16071..078073928 100644 --- a/package.json +++ b/package.json @@ -55,14 +55,10 @@ }, "homepage": "https://github.com/node-modules/oss-client", "dependencies": { - "address": "^1.2.0", - "copy-to": "^2.0.1", - "is-type-of": "^2.0.0", - "jstoxml": "^2.0.0", + "is-type-of": "^2.0.1", "mime": "^3.0.0", "ms": "^2.1.3", - "oss-interface": "^1.2.2", - "sdk-base": "^4.2.1", + "oss-interface": "^1.3.0", "stream-wormhole": "^2.0.0", "urllib": "^3.19.2", "utility": "^1.18.0", @@ -79,9 +75,6 @@ "eslint": "^8.25.0", "eslint-config-egg": "^13.0.0", "git-contributor": "^2.1.5", - "mm": 
"^3.2.0", - "sinon": "^1.17.7", - "tsd": "^0.28.1", "tshy": "^1.0.0", "tshy-after": "^1.0.0", "typescript": "^5.2.2" diff --git a/src/OSSObject.ts b/src/OSSObject.ts index f80dcb89e..1e3d00cc1 100644 --- a/src/OSSObject.ts +++ b/src/OSSObject.ts @@ -842,7 +842,7 @@ export class OSSObject extends OSSBaseClient implements IObjectSimple { 'x-oss-process': '', }, }); - + const bucketParam = targetBucket ? `,b_${Buffer.from(targetBucket).toString('base64')}` : ''; targetObject = Buffer.from(targetObject).toString('base64'); const content = { @@ -850,7 +850,7 @@ export class OSSObject extends OSSBaseClient implements IObjectSimple { }; params.content = Buffer.from(querystring.stringify(content)); params.successStatuses = [ 200 ]; - + const result = await this.request(params); return { res: result.res, diff --git a/test/OSSObject.test.ts b/test/OSSObject.test.ts index 124ffd60b..351b0df26 100644 --- a/test/OSSObject.test.ts +++ b/test/OSSObject.test.ts @@ -16,6 +16,8 @@ import { Readable } from 'node:stream'; describe('test/OSSObject.test.ts', () => { const tmpdir = os.tmpdir(); const prefix = config.prefix; + assert(config.oss.accessKeyId); + assert(config.oss.accessKeySecret); const ossObject = new OSSObject(config.oss); const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); @@ -1131,7 +1133,7 @@ describe('test/OSSObject.test.ts', () => { }); }); - describe('signatureUrl()', () => { + describe('signatureUrl() and asyncSignatureUrl()', () => { let name: string; let needEscapeName: string; before(async () => { @@ -1168,6 +1170,14 @@ describe('test/OSSObject.test.ts', () => { assert.equal(urlRes.data.toString(), result.content.toString()); }); + it('should asyncSignatureUrl get object ok', async () => { + const result = await ossObject.get(name); + const url = await ossObject.asyncSignatureUrl(name); + const urlRes = await urllib.request(url); + assert.equal(urlRes.status, 200); + assert.equal(urlRes.data.toString(), result.content.toString()); + }); + it('should signature url with response limitation', () => { const response = { 'content-type': 'xml', @@ -2242,7 +2252,7 @@ describe('test/OSSObject.test.ts', () => { const result = await ossObject.processObjectSave( name, target, - 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,' + 'image/watermark,text_aGVsbG8g5Zu+54mH5pyN5Yqh77yB,color_ff6a00,', ); assert.equal(result.res.status, 200); assert.equal(result.status, 200); diff --git a/test/bucket.test.js b/test/bucket.test.js deleted file mode 100644 index f77b5a3e5..000000000 --- a/test/bucket.test.js +++ /dev/null @@ -1,1492 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const ms = require('humanize-ms'); -const { metaSyncTime, timeout } = require('./config'); - -describe.skip('test/bucket.test.js', () => { - const { prefix, includesConf } = utils; - let store; - let bucket; - let bucketRegion; - const defaultRegion = config.region; - before(async () => { - store = oss(config); - config.region = defaultRegion; - store = oss(config); - bucket = `oss-client-test-bucket-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - bucketRegion = defaultRegion; - - const result = await store.putBucket(bucket, { timeout }); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - }); - // restore object will have cache - after(async () => { - await utils.cleanBucket(store, bucket); - }); - - 
describe('setBucket()', () => { - it('should check bucket name', async () => { - try { - const name = 'oss-client-test-bucket-/'; - await store.setBucket(name); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'The bucket must be conform to the specifications'); - } - }); - }); - - describe('getBucket()', () => { - it('should get bucket name', async () => { - const name = 'oss-client-test-bucket'; - await store.setBucket(name); - const res = store.getBucket(); - assert.equal(res, name); - }); - }); - - describe('putBucket()', () => { - let name; - let archvieBucket; - before(async () => { - name = `oss-client-test-putbucket-${prefix.replace(/[/.]/g, '-')}`; - name = name.substring(0, name.length - 1); - // just for archive bucket test - archvieBucket = `oss-client-archive-bucket-${prefix.replace(/[/.]/g, '-')}`; - archvieBucket = archvieBucket.substring(0, archvieBucket.length - 1); - await store.putBucket(archvieBucket, { StorageClass: 'Archive', timeout }); - }); - - it('should create a new bucket', async () => { - const result1 = await store.putBucket(name, { timeout }); - assert.equal(result1.bucket, name); - assert.equal(result1.res.status, 200); - }); - - it('should create an archive bucket', async () => { - await utils.sleep(ms(metaSyncTime)); - const result2 = await store.listBuckets( - {}, - { - timeout, - } - ); - const { buckets } = result2; - const m = buckets.some(item => item.name === archvieBucket); - assert(m === true); - buckets.map(item => { - if (item.name === archvieBucket) { - assert(item.StorageClass === 'Archive'); - } - return 1; - }); - }); - - // todo resume - // it('should create an ZRS bucket', async () => { - // const ZRS_name = `oss-client-zrs-${prefix.replace(/[/.]/g, '-').slice(0, -1)}`; - // const ZRS_put_res = await store.putBucket(ZRS_name, { - // dataRedundancyType: 'ZRS' - // }); - // assert.strictEqual(ZRS_put_res.res.status, 200); - // const ZRS_get_res = await store.getBucketInfo(ZRS_name); - // assert.strictEqual(ZRS_get_res.bucket.DataRedundancyType, 'ZRS'); - // await store.deleteBucket(ZRS_name); - // }); - - it('should create an public-read bucket', async () => { - const public_read_name = `oss-client-zrs-${prefix.replace(/[/.]/g, '-').slice(0, -1)}`; - const public_read_name_res = await store.putBucket(public_read_name, { - acl: 'public-read', - }); - assert.strictEqual(public_read_name_res.res.status, 200); - const public_read_name_get_res = await store.getBucketInfo(public_read_name); - assert.strictEqual(public_read_name_get_res.bucket.AccessControlList.Grant, 'public-read'); - await store.deleteBucket(public_read_name); - }); - - after(async () => { - const result = await store.deleteBucket(name); - assert(result.res.status === 200 || result.res.status === 204); - await store.deleteBucket(archvieBucket); - }); - }); - - describe('getBucketInfo', () => { - it('it should return correct bucketInfo when bucket exist', async () => { - const result = await store.getBucketInfo(bucket); - assert.equal(result.res.status, 200); - - assert.equal(result.bucket.Location, `${bucketRegion}`); - assert.equal(result.bucket.ExtranetEndpoint, `${bucketRegion}.aliyuncs.com`); - assert.equal(result.bucket.IntranetEndpoint, `${bucketRegion}-internal.aliyuncs.com`); - assert.equal(result.bucket.AccessControlList.Grant, 'private'); - assert.equal(result.bucket.StorageClass, 'Standard'); - }); - - it('it should return NoSuchBucketError when bucket not exist', async () => { - await utils.throws(async () => { - await 
store.getBucketInfo('not-exists-bucket'); - }, 'NoSuchBucketError'); - }); - }); - - describe('getBucketLoaction', () => { - it('it should return loaction this.region', async () => { - const result = await store.getBucketLocation(bucket); - assert.equal(result.location, bucketRegion); - }); - - it('it should return NoSuchBucketError when bucket not exist', async () => { - await utils.throws(async () => { - await store.getBucketLocation('not-exists-bucket'); - }, 'NoSuchBucketError'); - }); - }); - - describe('deleteBucket()', () => { - it('should delete not exists bucket throw NoSuchBucketError', async () => { - await utils.throws(async () => { - await store.deleteBucket('not-exists-bucket'); - }, 'NoSuchBucketError'); - }); - - it('should delete not empty bucket throw BucketNotEmptyError', async () => { - store.useBucket(bucket); - await store.put('oss-client-test-bucket.txt', __filename); - utils.sleep(ms(metaSyncTime)); - await utils.throws(async () => { - await store.deleteBucket(bucket); - }, 'BucketNotEmptyError'); - await store.delete('oss-client-test-bucket.txt'); - }); - }); - - describe('putBucketACL()', () => { - it('should set bucket acl to public-read-write', async () => { - const resultAcl = await store.putBucketACL(bucket, 'public-read-write'); - assert.equal(resultAcl.res.status, 200); - assert.equal(resultAcl.bucket, bucket); - - // Need wait some time for bucket meta sync - await utils.sleep(ms(metaSyncTime)); - - const r = await store.getBucketACL(bucket); - assert.equal(r.res.status, 200); - // skip it, data will be delay - // assert.equal(r.acl, 'public-read-write'); - }); - - it('should create and set acl when bucket not exists', async () => { - const bucketacl = `${bucket}-new`; - const putresult = await store.putBucketACL(bucketacl, 'public-read'); - assert.equal(putresult.res.status, 200); - assert.equal(putresult.bucket, bucketacl); - - await utils.sleep(ms(metaSyncTime)); - - const getresult = await store.getBucketACL(bucketacl); - assert.equal(getresult.res.status, 200); - assert.equal(getresult.acl, 'public-read'); - - await store.deleteBucket(bucketacl); - }); - }); - - describe('listBuckets()', () => { - let listBucketsPrefix; - before(async () => { - // create 2 buckets - listBucketsPrefix = `oss-client-list-buckets-${prefix.replace(/[/.]/g, '-')}`; - await Promise.all( - Array(2) - .fill(1) - .map((v, i) => store.putBucket(listBucketsPrefix + i)) - ); - }); - - it('should list buckets by prefix', async () => { - const result = await store.listBuckets( - { - prefix: listBucketsPrefix, - 'max-keys': 20, - }, - { - timeout, - } - ); - - assert(Array.isArray(result.buckets)); - assert.equal(result.buckets.length, 2); - assert(!result.isTruncated); - assert.equal(result.nextMarker, null); - assert(result.owner); - assert.equal(typeof result.owner.id, 'string'); - assert.equal(typeof result.owner.displayName, 'string'); - - for (let i = 0; i < 2; i++) { - const name = listBucketsPrefix + i; - assert.equal(result.buckets[i].name, name); - } - }); - - it('should list buckets by subres', async () => { - const tag = { - a: '1', - b: '2', - }; - const putTagBukcet = `${listBucketsPrefix}0`; - await store.putBucketTags(putTagBukcet, tag); - const { buckets } = await store.listBuckets({ - prefix: listBucketsPrefix, - subres: { - tagging: Object.entries(tag) - .map(_ => _.map(inner => `"${inner.toString()}"`).join(':')) - .join(','), - }, - }); - - if (buckets && buckets.length && buckets[0]) { - assert.deepStrictEqual(buckets[0].tag, tag); - } else { - assert(false); - 
} - }); - - after(async () => { - await Promise.all( - Array(2) - .fill(1) - .map((v, i) => store.deleteBucket(listBucketsPrefix + i)) - ); - }); - }); - - describe('putBucketLogging(), getBucketLogging(), deleteBucketLogging()', () => { - it('should create, get and delete the logging', async () => { - let result = await store.putBucketLogging(bucket, 'logs/'); - assert.equal(result.res.status, 200); - // put again will be fine - result = await store.putBucketLogging(bucket, 'logs/'); - assert.equal(result.res.status, 200); - - // get the logging setttings - result = await store.getBucketLogging(bucket); - assert.equal(result.res.status, 200); - - // delete it - result = await store.deleteBucketLogging(bucket); - assert.equal(result.res.status, 204); - }); - }); - - describe('putBucketWebsite(), getBucketWebsite(), deleteBucketWebsite()', () => { - it('should get and delete the website settings', async () => { - await store.putBucketWebsite(bucket, { - index: 'index.html', - }); - - await utils.sleep(ms(metaSyncTime)); - - // get - const get = await store.getBucketWebsite(bucket); - assert.equal(typeof get.index, 'string'); - assert.equal(get.res.status, 200); - - // delete it - const del = await store.deleteBucketWebsite(bucket); - assert.equal(del.res.status, 204); - }); - - it('should create when RoutingRules is Array or Object', async () => { - const routingRule1 = { - RuleNumber: '1', - Condition: { - KeyPrefixEquals: 'abc/', - HttpErrorCodeReturnedEquals: '404', - }, - Redirect: { - RedirectType: 'Mirror', - MirrorUsingRole: 'false', - MirrorUserLastModified: 'false', - PassQueryString: 'true', - MirrorIsExpressTunnel: 'false', - MirrorPassOriginalSlashes: 'false', - MirrorAllowHeadObject: 'false', - MirrorURL: 'http://www.test.com/', - MirrorPassQueryString: 'true', - MirrorFollowRedirect: 'true', - MirrorCheckMd5: 'true', - MirrorHeaders: { - PassAll: 'true', - Pass: [ 'myheader-key1', 'myheader-key2' ], - Remove: [ 'remove1', 'remove2' ], - Set: { - Key: 'myheader-key5', - Value: 'myheader-value5', - }, - }, - }, - }; - const routingRules = [ - { - RuleNumber: '2', - Condition: { - KeyPrefixEquals: 'a1bc/', - HttpErrorCodeReturnedEquals: '404', - }, - Redirect: { - RedirectType: 'Mirror', - MirrorUsingRole: 'false', - MirrorUserLastModified: 'false', - MirrorAllowHeadObject: 'false', - MirrorIsExpressTunnel: 'false', - MirrorPassOriginalSlashes: 'false', - PassQueryString: 'true', - MirrorURL: 'http://www.test1.com/', - MirrorPassQueryString: 'true', - MirrorFollowRedirect: 'true', - MirrorCheckMd5: 'true', - MirrorHeaders: { - PassAll: 'true', - Pass: [ 'myheader-key12', 'myheader-key22' ], - Remove: [ 'remove1', 'remove2' ], - Set: { - Key: 'myheader-key5', - Value: 'myheader-value5', - }, - }, - }, - }, - ]; - const website = { - index: 'index1.html', - supportSubDir: 'true', - type: '1', - error: 'error1.html', - routingRules, - }; - - const result1 = await store.putBucketWebsite(bucket, website); - assert.strictEqual(result1.res.status, 200); - const rules1 = await store.getBucketWebsite(bucket); - assert(includesConf(rules1.routingRules, routingRules)); - assert.strictEqual(rules1.supportSubDir, website.supportSubDir); - assert.strictEqual(rules1.type, website.type); - - website.routingRules = [ routingRule1 ]; - const result2 = await store.putBucketWebsite(bucket, website); - assert.strictEqual(result2.res.status, 200); - const rules2 = await store.getBucketWebsite(bucket); - assert(includesConf(rules2.routingRules, website.routingRules)); - }); - - it('should throw error 
when RoutingRules is not Array', async () => { - const website = { - index: 'index1.html', - supportSubDir: 'true', - type: '1', - error: 'error1.html', - routingRules: '', - }; - - try { - await store.putBucketWebsite(bucket, website); - assert(false); - } catch (error) { - assert.strictEqual(error.message, 'RoutingRules must be Array'); - } - try { - website.RoutingRules = 0; - await store.putBucketWebsite(bucket, website); - assert(false); - } catch (error) { - assert.strictEqual(error.message, 'RoutingRules must be Array'); - } - }); - }); - - describe('putBucketReferer(), getBucketReferer(), deleteBucketReferer()', () => { - it('should create, get and delete the referer', async () => { - const putresult = await store.putBucketReferer(bucket, true, [ 'http://npm.taobao.org' ], { timeout }); - assert.equal(putresult.res.status, 200); - - // put again will be fine - const referers = [ 'http://npm.taobao.org', 'https://npm.taobao.org', 'http://cnpmjs.org' ]; - const putReferer = await store.putBucketReferer(bucket, false, referers, { timeout }); - assert.equal(putReferer.res.status, 200); - - await utils.sleep(ms(metaSyncTime)); - - // get - const getReferer = await store.getBucketReferer(bucket); - assert(Array.isArray(getReferer.referers)); - assert.equal(typeof getReferer.allowEmpty, 'boolean'); - assert.equal(getReferer.res.status, 200); - - // delete it - const deleteResult = await store.deleteBucketReferer(bucket); - assert.equal(deleteResult.res.status, 200); - }); - }); - - describe('putBucketCORS(), getBucketCORS(), deleteBucketCORS()', () => { - afterEach(async () => { - // delete it - const result = await store.deleteBucketCORS(bucket, { timeout }); - assert.equal(result.res.status, 204); - }); - - it('should create, get and delete the cors', async () => { - const rules = [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - allowedHeader: '*', - exposeHeader: 'Content-Length', - maxAgeSeconds: '30', - }, - ]; - const putResult = await store.putBucketCORS(bucket, rules); - assert.equal(putResult.res.status, 200); - - const getResult = await store.getBucketCORS(bucket, { timeout }); - assert.equal(getResult.res.status, 200); - assert.deepEqual(getResult.rules, [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - allowedHeader: '*', - exposeHeader: 'Content-Length', - maxAgeSeconds: '30', - }, - ]); - }); - - it('should overwrite cors', async () => { - const rules1 = [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - timeout, - }, - ]; - const putCorsResult1 = await store.putBucketCORS(bucket, rules1); - assert.equal(putCorsResult1.res.status, 200); - - await utils.sleep(ms(metaSyncTime)); - - const getCorsResult1 = await store.getBucketCORS(bucket, { timeout }); - assert.equal(getCorsResult1.res.status, 200); - assert.deepEqual(getCorsResult1.rules, [ - { - allowedOrigin: '*', - allowedMethod: 'GET', - }, - ]); - - const rules2 = [ - { - allowedOrigin: 'localhost', - allowedMethod: 'HEAD', - }, - ]; - const putCorsResult2 = await store.putBucketCORS(bucket, rules2); - assert.equal(putCorsResult2.res.status, 200); - - await utils.sleep(ms(metaSyncTime)); - - const getCorsResult2 = await store.getBucketCORS(bucket, { timeout }); - assert.equal(getCorsResult2.res.status, 200); - assert.deepEqual(getCorsResult2.rules, [ - { - allowedOrigin: 'localhost', - allowedMethod: 'HEAD', - }, - ]); - }); - - it('should check rules', async () => { - try { - await store.putBucketCORS(bucket); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'rules is 
required'); - } - }); - - it('should check allowedOrigin', async () => { - try { - await store.putBucketCORS(bucket, [{}]); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'allowedOrigin is required'); - } - }); - - it('should check allowedMethod', async () => { - try { - const rules = [ - { - allowedOrigin: '*', - }, - ]; - await store.putBucketCORS(bucket, rules); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'allowedMethod is required'); - } - }); - - it('should throw error when rules not exist', async () => { - try { - await store.getBucketCORS(bucket); - throw new Error('should not run'); - } catch (err) { - assert(err.message === 'The CORS Configuration does not exist.'); - } - }); - }); - - describe('putBucketRequestPayment(), getBucketRequestPayment()', () => { - it('should create, get the request payment', async () => { - try { - await store.putBucketRequestPayment(bucket, 'Requester'); - const result = await store.getBucketRequestPayment(bucket); - assert(result.payer === 'Requester', 'payer should be Requester'); - } catch (err) { - assert(false); - } - }); - - it('should throw error when payer is not BucketOwner or Requester', async () => { - try { - await store.putBucketRequestPayment(bucket, 'requester'); - } catch (err) { - assert(err.message.includes('payer must be BucketOwner or Requester')); - } - }); - }); - - describe('getBucketTags() putBucketTags() deleteBucketTags()', () => { - it('should get the tags of bucket', async () => { - try { - const result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - } catch (error) { - assert(false, error); - } - }); - - it('should configures or updates the tags of bucket', async () => { - let result; - try { - const tag = { a: '1', b: '2' }; - result = await store.putBucketTags(bucket, tag); - assert.strictEqual(result.status, 200); - - result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - } catch (error) { - assert(false, error); - } - - try { - const tag = { a: '1' }; - result = await store.putBucketTags(bucket, tag); - assert.strictEqual(result.status, 200); - - result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, tag); - } catch (error) { - assert(false, error); - } - }); - - it('maximum of 20 tags for a bucket', async () => { - try { - const tag = {}; - Array(21) - .fill(1) - .forEach((_, index) => { - tag[index] = index; - }); - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('maximum of 20 tags for a bucket', error.message); - } - }); - - it('tag key can be a maximum of 64 bytes in length', async () => { - try { - const key = new Array(65).fill('1').join(''); - const tag = { [key]: '1' }; - - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('tag key can be a maximum of 64 bytes in length', error.message); - } - }); - - it('tag value can be a maximum of 128 bytes in length', async () => { - try { - const value = new Array(129).fill('1').join(''); - const tag = { a: value }; - - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('tag value can be a maximum of 128 bytes in length', error.message); - } - }); - - it('should throw error when the type of tag is not Object', async () => { - try { - const tag = [{ a: 1 }]; - await store.putBucketTags(bucket, tag); - } catch 
(error) { - assert(error.message.includes('tag must be Object')); - } - }); - - it('should throw error when the type of tag value is number', async () => { - try { - const tag = { a: 1 }; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('the key and value of the tag must be String', error.message); - } - }); - - it('should throw error when the type of tag value is Object', async () => { - try { - const tag = { a: { inner: '1' } }; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('the key and value of the tag must be String', error.message); - } - }); - - it('should throw error when the type of tag value is Array', async () => { - try { - const tag = { a: [ '1', '2' ] }; - await store.putBucketTags(bucket, tag); - } catch (error) { - assert.strictEqual('the key and value of the tag must be String', error.message); - } - }); - - it('should delete the tags of bucket', async () => { - let result; - try { - const tag = { a: '1', b: '2' }; - await store.putBucketTags(bucket, tag); - - result = await store.deleteBucketTags(bucket); - assert.strictEqual(result.status, 204); - - result = await store.getBucketTags(bucket); - assert.strictEqual(result.status, 200); - assert.deepEqual(result.tag, {}); - } catch (error) { - assert(false, error); - } - }); - }); - - describe('putBucketEncryption(), getBucketEncryption(), deleteBucketEncryption()', () => { - it('should create, get and delete the bucket encryption', async () => { - // put with AES256 - const putresult1 = await store.putBucketEncryption(bucket, { - SSEAlgorithm: 'AES256', - }); - assert.equal(putresult1.res.status, 200); - // put again with KMS will be fine - // const putresult2 = await store.putBucketEncryption(bucket, { - // SSEAlgorithm: 'KMS', - // KMSMasterKeyID: '1b2c3132-b2ce-4ba3-a4dd-9885904099ad' - // }); - // assert.equal(putresult2.res.status, 200); - // await utils.sleep(ms(metaSyncTime)); - // get - const getBucketEncryption = await store.getBucketEncryption(bucket); - assert.equal(getBucketEncryption.res.status, 200); - assert.deepEqual(getBucketEncryption.encryption, { - SSEAlgorithm: 'AES256', - // KMSMasterKeyID: '1b2c3132-b2ce-4ba3-a4dd-9885904099ad' - }); - // delete - const deleteResult = await store.deleteBucketEncryption(bucket); - assert.equal(deleteResult.res.status, 204); - }); - }); - - describe('putBucketLifecycle()', () => { - // todo delete - it('should put the lifecycle with old api', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - days: 1, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration2', - prefix: 'logs/', - status: 'Enabled', - date: '2020-02-18T00:00:00.000Z', - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with expiration and id', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const getBucketLifecycle = await store.getBucketLifecycle(bucket); - assert(getBucketLifecycle.rules.length > 0 && getBucketLifecycle.rules.find(v => v.id === 'expiration1')); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'expiration2', - prefix: 'logs/', - status: 'Enabled', - expiration: { - createdBeforeDate: 
'2020-02-18T00:00:00.000Z', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with AbortMultipartUpload', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload1', - prefix: 'logs/', - status: 'Enabled', - abortMultipartUpload: { - days: 1, - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload2', - prefix: 'logs/', - status: 'Enabled', - abortMultipartUpload: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with empty prefix (whole bucket)', async () => { - const putresult = await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload1', - prefix: '', // empty prefix (whole bucket) - status: 'Enabled', - abortMultipartUpload: { - days: 1, - }, - }, - ]); - assert.equal(putresult.res.status, 200); - }); - - it('should put the lifecycle with Transition', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'logs/', - status: 'Enabled', - transition: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - storageClass: 'Archive', - }, - expiration: { - createdBeforeDate: '2020-02-17T00:00:00.000Z', - }, - tag: { - key: 'test', - value: '123', - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'logs/', - status: 'Enabled', - transition: { - days: 20, - storageClass: 'Archive', - }, - tag: { - key: 'test', - value: '123', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - }); - - it('should put the lifecycle with expiration and Tag', async () => { - const putresult1 = await store.putBucketLifecycle(bucket, [ - { - id: 'tag1', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - tag: { - key: 1, - value: '2', - }, - }, - ]); - assert.equal(putresult1.res.status, 200); - - const putresult2 = await store.putBucketLifecycle(bucket, [ - { - id: 'tag2', - prefix: 'logs/', - status: 'Enabled', - expiration: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - tag: { - key: 1, - value: '2', - }, - }, - ]); - assert.equal(putresult2.res.status, 200); - - const putresult3 = await store.putBucketLifecycle(bucket, [ - { - id: 'tag2', - prefix: 'logs/', - status: 'Enabled', - expiration: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - }, - tag: [ - { - key: 1, - value: '2', - }, - { - key: 'testkey', - value: 'testvalue', - }, - ], - }, - ]); - assert.equal(putresult3.res.status, 200); - }); - - it('should throw error when id more than 255 bytes ', async () => { - const testID = Array(256).fill('a').join(''); - try { - await store.putBucketLifecycle(bucket, [ - { - id: testID, - prefix: 'testid/', - status: 'Enabled', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('255')); - } - }); - - it('should throw error when no prefix', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'prefix', - status: 'Enabled', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('prefix')); - } - }); - - it('should throw error when status is not Enabled or Disabled', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'status', - prefix: 'fix/', - status: 'test', - }, - ]); - assert(false); - } catch (error) { - 
assert(error.message.includes('Enabled or Disabled')); - } - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'status', - prefix: 'fix/', - status: '', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('Enabled or Disabled')); - } - }); - - it('should throw error when storageClass is not Archive or IA', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'storageClass', - prefix: 'fix/', - status: 'Enabled', - transition: { - createdBeforeDate: '2020-02-18T00:00:00.000Z', - storageClass: 'test', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('IA or Archive')); - } - }); - - it('should throw error when transition must have days or createdBeforeDate', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'storageClass', - prefix: 'fix/', - status: 'Enabled', - transition: { - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('days or createdBeforeDate')); - } - }); - - it('should throw error when days of transition is not a positive integer', async () => { - const errorMessage = 'a positive integer'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - days: 1.1, - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - days: 'asd', - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when createdBeforeDate of transition is not iso8601 format', async () => { - const errorMessage = 'iso8601'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - createdBeforeDate: new Date().toISOString(), // eg: YYYY-MM-DDT00:00:00.000Z - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'transition', - prefix: 'fix/', - status: 'Enabled', - transition: { - createdBeforeDate: new Date().toString(), - storageClass: 'Archive', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when abortMultipartUpload must have days or createdBeforeDate', async () => { - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'storageClass', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: {}, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes('days or createdBeforeDate')); - } - }); - - it('should throw error when days of abortMultipartUpload is not a positive integer', async () => { - const errorMessage = 'a positive integer'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - days: 1.1, - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - days: 'a', - }, - }, - ]); - assert(false); - } catch (error) { - 
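// (A hedged summary sketch, not part of the original suite: it restates the
// validation constraints these putBucketLifecycle tests assert — id at most 255
// bytes; a prefix field present ('' targets the whole bucket); status of
// 'Enabled' or 'Disabled'; transition.storageClass of 'IA' or 'Archive' with a
// positive-integer days or a midnight-aligned iso8601 createdBeforeDate; at
// least one of expiration/abortMultipartUpload per rule; and no tag combined
// with abortMultipartUpload. A rule satisfying all of them would look like:)
//   { id: 'example', prefix: 'logs/', status: 'Enabled', expiration: { days: 7 } }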
assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when createdBeforeDate of abortMultipartUpload is not iso8601 format', async () => { - const errorMessage = 'iso8601'; - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - createdBeforeDate: new Date().toISOString(), // eg: YYYY-MM-DDT00:00:00.000Z - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - - try { - await store.putBucketLifecycle(bucket, [ - { - id: 'abortMultipartUpload', - prefix: 'fix/', - status: 'Enabled', - abortMultipartUpload: { - createdBeforeDate: new Date().toString(), // eg: YYYY-MM-DDT00:00:00.000Z - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when rule have no expiration or abortMultipartUpload', async () => { - const errorMessage = 'expiration or abortMultipartUpload'; - try { - await store.putBucketLifecycle(bucket, [ - { - prefix: 'expirationAndAbortMultipartUpload/', - status: 'Enabled', - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - - it('should throw error when tag is used with abortMultipartUpload', async () => { - const errorMessage = 'Tag cannot be used with abortMultipartUpload'; - try { - await store.putBucketLifecycle(bucket, [ - { - prefix: 'expirationAndAbortMultipartUpload/', - status: 'Enabled', - abortMultipartUpload: { - days: 1, - }, - expiration: { - days: 1, - }, - tag: { - value: '1', - key: 'test', - }, - }, - ]); - assert(false); - } catch (error) { - assert(error.message.includes(errorMessage)); - } - }); - }); - - describe('getBucketLifecycle()', () => { - it('should get the lifecycle', async () => { - const putresult = await store.putBucketLifecycle(bucket, [ - { - id: 'get_test', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - tag: [ - { - key: 'test', - value: '1', - }, - { - key: 'test1', - value: '2', - }, - ], - }, - ]); - assert.equal(putresult.res.status, 200); - - const getBucketLifecycle = await store.getBucketLifecycle(bucket); - assert(getBucketLifecycle.rules.length > 0); - assert.equal(getBucketLifecycle.res.status, 200); - }); - }); - - describe('deleteBucketLifecycle()', () => { - it('should delete the lifecycle', async () => { - const putresult = await store.putBucketLifecycle(bucket, [ - { - id: 'delete', - prefix: 'logs/', - status: 'Enabled', - expiration: { - days: 1, - }, - tag: [ - { - key: 'test', - value: '1', - }, - { - key: 'test1', - value: '2', - }, - ], - }, - ]); - assert.equal(putresult.res.status, 200); - - // delete it - const deleteResult = await store.deleteBucketLifecycle(bucket); - assert.equal(deleteResult.res.status, 204); - }); - }); - - describe('getBucketPolicy() putBucketPolicy() deleteBucketPolicy()', () => { - it('should put, get, delete, when policy is Object', async () => { - try { - const policy = { - Version: '1', - Statement: [ - { - Action: [ 'oss:PutObject', 'oss:GetObject' ], - Effect: 'Deny', - Principal: [ '1234567890' ], - Resource: [ 'acs:oss:*:1234567890:*/*' ], - }, - ], - }; - const result = await store.putBucketPolicy(bucket, policy); - assert.strictEqual(result.status, 200); - const result1 = await store.getBucketPolicy(bucket); - assert.deepStrictEqual(policy, result1.policy); - const result2 = await store.deleteBucketPolicy(bucket); - assert.strictEqual(result2.status, 
204); - const result3 = await store.getBucketPolicy(bucket); - assert.deepStrictEqual(null, result3.policy); - } catch (err) { - assert(false, err.message); - } - }); - it('should throw error, when policy is not Object', async () => { - try { - await store.putBucketPolicy(bucket, 'policy'); - assert(false); - } catch (err) { - assert(true); - } - }); - }); - describe('inventory()', () => { - const inventory = { - id: 'default', - isEnabled: false, - prefix: 'ttt', - OSSBucketDestination: { - format: 'CSV', - accountId: '1817184078010220', - rolename: 'AliyunOSSRole', - bucket, - prefix: 'test', - }, - frequency: 'Daily', - includedObjectVersions: 'All', - optionalFields: { - field: [ 'Size', 'LastModifiedDate' ], - }, - }; - - describe('putBucketInventory', () => { - before(() => { - inventory.OSSBucketDestination.bucket = bucket; - }); - it('should put bucket inventory', async () => { - try { - await store.putBucketInventory(bucket, inventory); - } catch (err) { - assert(false, err); - } - }); - it('should return inventory array when inventory is one config', async () => { - const inventoryRes = await store.listBucketInventory(bucket); - assert(Array.isArray(inventoryRes.inventoryList)); - assert(inventoryRes.inventoryList.length === 1); - assert.strictEqual(inventoryRes.status, 200); - }); - it('should put bucket inventory when no optionalFields or no field', async () => { - try { - inventory.id = 'test_optionalFields'; - delete inventory.optionalFields; - await store.putBucketInventory(bucket, inventory); - - inventory.id = 'test_field'; - inventory.optionalFields = {}; - await store.putBucketInventory(bucket, inventory); - - inventory.id = 'test_field_is_one'; - inventory.optionalFields = { - field: [ 'Size' ], - }; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - it('should put bucket inventory when no prefix', async () => { - try { - inventory.id = 'test_prefix'; - delete inventory.prefix; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - it('should put bucket inventory when no OSSBucketDestination prefix', async () => { - try { - inventory.id = 'test_OSSBucketDestination_prefix'; - delete inventory.OSSBucketDestination.prefix; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - it('should put bucket inventory when has encryption', async () => { - try { - inventory.id = 'test_encryption_SSE-OSS'; - inventory.OSSBucketDestination.encryption = { 'SSE-OSS': '' }; - await store.putBucketInventory(bucket, inventory); - assert(true); - } catch (err) { - assert(false, err); - } - }); - }); - describe('getBucketInventory', () => { - let testGetInventory; - it('should get bucket inventory by inventoryId', async () => { - try { - const result = await store.getBucketInventory(bucket, inventory.id); - testGetInventory = result.inventory; - assert(includesConf(testGetInventory, inventory)); - } catch (err) { - assert(false); - } - }); - it('should return Field array when Field value is one length Array', async () => { - try { - assert( - testGetInventory.optionalFields && - testGetInventory.optionalFields.field && - Array.isArray(testGetInventory.optionalFields.field) && - testGetInventory.optionalFields.field.length === 1 - ); - } catch (err) { - assert(false); - } - }); - }); - describe('listBucketInventory', () => { - before(async () => { - let _index = 0; - async function 
putInventoryList() { - await Promise.all( - new Array(1).fill(1).map(() => { - _index++; - return store.putBucketInventory(bucket, Object.assign({}, inventory, { id: `test_list_${_index}` })); - }) - ); - } - - await putInventoryList(); - }); - it('should list bucket inventory', async () => { - const inventoryRes = await store.listBucketInventory(bucket); - assert.strictEqual(inventoryRes.status, 200); - }); - }); - describe('deleteBucketInventory', () => { - it('should delete bucket inventory', async () => { - let inventoryList = []; - let isTruncated; - let continuationToken; - do { - const inventoryRes = await store.listBucketInventory(bucket, { continuationToken }); - inventoryList = [ ...inventoryList, ...inventoryRes.inventoryList ]; - isTruncated = inventoryRes.isTruncated; - continuationToken = inventoryRes.nextContinuationToken; - } while (isTruncated); - try { - // avoid QPS limit - do { - const list = inventoryList.splice(0, 10); - // eslint-disable-next-line no-loop-func - await Promise.all(list.map(_ => store.deleteBucketInventory(bucket, _.id))); - await utils.sleep(400); - } while (inventoryList.length); - assert(true); - } catch (err) { - assert(false, err); - } - }); - }); - - describe('bucket response status code', () => { - it('success getBucketInfo, status code should be 200', async () => { - const result = await store.getBucketInfo(bucket); - assert.equal(result.res.status, 200); - }); - it('no equivalent bucket, status code should be 404', async () => { - try { - await store.getBucketInfo('adasdasdxcvmxvnxvmdfsdfsdf'); - } catch (err) { - assert.equal(err.status, 404); - } - }); - it('bucket name already exists, status code should be 409', async () => { - try { - await store.putBucket(bucket); - } catch (err) { - assert.equal(err.status, 409); - } - }); - }); - describe('getBucketStat', () => { - it('should get bucket stat', async () => { - const result = await store.getBucketStat(bucket); - assert.equal(typeof result.stat, 'object'); - assert.equal(result.res.status, 200); - }); - }); - }); -}); diff --git a/test/bucket_worm.test.js b/test/bucket_worm.test.js deleted file mode 100644 index 8c5829a3c..000000000 --- a/test/bucket_worm.test.js +++ /dev/null @@ -1,80 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const timeout = require('./config').timeout; - -describe.skip('test/bucket_worm.test.js', () => { - const { prefix } = utils; - let store; - let bucket; - const defaultRegion = config.region; - before(async () => { - store = oss(config); - config.region = defaultRegion; - store = oss(config); - bucket = `oss-client-test-worm-bucket-worm-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - - const result = await store.putBucket(bucket, { timeout }); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - }); - // github CI will remove buckets - // restore object will have cache - // after(async () => { - // await utils.cleanBucket(store, bucket); - // }); - describe('worm()', () => { - describe('initiateBucketWorm()', () => { - it('should init bucket worm', async () => { - try { - await store.initiateBucketWorm(bucket, '1'); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('abortBucketWorm()', () => { - it('should abort bucket worm', async () => { - try { - await store.abortBucketWorm(bucket); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - 
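// (A minimal sketch of the WORM round trip these suites cover, using only calls
// exercised in this file; `store` and `bucket` are the fixtures set up above.)
//   const { wormId } = await store.initiateBucketWorm(bucket, '1'); // start a 1-day retention policy
//   await store.abortBucketWorm(bucket);                            // drop it while still in progress...
//   await store.completeBucketWorm(bucket, wormId);                 // ...or lock it in
//   const { days } = await store.getBucketWorm(bucket);             // read the locked policy back
//   await store.extendBucketWorm(bucket, wormId, (Number(days) + 1).toString()); // lengthen retention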
describe('completeBucketWorm(), getBucketWorm()', () => { - it('should complete bucket worm', async () => { - const { wormId } = await store.initiateBucketWorm(bucket, '1'); - try { - await store.completeBucketWorm(bucket, wormId); - assert(true); - } catch (error) { - assert(false, error); - } - - try { - const result = await store.getBucketWorm(bucket); - assert(result.wormId); - } catch (error) { - assert(false, error); - } - }); - }); - describe('extendBucketWorm()', () => { - it('should extend bucket worm', async () => { - try { - const { wormId, days } = await store.getBucketWorm(bucket); - await store.extendBucketWorm(bucket, wormId, (days * 1 + 1).toString()); - const result = await store.getBucketWorm(bucket); - assert(result.days - days === 1); - } catch (error) { - assert(false, error); - } - }); - }); - }); -}); diff --git a/test/bukcet_worm.test.js b/test/bukcet_worm.test.js deleted file mode 100644 index 96bfd5235..000000000 --- a/test/bukcet_worm.test.js +++ /dev/null @@ -1,87 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; -const { timeout } = require('./config'); - -describe.skip('test/bucket.test.js', () => { - const { prefix } = utils; - let store; - let bucket; - const defaultRegion = config.region; - before(async () => { - store = oss(config); - config.region = defaultRegion; - store = oss(config); - bucket = `oss-client-test-worm2-bucket-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - - const result = await store.putBucket(bucket, { timeout }); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - }); - - // github CI will remove buckets - // restore object will have cache - // after(async () => { - // await utils.cleanBucket(store, bucket); - // }); - - describe('worm()', () => { - describe('initiateBucketWorm()', () => { - it('should init bucket worm', async () => { - try { - await store.initiateBucketWorm(bucket, '1'); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('abortBucketWorm()', () => { - it('should abort bucket worm', async () => { - try { - await store.abortBucketWorm(bucket); - assert(true); - } catch (error) { - assert(false, error); - } - }); - }); - describe('completeBucketWorm(), getBucketWorm()', () => { - it('should complete bucket worm', async () => { - const { wormId } = await store.initiateBucketWorm(bucket, '1'); - try { - await store.completeBucketWorm(bucket, wormId); - assert(true); - } catch (error) { - assert(false, error); - } - - try { - const result = await store.getBucketWorm(bucket); - assert(result.wormId); - } catch (error) { - assert(false, error); - } - }); - }); - describe('extendBucketWorm()', () => { - it('should extend bucket worm', async () => { - try { - const { wormId, days } = await store.getBucketWorm(bucket); - await store.extendBucketWorm( - bucket, - wormId, - (days * 1 + 1).toString() - ); - const result = await store.getBucketWorm(bucket); - assert(result.days - days === 1); - } catch (error) { - assert(false, error); - } - }); - }); - }); - -}); diff --git a/test/client.test.js b/test/client.test.js deleted file mode 100644 index f8d1d101c..000000000 --- a/test/client.test.js +++ /dev/null @@ -1,396 +0,0 @@ -const assert = require('assert'); -const { Client } = require('..'); -const config = require('./config').oss; - -describe('test/client.test.js', () => { - it('init stsTokenFreshTime', () => { - const store = new 
Client(config); - const now = new Date(); - if (!store.stsTokenFreshTime) { - throw new Error('not init stsTokenFreshTime'); - } - assert(true, +now <= +store.stsTokenFreshTime); - }); - - it('should init with region', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://oss-cn-hangzhou.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - internal: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://oss-cn-hangzhou-internal.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - internal: true, - secure: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'https://oss-cn-hangzhou-internal.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'vpc100-oss-cn-beijing', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://vpc100-oss-cn-beijing.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'vpc100-oss-cn-shenzhen', - internal: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://vpc100-oss-cn-shenzhen.aliyuncs.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'https://vpc100-oss-cn-hangzhou.aliyuncs.com/' - ); - }); - - it('should init with cname: foo.bar.com', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'foo.bar.com', - cname: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://foo.bar.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://foo.bar.com', - cname: true, - }); - - assert.equal( - store.options.endpoint.format(), - 'http://foo.bar.com/' - ); - }); - - it('should init with endpoint: http://test.oss.com', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://test.oss.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://test.oss.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - secure: true, - endpoint: 'test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'https://test.oss.com/' - ); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'https://test.oss.com', - }); - - assert.equal( - store.options.endpoint.format(), - 'https://test.oss.com/' - ); - }); - - it('should init with ip address: http://127.0.0.1', () => { - const store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: '127.0.0.1', - }); - - assert.equal( - store.options.endpoint.format(), - 'http://127.0.0.1/' - ); - }); - - it('should create request url with bucket', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - }); - - let params = { - bucket: 'gems', - }; - - let url = store._getReqUrl(params); - assert.equal(url, 'http://gems.oss-cn-hangzhou.aliyuncs.com/'); 
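// (An assumed summary of the host resolution these assertions pin down, added
// for clarity: `_getReqUrl` prepends the bucket as a subdomain for region- and
// endpoint-style hosts, but leaves cname and IP endpoints untouched.)
//   gems + region oss-cn-hangzhou     -> http://gems.oss-cn-hangzhou.aliyuncs.com/
//   gems + endpoint test.oss.com      -> http://gems.test.oss.com/
//   gems + foo.bar.com (cname: true)  -> http://foo.bar.com/
//   gems + http://127.0.0.1:6000      -> http://127.0.0.1:6000/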
- - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'test.oss.com', - }); - - params = { - bucket: 'gems', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://gems.test.oss.com/'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'foo.bar.com', - cname: true, - }); - - params = { - bucket: 'gems', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://foo.bar.com/'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://127.0.0.1:6000', - }); - - params = { - bucket: 'gems', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://127.0.0.1:6000/'); - }); - - it('should create request url with bucket/object/subres', () => { - let store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - }); - - let params = { - bucket: 'gems', - object: 'hello', - }; - - let url = store._getReqUrl(params); - assert.equal(url, 'http://gems.oss-cn-hangzhou.aliyuncs.com/hello'); - - params = { - bucket: 'gems', - object: 'hello', - subres: { acl: '', mime: '' }, - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://gems.oss-cn-hangzhou.aliyuncs.com/hello?acl=&mime='); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'test.oss.com', - }); - - params = { - bucket: 'gems', - object: 'hello', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://gems.test.oss.com/hello'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'foo.bar.com', - cname: true, - }); - - params = { - bucket: 'gems', - object: 'hello', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://foo.bar.com/hello'); - - store = new Client({ - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'http://127.0.0.1:3000', - }); - - params = { - bucket: 'gems', - object: 'hello', - }; - - url = store._getReqUrl(params); - assert.equal(url, 'http://127.0.0.1:3000/hello'); - }); - - it('should set User-Agent', async () => { - const store = new Client(config); - store.useBucket(config.bucket); - const result = await store.getBucketInfo(); - assert.equal(result.res.status, 200); - assert(result.bucket.Name === config.bucket); - }); - - it('should trim access id/key', () => { - const store = new Client({ - accessKeyId: ' \tfoo\t\n ', - accessKeySecret: ' \tbar\n\r ', - region: 'oss-cn-hangzhou', - }); - - assert.equal(store.options.accessKeyId, 'foo'); - assert.equal(store.options.accessKeySecret, 'bar'); - }); - - describe('checkConfigValid', () => { - it('should success when endpoint is invalid', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - } catch (error) { - assert(false); - } - }); - it('should throw when endpoint includes invalid character', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - assert(false); - } catch (error) { - assert(error.message.includes('endpoint')); - } - }); - it('should throw when endpoint change to invalid character', async () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - endpoint: 'vpc100-oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - const store = new Client(checkConfig); - 
const invalidHost = 'vpc100-oss-cn-hangzhou.《》.com'; - store.options.endpoint.host = invalidHost; - store.options.endpoint.hostname = invalidHost; - await store.listBuckets(); - assert(false); - } catch (error) { - assert(error.message.includes('endpoint')); - } - }); - it('should succeed when region is valid', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - } catch (error) { - assert(false); - } - }); - it('should throw when region includes invalid character', () => { - const checkConfig = { - accessKeyId: 'foo', - accessKeySecret: 'bar', - region: 'oss-cn-?hangzhou', - internal: true, - secure: true, - }; - try { - new Client(checkConfig); - assert(false); - } catch (error) { - assert(error.message.includes('region')); - } - }); - }); -}); diff --git a/test/dataFix.test.js b/test/dataFix.test.js deleted file mode 100644 index cd1d7921a..000000000 --- a/test/dataFix.test.js +++ /dev/null @@ -1,225 +0,0 @@ -const assert = require('assert'); -const { dataFix } = require('../lib/common/utils/dataFix'); -const { sleep } = require('./utils'); - -describe('dataFix()', () => { - before(async () => { - await sleep(1000); - }); - describe('data is not object', () => { - it('should return without handling', () => { - const data = 'string'; - - const conf = { - remove: [ 'rm', 'rm2' ], - }; - dataFix(data, conf); - }); - }); - - describe('remove : array - remove unwanted props', () => { - it('should remove what is not needed', () => { - const data = { - rmNot: 'do NOT remove me', - rm: [], - rm2: 'whatever value does NOT matter', - }; - - const conf = { - remove: [ 'rm', 'rm2' ], - }; - - dataFix(data, conf); - - assert(!conf.remove.find(_ => Object.prototype.hasOwnProperty.call(data, _))); - assert(Object.prototype.hasOwnProperty.call(data, 'rmNot')); - }); - }); - - describe('lowerFirst : boolean - turn key into first-letter-lower-case', () => { - const One = 'One'; - const Another = 'Another'; - const Both = 'Both'; - const both = 'both'; - const data = { - One, - Another, - Both, - both, - }; - - dataFix(data, { - lowerFirst: true, - }); - - it('should convert and remove the old keys', () => { - assert(!data.One); - assert(!data.Another); - assert(data.one); - assert(data.another); - }); - - it('should not convert if the lower-cased key would replace an existing one', () => { - assert.strictEqual(Both, data.Both); - assert.strictEqual(both, data.both); - }); - }); - - describe('bool : array - turn values into boolean if can be converted', () => { - const cannotConvertNumber2 = 2; - const cannotConvertOtherString = 'cannot convert'; - const data = { - trueB: true, - trueL: 'true', - trueU: 'TRUE', - true1: '1', - true1N: 1, - falseB: false, - falseL: 'false', - falseU: 'FALSE', - false0: '0', - false0N: 0, - falseNull: null, - cannotConvertNumber2, - cannotConvertOtherString, - }; - - dataFix(data, { - bool: [ - 'trueB', - 'trueL', - 'trueU', - 'true1', - 'true1N', - 'falseB', - 'falseL', - 'falseU', - 'false0', - 'false0N', - 'falseNull', - 'cannotConvertNumber2', - 'cannotConvertOtherString', - 'nonExist', - ], - }); - - it('should keep boolean true/false as boolean', () => { - assert.strictEqual(data.trueB, true); - assert.strictEqual(data.falseB, false); - }); - - it('should convert true TRUE 1 (number or string) to boolean true', () => { - assert.strictEqual(data.trueL, true); - assert.strictEqual(data.trueU, true); - assert.strictEqual(data.true1, true); - 
assert.strictEqual(data.true1N, true); - }); - - it('should convert false FALSE 0 (number or string) to boolean false', () => { - assert.strictEqual(data.falseL, false); - assert.strictEqual(data.falseU, false); - assert.strictEqual(data.false0, false); - assert.strictEqual(data.false0N, false); - }); - - it('should convert null / undefined to false', () => { - assert.strictEqual(data.falseNull, false); - assert.strictEqual(data.nonExist, false); - }); - - it('should leave values that cannot be converted as is', () => { - assert.strictEqual(cannotConvertNumber2, data.cannotConvertNumber2); - assert.strictEqual( - cannotConvertOtherString, - data.cannotConvertOtherString - ); - }); - }); - - describe('rename : object - rename bad prop keys into better names', () => { - const existValue = 123456; - const renameToAlready = 'rename to already'; - const alreadyExist = 'already'; - const data = { - existValue, - renameToAlready, - alreadyExist, - }; - - dataFix(data, { - rename: { - existValue: 'existValueRenamed', - nonExistValue: 'nonExistValueRenamed', - renameToAlready: 'alreadyExist', - }, - }); - - it('should replace existing values with the new name and the same value', () => { - assert(!data.existValue); - assert.strictEqual(data.existValueRenamed, existValue); - }); - - it('should not add prop when the prop-to-be-renamed does NOT exist', () => { - assert(!data.nonExistValueRenamed); - assert(!data.nonExistValue); - }); - - it('should not rename if the target name already exists', () => { - assert.strictEqual(data.alreadyExist, alreadyExist); - assert.strictEqual(data.renameToAlready, renameToAlready); - }); - }); - - describe('camel : array - turn key into camel string', () => { - const Both = 'Both'; - const both = 'bothBoth'; - const data = { - One: 'One', - 'Another-another': 'Another-another', - 'Both-both': Both, - bothBoth: both, - }; - - dataFix(data, { - camel: [ ...Object.keys(data), 'noExistkey' ], - }); - - it('should convert and remove the old keys', () => { - assert(data.one); - assert(data.anotherAnother); - }); - - it('should not convert if the camel-cased key would replace an existing one', () => { - assert.strictEqual(Both, data['Both-both']); - assert.strictEqual(both, data.bothBoth); - }); - - it('should not convert if the original key does not exist', () => { - // eslint-disable-next-line no-prototype-builtins - assert(!data.hasOwnProperty('NoExistkey')); - }); - }); - - describe('finalKill: function', () => { - it('should correctly fix data', () => { - const data = { - test: 1, - test1: 2, - needDelete: 'del', - needDelete1: 'del', - }; - - const delKey = 'needDelete'; - const addKey = 'addKey'; - dataFix(data, {}, o => { - Object.keys(o).forEach(_ => { - if (_.includes(delKey)) delete o[_]; - }); - o[addKey] = addKey; - }); - - assert(!Object.keys(data).find(_ => _.includes(delKey))); - assert.strictEqual(data.addKey, addKey); - }); - }); -}); diff --git a/test/multipart.test.js b/test/multipart.test.js deleted file mode 100644 index 47c6a4f57..000000000 --- a/test/multipart.test.js +++ /dev/null @@ -1,869 +0,0 @@ -const fs = require('fs'); -const assert = require('assert'); -const { md5 } = require('utility'); -const mm = require('mm'); -const sinon = require('sinon'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; - -describe('test/multipart.test.js', () => { - // only run on v18 - if (!process.version.startsWith('v18.')) return; - - const { prefix } = utils; - const bucket = config.bucket; - let store; - before(async () => { - store = oss(config); - 
store.useBucket(bucket); - }); - - describe('listUploads()', () => { - beforeEach(async () => { - const result = await store.listUploads({ - 'max-uploads': 1000, - }); - const uploads = result.uploads || []; - await Promise.all(uploads.map(_ => store.abortMultipartUpload(_.name, _.uploadId))); - }); - - it('should list by key marker', async () => { - const name = `${prefix}multipart/list-key`; - const ids = ( - await Promise.all( - Array(5) - .fill(1) - .map((v, i) => store.initMultipartUpload(name + i)) - ) - ).map(_ => _.uploadId); - // list all uploads - let result = await store.listUploads({ - 'max-uploads': 10, - }); - const all = result.uploads.map(up => up.uploadId); - assert.deepEqual(all, ids); - - // after 1 - result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': name + 0, - }); - const after1 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after1, ids.slice(1)); - - // after 5 - result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': name + 4, - }); - const after5 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after5.length, 0); - }); - - it('should list by id marker', async () => { - const name = `${prefix}multipart/list-id`; - const ids = ( - await Promise.all( - Array(5) - .fill(1) - .map(_ => store.initMultipartUpload(name)) - ) - ) - .map(_ => _.uploadId) - .sort(); - - // list all uploads - let result = await store.listUploads({ - 'max-uploads': 10, - }); - const all = result.uploads.map(up => up.uploadId); - assert.deepEqual(all, ids); - - // after 1: upload id marker alone is ignored - result = await store.listUploads({ - 'max-uploads': 10, - 'upload-id-marker': ids[1], - }); - const after1 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after1, ids); - - // after 5: upload id marker alone is ignored - result = await store.listUploads({ - 'max-uploads': 10, - 'upload-id-marker': ids[4], - }); - const after5 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after5, ids); - }); - - it('should list by id & key marker', async () => { - const fooName = `${prefix}multipart/list-foo`; - const fooIds = ( - await Promise.all( - Array(5) - .fill(1) - .map(_ => store.initMultipartUpload(fooName)) - ) - ) - .map(_ => _.uploadId) - .sort(); - - const barName = `${prefix}multipart/list-bar`; - const barIds = ( - await Promise.all( - Array(5) - .fill(5) - .map(_ => store.initMultipartUpload(barName)) - ) - ) - .map(_ => _.uploadId) - .sort(); - - // after 1 - let result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': barName, - 'upload-id-marker': barIds[0], - }); - const after1 = result.uploads.map(up => up.uploadId); - after1.sort(); - const sort1 = barIds.slice(1).concat(fooIds).sort(); - assert.deepEqual(after1, sort1); - - // after 5 - result = await store.listUploads({ - 'max-uploads': 10, - 'key-marker': barName, - 'upload-id-marker': barIds[4], - }); - const after5 = result.uploads.map(up => up.uploadId); - assert.deepEqual(after5, fooIds); - }); - }); - - describe('multipartUpload()', () => { - afterEach(mm.restore); - - it('should initMultipartUpload with x-oss-server-side-encryption', async () => { - const name = 'multipart-x-oss-server-side-encryption'; - const result = await store.initMultipartUpload(name, { - headers: { - 'x-oss-server-side-encryption': 'AES256', - }, - }); - - assert.equal(result.res.headers['x-oss-server-side-encryption'], 'AES256'); - }); - - it('should multipartUpload with x-oss-server-side-encryption', async () => { - const name = 
'multipart-x-oss-server-side-encryption'; - const fileName = await utils.createTempFile('multipart-fallback', 1003 * 1020); - const result = await store.multipartUpload(name, fileName, { - headers: { - 'x-oss-server-side-encryption': 'KMS', - }, - }); - assert.equal(result.res.headers['x-oss-server-side-encryption'], 'KMS'); - }); - - it('should fallback to putStream when file size is smaller than 100KB', async () => { - const fileName = await utils.createTempFile('multipart-fallback', 100 * 1024 - 1); - const name = `${prefix}multipart/fallback`; - let progress = 0; - - const putStreamSpy = sinon.spy(store, 'putStream'); - const uploadPartSpy = sinon.spy(store, '_uploadPart'); - - const result = await store.multipartUpload(name, fileName, { - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(putStreamSpy.callCount, 1); - assert.equal(uploadPartSpy.callCount, 0); - assert.equal(progress, 1); - - assert.equal(typeof result.bucket, 'string'); - assert.equal(typeof result.etag, 'string'); - - store.putStream.restore(); - store._uploadPart.restore(); - }); - - it('should use default partSize when not specified', () => { - const partSize = store._getPartSize(1024 * 1024, null); - assert.equal(partSize, 1 * 1024 * 1024); - }); - - it('should use user specified partSize', () => { - const partSize = store._getPartSize(1024 * 1024, 200 * 1024); - assert.equal(partSize, 200 * 1024); - }); - - it('should not exceeds max part number', () => { - const fileSize = 10 * 1024 * 1024 * 1024; - const maxNumParts = 10 * 1000; - - const partSize = store._getPartSize(fileSize, 100 * 1024); - assert.equal(partSize, Math.ceil(fileSize / maxNumParts)); - }); - - it('should upload file using multipart upload', async () => { - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file`; - let progress = 0; - const result = await store.multipartUpload(name, fileName, { - partSize: 100 * 1024, - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(progress, 13); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - const fileBuf = fs.readFileSync(fileName); - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should upload file using multipart upload with exception', async () => { - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file-exception`; - const clientTmp = oss(config); - clientTmp.useBucket(bucket); - - const stubUploadPart = sinon.stub(clientTmp, '_uploadPart'); - stubUploadPart.throws('TestUploadPartException'); - - let errorMsg; - let errPartNum; - try { - await clientTmp.multipartUpload(name, fileName); - } catch (err) { - errorMsg = err.message; - errPartNum = err.partNum; - } - assert.equal(errorMsg, 'Failed to upload some parts with error: TestUploadPartException part_num: 1'); - assert.equal(errPartNum, 1); - clientTmp._uploadPart.restore(); - }); - - it('should upload Node.js Buffer using multipart upload', async () => { - // create a buffer with 1M random data - const fileName = await utils.createTempFile('multipart-upload-buffer', 1024 * 1024); - const fileBuf = fs.readFileSync(fileName); - - const name = 
`${prefix}multipart/upload-buffer`; - const result = await store.multipartUpload(name, fileBuf, { - partSize: 100 * 1024, - }); - - assert.equal(result.res.status, 200); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should resume Node.js Buffer upload using checkpoint', async () => { - const uploadPart = store._uploadPart; - mm(store, '_uploadPart', function* (name, uploadId, partNo, data) { - if (partNo === 5) { - throw new Error('mock upload part fail.'); - } else { - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-buffer', 1024 * 1024); - const fileBuf = fs.readFileSync(fileName); - - const name = `${prefix}multipart/upload-buffer`; - let lastCpt = {}; - let progress = 0; - try { - await store.multipartUpload(name, fileBuf, { - partSize: 100 * 1024, - progress(percent, cpt) { - progress++; - lastCpt = cpt; - }, - }); - // should not succeed - assert(false); - } catch (err) { - // pass - } - - mm.restore(); - const result = await store.multipartUpload(name, fileBuf, { - checkpoint: lastCpt, - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(progress, 13); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should resume upload using checkpoint', async () => { - const uploadPart = store._uploadPart; - mm(store, '_uploadPart', function* (name, uploadId, partNo, data) { - if (partNo === 5) { - throw new Error('mock upload part fail.'); - } else { - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file`; - const cptFile = '/tmp/.oss/cpt.json'; - let progress = 0; - try { - await store.multipartUpload(name, fileName, { - partSize: 100 * 1024, - progress(percent, cpt) { - progress++; - fs.writeFileSync(cptFile, JSON.stringify(cpt)); - }, - }); - // should not succeed - assert(false); - } catch (err) { - // pass - } - - mm.restore(); - const result = await store.multipartUpload(name, fileName, { - checkpoint: JSON.parse(fs.readFileSync(cptFile)), - progress() { - progress++; - }, - }); - assert.equal(result.res.status, 200); - assert.equal(progress, 13); - - const object = await store.get(name); - assert.equal(object.res.status, 200); - const fileBuf = fs.readFileSync(fileName); - assert.equal(object.content.length, fileBuf.length); - // avoid comparing buffers directly for it may hang when generating diffs - assert.deepEqual(md5(object.content), md5(fileBuf)); - }); - - it('should return requestId in init, upload part, complete', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const result = await store.multipartUpload(name, fileName, { - progress(p, checkpoint, res) { - assert.equal(true, res && Object.keys(res).length !== 0); - }, - }); - assert.equal(true, result.res 
&& Object.keys(result.res).length !== 0); - assert.equal(result.res.status, 200); - }); - - it('should upload with uploadPart', async () => { - const fileName = await utils.createTempFile('upload-with-upload-part', 10 * 100 * 1024); - - const name = `${prefix}multipart/upload-with-upload-part`; - - const init = await store.initMultipartUpload(name); - const { uploadId } = init; - const partSize = 100 * 1024; - const parts = await Promise.all( - Array(10) - .fill(1) - .map((v, i) => - store.uploadPart( - name, - uploadId, - i + 1, - fileName, - i * partSize, - Math.min((i + 1) * partSize, 10 * 100 * 1024) - ) - ) - ); - const dones = parts.map((_, i) => ({ - number: i + 1, - etag: _.etag, - })); - - const result = await store.completeMultipartUpload(name, uploadId, dones); - assert.equal(result.res.status, 200); - assert(result.data.Location.startsWith('https://')); - assert.equal(typeof result.data.Bucket, 'string'); - assert.equal(result.data.Key, name); - assert.equal(typeof result.data.ETag, 'string'); - }); - - it('should require partSize to be an integer not smaller than minPartSize', async () => { - // create a file with 1M random data - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - - const name = `${prefix}multipart/upload-file`; - let progress = 0; - try { - const result = await store.multipartUpload(name, fileName, { - partSize: 14.56, - progress() { - progress++; - }, - }); - } catch (e) { - assert.equal('partSize must be int number', e.message); - } - - try { - await store.multipartUpload(name, fileName, { - partSize: 1, - progress() { - progress++; - }, - }); - } catch (e) { - assert.ok(e.message.startsWith('partSize must not be smaller')); - } - }); - - it('should skip doneParts when re-uploading multipart files', async () => { - const PART_SIZE = 1024 * 100; - const FILE_SIZE = 1024 * 500; - const SUSPENSION_LIMIT = 3; - const object = `multipart-${Date.now()}`; - const fileName = await utils.createTempFile(object, FILE_SIZE); - const uploadPart = store._uploadPart; - let checkpoint; - mm(store, '_uploadPart', function(name, uploadId, partNo, data) { - if (partNo === SUSPENSION_LIMIT) { - throw new Error('mock upload part fail.'); - } else { - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - try { - await store.multipartUpload(object, fileName, { - parallel: 1, - partSize: PART_SIZE, - progress: (percentage, c) => { - checkpoint = c; - }, - }); - } catch (e) { - assert.strictEqual(checkpoint.doneParts.length, SUSPENSION_LIMIT - 1); - } - mm.restore(); - const uploadPartSpy = sinon.spy(store, '_uploadPart'); - await store.multipartUpload(object, fileName, { - parallel: 1, - partSize: PART_SIZE, - checkpoint, - }); - assert.strictEqual(uploadPartSpy.callCount, FILE_SIZE / PART_SIZE - SUSPENSION_LIMIT + 1); - store._uploadPart.restore(); - }); - }); - - describe('requestError()', () => { - it('should request timeout exception', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const stubNetError = sinon.stub(store.urllib, 'request'); - const netErr = new Error('TestTimeoutErrorException'); - netErr.status = -2; - netErr.code = 'ConnectionTimeoutError'; - netErr.name = 'ConnectionTimeoutError'; - stubNetError.throws(netErr); - let timeoutErr; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - timeoutErr = err; - } - - assert.equal(true, timeoutErr && Object.keys(timeoutErr).length !== 0); - 
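// (A worked example, assumed from the partSize assertions above and the
// _getPartSize tests earlier in this file: the requested partSize must be an
// integer no smaller than the 102,400-byte minimum, and is raised whenever it
// would exceed the 10,000-part cap.)
//   fileSize = 10 GiB, requested partSize = 100 KiB
//   => 10 GiB / 100 KiB = 104,857.6 parts, over the 10,000-part cap
//   => effective partSize = Math.ceil(fileSize / 10000) bytes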
assert.equal(timeoutErr.status, -2); - store.urllib.request.restore(); - }); - - it('should request net exception', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const stubNetError = sinon.stub(store.urllib, 'request'); - const netErr = new Error('TestNetErrorException'); - netErr.status = -1; - netErr.code = 'RequestError'; - netErr.name = 'RequestError'; - stubNetError.throws(netErr); - - let netErrs; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - netErrs = err; - } - - assert.equal(true, netErr && Object.keys(netErrs).length !== 0); - assert.equal(netErrs.status, -1); - store.urllib.request.restore(); - }); - - it('should request throw ResponseTimeoutError', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - - const stubNetError = sinon.stub(store.urllib, 'request'); - const netErr = new Error('ResponseTimeoutError'); - netErr.status = -1; - netErr.code = 'ResponseTimeoutError'; - netErr.name = 'ResponseTimeoutError'; - stubNetError.throws(netErr); - - let netErrs; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - netErrs = err; - } - assert.strictEqual(netErrs.name, 'ResponseTimeoutError'); - store.urllib.request.restore(); - }); - - it('should request throw abort event', async () => { - const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); // 1m - const name = `${prefix}multipart/upload-file`; - const stubNetError = sinon.stub(store, '_uploadPart'); - const netErr = new Error('Not Found'); - netErr.status = 404; - netErr.code = 'Not Found'; - netErr.name = 'Not Found'; - stubNetError.throws(netErr); - let netErrs; - try { - await store.multipartUpload(name, fileName); - } catch (err) { - netErrs = err; - } - assert.strictEqual(netErrs.status, 0); - assert.strictEqual(netErrs.name, 'abort'); - store._uploadPart.restore(); - }); - }); - - describe('multipartCopy()', () => { - let fileName; - let name; - before(async () => { - fileName = await utils.createTempFile('multipart-upload-file-copy', 2 * 1024 * 1024); - name = `${prefix}multipart/upload-file-with-copy`; - await store.multipartUpload(name, fileName); - }); - - it('should multipart copy copy size err', async () => { - const file = await utils.createTempFile('multipart-upload-file', 50 * 1024); - const objectKey = `${prefix}multipart/upload-file-with-copy-small`; - await store.multipartUpload(objectKey, file); - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-small-new`; - let copyErr = null; - try { - await client.multipartUploadCopy(copyName, { - sourceKey: objectKey, - sourceBucketName: bucket, - }); - } catch (err) { - copyErr = err; - } - - assert.equal(copyErr.message, 'copySize must not be smaller than 102400'); - }); - - it('should multipart copy part size err', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - let partSizeErr = null; - try { - await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 50 * 1024, - } - ); - } catch (err) { - partSizeErr = err; - } - - assert.equal(partSizeErr.message, 'partSize must not be smaller than 102400'); - }); - - it('should copy with upload part copy', async () => { - const client = store; - - // create a file with 1M random data - const 
fileNamez = await utils.createTempFile('multipart-upload-file-temp-copy', 10 * 100 * 1024); - - const key = `${prefix}multipart/upload-file-temp-copy`; - await client.multipartUpload(key, fileNamez); - - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - const sourceData = { - sourceKey: name, - sourceBucketName: bucket, - }; - const objectMeta = await client._getObjectMeta(sourceData.sourceBucketName, sourceData.sourceKey, {}); - const fileSize = objectMeta.res.headers['content-length']; - - const result = await client.initMultipartUpload(copyName); - - const partSize = 100 * 1024; // 100kb - const dones = []; - const uploadFn = async i => { - const start = partSize * (i - 1); - const end = Math.min(start + partSize, fileSize); - const range = `${start}-${end - 1}`; - const part = await store.uploadPartCopy(copyName, result.uploadId, i, range, sourceData, {}); - dones.push({ - number: i, - etag: part.res.headers.etag, - }); - }; - - await Promise.all( - Array(10) - .fill(1) - .map((v, i) => uploadFn(i + 1)) - ); - - const complete = await client.completeMultipartUpload(copyName, result.uploadId, dones); - - assert.equal(complete.res.status, 200); - }); - - it('should copy with multipart upload copy', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-new`; - const result = await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 256 * 1024, - } - ); - - assert.equal(result.res.status, 200); - }); - - it('should multipart upload copy with parallel = 1', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-parallel-1`; - const result = await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 256 * 1024, - parallel: 1, - } - ); - - assert.equal(result.res.status, 200); - }); - - it('should multipart copy with cancel and resume', async () => { - const client = store; - const copyName = `${prefix}multipart/upload-file-with-copy-cancel`; - let tempCheckpoint = null; - try { - await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 100 * 1024, - progress(p, checkpoint) { - tempCheckpoint = checkpoint; - if (p > 0.5) { - client.cancel(); - } - }, - } - ); - } catch (err) { - assert.equal(client.isCancel(), true); - } - - const result = await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - partSize: 100 * 1024, - checkpoint: tempCheckpoint, - progress(p) { - assert.equal(p > 0.5, true); - }, - } - ); - - assert.equal(result.res.status, 200); - }); - - it('should multipart copy with exception', async () => { - const copyName = `${prefix}multipart/upload-file-with-copy-exception`; - const clientTmp = oss(config); - clientTmp.useBucket(bucket); - /* eslint no-unused-vars: [0] */ - const stubUploadPart = sinon.stub( - clientTmp, - 'uploadPartCopy', - async (objectKey, uploadId, partNo, range, sourceData, options) => { - if (partNo === 1) { - throw new Error('TestErrorException'); - } - } - ); - - let errorMsg; - let errPartNum; - try { - await clientTmp.multipartUploadCopy(copyName, { - sourceKey: name, - sourceBucketName: bucket, - }); - } catch (err) { - errorMsg = err.message; - errPartNum = err.partNum; - } - assert.equal(errorMsg, 'Failed to copy some parts with error: Error: TestErrorException part_num: 1'); - assert.equal(errPartNum, 1); - 
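// (A note on the inclusive Range arithmetic in the uploadPartCopy test above,
// added for clarity and not part of the original suite: part i of size P covers
// bytes (i - 1) * P through min(i * P, fileSize) - 1.)
//   e.g. with P = 102400: part 1 -> '0-102399', part 2 -> '102400-204799'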
stubUploadPart.restore(); - }); - - it('should upload copy with list part', async () => { - const tempFileName = await utils.createTempFile('multipart-upload-list-part', 2 * 1024 * 1024); - const tempName = `${prefix}multipart/upload-list-part`; - await store.multipartUpload(tempName, tempFileName); - const client = store; - const copyName = `${prefix}multipart/upload-list-part-copy`; - let uploadIdz = null; - try { - await client.multipartUploadCopy( - copyName, - { - sourceKey: name, - sourceBucketName: bucket, - }, - { - parallel: 1, - partSize: 100 * 1024, - progress(p, checkpoint) { - if (p === 0) { - uploadIdz = checkpoint.uploadId; - } - if (p > 0.5) { - client.cancel(); - } - }, - } - ); - } catch (err) { - /* eslint no-empty: [0] */ - } - - const result = await store.listParts( - copyName, - uploadIdz, - { - 'max-parts': 1000, - }, - {} - ); - - assert.equal(result.res.status, 200); - }); - }); - - describe('multipartUploadStreams', () => { - afterEach(mm.restore); - it('multipartUploadStreams.length', async () => { - const uploadPart = store._uploadPart; - let i = 0; - const LIMIT = 1; - mm(store, '_uploadPart', function(name, uploadId, partNo, data) { - if (i === LIMIT) { - throw new Error('mock upload part fail.'); - } else { - i++; - return uploadPart.call(this, name, uploadId, partNo, data); - } - }); - - const fileName = await utils.createTempFile(`multipart-upload-file-${Date.now()}`, 1024 * 1024); - const name = `${prefix}multipart/upload-file-${Date.now()}`; - const name1 = `${prefix}multipart/upload-file-1-${Date.now()}`; - try { - await Promise.all([ store.multipartUpload(name, fileName), store.multipartUpload(name1, fileName) ]); - } catch (e) {} - mm.restore(); - await Promise.all([ store.multipartUpload(name, fileName), store.multipartUpload(name1, fileName) ]); - assert.strictEqual(store.multipartUploadStreams.length, 0); - }); - - it('destroy the stream when multipartUploaded and the cancel method is called', async () => { - const fileName = await utils.createTempFile(`multipart-upload-file-${Date.now()}`, 1024 * 1024); - let stream; - mm(store, '_uploadPart', (_name, _uploadId, _partNo, data) => { - stream = data.stream; - throw new Error('mock upload part fail.'); - }); - - const name = `${prefix}multipart/upload-file-${Date.now()}`; - try { - await store.multipartUpload(name, fileName); - } catch (e) { - store.cancel(); - } - mm.restore(); - assert.strictEqual(stream.destroyed, true); - }); - }); -}); diff --git a/test/rtmp.test.js b/test/rtmp.test.js deleted file mode 100644 index e3c471e45..000000000 --- a/test/rtmp.test.js +++ /dev/null @@ -1,219 +0,0 @@ -const assert = require('assert'); -const utils = require('./utils'); -const oss = require('..'); -const config = require('./config').oss; - -describe.skip('test/rtmp.test.js', () => { - const { prefix } = utils; - let store; - let bucket; - let bucketRegion; - let cid; - let conf; - before(async () => { - store = oss(config); - bucket = `oss-client-test-bucket-rtmp-${prefix.replace(/[/.]/g, '-')}`; - bucket = bucket.substring(0, bucket.length - 1); - store.useBucket(bucket); - - const result = await store.putBucket(bucket, bucketRegion); - assert.equal(result.bucket, bucket); - assert.equal(result.res.status, 200); - - cid = 'channel-1'; - conf = { - Description: 'this is channel 1', - Status: 'enabled', - Target: { - Type: 'HLS', - FragDuration: '10', - FragCount: '5', - PlaylistName: 'playlist.m3u8', - }, - }; - }); - - describe('put/get/deleteChannel()', () => { - it('should create a new channel', async 
() => { - const tempCid = cid; - const tempConf = conf; - - let result = await store.putChannel(tempCid, tempConf); - assert.equal(result.res.status, 200); - assert(Array.isArray(result.publishUrls)); - assert(result.publishUrls.length > 0); - assert(Array.isArray(result.playUrls)); - assert(result.playUrls.length > 0); - - result = await store.getChannel(tempCid); - assert.equal(result.res.status, 200); - assert.deepEqual(result.data, conf); - - result = await store.deleteChannel(tempCid); - assert.equal(result.res.status, 204); - - await utils.throws(async () => { - await store.getChannel(tempCid); - }, err => { - assert.equal(err.status, 404); - }); - }); - }); - - describe('put/getChannelStatus()', () => { - let statusConfCid; - before(async () => { - statusConfCid = 'live channel 2'; - const statusConf = conf; - statusConf.Description = 'this is live channel 2'; - await store.putChannel(statusConfCid, statusConf); - }); - - after(async () => { - await store.deleteChannel(statusConfCid); - }); - - it('should disable channel', async () => { - let result = await store.getChannelStatus(statusConfCid); - assert.equal(result.res.status, 200); - assert.equal(result.data.Status, 'Idle'); - - // TODO: verify ConnectedTime/RemoteAddr/Video/Audio when not idle - - result = await store.putChannelStatus(statusConfCid, 'disabled'); - assert.equal(result.res.status, 200); - - result = await store.getChannelStatus(statusConfCid); - assert.equal(result.res.status, 200); - assert.equal(result.data.Status, 'Disabled'); - }); - }); - - describe('listChannels()', () => { - let channelNum; - let channelPrefix; - before(async () => { - channelNum = 10; - channelPrefix = 'channel-list-'; - await Promise.all(Array(channelNum).fill(1).map((_, i) => { - conf.Description = i; - return store.putChannel(channelPrefix + i, conf); - })); - }); - - after(async () => { - await Promise.all(Array(channelNum).fill(1).map((_, i) => store.deleteChannel(channelPrefix + i))); - }); - - it('list channels using prefix/marker/max-keys', async () => { - const query = { - prefix: 'channel-list-', - marker: 'channel-list-4', - 'max-keys': 3, - }; - - const result = await store.listChannels(query); - - assert.equal(result.res.status, 200); - assert.equal(result.nextMarker, 'channel-list-7'); - assert.equal(result.isTruncated, true); - - const { channels } = result; - assert.equal(channels.length, 3); - assert.equal(channels[0].Name, channelPrefix + 5); - assert.equal(channels[1].Name, channelPrefix + 6); - assert.equal(channels[2].Name, channelPrefix + 7); - }); - }); - - describe('getChannelHistory()', () => { - let historyCid; - before(async () => { - historyCid = 'channel-3'; - const historyconf = conf; - historyconf.Description = 'this is live channel 3'; - await store.putChannel(historyCid, historyconf); - }); - - after(async () => { - await store.deleteChannel(historyCid); - }); - - it('should get channel history', async () => { - const result = await store.getChannelHistory(historyCid); - - assert.equal(result.res.status, 200); - assert(Array.isArray(result.records)); - assert.equal(result.records.length, 0); - - // TODO: verify LiveRecord when history exists - // verify wish OBS or ffmpeg - }); - }); - - describe('createVod()', () => { - let createVodCid; - before(async () => { - createVodCid = 'channel-4'; - const createVodConf = conf; - createVodConf.Description = 'this is live channel 4'; - const result = await store.putChannel(createVodCid, createVodConf); - assert.equal(result.res.status, 200); - const url = 
-        params: {
-          playlistName: 'vod.m3u8',
-        },
-        expires: 3600,
-      });
-      console.log(url);
-    });
-
-    after(async () => {
-      await store.deleteChannel(createVodCid);
-    });
-
-    // this case needs existing data on the server
-    it.skip('should create vod playlist', async () => {
-      const name = 'vod.m3u8';
-      const now = Date.now();
-
-      try {
-        const result = await store.createVod(cid, name, {
-          startTime: Math.floor((now - 100) / 1000),
-          endTime: Math.floor(now / 1000),
-        });
-
-        assert.equal(result.res.status, 200);
-      } catch (err) {
-        console.error(err);
-      }
-    });
-  });
-
-  describe('getRtmpUrl()', () => {
-    let getRtmpUrlCid;
-    before(async () => {
-      getRtmpUrlCid = 'channel-5';
-      const getRtmpUrlConf = conf;
-      getRtmpUrlConf.Description = 'this is live channel 5';
-      const result = await store.putChannel(getRtmpUrlCid, getRtmpUrlConf);
-      assert.equal(result.res.status, 200);
-    });
-
-    after(async () => {
-      await store.deleteChannel(getRtmpUrlCid);
-    });
-
-    it('should get rtmp url', () => {
-      const name = 'vod.m3u8';
-      const url = store.getRtmpUrl(getRtmpUrlCid, {
-        params: {
-          playlistName: name,
-        },
-        expires: 3600,
-      });
-      console.log(url);
-      // verify the url works with OBS or ffmpeg
-    });
-  });
-});
diff --git a/test/sts.test.js b/test/sts.test.js
deleted file mode 100644
index fe5492e8f..000000000
--- a/test/sts.test.js
+++ /dev/null
@@ -1,229 +0,0 @@
-const assert = require('assert');
-const utils = require('./utils');
-const sts = require('..').STS;
-const OSS = require('..');
-const config = require('./config').oss;
-const stsConfig = require('./config').sts;
-const mm = require('mm');
-
-describe.skip('test/sts.test.js', () => {
-  const { prefix } = utils;
-  describe('assumeRole()', () => {
-    it('should assume role', async () => {
-      const stsClient = sts(stsConfig);
-      const result = await stsClient.assumeRole(stsConfig.roleArn);
-      assert.strictEqual(result.res.status, 200);
-    });
-
-    it('should assume role with policy', async () => {
-      const stsClient = sts(stsConfig);
-      const policy = {
-        Statement: [
-          {
-            Action: [ 'oss:*' ],
-            Effect: 'Allow',
-            Resource: [ 'acs:oss:*:*:*' ],
-          },
-        ],
-        Version: '1',
-      };
-      const result = await stsClient.assumeRole(stsConfig.roleArn, policy);
-      assert.strictEqual(result.res.status, 200);
-    });
-
-    it('should assume role with policy string', async () => {
-      const stsClient = sts(stsConfig);
-      const policy = `
-      {
-        "Statement": [
-          {
-            "Action": [
-              "oss:*"
-            ],
-            "Effect": "Allow",
-            "Resource": ["acs:oss:*:*:*"]
-          }
-        ],
-        "Version": "1"
-      }`;
-      const result = await stsClient.assumeRole(stsConfig.roleArn, policy);
-      assert.strictEqual(result.res.status, 200);
-    });
-
-    it('should handle error in assume role', async () => {
-      const stsClient = sts(stsConfig);
-      const policy = `
-      {
-        "Statements": [
-          {
-            "Action": [
-              "oss:*"
-            ],
-            "Effect": "Allow",
-            "Resource": ["acs:oss:*:*:*"]
-          }
-        ],
-        "Version": "1"
-      }`;
-
-      try {
-        await stsClient.assumeRole(stsConfig.roleArn, policy);
-        assert(false);
-      } catch (err) {
-        err.message.should.match(/InvalidParameter.PolicyGrammar/);
-      }
-    });
-
-    it('should list objects using STS', async () => {
-      const stsClient = sts(stsConfig);
-      let result = await stsClient.assumeRole(stsConfig.roleArn);
-      assert.strictEqual(result.res.status, 200);
-
-      const ossClient = new OSS({
-        region: config.region,
-        accessKeyId: result.credentials.AccessKeyId,
-        accessKeySecret: result.credentials.AccessKeySecret,
-        stsToken: result.credentials.SecurityToken,
-        bucket: stsConfig.bucket,
-      });
-
-      const name = `${prefix}oss-client/oss/sts-put1.js`;
-      result = await ossClient.put(name, __filename);
-      assert.strictEqual(result.res.status, 200);
-
-      result = await ossClient.list({
-        'max-keys': 10,
-      });
-
-      assert.strictEqual(result.res.status, 200);
-    });
-
-    it('should delete multi objects using STS', async () => {
-      const stsClient = sts(stsConfig);
-
-      let policy = {
-        Statement: [
-          {
-            Action: [ 'oss:PutObject' ],
-            Effect: 'Allow',
-            Resource: [ 'acs:oss:*:*:*' ],
-          },
-        ],
-        Version: '1',
-      };
-
-      let result = await stsClient.assumeRole(stsConfig.roleArn, policy);
-      assert.strictEqual(result.res.status, 200);
-
-      let ossClient = new OSS({
-        region: config.region,
-        accessKeyId: result.credentials.AccessKeyId,
-        accessKeySecret: result.credentials.AccessKeySecret,
-        stsToken: result.credentials.SecurityToken,
-        bucket: stsConfig.bucket,
-      });
-
-      const name1 = `${prefix}oss-client/oss/sts-put1.js`;
-      const name2 = `${prefix}oss-client/oss/sts-put2.js`;
-      result = await ossClient.put(name1, __filename);
-      assert.strictEqual(result.res.status, 200);
-
-      result = await ossClient.put(name2, __filename);
-      assert.strictEqual(result.res.status, 200);
-
-      try {
-        await ossClient.deleteMulti([ name1, name2 ]);
-        assert(false);
-      } catch (err) {
-        err.message.should.match(/Access denied by authorizer's policy/);
-      }
-
-      policy = {
-        Statement: [
-          {
-            Action: [ 'oss:DeleteObject' ],
-            Effect: 'Allow',
-            Resource: [ 'acs:oss:*:*:*' ],
-          },
-        ],
-        Version: '1',
-      };
-
-      result = await stsClient.assumeRole(stsConfig.roleArn, policy);
-      assert.strictEqual(result.res.status, 200);
-
-      ossClient = new OSS({
-        region: config.region,
-        accessKeyId: result.credentials.AccessKeyId,
-        accessKeySecret: result.credentials.AccessKeySecret,
-        stsToken: result.credentials.SecurityToken,
-        bucket: stsConfig.bucket,
-      });
-
-      result = await ossClient.deleteMulti([ name1, name2 ]);
-      assert.strictEqual(result.res.status, 200);
-    });
-  });
-
-  describe('refreshSTSToken()', () => {
-    let stsClient;
-    let store;
-    before(async () => {
-      stsClient = sts(stsConfig);
-      const { credentials } = await stsClient.assumeRole(stsConfig.roleArn);
-      const testRefreshSTSTokenConf = {
-        region: config.region,
-        accessKeyId: credentials.AccessKeyId,
-        accessKeySecret: credentials.AccessKeySecret,
-        stsToken: credentials.SecurityToken,
-        bucket: stsConfig.bucket,
-        refreshSTSTokenInterval: 1000,
-      };
-      store = new OSS(testRefreshSTSTokenConf);
-    });
-
-    it('should refresh sts token when token is expired', async () => {
-      try {
-        store.options.refreshSTSToken = async () => {
-          mm.restore();
-          const { credentials } = await stsClient.assumeRole(stsConfig.roleArn);
-          return credentials;
-        };
-        const ak = store.options.accessKeyId;
-        await store.listBuckets();
-        assert.strictEqual(ak, store.options.accessKeyId);
-        await utils.sleep(2000);
-        await store.listBuckets();
-        assert.notStrictEqual(ak, store.options.accessKeyId);
-      } catch (error) {
-        assert(false, error);
-      }
-    });
-
-    it('asyncSignatureUrl should use refreshSTSToken', async () => {
-      const { credentials } = await stsClient.assumeRole(stsConfig.roleArn);
-      let flag = false;
-
-      store = new OSS({
-        region: config.region,
-        accessKeyId: credentials.AccessKeyId,
-        accessKeySecret: credentials.AccessKeySecret,
-        stsToken: credentials.SecurityToken,
-        refreshSTSToken: () => {
-          flag = true;
-          return {
-            accessKeyId: 'b',
-            accessKeySecret: 'b',
-            stsToken: 'b',
-          };
-        },
-        bucket: stsConfig.bucket,
-        refreshSTSTokenInterval: 1000,
-      });
-      await utils.sleep(2000);
-      await store.asyncSignatureUrl('test.txt');
-
-      assert(flag);
-    });
-  });
-});
diff --git a/test/util/isIP.test.ts b/test/util/isIP.test.ts
index c83ff8562..9c8c1a963 100644
--- a/test/util/isIP.test.ts
+++ b/test/util/isIP.test.ts
@@ -1,4 +1,4 @@
-import assert from 'node:assert';
+import { strict as assert } from 'node:assert';
 import { isIP } from '../../src/util/index.js';
 
 describe('test/util/isIP.test.ts', () => {
diff --git a/test/utils.js b/test/utils.js
deleted file mode 100644
index ffd930f24..000000000
--- a/test/utils.js
+++ /dev/null
@@ -1,174 +0,0 @@
-const assert = require('assert');
-const fs = require('fs');
-const urlutil = require('url');
-const { isObject } = require('../lib/common/utils/isObject');
-
-exports.throws = async function(block, checkError) {
-  try {
-    await block();
-  } catch (err) {
-    if (typeof checkError === 'function') {
-      return checkError(err);
-    }
-    // throws(block, errorName)
-    if (typeof checkError === 'string') {
-      return assert.equal(err.name, checkError);
-    }
-    // throws(block, RegExp)
-    if (!checkError.test(err.toString())) {
-      throw new Error(`expected ${err.toString()} to match ${checkError.toString()}`);
-    }
-    return false;
-  }
-  throw new Error(`${block.toString()} should throw an error`);
-};
-
-exports.sleep = function(ms) {
-  return new Promise(resolve => {
-    setTimeout(() => {
-      resolve();
-    }, ms);
-  });
-};
-
-exports.cleanBucket = async function(store, bucket, multiversion) {
-  store.useBucket(bucket);
-  let result;
-  const options = { versionId: null };
-
-  if (!multiversion) {
-    try {
-      await store.getBucketVersions({
-        'max-keys': 1000,
-      });
-      multiversion = true;
-    } catch (error) {
-      multiversion = false;
-    }
-  }
-
-  async function handleDelete(deleteKey) {
-    if (multiversion) {
-      result = await store.getBucketVersions({
-        'max-keys': 1000,
-      });
-    } else {
-      result = await store.list({
-        'max-keys': 1000,
-      });
-    }
-    result[deleteKey] = result[deleteKey] || [];
-
-    await Promise.all(result[deleteKey]
-      .map(_ => store.delete(_.name, multiversion ?
-        Object.assign({}, options, { versionId: _.versionId }) :
-        options)));
-  }
-  await handleDelete('objects');
-  if (multiversion) {
-    await handleDelete('deleteMarker');
-  }
-
-  result = await store.listUploads({
-    'max-uploads': 1000,
-  });
-  const uploads = result.uploads || [];
-  await Promise.all(uploads.map(_ => store.abortMultipartUpload(_.name, _.uploadId)));
-
-  const channels = (await store.listChannels()).channels.map(_ => _.Name);
-  await Promise.all(channels.map(_ => store.deleteChannel(_)));
-  await store.deleteBucket(bucket);
-};
-
-exports.prefix = `${process.platform}-${process.version}-${new Date().getTime()}/`;
-
-exports.createTempFile = async function createTempFile(name, size) {
-  const tmpdir = '/tmp/.oss/';
-  if (!fs.existsSync(tmpdir)) {
-    fs.mkdirSync(tmpdir);
-  }
-
-  await new Promise((resolve, reject) => {
-    const rs = fs.createReadStream('/dev/urandom', {
-      start: 0,
-      end: size - 1,
-    });
-    const ws = fs.createWriteStream(tmpdir + name);
-    rs.pipe(ws);
-    ws.on('finish', (err, res) => {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(res);
-      }
-    });
-  });
-
-  return tmpdir + name;
-};
-
-/*
- * cb = {
- *   url: 'd.rockuw.com:4567',
- *   query: {user: 'me'},
- *   contentType: 'application/json',
- *   body: '{"hello": "world"}'
- * };
- */
-exports.encodeCallback = function(cb) {
-  const url = urlutil.parse(cb.url);
-  url.query = cb.query;
-
-  const json = {
-    callbackUrl: url.format(),
-    callbackBody: cb.body,
-    callbackBodyType: cb.contentType || 'application/x-www-form-urlencoded',
-  };
-
-  return Buffer.from(JSON.stringify(json)).toString('base64');
-};
-
-// If a config property value is an array, check whether it is a sub-array of the data's array.
-// If a config property value is an object, check whether the data's property values contain the config's.
-// If a config property value is a primitive, check that the data has that property with an equal value.
-exports.includesConf = function includesConf(data, conf) {
-  if (conf === null || typeof conf !== 'object') {
-    return data === conf;
-  }
-
-  let valid = true;
-  if (Array.isArray(conf)) {
-    if (!Array.isArray(data)) return false;
-    for (let i = 0; i < conf.length; i++) {
-      let itemValid = false;
-      for (let j = 0; j < data.length; j++) {
-        if (includesConf(data[j], conf[i])) {
-          itemValid = true;
-          break;
-        }
-      }
-      if (!itemValid) return false;
-    }
-    return valid;
-  }
-
-  const keys = Object.keys(conf);
-  for (let i = 0; i < keys.length; i++) {
-    const key = keys[i];
-    if (!isObject(conf[key]) && !Array.isArray(conf[key])) {
-      if (conf[key] !== data[key]) {
-        valid = false;
-        break;
-      }
-    } else if (isObject(conf[key]) || Array.isArray(conf[key])) {
-      if (!includesConf(data[key], conf[key])) {
-        valid = false;
-        break;
-      }
-    } else if (conf[key] !== data[key]) {
-      valid = false;
-      break;
-    }
-  }
-  return valid;
-};
diff --git a/test/utils/checkConfigValid.test.js b/test/utils/checkConfigValid.test.js
deleted file mode 100644
index 6db743bd8..000000000
--- a/test/utils/checkConfigValid.test.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const assert = require('assert');
-const { checkConfigValid } = require('../../lib/common/utils/checkConfigValid');
-
-describe('test/utils/checkConfigValid.test.js', () => {
-  describe('endpoint', () => {
-    it('should succeed when endpoint is valid', () => {
-      try {
-        const endpoint = 'testa_-.com';
-        checkConfigValid(endpoint, 'endpoint');
-        assert(true);
-      } catch (error) {
-        assert(false);
-      }
-    });
-    it('should throw when endpoint includes invalid character', () => {
-      const errorStr = '中~!@#$%^&*()+={}[]|\\";\',<>?';
-      errorStr.split('').map(_ => `test-a_b.${_}.com`).forEach(
-        str => {
-          try {
-            checkConfigValid(str, 'endpoint');
-            assert(false);
-          } catch (error) {
-            assert(error.message.includes('endpoint'));
-          }
-        }
-      );
-    });
-  });
-
-  describe('region', () => {
-    it('should succeed when region is valid', () => {
-      try {
-        const region = 'oss-cn-hangzhou';
-        checkConfigValid(region, 'region');
-        assert(true);
-      } catch (error) {
-        assert(false);
-      }
-    });
-    it('should throw when region includes invalid character', () => {
-      const errorStr = '中~!@#$%^&*()+={}[]|\\";\',<>?';
-      errorStr.split('').map(_ => `oss-${_}hangzhou`).forEach(
-        str => {
-          try {
-            checkConfigValid(str, 'region');
-            assert(false);
-          } catch (error) {
-            assert(error.message.includes('region'));
-          }
-        }
-      );
-    });
-  });
-
-});
diff --git a/test/utils/deepCopy.test.js b/test/utils/deepCopy.test.js
deleted file mode 100644
index b73863c17..000000000
--- a/test/utils/deepCopy.test.js
+++ /dev/null
@@ -1,47 +0,0 @@
-const assert = require('assert');
-const { deepCopy, deepCopyWith } = require('../../lib/common/utils/deepCopy');
-
-describe('utils/deepCopy()', () => {
-  it('should copy big Buffers correctly', () => {
-    // 2^30 - 1 (about 1GB) is the max buffer size on a 32-bit machine
-    // See https://nodejs.org/api/buffer.html#buffer_buffer_constants_max_length
-    const numberBytes = Math.pow(2, 30) - 1;
-    const obj = {
-      buffer: Buffer.alloc(numberBytes),
-    };
-    const copy = deepCopy(obj);
-    assert.strictEqual(Object.keys(obj).length, Object.keys(copy).length);
-    assert(obj.buffer.equals(copy.buffer));
-  });
-
-  it('should skip some properties when using deepCopyWith', () => {
-    const numberBytes = Math.pow(2, 30) - 1;
-    const obj = {
-      a: 1,
-      b: {
-        c: 2,
-      },
-      buffer: Buffer.alloc(numberBytes),
-    };
-    const copy1 = deepCopyWith(obj, (_, key) => {
-      if (key === 'buffer') return null;
-    });
-    assert.deepStrictEqual(copy1, {
-      a: 1,
-      b: {
-        c: 2,
-      },
-      buffer: null,
-    });
-
-    const copy2 = deepCopyWith(obj);
-    assert.deepStrictEqual(obj.a, copy2.a);
-    assert.deepStrictEqual(obj.b, copy2.b);
-    assert(obj.buffer.equals(copy2.buffer));
-
-    const copy3 = deepCopyWith(obj, () => {});
-    assert.deepStrictEqual(obj.a, copy3.a);
-    assert.deepStrictEqual(obj.b, copy3.b);
-    assert(obj.buffer.equals(copy3.buffer));
-  });
-});
diff --git a/test/utils/omit.test.js b/test/utils/omit.test.js
deleted file mode 100644
index 9ea7d1694..000000000
--- a/test/utils/omit.test.js
+++ /dev/null
@@ -1,28 +0,0 @@
-const assert = require('assert');
-const { omit } = require('../../lib/common/utils/omit');
-
-describe('omit test case', () => {
-  const originObject = {
-    name: 'man',
-    age: '38',
-    sex: 'male',
-    children: {
-      name: 'child',
-      age: '18',
-    },
-  };
-
-  it('should return new object', () => {
-    const newObject = omit(originObject, []);
-    assert(newObject !== originObject);
-  });
-  it('should remove properties', () => {
-    const newObject = omit(originObject, [ 'age' ]);
-    assert.equal(newObject.age, undefined);
-  });
-  it('should not remove children.name', () => {
-    const newObject = omit(originObject, [ 'name' ]);
-    assert.equal(newObject.name, undefined);
-    assert.equal(newObject.children.name, 'child');
-  });
-});
diff --git a/test/utils/retry.test.js b/test/utils/retry.test.js
deleted file mode 100644
index 7e42102e8..000000000
--- a/test/utils/retry.test.js
+++ /dev/null
@@ -1,126 +0,0 @@
-const assert = require('assert');
-const { md5 } = require('utility');
-const mm = require('mm');
-const fs = require('fs');
-const OSS = require('../..');
-const config = require('../config').oss;
-const utils = require('../utils');
-
-describe('test/retry.test.js', () => {
-  let store;
-  const RETRY_MAX = 3;
-  let testRetryCount = 0;
-  const bucket = config.bucket;
-  before(async () => {
-    store = new OSS({
-      ...config,
-      retryMax: RETRY_MAX,
-    });
-    store.useBucket(bucket);
-  });
-  beforeEach(() => {
-    testRetryCount = 0;
-    const originRequest = store.urllib.request;
-    mm(store.urllib, 'request', async (url, params) => {
-      if (testRetryCount < RETRY_MAX) {
-        testRetryCount++;
-        const e = new Error('net error');
-        e.status = -1;
-        e.headers = {};
-        throw e;
-      } else {
-        return await originRequest(url, params);
-      }
-    });
-  });
-  afterEach(() => {
-    mm.restore();
-  });
-
-  it.skip('set retryMax to test that requests auto retry on network error or timeout', async () => {
-    const res = await store.listBuckets();
-    assert.strictEqual(res.res.status, 200);
-    assert.strictEqual(testRetryCount, RETRY_MAX);
-  });
-
-  it('should throw when retry count is bigger than options.retryMax', async () => {
-    mm.error(store.urllib, 'request', {
-      status: -1, // timeout
-      headers: {},
-    });
-    try {
-      await store.listBuckets();
-      assert(false, 'should throw error');
-    } catch (error) {
-      assert(error.status === -1);
-    }
-  });
-
-  it('should not retry when err.status is not -1 or -2', async () => {
-    mm.error(store.urllib, 'request', {
-      status: -3,
-      headers: {},
-    });
-    try {
-      const name = `oss-client-test-retry-file-${Date.now()}`;
-      const fileName = await utils.createTempFile(name, 1 * 1024);
-      await store.put(name, fileName);
-      assert(false, 'should throw error');
-    } catch (error) {
-      assert.strictEqual(error.status, -3);
-    }
-  });
-
-  it('should succeed when put with filename', async () => {
-    const name = `oss-client-test-retry-file-${Date.now()}`;
-    const fileName = await utils.createTempFile(name, 1 * 1024);
-    const res = await store.put(name, fileName);
-    assert.strictEqual(res.res.status, 200);
-    assert.strictEqual(testRetryCount, RETRY_MAX);
-    const onlineFile = await store.get(name);
-    assert.strictEqual(md5(fs.readFileSync(fileName)), md5(onlineFile.content));
-  });
-
-  it('should succeed when multipartUpload with filename', async () => {
-    mm.restore();
-    const originRequest = store.urllib.request;
-    const UPLOAD_PART_SEQ = 1;
-    let currentRequestTimer = 0;
-    mm(store.urllib, 'request', async (url, params) => {
-      // skip mock when initMultipartUpload
-      if (currentRequestTimer < UPLOAD_PART_SEQ) {
-        currentRequestTimer++;
-        return originRequest(url, params);
-      }
-      // mock net error when upload part
-      if (testRetryCount < RETRY_MAX) {
-        testRetryCount++;
-        const e = new Error('net error');
-        e.status = -1;
-        e.headers = {};
-        throw e;
-      } else {
-        return originRequest(url, params);
-      }
-    });
-    const name = `oss-client-test-retry-file-${Date.now()}`;
-    const fileName = await utils.createTempFile(name, 1.5 * 1024 * 1024);
-    const res = await store.multipartUpload(name, fileName);
-    assert.strictEqual(res.res.status, 200);
-    assert.strictEqual(testRetryCount, RETRY_MAX);
-    const onlineFile = await store.get(name);
-    assert.strictEqual(onlineFile.content.length, 1.5 * 1024 * 1024);
-    assert.strictEqual(md5(fs.readFileSync(fileName)), md5(onlineFile.content));
-  });
-
-  it('should fail when put with stream', async () => {
-    const name = `oss-client-test-retry-file-${Date.now()}`;
-    const fileName = await utils.createTempFile(name, 1 * 1024);
-    try {
-      await store.put(name, fs.createReadStream(fileName));
-      assert(false, 'should not reach here');
-    } catch (e) {
-      assert.strictEqual(e.status, -1);
-    }
-  });
-});