From 8be0a484835de12777409c8be0aa252060c94271 Mon Sep 17 00:00:00 2001
From: Prakash Senthil Vel <23444145+prakashsvmx@users.noreply.github.com>
Date: Mon, 20 May 2024 08:38:20 +0530
Subject: [PATCH] migrate copyObject api to ts (#1289)

---
 README.md                                     |   2 +-
 docs/API.md                                   |  35 +-
 examples/compose-object-test-example.js       | 136 -------
 examples/compose-object-test-example.mjs      | 126 ++++++
 .../{compose-object.js => compose-object.mjs} |  11 +-
 examples/{copy-object.js => copy-object.mjs}  |  16 +-
 src/internal/client.ts                        | 340 +++++++++++++++-
 src/internal/type.ts                          |  30 ++
 src/internal/xml-parser.ts                    |  41 +-
 src/minio.d.ts                                |  31 +-
 src/minio.js                                  | 367 +-----------------
 src/transformers.js                           |   5 -
 src/xml-parsers.js                            |  27 --
 tests/unit/test.js                            |  28 +-
 14 files changed, 567 insertions(+), 628 deletions(-)
 delete mode 100644 examples/compose-object-test-example.js
 create mode 100644 examples/compose-object-test-example.mjs
 rename examples/{compose-object.js => compose-object.mjs} (88%)
 rename examples/{copy-object.js => copy-object.mjs} (72%)

diff --git a/README.md b/README.md
index 9cde67a9..87b0c639 100644
--- a/README.md
+++ b/README.md
@@ -237,7 +237,7 @@ The complete API Reference is available here:
- [remove-object-tagging.mjs](https://github.com/minio/minio-js/blob/master/examples/remove-object-tagging.js)
- [set-object-legal-hold.mjs](https://github.com/minio/minio-js/blob/master/examples/set-object-legalhold.mjs)
- [get-object-legal-hold.mjs](https://github.com/minio/minio-js/blob/master/examples/get-object-legal-hold.mjs)
-- [compose-object.js](https://github.com/minio/minio-js/blob/master/examples/compose-object.js)
+- [compose-object.mjs](https://github.com/minio/minio-js/blob/master/examples/compose-object.mjs)
- [select-object-content.mjs](https://github.com/minio/minio-js/blob/master/examples/select-object-content.mjs)

#### Presigned Operations

diff --git a/docs/API.md b/docs/API.md
index 48787929..d4e96de9 100644
--- a/docs/API.md
+++ b/docs/API.md
@@ -1089,32 +1089,25 @@ minioClient.fPutObject('mybucket', '40mbfile', file, metaData, function (err, ob

-### copyObject(bucketName, objectName, sourceObject, conditions[, callback])
+### copyObject(targetBucketName, targetObjectName, sourceBucketNameAndObjectName[, conditions])

Copy a source object into a new object in the specified bucket.

**Parameters**

-| Param | Type | Description |
-| ------------------------------------- | ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| `bucketName` | _string_ | Name of the bucket. |
-| `objectName` | _string_ | Name of the object. |
-| `sourceObject` | _string_ | Path of the file to be copied. |
-| `conditions` | _CopyConditions_ | Conditions to be satisfied before allowing object copy. |
-| `callback(err, {etag, lastModified})` | _function_ | Non-null `err` indicates error, `etag` _string_ and lastModified _Date_ are the etag and the last modified date of the object newly copied. If no callback is passed, a `Promise` is returned. |
+| Param | Type | Description |
+| ------------------------------- | ---------------- | ------------------------------------------------------- |
+| `targetBucketName` | _string_ | Name of the bucket. |
+| `targetObjectName` | _string_ | Name of the object. |
+| `sourceBucketNameAndObjectName` | _string_ | Path of the file to be copied. &#13;
| +| `conditions` | _CopyConditions_ | Conditions to be satisfied before allowing object copy. | **Example** ```js const conds = new Minio.CopyConditions() conds.setMatchETag('bd891862ea3e22c93ed53a098218791d') -minioClient.copyObject('mybucket', 'newobject', '/mybucket/srcobject', conds, function (e, data) { - if (e) { - return console.log(e) - } - console.log('Successfully copied the object:') - console.log('etag = ' + data.etag + ', lastModified = ' + data.lastModified) -}) +await minioClient.copyObject('mybucket', 'newobject', '/mybucket/srcobject', conds) ``` @@ -1490,7 +1483,7 @@ const legalholdStatus = await minioClient.setObjectLegalHold('bucketName', 'obje -### composeObject(destObjConfig, sourceObjectList [, callback]) +### composeObject(destObjConfig, sourceObjectList) Compose an object from parts @@ -1500,7 +1493,6 @@ Compose an object from parts | ------------------ | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | `destObjConfig` | _object_ | Destination Object configuration of the type [CopyDestinationOptions](https://github.com/minio/minio-js/blob/master/src/helpers.js) | | `sourceObjectList` | _object[]_ | Array of object(parts) source to compose into an object. Each part configuration should be of type [CopySourceOptions](https://github.com/minio/minio-js/blob/master/src/helpers.js) | -| `callback(err)` | _function_ | Callback function is called with non `null` value in case of error. If no callback is passed, a `Promise` is returned. | **Example 1** @@ -1534,14 +1526,7 @@ const destOption = new minio.CopyDestinationOptions({ }) //using Promise style. -const composePromise = minioClient.composeObject(destOption, sourceList) -composePromise - .then((result) => { - console.log('Success...') - }) - .catch((e) => { - console.log('error', e) - }) +await minioClient.composeObject(destOption, sourceList) ``` diff --git a/examples/compose-object-test-example.js b/examples/compose-object-test-example.js deleted file mode 100644 index 8beb0324..00000000 --- a/examples/compose-object-test-example.js +++ /dev/null @@ -1,136 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2021 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY, my-bucketname and my-objectname -// are dummy values, please replace them with original values. -import fs from 'node:fs' -import os from 'node:os' - -import * as Minio from 'minio' -import splitFile from 'split-file' - -const s3Client = new Minio.Client({ - endPoint: 's3.amazonaws.com', - accessKey: 'YOUR-ACCESSKEYID', - secretKey: 'YOUR-SECRETACCESSKEY', -}) - -const oneMB = 1024 * 1024 - -// Create a bucket prior to running: mc mb local/source-bucket -function sampleRunComposeObject() { - const tmpDir = os.tmpdir() - - const bucketName = 'source-bucket' - // generate 100 MB buffer and write to a file. 
- const local100mbFileToBeSplitAndComposed = Buffer.alloc(100 * oneMB, 0) - - const composedObjName = '_100-mb-file-to-test-compose' - const tmpSubDir = `${tmpDir}/compose` - const fileToSplit = `${tmpSubDir}/${composedObjName}` - const partObjNameList = [] - - fs.mkdir(tmpSubDir, { recursive: true }, function (err) { - if (err) { - console.log(err) - } else { - console.log('New Temp directory successfully created.') - } - }) - - try { - fs.writeFileSync(fileToSplit, local100mbFileToBeSplitAndComposed) - console.log('Written 100 MB File ') - // 100 MB split into 26 MB part size. ( just to test unequal parts ). But change as required. - - splitFile - .splitFileBySize(fileToSplit, 26 * oneMB) - .then((names) => { - console.log('Split and write 100 MB File(s) ', names) - const putPartRequests = names.map((partFileName) => { - const partObjName = partFileName.slice((tmpSubDir + '/').length) - partObjNameList.push(partObjName) - return s3Client.fPutObject(bucketName, partObjName, partFileName, {}) - }) - - Promise.all(putPartRequests) - .then(() => { - console.log('Uploaded part Files: ', names) - const sourcePartObjList = partObjNameList.map((partObjName) => { - return new Minio.CopySourceOptions({ - Bucket: bucketName, - Object: partObjName, - }) - }) - - const destObjConfig = new Minio.CopyDestinationOptions({ - Bucket: bucketName, - Object: composedObjName, - }) - - s3Client - .composeObject(destObjConfig, sourcePartObjList) - .then(() => { - console.log('Composed to a single file: ', composedObjName) - - /** Begin Clean up ***/ - // To verify that the parts are uploaded properly, comment the below code blocks and verify - const sourcePartObjList = partObjNameList.map((partObjName) => { - return s3Client.removeObject(bucketName, partObjName) - }) - - Promise.all(sourcePartObjList) - .then(() => { - console.log('Removed source parts: ') - - // Uncomment to remove the composed object itself. commented for verification. - /* - s3Client.removeObject(bucketName, composedObjName).then(()=>{ - console.log("Clean up: Removed the composed Object ") - }).catch(()=>{ - console.log("Error removing composed object", er) - }) - */ - }) - .catch((er) => { - console.log('Error removing parts used in composing', er) - }) - - /** End Clean up **/ - - // Clean up generated parts locally - fs.rmSync(tmpSubDir, { recursive: true, force: true }) - console.log('Clean up temp parts directory : ') - }) - .catch((e) => { - console.log('Error Composing parts into an object', e) - }) - }) - .catch((e) => { - console.log('Error Uploading parts ', e) - }) - }) - .catch((e) => { - // this is a client error not related to compose object - console.log('Error Splitting files into parts ', e) - }) - } catch (err) { - // this is a client error not related to compose object - console.log('Error Creating local files ', err) - } -} - -sampleRunComposeObject() diff --git a/examples/compose-object-test-example.mjs b/examples/compose-object-test-example.mjs new file mode 100644 index 00000000..f7762f42 --- /dev/null +++ b/examples/compose-object-test-example.mjs @@ -0,0 +1,126 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2021 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY, my-bucketname and my-objectname
+// are dummy values, please replace them with original values.
+import fs from 'node:fs'
+import os from 'node:os'
+
+import * as Minio from 'minio'
+import splitFile from 'split-file'
+
+const s3Client = new Minio.Client({
+  endPoint: 'localhost',
+  accessKey: 'minio',
+  secretKey: 'minio123',
+  useSSL: false,
+  port: 22000,
+  //partSize: 5 * 1024 * 1024
+})
+
+const oneMB = 1024 * 1024
+
+// Create a bucket prior to running: mc mb local/source-bucket
+const sampleRunComposeObject = async () => {
+  const tmpDir = os.tmpdir()
+
+  const bucketName = 'source-bucket'
+  // generate 100 MB buffer and write to a file.
+  const local100mbFileToBeSplitAndComposed = Buffer.alloc(100 * oneMB, 0)
+
+  const composedObjName = '_100-mb-file-to-test-compose'
+  const tmpSubDir = `${tmpDir}/compose`
+  const fileToSplit = `${tmpSubDir}/${composedObjName}`
+  const partObjNameList = []
+
+  fs.mkdir(tmpSubDir, { recursive: true }, function (err) {
+    if (err) {
+      console.log(err)
+    } else {
+      console.log('New Temp directory successfully created.')
+    }
+  })
+
+  try {
+    fs.writeFileSync(fileToSplit, local100mbFileToBeSplitAndComposed)
+    console.log('Written 100 MB File ')
+    // 100 MB split into 26 MB part size. ( just to test unequal parts ). But change as required.
+
+    const names = await splitFile.splitFileBySize(fileToSplit, 26 * oneMB)
+
+    console.log('Split and write 100 MB File(s) ', names)
+    const putPartRequests = names.map((partFileName) => {
+      const partObjName = partFileName.slice((tmpSubDir + '/').length)
+      partObjNameList.push(partObjName)
+      return s3Client.fPutObject(bucketName, partObjName, partFileName, {})
+    })
+    await Promise.all(putPartRequests)
+
+    console.log('Uploaded part Files: ', names)
+    const sourcePartObjList = partObjNameList.map((partObjName) => {
+      return new Minio.CopySourceOptions({
+        Bucket: bucketName,
+        Object: partObjName,
+      })
+    })
+
+    const destObjConfig = new Minio.CopyDestinationOptions({
+      Bucket: bucketName,
+      Object: composedObjName,
+    })
+
+    try {
+      const result = await s3Client.composeObject(destObjConfig, sourcePartObjList)
+      console.log(result)
+      console.log('Composed to a single file: ', composedObjName)
+    } catch (err) {
+      console.log('Error composing parts into a single object: ', err)
+
+      // Compose failed: remove the uploaded part objects so they are not left behind.
+      /** Begin Clean up ***/
+      // To verify that the parts are uploaded properly, comment the below code blocks and verify
+      const sourcePartObjList = partObjNameList.map((partObjName) => {
+        return s3Client.removeObject(bucketName, partObjName)
+      })
+
+      Promise.all(sourcePartObjList)
+        .then(() => {
+          console.log('Removed source parts: ')
+
+          // Uncomment to remove the composed object itself. commented for verification. &#13;
+ /* + s3Client.removeObject(bucketName, composedObjName).then(()=>{ + console.log("Clean up: Removed the composed Object ") + }).catch(()=>{ + console.log("Error removing composed object", er) + }) + */ + }) + .catch((er) => { + console.log('Error removing parts used in composing', er) + }) + + /** End Clean up **/ + + // Clean up generated parts locally + fs.rmSync(tmpSubDir, { recursive: true, force: true }) + console.log('Clean up temp parts directory : ') + } + } catch (e) { + console.log('Error Creating local files ', e) + } +} + +sampleRunComposeObject() diff --git a/examples/compose-object.js b/examples/compose-object.mjs similarity index 88% rename from examples/compose-object.js rename to examples/compose-object.mjs index b8b08d6f..c50c8cbb 100644 --- a/examples/compose-object.js +++ b/examples/compose-object.mjs @@ -51,7 +51,7 @@ const sourceList = [ const destOption = new Minio.CopyDestinationOptions({ Bucket: bucketName, - Object: '100MB.zip', + Object: 'object-name', /** Other possible options */ /* Encryption:{ type:Helpers.ENCRYPTION_TYPES.KMS, @@ -67,11 +67,4 @@ const destOption = new Minio.CopyDestinationOptions({ */ }) -const composePromise = s3Client.composeObject(destOption, sourceList) -composePromise - .then((result) => { - console.log('ComposeObject Success...', result) - }) - .catch((e) => { - console.log('composeObject Promise Error', e) - }) +await s3Client.composeObject(destOption, sourceList) diff --git a/examples/copy-object.js b/examples/copy-object.mjs similarity index 72% rename from examples/copy-object.js rename to examples/copy-object.mjs index 9e0107cc..dd75ae99 100644 --- a/examples/copy-object.js +++ b/examples/copy-object.mjs @@ -14,7 +14,7 @@ * limitations under the License. */ -// Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY, my-bucketname, my-objectname, +// Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY, my-target-bucketname, my-target-objectname, // my-src-bucketname and my-src-objectname are dummy values, please replace // them with original values. 
@@ -29,16 +29,4 @@ const s3Client = new Minio.Client({ const conds = new Minio.CopyConditions() conds.setMatchETag('bd891862ea3e22c93ed53a098218791d') -s3Client.copyObject( - 'my-bucketname', - 'my-objectname', - '/my-src-bucketname/my-src-objectname', - conds, - function (e, data) { - if (e) { - return console.log(e) - } - console.log('Successfully copied the object:') - console.log('etag = ' + data.etag + ', lastModified = ' + data.lastModified) - }, -) +await s3Client.copyObject('my-target-bucketname', 'my-target-objectname', '/my-src-bucketname/my-src-objectname', conds) diff --git a/src/internal/client.ts b/src/internal/client.ts index e81a0a53..d18ba913 100644 --- a/src/internal/client.ts +++ b/src/internal/client.ts @@ -1,5 +1,6 @@ import * as crypto from 'node:crypto' import * as fs from 'node:fs' +import type { IncomingHttpHeaders } from 'node:http' import * as http from 'node:http' import * as https from 'node:https' import * as path from 'node:path' @@ -15,13 +16,23 @@ import xml2js from 'xml2js' import { CredentialProvider } from '../CredentialProvider.ts' import * as errors from '../errors.ts' import type { SelectResults } from '../helpers.ts' -import { DEFAULT_REGION, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from '../helpers.ts' +import { + CopyDestinationOptions, + CopySourceOptions, + DEFAULT_REGION, + LEGAL_HOLD_STATUS, + RETENTION_MODES, + RETENTION_VALIDITY_UNITS, +} from '../helpers.ts' import { signV4 } from '../signing.ts' import { fsp, streamPromise } from './async.ts' +import { CopyConditions } from './copy-conditions.ts' import { Extensions } from './extensions.ts' import { + calculateEvenSplits, extractMetadata, getContentLength, + getSourceVersionId, getVersionId, hashBinary, insertContentType, @@ -40,6 +51,8 @@ import { isValidPrefix, isVirtualHostStyle, makeDateLong, + PART_CONSTRAINTS, + partsRequired, prependXAMZMeta, readableStream, sanitizeETag, @@ -59,6 +72,9 @@ import type { BucketItemStat, BucketStream, BucketVersioningConfiguration, + CopyObjectParams, + CopyObjectResult, + CopyObjectResultV2, EncryptionConfig, GetObjectLegalHoldOptions, GetObjectRetentionOpts, @@ -90,16 +106,18 @@ import type { Tags, Transport, UploadedObjectInfo, + UploadPartConfig, VersionIdentificator, } from './type.ts' import type { ListMultipartResult, UploadedPart } from './xml-parser.ts' -import * as xmlParsers from './xml-parser.ts' import { parseCompleteMultipart, parseInitiateMultipart, parseObjectLegalHoldConfig, parseSelectObjectContentResponse, + uploadPartParser, } from './xml-parser.ts' +import * as xmlParsers from './xml-parser.ts' const xml = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true }) @@ -2334,6 +2352,7 @@ export class TypedClient { const body = await readAsString(res) return xmlParsers.parseLifecycleConfig(body) } + async setBucketEncryption(bucketName: string, encryptionConfig?: EncryptionConfig): Promise { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) @@ -2465,4 +2484,321 @@ export class TypedClient { const query = `uploadId=${removeUploadId}` await this.makeRequestAsyncOmit({ method, bucketName, objectName, query }, '', [204]) } + + private async copyObjectV1( + targetBucketName: string, + targetObjectName: string, + sourceBucketNameAndObjectName: string, + conditions?: null | CopyConditions, + ) { + if (typeof conditions == 'function') { + conditions = null + } + + if (!isValidBucketName(targetBucketName)) { + throw new 
errors.InvalidBucketNameError('Invalid bucket name: ' + targetBucketName)
+    }
+    if (!isValidObjectName(targetObjectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${targetObjectName}`)
+    }
+    if (!isString(sourceBucketNameAndObjectName)) {
+      throw new TypeError('sourceBucketNameAndObjectName should be of type "string"')
+    }
+    if (sourceBucketNameAndObjectName === '') {
+      throw new errors.InvalidPrefixError(`Empty source prefix`)
+    }
+
+    if (conditions != null && !(conditions instanceof CopyConditions)) {
+      throw new TypeError('conditions should be of type "CopyConditions"')
+    }
+
+    const headers: RequestHeaders = {}
+    headers['x-amz-copy-source'] = uriResourceEscape(sourceBucketNameAndObjectName)
+
+    if (conditions) {
+      if (conditions.modified !== '') {
+        headers['x-amz-copy-source-if-modified-since'] = conditions.modified
+      }
+      if (conditions.unmodified !== '') {
+        headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified
+      }
+      if (conditions.matchETag !== '') {
+        headers['x-amz-copy-source-if-match'] = conditions.matchETag
+      }
+      if (conditions.matchETagExcept !== '') {
+        headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept
+      }
+    }
+
+    const method = 'PUT'
+
+    const res = await this.makeRequestAsync({
+      method,
+      bucketName: targetBucketName,
+      objectName: targetObjectName,
+      headers,
+    })
+    const body = await readAsString(res)
+    return xmlParsers.parseCopyObject(body)
+  }
+
+  private async copyObjectV2(
+    sourceConfig: CopySourceOptions,
+    destConfig: CopyDestinationOptions,
+  ): Promise<CopyObjectResultV2> {
+    if (!(sourceConfig instanceof CopySourceOptions)) {
+      throw new errors.InvalidArgumentError('sourceConfig should be of type CopySourceOptions')
+    }
+    if (!(destConfig instanceof CopyDestinationOptions)) {
+      throw new errors.InvalidArgumentError('destConfig should be of type CopyDestinationOptions')
+    }
+    if (!destConfig.validate()) {
+      return Promise.reject(new errors.InvalidArgumentError('destConfig is not valid'))
+    }
+
+    const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders())
+
+    const bucketName = destConfig.Bucket
+    const objectName = destConfig.Object
+
+    const method = 'PUT'
+
+    const res = await this.makeRequestAsync({ method, bucketName, objectName, headers })
+    const body = await readAsString(res)
+    const copyRes = xmlParsers.parseCopyObject(body)
+    const resHeaders: IncomingHttpHeaders = res.headers
+
+    const sizeHeaderValue = resHeaders && resHeaders['content-length']
+    // content-length arrives as a string header, so coerce it to a number when present
+    const size = sizeHeaderValue ? Number(sizeHeaderValue) : undefined
+
+    return {
+      Bucket: destConfig.Bucket,
+      Key: destConfig.Object,
+      LastModified: copyRes.lastModified,
+      MetaData: extractMetadata(resHeaders as ResponseHeader),
+      VersionId: getVersionId(resHeaders as ResponseHeader),
+      SourceVersionId: getSourceVersionId(resHeaders as ResponseHeader),
+      Etag: sanitizeETag(resHeaders.etag),
+      Size: size,
+    }
+  }
+
+  // Two call shapes: legacy positional strings (V1) or CopySourceOptions/CopyDestinationOptions (V2).
+  async copyObject(source: CopySourceOptions, dest: CopyDestinationOptions): Promise<CopyObjectResult>
+  async copyObject(
+    targetBucketName: string,
+    targetObjectName: string,
+    sourceBucketNameAndObjectName: string,
+    conditions?: CopyConditions,
+  ): Promise<CopyObjectResult>
+  async copyObject(...allArgs: CopyObjectParams): Promise<CopyObjectResult> {
+    if (typeof allArgs[0] === 'string') {
+      const [targetBucketName, targetObjectName, sourceBucketNameAndObjectName, conditions] = allArgs as [
+        string,
+        string,
+        string,
+        CopyConditions?,
+      ]
+      return await this.copyObjectV1(targetBucketName, targetObjectName, sourceBucketNameAndObjectName, conditions)
+    }
+    const [source, dest] = allArgs as [CopySourceOptions, CopyDestinationOptions]
+    return await this.copyObjectV2(source, dest)
+  }
+
+  async uploadPart(partConfig: {
+    bucketName: string
+    objectName: string
+    uploadID: string
+    partNumber: number
+    headers: RequestHeaders
+  }) {
+    const { bucketName, objectName, uploadID, partNumber, headers } = partConfig
+
+    const method = 'PUT'
+    const query = `uploadId=${uploadID}&partNumber=${partNumber}`
+    const requestOptions = { method, bucketName, objectName: objectName, query, headers }
+
+    const res = await this.makeRequestAsync(requestOptions)
+    const body = await readAsString(res)
+    const partRes = uploadPartParser(body)
+
+    return {
+      etag: sanitizeETag(partRes.ETag),
+      key: objectName,
+      part: partNumber,
+    }
+  }
+
+  async composeObject(
+    destObjConfig: CopyDestinationOptions,
+    sourceObjList: CopySourceOptions[],
+  ): Promise<boolean | CopyObjectResult> {
+    const sourceFilesLength = sourceObjList.length
+
+    if (!Array.isArray(sourceObjList)) {
+      throw new errors.InvalidArgumentError('sourceConfig should be an array of CopySourceOptions')
+    }
+    if (!(destObjConfig instanceof CopyDestinationOptions)) {
+      throw new errors.InvalidArgumentError('destConfig should be of type CopyDestinationOptions')
+    }
+
+    if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
+      throw new errors.InvalidArgumentError(
+        `There must be at least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`,
+      )
+    }
+
+    for (let i = 0; i < sourceFilesLength; i++) {
+      const sObj = sourceObjList[i] as CopySourceOptions
+      if (!sObj.validate()) {
+        return false
+      }
+    }
+
+    if (!(destObjConfig as CopyDestinationOptions).validate()) {
+      return false
+    }
+
+    const getStatOptions = (srcConfig: CopySourceOptions) => {
+      let statOpts = {}
+      if (!_.isEmpty(srcConfig.VersionID)) {
+        statOpts = {
+          versionId: srcConfig.VersionID,
+        }
+      }
+      return statOpts
+    }
+    const srcObjectSizes: number[] = []
+    let totalSize = 0
+    let totalParts = 0
+
+    const sourceObjStats = sourceObjList.map((srcItem) =>
+      this.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)),
+    )
+
+    const srcObjectInfos = await Promise.all(sourceObjStats)
+
+    const validatedStats = srcObjectInfos.map((resItemStat, index) => {
+      const srcConfig: CopySourceOptions | undefined = sourceObjList[index]
+
+      let srcCopySize = resItemStat.size
+      // Check if a segment is specified, and if so, is the
+      // segment within object bounds? &#13;
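+      // (A MatchRange source copies only the byte range [Start, End] of the object; otherwise the whole object is copied.)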
+ if (srcConfig && srcConfig.MatchRange) { + // Since range is specified, + // 0 <= src.srcStart <= src.srcEnd + // so only invalid case to check is: + const srcStart = srcConfig.Start + const srcEnd = srcConfig.End + if (srcEnd >= srcCopySize || srcStart < 0) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, + ) + } + srcCopySize = srcEnd - srcStart + 1 + } + + // Only the last source may be less than `absMinPartSize` + if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, + ) + } + + // Is data to copy too large? + totalSize += srcCopySize + if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { + throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) + } + + // record source size + srcObjectSizes[index] = srcCopySize + + // calculate parts needed for current source + totalParts += partsRequired(srcCopySize) + // Do we need more parts than we are allowed? + if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, + ) + } + + return resItemStat + }) + + if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { + return await this.copyObject(sourceObjList[0] as CopySourceOptions, destObjConfig) // use copyObjectV2 + } + + // preserve etag to avoid modification of object while copying. + for (let i = 0; i < sourceFilesLength; i++) { + ;(sourceObjList[i] as CopySourceOptions).MatchETag = (validatedStats[i] as BucketItemStat).etag + } + + const splitPartSizeList = validatedStats.map((resItemStat, idx) => { + return calculateEvenSplits(srcObjectSizes[idx] as number, sourceObjList[idx] as CopySourceOptions) + }) + + const getUploadPartConfigList = (uploadId: string) => { + const uploadPartConfigList: UploadPartConfig[] = [] + + splitPartSizeList.forEach((splitSize, splitIndex: number) => { + if (splitSize) { + const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize + + const partIndex = splitIndex + 1 // part index starts from 1. 
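+          // startIdx/endIdx hold the even-split byte offsets computed by calculateEvenSplits above: one [start, end] pair per part to upload for this source.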
+ const totalUploads = Array.from(startIdx) + + const headers = (sourceObjList[splitIndex] as CopySourceOptions).getHeaders() + + totalUploads.forEach((splitStart, upldCtrIdx) => { + const splitEnd = endIdx[upldCtrIdx] + + const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` + headers['x-amz-copy-source'] = `${sourceObj}` + headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` + + const uploadPartConfig = { + bucketName: destObjConfig.Bucket, + objectName: destObjConfig.Object, + uploadID: uploadId, + partNumber: partIndex, + headers: headers, + sourceObj: sourceObj, + } + + uploadPartConfigList.push(uploadPartConfig) + }) + } + }) + + return uploadPartConfigList + } + + const uploadAllParts = async (uploadList: UploadPartConfig[]) => { + const partUploads = uploadList.map(async (item) => { + return this.uploadPart(item) + }) + // Process results here if needed + return await Promise.all(partUploads) + } + + const performUploadParts = async (uploadId: string) => { + const uploadList = getUploadPartConfigList(uploadId) + const partsRes = await uploadAllParts(uploadList) + return partsRes.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) + } + + const newUploadHeaders = destObjConfig.getHeaders() + + const uploadId = await this.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders) + try { + const partsDone = await performUploadParts(uploadId) + return await this.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone) + } catch (err) { + return await this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId) + } + } } diff --git a/src/internal/type.ts b/src/internal/type.ts index 6bfbe005..0009e44d 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -1,6 +1,9 @@ import type * as http from 'node:http' import type { Readable as ReadableStream } from 'node:stream' +import type { CopyDestinationOptions, CopySourceOptions } from '../helpers.ts' +import type { CopyConditions } from './copy-conditions.ts' + export type VersionIdentificator = { versionId?: string } @@ -416,3 +419,30 @@ export type RemoveObjectsResponse = VersionId?: string } } + +export type CopyObjectResultV1 = { + etag: string + lastModified: string | Date +} +export type CopyObjectResultV2 = { + Bucket?: string + Key?: string + LastModified: string | Date + MetaData?: ResponseHeader + VersionId?: string | null + SourceVersionId?: string | null + Etag?: string + Size?: number +} + +export type CopyObjectResult = CopyObjectResultV1 | CopyObjectResultV2 +export type CopyObjectParams = [CopySourceOptions, CopyDestinationOptions] | [string, string, string, CopyConditions?] 
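+// CopyObjectParams captures the two copyObject() call shapes: the options form
+// (CopySourceOptions, CopyDestinationOptions) and the legacy positional form
+// (targetBucket, targetObject, '/sourceBucket/sourceObject', conditions?).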
+
+export type UploadPartConfig = {
+  bucketName: string
+  objectName: string
+  uploadID: string
+  partNumber: number
+  headers: RequestHeaders
+  sourceObj: string
+}
diff --git a/src/internal/xml-parser.ts b/src/internal/xml-parser.ts
index ed71f5a0..95da2e89 100644
--- a/src/internal/xml-parser.ts
+++ b/src/internal/xml-parser.ts
@@ -8,7 +8,13 @@ import * as errors from '../errors.ts'
 import { SelectResults } from '../helpers.ts'
 import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, toArray } from './helper.ts'
 import { readAsString } from './response.ts'
-import type { BucketItemFromList, BucketItemWithMetadata, ObjectLockInfo, ReplicationConfig } from './type.ts'
+import type {
+  BucketItemFromList,
+  BucketItemWithMetadata,
+  CopyObjectResultV1,
+  ObjectLockInfo,
+  ReplicationConfig,
+} from './type.ts'
 import { RETENTION_VALIDITY_UNITS } from './type.ts'

 // parse XML response for bucket region
@@ -566,3 +572,36 @@ export function removeObjectsParser(xml: string) {
   }
   return []
 }
+
+// parse XML response for copy object
+export function parseCopyObject(xml: string): CopyObjectResultV1 {
+  const result: CopyObjectResultV1 = {
+    etag: '',
+    lastModified: '',
+  }
+
+  let xmlobj = parseXml(xml)
+  if (!xmlobj.CopyObjectResult) {
+    throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"')
+  }
+  xmlobj = xmlobj.CopyObjectResult
+  if (xmlobj.ETag) {
+    result.etag = xmlobj.ETag.replace(/^"/g, '').replace(/"$/g, '')
+  }
+  if (xmlobj.LastModified) {
+    result.lastModified = new Date(xmlobj.LastModified)
+  }
+
+  return result
+}
+
+export function uploadPartParser(xml: string) {
+  const xmlObj = parseXml(xml)
+  const respEl = xmlObj.CopyPartResult
+  return respEl
+}
diff --git a/src/minio.d.ts b/src/minio.d.ts
index 4599a391..67cf6632 100644
--- a/src/minio.d.ts
+++ b/src/minio.d.ts
@@ -1,13 +1,7 @@
 // imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts

 /* eslint-disable @typescript-eslint/no-explicit-any */
-import type {
-  CopyDestinationOptions,
-  CopySourceOptions,
-  LEGAL_HOLD_STATUS,
-  RETENTION_MODES,
-  RETENTION_VALIDITY_UNITS,
-} from './helpers.ts'
+import type { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './helpers.ts'
 import type { ClientOptions, NoResultCallback, RemoveOptions } from './internal/client.ts'
 import { TypedClient } from './internal/client.ts'
 import { CopyConditions } from './internal/copy-conditions.ts'
@@ -147,29 +141,6 @@ export class Client extends TypedClient {
   listObjectsV2(bucketName: string, prefix?: string, recursive?: boolean, startAfter?: string): BucketStream<BucketItem>

-  copyObject(
-    bucketName: string,
-    objectName: string,
-    sourceObject: string,
-    conditions: CopyConditions,
-    callback: ResultCallback<BucketItemCopy>,
-  ): void
-  copyObject(
-    bucketName: string,
-    objectName: string,
-    sourceObject: string,
-    conditions: CopyConditions,
-  ): Promise<BucketItemCopy>
-
-  removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void
-  removeIncompleteUpload(bucketName: string, objectName: string): Promise<void>
-  composeObject(
-    destObjConfig: CopyDestinationOptions,
-    sourceObjList: CopySourceOptions[],
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise<UploadedObjectInfo>
-
   // Presigned operations
   presignedUrl(httpMethod: string, bucketName: string, objectName: string, &#13;
callback: ResultCallback): void presignedUrl( diff --git a/src/minio.js b/src/minio.js index 53cb2473..f88528fd 100644 --- a/src/minio.js +++ b/src/minio.js @@ -16,22 +16,15 @@ import * as Stream from 'node:stream' -import async from 'async' -import _ from 'lodash' import * as querystring from 'query-string' import xml2js from 'xml2js' import * as errors from './errors.ts' -import { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' import { callbackify } from './internal/callbackify.js' import { TypedClient } from './internal/client.ts' import { CopyConditions } from './internal/copy-conditions.ts' import { - calculateEvenSplits, - extractMetadata, getScope, - getSourceVersionId, - getVersionId, isBoolean, isFunction, isNumber, @@ -42,12 +35,8 @@ import { isValidObjectName, isValidPrefix, makeDateLong, - PART_CONSTRAINTS, - partsRequired, pipesetup, - sanitizeETag, uriEscape, - uriResourceEscape, } from './internal/helper.ts' import { PostPolicy } from './internal/post-policy.ts' import { NotificationConfig, NotificationPoller } from './notification.ts' @@ -85,137 +74,6 @@ export class Client extends TypedClient { } this.userAgent = `${this.userAgent} ${appName}/${appVersion}` } - // Copy the object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `srcObject` _string_: path of the source object to be copied - // * `conditions` _CopyConditions_: copy conditions that needs to be satisfied (optional, default `null`) - // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object - copyObjectV1(arg1, arg2, arg3, arg4, arg5) { - var bucketName = arg1 - var objectName = arg2 - var srcObject = arg3 - var conditions, cb - if (typeof arg4 == 'function' && arg5 === undefined) { - conditions = null - cb = arg4 - } else { - conditions = arg4 - cb = arg5 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(srcObject)) { - throw new TypeError('srcObject should be of type "string"') - } - if (srcObject === '') { - throw new errors.InvalidPrefixError(`Empty source prefix`) - } - - if (conditions !== null && !(conditions instanceof CopyConditions)) { - throw new TypeError('conditions should be of type "CopyConditions"') - } - - var headers = {} - headers['x-amz-copy-source'] = uriResourceEscape(srcObject) - - if (conditions !== null) { - if (conditions.modified !== '') { - headers['x-amz-copy-source-if-modified-since'] = conditions.modified - } - if (conditions.unmodified !== '') { - headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified - } - if (conditions.matchETag !== '') { - headers['x-amz-copy-source-if-match'] = conditions.matchETag - } - if (conditions.matchEtagExcept !== '') { - headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept - } - } - - var method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } - - /** - * Internal Method to perform copy of an 
object. - * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions - * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions - * @param cb __function__ called with null if there is an error - * @returns Promise if no callack is passed. - */ - copyObjectV2(sourceConfig, destConfig, cb) { - if (!(sourceConfig instanceof CopySourceOptions)) { - throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') - } - if (!(destConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - if (!destConfig.validate()) { - return false - } - if (!destConfig.validate()) { - return false - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) - - const bucketName = destConfig.Bucket - const objectName = destConfig.Object - - const method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - const transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => { - const resHeaders = response.headers - - const copyObjResponse = { - Bucket: destConfig.Bucket, - Key: destConfig.Object, - LastModified: data.LastModified, - MetaData: extractMetadata(resHeaders), - VersionId: getVersionId(resHeaders), - SourceVersionId: getSourceVersionId(resHeaders), - Etag: sanitizeETag(resHeaders.etag), - Size: +resHeaders['content-length'], - } - - return cb(null, copyObjResponse) - }) - }) - } - - // Backward compatibility for Copy Object API. - copyObject(...allArgs) { - if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { - return this.copyObjectV2(...arguments) - } - return this.copyObjectV1(...arguments) - } - // list a batch of objects listObjectsQuery(bucketName, prefix, marker, listQueryOpts = {}) { if (!isValidBucketName(bucketName)) { @@ -757,230 +615,8 @@ export class Client extends TypedClient { return listener } - - /** - * Internal method to upload a part during compose object. - * @param partConfig __object__ contains the following. - * bucketName __string__ - * objectName __string__ - * uploadID __string__ - * partNumber __number__ - * headers __object__ - * @param cb called with null incase of error. - */ - uploadPartCopy(partConfig, cb) { - const { bucketName, objectName, uploadID, partNumber, headers } = partConfig - - const method = 'PUT' - let query = `uploadId=${uploadID}&partNumber=${partNumber}` - const requestOptions = { method, bucketName, objectName: objectName, query, headers } - return this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - let partCopyResult = Buffer.from('') - if (e) { - return cb(e) - } - pipesetup(response, transformers.uploadPartTransformer()) - .on('data', (data) => { - partCopyResult = data - }) - .on('error', cb) - .on('end', () => { - let uploadPartCopyRes = { - etag: sanitizeETag(partCopyResult.ETag), - key: objectName, - part: partNumber, - } - - cb(null, uploadPartCopyRes) - }) - }) - } - - composeObject(destObjConfig = {}, sourceObjList = [], cb) { - const me = this // many async flows. so store the ref. 
- const sourceFilesLength = sourceObjList.length - - if (!Array.isArray(sourceObjList)) { - throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') - } - if (!(destObjConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - - if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) { - throw new errors.InvalidArgumentError( - `"There must be as least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`, - ) - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - for (let i = 0; i < sourceFilesLength; i++) { - if (!sourceObjList[i].validate()) { - return false - } - } - - if (!destObjConfig.validate()) { - return false - } - - const getStatOptions = (srcConfig) => { - let statOpts = {} - if (!_.isEmpty(srcConfig.VersionID)) { - statOpts = { - versionId: srcConfig.VersionID, - } - } - return statOpts - } - const srcObjectSizes = [] - let totalSize = 0 - let totalParts = 0 - - const sourceObjStats = sourceObjList.map((srcItem) => - me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)), - ) - - return Promise.all(sourceObjStats) - .then((srcObjectInfos) => { - const validatedStats = srcObjectInfos.map((resItemStat, index) => { - const srcConfig = sourceObjList[index] - - let srcCopySize = resItemStat.size - // Check if a segment is specified, and if so, is the - // segment within object bounds? - if (srcConfig.MatchRange) { - // Since range is specified, - // 0 <= src.srcStart <= src.srcEnd - // so only invalid case to check is: - const srcStart = srcConfig.Start - const srcEnd = srcConfig.End - if (srcEnd >= srcCopySize || srcStart < 0) { - throw new errors.InvalidArgumentError( - `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, - ) - } - srcCopySize = srcEnd - srcStart + 1 - } - - // Only the last source may be less than `absMinPartSize` - if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { - throw new errors.InvalidArgumentError( - `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, - ) - } - - // Is data to copy too large? - totalSize += srcCopySize - if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { - throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) - } - - // record source size - srcObjectSizes[index] = srcCopySize - - // calculate parts needed for current source - totalParts += partsRequired(srcCopySize) - // Do we need more parts than we are allowed? - if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { - throw new errors.InvalidArgumentError( - `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, - ) - } - - return resItemStat - }) - - if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { - return this.copyObject(sourceObjList[0], destObjConfig, cb) // use copyObjectV2 - } - - // preserve etag to avoid modification of object while copying. 
- for (let i = 0; i < sourceFilesLength; i++) { - sourceObjList[i].MatchETag = validatedStats[i].etag - } - - const splitPartSizeList = validatedStats.map((resItemStat, idx) => { - const calSize = calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) - return calSize - }) - - function getUploadPartConfigList(uploadId) { - const uploadPartConfigList = [] - - splitPartSizeList.forEach((splitSize, splitIndex) => { - const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize - - let partIndex = splitIndex + 1 // part index starts from 1. - const totalUploads = Array.from(startIdx) - - const headers = sourceObjList[splitIndex].getHeaders() - - totalUploads.forEach((splitStart, upldCtrIdx) => { - let splitEnd = endIdx[upldCtrIdx] - - const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` - headers['x-amz-copy-source'] = `${sourceObj}` - headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` - - const uploadPartConfig = { - bucketName: destObjConfig.Bucket, - objectName: destObjConfig.Object, - uploadID: uploadId, - partNumber: partIndex, - headers: headers, - sourceObj: sourceObj, - } - - uploadPartConfigList.push(uploadPartConfig) - }) - }) - - return uploadPartConfigList - } - - const performUploadParts = (uploadId) => { - const uploadList = getUploadPartConfigList(uploadId) - - async.map(uploadList, me.uploadPartCopy.bind(me), (err, res) => { - if (err) { - this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId).then( - () => cb(), - (err) => cb(err), - ) - return - } - const partsDone = res.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) - return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone).then( - (result) => cb(null, result), - (err) => cb(err), - ) - }) - } - - const newUploadHeaders = destObjConfig.getHeaders() - - me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders).then( - (uploadId) => { - performUploadParts(uploadId) - }, - (err) => { - cb(err, null) - }, - ) - }) - .catch((error) => { - cb(error, null) - }) - } } -// Promisify various public-facing APIs on the Client module. 
-Client.prototype.copyObject = promisify(Client.prototype.copyObject) - Client.prototype.presignedUrl = promisify(Client.prototype.presignedUrl) Client.prototype.presignedGetObject = promisify(Client.prototype.presignedGetObject) Client.prototype.presignedPutObject = promisify(Client.prototype.presignedPutObject) @@ -988,7 +624,6 @@ Client.prototype.presignedPostPolicy = promisify(Client.prototype.presignedPostP Client.prototype.getBucketNotification = promisify(Client.prototype.getBucketNotification) Client.prototype.setBucketNotification = promisify(Client.prototype.setBucketNotification) Client.prototype.removeAllBucketNotification = promisify(Client.prototype.removeAllBucketNotification) -Client.prototype.composeObject = promisify(Client.prototype.composeObject) // refactored API use promise internally Client.prototype.makeBucket = callbackify(Client.prototype.makeBucket) @@ -1032,3 +667,5 @@ Client.prototype.removeBucketEncryption = callbackify(Client.prototype.removeBuc Client.prototype.getObjectRetention = callbackify(Client.prototype.getObjectRetention) Client.prototype.removeObjects = callbackify(Client.prototype.removeObjects) Client.prototype.removeIncompleteUpload = callbackify(Client.prototype.removeIncompleteUpload) +Client.prototype.copyObject = callbackify(Client.prototype.copyObject) +Client.prototype.composeObject = callbackify(Client.prototype.composeObject) diff --git a/src/transformers.js b/src/transformers.js index 9225650a..aa883dd8 100644 --- a/src/transformers.js +++ b/src/transformers.js @@ -95,11 +95,6 @@ export function getHashSummer(enableSHA256) { // Following functions return a stream object that parses XML // and emits suitable Javascript objects. -// Parses CopyObject response. -export function getCopyObjectTransformer() { - return getConcater(xmlParsers.parseCopyObject) -} - // Parses listObjects response. 
export function getListObjectsTransformer() {
  return getConcater(xmlParsers.parseListObjects)
diff --git a/src/xml-parsers.js b/src/xml-parsers.js
index 44c1505d..522edc30 100644
--- a/src/xml-parsers.js
+++ b/src/xml-parsers.js
@@ -25,33 +25,6 @@ const fxpWithoutNumParser = new XMLParser({
   },
 })

-// parse XML response for copy object
-export function parseCopyObject(xml) {
-  var result = {
-    etag: '',
-    lastModified: '',
-  }
-
-  var xmlobj = parseXml(xml)
-  if (!xmlobj.CopyObjectResult) {
-    throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"')
-  }
-  xmlobj = xmlobj.CopyObjectResult
-  if (xmlobj.ETag) {
-    result.etag = xmlobj.ETag.replace(/^"/g, '')
-      .replace(/"$/g, '')
-      .replace(/^"/g, '')
-      .replace(/"$/g, '')
-      .replace(/^"/g, '')
-      .replace(/"$/g, '')
-  }
-  if (xmlobj.LastModified) {
-    result.lastModified = new Date(xmlobj.LastModified)
-  }
-
-  return result
-}
-
 // parse XML response for bucket notification
 export function parseBucketNotification(xml) {
   var result = {
diff --git a/tests/unit/test.js b/tests/unit/test.js
index c604e114..62713234 100644
--- a/tests/unit/test.js
+++ b/tests/unit/test.js
@@ -1622,30 +1622,32 @@ describe('Client', function () {

   describe('Compose Object APIs', () => {
     describe('composeObject(destObjConfig, sourceObjectList,cb)', () => {
-      it('should fail on null destination config', (done) => {
+      it('should fail on null destination config', async () => {
         try {
-          client.composeObject(null, function () {})
-        } catch (e) {
-          done()
+          await client.composeObject(null)
+        } catch (err) {
+          return
         }
+        throw new Error('composeObject should have thrown an error')
       })
-
-      it('should fail on no array source config', (done) => {
+      it('should fail on no array source config', async () => {
         try {
           const destOptions = new CopyDestinationOptions({ Bucket: 'test-bucket', Object: 'test-object' })
-          client.composeObject(destOptions, 'non-array', function () {})
-        } catch (e) {
-          done()
+          await client.composeObject(destOptions, 'non-array')
+        } catch (err) {
+          return
         }
+        throw new Error('composeObject should have thrown an error')
       })
-      it('should fail on null source config', (done) => {
+      it('should fail on null source config', async () => {
         try {
           const destOptions = new CopyDestinationOptions({ Bucket: 'test-bucket', Object: 'test-object' })
-          client.composeObject(destOptions, null, function () {})
-        } catch (e) {
-          done()
+          await client.composeObject(destOptions, null)
+        } catch (err) {
+          return
         }
+        throw new Error('composeObject should have thrown an error')
       })
     })
  })
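
The APIs migrated above are promise-first; callbacks are now provided through `callbackify`. The sketch below shows how a caller is expected to use both `copyObject` call shapes and `composeObject` after this change. The endpoint, credentials, bucket/object names, and ETag value are placeholders, not values taken from this patch.

```js
import * as Minio from 'minio'

// Placeholder connection details -- replace with real values.
const s3Client = new Minio.Client({
  endPoint: 'play.min.io',
  accessKey: 'YOUR-ACCESSKEYID',
  secretKey: 'YOUR-SECRETACCESSKEY',
})

// Legacy positional form, guarded by an ETag copy condition.
const conds = new Minio.CopyConditions()
conds.setMatchETag('bd891862ea3e22c93ed53a098218791d')
await s3Client.copyObject('dst-bucket', 'dst-object', '/src-bucket/src-object', conds)

// Options form: the same copy expressed with CopySourceOptions/CopyDestinationOptions.
const source = new Minio.CopySourceOptions({ Bucket: 'src-bucket', Object: 'src-object' })
const dest = new Minio.CopyDestinationOptions({ Bucket: 'dst-bucket', Object: 'dst-object' })
await s3Client.copyObject(source, dest)

// composeObject stitches previously uploaded parts into one object server-side.
// Every part except the last must be at least the S3 minimum part size (5 MiB).
await s3Client.composeObject(dest, [
  new Minio.CopySourceOptions({ Bucket: 'src-bucket', Object: 'part.1' }),
  new Minio.CopySourceOptions({ Bucket: 'src-bucket', Object: 'part.2' }),
])
```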