diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 79b23831..c6df26ea 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -15,7 +15,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: '20.x'
+          node-version: '22.x'
           cache: 'npm'
       - run: npm ci
@@ -30,7 +30,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: '20.x'
+          node-version: '22.x'
           cache: 'npm'
       - run: npm ci
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 2ac4a49b..1e30679e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -15,7 +15,7 @@ jobs:
     strategy:
       max-parallel: 5
       matrix:
-        node_version: [ 20.x, 18.x, 16.x ]
+        node_version: [ 22.x, 20.x, 18.x, 16.x ]
         os: [ ubuntu-latest, windows-latest ]
     env:
diff --git a/examples/compose-object-test-example.mjs b/examples/compose-object-test-example.mjs
index 8b6194a1..979ffefc 100644
--- a/examples/compose-object-test-example.mjs
+++ b/examples/compose-object-test-example.mjs
@@ -78,6 +78,9 @@ const sampleRunComposeObject = async () => {
   const destObjConfig = new Minio.CopyDestinationOptions({
     Bucket: bucketName,
     Object: composedObjName,
+    Headers: {
+      'Content-Type': 'application/octet-stream', // example to set headers
+    },
   })

   try {
diff --git a/package-lock.json b/package-lock.json
index c23c1bc7..38f336d3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "minio",
-  "version": "8.0.3",
+  "version": "8.0.4",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "minio",
-      "version": "8.0.3",
+      "version": "8.0.4",
       "license": "Apache-2.0",
       "dependencies": {
         "async": "^3.2.4",
diff --git a/package.json b/package.json
index c3bc2ed8..e7c72322 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "minio",
-  "version": "8.0.3",
+  "version": "8.0.4",
   "description": "S3 Compatible Cloud Storage client",
   "main": "./dist/main/minio.js",
   "module": "./dist/esm/minio.mjs",
diff --git a/src/helpers.ts b/src/helpers.ts
index 54791449..40bec966 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -177,6 +177,10 @@ export interface ICopyDestinationOptions {
   RetainUntilDate?: string
   Mode?: RETENTION_MODES
   MetadataDirective?: 'COPY' | 'REPLACE'
+  /**
+   * Extra headers for the target object
+   */
+  Headers?: Record<string, string>
 }

 export class CopyDestinationOptions {
@@ -189,6 +193,7 @@ export class CopyDestinationOptions {
   private readonly RetainUntilDate?: string
   private readonly Mode?: RETENTION_MODES
   private readonly MetadataDirective?: string
+  private readonly Headers?: Record<string, string>

   constructor({
     Bucket,
@@ -200,6 +205,7 @@ export class CopyDestinationOptions {
     RetainUntilDate,
     Mode,
     MetadataDirective,
+    Headers,
   }: ICopyDestinationOptions) {
     this.Bucket = Bucket
     this.Object = Object
@@ -210,6 +216,7 @@ export class CopyDestinationOptions {
     this.Mode = Mode // retention mode
     this.RetainUntilDate = RetainUntilDate
     this.MetadataDirective = MetadataDirective
+    this.Headers = Headers
   }

   getHeaders(): RequestHeaders {
@@ -254,6 +261,12 @@ export class CopyDestinationOptions {
         headerOptions[key] = value
       }
     }
+    if (this.Headers) {
+      for (const [key, value] of Object.entries(this.Headers)) {
+        headerOptions[key] = value
+      }
+    }
+
     return headerOptions
   }
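For reference, a minimal sketch of how the new `Headers` option is consumed; bucket name, object name, and header values below are placeholders, not part of the change:

```ts
import * as Minio from 'minio'

const dest = new Minio.CopyDestinationOptions({
  Bucket: 'my-bucket', // placeholder
  Object: 'composed-object', // placeholder
  Headers: {
    'Content-Type': 'application/octet-stream',
    'Cache-Control': 'max-age=3600',
  },
})

// Entries from Headers are copied verbatim into the outgoing request headers,
// after the headers derived from the other options (encryption, tags, retention).
console.log(dest.getHeaders())
```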
diff --git a/src/internal/client.ts b/src/internal/client.ts
index b8df18b5..fd3bd592 100644
--- a/src/internal/client.ts
+++ b/src/internal/client.ts
@@ -124,6 +124,7 @@ import {
   parseListObjects,
   parseObjectLegalHoldConfig,
   parseSelectObjectContentResponse,
+  uploadPartParser,
 } from './xml-parser.ts'
 import * as xmlParsers from './xml-parser.ts'
@@ -904,7 +905,7 @@ export class TypedClient {
    * Creates the bucket `bucketName`.
    *
    */
-  async makeBucket(bucketName: string, region: Region = '', makeOpts: MakeBucketOpt = {}): Promise<void> {
+  async makeBucket(bucketName: string, region: Region = '', makeOpts?: MakeBucketOpt): Promise<void> {
     if (!isValidBucketName(bucketName)) {
       throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
     }
@@ -917,7 +918,7 @@ export class TypedClient {
     if (!isString(region)) {
       throw new TypeError('region should be of type "string"')
     }
-    if (!isObject(makeOpts)) {
+    if (makeOpts && !isObject(makeOpts)) {
       throw new TypeError('makeOpts should be of type "object"')
     }
@@ -943,7 +944,7 @@ export class TypedClient {
     const method = 'PUT'
     const headers: RequestHeaders = {}

-    if (makeOpts.ObjectLocking) {
+    if (makeOpts && makeOpts.ObjectLocking) {
       headers['x-amz-bucket-object-lock-enabled'] = true
     }
@@ -1009,7 +1010,7 @@ export class TypedClient {
   /**
    * Callback is called with readable stream of the object content.
    */
-  async getObject(bucketName: string, objectName: string, getOpts: GetObjectOpts = {}): Promise<stream.Readable> {
+  async getObject(bucketName: string, objectName: string, getOpts?: GetObjectOpts): Promise<stream.Readable> {
     if (!isValidBucketName(bucketName)) {
       throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
     }
@@ -1032,7 +1033,7 @@ export class TypedClient {
     objectName: string,
     offset: number,
     length = 0,
-    getOpts: GetObjectOpts = {},
+    getOpts?: GetObjectOpts,
   ): Promise<stream.Readable> {
     if (!isValidBucketName(bucketName)) {
       throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
     }
@@ -1060,17 +1061,26 @@ export class TypedClient {
       }
     }

-    const sseHeaders: Record<string, string> = {
-      ...(getOpts.SSECustomerAlgorithm && {
-        'X-Amz-Server-Side-Encryption-Customer-Algorithm': getOpts.SSECustomerAlgorithm,
-      }),
-      ...(getOpts.SSECustomerKey && { 'X-Amz-Server-Side-Encryption-Customer-Key': getOpts.SSECustomerKey }),
-      ...(getOpts.SSECustomerKeyMD5 && { 'X-Amz-Server-Side-Encryption-Customer-Key-MD5': getOpts.SSECustomerKeyMD5 }),
+    let query = ''
+    let headers: RequestHeaders = {
+      ...(range !== '' && { range }),
     }

-    const headers: RequestHeaders = {
-      ...prependXAMZMeta(sseHeaders),
-      ...(range !== '' && { range }),
+    if (getOpts) {
+      const sseHeaders: Record<string, string> = {
+        ...(getOpts.SSECustomerAlgorithm && {
+          'X-Amz-Server-Side-Encryption-Customer-Algorithm': getOpts.SSECustomerAlgorithm,
+        }),
+        ...(getOpts.SSECustomerKey && { 'X-Amz-Server-Side-Encryption-Customer-Key': getOpts.SSECustomerKey }),
+        ...(getOpts.SSECustomerKeyMD5 && {
+          'X-Amz-Server-Side-Encryption-Customer-Key-MD5': getOpts.SSECustomerKeyMD5,
+        }),
+      }
+      query = qs.stringify(getOpts)
+      headers = {
+        ...prependXAMZMeta(sseHeaders),
+        ...headers,
+      }
     }

     const expectedStatusCodes = [200]
@@ -1079,25 +1089,19 @@ export class TypedClient {
     }
     const method = 'GET'
-    const query = qs.stringify(getOpts)
     return await this.makeRequestAsync({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes)
   }

   /**
    * download object content to a file.
-   * This method will create a temp file named `${filename}.${etag}.part.minio` when downloading.
+   * This method will create a temp file named `${filename}.${base64(etag)}.part.minio` when downloading.
    *
    * @param bucketName - name of the bucket
    * @param objectName - name of the object
    * @param filePath - path to which the object data will be written to
    * @param getOpts - Optional object get option
    */
-  async fGetObject(
-    bucketName: string,
-    objectName: string,
-    filePath: string,
-    getOpts: GetObjectOpts = {},
-  ): Promise<void> {
+  async fGetObject(bucketName: string, objectName: string, filePath: string, getOpts?: GetObjectOpts): Promise<void> {
     // Input validation.
     if (!isValidBucketName(bucketName)) {
       throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
     }
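Since `getOpts` is now optional rather than defaulting to `{}`, both call styles below are valid; a usage sketch against a placeholder deployment (endpoint, credentials, and names are illustrative):

```ts
import * as Minio from 'minio'

const client = new Minio.Client({
  endPoint: 'play.min.io', // placeholder endpoint and credentials
  accessKey: 'ACCESS_KEY',
  secretKey: 'SECRET_KEY',
})

async function demo() {
  // Omitting getOpts: no query string and no SSE-C headers are sent at all.
  const whole = await client.getObject('my-bucket', 'my-object')

  // Passing getOpts: it is serialized into the query string, and any SSE-C
  // fields become X-Amz-Server-Side-Encryption-Customer-* headers.
  const firstKiB = await client.getPartialObject('my-bucket', 'my-object', 0, 1024, {
    versionId: 'some-version-id', // placeholder
  })
  return { whole, firstKiB }
}
```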
@@ -1112,7 +1116,8 @@ export class TypedClient {
     const downloadToTmpFile = async (): Promise<string> => {
       let partFileStream: stream.Writable
       const objStat = await this.statObject(bucketName, objectName, getOpts)
-      const partFile = `${filePath}.${objStat.etag}.part.minio`
+      const encodedEtag = Buffer.from(objStat.etag).toString('base64')
+      const partFile = `${filePath}.${encodedEtag}.part.minio`

       await fsp.mkdir(path.dirname(filePath), { recursive: true })
@@ -1152,7 +1157,8 @@ export class TypedClient {
   /**
    * Stat information of the object.
    */
-  async statObject(bucketName: string, objectName: string, statOpts: StatObjectOpts = {}): Promise<BucketItemStat> {
+  async statObject(bucketName: string, objectName: string, statOpts?: StatObjectOpts): Promise<BucketItemStat> {
+    const statOptDef = statOpts || {}
     if (!isValidBucketName(bucketName)) {
       throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
     }
@@ -1160,11 +1166,11 @@ export class TypedClient {
       throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
     }

-    if (!isObject(statOpts)) {
+    if (!isObject(statOptDef)) {
       throw new errors.InvalidArgumentError('statOpts should be of type "object"')
     }

-    const query = qs.stringify(statOpts)
+    const query = qs.stringify(statOptDef)
     const method = 'HEAD'
     const res = await this.makeRequestAsyncOmit({ method, bucketName, objectName, query })
@@ -1573,7 +1579,7 @@ export class TypedClient {
   /**
    * Uploads the object using contents from a file
    */
-  async fPutObject(bucketName: string, objectName: string, filePath: string, metaData: ObjectMetaData = {}) {
+  async fPutObject(bucketName: string, objectName: string, filePath: string, metaData?: ObjectMetaData) {
     if (!isValidBucketName(bucketName)) {
       throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
     }
@@ -1584,12 +1590,12 @@ export class TypedClient {
     if (!isString(filePath)) {
       throw new TypeError('filePath should be of type "string"')
     }
-    if (!isObject(metaData)) {
+    if (metaData && !isObject(metaData)) {
       throw new TypeError('metaData should be of type "object"')
     }

     // Inserts correct `content-type` attribute based on metaData and filePath
-    metaData = insertContentType(metaData, filePath)
+    metaData = insertContentType(metaData || {}, filePath)
     const stat = await fsp.lstat(filePath)
     return await this.putObject(bucketName, objectName, fs.createReadStream(filePath), stat.size, metaData)
   }
@@ -1944,7 +1950,7 @@ export class TypedClient {
   /**
    * Get the tags associated with a bucket OR an object
    */
-  async getObjectTagging(bucketName: string, objectName: string, getOpts: GetObjectOpts = {}): Promise<Tag[]> {
+  async getObjectTagging(bucketName: string, objectName: string, getOpts?: GetObjectOpts): Promise<Tag[]> {
     const method = 'GET'
     let query = 'tagging'

@@ -1954,7 +1960,7 @@ export class TypedClient {
     if (!isValidObjectName(objectName)) {
       throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName)
     }
-    if (!isObject(getOpts)) {
+    if (getOpts && !isObject(getOpts)) {
       throw new errors.InvalidArgumentError('getOpts should be of type "object"')
     }
@@ -2669,8 +2675,10 @@ export class TypedClient {
     const query = `uploadId=${uploadID}&partNumber=${partNumber}`
     const requestOptions = { method, bucketName, objectName: objectName, query, headers }
     const res = await this.makeRequestAsync(requestOptions, payload)
+    const body = await readAsString(res)
+    const partRes = uploadPartParser(body)
     return {
-      etag: sanitizeETag(res.headers.etag),
+      etag: sanitizeETag(partRes.ETag),
       key: objectName,
       part: partNumber,
     }
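A worked sketch of the new part-file naming in `fGetObject`: raw ETags can contain quotes or other characters that are awkward in file names, so the ETag is base64-encoded first (all values below are illustrative):

```ts
const etag = '"3858f62230ac3c915f300c664312c63f"' // as reported by statObject (illustrative)
const filePath = '/tmp/download.bin' // illustrative

const encodedEtag = Buffer.from(etag).toString('base64')
// Yields a name of the form /tmp/download.bin.<base64-of-etag>.part.minio
const partFile = `${filePath}.${encodedEtag}.part.minio`
console.log(partFile)
```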
diff --git a/src/internal/xml-parser.ts b/src/internal/xml-parser.ts
index 99bc2c3b..b6db0eaa 100644
--- a/src/internal/xml-parser.ts
+++ b/src/internal/xml-parser.ts
@@ -333,8 +333,8 @@ export type ListMultipartResult = {
   uploads: {
     key: string
     uploadId: UploadID
-    initiator: unknown
-    owner: unknown
+    initiator?: { id: string; displayName: string }
+    owner?: { id: string; displayName: string }
     storageClass: unknown
     initiated: Date
   }[]
@@ -381,13 +381,19 @@ export function parseListMultipart(xml: string): ListMultipartResult {

   if (xmlobj.Upload) {
     toArray(xmlobj.Upload).forEach((upload) => {
-      const key = upload.Key
-      const uploadId = upload.UploadId
-      const initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName }
-      const owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName }
-      const storageClass = upload.StorageClass
-      const initiated = new Date(upload.Initiated)
-      result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated })
+      const uploadItem: ListMultipartResult['uploads'][number] = {
+        key: upload.Key,
+        uploadId: upload.UploadId,
+        storageClass: upload.StorageClass,
+        initiated: new Date(upload.Initiated),
+      }
+      if (upload.Initiator) {
+        uploadItem.initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName }
+      }
+      if (upload.Owner) {
+        uploadItem.owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName }
+      }
+      result.uploads.push(uploadItem)
     })
   }
   return result
@@ -728,3 +734,9 @@ export function parseListObjects(xml: string) {
   }
   return result
 }
+
+export function uploadPartParser(xml: string) {
+  const xmlObj = parseXml(xml)
+  const respEl = xmlObj.CopyPartResult
+  return respEl
+}
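A minimal sketch of the response body `uploadPartParser` expects; the ETag and timestamp are illustrative, and the import path is relative to the repo root for illustration:

```ts
import { uploadPartParser } from './src/internal/xml-parser.ts'

const body = `<CopyPartResult>
  <LastModified>2024-12-17T08:16:52.000Z</LastModified>
  <ETag>"3858f62230ac3c915f300c664312c63f"</ETag>
</CopyPartResult>`

// uploadPart() above now reads the ETag from the parsed response body instead
// of the `etag` response header, then strips the quotes via sanitizeETag().
const { ETag } = uploadPartParser(body)
console.log(ETag)
```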
diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js
index 6ededba7..7e1c4ace 100644
--- a/tests/functional/functional-tests.js
+++ b/tests/functional/functional-tests.js
@@ -4066,19 +4066,26 @@ describe('functional tests', function () {
    * 7. Remove bucket. (Clean up)
    */

-  var _100mbFileToBeSplitAndComposed = Buffer.alloc(100 * 1024 * 1024, 0)
-  let composeObjectTestBucket = 'minio-js-test-compose-obj-' + uuid.v4()
+  const _100mbFileToBeSplitAndComposed = Buffer.alloc(100 * 1024 * 1024, 0)
+  const composeObjectTestBucket = 'minio-js-test-compose-obj-' + uuid.v4()
   before(() => client.makeBucket(composeObjectTestBucket, ''))
   after(() => client.removeBucket(composeObjectTestBucket))

   const composedObjName = '_100-mb-file-to-test-compose'
   const tmpSubDir = `${tmpDir}/compose`
-  var fileToSplit = `${tmpSubDir}/${composedObjName}`
+  const fileToSplit = `${tmpSubDir}/${composedObjName}`
   let partFilesNamesWithPath = []
   let partObjNameList = []
   let isSplitSuccess = false

   step(`Create a local file of 100 MB and split `, (done) => {
     try {
+      if (!fs.existsSync(tmpSubDir)) {
+        // fs.mkdirSync is synchronous and takes no callback; any error
+        // propagates to the surrounding try/catch.
+        fs.mkdirSync(tmpSubDir, { recursive: true })
+      }
       fs.writeFileSync(fileToSplit, _100mbFileToBeSplitAndComposed)
       // 100 MB split into 26 MB part size.
       splitFile
@@ -4088,11 +4095,11 @@ describe('functional tests', function () {
           isSplitSuccess = true
           done()
         })
-        .catch(() => {
-          done()
+        .catch((err) => {
+          done(err)
         })
     } catch (err) {
-      done()
+      done(err)
     }
   })
@@ -4130,12 +4137,15 @@ describe('functional tests', function () {
         Object: composedObjName,
       })

-      client.composeObject(destObjConfig, sourcePartObjList).then((e) => {
-        if (e) {
-          return done(e)
-        }
-        done()
-      })
+      client
+        .composeObject(destObjConfig, sourcePartObjList)
+        .then(() => done())
+        .catch(done)
     } else {
       done()
     }
   })
@@ -4195,7 +4205,7 @@ describe('functional tests', function () {

   step('Clean up temp directory part files', (done) => {
     if (isSplitSuccess) {
-      fs.rmdirSync(tmpSubDir)
+      fs.rmSync(tmpSubDir, { recursive: true, force: true })
     }
     done()
   })
diff --git a/tests/unit/test.js b/tests/unit/test.js
index 72e559b8..8d1175ef 100644
--- a/tests/unit/test.js
+++ b/tests/unit/test.js
@@ -32,7 +32,7 @@ import {
   partsRequired,
 } from '../../src/internal/helper.ts'
 import { joinHostPort } from '../../src/internal/join-host-port.ts'
-import { parseListObjects } from '../../src/internal/xml-parser.ts'
+import { parseListMultipart, parseListObjects } from '../../src/internal/xml-parser.ts'
 import * as Minio from '../../src/minio.js'

 const Package = { version: 'development' }
@@ -2304,6 +2304,39 @@ describe('xml-parser', () => {
       })
     })
   })
+
+  describe('#listMultipart()', () => {
+    describe('should handle missing owner and initiator', () => {
+      // example response from GCS
+      const xml = `<?xml version="1.0" encoding="UTF-8"?>
+        <ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+          <Bucket>some-bucket</Bucket>
+          <KeyMarker></KeyMarker>
+          <UploadIdMarker></UploadIdMarker>
+          <NextKeyMarker>some-file.pdf</NextKeyMarker>
+          <Delimiter>/</Delimiter>
+          <NextUploadIdMarker></NextUploadIdMarker>
+          <MaxUploads>1000</MaxUploads>
+          <IsTruncated>false</IsTruncated>
+          <Upload>
+            <Key>some-file.pdf</Key>
+            <UploadId>ABPnzm4aGoV3sjevTkVeaWV6lvBFtdjcZegTJg8MUfTue1t6lgRIy6_JEoM0km3CNE218x00</UploadId>
+            <StorageClass>STANDARD</StorageClass>
+            <Initiated>2024-12-17T08:16:52.396303Z</Initiated>
+          </Upload>
+        </ListMultipartUploadsResult>`
+
+      it('should parse the incomplete uploads list', () => {
+        const { uploads } = parseListMultipart(xml)
+        assert.equal(uploads.length, 1)
+        assert.equal(uploads[0].key, 'some-file.pdf')
+      })
+    })
+  })
 })

 describe('join-host-port', () => {
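The parser exercised by this test backs `listIncompleteUploads`; a hedged usage sketch, with endpoint, credentials, and bucket name as placeholders:

```ts
import * as Minio from 'minio'

const client = new Minio.Client({
  endPoint: 'storage.googleapis.com', // GCS XML API endpoint (placeholder credentials below)
  accessKey: 'ACCESS_KEY',
  secretKey: 'SECRET_KEY',
})

// With the parser fix above, uploads that omit Initiator/Owner (as GCS does)
// are emitted with those fields undefined instead of throwing.
client
  .listIncompleteUploads('some-bucket', '', true)
  .on('data', (upload) => console.log(upload.key, upload.uploadId, upload.initiator))
  .on('error', (err) => console.error(err))
```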