CLDSRV-620: Ignore trailing checksums in upload requests by fredmnl · Pull Request #5757 · scality/cloudserver · GitHub

CLDSRV-620: Ignore trailing checksums in upload requests #5757


Merged · merged 2 commits on Mar 17, 2025
2 changes: 1 addition & 1 deletion constants.js
@@ -187,13 +187,13 @@ const constants = {
// Session name of the backbeat lifecycle assumed role session.
backbeatLifecycleSessionName: 'backbeat-lifecycle',
unsupportedSignatureChecksums: new Set([
-        'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD',
'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER',
]),
supportedSignatureChecksums: new Set([
'UNSIGNED-PAYLOAD',
+        'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
]),
ipv4Regex: /^(\d{1,3}\.){3}\d{1,3}(\/(3[0-2]|[12]?\d))?$/,
15 changes: 15 additions & 0 deletions lib/api/apiUtils/object/prepareStream.js
@@ -1,4 +1,5 @@
const V4Transform = require('../../../auth/streamingV4/V4Transform');
const TrailingChecksumTransform = require('../../../auth/streamingV4/trailingChecksumTransform');

/**
* Prepares the stream if the chunks are sent in a v4 Auth request
@@ -24,11 +25,25 @@ function prepareStream(stream, streamingV4Params, log, errCb) {
}
const v4Transform = new V4Transform(streamingV4Params, log, errCb);
stream.pipe(v4Transform);
v4Transform.headers = stream.headers;
return v4Transform;
}
return stream;
}

function stripTrailingChecksumStream(stream, log) {
// don't do anything if we are not in the correct integrity check mode
if (stream.headers['x-amz-content-sha256'] !== 'STREAMING-UNSIGNED-PAYLOAD-TRAILER') {
return stream;
}

const trailingChecksumTransform = new TrailingChecksumTransform(log);
stream.pipe(trailingChecksumTransform);
trailingChecksumTransform.headers = stream.headers;
return trailingChecksumTransform;
Comment on lines +40 to +43

Contributor

Good to confirm as well that we are still enforcing the common MD5 check in this mode (both that it is stored in the object metadata and that we properly fail in case of a mismatch). Could be done through a new unit/ft test.

Contributor Author

So I checked: when the trailing checksum mode is activated, the client does not send MD5 checksums, so in this mode we do not check integrity. This is not a first; there are already upload paths where cloudserver does not check payload integrity. In fact, it only checks payload integrity in two scenarios: when the content-md5 header is provided, or when streaming AuthV4 is used (in which case the signature verification also indirectly verifies payload integrity).

In the absence of an integrity check in cloudserver, integrity is still verified at the TCP level (although that is quite a weak verification) and at the HTTPS level.

I have circled back with David to triple-check that this is what we want from a product standpoint.

Contributor

I see. If we all align, it would be good to mention it in the release notes: integrity is not guaranteed by the S3 service for trailing-checksum PUTs.

}

module.exports = {
prepareStream,
stripTrailingChecksumStream,
};
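
As context for the integrity discussion above, a minimal sketch of the first scenario mentioned there, in which CloudServer does verify payload integrity: a plain (non-chunked) PUT carrying a Content-MD5 header. Illustrative only; the endpoint, bucket and key are placeholders and request signing is omitted.

const http = require('http');
const crypto = require('crypto');

const body = Buffer.from('hello world');
// Base64-encoded MD5 of the payload; the server recomputes it and rejects
// the upload (BadDigest) if it does not match.
const contentMD5 = crypto.createHash('md5').update(body).digest('base64');

const req = http.request({
    host: 'localhost',
    port: 8000,
    method: 'PUT',
    path: '/example-bucket/example-key',
    headers: {
        'content-length': body.length,
        'content-md5': contentMD5,
        // authentication headers omitted for brevity
    },
}, res => {
    console.log('status:', res.statusCode);
    res.resume();
});
req.end(body);
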
7 changes: 4 additions & 3 deletions lib/api/apiUtils/object/storeObject.js
@@ -1,7 +1,7 @@
const { errors, jsutil } = require('arsenal');

const { data } = require('../../../data/wrapper');
-const { prepareStream } = require('./prepareStream');
+const { prepareStream, stripTrailingChecksumStream } = require('./prepareStream');

/**
* Check that `hashedStream.completedHash` matches header `stream.contentMD5`
@@ -58,10 +58,11 @@ function checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo, log, cb) {
function dataStore(objectContext, cipherBundle, stream, size,
streamingV4Params, backendInfo, log, cb) {
const cbOnce = jsutil.once(cb);
-    const dataStream = prepareStream(stream, streamingV4Params, log, cbOnce);
-    if (!dataStream) {
+    const dataStreamTmp = prepareStream(stream, streamingV4Params, log, cbOnce);
+    if (!dataStreamTmp) {
return process.nextTick(() => cb(errors.InvalidArgument));
}
+    const dataStream = stripTrailingChecksumStream(dataStreamTmp, log, cbOnce);
return data.put(
cipherBundle, dataStream, size, objectContext, backendInfo, log,
(err, dataRetrievalInfo, hashedStream) => {
6 changes: 4 additions & 2 deletions lib/api/apiUtils/object/validateChecksumHeaders.js
@@ -5,8 +5,10 @@ const { unsupportedSignatureChecksums, supportedSignatureChecksums } = require('
function validateChecksumHeaders(headers) {
// If the x-amz-trailer header is present the request is using one of the
// trailing checksum algorithms, which are not supported.
-    if (headers['x-amz-trailer'] !== undefined) {
-        return errors.BadRequest.customizeDescription('trailing checksum is not supported');
+
+    if (headers['x-amz-trailer'] !== undefined &&
+        headers['x-amz-content-sha256'] !== 'STREAMING-UNSIGNED-PAYLOAD-TRAILER') {
+        return errors.BadRequest.customizeDescription('signed trailing checksum is not supported');
}

const signatureChecksum = headers['x-amz-content-sha256'];
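
For illustration, a minimal sketch of how the relaxed check behaves after this change. The require path assumes the repository root; the exact export shape and the value returned on success are assumptions, not shown in this diff.

// Hypothetical quick check; validateChecksumHeaders is the function from the diff above.
const validateChecksumHeaders = require('./lib/api/apiUtils/object/validateChecksumHeaders');

// Unsigned trailer: now allowed, so no error is expected.
console.log(validateChecksumHeaders({
    'x-amz-trailer': 'x-amz-checksum-crc64nvme',
    'x-amz-content-sha256': 'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
}));

// Signed trailer: still rejected with BadRequest
// ('signed trailing checksum is not supported').
console.log(validateChecksumHeaders({
    'x-amz-trailer': 'x-amz-checksum-sha256',
    'x-amz-content-sha256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
}));
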
2 changes: 1 addition & 1 deletion lib/auth/streamingV4/V4Transform.js
@@ -184,7 +184,7 @@ class V4Transform extends Transform {
* @param {Buffer} chunk - chunk from request body
* @param {string} encoding - Data encoding
* @param {function} callback - Callback(err, justDataChunk, encoding)
-     * @return {function }executes callback with err if applicable
+     * @return {function} executes callback with err if applicable
*/
_transform(chunk, encoding, callback) {
// 'chunk' here is the node streaming chunk
119 changes: 119 additions & 0 deletions lib/auth/streamingV4/trailingChecksumTransform.js
@@ -0,0 +1,119 @@
const { Transform } = require('stream');
const { errors } = require('arsenal');
const { maximumAllowedPartSize } = require('../../../constants');

/**
* This class is designed to handle the chunks sent in a streaming
* unsigned payload trailer request. In this iteration, we are not checking
* the checksums, but we are removing them from the stream.
* S3C-9732 will deal with checksum verification.
*/
class TrailingChecksumTransform extends Transform {
/**
* @constructor
* @param {object} log - logger object
*/
constructor(log) {
super({});
this.log = log;
this.chunkSizeBuffer = Buffer.alloc(0);
this.bytesToDiscard = 0; // trailing \r\n bytes still to discard; they may span multiple incoming chunks
this.bytesToRead = 0; // advertised size of the current data chunk; we forward this many bytes as-is
this.streamClosed = false;
}

/**
* This function is executed when there is no more data to be read but before the stream is closed
* We will verify that the trailing checksum structure was upheld
*
* @param {function} callback - Callback(err, data)
* @return {function} executes callback with err if applicable
*/
_flush(callback) {
if (!this.streamClosed) {
this.log.error('stream ended without closing chunked encoding');
return callback(errors.InvalidArgument);
}
return callback();
}

/**
* This function will remove the trailing checksum from the stream
*
* @param {Buffer} chunkInput - chunk from request body
* @param {string} encoding - Data encoding
* @param {function} callback - Callback(err, justDataChunk, encoding)
* @return {function} executes callback with err if applicable
*/
_transform(chunkInput, encoding, callback) {
let chunk = chunkInput;
while (chunk.byteLength > 0 && !this.streamClosed) {
if (this.bytesToDiscard > 0) {
const toDiscard = Math.min(this.bytesToDiscard, chunk.byteLength);
chunk = chunk.subarray(toDiscard);
this.bytesToDiscard -= toDiscard;
continue;
}
// forward up to bytesToRead bytes from the chunk, restart processing on leftover
if (this.bytesToRead > 0) {
const toRead = Math.min(this.bytesToRead, chunk.byteLength);
this.push(chunk.subarray(0, toRead));
chunk = chunk.subarray(toRead);
this.bytesToRead -= toRead;
if (this.bytesToRead === 0) {
this.bytesToDiscard = 2;
}
continue;
}

// we are now looking for the chunk size field
// no need to look further than 10 bytes since the field cannot be bigger: the max
// chunk size is 5GB (see constants.maximumAllowedPartSize)
const lineBreakIndex = chunk.subarray(0, 10).indexOf('\r');
const bytesToKeep = lineBreakIndex === -1 ? chunk.byteLength : lineBreakIndex;
if (this.chunkSizeBuffer.byteLength + bytesToKeep > 10) {

Contributor

Should we not only check the number of bytes in the buffer, but also the actual value, and ensure we don't exceed the max supported (5GB)? See constants.maximumAllowedPartSize for the constant you can use.

this.log.error('chunk size field too big', {
chunkSizeBuffer: this.chunkSizeBuffer.subarray(0, 11).toString('hex'),
chunkSizeBufferLength: this.chunkSizeBuffer.length,
truncatedChunk: chunk.subarray(0, 10).toString('hex'),
});
// if bigger, the chunk would be over 5 GB
// returning early to avoid a DoS by memory exhaustion
return callback(errors.InvalidArgument);
}
if (lineBreakIndex === -1) {
// no delimiter, we'll keep the chunk for later
this.chunkSizeBuffer = Buffer.concat([this.chunkSizeBuffer, chunk]);
return callback();
}

this.chunkSizeBuffer = Buffer.concat([this.chunkSizeBuffer, chunk.subarray(0, lineBreakIndex)]);
chunk = chunk.subarray(lineBreakIndex);

// chunk-size is sent in hex
const chunkSizeStr = this.chunkSizeBuffer.toString();
const dataSize = parseInt(chunkSizeStr, 16);
// we check that the parsing is correct (parseInt returns a partial parse when it fails)
if (isNaN(dataSize) || dataSize.toString(16) !== chunkSizeStr.toLowerCase()) {
this.log.error('invalid chunk size', { chunkSizeBuffer: chunkSizeStr });
return callback(errors.InvalidArgument);
}
this.chunkSizeBuffer = Buffer.alloc(0);
if (dataSize === 0) {
// TODO: check if the checksum is correct (S3C-9732)
// last chunk, no more data to read, the stream is closed
this.streamClosed = true;
}
if (dataSize > maximumAllowedPartSize) {
this.log.error('chunk size too big', { dataSize });
return callback(errors.EntityTooLarge);
}
this.bytesToRead = dataSize;
this.bytesToDiscard = 2;
}

return callback();
}
}

module.exports = TrailingChecksumTransform;
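
A minimal usage sketch of the new transform (illustrative only; the require path assumes the repository root, and the sample body reuses the wire format from the functional test below): an aws-chunked STREAMING-UNSIGNED-PAYLOAD-TRAILER body goes in, and only the decoded payload comes out.

const TrailingChecksumTransform = require('./lib/auth/streamingV4/trailingChecksumTransform');

// Stub logger: the transform only calls log.error() on malformed input.
const log = { error: (msg, ctx) => console.error(msg, ctx) };
const transform = new TrailingChecksumTransform(log);

// Two 16-byte data chunks followed by the zero-length chunk carrying the unsigned trailer.
const wireBody =
    '10\r\n0123456789abcdef\r\n' +
    '10\r\n0123456789abcdef\r\n' +
    '0\r\nx-amz-checksum-crc64nvme:YeIDuLa7tU0=\r\n';

const out = [];
transform.on('data', chunk => out.push(chunk));
transform.on('end', () => {
    // Expected output: the 32 payload bytes, trailer stripped.
    console.log(Buffer.concat(out).toString()); // 0123456789abcdef0123456789abcdef
});
transform.end(Buffer.from(wireBody));
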
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "s3",
"version": "7.10.52",
"version": "7.10.53",
"description": "S3 connector",
"main": "index.js",
"engines": {
133 changes: 133 additions & 0 deletions tests/functional/raw-node/test/trailingChecksums.js
@@ -0,0 +1,133 @@
const assert = require('assert');
const async = require('async');
const { makeS3Request } = require('../utils/makeRequest');
const HttpRequestAuthV4 = require('../utils/HttpRequestAuthV4');

const bucket = 'testunsupportedchecksumsbucket';
const objectKey = 'key';
const objData = Buffer.alloc(1024, 'a');
// note: the trailing checksum value in objDataWithTrailingChecksum is intentionally not the correct one
const objDataWithTrailingChecksum = '10\r\n0123456789abcdef\r\n' +
'10\r\n0123456789abcdef\r\n' +
'0\r\nx-amz-checksum-crc64nvme:YeIDuLa7tU0=\r\n';
const objDataWithoutTrailingChecksum = '0123456789abcdef0123456789abcdef';

const config = require('../../config.json');
const authCredentials = {
accessKey: config.accessKey,
secretKey: config.secretKey,
};

const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;

describe('trailing checksum requests:', () => {
before(done => {
makeS3Request({
method: 'PUT',
authCredentials,
bucket,
}, err => {
assert.ifError(err);
done();
});
});

after(done => {
async.series([
next => makeS3Request({
method: 'DELETE',
authCredentials,
bucket,
objectKey,
}, next),
next => makeS3Request({
method: 'DELETE',
authCredentials,
bucket,
}, next),
], err => {
assert.ifError(err);
done();
});
});

it('should accept unsigned trailing checksum', done => {
const req = new HttpRequestAuthV4(
`http://localhost:8000/${bucket}/${objectKey}`,
Object.assign(
{
method: 'PUT',
headers: {
'content-length': objDataWithTrailingChecksum.length,
'x-amz-decoded-content-length': objDataWithoutTrailingChecksum.length,
'x-amz-content-sha256': 'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
'x-amz-trailer': 'x-amz-checksum-crc64nvme',
},
},
authCredentials
),
res => {
assert.strictEqual(res.statusCode, 200);
res.on('data', () => {});
res.on('end', done);
}
);

req.on('error', err => {
assert.ifError(err);
});

req.write(objDataWithTrailingChecksum);

req.once('drain', () => {
req.end();
});
});

it('should have correct object content for unsigned trailing checksum', done => {
makeS3Request({
method: 'GET',
authCredentials,
bucket,
objectKey,
}, (err, res) => {
assert.ifError(err);
assert.strictEqual(res.statusCode, 200);
// check that the object data is the input stripped of the trailing checksum
assert.strictEqual(res.body, objDataWithoutTrailingChecksum);
return done();
});
});

itSkipIfAWS('should respond with BadRequest for signed trailing checksum', done => {
const req = new HttpRequestAuthV4(
`http://localhost:8000/${bucket}/${objectKey}`,
Object.assign(
{
method: 'PUT',
headers: {
'content-length': objData.length,
'x-amz-content-sha256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
'x-amz-trailer': 'x-amz-checksum-sha256',
},
},
authCredentials
),
res => {
assert.strictEqual(res.statusCode, 400);
res.on('data', () => {});
res.on('end', done);
}
);

req.on('error', err => {
assert.ifError(err);
});

req.write(objData);

req.once('drain', () => {
req.end();
});
});
});