From 19ada29c988fc071042b06d82b79bd3c78875498 Mon Sep 17 00:00:00 2001
From: tuanaiseo <221258316+tuanaiseo@users.noreply.github.com>
Date: Sun, 5 Apr 2026 18:10:19 +0700
Subject: [PATCH] fix(security): unbounded memory consumption when buffering s3
 obj

`getObject` converts the entire S3 response stream into a Buffer via
`streamToBuffer` with no size limit. Large objects can exhaust Lambda
memory and cause denial of service if object size is attacker-influenced
or not strictly controlled.

Affected files:
s3-service.js

Signed-off-by: tuanaiseo <221258316+tuanaiseo@users.noreply.github.com>
---
 lib/s3-service.js | 37 +++++++++++++++++++++++++++++++++++--
 1 file changed, 35 insertions(+), 2 deletions(-)

diff --git a/lib/s3-service.js b/lib/s3-service.js
index 0ed5ece..074e7ad 100644
--- a/lib/s3-service.js
+++ b/lib/s3-service.js
@@ -8,7 +8,31 @@
 // Require AWS SDK
 const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3'); // AWS SDK
 const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
-const { streamToBuffer } = require('./utils');
+const MAX_BUFFERED_OBJECT_SIZE = parseInt(
+  process.env.AWS_S3_MAX_BUFFERED_OBJECT_SIZE ||
+    process.env.AWS_S3_MAX_OBJECT_SIZE ||
+    10485760,
+  10
+);
+
+const streamToBufferWithLimit = async (stream, maxSize) => {
+  const chunks = [];
+  let size = 0;
+
+  for await (const chunk of stream) {
+    const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
+    size += buffer.length;
+
+    if (size > maxSize) {
+      if (typeof stream.destroy === 'function') stream.destroy();
+      throw new Error('S3 object exceeds maximum allowed buffered size');
+    }
+
+    chunks.push(buffer);
+  }
+
+  return Buffer.concat(chunks);
+};
 
 // Export
 exports.client = new S3Client();
@@ -21,9 +45,18 @@ exports.getObject = (params) => {
 
     if (!res.Body) return res;
 
+    if (
+      Number.isFinite(MAX_BUFFERED_OBJECT_SIZE) &&
+      MAX_BUFFERED_OBJECT_SIZE > 0 &&
+      typeof res.ContentLength === 'number' &&
+      res.ContentLength > MAX_BUFFERED_OBJECT_SIZE
+    ) {
+      throw new Error('S3 object exceeds maximum allowed buffered size');
+    }
+
     return {
       ...res,
-      Body: await streamToBuffer(res.Body),
+      Body: await streamToBufferWithLimit(res.Body, MAX_BUFFERED_OBJECT_SIZE),
     },
   };
 