Reputation: 191
I'm trying to build an upload mechanism to upload large files to S3 and also abort the process if I want. Below is my frontend code for uploading files:
<input type="file" id="multipartInput">
<button id="multipartInputBtn">send file</button>
document.getElementById('multipartInputBtn').addEventListener('click', async () => {
  const multipartInput_fileInput = document.getElementById('multipartInput');
  const file = multipartInput_fileInput.files[0];
  const fileName = file.name;
  const fileSize = file.size;
  const url = `https://uniquestring.execute-api.ap-south-1.amazonaws.com/dev`;
  try {
    // Ask the backend to start a multipart upload and return its UploadId
    let res = await axios.post(`${url}/getUploadId`, { fileName: fileName });
    const uploadId = res.data.uploadId;
    // Store the uploadId so the abort button handler can read it later
    sessionStorage.setItem('uploadId', uploadId);
    console.log(res);

    console.log('Inside uploadMultipartFile');
    const chunkSize = 100 * 1024 * 1024; // 100 MiB
    const chunkCount = Math.floor(fileSize / chunkSize) + 1;
    console.log(`chunkCount: ${chunkCount}`);

    let multiUploadArray = [];
    for (let uploadCount = 1; uploadCount < chunkCount + 1; uploadCount++) {
      let start = (uploadCount - 1) * chunkSize;
      let end = uploadCount * chunkSize;
      let fileBlob = uploadCount < chunkCount ? file.slice(start, end) : file.slice(start);

      // Ask the backend for a pre-signed URL for this part
      let getSignedUrlRes = await axios.post(`${url}/getUploadPart`, {
        fileName: fileName,
        partNumber: uploadCount,
        uploadId: uploadId
      });
      // Assumption: the backend returns the pre-signed URL in this field
      const preSignedUrl = getSignedUrlRes.data.preSignedUrl;
      console.log(`preSignedUrl ${uploadCount} : ${preSignedUrl}`);
      console.log(fileBlob);

      // Send this part to S3 via the pre-signed URL
      let uploadChunck = await axios.put(preSignedUrl, fileBlob);
      console.log(uploadChunck);

      // Collect ETag + PartNumber so the upload can be completed later
      // (reading the ETag header requires the bucket's CORS config to expose it)
      multiUploadArray.push({
        ETag: uploadChunck.headers.etag,
        PartNumber: uploadCount
      });
    }
  } catch (err) {
    console.log(err, err.stack);
  }
});
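For reference, the /getUploadId and /getUploadPart Lambdas are not shown here; below is a minimal sketch of what such handlers could look like with aws-sdk v2 (the handler names, the uploadId / preSignedUrl response fields, and the bucketName environment variable are assumptions, kept consistent with the frontend code above):
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ signatureVersion: 'v4', region: 'ap-south-1' });

// Sketch of a /getUploadId handler: starts a multipart upload and returns its UploadId
exports.getUploadId = async (event) => {
  const body = JSON.parse(event.body);
  const upload = await s3
    .createMultipartUpload({ Bucket: process.env.bucketName, Key: body.fileName })
    .promise();
  return {
    statusCode: 200,
    headers: { 'Access-Control-Allow-Origin': '*' },
    body: JSON.stringify({ uploadId: upload.UploadId })
  };
};

// Sketch of a /getUploadPart handler: returns a pre-signed URL for one part
exports.getUploadPart = async (event) => {
  const body = JSON.parse(event.body);
  const preSignedUrl = await s3.getSignedUrlPromise('uploadPart', {
    Bucket: process.env.bucketName,
    Key: body.fileName,
    UploadId: body.uploadId,
    PartNumber: body.partNumber,
    Expires: 3600
  });
  return {
    statusCode: 200,
    headers: { 'Access-Control-Allow-Origin': '*' },
    body: JSON.stringify({ preSignedUrl: preSignedUrl })
  };
};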
With this method I'm able to upload the file. Then, following the docs, I tried to implement abort multipart upload and implemented my frontend logic accordingly:
<button id="abortUploadBtn">Abort Upload</button>
document.getElementById('abortUploadBtn').addEventListener('click', () => {
  const multipartInput_fileInput = document.getElementById('multipartInput');
  const file = multipartInput_fileInput.files[0];
  const fileName = file.name;
  // Read the uploadId stored by the upload handler
  const uploadId = sessionStorage.getItem('uploadId');
  const url = `https://68qb5fre0e.execute-api.ap-south-1.amazonaws.com/dev`;
  console.log({ fileName: fileName, uploadId: uploadId });
  axios
    .post(`${url}/abortUpload`, { fileName: fileName, uploadId: uploadId })
    .then((r) => console.log(r))
    .catch((err) => console.error(err));
});
And this is the Lambda handler behind the /abortUpload endpoint:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ signatureVersion: 'v4', region: 'ap-south-1' });

exports.handler = async (event) => {
  const body = JSON.parse(event.body);
  try {
    let params = {
      Bucket: process.env.bucketName,
      Key: body.fileName,
      UploadId: body.uploadId
    };
    // Abort the in-progress multipart upload
    const completeUpload = await s3.abortMultipartUpload(params).promise();
    return {
      statusCode: 200,
      headers: {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Credentials': true
      },
      body: JSON.stringify({ completeUpload: completeUpload })
    };
  } catch (err) {
    console.log(err);
    return {
      statusCode: 500,
      headers: {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Credentials': true
      },
      body: JSON.stringify({ error: err, details: err.stack })
    };
  }
};
This leads to the following CloudWatch error:
2021-03-11T11:51:43.367Z 58cba880-bbc2-4522-8c0f-8c73b2d3cd8f INFO AccessDenied: Access Denied
at Request.extractError (/var/task/node_modules/aws-sdk/lib/services/s3.js:712:35)
at Request.callListeners (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/var/task/node_modules/aws-sdk/lib/request.js:688:14)
at Request.transition (/var/task/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/task/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/task/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/task/node_modules/aws-sdk/lib/request.js:38:9)
at Request.<anonymous> (/var/task/node_modules/aws-sdk/lib/request.js:690:12)
at Request.callListeners (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:116:18) {
code: 'AccessDenied',
region: null,
time: 2021-03-11T11:51:43.363Z,
requestId: 'BBF9AC13DD0C54FA',
extendedRequestId: '/aQMS0mNmAEimuZ8nL0p+iDY9ndePTHU5fsfsnlbOH+anaWB4BOZH2bvVbn4wXefimH7gOh5za3U=',
cfId: undefined,
statusCode: 403,
retryable: false,
retryDelay: 5.35506042250955
}
But ironically, if I use exactly the method advised in the docs locally on my PC (as shown below), it works absolutely fine:
var params = {
  Bucket: "examplebucket",
  Key: "bigobject",
  UploadId: "xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--"
};
s3.abortMultipartUpload(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data);               // successful response
  /*
  data = {
  }
  */
});
Running it logs {}, i.e. a successful (empty) response.
What should I do to get this working?
Edit
I've attached my serverless.yml to highlight the permissions:
provider:
  name: aws
  runtime: nodejs14.x
  lambdaHashingVersion: 20201221
  stage: dev
  region: ap-south-1
  apiGateway:
    shouldStartNameWithService: true
  iam:
    role:
      statements:
        - Effect: "Allow"
          Action:
            - "s3:GetObject"
            - "s3:PutObject"
          Resource: "arn:aws:s3:::${self:custom.bucketName}/*"
  environment:
    bucketName: ${self:custom.bucketName}
Upvotes: 0
Views: 1642
Reputation: 9655
provider:
  name: aws
  runtime: nodejs14.x
  lambdaHashingVersion: 20201221
  stage: dev
  region: ap-south-1
  apiGateway:
    shouldStartNameWithService: true
  iam:
    role:
      statements:
        - Effect: "Allow"
          Action:
            - "s3:GetObject"
            - "s3:PutObject"
            - "s3:AbortMultipartUpload"
          Resource: "arn:aws:s3:::${self:custom.bucketName}/*"
  environment:
    bucketName: ${self:custom.bucketName}
You need the s3:AbortMultipartUpload permission; I have amended the IAM policy statement in the template above.
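If you want to confirm that the abort actually went through, you can list the bucket's incomplete multipart uploads afterwards. A minimal sketch with aws-sdk v2 (the bucket name is a placeholder, and this assumes credentials that allow s3:ListBucketMultipartUploads on the bucket):
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ region: 'ap-south-1' });

// Lists multipart uploads that were started but never completed or aborted.
// After a successful abort, the corresponding UploadId should no longer appear here.
s3.listMultipartUploads({ Bucket: 'your-bucket-name' }, (err, data) => {
  if (err) console.log(err, err.stack);
  else console.log(data.Uploads); // [] once every upload is completed or aborted
});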
Upvotes: 1