Reputation: 2186
I have a video stored in an S3 bucket with an authenticated-read ACL.
I need to read it and generate a trailer with ffmpeg (Node.js)
Here's the code I use to generate the trailer
exports.generatePreview = (req, res) => {
const getParams = {
Bucket: S3_CREDENTIALS.bucketName,
Key: req.params.key
}
s3.getSignedUrl('getObject', getParams, (err, signedRequest) => {
console.log(signedRequest, err, 'getSignedUrl')
ffmpeg(new URL(signedRequest))
.size('640x?')
.aspect('4:3')
.seekInput('3:00')
.duration('0:30')
.then(function (video) {
s3.putObject({ Bucket: S3_CREDENTIALS.bucketName, key: 'preview_' + req.body.key, Body: video }, function (err, data) {
console.log(err, data)
})
});
});
}
Unfortunately, the ffmpeg constructor does not seem to accept a remote URL. I get the same failure if I run an equivalent ffmpeg command on the command line with the same signed URL (i.e. ffmpeg -i "https://[bucketname].s3.eu-west-1.amazonaws.com/[key.mp4]?[signedParams]" -vn -acodec pcm_s16le -ar 44100 -ac 2 video.wav
)
The error I get is 'The input file does not exist', referring to the signed request URL.
It also seems fs.readFileSync does not support HTTPS URLs (trying HTTP gives the same result): fs.readFileSync(signedurl)
=> gives the same error
How to overcome this issue?
Upvotes: 1
Views: 4383
Reputation: 3842
If you're using node-ffmpeg, this isn't possible, because that library only accepts a string pointing to a local path; but fluent-ffmpeg does support read streams, so give that a try.
For example (untested, just spitballing):
const ffmpeg = require('fluent-ffmpeg');
const stream = require('stream');
exports.generatePreview = (req, res) => {
let params = {Bucket: S3_CREDENTIALS.bucketName, Key: req.params.key};
// Retrieve object stream
let readStream = s3.getObject(params).createReadStream();
// Set up the ffmpeg process
let ffmpegProcess = new ffmpeg(readStream)
//Add your args here
.toFormat('mp4');
ffmpegProcess.on('error', (err, stdout, stderr) => {
// Handle errors here
}).on('end', () => {
// Processing is complete
}).pipe(() => {
// Create a new stream
let pt = new stream.PassThrough();
// Reuse the same params object and set the Body to the stream
params.Key = 'preview_' + req.body.key;
params.Body = pt;
// Upload and wait for the result
s3.upload(params, (err, data) => {
if (err)
return console.error(err);
console.log("done");
})
});
});
This will have high memory requirements so if this is a Lambda function you might play around with retrieving only the first X bytes of the file and converting only that.
Upvotes: 3