Reputation: 21
Context: two JavaScript classes in separate files, each integrating a different external service and both called from an Express.js router.
See "problematic code" below:
routes.post('/aws', upload.single('file'), async (req, res) => {
    const transcribeParams = JSON.parse(req.body.options)
    const bucket = 'bucket-name'
    const data = await (await (await awsTranscribe.Upload(req.file, bucket)).CreateJob(transcribeParams)).GetJob()
    res.send(data)
})
class AmazonS3 {
    constructor() {
        this.Upload = this.Upload
    }

    async Upload(file, bucket) {
        const uploadParams = {
            Bucket: bucket,
            Body: fs.createReadStream(file.path),
            Key: file.filename,
        }
        this.data = await s3.upload(uploadParams).promise()
        return this
    }
}
class Transcribe extends AmazonS3 {
    constructor() {
        super()
        this.CreateJob = this.CreateJob
        this.GetJob = this.GetJob
    }

    async CreateJob(params) {
        if (this.data?.Location) {
            params.Media = { ...params.Media, MediaFileUri: this.data.Location }
        }
        this.data = await transcribeService.startTranscriptionJob(params).promise()
        return this
    }

    async GetJob(jobName) {
        if (this.data?.TranscriptionJob?.TranscriptionJobName) {
            jobName = this.data.TranscriptionJob.TranscriptionJobName
        }
        this.data = await transcribeService.getTranscriptionJob({ TranscriptionJobName: jobName }).promise()
        return this
    }
}
Problem: the problem is with the chained awaits in the router file:
await ( await ( await awsTranscribe.Upload...
Yes, it does work, but it would be horrible for another person to maintain this code in the future. How can I make it so it reads as just
awsTranscribe.Upload(req.file, bucket).CreateJob(transcribeParams).GetJob()
without the .then?
Upvotes: 1
Views: 171
Reputation: 707396
I got interested in whether it was possible to take an object with several async
methods and somehow make them automatically chainable. Well, you can:
function chain(obj, methodsArray) {
    if (!methodsArray || !methodsArray.length) {
        throw new Error("methodsArray argument must be an array of chainable method names");
    }
    const methods = new Set(methodsArray);
    let lastPromise = Promise.resolve();

    const proxy = new Proxy(obj, {
        get(target, prop, receiver) {
            if (prop === "_promise") {
                return function() {
                    return lastPromise;
                };
            }
            const val = Reflect.get(target, prop, receiver);
            if (typeof val !== "function" || !methods.has(prop)) {
                // no chaining if it's not a function
                // or it's not listed as a chainable method
                return val;
            } else {
                // return a stub function
                return function(...args) {
                    // chain a function call
                    lastPromise = lastPromise.then(() => {
                        return val.apply(obj, args);
                        // alternatively: return Reflect.apply(val, obj, args);
                    });
                    return proxy;
                };
            }
        }
    });
    return proxy;
}
function delay(t) {
    return new Promise(resolve => {
        setTimeout(resolve, t);
    });
}

function log(...args) {
    if (!log.start) {
        log.start = Date.now();
    }
    const delta = Date.now() - log.start;
    const deltaPad = (delta + "").padStart(6, "0");
    console.log(`${deltaPad}: `, ...args);
}
class Transcribe {
    constructor() {
        this.greeting = "Hello";
    }
    async createJob(params) {
        log(`createJob: ${this.greeting}`);
        return delay(200);
    }
    async getJob(jobName) {
        log(`getJob: ${this.greeting}`);
        return delay(100);
    }
}

const t = new Transcribe();
const obj = chain(t, ["getJob", "createJob"]);

log("begin");
obj.createJob().getJob()._promise().then(() => {
    log("end");
});
There's a placeholder for your Transcribe class that has two asynchronous methods, each returning a promise. Then there's a chain() function that returns a proxy for the object; it makes the set of passed-in method names chainable, which allows you to do something like this:
const t = new Transcribe();
// make chainable proxy
const obj = chain(t, ["getJob", "createJob"]);
obj.createJob().getJob()
or
await obj.createJob().getJob()._promise()
I wouldn't necessarily say this is production-ready code, but it is an interesting feasibility demonstration and (for me) a chance to learn more about the JavaScript Proxy object.
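For readers who haven't used Proxy before, here's a minimal, standalone sketch (not part of the answer's code; the names are made up) of the get trap that chain() relies on. Every property access on the proxy runs through the trap, which is what lets chain() hand back stub methods instead of the real ones:
const target = { name: "transcribe" };
const proxied = new Proxy(target, {
    get(obj, prop, receiver) {
        // runs for every property access on `proxied`
        console.log(`accessed "${String(prop)}"`);
        return Reflect.get(obj, prop, receiver);
    }
});
console.log(proxied.name); // logs: accessed "name", then "transcribe"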
Here's a different approach that (instead of the proxy object) adds method stubs to a promise to make things chainable:
function chain(orig, methodsArray) {
    let masterP = Promise.resolve();

    function addMethods(dest) {
        for (const m of methodsArray) {
            dest[m] = function(...args) {
                // chain onto master promise to force sequencing
                masterP = masterP.then(result => {
                    return orig[m].apply(orig, args);
                });
                // add methods to the latest promise before returning it
                addMethods(masterP);
                return masterP;
            };
        }
    }

    // add methods to our returned promise
    addMethods(masterP);
    return masterP;
}
function delay(t) {
    return new Promise(resolve => {
        setTimeout(resolve, t);
    });
}

function log(...args) {
    if (!log.start) {
        log.start = Date.now();
    }
    const delta = Date.now() - log.start;
    const deltaPad = (delta + "").padStart(6, "0");
    console.log(`${deltaPad}: `, ...args);
}
class Transcribe {
    constructor() {
        this.greeting = "Hello";
        this.cntr = 0;
    }
    async createJob(params) {
        log(`createJob: ${this.greeting}`);
        ++this.cntr;
        return delay(200);
    }
    async getJob(jobName) {
        log(`getJob: ${this.greeting}`);
        ++this.cntr;
        return delay(100);
    }
}

const t = new Transcribe();

log("begin");
chain(t, ["getJob", "createJob"]).createJob().getJob().then(() => {
    log(`cntr = ${t.cntr}`);
    log("end");
});
Since this returns an actual promise (with additional methods attached), you can directly use .then() or await with it, without the separate ._promise() that the first implementation required.
So, you can now do something like this:
const t = new Transcribe();
chain(t, ["getJob", "createJob"]).createJob().getJob().then(() => {
log(`cntr = ${t.cntr}`);
});
or:
const t = new Transcribe();
await chain(t, ["getJob", "createJob"]).createJob().getJob();
log(`cntr = ${t.cntr}`);
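Because the chain resolves to a regular promise, rejections surface at the await in the usual way. A small sketch of that, reusing the chain() and Transcribe definitions above; the run() wrapper is hypothetical and assumes one of the chained methods could reject:
async function run() {
    const t = new Transcribe();
    try {
        await chain(t, ["getJob", "createJob"]).createJob().getJob();
        log(`cntr = ${t.cntr}`);
    } catch (err) {
        // a rejection from createJob() or getJob() lands here
        log(`chain failed: ${err.message}`);
    }
}
run();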
And here's a third version that creates a thenable object (a pseudo-promise) with the added methods on it (if it bothers you to add methods to an existing promise):
function chain(orig, methodsArray) {
    if (!methodsArray || !methodsArray.length) {
        throw new Error("methodsArray argument must be an array of chainable method names");
    }
    let masterP = Promise.resolve();

    function makeThenable() {
        let obj = {};
        for (const m of methodsArray) {
            obj[m] = function(...args) {
                // chain onto master promise to force sequencing
                masterP = masterP.then(result => {
                    return orig[m].apply(orig, args);
                });
                return makeThenable();
            };
        }
        obj.then = function(onFulfill, onReject) {
            return masterP.then(onFulfill, onReject);
        };
        obj.catch = function(onReject) {
            return masterP.catch(onReject);
        };
        obj.finally = function(onFinally) {
            return masterP.finally(onFinally);
        };
        return obj;
    }

    return makeThenable();
}
function delay(t) {
    return new Promise(resolve => {
        setTimeout(resolve, t);
    });
}

function log(...args) {
    if (!log.start) {
        log.start = Date.now();
    }
    const delta = Date.now() - log.start;
    const deltaPad = (delta + "").padStart(6, "0");
    console.log(`${deltaPad}: `, ...args);
}
class Transcribe {
    constructor() {
        this.greeting = "Hello";
        this.cntr = 0;
    }
    async createJob(params) {
        log(`createJob: ${this.greeting}`);
        ++this.cntr;
        return delay(200);
    }
    async getJob(jobName) {
        log(`getJob: ${this.greeting}`);
        ++this.cntr;
        return delay(100);
    }
}

const t = new Transcribe();

log("begin");
chain(t, ["getJob", "createJob"]).createJob().getJob().then(() => {
    log(`cntr = ${t.cntr}`);
    log("end");
});
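As a side note (not from the answer itself), await works with any "thenable", i.e. any object that exposes a then(onFulfill, onReject) method, which is why the pseudo-promise above can be awaited directly. A minimal illustration:
const thenable = {
    then(onFulfill, onReject) {
        // await calls this with its own resolve/reject callbacks
        setTimeout(() => onFulfill(42), 50);
    }
};

(async () => {
    const value = await thenable; // resolves to 42 after ~50ms
    console.log(value);
})();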
Upvotes: 0
Reputation: 664548
The problem is with the chained awaits in the router file:
await ( await ( await awsTranscribe.Upload...
No, that's fine. In particular it would be trivial to refactor it to separate lines:
routes.post('/aws', upload.single('file'), async (req, res) => {
    const transcribeParams = JSON.parse(req.body.options)
    const bucket = 'bucket-name'
    const a = await awsTranscribe.Upload(req.file, bucket);
    const b = await a.CreateJob(transcribeParams);
    const c = await b.GetJob();
    res.send(c);
});
Your actual problem is that a, b, and c all refer to the same object, awsTranscribe. Your code would also "work" if it were written
routes.post('/aws', upload.single('file'), async (req, res) => {
    const transcribeParams = JSON.parse(req.body.options)
    const bucket = 'bucket-name'
    await awsTranscribe.Upload(req.file, bucket);
    await awsTranscribe.CreateJob(transcribeParams);
    await awsTranscribe.GetJob();
    res.send(awsTranscribe);
});
The horrible thing is that you are passing your data between these methods through the mutable awsTranscribe.data property, even storing different kinds of data in it at different times! One could change the order of the method calls and it would break completely, in non-obvious and hard-to-debug ways.
Also, it seems that multiple requests share the same awsTranscribe instance. This will not work with concurrent requests. Anything is possible, from simply "not working" to responding with the job data from a different user (request)! You absolutely need to fix that first, then worry about the ugly syntax later.
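To make that concurrency hazard concrete, here is a stripped-down, hypothetical sketch (SharedTranscribe, handle(), and the timings are invented for illustration; no AWS calls involved) of how a single shared, stateful instance lets one request respond with another request's data:
class SharedTranscribe {
    async Upload(name, ms) {
        await new Promise(resolve => setTimeout(resolve, ms)); // simulated upload latency
        this.data = { Location: `s3://bucket/${name}` };
        return this;
    }
    async CreateJob(ms) {
        await new Promise(resolve => setTimeout(resolve, ms)); // simulated API latency
        return this.data.Location; // reads whatever the *last* Upload wrote
    }
}

const shared = new SharedTranscribe(); // one instance reused for every request

async function handle(name, uploadMs, jobMs) {
    await shared.Upload(name, uploadMs);
    return shared.CreateJob(jobMs);
}

Promise.all([
    handle('request-A.mp3', 50, 300),
    handle('request-B.mp3', 200, 10),
]).then(results => console.log(results));
// => [ 's3://bucket/request-B.mp3', 's3://bucket/request-B.mp3' ]
//    request A's job was created from request B's upload
Request A ends up pointing at request B's upload purely because of timing, which is exactly the kind of failure that only shows up under concurrent load.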
What you really should do is get rid of the classes. There's no reason to use stateful objects here; this is plain procedural code. Write simple functions that take parameters and return values:
export async function uploadFile(file, bucket) {
    const uploadParams = {
        Bucket: bucket,
        Body: fs.createReadStream(file.path),
        Key: file.filename,
    };
    const data = await s3.upload(uploadParams).promise();
    return data.Location;
}

export async function createTranscriptionJob(location, params) {
    params = {
        ...params,
        Media: {
            ...params.Media,
            MediaFileUri: location,
        },
    };
    const data = await transcribeService.startTranscriptionJob(params).promise();
    return data.TranscriptionJob;
}

export async function getTranscriptionJob(job) {
    const jobName = job.TranscriptionJobName;
    return transcribeService.getTranscriptionJob({ TranscriptionJobName: jobName }).promise();
}
Then you can import and call them as
routes.post('/aws', upload.single('file'), async (req, res) => {
    const transcribeParams = JSON.parse(req.body.options)
    const bucket = 'bucket-name'
    const location = await uploadFile(req.file, bucket);
    const job = await createTranscriptionJob(location, transcribeParams);
    const data = await getTranscriptionJob(job);
    res.send(data);
});
Upvotes: 1