Reputation: 215
My Lambda function is triggered by an SQS queue, receiving batches of up to 10 items at a time. I want to iterate through those queue records and upsert each one into DynamoDB.
The code for the Lambda function looks like:
const AWS = require('aws-sdk');
AWS.config.update({accessKeyId: process.env.AWS_KEY, secretAccessKey: process.env.AWS_SECRET});
AWS.config.update({region: "us-west-1"});
exports.handler = async (event) => {
    // some other stuff to set up the variables below
    event.Records.forEach((record) => {
        var docClient = new AWS.DynamoDB.DocumentClient({convertEmptyValues: true, endpoint: "dynamodb.us-west-1.amazonaws.com"});
        var db_params = {
            TableName: entity,
            Key: entityKey,
            "UpdateExpression": update_expression,
            "ExpressionAttributeNames": expression_attribute_names,
            "ExpressionAttributeValues": expression_attribute_values,
            "ReturnValues": "ALL_NEW"
        };
        console.log('executing updateObjectPromise');
        docClient.update(db_params).promise()
            .then((item) => {
                console.log(entity + " inserted");
                console.log(item);
                return item;
            })
            .catch((error) => {
                console.log("ERROR: ");
                console.log(error);
                return error;
            });
        console.log("end of function");
    });
};
This code iterates through the records, but the update promise never seems to run or finish. await doesn't work here because the iteration isn't asynchronous. I've tried the async npm package and an asynchronous iterator, but even with await the call to update behaves the same way. No matter what I do, the call either never runs or isn't completed before the function ends. I haven't found any examples of anyone doing something similar. Can anybody assist?
Upvotes: 2
Views: 2186
Reputation: 16127
The callback you pass to forEach is a synchronous function as far as forEach is concerned; it never waits for the promises it starts. If you want to use async/await over an array, you have to understand how async/await actually works.
You return item inside the .then scope (I think you want to return the item from the Lambda function), but that value never makes it back to the handler. If you don't use the await keyword, or don't return anything from the async Lambda function, it may throw a timeout error.
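Here is a minimal sketch of the difference, using a hypothetical delay helper in place of the real DynamoDB call: the forEach version returns before any iteration finishes, while awaiting inside a plain loop holds the function open until every record is processed.
// Minimal sketch (hypothetical delay helper, not the real DynamoDB call)
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function withForEach(records) {
    records.forEach(async (record) => {
        await delay(100);               // pauses only this callback, not withForEach
        console.log('updated', record);
    });
    console.log('withForEach returns here, before any "updated" log');
}

async function withForLoop(records) {
    for (let i = 0; i < records.length; i++) {
        await delay(100);               // pauses the whole function
        console.log('updated', records[i]);
    }
    console.log('withForLoop finishes only after every record is done');
}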
Quick fix for your case: just use an old-school for loop (there are other ways to fix this too):
exports.handler = async (event) => {
    // some other stuff to set up the variables below
    let results = [];
    var docClient = new AWS.DynamoDB.DocumentClient({ convertEmptyValues: true, endpoint: "dynamodb.us-west-1.amazonaws.com" });
    try {
        for (let i = 0; i < event.Records.length; i++) {
            var db_params = {
                TableName: entity,
                Key: entityKey,
                "UpdateExpression": update_expression,
                "ExpressionAttributeNames": expression_attribute_names,
                "ExpressionAttributeValues": expression_attribute_values,
                "ReturnValues": "ALL_NEW"
            };
            console.log('executing updateObjectPromise');
            let item = await docClient.update(db_params).promise();
            console.log(entity + " inserted");
            console.log(item);
            results.push(item);
        }
    } catch (error) {
        console.log("ERROR: ");
        console.log(error);
        throw error;
    }
    return results; // return and finish lambda function
};
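For reference, the same sequential behavior can be written with for...of, which reads a little cleaner. This is only a sketch and assumes docClient and db_params are set up as in the code above:
// Sketch only: assumes docClient and db_params are built as in the code above.
exports.handler = async (event) => {
    const results = [];
    for (const record of event.Records) {
        // await here pauses the handler until each update finishes
        const item = await docClient.update(db_params).promise();
        results.push(item);
    }
    return results;
};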
Upvotes: 1
Reputation: 16037
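This version maps every record to its update promise and returns Promise.all(...) from the handler, so the async handler only resolves once all of the updates have settled: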
const AWS = require('aws-sdk')
AWS.config.update({ accessKeyId: process.env.AWS_KEY, secretAccessKey: process.env.AWS_SECRET })
AWS.config.update({ region: 'us-west-1' })

// Initialize this only once.
const docClient = new AWS.DynamoDB.DocumentClient({ convertEmptyValues: true, endpoint: 'dynamodb.us-west-1.amazonaws.com' })

exports.handler = async (event) => {
    // some other stuff to set up the variables below
    const promises = event.Records.map(record => {
        const db_params = {
            TableName: entity,
            Key: entityKey,
            'UpdateExpression': update_expression,
            'ExpressionAttributeNames': expression_attribute_names,
            'ExpressionAttributeValues': expression_attribute_values,
            'ReturnValues': 'ALL_NEW',
        }
        return docClient.update(db_params).promise()
            .then((item) => {
                console.log(entity + ' inserted')
                console.log(item)
                return item
            })
            .catch((error) => {
                console.log('ERROR: ')
                console.log(error)
                return error
            })
    })
    return Promise.all(promises)
}
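Note that because each record's .catch returns the error instead of rethrowing it, Promise.all will resolve even when some updates fail; the failed records simply show up as error objects in the result array. If you would rather have a failed update fail the whole invocation (so SQS can retry the batch), rethrow the error inside the catch instead of returning it.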
Upvotes: 1