Reputation: 133
I have a Node.js Lambda function that queries MongoDB using Mongoose.
About 20% of the time, seemingly at random, I get the following error when trying to connect: MongoNetworkTimeoutError: connection timed out
MongoDB seems to recommend setting context.callbackWaitsForEmptyEventLoop = false and reusing the same connection across invocations, but I read other posts saying the fix is to actively re-open a connection on every call. I tried that and it's still happening. Does anyone have any ideas?
Here's my code:
const mongoose = require('mongoose')

let conn = mongoose.createConnection(process.env.MONGO_URI, {
  bufferCommands: false, // Disable mongoose buffering
  bufferMaxEntries: 0,   // and MongoDB driver buffering
  useNewUrlParser: true,
  useUnifiedTopology: true,
  socketTimeoutMS: 45000,
  keepAlive: true,
  reconnectTries: 10
})

try {
  await conn
  console.log('Connected correctly to server')
} catch (err) {
  console.log('Error connecting to DB')
  console.log(err)
  console.log(err.stack)
}

await conn
And here's the full error output from CloudWatch:
{
  "errorType": "Runtime.UnhandledPromiseRejection",
  "errorMessage": "MongoNetworkTimeoutError: connection timed out",
  "reason": {
    "errorType": "MongoNetworkTimeoutError",
    "errorMessage": "connection timed out",
    "name": "MongoNetworkTimeoutError",
    "stack": [
      "MongoNetworkTimeoutError: connection timed out",
      " at connectionFailureError (/var/task/node_modules/mongodb/lib/core/connection/connect.js:342:14)",
      " at TLSSocket.<anonymous> (/var/task/node_modules/mongodb/lib/core/connection/connect.js:310:16)",
      " at Object.onceWrapper (events.js:420:28)",
      " at TLSSocket.emit (events.js:314:20)",
      " at TLSSocket.EventEmitter.emit (domain.js:483:12)",
      " at TLSSocket.Socket._onTimeout (net.js:484:8)",
      " at listOnTimeout (internal/timers.js:554:17)",
      " at processTimers (internal/timers.js:497:7)"
    ]
  },
  "promise": {},
  "stack": [
    "Runtime.UnhandledPromiseRejection: MongoNetworkTimeoutError: connection timed out",
    " at process.<anonymous> (/var/runtime/index.js:35:15)",
    " at process.emit (events.js:326:22)",
    " at process.EventEmitter.emit (domain.js:483:12)",
    " at processPromiseRejections (internal/process/promises.js:209:33)",
    " at processTicksAndRejections (internal/process/task_queues.js:98:32)",
    " at runNextTicks (internal/process/task_queues.js:66:3)",
    " at listOnTimeout (internal/timers.js:523:9)",
    " at processTimers (internal/timers.js:497:7)"
  ]
}
Upvotes: 5
Views: 3874
Reputation: 31
I had the same issue (in my case with an Express app, but that doesn't matter). The solution was to move the database connection object outside the handler and to cache and reuse it.
'use strict'

const serverless = require('serverless-http')
const MongoClient = require('mongodb').MongoClient
const api = require('./modules/api')
const SecureConfig = require('./modules/secureConfig')

// Cached in module scope so warm invocations reuse the same connection
let dbObject = null

const getDBConnection = async () => {
  // Reuse the cached Db object if its underlying connection is still open
  if (dbObject && dbObject.serverConfig.isConnected()) return dbObject
  const client = await MongoClient.connect(SecureConfig.mongodb.host, SecureConfig.mongodb.mongoConfig)
  dbObject = client.db(SecureConfig.mongodb.db)
  return dbObject
}

module.exports.handler = async (event, context) => {
  const db = await getDBConnection()
  const server = serverless(api.default(db))
  try {
    /**
     * Lambda's context object exposes a callbackWaitsForEmptyEventLoop property
     * that effectively allows a Lambda function to return its result to the caller
     * without requiring that the MongoDB database connection be closed.
     * This allows the Lambda function to reuse a MongoDB connection across calls.
     */
    context.callbackWaitsForEmptyEventLoop = false
    return await server(event, context)
  } catch (error) {
    console.error('Lambda handler root error.')
    throw error
  }
}
You can find more details here: https://www.mongodb.com/blog/post/optimizing-aws-lambda-performance-with-mongodb-atlas-and-nodejs
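Since the question uses Mongoose rather than the raw driver, the same caching idea would look roughly like the sketch below. This is not taken from the linked post; the MONGO_URI environment variable and the readyState check are assumptions, so adapt them to your setup.

const mongoose = require('mongoose')

// Cache the connection promise in module scope so warm invocations reuse it
let connPromise = null

const getConnection = async () => {
  // readyState === 1 means the cached connection is still open
  if (connPromise && mongoose.connection.readyState === 1) return connPromise
  connPromise = mongoose.connect(process.env.MONGO_URI, {
    bufferCommands: false,
    useNewUrlParser: true,
    useUnifiedTopology: true
  })
  return connPromise
}

module.exports.handler = async (event, context) => {
  // Return without waiting for the event loop to drain, keeping the connection alive
  context.callbackWaitsForEmptyEventLoop = false
  await getConnection()
  // ... run your Mongoose queries here
}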
Upvotes: 2