Reputation: 2599
I want to import MySQL data into Couchbase with some changes to it, like joins and JSON nested inside JSON. I can insert the documents one by one, but it takes a lot of time. I am doing this with the following code, which runs inside a for loop:
config.bucket.insert('User|' + page._id, JSON.stringify(page), function(err, res) {
    if (err) {
        console.log('User|' + page._id + ' operation failed ' + err);
        return;
    }
    console.log('User|' + page._id);
});
So, how do I do a bulk insert with a different key for each document in Couchbase with Node.js?
page._id is different for each document, and I also need it inside the inserted JSON.
Upvotes: 0
Views: 348
Reputation: 575
I'm pretty sure this is what you are looking for: https://github.com/couchbaselabs/devguide-examples/blob/master/nodejs/bulkES6.js
function bulkUpdateAsyncPattern() {
    return new Promise(
        (resolve, reject) => {
            // `bucket`, `totalDocs` and `opsGroup` are defined elsewhere in the linked example.
            var completed = 0;
            var runFlag = false;
            var startTime = process.hrtime();

            // Function to modify one document during the bulk loop. Notice,
            // this is only in scope for bulkUpdateAsyncPattern.
            function modifyOne() {
                // First check if the bulk pattern loop is done
                if (completed >= totalDocs && !runFlag) {
                    runFlag = true;
                    var time = process.hrtime(startTime);
                    console.log("====");
                    console.log(" Bulk Pattern Processing Loop Took: " +
                        parseInt((time[0] * 1000) + (time[1] / 1000000)) +
                        " ms for: " + totalDocs + " items");
                    resolve();
                } else if (completed < totalDocs) {
                    // Modify one document
                    bucket.mutateIn('test' + completed, 0, 0)
                        .counter('rev', 1, false)
                        .execute(function(err, res) {
                            if (err) console.log(" Error modifying:", err.message);
                            // This will fire WHEN and only WHEN a callback is received.
                            if (res) {
                                // Increment completed count
                                completed++;
                                // Recursive call to modify the next document
                                modifyOne();
                            }
                        });
                }
            }

            // The loop that sets up a "buffer" of queued operations.
            // This keeps a fixed number of requests always in flight waiting to execute.
            for (var i = 0; i < opsGroup; ++i) {
                modifyOne();
            }
        });
}
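To adapt that pattern to your case (plain inserts where every document has its own key), a minimal sketch could look like the following. This is an assumption-based adaptation, not code from the linked example: pages stands for the array of rows you built from MySQL (each with its own _id), bucket is an already-opened Couchbase bucket from the 2.x Node SDK, and opsGroup is how many inserts you keep in flight at once.

function bulkInsertPages(bucket, pages, opsGroup) {
    return new Promise((resolve, reject) => {
        if (pages.length === 0) {
            return resolve(0);
        }
        var next = 0;       // index of the next document to dispatch
        var completed = 0;  // number of inserts that have finished

        function insertOne() {
            if (completed >= pages.length) {
                return resolve(completed); // everything has been stored
            }
            if (next >= pages.length) {
                return; // all documents dispatched, still waiting on callbacks
            }
            var page = pages[next++];
            // Each document gets its own key, built from page._id
            bucket.insert('User|' + page._id, JSON.stringify(page), function(err, res) {
                if (err) {
                    console.log('User|' + page._id + ' operation failed ' + err);
                }
                completed++;
                insertOne(); // start the next insert, keeping the pipeline full
            });
        }

        // Prime the "buffer" with opsGroup concurrent inserts
        for (var i = 0; i < Math.min(opsGroup, pages.length); ++i) {
            insertOne();
        }
    });
}

You would call it as something like bulkInsertPages(config.bucket, pages, 50).then(...). The point is the same as in the example above: instead of waiting for each insert to finish before starting the next, you keep a fixed number of operations outstanding, which is what makes the bulk loop fast.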
Upvotes: 1