Reputation: 89
How do I process a set of files (listed with fs.readdir or fs.readdirSync) one by one, in sequence, when each file's database operations produce data that the next file depends on?
In other words, on each iteration I need to read data that was inserted by the previous operation.
I've fiddled with code like the following, but I get all sorts of errors: 'error: deadlock detected', or errors caused by the previous file's data not having been updated before the next operation runs.
const fs = require('fs');
const Processes = require('./processes');

function readFiles () {
    fs.readdir('path/to/folder', (err, files) => {
        if (err) return;
        if (files.length) {
            files.forEach(file => {
                let fileData = '';
                let fileStream = fs.createReadStream('path/to/folder/' + file);
                fileStream.on('data', chunk => {
                    fileData += chunk;
                });
                fileStream.on('end', async () => {
                    await Processes.processFileWithDatabaseOperations('path/to/folder/' + file);
                });
            });
        }
    });
}
// processes.js
const fs = require('fs');

function processFileWithDatabaseOperations (file) {
    return new Promise((resolve, reject) => {
        fs.readFile(file, 'latin1', (err, data) => {
            if (err) return reject(err);
            // Here, insert data into the database based on the previous operation
        });
    });
}

module.exports = { processFileWithDatabaseOperations };
Upvotes: 0
Views: 17
Reputation: 89
SOLUTION:
const fs = require('fs');
const Processes = require('./processes');

function readFiles () {
    fs.readdir('path/to/folder', async (err, files) => {
        if (err) return;
        // A plain for loop is used because Array.prototype.forEach
        // does not await its callback, so iterations would overlap
        for (let i = 0; i < files.length; i++) {
            const result = await Processes.processFileWithDatabaseOperations('path/to/folder/' + files[i]);
            console.log(result);
        }
    });
}
// processes.js
const fs = require('fs');

function processFileWithDatabaseOperations (file) {
    return new Promise((resolve, reject) => {
        fs.readFile(file, 'latin1', (err, data) => {
            if (err) return reject(err);
            // Run the database operations on `data` here, then resolve
            // only once they have all completed
            resolve('All operations on the db have been done');
        });
    });
}

module.exports = { processFileWithDatabaseOperations };
Upvotes: 0