gvoigt

Reputation: 311

readline doesn't stop reading lines after rl.close() in Node.js

I have the following file, which I want to read line by line and stop reading as soon as I find "nameserver 8.8.8.8".

nameserver 8.8.8.8
nameserver 45.65.85.3
nameserver 40.98.3.3

I am using Node.js and the readline module to do so:

const readline = require('readline');
const fs = require('fs');

function check_resolv_nameserver(){
  // flag indicates whether the nameserver line was found or not
  var nameserver_flag = false;

  const rl = readline.createInterface({
    input: fs.createReadStream('file_to_read.conf')
  });

  rl.on('line', (line) => {
    console.log(`Line from file: ${line}`);
    if (line === 'nameserver 8.8.8.8'){
      console.log('Found the right file. Reading lines should stop here.');
      nameserver_flag = true;
      rl.close();
    }
  });

  rl.on('close', function(){
    if (nameserver_flag === true){
      console.log('Found nameserver 8.8.8.8');
    }
    else {
      console.log('Could not find nameserver 8.8.8.8');
    }
  });
}

check_resolv_nameserver();

Since I call rl.close() as soon as I read the first match, I would expect my code to read only the first line and then stop. But instead my output looks like this:

Line from file: nameserver 8.8.8.8
Found the right file. Reading lines should stop here.
Found nameserver 8.8.8.8
Line from file: nameserver 45.65.85.3
Line from file: nameserver 40.98.3.3

How can I make readline stop after the first match and let me proceed with something else?

Upvotes: 21

Views: 21607

Answers (6)

on_knight

Reputation: 1

You can declare a line event listener and remove it when needed.

const lineEventListener = (line) => {
  // do something with the line
  // then close the interface and detach this listener so buffered lines are ignored
  rl.close();
  rl.removeListener('line', lineEventListener);
};
rl.on('line', lineEventListener);
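
In Node.js 10 and later, rl.off('line', lineEventListener) does the same thing, since off() is an alias for removeListener() on any EventEmitter, which the readline interface is.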

Upvotes: 0

What worked for me was adding a 'pause' handler that calls resume(); it allowed me to edit the file after reading it.

var lineReader = require('readline').createInterface({
    input: require('fs').createReadStream(require('path').resolve('test.js'))
});
lineReader.on('line', function (line) { console.log(line) }) /* handle every line */
.on('pause', function () {
    /* resume so reading finishes and the file gets closed */
    lineReader.resume();
})
.on('close', function () {
    /* runs after the file has been closed */
    console.log('Close ok')
});

Upvotes: 0

Oz Shabat

Reputation: 1622

For those of you who can't make the line reader stop, do this (inside your 'line' callback):

lineReader.close()
lineReader.removeAllListeners()
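
Applied to the setup from the question, that looks roughly like the minimal sketch below (file name taken from the question). Note that removeAllListeners() also drops any 'close' handler you registered, so do any follow-up work inside the 'line' callback:

const readline = require('readline');
const fs = require('fs');

const rl = readline.createInterface({
  input: fs.createReadStream('file_to_read.conf')
});

rl.on('line', (line) => {
  console.log(`Line from file: ${line}`);
  if (line === 'nameserver 8.8.8.8') {
    console.log('Found nameserver 8.8.8.8');
    rl.close();              // stop reading
    rl.removeAllListeners(); // drop listeners so already-buffered lines are ignored
    // note: this also removes the 'close' listener, so continue your work here
  }
});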

Upvotes: 23

Breck

Reputation: 2335

It appears readline buffers some lines, so you'll have to add your own check.

Example:

#! /usr/bin/node

const fs = require('fs')
const readline = require('readline')

const reader = readline.createInterface({
    input: fs.createReadStream('test.js')
})

let wasRead = false

reader.on('line', line => {
    // readline may still emit buffered lines after close(), so guard with a flag
    if (wasRead) return
    console.log('hello world')
    wasRead = true
    reader.close()
})

Upvotes: 3

Deunz

Reputation: 1941

I searched for a long time and never got this working, so I ended up getting what I wanted thanks to a node module: line-reader.

It's handy because it can read from a file as well as from a buffer.

Here is a simple code sample where you read 2 lines and then stop.

const lineReader = require('line-reader');
const stream = require('stream');

let bufferStream = new stream.PassThrough();
bufferStream.end(yourBuffer);

let lineNumber = 0;
lineReader.eachLine(bufferStream, function(line) {
    lineNumber++;
    if (lineNumber === 1 || lineNumber === 2) {
        // Perform whatever
    } else {

        // returning false breaks the reading
        return false;
    }

}, async function finished (err) {
    if (err) {
        // throw error or whatever
    }

    // Do after reading processing here
});

EDIT: I found a clean way to achieve everything exactly as planned:

First, create a splitter to read fixed-size chunks:

const { Transform } = require('stream');

class Splitter extends Transform {
    constructor(options){
        super(options);
        this.splitSize = options.splitSize;
        this.buffer = Buffer.alloc(0);
        this.continueThis = true;
    }
    stopIt() {
        this.continueThis = false;
    }

    _transform(chunk, encoding, cb){

        this.buffer = Buffer.concat([this.buffer, chunk]);

        while ((this.buffer.length > this.splitSize || this.buffer.length === 1) && this.continueThis){
            try {
                let chunk = this.buffer.slice(0, this.splitSize);

                this.push(chunk);
                this.buffer = this.buffer.slice(this.splitSize);
                if (this.buffer[0] === 26){
                    console.log('EOF : ' + this.buffer[0]);
                }
            } catch (err) {
                console.log('ERR OCCURED => ', err);
                break;
            }
        }
        console.log('WHILE FINISHED');
        cb();
    }
}

Then pipe it to your stream:

let bufferStream = new stream.PassThrough();
bufferStream.end(hugeBuffer);
let splitter = new Splitter({splitSize : 170}); // In my case lines are 170 characters long, so I want to process them line by line
let lineNr = 0;
bufferStream
    .pipe(splitter)
    .on('data', async function(line){
        line = line.toString().trim();

        splitter.pause(); // pause the stream so you can perform long-running processing with await
        lineNr++;

        if (lineNr === 1){
            // DO stuff with 1st line
        } else {
            splitter.stopIt(); // break the stream and stop reading, so we only read the 1st line
        }

        splitter.resume(); // resume the stream so you can process the next chunk
    }).on('error', function(err){
        console.log('Error while reading file.' + err);
        // whatever
    }).on('end', async function(){
        console.log('end event');

        // Stream has ended, do whatever...
    });

This code lets you read a stream line by line with full control over when to stop. There is no need to use the Splitter if the whole file is not that long.

Upvotes: 0

Gergo

Reputation: 2290

You should close the stream as well:

const readline = require('readline');
const fs = require('fs');
const readStream = fs.createReadStream('file_to_read.conf');

// More code here ...

const rl = readline.createInterface({
    input: readStream
  });

// Rest of your code

rl.close();
readStream.destroy();
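
In the question's scenario, both calls would go inside the 'line' handler once the match is found; a minimal sketch (file name taken from the question):

const readline = require('readline');
const fs = require('fs');

const readStream = fs.createReadStream('file_to_read.conf');
const rl = readline.createInterface({ input: readStream });

rl.on('line', (line) => {
  if (line === 'nameserver 8.8.8.8') {
    rl.close();           // stop the readline interface
    readStream.destroy(); // close the underlying file stream so no more data is read
  }
});

rl.on('close', () => {
  console.log('Done reading');
});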

Upvotes: 2
