Reputation: 289
I'm using this example to read a large file:
var fs = require('fs');
var readline = require('readline');
var stream = require('stream');
// Stream the file instead of loading it into memory (works for large files).
var instream = fs.createReadStream('your/file');
// NOTE(review): `new stream` instantiates the legacy Stream base class as a
// dummy output; modern code passes an options object to createInterface.
var outstream = new stream;
var rl = readline.createInterface(instream, outstream);
// Emitted once per line of input, with the line terminator stripped.
rl.on('line', function(line) {
// process line here
});
// Emitted when the interface is finished (input consumed or rl.close()).
rl.on('close', function() {
// do something on finish here
});
And I want to know when `line` is the last line of the file. I read the docs but could not find a solution. I already tried:
rl.on('line', function(line) {
// NOTE(review): readline strips line terminators before emitting 'line',
// so `line` can never equal os.EOL; the '' comparison only matches blank
// lines anywhere in the file — neither condition detects end-of-file.
if (line == '' || line == require("os").EOL)
console.log('eof');
});
but it didn't work.
Do you have any suggestions? Thanks for reading.
Upvotes: 5
Views: 28656
Reputation: 1693
2025 Answer.
import fs from 'node:fs/promises';
/**
 * Reads a file from the end and returns the final line.
 *
 * This version supports files whose last line ends with:
 * - "\n" (Unix-style, including modern macOS)
 * - "\r\n" (Windows-style)
 * - "\r" (older Mac-style, HL7, etc.)
 * A trailing EOL is treated as the terminator of the last line, so a file
 * containing "hello\n" yields "hello" rather than the empty string.
 *
 * @param {string} filePath - The path to the file.
 * @param {number} [minLength=1] - Minimum length for the returned line.
 * @return {Promise<string>} - The last line of the file that meets minLength, or an empty string otherwise.
 */
export async function getLastLine(filePath, minLength = 1) {
  const stats = await fs.stat(filePath);
  let fileSize = stats.size;
  // An empty file has no last line.
  if (fileSize === 0) return '';
  const fileHandle = await fs.open(filePath, 'r');
  const bufferSize = 1024;
  const buffer = Buffer.alloc(bufferSize);
  let remainder = '';
  let lastLineFound = false;
  let atFileEnd = true; // true only while processing the first (end-of-file) chunk
  try {
    while (!lastLineFound && fileSize > 0) {
      const readSize = Math.min(bufferSize, fileSize);
      fileSize -= readSize;
      // Read a chunk ending where the previous chunk began (offset = fileSize).
      const { bytesRead } = await fileHandle.read(buffer, 0, readSize, fileSize);
      // NOTE(review): a multi-byte UTF-8 character split across a chunk
      // boundary is decoded incorrectly here; only relevant for non-ASCII
      // content that straddles a 1024-byte boundary.
      const chunkStr = buffer.toString('utf8', 0, bytesRead);
      // Combine with any remainder carried over from the previous chunk.
      let combined = chunkStr + remainder;
      if (atFileEnd) {
        // BUG FIX: strip the file's trailing EOL run up front. It terminates
        // the last line rather than delimiting an (empty) line after it;
        // without this, any file ending in a newline returned ''.
        combined = combined.replace(/[\r\n]+$/, '');
        atFileEnd = false;
      }
      // Find the last occurrence of an EOL marker; everything after it is
      // the last line.
      const lastCRIndex = combined.lastIndexOf('\r');
      const lastLFIndex = combined.lastIndexOf('\n');
      const boundaryIndex = Math.max(lastCRIndex, lastLFIndex);
      if (boundaryIndex !== -1) {
        remainder = combined.slice(boundaryIndex + 1);
        lastLineFound = true;
      } else {
        // No line break found in this chunk; keep reading backward.
        remainder = combined;
      }
    }
  } finally {
    await fileHandle.close();
  }
  // Return only if it meets the minimum length.
  return remainder.length >= minLength ? remainder : '';
}
The problem with Axel's answer is that it assumes the last line of a file will be empty or an EOL. This is not necessarily the case.
// fileTools.js
const fs = require('fs');
const readline = require('readline');
const Stream = require('stream');
exports.getLastLine = (fileName, minLength) => {
let inStream = fs.createReadStream(fileName);
let outStream = new Stream;
return new Promise((resolve, reject)=> {
let rl = readline.createInterface(inStream, outStream);
let lastLine = '';
rl.on('line', function (line) {
if (line.length >= minLength) {
lastLine = line;
}
});
rl.on('error', reject)
rl.on('close', function () {
resolve(lastLine)
});
})
}
To use:
const getLastLine = require('./fileTools.js').getLastLine
const fileName = 'C:\\someWinDir\\somelog.log'
const minLineLength = 1
getLastLine(fileName, 1)
.then((lastLine)=> {
console.log(lastLine)
})
.catch((err)=> {
console.error(err)
})
Upvotes: 11
Reputation: 7169
Adding a dependency on read-last-lines (https://github.com/alexbbt/read-last-lines) — and, as a consequence, on mz/fs — seemed like a lot to me for one function.
Also the code of read-last-lines
seems a bit dated.
I took the code of read-last-lines
and just refactored it a bit. All credit goes to read-last-lines
.
The following code basically reads a file character by character from back to front and counts how many line breaks it encountered.
I did not test if this is actually faster than streaming through all lines of a huge CSV file.
import * as fs from "fs";
const NEW_LINE_CHARACTERS = ["\n"];
/**
 * Resolve with the single byte located `currentCharacterCount` positions
 * before the end of the file, decoded with `encoding`.
 *
 * NOTE(review): reading one byte at a time means a multi-byte UTF-8
 * character cannot be decoded correctly here — assumes effectively
 * single-byte content; confirm for non-ASCII files.
 *
 * @param stat - Stats of the open file (used for its size).
 * @param file - An open file descriptor.
 * @param currentCharacterCount - Offset from the end of the file (0 = last byte).
 * @param encoding - Encoding used to decode the byte; defaults to "utf-8".
 */
async function readPreviousChar(
    stat: fs.Stats,
    file: number,
    currentCharacterCount: number,
    encoding: BufferEncoding = "utf-8"
): Promise<string> {
    // Absolute position of the byte to fetch, counted from the file start.
    const position = stat.size - 1 - currentCharacterCount;
    return new Promise((resolve, reject) => {
        const single = Buffer.alloc(1);
        fs.read(file, single, 0, 1, position, (err, _bytesRead, buffer) => {
            if (err) {
                reject(err);
                return;
            }
            resolve(buffer.toString(encoding));
        });
    });
}
/**
 * Read in the last `maxLineCount` lines of a file by scanning backward one
 * character at a time and counting "\n" boundaries.
 *
 * @param {string} inputFilePath - file (direct or relative path to file.)
 * @param {int} maxLineCount - max number of lines to read in.
 * @param {encoding} encoding - specifies the character encoding to be used, or 'buffer'. defaults to 'utf8'.
 *
 * @return {promise} a promise resolved with the lines or rejected with an error.
 */
export async function readLastLines(
    inputFilePath: string,
    maxLineCount: number,
    encoding: BufferEncoding = "utf-8"
): Promise<string> {
    // NOTE(review): existsSync is a TOCTOU race (the file can vanish before
    // open below); kept only for the friendlier error message.
    if (!fs.existsSync(inputFilePath)) throw new Error(`File ${inputFilePath} does not exist.`);
    const [stat, file] = await Promise.all([
        new Promise<fs.Stats>((resolve, reject) =>
            // Load file Stats.
            fs.stat(inputFilePath, (err, stat) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(stat);
                }
            })
        ),
        new Promise<number>((resolve, reject) =>
            // Open file for reading.
            fs.open(inputFilePath, "r", (err, file) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(file);
                }
            })
        ),
    ]);
    try {
        let chars = 0;
        let lineCount = 0;
        let lines = "";
        // NOTE(review): stat.size is in bytes while lines.length counts
        // UTF-16 code units — these only agree for single-byte content;
        // inherited limitation from read-last-lines.
        while (lines.length < stat.size && lineCount < maxLineCount) {
            const nextCharacter = await readPreviousChar(stat, file, chars, encoding);
            lines = nextCharacter + lines;
            // The file's very last "\n" terminates the final line and is not
            // counted as a boundary (hence the lines.length > 1 guard).
            if (NEW_LINE_CHARACTERS.includes(nextCharacter) && lines.length > 1) {
                lineCount++;
            }
            chars++;
            if (lines.length > stat.size) {
                lines = lines.substring(lines.length - stat.size);
            }
        }
        // Drop the leading line break belonging to the line before the
        // requested ones.
        if (NEW_LINE_CHARACTERS.includes(lines.substring(0, 1))) {
            lines = lines.substring(1);
        }
        return lines;
    } finally {
        // BUG FIX: always release the descriptor — the original called
        // closeSync only on the success path and leaked the fd whenever a
        // read rejected.
        fs.closeSync(file);
    }
}
Upvotes: 0
Reputation: 748
Save the current line, and when you get to the end of the file you have the last line. The input stream emits an "end" event when all of its data is consumed. https://nodejs.org/api/stream.html#stream_event_end
var fs = require('fs');
var readline = require('readline');
var stream = require('stream');
var instream = fs.createReadStream('your/file');
// NOTE(review): legacy pattern — a bare Stream instance as a dummy output;
// modern code passes an options object to createInterface instead.
var outstream = new stream;
var rl = readline.createInterface(instream, outstream);
// Holds the most recently emitted line at any point in time.
var currentLine;
rl.on('line', function(line) {
currentLine = line;
// process line here
});
// 'end' fires once all of instream's data has been consumed.
// NOTE(review): assumes readline has already emitted every buffered 'line'
// by the time the input's 'end' fires — confirm, or use rl's 'close' event.
instream.on('end', function() {
// currentLine is now the last line
// use currentLine here
});
You could alternatively use the rl.on('close')
event, but that fires for a few more reasons — such as an interrupt or an explicit call to rl.close()
— though those might not affect you.
https://nodejs.org/api/readline.html#readline_event_close
Upvotes: 3
Reputation: 381
To read last N lines of a large file efficiently, we can use this npm package read-last-lines
https://www.npmjs.com/package/read-last-lines
Example reading last 50 lines of a file :
const readLastLines = require('read-last-lines');
readLastLines.read('path/to/file', 50)
.then((lines) => console.log(lines))
// BUG FIX: the original left the promise rejection unhandled — a missing
// file would crash with an unhandled-rejection error.
.catch((err) => console.error(err));
Upvotes: 6
Reputation: 957
I used exec(tail -n 1 <filepath>)
in my mocha test suite because I know exactly what it does and it does it with fewer lines of code...
Perhaps it's a little less optimal when building an application, though.
const { exec } = require("child_process");
// `tail -n 1` prints only the final line of the file.
exec("tail -n 1 nginx_access.log", (error, stdout, stderr) => {
  // BUG FIX: the original ignored `error` and `stderr`, silently printing
  // an empty string when the file was missing or `tail` failed.
  if (error) {
    console.error(stderr || error.message);
    return;
  }
  console.log(stdout);
});
Upvotes: 7
Reputation: 1506
Save the line you receive inside a global variable, then display it once you reach the end of the file.
// Most recent non-blank line seen so far.
var lastLine = '';
rl.on('line', function(line) {
// NOTE(review): readline strips line terminators, so `line` never equals
// os.EOL; this branch only fires on blank lines, and therefore only reports
// the last line if the file happens to end with an empty line.
if (line == '' || line == require("os").EOL) {
console.log('eof, last line is', lastLine);
return;
}
lastLine = line;
});
Upvotes: 5