Node.js: synchronously read a large file line by line?
I have a large file (UTF-8). I know fs.createReadStream can create a stream to read a large file, but that is not synchronous. So I tried fs.readSync, but the text it reads comes out broken, like "迈�".
var fs = require('fs');
var util = require('util');
var textPath = __dirname + '/people-daily.txt';
var fd = fs.openSync(textPath, "r");
var text = fs.readSync(fd, 4, 0, "utf8");
console.log(util.inspect(text, true, null));
For large files, readFileSync can be inconvenient, as it loads the whole file into memory. A different synchronous approach is to call readSync iteratively, reading small chunks of data at a time and processing the lines as they come. The following code implements this approach and synchronously processes one line at a time from the file 'test.txt':
var fs = require('fs');

var filename = 'test.txt';
var fd = fs.openSync(filename, 'r');
var bufferSize = 1024;
var buffer = Buffer.alloc(bufferSize); // new Buffer() is deprecated
var leftOver = '';
var read, line, idxStart, idx;

while ((read = fs.readSync(fd, buffer, 0, bufferSize, null)) !== 0) {
    leftOver += buffer.toString('utf8', 0, read);
    idxStart = 0;
    // process every complete line accumulated so far
    while ((idx = leftOver.indexOf("\n", idxStart)) !== -1) {
        line = leftOver.substring(idxStart, idx);
        console.log("one line read: " + line);
        idxStart = idx + 1;
    }
    // keep the partial line for the next iteration
    leftOver = leftOver.substring(idxStart);
}
// handle a final line that has no trailing newline
if (leftOver) {
    console.log("one line read: " + leftOver);
}
fs.closeSync(fd);
Use https://github.com/nacholibre/node-readlines:
var lineByLine = require('n-readlines');
var liner = new lineByLine('./textFile.txt');
var line;
var lineNumber = 0;
while (line = liner.next()) {
    // use 'utf8' here since the file in the question is UTF-8
    console.log('Line ' + lineNumber + ': ' + line.toString('utf8'));
    lineNumber++;
}
console.log('end of line reached');
Use readFileSync:
fs.readFileSync(filename, [encoding]) Synchronous version of fs.readFile. Returns the contents of the filename.
If encoding is specified then this function returns a string. Otherwise it returns a buffer.
On a side note, since you are using Node, I'd recommend using the asynchronous functions.
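A minimal sketch of that approach, assuming the people-daily.txt file from the question is small enough to fit in memory:

var fs = require('fs');

// Passing 'utf8' makes readFileSync return a string instead of a Buffer.
var text = fs.readFileSync(__dirname + '/people-daily.txt', 'utf8');
var lines = text.split('\n');

lines.forEach(function (line, i) {
    console.log('Line ' + i + ': ' + line);
});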
I built a simpler version of JB Kohn's answer that uses split() on the buffer. It works on the larger files I tried.
var fs = require('fs');

/*
 * Synchronously call fn(text, lineNum) on each line read from file descriptor fd.
 */
function forEachLine(fd, fn) {
    var bufSize = 64 * 1024;
    var buf = Buffer.alloc(bufSize); // new Buffer() is deprecated
    var leftOver = '';
    var lineNum = 0;
    var lines, n;

    while ((n = fs.readSync(fd, buf, 0, bufSize, null)) !== 0) {
        lines = buf.toString('utf8', 0, n).split('\n');
        lines[0] = leftOver + lines[0]; // add leftover string from previous read
        while (lines.length > 1) {      // process all but the last line
            fn(lines.shift(), lineNum);
            lineNum++;
        }
        leftOver = lines.shift();       // save last line fragment (may be '')
    }
    if (leftOver) {                     // process any remaining line
        fn(leftOver, lineNum);
    }
}
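For example, it could be called like this (the file name and callback are just placeholders):

var fd = fs.openSync('test.txt', 'r');
forEachLine(fd, function (line, lineNum) {
    console.log(lineNum + ': ' + line);
});
fs.closeSync(fd);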
Two potential problems (see the sketch after this list):
- a 3-byte BOM at the beginning of the file that you did not skip
- the first 4 bytes cannot be cleanly decoded as UTF-8 characters (UTF-8 is not fixed-length, so a read can end in the middle of a multi-byte character)
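One way to cope with both issues (a sketch, not part of the answer above) is Node's string_decoder module, which buffers incomplete multi-byte sequences between reads, plus an explicit check for the BOM in the first decoded chunk:

var fs = require('fs');
var StringDecoder = require('string_decoder').StringDecoder;

var fd = fs.openSync(__dirname + '/people-daily.txt', 'r');
var decoder = new StringDecoder('utf8');
var buffer = Buffer.alloc(4); // read just 4 bytes, as in the question

// The decoder holds back incomplete multi-byte sequences until the next
// write, so characters split across reads are not mangled into "�".
var bytesRead = fs.readSync(fd, buffer, 0, buffer.length, 0);
var text = decoder.write(buffer.slice(0, bytesRead));

// Strip the UTF-8 BOM (decoded as U+FEFF) if the file starts with one.
if (text.charCodeAt(0) === 0xFEFF) {
    text = text.slice(1);
}
console.log(text);
fs.closeSync(fd);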