I am trying to read a large file one line at a time. I found a question on Quora that dealt with the subject, but I'm missing some of the connections to make the whole thing fit together.

 var Lazy=require("lazy");
 new Lazy(process.stdin)
     .lines
     .forEach(
          function(line) { 
              console.log(line.toString()); 
          }
 );
 process.stdin.resume();

What I'd like to figure out is how to read one line at a time from a file instead of STDIN, as in this sample.

I tried:

 fs.open('./VeryBigFile.csv', 'r', '0666', Process);

 function Process(err, fd) {
    if (err) throw err;
    // DO lazy read 
 }

but it didn't work. I know that in a pinch I could fall back to something like PHP, but I would like to figure this out.
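I suspect the answer involves feeding lazy a read stream instead of stdin, something like this sketch (untested, with my VeryBigFile.csv as the input):

 var fs = require('fs'),
     Lazy = require('lazy');

 new Lazy(fs.createReadStream('./VeryBigFile.csv'))
     .lines
     .forEach(function (line) {
         console.log(line.toString());
     });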

I don't think the other answer would work, as the file is much larger than the server I'm running it on has memory for.


Current Answer

I have a little module that does this well and is used by quite a few other projects: npm readline. Note that in node v10 there is a native readline module, so I republished my module as linebyline: https://www.npmjs.com/package/linebyline

If you don't want to use the module, the function is very simple:

var fs = require('fs'),
    EventEmitter = require('events').EventEmitter,
    util = require('util'),
    newlines = [
      13, // \r
      10  // \n
    ];

var readLine = module.exports = function (file, opts) {
  if (!(this instanceof readLine)) return new readLine(file, opts);

  EventEmitter.call(this);
  opts = opts || {};
  var self = this,
      line = [],
      lineCount = 0,
      emit = function (line, count) {
        self.emit('line', Buffer.from(line).toString(), count);
      };

  this.input = fs.createReadStream(file);
  this.input.on('open', function (fd) {
      self.emit('open', fd);
    })
    .on('data', function (data) {
      for (var i = 0; i < data.length; i++) {
        if (0 <= newlines.indexOf(data[i])) { // Newline char was found.
          lineCount++;
          if (line.length) emit(line, lineCount);
          line = []; // Empty buffer.
        } else {
          line.push(data[i]); // Buffer new line data.
        }
      }
    })
    .on('error', function (err) {
      self.emit('error', err);
    })
    .on('end', function () {
      // Emit last line if anything left over since EOF won't trigger it.
      if (line.length) {
        lineCount++;
        emit(line, lineCount);
      }
      self.emit('end');
    })
    .on('close', function () {
      self.emit('close');
    });
};
util.inherits(readLine, EventEmitter);
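
A minimal usage sketch, assuming the function above is saved as readline.js (it emits 'line' events with the decoded text and a running line count):

var readLine = require('./readline'); // or require('linebyline') from npm
var rl = readLine('./somefile.txt');

rl.on('line', function (line, lineCount) {
  console.log(lineCount, line);
})
.on('error', function (err) {
  console.error(err);
});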

Other Answers

This is my favorite way of going through a file: a simple native solution for a progressive (as in, not a "slurp" or all-in-memory way) file read with modern async/await. It's a solution that I find "natural" when processing large text files without having to resort to the readline package or any non-core dependency.

const fs = require('fs');

// Note: `for await` must run inside an async function
// (or an ES module with top-level await).
let buf = '';
for await (const chunk of fs.createReadStream('myfile')) {
    const lines = buf.concat(chunk).split(/\r?\n/);
    buf = lines.pop();
    for (const line of lines) {
        console.log(line);
    }
}
if (buf.length) console.log(buf);  // last line, if file does not end with newline

You can adjust the encoding in fs.createReadStream or use chunk.toString(<arg>). This also lets you fine-tune the line splitting to your taste, i.e. use .split(/\n+/) to skip empty lines, and control the chunk size with { highWaterMark: <chunkSize> }.

Don't forget to create a function like processLine(line) to avoid repeating the line-processing code twice due to the ending buf leftover. Unfortunately, the ReadStream instance does not update its end-of-file flags in this setup, so there's no way, afaik, to detect within the loop that we're in the last iteration without some more verbose tricks like comparing the file size from fs.stat() with the stream's .bytesRead. Hence the final buf processing solution, unless you're absolutely sure your file ends with a newline \n, in which case the for await loop should suffice.
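
Putting those notes together, here's a sketch of the same loop with the line handling factored into a processLine function and the tuning options applied (the file name and the 64 KiB chunk size are just example values):

const fs = require('fs');

function processLine(line) {
    console.log(line); // do the per-line work in one place
}

async function run() {
    let buf = '';
    const stream = fs.createReadStream('myfile', {
        encoding: 'utf8',         // decode chunks to strings
        highWaterMark: 64 * 1024  // chunk size, example value
    });
    for await (const chunk of stream) {
        const lines = buf.concat(chunk).split(/\r?\n/);
        buf = lines.pop();
        lines.forEach(processLine);
    }
    if (buf.length) processLine(buf); // leftover last line
}

run();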

★ If you prefer the evented, asynchronous version, this would be it:

let buf = '';
fs.createReadStream('myfile')
.on('data', chunk => {
    const lines = buf.concat(chunk).split(/\r?\n/);
    buf = lines.pop();
    for( const line of lines ) {
        console.log(line);
    }
})
.on('end', () => buf.length && console.log(buf) );

★ Now, if you don't mind importing the stream core package, then this is the equivalent piped-stream version, which allows chaining transforms such as gzip decompression:

const { Writable } = require('stream');
let buf = '';
fs.createReadStream('myfile').pipe(
    new Writable({
        write: (chunk, enc, next) => {
            const lines = buf.concat(chunk).split(/\r?\n/);
            buf = lines.pop();
            for (const line of lines) {
                console.log(line);
            }
            next();
        }
    })
).on('finish', () => buf.length && console.log(buf) );
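
For instance, a sketch of that chaining with the core zlib module, assuming a gzipped input named myfile.gz:

const fs = require('fs');
const zlib = require('zlib');
const { Writable } = require('stream');

let buf = '';
fs.createReadStream('myfile.gz')
    .pipe(zlib.createGunzip()) // decompress on the fly
    .pipe(new Writable({
        write: (chunk, enc, next) => {
            const lines = buf.concat(chunk).split(/\r?\n/);
            buf = lines.pop();
            for (const line of lines) {
                console.log(line);
            }
            next();
        }
    }))
    .on('finish', () => buf.length && console.log(buf));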

Since posting my original answer, I found that split is a very easy-to-use node module for line reading in a file; it also accepts optional parameters.

var split = require('split');
fs.createReadStream(file)
    .pipe(split())
    .on('data', function (line) {
      //each chunk now is a separate line!
    });
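
As an example of those optional parameters, split accepts a mapper; passing JSON.parse makes each emitted chunk a parsed object, one per line (handy for newline-delimited JSON). A sketch, assuming file holds one JSON document per line:

var split = require('split');
fs.createReadStream(file)
    .pipe(split(JSON.parse))
    .on('data', function (obj) {
      // each chunk is now a parsed object
    })
    .on('error', function (err) {
      // JSON parse errors land here
    });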

Haven't tested with very large files yet. Let us know if you have.

If you want to read a file line by line and write it to another file:

var fs = require('fs');
var readline = require('readline');
var Stream = require('stream');

function readFileLineByLine(inputFile, outputFile) {

   var instream = fs.createReadStream(inputFile);
   var outstream = new Stream();
   outstream.readable = true;
   outstream.writable = true;

   var rl = readline.createInterface({
      input: instream,
      output: outstream,
      terminal: false
   });

   rl.on('line', function (line) {
      fs.appendFileSync(outputFile, line + '\n');
   });
}
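
A usage sketch (input.txt and output.txt are placeholder file names). Note that fs.appendFileSync opens and closes the output file for every line, which is simple but slow; a fs.createWriteStream would be cheaper for big files.

readFileLineByLine('input.txt', 'output.txt');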
Alternatively, if the file fits in memory, you can slurp it synchronously and split on newlines (note this won't help with the larger-than-memory files from the question):

require('fs').readFileSync('file.txt', 'utf-8').split(/\r?\n/).forEach(function (line) {
  console.log(line);
});

Update in 2019

A great example has already been published in the official Node.js documentation, in the readline section: https://nodejs.org/api/readline.html

This requires the latest Node.js (> 11.4) to be installed on your machine.

const fs = require('fs');
const readline = require('readline');

async function processLineByLine() {
  const fileStream = fs.createReadStream('input.txt');

  const rl = readline.createInterface({
    input: fileStream,
    crlfDelay: Infinity
  });
  // Note: we use the crlfDelay option to recognize all instances of CR LF
  // ('\r\n') in input.txt as a single line break.

  for await (const line of rl) {
    // Each line in input.txt will be successively available here as `line`.
    console.log(`Line from file: ${line}`);
  }
}

processLineByLine();