Node.js: reading a large text file only partially

I need to read a very large CSV file (> 80 MB and growing).

I usually only have to parse the last 1% of the file, but getting to that part takes a few minutes. Is there a way to just start reading at line N? Or, alternatively, to read the stream from end to start?

I am currently using fast-csv to read the file:

    // convert csv into postgres copy file
    var fs = require('fs');
    var csv = require('fast-csv');
    var moment = require('moment');

    csv.fromPath(filepath, {
        headers: false
    }).transform(function(data) {
        // check if record meets condition 
        var dt = parseInt(data[0]);
        var date = new Date(dt * 1000);
        var mom = moment(date);
        if (mom.isAfter('2014-01-01 00:00')) {  
            // transform data and return object
            return transform(data);
        }
        return null;
    }).pipe(csv.createWriteStream({
        headers: true
    })).pipe(fs.createWriteStream(outpath, {    
        encoding: "utf8"
    })).on('finish', function() { 
        // do postgres import
    });
1 answer

Using a combination of Node's fs.stat, fs.open, and fs.read, you can find the size of the file and read just the last 1% of it into a buffer:

    var fs = require('fs');
    var filename = 'csv.csv';

    fs.stat(filename, function(err, stat) {

        if (err) throw err;

        var bytesToRead = Math.ceil(0.01 * stat.size); // last 1% of the file
        var startingPosition = stat.size - bytesToRead;
        var readBuffer = Buffer.alloc(bytesToRead); // zero-filled; new Buffer(size) is deprecated

        fs.open(filename, 'r', function(err, fd) {

            if (err) throw err;

            // read bytesToRead bytes, starting startingPosition bytes into the file
            fs.read(fd, readBuffer, 0, bytesToRead, startingPosition,
                function(err, bytesRead) {
                    if (err) throw err;
                    fs.close(fd, function() {});
                    console.log(readBuffer.toString('utf8', 0, bytesRead));
                });
        });

    });
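
If you would rather keep the streaming fast-csv pipeline from the question instead of buffering the tail yourself, fs.createReadStream accepts a start byte offset. A sketch, assuming the same filepath, outpath, and transform helper from the question, and a fast-csv version that exposes fromStream:

    fs.stat(filepath, function(err, stat) {

        if (err) throw err;

        // start the read stream at the last 1% of the file
        var startingPosition = stat.size - Math.ceil(0.01 * stat.size);
        var tailStream = fs.createReadStream(filepath, { start: startingPosition });

        csv.fromStream(tailStream, {
            headers: false
        }).transform(function(data) {
            // same filter/transform as in the question
            return transform(data);
        }).pipe(csv.createWriteStream({
            headers: true
        })).pipe(fs.createWriteStream(outpath, {
            encoding: "utf8"
        }));
    });

Either way, the very first row you read will almost certainly be incomplete, for the reason below.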

You cannot start reading at line N directly, because you would have to read everything before it to find out where the newline characters are.
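
In practice this means a read that begins at an arbitrary byte offset will usually land in the middle of a row, so the first partial line must be discarded before parsing. A minimal sketch, operating on the readBuffer from the code above:

    // Everything before the first newline is the tail of a row whose
    // beginning was never read, so drop it before parsing. (Any multi-byte
    // character split by the byte offset is discarded along with it.)
    var text = readBuffer.toString('utf8');
    var firstNewline = text.indexOf('\n');
    var completeRows = firstNewline === -1 ? '' : text.slice(firstNewline + 1);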
