Node.js / read the first 100 bytes of the file

I am trying to read the file in parts, starting with the first 100 bytes of the ./npm file:

 // NOTE(review): fs.read(fd, buffer, offset, length, position, cb) — here `start`
 // is passed as the BUFFER offset and `0` as the FILE position, i.e. the
 // offset/position arguments are swapped (this is the bug the answer below fixes).
 // NOTE(review): req.body.contentlength is presumably a string, so
 // `new Buffer(contentLength)` may not allocate the intended size — TODO confirm;
 // that would explain the "Length extends beyond buffer" error even for start=0.
 // NOTE(review): the snippet is truncated — the closing `});` of app.post is
 // missing, and `randomAccessFile(...)` and `res` are never used afterwards.
 app.post('/random', function(req, res) { var start = req.body.start; var fileName = './npm'; var contentLength = req.body.contentlength; var file = randomAccessFile(fileName + 'read'); console.log("Start is: " + start); console.log("ContentLength is: " + contentLength); fs.open(fileName, 'r', function(status, fd) { if (status) { console.log(status.message); return; } var buffer = new Buffer(contentLength); fs.read(fd, buffer, start, contentLength, 0, function(err, num) { console.log(buffer.toString('utf-8', 0, num)); }); }); 

output:

 Start is: 0 ContentLength is: 100 

and the following error:

 fs.js:457 binding.read(fd, buffer, offset, length, position, wrapper); ^ Error: Length extends beyond buffer at Object.fs.read (fs.js:457:11) at C:\NodeInst\node\FileSys.js:132:12 at Object.oncomplete (fs.js:107:15) 

What could be the reason?

+13
javascript
source share
2 answers

You mixed up the offset and position arguments. From the docs:

offset is the offset in the buffer to start writing to.

position — an integer specifying where to begin reading from in the file. If position is null, the data will be read from the current file position.

You should change your code to this:

  // Correct argument order: fs.read(fd, buffer, offsetInBuffer, length, filePosition, cb)
  //   offset = 0      -> write the data at the start of the buffer
  //   position = start -> begin reading at byte `start` of the file
  fs.read(fd, buffer, 0, contentLength, start, function(err, num) { console.log(buffer.toString('utf-8', 0, num)); }); 

Basically, offset will be the index that fs.read will write to the buffer. Say you have a buffer of length 10 like this: <Buffer 01 02 03 04 05 06 07 08 09 0a> and you will read from /dev/zero , which is basically just zeros, and set the offset to 3 and set the length to 4 then you will receive the following: <Buffer 01 02 03 00 00 00 00 08 09 0a> .

 // Demonstration: read 4 bytes from /dev/zero into buffer indices 3..6.
 // `offset` (3) is where fs.read starts WRITING into the buffer;
 // `position` (0) is where it starts READING in the file.
 fs.open('/dev/zero', 'r', function (status, fd) {
   if (status) {
     console.log(status.message);
     return;
   }
   // Buffer.from(array) replaces the deprecated `new Buffer(array)` constructor.
   var buffer = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
   fs.read(fd, buffer, 3, 4, 0, function (err, num) {
     console.log(buffer); // <Buffer 01 02 03 00 00 00 00 08 09 0a>
   });
 });

Also, to do what you want, you could try using fs.createReadStream:

 // Stream the requested byte range of the file straight to the response.
 app.post('/random', function (req, res) {
   // req.body values arrive as strings; coerce before doing arithmetic.
   var start = Number(req.body.start);
   var fileName = './npm';
   var contentLength = Number(req.body.contentlength);
   // `end` is an inclusive ABSOLUTE byte offset, so it must account for
   // `start` — the original `end: contentLength - 1` read the wrong range
   // whenever start > 0.
   fs.createReadStream(fileName, { start: start, end: start + contentLength - 1 })
     .pipe(res);
 });
+16
source share

Starting with Node 10, readable streams implement Readable[Symbol.asyncIterator] (experimental at first, stable as of Node v12):

 'use strict';
 const fs = require('fs');

 // Reads the first 101 bytes (inclusive range 0..100) of hello.csv twice:
 // once with for-await, once by pulling a single chunk from the iterator.
 async function run() {
   const file = 'hello.csv';

   // Readable streams are async-iterable since Node 10 (stable in Node 12+).
   const stream = fs.createReadStream(file, { encoding: 'utf8', start: 0, end: 100 });
   for await (const chunk of stream) {
     // Template literals require backticks — the original used single
     // quotes, which would print the '${...}' placeholders literally.
     console.log(`${file} >>> ${chunk}`);
   }

   // Or, if you don't want the for-await loop, pull the first chunk manually.
   // (Renamed to avoid redeclaring `const stream` in the same scope,
   // which is a SyntaxError.)
   const stream2 = fs.createReadStream(file, { encoding: 'utf8', start: 0, end: 100 });
   const firstChunk = await stream2[Symbol.asyncIterator]().next();
   console.log(`${file} >>> ${firstChunk.value}`);
 }

 run();

Prints the first bytes of the file.

0
source share

All Articles