Iterating through lines in a file using Node.js and CoffeeScript

I'm iterating over the lines in a file using Node.js with CoffeeScript and the following function:

fs = require 'fs'

# Calls `func` with each newline-terminated line as data arrives on `stream`.
each_line_in = (stream, func) ->
    fs.stat stream.path, (err, stats) ->
        previous = []           # buffered pieces of the current, incomplete line
        stream.on 'data', (d) ->
            start = cur = 0
            for c in d
                cur++
                if c == 10      # '\n'
                    previous.push(d.slice(start, cur))
                    func previous.join('')
                    previous = []
                    start = cur
            # keep any trailing partial line for the next 'data' chunk
            previous.push(d.slice(start, cur)) if start != cur
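
A minimal usage sketch, assuming the stream comes from fs.createReadStream (the file name here is only an example):

stream = fs.createReadStream 'input.txt'   # example file
each_line_in stream, (line) -> console.log line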

Is there a better way to do this without reading the entire file into memory? And by “better” I mean shorter, built into Node.js, faster, or more correct. If I were writing Python, I would do something like this:

def each_line_in(file_obj, func):
    [ func(l) for l in file_obj ]

I saw this question, in which Peteris Krumins uses the "lazy" module, but I would like to accomplish this without adding an external dependency.
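
For reference, Node.js also ships a built-in readline module whose 'line' event covers this case; a minimal sketch, assuming a Node.js version where readline.createInterface accepts an input stream:

fs = require 'fs'
readline = require 'readline'

# Emits one 'line' event per line without buffering the whole file.
each_line_in = (filePath, func) ->
  rl = readline.createInterface input: fs.createReadStream(filePath)
  rl.on 'line', (line) -> func line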

2 answers

Here's a fairly efficient approach:

fs = require 'fs'

eachLineIn = (filePath, func) ->

  blockSize = 4096
  buffer = new Buffer(blockSize)
  fd = fs.openSync filePath, 'r'
  lastLine = ''

  callback = (err, bytesRead) ->
    throw err if err
    # A full block probably means there is more of the file to read
    if bytesRead is blockSize
      fs.read fd, buffer, 0, blockSize, null, callback

    lines = buffer.toString('utf8', 0, bytesRead).split '\n'
    lines[0] = lastLine + lines[0]         # prepend the leftover from the previous block
    [completeLines..., lastLine] = lines   # keep the trailing partial line for next time
    func(line) for line in completeLines
    return

  fs.read fd, buffer, 0, blockSize, 0, callback
  return
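
A usage sketch, assuming the definition above (the file name is only an example):

eachLineIn 'input.txt', (line) ->
  console.log line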

You can tune the performance by adjusting blockSize.

This assumes that lines are delimited with \n. If you are not sure about that, you could change the split to:

.split(/\r\n|\r|\n/)

You can also use a ReadStream, e.g. stream = fs.createReadStream(filepath):

for_each_line = (stream, func) ->
  last = ""                  # trailing partial line from the previous chunk
  stream.on('data', (chunk) ->
    lines = (last + chunk).split("\n")
    [lines..., last] = lines # keep the incomplete final piece for the next chunk
    for line in lines
      func(line)
  )
  stream.on('end', () ->
    func(last)               # flush whatever remains after the final chunk
  )

createReadStream takes care of the reading and buffering for you.

The split is on '\n', so the lines passed to func will not include the trailing "\n"; if your input may use "\r\n" line endings, you would need to handle that as well.
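
A minimal usage sketch, assuming the function above and a UTF-8 file (the file name is only an example):

fs = require 'fs'

stream = fs.createReadStream 'input.txt', encoding: 'utf8'
for_each_line stream, (line) -> console.log line

Setting the encoding makes the 'data' chunks arrive as strings, so multi-byte characters are not split across chunk boundaries.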

