javascript FileReader - parsing long file in chunks


The FileReader API is asynchronous, so you have to handle it with callbacks. A plain for loop won't do the trick, since it wouldn't wait for each read to complete before starting the next chunk. Here's a working approach.

function parseFile(file, callback) {
    var fileSize  = file.size;
    var chunkSize = 64 * 1024; // bytes
    var offset    = 0;
    var chunkReaderBlock = null;

    var readEventHandler = function(evt) {
        if (evt.target.error == null) {
            // note: result.length counts characters, not bytes; for multi-byte
            // text this can drift from the byte offset, so use readAsArrayBuffer
            // if you need binary-safe chunking
            offset += evt.target.result.length;
            callback(evt.target.result); // callback for handling read chunk
        } else {
            console.log("Read error: " + evt.target.error);
            return;
        }
        if (offset >= fileSize) {
            console.log("Done reading file");
            return;
        }

        // off to the next chunk
        chunkReaderBlock(offset, chunkSize, file);
    };

    chunkReaderBlock = function(_offset, length, _file) {
        var r = new FileReader();
        var blob = _file.slice(_offset, length + _offset);
        r.onload = readEventHandler;
        r.readAsText(blob);
    };

    // now let's start the read with the first block
    chunkReaderBlock(offset, chunkSize, file);
}
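For completeness, here's a minimal usage sketch; the input element's id ("picker") and the progress logging are just illustrative assumptions, not part of the answer above:

// Hypothetical usage: assumes an <input type="file" id="picker"> on the page
document.getElementById('picker').addEventListener('change', function (evt) {
    var file = evt.target.files[0];
    if (!file) return;
    parseFile(file, function (chunk) {
        // each chunk of text (up to 64 KiB) arrives here, in order
        console.log('got chunk of length ' + chunk.length);
    });
});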


You can take advantage of Response (part of the Fetch API) to convert most things to anything else (blob, text, json), and also to get a ReadableStream that can help you read the blob in chunks 👍

var dest = new WritableStream({
  write (str) {
    console.log(str)
  }
})

var blob = new Blob(['bloby']);

(blob.stream ? blob.stream() : new Response(blob).body)
  // Decode the binary-encoded response to string
  .pipeThrough(new TextDecoderStream())
  .pipeTo(dest)
  .then(() => {
    console.log('done')
  })
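If you'd rather pull chunks yourself instead of piping to a WritableStream, here's a rough sketch using the stream's reader directly; it assumes blob.stream() is available (any reasonably modern browser) and that you want decoded text:

async function readBlobInChunks(blob) {
  // grab a reader for the blob's byte stream
  const reader = blob.stream().getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // value is a Uint8Array; { stream: true } keeps multi-byte
    // characters that straddle chunk boundaries intact
    console.log(decoder.decode(value, { stream: true }));
  }
  console.log('done');
}

readBlobInChunks(new Blob(['bloby']));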