
Split an uploaded file into multiple chunks using javascript


There is no need to read the content into RAM with a FileReader, and base64 will only increase the size of what you upload: base64 encoding takes up roughly 33% more space.
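As a quick sanity check of that figure (a minimal sketch, not part of the original answer; the sizes and names are just for illustration):

// Rough illustration of the base64 overhead
const raw = new Uint8Array(300000)        // 300,000 bytes of binary data
let binary = ''
for (const byte of raw) binary += String.fromCharCode(byte)
const b64 = btoa(binary)                  // base64-encode the binary string
console.log(raw.length, b64.length)       // 300000 vs 400000 -> ~33% larger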

Use Blob.slice to get chunks

// simulate a file from an <input>
const file = new File(['a'.repeat(1000000)], 'test.txt')
const chunkSize = 40000
const url = 'https://httpbin.org/post'

for (let start = 0; start < file.size; start += chunkSize) {
  // slice() returns a Blob covering the byte range [start, start + chunkSize)
  const chunk = file.slice(start, start + chunkSize)
  const fd = new FormData()
  fd.set('data', chunk)

  await fetch(url, { method: 'post', body: fd }).then(res => res.text())
}
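Note that top-level await is only valid inside an ES module; in a classic script, wrap the loop in an async function. Because each fetch is awaited before the next slice is sent, the chunks arrive at the server in order.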


You could avoid having to base64-encode by using a FileReader and then sending the chunks as binary:

const url = 'http://www.example.com/upload';

document.getElementById('file-uploader').addEventListener('change', function (e) {
    const size = 40000;
    var reader = new FileReader();
    var buf;
    var file = document.getElementById('file-uploader').files[0];

    reader.onload = function (e) {
        buf = new Uint8Array(e.target.result);
        for (var i = 0; i < buf.length; i += size) {
            var fd = new FormData();
            fd.append('fname', [file.name, i + 1, 'of', buf.length].join('-'));
            fd.append('data', new Blob([buf.subarray(i, i + size)]));

            var oReq = new XMLHttpRequest();
            oReq.open("POST", url, true);
            oReq.onload = function (oEvent) {
                // Uploaded.
            };
            oReq.send(fd);
        }
    };

    reader.readAsArrayBuffer(file);
});
<input type="file" id="file-uploader"/>
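A caveat on this second approach: readAsArrayBuffer still loads the entire file into memory, which is exactly what the Blob.slice answer above avoids, so for very large files the first approach is the safer option. The fname field here encodes the file name, the chunk's starting byte offset and the total byte length, which gives the receiving server enough information to reassemble the pieces.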