I have an upload button on my page that the user can click to upload a video. If the video is large, JavaScript's FileReader can't handle it in one go, so I've implemented a function that slices the uploaded video into chunks, each read as a data URL starting with 'data:application/octet-stream;base64,'. The function sends each chunk to the Node.js backend, where I use 'fs' to merge the chunks back together and save them as an mp4 file on my disk. The problem is that the resulting file is 'empty': my PC recognizes it as an mp4 file and there is no wrong-format error, but the video has no content and is 0 seconds long. How can I merge the chunks properly so that the mp4 file works?
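(For what it's worth, this is how I sanity-check the result on disk; just a quick look at the byte count of the file the backend writes, nothing more:)

const fs = require('fs');

// quick check on the file the backend wrote (it plays as 0:00 in the player)
console.log('bytes on disk:', fs.statSync('/one.mp4').size);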
Frontend:
let reader;
let file;
const slice_size = 1024;

const start_chunks_upload = (e) => {
  e.preventDefault();
  reader = new FileReader();
  file = e.target.files[0];
  upload_file_chunks(0);
};

const upload_file_chunks = (start) => {
  const next_slice = start + slice_size + 1;
  // each blob ends up being slice_size + 1 bytes (except possibly the last one)
  const blob = file.slice(start, next_slice);

  reader.onloadend = (e) => {
    if (e.target.readyState !== FileReader.DONE) {
      return;
    }
    $.ajax({
      type: 'POST',
      url: 'http://localhost:5000/video',
      data: {
        // strip the data URL prefix so only the base64 payload is sent
        video: e.target.result.replace('data:application/octet-stream;base64,', ''),
        is_done: next_slice < file.size ? null : true,
      },
      success: function (data) {
        const size_done = start + slice_size;
        const percent_done = Math.floor((size_done / file.size) * 100);
        console.log(percent_done + '%');
        // only start the next chunk after the previous one succeeded,
        // so the chunks reach the backend in order
        if (next_slice < file.size) {
          upload_file_chunks(next_slice);
        } else {
          console.log('done');
        }
      }
    });
  };

  reader.readAsDataURL(blob);
};

video_upload_button.addEventListener('change', (e) => start_chunks_upload(e));
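Note that $.ajax is called with a plain data object, so jQuery posts each chunk as application/x-www-form-urlencoded (its default content type), which is why the backend reads it from req.body.video. Just to illustrate the shape of one request (this is not code I actually run, and base64_chunk / is_last are placeholder names), the equivalent with fetch would be roughly:

// illustration only: the same urlencoded POST that $.ajax sends for one chunk
const body = new URLSearchParams({
  video: base64_chunk,            // base64 payload with the data URL prefix stripped (placeholder)
  is_done: is_last ? 'true' : '', // the flag the backend checks (placeholder)
});
fetch('http://localhost:5000/video', { method: 'POST', body });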
Backend:
let string = '';

app.post('/video', (req, res) => {
  // append each incoming base64 chunk to one long string
  string += req.body.video;

  if (req.body.is_done) {
    // last chunk received: decode the accumulated string and write it to disk
    const buff = Buffer.from(string, 'base64');
    fs.writeFileSync('/one.mp4', buff);
    fs.writeFile('/two.mp4', string, 'base64', function (err) {
      console.log(err);
    });
    fs.appendFile('/log.txt', string, function (err) {
      if (err) {
        console.log(err);
      }
    });
  } else {
    return res.json({ ok: 1 });
  }
});
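For completeness, the rest of the backend is nothing special; it's roughly this (an Express app with the urlencoded body parser, listening on the port the frontend posts to):

const express = require('express');
const fs = require('fs');

const app = express();
// the chunks arrive as urlencoded form data, so this is what fills req.body.video
app.use(express.urlencoded({ extended: true }));

// ...the /video route shown above goes here...

app.listen(5000);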