Reputation: 1096
For IE11: in this code, a base64 file is converted to a Blob and a download link is then created. But with a large base64 file (roughly >5 MB), the browser hangs at the moment the Blob is created:
new Blob(byteArrays, {type: contentType});
How can this problem be solved?
var fullFileName = 'example.test';
var b64file = ''; // base64 payload omitted here
var contentType = 'application/octet-stream';

b64toBlob(b64file, contentType, 512, function (blob) {
    if (typeof MouseEvent != "function") { // IE
        $('#ie_download').off('click').on('click', function () {
            window.navigator.msSaveBlob(blob, fullFileName);
        })
        .show();
        success();
        return;
    }
    // other browsers
    var blobUrl = URL.createObjectURL(blob);
    $('#download')
        .attr('download', fullFileName)
        .attr('href', blobUrl)
        .show();
    success();
});

function success() {
    $('#waiting').hide();
}

function b64toBlob(b64Data, contentType, sliceSize, resultCb) {
    contentType = contentType || '';
    sliceSize = sliceSize || 512;
    var byteCharacters = atob(b64Data);
    var byteArrays = [];
    var offset = 0;
    setTimeout(function generateByteArrays() {
        var slice = byteCharacters.slice(offset, offset + sliceSize);
        var byteNumbers = new Array(slice.length);
        for (var i = 0; i < slice.length; i++) {
            byteNumbers[i] = slice.charCodeAt(i);
        }
        var byteArray = new Uint8Array(byteNumbers);
        byteArrays.push(byteArray);
        offset += sliceSize;
        if (offset < byteCharacters.length) {
            setTimeout(generateByteArrays, 0);
        }
        else {
            resultCb(new Blob(byteArrays, {type: contentType}));
        }
    }, 0);
}
#download, #ie_download {
    display: none;
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<div id='waiting'>waiting...</div>
<a id='download'>Save</a>
<a id='ie_download'>IE Save</a>
Upvotes: 1
Views: 1445
Update
I just noticed that the slice size is 512 bytes. This is extremely small: with a 5 MB file it creates 10,240 array slices, which IE seems to handle very slowly (i.e. create a buffer, copy the content, check the next slice, create a new buffer sized for the old data plus the next slice, copy the old buffer plus the new content, and so on).
You should be able to use a slice size at least 1000x larger (0.5 MB) without blocking IE11.
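For a rough sense of scale, here is a small sketch of the slice-count arithmetic (the variable name and the 5 MB figure are only for illustration):
// Illustrative slice-count arithmetic, not part of the original code:
var decodedSize = 5 * 1024 * 1024;                  // ~5 MB of decoded bytes
console.log(Math.ceil(decodedSize / 512));          // 10240 slices at 512 B each
console.log(Math.ceil(decodedSize / (512 << 10)));  // 10 slices at 0.5 MB each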
Demo using original code with a larger slice size:
setTimeout(test, 10);

function test() {
    // Generate a big base-64 string, chop off data-uri
    // NOTE: Initial creation will take a couple of seconds...
    var c = document.createElement("canvas");
    c.width = c.height = 6000;
    var ctx = c.getContext("2d");

    // create some lines to degrade compression ratio...
    for (var i = 0, r = Math.random.bind(Math), w = c.width, h = c.height; i < 500; i++) {
        ctx.moveTo(r() * w, r() * h);
        ctx.lineTo(r() * w, r() * h);
    }
    ctx.stroke();

    var base64 = c.toDataURL();
    base64 = base64.substr(base64.indexOf(",") + 1);

    // OK, now we have a raw base64 string we can use to test
    document.querySelector("out").innerHTML = "Converting...";

    // Increase sliceSize by x1024
    b64toBlob(base64, "application/octet-stream", 512 << 10, function (blob) {
        document.querySelector("out").innerHTML = "Blob size: " + blob.size;
    });

    function b64toBlob(b64Data, contentType, sliceSize, resultCb) {
        contentType = contentType || '';
        sliceSize = sliceSize || (512 << 10);
        var byteCharacters = atob(b64Data);
        var byteArrays = [];
        var offset = 0;
        setTimeout(function generateByteArrays() {
            var slice = byteCharacters.slice(offset, offset + sliceSize);
            var byteNumbers = new Array(slice.length);
            for (var i = 0; i < slice.length; i++) {
                byteNumbers[i] = slice.charCodeAt(i);
            }
            var byteArray = new Uint8Array(byteNumbers);
            byteArrays.push(byteArray);
            offset += sliceSize;
            if (offset < byteCharacters.length) {
                setTimeout(generateByteArrays, 5);
            }
            else {
                resultCb(new Blob(byteArrays, {type: contentType}));
            }
        }, 5);
    }
}
<out>Creating test data...</out>
Due to a bug in IE11, you cannot use XMLHttpRequest() with a data URI and response type "blob"; otherwise you could have used that to do all of these operations for you. The snippet below shows that approach for reference (it works in other browsers, but not in IE11):
var c = document.createElement("canvas");
c.width = c.height = 4000;
var ctx = c.getContext("2d");

// create some lines to degrade compression ratio...
for (var i = 0, r = Math.random.bind(Math), w = c.width, h = c.height; i < 200; i++) {
    ctx.moveTo(r() * w, r() * h);
    ctx.lineTo(r() * w, r() * h);
}
ctx.stroke();

var base64 = c.toDataURL();
base64 = base64.substr(base64.indexOf(",") + 1);

b64toBlob(base64, "application/octet-stream", function (blob) {
    console.log(blob);
});

// Using XMLHttpRequest to do the work (won't work in IE11...)
function b64toBlob(base64, mimeType, callback) {
    var xhr = new XMLHttpRequest();
    xhr.responseType = "blob";
    xhr.onload = function () {
        callback(this.response);
    };
    xhr.open("GET", "data:" + mimeType + ";base64," + base64);
    xhr.send();
}
Old answer (still applicable/recommended)
Increase the timeout to something like 7-10 ms and see if that unblocks the loop (or use an even higher value if it still blocks).
A timeout of 0 effectively defeats the purpose of this asynchronous segmentation approach.
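A minimal sketch of that change, inside the same generateByteArrays loop from the question (the 10 ms value is just an example):
if (offset < byteCharacters.length) {
    setTimeout(generateByteArrays, 10); // was 0; gives IE some idle time between slices
}
else {
    resultCb(new Blob(byteArrays, {type: contentType}));
}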
Upvotes: 3