Kaushal

Reputation: 11

Large file upload to Office 365 (StartUpload, ContinueUpload, FinishUpload) not working as expected - SharePoint

When I try to upload a large file in chunks using the three newer methods (StartUpload, ContinueUpload, FinishUpload), the final uploaded file is corrupt and its size is larger than the actual file. I am using the REST API to upload the file.

The steps I followed are as follows:

  1. Create the HTML file input.

<input name="FileUpload" type="file" id="uploadInput" className="inputFile" multiple="false" onchange="upload(this.files[0])" />

  2. The method below is the starting point of the code:

    • Create a global variable for the site URL.

var Tasks = {
  urlName: window.location.origin + "/",
  siteName: '/sites/ABC',
};

  3. Call the upload() method.

    • First, create a dummy file of size 0 in the folder so the large file upload can continue.

    • Create a FileReader object, then build chunks of the file with three parameters (offset, length, and method, i.e. start/continue/finishupload) and push them into an array.

    • Create a unique id for the upload (uploadId).
    • Call the uploadFile method.

function upload(file) {
  var docLibraryName = "/sites/ABC/Shared Documents";
  var fileName = $("#uploadInput").val().replace(/C:\\fakepath\\/i, '');
  var folderName = "";
  // Create the 0-byte placeholder file before the chunked upload starts
  createDummaryFile(docLibraryName, fileName, folderName);
  var fr = new FileReader();
  var offset = 0;
  var total = file.size;
  // Chunk size: 1 MB, or the whole file if it is smaller than that
  var length = 1000000 > total ? total : 1000000;
  var chunks = [];
  fr.onload = evt => {
    // Build the chunk list: each entry records its offset, length and REST method
    while (offset < total) {
      if (offset + length > total)
        length = total - offset;
      chunks.push({
        offset,
        length,
        method: getUploadMethod(offset, length, total)
      });
      offset += length;
    }
    for (var i = 0; i < chunks.length; i++)
      console.log(chunks[i]);
    if (chunks.length > 0) {
      // One upload session id is shared by all chunks of this file
      const id = getGuid();
      uploadFile(evt.target.result, id, docLibraryName, fileName, chunks, 0);
    }
  };
  fr.readAsArrayBuffer(file);
}

function createDummaryFile(libraryName, fileName, folderName) {
  return new Promise((resolve, reject) => {
    // Create an empty (0-byte) file that the chunked upload will be appended to
    var endpoint = Tasks.urlName + Tasks.siteName + "/_api/web/GetFolderByServerRelativeUrl('" + libraryName + "/" + folderName + "')/Files/add(url=@TargetFileName,overwrite='true')?" +
      "@TargetFileName='" + fileName + "'";
    const headers = {
      "accept": "application/json;odata=verbose"
    };
    performUpload(endpoint, headers, libraryName, fileName, folderName, convertDataBinaryString(0));
  });
}

function S4() {
  return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);
}

function getGuid() {
  return (S4() + S4() + "-" + S4() + "-4" + S4().substr(0, 3) + "-" + S4() + "-" + S4() + S4() + S4()).toLowerCase();
}
// Choose which REST method to use based on the chunk's position in the file

function getUploadMethod(offset, length, total) {
  if (offset + length + 1 > total) {
    return 'finishupload';
  } else if (offset === 0) {
    return 'startupload';
  } else if (offset < total) {
    return 'continueupload';
  }
  return null;
}
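
For illustration, assuming a hypothetical 2,500,000-byte file and the 1,000,000-byte chunk size above, the console.log loop in upload() would print three chunks, with only the last one using finishupload:

{ offset: 0,       length: 1000000, method: 'startupload' }
{ offset: 1000000, length: 1000000, method: 'continueupload' }
{ offset: 2000000, length: 500000,  method: 'finishupload' }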

  4. The uploadFile method.

    • Convert the ArrayBuffer into chunks to start uploading the file.

    • Upload the actual file chunks using the methods and 1 MB offsets created earlier (uploadFileChunk method).

    • Loop over the chunks, calling the same method for each one.

function uploadFile(result, id, libraryPath, fileName, chunks, index) {
  // Slice out the current chunk, upload it, then recurse for the next one
  const data = convertFileToBlobChunks(result, chunks[index]);
  var response = uploadFileChunk(id, libraryPath, fileName, chunks[index], data);
  index += 1;
  if (index < chunks.length)
    uploadFile(result, id, libraryPath, fileName, chunks, index);
}

function convertFileToBlobChunks(result, chunkInfo) {
  // Slice the current chunk out of the ArrayBuffer and convert it to a binary string
  var arrayBuffer = chunkInfo.method === 'finishupload' ? result.slice(chunkInfo.offset) : result.slice(chunkInfo.offset, chunkInfo.offset + chunkInfo.length);
  return convertDataBinaryString(arrayBuffer);
}

function convertDataBinaryString(data) {
  // Convert an ArrayBuffer to a binary string, one character per byte
  var fileData = '';
  var byteArray = new Uint8Array(data);
  for (var i = 0; i < byteArray.byteLength; i++) {
    fileData += String.fromCharCode(byteArray[i]);
  }
  return fileData;
}

  5. The uploadFileChunk method, which actually uploads the file chunks.

    • Build the endpoint string: startupload takes no fileOffset, while continueupload and finishupload include one.
    • Call the performUpload method to start uploading via the REST API.

function uploadFileChunk(id, libraryPath, fileName, chunk, data) {
  return new Promise((resolve, reject) => {
    // startupload takes no fileOffset; continueupload and finishupload need the chunk's offset
    var offset = chunk.offset === 0 ? '' : ',fileOffset=' + chunk.offset;
    var folderName = "";
    var endpoint = Tasks.urlName + Tasks.siteName + "/_api/web/getfilebyserverrelativeurl('" + libraryPath + "/" + fileName + "')/" + chunk.method + "(uploadId=guid'" + id + "'" + offset + ")";
    const headers = {
      "Accept": "application/json; odata=verbose",
      "Content-Type": "application/octet-stream"
    };
    performUpload(endpoint, headers, libraryPath, fileName, folderName, data);
  });
}
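
For reference, the chunk.method and offset values above produce endpoints of roughly this shape (the library path, file name, uploadId and offsets below are illustrative placeholders):

.../_api/web/getfilebyserverrelativeurl('<library>/<file>')/startupload(uploadId=guid'<uploadId>')
.../_api/web/getfilebyserverrelativeurl('<library>/<file>')/continueupload(uploadId=guid'<uploadId>',fileOffset=1000000)
.../_api/web/getfilebyserverrelativeurl('<library>/<file>')/finishupload(uploadId=guid'<uploadId>',fileOffset=2000000)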

function performUpload(endpoint, headers, libraryName, fileName, folderName, fileData) {
  return new Promise((resolve, reject) => {
    var digest = $("#__REQUESTDIGEST").val();
    $.ajax({
      url: endpoint,
      async: false,
      method: "POST",
      headers: headers,
      data: fileData,
      binaryStringRequestBody: true,
      success: data => resolve(data),
      error: err => reject(err.responseText)
    });
  });
}
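
Note that digest is read above but never attached to the request; SharePoint REST POST calls normally require the form digest in an X-RequestDigest header. A minimal sketch of the same call with the digest merged in (everything else unchanged):

$.ajax({
  url: endpoint,
  async: false,
  method: "POST",
  // Merge the form digest into whatever headers were passed in
  headers: Object.assign({ "X-RequestDigest": digest }, headers),
  data: fileData,
  binaryStringRequestBody: true,
  success: data => resolve(data),
  error: err => reject(err.responseText)
});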

Can anyone suggest why the uploaded file is corrupted and why its size differs from the actual file?

Thanks in advance.

Upvotes: 1

Views: 1684

Answers (1)

Brian

Reputation: 21

I had the same problem with this code. I changed convertFileToBlobChunks to just return the ArrayBuffer.

function convertFileToBlobChunks(result, chunkInfo) {
  var arrayBuffer = chunkInfo.method === 'finishupload' ?
    result.slice(chunkInfo.offset) :
    result.slice(chunkInfo.offset, chunkInfo.offset + chunkInfo.length);
  return arrayBuffer;
}

I also removed "Content-Type": "application/octet-stream" from the header.

After doing that it uploaded fine.
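
For anyone wiring this fix into the question's performUpload with plain jQuery, here is a minimal sketch of what the adjusted $.ajax call could look like. This is an assumption, not part of the original answer: processData: false is added so jQuery does not try to serialize the ArrayBuffer, and endpoint, digest, headers, fileData, resolve and reject are the names from the question's code.

$.ajax({
  url: endpoint,
  async: false,
  method: "POST",
  // headers no longer contain "Content-Type": "application/octet-stream"
  headers: headers,
  // fileData is now the raw ArrayBuffer returned by convertFileToBlobChunks
  data: fileData,
  // Keep jQuery from serializing the ArrayBuffer into a query string
  processData: false,
  success: data => resolve(data),
  error: err => reject(err.responseText)
});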

Upvotes: 2
