skeferstat

Reputation: 13

Wrong file size after uploading to the blob container

I have uploaded a 100 GB text file to Azure Blob Storage. The file is 107,374,182,400 bytes on my hard disk. After the upload, the Azure portal shows it with size 3.13 GiB, content-type application/octet-stream, type Block blob. When I download the file directly from the Azure portal (via the download button), I get a file of 3.12 GB (3,355,443,200 bytes). This is obviously not the desired result. What am I doing wrong with the transfer?

This is my source code:

public void UploadStreamAsync(string file)
{
    var fileInfo = new FileInfo(file);

    CloudBlockBlob myBlob = _container.GetBlockBlobReference(fileInfo.Name);

    // Upload in 32 MiB blocks.
    var blockSize = 32 * 1024 * 1024;
    myBlob.StreamWriteSizeInBytes = blockSize;
    var fileName = fileInfo.FullName;
    long bytesToUpload = fileInfo.Length;
    long fileSize = bytesToUpload;

    if (bytesToUpload < blockSize)
    {
        // Small file: upload it in a single request.
        var ado = myBlob.UploadFromFileAsync(fileName);
        Console.WriteLine(ado.Status); // does not help much
        ado.ContinueWith(t =>
        {
            _logger.LogInformation("Status = " + t.Status);
            _logger.LogInformation("It is over"); // this is working OK
        });
    }
    else
    {
        // Large file: upload it block by block, then commit the block list.
        List<string> blockIds = new List<string>();
        int index = 1;
        long startPosition = 0;
        long bytesUploaded = 0;
        do
        {
            // Read the next block from the file.
            var bytesToRead = Math.Min(blockSize, bytesToUpload);
            var blobContents = new byte[bytesToRead];
            using (FileStream fs = new FileStream(fileName, FileMode.Open))
            {
                fs.Position = startPosition;
                fs.Read(blobContents, 0, (int)bytesToRead);
            }
            ManualResetEvent mre = new ManualResetEvent(false);
            // Block IDs must be Base64-encoded and all of equal length.
            var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(index.ToString("d6")));
            Console.WriteLine("Now uploading block # " + index.ToString("d6"));
            blockIds.Add(blockId);
            var ado = myBlob.PutBlockAsync(blockId, new MemoryStream(blobContents), null);
            ado.ContinueWith(t =>
            {
                bytesUploaded += bytesToRead;
                bytesToUpload -= bytesToRead;
                startPosition += bytesToRead;
                index++;
                double percentComplete = (double)bytesUploaded / (double)fileSize;
                Console.WriteLine("Percent complete = " + percentComplete.ToString("P"));
                _logger.LogInformation("Percent complete = " + percentComplete.ToString("P"));
                mre.Set();
            });
            mre.WaitOne();
        }
        while (bytesToUpload > 0);

        Console.WriteLine("Now committing block list");
        _logger.LogInformation("Now committing block list");
        foreach (var blockId in blockIds)
        {
            _logger.LogInformation("BlockId: " + blockId);
        }

        var pbl = myBlob.PutBlockListAsync(blockIds);
        pbl.ContinueWith(t =>
        {
            Console.WriteLine("Blob uploaded completely.");
            _logger.LogInformation("Blob uploaded completely.");
        });
    }
}

Upvotes: 1

Views: 1250

Answers (1)

RamaraoAdapa

Reputation: 3137

Thank you @skeferstat for the comment. Converting it to an answer.

Adding the missing content type fixed the issue.
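As a minimal sketch of that fix, assuming the same SDK the question uses (CloudBlockBlob from Microsoft.Azure.Storage.Blob): the snippet below sets the blob's content type explicitly and persists it to the service. The method name SetBlobContentTypeAsync and the "text/plain" value are illustrative placeholders, not part of the original post.

using System.Threading.Tasks;
using Microsoft.Azure.Storage.Blob;

public static async Task SetBlobContentTypeAsync(CloudBlobContainer container, string blobName)
{
    CloudBlockBlob blob = container.GetBlockBlobReference(blobName);

    // Fetch the current properties first so other values are not overwritten.
    await blob.FetchAttributesAsync();

    // Set the content type locally...
    blob.Properties.ContentType = "text/plain";

    // ...and persist it to the service; without this call the change stays local.
    await blob.SetPropertiesAsync();
}

Alternatively, assigning myBlob.Properties.ContentType before the PutBlockListAsync call in the question's code should commit the content type together with the block list, since the Put Block List operation accepts the blob's content-type header.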

Upvotes: 1
