Reputation: 81
I am trying to upload multiple images to the Azure Blob service. Everything looks fine to me, but I must have missed something, because I am getting this error:
events.js:292
      throw er; // Unhandled 'error' event
      ^

Error: ENOENT: no such file or directory, open 'logo192.png'
Emitted 'error' event on ReadStream instance at:
    at internal/fs/streams.js:163:14
    at FSReqCallback.oncomplete (fs.js:159:23) {
  errno: -2,
  code: 'ENOENT',
  syscall: 'open',
  path: 'logo192.png'
}
const uuidv1 = require("uuid/v1");
var azure = require("azure-storage");
var fs = require("fs");
const containerName = "image";
const fileUpload = async (req, res, next) => {
  let imageStatus = [];
  console.log(req.files);
  await req.files.forEach((reqfile, i) => {
    const blobName = uuidv1() + "-" + reqfile.originalname;
    const blobSvc = azure.createBlobService();
    const file = reqfile.originalname;
    const stream = fs
      .createReadStream(file)
      .pipe(
        blobSvc.createWriteStreamToBlockBlob(containerName, blobName, {
          blockIdPrefix: "block",
        })
      );
    let dataLength = 0;
    stream
      .on("data", function (chunk) {
        dataLength += chunk.length;
      })
      .on("end", function () {
        console.log("The length was:", dataLength);
      });
    blobSvc.createBlockBlobFromStream(
      containerName,
      blobName,
      stream,
      dataLength,
      function (error, result, response) {
        if (!error) {
          console.log("ok Blob uploaded");
          imageStatus.push({
            imageName: result.name,
            imagePath: result.name,
          });
        } else {
          console.log(error);
        }
      }
    );
  });
  res.status(200).json({ imageStatus });
};
module.exports = fileUpload;
Console output of req.files:
[
  {
    fieldname: 'files',
    originalname: 'logo192.png',
    encoding: '7bit',
    mimetype: 'image/png',
    buffer: <Buffer 89 50 4e 47 0d 0a 1a 0a 00 00 00 0d 49 48 44 52 00 00 00 c0 00 00 00 c0 08 03 00 00 00 65 02 9c 35 00 00 00 87 50 4c 54 45 00 00 00 64 da fb 61 da fc ... 5297 more bytes>,
    size: 5347
  }
]
Upvotes: 0
Views: 1119
Reputation: 23111
If you want to upload images to Azure Blob Storage in a Node.js application, I suggest you use the new SDK @azure/storage-blob. The azure-storage SDK is legacy.
For example:
npm install into-stream @azure/storage-blob multer
PS:
into-stream is used to convert a Buffer into a stream.
multer is used to handle the file data; it reads each file into memory as a Buffer.
const {
BlobServiceClient,
StorageSharedKeyCredential,
newPipeline,
} = require("@azure/storage-blob");
//define multer
const multer = require("multer");
const inMemoryStorage = multer.memoryStorage();
const uploadStrategy = multer({ storage: inMemoryStorage }).array("images");
// configure storage
const accountName = "jimtestdiag924";
const accountKey =
  "";
const sharedKeyCredential = new StorageSharedKeyCredential(
  accountName,
  accountKey,
);
const pipeline = newPipeline(sharedKeyCredential);
const blobServiceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  pipeline,
);
const uploadOptions = { bufferSize: 4 * 1024 * 1024, maxConcurrency: 20 };
const getStream = require("into-stream");
const containerName = "images";
const fileUpload = async (req, res, next) => {
  let imageStatus = [];
  console.log(req.files);
  try {
    // Upload all files in parallel and wait for every upload to finish before
    // responding (forEach would not await the async callbacks).
    await Promise.all(
      req.files.map(async (reqfile) => {
        const blobName = reqfile.originalname;
        // multer keeps the file in memory as a Buffer, so convert it to a stream.
        const stream = getStream(reqfile.buffer);
        const containerClient = blobServiceClient.getContainerClient(containerName);
        const blockBlobClient = containerClient.getBlockBlobClient(blobName);
        await blockBlobClient.uploadStream(
          stream,
          uploadOptions.bufferSize,
          uploadOptions.maxConcurrency,
          { blobHTTPHeaders: { blobContentType: reqfile.mimetype } },
        );
        imageStatus.push({
          imageName: blobName,
          imagePath: blockBlobClient.url,
        });
      }),
    );
    res.status(200).json({ imageStatus });
  } catch (err) {
    next(err);
  }
};
module.exports = fileUpload;
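For reference, here is a minimal sketch of how the multer middleware and the handler above could be wired into an Express route. The route path, file name, port, and the extra export of uploadStrategy are my assumptions, not part of the original answer:

// app.js - sketch only; assumes the module above also exports uploadStrategy,
// e.g. module.exports = { uploadStrategy, fileUpload };
const express = require("express");
const { uploadStrategy, fileUpload } = require("./fileUpload");

const app = express();

// multer (uploadStrategy) parses the multipart/form-data request and puts the
// files (form field name "images") on req.files as in-memory Buffers;
// fileUpload then streams each buffer to Azure Blob Storage.
app.post("/upload", uploadStrategy, fileUpload);

// Basic error handler so a failed upload returns a 500 instead of crashing.
app.use((err, req, res, next) => {
  console.log(err);
  res.status(500).json({ error: err.message });
});

app.listen(3000, () => console.log("Server listening on port 3000"));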
For more details, please refer to here.
Upvotes: 1