node.js - Download an Azure blob stream to a zip in Node.js

Tags: node.js azure zip

I have implemented getBlobToStream() to download from an Azure blob. It works fine, but now I want to turn the stream into a zip and then download it, and every attempt so far has failed. Here is my working code for a single-stream download:

  fileModel.findOne({ _id: req.params.id, canceled: false, isVisible: true, linkExpired: false, isBlocked: false, isDeleted: false }).exec(async function (error, result) {
    if (error) {
      resolve(error);
    } else {
      const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
      const storage = require('azure-storage');
      const blobService = storage.createBlobService(process.env.BLOB_ACCOUNT, process.env.BLOB_ACCOUNT_KEY); 
      blobService.getBlobToStream(result.containerName, result.blobName, res, function (error, blob) {
        if (!error) { 
          console.log(blob); 
          res.end()
        } else {
          console.log(error);
          res.end();
        }
      });
    }
  })
})

Please help me download the stream as a zip. Also, is this even possible?

Best Answer

The getBlobToStream method comes from the old azure-storage package. In the new @azure/storage-blob package the equivalent is download. Below is code that uses archiver to compress the download stream and fs to save it to a file "test.zip".
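
For reference, the rough mapping between the two APIs looks like this (a minimal sketch only; it assumes a blobService / containerClient have already been constructed the same way as in the full code below):

// old azure-storage package: callback-based, writes into a stream you pass in
blobService.getBlobToStream(containerName, blobName, writableStream, callback);

// new @azure/storage-blob package (v12+): promise-based, hands you a readable stream
const response = await containerClient.getBlobClient(blobName).download(0);
response.readableStreamBody.pipe(writableStream);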


async function main() {
    const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
 
    const STORAGE_ACCOUNT_NAME = "<your storage account name>";
    const ACCOUNT_ACCESS_KEY = "<your storage account key>";
  
    const containerName = "<your container name>";
    const blobName = "<your blob name>";

    const zipFilePath = "D:\\test.zip"; // a path where the output zip file would get saved

    const credentials = new StorageSharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
    const blobServiceClient = new BlobServiceClient(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`,credentials);
    const containerClient = blobServiceClient.getContainerClient(containerName);
    const blobClient = containerClient.getBlobClient(blobName);

    const response = await blobClient.download(0); // download from offset 0
    // readableStreamBody is the Node.js readable stream of the blob's content
    await streamToCompressed(response.readableStreamBody, zipFilePath, blobName);
}

async function streamToCompressed(readableStream, outputFilePath, blobName) {  
  return new Promise((resolve, reject) => {  

    const fs = require("fs");
    const archiver = require('archiver');

    // create a file to stream archive data to. 
    // In case you want to directly stream output in http response of express, just grab 'res' in that case instead of creating file stream
    const output = fs.createWriteStream(outputFilePath);
    const archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });

    // listen for all archive data to be written
    // 'close' event is fired only when a file descriptor is involved
    output.on('close', () => {
      console.log(archive.pointer() + ' total bytes');
      console.log('archiver has been finalized and the output file descriptor has closed.');
      resolve();
    });

    // good practice to catch warnings (ie stat failures and other non-blocking errors)
    archive.on('warning', (err) => {
      if (err.code === 'ENOENT') {
        // log warning
      } else {
        // throw error
        throw err;
      }
    });
    
    // reject the promise instead of throwing inside the event handler
    archive.on('error', (err) => {
      reject(err);
    });
 
    // pipe archive data to the file
    archive.pipe(output);

    // finalize the archive (ie we are done appending files but streams have to finish yet)
    archive.append(readableStream, { name: blobName })
            .finalize();

    readableStream.on("error", reject); 
  });
}

main().then(() => console.log('Done')).catch((ex) => console.log(ex.message));
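
If the goal is to send the zip straight back in the Express response (as in the question), the same idea works with res in place of the file write stream, as the comments above hint. A minimal sketch, assuming an Express route handler with res in scope and response.readableStreamBody as the download stream:

// sketch: stream the zip archive directly into the Express response instead of a file
async function streamToZipResponse(readableStream, res, blobName) {
  return new Promise((resolve, reject) => {
    const archiver = require('archiver');

    // tell the browser it is receiving a zip download
    res.setHeader('Content-Type', 'application/zip');
    res.setHeader('Content-Disposition', `attachment; filename="${blobName}.zip"`);

    const archive = archiver('zip', { zlib: { level: 9 } });

    archive.on('error', reject);
    readableStream.on('error', reject);

    // 'finish' fires on res once the archive has been fully flushed to the client
    res.on('finish', resolve);

    archive.pipe(res);                               // pipe archive output into the HTTP response
    archive.append(readableStream, { name: blobName });
    archive.finalize();
  });
}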

Update for multiple-file support:


async function main() {
    const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
 
    const STORAGE_ACCOUNT_NAME = "<your storage account name>";
    const ACCOUNT_ACCESS_KEY = "<your storage account key>";

    const containerName = "<your container name>";
    const blobNames = [ "blob 1 name", "blob 2 name" ];

    const zipFilePath = "D:\\test.zip";

    const credentials = new StorageSharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
    const blobServiceClient = new BlobServiceClient(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`,credentials);
    const containerClient = blobServiceClient.getContainerClient(containerName);

    const streamDict = {}; // map of each blobName to its corresponding download stream

    for (const blobName of blobNames) {
      const blobClient = containerClient.getBlobClient(blobName);
      const response = await blobClient.download(0); // download from offset 0
      streamDict[blobName] = response.readableStreamBody; // Node.js readable stream of the blob
    }

    await streamsToCompressed(streamDict, zipFilePath);
}

async function streamsToCompressed(streamDict, outputFilePath) {  
  return new Promise((resolve, reject) => {  

    const fs = require("fs");
    const archiver = require('archiver');

    // create a file to stream archive data to. 
    // In case you want to directly stream output in http response of express, just grab 'res' in that case instead of creating file stream
    const output = fs.createWriteStream(outputFilePath);
    const archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });

    // listen for all archive data to be written
    // 'close' event is fired only when a file descriptor is involved
    output.on('close', () => {
      console.log(archive.pointer() + ' total bytes');
      console.log('archiver has been finalized and the output file descriptor has closed.');
      resolve(); // resolve only once the zip has been fully written out
    });

    // good practice to catch warnings (ie stat failures and other non-blocking errors)
    archive.on('warning', (err) => {
      if (err.code === 'ENOENT') {
        // log warning
      } else {
        // throw error
        throw err;
      }
    });
    
    // reject the promise instead of throwing inside the event handler
    archive.on('error', (err) => {
      reject(err);
    });
 
    // pipe archive data to the file
    archive.pipe(output);
    
    for(const blobName in streamDict) {
        const readableStream = streamDict[blobName];
        
        // finalize the archive (ie we are done appending files but streams have to finish yet)
        archive.append(readableStream, { name: blobName });

        readableStream.on("error", reject);
    }

    archive.finalize(); // resolution happens in the 'close' handler above, once writing is done
  });
}

main().then(() => console.log('Done')).catch((ex) => console.log(ex.message));
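
To tie this back to the question's route handler: assuming streamsToCompressed is adjusted to pipe into a writable stream passed by the caller (i.e. replacing fs.createWriteStream(outputFilePath) with the passed-in res, as the comment above suggests), the Express route could call it roughly like this. This is a sketch only; the blobNames array field on the document and the app/fileModel names are assumptions taken from the question.

// sketch: wire the multi-blob helper into the question's Express route
app.get('/download/:id', async (req, res, next) => {
  try {
    const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

    // same lookup as in the question, but with async/await instead of a callback
    const result = await fileModel.findOne({ _id: req.params.id, isDeleted: false }).exec();
    if (!result) return res.status(404).end();

    const credentials = new StorageSharedKeyCredential(process.env.BLOB_ACCOUNT, process.env.BLOB_ACCOUNT_KEY);
    const blobServiceClient = new BlobServiceClient(`https://${process.env.BLOB_ACCOUNT}.blob.core.windows.net`, credentials);
    const containerClient = blobServiceClient.getContainerClient(result.containerName);

    // download each blob and keep its readable stream, keyed by blob name
    const streamDict = {};
    for (const blobName of result.blobNames) { // blobNames is a hypothetical array field
      const response = await containerClient.getBlobClient(blobName).download(0);
      streamDict[blobName] = response.readableStreamBody;
    }

    // send the archive straight to the client as a zip download
    res.setHeader('Content-Type', 'application/zip');
    res.setHeader('Content-Disposition', 'attachment; filename="files.zip"');
    await streamsToCompressed(streamDict, res); // res passed in place of the file write stream
  } catch (err) {
    next(err);
  }
});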

Regarding "node.js - download an Azure blob stream to a zip in Node.js", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/63950743/
