1
我正在使用以下程序将大文件上传到azure blob存储。 当上传一个小文件小于500KB,程序工作正常,否则 我在下面的行收到一个错误:将大文件上传到azure blob
blob.PutBlock(blockIdBase64,流,NULL);
为“类型‘Microsoft.WindowsAzure.Storage.StorageException’的未处理的异常在Microsoft.WindowsAzure.Storage.dll 发生其他信息:远程服务器返回一个错误:(400)错误的请求”
没有关于异常的细节,所以我不确定最新的问题。有没有关于这可能是错误的东西在下面的程序中的任何建议:
class Program
{
static void Main(string[] args)
{
string accountName = "newstg";
string accountKey = "fFB86xx5jbCj1A3dC41HtuIZwvDwLnXg==";
// list of all uploaded block ids. need for commiting them at the end
var blockIdList = new List<string>();
StorageCredentials creds = new StorageCredentials(accountName, accountKey);
CloudStorageAccount storageAccount = new CloudStorageAccount(creds, useHttps: true);
CloudBlobClient client = storageAccount.CreateCloudBlobClient();
CloudBlobContainer sampleContainer = client.GetContainerReference("newcontainer2");
string fileName = @"C:\sample.pptx";
CloudBlockBlob blob = sampleContainer.GetBlockBlobReference("APictureFile6");
using (var file = new FileStream(fileName, FileMode.Open, FileAccess.Read))
{
int blockSize = 1;
// block counter
var blockId = 0;
// open file
while (file.Position < file.Length)
{
// calculate buffer size (blockSize in KB)
var bufferSize = blockSize * 1024 < file.Length - file.Position ? blockSize * 1024 : file.Length - file.Position;
var buffer = new byte[bufferSize];
// read data to buffer
file.Read(buffer, 0, buffer.Length);
// save data to memory stream and put to storage
using (var stream = new MemoryStream(buffer))
{
// set stream position to start
stream.Position = 0;
// convert block id to Base64 Encoded string
var blockIdBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(blockId.ToString(CultureInfo.InvariantCulture)));
blob.PutBlock(blockIdBase64, stream, null);
blockIdList.Add(blockIdBase64);
// increase block id
blockId++;
}
}
file.Close();
}
blob.PutBlockList(blockIdList);
}
}
为什么你仍然使用低级别的PutBlock API? Azure库有更简单的上传数据的方法。 – usr
@usr:你能否给我推荐任何使用流并可能使用chunking代替缓冲区来避免内存问题的库? – user1400915
Google针对“Azure .NET上传文件”。这是一个解决的问题。 – usr