A task was canceled Exception when trying to upload file to S3 bucket - amazon-s3

A "task was canceled" exception is thrown when I call fileTransferUtility.UploadAsync to upload a file to S3. I'm using .NET Core 2.0. What am I doing wrong in the code below?
Is it something to do with a timeout? If so, how do I set the timeout for the S3 upload, or do I have to set some properties on the S3 bucket?
Below is my controller code:
public class UploadController : Controller
{
    private IHostingEnvironment _hostingEnvironment;
    private AmazonS3Client _s3Client = new AmazonS3Client(RegionEndpoint.APSoutheast1);
    private string _bucketName = "fileupload"; // this is my Amazon bucket name
    private static string _bucketSubdirectory = String.Empty;
    private string uploadWithKeyName = "testFile";

    public UploadController(IHostingEnvironment environment)
    {
        _hostingEnvironment = environment;
    }

    [HttpPost("UploadExcelData")]
    public async Task PostExcelData()
    {
        var files = Request.Form.Files;
        var stringVal = Request.Form.Keys;
        long size = files.Sum(f => f.Length);
        foreach (var formFile in files)
        {
            if (formFile.Length > 0)
            {
                var filename = ContentDispositionHeaderValue
                    .Parse(formFile.ContentDisposition)
                    .FileName
                    .TrimStart().ToString();
                filename = _hostingEnvironment.WebRootPath + $@"\uploads" + $@"\{formFile.FileName}";
                size += formFile.Length;
                using (var fs = System.IO.File.Create(filename))
                {
                    formFile.CopyTo(fs);
                    fs.Flush();
                } // these lines save the uploaded file to the project directory
                await UploadToS3(filename); // this is the method that uploads the saved file to S3
            }
        }
        // return Ok();
    }

    public async Task UploadToS3(string filePath)
    {
        try
        {
            TransferUtility fileTransferUtility = new TransferUtility(_s3Client);
            string bucketName;
            if (_bucketSubdirectory == "" || _bucketSubdirectory == null)
            {
                bucketName = _bucketName; // no subdirectory, just the bucket name
            }
            else
            {
                // subdirectory and bucket name
                bucketName = _bucketName + @"/" + _bucketSubdirectory;
            }
            // 1. Upload a file; the file name is used as the object key name.
            await fileTransferUtility.UploadAsync(filePath, bucketName, uploadWithKeyName).ConfigureAwait(false);
            Console.WriteLine("Upload 1 completed");
        }
        catch (AmazonS3Exception s3Exception)
        {
            Console.WriteLine(s3Exception.Message,
                s3Exception.InnerException);
        }
        catch (Exception ex)
        {
            Console.WriteLine("Unknown error", ex.Message);
        }
    }
}

I forgot to pass the credentials:
private AmazonS3Client _s3Client = new AmazonS3Client(DynamoDbCRUD.Credentials.AccessKey,DynamoDbCRUD.Credentials.SecretKey, RegionEndpoint.APSoutheast1);
This line works fine.
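Regarding the timeout part of the question: the timeout is configured on the S3 client rather than on the bucket, and a timed-out HTTP request can also surface as a TaskCanceledException. Here is a minimal sketch of raising the client timeouts via AmazonS3Config; the ten-minute values are purely illustrative, not recommendations:

using System;
using Amazon;
using Amazon.S3;

public static class S3ClientFactory
{
    // Hedged sketch: AmazonS3Config (via ClientConfig) exposes Timeout and
    // ReadWriteTimeout for the underlying HTTP request. Values are examples only.
    public static AmazonS3Client CreateWithLongerTimeouts()
    {
        var config = new AmazonS3Config
        {
            RegionEndpoint = RegionEndpoint.APSoutheast1,
            Timeout = TimeSpan.FromMinutes(10),
            ReadWriteTimeout = TimeSpan.FromMinutes(10)
        };
        return new AmazonS3Client(config);
    }
}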

Related

Using Google Drive Api on android studio?

I am working on an app that integrates the Google Drive API to upload/download files from the user's Google Drive account. I am using
this documentation (Google Drive API v3), but it doesn't work.
I get this error:
'java.net.MalformedURLException: Attempt to invoke virtual method 'int java.lang.String.length()' on a null object reference'. I can log in to the Google account and log out, but I can't upload files.
public Task<String> newFile(String filepath) {
return Tasks.call(executor, () -> {
File fmdt = new File();
fmdt.setName("contents.json");
java.io.File fp = new java.io.File(filepath);
FileContent mediaContent = new FileContent("application/json",fp);
File mf = null;
try {
mf = driveService.files().create(fmdt, mediaContent).setFields("id").execute();
} catch (Exception e) {
e.printStackTrace();
Log.d("usr","drive-> "+e.getMessage() );
}
if (mf == null) {
throw new IOException("null result");
}
return mf.getId();
});
}
This is how I solved it; I assume you are already signed in to Google.
//the class that handle upload request
public class GoogleDriveHelper {
private final Executor executor = Executors.newSingleThreadExecutor();
private final Drive driveService;
public GoogleDriveHelper(Drive mDriveService) {
this.driveService = mDriveService;
}
public Task<String> newFile(String fileName, String path, String parentId, File file) {
return Tasks.call(executor, () -> {
//use parent to save in especific folder
File fmdt = new File()
.setParents(Collections.singletonList(parentId))
.setMimeType("application/octet-stream")
.setName(fileName);
File mf = null;
FileContent mediaContent = new FileContent("application/octet-stream", file);
try {
mf = driveService.files().create(fmdt, mediaContent).setFields("id").execute();
} catch (Exception e) {
e.printStackTrace();
}
if (mf == null) {
throw new IOException("null result");
}
return mf.getId();
});
}}
//inicialize google drive service
val credential =
GoogleAccountCredential.usingOAuth2(
ctx,
Collections.singleton(DriveScopes.DRIVE_FILE)
)
credential.selectedAccount = account!!.account
gService = Drive.Builder(
AndroidHttp.newCompatibleTransport(),
GsonFactory(),
credential
).
setApplicationName(APPLICATION_NAME).build();
gDrivehelper = GoogleDriveHelper(gService)

Download the file as a zip in ASP.NET Core

I am designing an educational site. When a user downloads a training course, I want the download to be delivered as a compressed (zip) file. Please suggest a solution.
My code:
public Tuple<byte[],string,string> DownloadFile(long episodeId)
{
var episode=_context.CourseEpisodes.Find(episodeId);
string filepath = Path.Combine(Directory.GetCurrentDirectory(),
"wwwroot/courseFiles",
episode.FileName);
string fileName = episode.FileName;
if(episode.IsFree)
{
byte[] file = System.IO.File.ReadAllBytes(filepath);
return Tuple.Create(file, "application/force-download",fileName);
}
if(_httpContextAccessor.HttpContext.User.Identity.IsAuthenticated)
{
if(IsuserIncorse(_httpContextAccessor.HttpContext.User.Identity.Name,
episode.CourseId))
{
byte[] file = System.IO.File.ReadAllBytes(filepath);
return Tuple.Create(file, "application/force-download", fileName);
}
}
return null;
}
I wrote a demo to show how to download a zip file from .NET Core:
First, add the NuGet package SharpZipLib, create an Images folder in wwwroot, and put some pictures in it.
controller
public class HomeController : Controller
{
private IHostingEnvironment _IHosting;
public HomeController(IHostingEnvironment IHosting)
{
_IHosting = IHosting;
}
public IActionResult Index()
{
return View();
}
public FileResult DownLoadZip()
{
var webRoot = _IHosting.WebRootPath;
var fileName = "MyZip.zip";
var tempOutput = webRoot + "/Images/" + fileName;
using (ZipOutputStream IzipOutputStream = new ZipOutputStream(System.IO.File.Create(tempOutput)))
{
IzipOutputStream.SetLevel(9);
byte[] buffer = new byte[4096];
var imageList = new List<string>();
imageList.Add(webRoot + "/Images/1202.png");
imageList.Add(webRoot + "/Images/1data.png");
imageList.Add(webRoot + "/Images/aaa.png");
for (int i = 0; i < imageList.Count; i++)
{
ZipEntry entry = new ZipEntry(Path.GetFileName(imageList[i]));
entry.DateTime= DateTime.Now;
entry.IsUnicodeText = true;
IzipOutputStream.PutNextEntry(entry);
using (FileStream oFileStream = System.IO.File.OpenRead(imageList[i]))
{
int sourceBytes;
do
{
sourceBytes = oFileStream.Read(buffer, 0, buffer.Length);
IzipOutputStream.Write(buffer, 0, sourceBytes);
}while (sourceBytes > 0);
}
}
IzipOutputStream.Finish();
IzipOutputStream.Flush();
IzipOutputStream.Close();
}
byte[] finalResult = System.IO.File.ReadAllBytes(tempOutput);
if (System.IO.File.Exists(tempOutput)) {
System.IO.File.Delete(tempOutput);
}
if (finalResult == null || !finalResult.Any()) {
throw new Exception(String.Format("Nothing found"));
}
return File(finalResult, "application/zip", fileName);
}
}
When I click DownloadZip, it downloads a .zip file.
The simple example that follows illustrates the use of the static ZipFile.CreateFromDirectory method which, despite being in the System.IO.Compression namespace, actually resides in the System.IO.Compression.FileSystem assembly, so you need to add a reference to that assembly.
[HttpPost]
public FileResult Download()
{
List<string> files = new List<string> { "filepath1", "filepath2" };
var archive = Server.MapPath("~/archive.zip");
var temp = Server.MapPath("~/temp");
// clear any existing archive
if (System.IO.File.Exists(archive))
{
System.IO.File.Delete(archive);
}
// empty the temp folder
Directory.EnumerateFiles(temp).ToList().ForEach(f => System.IO.File.Delete(f));
// copy the selected files to the temp folder
files.ForEach(f => System.IO.File.Copy(f, Path.Combine(temp, Path.GetFileName(f))));
// create a new archive
ZipFile.CreateFromDirectory(temp, archive);
return File(archive, "application/zip", "archive.zip");
}
Answer from Source - MikesDotNetting
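Note that Server.MapPath in the example above comes from classic ASP.NET and is not available in ASP.NET Core. As a hedged alternative for the ASP.NET Core question, here is a minimal sketch that builds the archive in memory with the built-in System.IO.Compression types; the controller name, file list, and injected IHostingEnvironment are assumptions for illustration:

using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;

public class DownloadController : Controller
{
    private readonly IHostingEnvironment _env; // hypothetical injected environment

    public DownloadController(IHostingEnvironment env)
    {
        _env = env;
    }

    [HttpPost]
    public FileResult DownloadZip()
    {
        // Hypothetical list of course files under wwwroot/courseFiles.
        var files = new List<string>
        {
            Path.Combine(_env.WebRootPath, "courseFiles", "lesson1.mp4"),
            Path.Combine(_env.WebRootPath, "courseFiles", "lesson2.mp4")
        };

        using (var memoryStream = new MemoryStream())
        {
            // Build the archive in memory; no temp file or cleanup is needed.
            using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, leaveOpen: true))
            {
                foreach (var file in files)
                {
                    archive.CreateEntryFromFile(file, Path.GetFileName(file));
                }
            }
            return File(memoryStream.ToArray(), "application/zip", "course.zip");
        }
    }
}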

ASP.Net Core - EC2 to S3 file upload with Access Denied

I have developed a .NET Core 3.1 Web API which allows users to upload their documents to an S3 bucket. When I deploy the API to an AWS Elastic Beanstalk EC2 instance and call the endpoint that uploads the file to S3, I get an "Access Denied" error.
By the way, I have created an IAM policy and role that give full access to S3 from my EC2 instance. I have also copied the .aws folder, which contains the credentials file, onto the EC2 instance.
API Controller Action
public async Task<ApiResponse> UpdateProfilePic([FromBody]UploadProfilePicRequest model)
{
using (Stream stream = model.profilePicData.Base64StringToStream(out string header))
{
var tags = new List<KeyValuePair<string, string>>();
var metaData = new List<KeyValuePair<string, string>>();
metaData.Add(new KeyValuePair<string, string>("Content-Disposition", $"attachment; filename=\"{model.filename}\""));
if (_host.IsDevelopment())
{
tags.Add(new KeyValuePair<string, string>("public", "yes"));
}
await AmazonS3Uploader.UploadFileAsync(stream, "myDir/", model.fileId, tags, metaData);
}
}
The AmazonS3Helper class shown below:
using Amazon;
using Amazon.Runtime;
using Amazon.Runtime.CredentialManagement;
using Amazon.S3;
using Amazon.S3.Model;
using Amazon.S3.Transfer;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
namespace UploderApp.Services
{
public static class AmazonS3Uploader
{
private static readonly RegionEndpoint bucketRegion = RegionEndpoint.APSouth1;
private static readonly IAmazonS3 s3Client = new AmazonS3Client(GetAwsCredentials(), bucketRegion);
private static readonly string S3Bucket = "abc-test";
private static AWSCredentials GetAwsCredentials()
{
var chain = new CredentialProfileStoreChain();
if (chain.TryGetAWSCredentials("MYPROFILE", out AWSCredentials awsCredentials))
{
return awsCredentials;
}
return null;
}
public static async Task UploadFileAsync(Stream fileStream, string virtualDirectory, string keyName)
{
try
{
using (var fileTransferUtility = new TransferUtility(s3Client))
{
//Upload data from a type of System.IO.Stream.
await fileTransferUtility.UploadAsync(fileStream, S3Bucket, virtualDirectory + keyName).ConfigureAwait(true);
}
}
catch (AmazonS3Exception e)
{
throw new Exception($"Error encountered on server. Message:'{e.Message}' when writing an object");
}
}
public static async Task UploadFileAsync(Stream stream, string virtualDirectory, string keyName, List<KeyValuePair<string, string>> tags = null, List<KeyValuePair<string, string>> metadata = null)
{
try
{
// Specify advanced settings.
var fileTransferUtilityRequest = new TransferUtilityUploadRequest
{
BucketName = S3Bucket,
InputStream = stream,
StorageClass = S3StorageClass.Standard,
Key = virtualDirectory + keyName
};
if (metadata != null)
{
foreach (var item in metadata)
{
fileTransferUtilityRequest.Metadata.Add(item.Key, item.Value);
}
}
if (tags != null)
{
fileTransferUtilityRequest.TagSet = new List<Tag>();
foreach (var tag in tags)
{
fileTransferUtilityRequest.TagSet.Add(new Tag { Key = tag.Key, Value = tag.Value });
}
}
using (var fileTransferUtility = new TransferUtility(s3Client))
{
await fileTransferUtility.UploadAsync(fileTransferUtilityRequest).ConfigureAwait(true);
}
}
catch (AmazonS3Exception e)
{
throw new Exception($"Error encountered on server. Message:'{e.Message}' when writing an object");
}
}
}
}
However, if I create a console application and use the above class without any modifications, it uploads the file from the same EC2 instance.
Code from the Main function of my Console Application.
public static void Main()
{
var file = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "/Screenshot.png";
try
{
var tags = new List<KeyValuePair<string, string>>();
var metaData = new List<KeyValuePair<string, string>>();
metaData.Add(new KeyValuePair<string, string>("Content-Disposition", $"attachment; filename=\"profile-pic.png\""));
using (var stream = new FileStream(file, FileMode.Open))
{
AmazonS3Uploader.UploadFileAsync(stream, "mydir/", "screenshot.png", tags, metaData).GetAwaiter().GetResult();
}
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
}
This is very strange. Can anybody help me to understand the root cause, please?
Edit 1: output of aws s3 ls s3://abc-test (screenshot).
Edit 2: uploading the EC2 folder to S3 (screenshot).

Files uploaded but not appearing on server

I use the code stated here to upload files through a webapi http://bartwullems.blogspot.pe/2013/03/web-api-file-upload-set-filename.html. I also made the following api to list all the files I have :
[HttpPost]
[Route("sharepoint/imageBrowser/listFiles")]
[SharePointContextFilter]
public async Task<HttpResponseMessage> Read()
{
string pathImages = HttpContext.Current.Server.MapPath("~/Content/images");
DirectoryInfo d = new DirectoryInfo(pathImages);//Assuming Test is your Folder
FileInfo[] Files = d.GetFiles(); //Getting Text files
List<object> lst = new List<object>();
foreach (FileInfo f in Files)
{
lst.Add(new
{
name = f.Name,
type = "f",
size = f.Length
});
}
return Request.CreateResponse(HttpStatusCode.OK, lst);
}
When calling this API, all the uploaded files are listed. But when I go to Azure I don't see any of them (Content.png is a file I manually uploaded to Azure).
Why are the files listed if they don't appear on Azure?
Based on your description, I suggest you first use the Azure Kudu console to locate the right folder in the Azure web portal and check for the image file.
Open the Kudu console:
In Kudu, click the debug console and go to site\wwwroot\yourfilefolder.
If you find the file still hasn't been uploaded successfully, there may be something wrong with your upload code. You could try the code below.
Notice: you need to add an images folder inside the wwwroot folder.
{
public class UploadingController : ApiController
{
public async Task<HttpResponseMessage> PostFile()
{
// Check if the request contains multipart/form-data.
if (!Request.Content.IsMimeMultipartContent())
{
throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
}
string root = Environment.GetEnvironmentVariable("HOME").ToString() + "\\site\\wwwroot\\images";
//string root = HttpContext.Current.Server.MapPath("~/images");
var provider = new FilenameMultipartFormDataStreamProvider(root);
try
{
StringBuilder sb = new StringBuilder(); // Holds the response body
// Read the form data and return an async task.
await Request.Content.ReadAsMultipartAsync(provider);
// This illustrates how to get the form data.
foreach (var key in provider.FormData.AllKeys)
{
foreach (var val in provider.FormData.GetValues(key))
{
sb.Append(string.Format("{0}: {1}\n", key, val));
}
}
// This illustrates how to get the file names for uploaded files.
foreach (var file in provider.FileData)
{
FileInfo fileInfo = new FileInfo(file.LocalFileName);
sb.Append(string.Format("Uploaded file: {0} ({1} bytes)\n", fileInfo.Name, fileInfo.Length));
}
return new HttpResponseMessage()
{
Content = new StringContent(sb.ToString())
};
}
catch (System.Exception e)
{
return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, e);
}
}
}
public class FilenameMultipartFormDataStreamProvider : MultipartFormDataStreamProvider
{
public FilenameMultipartFormDataStreamProvider(string path) : base(path)
{
}
public override string GetLocalFileName(System.Net.Http.Headers.HttpContentHeaders headers)
{
var name = !string.IsNullOrWhiteSpace(headers.ContentDisposition.FileName) ? headers.ContentDisposition.FileName : Guid.NewGuid().ToString();
return name.Replace("\"", string.Empty);
}
}
}

Amazon S3 .NET Core how to upload a file

I would like to upload a file to Amazon S3 from a .NET Core project. Is there any reference on how to create and use an AmazonS3 client? All I can find in the Amazon S3 documentation for .NET Core is this (http://docs.aws.amazon.com/sdk-for-net/v3/developer-guide/net-dg-config-netcore.html), which is not very helpful.
I did it using IFormFile, like this (you need to install AWSSDK.S3):
public async Task UploadFileToS3(IFormFile file)
{
using (var client = new AmazonS3Client("yourAwsAccessKeyId", "yourAwsSecretAccessKey", RegionEndpoint.USEast1))
{
using (var newMemoryStream = new MemoryStream())
{
file.CopyTo(newMemoryStream);
var uploadRequest = new TransferUtilityUploadRequest
{
InputStream = newMemoryStream,
Key = file.FileName,
BucketName = "yourBucketName",
CannedACL = S3CannedACL.PublicRead
};
var fileTransferUtility = new TransferUtility(client);
await fileTransferUtility.UploadAsync(uploadRequest);
}
}
}
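For completeness, a small, hypothetical usage sketch of how an action might call the method above; the route, validation, and response shape are assumptions:

// Hypothetical action in the same controller as UploadFileToS3 above;
// it accepts a multipart/form-data upload and forwards it to S3.
[HttpPost("upload")]
public async Task<IActionResult> Upload(IFormFile file)
{
    if (file == null || file.Length == 0)
        return BadRequest("No file was supplied.");

    await UploadFileToS3(file);
    return Ok(new { file.FileName, file.Length });
}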
For simple file uploading in a .NET Core project, I followed this link.
After finishing the simple file upload procedure, I followed the documentation at this and this, which were very helpful. The following two links were also helpful for a quick start.
https://github.com/awslabs/aws-sdk-net-samples/blob/master/ConsoleSamples/AmazonS3Sample/AmazonS3Sample/S3Sample.cs
http://www.c-sharpcorner.com/article/fileupload-to-aws-s3-using-asp-net/
This was my final code snippet in the controller for file upload (I skipped the view part, which is explained in detail in the link shared above).
[HttpPost("UploadFiles")]
public IActionResult UploadFiles(List<IFormFile> files)
{
long size = files.Sum(f => f.Length);
foreach (var formFile in files)
{
if (formFile.Length > 0)
{
var filename = ContentDispositionHeaderValue
.Parse(formFile.ContentDisposition)
.FileName
.TrimStart().ToString();
filename = _hostingEnvironment.WebRootPath + $@"\uploads" + $@"\{formFile.FileName}";
size += formFile.Length;
using (var fs = System.IO.File.Create(filename))
{
formFile.CopyTo(fs);
fs.Flush();
}//these code snippets saves the uploaded files to the project directory
uploadToS3(filename);//this is the method to upload saved file to S3
}
}
return RedirectToAction("Index", "Library");
}
This is the method to upload files to Amazon S3:
private IHostingEnvironment _hostingEnvironment;
private AmazonS3Client _s3Client = new AmazonS3Client(RegionEndpoint.EUWest2);
private string _bucketName = "mis-pdf-library";//this is my Amazon Bucket name
private static string _bucketSubdirectory = String.Empty;
public UploadController(IHostingEnvironment environment)
{
_hostingEnvironment = environment;
}
public void uploadToS3(string filePath)
{
try
{
TransferUtility fileTransferUtility = new
TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.EUWest2));
string bucketName;
if (_bucketSubdirectory == "" || _bucketSubdirectory == null)
{
bucketName = _bucketName; //no subdirectory just bucket name
}
else
{ // subdirectory and bucket name
bucketName = _bucketName + @"/" + _bucketSubdirectory;
}
// 1. Upload a file, file name is used as the object key name.
fileTransferUtility.Upload(filePath, bucketName);
Console.WriteLine("Upload 1 completed");
}
catch (AmazonS3Exception s3Exception)
{
Console.WriteLine(s3Exception.Message,
s3Exception.InnerException);
}
}
That was all for uploading files to an Amazon S3 bucket. I worked on .NET Core 2.0. Also, don't forget to add the necessary dependencies for using the Amazon API. These were:
AWSSDK.Core
AWSSDK.Extensions.NETCore.Setup
AWSSDK.S3
Hope this helps.
I wrote a complete sample for uploading a file to Amazon AWS S3 with ASP.NET Core MVC.
You can see my sample project on GitHub:
https://github.com/NevitFeridi/AWS_Upload_Sample_ASPCoreMVC
There is a function for uploading a file to S3 using the Amazon.S3 SDK in the HomeController.
In this function, UploadFileToAWSAsync, you can find everything you need:
// You must set your access key and secret key.
// To get them, go to your AWS console.
string AWS_accessKey = "xxxxxxx";
string AWS_secretKey = "xxxxxxxxxxxxxx";
string AWS_bucketName = "my-uswest";
string AWS_defaultFolder = "MyTest_Folder";
protected async Task<string> UploadFileToAWSAsync(IFormFile myfile, string subFolder = "")
{
var result = "";
try
{
var s3Client = new AmazonS3Client(AWS_accessKey, AWS_secretKey, Amazon.RegionEndpoint.USWest2);
var bucketName = AWS_bucketName;
var keyName = AWS_defaultFolder;
if (!string.IsNullOrEmpty(subFolder))
keyName = keyName + "/" + subFolder.Trim();
keyName = keyName + "/" + myfile.FileName;
var fs = myfile.OpenReadStream();
var request = new Amazon.S3.Model.PutObjectRequest
{
BucketName = bucketName,
Key = keyName,
InputStream = fs,
ContentType = myfile.ContentType,
CannedACL = S3CannedACL.PublicRead
};
await s3Client.PutObjectAsync(request);
result = string.Format("http://{0}.s3.amazonaws.com/{1}", bucketName, keyName);
}
catch (Exception ex)
{
result = ex.Message;
}
return result;
}
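As a usage sketch, a hypothetical action in the same controller could forward the posted file to UploadFileToAWSAsync and return whatever the helper produces (either the S3 URL or the exception message it catches); the route and sub-folder name are illustrative:

// Hypothetical action in the same HomeController as UploadFileToAWSAsync.
[HttpPost]
public async Task<IActionResult> Upload(IFormFile myfile)
{
    if (myfile == null || myfile.Length == 0)
        return BadRequest("No file selected.");

    // "avatars" is just an example sub-folder under AWS_defaultFolder.
    var url = await UploadFileToAWSAsync(myfile, "avatars");
    return Ok(new { url });
}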
In addition to @Tiago's answer, the AWS S3 SDK has changed a bit, so here is the updated method:
public async Task UploadImage(IFormFile file)
{
var credentials = new BasicAWSCredentials("access", "secret key");
var config = new AmazonS3Config
{
RegionEndpoint = Amazon.RegionEndpoint.EUNorth1
};
using var client = new AmazonS3Client(credentials, config);
await using var newMemoryStream = new MemoryStream();
file.CopyTo(newMemoryStream);
var uploadRequest = new TransferUtilityUploadRequest
{
InputStream = newMemoryStream,
Key = file.FileName,
BucketName = "your-bucket-name",
CannedACL = S3CannedACL.PublicRead
};
var fileTransferUtility = new TransferUtility(client);
await fileTransferUtility.UploadAsync(uploadRequest);
}
Per AWS SDK docs, .Net Core support was added in late 2016.
https://aws.amazon.com/sdk-for-net/
So the instructions for uploading files to S3 should be identical to any other instructions for .Net.
The "getting started" guide for the AWS SDK for .Net is literally the case you describe of connecting and uploading a file to S3 - and included as a sample project ready for you to run if you've installed the "AWS Toolkit for Visual Studio" (which should be installed with the .Net AWS SDK).
So all you need to do is open visual studio, find their sample S3 project, or you can look at it here:
// simple object put
PutObjectRequest request = new PutObjectRequest()
{
ContentBody = "this is a test",
BucketName = bucketName,
Key = keyName
};
PutObjectResponse response = client.PutObject(request);
This assumes you have instantiated an Amazon.S3.AmazonS3Client after including the namespace, and configured it with your own credentials.
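For instance, here is a minimal, self-contained sketch of that setup; the bucket and key names are placeholders, and credentials are assumed to come from the default chain (credentials file, environment variables, or an instance profile). Note that the .NET Core build of the SDK only exposes async operations, so PutObjectAsync stands in for the PutObject call above:

using System;
using System.Threading.Tasks;
using Amazon;
using Amazon.S3;
using Amazon.S3.Model;

public static class S3PutExample
{
    public static async Task PutTestObjectAsync()
    {
        // Region, bucket, and key are placeholders; adjust to your own setup.
        var client = new AmazonS3Client(RegionEndpoint.USEast1);

        var request = new PutObjectRequest
        {
            ContentBody = "this is a test",
            BucketName = "my-example-bucket",
            Key = "example-key.txt"
        };

        // On .NET Core the SDK only provides async calls.
        PutObjectResponse response = await client.PutObjectAsync(request);
        Console.WriteLine(response.HttpStatusCode);
    }
}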
You first need to install in the Package Manager Console:
Install-package AWSSDK.Extensions.NETCORE.Setup
Install-package AWSSDK.S3
Then you need to have the credentials file in the directory:
C:\Users\username\.aws\credentials
The credential file should have this format:
[default]
aws_access_key_id=[Write your access key in here]
aws_secret_access_key=[Write your secret access key in here]
region=[Write your region here]
I uploaded in github an example of a basic CRUD in ASP.NET CORE for S3 buckets.
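Since AWSSDK.Extensions.NETCORE.Setup is installed, one common way to create the client in ASP.NET Core is through dependency injection. A hedged sketch of Startup.ConfigureServices follows; reading AWS settings from configuration (e.g. an "AWS" section in appsettings.json) is an assumption, and the credentials file above is still honored by the default chain:

using Amazon.S3;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

public class Startup
{
    public Startup(IConfiguration configuration)
    {
        Configuration = configuration;
    }

    public IConfiguration Configuration { get; }

    public void ConfigureServices(IServiceCollection services)
    {
        // Pulls region/profile settings from configuration and the credentials file.
        services.AddDefaultAWSOptions(Configuration.GetAWSOptions());
        // Registers IAmazonS3 so controllers can request it in their constructors.
        services.AddAWSService<IAmazonS3>();

        services.AddMvc();
    }
}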
We came across an issue when using the high-level API in a .NET Core solution: when clients had low bandwidth (roughly 3 Mb/s), Amazon S3 threw an error ("The XML you provided was not well-formed"). To resolve this we had to switch to the low-level API.
https://docs.aws.amazon.com/en_us/AmazonS3/latest/dev/LLuploadFileDotNet.html
// Create list to store upload part responses.
List<UploadPartResponse> uploadResponses = new List<UploadPartResponse>();
// Setup information required to initiate the multipart upload.
InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest{
BucketName = bucketName,
Key = pathbucket
};
//Add metadata to file
string newDate = DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss");
// Initiate the upload.
InitiateMultipartUploadResponse initResponse = await s3Client.InitiateMultipartUploadAsync(initiateRequest);
int uploadmb = 5;
// Upload parts.
long contentLength = new FileInfo(zippath).Length;
long partSize = uploadmb * (long)Math.Pow(2, 20); // 5 MB
try
{
long filePosition = 0;
for (int i = 1; filePosition < contentLength; i++) {
UploadPartRequest uploadRequest = new UploadPartRequest{
BucketName = bucketName,
Key = pathbucket,
UploadId = initResponse.UploadId,
PartNumber = i,
PartSize = partSize,
FilePosition = filePosition,
FilePath = zippath
};
// Track upload progress.
uploadRequest.StreamTransferProgress += new EventHandler<StreamTransferProgressArgs>(UploadPartProgressEventCallback);
// Upload a part and add the response to our list.
uploadResponses.Add(await s3Client.UploadPartAsync(uploadRequest));
filePosition += partSize;
}
// Setup to complete the upload.
CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest {
BucketName = bucketName,
Key = pathbucket,
UploadId = initResponse.UploadId
};
completeRequest.AddPartETags(uploadResponses);
// Complete the upload.
CompleteMultipartUploadResponse completeUploadResponse = await s3Client.CompleteMultipartUploadAsync(completeRequest);
}
catch (Exception exception)
{
Console.WriteLine("An exception was thrown: {0}", exception.Message);
// Abort the upload so the incomplete parts are not left in the bucket.
AbortMultipartUploadRequest abortMPURequest = new AbortMultipartUploadRequest {
BucketName = bucketName,
Key = pathbucket, // same key used for the upload above
UploadId = initResponse.UploadId
};
await s3Client.AbortMultipartUploadAsync(abortMPURequest);
}