I've tried downloading a file from an FTP server in chunks using FtpWebRequest and then merging the chunks back into the original file. The process works fine for files of 4 GB or less, but when I try the same process with 8 or 9 GB files I get an error.
Here is the error I'm getting:

Here is the sample code I've worked out:
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;

private static long limitSize = Convert.ToInt64(ConfigurationManager.AppSettings["LimitSize"]);

public static void DownloadFromFTP()
{
    var guid = Guid.NewGuid().ToString();
    var path = $"{Environment.CurrentDirectory}/UploadedFiles/{guid}";
    try
    {
        string strFilePath = ConfigurationManager.AppSettings["FilePath"];
        NetworkCredential credentials = new NetworkCredential(
            ConfigurationManager.AppSettings["UserName"],
            ConfigurationManager.AppSettings["Password"]);
        Console.WriteLine("Starting...");

        string name = ConfigurationManager.AppSettings["FileName"];
        var strFolderPath = ConfigurationManager.AppSettings["Key"] + name;

        // Ask the server for the file size so the download can be split into chunks.
        FtpWebRequest sizeRequest = (FtpWebRequest)WebRequest.Create(strFilePath);
        sizeRequest.KeepAlive = false;
        sizeRequest.Credentials = credentials;
        sizeRequest.Method = WebRequestMethods.Ftp.GetFileSize;
        long size;
        using (WebResponse sizeResponse = sizeRequest.GetResponse())
        {
            size = sizeResponse.ContentLength;
        }
        Console.WriteLine($"File has {size} bytes");

        // chunks = ceil(size / LimitSize); each chunk is size / chunks bytes,
        // with the last chunk absorbing the integer-division remainder.
        double filesizecheck = Convert.ToDouble(size) / Convert.ToDouble(limitSize);
        int chunks = Convert.ToInt32(Math.Ceiling(filesizecheck));
        long chunkLength = size / chunks;

        List<Task> tasks = new List<Task>();
        if (!Directory.Exists(path))
        {
            Directory.CreateDirectory(path);
        }
        var filepath = $"{path}/{name}";
        for (int chunk = 0; chunk < chunks; chunk++)
        {
            int i = chunk; // per-iteration copy so each closure captures its own index
            tasks.Add(Task.Run(() =>
            {
                try
                {
                    FtpWebRequest request = (FtpWebRequest)WebRequest.Create(strFilePath);
                    request.Credentials = credentials;
                    request.Method = WebRequestMethods.Ftp.DownloadFile;
                    request.UseBinary = true;
                    request.UsePassive = true;
                    request.KeepAlive = false;
                    // Start this chunk's transfer at its offset within the remote file.
                    request.ContentOffset = chunkLength * i;
                    long toread =
                        (i < chunks - 1) ? chunkLength : size - request.ContentOffset;
                    Console.WriteLine(
                        $"Downloading chunk {i + 1}/{chunks} with {toread} bytes ...");

                    using (WebResponse response = request.GetResponse())
                    using (Stream ftpStream = response.GetResponseStream())
                    using (Stream fileStream = File.Create(filepath + "." + i))
                    {
                        var bufferSize = Convert.ToInt32(ConfigurationManager.AppSettings["BufferSize"]);
                        byte[] buffer = new byte[bufferSize];
                        int read;
                        // Read at most `toread` bytes in total so the chunk stops at its boundary.
                        while (((read = (int)Math.Min(buffer.Length, toread)) > 0) &&
                               ((read = ftpStream.Read(buffer, 0, read)) > 0))
                        {
                            fileStream.Write(buffer, 0, read);
                            toread -= read;
                        }
                    }
                    Console.WriteLine($"Downloaded chunk {i + 1}/{chunks}");
                }
                catch (Exception ex)
                {
                    // Log only; a Console.ReadKey() here would block the worker thread.
                    Console.WriteLine($"Exception: {ex}");
                }
            }));
        }
Console.WriteLine("Started all chunks downloads, waiting for them to complete...");
Task.WaitAll(tasks.ToArray());
CombineMultipleFilesIntoSingleFile(path, filepath);
var result = UploadToS3(filepath, strFolderPath, size, path).Result;
Console.WriteLine("Done");
Console.ReadKey();
}
catch (Exception ex)
{
Console.WriteLine("Exception " + ex.Message);
DeleteFiles(path);
Console.ReadKey();
}
Console.ReadKey();
Console.ReadKey();
}
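
For reference, here is a minimal standalone sketch of the chunk arithmetic used above, showing that the offsets and lengths tile the file exactly (the 9 GB size is just an illustrative value, not from a real run):

// Standalone check of the chunk partitioning logic above.
// The file size below is hypothetical; any positive value works.
long size = 9_000_000_000;      // e.g. a 9 GB file
long limitSize = 10_000_000;    // mirrors the LimitSize app setting
int chunks = (int)Math.Ceiling((double)size / limitSize);
long chunkLength = size / chunks;

long covered = 0;
for (int i = 0; i < chunks; i++)
{
    long offset = chunkLength * i;
    long length = (i < chunks - 1) ? chunkLength : size - offset;
    covered += length;
}
Console.WriteLine($"{chunks} chunks cover {covered} of {size} bytes"); // covered == size

Since the last chunk is defined as size minus the sum of the preceding offsets, the partition always covers the file exactly, regardless of the integer-division remainder.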
Here is the method that merges the chunk files back together:
private static void CombineMultipleFilesIntoSingleFile(string inputDirectoryPath, string outputFilePath)
{
    // Directory.GetFiles makes no ordering guarantee, and a plain name sort puts
    // "file.10" before "file.2", so order the chunks by their numeric suffix.
    string[] inputFilePaths = Directory.GetFiles(inputDirectoryPath)
        .OrderBy(p => int.Parse(Path.GetExtension(p).TrimStart('.')))
        .ToArray();
    Console.WriteLine("Number of files: {0}.", inputFilePaths.Length);
    using (var outputStream = File.Create(outputFilePath))
    {
        foreach (var inputFilePath in inputFilePaths)
        {
            using (var inputStream = File.OpenRead(inputFilePath))
            {
                // A buffer size can be passed as the second argument to CopyTo.
                inputStream.CopyTo(outputStream);
            }
            Console.WriteLine("The file {0} has been processed.", inputFilePath);
        }
    }
}
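
To sanity-check the merge, the combined file's length can be compared with the size the FTP server reported via GetFileSize before the result is uploaded. A minimal sketch (VerifyMergedFile is a hypothetical helper, not part of the code above):

// Hypothetical helper: confirm the merged file is exactly as large as the
// remote file before handing it to UploadToS3.
private static void VerifyMergedFile(string outputFilePath, long expectedSize)
{
    long actual = new FileInfo(outputFilePath).Length;
    if (actual != expectedSize)
    {
        throw new IOException($"Merged file is {actual} bytes, expected {expectedSize}.");
    }
}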
App.config settings:
<add key="LimitSize" value="10000000"/>
<add key="BufferSize" value="10240"/>
