Fast .NET CLI Downloader

There are plenty of GUI download managers, but I wanted a simple one that I can call from the command line.

With the relatively new `dotnet run app.cs` support for running single-file C# scripts, it became much easier to build one.

Here is the full source code. Make sure the file is executable, and then you can run it with ./downloader.cs https://example.com/someLargeFile.iso. It downloads the file in parallel parts to the current directory, then stitches the parts together. If you have to cancel a download, running the same command again resumes from where it left off.

Downloader CLI
Downloader CLI
  1#!/usr/bin/dotnet run
  2
  3using System.Diagnostics;
  4using System.Net.Http.Headers;
  5
  6const int DefaultChunks = 8;
  7const int MaxRetries = 5;
  8const int RetryDelayMs = 1000;
  9const int ProgressUpdateMs = 250;
 10
 11if (args.Length < 1)
 12{
 13    Console.Error.WriteLine("Usage: dotnet run downloader.cs <url> [output-file] [chunks]");
 14    Console.Error.WriteLine("  url          - URL to download");
 15    Console.Error.WriteLine("  output-file  - Output filename (default: derived from URL)");
 16    Console.Error.WriteLine("  chunks       - Number of parallel streams (default: 8)");
 17    return 1;
 18}
 19
 20var url = args[0];
 21var outputFile = args.Length > 1 ? args[1] : Path.GetFileName(new Uri(url).LocalPath);
 22var chunks = args.Length > 2 ? int.Parse(args[2]) : DefaultChunks;
 23
 24if (string.IsNullOrWhiteSpace(outputFile) || outputFile == "/")
 25    outputFile = "download";
 26
 27Console.WriteLine($"URL:     {url}");
 28Console.WriteLine($"Output:  {outputFile}");
 29Console.WriteLine($"Streams: {chunks}");
 30Console.WriteLine();
 31
 32using var client = new HttpClient { Timeout = TimeSpan.FromMinutes(30) };
 33client.DefaultRequestHeaders.UserAgent.ParseAdd("DownloaderCLI/1.0");
 34
 35// Probe the server for content-length and range support
 36using var headReq = new HttpRequestMessage(HttpMethod.Head, url);
 37using var headResp = await client.SendAsync(headReq);
 38headResp.EnsureSuccessStatusCode();
 39
 40var totalSize = headResp.Content.Headers.ContentLength ?? -1;
 41var acceptRanges = headResp.Headers.Contains("Accept-Ranges")
 42    && headResp.Headers.GetValues("Accept-Ranges").Any(v => v.Contains("bytes", StringComparison.OrdinalIgnoreCase));
 43
 44if (totalSize <= 0 || !acceptRanges)
 45{
 46    Console.WriteLine(totalSize <= 0
 47        ? "Server did not report content length - falling back to single-stream download."
 48        : "Server does not support range requests - falling back to single-stream download.");
 49    Console.WriteLine();
 50    await SingleStreamDownload(client, url, outputFile);
 51    return 0;
 52}
 53
 54Console.WriteLine($"Size:    {FormatBytes(totalSize)}");
 55Console.WriteLine($"Ranges:  supported");
 56Console.WriteLine();
 57
 58var sw = Stopwatch.StartNew();
 59var chunkInfos = BuildChunks(totalSize, chunks);
 60var progress = new long[chunkInfos.Count];
 61
 62// Progress reporter
 63using var cts = new CancellationTokenSource();
 64var progressTask = Task.Run(async () =>
 65{
 66    while (!cts.Token.IsCancellationRequested)
 67    {
 68        PrintProgress(progress, chunkInfos, totalSize, sw.Elapsed);
 69        try { await Task.Delay(ProgressUpdateMs, cts.Token); } catch (TaskCanceledException) { break; }
 70    }
 71});
 72
 73// Download all chunks in parallel
 74var tempFiles = new string[chunkInfos.Count];
 75var downloadTasks = new Task[chunkInfos.Count];
 76
 77for (int i = 0; i < chunkInfos.Count; i++)
 78{
 79    var idx = i;
 80    var (start, end) = chunkInfos[idx];
 81    tempFiles[idx] = $"{outputFile}.part{idx}";
 82    downloadTasks[idx] = DownloadChunk(client, url, start, end, tempFiles[idx], progress, idx);
 83}
 84
 85await Task.WhenAll(downloadTasks);
 86
 87cts.Cancel();
 88await progressTask;
 89PrintProgress(progress, chunkInfos, totalSize, sw.Elapsed);
 90Console.WriteLine();
 91Console.WriteLine();
 92
 93// Reassemble
 94Console.Write("Reassembling... ");
 95await Reassemble(tempFiles, outputFile);
 96Console.WriteLine("done.");
 97
 98// Cleanup temp files
 99foreach (var f in tempFiles)
100    if (File.Exists(f)) File.Delete(f);
101
102sw.Stop();
103var info = new FileInfo(outputFile);
104Console.WriteLine($"Completed in {sw.Elapsed.TotalSeconds:F1}s - {FormatBytes(info.Length)} @ {FormatBytes((long)(info.Length / sw.Elapsed.TotalSeconds))}/s");
105return 0;
106
107// ---- helper methods ----
108
// Splits [0, totalSize) into `count` contiguous, inclusive byte ranges.
// The last chunk absorbs the remainder of the integer division.
static List<(long Start, long End)> BuildChunks(long totalSize, int count)
{
    // FIX: clamp the chunk count so a request for more chunks than bytes
    // (or a non-positive count) cannot produce empty/inverted ranges or a
    // divide-by-zero. The caller sizes everything off the returned list's
    // Count, so returning fewer chunks than requested is safe.
    count = (int)Math.Clamp(count, 1L, Math.Max(totalSize, 1L));

    var chunkSize = totalSize / count;
    var result = new List<(long, long)>(count);
    for (int i = 0; i < count; i++)
    {
        var start = i * chunkSize;
        // Last chunk runs to the final byte; others end just before the next start.
        var end = (i == count - 1) ? totalSize - 1 : start + chunkSize - 1;
        result.Add((start, end));
    }
    return result;
}
121
// Downloads bytes [start, end] of `url` into `tempFile`, publishing running
// byte counts into progress[index]. Retries with linear backoff; an existing
// part file is resumed rather than re-downloaded. The final attempt's
// exception escapes to the caller via Task.WhenAll.
static async Task DownloadChunk(HttpClient client, string url, long start, long end,
    string tempFile, long[] progress, int index)
{
    var chunkLength = end - start + 1;

    for (int attempt = 1; attempt <= MaxRetries; attempt++)
    {
        try
        {
            // Resume from where we left off if retrying
            long existingBytes = 0;
            if (File.Exists(tempFile))
            {
                existingBytes = new FileInfo(tempFile).Length;
                if (existingBytes >= chunkLength)
                {
                    progress[index] = chunkLength;
                    return; // already complete
                }
            }

            // Publish resumed progress immediately so the display does not
            // drop to zero while the request is in flight.
            Interlocked.Exchange(ref progress[index], existingBytes);

            using var req = new HttpRequestMessage(HttpMethod.Get, url);
            req.Headers.Range = new RangeHeaderValue(start + existingBytes, end);

            using var resp = await client.SendAsync(req, HttpCompletionOption.ResponseHeadersRead);
            resp.EnsureSuccessStatusCode();

            // FIX: a server may advertise range support yet answer 200 OK with
            // the entire resource. Appending that to a part file silently
            // corrupts the final output, so fail loudly instead.
            if (resp.StatusCode != System.Net.HttpStatusCode.PartialContent)
                throw new InvalidOperationException(
                    $"Server ignored the Range request (returned {(int)resp.StatusCode}).");

            await using var stream = await resp.Content.ReadAsStreamAsync();
            await using var fs = new FileStream(tempFile, existingBytes > 0 ? FileMode.Append : FileMode.Create,
                FileAccess.Write, FileShare.None, 81920, useAsync: true);

            var buffer = new byte[81920];
            long downloaded = existingBytes;
            int bytesRead;

            while ((bytesRead = await stream.ReadAsync(buffer)) > 0)
            {
                await fs.WriteAsync(buffer.AsMemory(0, bytesRead));
                downloaded += bytesRead;
                // Exchange gives the reporter thread a clean read of the latest value.
                Interlocked.Exchange(ref progress[index], downloaded);
            }

            return; // success
        }
        catch (Exception ex) when (attempt < MaxRetries)
        {
            Console.Error.WriteLine($"\n  [chunk {index}] attempt {attempt} failed: {ex.Message} - retrying...");
            await Task.Delay(RetryDelayMs * attempt); // linear backoff
        }
    }
}
171
// Sequential fallback used when the server reports no content length or no
// range support. Streams straight to disk with periodic progress output.
static async Task SingleStreamDownload(HttpClient client, string url, string outputFile)
{
    var sw = Stopwatch.StartNew();
    using var resp = await client.GetAsync(url, HttpCompletionOption.ResponseHeadersRead);
    resp.EnsureSuccessStatusCode();

    // -1 signals "unknown length"; the progress line then omits the total.
    var total = resp.Content.Headers.ContentLength ?? -1;
    await using var stream = await resp.Content.ReadAsStreamAsync();
    await using var fs = new FileStream(outputFile, FileMode.Create, FileAccess.Write, FileShare.None, 81920, true);

    var buffer = new byte[81920];
    long downloaded = 0;
    int bytesRead;
    var lastUpdate = DateTimeOffset.UtcNow;

    while ((bytesRead = await stream.ReadAsync(buffer)) > 0)
    {
        await fs.WriteAsync(buffer.AsMemory(0, bytesRead));
        downloaded += bytesRead;

        // Throttle console redraws to at most one per ProgressUpdateMs.
        if ((DateTimeOffset.UtcNow - lastUpdate).TotalMilliseconds >= ProgressUpdateMs)
        {
            lastUpdate = DateTimeOffset.UtcNow;
            var pct = total > 0 ? (double)downloaded / total * 100 : 0;
            // FIX: guard the denominator - for a near-instant first read the
            // elapsed time is ~0, and dividing yields Infinity whose cast to
            // long produces a garbage speed figure.
            var speed = downloaded / Math.Max(sw.Elapsed.TotalSeconds, 0.001);
            Console.Write($"\r  [{pct,5:F1}%] {FormatBytes(downloaded)}{(total > 0 ? $" / {FormatBytes(total)}" : "")}  {FormatBytes((long)speed)}/s   ");
        }
    }

    sw.Stop();
    // Same guard for the final average: tiny files can complete in ~0 elapsed time.
    var avgSpeed = downloaded / Math.Max(sw.Elapsed.TotalSeconds, 0.001);
    Console.WriteLine($"\r  [100.0%] {FormatBytes(downloaded)}  {FormatBytes((long)avgSpeed)}/s - done.          ");
}
204
// Concatenates every part file, in the order given, into the final output file.
static async Task Reassemble(string[] parts, string outputFile)
{
    await using var destination = new FileStream(
        outputFile, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true);

    for (var i = 0; i < parts.Length; i++)
    {
        await using var source = new FileStream(
            parts[i], FileMode.Open, FileAccess.Read, FileShare.Read, 81920, useAsync: true);
        await source.CopyToAsync(destination);
    }
}
214
// Redraws the single-line progress display: overall percentage, byte counts,
// speed, ETA, and one 8-cell mini bar per chunk. Reads progress[] written
// concurrently by the download tasks (plain reads are fine for display).
static void PrintProgress(long[] progress, List<(long Start, long End)> chunks, long totalSize, TimeSpan elapsed)
{
    long totalDownloaded = progress.Sum();
    var pct = (double)totalDownloaded / totalSize * 100;
    var speed = elapsed.TotalSeconds > 0 ? totalDownloaded / elapsed.TotalSeconds : 0;

    // FIX: early in a transfer the measured speed can be a tiny positive
    // number, making remaining/speed astronomically large; TimeSpan.FromSeconds
    // then throws OverflowException and kills the reporter task. Clamp the
    // estimate before converting (the mm:ss format wraps beyond an hour anyway).
    var etaSeconds = speed > 0 ? (totalSize - totalDownloaded) / speed : 0;
    var eta = TimeSpan.FromSeconds(Math.Clamp(etaSeconds, 0, TimeSpan.FromDays(1).TotalSeconds));

    // Per-chunk mini bars
    var bars = new List<string>();
    for (int i = 0; i < chunks.Count; i++)
    {
        var chunkSize = chunks[i].End - chunks[i].Start + 1;
        var chunkPct = (double)progress[i] / chunkSize;
        const int miniBarWidth = 8;
        // Any progress at all shows at least one filled cell.
        var filled = chunkPct <= 0
            ? 0
            : Math.Clamp((int)Math.Ceiling(chunkPct * miniBarWidth), 1, miniBarWidth);
        bars.Add(new string('█', filled) + new string('░', miniBarWidth - filled));
    }

    Console.Write($"\r  [{pct,5:F1}%] {FormatBytes(totalDownloaded)} / {FormatBytes(totalSize)}  " +
                  $"{FormatBytes((long)speed)}/s  ETA {eta:mm\\:ss}  " +
                  $"[{string.Join('|', bars)}]   ");
}
239
// Renders a byte count as a human-readable string with one decimal place,
// e.g. 1536 -> "1.5 KB". Values of 1024 TB or more stay in TB.
static string FormatBytes(long bytes)
{
    double size = bytes;
    foreach (var suffix in new[] { "B", "KB", "MB", "GB" })
    {
        if (size < 1024)
            return $"{size:F1} {suffix}";
        size /= 1024;
    }
    return $"{size:F1} TB";
}

Let me know if this is of use to you.

comments powered by Disqus

Related