From d82f082e18c10592644f1ff9b4313892d0010a24 Mon Sep 17 00:00:00 2001
From: reito
Date: Mon, 16 Oct 2023 01:58:48 +0800
Subject: [PATCH] =?UTF-8?q?=E6=9B=B4=E6=96=B0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 Updator.Common/CompressionProvider/Brotli.cs |   2 +-
 Updator.Common/StorageProvider/TencentCos.cs |   2 +-
 Updator.Downloader.CLI/DownloaderMeta.cs     |   2 +-
 Updator.Downloader.CLI/Program.cs            |   2 +-
 Updator.Downloader.CLI/Strings.Designer.cs   |   9 +
 Updator.Downloader.CLI/Strings.resx          |   3 +
 Updator.Downloader.CLI/Strings.zh.resx       |   3 +
 Updator.Uploader/Program.cs                  | 388 +++++++++++--------
 8 files changed, 237 insertions(+), 174 deletions(-)

diff --git a/Updator.Common/CompressionProvider/Brotli.cs b/Updator.Common/CompressionProvider/Brotli.cs
index 3d01347..109986c 100644
--- a/Updator.Common/CompressionProvider/Brotli.cs
+++ b/Updator.Common/CompressionProvider/Brotli.cs
@@ -10,7 +10,7 @@ public async Task Compress(Stream src, Stream dst) {
       await zipStream.FlushAsync();
       await dst.FlushAsync();
       await zipStream.DisposeAsync();
-      zipStream.Close();
+      zipStream.Close();
    }

    public async Task Decompress(Stream src, Stream dst) {
diff --git a/Updator.Common/StorageProvider/TencentCos.cs b/Updator.Common/StorageProvider/TencentCos.cs
index cde47d1..ba7e072 100644
--- a/Updator.Common/StorageProvider/TencentCos.cs
+++ b/Updator.Common/StorageProvider/TencentCos.cs
@@ -205,7 +205,7 @@ public async Task RefreshRoot() {
       PurgePathCacheRequest req = new PurgePathCacheRequest() {
          Paths = new[] {_config.cdnRefreshPath},
          UrlEncode = true,
-         FlushType = "delete"
+         FlushType = "flush"
       };

       if (req.Paths.Length == 0)
diff --git a/Updator.Downloader.CLI/DownloaderMeta.cs b/Updator.Downloader.CLI/DownloaderMeta.cs
index efcebda..3730c7a 100644
--- a/Updator.Downloader.CLI/DownloaderMeta.cs
+++ b/Updator.Downloader.CLI/DownloaderMeta.cs
@@ -1,5 +1,5 @@
 namespace Updator.Downloader.CLI;

 public class DownloaderMeta {
-   public const int Version = 33;
+   public const int Version = 34;
 }
\ No newline at end of file
diff --git a/Updator.Downloader.CLI/Program.cs b/Updator.Downloader.CLI/Program.cs
index c0030d3..dce5aca 100644
--- a/Updator.Downloader.CLI/Program.cs
+++ b/Updator.Downloader.CLI/Program.cs
@@ -343,7 +343,7 @@ await AnsiConsole.Progress()
       if (desc.reinstallBuildId is {Count: > 0}) {
          foreach (var id in desc.reinstallBuildId) {
             if (oldDesc.buildId < id) {
-               AnsiConsole.Write($"[yellow]Removing existing files...[/]");
+               AnsiConsole.MarkupLine(Strings.RemoveOld);
                Directory.Delete(distRoot, true);
                break;
             }
diff --git a/Updator.Downloader.CLI/Strings.Designer.cs b/Updator.Downloader.CLI/Strings.Designer.cs
index 57f1e21..aa7314a 100644
--- a/Updator.Downloader.CLI/Strings.Designer.cs
+++ b/Updator.Downloader.CLI/Strings.Designer.cs
@@ -149,6 +149,15 @@ internal static string No {
             }
         }

+        /// <summary>
+        ///   Looks up a localized string similar to [yellow]Removing existing files...[/].
+        /// </summary>
+        internal static string RemoveOld {
+            get {
+                return ResourceManager.GetString("RemoveOld", resourceCulture);
+            }
+        }
+
         /// <summary>
         ///   Looks up a localized string similar to [palegreen1]Self update succeed.[/].
         /// </summary>
diff --git a/Updator.Downloader.CLI/Strings.resx b/Updator.Downloader.CLI/Strings.resx
index 6882961..11b5ac0 100644
--- a/Updator.Downloader.CLI/Strings.resx
+++ b/Updator.Downloader.CLI/Strings.resx
@@ -75,4 +75,7 @@
     <value>[red]Cannot write new version back, please close/delete the old version manually.[/]</value>
   </data>
+  <data name="RemoveOld" xml:space="preserve">
+    <value>[yellow]Removing existing files...[/]</value>
+  </data>
 </root>
\ No newline at end of file
diff --git a/Updator.Downloader.CLI/Strings.zh.resx b/Updator.Downloader.CLI/Strings.zh.resx
index e833395..1c34f5c 100644
--- a/Updator.Downloader.CLI/Strings.zh.resx
+++ b/Updator.Downloader.CLI/Strings.zh.resx
@@ -68,4 +68,7 @@
     <value>[red]无法将新版本写回原位置,请检查老版本窗口是否已关闭,或尝试手动删除老版本[/]</value>
   </data>
+  <data name="RemoveOld" xml:space="preserve">
+    <value>[yellow]本次版本升级将移除老文件[/]</value>
+  </data>
 </root>
\ No newline at end of file
diff --git a/Updator.Uploader/Program.cs b/Updator.Uploader/Program.cs
index 03a0d5a..4bb8731 100644
--- a/Updator.Uploader/Program.cs
+++ b/Updator.Uploader/Program.cs
@@ -2,6 +2,7 @@
 using System.Collections.Concurrent;
 using System.Text;
+using System.Text.Encodings.Web;
 using System.Text.Json;
 using System.Text.Json.Serialization;
 using CommandLine;
@@ -13,26 +14,30 @@
 using Uploader.StorageProvider;

 // Initialize logger
-ILogger logger = LoggerFactory.Create(builder => {
-      builder.AddSimpleConsole(o => {
-         o.IncludeScopes = true;
-         o.TimestampFormat = "HH:mm:ss ";
-         o.SingleLine = true;
-      });
-      builder.AddFile("./uploader.log");
-      builder.SetMinimumLevel(LogLevel.Trace);
-   })
-   .CreateLogger("Uploader");
-
-var parsed = new Parser(a => {
-   a.AllowMultiInstance = true;
-   a.IgnoreUnknownArguments = true;
+ILogger logger = LoggerFactory.Create(builder =>
+{
+    builder.AddSimpleConsole(o =>
+    {
+        o.IncludeScopes = true;
+        o.TimestampFormat = "HH:mm:ss ";
+        o.SingleLine = true;
+    });
+    builder.AddFile("./uploader.log");
+    builder.SetMinimumLevel(LogLevel.Trace);
+}).CreateLogger("Uploader");
+
+var parsed = new Parser(a =>
+{
+    a.AllowMultiInstance = true;
+    a.IgnoreUnknownArguments = true;
 }).ParseArguments(args);
-if (parsed.Value == null) {
-   foreach (var e in parsed.Errors) {
-      logger.LogError(e.ToString());
-   }
-   return -1;
+if (parsed.Value == null)
+{
+    foreach (var e in parsed.Errors)
+    {
+        logger.LogError(e.ToString());
+    }
+    return -1;
 }

 var options = parsed.Value;
@@ -41,67 +46,82 @@
 var configString = string.Empty;
 var configPath = string.Empty;

-if (!string.IsNullOrWhiteSpace(options.ConfigFile)) {
-   if (File.Exists(options.ConfigFile)) {
-      logger.LogInformation($"Using config file: {options.ConfigFile}");
-      configString = File.ReadAllText(options.ConfigFile);
-      configPath = options.ConfigFile;
-   } else {
-      logger.LogError("Specified path is not a config file");
-      return -1;
-   }
+if (!string.IsNullOrWhiteSpace(options.ConfigFile))
+{
+    if (File.Exists(options.ConfigFile))
+    {
+        logger.LogInformation($"Using config file: {options.ConfigFile}");
+        configString = File.ReadAllText(options.ConfigFile);
+        configPath = options.ConfigFile;
+    }
+    else
+    {
+        logger.LogError("Specified path is not a config file");
+        return -1;
+    }
 }

-if (!string.IsNullOrWhiteSpace(options.Base64ConfigFile)) {
-   logger.LogInformation($"Using base64 config file");
-   configString = Encoding.UTF8.GetString(Convert.FromBase64String(options.Base64ConfigFile));
+if (!string.IsNullOrWhiteSpace(options.Base64ConfigFile))
+{
+    logger.LogInformation($"Using base64 config file");
+    configString = Encoding.UTF8.GetString(Convert.FromBase64String(options.Base64ConfigFile));
 }

-if (!File.Exists("./config.json")) {
-   File.WriteAllText("./config.json", JsonSerializer.Serialize(new Config(), new JsonSerializerOptions() {
-      DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
-      WriteIndented = true
-   }));
-   logger.LogInformation($"Config file not found, writing a default one.");
-   return -1;
+if (!File.Exists("./config.json"))
+{
+    File.WriteAllText("./config.json", JsonSerializer.Serialize(new Config(), new JsonSerializerOptions()
+    {
+        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
+        WriteIndented = true,
+        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
+    }));
+    logger.LogInformation($"Config file not found, writing a default one.");
+    return -1;
 }

-if (string.IsNullOrWhiteSpace(configString)) {
-   configString = File.ReadAllText("./config.json");
-   configPath = "./config.json";
+if (string.IsNullOrWhiteSpace(configString))
+{
+    configString = File.ReadAllText("./config.json");
+    configPath = "./config.json";
 }

 var config = JsonDocument.Parse(configString).Deserialize();

-if (!string.IsNullOrWhiteSpace(options.DistributionRoot)) {
-   logger.LogInformation($"Overwrite distributionRoot");
-   config.distributionRoot = options.DistributionRoot;
+if (!string.IsNullOrWhiteSpace(options.DistributionRoot))
+{
+    logger.LogInformation($"Overwrite distributionRoot");
+    config.distributionRoot = options.DistributionRoot;
 }

 // Initialize providers
 logger.LogInformation($"Providers: {config.storage} {config.checksum} {config.compression}");
-IStorageProvider storage = config.storage switch {
-   "cos" => new TencentCos(config.cos),
-   _ => null
+IStorageProvider storage = config.storage switch
+{
+    "cos" => new TencentCos(config.cos),
+    _ => null
 };

-if (storage == null) {
-   logger.LogError("No effective storage provider");
-   return -1;
+if (storage == null)
+{
+    logger.LogError("No effective storage provider");
+    return -1;
 }

-IChecksumProvider check = config.checksum switch {
-   "crc64" => new Crc64(),
-   _ => null
+IChecksumProvider check = config.checksum switch
+{
+    "crc64" => new Crc64(),
+    _ => null
 };

-if (check == null) {
-   logger.LogError("No effective checksum provider");
-   return -1;
+if (check == null)
+{
+    logger.LogError("No effective checksum provider");
+    return -1;
 }

-ICompressionProvider compress = config.compression switch {
-   "brotli" => new Brotli(),
-   "gzip" => new GZip(),
-   _ => new Raw()
+ICompressionProvider compress = config.compression switch
+{
+    "brotli" => new Brotli(),
+    "gzip" => new GZip(),
+    _ => new Raw()
 };

 // Scan files in the folder
@@ -110,149 +130,177 @@
 root.Scan();

 // Create description
-var desc = new DistDescription {
-   projectName = config.projectName,
-   versionString = config.versionString,
-   buildId = config.buildId,
-   channel = config.channel,
-   executable = config.executable,
-   compression = config.compression,
-   checksum = config.checksum,
-   updateLogs = config.updateLogs.ToList(),
-   passBuildId = config.passBuildId
+var desc = new DistDescription
+{
+    projectName = config.projectName,
+    versionString = config.versionString,
+    buildId = config.buildId,
+    channel = config.channel,
+    executable = config.executable,
+    compression = config.compression,
+    checksum = config.checksum,
+    updateLogs = config.updateLogs?.ToList(),
+    passBuildId = config.passBuildId,
+    reinstallBuildId = config.reinstallBuildId?.ToList()
 };

 // Check old description
 var compressionMismatch = true;
 var storageDesc = new MemoryStream();
 await storage.DownloadAsync("__description.json", storageDesc);
-if (storageDesc.Length != 0) {
-   try {
-      var oldDesc = JsonDocument.Parse(storageDesc.ToArray()).Deserialize();
-      if (config.autoIncreaseBuildId) {
-         logger.LogInformation("Auto updating build id");
-         if (desc.buildId > oldDesc.buildId) {
-            logger.LogInformation(
-               $"Forced build id {desc.buildId} because it is larger than existing {oldDesc.buildId}");
-         } else {
-            desc.buildId = oldDesc.buildId + 1;
-            config.buildId = desc.buildId;
-            logger.LogInformation($"Successfully increased build id {desc.buildId}");
-         }
-      }
-      // If compression methods are same, it can skip re-upload
-      if (oldDesc.compression == desc.compression) {
-         compressionMismatch = false;
-         logger.LogInformation($"Compression type match");
-      }
-   } catch (Exception) {
-      // ignored
-   }
-} else {
-   logger.LogWarning("Failed to get storage description");
+if (storageDesc.Length != 0)
+{
+    try
+    {
+        var oldDesc = JsonDocument.Parse(storageDesc.ToArray()).Deserialize();
+        if (config.autoIncreaseBuildId)
+        {
+            logger.LogInformation("Auto updating build id");
+            if (desc.buildId > oldDesc.buildId)
+            {
+                logger.LogInformation($"Forced build id {desc.buildId} because it is larger than existing {oldDesc.buildId}");
+            }
+            else
+            {
+                desc.buildId = oldDesc.buildId + 1;
+                config.buildId = desc.buildId;
+                logger.LogInformation($"Successfully increased build id {desc.buildId}");
+            }
+        }
+        // If compression methods are same, it can skip re-upload
+        if (oldDesc.compression == desc.compression)
+        {
+            compressionMismatch = false;
+            logger.LogInformation($"Compression type match");
+        }
+    }
+    catch (Exception)
+    {
+        // ignored
+    }
+}
+else
+{
+    logger.LogWarning("Failed to get storage description");
 }

 // Upload files concurrently.
 ConcurrentBag uploadedObjectKeys = new();
 ConcurrentBag descFiles = new();
-await Parallel.ForEachAsync(root.Items, async (item, _) => {
-   using var ms = new MemoryStream();
-   var fs = item.fileInfo.OpenRead();
-   await compress.Compress(fs, ms);
-   await fs.DisposeAsync();
-   fs.Close();
-   ms.Position = 0;
-   var checksum = await check.CalculateChecksum(ms);
-
-   descFiles.Add(new() {
-      checksum = checksum,
-      objectKey = item.storageObjectKey
-   });
-
-   var upload = false;
-
-   // Check if need re-upload
-   if (compressionMismatch) {
-      upload = true;
-   } else {
-      var same = await storage.CheckSameAsync(item.storageObjectKey, checksum);
-      logger.LogTrace($"Checking {item.storageObjectKey} -> {(same ? "same" : "not same")}");
-      if (!same) {
-         upload = true;
-      }
-   }
-
-   if (upload) {
-      logger.LogTrace($"Uploading {item.storageObjectKey}");
-      ms.Position = 0;
-      await storage.UploadAsync(item.storageObjectKey, ms);
-      logger.LogTrace($"Uploaded {item.storageObjectKey} -> done");
-      uploadedObjectKeys.Add(item.storageObjectKey);
-   }
+await Parallel.ForEachAsync(root.Items, async (item, _) =>
+{
+    using var ms = new MemoryStream();
+    var fs = item.fileInfo.OpenRead();
+    await compress.Compress(fs, ms);
+    await fs.DisposeAsync();
+    fs.Close();
+    ms.Position = 0;
+    var checksum = await check.CalculateChecksum(ms);
+
+    descFiles.Add(new()
+    {
+        checksum = checksum,
+        objectKey = item.storageObjectKey
+    });
+
+    var upload = false;
+
+    // Check if need re-upload
+    if (compressionMismatch)
+    {
+        upload = true;
+    }
+    else
+    {
+        var same = await storage.CheckSameAsync(item.storageObjectKey, checksum);
+        logger.LogTrace($"Checking {item.storageObjectKey} -> {(same ? "same" : "not same")}");
+        if (!same)
+        {
+            upload = true;
+        }
+    }
+
+    if (upload)
+    {
+        logger.LogTrace($"Uploading {item.storageObjectKey}");
+        ms.Position = 0;
+        await storage.UploadAsync(item.storageObjectKey, ms);
+        logger.LogTrace($"Uploaded {item.storageObjectKey} -> done");
+        uploadedObjectKeys.Add(item.storageObjectKey);
+    }
 });

 desc.files.AddRange(descFiles);

 // Write logs if there's
-if (options.UpdateLogs != null) {
-   var updateLogs = options.UpdateLogs.Where(a => !string.IsNullOrWhiteSpace(a)).Distinct().ToList();
-   if (updateLogs.Any()) {
-      logger.LogInformation($"Writing update logs.");
-      var updateLog = new DistUpdateLog() {
-         buildId = desc.buildId,
-         items = new() {
-            {"_", updateLogs}
-         },
-         versionString = desc.versionString
-      };
-      desc.updateLogs.Add(updateLog);
-      config.updateLogs.Add(updateLog);
-   }
+if (options.UpdateLogs != null)
+{
+    var updateLogs = options.UpdateLogs.Where(a => !string.IsNullOrWhiteSpace(a)).Distinct().ToList();
+    if (updateLogs.Any())
+    {
+        logger.LogInformation($"Writing update logs.");
+        var updateLog = new DistUpdateLog()
+        {
+            buildId = desc.buildId,
+            items = new()
+            {
+                { "_", updateLogs }
+            },
+            versionString = desc.versionString
+        };
+        desc.updateLogs.Add(updateLog);
+        config.updateLogs.Add(updateLog);
+    }
 }

-if (!string.IsNullOrWhiteSpace(configPath) && !options.NoWriteBack) {
-   logger.LogInformation($"Save update logs to config.json");
-   var configText = JsonSerializer.Serialize(config, new JsonSerializerOptions() {
-      DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
-      WriteIndented = true
-   });
-   File.WriteAllText(configPath, configText);
+if (!string.IsNullOrWhiteSpace(configPath) && !options.NoWriteBack)
+{
+    logger.LogInformation($"Save update logs to config.json");
+    var configText = JsonSerializer.Serialize(config, new JsonSerializerOptions()
+    {
+        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
+        WriteIndented = true,
+        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
+    });
+    File.WriteAllText(configPath, configText);
 }

 // Write description
-var descText = JsonSerializer.Serialize(desc, new JsonSerializerOptions() {
-   DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
-   WriteIndented = true
+var descText = JsonSerializer.Serialize(desc, new JsonSerializerOptions()
+{
+    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
+    WriteIndented = true,
+    Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
 });
 await storage.UploadAsync("__description.json", new MemoryStream(Encoding.UTF8.GetBytes(descText)));
 uploadedObjectKeys.Add("__description.json");
 logger.LogInformation("Uploaded storage description");

 // Refresh CDN if provider has such interface
-if (storage is ICdnRefresh cdn) {
-   logger.LogInformation("Refresh CDN");
-   await cdn.RefreshObjectKeys(uploadedObjectKeys);
-   await cdn.RefreshRoot();
+if (storage is ICdnRefresh cdn)
+{
+    logger.LogInformation("Refresh CDN");
+    await cdn.RefreshObjectKeys(uploadedObjectKeys);
+    await cdn.RefreshRoot();
 }

 logger.LogInformation("Done");
 return 0;

-file class Options {
-   [Option("config", Required = false, HelpText = "Config file to be processed.")]
-   public string ConfigFile { get; set; }
+file class Options
+{
+    [Option("config", Required = false, HelpText = "Config file to be processed.")]
+    public string ConfigFile { get; set; }

-   [Option("base64", Required = false, HelpText = "Base64 encoded config file to be processed.")]
-   public string Base64ConfigFile { get; set; }
+    [Option("base64", Required = false, HelpText = "Base64 encoded config file to be processed.")]
+    public string Base64ConfigFile { get; set; }

-   [Option("distribution-root", Required = false, HelpText = "Override `distributionRoot`.")]
-   public string DistributionRoot { get; set; }
+    [Option("distribution-root", Required = false, HelpText = "Override `distributionRoot`.")]
+    public string DistributionRoot { get; set; }

-   [Option("update-log", Required = false, Default = null,
-      HelpText = "Add a line to update log in (set or auto-increased) `buildId` and current `versionString`.")]
-   public IEnumerable UpdateLogs { get; set; }
+    [Option("update-log", Required = false, Default = null, HelpText = "Add a line to update log in (set or auto-increased) `buildId` and current `versionString`.")]
+    public IEnumerable UpdateLogs { get; set; }

-   [Option("no-write-back", Required = false, Default = false,
-      HelpText = "Disable write updated config.json back to file")]
-   public bool NoWriteBack { get; set; }
-}
\ No newline at end of file
+    [Option("no-write-back", Required = false, Default = false, HelpText = "Disable write updated config.json back to file.")]
+    public bool NoWriteBack { get; set; }
+}