diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json new file mode 100644 index 0000000..da200cd --- /dev/null +++ b/.config/dotnet-tools.json @@ -0,0 +1,12 @@ +{ + "version": 1, + "isRoot": true, + "tools": { + "cake.tool": { + "version": "4.0.0", + "commands": [ + "dotnet-cake" + ] + } + } +} \ No newline at end of file diff --git a/.editorconfig b/.editorconfig index 20a49b6..a48cb6d 100644 --- a/.editorconfig +++ b/.editorconfig @@ -11,7 +11,7 @@ insert_final_newline = false trim_trailing_whitespace = true # .NET Code files -[*.{cs,csx,cake,vb}] +[*.{cs,csx,cake,vb,vbx,h,cpp,idl}] indent_style = tab tab_width = 4 insert_final_newline = true @@ -21,12 +21,12 @@ insert_final_newline = true indent_style = tab tab_width = 4 -# Visual Studio XML Project Files -[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}] +# MSBuild project files +[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj,msbuildproj}] indent_size = 2 -# Various XML Configuration Files -[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct}] +# Xml config files +[*.{xml,config,props,targets,nuspec,resx,ruleset,vsixmanifest,vsct,runsettings}] indent_size = 2 # JSON Files diff --git a/.gitattributes b/.gitattributes index bdb0cab..0940d5d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,17 +1,99 @@ # Auto detect text files and perform LF normalization * text=auto +# +# The above will handle all files NOT found below +# + +# Documents +*.bibtex text diff=bibtex +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain +*.md text +*.tex text diff=tex +*.adoc text +*.textile text +*.mustache text +*.csv text +*.tab text +*.tsv text +*.txt text +*.sql text + +# Graphics +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.tif binary +*.tiff binary +*.ico binary +# SVG treated as an asset (binary) by default. +*.svg text +# If you want to treat it as binary, +# use the following line instead. +# *.svg binary +*.eps binary + +# Scripts +*.bash text eol=lf +*.fish text eol=lf +*.sh text eol=lf +# These are explicitly windows files and should use crlf +*.bat text eol=crlf +*.cmd text eol=crlf +*.ps1 text eol=crlf + +# Serialisation +*.json text +*.toml text +*.xml text +*.yaml text +*.yml text + +# Archives +*.7z binary +*.gz binary +*.tar binary +*.tgz binary +*.zip binary + # Custom for Visual Studio -*.cs diff=csharp +*.cs diff=csharp +*.sln text eol=crlf +*.csproj text eol=crlf +*.vbproj text eol=crlf +*.vcxproj text eol=crlf +*.vcproj text eol=crlf +*.dbproj text eol=crlf +*.fsproj text eol=crlf +*.lsproj text eol=crlf +*.wixproj text eol=crlf +*.modelproj text eol=crlf +*.sqlproj text eol=crlf +*.wwaproj text eol=crlf +*.xproj text eol=crlf +*.props text eol=crlf +*.filters text eol=crlf +*.vcxitems text eol=crlf # Standard to msysgit -*.doc diff=astextplain -*.DOC diff=astextplain -*.docx diff=astextplain -*.DOCX diff=astextplain -*.dot diff=astextplain -*.DOT diff=astextplain -*.pdf diff=astextplain -*.PDF diff=astextplain -*.rtf diff=astextplain -*.RTF diff=astextplain +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain + +# The macOS codesign tool is extremely picky, and requires LF line endings. 
+*.plist eol=lf diff --git a/.gitignore b/.gitignore index 4e35409..5fa8eb2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,11 @@ -# Created with the help of https://www.gitignore.io/api/visualstudio -# Edit at https://www.gitignore.io/?templates=visualstudio +# Created with the help of https://www.toptal.com/developers/gitignore/api/visualstudio (formerly https://www.gitignore.io/api/visualstudio) +# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudio ### VisualStudio ### ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. ## -## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore # User-specific files *.rsuser @@ -27,12 +27,14 @@ mono_crash.* [Rr]eleases/ x64/ x86/ +[Ww][Ii][Nn]32/ [Aa][Rr][Mm]/ [Aa][Rr][Mm]64/ bld/ [Bb]in/ [Oo]bj/ [Ll]og/ +[Ll]ogs/ # Visual Studio 2015/2017 cache/options directory .vs/ @@ -64,6 +66,9 @@ project.lock.json project.fragment.lock.json artifacts/ +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + # StyleCop StyleCopReport.xml @@ -72,6 +77,7 @@ StyleCopReport.xml *_p.c *_h.h *.ilk +*.meta *.obj *.iobj *.pch @@ -88,6 +94,7 @@ StyleCopReport.xml *.tmp_proj *_wpftmp.csproj *.log +*.tlog *.vspscc *.vssscc .builds @@ -129,9 +136,6 @@ _ReSharper*/ *.[Rr]e[Ss]harper *.DotSettings.user -# JustCode is a .NET coding add-in -.JustCode - # TeamCity is a build add-in _TeamCity* @@ -142,6 +146,11 @@ _TeamCity* .axoCover/* !.axoCover/settings.json +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + # Visual Studio code coverage results *.coverage *.coveragexml @@ -289,6 +298,15 @@ node_modules/ # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) *.vbw +# Visual Studio 6 auto-generated project file (contains which files were open etc.) 
+*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files + # Visual Studio LightSwitch build output **/*.HTMLClient/GeneratedArtifacts **/*.DesktopClient/GeneratedArtifacts @@ -311,9 +329,10 @@ paket-files/ __pycache__/ *.pyc -# Cake - Uncomment if you are using it +# Cake +.cake/** tools/** -!tools/packages.config +BuildArtifacts/ # Tabs Studio *.tss @@ -345,13 +364,46 @@ ASALocalRun/ # Local History for Visual Studio .localhistory/ +# Visual Studio History (VSHistory) files +.vshistory/ + # BeatPulse healthcheck temp database healthchecksdb # Backup folder for Package Reference Convert tool in Visual Studio 2017 MigrationBackup/ +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml + +### VisualStudio Patch ### +# Additional files built by Visual Studio + # WinMerge *.bak -# End of https://www.gitignore.io/api/visualstudio \ No newline at end of file +# End of https://www.toptal.com/developers/gitignore/api/visualstudio \ No newline at end of file diff --git a/GitReleaseManager.yaml b/GitReleaseManager.yaml index 689d616..b282b60 100644 --- a/GitReleaseManager.yaml +++ b/GitReleaseManager.yaml @@ -26,6 +26,7 @@ issue-labels-include: - Bug - New Feature - Improvement + - Enhancement - Documentation - Security issue-labels-exclude: diff --git a/GitVersion.yml b/GitVersion.yml index 366d8f2..c43da67 100644 --- a/GitVersion.yml +++ b/GitVersion.yml @@ -1,16 +1,23 @@ mode: ContinuousDelivery branches: + master: + regex: (master|main) + mode: ContinuousDelivery + tag: + increment: Patch + prevent-increment-of-merged-branch-version: true + track-merge-target: false feature: regex: feature(s)?[/-] mode: ContinuousDeployment develop: regex: dev(elop)?(ment)?$ mode: ContinuousDeployment - tag: alpha + tag: beta hotfix: regex: hotfix(es)?[/-] mode: ContinuousDeployment - tag: beta + tag: hotfix release: regex: release(s)?[/-] mode: ContinuousDeployment diff --git a/README.md b/README.md index de41423..96f9f24 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Picton -[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://jericho.mit-license.org/) +[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://jericho.mit-license.org/) [![Build status](https://ci.appveyor.com/api/projects/status/2tl8wuancvf3awap?svg=true)](https://ci.appveyor.com/project/Jericho/picton.messaging) [![Coverage Status](https://coveralls.io/repos/github/Jericho/Picton.Messaging/badge.svg?branch=master)](https://coveralls.io/github/Jericho/Picton.Messaging?branch=master) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bhttps%3A%2F%2Fgithub.com%2FJericho%2FPicton.Messaging.svg?type=shield)](https://app.fossa.io/projects/git%2Bhttps%3A%2F%2Fgithub.com%2FJericho%2FPicton.Messaging?ref=badge_shield) @@ -32,7 +32,7 @@ In December 2017 version 2.0 was released with a much more efficient method of f Picton.Messaging is available as a Nuget package. 
-[![NuGet Version](http://img.shields.io/nuget/v/Picton.Messaging.svg)](https://www.nuget.org/packages/Picton.Messaging/) +[![NuGet Version](https://img.shields.io/nuget/v/Picton.Messaging.svg)](https://www.nuget.org/packages/Picton.Messaging/) ## Installation @@ -57,26 +57,64 @@ namespace WorkerRole1 { public class MyWorkerRole : RoleEntryPoint { - private AsyncMessagePump _messagePump; + private readonly CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + private readonly ManualResetEvent runCompleteEvent = new ManualResetEvent(false); public override void Run() { - Trace.TraceInformation("MyWorkerRole is running"); - _messagePump.Start(); + Trace.TraceInformation("WorkerRole is running"); + + try + { + this.RunAsync(this.cancellationTokenSource.Token).Wait(); + } + finally + { + this.runCompleteEvent.Set(); + } } public override bool OnStart() { - var storageAccount = CloudStorageAccount.DevelopmentStorageAccount; - var queueName = "myqueue"; - var poisonQueueName = "myqueue-poison"; + // Use TLS 1.2 for Service Bus connections + ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12; + + // Set the maximum number of concurrent connections + ServicePointManager.DefaultConnectionLimit = 12; + + // For information on handling configuration changes + // see the MSDN topic at https://go.microsoft.com/fwlink/?LinkId=166357. + + bool result = base.OnStart(); + + Trace.TraceInformation("WorkerRole has been started"); + + return result; + } + + public override void OnStop() + { + Trace.TraceInformation("WorkerRole is stopping"); + + this.cancellationTokenSource.Cancel(); + this.runCompleteEvent.WaitOne(); - // Configure the message pump - _messagePump = new AsyncMessagePump(queueName, storageAccount, 25, poisonQueueName, TimeSpan.FromMinutes(1), 3) + base.OnStop(); + + Trace.TraceInformation("WorkerRole has stopped"); + } + + private async Task RunAsync(CancellationToken cancellationToken) + { + var connectionString = "<-- insert connection string for your Azure account -->"; + var queueName = "<-- insert the name of your Azure queue -->"; + + // Configure the message pump + var messagePump = new AsyncMessagePump(connectionString, queueName, 10, null, TimeSpan.FromMinutes(1), 3) { - OnMessage = (message, cancellationToken) => + OnMessage = (message, cancellationToken) => { - Debug.WriteLine(message.AsString); + Debug.WriteLine("Received message of type {message.Content.GetType()}"); }, OnError = (message, exception, isPoison) => { @@ -84,17 +122,8 @@ namespace WorkerRole1 } }; - Trace.TraceInformation("MyWorkerRole started"); - - return base.OnStart(); - } - - public override void OnStop() - { - Trace.TraceInformation("MyWorkerRole is stopping"); - _messagePump.Stop(); - base.OnStop(); - Trace.TraceInformation("MyWorkerRole has stopped"); + // Start the message pump + await messagePump.StartAsync(cancellationToken); } } } diff --git a/Source/PackageIcon.png b/Source/PackageIcon.png new file mode 100644 index 0000000..e62fdce Binary files /dev/null and b/Source/PackageIcon.png differ diff --git a/Source/Picton.Messaging.IntegrationTests/ColoredConsoleLogProvider.cs b/Source/Picton.Messaging.IntegrationTests/ColoredConsoleLogProvider.cs deleted file mode 100644 index b57132c..0000000 --- a/Source/Picton.Messaging.IntegrationTests/ColoredConsoleLogProvider.cs +++ /dev/null @@ -1,114 +0,0 @@ -namespace Picton.Messaging.IntegrationTests -{ - using Logging; - using System; - using System.Collections.Generic; - using System.Globalization; - - // Inspired by: 
https://github.com/damianh/LibLog/blob/master/src/LibLog.Example.ColoredConsoleLogProvider/ColoredConsoleLogProvider.cs - public class ColoredConsoleLogProvider : ILogProvider - { - private static readonly Dictionary Colors = new Dictionary - { - {LogLevel.Fatal, ConsoleColor.Red}, - {LogLevel.Error, ConsoleColor.Yellow}, - {LogLevel.Warn, ConsoleColor.Magenta}, - {LogLevel.Info, ConsoleColor.White}, - {LogLevel.Debug, ConsoleColor.Gray}, - {LogLevel.Trace, ConsoleColor.DarkGray} - }; - private readonly LogLevel _minLevel = LogLevel.Trace; - - public ColoredConsoleLogProvider(LogLevel minLevel = LogLevel.Trace) - { - _minLevel = minLevel; - } - - /// - /// Gets the specified named logger. - /// - /// Name of the logger. - /// The logger reference. - public Logger GetLogger(string name) - { - return (logLevel, messageFunc, exception, formatParameters) => - { - // messageFunc is null when checking if logLevel is enabled - if (messageFunc == null) return (logLevel >= _minLevel); - - if (logLevel >= _minLevel) - { - // Please note: locking is important to ensure that multiple threads - // don't attempt to change the foreground color at the same time - lock (Console.Out) - { - if (Colors.TryGetValue(logLevel, out ConsoleColor consoleColor)) - { - var originalForground = Console.ForegroundColor; - try - { - Console.ForegroundColor = consoleColor; - WriteMessage(logLevel, name, messageFunc, formatParameters, exception); - } - finally - { - Console.ForegroundColor = originalForground; - } - } - else - { - WriteMessage(logLevel, name, messageFunc, formatParameters, exception); - } - } - } - - return true; - }; - } - - /// - /// Opens a nested diagnostics context. Not supported in EntLib logging. - /// - /// The message to add to the diagnostics context. - /// A disposable that when disposed removes the message from the context. - public IDisposable OpenNestedContext(string message) - { - return NullDisposable.Instance; - } - - /// - /// Opens a mapped diagnostics context. Not supported in EntLib logging. - /// - /// A key. - /// A value. - /// A disposable that when disposed removes the map from the context. 
- public IDisposable OpenMappedContext(string key, object value, bool destructure = false) - { - return NullDisposable.Instance; - } - - private class NullDisposable : IDisposable - { - internal static readonly IDisposable Instance = new NullDisposable(); - - public void Dispose() - { } - } - - private static void WriteMessage( - LogLevel logLevel, - string name, - Func messageFunc, - object[] formatParameters, - Exception exception) - { - var message = messageFunc(); - if (formatParameters?.Length > 0) message = string.Format(CultureInfo.InvariantCulture, message, formatParameters); - if (exception != null) - { - message = message + "|" + exception; - } - Console.WriteLine("{0} | {1} | {2} | {3}", DateTime.UtcNow, logLevel, name, message); - } - } -} diff --git a/Source/Picton.Messaging.IntegrationTests/Datadog/DatadogFormatter.cs b/Source/Picton.Messaging.IntegrationTests/Datadog/DatadogFormatter.cs deleted file mode 100644 index d9839b5..0000000 --- a/Source/Picton.Messaging.IntegrationTests/Datadog/DatadogFormatter.cs +++ /dev/null @@ -1,44 +0,0 @@ -using App.Metrics; -using App.Metrics.Formatters; -using App.Metrics.Serialization; -using System.IO; -using System.Threading; -using System.Threading.Tasks; - -namespace Picton.Messaging.IntegrationTests.Datadog -{ - /// - /// Formatter for encoding Metrics in Datadog JSON - /// - public class DatadogFormatter : IMetricsOutputFormatter - { - private readonly DatadogFormatterOptions _options; - - /// - /// Constructor - /// - public DatadogFormatter(DatadogFormatterOptions options) - { - _options = options; - } - - /// - public Task WriteAsync(Stream output, MetricsDataValueSource metricsData, CancellationToken cancellationToken = new CancellationToken()) - { - var serializer = new MetricSnapshotSerializer(); - using (var streamWriter = new StreamWriter(output)) - { - using (var writer = new MetricSnapshotDatadogWriter(streamWriter, _options)) - serializer.Serialize(writer, metricsData, this.MetricFields); - } - - return Task.CompletedTask; - } - - /// - public MetricsMediaTypeValue MediaType => new MetricsMediaTypeValue("application", "com.datadoghq.metrics", "v1", "json"); - - /// - public MetricFields MetricFields { get; set; } - } -} diff --git a/Source/Picton.Messaging.IntegrationTests/Datadog/DatadogFormatterOptions.cs b/Source/Picton.Messaging.IntegrationTests/Datadog/DatadogFormatterOptions.cs deleted file mode 100644 index 987bbe3..0000000 --- a/Source/Picton.Messaging.IntegrationTests/Datadog/DatadogFormatterOptions.cs +++ /dev/null @@ -1,14 +0,0 @@ - -namespace Picton.Messaging.IntegrationTests.Datadog -{ - /// - /// Options for data reported to Datadog - /// - public class DatadogFormatterOptions - { - /// - /// The Hostname that is reported. 
Usually Environment.MachineName - /// - public string Hostname { get; set; } - } -} diff --git a/Source/Picton.Messaging.IntegrationTests/Datadog/MetricJson.cs b/Source/Picton.Messaging.IntegrationTests/Datadog/MetricJson.cs deleted file mode 100644 index 85fdcbb..0000000 --- a/Source/Picton.Messaging.IntegrationTests/Datadog/MetricJson.cs +++ /dev/null @@ -1,13 +0,0 @@ -namespace Picton.Messaging.IntegrationTests.Datadog -{ - /// - /// For serializing http://docs.datadoghq.com/api/?lang=console#metrics - /// - class MetricJson - { - public string Metric { get; set; } - public object[][] Points { get; set; } - public string Host { get; set; } - public string[] Tags { get; set; } - } -} diff --git a/Source/Picton.Messaging.IntegrationTests/Datadog/MetricSnapshotDatadogWriter.cs b/Source/Picton.Messaging.IntegrationTests/Datadog/MetricSnapshotDatadogWriter.cs deleted file mode 100644 index 7e8c4f2..0000000 --- a/Source/Picton.Messaging.IntegrationTests/Datadog/MetricSnapshotDatadogWriter.cs +++ /dev/null @@ -1,137 +0,0 @@ -using App.Metrics; -using App.Metrics.Serialization; -using Newtonsoft.Json; -using Newtonsoft.Json.Serialization; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; - -namespace Picton.Messaging.IntegrationTests.Datadog -{ - class MetricSnapshotDatadogWriter : IMetricSnapshotWriter - { - private StreamWriter _streamWriter; - private readonly DatadogFormatterOptions _options; - private readonly List _metrics = new List(); - - static readonly JsonSerializerSettings JsonSettings = new JsonSerializerSettings - { - ContractResolver = new LowercaseContractResolver() - }; - - private class LowercaseContractResolver : DefaultContractResolver - { - protected override string ResolvePropertyName(string propertyName) - { - return propertyName.ToLower(); - } - } - - public MetricSnapshotDatadogWriter(StreamWriter streamWriter, DatadogFormatterOptions options) - { - _streamWriter = streamWriter; - _options = options; - } - - public void Dispose() - { - if (_streamWriter != null) - { - Flush(); - - _streamWriter?.Dispose(); - _streamWriter = null; - } - } - - /// - public void Write(string context, string name, string field, object value, MetricTags tags, DateTime timestamp) - { - Write(context, name, new[] { field }, new[] { value }, tags, timestamp); - } - - /// - public void Write(string context, string name, IEnumerable columns, IEnumerable values, MetricTags tags, DateTime timestamp) - { - var posixTimestamp = (timestamp - new DateTimeOffset(1970, 1, 1, 0, 0, 0, TimeSpan.Zero)).TotalSeconds; //TODO: Check this - - var dict = columns.Zip(values, (column, value) => new { column, value }).ToDictionary(p => p.column, p => p.value); - - switch (tags.Values[Array.IndexOf(tags.Keys, "mtype")]) - { - case "apdex": - Write(posixTimestamp, context, name, "count", dict["samples"]); - Write(posixTimestamp, context, name, "score", dict["score"]); - Write(posixTimestamp, context, name, "satisfied", dict["satisfied"]); - Write(posixTimestamp, context, name, "tolerating", dict["tolerating"]); - Write(posixTimestamp, context, name, "frustrating", dict["frustrating"]); - break; - case "gauge": - Write(posixTimestamp, context, name, dict["value"]); - break; - case "counter": - if (dict.ContainsKey("value")) - Write(posixTimestamp, context, name, dict["value"]); - break; - case "meter": - Write(posixTimestamp, context, name, "count", dict["count.meter"]); - Write(posixTimestamp, context, name, "15m", dict["rate15m"]); - Write(posixTimestamp, context, name, "5m", 
dict["rate5m"]); - Write(posixTimestamp, context, name, "1m", dict["rate1m"]); - Write(posixTimestamp, context, name, "avg", dict["rate.mean"]); - break; - case "timer": - Write(posixTimestamp, context, name, "1mrate", dict["rate1m"]); - Write(posixTimestamp, context, name, "5mrate", dict["rate5m"]); - Write(posixTimestamp, context, name, "15mrate", dict["rate15m"]); - WriteHistogram(posixTimestamp, context, name, dict); - break; - case "histogram": - WriteHistogram(posixTimestamp, context, name, dict); - break; - } - } - - private void WriteHistogram(double posixTimestamp, string context, string name, Dictionary dict) - { - Write(posixTimestamp, context, name, "count", dict["count.hist"]); - - Write(posixTimestamp, context, name, "max", dict["max"]); - Write(posixTimestamp, context, name, "avg", dict["mean"]); - Write(posixTimestamp, context, name, "median", dict["median"]); - Write(posixTimestamp, context, name, "min", dict["min"]); - Write(posixTimestamp, context, name, "stdDev", dict["stddev"]); - - Write(posixTimestamp, context, name, "75percentile", dict["p75"]); - Write(posixTimestamp, context, name, "95percentile", dict["p95"]); - Write(posixTimestamp, context, name, "98percentile", dict["p98"]); - Write(posixTimestamp, context, name, "99percentile", dict["p99"]); - Write(posixTimestamp, context, name, "999percentile", dict["p999"]); - } - - private void Write(double timestamp, string context, string name, string subname, object value) - { - Write(timestamp, context, name + "." + subname, value); - } - - private void Write(double timestamp, string context, string name, object value) - { - _metrics.Add(new MetricJson - { - Host = _options.Hostname, - Metric = context + "." + name, - //Tags = tags.Values, - Points = new[] - { - new[] { timestamp, value } - } - }); - } - - private void Flush() - { - _streamWriter.Write(JsonConvert.SerializeObject(new SeriesJson { Series = _metrics.ToArray() }, JsonSettings)); - } - } -} diff --git a/Source/Picton.Messaging.IntegrationTests/Datadog/SeriesJson.cs b/Source/Picton.Messaging.IntegrationTests/Datadog/SeriesJson.cs deleted file mode 100644 index b8c1f86..0000000 --- a/Source/Picton.Messaging.IntegrationTests/Datadog/SeriesJson.cs +++ /dev/null @@ -1,7 +0,0 @@ -namespace Picton.Messaging.IntegrationTests.Datadog -{ - class SeriesJson - { - public MetricJson[] Series { get; set; } - } -} diff --git a/Source/Picton.Messaging.IntegrationTests/MyMessageHandler.cs b/Source/Picton.Messaging.IntegrationTests/MyMessageHandler.cs index f4c8a90..bb3542a 100644 --- a/Source/Picton.Messaging.IntegrationTests/MyMessageHandler.cs +++ b/Source/Picton.Messaging.IntegrationTests/MyMessageHandler.cs @@ -1,20 +1,19 @@ -using Picton.Messaging.Logging; +using Microsoft.Extensions.Logging; using Picton.Messaging.Messages; namespace Picton.Messaging.IntegrationTests { public class MyMessageHandler : IMessageHandler { - private readonly Logger _logger; + private readonly ILogger _log; - public MyMessageHandler() + public MyMessageHandler(ILogger log) { - var logProvider = new ColoredConsoleLogProvider(Logging.LogLevel.Debug); - _logger = logProvider.GetLogger("MyMessageHandler"); + _log = log; } public void Handle(MyMessage message) { - _logger(Logging.LogLevel.Debug, () => message.MessageContent); + _log.LogInformation(message.MessageContent); } } } diff --git a/Source/Picton.Messaging.IntegrationTests/Picton.Messaging.IntegrationTests.csproj b/Source/Picton.Messaging.IntegrationTests/Picton.Messaging.IntegrationTests.csproj index 4f36fde..cf7430b 100644 --- 
a/Source/Picton.Messaging.IntegrationTests/Picton.Messaging.IntegrationTests.csproj +++ b/Source/Picton.Messaging.IntegrationTests/Picton.Messaging.IntegrationTests.csproj @@ -2,7 +2,7 @@ Exe - netcoreapp3.1 + net7.0 Picton.Messaging.IntegrationTests Picton.Messaging.IntegrationTests @@ -12,7 +12,13 @@ - + + + + + + + diff --git a/Source/Picton.Messaging.IntegrationTests/Program.cs b/Source/Picton.Messaging.IntegrationTests/Program.cs index 271ac0d..2967d64 100644 --- a/Source/Picton.Messaging.IntegrationTests/Program.cs +++ b/Source/Picton.Messaging.IntegrationTests/Program.cs @@ -1,180 +1,51 @@ -using App.Metrics; -using App.Metrics.Scheduling; -using Picton.Managers; -using Picton.Messaging.IntegrationTests.Datadog; -using Picton.Messaging.Logging; +using Logzio.DotNet.NLog; +using Microsoft.Extensions.DependencyInjection; +using NLog.Config; +using NLog.Extensions.Logging; +using NLog.Targets; using System; -using System.Diagnostics; using System.Threading.Tasks; namespace Picton.Messaging.IntegrationTests { class Program { - static void Main() + public static async Task Main() { - // Ensure the storage emulator is running - AzureEmulatorManager.EnsureStorageEmulatorIsStarted(); - - // If you want to see tracing from the Picton libary, change the LogLevel to 'Trace' - var minLogLevel = Logging.LogLevel.Debug; - - // Configure logging to the console - var logProvider = new ColoredConsoleLogProvider(minLogLevel); - var logger = logProvider.GetLogger("Main"); - LogProvider.SetCurrentLogProvider(logProvider); - - // Ensure the Console is tall enough and centered on the screen - Console.WindowHeight = Math.Min(60, Console.LargestWindowHeight); - ConsoleUtils.CenterConsole(); - - // Configure where metrics are published to. By default, don't publish metrics - var metrics = (IMetricsRoot)null; - - // In this example, I'm publishing metrics to a DataDog account - var datadogApiKey = Environment.GetEnvironmentVariable("DATADOG_APIKEY"); - if (!string.IsNullOrEmpty(datadogApiKey)) - { - metrics = new MetricsBuilder() - .Report.OverHttp(o => - { - o.HttpSettings.RequestUri = new Uri($"https://app.datadoghq.com/api/v1/series?api_key={datadogApiKey}"); - o.MetricsOutputFormatter = new DatadogFormatter(new DatadogFormatterOptions { Hostname = Environment.MachineName }); - o.FlushInterval = TimeSpan.FromSeconds(2); - }) - .Build(); - - // Send metrics to Datadog - var sendMetricsJob = new AppMetricsTaskScheduler( - TimeSpan.FromSeconds(2), - async () => - { - await Task.WhenAll(metrics.ReportRunner.RunAllAsync()); - }); - sendMetricsJob.Start(); - } - - // Setup the message queue in Azure storage emulator - var connectionString = "UseDevelopmentStorage=true"; - var queueName = "myqueue"; - var numberOfMessages = 25; - - logger(Logging.LogLevel.Info, () => "Begin integration tests..."); - - var stringMessagesLogger = logProvider.GetLogger("StringMessages"); - AddStringMessagesToQueue(numberOfMessages, queueName, connectionString, stringMessagesLogger).Wait(); - ProcessSimpleMessages(queueName, connectionString, stringMessagesLogger, metrics); - - var simpleMessagesLogger = logProvider.GetLogger("SimpleMessages"); - AddSimpleMessagesToQueue(numberOfMessages, queueName, connectionString, simpleMessagesLogger).Wait(); - ProcessSimpleMessages(queueName, connectionString, simpleMessagesLogger, metrics); - - var messagesWithHandlerLogger = logProvider.GetLogger("MessagesWithHandler"); - AddMessagesWithHandlerToQueue(numberOfMessages, queueName, connectionString, messagesWithHandlerLogger).Wait(); - 
ProcessMessagesWithHandlers(queueName, connectionString, messagesWithHandlerLogger, metrics); - - // Flush the console key buffer - while (Console.KeyAvailable) Console.ReadKey(true); - - // Wait for user to press a key - logger(Logging.LogLevel.Info, () => "Press any key to exit..."); - Console.ReadKey(); - } - - public static async Task AddStringMessagesToQueue(int numberOfMessages, string queueName, string connectionString, Logger logger) - { - logger(Logging.LogLevel.Info, () => $"Adding {numberOfMessages} string messages to the queue..."); - - var queueManager = new QueueManager(connectionString, queueName); - await queueManager.ClearAsync().ConfigureAwait(false); - for (var i = 0; i < numberOfMessages; i++) - { - await queueManager.AddMessageAsync($"Hello world {i}").ConfigureAwait(false); - } + var services = new ServiceCollection(); + ConfigureServices(services); + using var serviceProvider = services.BuildServiceProvider(); + var app = serviceProvider.GetService(); + return await app.RunAsync().ConfigureAwait(false); } - public static async Task AddSimpleMessagesToQueue(int numberOfMessages, string queueName, string connectionString, Logger logger) + private static void ConfigureServices(ServiceCollection services) { - logger(Logging.LogLevel.Info, () => $"Adding {numberOfMessages} simple messages to the queue..."); - - var queueManager = new QueueManager(connectionString, queueName); - await queueManager.ClearAsync().ConfigureAwait(false); - for (var i = 0; i < numberOfMessages; i++) - { - await queueManager.AddMessageAsync($"Hello world {i}").ConfigureAwait(false); - } + services + .AddLogging(loggingBuilder => loggingBuilder.AddNLog(GetNLogConfiguration())) + .AddTransient(); } - public static void ProcessSimpleMessages(string queueName, string connectionString, Logger logger, IMetrics metrics) + private static LoggingConfiguration GetNLogConfiguration() { - Stopwatch sw = null; + // Configure logging + var nLogConfig = new LoggingConfiguration(); - // Configure the message pump - var messagePump = new AsyncMessagePump(connectionString, queueName, 10, null, TimeSpan.FromMinutes(1), 3, metrics) + // Send logs to logz.io + var logzioToken = Environment.GetEnvironmentVariable("LOGZIO_TOKEN"); + if (!string.IsNullOrEmpty(logzioToken)) { - OnMessage = (message, cancellationToken) => - { - logger(Logging.LogLevel.Debug, () => message.Content.ToString()); - } - }; - - // Stop the message pump when the queue is empty. 
- messagePump.OnQueueEmpty = cancellationToken => - { - // Stop the timer - if (sw.IsRunning) sw.Stop(); - - // Stop the message pump - logger(Logging.LogLevel.Debug, () => "Asking the 'simple' message pump to stop"); - messagePump.Stop(); - logger(Logging.LogLevel.Debug, () => "The 'simple' message pump has been stopped"); - }; - - // Start the message pump - sw = Stopwatch.StartNew(); - logger(Logging.LogLevel.Debug, () => "The 'simple' message pump is starting"); - messagePump.Start(); - - // Display summary - logger(Logging.LogLevel.Info, () => $"\tDone in {sw.Elapsed.ToDurationString()}"); - } - - public static async Task AddMessagesWithHandlerToQueue(int numberOfMessages, string queueName, string connectionString, Logger logger) - { - logger(Logging.LogLevel.Info, () => $"Adding {numberOfMessages} messages with handlers to the queue..."); - - var queueManager = new QueueManager(connectionString, queueName); - await queueManager.ClearAsync().ConfigureAwait(false); - for (var i = 0; i < numberOfMessages; i++) - { - await queueManager.AddMessageAsync(new MyMessage { MessageContent = $"Hello world {i}" }).ConfigureAwait(false); + var logzioTarget = new LogzioTarget { Token = logzioToken }; + logzioTarget.ContextProperties.Add(new TargetPropertyWithContext("source", "PictonMessaging_integration_tests")); + logzioTarget.ContextProperties.Add(new TargetPropertyWithContext("PictonMessaging-Version", typeof(AsyncMessagePump).Assembly.GetName().Version.ToString(3))); + nLogConfig.AddTarget("Logzio", logzioTarget); + nLogConfig.AddRule(NLog.LogLevel.Debug, NLog.LogLevel.Fatal, "Logzio", "*"); } - } - - public static void ProcessMessagesWithHandlers(string queueName, string connectionString, Logger logger, IMetrics metrics) - { - Stopwatch sw = null; - - // Configure the message pump - var messagePump = new AsyncMessagePumpWithHandlers(connectionString, queueName, 10, null, TimeSpan.FromMinutes(1), 3, metrics); - messagePump.OnQueueEmpty = cancellationToken => - { - // Stop the timer - if (sw.IsRunning) sw.Stop(); - - // Stop the message pump - logger(Logging.LogLevel.Debug, () => "Asking the message pump with handlers to stop"); - messagePump.Stop(); - logger(Logging.LogLevel.Debug, () => "The message pump with handlers has been stopped"); - }; - - // Start the message pump - sw = Stopwatch.StartNew(); - logger(Logging.LogLevel.Debug, () => "The message pump with handlers is starting"); - messagePump.Start(); - - // Display summary - logger(Logging.LogLevel.Info, () => $"\tDone in {sw.Elapsed.ToDurationString()}"); + // Send logs to console + var consoleTarget = new ColoredConsoleTarget(); + nLogConfig.AddTarget("ColoredConsole", consoleTarget); + nLogConfig.AddRule(NLog.LogLevel.Info, NLog.LogLevel.Fatal, "ColoredConsole", "*"); + return nLogConfig; } } } diff --git a/Source/Picton.Messaging.IntegrationTests/TestsRunner.cs b/Source/Picton.Messaging.IntegrationTests/TestsRunner.cs new file mode 100644 index 0000000..f2172c1 --- /dev/null +++ b/Source/Picton.Messaging.IntegrationTests/TestsRunner.cs @@ -0,0 +1,239 @@ +using App.Metrics; +using App.Metrics.Scheduling; +using Microsoft.Extensions.Logging; +using Picton.Managers; +using System; +using System.Diagnostics; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace Picton.Messaging.IntegrationTests +{ + internal class TestsRunner + { + private enum ResultCodes + { + Success = 0, + Exception = 1, + Cancelled = 1223 + } + + private readonly ILogger _logger; + + public TestsRunner(ILogger logger) + { + _logger 
= logger; + } + + public async Task RunAsync() + { + // Configure Console + var cts = new CancellationTokenSource(); + Console.CancelKeyPress += (s, e) => + { + e.Cancel = true; + cts.Cancel(); + }; + + // Ensure the Console is tall enough and centered on the screen + if (OperatingSystem.IsWindows()) Console.WindowHeight = Math.Min(60, Console.LargestWindowHeight); + Utils.CenterConsole(); + + // Configure where metrics are published to. By default, don't publish metrics + var metrics = (IMetricsRoot)null; + + // In this example, I'm publishing metrics to a DataDog account + var datadogApiKey = Environment.GetEnvironmentVariable("DATADOG_APIKEY"); + if (!string.IsNullOrEmpty(datadogApiKey)) + { + metrics = new MetricsBuilder() + .Report.ToDatadogHttp( + options => + { + options.Datadog.BaseUri = new Uri("https://app.datadoghq.com/api/v1/series"); + options.Datadog.ApiKey = datadogApiKey; + options.FlushInterval = TimeSpan.FromSeconds(2); + }) + .Build(); + + // Send metrics to Datadog + var sendMetricsJob = new AppMetricsTaskScheduler( + TimeSpan.FromSeconds(2), + async () => + { + await Task.WhenAll(metrics.ReportRunner.RunAllAsync()); + }); + sendMetricsJob.Start(); + } + + // Start Azurite before running the tests. It will be automaticaly stopped when "emulator" goes out of scope + using (var emulator = new AzuriteManager()) + { + var connectionString = "UseDevelopmentStorage=true"; + var queueName = "myqueue"; + var numberOfMessages = 25; + var numberOfTenants = 5; + + // Run the integration tests + await RunAsyncMessagePumpTests(connectionString, queueName, numberOfMessages, metrics, cts.Token).ConfigureAwait(false); + await RunAsyncMessagePumpWithHandlersTests(connectionString, queueName, numberOfMessages, metrics, cts.Token).ConfigureAwait(false); + await RunMultiTenantAsyncMessagePumpTests(connectionString, queueName, numberOfTenants, numberOfMessages, metrics, cts.Token).ConfigureAwait(false); + } + + // Prompt user to press a key in order to allow reading the log in the console + var promptLog = new StringWriter(); + await promptLog.WriteLineAsync("\n\n**************************************************").ConfigureAwait(false); + await promptLog.WriteLineAsync("Press any key to exit...").ConfigureAwait(false); + Utils.Prompt(promptLog.ToString()); + + // Return code indicating success/failure + var resultCode = (int)ResultCodes.Success; + + return await Task.FromResult(resultCode); + } + + private async Task RunAsyncMessagePumpTests(string connectionString, string queueName, int numberOfMessages, IMetrics metrics, CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) return; + + _logger.LogInformation("**************************************************"); + _logger.LogInformation("Testing AsyncMessagePump..."); + + // Add messages to the queue + _logger.LogInformation("Adding {numberOfMessages} string messages to the {queueName} queue...", numberOfMessages, queueName); + var queueManager = new QueueManager(connectionString, queueName); + await queueManager.ClearAsync().ConfigureAwait(false); + for (var i = 0; i < numberOfMessages; i++) + { + await queueManager.AddMessageAsync($"Hello world {i}").ConfigureAwait(false); + } + + // Process the messages + Stopwatch sw = null; + + // Configure the message pump + var messagePump = new AsyncMessagePump(connectionString, queueName, 10, null, TimeSpan.FromMinutes(1), 3, _logger, metrics) + { + OnMessage = (message, cancellationToken) => + { + _logger.LogInformation(message.Content.ToString()); + } + }; + 
+ // Stop the message pump when the queue is empty. + var cts = new CancellationTokenSource(); + messagePump.OnQueueEmpty = cancellationToken => + { + // Stop the timer + if (sw.IsRunning) sw.Stop(); + + // Stop the message pump + _logger.LogDebug("Asking the message pump to stop..."); + cts.Cancel(); + }; + + // Start the message pump + sw = Stopwatch.StartNew(); + _logger.LogDebug("The message pump is starting..."); + await messagePump.StartAsync(cts.Token).ConfigureAwait(false); + + // Display summary + _logger.LogInformation($"\tDone in {sw.Elapsed.ToDurationString()}"); + } + + private async Task RunAsyncMessagePumpWithHandlersTests(string connectionString, string queueName, int numberOfMessages, IMetrics metrics, CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) return; + + _logger.LogInformation("**************************************************"); + _logger.LogInformation("Testing AsyncMessagePumpWithHandlers..."); + + // Add messages to the queue + _logger.LogInformation("Adding {numberOfMessages} messages with handlers to the {queueName} queue...", numberOfMessages, queueName); + var queueManager = new QueueManager(connectionString, queueName); + await queueManager.ClearAsync().ConfigureAwait(false); + for (var i = 0; i < numberOfMessages; i++) + { + await queueManager.AddMessageAsync(new MyMessage { MessageContent = $"Hello world {i}" }).ConfigureAwait(false); + } + + // Process the messages + Stopwatch sw = null; + + // Configure the message pump + var cts = new CancellationTokenSource(); + var messagePump = new AsyncMessagePumpWithHandlers(connectionString, queueName, 10, null, TimeSpan.FromMinutes(1), 3, _logger, metrics); + messagePump.OnQueueEmpty = cancellationToken => + { + // Stop the timer + if (sw.IsRunning) sw.Stop(); + + // Stop the message pump + _logger.LogDebug("Asking the message pump with handlers to stop..."); + cts.Cancel(); + }; + + // Start the message pump + sw = Stopwatch.StartNew(); + _logger.LogDebug("The message pump with handlers is starting..."); + await messagePump.StartAsync(cts.Token); + + // Display summary + _logger.LogInformation($"\tDone in {sw.Elapsed.ToDurationString()}"); + } + + private async Task RunMultiTenantAsyncMessagePumpTests(string connectionString, string queueNamePrefix, int numberOfTenants, int numberOfMessages, IMetrics metrics, CancellationToken cancellationToken) + { + if (cancellationToken.IsCancellationRequested) return; + + _logger.LogInformation("**************************************************"); + _logger.LogInformation("Testing AsyncMultiTenantMessagePump..."); + + // Add messages to the tenant queues + for (int i = 0; i < numberOfTenants; i++) + { + var queueManager = new QueueManager(connectionString, $"{queueNamePrefix}{i:00}"); + await queueManager.ClearAsync().ConfigureAwait(false); + var numberOfMessagesForThisTenant = numberOfMessages * (i + 1); // Each tenant receives a different number of messages + for (var j = 0; j < numberOfMessagesForThisTenant; j++) + { + await queueManager.AddMessageAsync($"Hello world {j} to tenant {i:00}").ConfigureAwait(false); + } + } + + // Process the messages + Stopwatch sw = null; + + // Configure the message pump + var cts = new CancellationTokenSource(); + var messagePump = new AsyncMultiTenantMessagePump(connectionString, queueNamePrefix, 10, null, TimeSpan.FromMinutes(1), 3, null, null, _logger, metrics) + { + OnMessage = (tenantId, message, cancellationToken) => + { + var messageContent = message.Content.ToString(); + 
_logger.LogInformation($"{tenantId} - {messageContent}", tenantId, messageContent); + } + }; + + // Stop the message pump when all tenant queues are empty. + messagePump.OnEmpty = cancellationToken => + { + // Stop the timer + if (sw.IsRunning) sw.Stop(); + + // Stop the message pump + _logger.LogDebug("Asking the multi-tenant message pump to stop..."); + cts.Cancel(); + }; + + // Start the message pump + sw = Stopwatch.StartNew(); + _logger.LogDebug("The multi-tenant message pump is starting..."); + await messagePump.StartAsync(cts.Token); + + // Display summary + _logger.LogInformation($"\tDone in {sw.Elapsed.ToDurationString()}"); + } + } +} diff --git a/Source/Picton.Messaging.IntegrationTests/ConsoleUtils.cs b/Source/Picton.Messaging.IntegrationTests/Utils.cs similarity index 69% rename from Source/Picton.Messaging.IntegrationTests/ConsoleUtils.cs rename to Source/Picton.Messaging.IntegrationTests/Utils.cs index 61f37a4..15777bb 100644 --- a/Source/Picton.Messaging.IntegrationTests/ConsoleUtils.cs +++ b/Source/Picton.Messaging.IntegrationTests/Utils.cs @@ -2,7 +2,7 @@ namespace Picton.Messaging.IntegrationTests { - public static class ConsoleUtils + internal static class Utils { public static void CenterConsole() { @@ -23,10 +23,21 @@ public static void CenterConsole() var consoleWidth = consoleInfo.Right - consoleInfo.Left; var consoleHeight = consoleInfo.Bottom - consoleInfo.Top; - var left = monitorInfo.Monitor.Left + (monitorWidth - consoleWidth) / 2; - var top = monitorInfo.Monitor.Top + (monitorHeight - consoleHeight) / 2; + var left = monitorInfo.Monitor.Left + ((monitorWidth - consoleWidth) / 2); + var top = monitorInfo.Monitor.Top + ((monitorHeight - consoleHeight) / 2); NativeMethods.MoveWindow(hWin, left, top, consoleWidth, consoleHeight, false); } + + public static char Prompt(string prompt) + { + while (Console.KeyAvailable) + { + Console.ReadKey(false); + } + Console.Out.WriteLine(prompt); + var result = Console.ReadKey(); + return result.KeyChar; + } } } diff --git a/Source/Picton.Messaging.UnitTests/AsyncMessagePumpTests.cs b/Source/Picton.Messaging.UnitTests/AsyncMessagePumpTests.cs index a945df9..c071d66 100644 --- a/Source/Picton.Messaging.UnitTests/AsyncMessagePumpTests.cs +++ b/Source/Picton.Messaging.UnitTests/AsyncMessagePumpTests.cs @@ -6,6 +6,7 @@ using System; using System.Linq; using System.Threading; +using System.Threading.Tasks; using Xunit; namespace Picton.Messaging.UnitTests @@ -17,7 +18,7 @@ public void Null_cloudQueue_throws() { Should.Throw(() => { - var messagePump = new AsyncMessagePump((QueueManager)null, null, 1, TimeSpan.FromMinutes(1), 1, null); + var messagePump = new AsyncMessagePump((QueueManager)null, null, 1, TimeSpan.FromMinutes(1), 1, null, null); }); } @@ -28,9 +29,9 @@ public void Number_of_concurrent_tasks_too_small_throws() { var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); - var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object); + var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object, false); - var messagePump = new AsyncMessagePump(queueManager, null, 0, TimeSpan.FromMinutes(1), 1, null); + var messagePump = new AsyncMessagePump(queueManager, null, 0, TimeSpan.FromMinutes(1), 1, null, null); }); } @@ -41,9 +42,9 @@ public void DequeueCount_too_small_throws() { var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); - 
var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object); + var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object, false); - var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 0, null); + var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 0, null, null); }); } @@ -53,33 +54,18 @@ public void Start_without_OnMessage_throws() // Arrange var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); - var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object); - - var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 3, null); - - // Act - Should.Throw(() => messagePump.Start()); - } + var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object, true); - [Fact] - public void Stopping_without_starting() - { - // Arrange - var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); - var mockQueueClient = MockUtils.GetMockQueueClient(); - var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object); + var cts = new CancellationTokenSource(); - var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 3, null); + var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 3, null, null); // Act - messagePump.Stop(); - - // Nothing to assert. - // We simply want to make sure that no exception is thrown + Should.ThrowAsync(() => messagePump.StartAsync(cts.Token)); } [Fact] - public void No_message_processed_when_queue_is_empty() + public async Task No_message_processed_when_queue_is_empty() { // Arrange var onMessageInvokeCount = 0; @@ -89,6 +75,8 @@ public void No_message_processed_when_queue_is_empty() var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); + var cts = new CancellationTokenSource(); + mockQueueClient .Setup(q => q.GetPropertiesAsync(It.IsAny())) .ReturnsAsync((CancellationToken cancellationToken) => @@ -112,34 +100,26 @@ public void No_message_processed_when_queue_is_empty() OnError = (message, exception, isPoison) => { Interlocked.Increment(ref onErrorInvokeCount); + }, + OnQueueEmpty = cancellationToken => + { + Interlocked.Increment(ref onQueueEmptyInvokeCount); + cts.Cancel(); } }; - messagePump.OnQueueEmpty = cancellationToken => - { - Interlocked.Increment(ref onQueueEmptyInvokeCount); - messagePump.Stop(); - }; // Act - messagePump.Start(); + await messagePump.StartAsync(cts.Token); // Assert onMessageInvokeCount.ShouldBe(0); onQueueEmptyInvokeCount.ShouldBe(1); onErrorInvokeCount.ShouldBe(0); - - // You would expect the 'GetMessagesAsync' method to be invoked only once, but unfortunately we can't be sure. - // It will be invoked a small number of times (probably once or twice, maybe three times but not more than that). - // However we can't be more precise because we stop the message pump on another thread and 'GetMessagesAsync' may - // be invoked a few times while we wait for the message pump to stop. - // - // What this means is that there is no way to precisely assert the number of times the method has been invoked. 
- // The only thing we know for sure, is that 'GetMessagesAsync' has been invoked at least once - mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.AtLeast(1)); + mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); } [Fact] - public void Message_processed() + public async Task Message_processed() { // Arrange var onMessageInvokeCount = 0; @@ -152,6 +132,8 @@ public void Message_processed() var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); + var cts = new CancellationTokenSource(); + mockQueueClient .Setup(q => q.GetPropertiesAsync(It.IsAny())) .ReturnsAsync((CancellationToken cancellationToken) => @@ -209,27 +191,27 @@ public void Message_processed() cloudMessage = null; } } + }, + OnQueueEmpty = cancellationToken => + { + Interlocked.Increment(ref onQueueEmptyInvokeCount); + cts.Cancel(); } }; - messagePump.OnQueueEmpty = cancellationToken => - { - Interlocked.Increment(ref onQueueEmptyInvokeCount); - messagePump.Stop(); - }; // Act - messagePump.Start(); + await messagePump.StartAsync(cts.Token); // Assert onMessageInvokeCount.ShouldBe(1); onQueueEmptyInvokeCount.ShouldBe(1); onErrorInvokeCount.ShouldBe(0); - mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.AtLeast(2)); + mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(2)); mockQueueClient.Verify(q => q.DeleteMessageAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); } [Fact] - public void Poison_message_is_rejected() + public async Task Poison_message_is_rejected() { // Arrange var onMessageInvokeCount = 0; @@ -245,6 +227,8 @@ public void Poison_message_is_rejected() var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); + var cts = new CancellationTokenSource(); + mockQueueClient .Setup(q => q.GetPropertiesAsync(It.IsAny())) .ReturnsAsync((CancellationToken cancellationToken) => @@ -304,28 +288,28 @@ public void Poison_message_is_rejected() cloudMessage = null; } } + }, + OnQueueEmpty = cancellationToken => + { + Interlocked.Increment(ref onQueueEmptyInvokeCount); + cts.Cancel(); } }; - messagePump.OnQueueEmpty = cancellationToken => - { - Interlocked.Increment(ref onQueueEmptyInvokeCount); - messagePump.Stop(); - }; // Act - messagePump.Start(); + await messagePump.StartAsync(cts.Token); // Assert onMessageInvokeCount.ShouldBe(1); onQueueEmptyInvokeCount.ShouldBe(1); onErrorInvokeCount.ShouldBe(1); isRejected.ShouldBeTrue(); - mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.AtLeast(2)); + mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(2)); mockQueueClient.Verify(q => q.DeleteMessageAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); } [Fact] - public void Poison_message_is_moved() + public async Task Poison_message_is_moved() { // Arrange var onMessageInvokeCount = 0; @@ -342,6 +326,8 @@ public void Poison_message_is_moved() var mockQueueClient = MockUtils.GetMockQueueClient(); var mockPoisonQueueClient = MockUtils.GetMockQueueClient("my_poison_queue"); + var cts = new CancellationTokenSource(); + mockQueueClient .Setup(q => q.GetPropertiesAsync(It.IsAny())) .ReturnsAsync((CancellationToken cancellationToken) => @@ -411,29 +397,29 @@ public void Poison_message_is_moved() cloudMessage = 
null; } } + }, + OnQueueEmpty = cancellationToken => + { + Interlocked.Increment(ref onQueueEmptyInvokeCount); + cts.Cancel(); } }; - messagePump.OnQueueEmpty = cancellationToken => - { - Interlocked.Increment(ref onQueueEmptyInvokeCount); - messagePump.Stop(); - }; // Act - messagePump.Start(); + await messagePump.StartAsync(cts.Token); // Assert onMessageInvokeCount.ShouldBe(1); onQueueEmptyInvokeCount.ShouldBe(1); onErrorInvokeCount.ShouldBe(1); isRejected.ShouldBeTrue(); - mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.AtLeast(2)); + mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(2)); mockQueueClient.Verify(q => q.DeleteMessageAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); mockPoisonQueueClient.Verify(q => q.SendMessageAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); } [Fact] - public void Exceptions_in_OnQueueEmpty_are_ignored() + public async Task Exceptions_in_OnQueueEmpty_are_ignored() { // Arrange var onMessageInvokeCount = 0; @@ -446,6 +432,8 @@ public void Exceptions_in_OnQueueEmpty_are_ignored() var mockBlobContainerClient = MockUtils.GetMockBlobContainerClient(); var mockQueueClient = MockUtils.GetMockQueueClient(); + var cts = new CancellationTokenSource(); + mockQueueClient .Setup(q => q.GetPropertiesAsync(It.IsAny())) .ReturnsAsync((CancellationToken cancellationToken) => @@ -459,7 +447,7 @@ public void Exceptions_in_OnQueueEmpty_are_ignored() var queueManager = new QueueManager(mockBlobContainerClient.Object, mockQueueClient.Object); - var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 3, null) + var messagePump = new AsyncMessagePump(queueManager, null, 1, TimeSpan.FromMinutes(1), 3, null, null) { OnMessage = (message, cancellationToken) => { @@ -468,34 +456,34 @@ public void Exceptions_in_OnQueueEmpty_are_ignored() OnError = (message, exception, isPoison) => { Interlocked.Increment(ref onErrorInvokeCount); - } - }; - messagePump.OnQueueEmpty = cancellationToken => - { - Interlocked.Increment(ref onQueueEmptyInvokeCount); - - // Simulate an exception (only the first time) - lock (lockObject) + }, + OnQueueEmpty = cancellationToken => { - if (!exceptionSimulated) + Interlocked.Increment(ref onQueueEmptyInvokeCount); + + // Simulate an exception (only the first time) + lock (lockObject) { - exceptionSimulated = true; - throw new Exception("This dummy exception should be ignored"); + if (!exceptionSimulated) + { + exceptionSimulated = true; + throw new Exception("This dummy exception should be ignored"); + } } - } - // Stop the message pump - messagePump.Stop(); + // Stop the message pump + cts.Cancel(); + } }; // Act - messagePump.Start(); + await messagePump.StartAsync(cts.Token); // Assert onMessageInvokeCount.ShouldBe(0); onQueueEmptyInvokeCount.ShouldBeGreaterThan(0); onErrorInvokeCount.ShouldBe(0); - mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.AtLeast(1)); + mockQueueClient.Verify(q => q.ReceiveMessagesAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(2)); } } } diff --git a/Source/Picton.Messaging.UnitTests/MockUtils.cs b/Source/Picton.Messaging.UnitTests/MockUtils.cs index de2b98b..bcf3ca8 100644 --- a/Source/Picton.Messaging.UnitTests/MockUtils.cs +++ b/Source/Picton.Messaging.UnitTests/MockUtils.cs @@ -19,7 +19,15 @@ internal static Mock GetMockBlobContainerClient(string cont { var mockContainerUri = new Uri(BLOB_STORAGE_URL + 
containerName); var blobContainerInfo = BlobsModelFactory.BlobContainerInfo(ETag.All, DateTimeOffset.UtcNow); - var mockBlobContainer = new Mock(MockBehavior.Strict, mockContainerUri, (BlobClientOptions)null); + var mockBlobContainer = new Mock(MockBehavior.Strict); + + mockBlobContainer + .SetupGet(m => m.Name) + .Returns(containerName); + + mockBlobContainer + .SetupGet(m => m.Uri) + .Returns(mockContainerUri); mockBlobContainer .Setup(c => c.CreateIfNotExists(It.IsAny(), It.IsAny>(), It.IsAny(), It.IsAny())) @@ -37,20 +45,42 @@ internal static Mock GetMockBlobContainerClient(string cont return mockBlobContainer; } + internal static Mock GetMockBlobClient(string blobName) + { + var mockBlobUri = new Uri(BLOB_STORAGE_URL + blobName); + var mockBlobClient = new Mock(MockBehavior.Strict); + + mockBlobClient + .SetupGet(m => m.Name) + .Returns(blobName); + + mockBlobClient + .SetupGet(m => m.Uri) + .Returns(mockBlobUri); + + return mockBlobClient; + } + internal static Mock GetMockQueueClient(string queueName = "myqueue") { var mockQueueStorageUri = new Uri(QUEUE_STORAGE_URL + queueName); - var mockQueueClient = new Mock(MockBehavior.Strict, mockQueueStorageUri, (QueueClientOptions)null); + var mockQueueClient = new Mock(MockBehavior.Strict); mockQueueClient - .SetupGet(q => q.MessageMaxBytes) - .Returns(int.MaxValue); + .SetupGet(m => m.MaxPeekableMessages) + .Returns(32); + mockQueueClient - .SetupGet(q => q.MaxPeekableMessages) - .Returns(10); + .SetupGet(m => m.MessageMaxBytes) + .Returns(65536); + + mockQueueClient + .SetupGet(m => m.Uri) + .Returns(mockQueueStorageUri); + mockQueueClient .Setup(c => c.CreateIfNotExists(It.IsAny>(), It.IsAny())) - .Returns(new MockAzureResponse(200, "ok")) + .Returns((Response)null) .Verifiable(); return mockQueueClient; diff --git a/Source/Picton.Messaging.UnitTests/Picton.Messaging.UnitTests.csproj b/Source/Picton.Messaging.UnitTests/Picton.Messaging.UnitTests.csproj index a2020a8..29bff2a 100644 --- a/Source/Picton.Messaging.UnitTests/Picton.Messaging.UnitTests.csproj +++ b/Source/Picton.Messaging.UnitTests/Picton.Messaging.UnitTests.csproj @@ -1,22 +1,37 @@ - net461;net472;netcoreapp3.1 + net48;net7.0 Picton.Messaging.UnitTests Picton.Messaging.UnitTests - - - - - + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + all runtime; build; native; contentfiles; analyzers + + + $(DefineConstants);RELEASE;TRACE + false + portable + true + + diff --git a/Source/Picton.Messaging.sln b/Source/Picton.Messaging.sln index e4b66d7..9b381ec 100644 --- a/Source/Picton.Messaging.sln +++ b/Source/Picton.Messaging.sln @@ -1,7 +1,7 @@  Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 16 -VisualStudioVersion = 16.0.28803.452 +# Visual Studio Version 17 +VisualStudioVersion = 17.1.32328.378 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{53D250B0-3F11-4CD4-AF30-5F636B405D87}" ProjectSection(SolutionItems) = preProject diff --git a/Source/Picton.Messaging/AsyncMessagePump.cs b/Source/Picton.Messaging/AsyncMessagePump.cs index 5c2f485..35dd09c 100644 --- a/Source/Picton.Messaging/AsyncMessagePump.cs +++ b/Source/Picton.Messaging/AsyncMessagePump.cs @@ -1,7 +1,7 @@ using App.Metrics; +using Microsoft.Extensions.Logging; using Picton.Interfaces; using Picton.Managers; -using Picton.Messaging.Logging; using Picton.Messaging.Utilities; using System; using System.Collections.Concurrent; @@ -20,18 +20,14 @@ public class 
AsyncMessagePump { #region FIELDS - private static readonly ILog _logger = LogProvider.For(); - private readonly IQueueManager _queueManager; private readonly IQueueManager _poisonQueueManager; private readonly int _concurrentTasks; private readonly TimeSpan? _visibilityTimeout; private readonly int _maxDequeueCount; + private readonly ILogger _logger; private readonly IMetrics _metrics; - private CancellationTokenSource _cancellationTokenSource; - private ManualResetEvent _safeToExitHandle; - #endregion #region PROPERTIES @@ -66,7 +62,7 @@ public class AsyncMessagePump /// /// /// - /// If this property is not set, the default logic is to pause for 2 seconds. + /// If this property is not set, the default logic is to pause for 1.5 seconds. /// public Action OnQueueEmpty { get; set; } @@ -86,10 +82,11 @@ public class AsyncMessagePump /// Name of the queue where messages are automatically moved to when they fail to be processed after 'maxDequeueCount' attempts. You can indicate that you do not want messages to be automatically moved by leaving this value empty. In such a scenario, you are responsible for handling so called 'poison' messages. /// The visibility timeout. /// The maximum dequeue count. + /// The logger. /// The system where metrics are published. [ExcludeFromCodeCoverage] - public AsyncMessagePump(string connectionString, string queueName, int concurrentTasks = 25, string poisonQueueName = null, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, IMetrics metrics = null) - : this(new QueueManager(connectionString, queueName), string.IsNullOrEmpty(poisonQueueName) ? null : new QueueManager(connectionString, poisonQueueName), concurrentTasks, visibilityTimeout, maxDequeueCount, metrics) + public AsyncMessagePump(string connectionString, string queueName, int concurrentTasks = 25, string poisonQueueName = null, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, ILogger logger = null, IMetrics metrics = null) + : this(new QueueManager(connectionString, queueName), string.IsNullOrEmpty(poisonQueueName) ? null : new QueueManager(connectionString, poisonQueueName), concurrentTasks, visibilityTimeout, maxDequeueCount, logger, metrics) { } @@ -101,20 +98,22 @@ public AsyncMessagePump(string connectionString, string queueName, int concurren /// The number of concurrent tasks. /// The visibility timeout. /// The maximum dequeue count. + /// The logger. /// The system where metrics are published. - public AsyncMessagePump(QueueManager queueManager, QueueManager poisonQueueManager, int concurrentTasks = 25, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, IMetrics metrics = null) + public AsyncMessagePump(QueueManager queueManager, QueueManager poisonQueueManager, int concurrentTasks = 25, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, ILogger logger = null, IMetrics metrics = null) { - if (concurrentTasks < 1) throw new ArgumentException("Number of concurrent tasks must be greather than zero", nameof(concurrentTasks)); - if (maxDequeueCount < 1) throw new ArgumentException("Number of retries must be greather than zero", nameof(maxDequeueCount)); + if (concurrentTasks < 1) throw new ArgumentOutOfRangeException("Number of concurrent tasks must be greather than zero", nameof(concurrentTasks)); + if (maxDequeueCount < 1) throw new ArgumentOutOfRangeException("Number of retries must be greather than zero", nameof(maxDequeueCount)); _queueManager = queueManager ?? 
throw new ArgumentNullException(nameof(queueManager)); + _poisonQueueManager = poisonQueueManager; _concurrentTasks = concurrentTasks; _visibilityTimeout = visibilityTimeout; _maxDequeueCount = maxDequeueCount; + _logger = logger; _metrics = metrics ?? TurnOffMetrics(); - _poisonQueueManager = poisonQueueManager; - InitMessagePump(); + InitDefaultActions(); } #endregion @@ -124,43 +123,28 @@ public AsyncMessagePump(QueueManager queueManager, QueueManager poisonQueueManag /// /// Starts the message pump. /// + /// The cancellation token. /// OnMessage. - public void Start() + /// A representing the asynchronous operation. + public async Task StartAsync(CancellationToken cancellationToken) { if (OnMessage == null) throw new ArgumentNullException(nameof(OnMessage)); - _logger.Trace($"{nameof(AsyncMessagePump)} starting..."); - - _cancellationTokenSource = new CancellationTokenSource(); - _safeToExitHandle = new ManualResetEvent(false); - - ProcessMessages(_visibilityTimeout, _cancellationTokenSource.Token).Wait(); - - _cancellationTokenSource.Dispose(); - - _logger.Trace($"{nameof(AsyncMessagePump)} ready to exit"); - _safeToExitHandle.Set(); - } - - /// - /// Stops the message pump. - /// - public void Stop() - { - // Don't attempt to stop the message pump if it's already in the process of stopping - if (_cancellationTokenSource?.IsCancellationRequested ?? false) return; - - // Stop the message pump - _logger.Trace($"{nameof(AsyncMessagePump)} stopping..."); - if (_cancellationTokenSource != null) _cancellationTokenSource.Cancel(); - if (_safeToExitHandle != null) _safeToExitHandle.WaitOne(); - _logger.Trace($"{nameof(AsyncMessagePump)} stopped, exiting safely"); + _logger?.LogTrace("AsyncMessagePump starting..."); + await ProcessMessagesAsync(_visibilityTimeout, cancellationToken).ConfigureAwait(false); + _logger?.LogTrace("AsyncMessagePump stopping..."); } #endregion #region PRIVATE METHODS + private void InitDefaultActions() + { + OnError = (message, exception, isPoison) => _logger?.LogError(exception, "An error occured when processing a message"); + OnQueueEmpty = (cancellationToken) => Task.Delay(1500, cancellationToken).Wait(); + } + private IMetrics TurnOffMetrics() { var metricsTurnedOff = new MetricsBuilder(); @@ -172,13 +156,7 @@ private IMetrics TurnOffMetrics() return metricsTurnedOff.Build(); } - private void InitMessagePump() - { - OnQueueEmpty = cancellationToken => Task.Delay(1500, cancellationToken).Wait(); - OnError = (message, exception, isPoison) => _logger.ErrorException("An error occured when processing a message", exception); - } - - private async Task ProcessMessages(TimeSpan? visibilityTimeout = null, CancellationToken cancellationToken = default) + private async Task ProcessMessagesAsync(TimeSpan? visibilityTimeout, CancellationToken cancellationToken) { var runningTasks = new ConcurrentDictionary(); var semaphore = new SemaphoreSlim(_concurrentTasks, _concurrentTasks); @@ -205,7 +183,7 @@ private async Task ProcessMessages(TimeSpan? visibilityTimeout = null, Cancellat } catch (Exception e) { - _logger.InfoException("An error occured while fetching messages from the Azure queue. The error was caught and ignored.", e.GetBaseException()); + _logger?.LogError(e.GetBaseException(), "An error occured while fetching messages from the Azure queue. The error was caught and ignored."); } } @@ -213,7 +191,8 @@ private async Task ProcessMessages(TimeSpan? 
visibilityTimeout = null, Cancellat if (messages.Any()) { - _logger.Trace($"Fetched {messages.Count()} message(s) from the queue."); + var messagesCount = messages.Count(); + _logger?.LogTrace("Fetched {messagesCount} message(s) from the queue.", messagesCount); foreach (var message in messages) { @@ -222,16 +201,16 @@ private async Task ProcessMessages(TimeSpan? visibilityTimeout = null, Cancellat } else { - _logger.Trace("The queue is empty, no messages fetched."); + _logger?.LogTrace("The queue is empty, no messages fetched."); try { // The queue is empty - OnQueueEmpty?.Invoke(cancellationToken); _metrics.Measure.Counter.Increment(Metrics.QueueEmptyCounter); + OnQueueEmpty?.Invoke(cancellationToken); } catch (Exception e) { - _logger.InfoException("An error occured when handling an empty queue. The error was caught and ignored.", e.GetBaseException()); + _logger?.LogError(e.GetBaseException(), "An error occured when handling an empty queue. The error was caught and ignored."); } } } @@ -249,14 +228,14 @@ private async Task ProcessMessages(TimeSpan? visibilityTimeout = null, Cancellat var count = await _queueManager.GetApproximateMessageCountAsync(cancellationToken).ConfigureAwait(false); _metrics.Measure.Gauge.SetValue(Metrics.QueuedCloudMessagesGauge, count); } - catch (TaskCanceledException) + catch (Exception e) when (e is TaskCanceledException || e is OperationCanceledException) { // The message pump is shutting down. // This exception can be safely ignored. } catch (Exception e) { - _logger.InfoException("An error occured while checking how many message are waiting in the Azure queue. The error was caught and ignored.", e.GetBaseException()); + _logger?.LogError(e.GetBaseException(), "An error occured while checking how many message are waiting in the Azure queue. The error was caught and ignored."); } }, TimeSpan.FromMilliseconds(5000), @@ -273,7 +252,7 @@ private async Task ProcessMessages(TimeSpan? visibilityTimeout = null, Cancellat } catch (Exception e) { - _logger.InfoException("An error occured while checking how many message are waiting in the memory queue. The error was caught and ignored.", e.GetBaseException()); + _logger?.LogError(e.GetBaseException(), "An error occured while checking how many message are waiting in the memory queue. The error was caught and ignored."); } return Task.CompletedTask; @@ -363,7 +342,7 @@ private async Task ProcessMessages(TimeSpan? 
visibilityTimeout = null, Cancellat // Task pump has been canceled, wait for the currently running tasks to complete await Task.WhenAll(runningTasks.Values).UntilCancelled().ConfigureAwait(false); } - - #endregion } + + #endregion } diff --git a/Source/Picton.Messaging/AsyncMessagePumpWithHandlers.cs b/Source/Picton.Messaging/AsyncMessagePumpWithHandlers.cs index 12bfa52..4ee1bee 100644 --- a/Source/Picton.Messaging/AsyncMessagePumpWithHandlers.cs +++ b/Source/Picton.Messaging/AsyncMessagePumpWithHandlers.cs @@ -1,14 +1,15 @@ using App.Metrics; using Microsoft.Extensions.DependencyModel; +using Microsoft.Extensions.Logging; using Picton.Managers; -using Picton.Messaging.Logging; using Picton.Messaging.Messages; -using Picton.Messaging.Utilities; using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Reflection; using System.Threading; +using System.Threading.Tasks; namespace Picton.Messaging { @@ -20,9 +21,11 @@ public class AsyncMessagePumpWithHandlers { #region FIELDS - private static readonly ILog _logger = LogProvider.For(); - private static readonly IDictionary _messageHandlers = GetMessageHandlers(); + private static readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim(); + private static IDictionary _messageHandlers; + + private readonly ILogger _logger; private readonly AsyncMessagePump _messagePump; #endregion @@ -79,10 +82,11 @@ public Action OnQueueEmpty /// Name of the queue where messages are automatically moved to when they fail to be processed after 'maxDequeueCount' attempts. You can indicate that you do not want messages to be automatically moved by leaving this value empty. In such a scenario, you are responsible for handling so called 'poinson' messages. /// The visibility timeout. /// The maximum dequeue count. + /// The logger. /// The system where metrics are published. [ExcludeFromCodeCoverage] - public AsyncMessagePumpWithHandlers(string connectionString, string queueName, int concurrentTasks = 25, string poisonQueueName = null, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, IMetrics metrics = null) - : this(new QueueManager(connectionString, queueName), string.IsNullOrEmpty(poisonQueueName) ? null : new QueueManager(connectionString, poisonQueueName), concurrentTasks, visibilityTimeout, maxDequeueCount, metrics) + public AsyncMessagePumpWithHandlers(string connectionString, string queueName, int concurrentTasks = 25, string poisonQueueName = null, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, ILogger logger = null, IMetrics metrics = null) + : this(new QueueManager(connectionString, queueName), string.IsNullOrEmpty(poisonQueueName) ? null : new QueueManager(connectionString, poisonQueueName), concurrentTasks, visibilityTimeout, maxDequeueCount, logger, metrics) { } @@ -94,10 +98,13 @@ public AsyncMessagePumpWithHandlers(string connectionString, string queueName, i /// The number of concurrent tasks. /// The visibility timeout. /// The maximum dequeue count. + /// The logger. /// The system where metrics are published. - public AsyncMessagePumpWithHandlers(QueueManager queueManager, QueueManager poisonQueueManager, int concurrentTasks = 25, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, IMetrics metrics = null) + public AsyncMessagePumpWithHandlers(QueueManager queueManager, QueueManager poisonQueueManager, int concurrentTasks = 25, TimeSpan? 
visibilityTimeout = null, int maxDequeueCount = 3, ILogger logger = null, IMetrics metrics = null) { - _messagePump = new AsyncMessagePump(queueManager, poisonQueueManager, concurrentTasks, visibilityTimeout, maxDequeueCount, metrics) + _logger = logger; + + _messagePump = new AsyncMessagePump(queueManager, poisonQueueManager, concurrentTasks, visibilityTimeout, maxDequeueCount, logger, metrics) { OnMessage = (message, cancellationToken) => { @@ -110,12 +117,23 @@ public AsyncMessagePumpWithHandlers(QueueManager queueManager, QueueManager pois foreach (var handlerType in handlers) { - var handler = Activator.CreateInstance(handlerType); + object handler = null; + if (handlerType.GetConstructor(new[] { typeof(ILogger) }) != null) + { + handler = Activator.CreateInstance(handlerType, new[] { (object)logger }); + } + else + { + handler = Activator.CreateInstance(handlerType); + } + var handlerMethod = handlerType.GetMethod("Handle", new[] { contentType }); handlerMethod.Invoke(handler, new[] { message.Content }); } } }; + + DiscoverMessageHandlersIfNecessary(logger); } #endregion @@ -125,43 +143,67 @@ public AsyncMessagePumpWithHandlers(QueueManager queueManager, QueueManager pois /// /// Starts the message pump. /// - public void Start() - { - _messagePump.Start(); - } - - /// - /// Stops the message pump. - /// - public void Stop() + /// The cancellation token. + /// OnMessage. + /// A representing the asynchronous operation. + public Task StartAsync(CancellationToken cancellationToken) { - _messagePump.Stop(); + return _messagePump.StartAsync(cancellationToken); } #endregion #region PRIVATE METHODS - private static IDictionary GetMessageHandlers() + private static void DiscoverMessageHandlersIfNecessary(ILogger logger) { - _logger.Trace("Discovering message handlers."); + try + { + _lock.EnterUpgradeableReadLock(); + + if (_messageHandlers == null) + { + try + { + _lock.EnterWriteLock(); + + if (_messageHandlers == null) + { + _messageHandlers = GetMessageHandlers(null); + } + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + } + finally + { + if (_lock.IsUpgradeableReadLockHeld) _lock.ExitUpgradeableReadLock(); + } + } + + private static IDictionary GetMessageHandlers(ILogger logger) + { + logger?.LogTrace("Discovering message handlers."); var assemblies = GetLocalAssemblies(); var assembliesCount = assemblies.Length; - if (assembliesCount == 0) _logger.Trace($"Did not find any local assembly."); - else if (assembliesCount == 1) _logger.Trace("Found 1 local assembly."); - else _logger.Trace($"Found {assemblies.Count()} local assemblies."); + if (assembliesCount == 0) logger?.LogTrace($"Did not find any local assembly."); + else if (assembliesCount == 1) logger?.LogTrace("Found 1 local assembly."); + else logger?.LogTrace($"Found {assemblies.Count()} local assemblies."); var typesWithMessageHandlerInterfaces = assemblies .SelectMany(x => x.GetTypes()) - .Where(t => !GetTypeInfo(t).IsInterface) + .Where(t => !t.GetTypeInfo().IsInterface) .Select(type => new { Type = type, MessageTypes = type .GetInterfaces() - .Where(i => GetTypeInfo(i).IsGenericType) + .Where(i => i.GetTypeInfo().IsGenericType) .Where(i => i.GetGenericTypeDefinition() == typeof(IMessageHandler<>)) .SelectMany(i => i.GetGenericArguments()) }) @@ -169,9 +211,9 @@ private static IDictionary GetMessageHandlers() .ToArray(); var classesCount = typesWithMessageHandlerInterfaces.Length; - if (classesCount == 0) _logger.Trace($"Did not find any class implementing the 'IMessageHandler' interface."); - 
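For context, here is a minimal sketch of a handler that the discovery logic above would pick up, together with the new start-up pattern (Start/Stop has been replaced by StartAsync plus a caller-supplied CancellationToken). The OrderPlaced type and the connection string are hypothetical, and the sketch assumes IMessageHandler<T> exposes a single void Handle(T) method, matching the reflection call above:

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Picton.Messaging;
using Picton.Messaging.Messages;

// Hypothetical message type, for illustration only.
public class OrderPlaced
{
    public string OrderId { get; set; }
}

// The discovery code scans local assemblies for classes implementing IMessageHandler<T>.
// Handlers are created via Activator.CreateInstance, so a handler needs either a
// parameterless constructor or (new in this change) a constructor accepting an ILogger.
public class OrderPlacedHandler : IMessageHandler<OrderPlaced>
{
    private readonly ILogger _logger;

    public OrderPlacedHandler(ILogger logger)
    {
        _logger = logger;
    }

    public void Handle(OrderPlaced message)
    {
        _logger?.LogInformation("Received order {OrderId}", message.OrderId);
    }
}

public static class Program
{
    public static async Task Main()
    {
        // The pump now runs until the supplied token is canceled;
        // call cts.Cancel() from elsewhere (e.g. a shutdown hook) to stop it.
        var cts = new CancellationTokenSource();
        var pump = new AsyncMessagePumpWithHandlers("UseDevelopmentStorage=true", "myqueue", concurrentTasks: 10);
        await pump.StartAsync(cts.Token);
    }
}
```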
else if (classesCount == 1) _logger.Trace("Found 1 class implementing the 'IMessageHandler' interface."); - else _logger.Trace($"Found {typesWithMessageHandlerInterfaces.Count()} classes implementing the 'IMessageHandler' interface."); + if (classesCount == 0) logger?.LogTrace("Did not find any class implementing the 'IMessageHandler' interface."); + else if (classesCount == 1) logger?.LogTrace("Found 1 class implementing the 'IMessageHandler' interface."); + else logger?.LogTrace($"Found {typesWithMessageHandlerInterfaces.Count()} classes implementing the 'IMessageHandler' interface."); var oneTypePerMessageHandler = typesWithMessageHandlerInterfaces .SelectMany(t => t.MessageTypes, (t, messageType) => @@ -190,27 +232,6 @@ private static IDictionary GetMessageHandlers() return messageHandlers; } -#if NETFULL - private static Type GetTypeInfo(Type type) - { - return type; - } - - private static Assembly[] GetLocalAssemblies() - { - var callingAssembly = Assembly.GetCallingAssembly(); - var path = new Uri(System.IO.Path.GetDirectoryName(callingAssembly.Location)).AbsolutePath; - - return AppDomain.CurrentDomain.GetAssemblies() - .Where(x => !x.IsDynamic && new Uri(x.CodeBase).AbsolutePath.Contains(path)) - .ToArray(); - } -#else - private static TypeInfo GetTypeInfo(Type type) - { - return type.GetTypeInfo(); - } - private static Assembly[] GetLocalAssemblies() { var dependencies = DependencyContext.Default.RuntimeLibraries; @@ -236,7 +257,6 @@ private static bool IsCandidateLibrary(RuntimeLibrary library) !string.Equals(library.Type, "package", StringComparison.OrdinalIgnoreCase) && !string.Equals(library.Type, "referenceassembly", StringComparison.OrdinalIgnoreCase); } -#endif #endregion } diff --git a/Source/Picton.Messaging/AsyncMultiTenantMessagePump.cs b/Source/Picton.Messaging/AsyncMultiTenantMessagePump.cs new file mode 100644 index 0000000..e4db98d --- /dev/null +++ b/Source/Picton.Messaging/AsyncMultiTenantMessagePump.cs @@ -0,0 +1,483 @@ +using App.Metrics; +using Azure; +using Azure.Storage.Blobs; +using Azure.Storage.Queues; +using Azure.Storage.Queues.Models; +using Microsoft.Extensions.Logging; +using Picton.Interfaces; +using Picton.Managers; +using Picton.Messaging.Utilities; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; + +namespace Picton.Messaging +{ + /// + /// High performance message processor (also known as a message "pump") for Azure storage queues. + /// Designed to monitor multiple Azure storage queues that follow the following naming convention: + /// a common prefix followed by a unique tenant identifier. + /// + public class AsyncMultiTenantMessagePump + { + #region FIELDS + + private readonly Func _queueManagerFactory; + + private readonly ConcurrentDictionary> _tenantQueueManagers = new ConcurrentDictionary>(); + private readonly RoundRobinList _tenantIds = new RoundRobinList(Array.Empty()); + + private readonly IQueueManager _poisonQueueManager; + private readonly string _connectionString; + private readonly string _queueNamePrefix; + private readonly int _concurrentTasks; + private readonly TimeSpan? _visibilityTimeout; + private readonly int _maxDequeueCount; + private readonly ILogger _logger; + private readonly IMetrics _metrics; + + #endregion + + #region PROPERTIES + + /// + /// Gets or sets the logic to execute when a message is retrieved from the queue. 
+ /// + /// + /// If exception is thrown when calling OnMessage, it will regard this queue message as failed. + /// + public Action OnMessage { get; set; } + + /// + /// Gets or sets the logic to execute when an error occurs. + /// + /// + /// + /// OnError = (message, exception, isPoison) => Trace.TraceError("An error occured: {0}", exception); + /// + /// + /// + /// When isPoison is set to true, you should copy this message to a poison queue because it will be deleted from the original queue. + /// + public Action OnError { get; set; } + + /// + /// Gets or sets the logic to execute when all tenant queues are empty. + /// + /// + /// + /// OnEmpty = cancellationToken => Task.Delay(2500, cancellationToken).Wait(); + /// + /// + /// + /// If this property is not set, the default logic is to do nothing. + /// + public Action OnEmpty { get; set; } + + #endregion + + #region CONSTRUCTOR + + /// + /// Initializes a new instance of the class. + /// + /// + /// A connection string includes the authentication information required for your application to access data in an Azure Storage account at runtime. + /// For more information, https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string. + /// + /// Queues name prefix. + /// The number of concurrent tasks. + /// Name of the queue where messages are automatically moved to when they fail to be processed after 'maxDequeueCount' attempts. You can indicate that you do not want messages to be automatically moved by leaving this value empty. In such a scenario, you are responsible for handling so called 'poison' messages. + /// The visibility timeout. + /// The maximum dequeue count. + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request to the queue. + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request to the blob storage. + /// + /// The logger. + /// The system where metrics are published. + public AsyncMultiTenantMessagePump(string connectionString, string queueNamePrefix, int concurrentTasks = 25, string poisonQueueName = null, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, QueueClientOptions queueClientOptions = null, BlobClientOptions blobClientOptions = null, ILogger logger = null, IMetrics metrics = null) + { + if (concurrentTasks < 1) throw new ArgumentOutOfRangeException("Number of concurrent tasks must be greather than zero", nameof(concurrentTasks)); + if (maxDequeueCount < 1) throw new ArgumentOutOfRangeException("Number of retries must be greather than zero", nameof(maxDequeueCount)); + + _queueManagerFactory = (tenantId) => + { + var blobContainerClient = new BlobContainerClient(connectionString, $"{queueNamePrefix}{tenantId}-oversized-messages", blobClientOptions); + var queueClient = new QueueClient(connectionString, $"{queueNamePrefix}{tenantId}", queueClientOptions); + return new QueueManager(blobContainerClient, queueClient, true); + }; + + _connectionString = connectionString ?? throw new ArgumentNullException(connectionString); + _queueNamePrefix = queueNamePrefix ?? throw new ArgumentNullException(queueNamePrefix); + _concurrentTasks = concurrentTasks; + _poisonQueueManager = string.IsNullOrEmpty(poisonQueueName) ? 
null : new QueueManager(connectionString, poisonQueueName); + _visibilityTimeout = visibilityTimeout; + _maxDequeueCount = maxDequeueCount; + _logger = logger; + _metrics = metrics ?? TurnOffMetrics(); + + InitDefaultActions(); + } + + #endregion + + #region PUBLIC METHODS + + /// + /// Starts the message pump. + /// + /// The cancellation token. + /// OnMessage. + /// A representing the asynchronous operation. + public async Task StartAsync(CancellationToken cancellationToken) + { + if (OnMessage == null) throw new ArgumentNullException(nameof(OnMessage)); + + _logger?.LogTrace("AsyncMultiTenantMessagePump starting message pump..."); + await ProcessMessagesAsync(_visibilityTimeout, cancellationToken).ConfigureAwait(false); + _logger?.LogTrace("AsyncMultiTenantMessagePump stopping message pump..."); + } + + #endregion + + #region PRIVATE METHODS + + private void InitDefaultActions() + { + OnError = (tenantId, message, exception, isPoison) => _logger?.LogError(exception, "An error occured when processing a message for tenant {tenantId}", tenantId); + } + + private IMetrics TurnOffMetrics() + { + var metricsTurnedOff = new MetricsBuilder(); + metricsTurnedOff.Configuration.Configure(new MetricsOptions() + { + Enabled = false, + ReportingEnabled = false + }); + return metricsTurnedOff.Build(); + } + + private async Task ProcessMessagesAsync(TimeSpan? visibilityTimeout, CancellationToken cancellationToken) + { + var runningTasks = new ConcurrentDictionary(); + var semaphore = new SemaphoreSlim(_concurrentTasks, _concurrentTasks); + var queuedMessages = new ConcurrentQueue<(string TenantId, CloudMessage Message)>(); + + // Define the task that discovers queues that follow the naming convention + RecurrentCancellableTask.StartNew( + async () => + { + try + { + var queueServiceClient = new QueueServiceClient(_connectionString); + var response = queueServiceClient.GetQueuesAsync(QueueTraits.None, _queueNamePrefix, cancellationToken); + await foreach (Page queues in response.AsPages()) + { + foreach (var queue in queues.Values) + { + if (!queue.Name.Equals(_queueNamePrefix, StringComparison.OrdinalIgnoreCase)) + { + _tenantIds.Add(queue.Name.TrimStart(_queueNamePrefix)); + } + } + } + + // Randomize where we start + if (_tenantIds.Current == null) + { + var randomIndex = RandomGenerator.Instance.GetInt32(0, _tenantIds.Count); + _tenantIds.ResetTo(randomIndex); + } + } + catch (Exception e) when (e is TaskCanceledException || e is OperationCanceledException) + { + // The message pump is shutting down. + // This exception can be safely ignored. + } + catch (Exception e) + { + _logger?.LogError(e.GetBaseException(), "An error occured while fetching the Azure queues that match the naming convention. The error was caught and ignored."); + } + }, + TimeSpan.FromMilliseconds(15000), + cancellationToken, + TaskCreationOptions.LongRunning); + + // Brief pause to ensure the task defined above runs at least once before we start processing messages + await Task.Delay(500, cancellationToken).ConfigureAwait(false); + + // Define the task that fetches messages from the Azure queue + RecurrentCancellableTask.StartNew( + async () => + { + // Fetch messages from Azure when the number of items in the concurrent queue falls below an "acceptable" level. 
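Taken together, the constructor and the queue-discovery task above mean the pump only needs a connection string and a queue-name prefix: every queue whose name starts with the prefix (except a queue named exactly the prefix) is treated as a tenant queue, and the remainder of the name becomes the tenant identifier. A minimal usage sketch follows; the connection string and prefix are hypothetical, and the OnMessage/OnError lambda shapes are inferred from how those delegates are invoked in this class:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Picton.Messaging;

public static class Program
{
    public static async Task Main()
    {
        // With the prefix "myapp-", queues such as "myapp-tenant1" and "myapp-tenant2" are
        // discovered by the recurring task above, and "tenant1" / "tenant2" become the tenantId.
        var cts = new CancellationTokenSource();
        var pump = new AsyncMultiTenantMessagePump("UseDevelopmentStorage=true", "myapp-", concurrentTasks: 10)
        {
            OnMessage = (tenantId, message, cancellationToken) =>
            {
                Console.WriteLine($"Tenant {tenantId}: {message.Content}");
            },
            OnError = (tenantId, message, exception, isPoison) =>
            {
                // When isPoison is true the message is about to be deleted from the tenant queue
                // (and copied to the poison queue, if one was configured).
                Console.WriteLine($"Tenant {tenantId} failed: {exception.GetBaseException().Message}");
            }
        };

        // Runs until the token is canceled.
        await pump.StartAsync(cts.Token);
    }
}
```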
+ if (!cancellationToken.IsCancellationRequested && queuedMessages.Count <= _concurrentTasks / 2) + { + await foreach (var message in FetchMessages(visibilityTimeout, cancellationToken)) + { + queuedMessages.Enqueue(message); + } + } + }, + TimeSpan.FromMilliseconds(500), + cancellationToken, + TaskCreationOptions.LongRunning); + + // Define the task that checks how many messages are queued in Azure + RecurrentCancellableTask.StartNew( + async () => + { + var count = 0; + foreach (var kvp in _tenantQueueManagers) + { + var tenantId = kvp.Key; + (var queueManager, var lastFetched, var fetchDelay) = kvp.Value.Value; + + try + { + var properties = await queueManager.GetPropertiesAsync(cancellationToken).ConfigureAwait(false); + + count += properties.ApproximateMessagesCount; + } + catch (Exception e) when (e is TaskCanceledException || e is OperationCanceledException) + { + // The message pump is shutting down. + // This exception can be safely ignored. + } + catch (RequestFailedException rfe) when (rfe.ErrorCode == "QueueNotFound") + { + // The queue has been deleted + _tenantIds.Remove(tenantId); + _tenantQueueManagers.TryRemove(tenantId, out _); + } + catch (Exception e) + { + _logger?.LogError(e.GetBaseException(), "An error occured while checking how many message are waiting in Azure. The error was caught and ignored."); + } + } + + _metrics.Measure.Gauge.SetValue(Metrics.QueuedCloudMessagesGauge, count); + }, + TimeSpan.FromMilliseconds(5000), + cancellationToken, + TaskCreationOptions.LongRunning); + + // Define the task that checks how many messages are queued in memory + RecurrentCancellableTask.StartNew( + () => + { + try + { + _metrics.Measure.Gauge.SetValue(Metrics.QueuedMemoryMessagesGauge, queuedMessages.Count); + } + catch (Exception e) + { + _logger?.LogError(e.GetBaseException(), "An error occured while checking how many message are waiting in the memory queue. 
The error was caught and ignored."); + } + + return Task.CompletedTask; + }, + TimeSpan.FromMilliseconds(5000), + cancellationToken, + TaskCreationOptions.LongRunning); + + // Define the task pump + var pumpTask = Task.Run(async () => + { + while (!cancellationToken.IsCancellationRequested) + { + await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false); + + // Retrieved the next message from the queue and process it + var runningTask = Task.Run( + async () => + { + var messageProcessed = false; + + if (cancellationToken.IsCancellationRequested) return messageProcessed; + + using (_metrics.Measure.Timer.Time(Metrics.MessageProcessingTimer)) + { + if (queuedMessages.TryDequeue(out (string TenantId, CloudMessage Message) result)) + { + var tenantInfo = GetTenantInfo(result.TenantId); + + try + { + // Process the message + OnMessage?.Invoke(result.TenantId, result.Message, cancellationToken); + + // Delete the processed message from the queue + // PLEASE NOTE: we use "CancellationToken.None" to ensure a processed message is deleted from the queue even when the message pump is shutting down + await tenantInfo.QueueManager.DeleteMessageAsync(result.Message, CancellationToken.None).ConfigureAwait(false); + } + catch (Exception ex) + { + var isPoison = result.Message.DequeueCount > _maxDequeueCount; + OnError?.Invoke(result.TenantId, result.Message, ex, isPoison); + if (isPoison) + { + // PLEASE NOTE: we use "CancellationToken.None" to ensure a processed message is deleted from the queue and moved to poison queue even when the message pump is shutting down + if (_poisonQueueManager != null) + { + result.Message.Metadata["PoisonExceptionMessage"] = ex.GetBaseException().Message; + result.Message.Metadata["PoisonExceptionDetails"] = ex.GetBaseException().ToString(); + + await _poisonQueueManager.AddMessageAsync(result.Message.Content, result.Message.Metadata, null, null, CancellationToken.None).ConfigureAwait(false); + } + + await tenantInfo.QueueManager.DeleteMessageAsync(result.Message, CancellationToken.None).ConfigureAwait(false); + } + } + + messageProcessed = true; + } + } + + // Increment the counter if we processed a message + if (messageProcessed) _metrics.Measure.Counter.Increment(Metrics.MessagesProcessedCounter); + + // Return a value indicating whether we processed a message or not + return messageProcessed; + }, + CancellationToken.None); + + // Add the task to the dictionary of tasks (allows us to keep track of the running tasks) + runningTasks.TryAdd(runningTask, runningTask); + + // Complete the task + runningTask.ContinueWith( + t => + { + semaphore.Release(); + runningTasks.TryRemove(t, out Task taskToBeRemoved); + }, TaskContinuationOptions.ExecuteSynchronously) + .IgnoreAwait(); + } + }); + + // Run the task pump until canceled + await pumpTask.UntilCancelled().ConfigureAwait(false); + + // Task pump has been canceled, wait for the currently running tasks to complete + await Task.WhenAll(runningTasks.Values).UntilCancelled().ConfigureAwait(false); + } + + private async IAsyncEnumerable<(string TenantId, CloudMessage Message)> FetchMessages(TimeSpan? 
visibilityTimeout, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var messageCount = 0; + var originalTenant = _tenantIds.Current; + + using (_metrics.Measure.Timer.Time(Metrics.MessagesFetchingTimer)) + { + do + { + var tenantId = _tenantIds.MoveToNextItem(); + var tenantInfo = GetTenantInfo(tenantId); + + if (!cancellationToken.IsCancellationRequested && tenantInfo.LastFetched.Add(tenantInfo.FetchDelay) < DateTime.UtcNow) + { + IEnumerable messages = null; + + try + { + messages = await tenantInfo.QueueManager.GetMessagesAsync(_concurrentTasks, visibilityTimeout, cancellationToken).ConfigureAwait(false); + } + catch (Exception e) when (e is TaskCanceledException || e is OperationCanceledException) + { + // The message pump is shutting down. + // This exception can be safely ignored. + } + catch (RequestFailedException rfe) when (rfe.ErrorCode == "QueueNotFound") + { + // The queue has been deleted + _tenantIds.Remove(tenantId); + _tenantQueueManagers.TryRemove(tenantId, out _); + } + catch (Exception e) + { + _logger?.LogError(e.GetBaseException(), "An error occured while fetching messages for tenant {tenantId}. The error was caught and ignored.", tenantId); + } + + if (messages != null && messages.Any()) + { + var messagesCount = messages.Count(); + _logger?.LogTrace("Fetched {messagesCount} message(s) for tenant {tenantId}.", messagesCount, tenantId); + + foreach (var message in messages) + { + Interlocked.Increment(ref messageCount); + yield return (tenantId, message); + } + + // Reset the Fetch delay to zero to indicate that we can fetch more messages from this queue as soon as possible + _tenantQueueManagers[tenantId] = new Lazy<(QueueManager, DateTime, TimeSpan)>(() => (tenantInfo.QueueManager, DateTime.UtcNow, TimeSpan.Zero)); + } + else + { + _logger?.LogTrace("There are no messages for tenant {tenantId}.", tenantId); + _metrics.Measure.Counter.Increment(Metrics.QueueEmptyCounter); + + // Set a "resonable" fetch delay to ensure we don't query an empty queue too often + var delay = tenantInfo.FetchDelay.Add(TimeSpan.FromSeconds(5)); + if (delay.TotalSeconds > 15) delay = TimeSpan.FromSeconds(15); + + _tenantQueueManagers[tenantId] = new Lazy<(QueueManager, DateTime, TimeSpan)>(() => (tenantInfo.QueueManager, DateTime.UtcNow, delay)); + } + } + } + + // Stop when we either retrieved the desired number of messages OR we have looped through all the known tenants + while (messageCount < (_concurrentTasks * 2) && (string.IsNullOrEmpty(originalTenant) || originalTenant != _tenantIds.Current)); + } + + if (messageCount == 0) + { + _logger?.LogTrace("All tenant queues are empty, no messages fetched."); + try + { + // All queues are empty + _metrics.Measure.Counter.Increment(Metrics.QueueEmptyCounter); + OnEmpty?.Invoke(cancellationToken); + } + catch (Exception e) when (e is TaskCanceledException || e is OperationCanceledException) + { + // The message pump is shutting down. + // This exception can be safely ignored. + } + catch (Exception e) + { + _logger?.LogError(e.GetBaseException(), "An error occured when handling empty queues. 
The error was caught and ignored."); + } + } + } + + private (QueueManager QueueManager, DateTime LastFetched, TimeSpan FetchDelay) GetTenantInfo(string tenantId) + { + var lazyQueueManager = _tenantQueueManagers.GetOrAdd(tenantId, tenantId => + { + return new Lazy<(QueueManager, DateTime, TimeSpan)>(() => + { + _tenantIds.Add(tenantId); + return (_queueManagerFactory.Invoke(tenantId), DateTime.MinValue, TimeSpan.Zero); + }); + }); + + return lazyQueueManager.Value; + } + + #endregion + } +} diff --git a/Source/Picton.Messaging/AsyncMultiTenantMessagePumpWithHandlers.cs b/Source/Picton.Messaging/AsyncMultiTenantMessagePumpWithHandlers.cs new file mode 100644 index 0000000..41c38ac --- /dev/null +++ b/Source/Picton.Messaging/AsyncMultiTenantMessagePumpWithHandlers.cs @@ -0,0 +1,239 @@ +using App.Metrics; +using Azure.Storage.Blobs; +using Azure.Storage.Queues; +using Microsoft.Extensions.DependencyModel; +using Microsoft.Extensions.Logging; +using Picton.Messaging.Messages; +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; + +namespace Picton.Messaging +{ + /// + /// High performance message processor (also known as a message "pump") for Azure storage queues. + /// Designed to monitor an Azure storage queue and process the message as quickly and efficiently as possible. + /// + public class AsyncMultiTenantMessagePumpWithHandlers + { + #region FIELDS + + private static readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim(); + + private static IDictionary _messageHandlers; + + private readonly ILogger _logger; + private readonly AsyncMultiTenantMessagePump _messagePump; + + #endregion + + #region PROPERTIES + + /// + /// Gets or sets the logic to execute when an error occurs. + /// + /// + /// + /// OnError = (tenantId, message, exception, isPoison) => Trace.TraceError("An error occured: {0}", exception); + /// + /// + /// + /// When isPoison is set to true, you should copy this message to a poison queue because it will be deleted from the original queue. + /// + public Action OnError + { + get { return _messagePump.OnError; } + set { _messagePump.OnError = value; } + } + + #endregion + + #region CONSTRUCTOR + + /// + /// Initializes a new instance of the class. + /// + /// + /// A connection string includes the authentication information required for your application to access data in an Azure Storage account at runtime. + /// For more information, https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string. + /// + /// Queues name prefix. + /// The number of concurrent tasks. + /// Name of the queue where messages are automatically moved to when they fail to be processed after 'maxDequeueCount' attempts. You can indicate that you do not want messages to be automatically moved by leaving this value empty. In such a scenario, you are responsible for handling so called 'poinson' messages. + /// The visibility timeout. + /// The maximum dequeue count. + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request to the queue. + /// + /// + /// Optional client options that define the transport pipeline + /// policies for authentication, retries, etc., that are applied to + /// every request to the blob storage. + /// + /// The logger. + /// The system where metrics are published. 
+ [ExcludeFromCodeCoverage] + public AsyncMultiTenantMessagePumpWithHandlers(string connectionString, string queueNamePrefix, int concurrentTasks = 25, string poisonQueueName = null, TimeSpan? visibilityTimeout = null, int maxDequeueCount = 3, QueueClientOptions queueClientOptions = null, BlobClientOptions blobClientOptions = null, ILogger logger = null, IMetrics metrics = null) + { + _logger = logger; + + _messagePump = new AsyncMultiTenantMessagePump(connectionString, queueNamePrefix, concurrentTasks, poisonQueueName, visibilityTimeout, maxDequeueCount, queueClientOptions, blobClientOptions, logger, metrics) + { + OnMessage = (tenantId, message, cancellationToken) => + { + var contentType = message.Content.GetType(); + + if (!_messageHandlers.TryGetValue(contentType, out Type[] handlers)) + { + throw new Exception($"Received a message of type {contentType.FullName} but could not find a class implementing IMessageHandler<{contentType.FullName}>"); + } + + foreach (var handlerType in handlers) + { + object handler = null; + if (handlerType.GetConstructor(new[] { typeof(ILogger) }) != null) + { + handler = Activator.CreateInstance(handlerType, new[] { (object)logger }); + } + else + { + handler = Activator.CreateInstance(handlerType); + } + + var handlerMethod = handlerType.GetMethod("Handle", new[] { contentType }); + handlerMethod.Invoke(handler, new[] { message.Content }); + } + } + }; + + DiscoverMessageHandlersIfNecessary(logger); + } + + #endregion + + #region PUBLIC METHODS + + /// + /// Starts the message pump. + /// + /// The cancellation token. + /// OnMessage. + /// A representing the asynchronous operation. + public Task StartAsync(CancellationToken cancellationToken) + { + return _messagePump.StartAsync(cancellationToken); + } + + #endregion + + #region PRIVATE METHODS + + private static void DiscoverMessageHandlersIfNecessary(ILogger logger) + { + try + { + _lock.EnterUpgradeableReadLock(); + + if (_messageHandlers == null) + { + try + { + _lock.EnterWriteLock(); + + _messageHandlers ??= GetMessageHandlers(null); + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + } + finally + { + if (_lock.IsUpgradeableReadLockHeld) _lock.ExitUpgradeableReadLock(); + } + } + + private static IDictionary GetMessageHandlers(ILogger logger) + { + logger?.LogTrace("Discovering message handlers."); + + var assemblies = GetLocalAssemblies(); + + var assembliesCount = assemblies.Length; + if (assembliesCount == 0) logger?.LogTrace($"Did not find any local assembly."); + else if (assembliesCount == 1) logger?.LogTrace("Found 1 local assembly."); + else logger?.LogTrace($"Found {assemblies.Length} local assemblies."); + + var typesWithMessageHandlerInterfaces = assemblies + .SelectMany(x => x.GetTypes()) + .Where(t => !t.GetTypeInfo().IsInterface) + .Select(type => new + { + Type = type, + MessageTypes = type + .GetInterfaces() + .Where(i => i.GetTypeInfo().IsGenericType) + .Where(i => i.GetGenericTypeDefinition() == typeof(IMessageHandler<>)) + .SelectMany(i => i.GetGenericArguments()) + }) + .Where(t => t.MessageTypes != null && t.MessageTypes.Any()) + .ToArray(); + + var classesCount = typesWithMessageHandlerInterfaces.Length; + if (classesCount == 0) logger?.LogTrace("Did not find any class implementing the 'IMessageHandler' interface."); + else if (classesCount == 1) logger?.LogTrace("Found 1 class implementing the 'IMessageHandler' interface."); + else logger?.LogTrace($"Found {typesWithMessageHandlerInterfaces.Length} classes implementing the 'IMessageHandler' 
interface."); + + var oneTypePerMessageHandler = typesWithMessageHandlerInterfaces + .SelectMany(t => t.MessageTypes, (t, messageType) => + new + { + t.Type, + MessageType = messageType + }) + .ToArray(); + + var messageHandlers = oneTypePerMessageHandler + .GroupBy(h => h.MessageType) + .ToDictionary(group => group.Key, group => group.Select(t => t.Type) + .ToArray()); + + return messageHandlers; + } + + private static Assembly[] GetLocalAssemblies() + { + var dependencies = DependencyContext.Default.RuntimeLibraries; + + var assemblies = new List(); + foreach (var library in dependencies) + { + if (IsCandidateLibrary(library)) + { + var assembly = Assembly.Load(new AssemblyName(library.Name)); + assemblies.Add(assembly); + } + } + + return assemblies.ToArray(); + } + + private static bool IsCandidateLibrary(RuntimeLibrary library) + { + return !library.Name.StartsWith("Microsoft.", StringComparison.OrdinalIgnoreCase) && + !library.Name.StartsWith("System.", StringComparison.OrdinalIgnoreCase) && + !library.Name.StartsWith("NetStandard.", StringComparison.OrdinalIgnoreCase) && + !string.Equals(library.Type, "package", StringComparison.OrdinalIgnoreCase) && + !string.Equals(library.Type, "referenceassembly", StringComparison.OrdinalIgnoreCase); + } + + #endregion + } +} diff --git a/Source/Picton.Messaging/Picton.Messaging.csproj b/Source/Picton.Messaging/Picton.Messaging.csproj index fd60be9..b3b89ee 100644 --- a/Source/Picton.Messaging/Picton.Messaging.csproj +++ b/Source/Picton.Messaging/Picton.Messaging.csproj @@ -1,7 +1,8 @@ - net461;net472;netstandard2.0 + net48;netstandard2.1;net7.0 + preview anycpu true Library @@ -13,6 +14,8 @@ true true snupkg + true + README.md @@ -26,24 +29,22 @@ MIT https://github.com/Jericho/Picton.Messaging https://github.com/identicons/jericho.png + PackageIcon.png false Picton Picton.Messaging Azure messaging - - - all - runtime; build; native; contentfiles; analyzers - - - - - + + + + + + - + @@ -52,17 +53,10 @@ - - $(DefineConstants);NETFULL - - - - $(DefineConstants);NETSTANDARD - - - - true - + + + + diff --git a/Source/Picton.Messaging/Utilities/RoundRobinList.cs b/Source/Picton.Messaging/Utilities/RoundRobinList.cs new file mode 100644 index 0000000..702f47d --- /dev/null +++ b/Source/Picton.Messaging/Utilities/RoundRobinList.cs @@ -0,0 +1,160 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; + +namespace Picton.Messaging.Utilities +{ + internal class RoundRobinList + { + private static readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim(); + private readonly LinkedList _linkedList; + private LinkedListNode _current; + + /// + /// Initializes a new instance of the class. + /// + /// The items. + public RoundRobinList(IEnumerable list) + { + _linkedList = new LinkedList(list); + } + + public T Current + { + get + { + return _current == default ? default : _current.Value; + } + } + + public int Count => _linkedList.Count; + + /// + /// Reset the Round Robin to point to the first item. + /// + public void Reset() + { + try + { + _lock.EnterWriteLock(); + _current = _linkedList.First; + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + + /// + /// Reset the Round Robin to point to the specified item. + /// + public void ResetTo(T item) + { + try + { + _lock.EnterWriteLock(); + _current = _linkedList.Find(item); + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + + /// + /// Reset the Round Robin to point to the item at the specified index. 
+ /// + public void ResetTo(int index) + { + try + { + _lock.EnterWriteLock(); + _current = _linkedList.Find(_linkedList.ElementAt(index)); + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + + /// + /// Move to the next item in the list. + /// + /// The item. + public T MoveToNextItem() + { + try + { + _lock.EnterUpgradeableReadLock(); + + if (_linkedList.Count == 0) throw new InvalidOperationException("List is empty."); + + try + { + _lock.EnterWriteLock(); + + if (_linkedList.Count == 0) throw new InvalidOperationException("List is empty."); + _current = _current == null ? _linkedList.First : _current.Next ?? _current.List.First; + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + finally + { + if (_lock.IsUpgradeableReadLockHeld) _lock.ExitUpgradeableReadLock(); + } + + return _current.Value; + } + + /// + /// Remove an item from the list. + /// + /// The item. + public bool Remove(T item) + { + try + { + _lock.EnterWriteLock(); + + return _linkedList.Remove(item); + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + + public void Add(T item) + { + try + { + _lock.EnterUpgradeableReadLock(); + + if (!_linkedList.Contains(item)) + { + try + { + _lock.EnterWriteLock(); + + if (!_linkedList.Contains(item)) + { + _linkedList.AddLast(item); + } + } + finally + { + if (_lock.IsWriteLockHeld) _lock.ExitWriteLock(); + } + } + } + finally + { + if (_lock.IsUpgradeableReadLockHeld) _lock.ExitUpgradeableReadLock(); + } + } + } +} diff --git a/appveyor.psm1 b/appveyor.psm1 new file mode 100644 index 0000000..40eb9f0 --- /dev/null +++ b/appveyor.psm1 @@ -0,0 +1,67 @@ +# Inspired by: https://github.com/PowerShell/PSScriptAnalyzer/blob/master/tools/appveyor.psm1 + +$ErrorActionPreference = 'Stop' + +# Implements the AppVeyor 'install' step and installs the desired .NET SDK if not already installed. 
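For reference, the RoundRobinList shown above is what FetchMessages uses to rotate through tenant queues; note that its ReaderWriterLockSlim field is static and therefore shared by every instance of the class. A behavior sketch (the class is internal to Picton.Messaging, so this assumes a caller inside the assembly, such as a unit test):

```csharp
using Picton.Messaging.Utilities;

// Behavior sketch of the round-robin rotation used to cycle through tenant queues.
var tenants = new RoundRobinList<string>(new[] { "tenant1", "tenant2", "tenant3" });

tenants.MoveToNextItem();   // returns "tenant1" (starts at the first item)
tenants.MoveToNextItem();   // returns "tenant2"
tenants.MoveToNextItem();   // returns "tenant3"
tenants.MoveToNextItem();   // returns "tenant1" again, wrapping around to the start

tenants.Add("tenant4");     // added only if not already present
tenants.Remove("tenant2");  // returns true when the item was found and removed
// MoveToNextItem() throws InvalidOperationException if the list is empty.
```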
+function Invoke-AppVeyorInstall { + + Write-Verbose -Verbose "Determining the desired version of .NET SDK" + $globalDotJson = Get-Content (Join-Path $PSScriptRoot 'global.json') -Raw | ConvertFrom-Json + $desiredDotNetCoreSDKVersion = $globalDotJson.sdk.version + Write-Verbose -Verbose "We have determined that the desired version of the .NET SDK is $desiredDotNetCoreSDKVersion" + + Write-Verbose -Verbose "Checking availability of .NET SDK $desiredDotNetCoreSDKVersion" + $desiredDotNetCoreSDKVersionPresent = (dotnet --list-sdks) -match $desiredDotNetCoreSDKVersion + + if (-not $desiredDotNetCoreSDKVersionPresent) { + Write-Verbose -Verbose "We have determined that the desired version of the .NET SDK is not present on this machine" + Write-Verbose -Verbose "Installing .NET SDK $desiredDotNetCoreSDKVersion" + $originalSecurityProtocol = [Net.ServicePointManager]::SecurityProtocol + try { + [Net.ServicePointManager]::SecurityProtocol = [Net.ServicePointManager]::SecurityProtocol -bor [Net.SecurityProtocolType]::Tls12 + if ($IsLinux -or $isMacOS) { + Invoke-WebRequest 'https://dot.net/v1/dotnet-install.sh' -OutFile dotnet-install.sh + + # Normally we would execute dotnet-install.sh like so: + # bash dotnet-install.sh --version $desiredDotNetCoreSDKVersion + # + # and we would also update the PATH environment variable like so: + # $OLDPATH = [System.Environment]::GetEnvironmentVariable("PATH") + # $NEWPATH = "/home/appveyor/.dotnet$([System.IO.Path]::PathSeparator)$OLDPATH" + # [Environment]::SetEnvironmentVariable("PATH", "$NEWPATH") + # + # This is supposed to result in the desired .NET SDK to be installed side-by-side + # with the other version(s) of the SDK already installed. However, my experience + # on Ubuntu images in Appveyor has been that the recently installed SDK is the only + # one detected and the previous versions are no longer detected as being installed. + # + # This whole thing is problematic because GitVersion.Tool 5.7 is not compatible with + # .NET 6 (in fact, it doesn't even install) and you must have .NET 5 installed side-by-side + # with .NET 6 in order to install and use GitVersion.Tool + # + # I spent a whole day trying to find a solution but ultimately the only reliable solution + # I was able to come up with is to install in the default location (which is /usr/share/dotnet) + # using 'sudo' because you need admin privileges to access the default install location. + # + # November 2022: I tried removing this workaround since GetVersion.Tool was updated more + # than 2 years ago but it led to another problem: https://ci.appveyor.com/project/Jericho/zoomnet/builds/48579496/job/pymt60j9b53ayxta#L78 + # + # Therefore this workaround seems like a permanent solution. 
+ + sudo bash dotnet-install.sh --version $desiredDotNetCoreSDKVersion --install-dir /usr/share/dotnet + } + else { + Invoke-WebRequest 'https://dot.net/v1/dotnet-install.ps1' -OutFile dotnet-install.ps1 + .\dotnet-install.ps1 -Version $desiredDotNetCoreSDKVersion + } + } + finally { + [Net.ServicePointManager]::SecurityProtocol = $originalSecurityProtocol + Remove-Item .\dotnet-install.* + } + Write-Verbose -Verbose "Installed .NET SDK $desiredDotNetCoreSDKVersion" + } + else { + Write-Verbose -Verbose "We have determined that the desired version of the .NET SDK is already installed on this machine" + } +} diff --git a/appveyor.yml b/appveyor.yml index 3950b47..93c0eef 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,27 +1,48 @@ -# Before Build +# environment variables +environment: + DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true # For faster CI builds + DOTNET_ROLL_FORWARD: Major # See https://github.com/GitTools/GitVersion/issues/2906 until 5.8.0 is released + +# scripts that are called at very beginning, before repo cloning init: - git config --global core.autocrlf true -# Build script +# scripts that run after cloning repository +install: + - ps: Import-Module .\appveyor.psm1; Invoke-AppveyorInstall + +# scripts to run before build +before_build: + - dotnet --info + +# to run your custom scripts instead of automatic MSBuild build_script: - - ps: .\build.ps1 -bootstrap - - ps: .\build.ps1 -Target AppVeyor + - ps: .\build.ps1 build.cake --target=AppVeyor + +# scripts to run after build +after_build: # Tests test: off # Branches to build branches: - # Whitelist only: - develop - master + - main - /release/.*/ - /hotfix/.*/ # Build cache cache: - - tools -> build.cake, tools\packages.config + - .cake -> build.cake # Environment configuration -image: Visual Studio 2019 +image: + - Ubuntu1804 + - Visual Studio 2022 + +# Skip builds for doc changes +skip_commits: + message: /\(doc\).*/ diff --git a/build.cake b/build.cake index 5df61a3..12e8934 100644 --- a/build.cake +++ b/build.cake @@ -1,19 +1,15 @@ -// Install modules -#module nuget:?package=Cake.DotNetTool.Module&version=0.4.0 - -// Install .NET tools -#tool dotnet:?package=BenchmarkDotNet.Tool&version=0.12.0 - // Install tools. -#tool nuget:?package=GitVersion.CommandLine&version=5.2.4 -#tool nuget:?package=GitReleaseManager&version=0.11.0 -#tool nuget:?package=OpenCover&version=4.7.922 -#tool nuget:?package=ReportGenerator&version=4.5.2 -#tool nuget:?package=coveralls.io&version=1.4.2 -#tool nuget:?package=xunit.runner.console&version=2.4.1 +#tool dotnet:?package=GitVersion.Tool&version=5.12.0 +#tool dotnet:?package=coveralls.net&version=4.0.1 +#tool nuget:https://f.feedz.io/jericho/jericho/nuget/?package=GitReleaseManager&version=0.17.0-collaborators0003 +#tool nuget:?package=ReportGenerator&version=5.2.0 +#tool nuget:?package=xunit.runner.console&version=2.6.4 +#tool nuget:?package=CodecovUploader&version=0.7.1 // Install addins. 
-#addin nuget:?package=Cake.Coveralls&version=0.10.1 +#addin nuget:?package=Cake.Coveralls&version=1.1.0 +#addin nuget:?package=Cake.Git&version=3.0.0 +#addin nuget:?package=Cake.Codecov&version=1.0.1 /////////////////////////////////////////////////////////////////////////////// @@ -23,6 +19,8 @@ var target = Argument("target", "Default"); var configuration = Argument("configuration", "Release"); +if (IsRunningOnUnix()) target = "Run-Unit-Tests"; + /////////////////////////////////////////////////////////////////////////////// // GLOBAL VARIABLES @@ -31,41 +29,80 @@ var configuration = Argument("configuration", "Release"); var libraryName = "Picton.Messaging"; var gitHubRepo = "Picton.Messaging"; -var testCoverageFilter = "+[Picton.Messaging]* -[Picton.Messaging]Picton.Messaging.Properties.* -[Picton.Messaging]Picton.Messaging.Models.* -[Picton.Messaging]Picton.Messaging.Logging.*"; -var testCoverageExcludeByAttribute = "*.ExcludeFromCodeCoverage*"; -var testCoverageExcludeByFile = "*/*Designer.cs;*/*AssemblyInfo.cs"; +var testCoverageFilters = new[] +{ + "+[Picton.Messaging]*", + "-[Picton.Messaging]Picton.Messaging.Properties.*", + "-[Picton.Messaging]Picton.Messaging.Models.*", + "-[Picton.Messaging]*System.Text.Json.SourceGeneration*" +}; +var testCoverageExcludeAttributes = new[] +{ + "Obsolete", + "GeneratedCodeAttribute", + "CompilerGeneratedAttribute", + "ExcludeFromCodeCoverageAttribute" +}; +var testCoverageExcludeFiles = new[] + { + "**/AssemblyInfo.cs" +}; var nuGetApiUrl = Argument("NUGET_API_URL", EnvironmentVariable("NUGET_API_URL")); var nuGetApiKey = Argument("NUGET_API_KEY", EnvironmentVariable("NUGET_API_KEY")); -var myGetApiUrl = Argument("MYGET_API_URL", EnvironmentVariable("MYGET_API_URL")); -var myGetApiKey = Argument("MYGET_API_KEY", EnvironmentVariable("MYGET_API_KEY")); - var gitHubToken = Argument("GITHUB_TOKEN", EnvironmentVariable("GITHUB_TOKEN")); var gitHubUserName = Argument("GITHUB_USERNAME", EnvironmentVariable("GITHUB_USERNAME")); var gitHubPassword = Argument("GITHUB_PASSWORD", EnvironmentVariable("GITHUB_PASSWORD")); var gitHubRepoOwner = Argument("GITHUB_REPOOWNER", EnvironmentVariable("GITHUB_REPOOWNER") ?? 
gitHubUserName); +var coverallsToken = Argument("COVERALLS_REPO_TOKEN", EnvironmentVariable("COVERALLS_REPO_TOKEN")); +var codecovToken = Argument("CODECOV_TOKEN", EnvironmentVariable("CODECOV_TOKEN")); + var sourceFolder = "./Source/"; var outputDir = "./artifacts/"; var codeCoverageDir = $"{outputDir}CodeCoverage/"; var benchmarkDir = $"{outputDir}Benchmark/"; +var coverageFile = $"{codeCoverageDir}coverage.{DefaultFramework}.xml"; +var solutionFile = $"{sourceFolder}{libraryName}.sln"; +var sourceProject = $"{sourceFolder}{libraryName}/{libraryName}.csproj"; +var integrationTestsProject = $"{sourceFolder}{libraryName}.IntegrationTests/{libraryName}.IntegrationTests.csproj"; var unitTestsProject = $"{sourceFolder}{libraryName}.UnitTests/{libraryName}.UnitTests.csproj"; var benchmarkProject = $"{sourceFolder}{libraryName}.Benchmark/{libraryName}.Benchmark.csproj"; -var versionInfo = GitVersion(new GitVersionSettings() { OutputType = GitVersionOutput.Json }); -var milestone = versionInfo.MajorMinorPatch; +var buildBranch = Context.GetBuildBranch(); +var repoName = Context.GetRepoName(); + +var versionInfo = (GitVersion)null; // Will be calculated in SETUP +var milestone = string.Empty; // Will be calculated in SETUP + var cakeVersion = typeof(ICakeContext).Assembly.GetName().Version.ToString(); var isLocalBuild = BuildSystem.IsLocalBuild; -var isMainBranch = StringComparer.OrdinalIgnoreCase.Equals("master", BuildSystem.AppVeyor.Environment.Repository.Branch); -var isMainRepo = StringComparer.OrdinalIgnoreCase.Equals($"{gitHubRepoOwner}/{gitHubRepo}", BuildSystem.AppVeyor.Environment.Repository.Name); +var isMainBranch = StringComparer.OrdinalIgnoreCase.Equals("main", buildBranch); +var isMainRepo = StringComparer.OrdinalIgnoreCase.Equals($"{gitHubRepoOwner}/{gitHubRepo}", repoName); var isPullRequest = BuildSystem.AppVeyor.Environment.PullRequest.IsPullRequest; -var isTagged = ( - BuildSystem.AppVeyor.Environment.Repository.Tag.IsTag && - !string.IsNullOrWhiteSpace(BuildSystem.AppVeyor.Environment.Repository.Tag.Name) -); -var isBenchmarkPresent = FileExists(benchmarkProject); +var isTagged = BuildSystem.AppVeyor.Environment.Repository.Tag.IsTag && !string.IsNullOrWhiteSpace(BuildSystem.AppVeyor.Environment.Repository.Tag.Name); +var isIntegrationTestsProjectPresent = FileExists(integrationTestsProject); +var isUnitTestsProjectPresent = FileExists(unitTestsProject); +var isBenchmarkProjectPresent = FileExists(benchmarkProject); + +var publishingError = false; + +// Generally speaking, we want to honor all the TFM configured in the source project and the unit test project. +// However, there are a few scenarios where a single framework is sufficient. Here are a few examples that come to mind: +// - when building source project on Ubuntu +// - when running unit tests on Ubuntu +// - when calculating code coverage +// FYI, this will cause an error if the source project and/or the unit test project are not configured to target this desired framework: +const string DefaultFramework = "net7.0"; +var desiredFramework = ( + !IsRunningOnWindows() || + target.Equals("Coverage", StringComparison.OrdinalIgnoreCase) || + target.Equals("Run-Code-Coverage", StringComparison.OrdinalIgnoreCase) || + target.Equals("Generate-Code-Coverage-Report", StringComparison.OrdinalIgnoreCase) || + target.Equals("Upload-Coverage-Result", StringComparison.OrdinalIgnoreCase) + ) ? 
DefaultFramework : null;

///////////////////////////////////////////////////////////////////////////////
@@ -74,12 +111,16 @@ var isBenchmarkPresent = FileExists(benchmarkProject);
 Setup(context =>
 {
-	if (isMainBranch && (context.Log.Verbosity != Verbosity.Diagnostic))
+	if (!isLocalBuild && context.Log.Verbosity != Verbosity.Diagnostic)
 	{
 		Information("Increasing verbosity to diagnostic.");
 		context.Log.Verbosity = Verbosity.Diagnostic;
 	}
+	Information("Calculating version info...");
+	versionInfo = GitVersion(new GitVersionSettings() { OutputType = GitVersionOutput.Json });
+	milestone = versionInfo.MajorMinorPatch;
+
 	Information("Building version {0} of {1} ({2}, {3}) using version {4} of Cake",
 		versionInfo.LegacySemVerPadded,
 		libraryName,
@@ -96,11 +137,6 @@ Setup(context =>
 		isTagged
 	);
-	Information("Myget Info:\r\n\tApi Url: {0}\r\n\tApi Key: {1}",
-		myGetApiUrl,
-		string.IsNullOrEmpty(myGetApiKey) ? "[NULL]" : new string('*', myGetApiKey.Length)
-	);
-
 	Information("Nuget Info:\r\n\tApi Url: {0}\r\n\tApi Key: {1}",
 		nuGetApiUrl,
 		string.IsNullOrEmpty(nuGetApiKey) ? "[NULL]" : new string('*', nuGetApiKey.Length)
@@ -122,10 +158,37 @@ Setup(context =>
 			string.IsNullOrEmpty(gitHubPassword) ? "[NULL]" : new string('*', gitHubPassword.Length)
 		);
 	}
+
+	// Integration tests are intended to be used for debugging purposes and are not intended to be executed in a CI environment.
+	// Also, the runner for these tests contains Windows-specific code (such as resizing the window, moving the window to the center of the screen, etc.)
+	// which can cause problems when attempting to run unit tests on an Ubuntu image on AppVeyor.
+	if (!isLocalBuild && isIntegrationTestsProjectPresent)
+	{
+		Information("");
+		Information("Removing integration tests");
+		DotNetTool(solutionFile, "sln", $"remove {integrationTestsProject.TrimStart(sourceFolder, StringComparison.OrdinalIgnoreCase)}");
+	}
+
+	// Similarly, the benchmark project can cause problems such as this one:
+	// error NETSDK1005: Assets file '/home/appveyor/projects/stronggrid/Source/StrongGrid.Benchmark/obj/project.assets.json' doesn't have a target for 'net5.0'.
+	// Ensure that restore has run and that you have included 'net5.0' in the TargetFrameworks for your project.
+	if (!isLocalBuild && isBenchmarkProjectPresent)
+	{
+		Information("");
+		Information("Removing benchmark project");
+		DotNetTool(solutionFile, "sln", $"remove {benchmarkProject.TrimStart(sourceFolder, StringComparison.OrdinalIgnoreCase)}");
+	}
 });

 Teardown(context =>
 {
+	if (!isLocalBuild)
+	{
+		Information("Restoring projects that may have been removed during build script setup");
+		GitCheckout(".", new FilePath[] { solutionFile });
+		Information("");
+	}
+
 	// Executed AFTER the last task. 
Information("Finished running tasks."); }); @@ -168,7 +231,7 @@ Task("Restore-NuGet-Packages") .IsDependentOn("Clean") .Does(() => { - DotNetCoreRestore("./Source/", new DotNetCoreRestoreSettings + DotNetRestore("./Source/", new DotNetRestoreSettings { Sources = new [] { "https://api.nuget.org/v3/index.json", @@ -180,55 +243,105 @@ Task("Build") .IsDependentOn("Restore-NuGet-Packages") .Does(() => { - DotNetCoreBuild($"{sourceFolder}{libraryName}.sln", new DotNetCoreBuildSettings + DotNetBuild(solutionFile, new DotNetBuildSettings { Configuration = configuration, + Framework = desiredFramework, NoRestore = true, - ArgumentCustomization = args => args.Append("/p:SemVer=" + versionInfo.LegacySemVerPadded) + MSBuildSettings = new DotNetMSBuildSettings + { + Version = versionInfo.LegacySemVerPadded, + AssemblyVersion = versionInfo.MajorMinorPatch, + FileVersion = versionInfo.MajorMinorPatch, + InformationalVersion = versionInfo.InformationalVersion, + ContinuousIntegrationBuild = true + } }); }); Task("Run-Unit-Tests") + .WithCriteria(() => isUnitTestsProjectPresent) .IsDependentOn("Build") .Does(() => { - DotNetCoreTest(unitTestsProject, new DotNetCoreTestSettings + DotNetTest(unitTestsProject, new DotNetTestSettings { NoBuild = true, NoRestore = true, - Configuration = configuration + Configuration = configuration, + Framework = desiredFramework }); }); Task("Run-Code-Coverage") + .WithCriteria(() => isUnitTestsProjectPresent) .IsDependentOn("Build") .Does(() => { - Action testAction = ctx => ctx.DotNetCoreTest(unitTestsProject, new DotNetCoreTestSettings + var testSettings = new DotNetTestSettings { NoBuild = true, NoRestore = true, - Configuration = configuration - }); + Configuration = configuration, + Framework = DefaultFramework, + + // The following assumes that coverlet.msbuild has been added to the unit testing project + ArgumentCustomization = args => args + .Append("/p:CollectCoverage=true") + .Append("/p:CoverletOutputFormat=opencover") + .Append($"/p:CoverletOutput={MakeAbsolute(Directory(codeCoverageDir))}/coverage.xml") // The name of the framework will be inserted between "coverage" and "xml". 
This is important to know when uploading the XML file to coveralls/codecov and when generating the HTML report + .Append($"/p:ExcludeByAttribute={string.Join("%2c", testCoverageExcludeAttributes)}") + .Append($"/p:ExcludeByFile={string.Join("%2c", testCoverageExcludeFiles)}") + .Append($"/p:Exclude={string.Join("%2c", testCoverageFilters.Where(filter => filter.StartsWith("-")).Select(filter => filter.TrimStart("-", StringComparison.OrdinalIgnoreCase)))}") + .Append($"/p:Include={string.Join("%2c", testCoverageFilters.Where(filter => filter.StartsWith("+")).Select(filter => filter.TrimStart("+", StringComparison.OrdinalIgnoreCase)))}") + .Append("/p:SkipAutoProps=true") + }; + + DotNetTest(unitTestsProject, testSettings); +}); - OpenCover(testAction, - $"{codeCoverageDir}coverage.xml", - new OpenCoverSettings - { - OldStyle = true, - MergeOutput = true, - ArgumentCustomization = args => args.Append("-returntargetcode") - } - .WithFilter(testCoverageFilter) - .ExcludeByAttribute(testCoverageExcludeByAttribute) - .ExcludeByFile(testCoverageExcludeByFile) - ); +Task("Upload-Coverage-Result-Coveralls") + .IsDependentOn("Run-Code-Coverage") + .WithCriteria(() => FileExists(coverageFile)) + .WithCriteria(() => !isLocalBuild) + .WithCriteria(() => !isPullRequest) + .WithCriteria(() => isMainRepo) + .Does(() => +{ + if(string.IsNullOrEmpty(coverallsToken)) throw new InvalidOperationException("Could not resolve Coveralls token."); + + CoverallsNet(new FilePath(coverageFile), CoverallsNetReportType.OpenCover, new CoverallsNetSettings() + { + RepoToken = coverallsToken, + UseRelativePaths = true + }); +}).OnError (exception => +{ + Information(exception.Message); + Information($"Failed to upload coverage result to Coveralls, but continuing with next Task..."); + publishingError = true; }); -Task("Upload-Coverage-Result") +Task("Upload-Coverage-Result-Codecov") + .IsDependentOn("Run-Code-Coverage") + .WithCriteria(() => FileExists(coverageFile)) + .WithCriteria(() => !isLocalBuild) + .WithCriteria(() => !isPullRequest) + .WithCriteria(() => isMainRepo) .Does(() => { - CoverallsIo($"{codeCoverageDir}coverage.xml"); + if(string.IsNullOrEmpty(codecovToken)) throw new InvalidOperationException("Could not resolve CodeCov token."); + + Codecov(new CodecovSettings + { + Files = new[] { coverageFile }, + Token = codecovToken + }); +}).OnError (exception => +{ + Information(exception.Message); + Information($"Failed to upload coverage result to Codecov, but continuing with next Task..."); + publishingError = true; }); Task("Generate-Code-Coverage-Report") @@ -236,7 +349,7 @@ Task("Generate-Code-Coverage-Report") .Does(() => { ReportGenerator( - $"{codeCoverageDir}coverage.xml", + new FilePath(coverageFile), codeCoverageDir, new ReportGeneratorSettings() { ClassFilters = new[] { "*.UnitTests*" } @@ -248,7 +361,9 @@ Task("Create-NuGet-Package") .IsDependentOn("Build") .Does(() => { - var settings = new DotNetCorePackSettings + var releaseNotesUrl = @$"https://github.com/{gitHubRepoOwner}/{gitHubRepo}/releases/tag/{milestone}"; + + var settings = new DotNetPackSettings { Configuration = configuration, IncludeSource = false, @@ -257,29 +372,26 @@ Task("Create-NuGet-Package") NoRestore = true, NoDependencies = true, OutputDirectory = outputDir, - ArgumentCustomization = (args) => + SymbolPackageFormat = "snupkg", + MSBuildSettings = new DotNetMSBuildSettings { - return args - .Append("/p:SymbolPackageFormat=snupkg") - .Append("/p:Version={0}", versionInfo.LegacySemVerPadded) - .Append("/p:AssemblyVersion={0}", 
versionInfo.MajorMinorPatch) - .Append("/p:FileVersion={0}", versionInfo.MajorMinorPatch) - .Append("/p:AssemblyInformationalVersion={0}", versionInfo.InformationalVersion); + PackageReleaseNotes = releaseNotesUrl, + PackageVersion = versionInfo.LegacySemVerPadded } }; - DotNetCorePack($"{sourceFolder}{libraryName}/{libraryName}.csproj", settings); + DotNetPack(sourceProject, settings); }); Task("Upload-AppVeyor-Artifacts") .WithCriteria(() => AppVeyor.IsRunningOnAppVeyor) .Does(() => { - foreach (var file in GetFiles($"{outputDir}*.*")) - { - AppVeyor.UploadArtifact(file.FullPath); - } - foreach (var file in GetFiles($"{benchmarkDir}results/*.*")) + var allFiles = GetFiles($"{outputDir}*.*") + + GetFiles($"{benchmarkDir}results/*.*") + + GetFiles($"{codeCoverageDir}*.*"); + + foreach (var file in allFiles) { AppVeyor.UploadArtifact(file.FullPath); } @@ -297,58 +409,34 @@ Task("Publish-NuGet") if(string.IsNullOrEmpty(nuGetApiKey)) throw new InvalidOperationException("Could not resolve NuGet API key."); if(string.IsNullOrEmpty(nuGetApiUrl)) throw new InvalidOperationException("Could not resolve NuGet API url."); - foreach(var package in GetFiles(outputDir + "*.nupkg")) + var settings = new DotNetNuGetPushSettings { - // Push the package. - NuGetPush(package, new NuGetPushSettings { - ApiKey = nuGetApiKey, - Source = nuGetApiUrl - }); - } -}); - -Task("Publish-MyGet") - .IsDependentOn("Create-NuGet-Package") - .WithCriteria(() => !isLocalBuild) - .WithCriteria(() => !isPullRequest) - .WithCriteria(() => isMainRepo) - .Does(() => -{ - if(string.IsNullOrEmpty(nuGetApiKey)) throw new InvalidOperationException("Could not resolve MyGet API key."); - if(string.IsNullOrEmpty(nuGetApiUrl)) throw new InvalidOperationException("Could not resolve MyGet API url."); + Source = nuGetApiUrl, + ApiKey = nuGetApiKey + }; foreach(var package in GetFiles(outputDir + "*.nupkg")) { - // Push the package. 
- NuGetPush(package, new NuGetPushSettings { - ApiKey = myGetApiKey, - Source = myGetApiUrl - }); + DotNetNuGetPush(package, settings); } }); Task("Create-Release-Notes") .Does(() => { - var settings = new GitReleaseManagerCreateSettings + if (string.IsNullOrEmpty(gitHubToken)) { - Name = milestone, - Milestone = milestone, - Prerelease = false, - TargetCommitish = "master" - }; - - if (!string.IsNullOrEmpty(gitHubToken)) - { - GitReleaseManagerCreate(gitHubToken, gitHubRepoOwner, gitHubRepo, settings); + throw new InvalidOperationException("GitHub token was not provided."); } - else + + GitReleaseManagerCreate(gitHubToken, gitHubRepoOwner, gitHubRepo, new GitReleaseManagerCreateSettings { - if(string.IsNullOrEmpty(gitHubUserName)) throw new InvalidOperationException("Could not resolve GitHub user name."); - if(string.IsNullOrEmpty(gitHubPassword)) throw new InvalidOperationException("Could not resolve GitHub password."); - - GitReleaseManagerCreate(gitHubUserName, gitHubPassword, gitHubRepoOwner, gitHubRepo, settings); - } + Name = milestone, + Milestone = milestone, + Prerelease = false, + TargetCommitish = "main", + Verbose = true + }); }); Task("Publish-GitHub-Release") @@ -359,55 +447,46 @@ Task("Publish-GitHub-Release") .WithCriteria(() => isTagged) .Does(() => { - var settings = new GitReleaseManagerCreateSettings + if (string.IsNullOrEmpty(gitHubToken)) { - Name = milestone, - Milestone = milestone, - Prerelease = false, - TargetCommitish = "master" - }; - - if (!string.IsNullOrEmpty(gitHubToken)) - { - GitReleaseManagerClose(gitHubToken, gitHubRepoOwner, gitHubRepo, milestone); + throw new InvalidOperationException("GitHub token was not provided."); } - else + + GitReleaseManagerClose(gitHubToken, gitHubRepoOwner, gitHubRepo, milestone, new GitReleaseManagerCloseMilestoneSettings { - if(string.IsNullOrEmpty(gitHubUserName)) throw new InvalidOperationException("Could not resolve GitHub user name."); - if(string.IsNullOrEmpty(gitHubPassword)) throw new InvalidOperationException("Could not resolve GitHub password."); - - GitReleaseManagerClose(gitHubUserName, gitHubPassword, gitHubRepoOwner, gitHubRepo, milestone); - } + Verbose = true + }); }); Task("Generate-Benchmark-Report") .IsDependentOn("Build") - .WithCriteria(isBenchmarkPresent) + .WithCriteria(isBenchmarkProjectPresent) .Does(() => { var publishDirectory = $"{benchmarkDir}Publish/"; + var publishedAppLocation = MakeAbsolute(File($"{publishDirectory}{libraryName}.Benchmark.exe")).FullPath; + var artifactsLocation = MakeAbsolute(File(benchmarkDir)).FullPath; - DotNetCorePublish(benchmarkProject, new DotNetCorePublishSettings - { - Configuration = configuration, + DotNetPublish(benchmarkProject, new DotNetPublishSettings + { + Configuration = configuration, NoRestore = true, NoBuild = true, - OutputDirectory = publishDirectory - }); + OutputDirectory = publishDirectory + }); - var assemblyLocation = MakeAbsolute(File($"{publishDirectory}{libraryName}.Benchmark.dll")).FullPath; - var artifactsLocation = MakeAbsolute(File(benchmarkDir)).FullPath; - var benchmarkToolLocation = Context.Tools.Resolve("dotnet-benchmark.exe"); - - var processResult = StartProcess( - benchmarkToolLocation, - new ProcessSettings() - { - Arguments = $"{assemblyLocation} -f * --artifacts={artifactsLocation}" - }); - if (processResult != 0) + using (DiagnosticVerbosity()) { - throw new Exception($"dotnet-benchmark.exe did not complete successfully. 
Result code: {processResult}");
+		var processResult = StartProcess(
+			publishedAppLocation,
+			new ProcessSettings()
+			{
+				Arguments = $"-f * --artifacts={artifactsLocation}"
+			});
+		if (processResult != 0)
+		{
+			throw new Exception($"The benchmark app did not complete successfully. Result code: {processResult}");
+		}
 	}
 });

@@ -425,25 +504,34 @@ Task("Coverage")

 Task("Benchmark")
 	.IsDependentOn("Generate-Benchmark-Report")
-	.WithCriteria(isBenchmarkPresent)
+	.WithCriteria(isBenchmarkProjectPresent)
 	.Does(() =>
 {
-	var htmlReport = GetFiles($"{benchmarkDir}results/*-report.html", new GlobberSettings { IsCaseSensitive = false }).FirstOrDefault();
-	StartProcess("cmd", $"/c start {htmlReport}");
+	var htmlReports = GetFiles($"{benchmarkDir}results/*-report.html", new GlobberSettings { IsCaseSensitive = false });
+	foreach (var htmlReport in htmlReports)
+	{
+		StartProcess("cmd", $"/c start {htmlReport}");
+	}
 });

 Task("ReleaseNotes")
-	.IsDependentOn("Create-Release-Notes");
+	.IsDependentOn("Create-Release-Notes");

 Task("AppVeyor")
 	.IsDependentOn("Run-Code-Coverage")
-	.IsDependentOn("Upload-Coverage-Result")
-	.IsDependentOn("Generate-Benchmark-Report")
+	.IsDependentOn("Upload-Coverage-Result-Coveralls")
+	.IsDependentOn("Upload-Coverage-Result-Codecov")
 	.IsDependentOn("Create-NuGet-Package")
 	.IsDependentOn("Upload-AppVeyor-Artifacts")
-	.IsDependentOn("Publish-MyGet")
 	.IsDependentOn("Publish-NuGet")
-	.IsDependentOn("Publish-GitHub-Release");
+	.IsDependentOn("Publish-GitHub-Release")
+	.Finally(() =>
+{
+	if (publishingError)
+	{
+		Warning("At least one exception occurred when executing non-essential tasks. These exceptions were ignored in order to allow the build script to complete.");
+	}
+});

 Task("Default")
 	.IsDependentOn("Run-Unit-Tests")
@@ -455,3 +543,71 @@ Task("Default")

///////////////////////////////////////////////////////////////////////////////
 RunTarget(target);
+
+
+
+///////////////////////////////////////////////////////////////////////////////
+// PRIVATE METHODS
+///////////////////////////////////////////////////////////////////////////////
+private static string TrimStart(this string source, string value, StringComparison comparisonType)
+{
+	if (source == null)
+	{
+		throw new ArgumentNullException(nameof(source));
+	}
+
+	int valueLength = value.Length;
+	int startIndex = 0;
+	while (source.IndexOf(value, startIndex, comparisonType) == startIndex)
+	{
+		startIndex += valueLength;
+	}
+
+	return source.Substring(startIndex);
+}
+
+private static List<string> ExecuteCommand(this ICakeContext context, FilePath exe, string args)
+{
+	context.StartProcess(exe, new ProcessSettings { Arguments = args, RedirectStandardOutput = true }, out var redirectedOutput);
+
+	return redirectedOutput.ToList();
+}
+
+private static List<string> ExecGitCmd(this ICakeContext context, string cmd)
+{
+	var gitExe = context.Tools.Resolve(context.IsRunningOnWindows() ? 
"git.exe" : "git"); + return context.ExecuteCommand(gitExe, cmd); +} + +private static string GetBuildBranch(this ICakeContext context) +{ + var buildSystem = context.BuildSystem(); + string repositoryBranch = null; + + if (buildSystem.IsRunningOnAppVeyor) repositoryBranch = buildSystem.AppVeyor.Environment.Repository.Branch; + else if (buildSystem.IsRunningOnAzurePipelines) repositoryBranch = buildSystem.AzurePipelines.Environment.Repository.SourceBranchName; + else if (buildSystem.IsRunningOnBamboo) repositoryBranch = buildSystem.Bamboo.Environment.Repository.Branch; + else if (buildSystem.IsRunningOnBitbucketPipelines) repositoryBranch = buildSystem.BitbucketPipelines.Environment.Repository.Branch; + else if (buildSystem.IsRunningOnBitrise) repositoryBranch = buildSystem.Bitrise.Environment.Repository.GitBranch; + else if (buildSystem.IsRunningOnGitHubActions) repositoryBranch = buildSystem.GitHubActions.Environment.Workflow.Ref.Replace("refs/heads/", ""); + else if (buildSystem.IsRunningOnGitLabCI) repositoryBranch = buildSystem.GitLabCI.Environment.Build.RefName; + else if (buildSystem.IsRunningOnTeamCity) repositoryBranch = buildSystem.TeamCity.Environment.Build.BranchName; + else if (buildSystem.IsRunningOnTravisCI) repositoryBranch = buildSystem.TravisCI.Environment.Build.Branch; + else repositoryBranch = ExecGitCmd(context, "rev-parse --abbrev-ref HEAD").Single(); + + return repositoryBranch; +} + +public static string GetRepoName(this ICakeContext context) +{ + var buildSystem = context.BuildSystem(); + + if (buildSystem.IsRunningOnAppVeyor) return buildSystem.AppVeyor.Environment.Repository.Name; + else if (buildSystem.IsRunningOnAzurePipelines) return buildSystem.AzurePipelines.Environment.Repository.RepoName; + else if (buildSystem.IsRunningOnTravisCI) return buildSystem.TravisCI.Environment.Repository.Slug; + else if (buildSystem.IsRunningOnGitHubActions) return buildSystem.GitHubActions.Environment.Workflow.Repository; + + var originUrl = ExecGitCmd(context, "config --get remote.origin.url").Single(); + var parts = originUrl.Split('/', StringSplitOptions.RemoveEmptyEntries); + return $"{parts[parts.Length - 2]}/{parts[parts.Length - 1].Replace(".git", "")}"; +} diff --git a/build.ps1 b/build.ps1 index a336e29..21821d2 100644 --- a/build.ps1 +++ b/build.ps1 @@ -1,256 +1,13 @@ -########################################################################## -# This is the Cake bootstrapper script for PowerShell. -# This file was downloaded from https://github.com/cake-build/resources -# Feel free to change this file to fit your needs. -########################################################################## +$ErrorActionPreference = 'Stop' -<# +Set-Location -LiteralPath $PSScriptRoot -.SYNOPSIS -This is a Powershell script to bootstrap a Cake build. +$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE = '1' +$env:DOTNET_CLI_TELEMETRY_OPTOUT = '1' +$env:DOTNET_NOLOGO = '1' -.DESCRIPTION -This Powershell script will download NuGet if missing, restore NuGet tools (including Cake) -and execute your Cake build script with the parameters you provide. +dotnet tool restore +if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } -.PARAMETER Script -The build script to execute. -.PARAMETER Target -The build script target to run. -.PARAMETER Configuration -The build configuration to use. -.PARAMETER Verbosity -Specifies the amount of information to be displayed. -.PARAMETER ShowDescription -Shows description about tasks. -.PARAMETER DryRun -Performs a dry run. 
-.PARAMETER SkipToolPackageRestore -Skips restoring of packages. -.PARAMETER ScriptArgs -Remaining arguments are added here. - -.LINK -https://cakebuild.net - -#> - -[CmdletBinding()] -Param( - [string]$Script = "build.cake", - [string]$Target, - [string]$Configuration, - [ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")] - [string]$Verbosity, - [switch]$ShowDescription, - [Alias("WhatIf", "Noop")] - [switch]$DryRun, - [switch]$SkipToolPackageRestore, - [Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)] - [string[]]$ScriptArgs -) - -# Attempt to set highest encryption available for SecurityProtocol. -# PowerShell will not set this by default (until maybe .NET 4.6.x). This -# will typically produce a message for PowerShell v2 (just an info -# message though) -try { - # Set TLS 1.2 (3072), then TLS 1.1 (768), then TLS 1.0 (192), finally SSL 3.0 (48) - # Use integers because the enumeration values for TLS 1.2 and TLS 1.1 won't - # exist in .NET 4.0, even though they are addressable if .NET 4.5+ is - # installed (.NET 4.5 is an in-place upgrade). - # PowerShell Core already has support for TLS 1.2 so we can skip this if running in that. - if (-not $IsCoreCLR) { - [System.Net.ServicePointManager]::SecurityProtocol = 3072 -bor 768 -bor 192 -bor 48 - } - } catch { - Write-Output 'Unable to set PowerShell to use TLS 1.2 and TLS 1.1 due to old .NET Framework installed. If you see underlying connection closed or trust errors, you may need to upgrade to .NET Framework 4.5+ and PowerShell v3' - } - -[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null -function MD5HashFile([string] $filePath) -{ - if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf)) - { - return $null - } - - [System.IO.Stream] $file = $null; - [System.Security.Cryptography.MD5] $md5 = $null; - try - { - $md5 = [System.Security.Cryptography.MD5]::Create() - $file = [System.IO.File]::OpenRead($filePath) - return [System.BitConverter]::ToString($md5.ComputeHash($file)) - } - finally - { - if ($file -ne $null) - { - $file.Dispose() - } - } -} - -function GetProxyEnabledWebClient -{ - $wc = New-Object System.Net.WebClient - $proxy = [System.Net.WebRequest]::GetSystemWebProxy() - $proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials - $wc.Proxy = $proxy - return $wc -} - -Write-Host "Preparing to run build script..." - -if(!$PSScriptRoot){ - $PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent -} - -$TOOLS_DIR = Join-Path $PSScriptRoot "tools" -$ADDINS_DIR = Join-Path $TOOLS_DIR "Addins" -$MODULES_DIR = Join-Path $TOOLS_DIR "Modules" -$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe" -$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe" -$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe" -$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config" -$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum" -$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config" -$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config" - -# Make sure tools folder exists -if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) { - Write-Verbose -Message "Creating tools directory..." - New-Item -Path $TOOLS_DIR -Type Directory | Out-Null -} - -# Make sure that packages.config exist. -if (!(Test-Path $PACKAGES_CONFIG)) { - Write-Verbose -Message "Downloading packages.config..." 
- try { - $wc = GetProxyEnabledWebClient - $wc.DownloadFile("https://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) - } catch { - Throw "Could not download packages.config." - } -} - -# Try find NuGet.exe in path if not exists -if (!(Test-Path $NUGET_EXE)) { - Write-Verbose -Message "Trying to find nuget.exe in PATH..." - $existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) } - $NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1 - if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) { - Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)." - $NUGET_EXE = $NUGET_EXE_IN_PATH.FullName - } -} - -# Try download NuGet.exe if not exists -if (!(Test-Path $NUGET_EXE)) { - Write-Verbose -Message "Downloading NuGet.exe..." - try { - $wc = GetProxyEnabledWebClient - $wc.DownloadFile($NUGET_URL, $NUGET_EXE) - } catch { - Throw "Could not download NuGet.exe." - } -} - -# Save nuget.exe path to environment to be available to child processed -$env:NUGET_EXE = $NUGET_EXE -$env:NUGET_EXE_INVOCATION = if ($IsLinux -or $IsMacOS) { - "mono `"$NUGET_EXE`"" -} else { - "`"$NUGET_EXE`"" -} - -# Restore tools from NuGet? -if(-Not $SkipToolPackageRestore.IsPresent) { - Push-Location - Set-Location $TOOLS_DIR - - # Check for changes in packages.config and remove installed tools if true. - [string] $md5Hash = MD5HashFile $PACKAGES_CONFIG - if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or - ($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) { - Write-Verbose -Message "Missing or changed package.config hash..." - Get-ChildItem -Exclude packages.config,nuget.exe,Cake.Bakery | - Remove-Item -Recurse -Force - } - - Write-Verbose -Message "Restoring tools from NuGet..." - - $NuGetOutput = Invoke-Expression "& $env:NUGET_EXE_INVOCATION install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`"" - - if ($LASTEXITCODE -ne 0) { - Throw "An error occurred while restoring NuGet tools." - } - else - { - $md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII" - } - Write-Verbose -Message ($NuGetOutput | Out-String) - - Pop-Location -} - -# Restore addins from NuGet -if (Test-Path $ADDINS_PACKAGES_CONFIG) { - Push-Location - Set-Location $ADDINS_DIR - - Write-Verbose -Message "Restoring addins from NuGet..." - $NuGetOutput = Invoke-Expression "& $env:NUGET_EXE_INVOCATION install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`"" - - if ($LASTEXITCODE -ne 0) { - Throw "An error occurred while restoring NuGet addins." - } - - Write-Verbose -Message ($NuGetOutput | Out-String) - - Pop-Location -} - -# Restore modules from NuGet -if (Test-Path $MODULES_PACKAGES_CONFIG) { - Push-Location - Set-Location $MODULES_DIR - - Write-Verbose -Message "Restoring modules from NuGet..." - $NuGetOutput = Invoke-Expression "& $env:NUGET_EXE_INVOCATION install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`"" - - if ($LASTEXITCODE -ne 0) { - Throw "An error occurred while restoring NuGet modules." - } - - Write-Verbose -Message ($NuGetOutput | Out-String) - - Pop-Location -} - -# Make sure that Cake has been installed. 
-if (!(Test-Path $CAKE_EXE)) { - Throw "Could not find Cake.exe at $CAKE_EXE" -} - -$CAKE_EXE_INVOCATION = if ($IsLinux -or $IsMacOS) { - "mono `"$CAKE_EXE`"" -} else { - "`"$CAKE_EXE`"" -} - - # Build an array (not a string) of Cake arguments to be joined later -$cakeArguments = @() -if ($Script) { $cakeArguments += "`"$Script`"" } -if ($Target) { $cakeArguments += "-target=`"$Target`"" } -if ($Configuration) { $cakeArguments += "-configuration=$Configuration" } -if ($Verbosity) { $cakeArguments += "-verbosity=$Verbosity" } -if ($ShowDescription) { $cakeArguments += "-showdescription" } -if ($DryRun) { $cakeArguments += "-dryrun" } -$cakeArguments += $ScriptArgs - -# Start Cake -Write-Host "Running build script..." -Invoke-Expression "& $CAKE_EXE_INVOCATION $($cakeArguments -join " ")" -exit $LASTEXITCODE +dotnet cake @args +if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } diff --git a/build.sh b/build.sh new file mode 100644 index 0000000..31be886 --- /dev/null +++ b/build.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -euox pipefail + +cd "$(dirname "${BASH_SOURCE[0]}")" + +export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 +export DOTNET_CLI_TELEMETRY_OPTOUT=1 +export DOTNET_NOLOGO=1 + +dotnet tool restore + +dotnet cake "$@" diff --git a/cake.config b/cake.config new file mode 100644 index 0000000..a90fc5a --- /dev/null +++ b/cake.config @@ -0,0 +1,10 @@ +; The configuration file for Cake. + +[Nuget] +LoadDependencies=true + +[Paths] +Tools=./.cake + +[Settings] +EnableScriptCache=true \ No newline at end of file diff --git a/global.json b/global.json new file mode 100644 index 0000000..8e621ca --- /dev/null +++ b/global.json @@ -0,0 +1,7 @@ +{ + "sdk": { + "version": "8.0.100", + "rollForward": "patch", + "allowPrerelease": false + } +} \ No newline at end of file diff --git a/tools/packages.config b/tools/packages.config deleted file mode 100644 index 0247344..0000000 --- a/tools/packages.config +++ /dev/null @@ -1,4 +0,0 @@ - - - -