Utility analyzer: Enable concurrent analysis and use producer/consumer pattern for file writes #8459

Draft · wants to merge 9 commits into base: master
```diff
@@ -35,7 +35,6 @@ public abstract class UtilityAnalyzerBase : SonarDiagnosticAnalyzer
 {
     protected static readonly ISet<string> FileExtensionWhitelist = new HashSet<string> { ".cs", ".csx", ".vb" };
     private readonly DiagnosticDescriptor rule;
-    protected override bool EnableConcurrentExecution => false;

     public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics =>
         ImmutableArray.Create(rule);
```
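Dropping the `EnableConcurrentExecution => false` override is what actually enables concurrent analysis: the Sonar base class presumably forwards this flag to Roslyn's `AnalysisContext.EnableConcurrentExecution()`. As a rough, self-contained sketch of that Roslyn opt-in (the analyzer name and descriptor below are hypothetical, not taken from this PR):

```csharp
using System.Collections.Immutable;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;

[DiagnosticAnalyzer(LanguageNames.CSharp)]
public sealed class ConcurrentAnalyzerSketch : DiagnosticAnalyzer // hypothetical analyzer, for illustration only
{
    private static readonly DiagnosticDescriptor Rule = new(
        "SKETCH001", "Title", "Message", "Category",
        DiagnosticSeverity.Info, isEnabledByDefault: true);

    public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(Rule);

    public override void Initialize(AnalysisContext context)
    {
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
        // Opting in allows Roslyn to invoke the registered actions in parallel,
        // so any state shared between actions must be thread-safe.
        context.EnableConcurrentExecution();
        context.RegisterSemanticModelAction(ctx => { /* may now run concurrently across files */ });
    }
}
```

Once the opt-in is made, Roslyn may run actions for different files in parallel, which is exactly why the file writes in the hunk below move behind a single consumer.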
```diff
@@ -98,26 +97,47 @@ protected sealed override void Initialize(SonarAnalysisContext context) =>
         {
             return;
         }
-        var treeMessages = new ConcurrentStack<TMessage>();
+        var cancel = startContext.Cancel;
+        var outPath = parameters.OutPath;
+        var treeMessages = new BlockingCollection<TMessage>();
+        var consumerTask = Task.Factory.StartNew(() =>
+            {
+                // Consume all messages as they arrive during the compilation and write them to disk.
+                // The Task starts on CompilationStart and in CompilationEnd we block until it is finished via CompleteAdding().
+                // Note: CompilationEndAction is not guaranteed to be called for each CompilationStart.
+                // Therefore it is important to properly handle cancellation here.
+                // LongRunning: We probably run on a dedicated thread outside of the thread pool.
+                // If any of the IO operations throw, CompilationEnd takes care of the cleanup.
+                Directory.CreateDirectory(outPath);
+                using var stream = File.Create(Path.Combine(outPath, FileName));
+                foreach (var message in treeMessages.GetConsumingEnumerable(cancel).WhereNotNull())
+                {
+                    message.WriteDelimitedTo(stream);
+                }
+            }, cancel, TaskCreationOptions.LongRunning, TaskScheduler.Default);
         startContext.RegisterSemanticModelAction(modelContext =>
         {
-            if (ShouldGenerateMetrics(parameters, modelContext))
+            if (ShouldGenerateMetrics(parameters, modelContext) && !cancel.IsCancellationRequested)
             {
                 var message = CreateMessage(parameters, modelContext.Tree, modelContext.SemanticModel);
-                treeMessages.Push(message);
+                treeMessages.Add(message);
             }
         });
         startContext.RegisterCompilationEndAction(endContext =>
         {
-            var allMessages = CreateAnalysisMessages(endContext)
-                .Concat(treeMessages)
-                .WhereNotNull()
-                .ToArray();
-            Directory.CreateDirectory(parameters.OutPath);
-            using var stream = File.Create(Path.Combine(parameters.OutPath, FileName));
-            foreach (var message in allMessages)
+            var analysisMessages = CreateAnalysisMessages(endContext);
+            foreach (var message in analysisMessages)
             {
-                message.WriteDelimitedTo(stream);
+                treeMessages.Add(message);
             }
+            treeMessages.CompleteAdding();
+            try
+            {
+                consumerTask.Wait(cancel); // Wait until all messages are written to disk. Throws if the task failed.
+            }
+            finally
+            {
+                treeMessages.Dispose();
+            }
         });
     });
```
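The hunk above is the producer/consumer pattern named in the PR title: semantic-model actions (the producers) push messages from potentially many analyzer threads into a `BlockingCollection<TMessage>`, while one long-running task (the consumer) drains it and performs all the file IO, so no lock is needed around the stream. A minimal standalone sketch of the same pattern, with a hypothetical message type and output file name:

```csharp
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

class ProducerConsumerSketch
{
    static void Main()
    {
        using var queue = new BlockingCollection<string>();
        using var cts = new CancellationTokenSource();

        // The single consumer owns the stream; no locking is needed for the writes.
        var consumer = Task.Factory.StartNew(() =>
        {
            using var writer = new StreamWriter("messages.txt"); // hypothetical output file
            // GetConsumingEnumerable blocks until an item arrives and completes
            // once CompleteAdding() has been called and the queue is drained.
            foreach (var message in queue.GetConsumingEnumerable(cts.Token))
            {
                writer.WriteLine(message);
            }
        }, cts.Token, TaskCreationOptions.LongRunning, TaskScheduler.Default);

        // Many producers may call Add concurrently; BlockingCollection is thread-safe.
        Parallel.For(0, 100, i => queue.Add($"message {i}"));

        queue.CompleteAdding();   // Signal that no more items will be added.
        consumer.Wait(cts.Token); // Rethrows if the consumer task faulted.
    }
}
```

`CompleteAdding()` is what lets the consumer's `foreach` finish normally; the cancellation token passed to `GetConsumingEnumerable` covers the case the PR's comment warns about, where `CompilationEndAction` (and therefore `CompleteAdding`) may never run.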