Add the performance benchmark project for PowerShell performance testing (#15242)

Dongbo Wang 2021-04-30 09:34:26 -07:00 committed by GitHub
parent 8f8ddc3fb7
commit a62c9d9443
43 changed files with 2154 additions and 13 deletions

.gitignore

@@ -89,3 +89,6 @@ StyleCop.Cache
# Ignore SelfSignedCertificate autogenerated files
test/tools/Modules/SelfSignedCertificate/
# BenchmarkDotNet artifacts
test/perf/BenchmarkDotNet.Artifacts/


@@ -1310,3 +1310,6 @@ codesign
release-BuildJson
yml
centos-7
- test/perf/benchmarks/README.md
benchmarked
BenchmarkDotNet


@@ -14,6 +14,7 @@ trigger:
- /.vsts-ci/misc-analysis.yml
- /.github/ISSUE_TEMPLATE/*
- /.dependabot/config.yml
- test/perf/*
pr:
branches:
include:
@@ -30,6 +31,7 @@ pr:
- .vsts-ci/windows.yml
- .vsts-ci/windows/*
- test/common/markdown/*
- test/perf/*
- tools/releaseBuild/*
- tools/releaseBuild/azureDevOps/templates/*


@@ -15,6 +15,7 @@ trigger:
- /.vsts-ci/misc-analysis.yml
- /.github/ISSUE_TEMPLATE/*
- /.dependabot/config.yml
- test/perf/*
pr:
branches:
include:
@@ -31,6 +32,7 @@ pr:
- /.vsts-ci/windows.yml
- /.vsts-ci/windows/*
- test/common/markdown/*
- test/perf/*
- tools/packaging/*
- tools/releaseBuild/*
- tools/releaseBuild/azureDevOps/templates/*


@@ -85,7 +85,7 @@ jobs:
condition: succeededOrFailed()
- bash: |
mdspell '**/*.md' '!**/Pester/**/*.md' --ignore-numbers --ignore-acronyms --report --en-us;
mdspell '**/*.md' '!**/Pester/**/*.md' '!**/dotnet-tools/**/*.md' --ignore-numbers --ignore-acronyms --report --en-us;
displayName: Test Spelling in Markdown
condition: succeededOrFailed()
workingDirectory: '$(repoPath)'


@@ -14,6 +14,7 @@ trigger:
- /.vsts-ci/misc-analysis.yml
- /.github/ISSUE_TEMPLATE/*
- /.dependabot/config.yml
- test/perf/*
pr:
branches:
include:
@@ -28,6 +29,7 @@ pr:
- .github/ISSUE_TEMPLATE/*
- .vsts-ci/misc-analysis.yml
- test/common/markdown/*
- test/perf/*
- tools/packaging/*
- tools/releaseBuild/*
- tools/releaseBuild/azureDevOps/templates/*


@@ -6,24 +6,13 @@ using System.Resources;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("powershell-tests,PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
[assembly: InternalsVisibleTo("powershell-perf,PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
[assembly: InternalsVisibleTo("Microsoft.Test.Management.Automation.GPowershell.Analyzers,PublicKey=00240000048000009400000006020000002400005253413100040000010001003f8c902c8fe7ac83af7401b14c1bd103973b26dfafb2b77eda478a2539b979b56ce47f36336741b4ec52bbc51fecd51ba23810cec47070f3e29a2261a2d1d08e4b2b4b457beaa91460055f78cc89f21cd028377af0cc5e6c04699b6856a1e49d5fad3ef16d3c3d6010f40df0a7d6cc2ee11744b5cfb42e0f19a52b8a29dc31b0")]
#if NOT_SIGNED
// These attributes aren't ever used; it's just a hack to get VS to not complain
// about access when editing using the project files that don't actually build.
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.Commands.Utility")]
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.Commands.Management")]
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.Security")]
[assembly: InternalsVisibleTo(@"System.Management.Automation.Remoting")]
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.ConsoleHost")]
#else
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.Commands.Utility" + @",PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.Commands.Management" + @",PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.Security" + @",PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
[assembly: InternalsVisibleTo(@"System.Management.Automation.Remoting" + @",PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
[assembly: InternalsVisibleTo(@"Microsoft.PowerShell.ConsoleHost" + @",PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
#endif
namespace System.Management.Automation
{


@@ -0,0 +1,28 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace MicroBenchmarks
{
public static class Categories
{
/// <summary>
/// Benchmarks belonging to this category are executed for CI jobs.
/// </summary>
public const string Components = "Components";
/// <summary>
/// Benchmarks belonging to this category are executed for CI jobs.
/// </summary>
public const string Engine = "Engine";
/// <summary>
/// Benchmarks belonging to this category are targeting internal APIs.
/// </summary>
public const string Internal = "Internal";
/// <summary>
/// Benchmarks belonging to this category are targeting public APIs.
/// </summary>
public const string Public = "Public";
}
}
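For illustration, here is a hedged sketch (the class and method names are hypothetical) of how a benchmark opts into these categories; the runner can then include or exclude it, e.g. via `--category-exclusion-filter Internal`:

```csharp
using BenchmarkDotNet.Attributes;
using MicroBenchmarks;

namespace Engine
{
    // Hypothetical benchmark: tagged with the mandatory 'Engine' category plus
    // 'Internal', so '--category-exclusion-filter Internal' would skip it.
    [BenchmarkCategory(Categories.Engine, Categories.Internal)]
    public class AstCompilation
    {
        [Benchmark]
        public int Compile() => 42; // placeholder body for illustration
    }
}
```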


@@ -0,0 +1,23 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System.Management.Automation.Language;
using BenchmarkDotNet.Attributes;
using MicroBenchmarks;
namespace Engine
{
[BenchmarkCategory(Categories.Engine, Categories.Public)]
public class Parsing
{
[Benchmark]
public Ast UsingStatement()
{
const string Script = @"
using module moduleA
using Assembly assemblyA
using namespace System.IO";
return Parser.ParseInput(Script, out _, out _);
}
}
}


@@ -0,0 +1,77 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Management.Automation;
using System.Management.Automation.Runspaces;
using System.Runtime.InteropServices;
using BenchmarkDotNet.Attributes;
using MicroBenchmarks;
namespace Engine
{
[BenchmarkCategory(Categories.Engine, Categories.Public)]
public class Scripting
{
private Runspace runspace;
private ScriptBlock scriptBlock;
private void SetupRunspace()
{
// Unless you want to run commands from any built-in modules, using 'CreateDefault2' is enough.
runspace = RunspaceFactory.CreateRunspace(InitialSessionState.CreateDefault2());
runspace.Open();
Runspace.DefaultRunspace = runspace;
}
#region Invoke-Method
[ParamsSource(nameof(ValuesForScript))]
public string InvokeMethodScript { get; set; }
public IEnumerable<string> ValuesForScript()
{
yield return @"'String'.GetType()";
yield return @"[System.IO.Path]::HasExtension('')";
// Test COM method invocation.
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
yield return @"$sh=New-Object -ComObject Shell.Application; $sh.Namespace('c:\')";
yield return @"$fs=New-Object -ComObject scripting.filesystemobject; $fs.Drives";
}
}
[GlobalSetup(Target = nameof(InvokeMethod))]
public void GlobalSetup()
{
SetupRunspace();
scriptBlock = ScriptBlock.Create(InvokeMethodScript);
// Run it once to get the C# code jitted and the script compiled.
// The first call takes relatively long, which makes BDN's heuristic incorrectly
// believe that there is no need to run many ops in each iteration. However, the subsequent runs
// of this method are much faster than the first run, and this causes 'MinIterationTime' warnings
// for our benchmarks and makes the benchmark results unreliable.
// Calling this method once in 'GlobalSetup' is a workaround.
// See https://github.com/dotnet/BenchmarkDotNet/issues/837#issuecomment-828600157
scriptBlock.Invoke();
}
[Benchmark]
public Collection<PSObject> InvokeMethod()
{
return scriptBlock.Invoke();
}
#endregion
[GlobalCleanup]
public void GlobalCleanup()
{
runspace.Dispose();
Runspace.DefaultRunspace = null;
}
}
}


@@ -0,0 +1,56 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using BenchmarkDotNet.Running;
using BenchmarkDotNet.Extensions;
namespace MicroBenchmarks
{
public class Program
{
public static int Main(string[] args)
{
var argsList = new List<string>(args);
int? partitionCount;
int? partitionIndex;
List<string> exclusionFilterValue;
List<string> categoryExclusionFilterValue;
bool getDiffableDisasm;
// Parse and remove any additional parameters that we need that aren't part of BDN (BenchmarkDotnet)
try
{
CommandLineOptions.ParseAndRemoveIntParameter(argsList, "--partition-count", out partitionCount);
CommandLineOptions.ParseAndRemoveIntParameter(argsList, "--partition-index", out partitionIndex);
CommandLineOptions.ParseAndRemoveStringsParameter(argsList, "--exclusion-filter", out exclusionFilterValue);
CommandLineOptions.ParseAndRemoveStringsParameter(argsList, "--category-exclusion-filter", out categoryExclusionFilterValue);
CommandLineOptions.ParseAndRemoveBooleanParameter(argsList, "--disasm-diff", out getDiffableDisasm);
CommandLineOptions.ValidatePartitionParameters(partitionCount, partitionIndex);
}
catch (ArgumentException e)
{
Console.WriteLine("ArgumentException: {0}", e.Message);
return 1;
}
return BenchmarkSwitcher
.FromAssembly(typeof(Program).Assembly)
.Run(
argsList.ToArray(),
RecommendedConfig.Create(
artifactsPath: new DirectoryInfo(Path.Combine(Path.GetDirectoryName(typeof(Program).Assembly.Location), "BenchmarkDotNet.Artifacts")),
mandatoryCategories: ImmutableHashSet.Create(Categories.Components, Categories.Engine),
partitionCount: partitionCount,
partitionIndex: partitionIndex,
exclusionFilterValue: exclusionFilterValue,
categoryExclusionFilterValue: categoryExclusionFilterValue,
getDiffableDisasm: getDiffableDisasm))
.ToExitCode();
}
}
}
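Because the custom switches are stripped before the remaining arguments reach `BenchmarkSwitcher`, the entry point can be exercised directly; a sketch with made-up argument values:

```csharp
// Run only benchmarks matching '*Parsing*', taking the first of two partitions.
// '--partition-count' and '--partition-index' are consumed by the code above
// and never reach BenchmarkDotNet itself.
int exitCode = MicroBenchmarks.Program.Main(new[]
{
    "--filter", "*Parsing*",
    "--partition-count", "2",
    "--partition-index", "0",
});
```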


@@ -0,0 +1,88 @@
## Micro Benchmarks
This folder contains micro benchmarks that test the performance of the PowerShell engine.
### Requirements
1. A good suite of benchmarks:
   something that measures only the thing we are interested in and _produces accurate, stable and repeatable results_.
2. A set of machines with the same configuration.
3. Automation for regression detection.
### Design Decisions
1. This project has access to the internals of `System.Management.Automation`.
We want to be able to target some internal APIs to get measurements on specific, scoped scenarios,
such as measuring the time the compiler takes to compile an AST into a delegate.
2. This project uses `ProjectReference` to reference other PowerShell assemblies.
This makes it easy to run benchmarks against the changes made in the codebase.
To run benchmarks with a specific version of PowerShell,
just replace the `ProjectReference` with a `PackageReference` to the `Microsoft.PowerShell.SDK` NuGet package of the corresponding version.
### Quick Start
You can run the benchmarks directly using `dotnet run` in this directory:
1. To run the benchmarks in Interactive Mode, where you will be asked which benchmark(s) to run:
```
dotnet run -c release
```
2. To list all available benchmarks ([read more](https://github.com/dotnet/performance/blob/main/docs/benchmarkdotnet.md#Listing-the-Benchmarks)):
```
dotnet run -c release --list [flat/tree]
```
3. To filter the benchmarks using a glob pattern applied to `namespace.typeName.methodName` ([read more](https://github.com/dotnet/performance/blob/main/docs/benchmarkdotnet.md#Filtering-the-Benchmarks)):
```
dotnet run -c Release -f net6.0 --filter *script* --list flat
```
4. To profile the benchmarked code and produce an ETW Trace file ([read more](https://github.com/dotnet/performance/blob/main/docs/benchmarkdotnet.md#Profiling))
```
dotnet run -c Release -f net6.0 --filter *script* --profiler ETW
```
You can also use the function `Start-Benchmarking` from the module [`perf.psm1`](../perf.psm1) to run the benchmarks:
```powershell
Start-Benchmarking [[-TargetPSVersion] <string>] [[-List] <string>] [[-Filter] <string[]>] [[-Artifacts] <string>] [-KeepFiles] [<CommonParameters>]
```
Run `Get-Help Start-Benchmarking -Full` to see the description of each parameter.
### Regression Detection
We use the tool [`ResultsComparer`](../dotnet-tools/ResultsComparer) to compare the provided benchmark results.
See the [README.md](../dotnet-tools/ResultsComparer/README.md) for `ResultsComparer` for more details.
The module `perf.psm1` also provides `Compare-BenchmarkResult` that wraps `ResultsComparer`.
Here is an example of using it:
```
## Run benchmarks targeting the current code base
PS:1> Start-Benchmarking -Filter *script* -Artifacts C:\arena\tmp\BenchmarkDotNet.Artifacts\current\
## Run benchmarks targeting the 7.1.3 version of PS package
PS:2> Start-Benchmarking -Filter *script* -Artifacts C:\arena\tmp\BenchmarkDotNet.Artifacts\7.1.3 -TargetPSVersion 7.1.3
## Compare the results using 1% threshold
PS:3> Compare-BenchmarkResult -BaseResultPath C:\arena\tmp\BenchmarkDotNet.Artifacts\7.1.3\ -DiffResultPath C:\arena\tmp\BenchmarkDotNet.Artifacts\current\ -Threshold 1%
summary:
better: 4, geomean: 1.057
total diff: 4
No Slower results for the provided threshold = 1% and noise filter = 0.3ns.
| Faster | base/diff | Base Median (ns) | Diff Median (ns) | Modality|
| -------------------------------------------------------------------------------- | ---------:| ----------------:| ----------------:| --------:|
| Engine.Scripting.InvokeMethod(Script: "$fs=New-Object -ComObject scripting.files | 1.07 | 50635.77 | 47116.42 | |
| Engine.Scripting.InvokeMethod(Script: "$sh=New-Object -ComObject Shell.Applicati | 1.07 | 1063085.23 | 991602.08 | |
| Engine.Scripting.InvokeMethod(Script: "'String'.GetType()") | 1.06 | 1329.93 | 1252.51 | |
| Engine.Scripting.InvokeMethod(Script: "[System.IO.Path]::HasExtension('')") | 1.02 | 1322.04 | 1297.72 | |
No file given
```
## References
- [Getting started with BenchmarkDotNet](https://benchmarkdotnet.org/articles/guides/getting-started.html)
- [Micro-benchmark Design Guidelines](https://github.com/dotnet/performance/blob/main/docs/microbenchmark-design-guidelines.md)
- [Adam SITNIK: Powerful benchmarking in .NET](https://www.youtube.com/watch?v=pdcrSG4tOLI&t=351s)


@@ -0,0 +1,46 @@
<Project Sdk="Microsoft.NET.Sdk">
<!-- We are using a single TFM for this project; the drawback is that we cannot run benchmarks
targeting other .NET runtime versions, such as net5.0 (PS7.1) and netcoreapp3.1 (PS7.0) -->
<Import Project="../../Test.Common.props" />
<PropertyGroup>
<Description>PowerShell Performance Tests</Description>
<AssemblyName>powershell-perf</AssemblyName>
<OutputType>Exe</OutputType>
<NoWarn>$(NoWarn);CS8002</NoWarn>
<SuppressNETCoreSdkPreviewMessage>true</SuppressNETCoreSdkPreviewMessage>
<!-- To run benchmarks targeting a specific package version, set the 'PERF_TARGET_VERSION' environment variable to that version.
Do not try passing in the value using '/property:' at the command line, because
1. 'dotnet run' doesn't respect '/property:' arguments;
2. BenchmarkDotNet generates temporary project files that reference this .csproj file,
and '/property:' arguments won't be forwarded when building those temp projects. -->
<PerfTargetVersion>$(PERF_TARGET_VERSION)</PerfTargetVersion>
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>portable</DebugType>
<DebugSymbols>true</DebugSymbols>
</PropertyGroup>
<PropertyGroup>
<DelaySign>true</DelaySign>
<AssemblyOriginatorKeyFile>../../../src/signing/visualstudiopublic.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../dotnet-tools/BenchmarkDotNet.Extensions/BenchmarkDotNet.Extensions.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(PerfTargetVersion)' == ''">
<ProjectReference Include="../../../src/Microsoft.PowerShell.SDK/Microsoft.PowerShell.SDK.csproj" />
<ProjectReference Include="../../../src/Microsoft.PowerShell.Commands.Diagnostics/Microsoft.PowerShell.Commands.Diagnostics.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(PerfTargetVersion)' != ''">
<PackageReference Include="Microsoft.PowerShell.SDK" Version="$(PerfTargetVersion)" />
</ItemGroup>
</Project>


@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Library</OutputType>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.12.1.1521" />
<PackageReference Include="BenchmarkDotNet.Diagnostics.Windows" Version="0.12.1.1521" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Reporting\Reporting.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,97 @@
using System;
using System.Collections.Generic;
namespace BenchmarkDotNet.Extensions
{
public class CommandLineOptions
{
// Find and parse the given parameter with its expected int value, then remove both the parameter
// and its value from the list of arguments that is subsequently passed to BenchmarkDotNet.
// Throws ArgumentException if the parameter does not have a value, or if that value is not parsable as an int.
public static List<string> ParseAndRemoveIntParameter(List<string> argsList, string parameter, out int? parameterValue)
{
int parameterIndex = argsList.IndexOf(parameter);
parameterValue = null;
if (parameterIndex != -1)
{
if (parameterIndex + 1 < argsList.Count && Int32.TryParse(argsList[parameterIndex+1], out int parsedParameterValue))
{
// remove the parameter and its value from the argument list
parameterValue = parsedParameterValue;
argsList.RemoveAt(parameterIndex+1);
argsList.RemoveAt(parameterIndex);
}
else
{
throw new ArgumentException(String.Format("{0} must be followed by an integer", parameter));
}
}
return argsList;
}
public static List<string> ParseAndRemoveStringsParameter(List<string> argsList, string parameter, out List<string> parameterValue)
{
int parameterIndex = argsList.IndexOf(parameter);
parameterValue = new List<string>();
// Only proceed if the parameter was actually present; when it is absent,
// 'parameterIndex + 1' would be 0 and we would wrongly consume leading arguments.
if (parameterIndex != -1)
{
while (parameterIndex + 1 < argsList.Count && !argsList[parameterIndex + 1].StartsWith("-"))
{
// remove each filter string and stop when we get to the next argument flag
parameterValue.Add(argsList[parameterIndex + 1]);
argsList.RemoveAt(parameterIndex + 1);
}
}
// Remove the parameter flag itself only if it was present.
if (parameterIndex != -1)
{
argsList.RemoveAt(parameterIndex);
}
return argsList;
}
public static void ParseAndRemoveBooleanParameter(List<string> argsList, string parameter, out bool parameterValue)
{
int parameterIndex = argsList.IndexOf(parameter);
if (parameterIndex != -1)
{
argsList.RemoveAt(parameterIndex);
parameterValue = true;
}
else
{
parameterValue = false;
}
}
public static void ValidatePartitionParameters(int? count, int? index)
{
// Either count and index must both be specified or neither specified
if (!(count.HasValue == index.HasValue))
{
throw new ArgumentException("If either --partition-count or --partition-index is specified, both must be specified");
}
// Check values of count and index parameters
else if (count.HasValue && index.HasValue)
{
if (count < 2)
{
throw new ArgumentException("When specified, value of --partition-count must be greater than 1");
}
else if (!(index < count))
{
throw new ArgumentException("Value of --partition-index must be less than --partition-count");
}
else if (index < 0)
{
throw new ArgumentException("Value of --partition-index must be greater than or equal to 0");
}
}
}
}
}
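A small usage sketch of these helpers (the argument values are made up for illustration):

```csharp
using System.Collections.Generic;
using BenchmarkDotNet.Extensions;

var args = new List<string> { "--partition-count", "4", "--filter", "*script*" };

CommandLineOptions.ParseAndRemoveIntParameter(args, "--partition-count", out int? count);
// count == 4; args is now ["--filter", "*script*"], ready to hand to BDN.

CommandLineOptions.ParseAndRemoveBooleanParameter(args, "--disasm-diff", out bool disasm);
// disasm == false because the flag was absent; args is unchanged.
```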


@@ -0,0 +1,90 @@
using BenchmarkDotNet.Diagnosers;
using BenchmarkDotNet.Disassemblers;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
namespace BenchmarkDotNet.Extensions
{
// a simplified copy of internal BDN type: https://github.com/dotnet/BenchmarkDotNet/blob/0445917bf93059f17cb09e7d48cdb5e27a096c37/src/BenchmarkDotNet/Disassemblers/Exporters/GithubMarkdownDisassemblyExporter.cs#L35-L80
internal static class DiffableDisassemblyExporter
{
private static readonly Lazy<Func<object, SourceCode>> GetSource = new Lazy<Func<object, SourceCode>>(() => GetElementGetter<SourceCode>("Source"));
private static readonly Lazy<Func<object, string>> GetTextRepresentation = new Lazy<Func<object, string>>(() => GetElementGetter<string>("TextRepresentation"));
private static readonly Lazy<Func<DisassembledMethod, DisassemblyResult, DisassemblyDiagnoserConfig, string, IReadOnlyList<object>>> Prettify
= new Lazy<Func<DisassembledMethod, DisassemblyResult, DisassemblyDiagnoserConfig, string, IReadOnlyList<object>>>(GetPrettifyMethod);
internal static string BuildDisassemblyString(DisassemblyResult disassemblyResult, DisassemblyDiagnoserConfig config)
{
StringBuilder sb = new StringBuilder();
int methodIndex = 0;
foreach (var method in disassemblyResult.Methods.Where(method => string.IsNullOrEmpty(method.Problem)))
{
sb.AppendLine("```assembly");
sb.AppendLine($"; {method.Name}");
var pretty = Prettify.Value.Invoke(method, disassemblyResult, config, $"M{methodIndex++:00}");
ulong totalSizeInBytes = 0;
foreach (var element in pretty)
{
if (element.Source() is Asm asm)
{
checked
{
totalSizeInBytes += (uint)asm.Instruction.Length;
}
sb.AppendLine($" {element.TextRepresentation()}");
}
else // it's a DisassemblyPrettifier.Label (internal type..)
{
sb.AppendLine($"{element.TextRepresentation()}:");
}
}
sb.AppendLine($"; Total bytes of code {totalSizeInBytes}");
sb.AppendLine("```");
}
return sb.ToString();
}
private static SourceCode Source(this object element) => GetSource.Value.Invoke(element);
private static string TextRepresentation(this object element) => GetTextRepresentation.Value.Invoke(element);
private static Func<object, T> GetElementGetter<T>(string name)
{
var type = typeof(DisassemblyDiagnoser).Assembly.GetType("BenchmarkDotNet.Disassemblers.Exporters.DisassemblyPrettifier");
type = type.GetNestedType("Element", BindingFlags.Instance | BindingFlags.NonPublic);
var property = type.GetProperty(name, BindingFlags.Instance | BindingFlags.NonPublic);
var method = property.GetGetMethod(nonPublic: true);
var generic = typeof(Func<,>).MakeGenericType(type, typeof(T));
var @delegate = method.CreateDelegate(generic);
return (obj) => (T)@delegate.DynamicInvoke(obj); // cast to (Func<object, T>) throws
}
private static Func<DisassembledMethod, DisassemblyResult, DisassemblyDiagnoserConfig, string, IReadOnlyList<object>> GetPrettifyMethod()
{
var type = typeof(DisassemblyDiagnoser).Assembly.GetType("BenchmarkDotNet.Disassemblers.Exporters.DisassemblyPrettifier");
var method = type.GetMethod("Prettify", BindingFlags.Static | BindingFlags.NonPublic);
var @delegate = method.CreateDelegate(typeof(Func<DisassembledMethod, DisassemblyResult, DisassemblyDiagnoserConfig, string, IReadOnlyList<object>>));
return (Func<DisassembledMethod, DisassemblyResult, DisassemblyDiagnoserConfig, string, IReadOnlyList<object>>)@delegate;
}
}
}


@@ -0,0 +1,52 @@
using BenchmarkDotNet.Filters;
using BenchmarkDotNet.Running;
using System;
using System.Collections.Generic;
using System.Text;
namespace BenchmarkDotNet.Extensions
{
class ExclusionFilter : IFilter
{
private readonly GlobFilter globFilter;
public ExclusionFilter(List<string> _filter)
{
if (_filter != null && _filter.Count != 0)
{
globFilter = new GlobFilter(_filter.ToArray());
}
}
public bool Predicate(BenchmarkCase benchmarkCase)
{
if (globFilter == null)
{
return true;
}
return !globFilter.Predicate(benchmarkCase);
}
}
class CategoryExclusionFilter : IFilter
{
private readonly AnyCategoriesFilter filter;
public CategoryExclusionFilter(List<string> patterns)
{
if (patterns != null)
{
filter = new AnyCategoriesFilter(patterns.ToArray());
}
}
public bool Predicate(BenchmarkCase benchmarkCase)
{
if (filter == null)
{
return true;
}
return !filter.Predicate(benchmarkCase);
}
}
}
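Both filters keep every case when no patterns were supplied, so they can be added to a config unconditionally; a wiring sketch with made-up patterns:

```csharp
using System.Collections.Generic;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Extensions;

// Keep everything except benchmarks matching '*Parsing*' or carrying the
// 'Internal' category; a null or empty list disables the respective filter.
var config = ManualConfig.CreateEmpty()
    .AddFilter(new ExclusionFilter(new List<string> { "*Parsing*" }))
    .AddFilter(new CategoryExclusionFilter(new List<string> { "Internal" }));
```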


@@ -0,0 +1,26 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Reports;
namespace BenchmarkDotNet.Extensions
{
public static class SummaryExtensions
{
public static int ToExitCode(this IEnumerable<Summary> summaries)
{
// an empty summary means that initial filtering and validation did not allow anything to run
if (!summaries.Any())
return 1;
// if anything has failed, it's an error
if (summaries.Any(summary => summary.HasCriticalValidationErrors || summary.Reports.Any(report => !report.BuildResult.IsBuildSuccess || !report.AllMeasurements.Any())))
return 1;
return 0;
}
}
}


@@ -0,0 +1,35 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using BenchmarkDotNet.Validators;
namespace BenchmarkDotNet.Extensions
{
/// <summary>
/// This class makes sure that every benchmark belongs to a mandatory category.
/// Categories are used by the CI for filtering.
/// </summary>
public class MandatoryCategoryValidator : IValidator
{
private readonly ImmutableHashSet<string> _mandatoryCategories;
public bool TreatsWarningsAsErrors => true;
public MandatoryCategoryValidator(ImmutableHashSet<string> categories) => _mandatoryCategories = categories;
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
=> validationParameters.Benchmarks
.Where(benchmark => !benchmark.Descriptor.Categories.Any(category => _mandatoryCategories.Contains(category)))
.Select(benchmark => benchmark.Descriptor.GetFilterName())
.Distinct()
.Select(benchmarkId =>
new ValidationError(
isCritical: TreatsWarningsAsErrors,
$"{benchmarkId} does not belong to one of the mandatory categories: {string.Join(", ", _mandatoryCategories)}. Use [BenchmarkCategory(Categories.$)]")
);
}
}


@@ -0,0 +1,27 @@
using BenchmarkDotNet.Filters;
using System;
using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Running;
public class PartitionFilter : IFilter
{
private readonly int? _partitionsCount;
private readonly int? _partitionIndex; // indexed from 0
private int _counter = 0;
public PartitionFilter(int? partitionCount, int? partitionIndex)
{
_partitionsCount = partitionCount;
_partitionIndex = partitionIndex;
}
public bool Predicate(BenchmarkCase benchmarkCase)
{
if (!_partitionsCount.HasValue || !_partitionIndex.HasValue)
return true; // the filter is not enabled so it does not filter anything out and can be added to RecommendedConfig
return _counter++ % _partitionsCount.Value == _partitionIndex.Value; // will return true only for benchmarks that belong to its partition
}
}
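Since the filter only advances an internal counter and never inspects the case itself, its behavior is easy to see in isolation (passing `null` is fine for illustration):

```csharp
// Partition 1 of 3 keeps every case whose running index modulo 3 equals 1.
var filter = new PartitionFilter(partitionCount: 3, partitionIndex: 1);
for (int i = 0; i < 6; i++)
{
    System.Console.Write(filter.Predicate(null) + " ");
}
// Prints: False True False False True False
```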


@@ -0,0 +1,115 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using BenchmarkDotNet.Diagnosers;
using BenchmarkDotNet.Exporters;
using BenchmarkDotNet.Loggers;
using BenchmarkDotNet.Reports;
using Reporting;
using System.Linq;
namespace BenchmarkDotNet.Extensions
{
internal class PerfLabExporter : ExporterBase
{
protected override string FileExtension => "json";
protected override string FileCaption => "perf-lab-report";
public PerfLabExporter()
{
}
public override void ExportToLog(Summary summary, ILogger logger)
{
var reporter = Reporter.CreateReporter();
DisassemblyDiagnoser disassemblyDiagnoser = summary.Reports
.FirstOrDefault()? // the disassembler was either enabled for all benchmarks or none of them (so we use the first one)
.BenchmarkCase.Config.GetDiagnosers().OfType<DisassemblyDiagnoser>().FirstOrDefault();
foreach (var report in summary.Reports)
{
var test = new Test();
test.Name = FullNameProvider.GetBenchmarkName(report.BenchmarkCase);
test.Categories = report.BenchmarkCase.Descriptor.Categories;
var results = from result in report.AllMeasurements
where result.IterationMode == Engines.IterationMode.Workload && result.IterationStage == Engines.IterationStage.Result
orderby result.LaunchIndex, result.IterationIndex
select new { result.Nanoseconds, result.Operations };
var overheadResults = from result in report.AllMeasurements
where result.IsOverhead() && result.IterationStage != Engines.IterationStage.Jitting
orderby result.LaunchIndex, result.IterationIndex
select new { result.Nanoseconds, result.Operations };
test.Counters.Add(new Counter
{
Name = "Duration of single invocation",
TopCounter = true,
DefaultCounter = true,
HigherIsBetter = false,
MetricName = "ns",
Results = (from result in results
select result.Nanoseconds / result.Operations).ToList()
});
test.Counters.Add(new Counter
{
Name = "Overhead invocation",
TopCounter = false,
DefaultCounter = false,
HigherIsBetter = false,
MetricName = "ns",
Results = (from result in overheadResults
select result.Nanoseconds / result.Operations).ToList()
});
test.Counters.Add(new Counter
{
Name = "Duration",
TopCounter = false,
DefaultCounter = false,
HigherIsBetter = false,
MetricName = "ms",
Results = (from result in results
select result.Nanoseconds).ToList()
});
test.Counters.Add(new Counter
{
Name = "Operations",
TopCounter = false,
DefaultCounter = false,
HigherIsBetter = true,
MetricName = "Count",
Results = (from result in results
select (double)result.Operations).ToList()
});
foreach (var metric in report.Metrics.Keys)
{
var m = report.Metrics[metric];
test.Counters.Add(new Counter
{
Name = m.Descriptor.DisplayName,
TopCounter = false,
DefaultCounter = false,
HigherIsBetter = m.Descriptor.TheGreaterTheBetter,
MetricName = m.Descriptor.Unit,
Results = new[] { m.Value }
});
}
if (disassemblyDiagnoser != null && disassemblyDiagnoser.Results.TryGetValue(report.BenchmarkCase, out var disassemblyResult))
{
string disassembly = DiffableDisassemblyExporter.BuildDisassemblyString(disassemblyResult, disassemblyDiagnoser.Config);
test.AdditionalData["disasm"] = disassembly;
}
reporter.AddTest(test);
}
logger.WriteLine(reporter.GetJson());
}
}
}


@@ -0,0 +1,86 @@
using System.Collections.Immutable;
using System.IO;
using BenchmarkDotNet.Columns;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Diagnosers;
using BenchmarkDotNet.Exporters.Json;
using Perfolizer.Horology;
using BenchmarkDotNet.Jobs;
using BenchmarkDotNet.Reports;
using System.Collections.Generic;
using Reporting;
using BenchmarkDotNet.Loggers;
using System.Linq;
using BenchmarkDotNet.Exporters;
namespace BenchmarkDotNet.Extensions
{
public static class RecommendedConfig
{
public static IConfig Create(
DirectoryInfo artifactsPath,
ImmutableHashSet<string> mandatoryCategories,
int? partitionCount = null,
int? partitionIndex = null,
List<string> exclusionFilterValue = null,
List<string> categoryExclusionFilterValue = null,
Job job = null,
bool getDiffableDisasm = false)
{
if (job is null)
{
job = Job.Default
.WithWarmupCount(1) // 1 warmup is enough for our purpose
.WithIterationTime(TimeInterval.FromMilliseconds(250)) // the default is 0.5s per iteration, which is slightly too much for us
.WithMinIterationCount(15)
.WithMaxIterationCount(20) // we don't want to run more than 20 iterations
.DontEnforcePowerPlan(); // make sure BDN does not try to enforce High Performance power plan on Windows
// See https://github.com/dotnet/roslyn/issues/42393
job = job.WithArguments(new Argument[] { new MsBuildArgument("/p:DebugType=portable") });
}
var config = ManualConfig.CreateEmpty()
.AddLogger(ConsoleLogger.Default) // log output to console
.AddValidator(DefaultConfig.Instance.GetValidators().ToArray()) // copy default validators
.AddAnalyser(DefaultConfig.Instance.GetAnalysers().ToArray()) // copy default analysers
.AddExporter(MarkdownExporter.GitHub) // export to GitHub markdown
.AddColumnProvider(DefaultColumnProviders.Instance) // display default columns (method name, args etc)
.AddJob(job.AsDefault()) // tell BDN that these are our default settings
.WithArtifactsPath(artifactsPath.FullName)
.AddDiagnoser(MemoryDiagnoser.Default) // MemoryDiagnoser is enabled by default
.AddFilter(new PartitionFilter(partitionCount, partitionIndex))
.AddFilter(new ExclusionFilter(exclusionFilterValue))
.AddFilter(new CategoryExclusionFilter(categoryExclusionFilterValue))
.AddExporter(JsonExporter.Full) // make sure we export to Json
.AddColumn(StatisticColumn.Median, StatisticColumn.Min, StatisticColumn.Max)
.AddValidator(TooManyTestCasesValidator.FailOnError)
.AddValidator(new UniqueArgumentsValidator()) // don't allow for duplicated arguments #404
.AddValidator(new MandatoryCategoryValidator(mandatoryCategories))
.WithSummaryStyle(SummaryStyle.Default.WithMaxParameterColumnWidth(36)); // the default is 20, which trims some benchmark results too aggressively
if (Reporter.CreateReporter().InLab)
{
config = config.AddExporter(new PerfLabExporter());
}
if (getDiffableDisasm)
{
config = config.AddDiagnoser(CreateDisassembler());
}
return config;
}
private static DisassemblyDiagnoser CreateDisassembler()
=> new DisassemblyDiagnoser(new DisassemblyDiagnoserConfig(
maxDepth: 1, // TODO: is depth == 1 enough?
formatter: null, // TODO: enable diffable format
printSource: false, // we are not interested in getting C#
printInstructionAddresses: false, // would make the diffing hard, however could be useful to determine alignment
exportGithubMarkdown: false,
exportHtml: false,
exportCombinedDisassemblyReport: false,
exportDiff: false));
}
}


@@ -0,0 +1,33 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Validators;
namespace BenchmarkDotNet.Extensions
{
/// <summary>
/// we need to tell our users that having more than 16 test cases per benchmark is a VERY BAD idea
/// </summary>
public class TooManyTestCasesValidator : IValidator
{
private const int Limit = 16;
public static readonly IValidator FailOnError = new TooManyTestCasesValidator();
public bool TreatsWarningsAsErrors => true;
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
var byDescriptor = validationParameters.Benchmarks.GroupBy(benchmark => (benchmark.Descriptor, benchmark.Job)); // descriptor = type + method
return byDescriptor.Where(benchmarkCase => benchmarkCase.Count() > Limit).Select(group =>
new ValidationError(
isCritical: true,
message: $"{group.Key.Descriptor.Type.Name}.{group.Key.Descriptor.WorkloadMethod.Name} has {group.Count()} test cases. It MUST NOT have more than {Limit} test cases. We don't have inifinite amount of time to run all the benchmarks!!",
benchmarkCase: group.First()));
}
}
}
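A sketch of the kind of benchmark this validator rejects (the type and values are made up): one `[Benchmark]` method with more than 16 parameter values.

```csharp
using BenchmarkDotNet.Attributes;

public class TooManyCases
{
    // 17 values => 17 test cases for a single method, exceeding the limit of 16.
    [Params(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)]
    public int N { get; set; }

    [Benchmark]
    public int Square() => N * N;
}
```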


@@ -0,0 +1,42 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using BenchmarkDotNet.Validators;
using System.Collections.Generic;
using System.Linq;
using BenchmarkDotNet.Running;
namespace BenchmarkDotNet.Extensions
{
public class UniqueArgumentsValidator : IValidator
{
public bool TreatsWarningsAsErrors => true;
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
=> validationParameters.Benchmarks
.Where(benchmark => benchmark.HasArguments || benchmark.HasParameters)
.GroupBy(benchmark => (benchmark.Descriptor.Type, benchmark.Descriptor.WorkloadMethod, benchmark.Job))
.Where(sameBenchmark =>
{
int numberOfUniqueTestCases = sameBenchmark.Distinct(new BenchmarkArgumentsComparer()).Count();
int numberOfTestCases = sameBenchmark.Count();
return numberOfTestCases != numberOfUniqueTestCases;
})
.Select(duplicate => new ValidationError(true, $"Benchmark Arguments should be unique, {duplicate.Key.Type}.{duplicate.Key.WorkloadMethod} has duplicate arguments.", duplicate.First()));
private class BenchmarkArgumentsComparer : IEqualityComparer<BenchmarkCase>
{
public bool Equals(BenchmarkCase x, BenchmarkCase y)
=> Enumerable.SequenceEqual(
x.Parameters.Items.Select(argument => argument.Value),
y.Parameters.Items.Select(argument => argument.Value));
public int GetHashCode(BenchmarkCase obj)
=> obj.Parameters.Items
.Where(item => item.Value != null)
.Aggregate(seed: 0, (hashCode, argument) => hashCode ^= argument.Value.GetHashCode());
}
}
}


@@ -0,0 +1,148 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
namespace BenchmarkDotNet.Extensions
{
public static class ValuesGenerator
{
private const int Seed = 12345; // we always use the same seed to have repeatable results!
public static T GetNonDefaultValue<T>()
{
if (typeof(T) == typeof(byte)) // we can't use ArrayOfUniqueValues for byte
return Array<T>(byte.MaxValue).First(value => !value.Equals(default));
else
return ArrayOfUniqueValues<T>(2).First(value => !value.Equals(default));
}
/// <summary>
/// does not support byte because there are only 256 unique byte values
/// </summary>
public static T[] ArrayOfUniqueValues<T>(int count)
{
// allocate the array first to try to take advantage of memory randomization
// as it's usually the first thing called from GlobalSetup method
// which with MemoryRandomization enabled is the first method called right after allocation
// of random-sized memory by BDN engine
T[] result = new T[count];
var random = new Random(Seed);
var uniqueValues = new HashSet<T>();
while (uniqueValues.Count != count)
{
T value = GenerateValue<T>(random);
if (!uniqueValues.Contains(value))
uniqueValues.Add(value);
}
uniqueValues.CopyTo(result);
return result;
}
public static T[] Array<T>(int count)
{
var result = new T[count];
var random = new Random(Seed);
if (typeof(T) == typeof(byte) || typeof(T) == typeof(sbyte))
{
random.NextBytes(Unsafe.As<byte[]>(result));
}
else
{
for (int i = 0; i < result.Length; i++)
{
result[i] = GenerateValue<T>(random);
}
}
return result;
}
public static Dictionary<TKey, TValue> Dictionary<TKey, TValue>(int count)
{
var dictionary = new Dictionary<TKey, TValue>();
var random = new Random(Seed);
while (dictionary.Count != count)
{
TKey key = GenerateValue<TKey>(random);
if (!dictionary.ContainsKey(key))
dictionary.Add(key, GenerateValue<TValue>(random));
}
return dictionary;
}
private static T GenerateValue<T>(Random random)
{
if (typeof(T) == typeof(char))
return (T)(object)(char)random.Next(char.MinValue, char.MaxValue);
if (typeof(T) == typeof(short))
return (T)(object)(short)random.Next(short.MaxValue);
if (typeof(T) == typeof(ushort))
return (T)(object)(ushort)random.Next(short.MaxValue);
if (typeof(T) == typeof(int))
return (T)(object)random.Next();
if (typeof(T) == typeof(uint))
return (T)(object)(uint)random.Next();
if (typeof(T) == typeof(long))
return (T)(object)(long)random.Next();
if (typeof(T) == typeof(ulong))
return (T)(object)(ulong)random.Next();
if (typeof(T) == typeof(float))
return (T)(object)(float)random.NextDouble();
if (typeof(T) == typeof(double))
return (T)(object)random.NextDouble();
if (typeof(T) == typeof(bool))
return (T)(object)(random.NextDouble() > 0.5);
if (typeof(T) == typeof(string))
return (T)(object)GenerateRandomString(random, 1, 50);
if (typeof(T) == typeof(Guid))
return (T)(object)GenerateRandomGuid(random);
throw new NotImplementedException($"{typeof(T).Name} is not implemented");
}
private static string GenerateRandomString(Random random, int minLength, int maxLength)
{
var length = random.Next(minLength, maxLength);
var builder = new StringBuilder(length);
for (int i = 0; i < length; i++)
{
var rangeSelector = random.Next(0, 3);
if (rangeSelector == 0)
builder.Append((char) random.Next('a', 'z'));
else if (rangeSelector == 1)
builder.Append((char) random.Next('A', 'Z'));
else
builder.Append((char) random.Next('0', '9'));
}
return builder.ToString();
}
private static Guid GenerateRandomGuid(Random random)
{
byte[] bytes = new byte[16];
random.NextBytes(bytes);
return new Guid(bytes);
}
}
}
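Typical use is from a `[GlobalSetup]` method; because the seed is fixed, every run on every machine gets identical inputs. A short sketch:

```csharp
using BenchmarkDotNet.Extensions;

// 1024 distinct ints, a non-default string, and a 100-entry dictionary,
// all reproducible across runs thanks to the fixed seed.
int[] values = ValuesGenerator.ArrayOfUniqueValues<int>(1024);
string text = ValuesGenerator.GetNonDefaultValue<string>();
var lookup = ValuesGenerator.Dictionary<int, string>(100);
```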


@@ -0,0 +1,14 @@
## Tools
The tools here are copied from [dotnet/performance](https://github.com/dotnet/performance),
the performance testing repository for the .NET runtime and framework libraries.
- [BenchmarkDotNet.Extensions](https://github.com/dotnet/performance/tree/main/src/harness/BenchmarkDotNet.Extensions)
- It provides the needed extensions for running benchmarks,
such as `RecommendedConfig`, which defines the set of recommended configurations for running the dotnet benchmarks.
- [Reporting](https://github.com/dotnet/performance/tree/main/src/tools/Reporting)
- It provides additional result-reporting support,
which may be useful to us when running our benchmarks in the lab.
- [ResultsComparer](https://github.com/dotnet/performance/tree/main/src/tools/ResultsComparer)
- It's a tool for comparing different benchmark results.
It's very useful for surfacing regressions from new changes by comparing their benchmark results to the baseline results.


@@ -0,0 +1,31 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Text;
namespace Reporting
{
public sealed class Build
{
public string Repo { get; set; }
public string Branch { get; set; }
public string Architecture { get; set; }
public string Locale { get; set; }
public string GitHash { get; set; }
public string BuildName { get; set; }
public DateTime TimeStamp { get; set; }
public Dictionary<string, string> AdditionalData { get; set; } = new Dictionary<string, string>();
}
}


@@ -0,0 +1,23 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
namespace Reporting
{
public class Counter
{
public string Name { get; set; }
public bool TopCounter { get; set; }
public bool DefaultCounter { get; set; }
public bool HigherIsBetter { get; set; }
public string MetricName { get; set; }
public IList<double> Results { get; set; }
}
}


@@ -0,0 +1,15 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Text;
namespace Reporting
{
public class EnvironmentProvider : IEnvironment
{
public string GetEnvironmentVariable(string variable) => Environment.GetEnvironmentVariable(variable);
}
}


@@ -0,0 +1,15 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Text;
namespace Reporting
{
public interface IEnvironment
{
string GetEnvironmentVariable(string variable);
}
}


@@ -0,0 +1,15 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Reporting
{
public class Os
{
public string Locale { get; set; }
public string Architecture { get; set; }
public string Name { get; set; }
}
}


@@ -0,0 +1,153 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using RuntimeEnvironment = Microsoft.DotNet.PlatformAbstractions.RuntimeEnvironment;
namespace Reporting
{
public class Reporter
{
private Run run;
private Os os;
private Build build;
private List<Test> tests = new List<Test>();
protected IEnvironment environment;
private Reporter() { }
public void AddTest(Test test)
{
if (tests.Any(t => t.Name.Equals(test.Name)))
throw new Exception($"Duplicate test name, {test.Name}");
tests.Add(test);
}
/// <summary>
/// Get a Reporter. Relies on environment variables.
/// </summary>
/// <param name="environment">Optional environment variable provider</param>
/// <returns>A Reporter instance or null if the environment is incorrect.</returns>
public static Reporter CreateReporter(IEnvironment environment = null)
{
var ret = new Reporter();
ret.environment = environment == null ? new EnvironmentProvider() : environment;
if (ret.InLab)
{
ret.Init();
}
return ret;
}
private void Init()
{
run = new Run
{
CorrelationId = environment.GetEnvironmentVariable("HELIX_CORRELATION_ID"),
PerfRepoHash = environment.GetEnvironmentVariable("PERFLAB_PERFHASH"),
Name = environment.GetEnvironmentVariable("PERFLAB_RUNNAME"),
Queue = environment.GetEnvironmentVariable("PERFLAB_QUEUE"),
};
Boolean.TryParse(environment.GetEnvironmentVariable("PERFLAB_HIDDEN"), out bool hidden);
run.Hidden = hidden;
var configs = environment.GetEnvironmentVariable("PERFLAB_CONFIGS");
if (!String.IsNullOrEmpty(configs)) // configs should be optional.
{
foreach (var kvp in configs.Split(';'))
{
var split = kvp.Split('=');
run.Configurations.Add(split[0], split[1]);
}
}
os = new Os()
{
Name = $"{RuntimeEnvironment.OperatingSystem} {RuntimeEnvironment.OperatingSystemVersion}",
Architecture = RuntimeInformation.OSArchitecture.ToString(),
Locale = CultureInfo.CurrentUICulture.ToString()
};
build = new Build
{
Repo = environment.GetEnvironmentVariable("PERFLAB_REPO"),
Branch = environment.GetEnvironmentVariable("PERFLAB_BRANCH"),
Architecture = environment.GetEnvironmentVariable("PERFLAB_BUILDARCH"),
Locale = environment.GetEnvironmentVariable("PERFLAB_LOCALE"),
GitHash = environment.GetEnvironmentVariable("PERFLAB_HASH"),
BuildName = environment.GetEnvironmentVariable("PERFLAB_BUILDNUM"),
TimeStamp = DateTime.Parse(environment.GetEnvironmentVariable("PERFLAB_BUILDTIMESTAMP")),
};
build.AdditionalData["productVersion"] = environment.GetEnvironmentVariable("DOTNET_VERSION");
}
public string GetJson()
{
if (!InLab)
{
return null;
}
var jsonobj = new
{
build,
os,
run,
tests
};
var settings = new JsonSerializerSettings();
var resolver = new DefaultContractResolver();
resolver.NamingStrategy = new CamelCaseNamingStrategy() { ProcessDictionaryKeys = false };
settings.ContractResolver = resolver;
return JsonConvert.SerializeObject(jsonobj, Formatting.Indented, settings);
}
public string WriteResultTable()
{
StringBuilder ret = new StringBuilder();
foreach (var test in tests)
{
var defaultCounter = test.Counters.Single(c => c.DefaultCounter);
var topCounters = test.Counters.Where(c => c.TopCounter && !c.DefaultCounter);
var restCounters = test.Counters.Where(c => !(c.TopCounter || c.DefaultCounter));
var counterWidth = Math.Max(test.Counters.Max(c => c.Name.Length) + 1, 15);
var resultWidth = Math.Max(test.Counters.Max(c => c.Results.Max().ToString("F3").Length + c.MetricName.Length) + 2, 15);
ret.AppendLine(test.Name);
ret.AppendLine($"{LeftJustify("Metric", counterWidth)}|{LeftJustify("Average",resultWidth)}|{LeftJustify("Min", resultWidth)}|{LeftJustify("Max",resultWidth)}");
ret.AppendLine($"{new String('-', counterWidth)}|{new String('-', resultWidth)}|{new String('-', resultWidth)}|{new String('-', resultWidth)}");
ret.AppendLine(Print(defaultCounter, counterWidth, resultWidth));
foreach(var counter in topCounters)
{
ret.AppendLine(Print(counter, counterWidth, resultWidth));
}
foreach (var counter in restCounters)
{
ret.AppendLine(Print(counter, counterWidth, resultWidth));
}
}
return ret.ToString();
}
private string Print(Counter counter, int counterWidth, int resultWidth)
{
string average = $"{counter.Results.Average():F3} {counter.MetricName}";
string max = $"{counter.Results.Max():F3} {counter.MetricName}";
string min = $"{counter.Results.Min():F3} {counter.MetricName}";
return $"{LeftJustify(counter.Name, counterWidth)}|{LeftJustify(average, resultWidth)}|{LeftJustify(min, resultWidth)}|{LeftJustify(max, resultWidth)}";
}
private string LeftJustify(string str, int width)
{
return String.Format("{0,-" + width + "}", str);
}
public bool InLab => environment.GetEnvironmentVariable("PERFLAB_INLAB")?.Equals("1") ?? false;
}
}
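A sketch of driving the reporter with a fake `IEnvironment` (the variable values below are invented); outside the lab, `GetJson` simply returns `null`:

```csharp
using Reporting;

var reporter = Reporter.CreateReporter(new FakeLabEnvironment());
reporter.AddTest(new Test { Name = "Engine.Parsing.UsingStatement" });
string json = reporter.GetJson(); // camelCased build/os/run/tests payload

// Pretend we are in the perf lab by faking the environment variables.
class FakeLabEnvironment : IEnvironment
{
    public string GetEnvironmentVariable(string variable)
    {
        if (variable == "PERFLAB_INLAB") return "1";
        if (variable == "PERFLAB_BUILDTIMESTAMP") return "2021-04-30T09:34:26Z"; // Init() parses this
        return null; // every other PERFLAB_* variable is left unset
    }
}
```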


@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Library</OutputType>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="12.0.2" />
<PackageReference Include="Microsoft.DotNet.PlatformAbstractions" Version="2.1.0" />
</ItemGroup>
</Project>


@@ -0,0 +1,24 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Text;
namespace Reporting
{
public class Run
{
public bool Hidden { get; set; }
public string CorrelationId { get; set; }
public string PerfRepoHash { get; set; }
public string Name { get; set; }
public string Queue { get; set; }
public IDictionary<string, string> Configurations { get; set; } = new Dictionary<string, string>();
}
}


@@ -0,0 +1,43 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Reporting
{
public class Test
{
public IList<string> Categories { get; set; } = new List<string>();
public string Name { get; set; }
public Dictionary<string, string> AdditionalData { get; set; } = new Dictionary<string, string>();
public IList<Counter> Counters { get; set; } = new List<Counter>();
public void AddCounter(Counter counter)
{
if (counter.DefaultCounter && Counters.Any(c => c.DefaultCounter))
{
throw new Exception($"Duplicate default counter, name: ${counter.Name}");
}
if (Counters.Any(c => c.Name.Equals(counter.Name)))
{
throw new Exception($"Duplicate counter name, name: ${counter.Name}");
}
Counters.Add(counter);
}
public void AddCounter(IEnumerable<Counter> counters)
{
foreach (var counter in counters)
AddCounter(counter);
}
}
}


@@ -0,0 +1,54 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.IO;
using CommandLine;
using CommandLine.Text;
namespace ResultsComparer
{
public class CommandLineOptions
{
[Option("base", HelpText = "Path to the folder/file with base results.")]
public string BasePath { get; set; }
[Option("diff", HelpText = "Path to the folder/file with diff results.")]
public string DiffPath { get; set; }
[Option("threshold", Required = true, HelpText = "Threshold for Statistical Test. Examples: 5%, 10ms, 100ns, 1s.")]
public string StatisticalTestThreshold { get; set; }
[Option("noise", HelpText = "Noise threshold for Statistical Test. The difference for 1.0ns and 1.1ns is 10%, but it's just a noise. Examples: 0.5ns 1ns.", Default = "0.3ns" )]
public string NoiseThreshold { get; set; }
[Option("top", HelpText = "Filter the diff to top/bottom N results. Optional.")]
public int? TopCount { get; set; }
[Option("csv", HelpText = "Path to exported CSV results. Optional.")]
public FileInfo CsvPath { get; set; }
[Option("xml", HelpText = "Path to exported XML results. Optional.")]
public FileInfo XmlPath { get; set; }
[Option('f', "filter", HelpText = "Filter the benchmarks by name using glob pattern(s). Optional.")]
public IEnumerable<string> Filters { get; set; }
[Usage(ApplicationAlias = "")]
public static IEnumerable<Example> Examples
{
get
{
yield return new Example(@"Compare the results stored in 'C:\results\win' (base) vs 'C:\results\unix' (diff) using 5% threshold.",
new CommandLineOptions { BasePath = @"C:\results\win", DiffPath = @"C:\results\unix", StatisticalTestThreshold = "5%" });
yield return new Example(@"Compare the results stored in 'C:\results\win' (base) vs 'C:\results\unix' (diff) using 5% threshold and show only top/bottom 10 results.",
new CommandLineOptions { BasePath = @"C:\results\win", DiffPath = @"C:\results\unix", StatisticalTestThreshold = "5%", TopCount = 10 });
yield return new Example(@"Compare the results stored in 'C:\results\win' (base) vs 'C:\results\unix' (diff) using 5% threshold and 0.5ns noise filter.",
new CommandLineOptions { BasePath = @"C:\results\win", DiffPath = @"C:\results\unix", StatisticalTestThreshold = "5%", NoiseThreshold = "0.5ns" });
yield return new Example(@"Compare the System.Math benchmark results stored in 'C:\results\ubuntu16' (base) vs 'C:\results\ubuntu18' (diff) using 5% threshold.",
new CommandLineOptions { Filters = new[] { "System.Math*" }, BasePath = @"C:\results\win", DiffPath = @"C:\results\unix", StatisticalTestThreshold = "5%" });
}
}
}
}


@@ -0,0 +1,133 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// <auto-generated />
using System.Collections.Generic;
using System.Linq;
namespace DataTransferContracts // generated with http://json2csharp.com/#
{
public class ChronometerFrequency
{
public int Hertz { get; set; }
}
public class HostEnvironmentInfo
{
public string BenchmarkDotNetCaption { get; set; }
public string BenchmarkDotNetVersion { get; set; }
public string OsVersion { get; set; }
public string ProcessorName { get; set; }
public int? PhysicalProcessorCount { get; set; }
public int? PhysicalCoreCount { get; set; }
public int? LogicalCoreCount { get; set; }
public string RuntimeVersion { get; set; }
public string Architecture { get; set; }
public bool? HasAttachedDebugger { get; set; }
public bool? HasRyuJit { get; set; }
public string Configuration { get; set; }
public string JitModules { get; set; }
public string DotNetCliVersion { get; set; }
public ChronometerFrequency ChronometerFrequency { get; set; }
public string HardwareTimerKind { get; set; }
}
public class ConfidenceInterval
{
public int N { get; set; }
public double Mean { get; set; }
public double StandardError { get; set; }
public int Level { get; set; }
public double Margin { get; set; }
public double Lower { get; set; }
public double Upper { get; set; }
}
public class Percentiles
{
public double P0 { get; set; }
public double P25 { get; set; }
public double P50 { get; set; }
public double P67 { get; set; }
public double P80 { get; set; }
public double P85 { get; set; }
public double P90 { get; set; }
public double P95 { get; set; }
public double P100 { get; set; }
}
public class Statistics
{
public int N { get; set; }
public double Min { get; set; }
public double LowerFence { get; set; }
public double Q1 { get; set; }
public double Median { get; set; }
public double Mean { get; set; }
public double Q3 { get; set; }
public double UpperFence { get; set; }
public double Max { get; set; }
public double InterquartileRange { get; set; }
public List<double> LowerOutliers { get; set; }
public List<double> UpperOutliers { get; set; }
public List<double> AllOutliers { get; set; }
public double StandardError { get; set; }
public double Variance { get; set; }
public double StandardDeviation { get; set; }
public double Skewness { get; set; }
public double Kurtosis { get; set; }
public ConfidenceInterval ConfidenceInterval { get; set; }
public Percentiles Percentiles { get; set; }
}
public class Memory
{
public int Gen0Collections { get; set; }
public int Gen1Collections { get; set; }
public int Gen2Collections { get; set; }
public long TotalOperations { get; set; }
public long BytesAllocatedPerOperation { get; set; }
}
public class Measurement
{
public string IterationStage { get; set; }
public int LaunchIndex { get; set; }
public int IterationIndex { get; set; }
public long Operations { get; set; }
public double Nanoseconds { get; set; }
}
public class Benchmark
{
public string DisplayInfo { get; set; }
public object Namespace { get; set; }
public string Type { get; set; }
public string Method { get; set; }
public string MethodTitle { get; set; }
public string Parameters { get; set; }
public string FullName { get; set; }
public Statistics Statistics { get; set; }
public Memory Memory { get; set; }
public List<Measurement> Measurements { get; set; }
/// <summary>
/// This method was not auto-generated by a tool; it was added manually.
/// </summary>
/// <returns>an array of the actual workload results (not warmup, not pilot)</returns>
internal double[] GetOriginalValues()
=> Measurements
.Where(measurement => measurement.IterationStage == "Result")
.Select(measurement => measurement.Nanoseconds / measurement.Operations)
.ToArray();
}
public class BdnResult
{
public string Title { get; set; }
public HostEnvironmentInfo HostEnvironmentInfo { get; set; }
public List<Benchmark> Benchmarks { get; set; }
}
}

View File

@ -0,0 +1,290 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Xml;
using Perfolizer.Mathematics.Multimodality;
using Perfolizer.Mathematics.SignificanceTesting;
using Perfolizer.Mathematics.Thresholds;
using CommandLine;
using DataTransferContracts;
using MarkdownLog;
using Newtonsoft.Json;
namespace ResultsComparer
{
public class Program
{
private const string FullBdnJsonFileExtension = "full.json";
public static void Main(string[] args)
{
// We print a lot of numbers here, and we want them to always use the invariant culture.
Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
Parser.Default.ParseArguments<CommandLineOptions>(args).WithParsed(Compare);
}
private static void Compare(CommandLineOptions args)
{
if (!Threshold.TryParse(args.StatisticalTestThreshold, out var testThreshold))
{
Console.WriteLine($"Invalid Threshold {args.StatisticalTestThreshold}. Examples: 5%, 10ms, 100ns, 1s.");
return;
}
if (!Threshold.TryParse(args.NoiseThreshold, out var noiseThreshold))
{
Console.WriteLine($"Invalid Noise Threshold {args.NoiseThreshold}. Examples: 0.3ns 1ns.");
return;
}
var notSame = GetNotSameResults(args, testThreshold, noiseThreshold).ToArray();
if (!notSame.Any())
{
Console.WriteLine($"No differences found between the benchmark results with threshold {testThreshold}.");
return;
}
PrintSummary(notSame);
PrintTable(notSame, EquivalenceTestConclusion.Slower, args);
PrintTable(notSame, EquivalenceTestConclusion.Faster, args);
ExportToCsv(notSame, args.CsvPath);
ExportToXml(notSame, args.XmlPath);
}
private static IEnumerable<(string id, Benchmark baseResult, Benchmark diffResult, EquivalenceTestConclusion conclusion)> GetNotSameResults(CommandLineOptions args, Threshold testThreshold, Threshold noiseThreshold)
{
foreach ((string id, Benchmark baseResult, Benchmark diffResult) in ReadResults(args)
.Where(result => result.baseResult.Statistics != null && result.diffResult.Statistics != null)) // failures
{
var baseValues = baseResult.GetOriginalValues();
var diffValues = diffResult.GetOriginalValues();
var userThresholdResult = StatisticalTestHelper.CalculateTost(MannWhitneyTest.Instance, baseValues, diffValues, testThreshold);
if (userThresholdResult.Conclusion == EquivalenceTestConclusion.Same)
continue;
var noiseResult = StatisticalTestHelper.CalculateTost(MannWhitneyTest.Instance, baseValues, diffValues, noiseThreshold);
if (noiseResult.Conclusion == EquivalenceTestConclusion.Same)
continue;
yield return (id, baseResult, diffResult, userThresholdResult.Conclusion);
}
}
private static void PrintSummary((string id, Benchmark baseResult, Benchmark diffResult, EquivalenceTestConclusion conclusion)[] notSame)
{
var better = notSame.Where(result => result.conclusion == EquivalenceTestConclusion.Faster);
var worse = notSame.Where(result => result.conclusion == EquivalenceTestConclusion.Slower);
var betterCount = better.Count();
var worseCount = worse.Count();
// If the baseline doesn't have the same set of tests, you wind up with Infinity in the list of diffs.
// Exclude them for purposes of geomean.
worse = worse.Where(x => GetRatio(x) != double.PositiveInfinity);
better = better.Where(x => GetRatio(x) != double.PositiveInfinity);
Console.WriteLine("summary:");
if (betterCount > 0)
{
var betterGeoMean = Math.Pow(10, better.Skip(1).Aggregate(Math.Log10(GetRatio(better.First())), (x, y) => x + Math.Log10(GetRatio(y))) / better.Count());
Console.WriteLine($"better: {betterCount}, geomean: {betterGeoMean:F3}");
}
if (worseCount > 0)
{
var worseGeoMean = Math.Pow(10, worse.Skip(1).Aggregate(Math.Log10(GetRatio(worse.First())), (x, y) => x + Math.Log10(GetRatio(y))) / worse.Count());
Console.WriteLine($"worse: {worseCount}, geomean: {worseGeoMean:F3}");
}
Console.WriteLine($"total diff: {notSame.Length}");
Console.WriteLine();
}
private static void PrintTable((string id, Benchmark baseResult, Benchmark diffResult, EquivalenceTestConclusion conclusion)[] notSame, EquivalenceTestConclusion conclusion, CommandLineOptions args)
{
var data = notSame
.Where(result => result.conclusion == conclusion)
.OrderByDescending(result => GetRatio(conclusion, result.baseResult, result.diffResult))
.Take(args.TopCount ?? int.MaxValue)
.Select(result => new
{
Id = result.id.Length > 80 ? result.id.Substring(0, 80) : result.id,
DisplayValue = GetRatio(conclusion, result.baseResult, result.diffResult),
BaseMedian = result.baseResult.Statistics.Median,
DiffMedian = result.diffResult.Statistics.Median,
Modality = GetModalInfo(result.baseResult) ?? GetModalInfo(result.diffResult)
})
.ToArray();
if (!data.Any())
{
Console.WriteLine($"No {conclusion} results for the provided threshold = {args.StatisticalTestThreshold} and noise filter = {args.NoiseThreshold}.");
Console.WriteLine();
return;
}
var table = data.ToMarkdownTable().WithHeaders(conclusion.ToString(), conclusion == EquivalenceTestConclusion.Faster ? "base/diff" : "diff/base", "Base Median (ns)", "Diff Median (ns)", "Modality");
foreach (var line in table.ToMarkdown().Split(Environment.NewLine, StringSplitOptions.RemoveEmptyEntries))
Console.WriteLine($"| {line.TrimStart()}|"); // the table starts with \t and does not end with '|' and it looks bad so we fix it
Console.WriteLine();
}
private static IEnumerable<(string id, Benchmark baseResult, Benchmark diffResult)> ReadResults(CommandLineOptions args)
{
var baseFiles = GetFilesToParse(args.BasePath);
var diffFiles = GetFilesToParse(args.DiffPath);
if (!baseFiles.Any() || !diffFiles.Any())
throw new ArgumentException($"Provided paths contained no {FullBdnJsonFileExtension} files.");
var baseResults = baseFiles.Select(ReadFromFile);
var diffResults = diffFiles.Select(ReadFromFile);
var filters = args.Filters.Select(pattern => new Regex(WildcardToRegex(pattern), RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)).ToArray();
var benchmarkIdToDiffResults = diffResults
.SelectMany(result => result.Benchmarks)
.Where(benchmarkResult => !filters.Any() || filters.Any(filter => filter.IsMatch(benchmarkResult.FullName)))
.ToDictionary(benchmarkResult => benchmarkResult.FullName, benchmarkResult => benchmarkResult);
return baseResults
.SelectMany(result => result.Benchmarks)
.ToDictionary(benchmarkResult => benchmarkResult.FullName, benchmarkResult => benchmarkResult) // we use ToDictionary to make sure the results have unique IDs
.Where(baseResult => benchmarkIdToDiffResults.ContainsKey(baseResult.Key))
.Select(baseResult => (baseResult.Key, baseResult.Value, benchmarkIdToDiffResults[baseResult.Key]));
}
private static void ExportToCsv((string id, Benchmark baseResult, Benchmark diffResult, EquivalenceTestConclusion conclusion)[] notSame, FileInfo csvPath)
{
if (csvPath == null)
return;
if (csvPath.Exists)
csvPath.Delete();
using (var textWriter = csvPath.CreateText())
{
foreach (var (id, baseResult, diffResult, conclusion) in notSame)
{
textWriter.WriteLine($"\"{id.Replace("\"", "\"\"")}\";base;{conclusion};{string.Join(';', baseResult.GetOriginalValues())}");
textWriter.WriteLine($"\"{id.Replace("\"", "\"\"")}\";diff;{conclusion};{string.Join(';', diffResult.GetOriginalValues())}");
}
}
Console.WriteLine($"CSV results exported to {csvPath.FullName}");
}
private static void ExportToXml((string id, Benchmark baseResult, Benchmark diffResult, EquivalenceTestConclusion conclusion)[] notSame, FileInfo xmlPath)
{
if (xmlPath == null) // the XML export is optional, so exit quietly like the CSV export does
return;
if (xmlPath.Exists)
xmlPath.Delete();
using (XmlWriter writer = XmlWriter.Create(xmlPath.Open(FileMode.OpenOrCreate, FileAccess.Write, FileShare.Write)))
{
writer.WriteStartElement("performance-tests");
foreach (var (id, baseResult, diffResult, conclusion) in notSame.Where(x => x.conclusion == EquivalenceTestConclusion.Slower))
{
writer.WriteStartElement("test");
writer.WriteAttributeString("name", id);
writer.WriteAttributeString("type", baseResult.Type);
writer.WriteAttributeString("method", baseResult.Method);
writer.WriteAttributeString("time", "0");
writer.WriteAttributeString("result", "Fail");
writer.WriteStartElement("failure");
writer.WriteAttributeString("exception-type", "Regression");
writer.WriteElementString("message", $"{id} has regressed, was {baseResult.Statistics.Median} is {diffResult.Statistics.Median}.");
writer.WriteEndElement(); // close 'failure'
writer.WriteEndElement(); // close 'test'; otherwise subsequent tests would nest inside this one
}
foreach (var (id, baseResult, diffResult, conclusion) in notSame.Where(x => x.conclusion == EquivalenceTestConclusion.Faster))
{
writer.WriteStartElement("test");
writer.WriteAttributeString("name", id);
writer.WriteAttributeString("type", baseResult.Type);
writer.WriteAttributeString("method", baseResult.Method);
writer.WriteAttributeString("time", "0");
writer.WriteAttributeString("result", "Skip");
writer.WriteElementString("reason", $"{id} has improved, was {baseResult.Statistics.Median} is {diffResult.Statistics.Median}.");
writer.WriteEndElement();
}
writer.WriteEndElement();
writer.Flush();
}
Console.WriteLine($"XML results exported to {xmlPath.FullName}");
}
private static string[] GetFilesToParse(string path)
{
if (Directory.Exists(path))
return Directory.GetFiles(path, $"*{FullBdnJsonFileExtension}", SearchOption.AllDirectories);
else if (File.Exists(path) && path.EndsWith(FullBdnJsonFileExtension))
return new[] { path };
else
throw new FileNotFoundException($"Provided path does NOT exist or is not a {FullBdnJsonFileExtension} file.", path);
}
// code and magic values taken from BenchmarkDotNet.Analysers.MultimodalDistributionAnalyzer
// See http://www.brendangregg.com/FrequencyTrails/modes.html
private static string GetModalInfo(Benchmark benchmark)
{
if (benchmark.Statistics.N < 12) // not enough data to tell
return null;
double mValue = MValueCalculator.Calculate(benchmark.GetOriginalValues());
if (mValue > 4.2)
return "multimodal";
else if (mValue > 3.2)
return "bimodal";
else if (mValue > 2.8)
return "several?";
return null;
}
private static double GetRatio((string id, Benchmark baseResult, Benchmark diffResult, EquivalenceTestConclusion conclusion) item) => GetRatio(item.conclusion, item.baseResult, item.diffResult);
private static double GetRatio(EquivalenceTestConclusion conclusion, Benchmark baseResult, Benchmark diffResult)
=> conclusion == EquivalenceTestConclusion.Faster
? baseResult.Statistics.Median / diffResult.Statistics.Median
: diffResult.Statistics.Median / baseResult.Statistics.Median;
private static BdnResult ReadFromFile(string resultFilePath)
{
try
{
return JsonConvert.DeserializeObject<BdnResult>(File.ReadAllText(resultFilePath));
}
catch (JsonSerializationException)
{
Console.WriteLine($"Exception while reading the {resultFilePath} file.");
throw;
}
}
// https://stackoverflow.com/a/6907849/5852046; not perfect, but it works for everything we need
private static string WildcardToRegex(string pattern) => $"^{Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".")}$";
}
}

View File

@ -0,0 +1,41 @@
# Results Comparer
This simple tool allows for easy comparison of provided benchmark results.
It can be used to compare:
* historical results (e.g. before and after my changes)
* results for different OSes (e.g. Windows vs Ubuntu)
* results for different CPU architectures (e.g. x64 vs ARM64)
* results for different target frameworks (e.g. .NET Core 3.1 vs 5.0)
All you need to provide is:
* `--base` - path to folder/file with baseline results
* `--diff` - path to folder/file with diff results
* `--threshold` - threshold for Statistical Test. Examples: 5%, 10ms, 100ns, 1s
Optional arguments:
* `--top` - filter the diff to the top/bottom `N` results
* `--noise` - noise threshold for the Statistical Test. The difference between 1.0ns and 1.1ns is 10%, but it's just noise. Examples: 0.5ns, 1ns. The default value is 0.3ns.
* `--csv` - path to exported CSV results
* `--xml` - path to exported XML results
* `-f|--filter` - filter the benchmarks by name using glob pattern(s)
Sample: compare the results stored in `C:\results\windows` vs `C:\results\ubuntu` using a `1%` threshold and print only the top 10 differences.
```cmd
dotnet run --base "C:\results\windows" --diff "C:\results\ubuntu" --threshold 1% --top 10
```
**Note**: the tool supports only `*full.json` results exported by BenchmarkDotNet. This exporter is enabled by default in this repository.
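For reference, here is a minimal sketch of how a BenchmarkDotNet config can enable that exporter explicitly, assuming the standard `JsonExporter.Full` API (the `PerfConfig` class name is hypothetical):
```csharp
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Exporters.Json;

// Hypothetical config: attaches the 'full' JSON exporter, which produces
// the '*full.json' files that this tool consumes.
public class PerfConfig : ManualConfig
{
    public PerfConfig()
    {
        AddExporter(JsonExporter.Full);
    }
}
```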
## Sample results
| Slower | diff/base | Base Median (ns) | Diff Median (ns) | Modality|
| --------------------------------------------------------------- | ---------:| ----------------:| ----------------:| -------:|
| PerfLabTests.BlockCopyPerf.CallBlockCopy(numElements: 100) | 1.60 | 9.22 | 14.76 | |
| System.Tests.Perf_String.Trim_CharArr(s: "Test", c: [' ', ' ']) | 1.41 | 6.18 | 8.72 | |
| Faster | base/diff | Base Median (ns) | Diff Median (ns) | Modality|
| ----------------------------------- | ---------:| ----------------:| ----------------:| -------:|
| System.Tests.Perf_Array.ArrayCopy3D | 1.31 | 372.71 | 284.73 | |
If there is no difference or if there is no match (we use full benchmark names to match the benchmarks), then the results are omitted.

View File

@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFrameworks>$(PERFLAB_TARGET_FRAMEWORKS)</TargetFrameworks>
<TargetFramework Condition="'$(TargetFrameworks)' == ''">net5.0</TargetFramework>
<LangVersion>latest</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="CommandLineParser" Version="2.4.3" />
<PackageReference Include="MarkdownLog.NS20" Version="0.10.1" />
<PackageReference Include="Newtonsoft.Json" Version="12.0.1" />
<PackageReference Include="BenchmarkDotNet" Version="0.12.1" />
<PackageReference Include="Perfolizer" Version="0.2.1" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,16 @@

Microsoft Visual Studio Solution File, Format Version 12.00
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ResultsComparer", "ResultsComparer.csproj", "{00859394-44F8-466B-8624-41578CA94009}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{00859394-44F8-466B-8624-41578CA94009}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{00859394-44F8-466B-8624-41578CA94009}.Debug|Any CPU.Build.0 = Debug|Any CPU
{00859394-44F8-466B-8624-41578CA94009}.Release|Any CPU.ActiveCfg = Release|Any CPU
{00859394-44F8-466B-8624-41578CA94009}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

8
test/perf/nuget.config Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
<add key="benchmark-dotnet-prerelease" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/benchmark-dotnet-prerelease/nuget/v3/index.json" />
</packageSources>
</configuration>

151
test/perf/perf.psm1 Normal file
View File

@ -0,0 +1,151 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
$repoRoot = git rev-parse --show-toplevel
Import-Module "$repoRoot/build.psm1"
function Start-Benchmarking
{
<#
.SYNOPSIS
Start a benchmark run.
.PARAMETER TargetPSVersion
The version of 'Microsoft.PowerShell.SDK' package that we want the benchmark to target.
The supported versions are 7.0.x through 7.2.x, including preview versions.
.PARAMETER List
List the available benchmarks, in either 'flat' or 'tree' views.
.PARAMETER Filter
One or more wildcard patterns to filter the benchmarks to be executed or to be listed.
.PARAMETER Artifacts
Path to the folder where you want to store the artifacts produced from running benchmarks.
.PARAMETER KeepFiles
Indicates to keep all temporary files produced for running benchmarks.
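.EXAMPLE
PS> Start-Benchmarking -List flat
List all available benchmarks in a flat view, without running them.
.EXAMPLE
PS> Start-Benchmarking -Filter 'Engine.Parsing*' -Artifacts ./before
Run only the benchmarks whose full names match 'Engine.Parsing*' against the current
code base and save the artifacts under './before'. (The filter pattern and folder
name here are illustrative, not values defined by this module.)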
#>
[CmdletBinding()]
param(
[ValidatePattern(
'^7\.(0|1|2)\.\d+(-preview\.\d{1,2})?$',
ErrorMessage = 'The package version is invalid or not supported')]
[string] $TargetPSVersion,
[ValidateSet('flat', 'tree')]
[string] $List,
[string[]] $Filter = '*',
[string] $Artifacts,
[switch] $KeepFiles
)
Begin {
Find-Dotnet
if ($Artifacts) {
$Artifacts = $PSCmdlet.SessionState.Path.GetUnresolvedProviderPathFromPSPath($Artifacts)
} else {
$Artifacts = Join-Path $PSScriptRoot 'BenchmarkDotNet.Artifacts'
}
if (Test-Path -Path $Artifacts) {
Remove-Item -Path $Artifacts -Recurse -Force -ErrorAction Stop
}
}
End {
try {
Push-Location -Path "$PSScriptRoot/benchmarks"
$savedOFS = $OFS; $OFS = $null
if ($TargetPSVersion) {
Write-Log -message "Run benchmarks targeting the 'Microsoft.PowerShell.SDK' version $TargetPSVersion..."
$env:PERF_TARGET_VERSION = $TargetPSVersion
} else {
Write-Log -message "Run benchmarks targeting the current PowerShell code base..."
}
$runArgs = @()
if ($List) { $runArgs += '--list', $List }
if ($KeepFiles) { $runArgs += "--keepFiles" }
dotnet run -c release --filter $Filter --artifacts $Artifacts --envVars POWERSHELL_TELEMETRY_OPTOUT:1 $runArgs
if (Test-Path $Artifacts) {
Write-Log -message "`nBenchmark artifacts can be found at $Artifacts"
}
}
finally {
$OFS = $savedOFS
$env:PERF_TARGET_VERSION = $null
Pop-Location
}
}
}
function Compare-BenchmarkResult
{
<#
.SYNOPSIS
Compare two benchmark run results to find possible regressions.
When running benchmarks with 'Start-Benchmarking', you can specify where to save
the artifacts by using '-Artifacts'.
To compare two benchmark runs, specify the result folder paths for both runs,
one as the base and one as the diff.
.PARAMETER BaseResultPath
Path to the benchmark result used as baseline.
.PARAMETER DiffResultPath
Path to the benchmark result to be compared with the baseline.
.PARAMETER Threshold
Threshold for Statistical Test. Examples: 5%, 10ms, 100ns, 1s
.PARAMETER Noise
Noise threshold for Statistical Test.
The difference between 1.0ns and 1.1ns is 10%, but it's really just noise. Examples: 0.5ns, 1ns.
The default value is 0.3ns.
.PARAMETER Top
Filter the diff to the top/bottom `N` results.
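.EXAMPLE
PS> Compare-BenchmarkResult -BaseResultPath ./before -DiffResultPath ./after -Threshold 2%
Compare the run saved under './before' (base) with the run saved under './after' (diff)
using a 2% statistical test threshold. (The folder names are illustrative; point the
two paths at the '-Artifacts' folders of your two 'Start-Benchmarking' runs.)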
#>
param(
[Parameter(Mandatory)]
[string] $BaseResultPath,
[Parameter(Mandatory)]
[string] $DiffResultPath,
[Parameter(Mandatory)]
[ValidatePattern('^\d{1,2}%$|^\d+(ms|ns|s)$')]
[string] $Threshold,
[ValidatePattern('^(\d\.)?\d+(ms|ns|s)$')]
[string] $Noise,
[ValidateRange(1, 100)]
[int] $Top
)
Find-Dotnet
try {
Push-Location -Path "$PSScriptRoot/dotnet-tools/ResultsComparer"
$savedOFS = $OFS; $OFS = $null
$runArgs = @()
# Add option name and value as separate array elements so 'dotnet' receives them as distinct arguments.
if ($Noise) { $runArgs += '--noise', $Noise }
if ($Top -gt 0) { $runArgs += '--top', $Top }
dotnet run -c release --base $BaseResultPath --diff $DiffResultPath --threshold $Threshold $runArgs
}
finally {
$OFS = $savedOFS
Pop-Location
}
}