diff --git a/BenchmarkDotNet.sln b/BenchmarkDotNet.sln index ba31552c8a..eed29c80af 100644 --- a/BenchmarkDotNet.sln +++ b/BenchmarkDotNet.sln @@ -1,7 +1,7 @@  Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 15 -VisualStudioVersion = 15.0.27130.2027 +# Visual Studio Version 17 +VisualStudioVersion = 17.8.34004.107 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{D6597E3A-6892-4A68-8E14-042FC941FDA2}" EndProject @@ -51,6 +51,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BenchmarkDotNet.Diagnostics EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BenchmarkDotNet.IntegrationTests.ManualRunning.MultipleFrameworks", "tests\BenchmarkDotNet.IntegrationTests.ManualRunning.MultipleFrameworks\BenchmarkDotNet.IntegrationTests.ManualRunning.MultipleFrameworks.csproj", "{AACA2C63-A85B-47AB-99FC-72C3FF408B14}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BenchmarkDotNet.TestAdapter", "src\BenchmarkDotNet.TestAdapter\BenchmarkDotNet.TestAdapter.csproj", "{4C9C89B8-7C4E-4ECF-B3C9-324C8772EDAC}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -137,6 +139,10 @@ Global {AACA2C63-A85B-47AB-99FC-72C3FF408B14}.Debug|Any CPU.Build.0 = Debug|Any CPU {AACA2C63-A85B-47AB-99FC-72C3FF408B14}.Release|Any CPU.ActiveCfg = Release|Any CPU {AACA2C63-A85B-47AB-99FC-72C3FF408B14}.Release|Any CPU.Build.0 = Release|Any CPU + {4C9C89B8-7C4E-4ECF-B3C9-324C8772EDAC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4C9C89B8-7C4E-4ECF-B3C9-324C8772EDAC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4C9C89B8-7C4E-4ECF-B3C9-324C8772EDAC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4C9C89B8-7C4E-4ECF-B3C9-324C8772EDAC}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -162,6 +168,7 @@ Global {B620D10A-CD8E-4A34-8B27-FD6257E63AD0} = 
{63B94FD6-3F3D-4E04-9727-48E86AC4384C} {C5BDA61F-3A56-4B59-901D-0A17E78F4076} = {D6597E3A-6892-4A68-8E14-042FC941FDA2} {AACA2C63-A85B-47AB-99FC-72C3FF408B14} = {14195214-591A-45B7-851A-19D3BA2413F9} + {4C9C89B8-7C4E-4ECF-B3C9-324C8772EDAC} = {D6597E3A-6892-4A68-8E14-042FC941FDA2} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {4D9AF12B-1F7F-45A7-9E8C-E4E46ADCBD1F} diff --git a/docs/articles/features/toc.yml b/docs/articles/features/toc.yml index fe55761610..ac29397743 100644 --- a/docs/articles/features/toc.yml +++ b/docs/articles/features/toc.yml @@ -11,4 +11,6 @@ - name: EtwProfiler href: etwprofiler.md - name: EventPipeProfiler - href: event-pipe-profiler.md \ No newline at end of file + href: event-pipe-profiler.md +- name: VSTest + href: vstest.md \ No newline at end of file diff --git a/docs/articles/features/vstest.md b/docs/articles/features/vstest.md new file mode 100644 index 0000000000..cdeac5fa7c --- /dev/null +++ b/docs/articles/features/vstest.md @@ -0,0 +1,74 @@ +--- +uid: docs.vstest +name: Running with VSTest +--- + +# Running with VSTest +BenchmarkDotNet has support for discovering and executing benchmarks through VSTest. This provides an alternative user experience to running benchmarks with the CLI and may be preferable for those who like their IDE's VSTest integrations that they may have used when running unit tests. + +Below is an example of running some benchmarks from the BenchmarkDotNet samples project in Visual Studio's Test Explorer. + +![](../../images/vs-testexplorer-demo.png) + +## About VSTest + +VSTest is one of the most popular test platforms in use in the .NET ecosystem, with test frameworks such as MSTest, xUnit, and NUnit providing support for it. Many IDEs, including Visual Studio and Rider, provide UIs for running tests through VSTest which some users may find more accessible than running them through the command line. 
+It may seem counterintuitive to run performance tests on a platform that is designed for unit tests that expect a boolean outcome of "Passed" or "Failed"; however, VSTest provides good value as a protocol for discovering and executing tests. In addition, we can still make use of this boolean output to indicate if the benchmark had validation errors that caused it to fail to run. + +## Caveats and things to know +- The VSTest adapter will not call your application's entry point. + - If you use the entry point to customize how your benchmarks are run, you will need to do this through other means such as an assembly-level `IConfigSource`. + - For more about this, please read: [Setting a default configuration](#setting-a-default-configuration). +- The benchmark measurements may be affected by the VSTest host and your IDE. + - If you want to have more accurate performance results, it is recommended to run benchmarks through the CLI instead without other processes on the machine impacting performance. + - This does not mean that the measurements are useless though; you will still get useful measurements during development when comparing different approaches. +- The test adapter will not display or execute benchmarks if optimizations are disabled. + - Please ensure you are compiling in Release mode or with `Optimize` set to true. + - Using an `InProcess` toolchain will let you run your benchmarks with optimizations disabled and will let you attach the debugger as well. +- The test adapter will generate an entry point for you automatically. + - The generated entry point will pass the command line arguments and the current assembly into `BenchmarkSwitcher`, so you can still use it in your CLI as well as in VSTest. + - This means you can delete your entry point and only need to define your benchmarks. + - If you want to use a custom entry point, you can still do so by setting `GenerateProgramFile` to `false` in your project file. 
+ +## How to use it + +You need to install two packages into your benchmark project: + +- `BenchmarkDotNet.TestAdapter`: Implements the VSTest protocol for BenchmarkDotNet +- `Microsoft.NET.Test.Sdk`: Includes all the pieces needed for the VSTest host to run and load the VSTest adapter. + +As mentioned in the caveats section, `BenchmarkDotNet.TestAdapter` will generate an entry point for you automatically, so if you have an entry point already you will either need to delete it or set `GenerateProgramFile` to `false` in your project file to continue using your existing one. + +After doing this, you can set your build configuration to `Release`, run a build, and you should be able to see the benchmarks in your IDE's VSTest integration. + +## Setting a default configuration + +Previously, it was common for the default configuration to be defined inside the entry point. Since the entry point is not used when running benchmarks through VSTest, the default configuration must be specified using a `Config` attribute that is set on the assembly instead. + +First, create a class that extends `ManualConfig` or implements `IConfig` which sets the default configuration you want: + +```csharp +class MyDefaultConfig : ManualConfig +{ + public MyDefaultConfig() + { + AddJob(Job.Dry); + AddLogger(Loggers.ConsoleLogger.Default); + AddValidator(JitOptimizationsValidator.DontFailOnError); + } +} +``` + +Then, set an assembly attribute with the following. + +```csharp +[assembly: Config(typeof(MyDefaultConfig))] +``` + +By convention, assembly attributes are usually defined inside `AssemblyInfo.cs` in a directory called `Properties`. + +## Viewing the results +The full output from BenchmarkDotNet that you are used to seeing will be sent to the "Tests" output of your IDE. Use this view if you want to see the tabular view that compares multiple benchmarks with each other, or if you want to see the results for each individual iteration. 
+ +One more place where you can view the results is in each individual test's output messages. In Visual Studio this can be viewed by clicking on the test in the Test Explorer after running it, and looking at the Test Detail Summary. Since this only displays statistics for a single benchmark case, it does not show the tabulated view that compares multiple benchmark cases, but instead displays a histogram and various other useful statistics. Not all IDEs support displaying these output messages, so you may only be able to view the results using the "Tests" output. \ No newline at end of file diff --git a/docs/images/vs-testexplorer-demo.png b/docs/images/vs-testexplorer-demo.png new file mode 100644 index 0000000000..91232e30d6 Binary files /dev/null and b/docs/images/vs-testexplorer-demo.png differ diff --git a/samples/BenchmarkDotNet.Samples.FSharp/BenchmarkDotNet.Samples.FSharp.fsproj b/samples/BenchmarkDotNet.Samples.FSharp/BenchmarkDotNet.Samples.FSharp.fsproj index 0247bc8ce7..2ae53f94d8 100644 --- a/samples/BenchmarkDotNet.Samples.FSharp/BenchmarkDotNet.Samples.FSharp.fsproj +++ b/samples/BenchmarkDotNet.Samples.FSharp/BenchmarkDotNet.Samples.FSharp.fsproj @@ -6,12 +6,14 @@ Exe net462;net8.0 + false + @@ -21,5 +23,6 @@ + diff --git a/samples/BenchmarkDotNet.Samples/BenchmarkDotNet.Samples.csproj b/samples/BenchmarkDotNet.Samples/BenchmarkDotNet.Samples.csproj index 4a937d0a39..1af13f61c3 100644 --- a/samples/BenchmarkDotNet.Samples/BenchmarkDotNet.Samples.csproj +++ b/samples/BenchmarkDotNet.Samples/BenchmarkDotNet.Samples.csproj @@ -11,6 +11,8 @@ AnyCPU true $(NoWarn);CA1018;CA5351;CA1825 + + false @@ -19,10 +21,13 @@ + + + diff --git a/src/BenchmarkDotNet.TestAdapter/BenchmarkCaseExtensions.cs b/src/BenchmarkDotNet.TestAdapter/BenchmarkCaseExtensions.cs new file mode 100644 index 0000000000..dec7aae8c1 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/BenchmarkCaseExtensions.cs @@ -0,0 +1,94 @@ +using BenchmarkDotNet.Attributes; +using 
BenchmarkDotNet.Characteristics; +using BenchmarkDotNet.Exporters; +using BenchmarkDotNet.Extensions; +using BenchmarkDotNet.Running; +using Microsoft.TestPlatform.AdapterUtilities; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using System; + +namespace BenchmarkDotNet.TestAdapter +{ + /// + /// A set of extensions for BenchmarkCase to support converting to VSTest TestCase objects. + /// + internal static class BenchmarkCaseExtensions + { + /// + /// Converts a BDN BenchmarkCase to a VSTest TestCase. + /// + /// The BenchmarkCase to convert. + /// The dll or exe of the benchmark project. + /// Whether or not the display name should include the job name. + /// The VSTest TestCase. + internal static TestCase ToVsTestCase(this BenchmarkCase benchmarkCase, string assemblyPath, bool includeJobInName = false) + { + var benchmarkMethod = benchmarkCase.Descriptor.WorkloadMethod; + var fullClassName = benchmarkCase.Descriptor.Type.GetCorrectCSharpTypeName(); + var benchmarkMethodName = benchmarkCase.Descriptor.WorkloadMethod.Name; + var benchmarkFullMethodName = $"{fullClassName}.{benchmarkMethodName}"; + + // Display name has arguments as well. 
+ var displayMethodName = FullNameProvider.GetMethodName(benchmarkCase); + if (includeJobInName) + displayMethodName += $" [{benchmarkCase.GetUnrandomizedJobDisplayInfo()}]"; + + var displayName = $"{fullClassName}.{displayMethodName}"; + + var vsTestCase = new TestCase(benchmarkFullMethodName, VsTestAdapter.ExecutorUri, assemblyPath) + { + DisplayName = displayName, + Id = GetTestCaseId(benchmarkCase) + }; + + var benchmarkAttribute = benchmarkMethod.ResolveAttribute(); + if (benchmarkAttribute != null) + { + vsTestCase.CodeFilePath = benchmarkAttribute.SourceCodeFile; + vsTestCase.LineNumber = benchmarkAttribute.SourceCodeLineNumber; + } + + var categories = DefaultCategoryDiscoverer.Instance.GetCategories(benchmarkMethod); + foreach (var category in categories) + vsTestCase.Traits.Add("Category", category); + + vsTestCase.Traits.Add("", "BenchmarkDotNet"); + + return vsTestCase; + } + + /// + /// If an ID is not provided, a random string is used for the ID. This method will identify if randomness was + /// used for the ID and return the Job's DisplayInfo with that randomness removed so that the same benchmark + /// can be referenced across multiple processes. + /// + /// The benchmark case. + /// The benchmark case' job's DisplayInfo without randomness. + internal static string GetUnrandomizedJobDisplayInfo(this BenchmarkCase benchmarkCase) + { + var jobDisplayInfo = benchmarkCase.Job.DisplayInfo; + if (!benchmarkCase.Job.HasValue(CharacteristicObject.IdCharacteristic) && benchmarkCase.Job.ResolvedId.StartsWith("Job-", StringComparison.OrdinalIgnoreCase)) + { + // Replace Job-ABCDEF with Job + jobDisplayInfo = "Job" + jobDisplayInfo.Substring(benchmarkCase.Job.ResolvedId.Length); + } + + return jobDisplayInfo; + } + + /// + /// Gets an ID for a given BenchmarkCase that is uniquely identifiable from discovery to execution phase. + /// + /// The benchmark case. + /// The test case ID. 
+ internal static Guid GetTestCaseId(this BenchmarkCase benchmarkCase) + { + var testIdProvider = new TestIdProvider(); + testIdProvider.AppendString(VsTestAdapter.ExecutorUriString); + testIdProvider.AppendString(benchmarkCase.Descriptor.DisplayInfo); + testIdProvider.AppendString(benchmarkCase.GetUnrandomizedJobDisplayInfo()); + testIdProvider.AppendString(benchmarkCase.Parameters.DisplayInfo); + return testIdProvider.GetId(); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/BenchmarkDotNet.TestAdapter.csproj b/src/BenchmarkDotNet.TestAdapter/BenchmarkDotNet.TestAdapter.csproj new file mode 100644 index 0000000000..b0eb8bfc3a --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/BenchmarkDotNet.TestAdapter.csproj @@ -0,0 +1,28 @@ + + + + netstandard2.0;net462 + BenchmarkDotNet.TestAdapter + BenchmarkDotNet.TestAdapter + BenchmarkDotNet.TestAdapter + True + enable + + + + + + + + + + + + + + + + + + + diff --git a/src/BenchmarkDotNet.TestAdapter/BenchmarkEnumerator.cs b/src/BenchmarkDotNet.TestAdapter/BenchmarkEnumerator.cs new file mode 100644 index 0000000000..daf3e2222e --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/BenchmarkEnumerator.cs @@ -0,0 +1,48 @@ +using BenchmarkDotNet.Extensions; +using BenchmarkDotNet.Helpers; +using BenchmarkDotNet.Running; +using BenchmarkDotNet.Toolchains; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; + +namespace BenchmarkDotNet.TestAdapter +{ + /// + /// A class used for enumerating all the benchmarks in an assembly. + /// + internal static class BenchmarkEnumerator + { + /// + /// Returns all the BenchmarkRunInfo objects from a given assembly. + /// + /// The dll or exe of the benchmark project. + /// The benchmarks inside the assembly. + public static BenchmarkRunInfo[] GetBenchmarksFromAssemblyPath(string assemblyPath) + { + var assembly = Assembly.LoadFrom(assemblyPath); + + var isDebugAssembly = assembly.IsJitOptimizationDisabled() ?? 
false; + + return GenericBenchmarksBuilder.GetRunnableBenchmarks(assembly.GetRunnableBenchmarks()) + .Select(type => + { + var benchmarkRunInfo = BenchmarkConverter.TypeToBenchmarks(type); + if (isDebugAssembly) + { + // If the assembly is a debug assembly, then only display them if they will run in-process + // This will allow people to debug their benchmarks using VSTest if they wish. + benchmarkRunInfo = new BenchmarkRunInfo( + benchmarkRunInfo.BenchmarksCases.Where(c => c.GetToolchain().IsInProcess).ToArray(), + benchmarkRunInfo.Type, + benchmarkRunInfo.Config); + } + + return benchmarkRunInfo; + }) + .Where(runInfo => runInfo.BenchmarksCases.Length > 0) + .ToArray(); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/BenchmarkExecutor.cs b/src/BenchmarkDotNet.TestAdapter/BenchmarkExecutor.cs new file mode 100644 index 0000000000..b13a4eaf4a --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/BenchmarkExecutor.cs @@ -0,0 +1,86 @@ +using BenchmarkDotNet.Configs; +using BenchmarkDotNet.Running; +using BenchmarkDotNet.TestAdapter.Remoting; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; + +namespace BenchmarkDotNet.TestAdapter +{ + /// + /// A class used for executing benchmarks + /// + internal class BenchmarkExecutor + { + private readonly CancellationTokenSource cts = new (); + + /// + /// Runs all the benchmarks in the given assembly, updating the TestExecutionRecorder as they get run. + /// + /// The dll or exe of the benchmark project. + /// The interface used to record the current test execution progress. + /// + /// An optional list of benchmark IDs specifying which benchmarks to run. + /// These IDs are the same as the ones generated for the VSTest TestCase. + /// + public void RunBenchmarks(string assemblyPath, TestExecutionRecorderWrapper recorder, HashSet? 
benchmarkIds = null) + { + var benchmarks = BenchmarkEnumerator.GetBenchmarksFromAssemblyPath(assemblyPath); + var testCases = new List(); + + var filteredBenchmarks = new List(); + foreach (var benchmark in benchmarks) + { + var needsJobInfo = benchmark.BenchmarksCases.Select(c => c.Job.DisplayInfo).Distinct().Count() > 1; + var filteredCases = new List(); + foreach (var benchmarkCase in benchmark.BenchmarksCases) + { + var testId = benchmarkCase.GetTestCaseId(); + if (benchmarkIds != null && benchmarkIds.Contains(testId)) + { + filteredCases.Add(benchmarkCase); + testCases.Add(benchmarkCase.ToVsTestCase(assemblyPath, needsJobInfo)); + } + } + + if (filteredCases.Count > 0) + { + filteredBenchmarks.Add(new BenchmarkRunInfo(filteredCases.ToArray(), benchmark.Type, benchmark.Config)); + } + } + + benchmarks = filteredBenchmarks.ToArray(); + + if (benchmarks.Length == 0) + return; + + // Create an event processor which will subscribe to events and push them to VSTest + var eventProcessor = new VsTestEventProcessor(testCases, recorder, cts.Token); + + // Create a logger which will forward all log messages in BDN to the VSTest logger. + var logger = new VsTestLogger(recorder.GetLogger()); + + // Modify all the benchmarks so that the event process and logger is added. + benchmarks = benchmarks + .Select(b => new BenchmarkRunInfo( + b.BenchmarksCases, + b.Type, + b.Config.AddEventProcessor(eventProcessor).AddLogger(logger).CreateImmutableConfig())) + .ToArray(); + + // Run all the benchmarks, and ensure that any tests that don't have a result yet are sent. + BenchmarkRunner.Run(benchmarks); + eventProcessor.SendUnsentTestResults(); + } + + /// + /// Stop the benchmarks when next able. 
+ /// + public void Cancel() + { + cts.Cancel(); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/Package/BenchmarkDotNet.TestAdapter.props b/src/BenchmarkDotNet.TestAdapter/Package/BenchmarkDotNet.TestAdapter.props new file mode 100644 index 0000000000..a9e8340b30 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Package/BenchmarkDotNet.TestAdapter.props @@ -0,0 +1,21 @@ + + + $(MSBuildThisFileDirectory)..\entrypoints\ + + + + + + + + + + + false + + + \ No newline at end of file diff --git a/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.cs b/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.cs new file mode 100644 index 0000000000..ec2a5f87dd --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.cs @@ -0,0 +1,7 @@ +using BenchmarkDotNet.Running; +using System.Reflection; + +public class __AutoGeneratedEntryPointClass +{ + public static void Main(string[] args) => BenchmarkSwitcher.FromAssembly(typeof(__AutoGeneratedEntryPointClass).Assembly).Run(args); +} diff --git a/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.fs b/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.fs new file mode 100644 index 0000000000..e5870cd8f3 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.fs @@ -0,0 +1,9 @@ +module __AutoGeneratedEntryPointClass +open System.Reflection; +open BenchmarkDotNet.Running + +type internal __Marker = interface end // Used to help locale current assembly +[] +let main argv = + BenchmarkSwitcher.FromAssembly(typeof<__Marker>.Assembly).Run(argv) |> ignore + 0 // return an integer exit code diff --git a/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.vb b/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.vb new file mode 100644 index 0000000000..4bed7a0109 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Package/EntryPoint.vb @@ -0,0 +1,10 @@ +Imports System.Reflection +Imports BenchmarkDotNet.Running + +Namespace Global + Module __AutoGeneratedEntryPointClass + Sub Main(args As String()) + Dim 
summary = BenchmarkSwitcher.FromAssembly(MethodBase.GetCurrentMethod().Module.Assembly).Run(args) + End Sub + End Module +End Namespace \ No newline at end of file diff --git a/src/BenchmarkDotNet.TestAdapter/Remoting/BenchmarkEnumeratorWrapper.cs b/src/BenchmarkDotNet.TestAdapter/Remoting/BenchmarkEnumeratorWrapper.cs new file mode 100644 index 0000000000..b3ad68bb23 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Remoting/BenchmarkEnumeratorWrapper.cs @@ -0,0 +1,35 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace BenchmarkDotNet.TestAdapter.Remoting +{ + /// + /// A wrapper around the BenchmarkEnumerator for passing data across AppDomain boundaries. + /// + internal class BenchmarkEnumeratorWrapper : MarshalByRefObject + { + /// + /// Gets a list of VSTest TestCases from the given assembly. + /// Each test case is serialized into a string so that it can be used across AppDomain boundaries. + /// + /// The dll or exe of the benchmark project. + /// The serialized test cases. + public List GetTestCasesFromAssemblyPathSerialized(string assemblyPath) + { + var serializedTestCases = new List(); + foreach (var runInfo in BenchmarkEnumerator.GetBenchmarksFromAssemblyPath(assemblyPath)) + { + // If all the benchmarks have the same job, then no need to include job info. 
+ var needsJobInfo = runInfo.BenchmarksCases.Select(c => c.Job.DisplayInfo).Distinct().Count() > 1; + foreach (var benchmarkCase in runInfo.BenchmarksCases) + { + var testCase = benchmarkCase.ToVsTestCase(assemblyPath, needsJobInfo); + serializedTestCases.Add(SerializationHelpers.Serialize(testCase)); + } + } + + return serializedTestCases; + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/Remoting/BenchmarkExecutorWrapper.cs b/src/BenchmarkDotNet.TestAdapter/Remoting/BenchmarkExecutorWrapper.cs new file mode 100644 index 0000000000..646ae2f8be --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Remoting/BenchmarkExecutorWrapper.cs @@ -0,0 +1,23 @@ +using System; +using System.Collections.Generic; + +namespace BenchmarkDotNet.TestAdapter.Remoting +{ + /// + /// A wrapper around the BenchmarkExecutor that works across AppDomain boundaries. + /// + internal class BenchmarkExecutorWrapper : MarshalByRefObject + { + private readonly BenchmarkExecutor benchmarkExecutor = new (); + + public void RunBenchmarks(string assemblyPath, TestExecutionRecorderWrapper recorder, HashSet? benchmarkIds = null) + { + benchmarkExecutor.RunBenchmarks(assemblyPath, recorder, benchmarkIds); + } + + public void Cancel() + { + benchmarkExecutor.Cancel(); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/Remoting/MessageLoggerWrapper.cs b/src/BenchmarkDotNet.TestAdapter/Remoting/MessageLoggerWrapper.cs new file mode 100644 index 0000000000..00c4f5325f --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Remoting/MessageLoggerWrapper.cs @@ -0,0 +1,23 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; + +namespace BenchmarkDotNet.TestAdapter.Remoting +{ + /// + /// A wrapper around an IMessageLogger that works across AppDomain boundaries. 
+ /// + internal class MessageLoggerWrapper : MarshalByRefObject, IMessageLogger + { + private readonly IMessageLogger logger; + + public MessageLoggerWrapper(IMessageLogger logger) + { + this.logger = logger; + } + + public void SendMessage(TestMessageLevel testMessageLevel, string message) + { + logger.SendMessage(testMessageLevel, message); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/Remoting/SerializationHelpers.cs b/src/BenchmarkDotNet.TestAdapter/Remoting/SerializationHelpers.cs new file mode 100644 index 0000000000..5b13bd5175 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Remoting/SerializationHelpers.cs @@ -0,0 +1,26 @@ +using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities; + +namespace BenchmarkDotNet.TestAdapter.Remoting +{ + /// + /// A set of helper methods for serializing and deserializing the VSTest TestCases and TestReports. + /// + internal static class SerializationHelpers + { + // Version number of the VSTest protocol that the adapter supports. Only needs to be updated when + // the VSTest protocol has a change and this test adapter wishes to take a dependency on it. 
+ // A list of protocol versions and a summary of the changes that were made in them can be found here: + // https://github.com/microsoft/vstest/blob/main/docs/Overview.md#protocolversion-request + private const int VsTestProtocolVersion = 7; + + public static string Serialize(T data) + { + return JsonDataSerializer.Instance.Serialize(data, version: VsTestProtocolVersion); + } + + public static T Deserialize(string data) + { + return JsonDataSerializer.Instance.Deserialize(data, version: VsTestProtocolVersion)!; + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/Remoting/TestExecutionRecorderWrapper.cs b/src/BenchmarkDotNet.TestAdapter/Remoting/TestExecutionRecorderWrapper.cs new file mode 100644 index 0000000000..0669e79019 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/Remoting/TestExecutionRecorderWrapper.cs @@ -0,0 +1,39 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using System; + +namespace BenchmarkDotNet.TestAdapter.Remoting +{ + /// + /// A wrapper around the ITestExecutionRecorder which works across AppDomain boundaries. 
+ /// + internal class TestExecutionRecorderWrapper : MarshalByRefObject + { + private readonly ITestExecutionRecorder testExecutionRecorder; + + public TestExecutionRecorderWrapper(ITestExecutionRecorder testExecutionRecorder) + { + this.testExecutionRecorder = testExecutionRecorder; + } + + public MessageLoggerWrapper GetLogger() + { + return new MessageLoggerWrapper(testExecutionRecorder); + } + + internal void RecordStart(string serializedTestCase) + { + testExecutionRecorder.RecordStart(SerializationHelpers.Deserialize(serializedTestCase)); + } + + internal void RecordEnd(string serializedTestCase, TestOutcome testOutcome) + { + testExecutionRecorder.RecordEnd(SerializationHelpers.Deserialize(serializedTestCase), testOutcome); + } + + internal void RecordResult(string serializedTestResult) + { + testExecutionRecorder.RecordResult(SerializationHelpers.Deserialize(serializedTestResult)); + } + } +} \ No newline at end of file diff --git a/src/BenchmarkDotNet.TestAdapter/VSTestAdapter.cs b/src/BenchmarkDotNet.TestAdapter/VSTestAdapter.cs new file mode 100644 index 0000000000..eb6695de1b --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/VSTestAdapter.cs @@ -0,0 +1,213 @@ +using BenchmarkDotNet.TestAdapter.Remoting; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Threading; + +namespace BenchmarkDotNet.TestAdapter +{ + /// + /// Discovers and executes benchmarks using the VSTest protocol. + /// + [ExtensionUri(ExecutorUriString)] + [DefaultExecutorUri(ExecutorUriString)] + [FileExtension(".dll")] + [FileExtension(".exe")] + public class VsTestAdapter : ITestExecutor, ITestDiscoverer + { + // This URI is used to identify the adapter. 
+ internal const string ExecutorUriString = "executor://BenchmarkDotNet.TestAdapter"; + internal static readonly Uri ExecutorUri = new Uri(ExecutorUriString); + + /// + /// Cancellation token used to stop any benchmarks that are currently running. + /// + private CancellationTokenSource? cts = null; + + /// + /// Discovers the benchmarks. + /// + /// List of assemblies to search for benchmarks in. + /// A context that the discovery is performed in. + /// Logger that sends messages back to VSTest host. + /// Interface that provides methods for sending discovered benchmarks back to the host. + public void DiscoverTests( + IEnumerable sources, + IDiscoveryContext discoveryContext, + IMessageLogger logger, + ITestCaseDiscoverySink discoverySink) + { + foreach (var source in sources) + { + ValidateSourceIsAssemblyOrThrow(source); + foreach (var testCase in GetVsTestCasesFromAssembly(source, logger)) + { + discoverySink.SendTestCase(testCase); + } + } + } + + /// + /// Runs a given set of test cases that represent benchmarks. + /// + /// The tests to run. + /// A context that the run is performed in. + /// Interface used for communicating with the VSTest host. + public void RunTests(IEnumerable? tests, IRunContext? runContext, IFrameworkHandle? frameworkHandle) + { + if (tests == null) + throw new ArgumentNullException(nameof(tests)); + if (frameworkHandle == null) + throw new ArgumentNullException(nameof(frameworkHandle)); + + cts ??= new CancellationTokenSource(); + + foreach (var testsPerAssembly in tests.GroupBy(t => t.Source)) + RunBenchmarks(testsPerAssembly.Key, frameworkHandle, testsPerAssembly); + + cts = null; + } + + /// + /// Runs all benchmarks in the given set of sources (assemblies). + /// + /// The assemblies to run. + /// A context that the run is performed in. + /// Interface used for communicating with the VSTest host. + public void RunTests(IEnumerable? sources, IRunContext? runContext, IFrameworkHandle? 
frameworkHandle) + { + if (sources == null) + throw new ArgumentNullException(nameof(sources)); + if (frameworkHandle == null) + throw new ArgumentNullException(nameof(frameworkHandle)); + + cts ??= new CancellationTokenSource(); + + foreach (var source in sources) + RunBenchmarks(source, frameworkHandle); + + cts = null; + } + + /// + /// Stops any currently running benchmarks. + /// + public void Cancel() + { + cts?.Cancel(); + } + + /// + /// Gets the VSTest test cases in the given assembly. + /// + /// The dll or exe of the benchmark project. + /// A logger that sends logs to VSTest. + /// The VSTest test cases inside the given assembly. + private static List GetVsTestCasesFromAssembly(string assemblyPath, IMessageLogger logger) + { + try + { + // Ensure that the test enumeration is done inside the context of the source directory. + var enumerator = (BenchmarkEnumeratorWrapper)CreateIsolatedType(typeof(BenchmarkEnumeratorWrapper), assemblyPath); + var testCases = enumerator + .GetTestCasesFromAssemblyPathSerialized(assemblyPath) + .Select(SerializationHelpers.Deserialize) + .ToList(); + + // Validate that all test ids are unique + var idLookup = new Dictionary(); + foreach (var testCase in testCases) + { + if (idLookup.TryGetValue(testCase.Id, out var matchingCase)) + throw new Exception($"Encountered Duplicate Test ID: '{testCase.DisplayName}' and '{matchingCase}'"); + + idLookup[testCase.Id] = testCase.DisplayName; + } + + return testCases; + } + catch (Exception ex) + { + logger.SendMessage(TestMessageLevel.Error, $"Failed to load benchmarks from assembly\n{ex}"); + throw; + } + } + + /// + /// Runs the benchmarks in the given source. + /// + /// The dll or exe of the benchmark project. + /// An interface used to communicate with the VSTest host. + /// + /// The specific test cases to be run if specified. + /// If unspecified, runs all the test cases in the source. 
+ /// + private void RunBenchmarks(string source, IFrameworkHandle frameworkHandle, IEnumerable? testCases = null) + { + ValidateSourceIsAssemblyOrThrow(source); + + // Create a HashSet of all the TestCase IDs to be run if specified. + var caseIds = testCases == null ? null : new HashSet(testCases.Select(c => c.Id)); + + try + { + // Ensure that test execution is done inside the context of the source directory. + var executor = (BenchmarkExecutorWrapper)CreateIsolatedType(typeof(BenchmarkExecutorWrapper), source); + cts?.Token.Register(executor.Cancel); + + executor.RunBenchmarks(source, new TestExecutionRecorderWrapper(frameworkHandle), caseIds); + } + catch (Exception ex) + { + frameworkHandle.SendMessage(TestMessageLevel.Error, $"Failed to run benchmarks in assembly\n{ex}"); + throw; + } + } + + /// + /// This will create the given type in a child AppDomain when used in .NET Framework. + /// If not in the .NET Framework, it will use the current AppDomain. + /// + /// The type to create. + /// The dll or exe of the benchmark project. + /// The created object. + private static object CreateIsolatedType(Type type, string assemblyPath) + { + // .NET Framework runs require a custom AppDomain to be set up to run the benchmarks in because otherwise, + // all the assemblies will be loaded from the VSTest console rather than from the directory that the BDN + // program under test lives in. .NET Core assembly resolution is smarter and will correctly load the right + // assembly versions as needed and does not require a custom AppDomain. Unfortunately, the APIs needed to + // create the AppDomain for .NET Framework are not part of .NET Standard, and so a multi-targeting solution + // such as this is required to get this to work. This same approach is also used by other .NET unit testing + // libraries as well, further justifying this approach to solving how to get the correct assemblies loaded. 
+#if NETFRAMEWORK + var appBase = Path.GetDirectoryName(assemblyPath); + var setup = new AppDomainSetup { ApplicationBase = appBase }; + var domainName = $"Isolated Domain for {type.Name}"; + var appDomain = AppDomain.CreateDomain(domainName, null, setup); + return appDomain.CreateInstanceAndUnwrap( + type.Assembly.FullName, type.FullName, false, BindingFlags.Default, null, null, null, null); +#else + return Activator.CreateInstance(type); +#endif + } + + private static void ValidateSourceIsAssemblyOrThrow(string source) + { + if (string.IsNullOrEmpty(source)) + throw new ArgumentException($"'{nameof(source)}' cannot be null or whitespace.", nameof(source)); + + if (!Path.HasExtension(source)) + throw new NotSupportedException($"Missing extension on source '{source}', must have the extension '.dll' or '.exe'."); + + var extension = Path.GetExtension(source); + if (!string.Equals(extension, ".dll", StringComparison.OrdinalIgnoreCase) && !string.Equals(extension, ".exe", StringComparison.OrdinalIgnoreCase)) + throw new NotSupportedException($"Unsupported extension on source '{source}', must have the extension '.dll' or '.exe'."); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/VSTestEventProcessor.cs b/src/BenchmarkDotNet.TestAdapter/VSTestEventProcessor.cs new file mode 100644 index 0000000000..85c93069e1 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/VSTestEventProcessor.cs @@ -0,0 +1,198 @@ +using BenchmarkDotNet.EventProcessors; +using BenchmarkDotNet.Extensions; +using BenchmarkDotNet.Reports; +using BenchmarkDotNet.Running; +using BenchmarkDotNet.TestAdapter.Remoting; +using BenchmarkDotNet.Toolchains.Results; +using BenchmarkDotNet.Validators; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Perfolizer.Mathematics.Histograms; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading; + +namespace BenchmarkDotNet.TestAdapter 
+{ + /// + /// An event processor which will pass on benchmark execution information to VSTest. + /// + internal class VsTestEventProcessor : EventProcessor + { + private readonly Dictionary cases; + private readonly TestExecutionRecorderWrapper recorder; + private readonly CancellationToken cancellationToken; + private readonly Stopwatch runTimerStopwatch = new (); + private readonly Dictionary testResults = new (); + private readonly HashSet sentTestResults = new (); + + public VsTestEventProcessor( + List cases, + TestExecutionRecorderWrapper recorder, + CancellationToken cancellationToken) + { + this.cases = cases.ToDictionary(c => c.Id); + this.recorder = recorder; + this.cancellationToken = cancellationToken; + } + + public override void OnValidationError(ValidationError validationError) + { + // If the error is not linked to a benchmark case, then set the error on all benchmarks + var errorCases = validationError.BenchmarkCase == null + ? cases.Values.ToList() + : new List { cases[validationError.BenchmarkCase.GetTestCaseId()] }; + foreach (var testCase in errorCases) + { + var testResult = GetOrCreateTestResult(testCase); + + if (validationError.IsCritical) + { + // Fail if there is a critical validation error + testResult.Outcome = TestOutcome.Failed; + + // Append validation error message to end of test case error message + testResult.ErrorMessage = testResult.ErrorMessage == null + ? validationError.Message + : $"{testResult.ErrorMessage}\n{validationError.Message}"; + + // The test result is not sent yet, in case there are multiple validation errors that need to be sent. 
+ } + else + { + // If the validation error is not critical, append it as a message + testResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, $"WARNING: {validationError.Message}\n")); + } + } + } + + public override void OnBuildComplete(BuildPartition buildPartition, BuildResult buildResult) + { + // Only need to handle build failures + if (!buildResult.IsBuildSuccess) + { + foreach (var benchmarkBuildInfo in buildPartition.Benchmarks) + { + var testCase = cases[benchmarkBuildInfo.BenchmarkCase.GetTestCaseId()]; + var testResult = GetOrCreateTestResult(testCase); + + if (buildResult.GenerateException != null) + testResult.ErrorMessage = $"// Generate Exception: {buildResult.GenerateException.Message}"; + else if (!buildResult.IsBuildSuccess && buildResult.TryToExplainFailureReason(out string reason)) + testResult.ErrorMessage = $"// Build Error: {reason}"; + else if (buildResult.ErrorMessage != null) + testResult.ErrorMessage = $"// Build Error: {buildResult.ErrorMessage}"; + testResult.Outcome = TestOutcome.Failed; + + // Send the result immediately + RecordStart(testCase); + RecordEnd(testCase, testResult.Outcome); + RecordResult(testResult); + sentTestResults.Add(testCase.Id); + } + } + } + + public override void OnStartRunBenchmark(BenchmarkCase benchmarkCase) + { + // TODO: add proper cancellation support to BDN so that we don't need to do cancellation through the event processor + cancellationToken.ThrowIfCancellationRequested(); + + var testCase = cases[benchmarkCase.GetTestCaseId()]; + var testResult = GetOrCreateTestResult(testCase); + testResult.StartTime = DateTimeOffset.UtcNow; + + RecordStart(testCase); + runTimerStopwatch.Restart(); + } + + public override void OnEndRunBenchmark(BenchmarkCase benchmarkCase, BenchmarkReport report) + { + var testCase = cases[benchmarkCase.GetTestCaseId()]; + var testResult = GetOrCreateTestResult(testCase); + testResult.EndTime = DateTimeOffset.UtcNow; + testResult.Duration = 
runTimerStopwatch.Elapsed; + testResult.Outcome = report.Success ? TestOutcome.Passed : TestOutcome.Failed; + + var resultRuns = report.GetResultRuns(); + + // Provide the raw result runs data. + testResult.SetPropertyValue(VsTestProperties.Measurement, resultRuns.Select(m => m.Nanoseconds.ToString()).ToArray()); + + // Add a message to the TestResult which contains the results summary. + testResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, report.BenchmarkCase.DisplayInfo + "\n")); + testResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, $"Runtime = {report.GetRuntimeInfo()}; GC = {report.GetGcInfo()}\n")); + + var statistics = resultRuns.GetStatistics(); + var cultureInfo = CultureInfo.InvariantCulture; + var formatter = statistics.CreateNanosecondFormatter(cultureInfo); + + var builder = new StringBuilder(); + var histogram = HistogramBuilder.Adaptive.Build(statistics.OriginalValues); + builder.AppendLine("-------------------- Histogram --------------------"); + builder.AppendLine(histogram.ToString(formatter)); + builder.AppendLine("---------------------------------------------------"); + + var statisticsOutput = statistics.ToString(cultureInfo, formatter, calcHistogram: false); + builder.AppendLine(statisticsOutput); + + testResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, builder.ToString())); + + RecordEnd(testResult.TestCase, testResult.Outcome); + RecordResult(testResult); + sentTestResults.Add(testCase.Id); + } + + /// + /// Iterate through all the benchmarks that were scheduled to run, and if they haven't been sent yet, send the result through. 
+ /// + public void SendUnsentTestResults() + { + foreach (var testCase in cases.Values) + { + if (!sentTestResults.Contains(testCase.Id)) + { + var testResult = GetOrCreateTestResult(testCase); + if (testResult.Outcome == TestOutcome.None) + testResult.Outcome = TestOutcome.Skipped; + RecordStart(testCase); + RecordEnd(testCase, testResult.Outcome); + RecordResult(testResult); + } + } + } + + private TestResult GetOrCreateTestResult(TestCase testCase) + { + if (testResults.TryGetValue(testCase.Id, out var testResult)) + return testResult; + + var newResult = new TestResult(testCase) + { + ComputerName = Environment.MachineName, + DisplayName = testCase.DisplayName + }; + + testResults[testCase.Id] = newResult; + return newResult; + } + + private void RecordStart(TestCase testCase) + { + recorder.RecordStart(SerializationHelpers.Serialize(testCase)); + } + + private void RecordEnd(TestCase testCase, TestOutcome testOutcome) + { + recorder.RecordEnd(SerializationHelpers.Serialize(testCase), testOutcome); + } + + private void RecordResult(TestResult testResult) + { + recorder.RecordResult(SerializationHelpers.Serialize(testResult)); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/VSTestLogger.cs b/src/BenchmarkDotNet.TestAdapter/VSTestLogger.cs new file mode 100644 index 0000000000..28fc54995c --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/VSTestLogger.cs @@ -0,0 +1,58 @@ +using BenchmarkDotNet.Loggers; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; +using System.Text; + +namespace BenchmarkDotNet.TestAdapter +{ + /// + /// A class to send logs from BDN to the VSTest output log. 
+ /// + internal sealed class VsTestLogger : ILogger + { + private readonly IMessageLogger messageLogger; + private readonly StringBuilder currentLine = new StringBuilder(); + private TestMessageLevel currentLevel = TestMessageLevel.Informational; + + public VsTestLogger(IMessageLogger logger) + { + messageLogger = logger; + } + + public string Id => nameof(VsTestLogger); + + public int Priority => 0; + + public void Flush() + { + WriteLine(); + } + + public void Write(LogKind logKind, string text) + { + currentLine.Append(text); + + // Assume that if the log kind is an error, that the whole line is treated as an error + // The level will be reset to Informational when WriteLine() is called. + if (logKind == LogKind.Error) + currentLevel = TestMessageLevel.Error; + } + + public void WriteLine() + { + // The VSTest logger throws an error on logging empty or whitespace strings, so skip them. + if (currentLine.Length == 0) + return; + + messageLogger.SendMessage(currentLevel, currentLine.ToString()); + + currentLevel = TestMessageLevel.Informational; + currentLine.Clear(); + } + + public void WriteLine(LogKind logKind, string text) + { + Write(logKind, text); + WriteLine(); + } + } +} diff --git a/src/BenchmarkDotNet.TestAdapter/VSTestProperties.cs b/src/BenchmarkDotNet.TestAdapter/VSTestProperties.cs new file mode 100644 index 0000000000..6bcbdcf299 --- /dev/null +++ b/src/BenchmarkDotNet.TestAdapter/VSTestProperties.cs @@ -0,0 +1,22 @@ +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace BenchmarkDotNet.TestAdapter +{ + /// + /// A class that contains all the custom properties that can be set on VSTest TestCase and TestResults. + /// Some of these properties are well known as they are also used by VSTest adapters for other test libraries. + /// + internal static class VsTestProperties + { + /// + /// A test property used for storing the test results so that they could be accessed + /// programmatically from a custom VSTest runner. 
+ /// + internal static readonly TestProperty Measurement = TestProperty.Register( + "BenchmarkDotNet.TestAdapter.Measurements", + "Measurements", + typeof(string[]), + TestPropertyAttributes.Hidden, + typeof(TestResult)); + } +} diff --git a/src/BenchmarkDotNet/Properties/AssemblyInfo.cs b/src/BenchmarkDotNet/Properties/AssemblyInfo.cs index 5327197aa5..cec4ef220c 100644 --- a/src/BenchmarkDotNet/Properties/AssemblyInfo.cs +++ b/src/BenchmarkDotNet/Properties/AssemblyInfo.cs @@ -16,6 +16,7 @@ [assembly: InternalsVisibleTo("BenchmarkDotNet.Diagnostics.dotTrace,PublicKey=" + BenchmarkDotNetInfo.PublicKey)] [assembly: InternalsVisibleTo("BenchmarkDotNet.IntegrationTests.ManualRunning,PublicKey=" + BenchmarkDotNetInfo.PublicKey)] [assembly: InternalsVisibleTo("BenchmarkDotNet.IntegrationTests.ManualRunning.MultipleFrameworks,PublicKey=" + BenchmarkDotNetInfo.PublicKey)] +[assembly: InternalsVisibleTo("BenchmarkDotNet.TestAdapter,PublicKey=" + BenchmarkDotNetInfo.PublicKey)] #else [assembly: InternalsVisibleTo("BenchmarkDotNet.Tests")] [assembly: InternalsVisibleTo("BenchmarkDotNet.IntegrationTests")] @@ -23,4 +24,5 @@ [assembly: InternalsVisibleTo("BenchmarkDotNet.Diagnostics.dotTrace")] [assembly: InternalsVisibleTo("BenchmarkDotNet.IntegrationTests.ManualRunning")] [assembly: InternalsVisibleTo("BenchmarkDotNet.IntegrationTests.ManualRunning.MultipleFrameworks")] +[assembly: InternalsVisibleTo("BenchmarkDotNet.TestAdapter")] #endif \ No newline at end of file