21using namespace std::chrono_literals;
48void GenerateTemplateResults(
const std::vector<ankerl::nanobench::Result>& benchmarkResults,
const fs::path& file,
const char* tpl)
50 if (benchmarkResults.empty() || file.empty()) {
54 std::ofstream fout{file.std_path()};
57 std::cout <<
"Created " << file << std::endl;
59 std::cout <<
"Could not write to file " << file << std::endl;
70 return benchmarks_map;
80 std::regex reFilter(
args.regex_filter);
81 std::smatch baseMatch;
83 if (
args.sanity_check) {
84 std::cout <<
"Running with -sanity-check option, output is being suppressed as benchmark results will be useless." << std::endl;
89 std::vector<const char*>
ret;
90 ret.reserve(
args.setup_args.size());
91 for (
const auto& arg :
args.setup_args)
ret.emplace_back(arg.c_str());
95 std::vector<ankerl::nanobench::Result> benchmarkResults;
98 if (!std::regex_match(
name, baseMatch, reFilter)) {
102 if (
args.is_list_only) {
103 std::cout <<
name << std::endl;
108 if (
args.sanity_check) {
114 if (
args.min_time > 0ms) {
116 std::chrono::nanoseconds min_time_ns =
args.min_time;
120 if (
args.asymptote.empty()) {
123 for (
auto n :
args.asymptote) {
130 if (!bench.results().empty()) {
131 benchmarkResults.push_back(bench.results().back());
135 GenerateTemplateResults(benchmarkResults,
args.output_csv,
"# Benchmark, evals, iterations, total, min, max, median\n"
136 "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
static std::string g_running_benchmark_name
Name of the benchmark currently being run.
const std::function< void(const std::string &)> G_TEST_LOG_FUN
This is connected to the logger.
const std::function< std::vector< const char * >()> G_TEST_COMMAND_LINE_ARGUMENTS
Retrieve the command line arguments.
const std::function< std::string()> G_TEST_GET_FULL_NAME
Retrieve the unit test name.
static std::function< std::vector< const char * >()> g_bench_command_line_args
Retrieves the available test setup command line arguments that may be used in the benchmark.
#define Assert(val)
Identity function.
Main entry point to nanobench's benchmarking facility.
Bench & epochs(size_t numEpochs) noexcept
Controls number of epochs, the number of measurements to perform.
ANKERL_NANOBENCH(NODISCARD) Bench & name(char const *benchmarkName)
Sets the name of the benchmark, shown in the result table.
std::vector< BigO > complexityBigO() const
ANKERL_NANOBENCH(NODISCARD) Bench & output(std::ostream *outstream) noexcept
Set the output stream where the resulting markdown table will be printed to.
Bench & complexityN(T n) noexcept
ANKERL_NANOBENCH(NODISCARD) Bench & minEpochTime(std::chrono::nanoseconds t) noexcept
Minimum time each epoch should take.
Bench & epochIterations(uint64_t numIters) noexcept
Sets exactly the number of iterations for each epoch.
static void RunAll(const Args &args)
BenchRunner(std::string name, BenchFunction func)
std::map< std::string, BenchFunction > BenchmarkMap
static BenchmarkMap & benchmarks()
char const * json() noexcept
Template to generate JSON data.
void render(char const *mustacheTemplate, Bench const &bench, std::ostream &out)
Renders output from a mustache-like template and benchmark results.
std::function< void(Bench &)> BenchFunction