25using namespace std::chrono_literals;
53void GenerateTemplateResults(
const std::vector<ankerl::nanobench::Result>& benchmarkResults,
const fs::path& file,
const char* tpl)
55 if (benchmarkResults.empty() || file.empty()) {
59 std::ofstream fout{file};
62 std::cout <<
"Created " << file << std::endl;
64 std::cout <<
"Could not write to file " << file << std::endl;
81 using item_t = std::pair<std::string, uint8_t>;
82 auto sort_by_priority = [](item_t a, item_t b){
return a.second < b.second; };
84 return Join(sorted_priorities,
',', [](
const auto& entry){
return entry.first; });
97 return benchmarks_map;
102 benchmarks().insert(std::make_pair(
name, std::make_pair(func, level)));
107 std::regex reFilter(
args.regex_filter);
108 std::smatch baseMatch;
110 if (
args.sanity_check) {
111 std::cout <<
"Running with -sanity-check option, output is being suppressed as benchmark results will be useless." << std::endl;
116 std::vector<const char*>
ret;
117 ret.reserve(
args.setup_args.size());
118 for (
const auto& arg :
args.setup_args)
ret.emplace_back(arg.c_str());
122 std::vector<ankerl::nanobench::Result> benchmarkResults;
124 const auto& [func, priority_level] = bench_func;
126 if (!(priority_level &
args.priority)) {
130 if (!std::regex_match(
name, baseMatch, reFilter)) {
134 if (
args.is_list_only) {
135 std::cout <<
name << std::endl;
140 if (
args.sanity_check) {
146 if (
args.min_time > 0ms) {
148 std::chrono::nanoseconds min_time_ns =
args.min_time;
152 if (
args.asymptote.empty()) {
155 for (
auto n :
args.asymptote) {
162 if (!bench.results().empty()) {
163 benchmarkResults.push_back(bench.results().back());
167 GenerateTemplateResults(benchmarkResults,
args.output_csv,
"# Benchmark, evals, iterations, total, min, max, median\n"
168 "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
static std::string g_running_benchmark_name
Retrieve the name of the currently in-use benchmark.
const std::function< void(const std::string &)> G_TEST_LOG_FUN
This is connected to the logger.
const std::function< std::vector< const char * >()> G_TEST_COMMAND_LINE_ARGUMENTS
Retrieve the command line arguments.
const std::function< std::string()> G_TEST_GET_FULL_NAME
Retrieve the unit test name.
static std::function< std::vector< const char * >()> g_bench_command_line_args
Retrieves the available test setup command line arguments that may be used in the benchmark.
Main entry point to nanobench's benchmarking facility.
Bench & epochs(size_t numEpochs) noexcept
Controls number of epochs, the number of measurements to perform.
Bench & name(char const *benchmarkName)
Sets the name of the benchmark, shown in the output table.
ANKERL_NANOBENCH(NODISCARD) std::string const & name() const noexcept
Gets the title of the benchmark.
std::vector< BigO > complexityBigO() const
Bench & output(std::ostream *outstream) noexcept
Set the output stream where the resulting markdown table will be printed to.
Bench & complexityN(T n) noexcept
Bench & minEpochTime(std::chrono::nanoseconds t) noexcept
Minimum time each epoch should take.
Bench & epochIterations(uint64_t numIters) noexcept
Sets exactly the number of iterations for each epoch.
static void RunAll(const Args &args)
BenchRunner(std::string name, BenchFunction func, PriorityLevel level)
std::map< std::string, std::pair< BenchFunction, PriorityLevel > > BenchmarkMap
static BenchmarkMap & benchmarks()
Path class wrapper to block calls to the fs::path(std::string) implicit constructor and the fs::path::string() method, which have unsafe and unpredictable behavior on Windows.
char const * json() noexcept
Template to generate JSON data.
void render(char const *mustacheTemplate, Bench const &bench, std::ostream &out)
Renders output from a mustache-like template and benchmark results.
std::string ListPriorities()
std::map< std::string, uint8_t > map_label_priority
std::function< void(Bench &)> BenchFunction
uint8_t StringToPriority(const std::string &str)
auto Join(const C &container, const S &separator, UnaryOp unary_op)
Join all container items.