Commit 59b94083 authored by Songqiao Su's avatar Songqiao Su Committed by Facebook Github Bot

(folly/benchmark)(RFC) allow users to record customized counters during benchmark

Summary:
There could be cases where users want to record some of their own metrics during a benchmark. This diff adds another kind of macro, BENCHMARK_COUNTERS, to make that happen; see the Test Plan for an example.

One problem is that it would be hard to output the result on the fly, since all benchmarks need to run first to know which customized counters are used; only then can the proper column headers be printed.

Reviewed By: yfeldblum

Differential Revision: D9874668

fbshipit-source-id: 2004bb4484e54c1edd65763d1727486c6905d884
parent 399c4fa5
...@@ -25,6 +25,7 @@ ...@@ -25,6 +25,7 @@
#include <limits> #include <limits>
#include <map> #include <map>
#include <memory> #include <memory>
#include <numeric>
#include <utility> #include <utility>
#include <vector> #include <vector>
...@@ -70,7 +71,7 @@ namespace folly { ...@@ -70,7 +71,7 @@ namespace folly {
std::chrono::high_resolution_clock::duration BenchmarkSuspender::timeSpent; std::chrono::high_resolution_clock::duration BenchmarkSuspender::timeSpent;
typedef function<detail::TimeIterPair(unsigned int)> BenchmarkFun; typedef function<detail::TimeIterData(unsigned int)> BenchmarkFun;
vector<detail::BenchmarkRegistration>& benchmarks() { vector<detail::BenchmarkRegistration>& benchmarks() {
static vector<detail::BenchmarkRegistration> _benchmarks; static vector<detail::BenchmarkRegistration> _benchmarks;
...@@ -107,22 +108,12 @@ size_t getGlobalBenchmarkBaselineIndex() { ...@@ -107,22 +108,12 @@ size_t getGlobalBenchmarkBaselineIndex() {
void detail::addBenchmarkImpl( void detail::addBenchmarkImpl(
const char* file, const char* file,
const char* name, const char* name,
BenchmarkFun fun) { BenchmarkFun fun,
benchmarks().push_back({file, name, std::move(fun)}); bool useCounter) {
benchmarks().push_back({file, name, std::move(fun), useCounter});
} }
/** static std::pair<double, UserCounters> runBenchmarkGetNSPerIteration(
* Given a bunch of benchmark samples, estimate the actual run time.
*/
static double estimateTime(double* begin, double* end) {
assert(begin < end);
// Current state of the art: get the minimum. After some
// experimentation, it seems taking the minimum is the best.
return *min_element(begin, end);
}
static double runBenchmarkGetNSPerIteration(
const BenchmarkFun& fun, const BenchmarkFun& fun,
const double globalBaseline) { const double globalBaseline) {
using std::chrono::duration_cast; using std::chrono::duration_cast;
...@@ -151,21 +142,22 @@ static double runBenchmarkGetNSPerIteration( ...@@ -151,21 +142,22 @@ static double runBenchmarkGetNSPerIteration(
const auto timeBudget = seconds(FLAGS_bm_max_secs); const auto timeBudget = seconds(FLAGS_bm_max_secs);
auto global = high_resolution_clock::now(); auto global = high_resolution_clock::now();
double epochResults[epochs] = {0}; std::vector<std::pair<double, UserCounters>> epochResults(epochs);
size_t actualEpochs = 0; size_t actualEpochs = 0;
for (; actualEpochs < epochs; ++actualEpochs) { for (; actualEpochs < epochs; ++actualEpochs) {
const auto maxIters = uint32_t(FLAGS_bm_max_iters); const auto maxIters = uint32_t(FLAGS_bm_max_iters);
for (auto n = uint32_t(FLAGS_bm_min_iters); n < maxIters; n *= 2) { for (auto n = uint32_t(FLAGS_bm_min_iters); n < maxIters; n *= 2) {
auto const nsecsAndIter = fun(static_cast<unsigned int>(n)); detail::TimeIterData timeIterData = fun(static_cast<unsigned int>(n));
if (nsecsAndIter.first < minNanoseconds) { if (timeIterData.duration < minNanoseconds) {
continue; continue;
} }
// We got an accurate enough timing, done. But only save if // We got an accurate enough timing, done. But only save if
// smaller than the current result. // smaller than the current result.
auto nsecs = duration_cast<nanoseconds>(nsecsAndIter.first).count(); auto nsecs = duration_cast<nanoseconds>(timeIterData.duration);
epochResults[actualEpochs] = epochResults[actualEpochs] = std::make_pair(
max(0.0, double(nsecs) / nsecsAndIter.second - globalBaseline); max(0.0, double(nsecs.count()) / timeIterData.niter - globalBaseline),
std::move(timeIterData.userCounters));
// Done with the current epoch, we got a meaningful timing. // Done with the current epoch, we got a meaningful timing.
break; break;
} }
...@@ -177,9 +169,16 @@ static double runBenchmarkGetNSPerIteration( ...@@ -177,9 +169,16 @@ static double runBenchmarkGetNSPerIteration(
} }
} }
// Current state of the art: get the minimum. After some
// experimentation, it seems taking the minimum is the best.
auto iter = min_element(
epochResults.begin(),
epochResults.begin() + actualEpochs,
[](const auto& a, const auto& b) { return a.first < b.first; });
// If the benchmark was basically drowned in baseline noise, it's // If the benchmark was basically drowned in baseline noise, it's
// possible it became negative. // possible it became negative.
return max(0.0, estimateTime(epochResults, epochResults + actualEpochs)); return std::make_pair(max(0.0, iter->first), iter->second);
} }
struct ScaleInfo { struct ScaleInfo {
...@@ -250,27 +249,37 @@ static string metricReadable(double n, unsigned int decimals) { ...@@ -250,27 +249,37 @@ static string metricReadable(double n, unsigned int decimals) {
namespace { namespace {
class BenchmarkResultsPrinter { class BenchmarkResultsPrinter {
public: public:
static constexpr unsigned int columns{76}; BenchmarkResultsPrinter() = default;
double baselineNsPerIter{numeric_limits<double>::max()}; explicit BenchmarkResultsPrinter(std::set<std::string> counterNames)
string lastFile; : counterNames_(std::move(counterNames)),
namesLength_{std::accumulate(
counterNames_.begin(),
counterNames_.end(),
size_t{0},
[](size_t acc, auto&& name) { return acc + 2 + name.length(); })} {}
static constexpr unsigned int columns{76};
void separator(char pad) { void separator(char pad) {
puts(string(columns, pad).c_str()); puts(string(columns + namesLength_, pad).c_str());
} }
void header(const string& file) { void header(const string& file) {
separator('='); separator('=');
printf("%-*srelative time/iter iters/s\n", columns - 28, file.c_str()); printf("%-*srelative time/iter iters/s", columns - 28, file.c_str());
for (auto const& name : counterNames_) {
printf(" %s", name.c_str());
}
printf("\n");
separator('='); separator('=');
} }
void print(const vector<detail::BenchmarkResult>& data) { void print(const vector<detail::BenchmarkResult>& data) {
for (auto& datum : data) { for (auto& datum : data) {
auto file = datum.file; auto file = datum.file;
if (file != lastFile) { if (file != lastFile_) {
// New file starting // New file starting
header(file); header(file);
lastFile = file; lastFile_ = file;
} }
string s = datum.name; string s = datum.name;
...@@ -283,7 +292,7 @@ class BenchmarkResultsPrinter { ...@@ -283,7 +292,7 @@ class BenchmarkResultsPrinter {
s.erase(0, 1); s.erase(0, 1);
useBaseline = true; useBaseline = true;
} else { } else {
baselineNsPerIter = datum.timeInNs; baselineNsPerIter_ = datum.timeInNs;
useBaseline = false; useBaseline = false;
} }
s.resize(columns - 29, ' '); s.resize(columns - 29, ' ');
...@@ -295,24 +304,38 @@ class BenchmarkResultsPrinter { ...@@ -295,24 +304,38 @@ class BenchmarkResultsPrinter {
if (!useBaseline) { if (!useBaseline) {
// Print without baseline // Print without baseline
printf( printf(
"%*s %9s %7s\n", "%*s %9s %7s",
static_cast<int>(s.size()), static_cast<int>(s.size()),
s.c_str(), s.c_str(),
readableTime(secPerIter, 2).c_str(), readableTime(secPerIter, 2).c_str(),
metricReadable(itersPerSec, 2).c_str()); metricReadable(itersPerSec, 2).c_str());
} else { } else {
// Print with baseline // Print with baseline
auto rel = baselineNsPerIter / nsPerIter * 100.0; auto rel = baselineNsPerIter_ / nsPerIter * 100.0;
printf( printf(
"%*s %7.2f%% %9s %7s\n", "%*s %7.2f%% %9s %7s",
static_cast<int>(s.size()), static_cast<int>(s.size()),
s.c_str(), s.c_str(),
rel, rel,
readableTime(secPerIter, 2).c_str(), readableTime(secPerIter, 2).c_str(),
metricReadable(itersPerSec, 2).c_str()); metricReadable(itersPerSec, 2).c_str());
} }
for (auto const& name : counterNames_) {
if (auto ptr = folly::get_ptr(datum.counters, name)) {
printf(" %-*d", int(name.length()), *ptr);
} else {
printf(" %-*s", int(name.length()), "NaN");
}
}
printf("\n");
} }
} }
private:
std::set<std::string> counterNames_;
size_t namesLength_{0};
double baselineNsPerIter_{numeric_limits<double>::max()};
string lastFile_;
}; };
} // namespace } // namespace
...@@ -359,7 +382,7 @@ void benchmarkResultsFromDynamic( ...@@ -359,7 +382,7 @@ void benchmarkResultsFromDynamic(
vector<detail::BenchmarkResult>& results) { vector<detail::BenchmarkResult>& results) {
for (auto& datum : d) { for (auto& datum : d) {
results.push_back( results.push_back(
{datum[0].asString(), datum[1].asString(), datum[2].asDouble()}); {datum[0].asString(), datum[1].asString(), datum[2].asDouble(), {}});
} }
} }
...@@ -392,7 +415,7 @@ void printResultComparison( ...@@ -392,7 +415,7 @@ void printResultComparison(
// Print header for a file // Print header for a file
auto header = [&](const string& file) { auto header = [&](const string& file) {
separator('='); separator('=');
printf("%-*srelative time/iter iters/s\n", columns - 28, file.c_str()); printf("%-*srelative time/iter iters/s", columns - 28, file.c_str());
separator('='); separator('=');
}; };
...@@ -472,32 +495,47 @@ void runBenchmarks() { ...@@ -472,32 +495,47 @@ void runBenchmarks() {
auto const globalBaseline = auto const globalBaseline =
runBenchmarkGetNSPerIteration(benchmarks()[baselineIndex].func, 0); runBenchmarkGetNSPerIteration(benchmarks()[baselineIndex].func, 0);
auto printer = BenchmarkResultsPrinter{};
bool useCounter =
std::any_of(benchmarks().begin(), benchmarks().end(), [](const auto& bm) {
return bm.useCounter;
});
BenchmarkResultsPrinter printer;
std::set<std::string> counterNames;
FOR_EACH_RANGE (i, 0, benchmarks().size()) { FOR_EACH_RANGE (i, 0, benchmarks().size()) {
if (i == baselineIndex) { if (i == baselineIndex) {
continue; continue;
} }
double elapsed = 0.0; std::pair<double, UserCounters> elapsed;
auto& bm = benchmarks()[i]; auto& bm = benchmarks()[i];
if (bm.name != "-") { // skip separators if (bm.name != "-") { // skip separators
if (bmRegex && !boost::regex_search(bm.name, *bmRegex)) { if (bmRegex && !boost::regex_search(bm.name, *bmRegex)) {
continue; continue;
} }
elapsed = runBenchmarkGetNSPerIteration(bm.func, globalBaseline); elapsed = runBenchmarkGetNSPerIteration(bm.func, globalBaseline.first);
} }
if (!FLAGS_json_verbose && !FLAGS_json) { // if customized user counters is used, it cannot print the result in real
printer.print({{bm.file, bm.name, elapsed}}); // time as it needs to run all cases first to know the complete set of
// counters have been used, then the header can be printed out properly
if (!FLAGS_json_verbose && !FLAGS_json && !useCounter) {
printer.print({{bm.file, bm.name, elapsed.first, elapsed.second}});
} else { } else {
results.push_back({bm.file, bm.name, elapsed}); results.push_back({bm.file, bm.name, elapsed.first, elapsed.second});
}
// get all counter names
for (auto const& kv : elapsed.second) {
counterNames.insert(kv.first);
} }
} }
// PLEASE MAKE NOISE. MEASUREMENTS DONE. // PLEASE MAKE NOISE. MEASUREMENTS DONE.
if (FLAGS_json_verbose || FLAGS_json) { if (FLAGS_json_verbose || FLAGS_json) {
printBenchmarkResults(results); printBenchmarkResults(results);
} else { } else {
printer = BenchmarkResultsPrinter{std::move(counterNames)};
printer.print(results);
printer.separator('='); printer.separator('=');
} }
......
...@@ -28,6 +28,7 @@ ...@@ -28,6 +28,7 @@
#include <functional> #include <functional>
#include <limits> #include <limits>
#include <type_traits> #include <type_traits>
#include <unordered_map>
#include <boost/function_types/function_arity.hpp> #include <boost/function_types/function_arity.hpp>
#include <glog/logging.h> #include <glog/logging.h>
...@@ -52,22 +53,29 @@ inline bool runBenchmarksOnFlag() { ...@@ -52,22 +53,29 @@ inline bool runBenchmarksOnFlag() {
return FLAGS_benchmark; return FLAGS_benchmark;
} }
using UserCounters = std::unordered_map<std::string, int>;
namespace detail { namespace detail {
struct TimeIterData {
std::chrono::high_resolution_clock::duration duration;
unsigned int niter;
UserCounters userCounters;
};
using TimeIterPair = using BenchmarkFun = std::function<TimeIterData(unsigned int)>;
std::pair<std::chrono::high_resolution_clock::duration, unsigned int>;
using BenchmarkFun = std::function<detail::TimeIterPair(unsigned int)>;
struct BenchmarkRegistration { struct BenchmarkRegistration {
std::string file; std::string file;
std::string name; std::string name;
BenchmarkFun func; BenchmarkFun func;
bool useCounter = false;
}; };
struct BenchmarkResult { struct BenchmarkResult {
std::string file; std::string file;
std::string name; std::string name;
double timeInNs; double timeInNs;
UserCounters counters;
}; };
/** /**
...@@ -77,7 +85,8 @@ struct BenchmarkResult { ...@@ -77,7 +85,8 @@ struct BenchmarkResult {
void addBenchmarkImpl( void addBenchmarkImpl(
const char* file, const char* file,
const char* name, const char* name,
std::function<TimeIterPair(unsigned int)>); BenchmarkFun,
bool useCounter);
} // namespace detail } // namespace detail
...@@ -166,9 +175,7 @@ struct BenchmarkSuspender { ...@@ -166,9 +175,7 @@ struct BenchmarkSuspender {
* function). * function).
*/ */
template <typename Lambda> template <typename Lambda>
typename std::enable_if< typename std::enable_if<folly::is_invocable<Lambda, unsigned>::value>::type
boost::function_types::function_arity<
decltype(&Lambda::operator())>::value == 2>::type
addBenchmark(const char* file, const char* name, Lambda&& lambda) { addBenchmark(const char* file, const char* name, Lambda&& lambda) {
auto execute = [=](unsigned int times) { auto execute = [=](unsigned int times) {
BenchmarkSuspender::timeSpent = {}; BenchmarkSuspender::timeSpent = {};
...@@ -179,13 +186,11 @@ addBenchmark(const char* file, const char* name, Lambda&& lambda) { ...@@ -179,13 +186,11 @@ addBenchmark(const char* file, const char* name, Lambda&& lambda) {
niter = lambda(times); niter = lambda(times);
auto end = std::chrono::high_resolution_clock::now(); auto end = std::chrono::high_resolution_clock::now();
// CORE MEASUREMENT ENDS // CORE MEASUREMENT ENDS
return detail::TimeIterData{
return detail::TimeIterPair( (end - start) - BenchmarkSuspender::timeSpent, niter, {}};
(end - start) - BenchmarkSuspender::timeSpent, niter);
}; };
detail::addBenchmarkImpl( detail::addBenchmarkImpl(file, name, detail::BenchmarkFun(execute), false);
file, name, std::function<detail::TimeIterPair(unsigned int)>(execute));
} }
/** /**
...@@ -195,9 +200,7 @@ addBenchmark(const char* file, const char* name, Lambda&& lambda) { ...@@ -195,9 +200,7 @@ addBenchmark(const char* file, const char* name, Lambda&& lambda) {
* (iteration occurs outside the function). * (iteration occurs outside the function).
*/ */
template <typename Lambda> template <typename Lambda>
typename std::enable_if< typename std::enable_if<folly::is_invocable<Lambda>::value>::type
boost::function_types::function_arity<
decltype(&Lambda::operator())>::value == 1>::type
addBenchmark(const char* file, const char* name, Lambda&& lambda) { addBenchmark(const char* file, const char* name, Lambda&& lambda) {
addBenchmark(file, name, [=](unsigned int times) { addBenchmark(file, name, [=](unsigned int times) {
unsigned int niter = 0; unsigned int niter = 0;
...@@ -208,6 +211,47 @@ addBenchmark(const char* file, const char* name, Lambda&& lambda) { ...@@ -208,6 +211,47 @@ addBenchmark(const char* file, const char* name, Lambda&& lambda) {
}); });
} }
/**
 * Similar to the previous two template specializations, but the lambda will
 * also take customized counters, in the following two cases
*/
template <typename Lambda>
typename std::enable_if<
folly::is_invocable<Lambda, UserCounters&, unsigned>::value>::type
addBenchmark(const char* file, const char* name, Lambda&& lambda) {
auto execute = [=](unsigned int times) {
BenchmarkSuspender::timeSpent = {};
unsigned int niter;
// CORE MEASUREMENT STARTS
auto start = std::chrono::high_resolution_clock::now();
UserCounters counters;
niter = lambda(counters, times);
auto end = std::chrono::high_resolution_clock::now();
// CORE MEASUREMENT ENDS
return detail::TimeIterData{
(end - start) - BenchmarkSuspender::timeSpent, niter, counters};
};
detail::addBenchmarkImpl(
file,
name,
std::function<detail::TimeIterData(unsigned int)>(execute),
true);
}
template <typename Lambda>
typename std::enable_if<folly::is_invocable<Lambda, UserCounters&>::value>::type
addBenchmark(const char* file, const char* name, Lambda&& lambda) {
addBenchmark(file, name, [=](UserCounters& counters, unsigned int times) {
unsigned int niter = 0;
while (times-- > 0) {
niter += lambda(counters);
}
return niter;
});
}
/** /**
* Call doNotOptimizeAway(var) to ensure that var will be computed even * Call doNotOptimizeAway(var) to ensure that var will be computed even
* post-optimization. Use it for variables that are computed during * post-optimization. Use it for variables that are computed during
...@@ -315,6 +359,7 @@ void printResultComparison( ...@@ -315,6 +359,7 @@ void printResultComparison(
* Introduces a benchmark function. Used internally, see BENCHMARK and * Introduces a benchmark function. Used internally, see BENCHMARK and
* friends below. * friends below.
*/ */
#define BENCHMARK_IMPL(funName, stringName, rv, paramType, paramName) \ #define BENCHMARK_IMPL(funName, stringName, rv, paramType, paramName) \
static void funName(paramType); \ static void funName(paramType); \
static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = \ static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = \
...@@ -328,6 +373,22 @@ void printResultComparison( ...@@ -328,6 +373,22 @@ void printResultComparison(
true); \ true); \
static void funName(paramType paramName) static void funName(paramType paramName)
#define BENCHMARK_IMPL_COUNTERS( \
funName, stringName, counters, rv, paramType, paramName) \
static void funName(UserCounters& FOLLY_PP_DETAIL_APPEND_VA_ARG(paramType)); \
static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = \
(::folly::addBenchmark( \
__FILE__, \
stringName, \
[](UserCounters& counters FOLLY_PP_DETAIL_APPEND_VA_ARG( \
paramType paramName)) -> unsigned { \
funName(counters FOLLY_PP_DETAIL_APPEND_VA_ARG(paramName)); \
return rv; \
}), \
true); \
static void funName(UserCounters& counters FOLLY_PP_DETAIL_APPEND_VA_ARG( \
paramType paramName))
/** /**
* Introduces a benchmark function with support for returning the actual * Introduces a benchmark function with support for returning the actual
* number of iterations. Used internally, see BENCHMARK_MULTI and friends * number of iterations. Used internally, see BENCHMARK_MULTI and friends
...@@ -355,9 +416,9 @@ void printResultComparison( ...@@ -355,9 +416,9 @@ void printResultComparison(
* v.push_back(42); * v.push_back(42);
* } * }
* *
* BENCHMARK(insertVectorBegin, n) { * BENCHMARK(insertVectorBegin, iters) {
* vector<int> v; * vector<int> v;
* FOR_EACH_RANGE (i, 0, n) { * FOR_EACH_RANGE (i, 0, iters) {
* v.insert(v.begin(), 42); * v.insert(v.begin(), 42);
* } * }
* } * }
...@@ -370,6 +431,28 @@ void printResultComparison( ...@@ -370,6 +431,28 @@ void printResultComparison(
FB_ONE_OR_NONE(unsigned, ##__VA_ARGS__), \ FB_ONE_OR_NONE(unsigned, ##__VA_ARGS__), \
__VA_ARGS__) __VA_ARGS__)
/**
 * Allow users to record customized counters during benchmarking; there will
 * be one extra column shown in the output result for each counter.
 *
 * BENCHMARK_COUNTERS(insertVectorBegin, counters, iters) {
* vector<int> v;
* FOR_EACH_RANGE (i, 0, iters) {
* v.insert(v.begin(), 42);
* }
* BENCHMARK_SUSPEND {
* counters["foo"] = 10;
* }
* }
*/
#define BENCHMARK_COUNTERS(name, counters, ...) \
BENCHMARK_IMPL_COUNTERS( \
name, \
FB_STRINGIZE(name), \
counters, \
FB_ARG_2_OR_1(1, ##__VA_ARGS__), \
FB_ONE_OR_NONE(unsigned, ##__VA_ARGS__), \
__VA_ARGS__)
/** /**
* Like BENCHMARK above, but allows the user to return the actual * Like BENCHMARK above, but allows the user to return the actual
* number of iterations executed in the function body. This can be * number of iterations executed in the function body. This can be
...@@ -501,6 +584,14 @@ void printResultComparison( ...@@ -501,6 +584,14 @@ void printResultComparison(
FB_ONE_OR_NONE(unsigned, ##__VA_ARGS__), \ FB_ONE_OR_NONE(unsigned, ##__VA_ARGS__), \
__VA_ARGS__) __VA_ARGS__)
#define BENCHMARK_COUNTERS_RELATIVE(name, counters, ...) \
BENCHMARK_IMPL_COUNTERS( \
name, \
"%" FB_STRINGIZE(name), \
counters, \
FB_ARG_2_OR_1(1, ##__VA_ARGS__), \
FB_ONE_OR_NONE(unsigned, ##__VA_ARGS__), \
__VA_ARGS__)
/** /**
* Same as BENCHMARK_RELATIVE, but allows one to return the actual number * Same as BENCHMARK_RELATIVE, but allows one to return the actual number
* of iterations that have been run. * of iterations that have been run.
......
...@@ -25,6 +25,17 @@ ...@@ -25,6 +25,17 @@
using namespace folly; using namespace folly;
using namespace std; using namespace std;
BENCHMARK_COUNTERS(insertVectorBeginWithCounter, counters, n) {
vector<int> v;
for (size_t i = 0; i < n; i++) {
v.insert(v.begin(), 42);
}
BENCHMARK_SUSPEND {
counters["foo"] = v.size();
counters["bar"] = v.size() * 2;
}
}
void fun() { void fun() {
static double x = 1; static double x = 1;
++x; ++x;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment