Commit b9d5fd6e authored by Marc Celani, committed by Facebook Github Bot

Track min and max in TDigest, do not allow estimates to go out of bounds

Summary: As title. We were special casing q == 1 or q == 0 improperly before. This led to some strange results where p100 < p99.9. Track min and max (useful anyway), and use those for boundaries.

Reviewed By: anakryiko

Differential Revision: D7800645

fbshipit-source-id: 5fa3e1d0d286d4b31442196ba74b072030706786
parent 13d672be
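In short: the digest now records the smallest and largest values it has seen, q <= 0 and q >= 1 return those bounds directly, and interpolated estimates are clamped into [min, max]. Below is a minimal standalone sketch of that bounding idea; boundedEstimate is a hypothetical helper for illustration, not folly's API.

#include <algorithm>
#include <cassert>
#include <vector>

// Hypothetical helper (not folly's API): clamp an interpolated quantile
// estimate into the observed value range so that, e.g., p100 can never be
// smaller than p99.9.
double boundedEstimate(double interpolated, double observedMin, double observedMax) {
  return std::min(std::max(interpolated, observedMin), observedMax);
}

int main() {
  std::vector<double> values;
  for (int i = 1; i <= 100; ++i) {
    values.push_back(i); // observed range is [1, 100]
  }
  double observedMin = values.front();
  double observedMax = values.back();

  // Centroid interpolation can overshoot the extremes near the tails (the old
  // test expected 100.4 for p99.9 and 0.6 for p0.1); clamping keeps every
  // estimate inside [min, max].
  assert(boundedEstimate(100.4, observedMin, observedMax) == 100.0);
  assert(boundedEstimate(0.6, observedMin, observedMax) == 1.0);
  return 0;
}

The diff below applies the same idea inside TDigest itself, via a private clamp helper and tracked min_/max_ members.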
@@ -17,11 +17,10 @@
 #include <folly/stats/TDigest.h>
 
 #include <algorithm>
-#include <cmath>
+#include <limits>
 
 namespace folly {
-namespace detail {
 
 /*
  * A good biased scaling function has the following properties:
  * - The value of the function k(0, delta) = 0, and k(1, delta) = delta.
@@ -57,7 +56,7 @@ namespace detail {
  * }
  */
-double k_to_q(double k, double d) {
+static double k_to_q(double k, double d) {
   double k_div_d = k / d;
   if (k_div_d >= 0.5) {
     double base = 1 - k_div_d;
@@ -67,7 +66,14 @@ double k_to_q(double k, double d) {
   }
 }
 
-} // namespace detail
+static double clamp(double v, double lo, double hi) {
+  if (v > hi) {
+    return hi;
+  } else if (v < lo) {
+    return lo;
+  }
+  return v;
+}
 
 TDigest TDigest::merge(Range<const double*> sortedValues) const {
   if (sortedValues.empty()) {
@@ -78,12 +84,23 @@ TDigest TDigest::merge(Range<const double*> sortedValues) const {
   result.count_ = count_ + sortedValues.size();
 
+  double maybeMin = *sortedValues.begin();
+  double maybeMax = *(sortedValues.end() - 1);
+  if (count_ > 0) {
+    // We know that min_ and max_ are numbers
+    result.min_ = std::min(min_, maybeMin);
+    result.max_ = std::max(max_, maybeMax);
+  } else {
+    // We know that min_ and max_ are NaN.
+    result.min_ = maybeMin;
+    result.max_ = maybeMax;
+  }
+
   std::vector<Centroid> compressed;
   compressed.reserve(maxSize_);
 
   double k_limit = 1;
-  double q_limit_times_count =
-      detail::k_to_q(k_limit++, maxSize_) * result.count_;
+  double q_limit_times_count = k_to_q(k_limit++, maxSize_) * result.count_;
 
   auto it_centroids = centroids_.begin();
   auto it_sortedValues = sortedValues.begin();
@@ -125,7 +142,7 @@ TDigest TDigest::merge(Range<const double*> sortedValues) const {
       sumsToMerge = 0;
       weightsToMerge = 0;
       compressed.push_back(cur);
-      q_limit_times_count = detail::k_to_q(k_limit++, maxSize_) * result.count_;
+      q_limit_times_count = k_to_q(k_limit++, maxSize_) * result.count_;
       cur = next;
     }
   }
@@ -153,11 +170,23 @@ TDigest TDigest::merge(Range<const TDigest*> digests) {
   double count = 0;
 
+  // We can safely use these limits to avoid isnan checks below because we know
+  // nCentroids > 0, so at least one TDigest has a min and max.
+  double min = std::numeric_limits<double>::infinity();
+  double max = -std::numeric_limits<double>::infinity();
+
   for (auto it = digests.begin(); it != digests.end(); it++) {
     starts.push_back(centroids.end());
-    count += it->count();
-    for (const auto& centroid : it->centroids_) {
-      centroids.push_back(centroid);
+    double curCount = it->count();
+    if (curCount > 0) {
+      DCHECK(!std::isnan(it->min_));
+      DCHECK(!std::isnan(it->max_));
+      min = std::min(min, it->min_);
+      max = std::max(max, it->max_);
+      count += curCount;
+      for (const auto& centroid : it->centroids_) {
+        centroids.push_back(centroid);
+      }
     }
   }
@@ -193,7 +222,7 @@ TDigest TDigest::merge(Range<const TDigest*> digests) {
   compressed.reserve(maxSize);
 
   double k_limit = 1;
-  double q_limit_times_count = detail::k_to_q(k_limit, maxSize) * count;
+  double q_limit_times_count = k_to_q(k_limit, maxSize) * count;
 
   Centroid cur = centroids.front();
   double weightSoFar = cur.weight();
@@ -209,7 +238,7 @@ TDigest TDigest::merge(Range<const TDigest*> digests) {
       sumsToMerge = 0;
      weightsToMerge = 0;
       compressed.push_back(cur);
-      q_limit_times_count = detail::k_to_q(k_limit++, maxSize) * count;
+      q_limit_times_count = k_to_q(k_limit++, maxSize) * count;
       cur = *it;
     }
   }
@@ -217,6 +246,8 @@ TDigest TDigest::merge(Range<const TDigest*> digests) {
   compressed.push_back(cur);
 
   result.count_ = count;
+  result.min_ = min;
+  result.max_ = max;
   result.centroids_ = std::move(compressed);
   return result;
 }
@@ -231,7 +262,7 @@ double TDigest::estimateQuantile(double q) const {
   double t;
   if (q > 0.5) {
     if (q >= 1.0) {
-      return centroids_.back().mean();
+      return max_;
     }
     pos = 0;
     t = count_;
@@ -244,7 +275,7 @@ double TDigest::estimateQuantile(double q) const {
     }
   } else {
     if (q <= 0.0) {
-      return centroids_.front().mean();
+      return min_;
    }
     pos = centroids_.size() - 1;
     t = 0;
@@ -267,8 +298,9 @@ double TDigest::estimateQuantile(double q) const {
       delta = (centroids_[pos + 1].mean() - centroids_[pos - 1].mean()) / 2;
     }
   }
-  return centroids_[pos].mean() +
-      ((rank - t) / centroids_[pos].weight() - 0.5) * delta;
+  auto value = centroids_[pos].mean() +
+      ((rank - t) / centroids_[pos].weight() - 0.5) * delta;
+  return clamp(value, min_, max_);
 }
 
 double TDigest::Centroid::add(double sum, double weight) {
...
@@ -16,6 +16,7 @@
 #pragma once
 
+#include <cmath>
 #include <vector>
 
 #include <folly/Range.h>
@@ -48,7 +49,7 @@ namespace folly {
 class TDigest {
  public:
   explicit TDigest(size_t maxSize = 100)
-      : maxSize_(maxSize), sum_(0.0), count_(0.0) {
+      : maxSize_(maxSize), sum_(0.0), count_(0.0), max_(NAN), min_(NAN) {
     centroids_.reserve(maxSize);
   }
@@ -81,6 +82,14 @@ class TDigest {
     return count_;
   }
 
+  double min() const {
+    return min_;
+  }
+
+  double max() const {
+    return max_;
+  }
+
   bool empty() const {
     return centroids_.empty();
   }
@@ -117,6 +126,8 @@ class TDigest {
   size_t maxSize_;
   double sum_;
   double count_;
+  double max_;
+  double min_;
 };
 
 } // namespace folly
@@ -58,11 +58,11 @@ TEST_P(QuantileEstimatorTest, EstimateQuantiles) {
   EXPECT_EQ(0.99, estimates.quantiles[3].first);
   EXPECT_EQ(0.999, estimates.quantiles[4].first);
 
-  EXPECT_EQ(0.6, estimates.quantiles[0].second);
+  EXPECT_EQ(1, estimates.quantiles[0].second);
   EXPECT_EQ(2.0 - 0.5, estimates.quantiles[1].second);
   EXPECT_EQ(50.375, estimates.quantiles[2].second);
   EXPECT_EQ(100.0 - 0.5, estimates.quantiles[3].second);
-  EXPECT_EQ(100.4, estimates.quantiles[4].second);
+  EXPECT_EQ(100, estimates.quantiles[4].second);
 }
 
 INSTANTIATE_TEST_CASE_P(
...
@@ -138,33 +138,33 @@ BENCHMARK_RELATIVE_NAMED_PARAM(estimateQuantile, 1000_p999, 1000, 0.999)
  * ============================================================================
  * folly/stats/test/TDigestBenchmark.cpp          relative  time/iter  iters/s
  * ============================================================================
- * merge(100x1)                                               2.19us   455.86K
- * merge(100x5)                                     58.77%    3.73us   267.92K
- * merge(100x10)                                    42.00%    5.22us   191.48K
- * merge(1000x1)                                    10.52%   20.86us    47.95K
- * merge(1000x5)                                     6.54%   33.54us    29.81K
- * merge(1000x10)                                    4.43%   49.54us    20.19K
+ * merge(100x1)                                               2.22us   451.08K
+ * merge(100x5)                                     59.59%    3.72us   268.81K
+ * merge(100x10)                                    43.70%    5.07us   197.14K
+ * merge(1000x1)                                    10.63%   20.86us    47.93K
+ * merge(1000x5)                                     6.60%   33.60us    29.76K
+ * merge(1000x10)                                    4.52%   49.10us    20.37K
  * ----------------------------------------------------------------------------
- * mergeDigests(100x10)                                      25.29us    39.55K
- * mergeDigests(100x30)                             21.71%  116.50us     8.58K
- * mergeDigests(100x60)                              9.22%  274.32us     3.65K
- * mergeDigests(1000x60)                             0.90%    2.81ms    356.45
+ * mergeDigests(100x10)                                      25.30us    39.53K
+ * mergeDigests(100x30)                             21.70%  116.58us     8.58K
+ * mergeDigests(100x60)                              9.22%  274.53us     3.64K
+ * mergeDigests(1000x60)                             0.90%    2.81ms    356.13
  * ----------------------------------------------------------------------------
- * estimateQuantile(100x1_p001)                               8.48ns   117.88M
- * estimateQuantile(100_p01)                        61.32%   13.83ns    72.29M
- * estimateQuantile(100_p25)                        11.66%   72.73ns    13.75M
- * estimateQuantile(100_p50)                         9.55%   88.79ns    11.26M
- * estimateQuantile(100_p75)                        13.88%   61.14ns    16.36M
- * estimateQuantile(100_p99)                        66.88%   12.68ns    78.83M
- * estimateQuantile(100_p999)                      110.57%    7.67ns   130.34M
+ * estimateQuantile(100x1_p001)                               9.50ns   105.28M
+ * estimateQuantile(100_p01)                        63.49%   14.96ns    66.85M
+ * estimateQuantile(100_p25)                        14.89%   63.81ns    15.67M
+ * estimateQuantile(100_p50)                        11.56%   82.15ns    12.17M
+ * estimateQuantile(100_p75)                        15.44%   61.53ns    16.25M
+ * estimateQuantile(100_p99)                        74.77%   12.70ns    78.72M
+ * estimateQuantile(100_p999)                      117.16%    8.11ns   123.34M
  * ----------------------------------------------------------------------------
- * estimateQuantile(1000_p001)                      26.46%   32.06ns    31.19M
- * estimateQuantile(1000_p01)                        7.78%  108.97ns     9.18M
- * estimateQuantile(1000_p25)                        1.74%  488.35ns     2.05M
- * estimateQuantile(1000_p50)                        1.24%  683.10ns     1.46M
- * estimateQuantile(1000_p75)                        1.75%  483.58ns     2.07M
- * estimateQuantile(1000_p99)                        8.06%  105.29ns     9.50M
- * estimateQuantile(1000_p999)                      32.98%   25.72ns    38.87M
+ * estimateQuantile(1000_p001)                      27.10%   35.05ns    28.53M
+ * estimateQuantile(1000_p01)                        8.58%  110.64ns     9.04M
+ * estimateQuantile(1000_p25)                        1.94%  488.67ns     2.05M
+ * estimateQuantile(1000_p50)                        1.39%  684.81ns     1.46M
+ * estimateQuantile(1000_p75)                        1.96%  483.88ns     2.07M
+ * estimateQuantile(1000_p99)                        8.99%  105.70ns     9.46M
+ * estimateQuantile(1000_p999)                      36.87%   25.76ns    38.82M
  * ============================================================================
  */
...
@@ -44,12 +44,14 @@ TEST(TDigest, Basic) {
   EXPECT_EQ(100, digest.count());
   EXPECT_EQ(5050, digest.sum());
   EXPECT_EQ(50.5, digest.mean());
+  EXPECT_EQ(1, digest.min());
+  EXPECT_EQ(100, digest.max());
 
-  EXPECT_EQ(0.6, digest.estimateQuantile(0.001));
+  EXPECT_EQ(1, digest.estimateQuantile(0.001));
   EXPECT_EQ(2.0 - 0.5, digest.estimateQuantile(0.01));
   EXPECT_EQ(50.375, digest.estimateQuantile(0.5));
   EXPECT_EQ(100.0 - 0.5, digest.estimateQuantile(0.99));
-  EXPECT_EQ(100.4, digest.estimateQuantile(0.999));
+  EXPECT_EQ(100, digest.estimateQuantile(0.999));
 }
 
 TEST(TDigest, Merge) {
@@ -70,12 +72,14 @@ TEST(TDigest, Merge) {
   EXPECT_EQ(200, digest.count());
   EXPECT_EQ(20100, digest.sum());
   EXPECT_EQ(100.5, digest.mean());
+  EXPECT_EQ(1, digest.min());
+  EXPECT_EQ(200, digest.max());
 
-  EXPECT_EQ(0.7, digest.estimateQuantile(0.001));
+  EXPECT_EQ(1, digest.estimateQuantile(0.001));
   EXPECT_EQ(4.0 - 1.5, digest.estimateQuantile(0.01));
   EXPECT_EQ(100.25, digest.estimateQuantile(0.5));
   EXPECT_EQ(200.0 - 1.5, digest.estimateQuantile(0.99));
-  EXPECT_EQ(200.3, digest.estimateQuantile(0.999));
+  EXPECT_EQ(200, digest.estimateQuantile(0.999));
 }
 
 TEST(TDigest, MergeSmall) {
@@ -89,6 +93,8 @@ TEST(TDigest, MergeSmall) {
   EXPECT_EQ(1, digest.count());
   EXPECT_EQ(1, digest.sum());
   EXPECT_EQ(1, digest.mean());
+  EXPECT_EQ(1, digest.min());
+  EXPECT_EQ(1, digest.max());
 
   EXPECT_EQ(1.0, digest.estimateQuantile(0.001));
   EXPECT_EQ(1.0, digest.estimateQuantile(0.01));
@@ -109,6 +115,8 @@ TEST(TDigest, MergeLarge) {
   EXPECT_EQ(1000, digest.count());
   EXPECT_EQ(500500, digest.sum());
   EXPECT_EQ(500.5, digest.mean());
+  EXPECT_EQ(1, digest.min());
+  EXPECT_EQ(1000, digest.max());
 
   EXPECT_EQ(1.5, digest.estimateQuantile(0.001));
   EXPECT_EQ(10.5, digest.estimateQuantile(0.01));
@@ -139,6 +147,8 @@ TEST(TDigest, MergeLargeAsDigests) {
   EXPECT_EQ(1000, digest.count());
   EXPECT_EQ(500500, digest.sum());
   EXPECT_EQ(500.5, digest.mean());
+  EXPECT_EQ(1, digest.min());
+  EXPECT_EQ(1000, digest.max());
 
   EXPECT_EQ(1.5, digest.estimateQuantile(0.001));
   EXPECT_EQ(10.5, digest.estimateQuantile(0.01));
@@ -146,6 +156,69 @@ TEST(TDigest, MergeLargeAsDigests) {
   EXPECT_EQ(999.5, digest.estimateQuantile(0.999));
 }
 
+TEST(TDigest, NegativeValues) {
+  std::vector<TDigest> digests;
+  TDigest digest(100);
+
+  std::vector<double> values;
+  for (int i = 1; i <= 100; ++i) {
+    values.push_back(i);
+    values.push_back(-i);
+  }
+
+  std::sort(values.begin(), values.end());
+
+  digest = digest.merge(values);
+
+  EXPECT_EQ(200, digest.count());
+  EXPECT_EQ(0, digest.sum());
+  EXPECT_EQ(0, digest.mean());
+  EXPECT_EQ(-100, digest.min());
+  EXPECT_EQ(100, digest.max());
+
+  EXPECT_EQ(-100, digest.estimateQuantile(0.0));
+  EXPECT_EQ(-100, digest.estimateQuantile(0.001));
+  EXPECT_EQ(-98.5, digest.estimateQuantile(0.01));
+  EXPECT_EQ(98.5, digest.estimateQuantile(0.99));
+  EXPECT_EQ(100, digest.estimateQuantile(0.999));
+  EXPECT_EQ(100, digest.estimateQuantile(1.0));
+}
+
+TEST(TDigest, NegativeValuesMergeDigests) {
+  std::vector<TDigest> digests;
+  TDigest digest(100);
+
+  std::vector<double> values;
+  std::vector<double> negativeValues;
+  for (int i = 1; i <= 100; ++i) {
+    values.push_back(i);
+    negativeValues.push_back(-i);
+  }
+
+  std::sort(values.begin(), values.end());
+  std::sort(negativeValues.begin(), negativeValues.end());
+
+  auto digest1 = digest.merge(values);
+  auto digest2 = digest.merge(negativeValues);
+
+  std::array<TDigest, 2> a{{digest1, digest2}};
+  digest = TDigest::merge(a);
+
+  EXPECT_EQ(200, digest.count());
+  EXPECT_EQ(0, digest.sum());
+  EXPECT_EQ(0, digest.mean());
+  EXPECT_EQ(-100, digest.min());
+  EXPECT_EQ(100, digest.max());
+
+  EXPECT_EQ(-100, digest.estimateQuantile(0.0));
+  EXPECT_EQ(-100, digest.estimateQuantile(0.001));
+  EXPECT_EQ(-98.5, digest.estimateQuantile(0.01));
+  EXPECT_EQ(98.5, digest.estimateQuantile(0.99));
+  EXPECT_EQ(100, digest.estimateQuantile(0.999));
+  EXPECT_EQ(100, digest.estimateQuantile(1.0));
+}
+
 class DistributionTest
     : public ::testing::TestWithParam<
           std::tuple<std::pair<bool, size_t>, double, bool>> {};
...