Commit 344f49c4 authored by Yedidya Feldblum, committed by Facebook Github Bot

Apply clang-format to folly/stats/

Summary: [Folly] Apply clang-format to `folly/stats/`.

Reviewed By: Orvid

Differential Revision: D5366745

fbshipit-source-id: 3b7419d4ab4c6203693603722cd8e707741d3953
parent 1fd03592
@@ -173,8 +173,8 @@ size_t BucketedTimeSeries<VT, CT>::updateBuckets(TimePoint now) {
   size_t currentBucket;
   TimePoint currentBucketStart;
   TimePoint nextBucketStart;
-  getBucketInfo(latestTime_, &currentBucket,
-                &currentBucketStart, &nextBucketStart);
+  getBucketInfo(
+      latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);

   // Update latestTime_
   latestTime_ = now;
@@ -246,8 +246,8 @@ typename CT::time_point BucketedTimeSeries<VT, CT>::getEarliestTimeNonEmpty()
   size_t currentBucket;
   TimePoint currentBucketStart;
   TimePoint nextBucketStart;
-  getBucketInfo(latestTime_, &currentBucket,
-                &currentBucketStart, &nextBucketStart);
+  getBucketInfo(
+      latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);

   // Subtract 1 duration from the start of the next bucket to find the
   // earliest possible data point we could be tracking.
@@ -500,7 +500,7 @@ VT BucketedTimeSeries<VT, CT>::rangeAdjust(
   TimePoint intervalStart = std::max(start, bucketStart);
   TimePoint intervalEnd = std::min(end, nextBucketStart);
   return input * (intervalEnd - intervalStart) /
-         (nextBucketStart - bucketStart);
+      (nextBucketStart - bucketStart);
 }

 template <typename VT, typename CT>
...
@@ -293,7 +293,7 @@ class BucketedTimeSeries {
    * Note that you generally should call update() before calling avg(), to
    * make sure you are not reading stale data.
    */
-  template <typename ReturnType=double>
+  template <typename ReturnType = double>
   ReturnType avg() const {
     return total_.template avg<ReturnType>();
   }
@@ -483,8 +483,8 @@ class BucketedTimeSeries {
   TimePoint latestTime_; // time of last update()
   Duration duration_; // total duration ("window length") of the time series
   Bucket total_; // sum and count of everything in time series
   std::vector<Bucket> buckets_; // actual buckets of values
 };

 } // folly
@@ -26,13 +26,12 @@ namespace folly {
 namespace detail {

 template <typename T, typename BucketT>
-HistogramBuckets<T, BucketT>::HistogramBuckets(ValueType bucketSize,
-                                               ValueType min,
-                                               ValueType max,
-                                               const BucketType& defaultBucket)
-  : bucketSize_(bucketSize),
-    min_(min),
-    max_(max) {
+HistogramBuckets<T, BucketT>::HistogramBuckets(
+    ValueType bucketSize,
+    ValueType min,
+    ValueType max,
+    const BucketType& defaultBucket)
+    : bucketSize_(bucketSize), min_(min), max_(max) {
   CHECK_GT(bucketSize_, ValueType(0));
   CHECK_LT(min_, max_);
@@ -88,7 +87,7 @@ size_t HistogramBuckets<T, BucketType>::getPercentileBucketIdx(
   uint64_t totalCount = 0;
   for (size_t n = 0; n < numBuckets; ++n) {
     uint64_t bucketCount =
         countFromBucket(const_cast<const BucketType&>(buckets_[n]));
     counts[n] = bucketCount;
     totalCount += bucketCount;
   }
@@ -146,7 +145,6 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
     double pct,
     CountFn countFromBucket,
     AvgFn avgFromBucket) const {
-
   // Find the bucket where this percentile falls
   double lowPct;
   double highPct;
@@ -183,8 +181,8 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
       // (Note that if the counter keeps being decremented, eventually it will
       // wrap and become small enough that we won't detect this any more, and
       // we will return bogus information.)
-      LOG(ERROR) << "invalid average value in histogram minimum bucket: " <<
-        avg << " > " << min_ << ": possible integer overflow?";
+      LOG(ERROR) << "invalid average value in histogram minimum bucket: " << avg
+                 << " > " << min_ << ": possible integer overflow?";
       return getBucketMin(bucketIdx);
     }
     // For the below-min bucket, just assume the lowest value ever seen is
@@ -199,8 +197,8 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
     if (avg < max_) {
       // Most likely this means integer overflow occurred. See the comments
       // above in the minimum case.
-      LOG(ERROR) << "invalid average value in histogram maximum bucket: " <<
-        avg << " < " << max_ << ": possible integer overflow?";
+      LOG(ERROR) << "invalid average value in histogram maximum bucket: " << avg
+                 << " < " << max_ << ": possible integer overflow?";
       return getBucketMax(bucketIdx);
     }
     // Similarly for the above-max bucket, assume the highest value ever seen
@@ -218,9 +216,9 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
       // Most likely this means an integer overflow occurred.
       // See the comments above. Return the midpoint between low and high
       // as a best guess, since avg is meaningless.
-      LOG(ERROR) << "invalid average value in histogram bucket: " <<
-        avg << " not in range [" << low << ", " << high <<
-        "]: possible integer overflow?";
+      LOG(ERROR) << "invalid average value in histogram bucket: " << avg
+                 << " not in range [" << low << ", " << high
+                 << "]: possible integer overflow?";
       return (low + high) / 2;
     }
   }
@@ -247,18 +245,27 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
 } // detail

 template <typename T>
 std::string Histogram<T>::debugString() const {
-  std::string ret = folly::to<std::string>(
-      "num buckets: ", buckets_.getNumBuckets(),
-      ", bucketSize: ", buckets_.getBucketSize(),
-      ", min: ", buckets_.getMin(), ", max: ", buckets_.getMax(), "\n");
+  std::string ret = folly::to<std::string>(
+      "num buckets: ",
+      buckets_.getNumBuckets(),
+      ", bucketSize: ",
+      buckets_.getBucketSize(),
+      ", min: ",
+      buckets_.getMin(),
+      ", max: ",
+      buckets_.getMax(),
+      "\n");

   for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) {
-    folly::toAppend(" ", buckets_.getBucketMin(i), ": ",
-                    buckets_.getByIndex(i).count, "\n",
-                    &ret);
+    folly::toAppend(
+        " ",
+        buckets_.getBucketMin(i),
+        ": ",
+        buckets_.getByIndex(i).count,
+        "\n",
+        &ret);
   }

   return ret;
@@ -272,8 +279,8 @@ void Histogram<T>::toTSV(std::ostream& out, bool skipEmptyBuckets) const {
       continue;
     }
     const auto& bucket = getBucketByIndex(i);
-    out << getBucketMin(i) << '\t' << getBucketMax(i) << '\t'
-        << bucket.count << '\t' << bucket.sum << '\n';
+    out << getBucketMin(i) << '\t' << getBucketMax(i) << '\t' << bucket.count
+        << '\t' << bucket.sum << '\n';
   }
 }
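A small sketch of calling the emitter above; the histogram parameters are made up. Each emitted row is bucketMin, bucketMax, count, and sum, separated by tabs:

#include <iostream>

#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>

int main() {
  folly::Histogram<int64_t> h(10, 0, 100); // bucketSize, min, max
  h.addValue(42);
  h.toTSV(std::cout, /* skipEmptyBuckets = */ true);
  return 0;
}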
...
@@ -40,14 +40,16 @@ template size_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
     Histogram<int64_t>::CountFromBucket countFromBucket,
     double* lowPct,
     double* highPct) const;
-template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
-  ::getPercentileEstimate<Histogram<int64_t>::CountFromBucket,
-                          Histogram<int64_t>::AvgFromBucket>(
-    double pct,
-    Histogram<int64_t>::CountFromBucket countFromBucket,
-    Histogram<int64_t>::AvgFromBucket avgFromBucket) const;
-template uint64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
-  ::computeTotalCount<Histogram<int64_t>::CountFromBucket>(
-    Histogram<int64_t>::CountFromBucket countFromBucket) const;
+template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
+    getPercentileEstimate<
+        Histogram<int64_t>::CountFromBucket,
+        Histogram<int64_t>::AvgFromBucket>(
+        double pct,
+        Histogram<int64_t>::CountFromBucket countFromBucket,
+        Histogram<int64_t>::AvgFromBucket avgFromBucket) const;
+template uint64_t
+detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
+    computeTotalCount<Histogram<int64_t>::CountFromBucket>(
+        Histogram<int64_t>::CountFromBucket countFromBucket) const;

 } // folly
@@ -53,8 +53,11 @@ class HistogramBuckets {
    *
    * (max - min) must be larger than or equal to bucketSize.
    */
-  HistogramBuckets(ValueType bucketSize, ValueType min, ValueType max,
-                   const BucketType& defaultBucket);
+  HistogramBuckets(
+      ValueType bucketSize,
+      ValueType min,
+      ValueType max,
+      const BucketType& defaultBucket);

   /* Returns the bucket size of each bucket in the histogram. */
   ValueType getBucketSize() const {
@@ -191,9 +194,10 @@ class HistogramBuckets {
    * percentage of the data points in the histogram are less than N.
    */
   template <typename CountFn, typename AvgFn>
-  ValueType getPercentileEstimate(double pct,
-                                  CountFn countFromBucket,
-                                  AvgFn avgFromBucket) const;
+  ValueType getPercentileEstimate(
+      double pct,
+      CountFn countFromBucket,
+      AvgFn avgFromBucket) const;

   /*
    * Iterator access to the buckets.
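The declaration above is the detail-level workhorse; Histogram<T> wraps it with its own count/avg functors so callers pass only a percentile in [0, 1]. A hedged sketch with illustrative values:

#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>

int main() {
  folly::Histogram<int64_t> h(10, 0, 100);
  for (int64_t v = 0; v < 100; ++v) {
    h.addValue(v);
  }
  // Median estimate; interpolated within the bucket the percentile falls in.
  int64_t p50 = h.getPercentileEstimate(0.5);
  (void)p50;
  return 0;
}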
@@ -224,7 +228,6 @@ class HistogramBuckets {
 } // detail

-
 /*
  * A basic histogram class.
  *
@@ -242,7 +245,7 @@ class Histogram {
   typedef detail::Bucket<T> Bucket;

   Histogram(ValueType bucketSize, ValueType min, ValueType max)
       : buckets_(bucketSize, min, max, Bucket()) {}

   /* Add a data point to the histogram */
   void addValue(ValueType value) FOLLY_DISABLE_UNDEFINED_BEHAVIOR_SANITIZER(
@@ -313,13 +316,11 @@ class Histogram {
   }

   /* Subtract another histogram data from the histogram */
-  void subtract(const Histogram &hist) {
+  void subtract(const Histogram& hist) {
     // the two histogram bucket definitions must match to support
     // subtract.
-    if (getBucketSize() != hist.getBucketSize() ||
-        getMin() != hist.getMin() ||
-        getMax() != hist.getMax() ||
-        getNumBuckets() != hist.getNumBuckets() ) {
+    if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
+        getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
       throw std::invalid_argument("Cannot subtract input histogram.");
     }
@@ -329,13 +330,11 @@ class Histogram {
   }

   /* Merge two histogram data together */
-  void merge(const Histogram &hist) {
+  void merge(const Histogram& hist) {
     // the two histogram bucket definitions must match to support
     // a merge.
-    if (getBucketSize() != hist.getBucketSize() ||
-        getMin() != hist.getMin() ||
-        getMax() != hist.getMax() ||
-        getNumBuckets() != hist.getNumBuckets() ) {
+    if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
+        getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
       throw std::invalid_argument("Cannot merge from input histogram.");
     }
@@ -345,12 +344,10 @@ class Histogram {
   }

   /* Copy bucket values from another histogram */
-  void copy(const Histogram &hist) {
+  void copy(const Histogram& hist) {
     // the two histogram bucket definition must match
-    if (getBucketSize() != hist.getBucketSize() ||
-        getMin() != hist.getMin() ||
-        getMax() != hist.getMax() ||
-        getNumBuckets() != hist.getNumBuckets() ) {
+    if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
+        getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
       throw std::invalid_argument("Cannot copy from input histogram.");
     }
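subtract(), merge(), and copy() all gate on the same layout check shown above: bucketSize, min, and max (and hence the bucket count) must match, or std::invalid_argument is thrown. A sketch with illustrative layouts:

#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>

int main() {
  folly::Histogram<int64_t> a(10, 0, 100);
  folly::Histogram<int64_t> b(10, 0, 100);
  a.addValue(5);
  b.addValue(15);
  a.merge(b); // identical layout: ok
  folly::Histogram<int64_t> c(20, 0, 100);
  // a.merge(c); // different bucketSize: would throw std::invalid_argument
  return 0;
}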
...
@@ -113,7 +113,7 @@ void MultiLevelTimeSeries<VT, CT>::flush() {

 template <typename VT, typename CT>
 void MultiLevelTimeSeries<VT, CT>::clear() {
-  for (auto & level : levels_) {
+  for (auto& level : levels_) {
     level.clear();
   }
@@ -122,4 +122,4 @@ void MultiLevelTimeSeries<VT, CT>::clear() {
   cachedCount_ = 0;
 }

-} // folly
+} // namespace folly
@@ -89,7 +89,9 @@ class MultiLevelTimeSeries {
   /*
    * Return the number of levels tracked by MultiLevelTimeSeries.
    */
-  size_t numLevels() const { return levels_.size(); }
+  size_t numLevels() const {
+    return levels_.size();
+  }

   /*
    * Get the BucketedTimeSeries backing the specified level.
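For context, a sketch of constructing the container whose accessor was reformatted above, mirroring the tests later in this diff; each level gets its own window duration, with seconds(0) meaning an all-time level:

#include <chrono>

#include <folly/stats/MultiLevelTimeSeries.h>
#include <folly/stats/MultiLevelTimeSeries-defs.h>

int main() {
  using std::chrono::seconds;
  const seconds kDurations[] = {seconds(60), seconds(3600), seconds(0)};
  folly::MultiLevelTimeSeries<int> mts(60, 3, kDurations); // 60 buckets/level
  mts.addValue(seconds(1), 10);
  mts.flush(); // push cached values into every level
  size_t n = mts.numLevels(); // 3
  (void)n;
  return 0;
}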
...
@@ -62,7 +62,7 @@ void TimeseriesHistogram<T, CT, C>::addValues(
   for (size_t n = 0; n < hist.getNumBuckets(); ++n) {
     const typename folly::Histogram<ValueType>::Bucket& histBucket =
         hist.getBucketByIndex(n);
     Bucket& myBucket = buckets_.getByIndex(n);
     myBucket.addValueAggregated(now, histBucket.sum, histBucket.count);
   }
@@ -93,8 +93,8 @@ T TimeseriesHistogram<T, CT, C>::getPercentileEstimate(double pct, size_t level)
     return firstValue_;
   }

-  return buckets_.getPercentileEstimate(pct / 100.0, CountFromLevel(level),
-                                        AvgFromLevel(level));
+  return buckets_.getPercentileEstimate(
+      pct / 100.0, CountFromLevel(level), AvgFromLevel(level));
 }

 template <typename T, typename CT, typename C>
@@ -106,9 +106,10 @@ T TimeseriesHistogram<T, CT, C>::getPercentileEstimate(
     return firstValue_;
   }

-  return buckets_.getPercentileEstimate(pct / 100.0,
-                                        CountFromInterval(start, end),
-                                        AvgFromInterval<T>(start, end));
+  return buckets_.getPercentileEstimate(
+      pct / 100.0,
+      CountFromInterval(start, end),
+      AvgFromInterval<T>(start, end));
 }

 template <typename T, typename CT, typename C>
@@ -123,8 +124,8 @@ size_t TimeseriesHistogram<T, CT, C>::getPercentileBucketIdx(
     double pct,
     TimePoint start,
     TimePoint end) const {
-  return buckets_.getPercentileBucketIdx(pct / 100.0,
-                                         CountFromInterval(start, end));
+  return buckets_.getPercentileBucketIdx(
+      pct / 100.0, CountFromInterval(start, end));
 }

 template <typename T, typename CT, typename C>
@@ -150,9 +151,13 @@ std::string TimeseriesHistogram<T, CT, C>::getString(size_t level) const {
       toAppend(",", &result);
     }
     const ContainerType& cont = buckets_.getByIndex(i);
-    toAppend(buckets_.getBucketMin(i),
-             ":", cont.count(level),
-             ":", cont.template avg<ValueType>(level), &result);
+    toAppend(
+        buckets_.getBucketMin(i),
+        ":",
+        cont.count(level),
+        ":",
+        cont.template avg<ValueType>(level),
+        &result);
   }

   return result;
@@ -169,9 +174,13 @@ std::string TimeseriesHistogram<T, CT, C>::getString(
       toAppend(",", &result);
     }
     const ContainerType& cont = buckets_.getByIndex(i);
-    toAppend(buckets_.getBucketMin(i),
-             ":", cont.count(start, end),
-             ":", cont.avg(start, end), &result);
+    toAppend(
+        buckets_.getBucketMin(i),
+        ":",
+        cont.count(start, end),
+        ":",
+        cont.avg(start, end),
+        &result);
   }

   return result;
@@ -227,4 +236,4 @@ void TimeseriesHistogram<T, CT, C>::computeRateData(
   }
 }

 } // namespace folly
@@ -16,9 +16,9 @@
 #pragma once

+#include <string>
+
 #include <folly/stats/Histogram.h>
 #include <folly/stats/MultiLevelTimeSeries.h>
-#include <string>

 namespace folly {
@@ -53,8 +53,8 @@ template <
     class C = folly::MultiLevelTimeSeries<T, CT>>
 class TimeseriesHistogram {
  private:
   // NOTE: T must be equivalent to _signed_ numeric type for our math.
   static_assert(std::numeric_limits<T>::is_signed, "");

  public:
   // Values to be inserted into container
@@ -80,17 +80,26 @@ class TimeseriesHistogram {
    * @param defaultContainer a pre-initialized timeseries with the desired
    *                         number of levels and their durations.
    */
-  TimeseriesHistogram(ValueType bucketSize, ValueType min, ValueType max,
-                      const ContainerType& defaultContainer);
+  TimeseriesHistogram(
+      ValueType bucketSize,
+      ValueType min,
+      ValueType max,
+      const ContainerType& defaultContainer);

   /* Return the bucket size of each bucket in the histogram. */
-  ValueType getBucketSize() const { return buckets_.getBucketSize(); }
+  ValueType getBucketSize() const {
+    return buckets_.getBucketSize();
+  }

   /* Return the min value at which bucketing begins. */
-  ValueType getMin() const { return buckets_.getMin(); }
+  ValueType getMin() const {
+    return buckets_.getMin();
+  }

   /* Return the max value at which bucketing ends. */
-  ValueType getMax() const { return buckets_.getMax(); }
+  ValueType getMax() const {
+    return buckets_.getMax();
+  }

   /* Return the number of levels of the Timeseries object in each bucket */
   size_t getNumLevels() const {
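A sketch of the constructor reformatted above, shaped like the tests at the bottom of this diff: bucketSize 10 over [10, 110), with a 60-bucket multi-level timeseries as the per-bucket container (durations illustrative):

#include <chrono>

#include <folly/stats/TimeseriesHistogram.h>
#include <folly/stats/TimeseriesHistogram-defs.h>

int main() {
  using std::chrono::seconds;
  const seconds kDurations[] = {seconds(60), seconds(3600), seconds(0)};
  folly::TimeseriesHistogram<int> h(
      10, 10, 110, folly::MultiLevelTimeSeries<int>(60, 3, kDurations));
  (void)h.getBucketSize(); // 10
  (void)h.getMin(); // 10
  (void)h.getMax(); // 110
  return 0;
}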
@@ -386,4 +395,4 @@ class TimeseriesHistogram {
   bool singleUniqueValue_;
   ValueType firstValue_;
 };

-} // folly
+} // namespace folly
@@ -20,7 +20,8 @@
 #include <cstdint>
 #include <type_traits>

-namespace folly { namespace detail {
+namespace folly {
+namespace detail {

 /*
  * Helper function to compute the average, given a specified input type and
@@ -31,7 +32,9 @@ namespace folly { namespace detail {
 // precision.
 template <typename ReturnType>
 ReturnType avgHelper(long double sum, uint64_t count) {
-  if (count == 0) { return ReturnType(0); }
+  if (count == 0) {
+    return ReturnType(0);
+  }
   const long double countf = count;
   return static_cast<ReturnType>(sum / countf);
 }
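A standalone sketch (not the folly symbol itself) of the contract above: an empty bucket averages to zero rather than dividing by zero, and non-long-double sums divide in double precision:

#include <cstdint>

template <typename ReturnType, typename ValueType>
ReturnType avgSketch(ValueType sum, uint64_t count) {
  if (count == 0) {
    return ReturnType(0); // empty bucket: define the average as 0
  }
  return static_cast<ReturnType>(double(sum) / double(count));
}

int main() {
  double a = avgSketch<double>(10, 4); // 2.5
  double z = avgSketch<double>(0, 0); // 0.0, no division by zero
  (void)a;
  (void)z;
  return 0;
}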
@@ -39,11 +42,13 @@ ReturnType avgHelper(long double sum, uint64_t count) {
 // In all other cases divide using double precision.
 // This should be relatively fast, and accurate enough for most use cases.
 template <typename ReturnType, typename ValueType>
-typename std::enable_if<!std::is_same<typename std::remove_cv<ValueType>::type,
-                                      long double>::value,
-                        ReturnType>::type
+typename std::enable_if<
+    !std::is_same<typename std::remove_cv<ValueType>::type, long double>::value,
+    ReturnType>::type
 avgHelper(ValueType sum, uint64_t count) {
-  if (count == 0) { return ReturnType(0); }
+  if (count == 0) {
+    return ReturnType(0);
+  }
   const double sumf = double(sum);
   const double countf = double(count);
   return static_cast<ReturnType>(sumf / countf);
@@ -73,23 +78,21 @@ ReturnType rateHelper(ReturnType count, Duration elapsed) {
       std::ratio<Duration::period::den, Duration::period::num>>
       NativeRate;
   typedef std::chrono::duration<
-      ReturnType, std::ratio<Interval::period::den,
-                             Interval::period::num>> DesiredRate;
+      ReturnType,
+      std::ratio<Interval::period::den, Interval::period::num>>
+      DesiredRate;

   NativeRate native(count / elapsed.count());
   DesiredRate desired = std::chrono::duration_cast<DesiredRate>(native);
   return desired.count();
 }

-template<typename T>
+template <typename T>
 struct Bucket {
  public:
   typedef T ValueType;

-  Bucket()
-    : sum(ValueType()),
-      count(0) {}
+  Bucket() : sum(ValueType()), count(0) {}

   void clear() {
     sum = ValueType();
@@ -122,5 +125,5 @@ struct Bucket {
   ValueType sum;
   uint64_t count;
 };

-}} // folly::detail
+} // namespace detail
+} // namespace folly
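A standalone sketch of the duration arithmetic rateHelper performs above: a rate is modeled as a duration with an inverted ratio (events per time unit), so std::chrono::duration_cast does the unit conversion. Names here are illustrative:

#include <chrono>
#include <ratio>

template <typename Interval, typename Duration>
double rateSketch(double count, Duration elapsed) {
  typedef std::chrono::duration<
      double,
      std::ratio<Duration::period::den, Duration::period::num>>
      NativeRate;
  typedef std::chrono::duration<
      double,
      std::ratio<Interval::period::den, Interval::period::num>>
      DesiredRate;
  NativeRate native(count / elapsed.count());
  return std::chrono::duration_cast<DesiredRate>(native).count();
}

int main() {
  // 120 events over 2 minutes, reported per second: 1.0
  double perSec =
      rateSketch<std::chrono::seconds>(120.0, std::chrono::minutes(2));
  (void)perSec;
  return 0;
}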
@@ -13,20 +13,23 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */

 #include <folly/stats/BucketedTimeSeries.h>
-#include <folly/stats/BucketedTimeSeries-defs.h>

 #include <glog/logging.h>

 #include <folly/Benchmark.h>
+#include <folly/stats/BucketedTimeSeries-defs.h>

 using std::chrono::seconds;
 using folly::BenchmarkSuspender;
 using folly::BucketedTimeSeries;

-void addValue(unsigned int iters,
-              seconds duration, size_t numBuckets,
-              size_t callsPerSecond) {
+void addValue(
+    unsigned int iters,
+    seconds duration,
+    size_t numBuckets,
+    size_t callsPerSecond) {
   BenchmarkSuspender suspend;
   BucketedTimeSeries<int64_t> ts(numBuckets, duration);
   suspend.dismiss();
@@ -70,7 +73,7 @@ BENCHMARK_NAMED_PARAM(addValue, 100x10_100perSec, seconds(100), 10, 100);
 BENCHMARK_NAMED_PARAM(addValue, 71x5_100perSec, seconds(71), 5, 100);
 BENCHMARK_NAMED_PARAM(addValue, 1x1_100perSec, seconds(1), 1, 100);

-int main(int argc, char *argv[]) {
+int main(int argc, char* argv[]) {
   gflags::ParseCommandLineFlags(&argc, &argv, true);
   folly::runBenchmarks();
   return 0;
...
@@ -27,7 +27,9 @@ void addValue(unsigned int n, int64_t bucketSize, int64_t min, int64_t max) {
   FOR_EACH_RANGE (i, 0, n) {
     hist.addValue(num);
     ++num;
-    if (num > max) { num = min; }
+    if (num > max) {
+      num = min;
+    }
   }
 }
@@ -35,7 +37,7 @@ BENCHMARK_NAMED_PARAM(addValue, 0_to_100, 1, 0, 100);
 BENCHMARK_NAMED_PARAM(addValue, 0_to_1000, 10, 0, 1000);
 BENCHMARK_NAMED_PARAM(addValue, 5k_to_20k, 250, 5000, 20000);

-int main(int argc, char *argv[]) {
+int main(int argc, char* argv[]) {
   gflags::ParseCommandLineFlags(&argc, &argv, true);
   folly::runBenchmarks();
   return 0;
...
@@ -15,9 +15,9 @@
  */

 #include <folly/stats/Histogram.h>
-#include <folly/stats/Histogram-defs.h>

 #include <folly/portability/GTest.h>
+#include <folly/stats/Histogram-defs.h>

 using folly::Histogram;
@@ -42,8 +42,8 @@ TEST(Histogram, Test100) {
     if (n < 100) {
       double lowPct = -1.0;
       double highPct = -1.0;
-      unsigned int bucketIdx = h.getPercentileBucketIdx(pct + epsilon,
-                                                        &lowPct, &highPct);
+      unsigned int bucketIdx =
+          h.getPercentileBucketIdx(pct + epsilon, &lowPct, &highPct);
       EXPECT_EQ(n + 1, bucketIdx);
       EXPECT_FLOAT_EQ(n / 100.0, lowPct);
       EXPECT_FLOAT_EQ((n + 1) / 100.0, highPct);
@@ -53,8 +53,8 @@ TEST(Histogram, Test100) {
     if (n > 0) {
       double lowPct = -1.0;
       double highPct = -1.0;
-      unsigned int bucketIdx = h.getPercentileBucketIdx(pct - epsilon,
-                                                        &lowPct, &highPct);
+      unsigned int bucketIdx =
+          h.getPercentileBucketIdx(pct - epsilon, &lowPct, &highPct);
       EXPECT_EQ(n, bucketIdx);
       EXPECT_FLOAT_EQ((n - 1) / 100.0, lowPct);
       EXPECT_FLOAT_EQ(n / 100.0, highPct);
@@ -212,7 +212,7 @@ TEST(Histogram, Counts) {
   // Add one to each bucket, make sure the counts match
   for (int32_t i = 0; i < 10; i++) {
     h.addValue(i);
-    EXPECT_EQ(i+1, h.computeTotalCount());
+    EXPECT_EQ(i + 1, h.computeTotalCount());
   }

   // Add a lot to one bucket, make sure the counts still make sense
...
@@ -69,16 +69,16 @@ struct TestData {
   vector<TimePoint> bucketStarts;
 };

 vector<TestData> testData = {
     // 71 seconds x 4 buckets
-    { 71, 4, {0, 18, 36, 54}},
+    {71, 4, {0, 18, 36, 54}},
     // 100 seconds x 10 buckets
-    { 100, 10, {0, 10, 20, 30, 40, 50, 60, 70, 80, 90}},
+    {100, 10, {0, 10, 20, 30, 40, 50, 60, 70, 80, 90}},
     // 10 seconds x 10 buckets
-    { 10, 10, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
+    {10, 10, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
     // 10 seconds x 1 buckets
-    { 10, 1, {0}},
+    {10, 1, {0}},
     // 1 second x 1 buckets
-    { 1, 1, {0}},
+    {1, 1, {0}},
 };
 }
@@ -119,11 +119,11 @@ TEST(BucketedTimeSeries, getBucketInfo) {
       size_t returnedIdx;
       TimePoint returnedStart;
       TimePoint returnedNextStart;
-      ts.getBucketInfo(expectedStart, &returnedIdx,
-                       &returnedStart, &returnedNextStart);
-      EXPECT_EQ(idx, returnedIdx) << data.duration << "x" << data.numBuckets
-                                  << ": " << point.first << "="
-                                  << point.second;
+      ts.getBucketInfo(
+          expectedStart, &returnedIdx, &returnedStart, &returnedNextStart);
+      EXPECT_EQ(idx, returnedIdx)
+          << data.duration << "x" << data.numBuckets << ": " << point.first
+          << "=" << point.second;
       EXPECT_EQ(expectedStart, returnedStart)
           << data.duration << "x" << data.numBuckets << ": " << point.first
           << "=" << point.second;
@@ -167,7 +167,7 @@ void testUpdate100x10(size_t offset) {
   setup();
   ts.update(seconds(151 + offset));
   EXPECT_EQ(4, ts.count());
-  //EXPECT_EQ(6, ts.sum());
+  // EXPECT_EQ(6, ts.sum());
   EXPECT_EQ(6, ts.avg());

   // The last time we added was 95.
@@ -399,9 +399,10 @@ TEST(BucketedTimeSeries, avgTypeConversion) {
   {
     // Test uint64_t values that would overflow int64_t
     BucketedTimeSeries<uint64_t> ts(60, seconds(600));
-    ts.addValueAggregated(seconds(0),
-                          std::numeric_limits<uint64_t>::max(),
-                          std::numeric_limits<uint64_t>::max());
+    ts.addValueAggregated(
+        seconds(0),
+        std::numeric_limits<uint64_t>::max(),
+        std::numeric_limits<uint64_t>::max());

     EXPECT_DOUBLE_EQ(1.0, ts.avg());
     EXPECT_DOUBLE_EQ(1.0, ts.avg<float>());
@@ -443,9 +444,7 @@ TEST(BucketedTimeSeries, avgTypeConversion) {
     // but the average fits in an int64_t
     BucketedTimeSeries<double> ts(60, seconds(600));
     uint64_t value = 0x3fffffffffffffff;
-    FOR_EACH_RANGE(i, 0, 16) {
-      ts.addValue(seconds(0), value);
-    }
+    FOR_EACH_RANGE (i, 0, 16) { ts.addValue(seconds(0), value); }

     EXPECT_DOUBLE_EQ(value, ts.avg());
     EXPECT_DOUBLE_EQ(value, ts.avg<float>());
@@ -458,9 +457,7 @@ TEST(BucketedTimeSeries, avgTypeConversion) {
   {
     // Test BucketedTimeSeries with a smaller integer type
     BucketedTimeSeries<int16_t> ts(60, seconds(600));
-    FOR_EACH_RANGE(i, 0, 101) {
-      ts.addValue(seconds(0), i);
-    }
+    FOR_EACH_RANGE (i, 0, 101) { ts.addValue(seconds(0), i); }

     EXPECT_DOUBLE_EQ(50.0, ts.avg());
     EXPECT_DOUBLE_EQ(50.0, ts.avg<float>());
@@ -513,10 +510,9 @@ TEST(BucketedTimeSeries, forEachBucket) {
     BucketedTimeSeries<int64_t> ts(data.numBuckets, seconds(data.duration));

     vector<BucketInfo> info;
-    auto fn = [&](
-        const Bucket& bucket,
-        TimePoint bucketStart,
-        TimePoint bucketEnd) -> bool {
+    auto fn = [&](const Bucket& bucket,
+                  TimePoint bucketStart,
+                  TimePoint bucketEnd) -> bool {
       info.emplace_back(&bucket, bucketStart, bucketEnd);
       return true;
     };
@@ -589,24 +585,26 @@ TEST(BucketedTimeSeries, queryByInterval) {
   // 0: time=[0, 2): values=(0, 1), sum=1, count=2
   // 1: time=[2, 4): values=(2, 3), sum=5, count=1
   // 2: time=[4, 6): values=(4, 5), sum=9, count=2
+  // clang-format off
   double expectedSums1[kDuration + 1][kDuration + 1] = {
     {0, 4.5, 9, 11.5, 14, 14.5, 15},
     {0, 4.5, 7, 9.5, 10, 10.5, -1},
     {0, 2.5, 5, 5.5, 6, -1, -1},
     {0, 2.5, 3, 3.5, -1, -1, -1},
     {0, 0.5, 1, -1, -1, -1, -1},
     {0, 0.5, -1, -1, -1, -1, -1},
-    {0, -1, -1, -1, -1, -1, -1}
+    {0, -1, -1, -1, -1, -1, -1},
   };
   int expectedCounts1[kDuration + 1][kDuration + 1] = {
     {0, 1, 2, 3, 4, 5, 6},
     {0, 1, 2, 3, 4, 5, -1},
     {0, 1, 2, 3, 4, -1, -1},
     {0, 1, 2, 3, -1, -1, -1},
     {0, 1, 2, -1, -1, -1, -1},
     {0, 1, -1, -1, -1, -1, -1},
-    {0, -1, -1, -1, -1, -1, -1}
+    {0, -1, -1, -1, -1, -1, -1},
   };
+  // clang-format on

   TimePoint currentTime = b.getLatestTime() + seconds(1);
   for (int i = 0; i <= kDuration + 1; i++) {
@@ -646,24 +644,26 @@ TEST(BucketedTimeSeries, queryByInterval) {
   // 0: time=[6, 8): values=(6, 7), sum=13, count=2
   // 1: time=[8, 10): values=(8), sum=8, count=1
   // 2: time=[4, 6): values=(4, 5), sum=9, count=2
+  // clang-format off
   double expectedSums2[kDuration + 1][kDuration + 1] = {
     {0, 8, 14.5, 21, 25.5, 30, 30},
     {0, 6.5, 13, 17.5, 22, 22, -1},
     {0, 6.5, 11, 15.5, 15.5, -1, -1},
     {0, 4.5, 9, 9, -1, -1, -1},
     {0, 4.5, 4.5, -1, -1, -1, -1},
     {0, 0, -1, -1, -1, -1, -1},
-    {0, -1, -1, -1, -1, -1, -1}
+    {0, -1, -1, -1, -1, -1, -1},
   };
   int expectedCounts2[kDuration + 1][kDuration + 1] = {
     {0, 1, 2, 3, 4, 5, 5},
     {0, 1, 2, 3, 4, 4, -1},
     {0, 1, 2, 3, 3, -1, -1},
     {0, 1, 2, 2, -1, -1, -1},
     {0, 1, 1, -1, -1, -1, -1},
     {0, 0, -1, -1, -1, -1, -1},
-    {0, -1, -1, -1, -1, -1, -1}
+    {0, -1, -1, -1, -1, -1, -1},
   };
+  // clang-format on

   currentTime = b.getLatestTime() + seconds(1);
   for (int i = 0; i <= kDuration + 1; i++) {
@@ -692,8 +692,8 @@ TEST(BucketedTimeSeries, queryByInterval) {
         << "i=" << i << ", j=" << j << ", interval=[" << start << ", " << end
         << ")";

-    double expectedRate = expectedInterval.count() ?
-      expectedSum / expectedInterval.count() : 0;
+    double expectedRate =
+        expectedInterval.count() ? expectedSum / expectedInterval.count() : 0;
     EXPECT_EQ(expectedRate, b.rate(start, end))
         << "i=" << i << ", j=" << j << ", interval=[" << start << ", " << end
         << ")";
@@ -890,21 +890,19 @@ TEST(BucketedTimeSeries, reConstructWithCorruptedData) {
 }

 namespace IntMHTS {
 enum Levels {
   MINUTE,
   HOUR,
   ALLTIME,
   NUM_LEVELS,
 };

-const seconds kMinuteHourDurations[] = {
-  seconds(60), seconds(3600), seconds(0)
-};
+const seconds kMinuteHourDurations[] = {seconds(60), seconds(3600), seconds(0)};
 };

 TEST(MinuteHourTimeSeries, Basic) {
-  folly::MultiLevelTimeSeries<int> mhts(60, IntMHTS::NUM_LEVELS,
-                                        IntMHTS::kMinuteHourDurations);
+  folly::MultiLevelTimeSeries<int> mhts(
+      60, IntMHTS::NUM_LEVELS, IntMHTS::kMinuteHourDurations);
   EXPECT_EQ(mhts.numLevels(), IntMHTS::NUM_LEVELS);
   EXPECT_EQ(mhts.numLevels(), 3);
@@ -943,8 +941,8 @@ TEST(MinuteHourTimeSeries, Basic) {
   EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 300);

   EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 600);
-  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 300*10);
-  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 300*10);
+  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 300 * 10);
+  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 300 * 10);

   EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 10);
   EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 10);
@@ -954,18 +952,18 @@ TEST(MinuteHourTimeSeries, Basic) {
   EXPECT_EQ(mhts.rate(IntMHTS::HOUR), 10);
   EXPECT_EQ(mhts.rate(IntMHTS::ALLTIME), 10);

-  for (int i = 0; i < 3600*3 - 300; ++i) {
+  for (int i = 0; i < 3600 * 3 - 300; ++i) {
     mhts.addValue(cur_time++, 10);
   }
   mhts.flush();

   EXPECT_EQ(mhts.getLevel(IntMHTS::MINUTE).elapsed().count(), 60);
   EXPECT_EQ(mhts.getLevel(IntMHTS::HOUR).elapsed().count(), 3600);
-  EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 3600*3);
+  EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 3600 * 3);

   EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 600);
-  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600*10);
-  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600*3*10);
+  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600 * 10);
+  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10);

   EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 10);
   EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 10);
@@ -980,10 +978,9 @@ TEST(MinuteHourTimeSeries, Basic) {
   }
   mhts.flush();

-  EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*100);
-  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600*100);
-  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
-            3600*3*10 + 3600*100);
+  EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 100);
+  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600 * 100);
+  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10 + 3600 * 100);

   EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 100);
   EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 100);
@@ -1000,30 +997,29 @@ TEST(MinuteHourTimeSeries, Basic) {
   }
   mhts.flush();

-  EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*120);
-  EXPECT_EQ(mhts.sum(IntMHTS::HOUR),
-            1800*100 + 1800*120);
-  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
-            3600*3*10 + 3600*100 + 1800*120);
+  EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 120);
+  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 1800 * 100 + 1800 * 120);
+  EXPECT_EQ(
+      mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10 + 3600 * 100 + 1800 * 120);

   for (int i = 0; i < 60; ++i) {
     mhts.addValue(cur_time++, 1000);
   }
   mhts.flush();

-  EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*1000);
-  EXPECT_EQ(mhts.sum(IntMHTS::HOUR),
-            1740*100 + 1800*120 + 60*1000);
-  EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
-            3600*3*10 + 3600*100 + 1800*120 + 60*1000);
+  EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 1000);
+  EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 1740 * 100 + 1800 * 120 + 60 * 1000);
+  EXPECT_EQ(
+      mhts.sum(IntMHTS::ALLTIME),
+      3600 * 3 * 10 + 3600 * 100 + 1800 * 120 + 60 * 1000);

   mhts.clear();
   EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 0);
 }

 TEST(MinuteHourTimeSeries, QueryByInterval) {
-  folly::MultiLevelTimeSeries<int> mhts(60, IntMHTS::NUM_LEVELS,
-                                        IntMHTS::kMinuteHourDurations);
+  folly::MultiLevelTimeSeries<int> mhts(
+      60, IntMHTS::NUM_LEVELS, IntMHTS::kMinuteHourDurations);

   TimePoint curTime;
   for (curTime = mkTimePoint(0); curTime < mkTimePoint(7200);
@@ -1045,27 +1041,37 @@ TEST(MinuteHourTimeSeries, QueryByInterval) {
     TimePoint end;
   };
   TimeInterval intervals[12] = {
-    { curTime - seconds(60), curTime },
-    { curTime - seconds(3600), curTime },
-    { curTime - seconds(7200), curTime },
-    { curTime - seconds(3600), curTime - seconds(60) },
-    { curTime - seconds(7200), curTime - seconds(60) },
-    { curTime - seconds(7200), curTime - seconds(3600) },
-    { curTime - seconds(50), curTime - seconds(20) },
-    { curTime - seconds(3020), curTime - seconds(20) },
-    { curTime - seconds(7200), curTime - seconds(20) },
-    { curTime - seconds(3000), curTime - seconds(1000) },
-    { curTime - seconds(7200), curTime - seconds(1000) },
-    { curTime - seconds(7200), curTime - seconds(3600) },
+      {curTime - seconds(60), curTime},
+      {curTime - seconds(3600), curTime},
+      {curTime - seconds(7200), curTime},
+      {curTime - seconds(3600), curTime - seconds(60)},
+      {curTime - seconds(7200), curTime - seconds(60)},
+      {curTime - seconds(7200), curTime - seconds(3600)},
+      {curTime - seconds(50), curTime - seconds(20)},
+      {curTime - seconds(3020), curTime - seconds(20)},
+      {curTime - seconds(7200), curTime - seconds(20)},
+      {curTime - seconds(3000), curTime - seconds(1000)},
+      {curTime - seconds(7200), curTime - seconds(1000)},
+      {curTime - seconds(7200), curTime - seconds(3600)},
   };

   int expectedSums[12] = {
-    6000, 41400, 32400, 35400, 32130, 16200, 3000, 33600, 32310, 20000, 27900,
-    16200
+      6000,
+      41400,
+      32400,
+      35400,
+      32130,
+      16200,
+      3000,
+      33600,
+      32310,
+      20000,
+      27900,
+      16200,
   };

   int expectedCounts[12] = {
-    60, 3600, 7200, 3540, 7140, 3600, 30, 3000, 7180, 2000, 6200, 3600
+      60, 3600, 7200, 3540, 7140, 3600, 30, 3000, 7180, 2000, 6200, 3600,
   };

   for (int i = 0; i < 12; ++i) {
@@ -1078,13 +1084,11 @@ TEST(MinuteHourTimeSeries, QueryByInterval) {
     EXPECT_EQ(expectedCounts[i], c);

     int a = mhts.avg<int>(interval.start, interval.end);
-    EXPECT_EQ(expectedCounts[i] ?
-              (expectedSums[i] / expectedCounts[i]) : 0,
-              a);
+    EXPECT_EQ(expectedCounts[i] ? (expectedSums[i] / expectedCounts[i]) : 0, a);

     int r = mhts.rate<int>(interval.start, interval.end);
     int expectedRate =
         expectedSums[i] / (interval.end - interval.start).count();
     EXPECT_EQ(expectedRate, r);
   }
 }
...
@@ -15,11 +15,11 @@
  */

 #include <folly/stats/TimeseriesHistogram.h>
-#include <folly/stats/TimeseriesHistogram-defs.h>

 #include <random>

 #include <folly/portability/GTest.h>
+#include <folly/stats/TimeseriesHistogram-defs.h>

 using namespace std;
 using namespace folly;
@@ -27,30 +27,35 @@ using std::chrono::seconds;
 namespace {
 namespace IntMTMHTS {
 enum Levels {
   MINUTE,
   TEN_MINUTE,
   HOUR,
   ALLTIME,
   NUM_LEVELS,
 };

 const seconds kDurations[] = {
-    seconds(60), seconds(600), seconds(3600), seconds(0)
+    seconds(60),
+    seconds(600),
+    seconds(3600),
+    seconds(0),
 };
 };

 namespace IntMHTS {
 enum Levels {
   MINUTE,
   HOUR,
   ALLTIME,
   NUM_LEVELS,
 };

 const seconds kDurations[] = {
-    seconds(60), seconds(3600), seconds(0)
+    seconds(60),
+    seconds(3600),
+    seconds(0),
 };
 };

 typedef std::mt19937 RandomInt32;
@@ -65,10 +70,12 @@ TEST(TimeseriesHistogram, Percentile) {
   RandomInt32 random(5);

   // [10, 109], 12 buckets including above and below
   {
-    TimeseriesHistogram<int> h(10, 10, 110,
-                               MultiLevelTimeSeries<int>(
-                                 60, IntMTMHTS::NUM_LEVELS,
-                                 IntMTMHTS::kDurations));
+    TimeseriesHistogram<int> h(
+        10,
+        10,
+        110,
+        MultiLevelTimeSeries<int>(
+            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

     EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
@@ -91,8 +98,9 @@ TEST(TimeseriesHistogram, Percentile) {
     h.update(mkTimePoint(1500000000));
     // bucket 0 stores everything below min, so its minimum
     // is the lowest possible number
-    EXPECT_EQ(std::numeric_limits<int>::min(),
-              h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
+    EXPECT_EQ(
+        std::numeric_limits<int>::min(),
+        h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
     EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));

     EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
@@ -106,10 +114,12 @@ TEST(TimeseriesHistogram, String) {
   RandomInt32 random(5);

   // [10, 109], 12 buckets including above and below
   {
-    TimeseriesHistogram<int> hist(10, 10, 110,
-                                  MultiLevelTimeSeries<int>(
-                                    60, IntMTMHTS::NUM_LEVELS,
-                                    IntMTMHTS::kDurations));
+    TimeseriesHistogram<int> hist(
+        10,
+        10,
+        110,
+        MultiLevelTimeSeries<int>(
+            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

     int maxVal = 120;
     hist.addValue(mkTimePoint(0), 0);
@@ -120,14 +130,14 @@ TEST(TimeseriesHistogram, String) {
     hist.update(mkTimePoint(0));

     const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
     };
@@ -137,14 +147,14 @@ TEST(TimeseriesHistogram, String) {
       EXPECT_EQ(kStringValues1[level], hist.getString(level));
     }

     const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
         "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
     };
@@ -158,10 +168,12 @@ TEST(TimeseriesHistogram, String) {

 TEST(TimeseriesHistogram, Clear) {
   {
-    TimeseriesHistogram<int> hist(10, 0, 100,
-                                  MultiLevelTimeSeries<int>(
-                                    60, IntMTMHTS::NUM_LEVELS,
-                                    IntMTMHTS::kDurations));
+    TimeseriesHistogram<int> hist(
+        10,
+        0,
+        100,
+        MultiLevelTimeSeries<int>(
+            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

     for (int now = 0; now < 3600; now++) {
       for (int i = 0; i < 100; i++) {
} }
} }
TEST(TimeseriesHistogram, Basic) { TEST(TimeseriesHistogram, Basic) {
{ {
TimeseriesHistogram<int> hist(10, 0, 100, TimeseriesHistogram<int> hist(
MultiLevelTimeSeries<int>( 10,
60, IntMTMHTS::NUM_LEVELS, 0,
IntMTMHTS::kDurations)); 100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) { for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) { for (int i = 0; i < 100; i++) {
@@ -211,8 +224,8 @@ TEST(TimeseriesHistogram, Basic) {
     for (int pct = 1; pct <= 100; pct++) {
       int expected = (pct - 1) / 10 * 10;
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
-      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
-                                                      IntMTMHTS::TEN_MINUTE));
+      EXPECT_EQ(
+          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
     }
@@ -224,8 +237,8 @@ TEST(TimeseriesHistogram, Basic) {
       EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
     }
     EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
-    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
-                   IntMTMHTS::MINUTE));
+    EXPECT_EQ(
+        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));

     EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
     EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
// ----------------- // -----------------
{ {
TimeseriesHistogram<int> hist(10, 0, 100, TimeseriesHistogram<int> hist(
MultiLevelTimeSeries<int>( 10,
60, IntMTMHTS::NUM_LEVELS, 0,
IntMTMHTS::kDurations)); 100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) { for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) { for (int i = 0; i < 100; i++) {
@@ -290,30 +305,32 @@ TEST(TimeseriesHistogram, Basic) {
     for (int pct = 1; pct <= 100; pct++) {
       int expected = (pct - 1) / 10 * 10;
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
-      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
-                                                      IntMTMHTS::TEN_MINUTE));
+      EXPECT_EQ(
+          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
     }
 
     for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
       EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
       EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
       EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
       EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
     }
     EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
-    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
-                   IntMTMHTS::MINUTE));
+    EXPECT_EQ(
+        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
   }
 
   // -----------------
 
   {
-    TimeseriesHistogram<int> hist(10, 0, 100,
-                                  MultiLevelTimeSeries<int>(
-                                    60, IntMTMHTS::NUM_LEVELS,
-                                    IntMTMHTS::kDurations));
+    TimeseriesHistogram<int> hist(
+        10,
+        0,
+        100,
+        MultiLevelTimeSeries<int>(
+            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
 
     for (int now = 0; now < 3600; now++) {
       for (int i = 0; i < 50; i++) {
@@ -325,8 +342,8 @@ TEST(TimeseriesHistogram, Basic) {
     for (int pct = 1; pct <= 100; pct++) {
       int expected = (pct - 1) / 10 * 10;
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
-      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
-                                                      IntMTMHTS::TEN_MINUTE));
+      EXPECT_EQ(
+          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
       EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
     }
@@ -335,16 +352,15 @@ TEST(TimeseriesHistogram, Basic) {
     EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
     EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
     EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
-    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
-                   IntMTMHTS::MINUTE));
-    EXPECT_EQ(0,
-              hist.getBucket(hist.getNumBuckets() - 1).
-                count(IntMTMHTS::TEN_MINUTE));
-    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
-                   IntMTMHTS::HOUR));
-    EXPECT_EQ(0,
-              hist.getBucket(hist.getNumBuckets() - 1).count(
-                IntMTMHTS::ALLTIME));
+    EXPECT_EQ(
+        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
+    EXPECT_EQ(
+        0,
+        hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::TEN_MINUTE));
+    EXPECT_EQ(
+        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::HOUR));
+    EXPECT_EQ(
+        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
 
     for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
       EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
@@ -357,18 +373,18 @@ TEST(TimeseriesHistogram, Basic) {
       hist.addValue(mkTimePoint(3599), 200 + i);
     }
     hist.update(mkTimePoint(3599));
-    EXPECT_EQ(100,
-              hist.getBucket(hist.getNumBuckets() - 1).count(
-                IntMTMHTS::ALLTIME));
+    EXPECT_EQ(
+        100,
+        hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
   }
 }
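Note: the write/read protocol the Basic test exercises is addValue() to record samples at a time point, then one update() to advance the level windows before reading, since queries only reflect data up to the last update. A compact sketch of the same flow, reusing hist and the mkTimePoint helper from this file:

    // Feed one hour of data: 100 samples per simulated second.
    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i);
      }
    }
    hist.update(mkTimePoint(3599)); // flush buckets up to t = 3599

    // The MINUTE level now holds the trailing 60 s of data.
    EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE)); // 100 samples/s * 60 s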
 
 TEST(TimeseriesHistogram, QueryByInterval) {
-  TimeseriesHistogram<int> mhts(8, 8, 120,
-                                MultiLevelTimeSeries<int>(
-                                  60, IntMHTS::NUM_LEVELS,
-                                  IntMHTS::kDurations));
+  TimeseriesHistogram<int> mhts(
+      8,
+      8,
+      120,
+      MultiLevelTimeSeries<int>(60, IntMHTS::NUM_LEVELS, IntMHTS::kDurations));
 
   mhts.update(mkTimePoint(0));
@@ -392,27 +408,37 @@ TEST(TimeseriesHistogram, QueryByInterval) {
     StatsClock::time_point end;
   };
 
   TimeInterval intervals[12] = {
-    { curTime - 60, curTime },
-    { curTime - 3600, curTime },
-    { curTime - 7200, curTime },
-    { curTime - 3600, curTime - 60 },
-    { curTime - 7200, curTime - 60 },
-    { curTime - 7200, curTime - 3600 },
-    { curTime - 50, curTime - 20 },
-    { curTime - 3020, curTime - 20 },
-    { curTime - 7200, curTime - 20 },
-    { curTime - 3000, curTime - 1000 },
-    { curTime - 7200, curTime - 1000 },
-    { curTime - 7200, curTime - 3600 },
+      {curTime - 60, curTime},
+      {curTime - 3600, curTime},
+      {curTime - 7200, curTime},
+      {curTime - 3600, curTime - 60},
+      {curTime - 7200, curTime - 60},
+      {curTime - 7200, curTime - 3600},
+      {curTime - 50, curTime - 20},
+      {curTime - 3020, curTime - 20},
+      {curTime - 7200, curTime - 20},
+      {curTime - 3000, curTime - 1000},
+      {curTime - 7200, curTime - 1000},
+      {curTime - 7200, curTime - 3600},
   };
 
   int expectedSums[12] = {
-    6000, 41400, 32400, 35400, 32129, 16200, 3000, 33600, 32308, 20000, 27899,
-    16200
+      6000,
+      41400,
+      32400,
+      35400,
+      32129,
+      16200,
+      3000,
+      33600,
+      32308,
+      20000,
+      27899,
+      16200,
   };
 
   int expectedCounts[12] = {
-    60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600
+      60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600,
   };
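Note: the expected counts follow from a one-sample-per-second feed: a fully covered 60 s window holds exactly 60 samples, while windows whose endpoints cut partway through a bucket are prorated linearly, which is why partially covered intervals assert near-miss values such as 7139 and 32129 rather than round numbers; the interval query is an estimate, not an exact count.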
 
   // The first 7200 values added all fell below the histogram minimum,
@@ -421,18 +447,18 @@ TEST(TimeseriesHistogram, QueryByInterval) {
   int belowMinBucket = std::numeric_limits<int>::min();
 
   int expectedValues[12][3] = {
-    {96, 96, 96},
-    { 8, 8, 96},
-    { belowMinBucket, belowMinBucket, 8}, // alltime
-    { 8, 8, 8},
-    { belowMinBucket, belowMinBucket, 8}, // alltime
-    { belowMinBucket, belowMinBucket, 8}, // alltime
-    {96, 96, 96},
-    { 8, 8, 96},
-    { belowMinBucket, belowMinBucket, 8}, // alltime
-    { 8, 8, 8},
-    { belowMinBucket, belowMinBucket, 8}, // alltime
-    { belowMinBucket, belowMinBucket, 8} // alltime
+      {96, 96, 96},
+      {8, 8, 96},
+      {belowMinBucket, belowMinBucket, 8}, // alltime
+      {8, 8, 8},
+      {belowMinBucket, belowMinBucket, 8}, // alltime
+      {belowMinBucket, belowMinBucket, 8}, // alltime
+      {96, 96, 96},
+      {8, 8, 96},
+      {belowMinBucket, belowMinBucket, 8}, // alltime
+      {8, 8, 8},
+      {belowMinBucket, belowMinBucket, 8}, // alltime
+      {belowMinBucket, belowMinBucket, 8} // alltime
   };
 
   for (int i = 0; i < 12; i++) {
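Note: the belowMinBucket rows assert a boundary behavior: samples smaller than the histogram's configured minimum (8 here) land in a catch-all under-minimum bucket, which getPercentileBucketMin reports as std::numeric_limits<int>::min(). A hedged sketch of that behavior in isolation, reusing this file's fixtures:

    TimeseriesHistogram<int> h(
        8,
        8,
        120,
        MultiLevelTimeSeries<int>(60, IntMHTS::NUM_LEVELS, IntMHTS::kDurations));
    h.addValue(mkTimePoint(0), 5); // below the configured minimum of 8
    h.update(mkTimePoint(0));
    // The only sample is under-min, so any percentile resolves to INT_MIN.
    EXPECT_EQ(std::numeric_limits<int>::min(), h.getPercentileBucketMin(50, 0));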
@@ -502,10 +528,12 @@ TEST(TimeseriesHistogram, SingleUniqueValue) {
   int values[] = {-1, 0, 500, 1000, 1500};
   for (int ii = 0; ii < 5; ++ii) {
     int value = values[ii];
-    TimeseriesHistogram<int> h(10, 0, 1000,
-                               MultiLevelTimeSeries<int>(
-                                 60, IntMTMHTS::NUM_LEVELS,
-                                 IntMTMHTS::kDurations));
+    TimeseriesHistogram<int> h(
+        10,
+        0,
+        1000,
+        MultiLevelTimeSeries<int>(
+            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
 
     const int kNumIters = 1000;
     for (int jj = 0; jj < kNumIters; ++jj) {
@@ -520,20 +548,20 @@ TEST(TimeseriesHistogram, SingleUniqueValue) {
     // Things get trickier if there are multiple unique values.
     const int kNewValue = 750;
-    for (int kk = 0; kk < 2*kNumIters; ++kk) {
+    for (int kk = 0; kk < 2 * kNumIters; ++kk) {
       h.addValue(mkTimePoint(1), kNewValue);
     }
     h.update(mkTimePoint(1));
-    EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue+5, 5);
+    EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue + 5, 5);
 
     if (value >= 0 && value <= 1000) {
       // only do further testing if value is within our bucket range,
       // else estimates can be wildly off
       if (kNewValue > value) {
-        EXPECT_NEAR(h.getPercentileEstimate(10, 0), value+5, 5);
-        EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue+5, 5);
+        EXPECT_NEAR(h.getPercentileEstimate(10, 0), value + 5, 5);
+        EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue + 5, 5);
       } else {
-        EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue+5, 5);
-        EXPECT_NEAR(h.getPercentileEstimate(99, 0), value+5, 5);
+        EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue + 5, 5);
+        EXPECT_NEAR(h.getPercentileEstimate(99, 0), value + 5, 5);
       }
     }
   }
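Note: the recurring value + 5 targets come from interpolation inside a bucket: with a bucket size of 10, a run of identical samples all sit in one [v, v + 10) bucket, and getPercentileEstimate interpolates to roughly the bucket midpoint, v + 5. For value = 500 that means estimates near 505, hence EXPECT_NEAR(..., value + 5, 5).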