blob: a465fbd6c91eeb5937f863483eecfedb9e5a4598 [file] [log] [blame]
[email protected]2f7d9cd2012-09-22 03:42:121// Copyright (c) 2012 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "base/metrics/sample_vector.h"
6
Hans Wennborgc3cffa62020-04-27 10:09:127#include "base/check_op.h"
bcwhitefa8485b2017-05-01 16:43:258#include "base/lazy_instance.h"
bcwhitefa8485b2017-05-01 16:43:259#include "base/memory/ptr_util.h"
10#include "base/metrics/persistent_memory_allocator.h"
Hans Wennborgc3cffa62020-04-27 10:09:1211#include "base/notreached.h"
asvitkine3f17b1462017-05-03 21:37:3712#include "base/numerics/safe_conversions.h"
bcwhitefa8485b2017-05-01 16:43:2513#include "base/synchronization/lock.h"
14#include "base/threading/platform_thread.h"
15
16// This SampleVector makes use of the single-sample embedded in the base
17// HistogramSamples class. If the count is non-zero then there is guaranteed
18// (within the bounds of "eventual consistency") to be no allocated external
19// storage. Once the full counts storage is allocated, the single-sample must
20// be extracted and disabled.
[email protected]2f7d9cd2012-09-22 03:42:1221
[email protected]2f7d9cd2012-09-22 03:42:1222namespace base {
23
24typedef HistogramBase::Count Count;
25typedef HistogramBase::Sample Sample;
26
// Builds a sample vector over |bucket_ranges|, which must outlive this object
// and contain at least one bucket. |meta| supplies the (possibly shared)
// HistogramSamples metadata; ownership of it is NOT taken here.
SampleVectorBase::SampleVectorBase(uint64_t id,
                                   Metadata* meta,
                                   const BucketRanges* bucket_ranges)
    : HistogramSamples(id, meta), bucket_ranges_(bucket_ranges) {
  CHECK_GE(bucket_ranges_->bucket_count(), 1u);
}
33
Chris Watkinsbb7211c2017-11-29 07:16:3834SampleVectorBase::~SampleVectorBase() = default;
[email protected]2f7d9cd2012-09-22 03:42:1235
// Adds |count| samples of |value| to the appropriate bucket, transparently
// handling the transition from single-sample to full counts storage.
void SampleVectorBase::Accumulate(Sample value, Count count) {
  const size_t bucket_index = GetBucketIndex(value);

  // Handle the single-sample case.
  if (!counts()) {
    // Try to accumulate the parameters into the single-count entry.
    if (AccumulateSingleSample(value, count, bucket_index)) {
      // A race condition could lead to a new single-sample being accumulated
      // above just after another thread executed the MountCountsStorage below.
      // Since it is mounted, it could be mounted elsewhere and have values
      // written to it. It's not allowed to have both a single-sample and
      // entries in the counts array so move the single-sample.
      if (counts())
        MoveSingleSampleToCounts();
      return;
    }

    // Need real storage to store both what was in the single-sample plus the
    // parameter information.
    MountCountsStorageAndMoveSingleSample();
  }

  // Handle the multi-sample case.
  Count new_value =
      subtle::NoBarrier_AtomicIncrement(&counts()[bucket_index], count);
  IncreaseSumAndCount(strict_cast<int64_t>(count) * value, count);

  // TODO(bcwhite) Remove after crbug.com/682680.
  // A sign change across the increment indicates the bucket count overflowed.
  Count old_value = new_value - count;
  if ((new_value >= 0) != (old_value >= 0) && count > 0)
    RecordNegativeSample(SAMPLES_ACCUMULATE_OVERFLOW, count);
}
68
bcwhitefa8485b2017-05-01 16:43:2569Count SampleVectorBase::GetCount(Sample value) const {
70 return GetCountAtIndex(GetBucketIndex(value));
[email protected]2f7d9cd2012-09-22 03:42:1271}
72
bcwhitefa8485b2017-05-01 16:43:2573Count SampleVectorBase::TotalCount() const {
74 // Handle the single-sample case.
75 SingleSample sample = single_sample().Load();
76 if (sample.count != 0)
77 return sample.count;
78
79 // Handle the multi-sample case.
80 if (counts() || MountExistingCountsStorage()) {
81 Count count = 0;
82 size_t size = counts_size();
83 const HistogramBase::AtomicCount* counts_array = counts();
84 for (size_t i = 0; i < size; ++i) {
85 count += subtle::NoBarrier_Load(&counts_array[i]);
86 }
87 return count;
88 }
89
90 // And the no-value case.
91 return 0;
[email protected]2f7d9cd2012-09-22 03:42:1292}
93
bcwhitefa8485b2017-05-01 16:43:2594Count SampleVectorBase::GetCountAtIndex(size_t bucket_index) const {
95 DCHECK(bucket_index < counts_size());
96
97 // Handle the single-sample case.
98 SingleSample sample = single_sample().Load();
99 if (sample.count != 0)
100 return sample.bucket == bucket_index ? sample.count : 0;
101
102 // Handle the multi-sample case.
103 if (counts() || MountExistingCountsStorage())
104 return subtle::NoBarrier_Load(&counts()[bucket_index]);
105
106 // And the no-value case.
107 return 0;
108}
109
110std::unique_ptr<SampleCountIterator> SampleVectorBase::Iterator() const {
111 // Handle the single-sample case.
112 SingleSample sample = single_sample().Load();
113 if (sample.count != 0) {
Jeremy Roman9532f252017-08-16 23:27:24114 return std::make_unique<SingleSampleIterator>(
bcwhitefa8485b2017-05-01 16:43:25115 bucket_ranges_->range(sample.bucket),
116 bucket_ranges_->range(sample.bucket + 1), sample.count, sample.bucket);
117 }
118
119 // Handle the multi-sample case.
120 if (counts() || MountExistingCountsStorage()) {
Jeremy Roman9532f252017-08-16 23:27:24121 return std::make_unique<SampleVectorIterator>(counts(), counts_size(),
122 bucket_ranges_);
bcwhitefa8485b2017-05-01 16:43:25123 }
124
125 // And the no-value case.
Jeremy Roman9532f252017-08-16 23:27:24126 return std::make_unique<SampleVectorIterator>(nullptr, 0, bucket_ranges_);
bcwhitefa8485b2017-05-01 16:43:25127}
128
// Applies all samples from |iter| to this vector, adding or subtracting per
// |op|. Returns false if any incoming sample does not exactly match one of
// this vector's buckets. Note: the caller has already updated sum and
// redundant-count, so this method must not do so again.
bool SampleVectorBase::AddSubtractImpl(SampleCountIterator* iter,
                                       HistogramSamples::Operator op) {
  // Stop now if there's nothing to do.
  if (iter->Done())
    return true;

  // Get the first value and its index.
  HistogramBase::Sample min;
  int64_t max;
  HistogramBase::Count count;
  iter->Get(&min, &max, &count);
  size_t dest_index = GetBucketIndex(min);

  // The destination must be a superset of the source meaning that though the
  // incoming ranges will find an exact match, the incoming bucket-index, if
  // it exists, may be offset from the destination bucket-index. Calculate
  // that offset of the passed iterator; there are no overflow checks
  // because 2's complement math will work it out in the end.
  //
  // Because GetBucketIndex() always returns the same true or false result for
  // a given iterator object, |index_offset| is either set here and used below,
  // or never set and never used. The compiler doesn't know this, though, which
  // is why it's necessary to initialize it to something.
  size_t index_offset = 0;
  size_t iter_index;
  if (iter->GetBucketIndex(&iter_index))
    index_offset = dest_index - iter_index;
  if (dest_index >= counts_size())
    return false;

  // Post-increment. Information about the current sample is not available
  // after this point.
  iter->Next();

  // Single-value storage is possible if there is no counts storage and the
  // retrieved entry is the only one in the iterator.
  if (!counts()) {
    if (iter->Done()) {
      // Don't call AccumulateSingleSample because that updates sum and count
      // which was already done by the caller of this method.
      if (single_sample().Accumulate(
              dest_index, op == HistogramSamples::ADD ? count : -count)) {
        // Handle race-condition that mounted counts storage between above and
        // here.
        if (counts())
          MoveSingleSampleToCounts();
        return true;
      }
    }

    // The counts storage will be needed to hold the multiple incoming values.
    MountCountsStorageAndMoveSingleSample();
  }

  // Go through the iterator and add the counts into correct bucket.
  while (true) {
    // Ensure that the sample's min/max match the ranges min/max.
    if (min != bucket_ranges_->range(dest_index) ||
        max != bucket_ranges_->range(dest_index + 1)) {
      NOTREACHED() << "sample=" << min << "," << max
                   << "; range=" << bucket_ranges_->range(dest_index) << ","
                   << bucket_ranges_->range(dest_index + 1);
      return false;
    }

    // Sample's bucket matches exactly. Adjust count.
    subtle::NoBarrier_AtomicIncrement(
        &counts()[dest_index], op == HistogramSamples::ADD ? count : -count);

    // Advance to the next iterable sample. See comments above for how
    // everything works.
    if (iter->Done())
      return true;
    iter->Get(&min, &max, &count);
    if (iter->GetBucketIndex(&iter_index)) {
      // Destination bucket is a known offset from the source bucket.
      dest_index = iter_index + index_offset;
    } else {
      // Destination bucket has to be determined anew each time.
      dest_index = GetBucketIndex(min);
    }
    if (dest_index >= counts_size())
      return false;
    iter->Next();
  }
}
215
216// Use simple binary search. This is very general, but there are better
217// approaches if we knew that the buckets were linearly distributed.
bcwhitefa8485b2017-05-01 16:43:25218size_t SampleVectorBase::GetBucketIndex(Sample value) const {
[email protected]15ce3842013-06-27 14:38:45219 size_t bucket_count = bucket_ranges_->bucket_count();
[email protected]2f7d9cd2012-09-22 03:42:12220 CHECK_GE(bucket_count, 1u);
221 CHECK_GE(value, bucket_ranges_->range(0));
222 CHECK_LT(value, bucket_ranges_->range(bucket_count));
223
224 size_t under = 0;
225 size_t over = bucket_count;
226 size_t mid;
227 do {
228 DCHECK_GE(over, under);
229 mid = under + (over - under)/2;
230 if (mid == under)
231 break;
232 if (bucket_ranges_->range(mid) <= value)
233 under = mid;
234 else
235 over = mid;
236 } while (true);
237
238 DCHECK_LE(bucket_ranges_->range(mid), value);
239 CHECK_GT(bucket_ranges_->range(mid + 1), value);
240 return mid;
241}
242
// Transfers any value held in the embedded single-sample into the (already
// mounted) counts storage and permanently disables the single-sample.
void SampleVectorBase::MoveSingleSampleToCounts() {
  DCHECK(counts());

  // Disable the single-sample since there is now counts storage for the data.
  SingleSample sample = single_sample().Extract(/*disable=*/true);

  // Stop here if there is no "count" as trying to find the bucket index of
  // an invalid (including zero) "value" will crash.
  if (sample.count == 0)
    return;

  // Move the value into storage. Sum and redundant-count already account
  // for this entry so no need to call IncreaseSumAndCount().
  subtle::NoBarrier_AtomicIncrement(&counts()[sample.bucket], sample.count);
}
258
// Creates (or attaches) full counts storage using double-checked locking and
// then migrates any single-sample value into it.
void SampleVectorBase::MountCountsStorageAndMoveSingleSample() {
  // There are many SampleVector objects and the lock is needed very
  // infrequently (just when advancing from single-sample to multi-sample) so
  // define a single, global lock that all can use. This lock only prevents
  // concurrent entry into the code below; access and updates to |counts_|
  // still requires atomic operations.
  static LazyInstance<Lock>::Leaky counts_lock = LAZY_INSTANCE_INITIALIZER;
  // Double-checked locking: test once outside the lock, then again inside.
  if (subtle::NoBarrier_Load(&counts_) == 0) {
    AutoLock lock(counts_lock.Get());
    if (subtle::NoBarrier_Load(&counts_) == 0) {
      // Create the actual counts storage while the above lock is acquired.
      HistogramBase::Count* counts = CreateCountsStorageWhileLocked();
      DCHECK(counts);

      // Point |counts_| to the newly created storage. This is done while
      // locked to prevent possible concurrent calls to CreateCountsStorage
      // but, between that call and here, other threads could notice the
      // existence of the storage and race with this to set_counts(). That's
      // okay because (a) it's atomic and (b) it always writes the same value.
      set_counts(counts);
    }
  }

  // Move any single-sample into the newly mounted storage.
  MoveSingleSampleToCounts();
}
285
// Convenience constructor for a sample vector that needs no histogram id.
SampleVector::SampleVector(const BucketRanges* bucket_ranges)
    : SampleVector(0, bucket_ranges) {}
288
// Heap-backed sample vector. The LocalMetadata allocated here is freed by
// the destructor below.
SampleVector::SampleVector(uint64_t id, const BucketRanges* bucket_ranges)
    : SampleVectorBase(id, new LocalMetadata(), bucket_ranges) {}
bcwhitefa8485b2017-05-01 16:43:25291
SampleVector::~SampleVector() {
  // The base class holds only a raw Metadata pointer; the LocalMetadata that
  // the constructor allocated must be freed here.
  delete static_cast<LocalMetadata*>(meta());
}
bcwhitefa8485b2017-05-01 16:43:25295
// Local (heap) storage is only ever created on demand by
// CreateCountsStorageWhileLocked(), so there is nothing external to mount.
bool SampleVector::MountExistingCountsStorage() const {
  // There is never any existing storage other than what is already in use.
  return counts() != nullptr;
}
300
301HistogramBase::AtomicCount* SampleVector::CreateCountsStorageWhileLocked() {
302 local_counts_.resize(counts_size());
303 return &local_counts_[0];
304}
305
// Constructs a sample vector backed by (possibly read-only) persistent
// memory described by |counts|; |meta| and |bucket_ranges| must outlive it.
PersistentSampleVector::PersistentSampleVector(
    uint64_t id,
    const BucketRanges* bucket_ranges,
    Metadata* meta,
    const DelayedPersistentAllocation& counts)
    : SampleVectorBase(id, meta, bucket_ranges), persistent_counts_(counts) {
  // Only mount the full storage if the single-sample has been disabled.
  // Otherwise, it is possible for this object instance to start using (empty)
  // storage that was created incidentally while another instance continues to
  // update to the single sample. This "incidental creation" can happen because
  // the memory is a DelayedPersistentAllocation which allows multiple memory
  // blocks within it and applies an all-or-nothing approach to the allocation.
  // Thus, a request elsewhere for one of the _other_ blocks would make _this_
  // block available even though nothing has explicitly requested it.
  //
  // Note that it's not possible for the ctor to mount existing storage and
  // move any single-sample to it because sometimes the persistent memory is
  // read-only. Only non-const methods (which assume that memory is read/write)
  // can do that.
  if (single_sample().IsDisabled()) {
    bool success = MountExistingCountsStorage();
    DCHECK(success);
  }
}
330
Chris Watkinsbb7211c2017-11-29 07:16:38331PersistentSampleVector::~PersistentSampleVector() = default;
bcwhitefa8485b2017-05-01 16:43:25332
// Attaches previously-allocated persistent counts storage, if any. Returns
// true when counts storage is usable afterwards.
bool PersistentSampleVector::MountExistingCountsStorage() const {
  // There is no early exit if counts is not yet mounted because, given that
  // this is a virtual function, it's more efficient to do that at the call-
  // site. There is no danger, however, should this get called anyway (perhaps
  // because of a race condition) because at worst the |counts_| value would
  // be over-written (in an atomic manner) with the exact same address.

  if (!persistent_counts_.reference())
    return false;  // Nothing to mount.

  // Mount the counts array in position.
  set_counts(
      static_cast<HistogramBase::AtomicCount*>(persistent_counts_.Get()));

  // The above shouldn't fail but can if the data is corrupt or incomplete.
  return counts() != nullptr;
}
350
351HistogramBase::AtomicCount*
352PersistentSampleVector::CreateCountsStorageWhileLocked() {
353 void* mem = persistent_counts_.Get();
354 if (!mem) {
355 // The above shouldn't fail but can if Bad Things(tm) are occurring in the
356 // persistent allocator. Crashing isn't a good option so instead just
357 // allocate something from the heap and return that. There will be no
358 // sharing or persistence but worse things are already happening.
359 return new HistogramBase::AtomicCount[counts_size()];
360 }
361
362 return static_cast<HistogramBase::AtomicCount*>(mem);
363}
364
bcwhiteb036e4322015-12-10 18:36:34365SampleVectorIterator::SampleVectorIterator(
366 const std::vector<HistogramBase::AtomicCount>* counts,
367 const BucketRanges* bucket_ranges)
368 : counts_(&(*counts)[0]),
369 counts_size_(counts->size()),
[email protected]2f7d9cd2012-09-22 03:42:12370 bucket_ranges_(bucket_ranges),
371 index_(0) {
Brian White8f9bfb62017-08-02 19:21:59372 DCHECK_GE(bucket_ranges_->bucket_count(), counts_size_);
bcwhiteb036e4322015-12-10 18:36:34373 SkipEmptyBuckets();
374}
375
// Iterates over a raw counts array of |counts_size| entries. Callers pass a
// null |counts| together with a zero size to represent empty storage.
SampleVectorIterator::SampleVectorIterator(
    const HistogramBase::AtomicCount* counts,
    size_t counts_size,
    const BucketRanges* bucket_ranges)
    : counts_(counts),
      counts_size_(counts_size),
      bucket_ranges_(bucket_ranges),
      index_(0) {
  DCHECK_GE(bucket_ranges_->bucket_count(), counts_size_);
  SkipEmptyBuckets();
}
387
Chris Watkinsbb7211c2017-11-29 07:16:38388SampleVectorIterator::~SampleVectorIterator() = default;
[email protected]b4af2ec2012-10-05 21:29:44389
[email protected]2f7d9cd2012-09-22 03:42:12390bool SampleVectorIterator::Done() const {
bcwhiteb036e4322015-12-10 18:36:34391 return index_ >= counts_size_;
[email protected]2f7d9cd2012-09-22 03:42:12392}
393
// Advances to the next non-empty bucket. Must not be called when Done().
void SampleVectorIterator::Next() {
  DCHECK(!Done());
  index_++;
  SkipEmptyBuckets();
}
399
400void SampleVectorIterator::Get(HistogramBase::Sample* min,
asvitkine3f17b1462017-05-03 21:37:37401 int64_t* max,
[email protected]2f7d9cd2012-09-22 03:42:12402 HistogramBase::Count* count) const {
403 DCHECK(!Done());
Ivan Kotenkova16212a52017-11-08 12:37:33404 if (min != nullptr)
[email protected]2f7d9cd2012-09-22 03:42:12405 *min = bucket_ranges_->range(index_);
Ivan Kotenkova16212a52017-11-08 12:37:33406 if (max != nullptr)
asvitkine3f17b1462017-05-03 21:37:37407 *max = strict_cast<int64_t>(bucket_ranges_->range(index_ + 1));
Ivan Kotenkova16212a52017-11-08 12:37:33408 if (count != nullptr)
bcwhiteb036e4322015-12-10 18:36:34409 *count = subtle::NoBarrier_Load(&counts_[index_]);
[email protected]2f7d9cd2012-09-22 03:42:12410}
411
412bool SampleVectorIterator::GetBucketIndex(size_t* index) const {
413 DCHECK(!Done());
Ivan Kotenkova16212a52017-11-08 12:37:33414 if (index != nullptr)
[email protected]2f7d9cd2012-09-22 03:42:12415 *index = index_;
416 return true;
417}
418
419void SampleVectorIterator::SkipEmptyBuckets() {
420 if (Done())
421 return;
422
bcwhiteb036e4322015-12-10 18:36:34423 while (index_ < counts_size_) {
424 if (subtle::NoBarrier_Load(&counts_[index_]) != 0)
[email protected]2f7d9cd2012-09-22 03:42:12425 return;
426 index_++;
427 }
428}
429
430} // namespace base