// Copyright 2011 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/390223051): Remove C-library calls to fix the errors.
#pragma allow_unsafe_libc_calls
#endif

#include "base/atomicops.h"

#include <stdint.h>
#include <string.h>

#include <type_traits>
#include <vector>

#include "base/containers/span.h"
#include "testing/gtest/include/gtest/gtest.h"

template <class AtomicType>
static void TestAtomicIncrement() {
  // For now, we only test single-threaded execution.

  // Use a guard value to make sure that NoBarrier_AtomicIncrement doesn't
  // write outside the expected address bounds. In particular, this checks
  // that a future change to the asm code doesn't cause the 32-bit
  // NoBarrier_AtomicIncrement to do the wrong thing on 64-bit machines.
  struct {
    AtomicType prev_word;
    AtomicType count;
    AtomicType next_word;
  } s;

  AtomicType prev_word_value, next_word_value;
  memset(&prev_word_value, 0xFF, sizeof(AtomicType));
  memset(&next_word_value, 0xEE, sizeof(AtomicType));

  s.prev_word = prev_word_value;
  s.count = 0;
  s.next_word = next_word_value;

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, 1), 1);
  EXPECT_EQ(s.count, 1);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, 2), 3);
  EXPECT_EQ(s.count, 3);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, 3), 6);
  EXPECT_EQ(s.count, 6);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, -3), 3);
  EXPECT_EQ(s.count, 3);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, -2), 1);
  EXPECT_EQ(s.count, 1);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, -1), 0);
  EXPECT_EQ(s.count, 0);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, -1), -1);
  EXPECT_EQ(s.count, -1);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, -4), -5);
  EXPECT_EQ(s.count, -5);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);

  EXPECT_EQ(base::subtle::NoBarrier_AtomicIncrement(&s.count, 5), 0);
  EXPECT_EQ(s.count, 0);
  EXPECT_EQ(s.prev_word, prev_word_value);
  EXPECT_EQ(s.next_word, next_word_value);
}

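// Number of value bits in T; assumes 8-bit bytes, which holds on all
// platforms Chromium targets.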
#define NUM_BITS(T) (sizeof(T) * 8)

template <class AtomicType>
static void TestCompareAndSwap() {
  AtomicType value = 0;
  AtomicType prev = base::subtle::NoBarrier_CompareAndSwap(&value, 0, 1);
  EXPECT_EQ(1, value);
  EXPECT_EQ(0, prev);

  // Verify that CAS will *not* change "value" if it doesn't match the
  // expected number. CAS will always return the actual value of the
  // variable from before any change.
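  // Put differently, the swap succeeded if and only if the returned value is
  // equal to the expected value that was passed in.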
  AtomicType fail = base::subtle::NoBarrier_CompareAndSwap(&value, 0, 2);
  EXPECT_EQ(1, value);
  EXPECT_EQ(1, fail);

  // Use a test value that has non-zero bits in both halves, mainly to
  // exercise the 64-bit implementation on 32-bit platforms.
  const AtomicType k_test_val =
      (static_cast<uint64_t>(1) << (NUM_BITS(AtomicType) - 2)) + 11;
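  // For Atomic32 this evaluates to (1 << 30) + 11 == 0x4000000B, so both
  // 16-bit halves are non-zero; for a 64-bit type it is (1 << 62) + 11.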
  value = k_test_val;
  prev = base::subtle::NoBarrier_CompareAndSwap(&value, 0, 5);
  EXPECT_EQ(k_test_val, value);
  EXPECT_EQ(k_test_val, prev);

  value = k_test_val;
  prev = base::subtle::NoBarrier_CompareAndSwap(&value, k_test_val, 5);
  EXPECT_EQ(5, value);
  EXPECT_EQ(k_test_val, prev);
}

template <class AtomicType>
static void TestAtomicExchange() {
  AtomicType value = 0;
  AtomicType new_value = base::subtle::NoBarrier_AtomicExchange(&value, 1);
  EXPECT_EQ(1, value);
  EXPECT_EQ(0, new_value);

  // Use a test value that has non-zero bits in both halves, mainly to
  // exercise the 64-bit implementation on 32-bit platforms.
  const AtomicType k_test_val =
      (static_cast<uint64_t>(1) << (NUM_BITS(AtomicType) - 2)) + 11;
  value = k_test_val;
  new_value = base::subtle::NoBarrier_AtomicExchange(&value, k_test_val);
  EXPECT_EQ(k_test_val, value);
  EXPECT_EQ(k_test_val, new_value);

  value = k_test_val;
  new_value = base::subtle::NoBarrier_AtomicExchange(&value, 5);
  EXPECT_EQ(5, value);
  EXPECT_EQ(k_test_val, new_value);
}

template <class AtomicType>
static void TestAtomicIncrementBounds() {
  // Test at the rollover boundary between int_max and int_min.
  AtomicType test_val =
      (static_cast<uint64_t>(1) << (NUM_BITS(AtomicType) - 1));
  AtomicType value = -1 ^ test_val;
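  // For Atomic32, test_val is 0x80000000 (INT32_MIN when viewed as signed) and
  // value starts at 0x7FFFFFFF (INT32_MAX), so the increment below must wrap
  // around to INT32_MIN.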
  AtomicType new_value = base::subtle::NoBarrier_AtomicIncrement(&value, 1);
  EXPECT_EQ(test_val, value);
  EXPECT_EQ(value, new_value);

  base::subtle::NoBarrier_AtomicIncrement(&value, -1);
  EXPECT_EQ(-1 ^ test_val, value);

  // Test at the 32-bit boundary for a 64-bit atomic type.
  test_val = static_cast<uint64_t>(1) << (NUM_BITS(AtomicType) / 2);
  value = test_val - 1;
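  // For a 64-bit AtomicType, value starts at 0xFFFFFFFF and the increment must
  // carry into the upper 32 bits; for Atomic32 the boundary is 1 << 16.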
  new_value = base::subtle::NoBarrier_AtomicIncrement(&value, 1);
  EXPECT_EQ(test_val, value);
  EXPECT_EQ(value, new_value);

  base::subtle::NoBarrier_AtomicIncrement(&value, -1);
  EXPECT_EQ(test_val - 1, value);
}

// Return an AtomicType with the value 0xa5a5a5..
template <class AtomicType>
static AtomicType TestFillValue() {
  AtomicType val = 0;
  memset(&val, 0xa5, sizeof(AtomicType));
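  // Every byte of |val| is now 0xa5, e.g. 0xa5a5a5a5 for Atomic32.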
  return val;
}

// This is a simple sanity check that values are correct. It does not test
// atomicity.
template <class AtomicType>
static void TestStore() {
  const AtomicType kVal1 = TestFillValue<AtomicType>();
  const AtomicType kVal2 = static_cast<AtomicType>(-1);

  AtomicType value;

  if constexpr (std::is_same_v<AtomicType, base::subtle::Atomic32>) {
    base::subtle::NoBarrier_Store(&value, kVal1);
    EXPECT_EQ(kVal1, value);
    base::subtle::NoBarrier_Store(&value, kVal2);
    EXPECT_EQ(kVal2, value);
  }

  base::subtle::Release_Store(&value, kVal1);
  EXPECT_EQ(kVal1, value);
  base::subtle::Release_Store(&value, kVal2);
  EXPECT_EQ(kVal2, value);
}

// This is a simple sanity check that values are correct. It does not test
// atomicity.
template <class AtomicType>
static void TestLoad() {
  const AtomicType kVal1 = TestFillValue<AtomicType>();
  const AtomicType kVal2 = static_cast<AtomicType>(-1);

  AtomicType value;

  value = kVal1;
  EXPECT_EQ(kVal1, base::subtle::NoBarrier_Load(&value));
  value = kVal2;
  EXPECT_EQ(kVal2, base::subtle::NoBarrier_Load(&value));

  value = kVal1;
  EXPECT_EQ(kVal1, base::subtle::Acquire_Load(&value));
  value = kVal2;
  EXPECT_EQ(kVal2, base::subtle::Acquire_Load(&value));
}

TEST(AtomicOpsTest, Inc) {
  TestAtomicIncrement<base::subtle::Atomic32>();
  TestAtomicIncrement<base::subtle::AtomicWord>();
}

TEST(AtomicOpsTest, CompareAndSwap) {
  TestCompareAndSwap<base::subtle::Atomic32>();
}

TEST(AtomicOpsTest, Exchange) {
  TestAtomicExchange<base::subtle::Atomic32>();
  TestAtomicExchange<base::subtle::AtomicWord>();
}

TEST(AtomicOpsTest, IncrementBounds) {
  TestAtomicIncrementBounds<base::subtle::Atomic32>();
  TestAtomicIncrementBounds<base::subtle::AtomicWord>();
}

TEST(AtomicOpsTest, Store) {
  TestStore<base::subtle::Atomic32>();
  TestStore<base::subtle::AtomicWord>();
}

TEST(AtomicOpsTest, Load) {
  TestLoad<base::subtle::Atomic32>();
  TestLoad<base::subtle::AtomicWord>();
}

TEST(AtomicOpsTest, RelaxedAtomicWriteMemcpy) {
  std::vector<uint8_t> src(17);
  for (size_t i = 0; i < src.size(); i++) {
    src[i] = i + 1;
  }

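  // Copy successively shorter tails of |src| into the front of a zeroed |dst|
  // and verify that exactly |bytes_to_copy| bytes were written, covering every
  // start offset and length from src.size() down to 1.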
  for (size_t i = 0; i < src.size(); i++) {
    std::vector<uint8_t> dst(src.size());
    size_t bytes_to_copy = src.size() - i;
    base::subtle::RelaxedAtomicWriteMemcpy(
        base::span(dst).first(bytes_to_copy),
        base::span(src).subspan(i, bytes_to_copy));
    for (size_t j = 0; j < bytes_to_copy; j++) {
      EXPECT_EQ(src[i + j], dst[j]);
    }
    for (size_t j = bytes_to_copy; j < dst.size(); j++) {
      EXPECT_EQ(0, dst[j]);
    }
  }
}