include/asm-generic/atomic-instrumented.h

DEFINITIONS

This source file includes the following definitions:
  1. atomic_read
  2. atomic_read_acquire
  3. atomic_set
  4. atomic_set_release
  5. atomic_add
  6. atomic_add_return
  7. atomic_add_return_acquire
  8. atomic_add_return_release
  9. atomic_add_return_relaxed
  10. atomic_fetch_add
  11. atomic_fetch_add_acquire
  12. atomic_fetch_add_release
  13. atomic_fetch_add_relaxed
  14. atomic_sub
  15. atomic_sub_return
  16. atomic_sub_return_acquire
  17. atomic_sub_return_release
  18. atomic_sub_return_relaxed
  19. atomic_fetch_sub
  20. atomic_fetch_sub_acquire
  21. atomic_fetch_sub_release
  22. atomic_fetch_sub_relaxed
  23. atomic_inc
  24. atomic_inc_return
  25. atomic_inc_return_acquire
  26. atomic_inc_return_release
  27. atomic_inc_return_relaxed
  28. atomic_fetch_inc
  29. atomic_fetch_inc_acquire
  30. atomic_fetch_inc_release
  31. atomic_fetch_inc_relaxed
  32. atomic_dec
  33. atomic_dec_return
  34. atomic_dec_return_acquire
  35. atomic_dec_return_release
  36. atomic_dec_return_relaxed
  37. atomic_fetch_dec
  38. atomic_fetch_dec_acquire
  39. atomic_fetch_dec_release
  40. atomic_fetch_dec_relaxed
  41. atomic_and
  42. atomic_fetch_and
  43. atomic_fetch_and_acquire
  44. atomic_fetch_and_release
  45. atomic_fetch_and_relaxed
  46. atomic_andnot
  47. atomic_fetch_andnot
  48. atomic_fetch_andnot_acquire
  49. atomic_fetch_andnot_release
  50. atomic_fetch_andnot_relaxed
  51. atomic_or
  52. atomic_fetch_or
  53. atomic_fetch_or_acquire
  54. atomic_fetch_or_release
  55. atomic_fetch_or_relaxed
  56. atomic_xor
  57. atomic_fetch_xor
  58. atomic_fetch_xor_acquire
  59. atomic_fetch_xor_release
  60. atomic_fetch_xor_relaxed
  61. atomic_xchg
  62. atomic_xchg_acquire
  63. atomic_xchg_release
  64. atomic_xchg_relaxed
  65. atomic_cmpxchg
  66. atomic_cmpxchg_acquire
  67. atomic_cmpxchg_release
  68. atomic_cmpxchg_relaxed
  69. atomic_try_cmpxchg
  70. atomic_try_cmpxchg_acquire
  71. atomic_try_cmpxchg_release
  72. atomic_try_cmpxchg_relaxed
  73. atomic_sub_and_test
  74. atomic_dec_and_test
  75. atomic_inc_and_test
  76. atomic_add_negative
  77. atomic_fetch_add_unless
  78. atomic_add_unless
  79. atomic_inc_not_zero
  80. atomic_inc_unless_negative
  81. atomic_dec_unless_positive
  82. atomic_dec_if_positive
  83. atomic64_read
  84. atomic64_read_acquire
  85. atomic64_set
  86. atomic64_set_release
  87. atomic64_add
  88. atomic64_add_return
  89. atomic64_add_return_acquire
  90. atomic64_add_return_release
  91. atomic64_add_return_relaxed
  92. atomic64_fetch_add
  93. atomic64_fetch_add_acquire
  94. atomic64_fetch_add_release
  95. atomic64_fetch_add_relaxed
  96. atomic64_sub
  97. atomic64_sub_return
  98. atomic64_sub_return_acquire
  99. atomic64_sub_return_release
  100. atomic64_sub_return_relaxed
  101. atomic64_fetch_sub
  102. atomic64_fetch_sub_acquire
  103. atomic64_fetch_sub_release
  104. atomic64_fetch_sub_relaxed
  105. atomic64_inc
  106. atomic64_inc_return
  107. atomic64_inc_return_acquire
  108. atomic64_inc_return_release
  109. atomic64_inc_return_relaxed
  110. atomic64_fetch_inc
  111. atomic64_fetch_inc_acquire
  112. atomic64_fetch_inc_release
  113. atomic64_fetch_inc_relaxed
  114. atomic64_dec
  115. atomic64_dec_return
  116. atomic64_dec_return_acquire
  117. atomic64_dec_return_release
  118. atomic64_dec_return_relaxed
  119. atomic64_fetch_dec
  120. atomic64_fetch_dec_acquire
  121. atomic64_fetch_dec_release
  122. atomic64_fetch_dec_relaxed
  123. atomic64_and
  124. atomic64_fetch_and
  125. atomic64_fetch_and_acquire
  126. atomic64_fetch_and_release
  127. atomic64_fetch_and_relaxed
  128. atomic64_andnot
  129. atomic64_fetch_andnot
  130. atomic64_fetch_andnot_acquire
  131. atomic64_fetch_andnot_release
  132. atomic64_fetch_andnot_relaxed
  133. atomic64_or
  134. atomic64_fetch_or
  135. atomic64_fetch_or_acquire
  136. atomic64_fetch_or_release
  137. atomic64_fetch_or_relaxed
  138. atomic64_xor
  139. atomic64_fetch_xor
  140. atomic64_fetch_xor_acquire
  141. atomic64_fetch_xor_release
  142. atomic64_fetch_xor_relaxed
  143. atomic64_xchg
  144. atomic64_xchg_acquire
  145. atomic64_xchg_release
  146. atomic64_xchg_relaxed
  147. atomic64_cmpxchg
  148. atomic64_cmpxchg_acquire
  149. atomic64_cmpxchg_release
  150. atomic64_cmpxchg_relaxed
  151. atomic64_try_cmpxchg
  152. atomic64_try_cmpxchg_acquire
  153. atomic64_try_cmpxchg_release
  154. atomic64_try_cmpxchg_relaxed
  155. atomic64_sub_and_test
  156. atomic64_dec_and_test
  157. atomic64_inc_and_test
  158. atomic64_add_negative
  159. atomic64_fetch_add_unless
  160. atomic64_add_unless
  161. atomic64_inc_not_zero
  162. atomic64_inc_unless_negative
  163. atomic64_dec_unless_positive
  164. atomic64_dec_if_positive

// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read(), which forwards
 * to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */
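/*
 * Illustration only (not part of the generated file): a minimal arch-side
 * definition might look like
 *
 *        static __always_inline int arch_atomic_read(const atomic_t *v)
 *        {
 *                return READ_ONCE(v->counter);
 *        }
 *
 * with <asm-generic/atomic-instrumented.h> included at the end of the
 * arch's atomic.h, after all arch_atomic*() operations are defined.
 */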
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static inline int
atomic_read(const atomic_t *v)
{
        kasan_check_read(v, sizeof(*v));
        return arch_atomic_read(v);
}
#define atomic_read atomic_read

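/*
 * Each wrapper also #defines its own name (e.g. atomic_read above) so that
 * later generic code can test with #ifdef which operations have been
 * provided. Wrappers guarded by #if defined(arch_*) below are only emitted
 * when the arch supplies that operation; missing variants are filled in
 * elsewhere by the generic fallback code (e.g. linux/atomic-fallback.h).
 */
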
#if defined(arch_atomic_read_acquire)
static inline int
atomic_read_acquire(const atomic_t *v)
{
        kasan_check_read(v, sizeof(*v));
        return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif

static inline void
atomic_set(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_set(v, i);
}
#define atomic_set atomic_set

#if defined(arch_atomic_set_release)
static inline void
atomic_set_release(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif

static inline void
atomic_add(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_add(i, v);
}
#define atomic_add atomic_add

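/*
 * The "#if !defined(arch_<op>_relaxed) || defined(arch_<op>)" pattern below
 * means: wrap the fully ordered op either when the arch provides no _relaxed
 * form (so the base op must exist) or when it explicitly provides the fully
 * ordered form. If only the _relaxed form exists, the ordered variants are
 * generated from it by the fallback code instead.
 */
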
#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static inline int
atomic_add_return(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif

#if defined(arch_atomic_add_return_acquire)
static inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_return_acquire(i, v);
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#if defined(arch_atomic_add_return_release)
static inline int
atomic_add_return_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_return_release(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#if defined(arch_atomic_add_return_relaxed)
static inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif

#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static inline int
atomic_fetch_add(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif

static inline void
atomic_sub(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static inline int
atomic_sub_return(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static inline int
atomic_sub_return_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif

#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static inline int
atomic_fetch_sub(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif

#if defined(arch_atomic_inc)
static inline void
atomic_inc(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static inline int
atomic_inc_return(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static inline int
atomic_inc_return_release(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static inline int
atomic_inc_return_relaxed(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static inline int
atomic_fetch_inc(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#if defined(arch_atomic_dec)
static inline void
atomic_dec(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static inline int
atomic_dec_return(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static inline int
atomic_dec_return_release(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static inline int
atomic_dec_return_relaxed(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static inline int
atomic_fetch_dec(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

static inline void
atomic_and(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static inline int
atomic_fetch_and(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif

#if defined(arch_atomic_andnot)
static inline void
atomic_andnot(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

static inline void
atomic_or(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static inline int
atomic_fetch_or(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif

static inline void
atomic_xor(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static inline int
atomic_fetch_xor(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif

#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static inline int
atomic_xchg(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static inline int
atomic_xchg_release(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif

#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif

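/*
 * try_cmpxchg() also instruments @old: on failure the current value of @v
 * is written back through the @old pointer, so both locations are checked
 * as writes here.
 */
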
#if defined(arch_atomic_try_cmpxchg)
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

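/*
 * The *_and_test()/add_negative() wrappers return the boolean result of
 * the arch op, e.g. atomic_dec_and_test() is true iff the new value is 0.
 */
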
#if defined(arch_atomic_sub_and_test)
static inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static inline bool
atomic_dec_and_test(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static inline bool
atomic_inc_and_test(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static inline bool
atomic_add_negative(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif

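/*
 * Conditional operations: atomic_fetch_add_unless(v, a, u) adds @a to @v
 * unless @v was @u and returns the old value; atomic_add_unless() returns
 * whether the addition actually happened.
 */
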
#if defined(arch_atomic_fetch_add_unless)
static inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static inline bool
atomic_inc_not_zero(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static inline bool
atomic_inc_unless_negative(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static inline bool
atomic_dec_unless_positive(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static inline int
atomic_dec_if_positive(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif

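/*
 * The atomic64_t wrappers below mirror the atomic_t ones above, with s64
 * operands and arch_atomic64_*() as the backing operations.
 */
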
static inline s64
atomic64_read(const atomic64_t *v)
{
        kasan_check_read(v, sizeof(*v));
        return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static inline s64
atomic64_read_acquire(const atomic64_t *v)
{
        kasan_check_read(v, sizeof(*v));
        return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static inline void
atomic64_set(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif

static inline void
atomic64_add(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif

static inline void
atomic64_sub(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub

#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif

#if defined(arch_atomic64_inc)
static inline void
atomic64_inc(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static inline s64
atomic64_inc_return(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#if defined(arch_atomic64_dec)
static inline void
atomic64_dec(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static inline s64
atomic64_dec_return(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

static inline void
atomic64_and(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif

#if defined(arch_atomic64_andnot)
static inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

static inline void
atomic64_or(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif

static inline void
atomic64_xor(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif

#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif

#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_try_cmpxchg)
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_sub_and_test)
static inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static inline bool
atomic64_dec_and_test(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static inline bool
atomic64_inc_and_test(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif

#if defined(arch_atomic64_fetch_add_unless)
static inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif

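/*
 * xchg()/cmpxchg() and friends are type-generic, so they are wrapped as
 * macros rather than functions: typeof() captures the pointer type and
 * __ai_ptr evaluates the pointer expression exactly once before it is
 * both instrumented and passed on. Illustration only (hypothetical
 * caller):
 *
 *        unsigned long *p = ...;
 *        unsigned long old = cmpxchg(p, 0UL, 1UL);  // old value of *p
 */
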
1643 #if !defined(arch_xchg_relaxed) || defined(arch_xchg)
1644 #define xchg(ptr, ...)                                          \
1645 ({                                                                      \
1646         typeof(ptr) __ai_ptr = (ptr);                                   \
1647         kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
1648         arch_xchg(__ai_ptr, __VA_ARGS__);                               \
1649 })
1650 #endif
1651 
1652 #if defined(arch_xchg_acquire)
1653 #define xchg_acquire(ptr, ...)                                          \
1654 ({                                                                      \
1655         typeof(ptr) __ai_ptr = (ptr);                                   \
1656         kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
1657         arch_xchg_acquire(__ai_ptr, __VA_ARGS__);                               \
1658 })
1659 #endif
1660 
1661 #if defined(arch_xchg_release)
1662 #define xchg_release(ptr, ...)                                          \
1663 ({                                                                      \
1664         typeof(ptr) __ai_ptr = (ptr);                                   \
1665         kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
1666         arch_xchg_release(__ai_ptr, __VA_ARGS__);                               \
1667 })
1668 #endif
1669 
1670 #if defined(arch_xchg_relaxed)
1671 #define xchg_relaxed(ptr, ...)                                          \
1672 ({                                                                      \
1673         typeof(ptr) __ai_ptr = (ptr);                                   \
1674         kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
1675         arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);                               \
1676 })
1677 #endif
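/*
 * Usage sketch (hypothetical work-stealing slot): xchg() stores the new
 * value unconditionally and returns whatever was there before, here
 * claiming a pending item exactly once:
 */
struct example_work;

static inline struct example_work *example_claim(struct example_work **slot)
{
        return xchg(slot, NULL);        /* fully ordered swap */
}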

#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)                                       \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg(__ai_ptr, __VA_ARGS__);                    \
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)                               \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);            \
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)                               \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);            \
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)                               \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);            \
})
#endif
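/*
 * Usage sketch: cmpxchg() returns the value it found at *ptr, so
 * success is detected by comparing the return value against the
 * expected old value:
 */
static inline bool example_set_flag_once(unsigned long *flag)
{
        return cmpxchg(flag, 0UL, 1UL) == 0UL;  /* true for first caller */
}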

#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)                                     \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg64(__ai_ptr, __VA_ARGS__);                  \
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)                             \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);          \
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)                             \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);          \
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)                             \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);          \
})
#endif

#define cmpxchg_local(ptr, ...)                                 \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);              \
})

#define cmpxchg64_local(ptr, ...)                               \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);            \
})

#define sync_cmpxchg(ptr, ...)                                  \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));         \
        arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);               \
})

#define cmpxchg_double(ptr, ...)                                \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));     \
        arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);             \
})

#define cmpxchg_double_local(ptr, ...)                          \
({                                                              \
        typeof(ptr) __ai_ptr = (ptr);                           \
        kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));     \
        arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);       \
})
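/*
 * Note: cmpxchg_double() and cmpxchg_double_local() operate on a pair
 * of adjacent machine words, which is why the instrumentation above
 * checks a write of 2 * sizeof(*__ai_ptr) rather than a single element.
 */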

#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
// b29b625d5de9280f680e42c7be859b55b15e5f6a
