arch/mips/kernel/cmpxchg.c


DEFINITIONS

This source file includes the following definitions:
  1. __xchg_small
  2. __cmpxchg_small

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2017 Imagination Technologies
 * Author: Paul Burton <paul.burton@mips.com>
 */

#include <linux/bitops.h>
#include <asm/cmpxchg.h>

unsigned long __xchg_small(volatile void *ptr, unsigned long val, unsigned int size)
{
	u32 old32, new32, load32, mask;
	volatile u32 *ptr32;
	unsigned int shift;

	/* Check that ptr is naturally aligned */
	WARN_ON((unsigned long)ptr & (size - 1));

	/* Mask value to the correct size. */
	mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
	val &= mask;

	/*
	 * Calculate a shift & mask that correspond to the value we wish to
	 * exchange within the naturally aligned 4 byte integer that includes
	 * it.
	 */
	shift = (unsigned long)ptr & 0x3;
	if (IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
		shift ^= sizeof(u32) - size;
	shift *= BITS_PER_BYTE;
	mask <<= shift;
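	/*
	 * Worked example: a 1-byte value at ptr offset 2. mask starts as
	 * 0xff. Little-endian: shift == 16, mask becomes 0x00ff0000.
	 * Big-endian: shift == ((2 ^ 3) * 8) == 8 and mask becomes
	 * 0x0000ff00, since byte 2 of a big-endian word holds bits 15:8.
	 */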

	/*
	 * Calculate a pointer to the naturally aligned 4 byte integer that
	 * includes our byte of interest, and load its value.
	 */
	ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);
	load32 = *ptr32;

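	/*
	 * Retry loop: the 32-bit cmpxchg() fails if any byte of the
	 * containing word changed between our load and the exchange, even
	 * a byte outside the region we are operating on. cmpxchg() returns
	 * the word's current value, so on failure we simply retry with
	 * that fresh value.
	 */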
	do {
		old32 = load32;
		new32 = (load32 & ~mask) | (val << shift);
		load32 = cmpxchg(ptr32, old32, new32);
	} while (load32 != old32);

	return (load32 & mask) >> shift;
}
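
A usage sketch (not part of this file): on MIPS the xchg() macro in
asm/cmpxchg.h dispatches on sizeof(*ptr), and 1- and 2-byte operands fall
back to __xchg_small() above. The variable "ticket" is hypothetical.

	static u16 ticket = 42;
	u16 prev;

	/* sizeof(ticket) == 2, so this routes through __xchg_small(). */
	prev = xchg(&ticket, 7);	/* prev == 42, ticket == 7 */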

unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
			      unsigned long new, unsigned int size)
{
	u32 mask, old32, new32, load32, load;
	volatile u32 *ptr32;
	unsigned int shift;

	/* Check that ptr is naturally aligned */
	WARN_ON((unsigned long)ptr & (size - 1));

	/* Mask inputs to the correct size. */
	mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
	old &= mask;
	new &= mask;

	/*
	 * Calculate a shift & mask that correspond to the value we wish to
	 * compare & exchange within the naturally aligned 4 byte integer
	 * that includes it.
	 */
	shift = (unsigned long)ptr & 0x3;
	if (IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
		shift ^= sizeof(u32) - size;
	shift *= BITS_PER_BYTE;
	mask <<= shift;
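	/*
	 * Worked example: a 2-byte value at ptr offset 2. mask starts as
	 * 0xffff. Little-endian: shift == 16, mask becomes 0xffff0000.
	 * Big-endian: shift == ((2 ^ 2) * 8) == 0 and mask stays 0x0000ffff,
	 * since bytes 2-3 of a big-endian word hold bits 15:0.
	 */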

	/*
	 * Calculate a pointer to the naturally aligned 4 byte integer that
	 * includes our byte of interest, and load its value.
	 */
	ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);
	load32 = *ptr32;

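	/*
	 * Two distinct failure modes are handled below: if our bytes no
	 * longer match the expected old value we bail out and report what
	 * we saw, but if only a neighbouring byte of the word changed (so
	 * the 32-bit cmpxchg() fails even though our bytes still match)
	 * we retry with the freshly returned word.
	 */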
	while (true) {
		/*
		 * Ensure the byte we want to exchange matches the expected
		 * old value, and if not then bail.
		 */
		load = (load32 & mask) >> shift;
		if (load != old)
			return load;

		/*
		 * Calculate the old & new values of the naturally aligned
		 * 4 byte integer that include the byte we want to exchange.
		 * Attempt to exchange the old value for the new value, and
		 * return if we succeed.
		 */
		old32 = (load32 & ~mask) | (old << shift);
		new32 = (load32 & ~mask) | (new << shift);
		load32 = cmpxchg(ptr32, old32, new32);
		if (load32 == old32)
			return old;
	}
}
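
A usage sketch (not part of this file): cmpxchg() on a 1- or 2-byte operand
reaches __cmpxchg_small() the same way. try_claim() and state below are
hypothetical; cmpxchg() returns the value it found at the pointer, so
comparing that against the expected old value tells the caller whether the
exchange actually happened.

	static u8 state;

	/* Atomically move state from 0 to 1; returns true on success. */
	static bool try_claim(void)
	{
		/* sizeof(state) == 1, so this lands in __cmpxchg_small(). */
		return cmpxchg(&state, 0, 1) == 0;
	}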
