root/arch/arm64/include/asm/futex.h


DEFINITIONS

This source file includes the following definitions:
  1. arch_futex_atomic_op_inuser
  2. futex_atomic_cmpxchg_inatomic

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS 128 /* What's the largest number you can think of? */

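/*
 * LL/SC read-modify-write on a user futex word.
 *
 * 'insn' computes the new value (tmp, %w3) from the old value (oldval, %w1)
 * and 'oparg' (%w5). uaccess_enable()/uaccess_disable() open and close the
 * user-access window around the sequence. A failed store-exclusive is
 * retried up to FUTEX_MAX_LOOPS times before giving up with -EAGAIN (%w7)
 * rather than spinning indefinitely; a faulting access is redirected via the
 * exception table to the .fixup entry, which returns -EFAULT (%w6). On
 * success 'ret' is 0 and 'oldval' holds the previous value of the word.
 */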
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)	\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable();						\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w7\n"						\
"3:\n"									\
"	dmb	ish\n"							\
"	.pushsection .fixup,\"ax\"\n"					\
"	.align	2\n"							\
"4:	mov	%w0, %w6\n"						\
"	b	3b\n"							\
"	.popsection\n"							\
	_ASM_EXTABLE(1b, 4b)						\
	_ASM_EXTABLE(2b, 4b)						\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN)			\
	: "memory");							\
	uaccess_disable();						\
} while (0)

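/*
 * Atomically apply the operation encoded by 'op' (FUTEX_OP_SET/ADD/OR/
 * ANDN/XOR) with 'oparg' to the user word at '_uaddr' (masked with
 * __uaccess_mask_ptr() before use), with page faults disabled. On success
 * the previous value is returned through 'oval'. Unknown ops yield -ENOSYS;
 * -EFAULT and -EAGAIN come from __futex_atomic_op() above.
 */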
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

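/*
 * Compare-and-exchange on the user futex word: store 'newval' only if the
 * current value equals 'oldval'. In all non-error cases the value observed
 * is returned through 'uval', so a failed comparison shows up as ret == 0
 * with *uval != oldval. Returns -EFAULT on a bad user address or fault, and
 * -EAGAIN if the store-exclusive still fails after FUTEX_MAX_LOOPS attempts.
 */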
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w8\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
"	.pushsection .fixup,\"ax\"\n"
"5:	mov	%w0, %w7\n"
"	b	4b\n"
"	.popsection\n"
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable();

	if (!ret)
		*uval = val;

	return ret;
}

#endif /* __ASM_FUTEX_H */
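A minimal usage sketch, assuming a kernel context where <linux/futex.h> and this header are available; the wrapper name futex_user_inc() is hypothetical and is not defined by this file or by the generic futex code, which handles retries and page fault-in itself:

/* Hypothetical helper: atomically increment a user futex word. */
static int futex_user_inc(u32 __user *uaddr, int *old)
{
	int ret;

	/* __futex_atomic_op() bounds its LL/SC loop; retry if it gave up. */
	do {
		ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, old, uaddr);
	} while (ret == -EAGAIN);

	return ret;	/* 0 with *old set, or -EFAULT on a faulting access */
}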
