root/arch/x86/lib/atomic64_cx8_32.S

/* [<][>][^][v][top][bottom][index][help] */
   1 /* SPDX-License-Identifier: GPL-2.0-or-later */
   2 /*
   3  * atomic64_t for 586+
   4  *
   5  * Copyright © 2010  Luca Barbieri
   6  */
   7 
   8 #include <linux/linkage.h>
   9 #include <asm/alternative-asm.h>
  10 
   11 .macro read64 reg
/*
 * Atomically load the 64-bit value at (\reg) into %edx:%eax.
 *
 * Trick: cmpxchg8b compares %edx:%eax against the memory operand.  On a
 * mismatch it fetches the current memory value into %edx:%eax -- which is
 * the load we want.  %eax/%edx are seeded from %ebx/%ecx so that on an
 * accidental match the value stored back (%ecx:%ebx) equals what is
 * already in memory, making the write a no-op either way.
 * Clobbers: %eax, %edx, flags.
 */
   12         movl %ebx, %eax
   13         movl %ecx, %edx
   14 /* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
   15         LOCK_PREFIX
   16         cmpxchg8b (\reg)
   17 .endm
  18 
   19 ENTRY(atomic64_read_cx8)
/*
 * Atomic 64-bit read: pointer to the atomic64_t arrives in %ecx (it is
 * the register handed to read64); the value is returned in %edx:%eax.
 */
   20         read64 %ecx
   21         ret
   22 ENDPROC(atomic64_read_cx8)
  23 
   24 ENTRY(atomic64_set_cx8)
/*
 * Atomic 64-bit store: pointer in %esi, new value in %ecx:%ebx (the pair
 * cmpxchg8b stores on success).  %edx:%eax hold a guess of the current
 * value; each failed compare refreshes the guess, so the loop terminates
 * as soon as one cmpxchg8b sees an unchanged value.
 */
   25 1:
   26 /* we don't need LOCK_PREFIX since aligned 64-bit writes
   27  * are atomic on 586 and newer */
   28         cmpxchg8b (%esi)
   29         jne 1b                  /* guess was stale -- retry with fresh %edx:%eax */
   30 
   31         ret
   32 ENDPROC(atomic64_set_cx8)
  33 
   34 ENTRY(atomic64_xchg_cx8)
/*
 * Atomic 64-bit exchange: pointer in %esi, new value in %ecx:%ebx.
 * Same retry loop as atomic64_set_cx8 but LOCKed, so the compare+store
 * is one atomic step; on exit %edx:%eax hold the previous value
 * (the last value cmpxchg8b matched against).
 */
   35 1:
   36         LOCK_PREFIX
   37         cmpxchg8b (%esi)
   38         jne 1b                  /* lost a race -- %edx:%eax refreshed, retry */
   39 
   40         ret
   41 ENDPROC(atomic64_xchg_cx8)
  42 
   43 .macro addsub_return func ins insc
/*
 * Emit atomic64_{add,sub}_return_cx8: atomically *v op= a and return the
 * new value in %edx:%eax.  \ins/\insc are the low/high-word ops (add/adc
 * or sub/sbb).  On entry: a in %edx:%eax, v in %ecx -- grounded by the
 * moves below.  All four callee-saved registers used are preserved.
 */
   44 ENTRY(atomic64_\func\()_return_cx8)
   45         pushl %ebp
   46         pushl %ebx
   47         pushl %esi
   48         pushl %edi
   49 
   50         movl %eax, %esi         /* %edi:%esi = a (cmpxchg8b needs %edx:%eax) */
   51         movl %edx, %edi
   52         movl %ecx, %ebp         /* %ebp = v (%ecx is clobbered in the loop) */
   53 
   54         read64 %ecx
   55 1:
/* compute old + a (or old - a) into %ecx:%ebx, the cmpxchg8b store pair */
   56         movl %eax, %ebx
   57         movl %edx, %ecx
   58         \ins\()l %esi, %ebx
   59         \insc\()l %edi, %ecx    /* high word, consuming carry/borrow */
   60         LOCK_PREFIX
   61         cmpxchg8b (%ebp)
   62         jne 1b                  /* raced -- %edx:%eax refreshed, recompute */
   63 
   64 10:                             /* unreferenced label, apparently historical */
   65         movl %ebx, %eax         /* return the new value in %edx:%eax */
   66         movl %ecx, %edx
   67         popl %edi
   68         popl %esi
   69         popl %ebx
   70         popl %ebp
   71         ret
   72 ENDPROC(atomic64_\func\()_return_cx8)
   73 .endm
   74 
   75 addsub_return add add adc
   76 addsub_return sub sub sbb
  77 
   78 .macro incdec_return func ins insc
/*
 * Emit atomic64_{inc,dec}_return_cx8: atomically *v +/-= 1 and return the
 * new value in %edx:%eax.  Pointer in %esi.  Specialization of the
 * addsub_return loop with an immediate operand, so only %ebx needs saving.
 */
   79 ENTRY(atomic64_\func\()_return_cx8)
   80         pushl %ebx
   81 
   82         read64 %esi
   83 1:
/* %ecx:%ebx = old value +/- 1 */
   84         movl %eax, %ebx
   85         movl %edx, %ecx
   86         \ins\()l $1, %ebx
   87         \insc\()l $0, %ecx      /* propagate carry/borrow into the high word */
   88         LOCK_PREFIX
   89         cmpxchg8b (%esi)
   90         jne 1b                  /* raced -- retry with refreshed %edx:%eax */
   91 
   92 10:                             /* unreferenced label, apparently historical */
   93         movl %ebx, %eax         /* return the new value in %edx:%eax */
   94         movl %ecx, %edx
   95         popl %ebx
   96         ret
   97 ENDPROC(atomic64_\func\()_return_cx8)
   98 .endm
   99 
  100 incdec_return inc add adc
  101 incdec_return dec sub sbb
 102 
 103 ENTRY(atomic64_dec_if_positive_cx8)
 104         pushl %ebx
 105 
 106         read64 %esi
 107 1:
 108         movl %eax, %ebx
 109         movl %edx, %ecx
 110         subl $1, %ebx
 111         sbb $0, %ecx
 112         js 2f
 113         LOCK_PREFIX
 114         cmpxchg8b (%esi)
 115         jne 1b
 116 
 117 2:
 118         movl %ebx, %eax
 119         movl %ecx, %edx
 120         popl %ebx
 121         ret
 122 ENDPROC(atomic64_dec_if_positive_cx8)
 123 
  124 ENTRY(atomic64_add_unless_cx8)
/*
 * Atomically add a to *v unless *v == u; return 1 in %eax if the add was
 * performed, 0 if *v equalled u.  Visible register usage: v in %esi,
 * a in %edx:%eax (moved to %edi:%ebp below), and the comparison value u
 * in %edi:%ecx, which is parked on the stack before %edi is reused.
 */
  125         pushl %ebp
  126         pushl %ebx
  127 /* these just push these two parameters on the stack */
  128         pushl %edi              /* 4(%esp) = u high word */
  129         pushl %ecx              /* 0(%esp) = u low word */
  130 
  131         movl %eax, %ebp         /* %edi:%ebp = a; frees %edx:%eax for cmpxchg8b */
  132         movl %edx, %edi
  133 
  134         read64 %esi
  135 1:
  136         cmpl %eax, 0(%esp)      /* low words equal? maybe *v == u */
  137         je 4f
  138 2:
/* %ecx:%ebx = old + a */
  139         movl %eax, %ebx
  140         movl %edx, %ecx
  141         addl %ebp, %ebx
  142         adcl %edi, %ecx
  143         LOCK_PREFIX
  144         cmpxchg8b (%esi)
  145         jne 1b                  /* raced -- recheck against u with fresh value */
  146 
  147         movl $1, %eax           /* add performed */
  148 3:
  149         addl $8, %esp           /* drop the two pushed u words */
  150         popl %ebx
  151         popl %ebp
  152         ret
  153 4:
  154         cmpl %edx, 4(%esp)      /* high words too? only then is *v == u */
  155         jne 2b
  156         xorl %eax, %eax         /* *v == u: return 0, nothing stored */
  157         jmp 3b
  158 ENDPROC(atomic64_add_unless_cx8)
 159 
  160 ENTRY(atomic64_inc_not_zero_cx8)
/*
 * Atomically increment *v unless it is zero; return non-zero in %eax if
 * the increment was performed, 0 otherwise.  Pointer in %esi.
 */
  161         pushl %ebx
  162 
  163         read64 %esi
  164 1:
  165         movl %eax, %ecx
  166         orl %edx, %ecx          /* ZF set iff the whole 64-bit value is zero */
  167         jz 3f                   /* zero: %eax is already 0, the return value */
/* %ecx:%ebx = old + 1 */
  168         movl %eax, %ebx
  169         xorl %ecx, %ecx
  170         addl $1, %ebx
  171         adcl %edx, %ecx         /* %ecx = high word + carry */
  172         LOCK_PREFIX
  173         cmpxchg8b (%esi)
  174         jne 1b                  /* raced -- retest for zero with fresh value */
  175 
  176         movl $1, %eax           /* increment performed */
  177 3:
  178         popl %ebx
  179         ret
  180 ENDPROC(atomic64_inc_not_zero_cx8)

/* [<][>][^][v][top][bottom][index][help] */