/*
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 */

#ifndef _ASM_TILE_BITOPS_64_H
#define _ASM_TILE_BITOPS_64_H

#include <linux/compiler.h>
#include <asm/cmpxchg.h>

/* See <asm/bitops.h> for API comments. */

/*
 * Atomically set bit 'nr' in the bitmap at 'addr'.
 *
 * The tilegx fetchor instruction performs an atomic read-modify-write
 * OR on the word, so no explicit loop or lock is needed.  No memory
 * barrier is implied; set_bit() is not required to be a barrier.
 */
static inline void set_bit(unsigned nr, volatile unsigned long *addr)
{
	/* Select the bit within the word... */
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	/* ...and the word within the bitmap, then atomically OR it in. */
	__insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask);
}

/*
 * Atomically clear bit 'nr' in the bitmap at 'addr'.
 *
 * Uses the atomic fetchand instruction with the inverted mask to
 * clear just the one bit.  No memory barrier is implied.
 */
static inline void clear_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	__insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask);
}

/*
 * Atomically toggle bit 'nr' in the bitmap at 'addr'.
 *
 * NOTE(review): unlike set/clear above, this uses a cmpxchg loop
 * rather than a single fetch-style instruction -- presumably because
 * the ISA has no atomic fetch-xor; confirm against the tilegx ISA.
 * The loop retries until cmpxchg observes the value we predicted,
 * i.e. until no other cpu raced with our update.
 */
static inline void change_bit(unsigned nr, volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	unsigned long guess, oldval;
	addr += nr / BITS_PER_LONG;
	oldval = *addr;
	do {
		guess = oldval;
		oldval = cmpxchg(addr, guess, guess ^ mask);
	} while (guess != oldval);
}


/*
 * The test_and_xxx_bit() routines require a memory fence before we
 * start the operation, and after the operation completes.  We use
 * smp_mb() before, and rely on the "!= 0" comparison, plus a compiler
 * barrier(), to block until the atomic op is complete.
 */

/*
 * Atomically set bit 'nr' and return its previous value (0 or 1).
 *
 * Acts as a full memory barrier, as required by the generic bitops
 * API for the test_and_* family: smp_mb() orders prior accesses
 * before the atomic op, and the "!= 0" test plus barrier() keep the
 * compiler from moving later accesses above the op's completion.
 */
static inline int test_and_set_bit(unsigned nr, volatile unsigned long *addr)
{
	int val;
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	smp_mb();  /* barrier for proper semantics */
	/* fetchor returns the old word; test the bit we set. */
	val = (__insn_fetchor((void *)(addr + nr / BITS_PER_LONG), mask)
	       & mask) != 0;
	barrier();
	return val;
}


/*
 * Atomically clear bit 'nr' and return its previous value (0 or 1).
 * Full-barrier semantics, same scheme as test_and_set_bit() above.
 */
static inline int test_and_clear_bit(unsigned nr, volatile unsigned long *addr)
{
	int val;
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	smp_mb();  /* barrier for proper semantics */
	/* fetchand returns the old word; test the bit we cleared. */
	val = (__insn_fetchand((void *)(addr + nr / BITS_PER_LONG), ~mask)
	       & mask) != 0;
	barrier();
	return val;
}


/*
 * Atomically toggle bit 'nr' and return its previous value (0 or 1).
 *
 * Implemented with a cmpxchg retry loop (like change_bit()); the loop
 * exits once cmpxchg observes the value we predicted, meaning our
 * xor landed without a racing update.  NOTE(review): no explicit
 * smp_mb() here -- this relies on cmpxchg() itself providing the
 * barrier semantics the test_and_* API requires; confirm against the
 * arch's cmpxchg implementation.
 */
static inline int test_and_change_bit(unsigned nr,
				      volatile unsigned long *addr)
{
	unsigned long mask = (1UL << (nr % BITS_PER_LONG));
	unsigned long guess, oldval;
	addr += nr / BITS_PER_LONG;
	oldval = *addr;
	do {
		guess = oldval;
		oldval = cmpxchg(addr, guess, guess ^ mask);
	} while (guess != oldval);
	/* oldval is the word as it was before our successful update. */
	return (oldval & mask) != 0;
}

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _ASM_TILE_BITOPS_64_H */