arch/mips/include/asm/hazards.h

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#include <linux/stringify.h>
#include <asm/compiler.h>

#define ___ssnop                                                        \
        sll     $0, $0, 1

#define ___ehb                                                          \
        sll     $0, $0, 3

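/*
 * Both barriers use NOP-class encodings: SSNOP is sll $0, $0, 1 and EHB is
 * sll $0, $0, 3.  They execute as plain no-ops on CPUs that predate them,
 * while acting as a superscalar issue barrier and an execution hazard
 * barrier respectively on CPUs that implement them.
 */
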
/*
 * TLB hazards
 */
#if (defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)) && \
        !defined(CONFIG_CPU_CAVIUM_OCTEON) && !defined(CONFIG_LOONGSON3_ENHANCEMENT)

/*
 * MIPS R2 and MIPS R6 define EHB for hazard avoidance.
 */

#define __mtc0_tlbw_hazard                                              \
        ___ehb

#define __mtc0_tlbr_hazard                                              \
        ___ehb

#define __tlbw_use_hazard                                               \
        ___ehb

#define __tlb_read_hazard                                               \
        ___ehb

#define __tlb_probe_hazard                                              \
        ___ehb

#define __irq_enable_hazard                                             \
        ___ehb

#define __irq_disable_hazard                                            \
        ___ehb

#define __back_to_back_c0_hazard                                        \
        ___ehb

/*
 * gcc has a history of miscompiling the previous variant of this construct,
 * which passed the address of a label as an argument to inline assembler.
 * Gas, on the other hand, has the annoying distinction between la and dla,
 * which are usable only for 32-bit and 64-bit code respectively, so neither
 * can be used without conditional compilation.  The alternative is to
 * switch the assembler to 64-bit code, which happens to work correctly
 * even for 32-bit code...
 */
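/*
 * jr.hb is a jump-register with a hazard barrier: it clears all execution
 * and instruction hazards before execution continues at the jump target.
 */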
#define instruction_hazard()                                            \
do {                                                                    \
        unsigned long tmp;                                              \
                                                                        \
        __asm__ __volatile__(                                           \
        "       .set    push                                    \n"     \
        "       .set "MIPS_ISA_LEVEL"                           \n"     \
        "       dla     %0, 1f                                  \n"     \
        "       jr.hb   %0                                      \n"     \
        "       .set    pop                                     \n"     \
        "1:                                                     \n"     \
        : "=r" (tmp));                                                  \
} while (0)

#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
        defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.
 */
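/*
 * Each sequence below ends in an ___ehb: on R2 processors it acts as an
 * execution hazard barrier, while on R1 processors its encoding
 * (sll $0, $0, 3) executes as a plain no-op, so the same kernel binary is
 * safe on both.
 */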

#define __mtc0_tlbw_hazard                                              \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __mtc0_tlbr_hazard                                              \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __tlbw_use_hazard                                               \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __tlb_read_hazard                                               \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __tlb_probe_hazard                                              \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __irq_enable_hazard                                             \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __irq_disable_hazard                                            \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

#define __back_to_back_c0_hazard                                        \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ehb

/*
 * gcc has a history of miscompiling the previous variant of this construct,
 * which passed the address of a label as an argument to inline assembler.
 * Gas, on the other hand, has the annoying distinction between la and dla,
 * which are usable only for 32-bit and 64-bit code respectively, so neither
 * can be used without conditional compilation.  The alternative is to
 * switch the assembler to 64-bit code, which happens to work correctly
 * even for 32-bit code...
 */
#define __instruction_hazard()                                          \
do {                                                                    \
        unsigned long tmp;                                              \
                                                                        \
        __asm__ __volatile__(                                           \
        "       .set    push                                    \n"     \
        "       .set    mips64r2                                \n"     \
        "       dla     %0, 1f                                  \n"     \
        "       jr.hb   %0                                      \n"     \
        "       .set    pop                                     \n"     \
        "1:                                                     \n"     \
        : "=r" (tmp));                                                  \
} while (0)

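/*
 * jr.hb only exists from MIPS R2 onwards, so the barrier above is executed
 * only when the CPU running this R1 kernel actually implements R2 or R6;
 * genuine R1 parts skip it.
 */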
#define instruction_hazard()                                            \
do {                                                                    \
        if (cpu_has_mips_r2_r6)                                         \
                __instruction_hazard();                                 \
} while (0)

#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
        defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_LOONGSON3_ENHANCEMENT) || \
        defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_R5500) || defined(CONFIG_CPU_XLR)

/*
 * All hazards are handled in hardware on these processors, so this becomes
 * a no-brainer: every macro below expands to nothing.
 */

#define __mtc0_tlbw_hazard

#define __mtc0_tlbr_hazard

#define __tlbw_use_hazard

#define __tlb_read_hazard

#define __tlb_probe_hazard

#define __irq_enable_hazard

#define __irq_disable_hazard

#define __back_to_back_c0_hazard

#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly handled like the R4000, for historical reasons.
 */
#define __mtc0_tlbw_hazard

#define __mtc0_tlbr_hazard

#define __tlbw_use_hazard

#define __tlb_read_hazard

#define __tlb_probe_hazard

#define __irq_enable_hazard

#define __irq_disable_hazard                                            \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop

#define __back_to_back_c0_hazard

#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to get optimal code for a range of
 * processors.
 */
#define __mtc0_tlbw_hazard                                              \
        nop;                                                            \
        nop

#define __mtc0_tlbr_hazard                                              \
        nop;                                                            \
        nop

#define __tlbw_use_hazard                                               \
        nop;                                                            \
        nop;                                                            \
        nop

#define __tlb_read_hazard                                               \
        nop;                                                            \
        nop;                                                            \
        nop

#define __tlb_probe_hazard                                              \
        nop;                                                            \
        nop;                                                            \
        nop

#define __irq_enable_hazard                                             \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop

#define __irq_disable_hazard                                            \
        nop;                                                            \
        nop;                                                            \
        nop

#define __back_to_back_c0_hazard                                        \
        ___ssnop;                                                       \
        ___ssnop;                                                       \
        ___ssnop

#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */

#if defined(CONFIG_CPU_SB1)

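/*
 * bnezl $0, .+4 is never taken; being a branch-likely, it annuls the
 * ___ssnop in its delay slot, which presumably serves to drain the SB1
 * pipeline before the FPU is first used.
 */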
#define __enable_fpu_hazard                                             \
        .set    push;                                                   \
        .set    mips64;                                                 \
        .set    noreorder;                                              \
        ___ssnop;                                                       \
        bnezl   $0, .+4;                                                \
        ___ssnop;                                                       \
        .set    pop

#define __disable_fpu_hazard

#elif defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)

#define __enable_fpu_hazard                                             \
        ___ehb

#define __disable_fpu_hazard                                            \
        ___ehb

#else

#define __enable_fpu_hazard                                             \
        nop;                                                            \
        nop;                                                            \
        nop;                                                            \
        nop

#define __disable_fpu_hazard                                            \
        ___ehb

#endif

#ifdef __ASSEMBLY__

#define _ssnop ___ssnop
#define _ehb ___ehb
#define mtc0_tlbw_hazard __mtc0_tlbw_hazard
#define mtc0_tlbr_hazard __mtc0_tlbr_hazard
#define tlbw_use_hazard __tlbw_use_hazard
#define tlb_read_hazard __tlb_read_hazard
#define tlb_probe_hazard __tlb_probe_hazard
#define irq_enable_hazard __irq_enable_hazard
#define irq_disable_hazard __irq_disable_hazard
#define back_to_back_c0_hazard __back_to_back_c0_hazard
#define enable_fpu_hazard __enable_fpu_hazard
#define disable_fpu_hazard __disable_fpu_hazard

#else

#define _ssnop()                                                        \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(___ssnop)                                           \
        );                                                              \
} while (0)

#define _ehb()                                                          \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(___ehb)                                             \
        );                                                              \
} while (0)


#define mtc0_tlbw_hazard()                                              \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__mtc0_tlbw_hazard)                                 \
        );                                                              \
} while (0)


#define mtc0_tlbr_hazard()                                              \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__mtc0_tlbr_hazard)                                 \
        );                                                              \
} while (0)


#define tlbw_use_hazard()                                               \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__tlbw_use_hazard)                                  \
        );                                                              \
} while (0)
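
/*
 * Typical use in TLB handling code (sketch): the last mtc0 that sets up
 * EntryHi/EntryLo must be separated from the following tlbwi/tlbwr by
 * mtc0_tlbw_hazard(), and the TLB write itself must be separated from any
 * instruction that may use the new mapping by tlbw_use_hazard().
 */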


#define tlb_read_hazard()                                               \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__tlb_read_hazard)                                  \
        );                                                              \
} while (0)


#define tlb_probe_hazard()                                              \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__tlb_probe_hazard)                                 \
        );                                                              \
} while (0)


#define irq_enable_hazard()                                             \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__irq_enable_hazard)                                \
        );                                                              \
} while (0)


#define irq_disable_hazard()                                            \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__irq_disable_hazard)                               \
        );                                                              \
} while (0)


#define back_to_back_c0_hazard()                                        \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__back_to_back_c0_hazard)                           \
        );                                                              \
} while (0)


#define enable_fpu_hazard()                                             \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__enable_fpu_hazard)                                \
        );                                                              \
} while (0)
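
/*
 * Typical use (sketch): set the CU1 bit in c0_status to enable coprocessor 1,
 * then execute enable_fpu_hazard() before issuing the first FPU instruction.
 */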


#define disable_fpu_hazard()                                            \
do {                                                                    \
        __asm__ __volatile__(                                           \
        __stringify(__disable_fpu_hazard)                               \
        );                                                              \
} while (0)

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif /* __ASSEMBLY__ */

#endif /* _ASM_HAZARDS_H */
