/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1999 Silicon Graphics
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
#include <asm/irqflags.h>
#include <asm/hazards.h>
#include <linux/compiler.h>
#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/stringify.h>

#if !defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_MIPSR6)

/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
 * no nops at all.
 */
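/*
 * The nops are not spelled out here: the __irq_disable_hazard macro
 * (pulled in from <asm/hazards.h>) is expected to expand to whatever
 * barrier the configured CPU needs after the mtc0 below.
 */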
/*
 * For the TX49, operating on only the IE bit is not enough.
 *
 * If an mfc0 of $12 follows a store, the mfc0 is the last instruction
 * of a page, and fetching the next instruction causes a TLB miss, the
 * result of the mfc0 might wrongly contain the EXL bit.
 *
 * Errata: ERT-TX49H2-027, ERT-TX49H3-012, ERT-TX49HL3-006, ERT-TX49H4-008
 *
 * Workaround: mask the EXL bit of the result, or place a nop before the mfc0.
 */
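/*
 * The ori $1,0x1f / xori $1,0x1f sequences below implement that masking:
 * they force bits 4..0 of the value read from c0_status (KSU, ERL, EXL
 * and IE) to zero before it is written back, so a spuriously-set EXL
 * bit never reaches the Status register.
 */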
notrace void arch_local_irq_disable(void)
{
	preempt_disable();

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noat						\n"
	"	mfc0	$1,$12						\n"
	"	ori	$1,0x1f						\n"
	"	xori	$1,0x1f						\n"
	"	.set	noreorder					\n"
	"	mtc0	$1,$12						\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: /* no outputs */
	: /* no inputs */
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_disable);


notrace unsigned long arch_local_irq_save(void)
{
	unsigned long flags;

	preempt_disable();

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	reorder						\n"
	"	.set	noat						\n"
	"	mfc0	%[flags], $12					\n"
	"	ori	$1, %[flags], 0x1f				\n"
	"	xori	$1, 0x1f					\n"
	"	.set	noreorder					\n"
	"	mtc0	$1, $12						\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: [flags] "=r" (flags)
	: /* no inputs */
	: "memory");

	preempt_enable();

	return flags;
}
EXPORT_SYMBOL(arch_local_irq_save);

notrace void arch_local_irq_restore(unsigned long flags)
{
	unsigned long __tmp1;

	preempt_disable();

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	noat						\n"
	"	mfc0	$1, $12						\n"
	"	andi	%[flags], 1					\n"
	"	ori	$1, 0x1f					\n"
	"	xori	$1, 0x1f					\n"
	"	or	%[flags], $1					\n"
	"	mtc0	%[flags], $12					\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: [flags] "=r" (__tmp1)
	: "0" (flags)
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_restore);


notrace void __arch_local_irq_restore(unsigned long flags)
{
	unsigned long __tmp1;

	preempt_disable();

	__asm__ __volatile__(
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	noat						\n"
	"	mfc0	$1, $12						\n"
	"	andi	%[flags], 1					\n"
	"	ori	$1, 0x1f					\n"
	"	xori	$1, 0x1f					\n"
	"	or	%[flags], $1					\n"
	"	mtc0	%[flags], $12					\n"
	"	" __stringify(__irq_disable_hazard) "			\n"
	"	.set	pop						\n"
	: [flags] "=r" (__tmp1)
	: "0" (flags)
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(__arch_local_irq_restore);
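
/*
 * Illustrative only: callers normally reach the helpers above through the
 * generic wrappers in <linux/irqflags.h> rather than calling them directly,
 * roughly like:
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);		// ends up in arch_local_irq_save()
 *	... critical section with interrupts masked ...
 *	local_irq_restore(flags);	// ends up in arch_local_irq_restore()
 */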

#endif /* !CONFIG_CPU_MIPSR2 && !CONFIG_CPU_MIPSR6 */