/*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
* Copyright (C) 2003 Ralf Baechle
*/
#ifndef _ASM_ASMMACRO_H
#define _ASM_ASMMACRO_H

#include <asm/hazards.h>

#ifdef CONFIG_32BIT
#include <asm/asmmacro-32.h>
#endif
#ifdef CONFIG_64BIT
#include <asm/asmmacro-64.h>
#endif
#ifdef CONFIG_MIPS_MT_SMTC
#include <asm/mipsmtregs.h>
#endif

#ifdef CONFIG_MIPS_MT_SMTC
.macro local_irq_enable reg=t0
mfc0 \reg, CP0_TCSTATUS
ori \reg, \reg, TCSTATUS_IXMT	# set, then...
xori \reg, \reg, TCSTATUS_IXMT	# ...clear IXMT so this TC takes interrupts again
mtc0 \reg, CP0_TCSTATUS
_ehb
.endm

.macro local_irq_disable reg=t0
mfc0 \reg, CP0_TCSTATUS
ori \reg, \reg, TCSTATUS_IXMT	# set IXMT: exempt this TC from interrupts
mtc0 \reg, CP0_TCSTATUS
_ehb
.endm
#elif defined(CONFIG_CPU_MIPSR2)
.macro local_irq_enable reg=t0
ei
irq_enable_hazard
.endm

.macro local_irq_disable reg=t0
di
irq_disable_hazard
.endm
#else
.macro local_irq_enable reg=t0
mfc0 \reg, CP0_STATUS
ori \reg, \reg, 1	# set Status.IE
mtc0 \reg, CP0_STATUS
irq_enable_hazard
.endm

.macro local_irq_disable reg=t0
mfc0 \reg, CP0_STATUS
ori \reg, \reg, 1	# set, then...
xori \reg, \reg, 1	# ...clear Status.IE without touching other bits
mtc0 \reg, CP0_STATUS
irq_disable_hazard
.endm
#endif /* CONFIG_MIPS_MT_SMTC */
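
/*
 * Usage sketch (illustrative, not from this file): .S code invokes these
 * macros directly and may override the scratch register, e.g.
 *
 *	local_irq_disable		# may clobber t0, the default scratch
 *	local_irq_enable reg=t1		# use t1 as the scratch instead
 */
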
.macro fpu_save_16even thread tmp=t0
cfc1 \tmp, fcr31
sdc1 $f0, THREAD_FPR0(\thread)
sdc1 $f2, THREAD_FPR2(\thread)
sdc1 $f4, THREAD_FPR4(\thread)
sdc1 $f6, THREAD_FPR6(\thread)
sdc1 $f8, THREAD_FPR8(\thread)
sdc1 $f10, THREAD_FPR10(\thread)
sdc1 $f12, THREAD_FPR12(\thread)
sdc1 $f14, THREAD_FPR14(\thread)
sdc1 $f16, THREAD_FPR16(\thread)
sdc1 $f18, THREAD_FPR18(\thread)
sdc1 $f20, THREAD_FPR20(\thread)
sdc1 $f22, THREAD_FPR22(\thread)
sdc1 $f24, THREAD_FPR24(\thread)
sdc1 $f26, THREAD_FPR26(\thread)
sdc1 $f28, THREAD_FPR28(\thread)
sdc1 $f30, THREAD_FPR30(\thread)
sw \tmp, THREAD_FCR31(\thread)
.endm

.macro fpu_save_16odd thread
.set push
.set mips64r2	# odd-numbered FPRs need a 64-bit FPU; let gas accept them
sdc1 $f1, THREAD_FPR1(\thread)
sdc1 $f3, THREAD_FPR3(\thread)
sdc1 $f5, THREAD_FPR5(\thread)
sdc1 $f7, THREAD_FPR7(\thread)
sdc1 $f9, THREAD_FPR9(\thread)
sdc1 $f11, THREAD_FPR11(\thread)
sdc1 $f13, THREAD_FPR13(\thread)
sdc1 $f15, THREAD_FPR15(\thread)
sdc1 $f17, THREAD_FPR17(\thread)
sdc1 $f19, THREAD_FPR19(\thread)
sdc1 $f21, THREAD_FPR21(\thread)
sdc1 $f23, THREAD_FPR23(\thread)
sdc1 $f25, THREAD_FPR25(\thread)
sdc1 $f27, THREAD_FPR27(\thread)
sdc1 $f29, THREAD_FPR29(\thread)
sdc1 $f31, THREAD_FPR31(\thread)
.set pop
.endm

.macro fpu_save_double thread status tmp
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
sll \tmp, \status, 5	# move Status.FR (bit 26) into the sign bit
bgez \tmp, 10f		# FR clear: only the even FPRs need saving
fpu_save_16odd \thread
10:
#endif
fpu_save_16even \thread \tmp
.endm
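
/*
 * Usage sketch (assumed, modeled on typical context-switch code, with a0/t0/t1
 * purely illustrative): pass the pointer that the THREAD_FPR*/THREAD_FCR31
 * offsets apply to, the saved CP0 Status value, and a scratch register:
 *
 *	fpu_save_double a0 t0 t1	# t0 holds c0_status, t1 is clobbered
 *
 * fpu_restore_double (below) takes the same three operands.
 */
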
.macro fpu_restore_16even thread tmp=t0
lw \tmp, THREAD_FCR31(\thread)
ldc1 $f0, THREAD_FPR0(\thread)
ldc1 $f2, THREAD_FPR2(\thread)
ldc1 $f4, THREAD_FPR4(\thread)
ldc1 $f6, THREAD_FPR6(\thread)
ldc1 $f8, THREAD_FPR8(\thread)
ldc1 $f10, THREAD_FPR10(\thread)
ldc1 $f12, THREAD_FPR12(\thread)
ldc1 $f14, THREAD_FPR14(\thread)
ldc1 $f16, THREAD_FPR16(\thread)
ldc1 $f18, THREAD_FPR18(\thread)
ldc1 $f20, THREAD_FPR20(\thread)
ldc1 $f22, THREAD_FPR22(\thread)
ldc1 $f24, THREAD_FPR24(\thread)
ldc1 $f26, THREAD_FPR26(\thread)
ldc1 $f28, THREAD_FPR28(\thread)
ldc1 $f30, THREAD_FPR30(\thread)
ctc1 \tmp, fcr31
.endm

.macro fpu_restore_16odd thread
.set push
.set mips64r2	# odd-numbered FPRs need a 64-bit FPU; let gas accept them
ldc1 $f1, THREAD_FPR1(\thread)
ldc1 $f3, THREAD_FPR3(\thread)
ldc1 $f5, THREAD_FPR5(\thread)
ldc1 $f7, THREAD_FPR7(\thread)
ldc1 $f9, THREAD_FPR9(\thread)
ldc1 $f11, THREAD_FPR11(\thread)
ldc1 $f13, THREAD_FPR13(\thread)
ldc1 $f15, THREAD_FPR15(\thread)
ldc1 $f17, THREAD_FPR17(\thread)
ldc1 $f19, THREAD_FPR19(\thread)
ldc1 $f21, THREAD_FPR21(\thread)
ldc1 $f23, THREAD_FPR23(\thread)
ldc1 $f25, THREAD_FPR25(\thread)
ldc1 $f27, THREAD_FPR27(\thread)
ldc1 $f29, THREAD_FPR29(\thread)
ldc1 $f31, THREAD_FPR31(\thread)
.set pop
.endm

.macro fpu_restore_double thread status tmp
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
sll \tmp, \status, 5	# move Status.FR (bit 26) into the sign bit
bgez \tmp, 10f # 16 register mode? Then skip the odd FPRs.
fpu_restore_16odd \thread
10:
#endif
fpu_restore_16even \thread \tmp
.endm

#ifdef CONFIG_CPU_MIPSR2
.macro _EXT rd, rs, p, s
ext \rd, \rs, \p, \s
.endm
#else /* !CONFIG_CPU_MIPSR2 */
.macro _EXT rd, rs, p, s
srl \rd, \rs, \p
andi \rd, \rd, (1 << \s) - 1
.endm
#endif /* !CONFIG_CPU_MIPSR2 */
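
/*
 * Example (illustrative only): "_EXT t0, t1, 4, 3" places bits 6..4 of t1,
 * zero-extended and right-justified, into t0, using either a single MIPS R2
 * ext instruction or the srl/andi fallback above.
 */
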
/*
* Temporary until all gas versions have MT ASE support
*/
.macro DMT reg=0
.word 0x41600bc1 | (\reg << 16)
.endm

.macro EMT reg=0
.word 0x41600be1 | (\reg << 16)
.endm

.macro DVPE reg=0
.word 0x41600001 | (\reg << 16)
.endm

.macro EVPE reg=0
.word 0x41600021 | (\reg << 16)
.endm

.macro MFTR rt=0, rd=0, u=0, sel=0
.word 0x41000000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
.endm

.macro MTTR rt=0, rd=0, u=0, sel=0
.word 0x41800000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
.endm
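
/*
 * Note (assumption about the encodings, for illustration): with an MT-ASE
 * capable assembler, "DMT 8" would assemble to the same bits as "dmt $8",
 * since \reg is placed in the rt field (bits 20..16); the other macros
 * follow the same pattern.
 */
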
#endif /* _ASM_ASMMACRO_H */