Commit | Line | Data |
---|---|---|
920dae64 AT |
1 | /* |
2 | * ========== Copyright Header Begin ========================================== | |
3 | * | |
4 | * OpenSPARC T2 Processor File: sparcv9instns.c | |
5 | * Copyright (c) 2006 Sun Microsystems, Inc. All Rights Reserved. | |
6 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES. | |
7 | * | |
8 | * The above named program is free software; you can redistribute it and/or | |
9 | * modify it under the terms of the GNU General Public | |
10 | * License version 2 as published by the Free Software Foundation. | |
11 | * | |
12 | * The above named program is distributed in the hope that it will be | |
13 | * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
15 | * General Public License for more details. | |
16 | * | |
17 | * You should have received a copy of the GNU General Public | |
18 | * License along with this work; if not, write to the Free Software | |
19 | * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. | |
20 | * | |
21 | * ========== Copyright Header End ============================================ | |
22 | */ | |
23 | /* | |
24 | * Copyright 2007 Sun Microsystems, Inc. All rights reserved. | |
25 | * Use is subject to license terms. | |
26 | */ | |
27 | ||
28 | #pragma ident "@(#)sparcv9instns.c 1.64 07/03/19 SMI" | |
29 | ||
30 | #include <stdio.h> | |
31 | #include <stdlib.h> | |
32 | ||
33 | #include <assert.h> | |
34 | ||
35 | #include "basics.h" | |
36 | #include "fatal.h" | |
37 | #include "allocate.h" | |
38 | #include "simcore.h" | |
39 | #include "config.h" | |
40 | #include "xicache.h" | |
41 | #include "xdcache.h" | |
42 | #include "tsparcv9.h" | |
43 | #include "tsparcv9internal.h" | |
44 | #include "sparcv9regs.h" | |
45 | #include "sparcv9cc.h" | |
46 | #include "magictraps.h" | |
47 | #include "hostnative_asm.h" /* autogenerated from hostnative_asm.S */ | |
48 | #include "hostnative.h" /* autogenerated from hostnative.c */ | |
49 | #include "sparcv9decode.h" | |
50 | #include "fpsim.h" | |
51 | ||
/*
 * Stub used by FPU operations whose non-native (simulated) version
 * has not been written yet: warn, record an unimplemented-FPop trap
 * type in FSR.ftt, post a precise fp_exception_other trap, and bail
 * out of the instruction implementation.
 */
#define	FPU_NOT_IMPLEMENTED(_which) \
	FIXME_WARNING(("non-native implementation of " _which \
	    " not yet completed")); \
	sp->v9_fsr_ctrl &= ~V9_FSR_FTT_MASK; \
	sp->v9_fsr_ctrl |= SPARCv9_FTT_unimplemented_FPop << V9_FSR_FTT_SHIFT; \
	v9p->post_precise_trap(sp, Sparcv9_trap_fp_exception_other); \
	return;
59 | ||
/*
 * Implementation(s) of pre-decoded sparcv9 specific instructions.
 */

/*
 * IMPL(n) opens the body of decoded_impl_sparcv9_n(); ENDDEF closes
 * it, and ENDI additionally advances the simulated PC/NPC to the
 * next instruction before closing.
 */
#define	IMPL( _n )	void decoded_impl_sparcv9_##_n (simcpu_t *sp, xicache_instn_t * xcip) {
#define	ENDI		NEXT_INSTN(sp); ENDDEF
#define	ENDDEF		}
67 | ||
68 | #define _U (1 << V9_fcc_u) | |
69 | #define _G (1 << V9_fcc_g) | |
70 | #define _L (1 << V9_fcc_l) | |
71 | #define _E (1 << V9_fcc_e) | |
72 | ||
73 | static uint8_t sparcv9_fcc_magic[16] = { | |
74 | 0, /* never */ | |
75 | _U|_G|_L, /* NE */ | |
76 | _G|_L, /* LG */ | |
77 | _U|_L, /* UL */ | |
78 | _L, /* L */ | |
79 | _U|_G, /* UG */ | |
80 | _G, /* G */ | |
81 | _U, /* U */ | |
82 | _U|_G|_L|_E, /* always */ | |
83 | _E, /* E */ | |
84 | _U|_E, /* UE */ | |
85 | _G|_E, /* GE */ | |
86 | _U|_G|_E, /* UGE */ | |
87 | _L|_E, /* LE */ | |
88 | _U|_L|_E, /* ULE */ | |
89 | _G|_L|_E /* O */ | |
90 | }; | |
91 | ||
92 | #undef _U | |
93 | #undef _G | |
94 | #undef _L | |
95 | #undef _E | |
96 | ||
97 | ||
/*
 * Some FPU instructions that can never cause exceptions still
 * update the FSR - so just clear current exceptions and trap type.
 */
#define	FP_CLEAR_CEXC_FTT(_sp)	do { \
		(_sp)->v9_fsr_exc &= ~V9_FSR_CEXC_MASK; \
		(_sp)->v9_fsr_ctrl &= ~V9_FSR_FTT_MASK; \
	} while (0)
106 | ||
107 | ||
108 | /* ------------------------------------------------------------ */ | |
109 | ||
110 | /* | |
111 | * Instruction: sparcv9_add_co_imm | |
112 | */ | |
113 | ||
114 | #if !defined(HAS_NATIVE_sparcv9_add_co_imm) /* { */ | |
115 | ||
116 | IMPL(add_co_imm) | |
117 | int64_t s1 = Rsrc1; | |
118 | int64_t s2 = Simm16; | |
119 | int64_t d; | |
120 | uint64_t v, c; | |
121 | ||
122 | d = s1 + s2; | |
123 | ||
124 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
125 | c = (s1 & s2) | (~d & (s1 | s2)); | |
126 | ||
127 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
128 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
129 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
130 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
131 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
132 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
133 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
134 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
135 | Rdest = d; | |
136 | ENDI | |
137 | ||
138 | #endif /* } */ | |
139 | ||
140 | ||
141 | /* | |
142 | * Instruction: sparcv9_add_co_rrr | |
143 | */ | |
144 | ||
145 | #if !defined(HAS_NATIVE_sparcv9_add_co_rrr) /* { */ | |
146 | ||
147 | IMPL(add_co_rrr) | |
148 | uint64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
149 | uint64_t v, c; | |
150 | ||
151 | d = Rsrc1 + Rsrc2; | |
152 | ||
153 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
154 | c = (s1 & s2) | (~d & (s1 | s2)); | |
155 | ||
156 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
157 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
158 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
159 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
160 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
161 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
162 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
163 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
164 | Rdest = d; | |
165 | ENDI | |
166 | ||
167 | #endif /* } */ | |
168 | ||
169 | ||
170 | /* | |
171 | * Instruction: sparcv9_add_co_imm_rd0 | |
172 | */ | |
173 | ||
174 | #if !defined(HAS_NATIVE_sparcv9_add_co_imm_rd0) /* { */ | |
175 | ||
176 | IMPL(add_co_imm_rd0) | |
177 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
178 | uint64_t v, c; | |
179 | ||
180 | d = s1 + s2; | |
181 | ||
182 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
183 | c = (s1 & s2) | (~d & (s1 | s2)); | |
184 | ||
185 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
186 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
187 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
188 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
189 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
190 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
191 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
192 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
193 | ENDI | |
194 | ||
195 | #endif /* } */ | |
196 | ||
197 | ||
198 | ||
199 | ||
200 | ||
201 | /* | |
202 | * Instruction: sparcv9_add_co_rrr_rd0 | |
203 | */ | |
204 | ||
205 | #if !defined(HAS_NATIVE_sparcv9_add_co_rrr_rd0) /* { */ | |
206 | ||
207 | IMPL(add_co_rrr_rd0) | |
208 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
209 | uint64_t v, c; | |
210 | ||
211 | d = s1 + s2; | |
212 | ||
213 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
214 | c = (s1 & s2) | (~d & (s1 | s2)); | |
215 | ||
216 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
217 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
218 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
219 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
220 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
221 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
222 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
223 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
224 | ENDI | |
225 | ||
226 | #endif /* } */ | |
227 | ||
228 | ||
229 | ||
230 | ||
231 | ||
232 | /* | |
233 | * Instruction: sparcv9_add_ci_imm | |
234 | */ | |
235 | ||
236 | #if !defined(HAS_NATIVE_sparcv9_add_ci_imm) /* { */ | |
237 | ||
238 | IMPL(add_ci_imm) | |
239 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
240 | ||
241 | d = s1 + s2 + (sp->v9_ccr & 1); | |
242 | Rdest = d; | |
243 | ENDI | |
244 | ||
245 | #endif /* } */ | |
246 | ||
247 | ||
248 | /* | |
249 | * Instruction: sparcv9_add_ci_rrr | |
250 | */ | |
251 | ||
252 | #if !defined(HAS_NATIVE_sparcv9_add_ci_rrr) /* { */ | |
253 | ||
254 | IMPL(add_ci_rrr) | |
255 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
256 | ||
257 | d = s1 + s2 + (sp->v9_ccr & 1); | |
258 | Rdest = d; | |
259 | ENDI | |
260 | ||
261 | #endif /* } */ | |
262 | ||
263 | ||
264 | /* | |
265 | * Instruction: sparcv9_add_cico_imm | |
266 | */ | |
267 | ||
268 | #if !defined(HAS_NATIVE_sparcv9_add_cico_imm) /* { */ | |
269 | ||
270 | IMPL(add_cico_imm) | |
271 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
272 | uint64_t v, c; | |
273 | ||
274 | d = s1 + s2 + (sp->v9_ccr & 1); | |
275 | ||
276 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
277 | c = (s1 & s2) | (~d & (s1 | s2)); | |
278 | ||
279 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
280 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
281 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
282 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
283 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
284 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
285 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
286 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
287 | Rdest = d; | |
288 | ENDI | |
289 | ||
290 | #endif /* } */ | |
291 | ||
292 | ||
293 | ||
294 | ||
295 | ||
296 | /* | |
297 | * Instruction: sparcv9_add_cico_rrr | |
298 | */ | |
299 | ||
300 | #if !defined(HAS_NATIVE_sparcv9_add_cico_rrr) /* { */ | |
301 | ||
302 | IMPL(add_cico_rrr) | |
303 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
304 | uint64_t v, c; | |
305 | ||
306 | d = s1 + s2 + (sp->v9_ccr & 1); | |
307 | ||
308 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
309 | c = (s1 & s2) | (~d & (s1 | s2)); | |
310 | ||
311 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
312 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
313 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
314 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
315 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
316 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
317 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
318 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
319 | Rdest = d; | |
320 | ENDI | |
321 | ||
322 | #endif /* } */ | |
323 | ||
324 | ||
325 | ||
326 | ||
327 | ||
328 | /* | |
329 | * Instruction: sparcv9_add_cico_imm_rd0 | |
330 | */ | |
331 | ||
332 | #if !defined(HAS_NATIVE_sparcv9_add_cico_imm_rd0) /* { */ | |
333 | ||
334 | IMPL(add_cico_imm_rd0) | |
335 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
336 | uint64_t v, c; | |
337 | ||
338 | d = s1 + s2 + (sp->v9_ccr & 1); | |
339 | ||
340 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
341 | c = (s1 & s2) | (~d & (s1 | s2)); | |
342 | ||
343 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
344 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
345 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
346 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
347 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
348 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
349 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
350 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
351 | ENDI | |
352 | ||
353 | #endif /* } */ | |
354 | ||
355 | ||
356 | /* | |
357 | * Instruction: sparcv9_add_cico_rrr_rd0 | |
358 | */ | |
359 | ||
360 | #if !defined(HAS_NATIVE_sparcv9_add_cico_rrr_rd0) /* { */ | |
361 | ||
362 | IMPL(add_cico_rrr_rd0) | |
363 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
364 | uint64_t v, c; | |
365 | ||
366 | d = s1 + s2 + (sp->v9_ccr & 1); | |
367 | ||
368 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
369 | c = (s1 & s2) | (~d & (s1 | s2)); | |
370 | ||
371 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
372 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
373 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
374 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
375 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
376 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
377 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
378 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
379 | ||
380 | ENDI | |
381 | ||
382 | #endif /* } */ | |
383 | ||
384 | ||
385 | ||
386 | ||
387 | ||
388 | /* | |
389 | * Instruction: sparcv9_sub_co_imm | |
390 | */ | |
391 | ||
392 | #if !defined(HAS_NATIVE_sparcv9_sub_co_imm) /* { */ | |
393 | ||
394 | IMPL(sub_co_imm) | |
395 | int64_t s1 = Rsrc1; | |
396 | int64_t s2 = Simm16; | |
397 | int64_t d; | |
398 | uint64_t v, c; | |
399 | ||
400 | d = s1 - s2; | |
401 | ||
402 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
403 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
404 | ||
405 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
406 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
407 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
408 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
409 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
410 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
411 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
412 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
413 | Rdest = d; | |
414 | ENDI | |
415 | ||
416 | #endif /* } */ | |
417 | ||
418 | ||
419 | ||
420 | ||
421 | ||
422 | /* | |
423 | * Instruction: sparcv9_sub_co_rrr | |
424 | */ | |
425 | ||
426 | #if !defined(HAS_NATIVE_sparcv9_sub_co_rrr) /* { */ | |
427 | ||
428 | IMPL(sub_co_rrr) | |
429 | int64_t s1 = Rsrc1; | |
430 | int64_t s2 = Rsrc2; | |
431 | int64_t d; | |
432 | uint64_t v, c; | |
433 | ||
434 | d = s1 - s2; | |
435 | ||
436 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
437 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
438 | ||
439 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
440 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
441 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
442 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
443 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
444 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
445 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
446 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
447 | Rdest = d; | |
448 | ENDI | |
449 | ||
450 | #endif /* } */ | |
451 | ||
452 | ||
453 | ||
454 | ||
455 | ||
456 | /* | |
457 | * Instruction: sparcv9_sub_co_imm_rd0 | |
458 | */ | |
459 | ||
460 | #if !defined(HAS_NATIVE_sparcv9_sub_co_imm_rd0) /* { */ | |
461 | ||
462 | IMPL(sub_co_imm_rd0) | |
463 | int64_t s1 = Rsrc1; | |
464 | int64_t s2 = Simm16; | |
465 | int64_t d; | |
466 | uint64_t v, c; | |
467 | ||
468 | d = s1 - s2; | |
469 | ||
470 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
471 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
472 | ||
473 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
474 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
475 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
476 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
477 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
478 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
479 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
480 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
481 | ENDI | |
482 | ||
483 | #endif /* } */ | |
484 | ||
485 | ||
486 | ||
487 | ||
488 | ||
489 | /* | |
490 | * Instruction: sparcv9_sub_co_rrr_rd0 | |
491 | */ | |
492 | ||
493 | #if !defined(HAS_NATIVE_sparcv9_sub_co_rrr_rd0) /* { */ | |
494 | ||
495 | IMPL(sub_co_rrr_rd0) | |
496 | int64_t s1 = Rsrc1; | |
497 | int64_t s2 = Rsrc2; | |
498 | int64_t d; | |
499 | uint64_t v, c; | |
500 | ||
501 | d = s1 - s2; | |
502 | ||
503 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
504 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
505 | ||
506 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
507 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
508 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
509 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
510 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
511 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
512 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
513 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
514 | ENDI | |
515 | ||
516 | #endif /* } */ | |
517 | ||
518 | ||
519 | ||
520 | ||
521 | ||
522 | /* | |
523 | * Instruction: sparcv9_sub_ci_imm | |
524 | */ | |
525 | ||
526 | #if !defined(HAS_NATIVE_sparcv9_sub_ci_imm) /* { */ | |
527 | ||
528 | IMPL(sub_ci_imm) | |
529 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
530 | ||
531 | d = s1 - s2 - (sp->v9_ccr & 1); | |
532 | Rdest = d; | |
533 | ENDI | |
534 | ||
535 | #endif /* } */ | |
536 | ||
537 | ||
538 | ||
539 | ||
540 | ||
541 | /* | |
542 | * Instruction: sparcv9_sub_ci_rrr | |
543 | */ | |
544 | ||
545 | #if !defined(HAS_NATIVE_sparcv9_sub_ci_rrr) /* { */ | |
546 | ||
547 | IMPL(sub_ci_rrr) | |
548 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
549 | ||
550 | d = s1 - s2 - (sp->v9_ccr & 1); | |
551 | Rdest = d; | |
552 | ENDI | |
553 | ||
554 | #endif /* } */ | |
555 | ||
556 | ||
557 | ||
558 | /* | |
559 | * Instruction: sparcv9_sub_cico_imm | |
560 | */ | |
561 | ||
562 | #if !defined(HAS_NATIVE_sparcv9_sub_cico_imm) /* { */ | |
563 | ||
564 | IMPL(sub_cico_imm) | |
565 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
566 | uint64_t v, c; | |
567 | ||
568 | d = s1 - s2 - (sp->v9_ccr & 1); | |
569 | ||
570 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
571 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
572 | ||
573 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
574 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
575 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
576 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
577 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
578 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
579 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
580 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
581 | Rdest = d; | |
582 | ENDI | |
583 | ||
584 | #endif /* } */ | |
585 | ||
586 | ||
587 | /* | |
588 | * Instruction: sparcv9_sub_cico_rrr | |
589 | */ | |
590 | ||
591 | #if !defined(HAS_NATIVE_sparcv9_sub_cico_rrr) /* { */ | |
592 | ||
593 | IMPL(sub_cico_rrr) | |
594 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
595 | uint64_t v, c; | |
596 | ||
597 | d = s1 - s2 - (sp->v9_ccr & 1); | |
598 | ||
599 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
600 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
601 | ||
602 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
603 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
604 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
605 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
606 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
607 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
608 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
609 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
610 | Rdest = d; | |
611 | ENDI | |
612 | ||
613 | #endif /* } */ | |
614 | ||
615 | ||
616 | ||
617 | /* | |
618 | * Instruction: sparcv9_sub_cico_imm_rd0 | |
619 | */ | |
620 | ||
621 | #if !defined(HAS_NATIVE_sparcv9_sub_cico_imm_rd0) /* { */ | |
622 | ||
623 | IMPL(sub_cico_imm_rd0) | |
624 | int64_t s1 = Rsrc1, s2 = (int64_t)(int32_t)Simm16, d; | |
625 | uint64_t v, c; | |
626 | ||
627 | d = s1 - s2 - (sp->v9_ccr & 1); | |
628 | ||
629 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
630 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
631 | ||
632 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
633 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
634 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
635 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
636 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
637 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
638 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
639 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
640 | ENDI | |
641 | ||
642 | #endif /* } */ | |
643 | ||
644 | ||
645 | ||
646 | ||
647 | ||
648 | /* | |
649 | * Instruction: sparcv9_sub_cico_rrr_rd0 | |
650 | */ | |
651 | ||
652 | #if !defined(HAS_NATIVE_sparcv9_sub_cico_rrr_rd0) /* { */ | |
653 | ||
654 | IMPL(sub_cico_rrr_rd0) | |
655 | int64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
656 | uint64_t v, c; | |
657 | ||
658 | d = s1 - s2 - (sp->v9_ccr & 1); | |
659 | ||
660 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
661 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
662 | ||
663 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
664 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
665 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
666 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
667 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
668 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
669 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
670 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
671 | ENDI | |
672 | ||
673 | #endif /* } */ | |
674 | ||
675 | ||
676 | ||
/*
 * Evaluate the logical operation _op exactly once, set the icc/xcc
 * N and Z flags from the 64-bit result (V and C end up clear, as the
 * V9 logical ops require), then run _res with `result' in scope so
 * the caller can store it to the destination register (or pass
 * nothing for the %g0-destination forms).
 */
#define	LOGIC_OP(_res, _op)	do { \
		uint64_t result; \
		result = _op; \
		sp->v9_ccr = V9_xcc_n((result>>63)&1) \
		    | V9_xcc_z(result==0LL) \
		    | V9_icc_n((result>>31)&1) \
		    | V9_icc_z((result&MASK64(31,0))==0LL); \
		_res; \
	} while (0)
686 | ||
687 | ||
688 | ||
689 | /* | |
690 | * Instruction: sparcv9_and_cc_imm | |
691 | */ | |
692 | ||
693 | #if !defined(HAS_NATIVE_sparcv9_and_cc_imm) /* { */ | |
694 | ||
695 | IMPL(and_cc_imm) | |
696 | LOGIC_OP(Rdest = result, Rsrc1 & Simm16); | |
697 | ENDI | |
698 | ||
699 | #endif /* } */ | |
700 | ||
701 | ||
702 | ||
703 | ||
704 | ||
705 | /* | |
706 | * Instruction: sparcv9_and_cc_rrr | |
707 | */ | |
708 | ||
709 | #if !defined(HAS_NATIVE_sparcv9_and_cc_rrr) /* { */ | |
710 | ||
711 | IMPL(and_cc_rrr) | |
712 | LOGIC_OP(Rdest = result, Rsrc1 & Rsrc2); | |
713 | ENDI | |
714 | ||
715 | #endif /* } */ | |
716 | ||
717 | ||
718 | ||
719 | ||
720 | ||
721 | /* | |
722 | * Instruction: sparcv9_and_cc_imm_rd0 | |
723 | */ | |
724 | ||
725 | #if !defined(HAS_NATIVE_sparcv9_and_cc_imm_rd0) /* { */ | |
726 | ||
727 | IMPL(and_cc_imm_rd0) | |
728 | LOGIC_OP(/*nada*/, Rsrc1 & Simm16); | |
729 | ENDI | |
730 | ||
731 | #endif /* } */ | |
732 | ||
733 | ||
734 | ||
735 | ||
736 | ||
737 | /* | |
738 | * Instruction: sparcv9_and_cc_rrr_rd0 | |
739 | */ | |
740 | ||
741 | #if !defined(HAS_NATIVE_sparcv9_and_cc_rrr_rd0) /* { */ | |
742 | ||
743 | IMPL(and_cc_rrr_rd0) | |
744 | LOGIC_OP(/*nada*/, Rsrc1 & Rsrc2); | |
745 | ENDI | |
746 | ||
747 | #endif /* } */ | |
748 | ||
749 | ||
750 | ||
751 | ||
752 | ||
753 | /* | |
754 | * Instruction: sparcv9_andn_cc_rrr | |
755 | */ | |
756 | ||
757 | #if !defined(HAS_NATIVE_sparcv9_andn_cc_rrr) /* { */ | |
758 | ||
759 | IMPL(andn_cc_rrr) | |
760 | LOGIC_OP(Rdest=result, (Rsrc1 & ~(Rsrc2)) ); | |
761 | ENDI | |
762 | ||
763 | #endif /* } */ | |
764 | ||
765 | ||
766 | ||
767 | ||
768 | ||
769 | /* | |
770 | * Instruction: sparcv9_andn_cc_rrr_rd0 | |
771 | */ | |
772 | ||
773 | #if !defined(HAS_NATIVE_sparcv9_andn_cc_rrr_rd0) /* { */ | |
774 | ||
775 | IMPL(andn_cc_rrr_rd0) | |
776 | LOGIC_OP(/*nada*/, (Rsrc1 & ~(Rsrc2)) ); | |
777 | ENDI | |
778 | ||
779 | #endif /* } */ | |
780 | ||
781 | ||
782 | ||
783 | ||
784 | ||
785 | /* | |
786 | * Instruction: sparcv9_or_cc_imm | |
787 | */ | |
788 | ||
789 | #if !defined(HAS_NATIVE_sparcv9_or_cc_imm) /* { */ | |
790 | ||
791 | IMPL(or_cc_imm) | |
792 | LOGIC_OP(Rdest=result, Rsrc1 | Simm16 ); | |
793 | ENDI | |
794 | ||
795 | #endif /* } */ | |
796 | ||
797 | ||
798 | ||
799 | ||
800 | ||
801 | /* | |
802 | * Instruction: sparcv9_or_cc_rrr | |
803 | */ | |
804 | ||
805 | #if !defined(HAS_NATIVE_sparcv9_or_cc_rrr) /* { */ | |
806 | ||
807 | IMPL(or_cc_rrr) | |
808 | LOGIC_OP(Rdest=result, Rsrc1 | Rsrc2 ); | |
809 | ENDI | |
810 | ||
811 | #endif /* } */ | |
812 | ||
813 | ||
814 | ||
815 | ||
816 | ||
817 | /* | |
818 | * Instruction: sparcv9_or_cc_imm_rd0 | |
819 | */ | |
820 | ||
821 | #if !defined(HAS_NATIVE_sparcv9_or_cc_imm_rd0) /* { */ | |
822 | ||
823 | IMPL(or_cc_imm_rd0) | |
824 | LOGIC_OP(/*nada*/, Rsrc1 | Simm16 ); | |
825 | ENDI | |
826 | ||
827 | #endif /* } */ | |
828 | ||
829 | ||
830 | ||
831 | ||
832 | ||
833 | /* | |
834 | * Instruction: sparcv9_or_cc_rrr_rd0 | |
835 | */ | |
836 | ||
837 | #if !defined(HAS_NATIVE_sparcv9_or_cc_rrr_rd0) /* { */ | |
838 | ||
839 | IMPL(or_cc_rrr_rd0) | |
840 | LOGIC_OP(/*nada*/, Rsrc1 | Rsrc2 ); | |
841 | ENDI | |
842 | ||
843 | #endif /* } */ | |
844 | ||
845 | ||
846 | ||
847 | ||
848 | ||
849 | /* | |
850 | * Instruction: sparcv9_orn_cc_rrr | |
851 | */ | |
852 | ||
853 | #if !defined(HAS_NATIVE_sparcv9_orn_cc_rrr) /* { */ | |
854 | ||
855 | IMPL(orn_cc_rrr) | |
856 | LOGIC_OP(Rdest=result, (Rsrc1 | ~(Rsrc2)) ); | |
857 | ENDI | |
858 | ||
859 | #endif /* } */ | |
860 | ||
861 | ||
862 | ||
863 | ||
864 | ||
865 | /* | |
866 | * Instruction: sparcv9_orn_cc_rrr_rd0 | |
867 | */ | |
868 | ||
869 | #if !defined(HAS_NATIVE_sparcv9_orn_cc_rrr_rd0) /* { */ | |
870 | ||
871 | IMPL(orn_cc_rrr_rd0) | |
872 | LOGIC_OP(/*nada*/, (Rsrc1 | ~(Rsrc2)) ); | |
873 | ENDI | |
874 | ||
875 | #endif /* } */ | |
876 | ||
877 | ||
878 | ||
879 | ||
880 | ||
881 | /* | |
882 | * Instruction: sparcv9_xor_cc_imm | |
883 | */ | |
884 | ||
885 | #if !defined(HAS_NATIVE_sparcv9_xor_cc_imm) /* { */ | |
886 | ||
887 | IMPL(xor_cc_imm) | |
888 | LOGIC_OP(Rdest=result, Rsrc1 ^ Simm16 ); | |
889 | ENDI | |
890 | ||
891 | #endif /* } */ | |
892 | ||
893 | ||
894 | ||
895 | ||
896 | ||
897 | /* | |
898 | * Instruction: sparcv9_xor_cc_rrr | |
899 | */ | |
900 | ||
901 | #if !defined(HAS_NATIVE_sparcv9_xor_cc_rrr) /* { */ | |
902 | ||
903 | IMPL(xor_cc_rrr) | |
904 | LOGIC_OP(Rdest=result, Rsrc1 ^ Rsrc2 ); | |
905 | ENDI | |
906 | ||
907 | #endif /* } */ | |
908 | ||
909 | ||
910 | ||
911 | ||
912 | ||
913 | /* | |
914 | * Instruction: sparcv9_xor_cc_imm_rd0 | |
915 | */ | |
916 | ||
917 | #if !defined(HAS_NATIVE_sparcv9_xor_cc_imm_rd0) /* { */ | |
918 | ||
919 | IMPL(xor_cc_imm_rd0) | |
920 | LOGIC_OP(/*nada*/, Rsrc1 ^ Simm16 ); | |
921 | ENDI | |
922 | ||
923 | #endif /* } */ | |
924 | ||
925 | ||
926 | ||
927 | ||
928 | ||
929 | /* | |
930 | * Instruction: sparcv9_xor_cc_rrr_rd0 | |
931 | */ | |
932 | ||
933 | #if !defined(HAS_NATIVE_sparcv9_xor_cc_rrr_rd0) /* { */ | |
934 | ||
935 | IMPL(xor_cc_rrr_rd0) | |
936 | LOGIC_OP(/*nada*/, Rsrc1 ^ Rsrc2 ); | |
937 | ENDI | |
938 | ||
939 | #endif /* } */ | |
940 | ||
941 | ||
942 | ||
943 | ||
944 | ||
945 | /* | |
946 | * Instruction: sparcv9_xnor_cc_rrr | |
947 | */ | |
948 | ||
949 | #if !defined(HAS_NATIVE_sparcv9_xnor_cc_rrr) /* { */ | |
950 | ||
951 | IMPL(xnor_cc_rrr) | |
952 | LOGIC_OP(Rdest=result, ~(Rsrc1 ^ Rsrc2) ); | |
953 | ENDI | |
954 | ||
955 | #endif /* } */ | |
956 | ||
957 | ||
958 | ||
959 | ||
960 | ||
961 | /* | |
962 | * Instruction: sparcv9_xnor_cc_rrr_rd0 | |
963 | */ | |
964 | ||
965 | #if !defined(HAS_NATIVE_sparcv9_xnor_cc_rrr_rd0) /* { */ | |
966 | ||
967 | IMPL(xnor_cc_rrr_rd0) | |
968 | LOGIC_OP(/*nada*/, ~(Rsrc1 ^ Rsrc2) ); | |
969 | ENDI | |
970 | ||
971 | #endif /* } */ | |
972 | ||
973 | ||
974 | ||
975 | ||
976 | ||
977 | /* | |
978 | * Instruction: sparcv9_bne_icc | |
979 | */ | |
980 | ||
981 | #if !defined(HAS_NATIVE_sparcv9_bne_icc) /* { */ | |
982 | ||
983 | IMPL(bne_icc) | |
984 | if (!(sp->v9_ccr & V9_icc_z_mask)) { | |
985 | tvaddr_t tpc = Rpc + SBRoffset32; | |
986 | Rpc = Rnpc; | |
987 | Rnpc = tpc; | |
988 | return; | |
989 | } | |
990 | ENDI | |
991 | ||
992 | #endif /* } */ | |
993 | ||
994 | ||
995 | ||
996 | ||
997 | ||
998 | /* | |
999 | * Instruction: sparcv9_be_icc | |
1000 | */ | |
1001 | ||
1002 | #if !defined(HAS_NATIVE_sparcv9_be_icc) /* { */ | |
1003 | ||
1004 | IMPL(be_icc) | |
1005 | if (sp->v9_ccr & V9_icc_z_mask) { | |
1006 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1007 | Rpc = Rnpc; | |
1008 | Rnpc = tpc; | |
1009 | return; | |
1010 | } | |
1011 | ENDI | |
1012 | ||
1013 | #endif /* } */ | |
1014 | ||
1015 | ||
1016 | ||
1017 | ||
1018 | ||
1019 | /* | |
1020 | * Instruction: sparcv9_bg_icc | |
1021 | */ | |
1022 | ||
1023 | #if !defined(HAS_NATIVE_sparcv9_bg_icc) /* { */ | |
1024 | ||
1025 | IMPL(bg_icc) | |
1026 | int cc = V9_ext_icc(sp->v9_ccr); | |
1027 | if ( (sparcv9_cc_magic[cond_g] >> cc) &1 ) { | |
1028 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1029 | Rpc = Rnpc; | |
1030 | Rnpc = tpc; | |
1031 | return; | |
1032 | } | |
1033 | ENDI | |
1034 | ||
1035 | #endif /* } */ | |
1036 | ||
1037 | ||
1038 | ||
1039 | ||
1040 | ||
1041 | /* | |
1042 | * Instruction: sparcv9_ble_icc | |
1043 | */ | |
1044 | ||
1045 | #if !defined(HAS_NATIVE_sparcv9_ble_icc) /* { */ | |
1046 | ||
1047 | IMPL(ble_icc) | |
1048 | int cc = V9_ext_icc(sp->v9_ccr); | |
1049 | if ( (sparcv9_cc_magic[cond_le] >> cc) &1 ) { | |
1050 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1051 | Rpc = Rnpc; | |
1052 | Rnpc = tpc; | |
1053 | return; | |
1054 | } | |
1055 | ENDI | |
1056 | ||
1057 | #endif /* } */ | |
1058 | ||
1059 | ||
1060 | ||
1061 | ||
1062 | ||
1063 | /* | |
1064 | * Instruction: sparcv9_bge_icc | |
1065 | */ | |
1066 | ||
1067 | #if !defined(HAS_NATIVE_sparcv9_bge_icc) /* { */ | |
1068 | ||
1069 | IMPL(bge_icc) | |
1070 | int cc = V9_ext_icc(sp->v9_ccr); | |
1071 | if ( (sparcv9_cc_magic[cond_ge] >> cc) &1 ) { | |
1072 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1073 | Rpc = Rnpc; | |
1074 | Rnpc = tpc; | |
1075 | return; | |
1076 | } | |
1077 | ENDI | |
1078 | ||
1079 | #endif /* } */ | |
1080 | ||
1081 | ||
1082 | ||
1083 | ||
1084 | ||
1085 | /* | |
1086 | * Instruction: sparcv9_bl_icc | |
1087 | */ | |
1088 | ||
1089 | #if !defined(HAS_NATIVE_sparcv9_bl_icc) /* { */ | |
1090 | ||
1091 | IMPL(bl_icc) | |
1092 | int cc = V9_ext_icc(sp->v9_ccr); | |
1093 | if ( (sparcv9_cc_magic[cond_l] >> cc) &1 ) { | |
1094 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1095 | Rpc = Rnpc; | |
1096 | Rnpc = tpc; | |
1097 | return; | |
1098 | } | |
1099 | ENDI | |
1100 | ||
1101 | #endif /* } */ | |
1102 | ||
1103 | ||
1104 | ||
1105 | ||
1106 | ||
1107 | /* | |
1108 | * Instruction: sparcv9_bgu_icc | |
1109 | */ | |
1110 | ||
1111 | #if !defined(HAS_NATIVE_sparcv9_bgu_icc) /* { */ | |
1112 | ||
1113 | IMPL(bgu_icc) | |
1114 | int cc = V9_ext_icc(sp->v9_ccr); | |
1115 | if ( (sparcv9_cc_magic[cond_gu] >> cc) &1 ) { | |
1116 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1117 | Rpc = Rnpc; | |
1118 | Rnpc = tpc; | |
1119 | return; | |
1120 | } | |
1121 | ENDI | |
1122 | ||
1123 | #endif /* } */ | |
1124 | ||
1125 | ||
1126 | ||
1127 | ||
1128 | ||
1129 | /* | |
1130 | * Instruction: sparcv9_bleu_icc | |
1131 | */ | |
1132 | ||
1133 | #if !defined(HAS_NATIVE_sparcv9_bleu_icc) /* { */ | |
1134 | ||
1135 | IMPL(bleu_icc) | |
1136 | int cc = V9_ext_icc(sp->v9_ccr); | |
1137 | if ( (sparcv9_cc_magic[cond_leu] >> cc) &1 ) { | |
1138 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1139 | Rpc = Rnpc; | |
1140 | Rnpc = tpc; | |
1141 | return; | |
1142 | } | |
1143 | ENDI | |
1144 | ||
1145 | #endif /* } */ | |
1146 | ||
1147 | ||
1148 | ||
1149 | ||
1150 | ||
1151 | /* | |
1152 | * Instruction: sparcv9_bcc_icc | |
1153 | */ | |
1154 | ||
1155 | #if !defined(HAS_NATIVE_sparcv9_bcc_icc) /* { */ | |
1156 | ||
1157 | IMPL(bcc_icc) | |
1158 | if ( !(sp->v9_ccr & V9_icc_c_mask) ) { | |
1159 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1160 | Rpc = Rnpc; | |
1161 | Rnpc = tpc; | |
1162 | return; | |
1163 | } | |
1164 | ENDI | |
1165 | ||
1166 | #endif /* } */ | |
1167 | ||
1168 | ||
1169 | ||
1170 | ||
1171 | ||
1172 | /* | |
1173 | * Instruction: sparcv9_bcs_icc | |
1174 | */ | |
1175 | ||
1176 | #if !defined(HAS_NATIVE_sparcv9_bcs_icc) /* { */ | |
1177 | ||
1178 | IMPL(bcs_icc) | |
1179 | if ( sp->v9_ccr & V9_icc_c_mask ) { | |
1180 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1181 | Rpc = Rnpc; | |
1182 | Rnpc = tpc; | |
1183 | return; | |
1184 | } | |
1185 | ENDI | |
1186 | ||
1187 | #endif /* } */ | |
1188 | ||
1189 | ||
1190 | ||
1191 | ||
1192 | ||
1193 | /* | |
1194 | * Instruction: sparcv9_bpos_icc | |
1195 | */ | |
1196 | ||
1197 | #if !defined(HAS_NATIVE_sparcv9_bpos_icc) /* { */ | |
1198 | ||
1199 | IMPL(bpos_icc) | |
1200 | if ( !(sp->v9_ccr & V9_icc_n_mask) ) { | |
1201 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1202 | Rpc = Rnpc; | |
1203 | Rnpc = tpc; | |
1204 | return; | |
1205 | } | |
1206 | ENDI | |
1207 | ||
1208 | #endif /* } */ | |
1209 | ||
1210 | ||
1211 | ||
1212 | ||
1213 | ||
1214 | /* | |
1215 | * Instruction: sparcv9_bneg_icc | |
1216 | */ | |
1217 | ||
1218 | #if !defined(HAS_NATIVE_sparcv9_bneg_icc) /* { */ | |
1219 | ||
1220 | IMPL(bneg_icc) | |
1221 | if ( sp->v9_ccr & V9_icc_n_mask ) { | |
1222 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1223 | Rpc = Rnpc; | |
1224 | Rnpc = tpc; | |
1225 | return; | |
1226 | } | |
1227 | ENDI | |
1228 | ||
1229 | #endif /* } */ | |
1230 | ||
1231 | ||
1232 | ||
1233 | ||
1234 | ||
1235 | /* | |
1236 | * Instruction: sparcv9_bvc_icc | |
1237 | */ | |
1238 | ||
1239 | #if !defined(HAS_NATIVE_sparcv9_bvc_icc) /* { */ | |
1240 | ||
1241 | IMPL(bvc_icc) | |
1242 | if ( !(sp->v9_ccr & V9_icc_v_mask) ) { | |
1243 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1244 | Rpc = Rnpc; | |
1245 | Rnpc = tpc; | |
1246 | return; | |
1247 | } | |
1248 | ENDI | |
1249 | ||
1250 | #endif /* } */ | |
1251 | ||
1252 | ||
1253 | ||
1254 | ||
1255 | ||
1256 | /* | |
1257 | * Instruction: sparcv9_bvs_icc | |
1258 | */ | |
1259 | ||
1260 | #if !defined(HAS_NATIVE_sparcv9_bvs_icc) /* { */ | |
1261 | ||
1262 | IMPL(bvs_icc) | |
1263 | if ( sp->v9_ccr & V9_icc_v_mask ) { | |
1264 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1265 | Rpc = Rnpc; | |
1266 | Rnpc = tpc; | |
1267 | return; | |
1268 | } | |
1269 | ENDI | |
1270 | ||
1271 | #endif /* } */ | |
1272 | ||
1273 | ||
1274 | ||
1275 | /* | |
1276 | * Instruction: sparcv9_bne_xcc | |
1277 | */ | |
1278 | ||
1279 | #if !defined(HAS_NATIVE_sparcv9_bne_xcc) /* { */ | |
1280 | ||
1281 | IMPL(bne_xcc) | |
1282 | if (!(sp->v9_ccr & V9_xcc_z_mask)) { | |
1283 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1284 | Rpc = Rnpc; | |
1285 | Rnpc = tpc; | |
1286 | return; | |
1287 | } | |
1288 | ENDI | |
1289 | ||
1290 | #endif /* } */ | |
1291 | ||
1292 | ||
1293 | ||
1294 | ||
1295 | ||
1296 | /* | |
1297 | * Instruction: sparcv9_be_xcc | |
1298 | */ | |
1299 | ||
1300 | #if !defined(HAS_NATIVE_sparcv9_be_xcc) /* { */ | |
1301 | ||
1302 | IMPL(be_xcc) | |
1303 | if (sp->v9_ccr & V9_xcc_z_mask) { | |
1304 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1305 | Rpc = Rnpc; | |
1306 | Rnpc = tpc; | |
1307 | return; | |
1308 | } | |
1309 | ENDI | |
1310 | ||
1311 | #endif /* } */ | |
1312 | ||
1313 | ||
1314 | ||
1315 | ||
1316 | ||
1317 | /* | |
1318 | * Instruction: sparcv9_bg_xcc | |
1319 | */ | |
1320 | ||
1321 | #if !defined(HAS_NATIVE_sparcv9_bg_xcc) /* { */ | |
1322 | ||
1323 | IMPL(bg_xcc) | |
1324 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1325 | if ( (sparcv9_cc_magic[cond_g] >> cc) &1 ) { | |
1326 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1327 | Rpc = Rnpc; | |
1328 | Rnpc = tpc; | |
1329 | return; | |
1330 | } | |
1331 | ENDI | |
1332 | ||
1333 | #endif /* } */ | |
1334 | ||
1335 | ||
1336 | ||
1337 | ||
1338 | ||
1339 | /* | |
1340 | * Instruction: sparcv9_ble_xcc | |
1341 | */ | |
1342 | ||
1343 | #if !defined(HAS_NATIVE_sparcv9_ble_xcc) /* { */ | |
1344 | ||
1345 | IMPL(ble_xcc) | |
1346 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1347 | if ( (sparcv9_cc_magic[cond_le] >> cc) &1 ) { | |
1348 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1349 | Rpc = Rnpc; | |
1350 | Rnpc = tpc; | |
1351 | return; | |
1352 | } | |
1353 | ENDI | |
1354 | ||
1355 | #endif /* } */ | |
1356 | ||
1357 | ||
1358 | ||
1359 | ||
1360 | ||
1361 | /* | |
1362 | * Instruction: sparcv9_bge_xcc | |
1363 | */ | |
1364 | ||
1365 | #if !defined(HAS_NATIVE_sparcv9_bge_xcc) /* { */ | |
1366 | ||
1367 | IMPL(bge_xcc) | |
1368 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1369 | if ( (sparcv9_cc_magic[cond_ge] >> cc) &1 ) { | |
1370 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1371 | Rpc = Rnpc; | |
1372 | Rnpc = tpc; | |
1373 | return; | |
1374 | } | |
1375 | ENDI | |
1376 | ||
1377 | #endif /* } */ | |
1378 | ||
1379 | ||
1380 | ||
1381 | ||
1382 | ||
1383 | /* | |
1384 | * Instruction: sparcv9_bl_xcc | |
1385 | */ | |
1386 | ||
1387 | #if !defined(HAS_NATIVE_sparcv9_bl_xcc) /* { */ | |
1388 | ||
1389 | IMPL(bl_xcc) | |
1390 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1391 | if ( (sparcv9_cc_magic[cond_l] >> cc) &1 ) { | |
1392 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1393 | Rpc = Rnpc; | |
1394 | Rnpc = tpc; | |
1395 | return; | |
1396 | } | |
1397 | ENDI | |
1398 | ||
1399 | #endif /* } */ | |
1400 | ||
1401 | ||
1402 | ||
1403 | ||
1404 | ||
1405 | /* | |
1406 | * Instruction: sparcv9_bgu_xcc | |
1407 | */ | |
1408 | ||
1409 | #if !defined(HAS_NATIVE_sparcv9_bgu_xcc) /* { */ | |
1410 | ||
1411 | IMPL(bgu_xcc) | |
1412 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1413 | if ( (sparcv9_cc_magic[cond_gu] >> cc) &1 ) { | |
1414 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1415 | Rpc = Rnpc; | |
1416 | Rnpc = tpc; | |
1417 | return; | |
1418 | } | |
1419 | ENDI | |
1420 | ||
1421 | #endif /* } */ | |
1422 | ||
1423 | ||
1424 | ||
1425 | ||
1426 | ||
1427 | /* | |
1428 | * Instruction: sparcv9_bleu_xcc | |
1429 | */ | |
1430 | ||
1431 | #if !defined(HAS_NATIVE_sparcv9_bleu_xcc) /* { */ | |
1432 | ||
1433 | IMPL(bleu_xcc) | |
1434 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1435 | if ( (sparcv9_cc_magic[cond_leu] >> cc) &1 ) { | |
1436 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1437 | Rpc = Rnpc; | |
1438 | Rnpc = tpc; | |
1439 | return; | |
1440 | } | |
1441 | ENDI | |
1442 | ||
1443 | #endif /* } */ | |
1444 | ||
1445 | ||
1446 | ||
1447 | ||
1448 | ||
1449 | /* | |
1450 | * Instruction: sparcv9_bcc_xcc | |
1451 | */ | |
1452 | ||
1453 | #if !defined(HAS_NATIVE_sparcv9_bcc_xcc) /* { */ | |
1454 | ||
1455 | IMPL(bcc_xcc) | |
1456 | if ( !(sp->v9_ccr & V9_xcc_c_mask) ) { | |
1457 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1458 | Rpc = Rnpc; | |
1459 | Rnpc = tpc; | |
1460 | return; | |
1461 | } | |
1462 | ENDI | |
1463 | ||
1464 | #endif /* } */ | |
1465 | ||
1466 | ||
1467 | ||
1468 | ||
1469 | ||
1470 | /* | |
1471 | * Instruction: sparcv9_bcs_xcc | |
1472 | */ | |
1473 | ||
1474 | #if !defined(HAS_NATIVE_sparcv9_bcs_xcc) /* { */ | |
1475 | ||
1476 | IMPL(bcs_xcc) | |
1477 | if ( sp->v9_ccr & V9_xcc_c_mask ) { | |
1478 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1479 | Rpc = Rnpc; | |
1480 | Rnpc = tpc; | |
1481 | return; | |
1482 | } | |
1483 | ENDI | |
1484 | ||
1485 | #endif /* } */ | |
1486 | ||
1487 | ||
1488 | ||
1489 | ||
1490 | ||
1491 | /* | |
1492 | * Instruction: sparcv9_bpos_xcc | |
1493 | */ | |
1494 | ||
1495 | #if !defined(HAS_NATIVE_sparcv9_bpos_xcc) /* { */ | |
1496 | ||
1497 | IMPL(bpos_xcc) | |
1498 | if ( !(sp->v9_ccr & V9_xcc_n_mask) ) { | |
1499 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1500 | Rpc = Rnpc; | |
1501 | Rnpc = tpc; | |
1502 | return; | |
1503 | } | |
1504 | ENDI | |
1505 | ||
1506 | #endif /* } */ | |
1507 | ||
1508 | ||
1509 | ||
1510 | ||
1511 | ||
1512 | /* | |
1513 | * Instruction: sparcv9_bneg_xcc | |
1514 | */ | |
1515 | ||
1516 | #if !defined(HAS_NATIVE_sparcv9_bneg_xcc) /* { */ | |
1517 | ||
1518 | IMPL(bneg_xcc) | |
1519 | if ( sp->v9_ccr & V9_xcc_n_mask ) { | |
1520 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1521 | Rpc = Rnpc; | |
1522 | Rnpc = tpc; | |
1523 | return; | |
1524 | } | |
1525 | ENDI | |
1526 | ||
1527 | #endif /* } */ | |
1528 | ||
1529 | ||
1530 | ||
1531 | ||
1532 | ||
1533 | /* | |
1534 | * Instruction: sparcv9_bvc_xcc | |
1535 | */ | |
1536 | ||
1537 | #if !defined(HAS_NATIVE_sparcv9_bvc_xcc) /* { */ | |
1538 | ||
1539 | IMPL(bvc_xcc) | |
1540 | if ( !(sp->v9_ccr & V9_xcc_v_mask) ) { | |
1541 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1542 | Rpc = Rnpc; | |
1543 | Rnpc = tpc; | |
1544 | return; | |
1545 | } | |
1546 | ENDI | |
1547 | ||
1548 | #endif /* } */ | |
1549 | ||
1550 | ||
1551 | ||
1552 | ||
1553 | ||
1554 | /* | |
1555 | * Instruction: sparcv9_bvs_xcc | |
1556 | */ | |
1557 | ||
1558 | #if !defined(HAS_NATIVE_sparcv9_bvs_xcc) /* { */ | |
1559 | ||
1560 | IMPL(bvs_xcc) | |
1561 | if ( sp->v9_ccr & V9_xcc_v_mask ) { | |
1562 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1563 | Rpc = Rnpc; | |
1564 | Rnpc = tpc; | |
1565 | return; | |
1566 | } | |
1567 | ENDI | |
1568 | ||
1569 | #endif /* } */ | |
1570 | ||
1571 | ||
1572 | ||
1573 | ||
1574 | ||
1575 | ||
1576 | ||
1577 | ||
1578 | /* | |
1579 | * Instruction: sparcv9_bne_icc_an | |
1580 | */ | |
1581 | ||
1582 | #if !defined(HAS_NATIVE_sparcv9_bne_icc_an) /* { */ | |
1583 | ||
1584 | IMPL(bne_icc_an) | |
1585 | if (!(sp->v9_ccr & V9_icc_z_mask)) { | |
1586 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1587 | Rpc = Rnpc; | |
1588 | Rnpc = tpc; | |
1589 | return; | |
1590 | } | |
1591 | Rpc = Rnpc + 4; | |
1592 | Rnpc = Rnpc + 8; | |
1593 | ENDDEF | |
1594 | ||
1595 | #endif /* } */ | |
1596 | ||
1597 | ||
1598 | ||
1599 | ||
1600 | ||
1601 | /* | |
1602 | * Instruction: sparcv9_be_icc_an | |
1603 | */ | |
1604 | ||
1605 | #if !defined(HAS_NATIVE_sparcv9_be_icc_an) /* { */ | |
1606 | ||
1607 | IMPL(be_icc_an) | |
1608 | if (sp->v9_ccr & V9_icc_z_mask) { | |
1609 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1610 | Rpc = Rnpc; | |
1611 | Rnpc = tpc; | |
1612 | return; | |
1613 | } | |
1614 | Rpc = Rnpc + 4; | |
1615 | Rnpc = Rnpc + 8; | |
1616 | ENDDEF | |
1617 | ||
1618 | #endif /* } */ | |
1619 | ||
1620 | ||
1621 | ||
1622 | ||
1623 | ||
1624 | /* | |
1625 | * Instruction: sparcv9_bg_icc_an | |
1626 | */ | |
1627 | ||
1628 | #if !defined(HAS_NATIVE_sparcv9_bg_icc_an) /* { */ | |
1629 | ||
1630 | IMPL(bg_icc_an) | |
1631 | int cc = V9_ext_icc(sp->v9_ccr); | |
1632 | if ( (sparcv9_cc_magic[cond_g] >> cc) &1 ) { | |
1633 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1634 | Rpc = Rnpc; | |
1635 | Rnpc = tpc; | |
1636 | return; | |
1637 | } | |
1638 | Rpc = Rnpc + 4; | |
1639 | Rnpc = Rnpc + 8; | |
1640 | ENDDEF | |
1641 | ||
1642 | #endif /* } */ | |
1643 | ||
1644 | ||
1645 | ||
1646 | ||
1647 | ||
1648 | /* | |
1649 | * Instruction: sparcv9_ble_icc_an | |
1650 | */ | |
1651 | ||
1652 | #if !defined(HAS_NATIVE_sparcv9_ble_icc_an) /* { */ | |
1653 | ||
1654 | IMPL(ble_icc_an) | |
1655 | int cc = V9_ext_icc(sp->v9_ccr); | |
1656 | if ( (sparcv9_cc_magic[cond_le] >> cc) &1 ) { | |
1657 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1658 | Rpc = Rnpc; | |
1659 | Rnpc = tpc; | |
1660 | return; | |
1661 | } | |
1662 | Rpc = Rnpc + 4; | |
1663 | Rnpc = Rnpc + 8; | |
1664 | ENDDEF | |
1665 | ||
1666 | #endif /* } */ | |
1667 | ||
1668 | ||
1669 | ||
1670 | ||
1671 | ||
1672 | /* | |
1673 | * Instruction: sparcv9_bge_icc_an | |
1674 | */ | |
1675 | ||
1676 | #if !defined(HAS_NATIVE_sparcv9_bge_icc_an) /* { */ | |
1677 | ||
1678 | IMPL(bge_icc_an) | |
1679 | int cc = V9_ext_icc(sp->v9_ccr); | |
1680 | if ( (sparcv9_cc_magic[cond_ge] >> cc) &1 ) { | |
1681 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1682 | Rpc = Rnpc; | |
1683 | Rnpc = tpc; | |
1684 | return; | |
1685 | } | |
1686 | Rpc = Rnpc + 4; | |
1687 | Rnpc = Rnpc + 8; | |
1688 | ENDDEF | |
1689 | ||
1690 | #endif /* } */ | |
1691 | ||
1692 | ||
1693 | ||
1694 | ||
1695 | ||
1696 | /* | |
1697 | * Instruction: sparcv9_bl_icc_an | |
1698 | */ | |
1699 | ||
1700 | #if !defined(HAS_NATIVE_sparcv9_bl_icc_an) /* { */ | |
1701 | ||
1702 | IMPL(bl_icc_an) | |
1703 | int cc = V9_ext_icc(sp->v9_ccr); | |
1704 | if ( (sparcv9_cc_magic[cond_l] >> cc) &1 ) { | |
1705 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1706 | Rpc = Rnpc; | |
1707 | Rnpc = tpc; | |
1708 | return; | |
1709 | } | |
1710 | Rpc = Rnpc + 4; | |
1711 | Rnpc = Rnpc + 8; | |
1712 | ENDDEF | |
1713 | ||
1714 | #endif /* } */ | |
1715 | ||
1716 | ||
1717 | ||
1718 | ||
1719 | ||
1720 | /* | |
1721 | * Instruction: sparcv9_bgu_icc_an | |
1722 | */ | |
1723 | ||
1724 | #if !defined(HAS_NATIVE_sparcv9_bgu_icc_an) /* { */ | |
1725 | ||
1726 | IMPL(bgu_icc_an) | |
1727 | int cc = V9_ext_icc(sp->v9_ccr); | |
1728 | if ( (sparcv9_cc_magic[cond_gu] >> cc) &1 ) { | |
1729 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1730 | Rpc = Rnpc; | |
1731 | Rnpc = tpc; | |
1732 | return; | |
1733 | } | |
1734 | Rpc = Rnpc + 4; | |
1735 | Rnpc = Rnpc + 8; | |
1736 | ENDDEF | |
1737 | ||
1738 | #endif /* } */ | |
1739 | ||
1740 | ||
1741 | ||
1742 | ||
1743 | ||
1744 | /* | |
1745 | * Instruction: sparcv9_bleu_icc_an | |
1746 | */ | |
1747 | ||
1748 | #if !defined(HAS_NATIVE_sparcv9_bleu_icc_an) /* { */ | |
1749 | ||
1750 | IMPL(bleu_icc_an) | |
1751 | int cc = V9_ext_icc(sp->v9_ccr); | |
1752 | if ( (sparcv9_cc_magic[cond_leu] >> cc) &1 ) { | |
1753 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1754 | Rpc = Rnpc; | |
1755 | Rnpc = tpc; | |
1756 | return; | |
1757 | } | |
1758 | Rpc = Rnpc + 4; | |
1759 | Rnpc = Rnpc + 8; | |
1760 | ENDDEF | |
1761 | ||
1762 | #endif /* } */ | |
1763 | ||
1764 | ||
1765 | ||
1766 | ||
1767 | ||
1768 | /* | |
1769 | * Instruction: sparcv9_bcc_icc_an | |
1770 | */ | |
1771 | ||
1772 | #if !defined(HAS_NATIVE_sparcv9_bcc_icc_an) /* { */ | |
1773 | ||
1774 | IMPL(bcc_icc_an) | |
1775 | if ( !(sp->v9_ccr & V9_icc_c_mask) ) { | |
1776 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1777 | Rpc = Rnpc; | |
1778 | Rnpc = tpc; | |
1779 | return; | |
1780 | } | |
1781 | Rpc = Rnpc + 4; | |
1782 | Rnpc = Rnpc + 8; | |
1783 | ENDDEF | |
1784 | ||
1785 | #endif /* } */ | |
1786 | ||
1787 | ||
1788 | ||
1789 | ||
1790 | ||
1791 | /* | |
1792 | * Instruction: sparcv9_bcs_icc_an | |
1793 | */ | |
1794 | ||
1795 | #if !defined(HAS_NATIVE_sparcv9_bcs_icc_an) /* { */ | |
1796 | ||
1797 | IMPL(bcs_icc_an) | |
1798 | if ( sp->v9_ccr & V9_icc_c_mask ) { | |
1799 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1800 | Rpc = Rnpc; | |
1801 | Rnpc = tpc; | |
1802 | return; | |
1803 | } | |
1804 | Rpc = Rnpc + 4; | |
1805 | Rnpc = Rnpc + 8; | |
1806 | ENDDEF | |
1807 | ||
1808 | #endif /* } */ | |
1809 | ||
1810 | ||
1811 | ||
1812 | ||
1813 | ||
1814 | /* | |
1815 | * Instruction: sparcv9_bpos_icc_an | |
1816 | */ | |
1817 | ||
1818 | #if !defined(HAS_NATIVE_sparcv9_bpos_icc_an) /* { */ | |
1819 | ||
1820 | IMPL(bpos_icc_an) | |
1821 | if ( !(sp->v9_ccr & V9_icc_n_mask) ) { | |
1822 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1823 | Rpc = Rnpc; | |
1824 | Rnpc = tpc; | |
1825 | return; | |
1826 | } | |
1827 | Rpc = Rnpc + 4; | |
1828 | Rnpc = Rnpc + 8; | |
1829 | ENDDEF | |
1830 | ||
1831 | #endif /* } */ | |
1832 | ||
1833 | ||
1834 | ||
1835 | ||
1836 | ||
1837 | /* | |
1838 | * Instruction: sparcv9_bneg_icc_an | |
1839 | */ | |
1840 | ||
1841 | #if !defined(HAS_NATIVE_sparcv9_bneg_icc_an) /* { */ | |
1842 | ||
1843 | IMPL(bneg_icc_an) | |
1844 | if ( sp->v9_ccr & V9_icc_n_mask ) { | |
1845 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1846 | Rpc = Rnpc; | |
1847 | Rnpc = tpc; | |
1848 | return; | |
1849 | } | |
1850 | Rpc = Rnpc + 4; | |
1851 | Rnpc = Rnpc + 8; | |
1852 | ENDDEF | |
1853 | ||
1854 | #endif /* } */ | |
1855 | ||
1856 | ||
1857 | ||
1858 | ||
1859 | ||
1860 | /* | |
1861 | * Instruction: sparcv9_bvc_icc_an | |
1862 | */ | |
1863 | ||
1864 | #if !defined(HAS_NATIVE_sparcv9_bvc_icc_an) /* { */ | |
1865 | ||
1866 | IMPL(bvc_icc_an) | |
1867 | if ( !(sp->v9_ccr & V9_icc_v_mask) ) { | |
1868 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1869 | Rpc = Rnpc; | |
1870 | Rnpc = tpc; | |
1871 | return; | |
1872 | } | |
1873 | Rpc = Rnpc + 4; | |
1874 | Rnpc = Rnpc + 8; | |
1875 | ENDDEF | |
1876 | ||
1877 | #endif /* } */ | |
1878 | ||
1879 | ||
1880 | ||
1881 | ||
1882 | ||
1883 | /* | |
1884 | * Instruction: sparcv9_bvs_icc_an | |
1885 | */ | |
1886 | ||
1887 | #if !defined(HAS_NATIVE_sparcv9_bvs_icc_an) /* { */ | |
1888 | ||
1889 | IMPL(bvs_icc_an) | |
1890 | if ( sp->v9_ccr & V9_icc_v_mask ) { | |
1891 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1892 | Rpc = Rnpc; | |
1893 | Rnpc = tpc; | |
1894 | return; | |
1895 | } | |
1896 | Rpc = Rnpc + 4; | |
1897 | Rnpc = Rnpc + 8; | |
1898 | ENDDEF | |
1899 | ||
1900 | #endif /* } */ | |
1901 | ||
1902 | ||
1903 | ||
1904 | ||
1905 | ||
1906 | ||
1907 | /* | |
1908 | * Instruction: sparcv9_bne_xcc_an | |
1909 | */ | |
1910 | ||
1911 | #if !defined(HAS_NATIVE_sparcv9_bne_xcc_an) /* { */ | |
1912 | ||
1913 | IMPL(bne_xcc_an) | |
1914 | if (!(sp->v9_ccr & V9_xcc_z_mask)) { | |
1915 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1916 | Rpc = Rnpc; | |
1917 | Rnpc = tpc; | |
1918 | return; | |
1919 | } | |
1920 | Rpc = Rnpc + 4; | |
1921 | Rnpc = Rnpc + 8; | |
1922 | ENDDEF | |
1923 | ||
1924 | #endif /* } */ | |
1925 | ||
1926 | ||
1927 | ||
1928 | ||
1929 | ||
1930 | /* | |
1931 | * Instruction: sparcv9_be_xcc_an | |
1932 | */ | |
1933 | ||
1934 | #if !defined(HAS_NATIVE_sparcv9_be_xcc_an) /* { */ | |
1935 | ||
1936 | IMPL(be_xcc_an) | |
1937 | if (sp->v9_ccr & V9_xcc_z_mask) { | |
1938 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1939 | Rpc = Rnpc; | |
1940 | Rnpc = tpc; | |
1941 | return; | |
1942 | } | |
1943 | Rpc = Rnpc + 4; | |
1944 | Rnpc = Rnpc + 8; | |
1945 | ENDDEF | |
1946 | ||
1947 | #endif /* } */ | |
1948 | ||
1949 | ||
1950 | ||
1951 | ||
1952 | ||
1953 | /* | |
1954 | * Instruction: sparcv9_bg_xcc_an | |
1955 | */ | |
1956 | ||
1957 | #if !defined(HAS_NATIVE_sparcv9_bg_xcc_an) /* { */ | |
1958 | ||
1959 | IMPL(bg_xcc_an) | |
1960 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1961 | if ( (sparcv9_cc_magic[cond_g] >> cc) &1 ) { | |
1962 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1963 | Rpc = Rnpc; | |
1964 | Rnpc = tpc; | |
1965 | return; | |
1966 | } | |
1967 | Rpc = Rnpc + 4; | |
1968 | Rnpc = Rnpc + 8; | |
1969 | ENDDEF | |
1970 | ||
1971 | #endif /* } */ | |
1972 | ||
1973 | ||
1974 | ||
1975 | ||
1976 | ||
1977 | /* | |
1978 | * Instruction: sparcv9_ble_xcc_an | |
1979 | */ | |
1980 | ||
1981 | #if !defined(HAS_NATIVE_sparcv9_ble_xcc_an) /* { */ | |
1982 | ||
1983 | IMPL(ble_xcc_an) | |
1984 | int cc = V9_ext_xcc(sp->v9_ccr); | |
1985 | if ( (sparcv9_cc_magic[cond_le] >> cc) &1 ) { | |
1986 | tvaddr_t tpc = Rpc + SBRoffset32; | |
1987 | Rpc = Rnpc; | |
1988 | Rnpc = tpc; | |
1989 | return; | |
1990 | } | |
1991 | Rpc = Rnpc + 4; | |
1992 | Rnpc = Rnpc + 8; | |
1993 | ENDDEF | |
1994 | ||
1995 | #endif /* } */ | |
1996 | ||
1997 | ||
1998 | ||
1999 | ||
2000 | ||
2001 | /* | |
2002 | * Instruction: sparcv9_bge_xcc_an | |
2003 | */ | |
2004 | ||
2005 | #if !defined(HAS_NATIVE_sparcv9_bge_xcc_an) /* { */ | |
2006 | ||
2007 | IMPL(bge_xcc_an) | |
2008 | int cc = V9_ext_xcc(sp->v9_ccr); | |
2009 | if ( (sparcv9_cc_magic[cond_ge] >> cc) &1 ) { | |
2010 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2011 | Rpc = Rnpc; | |
2012 | Rnpc = tpc; | |
2013 | return; | |
2014 | } | |
2015 | Rpc = Rnpc + 4; | |
2016 | Rnpc = Rnpc + 8; | |
2017 | ENDDEF | |
2018 | ||
2019 | #endif /* } */ | |
2020 | ||
2021 | ||
2022 | ||
2023 | ||
2024 | ||
2025 | /* | |
2026 | * Instruction: sparcv9_bl_xcc_an | |
2027 | */ | |
2028 | ||
2029 | #if !defined(HAS_NATIVE_sparcv9_bl_xcc_an) /* { */ | |
2030 | ||
2031 | IMPL(bl_xcc_an) | |
2032 | int cc = V9_ext_xcc(sp->v9_ccr); | |
2033 | if ( (sparcv9_cc_magic[cond_l] >> cc) &1 ) { | |
2034 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2035 | Rpc = Rnpc; | |
2036 | Rnpc = tpc; | |
2037 | return; | |
2038 | } | |
2039 | Rpc = Rnpc + 4; | |
2040 | Rnpc = Rnpc + 8; | |
2041 | ENDDEF | |
2042 | ||
2043 | #endif /* } */ | |
2044 | ||
2045 | ||
2046 | ||
2047 | ||
2048 | ||
2049 | /* | |
2050 | * Instruction: sparcv9_bgu_xcc_an | |
2051 | */ | |
2052 | ||
2053 | #if !defined(HAS_NATIVE_sparcv9_bgu_xcc_an) /* { */ | |
2054 | ||
2055 | IMPL(bgu_xcc_an) | |
2056 | int cc = V9_ext_xcc(sp->v9_ccr); | |
2057 | if ( (sparcv9_cc_magic[cond_gu] >> cc) &1 ) { | |
2058 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2059 | Rpc = Rnpc; | |
2060 | Rnpc = tpc; | |
2061 | return; | |
2062 | } | |
2063 | Rpc = Rnpc + 4; | |
2064 | Rnpc = Rnpc + 8; | |
2065 | ENDDEF | |
2066 | ||
2067 | #endif /* } */ | |
2068 | ||
2069 | ||
2070 | ||
2071 | ||
2072 | ||
2073 | /* | |
2074 | * Instruction: sparcv9_bleu_xcc_an | |
2075 | */ | |
2076 | ||
2077 | #if !defined(HAS_NATIVE_sparcv9_bleu_xcc_an) /* { */ | |
2078 | ||
2079 | IMPL(bleu_xcc_an) | |
2080 | int cc = V9_ext_xcc(sp->v9_ccr); | |
2081 | if ( (sparcv9_cc_magic[cond_leu] >> cc) &1 ) { | |
2082 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2083 | Rpc = Rnpc; | |
2084 | Rnpc = tpc; | |
2085 | return; | |
2086 | } | |
2087 | Rpc = Rnpc + 4; | |
2088 | Rnpc = Rnpc + 8; | |
2089 | ENDDEF | |
2090 | ||
2091 | #endif /* } */ | |
2092 | ||
2093 | ||
2094 | ||
2095 | ||
2096 | ||
2097 | /* | |
2098 | * Instruction: sparcv9_bcc_xcc_an | |
2099 | */ | |
2100 | ||
2101 | #if !defined(HAS_NATIVE_sparcv9_bcc_xcc_an) /* { */ | |
2102 | ||
2103 | IMPL(bcc_xcc_an) | |
2104 | if ( !(sp->v9_ccr & V9_xcc_c_mask) ) { | |
2105 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2106 | Rpc = Rnpc; | |
2107 | Rnpc = tpc; | |
2108 | return; | |
2109 | } | |
2110 | Rpc = Rnpc + 4; | |
2111 | Rnpc = Rnpc + 8; | |
2112 | ENDDEF | |
2113 | ||
2114 | #endif /* } */ | |
2115 | ||
2116 | ||
2117 | ||
2118 | ||
2119 | ||
2120 | /* | |
2121 | * Instruction: sparcv9_bcs_xcc_an | |
2122 | */ | |
2123 | ||
2124 | #if !defined(HAS_NATIVE_sparcv9_bcs_xcc_an) /* { */ | |
2125 | ||
2126 | IMPL(bcs_xcc_an) | |
2127 | if ( sp->v9_ccr & V9_xcc_c_mask ) { | |
2128 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2129 | Rpc = Rnpc; | |
2130 | Rnpc = tpc; | |
2131 | return; | |
2132 | } | |
2133 | Rpc = Rnpc + 4; | |
2134 | Rnpc = Rnpc + 8; | |
2135 | ENDDEF | |
2136 | ||
2137 | #endif /* } */ | |
2138 | ||
2139 | ||
2140 | ||
2141 | ||
2142 | ||
2143 | /* | |
2144 | * Instruction: sparcv9_bpos_xcc_an | |
2145 | */ | |
2146 | ||
2147 | #if !defined(HAS_NATIVE_sparcv9_bpos_xcc_an) /* { */ | |
2148 | ||
2149 | IMPL(bpos_xcc_an) | |
2150 | if ( !(sp->v9_ccr & V9_xcc_n_mask) ) { | |
2151 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2152 | Rpc = Rnpc; | |
2153 | Rnpc = tpc; | |
2154 | return; | |
2155 | } | |
2156 | Rpc = Rnpc + 4; | |
2157 | Rnpc = Rnpc + 8; | |
2158 | ENDDEF | |
2159 | ||
2160 | #endif /* } */ | |
2161 | ||
2162 | ||
2163 | ||
2164 | ||
2165 | ||
2166 | /* | |
2167 | * Instruction: sparcv9_bneg_xcc_an | |
2168 | */ | |
2169 | ||
2170 | #if !defined(HAS_NATIVE_sparcv9_bneg_xcc_an) /* { */ | |
2171 | ||
2172 | IMPL(bneg_xcc_an) | |
2173 | if ( sp->v9_ccr & V9_xcc_n_mask ) { | |
2174 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2175 | Rpc = Rnpc; | |
2176 | Rnpc = tpc; | |
2177 | return; | |
2178 | } | |
2179 | Rpc = Rnpc + 4; | |
2180 | Rnpc = Rnpc + 8; | |
2181 | ENDDEF | |
2182 | ||
2183 | #endif /* } */ | |
2184 | ||
2185 | ||
2186 | ||
2187 | ||
2188 | ||
2189 | /* | |
2190 | * Instruction: sparcv9_bvc_xcc_an | |
2191 | */ | |
2192 | ||
2193 | #if !defined(HAS_NATIVE_sparcv9_bvc_xcc_an) /* { */ | |
2194 | ||
2195 | IMPL(bvc_xcc_an) | |
2196 | if ( !(sp->v9_ccr & V9_xcc_v_mask) ) { | |
2197 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2198 | Rpc = Rnpc; | |
2199 | Rnpc = tpc; | |
2200 | return; | |
2201 | } | |
2202 | Rpc = Rnpc + 4; | |
2203 | Rnpc = Rnpc + 8; | |
2204 | ENDDEF | |
2205 | ||
2206 | #endif /* } */ | |
2207 | ||
2208 | ||
2209 | ||
2210 | ||
2211 | ||
2212 | /* | |
2213 | * Instruction: sparcv9_bvs_xcc_an | |
2214 | */ | |
2215 | ||
2216 | #if !defined(HAS_NATIVE_sparcv9_bvs_xcc_an) /* { */ | |
2217 | ||
2218 | IMPL(bvs_xcc_an) | |
2219 | if ( sp->v9_ccr & V9_xcc_v_mask ) { | |
2220 | tvaddr_t tpc = Rpc + SBRoffset32; | |
2221 | Rpc = Rnpc; | |
2222 | Rnpc = tpc; | |
2223 | return; | |
2224 | } | |
2225 | Rpc = Rnpc + 4; | |
2226 | Rnpc = Rnpc + 8; | |
2227 | ENDDEF | |
2228 | ||
2229 | #endif /* } */ | |
2230 | ||
2231 | ||
2232 | ||
2233 | ||
2234 | ||
2235 | ||
2236 | ||
2237 | ||
2238 | ||
2239 | ||
2240 | /* | |
2241 | * Instruction: sparcv9_brz | |
2242 | */ | |
2243 | ||
2244 | #if !defined(HAS_NATIVE_sparcv9_brz) /* { */ | |
2245 | ||
2246 | IMPL(brz) | |
2247 | if (0ULL == Rsrc1) { | |
2248 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2249 | Rpc = Rnpc; | |
2250 | Rnpc = tpc; | |
2251 | return; | |
2252 | } | |
2253 | ENDI | |
2254 | ||
2255 | #endif /* } */ | |
2256 | ||
2257 | ||
2258 | ||
2259 | ||
2260 | ||
2261 | /* | |
2262 | * Instruction: sparcv9_brlez | |
2263 | */ | |
2264 | ||
2265 | #if !defined(HAS_NATIVE_sparcv9_brlez) /* { */ | |
2266 | ||
2267 | IMPL(brlez) | |
2268 | if (SRsrc1 <= 0LL) { | |
2269 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2270 | Rpc = Rnpc; | |
2271 | Rnpc = tpc; | |
2272 | return; | |
2273 | } | |
2274 | ENDI | |
2275 | ||
2276 | #endif /* } */ | |
2277 | ||
2278 | ||
2279 | ||
2280 | ||
2281 | ||
2282 | /* | |
2283 | * Instruction: sparcv9_brlz | |
2284 | */ | |
2285 | ||
2286 | #if !defined(HAS_NATIVE_sparcv9_brlz) /* { */ | |
2287 | ||
2288 | IMPL(brlz) | |
2289 | if (SRsrc1 < 0LL) { | |
2290 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2291 | Rpc = Rnpc; | |
2292 | Rnpc = tpc; | |
2293 | return; | |
2294 | } | |
2295 | ENDI | |
2296 | ||
2297 | #endif /* } */ | |
2298 | ||
2299 | ||
2300 | ||
2301 | ||
2302 | ||
2303 | /* | |
2304 | * Instruction: sparcv9_brnz | |
2305 | */ | |
2306 | ||
2307 | #if !defined(HAS_NATIVE_sparcv9_brnz) /* { */ | |
2308 | ||
2309 | IMPL(brnz) | |
2310 | if (SRsrc1 != 0LL) { | |
2311 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2312 | Rpc = Rnpc; | |
2313 | Rnpc = tpc; | |
2314 | return; | |
2315 | } | |
2316 | ENDI | |
2317 | ||
2318 | #endif /* } */ | |
2319 | ||
2320 | ||
2321 | ||
2322 | ||
2323 | ||
2324 | /* | |
2325 | * Instruction: sparcv9_brgz | |
2326 | */ | |
2327 | ||
2328 | #if !defined(HAS_NATIVE_sparcv9_brgz) /* { */ | |
2329 | ||
2330 | IMPL(brgz) | |
2331 | if (SRsrc1 > 0LL) { | |
2332 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2333 | Rpc = Rnpc; | |
2334 | Rnpc = tpc; | |
2335 | return; | |
2336 | } | |
2337 | ENDI | |
2338 | ||
2339 | #endif /* } */ | |
2340 | ||
2341 | ||
2342 | ||
2343 | ||
2344 | ||
2345 | /* | |
2346 | * Instruction: sparcv9_brgez | |
2347 | */ | |
2348 | ||
2349 | #if !defined(HAS_NATIVE_sparcv9_brgez) /* { */ | |
2350 | ||
2351 | IMPL(brgez) | |
2352 | if (SRsrc1 >= 0LL) { | |
2353 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2354 | Rpc = Rnpc; | |
2355 | Rnpc = tpc; | |
2356 | return; | |
2357 | } | |
2358 | ENDI | |
2359 | ||
2360 | #endif /* } */ | |
2361 | ||
2362 | ||
2363 | ||
2364 | ||
2365 | ||
2366 | /* | |
2367 | * Instruction: sparcv9_brz_an | |
2368 | */ | |
2369 | ||
2370 | #if !defined(HAS_NATIVE_sparcv9_brz_an) /* { */ | |
2371 | ||
2372 | IMPL(brz_an) | |
2373 | if ( Rsrc1 == 0LL ) { | |
2374 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2375 | Rpc = Rnpc; | |
2376 | Rnpc = tpc; | |
2377 | return; | |
2378 | } | |
2379 | Rpc = Rnpc + 4; | |
2380 | Rnpc = Rnpc + 8; | |
2381 | ENDDEF | |
2382 | ||
2383 | #endif /* } */ | |
2384 | ||
2385 | ||
2386 | ||
2387 | ||
2388 | ||
2389 | /* | |
2390 | * Instruction: sparcv9_brlez_an | |
2391 | */ | |
2392 | ||
2393 | #if !defined(HAS_NATIVE_sparcv9_brlez_an) /* { */ | |
2394 | ||
2395 | IMPL(brlez_an) | |
2396 | if ( SRsrc1 <= 0LL ) { | |
2397 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2398 | Rpc = Rnpc; | |
2399 | Rnpc = tpc; | |
2400 | return; | |
2401 | } | |
2402 | Rpc = Rnpc + 4; | |
2403 | Rnpc = Rnpc + 8; | |
2404 | ENDDEF | |
2405 | ||
2406 | #endif /* } */ | |
2407 | ||
2408 | ||
2409 | ||
2410 | ||
2411 | ||
2412 | /* | |
2413 | * Instruction: sparcv9_brlz_an | |
2414 | */ | |
2415 | ||
2416 | #if !defined(HAS_NATIVE_sparcv9_brlz_an) /* { */ | |
2417 | ||
2418 | IMPL(brlz_an) | |
2419 | if ( SRsrc1 < 0LL ) { | |
2420 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2421 | Rpc = Rnpc; | |
2422 | Rnpc = tpc; | |
2423 | return; | |
2424 | } | |
2425 | Rpc = Rnpc + 4; | |
2426 | Rnpc = Rnpc + 8; | |
2427 | ENDDEF | |
2428 | ||
2429 | #endif /* } */ | |
2430 | ||
2431 | ||
2432 | ||
2433 | ||
2434 | ||
2435 | /* | |
2436 | * Instruction: sparcv9_brnz_an | |
2437 | */ | |
2438 | ||
2439 | #if !defined(HAS_NATIVE_sparcv9_brnz_an) /* { */ | |
2440 | ||
2441 | IMPL(brnz_an) | |
2442 | if ( SRsrc1 != 0LL ) { | |
2443 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2444 | Rpc = Rnpc; | |
2445 | Rnpc = tpc; | |
2446 | return; | |
2447 | } | |
2448 | Rpc = Rnpc + 4; | |
2449 | Rnpc = Rnpc + 8; | |
2450 | ENDDEF | |
2451 | ||
2452 | #endif /* } */ | |
2453 | ||
2454 | ||
2455 | ||
2456 | ||
2457 | ||
2458 | /* | |
2459 | * Instruction: sparcv9_brgz_an | |
2460 | */ | |
2461 | ||
2462 | #if !defined(HAS_NATIVE_sparcv9_brgz_an) /* { */ | |
2463 | ||
2464 | IMPL(brgz_an) | |
2465 | if ( SRsrc1 > 0LL ) { | |
2466 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2467 | Rpc = Rnpc; | |
2468 | Rnpc = tpc; | |
2469 | return; | |
2470 | } | |
2471 | Rpc = Rnpc + 4; | |
2472 | Rnpc = Rnpc + 8; | |
2473 | ENDDEF | |
2474 | ||
2475 | #endif /* } */ | |
2476 | ||
2477 | ||
2478 | ||
2479 | ||
2480 | ||
2481 | /* | |
2482 | * Instruction: sparcv9_brgez_an | |
2483 | */ | |
2484 | ||
2485 | #if !defined(HAS_NATIVE_sparcv9_brgez_an) /* { */ | |
2486 | ||
2487 | IMPL(brgez_an) | |
2488 | if ( SRsrc1 >= 0LL ) { | |
2489 | tvaddr_t tpc = Rpc + SBRreg_off32; | |
2490 | Rpc = Rnpc; | |
2491 | Rnpc = tpc; | |
2492 | return; | |
2493 | } | |
2494 | Rpc = Rnpc + 4; | |
2495 | Rnpc = Rnpc + 8; | |
2496 | ENDDEF | |
2497 | ||
2498 | #endif /* } */ | |
2499 | ||
2500 | ||
2501 | ||
2502 | ||
2503 | ||
2504 | /* | |
2505 | * Instruction: sparcv9_fadds | |
2506 | */ | |
2507 | ||
2508 | #if !defined(HAS_NATIVE_fadds) /* { */ | |
2509 | ||
2510 | IMPL(fadds) | |
2511 | FP_EXEC_FPU_ON_CHECK; | |
2512 | FPU_NOT_IMPLEMENTED("fadds") | |
2513 | ENDDEF | |
2514 | ||
2515 | #endif /* } */ | |
2516 | ||
2517 | ||
2518 | ||
2519 | ||
2520 | ||
2521 | /* | |
2522 | * Instruction: sparcv9_faddd | |
2523 | */ | |
2524 | ||
2525 | #if !defined(HAS_NATIVE_faddd) /* { */ | |
2526 | ||
2527 | IMPL(faddd) | |
2528 | FP_EXEC_FPU_ON_CHECK; | |
2529 | if (F64src1 == 0 && F64src2 == 0) | |
2530 | F64dest = 0; | |
2531 | else { | |
2532 | FPU_NOT_IMPLEMENTED("faddd") | |
2533 | } | |
2534 | ENDI | |
2535 | ||
2536 | #endif /* } */ | |
2537 | ||
2538 | ||
2539 | ||
2540 | ||
2541 | ||
2542 | /* | |
2543 | * Instruction: sparcv9_fsubs | |
2544 | */ | |
2545 | ||
2546 | #if !defined(HAS_NATIVE_fsubs) /* { */ | |
2547 | ||
2548 | IMPL(fsubs) | |
2549 | FP_EXEC_FPU_ON_CHECK; | |
2550 | FPU_NOT_IMPLEMENTED("fsubs") | |
2551 | ENDDEF | |
2552 | ||
2553 | #endif /* } */ | |
2554 | ||
2555 | ||
2556 | ||
2557 | ||
2558 | ||
2559 | /* | |
2560 | * Instruction: sparcv9_fsubd | |
2561 | */ | |
2562 | ||
2563 | #if !defined(HAS_NATIVE_fsubd) /* { */ | |
2564 | ||
2565 | IMPL(fsubd) | |
2566 | FP_EXEC_FPU_ON_CHECK; | |
2567 | FPU_NOT_IMPLEMENTED("fsubd") | |
2568 | ENDDEF | |
2569 | ||
2570 | #endif /* } */ | |
2571 | ||
2572 | ||
2573 | ||
2574 | ||
2575 | ||
2576 | /* | |
2577 | * Instruction: sparcv9_fmuls | |
2578 | */ | |
2579 | ||
2580 | #if !defined(HAS_NATIVE_fmuls) /* { */ | |
2581 | ||
2582 | IMPL(fmuls) | |
2583 | FP_EXEC_FPU_ON_CHECK; | |
2584 | FPU_NOT_IMPLEMENTED("fmuls") | |
2585 | ENDDEF | |
2586 | ||
2587 | #endif /* } */ | |
2588 | ||
2589 | ||
2590 | ||
2591 | ||
2592 | ||
2593 | /* | |
2594 | * Instruction: sparcv9_fmuld | |
2595 | */ | |
2596 | ||
2597 | #if !defined(HAS_NATIVE_fmuld) /* { */ | |
2598 | ||
2599 | IMPL(fmuld) | |
2600 | FP_EXEC_FPU_ON_CHECK; | |
2601 | if (F64src1 == 0 || F64src2 == 0) | |
2602 | F64dest = 0; | |
2603 | else { | |
2604 | FPU_NOT_IMPLEMENTED("fmuld") | |
2605 | } | |
2606 | ENDI | |
2607 | ||
2608 | #endif /* } */ | |
2609 | ||
2610 | ||
2611 | ||
2612 | ||
2613 | ||
2614 | /* | |
2615 | * Instruction: sparcv9_fdivs | |
2616 | */ | |
2617 | ||
/*
 * Software fallbacks for the FP divide/sqrt/smuld/convert instructions:
 * when no native host implementation exists (HAS_NATIVE_* undefined,
 * normally provided by hostnative_asm.S), each stub only performs the
 * FPU-enabled check and then aborts via FPU_NOT_IMPLEMENTED.
 */
#if !defined(HAS_NATIVE_fdivs) /* { */

IMPL(fdivs)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fdivs")
ENDDEF

#endif /* } */


/*
 * Instruction: sparcv9_fdivd
 */

#if !defined(HAS_NATIVE_fdivd) /* { */

IMPL(fdivd)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fdivd")
ENDDEF

#endif /* } */


/*
 * Instruction: sparcv9_fsmuld
 */

#if !defined(HAS_NATIVE_fsmuld) /* { */

IMPL(fsmuld)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fsmuld")
ENDDEF

#endif /* } */


/*
 * Instruction: sparcv9_fstod
 */

#if !defined(HAS_NATIVE_fstod) /* { */

IMPL(fstod)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fstod")
ENDDEF

#endif /* } */


/*
 * Instruction: sparcv9_fdtos
 */

#if !defined(HAS_NATIVE_fdtos) /* { */

IMPL(fdtos)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fdtos")
ENDDEF

#endif /* } */


/*
 * Instruction: sparcv9_fsqrts
 */

#if !defined(HAS_NATIVE_fsqrts) /* { */

IMPL(fsqrts)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fsqrts")
ENDDEF

#endif /* } */


/*
 * Instruction: sparcv9_fsqrtd
 */

#if !defined(HAS_NATIVE_fsqrtd) /* { */

IMPL(fsqrtd)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fsqrtd")
ENDDEF

#endif /* } */
2727 | ||
2728 | ||
2729 | ||
2730 | ||
2731 | ||
2732 | /* | |
2733 | * Instruction: sparcv9_fmovs | |
2734 | */ | |
2735 | ||
/* NOTE(review): guard is HAS_NATIVE_sparcv9_fmovs here, but other ops
 * use HAS_NATIVE_<op> without the prefix — confirm which the native
 * generator actually defines. */
#if !defined(HAS_NATIVE_sparcv9_fmovs) /* { */

/* fmovs: unconditional single-precision register copy; clears the
 * FSR cexc/ftt fields as a side effect. */
IMPL(fmovs)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1;
	FP_CLEAR_CEXC_FTT(sp);
ENDI

#endif /* } */
2745 | ||
2746 | ||
2747 | ||
2748 | ||
2749 | ||
2750 | /* | |
2751 | * Instruction: sparcv9_fmovd | |
2752 | */ | |
2753 | ||
#if !defined(HAS_NATIVE_sparcv9_fmovd) /* { */

/* fmovd: unconditional double-precision register copy; clears the
 * FSR cexc/ftt fields as a side effect. */
IMPL(fmovd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1;
	FP_CLEAR_CEXC_FTT(sp);
ENDI

#endif /* } */
2763 | ||
2764 | ||
2765 | ||
2766 | ||
2767 | ||
2768 | IMPL( fmovscc ) | |
2769 | int ccr; | |
2770 | FP_EXEC_FPU_ON_CHECK; | |
2771 | ccr = sp->v9_ccr; | |
2772 | if (MOVCC_cc) ccr>>=4; | |
2773 | ||
2774 | if ( (sparcv9_cc_magic[MOVCC_cond] >> (ccr & 0xf)) &1 ) { | |
2775 | F32dest = F32src1; | |
2776 | } | |
2777 | NEXT_INSTN(sp); | |
2778 | ENDDEF | |
2779 | ||
2780 | ||
2781 | ||
2782 | IMPL( fmovsfcc ) | |
2783 | int ccr; | |
2784 | FP_EXEC_FPU_ON_CHECK; | |
2785 | if (MOVCC_cc == 0) | |
2786 | ccr = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
2787 | else | |
2788 | ccr = V9_FSR_FCCN(sp->v9_fsr_ctrl, MOVCC_cc); | |
2789 | ||
2790 | if ( (sparcv9_fcc_magic[MOVCC_cond] >> ccr) & 1 ) { | |
2791 | F32dest = F32src1; | |
2792 | } | |
2793 | NEXT_INSTN(sp); | |
2794 | ENDDEF | |
2795 | ||
2796 | ||
2797 | ||
2798 | IMPL( fmovdcc ) | |
2799 | int ccr; | |
2800 | FP_EXEC_FPU_ON_CHECK; | |
2801 | ccr = sp->v9_ccr; | |
2802 | if (MOVCC_cc) ccr>>=4; | |
2803 | ||
2804 | if ( (sparcv9_cc_magic[MOVCC_cond] >> (ccr & 0xf)) &1 ) { | |
2805 | F64dest = F64src1; | |
2806 | } | |
2807 | NEXT_INSTN(sp); | |
2808 | ENDDEF | |
2809 | ||
/*
 * fmovrs_*: conditionally copy a single-precision FP register based on
 * a signed comparison of integer register rs1 (SRsrc1) against zero.
 * One variant per register-condition encoding; none of them touch
 * cexc/ftt.
 */
IMPL( fmovrs_z )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 == 0)
		F32dest = F32src2;
ENDI
IMPL( fmovrs_lez )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 <= 0)
		F32dest = F32src2;
ENDI
IMPL( fmovrs_lz )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 < 0)
		F32dest = F32src2;
ENDI
IMPL( fmovrs_nz )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 != 0)
		F32dest = F32src2;
ENDI
IMPL( fmovrs_gz )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 > 0)
		F32dest = F32src2;
ENDI
IMPL( fmovrs_gez )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 >= 0)
		F32dest = F32src2;
ENDI
2840 | ||
/*
 * fmovrd_*: double-precision counterparts of fmovrs_* — conditional FP
 * register copy keyed off a signed compare of rs1 (SRsrc1) with zero.
 */
IMPL( fmovrd_z )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 == 0)
		F64dest = F64src2;
ENDI
IMPL( fmovrd_lez )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 <= 0)
		F64dest = F64src2;
ENDI
IMPL( fmovrd_lz )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 < 0)
		F64dest = F64src2;
ENDI
IMPL( fmovrd_nz )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 != 0)
		F64dest = F64src2;
ENDI
IMPL( fmovrd_gz )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 > 0)
		F64dest = F64src2;
ENDI
IMPL( fmovrd_gez )
	FP_EXEC_FPU_ON_CHECK;
	if (SRsrc1 >= 0)
		F64dest = F64src2;
ENDI
2871 | ||
2872 | ||
2873 | ||
2874 | IMPL( fmovdfcc ) | |
2875 | int ccr; | |
2876 | FP_EXEC_FPU_ON_CHECK; | |
2877 | if (MOVCC_cc == 0) | |
2878 | ccr = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
2879 | else | |
2880 | ccr = V9_FSR_FCCN(sp->v9_fsr_ctrl, MOVCC_cc); | |
2881 | ||
2882 | if ( (sparcv9_fcc_magic[MOVCC_cond] >> ccr) & 1 ) { | |
2883 | F64dest = F64src1; | |
2884 | } | |
2885 | NEXT_INSTN(sp); | |
2886 | ENDDEF | |
2887 | ||
2888 | ||
2889 | ||
2890 | ||
2891 | ||
2892 | /* | |
2893 | * Instruction: sparcv9_fnegs | |
2894 | */ | |
2895 | ||
/*
 * fnegs/fnegd/fabss/fabsd operate directly on the raw IEEE754 bit
 * patterns: negate toggles the sign bit, abs clears it.  Neither can
 * raise an FP exception, so they just clear cexc/ftt.
 */
#if !defined(HAS_NATIVE_sparcv9_fnegs) /* { */

IMPL(fnegs)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 ^ (1u << 31);
	FP_CLEAR_CEXC_FTT(sp);
ENDI

#endif /* } */


/*
 * Instruction: sparcv9_fnegd
 */

#if !defined(HAS_NATIVE_sparcv9_fnegd) /* { */

IMPL(fnegd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 ^ (1ull << 63);
	FP_CLEAR_CEXC_FTT(sp);
ENDI

#endif /* } */


/*
 * Instruction: sparcv9_fabss
 */

#if !defined(HAS_NATIVE_sparcv9_fabss) /* { */

IMPL(fabss)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 & 0x7fffffffu;
	FP_CLEAR_CEXC_FTT(sp);
ENDI

#endif /* } */


/*
 * Instruction: sparcv9_fabsd
 */

#if !defined(HAS_NATIVE_sparcv9_fabsd) /* { */

IMPL(fabsd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 & 0x7fffffffffffffffull;
	FP_CLEAR_CEXC_FTT(sp);
ENDI

#endif /* } */
2959 | ||
2960 | ||
2961 | ||
2962 | ||
2963 | ||
2964 | /* | |
2965 | * Instruction: sparcv9_fstoi | |
2966 | */ | |
2967 | ||
/*
 * Software fallbacks for the FP <-> integer conversion instructions:
 * used only when no native host implementation is available; each stub
 * checks the FPU is enabled and then aborts via FPU_NOT_IMPLEMENTED.
 */
#if !defined(HAS_NATIVE_fstoi) /* { */

IMPL(fstoi)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fstoi")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fitos
 */

#if !defined(HAS_NATIVE_fitos) /* { */

IMPL(fitos)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fitos")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fdtoi
 */

#if !defined(HAS_NATIVE_fdtoi) /* { */

IMPL(fdtoi)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fdtoi")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fitod
 */

#if !defined(HAS_NATIVE_fitod) /* { */

IMPL(fitod)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fitod")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fstox
 */

#if !defined(HAS_NATIVE_fstox) /* { */

IMPL(fstox)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fstox")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fxtos
 */

#if !defined(HAS_NATIVE_fxtos) /* { */

IMPL(fxtos)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fxtos")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fdtox
 */

#if !defined(HAS_NATIVE_fdtox) /* { */

IMPL(fdtox)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fdtox")
ENDDEF

#endif /* } */

/*
 * Instruction: sparcv9_fxtod
 */

#if !defined(HAS_NATIVE_fxtod) /* { */

IMPL(fxtod)
	FP_EXEC_FPU_ON_CHECK;
	FPU_NOT_IMPLEMENTED("fxtod")
ENDDEF

#endif /* } */
3079 | ||
3080 | ||
3081 | /* | |
3082 | * FP register logical operations. | |
3083 | */ | |
3084 | ||
/*
 * VIS-style FP register logical operations: pure bitwise ops on the
 * raw register contents (no IEEE semantics, no exception state
 * update).  "1"/"2" in the names selects which source is complemented;
 * "X" marks the single-source forms; trailing s/d selects 32/64-bit.
 */
IMPL(fzerod)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = 0;
ENDI

IMPL(fzeros)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = 0;
ENDI

IMPL(foned)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = (uint64_t)-1;	/* all ones */
ENDI

IMPL(fones)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = (uint32_t)-1;	/* all ones */
ENDI

IMPL(fandd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 & F64src2;
ENDI

IMPL(fandnot1d)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = ~F64src1 & F64src2;
ENDI

IMPL(fandnot1s)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = ~F32src1 & F32src2;
ENDI

IMPL(fandnot2d)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 & ~F64src2;
ENDI

IMPL(fandnot2s)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 & ~F32src2;
ENDI

IMPL(fands)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 & F32src2;
ENDI

IMPL(fnandd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = ~(F64src1 & F64src2);
ENDI

IMPL(fnands)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = ~(F32src1 & F32src2);
ENDI

IMPL(fnord)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = ~(F64src1 | F64src2);
ENDI

IMPL(fnors)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = ~(F32src1 | F32src2);
ENDI

IMPL(fnotXd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = ~F64src1;
ENDI

IMPL(fnotXs)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = ~F32src1;
ENDI

IMPL(ford)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 | F64src2;
ENDI

IMPL(fornot1d)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = ~F64src1 | F64src2;
ENDI

IMPL(fornot1s)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = ~F32src1 | F32src2;
ENDI

IMPL(fornot2d)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 | ~F64src2;
ENDI

IMPL(fornot2s)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 | ~F32src2;
ENDI

IMPL(fors)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 | F32src2;
ENDI

IMPL(fsrcXd)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1;	/* raw copy, no cexc/ftt clear */
ENDI

IMPL(fsrcXs)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1;	/* raw copy, no cexc/ftt clear */
ENDI

IMPL(fxnord)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = ~(F64src1 ^ F64src2);
ENDI

IMPL(fxnors)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = ~(F32src1 ^ F32src2);
ENDI

IMPL(fxord)
	FP_EXEC_FPU_ON_CHECK;
	F64dest = F64src1 ^ F64src2;
ENDI

IMPL(fxors)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 ^ F32src2;
ENDI
3224 | ||
3225 | /* | |
3226 | * FP register fixed-point partitioned add and subtract. | |
3227 | */ | |
3228 | ||
3229 | IMPL(fpadd16) | |
3230 | FP_EXEC_FPU_ON_CHECK; | |
3231 | uint16_t s1, s2, d; | |
3232 | uint64_t fs1, fs2, res; | |
3233 | uint_t i; | |
3234 | ||
3235 | fs1 = F64src1; | |
3236 | fs2 = F64src2; | |
3237 | res = 0; | |
3238 | for (i = 0; i < 4; i++) { | |
3239 | s1 = (uint16_t) fs1; | |
3240 | s2 = (uint16_t) fs2; | |
3241 | d = s1 + s2; | |
3242 | res >>= 16; | |
3243 | res |= (uint64_t)d << 48; | |
3244 | fs1 >>= 16; | |
3245 | fs2 >>= 16; | |
3246 | } | |
3247 | F64dest = res; | |
3248 | ENDI | |
3249 | ||
3250 | IMPL(fpadd16s) | |
3251 | FP_EXEC_FPU_ON_CHECK; | |
3252 | uint16_t s1, s2, d; | |
3253 | uint32_t fs1, fs2, res; | |
3254 | uint_t i; | |
3255 | ||
3256 | fs1 = F32src1; | |
3257 | fs2 = F32src2; | |
3258 | res = 0; | |
3259 | for (i = 0; i < 2; i++) { | |
3260 | s1 = (uint16_t) fs1; | |
3261 | s2 = (uint16_t) fs2; | |
3262 | d = s1 + s2; | |
3263 | res >>= 16; | |
3264 | res |= (uint32_t)d << 16; | |
3265 | fs1 >>= 16; | |
3266 | fs2 >>= 16; | |
3267 | } | |
3268 | F32dest = res; | |
3269 | ENDI | |
3270 | ||
3271 | IMPL(fpadd32) | |
3272 | FP_EXEC_FPU_ON_CHECK; | |
3273 | uint32_t s1, s2, d; | |
3274 | uint64_t fs1, fs2, res; | |
3275 | uint_t i; | |
3276 | ||
3277 | fs1 = F64src1; | |
3278 | fs2 = F64src2; | |
3279 | res = 0; | |
3280 | for (i = 0; i < 2; i++) { | |
3281 | s1 = (uint32_t) fs1; | |
3282 | s2 = (uint32_t) fs2; | |
3283 | d = s1 + s2; | |
3284 | res >>= 32; | |
3285 | res |= (uint64_t)d << 32; | |
3286 | fs1 >>= 32; | |
3287 | fs2 >>= 32; | |
3288 | } | |
3289 | F64dest = res; | |
3290 | ENDI | |
3291 | ||
/* fpadd32s: single 32-bit lane, so a plain modular add suffices. */
IMPL(fpadd32s)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 + F32src2;
ENDI
3296 | ||
3297 | IMPL(fpsub16) | |
3298 | FP_EXEC_FPU_ON_CHECK; | |
3299 | uint16_t s1, s2, d; | |
3300 | uint64_t fs1, fs2, res; | |
3301 | uint_t i; | |
3302 | ||
3303 | fs1 = F64src1; | |
3304 | fs2 = F64src2; | |
3305 | res = 0; | |
3306 | for (i = 0; i < 4; i++) { | |
3307 | s1 = (uint16_t) fs1; | |
3308 | s2 = (uint16_t) fs2; | |
3309 | d = s1 - s2; | |
3310 | res >>= 16; | |
3311 | res |= (uint64_t)d << 48; | |
3312 | fs1 >>= 16; | |
3313 | fs2 >>= 16; | |
3314 | } | |
3315 | F64dest = res; | |
3316 | ENDI | |
3317 | ||
3318 | IMPL(fpsub16s) | |
3319 | FP_EXEC_FPU_ON_CHECK; | |
3320 | uint16_t s1, s2, d; | |
3321 | uint32_t fs1, fs2, res; | |
3322 | uint_t i; | |
3323 | ||
3324 | fs1 = F32src1; | |
3325 | fs2 = F32src2; | |
3326 | res = 0; | |
3327 | for (i = 0; i < 2; i++) { | |
3328 | s1 = (uint16_t) fs1; | |
3329 | s2 = (uint16_t) fs2; | |
3330 | d = s1 - s2; | |
3331 | res >>= 16; | |
3332 | res |= (uint32_t)d << 16; | |
3333 | fs1 >>= 16; | |
3334 | fs2 >>= 16; | |
3335 | } | |
3336 | F32dest = res; | |
3337 | ENDI | |
3338 | ||
3339 | IMPL(fpsub32) | |
3340 | FP_EXEC_FPU_ON_CHECK; | |
3341 | uint32_t s1, s2, d; | |
3342 | uint64_t fs1, fs2, res; | |
3343 | uint_t i; | |
3344 | ||
3345 | fs1 = F64src1; | |
3346 | fs2 = F64src2; | |
3347 | res = 0; | |
3348 | for (i = 0; i < 2; i++) { | |
3349 | s1 = (uint32_t) fs1; | |
3350 | s2 = (uint32_t) fs2; | |
3351 | d = s1 - s2; | |
3352 | res >>= 32; | |
3353 | res |= (uint64_t)d << 32; | |
3354 | fs1 >>= 32; | |
3355 | fs2 >>= 32; | |
3356 | } | |
3357 | F64dest = res; | |
3358 | ENDI | |
3359 | ||
/* fpsub32s: single 32-bit lane, so a plain modular subtract suffices. */
IMPL(fpsub32s)
	FP_EXEC_FPU_ON_CHECK;
	F32dest = F32src1 - F32src2;
ENDI
3364 | ||
3365 | ||
3366 | ||
3367 | ||
3368 | ||
3369 | ||
3370 | ||
3371 | ||
3372 | /* ------------------------------------------------------------ */ | |
3373 | ||
3374 | ||
3375 | /* | |
3376 | * Basic branch instructions | |
3377 | */ | |
3378 | ||
3379 | ||
/* Note: special case - normally taken branches always execute their
 * delay slots; the "always"/"never" forms below are the exception. */

IMPL( bralways_ds_annul )	/* branch always annul delay slot */
	tvaddr_t xpc;

	/* delay slot annulled: pc and npc both move to the target */
	xpc = Rpc + SBRoffset32;
	Rpc = xpc;
	Rnpc = xpc + 4;
ENDDEF


IMPL( bralways_ds )	/* branch always executing delay slot */
	tvaddr_t xpc;

	/* delay slot (current npc) executes first, then the target */
	xpc = Rpc + SBRoffset32;
	Rpc = Rnpc;
	Rnpc = xpc;
ENDDEF

IMPL( brnever_ds_annul )	/* branch never, but annul delay slot */
	tvaddr_t xpc;

	/* fall through past the annulled delay-slot instruction */
	xpc = Rnpc + 4;
	Rpc = xpc;
	Rnpc = xpc + 4;
ENDDEF
3407 | ||
3408 | ||
3409 | ||
3410 | ||
3411 | /* ------------------------------------------------------------ */ | |
3412 | ||
3413 | ||
3414 | /* | |
3415 | * Call instruction + Jump and links ... | |
3416 | */ | |
3417 | ||
/*
 * call: pc-relative control transfer that links the caller's pc into
 * %o7.  The order of operations matters: the VA-hole check may abort
 * the instruction before any architectural state is touched.
 */
IMPL( call )
	tvaddr_t tpc, opc;
	sparcv9_cpu_t * v9p;

	v9p = (sparcv9_cpu_t *)(sp->specificp);

	opc = Rpc;
	tpc = opc + (sint64_t)Simm32;	/* sign-extended displacement */
	tpc &= ~0x3;			/* instruction alignment */
	if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, tpc)) return;
	Rpc = Rnpc;
	Rnpc = tpc;

	/* 32-bit address masking of the link value when PSTATE.AM set */
	if (v9p->pstate.addr_mask) opc &= MASK64(31,0); /* FIXME: SV9_ID125 ? */

	IReg( Reg_sparcv9_o7 ) = opc;	/* link register */
ENDDEF




/*
 * jmpl rs1+imm, rd: indirect jump with link.  Misaligned targets trap;
 * the VA-hole check happens before any state update.
 */
IMPL( jmpl_imm )
	tvaddr_t opc, npc;
	sparcv9_cpu_t * v9p;

	v9p = (sparcv9_cpu_t *)(sp->specificp);
	npc = Rsrc1 + Simm16;
	if ((npc & 3) != 0) {
		v9p->post_precise_trap(sp,
		    Sparcv9_trap_mem_address_not_aligned);
		return;
	}
	if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, Rsrc1 + Simm16)) return;

	/* aligned check with xicache operation - maybe here instead ? FIXME*/
	opc = Rpc; /* stash incase Rdest is same reg as Rsrc */
	Rpc = Rnpc;
	Rnpc = npc;

	if (v9p->pstate.addr_mask) opc &= MASK64(31,0); /* FIXME: SV9_ID125 ? */

	Rdest = opc;
ENDDEF


/* Same as jmpl_imm but with rd == %g0: no link value is written. */
IMPL( jmpl_imm_rd0 ) /* fast track normal return-from-call instructions */
	/* aligned check with xicache operation - maybe here instead ? FIXME*/
	tvaddr_t npc;
	sparcv9_cpu_t * v9p;

	v9p = (sparcv9_cpu_t *)(sp->specificp);
	npc = Rsrc1 + Simm16;
	if ((npc & 3) != 0) {
		v9p->post_precise_trap(sp,
		    Sparcv9_trap_mem_address_not_aligned);
		return;
	}
	if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, Rsrc1 + Simm16)) return;
	Rpc = Rnpc;
	Rnpc = npc;
ENDDEF

/* jmpl rs1+rs2, rd: register-register form of jmpl_imm. */
IMPL( jmpl_rrr )
	tvaddr_t opc, npc;
	sparcv9_cpu_t * v9p;

	v9p = (sparcv9_cpu_t *)(sp->specificp);
	npc = Rsrc1 + Rsrc2;
	if ((npc & 3) != 0) {
		v9p->post_precise_trap(sp,
		    Sparcv9_trap_mem_address_not_aligned);
		return;
	}
	if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, Rsrc1 + Rsrc2)) return;

	/* aligned check with xicache operation - maybe here instead ? FIXME*/
	opc = Rpc; /* stash incase Rdest is same reg as Rsrc */
	Rpc = Rnpc;
	Rnpc = npc;

	if (v9p->pstate.addr_mask) opc &= MASK64(31,0); /* FIXME: SV9_ID125 ? */

	Rdest = opc;
ENDDEF


/* Same as jmpl_rrr but with rd == %g0: no link value is written. */
IMPL( jmpl_rrr_rd0 ) /* fast track normal return-from-call instructions */
	/* aligned check with xicache operation - maybe here instead ? FIXME*/
	tvaddr_t npc;
	sparcv9_cpu_t * v9p;

	v9p = (sparcv9_cpu_t *)(sp->specificp);
	npc = Rsrc1 + Rsrc2;
	if ((npc & 3) != 0) {
		v9p->post_precise_trap(sp,
		    Sparcv9_trap_mem_address_not_aligned);
		return;
	}
	if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, Rsrc1 + Rsrc2)) return;
	Rpc = Rnpc;
	Rnpc = npc;
ENDDEF
3520 | ||
3521 | ||
3522 | ||
3523 | ||
3524 | ||
3525 | ||
3526 | ||
3527 | ||
3528 | /* ------------------------------------------------------------ */ | |
3529 | ||
3530 | /* | |
3531 | * Specialist maths instructions | |
3532 | */ | |
3533 | ||
/* udivx: all the work, including the divide-by-zero trap, is done in
 * the shared helper sparcv9_udiv64(). */
IMPL( udiv64_imm )
	sparcv9_udiv64( sp, Rdest_num, Rsrc1, Simm16 );
ENDDEF

IMPL( udiv64_rrr )
	sparcv9_udiv64( sp, Rdest_num, Rsrc1, Rsrc2 );
ENDDEF
3541 | ||
/*
 * sdivx: 64-bit signed divide.  The INT64_MIN / -1 case is special-
 * cased to avoid C undefined behavior; the result is clamped to
 * INT64_MAX.  NOTE(review): confirm this clamp matches the SPARC V9
 * sdivx overflow semantics.
 */
IMPL(sdiv64_imm)
	if (Simm16 == 0) {
		sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
		v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero);
		return;
	}
	if (!Zero_Reg(Rdest_num)) {
		if ((uint64_t)Rsrc1 == (1ULL << 63) && (int64_t)Simm16 == -1)
			Rdest = 0x7fffffffffffffffULL;
		else
			Rdest = (int64_t)Rsrc1 / (int64_t)Simm16;
	}
ENDI

IMPL(sdiv64_rrr)
	if (Rsrc2 == 0) {
		sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
		v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero);
		return;
	}
	if (!Zero_Reg(Rdest_num)) {
		if ((uint64_t)Rsrc1 == (1ULL << 63) && (int64_t)Rsrc2 == -1)
			Rdest = 0x7fffffffffffffffULL;
		else
			Rdest = (int64_t)Rsrc1 / (int64_t)Rsrc2;
	}
ENDI
3569 | ||
3570 | IMPL(sdiv_imm) | |
3571 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
3572 | int64_t foo; | |
3573 | if (Simm16 == 0) { | |
3574 | v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero); | |
3575 | return; | |
3576 | } | |
3577 | if (!Zero_Reg(Rdest_num)) { | |
3578 | foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1; | |
3579 | if ((uint64_t)foo == (1ULL << 63) && (int32_t)Simm16 == -1) | |
3580 | foo = 0x7fffffff; | |
3581 | else | |
3582 | foo = foo / (int32_t)Simm16; | |
3583 | if (foo >= (1ull << 31)) | |
3584 | foo = (1ull << 31) - 1; | |
3585 | else if (foo <= (int64_t)0xffffffff7fffffffULL) | |
3586 | foo = 0x80000000LL; | |
3587 | Rdest = (int32_t)foo; | |
3588 | } | |
3589 | ENDI | |
3590 | ||
/*
 * udiv (imm form): 32-bit unsigned divide of the 64-bit dividend
 * Y:rs1, with the result clamped to 32-bit unsigned range.
 */
IMPL(udiv_imm)
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
	uint64_t foo;
	if (Simm16 == 0) {
		v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero);
		return;
	}
	if (!Zero_Reg(Rdest_num)) {
		/* 64-bit dividend is Y:rs1 */
		foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1;
		/* unsigned divide; divisor is the low 32 bits of the imm */
		foo = ((uint64_t)foo) / ((uint32_t)(int32_t)Simm16);
		if (foo >= (1ull << 32))
			foo = (1ull << 32) - 1;	/* clamp to UINT32_MAX */
		Rdest = (uint32_t)foo;
	}
ENDI
3606 | ||
3607 | IMPL(sdiv_rrr) | |
3608 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
3609 | int64_t foo; | |
3610 | if ((int32_t)Rsrc2 == 0) { | |
3611 | v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero); | |
3612 | return; | |
3613 | } | |
3614 | if (!Zero_Reg(Rdest_num)) { | |
3615 | foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1; | |
3616 | if ((uint64_t)foo == (1ULL << 63) && (int32_t)Rsrc2 == -1) | |
3617 | foo = 0x7fffffff; | |
3618 | else | |
3619 | foo = foo / (int32_t)Rsrc2; | |
3620 | if (foo >= (1ull << 31)) | |
3621 | foo = (1ull << 31) - 1; | |
3622 | else if (foo <= (int64_t)0xffffffff7fffffffULL) | |
3623 | foo = 0x80000000LL; | |
3624 | Rdest = (int32_t)foo; | |
3625 | } | |
3626 | ENDI | |
3627 | ||
3628 | IMPL(udiv_rrr) | |
3629 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
3630 | uint64_t foo; | |
3631 | if ((uint32_t)Rsrc2 == 0) { | |
3632 | v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero); | |
3633 | return; | |
3634 | } | |
3635 | if (!Zero_Reg(Rdest_num)) { | |
3636 | foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1; | |
3637 | foo = (int64_t)foo / (uint32_t)Rsrc2; | |
3638 | if (foo >= (1ull << 32)) | |
3639 | foo = (1ull << 32) - 1; | |
3640 | Rdest = (uint32_t)foo; | |
3641 | } | |
3642 | ENDI | |
3643 | ||
/*
 * sdivcc (imm form): like sdiv_imm but also sets CCR.  icc.v records
 * 32-bit overflow (any clamp); condition codes are computed from the
 * clamped, sign-extended result.
 */
IMPL(sdiv_cc_imm)
	sparcv9_cpu_t *v9p = (sparcv9_cpu_t*)(sp->specificp);
	int64_t foo;
	int v = 0;
	if (Simm16 == 0) {
		v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero);
		return;
	}
	/* 64-bit dividend is Y:rs1 */
	foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1;
	if ((uint64_t)foo == (1ULL << 63) && (int32_t)Simm16 == -1) {
		/* avoid INT64_MIN / -1 (C UB); counts as overflow */
		foo = 0x7fffffff;
		v = (1 << 1); /* icc.v */
	} else
		foo = foo / (int32_t)Simm16;
	/* clamp to 32-bit signed range, flagging overflow */
	if (foo >= (1ll << 31)) {
		foo = (1ll << 31) - 1;
		v = (1 << 1); /* icc.v */
	} else if (foo <= (int64_t)0xffffffff7fffffffULL) {
		foo = 0x80000000LL;
		v = (1 << 1); /* icc.v */
	}
	foo = (int32_t)foo;	/* sign-extend for the xcc bits */
	sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
	    (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
	    ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
	    ((foo == 0) ? (1 << 6) : 0) | /* xcc.z */
	    v;
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI
3674 | ||
/*
 * udivcc (imm form): like udiv_imm but also sets CCR.  icc.v records
 * the UINT32_MAX clamp; condition codes come from the clamped result.
 */
IMPL(udiv_cc_imm)
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
	uint64_t foo;
	int v = 0;
	if (Simm16 == 0) {
		v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero);
		return;
	}
	/* 64-bit dividend is Y:rs1 */
	foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1;
	foo = ((uint64_t)foo) / ((uint32_t)(int32_t)Simm16);
	if (foo >= (1ull << 32)) {
		foo = (1ull << 32) - 1;	/* clamp to UINT32_MAX */
		v = (1 << 1); /* icc.v */
	}
	foo = (uint32_t)foo;	/* zero-extend: xcc.n is always clear */
	sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
	    (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
	    ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
	    ((foo == 0) ? (1 << 6) : 0) | /* xcc.z */
	    v;
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI
3698 | ||
/*
 * sdivcc (reg-reg form): like sdiv_rrr but also sets CCR; see
 * sdiv_cc_imm for the flag conventions.
 */
IMPL(sdiv_cc_rrr)
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
	int64_t foo;
	int v = 0;
	if ((int32_t)Rsrc2 == 0) {
		v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero);
		return;
	}
	/* 64-bit dividend is Y:rs1 */
	foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1;
	if ((uint64_t)foo == (1ULL << 63) && (int32_t)Rsrc2 == -1) {
		/* avoid INT64_MIN / -1 (C UB); counts as overflow */
		foo = 0x7fffffff;
		v = (1 << 1); /* icc.v */
	} else
		foo = foo / (int32_t)Rsrc2;
	/* clamp to 32-bit signed range, flagging overflow */
	if (foo >= (1ll << 31)) {
		foo = (1ll << 31) - 1;
		v = (1 << 1); /* icc.v */
	} else if (foo <= (int64_t)0xffffffff7fffffffULL) {
		foo = 0x80000000LL;
		v = (1 << 1); /* icc.v */
	}
	foo = (int32_t)foo;	/* sign-extend for the xcc bits */
	sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
	    (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
	    ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
	    ((foo == 0) ? (1 << 6) : 0) | /* xcc.z */
	    v;
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI
3729 | ||
3730 | IMPL(udiv_cc_rrr) | |
3731 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
3732 | uint64_t foo; | |
3733 | int v = 0; | |
3734 | if ((uint32_t)Rsrc2 == 0) { | |
3735 | v9p->post_precise_trap(sp, Sparcv9_trap_division_by_zero); | |
3736 | return; | |
3737 | } | |
3738 | foo = (((int64_t)sp->v9_y) << 32) | (uint32_t)Rsrc1; | |
3739 | foo = ((int64_t)foo) / ((uint32_t)Rsrc2); | |
3740 | if (foo >= (1ull << 32)) { | |
3741 | foo = (1ull << 32) - 1; | |
3742 | v = (1 << 1); /* icc.v */ | |
3743 | } | |
3744 | foo = (uint32_t)foo; | |
3745 | sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */ | |
3746 | (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */ | |
3747 | ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */ | |
3748 | ((foo == 0) ? (1 << 6) : 0) | /* xcc.z */ | |
3749 | v; | |
3750 | if (!Zero_Reg(Rdest_num)) | |
3751 | Rdest = foo; | |
3752 | ENDI | |
3753 | ||
/*
 * smul/umul (imm forms): 32x32 -> 64-bit multiply.  The high 32 bits
 * of the product go to the Y register, the full 64-bit value to rd.
 * The _cc variants additionally set CCR from the 64-bit product.
 */
IMPL(smul_imm)
	uint64_t foo;
	foo = (int64_t)(int32_t)Rsrc1 * (int64_t)(int32_t)Simm16;
	sp->v9_y = foo >> 32;
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI

IMPL(umul_imm)
	uint64_t foo;
	foo = (uint64_t)(uint32_t)Rsrc1 * (uint64_t)(uint32_t)(int32_t)Simm16;
	sp->v9_y = foo >> 32;
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI

IMPL(smul_cc_imm)
	uint64_t foo;
	foo = (int64_t)(int32_t)Rsrc1 * (int64_t)(int32_t)Simm16;
	sp->v9_y = foo >> 32;
	sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
	    (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
	    ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
	    ((foo == 0) ? (1 << 6) : 0); /* xcc.z */
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI

IMPL(umul_cc_imm)
	uint64_t foo;
	foo = (uint64_t)(uint32_t)Rsrc1 * (uint64_t)(uint32_t)(int32_t)Simm16;
	sp->v9_y = foo >> 32;
	sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
	    (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
	    ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
	    ((foo == 0) ? (1 << 6) : 0); /* xcc.z */
	if (!Zero_Reg(Rdest_num))
		Rdest = foo;
ENDI
3793 | ||
/*
 * mulscc: one step of the V8 iterative multiply.  Conceptually:
 * shift the 64-bit pair rs1:Y right one bit; if the bit shifted out
 * of Y was set, add rs2 (or the immediate) to a value formed from
 * (icc.n xor icc.v) and rs1 >> 1; write the sum to rd and set the
 * icc flags from the 32-bit add (xcc is left untouched... actually
 * cleared by the V9_xcc_mask and).
 */
IMPL(mulscc_rrr)
	uint64_t foo;
	uint32_t s1, s2;
	uint32_t d;
	uint32_t v, c;
	foo = (((int64_t)Rsrc1) << 32) | (uint32_t)sp->v9_y;
	/* icc.n xor icc.v */
	s1 = ((sp->v9_ccr >> 3) & 1) ^ ((sp->v9_ccr >> 1) & 1);
	s1 = (s1 << 31) | (((uint32_t)Rsrc1) >> 1);
	if (foo & 1)
		s2 = (uint32_t)Rsrc2;	/* low bit of Y selects the addend */
	else
		s2 = 0;
	foo >>= 1;
	sp->v9_y = (uint32_t)foo;	/* Y picks up the shifted value */
	d = s1 + s2;
	/* standard 32-bit add overflow/carry derivation from sign bits */
	v = (s1 & s2 & ~d) | (~s1 & ~s2 & d);
	c = (s1 & s2) | (~d & (s1 | s2));
	sp->v9_ccr = (sp->v9_ccr & V9_xcc_mask);
	sp->v9_ccr |= V9_icc_v((v >> 31) & 1);
	sp->v9_ccr |= V9_icc_c((c >> 31) & 1);
	sp->v9_ccr |= V9_icc_n((d >> 31) & 1);
	sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1);
	if (!Zero_Reg(Rdest_num))
		Rdest = (uint32_t)d;
ENDI

/* mulscc, immediate-addend form; identical to mulscc_rrr otherwise. */
IMPL(mulscc_imm)
	uint64_t foo;
	uint32_t s1, s2;
	uint32_t d;
	uint32_t v, c;
	foo = (((int64_t)Rsrc1) << 32) | (uint32_t)sp->v9_y;
	/* icc.n xor icc.v */
	s1 = ((sp->v9_ccr >> 3) & 1) ^ ((sp->v9_ccr >> 1) & 1);
	s1 = (s1 << 31) | (((uint32_t)Rsrc1) >> 1);
	if (foo & 1)
		s2 = (uint32_t)Simm16;
	else
		s2 = 0;
	foo >>= 1;
	sp->v9_y = (uint32_t)foo;
	d = s1 + s2;
	v = (s1 & s2 & ~d) | (~s1 & ~s2 & d);
	c = (s1 & s2) | (~d & (s1 | s2));
	sp->v9_ccr = (sp->v9_ccr & V9_xcc_mask);
	sp->v9_ccr |= V9_icc_v((v >> 31) & 1);
	sp->v9_ccr |= V9_icc_c((c >> 31) & 1);
	sp->v9_ccr |= V9_icc_n((d >> 31) & 1);
	sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1);
	if (!Zero_Reg(Rdest_num))
		Rdest = (uint32_t)d;
ENDI
3847 | ||
/*
 * SMUL (V8 compatibility): signed 32x32 -> 64-bit multiply.  The high
 * 32 bits of the product go to %y; the full 64-bit product is written
 * to rd (unless rd is %g0).
 */
3848 | IMPL(smul_rrr)
3849 | uint64_t foo;
3850 | foo = (int64_t)(int32_t)Rsrc1 * (int64_t)(int32_t)Rsrc2;
3851 | sp->v9_y = foo >> 32;
3852 | if (!Zero_Reg(Rdest_num))
3853 | Rdest = foo;
3854 | ENDI
3855 | ||
3856 | ||
/*
 * UMUL (V8 compatibility): unsigned 32x32 -> 64-bit multiply.  High
 * half to %y, full 64-bit product to rd (unless rd is %g0).
 */
3857 | IMPL(umul_rrr)
3858 | uint64_t foo;
3859 | foo = (uint64_t)(uint32_t)Rsrc1 * (uint64_t)(uint32_t)Rsrc2;
3860 | sp->v9_y = foo >> 32;
3861 | if (!Zero_Reg(Rdest_num))
3862 | Rdest = foo;
3863 | ENDI
3864 | ||
/*
 * SMULcc: as smul_rrr, but also rewrites %ccr.  Only the n and z bits
 * of icc (from the low 32 bits) and xcc (from the full product) are
 * set; the v and c bits of both fields end up zero.
 */
3865 | IMPL(smul_cc_rrr)
3866 | uint64_t foo;
3867 | foo = (int64_t)(int32_t)Rsrc1 * (int64_t)(int32_t)Rsrc2;
3868 | sp->v9_y = foo >> 32;
3869 | sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
3870 | (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
3871 | ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
3872 | ((foo == 0) ? (1 << 6) : 0); /* xcc.z */
3873 | if (!Zero_Reg(Rdest_num))
3874 | Rdest = foo;
3875 | ENDI
3876 | ||
/*
 * UMULcc: as umul_rrr, but also rewrites %ccr.  Only icc.n/z and
 * xcc.n/z are set; v and c of both condition-code fields end up zero.
 */
3877 | IMPL(umul_cc_rrr)
3878 | uint64_t foo;
3879 | foo = (uint64_t)(uint32_t)Rsrc1 * (uint64_t)(uint32_t)Rsrc2;
3880 | sp->v9_y = foo >> 32;
3881 | sp->v9_ccr = ((foo & (1ull << 31)) ? (1 << 3) : 0) | /* icc.n */
3882 | (((uint32_t)foo == 0) ? (1 << 2) : 0) | /* icc.z */
3883 | ((foo & (1ull << 63)) ? (1 << 7) : 0) | /* xcc.n */
3884 | ((foo == 0) ? (1 << 6) : 0); /* xcc.z */
3885 | if (!Zero_Reg(Rdest_num))
3886 | Rdest = foo;
3887 | ENDI
3888 | ||
3889 | ||
3890 | /* ------------------------------------------------------------ */ | |
3891 | ||
3892 | /* | |
3893 | * trap instructions | |
3894 | */ | |
3895 | ||
3896 | ||
/*
 * Unconditional trap with immediate trap number ("trap always" fast
 * path).  User mode is restricted to software trap numbers 0-127;
 * more-privileged states may use 0-255.  Magic traps are simulator
 * services: when one is recognized the trap is swallowed and the PC
 * simply advances.
 */
3897 | IMPL( trap_imm_fast )
3898 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
3899 | int tn = Simm16;
3900 | ||
3901 | if (V9_User==v9p->state)
3902 | tn &= 0x7f;
3903 | else
3904 | tn &= 0xff;
3905 | ||
/* simulator backdoor: handled magic traps complete without trapping */
3906 | if (SS_MAGIC_TRAP_CC(TRAP_cc) && SS_MAGIC_TRAP(sp, tn)) {
3907 | NEXT_INSTN(sp);
3908 | return;
3909 | }
3910 | ||
/* trap type = trap_instruction base + software trap number */
3911 | v9p->post_precise_trap(sp, tn+Sparcv9_trap_trap_instruction);
3912 | ENDDEF
3913 | ||
3914 | ||
/*
 * Conditional trap (Tcc), immediate and register forms.  The helper
 * receives the computed software trap number (rs1 + operand2), the
 * condition-code selector and the condition to evaluate.
 */
3915 | IMPL( trapcc_imm )
3916 | sparcv9_trapcc( sp, Rsrc1 + Simm16, TRAP_cc, TRAP_cond );
3917 | ENDDEF
3918 | ||
3919 | ||
3920 | IMPL( trapcc_rr )
3921 | sparcv9_trapcc( sp, Rsrc1 + Rsrc2, TRAP_cc, TRAP_cond );
3922 | ENDDEF
3923 | ||
3924 | ||
3925 | /* ------------------------------------------------------------ */ | |
3926 | ||
/*
 * MOVcc on the integer condition codes, immediate and register forms.
 * MOVCC_cc selects xcc (ccr shifted right 4) versus icc.  The magic
 * table holds, per condition, one bit for each of the 16 possible
 * 4-bit flag states, so the move happens iff the indexed bit is set.
 */
3927 | IMPL( movcc_imm )
3928 | int ccr;
3929 | ccr = sp->v9_ccr;
3930 | if (MOVCC_cc) ccr>>=4;
3931 | ||
3932 | if ( (sparcv9_cc_magic[MOVCC_cond] >> (ccr & 0xf)) &1 ) {
3933 | Rdest = Simm16;
3934 | }
3935 | NEXT_INSTN(sp);
3936 | ENDDEF
3937 | ||
3938 | ||
3939 | ||
3940 | IMPL( movcc_rr )
3941 | int ccr;
3942 | ccr = sp->v9_ccr;
3943 | if (MOVCC_cc) ccr>>=4;
3944 | ||
3945 | if ( (sparcv9_cc_magic[MOVCC_cond] >> (ccr & 0xf)) &1 ) {
3946 | Rdest = Rsrc2;
3947 | }
3948 | NEXT_INSTN(sp);
3949 | ENDDEF
3950 | ||
3951 | ||
3952 | ||
/*
 * MOVcc on a floating-point condition-code field (fcc0-fcc3),
 * immediate and register forms.  Traps fp_disabled first if the FPU
 * is off.  The 2-bit fcc value indexes a per-condition bit mask.
 */
3953 | IMPL( movfcc_imm )
3954 | int ccr;
3955 | FP_EXEC_FPU_ON_CHECK;
/* fcc0 lives in a different FSR field than fcc1-3 */
3956 | if (MOVCC_cc == 0)
3957 | ccr = V9_FSR_FCC0(sp->v9_fsr_ctrl);
3958 | else
3959 | ccr = V9_FSR_FCCN(sp->v9_fsr_ctrl, MOVCC_cc);
3960 | ||
3961 | if ( (sparcv9_fcc_magic[MOVCC_cond] >> ccr) & 1 ) {
3962 | Rdest = Simm16;
3963 | }
3964 | NEXT_INSTN(sp);
3965 | ENDDEF
3966 | ||
3967 | ||
3968 | ||
3969 | IMPL( movfcc_rr )
3970 | int ccr;
3971 | FP_EXEC_FPU_ON_CHECK;
3972 | if (MOVCC_cc == 0)
3973 | ccr = V9_FSR_FCC0(sp->v9_fsr_ctrl);
3974 | else
3975 | ccr = V9_FSR_FCCN(sp->v9_fsr_ctrl, MOVCC_cc);
3976 | ||
3977 | if ( (sparcv9_fcc_magic[MOVCC_cond] >> ccr) & 1 ) {
3978 | Rdest = Rsrc2;
3979 | }
3980 | NEXT_INSTN(sp);
3981 | ENDDEF
3982 | ||
3983 | ||
/*
 * MOVr family: conditionally move an immediate or Rsrc2 into rd based
 * on a comparison of Rsrc1 against zero.  SRsrc1 is the signed view
 * of the source register; the _z variants use the unsigned view since
 * ==0 is sign-independent.  When the condition is false rd is left
 * untouched.  One stub per (operand form x condition) pair.
 */
3984 | IMPL( movr_imm_z )
3985 | if (Rsrc1 == 0)
3986 | Rdest = Simm16;
3987 | ENDI
3988 | IMPL( movr_imm_lez )
3989 | if (SRsrc1 <= 0)
3990 | Rdest = Simm16;
3991 | ENDI
3992 | IMPL( movr_imm_lz )
3993 | if (SRsrc1 < 0)
3994 | Rdest = Simm16;
3995 | ENDI
3996 | IMPL( movr_imm_nz )
3997 | if (SRsrc1 != 0)
3998 | Rdest = Simm16;
3999 | ENDI
4000 | IMPL( movr_imm_gz )
4001 | if (SRsrc1 > 0)
4002 | Rdest = Simm16;
4003 | ENDI
4004 | IMPL( movr_imm_gez )
4005 | if (SRsrc1 >= 0)
4006 | Rdest = Simm16;
4007 | ENDI
4008 | IMPL( movr_rr_z )
4009 | if (Rsrc1 == 0)
4010 | Rdest = Rsrc2;
4011 | ENDI
4012 | IMPL( movr_rr_lez )
4013 | if (SRsrc1 <= 0)
4014 | Rdest = Rsrc2;
4015 | ENDI
4016 | IMPL( movr_rr_lz )
4017 | if (SRsrc1 < 0)
4018 | Rdest = Rsrc2;
4019 | ENDI
4020 | IMPL( movr_rr_nz )
4021 | if (SRsrc1 != 0)
4022 | Rdest = Rsrc2;
4023 | ENDI
4024 | IMPL( movr_rr_gz )
4025 | if (SRsrc1 > 0)
4026 | Rdest = Rsrc2;
4027 | ENDI
4028 | IMPL( movr_rr_gez )
4029 | if (SRsrc1 >= 0)
4030 | Rdest = Rsrc2;
4031 | ENDI
4032 | ||
/*
 * POPC: population count.  The encoding reserves rs1, so any nonzero
 * rs1 field raises illegal_instruction.  The count itself uses
 * Kernighan's loop (clear the lowest set bit each iteration).  In the
 * immediate form the operand is the sign-extended immediate, so a
 * negative immediate counts its sign-extension bits too.
 */
4033 | IMPL( popc_imm )
4034 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4035 | int64_t cnt;
4036 | int64_t val;
4037 | ||
4038 | if (Rsrc1_num != 0) {
4039 | v9p->post_precise_trap(sp, Sparcv9_trap_illegal_instruction);
4040 | return;
4041 | }
4042 | if (!Zero_Reg(Rdest_num)) {
4043 | val = (int64_t)(int32_t)Simm16;
/* val &= val-1 clears the lowest set bit; loop runs once per set bit */
4044 | for (cnt = 0; val != 0; val &= val-1)
4045 | cnt++;
4046 | Rdest = cnt;
4047 | }
4048 | ENDI
4049 | ||
4050 | IMPL( popc_rrr )
4051 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4052 | int64_t cnt;
4053 | uint64_t val;
4054 | ||
4055 | if (Rsrc1_num != 0) {
4056 | v9p->post_precise_trap(sp, Sparcv9_trap_illegal_instruction);
4057 | return;
4058 | }
4059 | if (!Zero_Reg(Rdest_num)) {
4060 | val = Rsrc2;
4061 | for (cnt = 0; val != 0; val &= val-1)
4062 | cnt++;
4063 | Rdest = cnt;
4064 | }
4065 | ENDI
4066 | ||
4067 | ||
4068 | /* ------------------------------------------------------------ */ | |
4069 | ||
4070 | /* | |
4071 | * Specialist instructions ... | |
4072 | * ... typically implementation dependent | |
4073 | * ... so use the provided callbacks to the actual device | |
4074 | */ | |
4075 | ||
/*
 * RD/WR dispatchers for the ancillary state registers, the privileged
 * registers, and the hyperprivileged registers.  Register decode and
 * privilege checking are implementation-dependent, so each form just
 * forwards to the per-chip callback.  The write forms pass
 * rs1 ^ operand2 because the V9 WR instructions are architecturally
 * defined to write the XOR of their two source operands.
 */
4076 | IMPL( read_state_reg )
4077 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4078 | v9p->read_state_reg( sp, Rdest_num, Rsrc1_num );
4079 | ENDDEF
4080 | ||
4081 | IMPL( write_state_reg_imm )
4082 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4083 | v9p->write_state_reg( sp, Rdest_num, Rsrc1 ^ Simm16 );
4084 | ENDDEF
4085 | ||
4086 | IMPL( write_state_reg_rrr )
4087 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4088 | v9p->write_state_reg( sp, Rdest_num, Rsrc1 ^ Rsrc2 );
4089 | ENDDEF
4090 | ||
4091 | ||
4092 | IMPL( read_priv_reg )
4093 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4094 | v9p->read_priv_reg( sp, Rdest_num, Rsrc1_num );
4095 | ENDDEF
4096 | ||
4097 | IMPL( write_priv_reg_imm )
4098 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4099 | v9p->write_priv_reg( sp, Rdest_num, Rsrc1 ^ Simm16 );
4100 | ENDDEF
4101 | ||
4102 | IMPL( write_priv_reg_rrr )
4103 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4104 | v9p->write_priv_reg( sp, Rdest_num, Rsrc1 ^ Rsrc2 );
4105 | ENDDEF
4106 | ||
4107 | ||
4108 | IMPL( read_hyper_priv_reg )
4109 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4110 | v9p->read_hyp_priv_reg( sp, Rdest_num, Rsrc1_num );
4111 | ENDDEF
4112 | ||
4113 | IMPL( write_hyper_priv_reg_imm )
4114 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4115 | v9p->write_hyp_priv_reg( sp, Rdest_num, Rsrc1 ^ Simm16 );
4116 | ENDDEF
4117 | ||
4118 | IMPL( write_hyper_priv_reg_rrr )
4119 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4120 | v9p->write_hyp_priv_reg( sp, Rdest_num, Rsrc1 ^ Rsrc2 );
4121 | ENDDEF
4122 | ||
4123 | ||
/*
 * STBAR and MEMBAR: memory barriers are no-ops in this simulator (see
 * the existing comments); each just advances the PC.
 */
4124 | IMPL( stbar )
4125 | /* Nothing to do in legion?? */
4126 | NEXT_INSTN(sp);
4127 | ENDDEF
4128 | ||
4129 | IMPL( membar )
4130 | /* Nothing to do in legion?? */
4131 | NEXT_INSTN(sp);
4132 | ENDDEF
4133 | ||
4134 | ||
4135 | ||
/*
 * SAVE / RESTORE, immediate and register forms.  The sum of the
 * source operands is computed in the current window, then the helper
 * performs the window rotation and writes the sum into rd of the new
 * window (posting spill/fill/clean-window traps as needed).
 */
4136 | IMPL( save_imm )
4137 | sparcv9_save_instr(sp, Rdest_num, Rsrc1 + Simm16);
4138 | ENDDEF
4139 | ||
4140 | IMPL( save_rrr )
4141 | sparcv9_save_instr(sp, Rdest_num, Rsrc1 + Rsrc2);
4142 | ENDDEF
4143 | ||
4144 | IMPL( restore_imm )
4145 | sparcv9_restore_instr(sp, Rdest_num, Rsrc1 + Simm16);
4146 | ENDDEF
4147 | ||
4148 | IMPL( restore_rrr )
4149 | sparcv9_restore_instr(sp, Rdest_num, Rsrc1 + Rsrc2);
4150 | ENDDEF
4151 | ||
4152 | ||
4153 | ||
4154 | ||
4155 | ||
/*
 * SAVED (privileged): executed by spill trap handlers to record that
 * a window was saved.  Increments CANSAVE mod NWINDOWS and decrements
 * OTHERWIN if nonzero, else CANRESTORE.
 */
4156 | IMPL( saved )
4157 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4158 | ||
4159 | if (V9_User == v9p->state) {
4160 | v9p->post_precise_trap(sp, Sparcv9_trap_privileged_opcode);
4161 | return;
4162 | }
4163 | ||
4164 | v9p->cansave = INC_MOD(v9p->cansave, v9p->nwins);
/* the spilled window came from the "other" set when OTHERWIN != 0 */
4165 | if (v9p->otherwin == 0)
4166 | v9p->canrestore = DEC_MOD(v9p->canrestore, v9p->nwins);
4167 | else
4168 | v9p->otherwin = DEC_MOD(v9p->otherwin, v9p->nwins);
4169 | NEXT_INSTN(sp);
4170 | ENDDEF
4171 | ||
4172 | ||
/*
 * RESTORED (privileged): executed by fill trap handlers.  Increments
 * CANRESTORE mod NWINDOWS, decrements OTHERWIN if nonzero (else
 * CANSAVE), and bumps CLEANWIN, saturating at NWINDOWS-1.
 */
4173 | IMPL( restored )
4174 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4175 | ||
4176 | if (V9_User == v9p->state) {
4177 | v9p->post_precise_trap(sp, Sparcv9_trap_privileged_opcode);
4178 | return;
4179 | }
4180 | ||
4181 | v9p->canrestore = INC_MOD(v9p->canrestore, v9p->nwins);
4182 | if (v9p->otherwin == 0)
4183 | v9p->cansave = DEC_MOD(v9p->cansave, v9p->nwins);
4184 | else
4185 | v9p->otherwin = DEC_MOD(v9p->otherwin, v9p->nwins);
4186 | ||
/* the filled window is clean; CLEANWIN saturates at NWINDOWS-1 */
4187 | if (v9p->cleanwin < (v9p->nwins-1))
4188 | v9p->cleanwin = v9p->cleanwin + 1;
4189 | ||
4190 | NEXT_INSTN(sp);
4191 | ENDDEF
4192 | ||
4193 | ||
/*
 * ALLCLEAN (privileged): marks every register window as clean by
 * setting CLEANWIN to its maximum, NWINDOWS-1.
 */
4194 | IMPL( allclean )
4195 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4196 | ||
4197 | if (V9_User == v9p->state) {
4198 | v9p->post_precise_trap(sp, Sparcv9_trap_privileged_opcode);
4199 | return;
4200 | }
4201 | ||
4202 | v9p->cleanwin = v9p->nwins - 1;
4203 | ||
4204 | NEXT_INSTN(sp);
4205 | ENDDEF
4206 | ||
4207 | ||
/*
 * OTHERW (privileged): moves the restorable windows into the "other"
 * set (OTHERWIN = CANRESTORE, CANRESTORE = 0).  Executing it while
 * OTHERWIN is already nonzero loses the old value, hence the warning.
 */
4208 | IMPL( otherw )
4209 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4210 | ||
4211 | if (V9_User == v9p->state) {
4212 | v9p->post_precise_trap(sp, Sparcv9_trap_privileged_opcode);
4213 | return;
4214 | }
4215 | ||
4216 | if (v9p->otherwin != 0) {
4217 | EXEC_WARNING(("(@pc=0x%llx) "
4218 | "otherw executed with otherwin != 0",
4219 | sp->pc));
4220 | }
4221 | v9p->otherwin = v9p->canrestore;
4222 | v9p->canrestore = 0;
4223 | ||
4224 | NEXT_INSTN(sp);
4225 | ENDDEF
4226 | ||
4227 | ||
/*
 * NORMALW (privileged): inverse of OTHERW — moves the "other" windows
 * back to the normal restorable set (CANRESTORE = OTHERWIN,
 * OTHERWIN = 0).  A nonzero CANRESTORE is clobbered, hence the warning.
 */
4228 | IMPL( normalw )
4229 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4230 | ||
4231 | if (V9_User == v9p->state) {
4232 | v9p->post_precise_trap(sp, Sparcv9_trap_privileged_opcode);
4233 | return;
4234 | }
4235 | ||
4236 | if (v9p->canrestore != 0) {
4237 | EXEC_WARNING(("(@pc=0x%llx) "
4238 | "normalw executed with canrestore != 0",
4239 | sp->pc));
4240 | }
4241 | v9p->canrestore = v9p->otherwin ;
4242 | v9p->otherwin = 0;
4243 | ||
4244 | NEXT_INSTN(sp);
4245 | ENDDEF
4246 | ||
4247 | ||
/*
 * INVALW (privileged): marks all windows invalid — CANSAVE gets its
 * architectural maximum (NWINDOWS-2), CANRESTORE and OTHERWIN go to 0.
 */
4248 | IMPL( invalw )
4249 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4250 | ||
4251 | if (V9_User == v9p->state) {
4252 | v9p->post_precise_trap(sp, Sparcv9_trap_privileged_opcode);
4253 | return;
4254 | }
4255 | ||
4256 | v9p->cansave = v9p->nwins - 2;
4257 | v9p->canrestore = 0;
4258 | v9p->otherwin = 0;
4259 | ||
4260 | NEXT_INSTN(sp);
4261 | ENDDEF
4262 | ||
4263 | ||
/*
 * FLUSHW: flush all occupied register windows.  When CANSAVE is
 * already at its architectural maximum (NWINDOWS-2) there is nothing
 * to flush; otherwise post a spill trap, choosing the other/normal
 * vector and its WSTATE-selected variant (trap type offset by
 * wstate << 2).  The trap handler re-executes FLUSHW until done.
 */
4264 | IMPL( flushw )
4265 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
4266 | sparcv9_trap_type_t tt;
4267 | ||
4268 | if (v9p->cansave == (v9p->nwins-2)) {
4269 | NEXT_INSTN(sp);
4270 | return;
4271 | }
4272 | if (v9p->otherwin != 0) {
4273 | tt = Sparcv9_trap_spill_0_other | (v9p->wstate_other<<2);
4274 | } else {
4275 | tt = Sparcv9_trap_spill_0_normal | (v9p->wstate_normal<<2);
4276 | }
4277 | v9p->post_precise_trap(sp, tt);
4278 | ENDDEF
4279 | ||
4280 | ||
4281 | ||
4282 | /* | |
4283 | * Return | |
4284 | */ | |
4285 | ||
/*
 * RETURN, immediate and register forms.  The target address is
 * checked against the 64-bit VA hole first (skipped when PSTATE.AM
 * masks addresses to 32 bits); check_vahole posts its own trap and
 * returns true in that case.  Otherwise the helper performs the
 * RESTORE-plus-branch semantics of RETURN.
 */
4286 | IMPL( return_imm )
4287 | sparcv9_cpu_t * v9p;
4288 | ||
4289 | v9p = (sparcv9_cpu_t *)(sp->specificp);
4290 | if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, Rsrc1 + Simm16)) return;
4291 | sparcv9_return_instr(sp, Rsrc1 + Simm16);
4292 | ENDDEF
4293 | ||
4294 | IMPL( return_rrr )
4295 | sparcv9_cpu_t * v9p;
4296 | ||
4297 | v9p = (sparcv9_cpu_t *)(sp->specificp);
4298 | if (!v9p->pstate.addr_mask && v9p->check_vahole(sp, Rsrc1 + Rsrc2)) return;
4299 | sparcv9_return_instr(sp, Rsrc1 + Rsrc2);
4300 | ENDDEF
4301 | ||
4302 | /* ------------------------------------------------------------ */ | |
4303 | ||
4304 | /* | |
4305 | * Done / Retry | |
4306 | */ | |
4307 | ||
/*
 * DONE / RETRY: both return from a trap handler; the Misc32 operand
 * distinguishes them (nonzero => DONE).  The per-chip callback
 * restores the trap state and redirects the PC.
 */
4308 | IMPL( done_retry )
4309 | sparcv9_cpu_t * v9p;
4310 | v9p = (sparcv9_cpu_t *)(sp->specificp);
4311 | v9p->done_retry(sp, (bool_t)Misc32); /* true if done instruction */
4312 | ENDDEF
4313 | ||
4314 | ||
4315 | /* ------------------------------------------------------------ */ | |
4316 | ||
4317 | /* | |
4318 | * Instruction cache flushing | |
4319 | * | |
4320 | * Since Legion does not have a pipeline to clear, there | |
4321 | * is nothing to do here for processors that do not | |
4322 | * translate the VA. | |
4323 | * | |
4324 | * Processors that do more are those that do not maintain | |
4325 | * instruction cache coherency in hardware. They will have | |
4326 | * processor specific versions of these implementations. | |
4327 | */ | |
4328 | ||
4329 | ||
/*
 * FLUSH (iflush): as the file comment above explains, there is no
 * pipeline to clear in this simulator, so the default implementation
 * is a no-op.  The doubleword-aligned target va is computed but
 * intentionally unused here; processors that need real icache
 * maintenance provide their own versions of these handlers.
 */
4330 | IMPL(iflush_rr)
4331 | tvaddr_t va;
4332 | va = (Rsrc1 + Rsrc2) & ~(tvaddr_t)7;
4333 | NEXT_INSTN(sp);
4334 | ENDDEF
4335 | ||
4336 | IMPL(iflush_imm)
4337 | tvaddr_t va;
4338 | va = (Rsrc1 + Simm16) & ~(tvaddr_t)7;
4339 | NEXT_INSTN(sp);
4340 | ENDDEF
4341 | ||
4342 | /* ------------------------------------------------------------ */ | |
4343 | ||
4344 | /* | |
4345 | * ASI loads and stores | |
4346 | */ | |
4347 | ||
4348 | ||
4349 | /* immediate forms */ | |
4350 | ||
/*
 * ASI load/store dispatchers.  All memory-operation decode lives in
 * the per-chip asi_access callback; these stubs only choose where the
 * ASI number comes from (the %asi register, the instruction's imm_asi
 * field, or the implicit ASI) and how the address is formed
 * (rs1 + rs2 or rs1 + simm13).  The fp64 forms additionally trap
 * fp_disabled when the FPU is off (unless FP decode checking is
 * compiled out).
 *
 * NOTE(review): fp64asi_imm passes sp->v9_asi with NO_FLAG, whereas
 * the integer asi_reg_imm passes USE_ASI_REG for the same %asi-based
 * addressing — confirm the callback treats these two equivalently.
 */
4351 | IMPL(asi_reg_imm)
4352 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4353 | v9p->asi_access( sp, ASI_op, ASI_Rdest, sp->v9_asi, Rsrc1, Simm16, USE_ASI_REG );
4354 | ENDDEF
4355 | ||
4356 | IMPL(asi_reg)
4357 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4358 | v9p->asi_access( sp, ASI_op, ASI_Rdest, sp->v9_asi, Rsrc1, Rsrc2, USE_ASI_REG );
4359 | ENDDEF
4360 | ||
4361 | IMPL(asi_num)
4362 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4363 | v9p->asi_access( sp, ASI_op, ASI_Rdest, ASI_num, Rsrc1, Rsrc2, NO_FLAG );
4364 | ENDDEF
4365 | ||
4366 | IMPL(memop_rrr)
4367 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4368 | v9p->asi_access( sp, ASI_op, ASI_Rdest, V9_ASI_IMPLICIT, Rsrc1, Rsrc2, NO_FLAG );
4369 | ENDDEF
4370 | ||
4371 | IMPL(memop_imm)
4372 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4373 | v9p->asi_access( sp, ASI_op, ASI_Rdest, V9_ASI_IMPLICIT, Rsrc1, Simm16, NO_FLAG );
4374 | ENDDEF
4375 | ||
4376 | IMPL(fp64asi_imm)
4377 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4378 | #ifndef FP_DECODE_DISABLED
4379 | if (!v9p->fpu_on) {
4380 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4381 | return;
4382 | }
4383 | #endif /* FP_DECODE_DISABLED */
4384 | v9p->asi_access( sp, ASI_op, ASI_Rdest, sp->v9_asi, Rsrc1, Simm16, NO_FLAG );
4385 | ENDDEF
4386 | ||
4387 | IMPL(fp64asi_rrr)
4388 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4389 | #ifndef FP_DECODE_DISABLED
4390 | if (!v9p->fpu_on) {
4391 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4392 | return;
4393 | }
4394 | #endif /* FP_DECODE_DISABLED */
4395 | v9p->asi_access( sp, ASI_op, ASI_Rdest, ASI_num, Rsrc1, Rsrc2, NO_FLAG );
4396 | ENDDEF
4397 | ||
/*
 * LDFSR/LDXFSR/STFSR/STXFSR, immediate-address forms.  Each traps
 * fp_disabled if the FPU is off (unless FP decode checking is
 * compiled out), then routes through asi_access with the appropriate
 * FSR memory-op code and access size (32-bit for the legacy FSR
 * forms, 64-bit for the X forms).  No destination register: the FSR
 * itself is the target/source, hence the NULL rd argument.
 */
4398 | IMPL(ldfsr_imm)
4399 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4400 | #ifndef FP_DECODE_DISABLED
4401 | if (!v9p->fpu_on) {
4402 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4403 | return;
4404 | }
4405 | #endif /* FP_DECODE_DISABLED */
4406 | v9p->asi_access( sp, MA_V9_LdFSR|MA_Size32, NULL, V9_ASI_IMPLICIT, Rsrc1, Simm16, NO_FLAG );
4407 | ENDDEF
4408 | ||
4409 | IMPL(ldxfsr_imm)
4410 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4411 | #ifndef FP_DECODE_DISABLED
4412 | if (!v9p->fpu_on) {
4413 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4414 | return;
4415 | }
4416 | #endif /* FP_DECODE_DISABLED */
4417 | v9p->asi_access( sp, MA_V9_LdXFSR|MA_Size64, NULL, V9_ASI_IMPLICIT, Rsrc1, Simm16, NO_FLAG );
4418 | ENDDEF
4419 | ||
4420 | ||
4421 | IMPL(stfsr_imm)
4422 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4423 | #ifndef FP_DECODE_DISABLED
4424 | if (!v9p->fpu_on) {
4425 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4426 | return;
4427 | }
4428 | #endif /* FP_DECODE_DISABLED */
4429 | v9p->asi_access( sp, MA_V9_StFSR|MA_Size32, NULL, V9_ASI_IMPLICIT, Rsrc1, Simm16, NO_FLAG );
4430 | ENDDEF
4431 | ||
4432 | IMPL(stxfsr_imm)
4433 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4434 | #ifndef FP_DECODE_DISABLED
4435 | if (!v9p->fpu_on) {
4436 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4437 | return;
4438 | }
4439 | #endif /* FP_DECODE_DISABLED */
4440 | v9p->asi_access( sp, MA_V9_StXFSR|MA_Size64, NULL, V9_ASI_IMPLICIT, Rsrc1, Simm16, NO_FLAG );
4441 | ENDDEF
4442 | ||
4443 | ||
/*
 * LDFSR/LDXFSR/STFSR/STXFSR, register-address (rs1 + rs2) forms.
 * Mirrors of the immediate forms above: fp_disabled check, then the
 * FSR memory op via asi_access with NULL rd (the FSR is the target).
 */
4444 | IMPL(ldfsr_rr)
4445 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4446 | #ifndef FP_DECODE_DISABLED
4447 | if (!v9p->fpu_on) {
4448 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4449 | return;
4450 | }
4451 | #endif /* FP_DECODE_DISABLED */
4452 | v9p->asi_access( sp, MA_V9_LdFSR|MA_Size32, NULL, V9_ASI_IMPLICIT, Rsrc1, Rsrc2, NO_FLAG );
4453 | ENDDEF
4454 | ||
4455 | IMPL(ldxfsr_rr)
4456 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4457 | #ifndef FP_DECODE_DISABLED
4458 | if (!v9p->fpu_on) {
4459 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4460 | return;
4461 | }
4462 | #endif /* FP_DECODE_DISABLED */
4463 | v9p->asi_access( sp, MA_V9_LdXFSR|MA_Size64, NULL, V9_ASI_IMPLICIT, Rsrc1, Rsrc2, NO_FLAG );
4464 | ENDDEF
4465 | ||
4466 | ||
4467 | IMPL(stfsr_rr)
4468 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4469 | #ifndef FP_DECODE_DISABLED
4470 | if (!v9p->fpu_on) {
4471 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4472 | return;
4473 | }
4474 | #endif /* FP_DECODE_DISABLED */
4475 | v9p->asi_access( sp, MA_V9_StFSR|MA_Size32, NULL, V9_ASI_IMPLICIT, Rsrc1, Rsrc2, NO_FLAG );
4476 | ENDDEF
4477 | ||
4478 | IMPL(stxfsr_rr)
4479 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);
4480 | #ifndef FP_DECODE_DISABLED
4481 | if (!v9p->fpu_on) {
4482 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled);
4483 | return;
4484 | }
4485 | #endif /* FP_DECODE_DISABLED */
4486 | v9p->asi_access( sp, MA_V9_StXFSR|MA_Size64, NULL, V9_ASI_IMPLICIT, Rsrc1, Rsrc2, NO_FLAG );
4487 | ENDDEF
4488 | ||
4489 | ||
4490 | /* ------------------------------------------------------------ */ | |
4491 | ||
4492 | /* Floating point branches. */ | |
4493 | ||
4494 | ||
4495 | ||
4496 | ||
4497 | ||
/*
 * FBfcc stub template (this and all of the following fb*_fcc0 blocks
 * follow the same pattern): check the FPU is enabled, read the 2-bit
 * fcc0 field from the FSR, and if the branch condition holds perform
 * the delayed branch — old npc becomes pc and npc becomes the branch
 * target (pc + sign-extended offset).  On a not-taken branch the ENDI
 * epilogue advances the PC normally.  Each stub is compiled only when
 * no hand-written native version of the instruction exists.
 */
4498 | /*
4499 | * Instruction: sparcv9_fbule_fcc0
4500 | */
4501 | ||
4502 | #if !defined(HAS_NATIVE_sparcv9_fbule_fcc0) /* { */
4503 | ||
4504 | IMPL(fbule_fcc0)
4505 | FP_EXEC_FPU_ON_CHECK;
/* unordered/less/equal == everything except "greater" */
4506 | if (V9_FSR_FCC0(sp->v9_fsr_ctrl) != V9_fcc_g) {
4507 | tvaddr_t tpc = Rpc + SBRoffset32;
4508 | Rpc = Rnpc;
4509 | Rnpc = tpc;
4510 | return;
4511 | }
4512 | ENDI
4513 | ||
4514 | #endif /* } */
4515 | ||
4516 | ||
4517 | ||
4518 | ||
4519 | ||
4520 | /* | |
4521 | * Instruction: sparcv9_fbg_fcc0 | |
4522 | */ | |
4523 | ||
4524 | #if !defined(HAS_NATIVE_sparcv9_fbg_fcc0) /* { */ | |
4525 | ||
4526 | IMPL(fbg_fcc0) | |
4527 | FP_EXEC_FPU_ON_CHECK; | |
4528 | if (V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_g) { | |
4529 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4530 | Rpc = Rnpc; | |
4531 | Rnpc = tpc; | |
4532 | return; | |
4533 | } | |
4534 | ENDI | |
4535 | ||
4536 | #endif /* } */ | |
4537 | ||
4538 | ||
4539 | ||
4540 | ||
4541 | ||
4542 | /* | |
4543 | * Instruction: sparcv9_fblg_fcc0 | |
4544 | */ | |
4545 | ||
4546 | #if !defined(HAS_NATIVE_sparcv9_fblg_fcc0) /* { */ | |
4547 | ||
4548 | IMPL(fblg_fcc0) | |
4549 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4550 | FP_EXEC_FPU_ON_CHECK; | |
4551 | if ( cc == V9_fcc_l || cc == V9_fcc_g ) { | |
4552 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4553 | Rpc = Rnpc; | |
4554 | Rnpc = tpc; | |
4555 | return; | |
4556 | } | |
4557 | ENDI | |
4558 | ||
4559 | #endif /* } */ | |
4560 | ||
4561 | ||
4562 | ||
4563 | ||
4564 | ||
4565 | /* | |
4566 | * Instruction: sparcv9_fble_fcc0 | |
4567 | */ | |
4568 | ||
4569 | #if !defined(HAS_NATIVE_sparcv9_fble_fcc0) /* { */ | |
4570 | ||
4571 | IMPL(fble_fcc0) | |
4572 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4573 | FP_EXEC_FPU_ON_CHECK; | |
4574 | if ( cc == V9_fcc_e || cc == V9_fcc_l ) { | |
4575 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4576 | Rpc = Rnpc; | |
4577 | Rnpc = tpc; | |
4578 | return; | |
4579 | } | |
4580 | ENDI | |
4581 | ||
4582 | #endif /* } */ | |
4583 | ||
4584 | ||
4585 | ||
4586 | ||
4587 | ||
4588 | /* | |
4589 | * Instruction: sparcv9_fbge_fcc0 | |
4590 | */ | |
4591 | ||
4592 | #if !defined(HAS_NATIVE_sparcv9_fbge_fcc0) /* { */ | |
4593 | ||
4594 | IMPL(fbge_fcc0) | |
4595 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4596 | FP_EXEC_FPU_ON_CHECK; | |
4597 | if ( cc == V9_fcc_g || cc == V9_fcc_e ) { | |
4598 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4599 | Rpc = Rnpc; | |
4600 | Rnpc = tpc; | |
4601 | return; | |
4602 | } | |
4603 | ENDI | |
4604 | ||
4605 | #endif /* } */ | |
4606 | ||
4607 | ||
4608 | ||
4609 | ||
4610 | ||
4611 | /* | |
4612 | * Instruction: sparcv9_fbne_fcc0 | |
4613 | */ | |
4614 | ||
4615 | #if !defined(HAS_NATIVE_sparcv9_fbne_fcc0) /* { */ | |
4616 | ||
4617 | IMPL(fbne_fcc0) | |
4618 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4619 | FP_EXEC_FPU_ON_CHECK; | |
4620 | if ( cc != V9_fcc_e ) { | |
4621 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4622 | Rpc = Rnpc; | |
4623 | Rnpc = tpc; | |
4624 | return; | |
4625 | } | |
4626 | ENDI | |
4627 | ||
4628 | #endif /* } */ | |
4629 | ||
4630 | ||
4631 | ||
4632 | ||
4633 | ||
4634 | /* | |
4635 | * Instruction: sparcv9_fbug_fcc0 | |
4636 | */ | |
4637 | ||
4638 | #if !defined(HAS_NATIVE_sparcv9_fbug_fcc0) /* { */ | |
4639 | ||
4640 | IMPL(fbug_fcc0) | |
4641 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4642 | FP_EXEC_FPU_ON_CHECK; | |
4643 | if ( cc == V9_fcc_u || cc == V9_fcc_g ) { | |
4644 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4645 | Rpc = Rnpc; | |
4646 | Rnpc = tpc; | |
4647 | return; | |
4648 | } | |
4649 | ENDI | |
4650 | ||
4651 | #endif /* } */ | |
4652 | ||
4653 | ||
4654 | ||
4655 | ||
4656 | ||
4657 | /* | |
4658 | * Instruction: sparcv9_fbul_fcc0 | |
4659 | */ | |
4660 | ||
4661 | #if !defined(HAS_NATIVE_sparcv9_fbul_fcc0) /* { */ | |
4662 | ||
4663 | IMPL(fbul_fcc0) | |
4664 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4665 | FP_EXEC_FPU_ON_CHECK; | |
4666 | if ( cc == V9_fcc_u || cc == V9_fcc_l ) { | |
4667 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4668 | Rpc = Rnpc; | |
4669 | Rnpc = tpc; | |
4670 | return; | |
4671 | } | |
4672 | ENDI | |
4673 | ||
4674 | #endif /* } */ | |
4675 | ||
4676 | ||
4677 | ||
4678 | ||
4679 | ||
4680 | /* | |
4681 | * Instruction: sparcv9_fbue_fcc0 | |
4682 | */ | |
4683 | ||
4684 | #if !defined(HAS_NATIVE_sparcv9_fbue_fcc0) /* { */ | |
4685 | ||
4686 | IMPL(fbue_fcc0) | |
4687 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
4688 | FP_EXEC_FPU_ON_CHECK; | |
4689 | if ( cc == V9_fcc_u || cc == V9_fcc_e ) { | |
4690 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4691 | Rpc = Rnpc; | |
4692 | Rnpc = tpc; | |
4693 | return; | |
4694 | } | |
4695 | ENDI | |
4696 | ||
4697 | #endif /* } */ | |
4698 | ||
4699 | ||
4700 | ||
4701 | ||
4702 | ||
4703 | /* | |
4704 | * Instruction: sparcv9_fbe_fcc0 | |
4705 | */ | |
4706 | ||
4707 | #if !defined(HAS_NATIVE_sparcv9_fbe_fcc0) /* { */ | |
4708 | ||
4709 | IMPL(fbe_fcc0) | |
4710 | FP_EXEC_FPU_ON_CHECK; | |
4711 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_e ) { | |
4712 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4713 | Rpc = Rnpc; | |
4714 | Rnpc = tpc; | |
4715 | return; | |
4716 | } | |
4717 | ENDI | |
4718 | ||
4719 | #endif /* } */ | |
4720 | ||
4721 | ||
4722 | ||
4723 | ||
4724 | ||
4725 | /* | |
4726 | * Instruction: sparcv9_fbo_fcc0 | |
4727 | */ | |
4728 | ||
4729 | #if !defined(HAS_NATIVE_sparcv9_fbo_fcc0) /* { */ | |
4730 | ||
4731 | IMPL(fbo_fcc0) | |
4732 | FP_EXEC_FPU_ON_CHECK; | |
4733 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) != V9_fcc_u ) { | |
4734 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4735 | Rpc = Rnpc; | |
4736 | Rnpc = tpc; | |
4737 | return; | |
4738 | } | |
4739 | ENDI | |
4740 | ||
4741 | #endif /* } */ | |
4742 | ||
4743 | ||
4744 | ||
4745 | ||
4746 | ||
4747 | /* | |
4748 | * Instruction: sparcv9_fbu_fcc0 | |
4749 | */ | |
4750 | ||
4751 | #if !defined(HAS_NATIVE_sparcv9_fbu_fcc0) /* { */ | |
4752 | ||
4753 | IMPL(fbu_fcc0) | |
4754 | FP_EXEC_FPU_ON_CHECK; | |
4755 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_u ) { | |
4756 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4757 | Rpc = Rnpc; | |
4758 | Rnpc = tpc; | |
4759 | return; | |
4760 | } | |
4761 | ENDI | |
4762 | ||
4763 | #endif /* } */ | |
4764 | ||
4765 | ||
4766 | ||
4767 | ||
4768 | ||
4769 | /* | |
4770 | * Instruction: sparcv9_fbuge_fcc0 | |
4771 | */ | |
4772 | ||
4773 | #if !defined(HAS_NATIVE_sparcv9_fbuge_fcc0) /* { */ | |
4774 | ||
4775 | IMPL(fbuge_fcc0) | |
4776 | FP_EXEC_FPU_ON_CHECK; | |
4777 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) != V9_fcc_l ) { | |
4778 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4779 | Rpc = Rnpc; | |
4780 | Rnpc = tpc; | |
4781 | return; | |
4782 | } | |
4783 | ENDI | |
4784 | ||
4785 | #endif /* } */ | |
4786 | ||
4787 | ||
4788 | ||
4789 | ||
4790 | ||
/*
 * NOTE(review): the heading below originally read sparcv9_fbvs_fcc0,
 * but the implementation is FBL (branch on fcc0 == "less"); there is
 * no FBVS variant for floating-point condition codes.  The #if guard
 * still tests HAS_NATIVE_sparcv9_fbvs_fcc0 — that looks like the same
 * copy/paste slip carried into the macro name; confirm against the
 * HAS_NATIVE_* flags generated for hostnative_asm.S before renaming.
 */
4791 | /*
4792 | * Instruction: sparcv9_fbl_fcc0
4793 | */
4794 | ||
4795 | #if !defined(HAS_NATIVE_sparcv9_fbvs_fcc0) /* { */
4796 | ||
4797 | IMPL(fbl_fcc0)
4798 | FP_EXEC_FPU_ON_CHECK;
4799 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_l ) {
4800 | tvaddr_t tpc = Rpc + SBRoffset32;
4801 | Rpc = Rnpc;
4802 | Rnpc = tpc;
4803 | return;
4804 | }
4805 | ENDI
4806 | ||
4807 | #endif /* } */
4808 | ||
4809 | ||
4810 | ||
/*
 * FBfcc stubs for fcc1-fcc3 (this and the following fb*_fccN blocks):
 * identical to the fcc0 family above except that the condition-code
 * field is selected at decode time via SBRfcc and extracted with
 * V9_FSR_FCCN.  Taken branch: old npc -> pc, branch target -> npc.
 */
4811 | /*
4812 | * Instruction: sparcv9_fbule_fccN
4813 | */
4814 | ||
4815 | #if !defined(HAS_NATIVE_sparcv9_fbule_fccN) /* { */
4816 | ||
4817 | IMPL(fbule_fccN)
4818 | FP_EXEC_FPU_ON_CHECK;
4819 | if (V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) != V9_fcc_g) {
4820 | tvaddr_t tpc = Rpc + SBRoffset32;
4821 | Rpc = Rnpc;
4822 | Rnpc = tpc;
4823 | return;
4824 | }
4825 | ENDI
4826 | ||
4827 | #endif /* } */
4828 | ||
4829 | ||
4830 | ||
4831 | ||
4832 | ||
4833 | /* | |
4834 | * Instruction: sparcv9_fbg_fccN | |
4835 | */ | |
4836 | ||
4837 | #if !defined(HAS_NATIVE_sparcv9_fbg_fccN) /* { */ | |
4838 | ||
4839 | IMPL(fbg_fccN) | |
4840 | FP_EXEC_FPU_ON_CHECK; | |
4841 | if (V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_g) { | |
4842 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4843 | Rpc = Rnpc; | |
4844 | Rnpc = tpc; | |
4845 | return; | |
4846 | } | |
4847 | ENDI | |
4848 | ||
4849 | #endif /* } */ | |
4850 | ||
4851 | ||
4852 | ||
4853 | ||
4854 | ||
4855 | /* | |
4856 | * Instruction: sparcv9_fblg_fccN | |
4857 | */ | |
4858 | ||
4859 | #if !defined(HAS_NATIVE_sparcv9_fblg_fccN) /* { */ | |
4860 | ||
4861 | IMPL(fblg_fccN) | |
4862 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
4863 | FP_EXEC_FPU_ON_CHECK; | |
4864 | if ( cc == V9_fcc_l || cc == V9_fcc_g ) { | |
4865 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4866 | Rpc = Rnpc; | |
4867 | Rnpc = tpc; | |
4868 | return; | |
4869 | } | |
4870 | ENDI | |
4871 | ||
4872 | #endif /* } */ | |
4873 | ||
4874 | ||
4875 | ||
4876 | ||
4877 | ||
4878 | /* | |
4879 | * Instruction: sparcv9_fble_fccN | |
4880 | */ | |
4881 | ||
4882 | #if !defined(HAS_NATIVE_sparcv9_fble_fccN) /* { */ | |
4883 | ||
4884 | IMPL(fble_fccN) | |
4885 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
4886 | FP_EXEC_FPU_ON_CHECK; | |
4887 | if ( cc == V9_fcc_e || cc == V9_fcc_l ) { | |
4888 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4889 | Rpc = Rnpc; | |
4890 | Rnpc = tpc; | |
4891 | return; | |
4892 | } | |
4893 | ENDI | |
4894 | ||
4895 | #endif /* } */ | |
4896 | ||
4897 | ||
4898 | ||
4899 | ||
4900 | ||
4901 | /* | |
4902 | * Instruction: sparcv9_fbge_fccN | |
4903 | */ | |
4904 | ||
4905 | #if !defined(HAS_NATIVE_sparcv9_fbge_fccN) /* { */ | |
4906 | ||
4907 | IMPL(fbge_fccN) | |
4908 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
4909 | FP_EXEC_FPU_ON_CHECK; | |
4910 | if ( cc == V9_fcc_g || cc == V9_fcc_e ) { | |
4911 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4912 | Rpc = Rnpc; | |
4913 | Rnpc = tpc; | |
4914 | return; | |
4915 | } | |
4916 | ENDI | |
4917 | ||
4918 | #endif /* } */ | |
4919 | ||
4920 | ||
4921 | ||
4922 | ||
4923 | ||
4924 | /* | |
4925 | * Instruction: sparcv9_fbne_fccN | |
4926 | */ | |
4927 | ||
4928 | #if !defined(HAS_NATIVE_sparcv9_fbne_fccN) /* { */ | |
4929 | ||
4930 | IMPL(fbne_fccN) | |
4931 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
4932 | FP_EXEC_FPU_ON_CHECK; | |
4933 | if ( cc != V9_fcc_e ) { | |
4934 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4935 | Rpc = Rnpc; | |
4936 | Rnpc = tpc; | |
4937 | return; | |
4938 | } | |
4939 | ENDI | |
4940 | ||
4941 | #endif /* } */ | |
4942 | ||
4943 | ||
4944 | ||
4945 | ||
4946 | ||
4947 | /* | |
4948 | * Instruction: sparcv9_fbug_fccN | |
4949 | */ | |
4950 | ||
4951 | #if !defined(HAS_NATIVE_sparcv9_fbug_fccN) /* { */ | |
4952 | ||
4953 | IMPL(fbug_fccN) | |
4954 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
4955 | FP_EXEC_FPU_ON_CHECK; | |
4956 | if ( cc == V9_fcc_u || cc == V9_fcc_g ) { | |
4957 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4958 | Rpc = Rnpc; | |
4959 | Rnpc = tpc; | |
4960 | return; | |
4961 | } | |
4962 | ENDI | |
4963 | ||
4964 | #endif /* } */ | |
4965 | ||
4966 | ||
4967 | ||
4968 | ||
4969 | ||
4970 | /* | |
4971 | * Instruction: sparcv9_fbul_fccN | |
4972 | */ | |
4973 | ||
4974 | #if !defined(HAS_NATIVE_sparcv9_fbul_fccN) /* { */ | |
4975 | ||
4976 | IMPL(fbul_fccN) | |
4977 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
4978 | FP_EXEC_FPU_ON_CHECK; | |
4979 | if ( cc == V9_fcc_u || cc == V9_fcc_l ) { | |
4980 | tvaddr_t tpc = Rpc + SBRoffset32; | |
4981 | Rpc = Rnpc; | |
4982 | Rnpc = tpc; | |
4983 | return; | |
4984 | } | |
4985 | ENDI | |
4986 | ||
4987 | #endif /* } */ | |
4988 | ||
4989 | ||
4990 | ||
4991 | ||
4992 | ||
4993 | /* | |
4994 | * Instruction: sparcv9_fbue_fccN | |
4995 | */ | |
4996 | ||
4997 | #if !defined(HAS_NATIVE_sparcv9_fbue_fccN) /* { */ | |
4998 | ||
4999 | IMPL(fbue_fccN) | |
5000 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5001 | FP_EXEC_FPU_ON_CHECK; | |
5002 | if ( cc == V9_fcc_u || cc == V9_fcc_e ) { | |
5003 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5004 | Rpc = Rnpc; | |
5005 | Rnpc = tpc; | |
5006 | return; | |
5007 | } | |
5008 | ENDI | |
5009 | ||
5010 | #endif /* } */ | |
5011 | ||
5012 | ||
5013 | ||
5014 | ||
5015 | ||
5016 | /* | |
5017 | * Instruction: sparcv9_fbe_fccN | |
5018 | */ | |
5019 | ||
5020 | #if !defined(HAS_NATIVE_sparcv9_fbe_fccN) /* { */ | |
5021 | ||
5022 | IMPL(fbe_fccN) | |
5023 | FP_EXEC_FPU_ON_CHECK; | |
5024 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_e ) { | |
5025 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5026 | Rpc = Rnpc; | |
5027 | Rnpc = tpc; | |
5028 | return; | |
5029 | } | |
5030 | ENDI | |
5031 | ||
5032 | #endif /* } */ | |
5033 | ||
5034 | ||
5035 | ||
5036 | ||
5037 | ||
5038 | /* | |
5039 | * Instruction: sparcv9_fbo_fccN | |
5040 | */ | |
5041 | ||
5042 | #if !defined(HAS_NATIVE_sparcv9_fbo_fccN) /* { */ | |
5043 | ||
5044 | IMPL(fbo_fccN) | |
5045 | FP_EXEC_FPU_ON_CHECK; | |
5046 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) != V9_fcc_u ) { | |
5047 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5048 | Rpc = Rnpc; | |
5049 | Rnpc = tpc; | |
5050 | return; | |
5051 | } | |
5052 | ENDI | |
5053 | ||
5054 | #endif /* } */ | |
5055 | ||
5056 | ||
5057 | ||
5058 | ||
5059 | ||
5060 | /* | |
5061 | * Instruction: sparcv9_fbu_fccN | |
5062 | */ | |
5063 | ||
5064 | #if !defined(HAS_NATIVE_sparcv9_fbu_fccN) /* { */ | |
5065 | ||
5066 | IMPL(fbu_fccN) | |
5067 | FP_EXEC_FPU_ON_CHECK; | |
5068 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_u ) { | |
5069 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5070 | Rpc = Rnpc; | |
5071 | Rnpc = tpc; | |
5072 | return; | |
5073 | } | |
5074 | ENDI | |
5075 | ||
5076 | #endif /* } */ | |
5077 | ||
5078 | ||
5079 | ||
5080 | ||
5081 | ||
5082 | /* | |
5083 | * Instruction: sparcv9_fbuge_fccN | |
5084 | */ | |
5085 | ||
5086 | #if !defined(HAS_NATIVE_sparcv9_fbuge_fccN) /* { */ | |
5087 | ||
5088 | IMPL(fbuge_fccN) | |
5089 | FP_EXEC_FPU_ON_CHECK; | |
5090 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) != V9_fcc_l ) { | |
5091 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5092 | Rpc = Rnpc; | |
5093 | Rnpc = tpc; | |
5094 | return; | |
5095 | } | |
5096 | ENDI | |
5097 | ||
5098 | #endif /* } */ | |
5099 | ||
5100 | ||
5101 | ||
5102 | ||
5103 | ||
5104 | /* | |
5105 | * Instruction: sparcv9_fbl_fccN | |
5106 | */ | |
5107 | ||
5108 | #if !defined(HAS_NATIVE_sparcv9_fbl_fccN) /* { */ | |
5109 | ||
5110 | IMPL(fbl_fccN) | |
5111 | FP_EXEC_FPU_ON_CHECK; | |
5112 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_l ) { | |
5113 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5114 | Rpc = Rnpc; | |
5115 | Rnpc = tpc; | |
5116 | return; | |
5117 | } | |
5118 | ENDI | |
5119 | ||
5120 | #endif /* } */ | |
5121 | ||
5122 | ||
5123 | ||
5124 | ||
5125 | ||
5126 | ||
5127 | ||
5128 | ||
5129 | /* | |
5130 | * Instruction: sparcv9_fbule_fcc0_an | |
5131 | */ | |
5132 | ||
5133 | #if !defined(HAS_NATIVE_sparcv9_fbule_fcc0_an) /* { */ | |
5134 | ||
5135 | IMPL(fbule_fcc0_an) | |
5136 | FP_EXEC_FPU_ON_CHECK; | |
5137 | if (V9_FSR_FCC0(sp->v9_fsr_ctrl) != V9_fcc_g) { | |
5138 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5139 | Rpc = Rnpc; | |
5140 | Rnpc = tpc; | |
5141 | return; | |
5142 | } | |
5143 | Rpc = Rnpc + 4; | |
5144 | Rnpc = Rnpc + 8; | |
5145 | ENDDEF | |
5146 | ||
5147 | #endif /* } */ | |
5148 | ||
5149 | ||
5150 | ||
5151 | ||
5152 | ||
5153 | /* | |
5154 | * Instruction: sparcv9_fbg_fcc0_an | |
5155 | */ | |
5156 | ||
5157 | #if !defined(HAS_NATIVE_sparcv9_fbg_fcc0_an) /* { */ | |
5158 | ||
5159 | IMPL(fbg_fcc0_an) | |
5160 | FP_EXEC_FPU_ON_CHECK; | |
5161 | if (V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_g) { | |
5162 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5163 | Rpc = Rnpc; | |
5164 | Rnpc = tpc; | |
5165 | return; | |
5166 | } | |
5167 | Rpc = Rnpc + 4; | |
5168 | Rnpc = Rnpc + 8; | |
5169 | ENDDEF | |
5170 | ||
5171 | #endif /* } */ | |
5172 | ||
5173 | ||
5174 | ||
5175 | ||
5176 | ||
5177 | /* | |
5178 | * Instruction: sparcv9_fblg_fcc0_an | |
5179 | */ | |
5180 | ||
5181 | #if !defined(HAS_NATIVE_sparcv9_fblg_fcc0_an) /* { */ | |
5182 | ||
5183 | IMPL(fblg_fcc0_an) | |
5184 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5185 | FP_EXEC_FPU_ON_CHECK; | |
5186 | if ( cc == V9_fcc_l || cc == V9_fcc_g ) { | |
5187 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5188 | Rpc = Rnpc; | |
5189 | Rnpc = tpc; | |
5190 | return; | |
5191 | } | |
5192 | Rpc = Rnpc + 4; | |
5193 | Rnpc = Rnpc + 8; | |
5194 | ENDDEF | |
5195 | ||
5196 | #endif /* } */ | |
5197 | ||
5198 | ||
5199 | ||
5200 | ||
5201 | ||
5202 | /* | |
5203 | * Instruction: sparcv9_fble_fcc0_an | |
5204 | */ | |
5205 | ||
5206 | #if !defined(HAS_NATIVE_sparcv9_fble_fcc0_an) /* { */ | |
5207 | ||
5208 | IMPL(fble_fcc0_an) | |
5209 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5210 | FP_EXEC_FPU_ON_CHECK; | |
5211 | if ( cc == V9_fcc_e || cc == V9_fcc_l ) { | |
5212 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5213 | Rpc = Rnpc; | |
5214 | Rnpc = tpc; | |
5215 | return; | |
5216 | } | |
5217 | Rpc = Rnpc + 4; | |
5218 | Rnpc = Rnpc + 8; | |
5219 | ENDDEF | |
5220 | ||
5221 | #endif /* } */ | |
5222 | ||
5223 | ||
5224 | ||
5225 | ||
5226 | ||
5227 | /* | |
5228 | * Instruction: sparcv9_fbge_fcc0_an | |
5229 | */ | |
5230 | ||
5231 | #if !defined(HAS_NATIVE_sparcv9_fbge_fcc0_an) /* { */ | |
5232 | ||
5233 | IMPL(fbge_fcc0_an) | |
5234 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5235 | FP_EXEC_FPU_ON_CHECK; | |
5236 | if ( cc == V9_fcc_g || cc == V9_fcc_e ) { | |
5237 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5238 | Rpc = Rnpc; | |
5239 | Rnpc = tpc; | |
5240 | return; | |
5241 | } | |
5242 | Rpc = Rnpc + 4; | |
5243 | Rnpc = Rnpc + 8; | |
5244 | ENDDEF | |
5245 | ||
5246 | #endif /* } */ | |
5247 | ||
5248 | ||
5249 | ||
5250 | ||
5251 | ||
5252 | /* | |
5253 | * Instruction: sparcv9_fbne_fcc0_an | |
5254 | */ | |
5255 | ||
5256 | #if !defined(HAS_NATIVE_sparcv9_fbne_fcc0_an) /* { */ | |
5257 | ||
5258 | IMPL(fbne_fcc0_an) | |
5259 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5260 | FP_EXEC_FPU_ON_CHECK; | |
5261 | if ( cc != V9_fcc_e ) { | |
5262 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5263 | Rpc = Rnpc; | |
5264 | Rnpc = tpc; | |
5265 | return; | |
5266 | } | |
5267 | Rpc = Rnpc + 4; | |
5268 | Rnpc = Rnpc + 8; | |
5269 | ENDDEF | |
5270 | ||
5271 | #endif /* } */ | |
5272 | ||
5273 | ||
5274 | ||
5275 | ||
5276 | ||
5277 | /* | |
5278 | * Instruction: sparcv9_fbug_fcc0_an | |
5279 | */ | |
5280 | ||
5281 | #if !defined(HAS_NATIVE_sparcv9_fbug_fcc0_an) /* { */ | |
5282 | ||
5283 | IMPL(fbug_fcc0_an) | |
5284 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5285 | FP_EXEC_FPU_ON_CHECK; | |
5286 | if ( cc == V9_fcc_u || cc == V9_fcc_g ) { | |
5287 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5288 | Rpc = Rnpc; | |
5289 | Rnpc = tpc; | |
5290 | return; | |
5291 | } | |
5292 | Rpc = Rnpc + 4; | |
5293 | Rnpc = Rnpc + 8; | |
5294 | ENDDEF | |
5295 | ||
5296 | #endif /* } */ | |
5297 | ||
5298 | ||
5299 | ||
5300 | ||
5301 | ||
5302 | /* | |
5303 | * Instruction: sparcv9_fbul_fcc0_an | |
5304 | */ | |
5305 | ||
5306 | #if !defined(HAS_NATIVE_sparcv9_fbul_fcc0_an) /* { */ | |
5307 | ||
5308 | IMPL(fbul_fcc0_an) | |
5309 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5310 | FP_EXEC_FPU_ON_CHECK; | |
5311 | if ( cc == V9_fcc_u || cc == V9_fcc_l ) { | |
5312 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5313 | Rpc = Rnpc; | |
5314 | Rnpc = tpc; | |
5315 | return; | |
5316 | } | |
5317 | Rpc = Rnpc + 4; | |
5318 | Rnpc = Rnpc + 8; | |
5319 | ENDDEF | |
5320 | ||
5321 | #endif /* } */ | |
5322 | ||
5323 | ||
5324 | ||
5325 | ||
5326 | ||
5327 | /* | |
5328 | * Instruction: sparcv9_fbue_fcc0_an | |
5329 | */ | |
5330 | ||
5331 | #if !defined(HAS_NATIVE_sparcv9_fbue_fcc0_an) /* { */ | |
5332 | ||
5333 | IMPL(fbue_fcc0_an) | |
5334 | int cc = V9_FSR_FCC0(sp->v9_fsr_ctrl); | |
5335 | FP_EXEC_FPU_ON_CHECK; | |
5336 | if ( cc == V9_fcc_u || cc == V9_fcc_e ) { | |
5337 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5338 | Rpc = Rnpc; | |
5339 | Rnpc = tpc; | |
5340 | return; | |
5341 | } | |
5342 | Rpc = Rnpc + 4; | |
5343 | Rnpc = Rnpc + 8; | |
5344 | ENDDEF | |
5345 | ||
5346 | #endif /* } */ | |
5347 | ||
5348 | ||
5349 | ||
5350 | ||
5351 | ||
5352 | /* | |
5353 | * Instruction: sparcv9_fbe_fcc0_an | |
5354 | */ | |
5355 | ||
5356 | #if !defined(HAS_NATIVE_sparcv9_fbe_fcc0_an) /* { */ | |
5357 | ||
5358 | IMPL(fbe_fcc0_an) | |
5359 | FP_EXEC_FPU_ON_CHECK; | |
5360 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_e ) { | |
5361 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5362 | Rpc = Rnpc; | |
5363 | Rnpc = tpc; | |
5364 | return; | |
5365 | } | |
5366 | Rpc = Rnpc + 4; | |
5367 | Rnpc = Rnpc + 8; | |
5368 | ENDDEF | |
5369 | ||
5370 | #endif /* } */ | |
5371 | ||
5372 | ||
5373 | ||
5374 | ||
5375 | ||
5376 | /* | |
5377 | * Instruction: sparcv9_fbo_fcc0_an | |
5378 | */ | |
5379 | ||
5380 | #if !defined(HAS_NATIVE_sparcv9_fbo_fcc0_an) /* { */ | |
5381 | ||
5382 | IMPL(fbo_fcc0_an) | |
5383 | FP_EXEC_FPU_ON_CHECK; | |
5384 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) != V9_fcc_u ) { | |
5385 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5386 | Rpc = Rnpc; | |
5387 | Rnpc = tpc; | |
5388 | return; | |
5389 | } | |
5390 | Rpc = Rnpc + 4; | |
5391 | Rnpc = Rnpc + 8; | |
5392 | ENDDEF | |
5393 | ||
5394 | #endif /* } */ | |
5395 | ||
5396 | ||
5397 | ||
5398 | ||
5399 | ||
5400 | /* | |
5401 | * Instruction: sparcv9_fbu_fcc0_an | |
5402 | */ | |
5403 | ||
5404 | #if !defined(HAS_NATIVE_sparcv9_fbu_fcc0_an) /* { */ | |
5405 | ||
5406 | IMPL(fbu_fcc0_an) | |
5407 | FP_EXEC_FPU_ON_CHECK; | |
5408 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_u ) { | |
5409 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5410 | Rpc = Rnpc; | |
5411 | Rnpc = tpc; | |
5412 | return; | |
5413 | } | |
5414 | Rpc = Rnpc + 4; | |
5415 | Rnpc = Rnpc + 8; | |
5416 | ENDDEF | |
5417 | ||
5418 | #endif /* } */ | |
5419 | ||
5420 | ||
5421 | ||
5422 | ||
5423 | ||
5424 | /* | |
5425 | * Instruction: sparcv9_fbuge_fcc0_an | |
5426 | */ | |
5427 | ||
5428 | #if !defined(HAS_NATIVE_sparcv9_fbuge_fcc0_an) /* { */ | |
5429 | ||
5430 | IMPL(fbuge_fcc0_an) | |
5431 | FP_EXEC_FPU_ON_CHECK; | |
5432 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) != V9_fcc_l ) { | |
5433 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5434 | Rpc = Rnpc; | |
5435 | Rnpc = tpc; | |
5436 | return; | |
5437 | } | |
5438 | Rpc = Rnpc + 4; | |
5439 | Rnpc = Rnpc + 8; | |
5440 | ENDDEF | |
5441 | ||
5442 | #endif /* } */ | |
5443 | ||
5444 | ||
5445 | ||
5446 | ||
5447 | ||
5448 | /* | |
5449 | * Instruction: sparcv9_fbl_fcc0_an | |
5450 | */ | |
5451 | ||
5452 | #if !defined(HAS_NATIVE_sparcv9_fbl_fcc0_an) /* { */ | |
5453 | ||
5454 | IMPL(fbl_fcc0_an) | |
5455 | FP_EXEC_FPU_ON_CHECK; | |
5456 | if ( V9_FSR_FCC0(sp->v9_fsr_ctrl) == V9_fcc_l ) { | |
5457 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5458 | Rpc = Rnpc; | |
5459 | Rnpc = tpc; | |
5460 | return; | |
5461 | } | |
5462 | Rpc = Rnpc + 4; | |
5463 | Rnpc = Rnpc + 8; | |
5464 | ENDDEF | |
5465 | ||
5466 | #endif /* } */ | |
5467 | ||
5468 | ||
5469 | ||
5470 | ||
5471 | ||
5472 | ||
5473 | /* | |
5474 | * Instruction: sparcv9_fbule_fccN_an | |
5475 | */ | |
5476 | ||
5477 | #if !defined(HAS_NATIVE_sparcv9_fbule_fccN_an) /* { */ | |
5478 | ||
5479 | IMPL(fbule_fccN_an) | |
5480 | FP_EXEC_FPU_ON_CHECK; | |
5481 | if (V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) != V9_fcc_g) { | |
5482 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5483 | Rpc = Rnpc; | |
5484 | Rnpc = tpc; | |
5485 | return; | |
5486 | } | |
5487 | Rpc = Rnpc + 4; | |
5488 | Rnpc = Rnpc + 8; | |
5489 | ENDDEF | |
5490 | ||
5491 | #endif /* } */ | |
5492 | ||
5493 | ||
5494 | ||
5495 | ||
5496 | ||
5497 | /* | |
5498 | * Instruction: sparcv9_fbg_fccN_an | |
5499 | */ | |
5500 | ||
5501 | #if !defined(HAS_NATIVE_sparcv9_fbg_fccN_an) /* { */ | |
5502 | ||
5503 | IMPL(fbg_fccN_an) | |
5504 | FP_EXEC_FPU_ON_CHECK; | |
5505 | if (V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_g) { | |
5506 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5507 | Rpc = Rnpc; | |
5508 | Rnpc = tpc; | |
5509 | return; | |
5510 | } | |
5511 | Rpc = Rnpc + 4; | |
5512 | Rnpc = Rnpc + 8; | |
5513 | ENDDEF | |
5514 | ||
5515 | #endif /* } */ | |
5516 | ||
5517 | ||
5518 | ||
5519 | ||
5520 | ||
5521 | /* | |
5522 | * Instruction: sparcv9_fblg_fccN_an | |
5523 | */ | |
5524 | ||
5525 | #if !defined(HAS_NATIVE_sparcv9_fblg_fccN_an) /* { */ | |
5526 | ||
5527 | IMPL(fblg_fccN_an) | |
5528 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5529 | FP_EXEC_FPU_ON_CHECK; | |
5530 | if ( cc == V9_fcc_l || cc == V9_fcc_g ) { | |
5531 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5532 | Rpc = Rnpc; | |
5533 | Rnpc = tpc; | |
5534 | return; | |
5535 | } | |
5536 | Rpc = Rnpc + 4; | |
5537 | Rnpc = Rnpc + 8; | |
5538 | ENDDEF | |
5539 | ||
5540 | #endif /* } */ | |
5541 | ||
5542 | ||
5543 | ||
5544 | ||
5545 | ||
5546 | /* | |
5547 | * Instruction: sparcv9_fble_fccN_an | |
5548 | */ | |
5549 | ||
5550 | #if !defined(HAS_NATIVE_sparcv9_fble_fccN_an) /* { */ | |
5551 | ||
5552 | IMPL(fble_fccN_an) | |
5553 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5554 | FP_EXEC_FPU_ON_CHECK; | |
5555 | if ( cc == V9_fcc_e || cc == V9_fcc_l ) { | |
5556 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5557 | Rpc = Rnpc; | |
5558 | Rnpc = tpc; | |
5559 | return; | |
5560 | } | |
5561 | Rpc = Rnpc + 4; | |
5562 | Rnpc = Rnpc + 8; | |
5563 | ENDDEF | |
5564 | ||
5565 | #endif /* } */ | |
5566 | ||
5567 | ||
5568 | ||
5569 | ||
5570 | ||
5571 | /* | |
5572 | * Instruction: sparcv9_fbge_fccN_an | |
5573 | */ | |
5574 | ||
5575 | #if !defined(HAS_NATIVE_sparcv9_fbge_fccN_an) /* { */ | |
5576 | ||
5577 | IMPL(fbge_fccN_an) | |
5578 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5579 | FP_EXEC_FPU_ON_CHECK; | |
5580 | if ( cc == V9_fcc_g || cc == V9_fcc_e ) { | |
5581 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5582 | Rpc = Rnpc; | |
5583 | Rnpc = tpc; | |
5584 | return; | |
5585 | } | |
5586 | Rpc = Rnpc + 4; | |
5587 | Rnpc = Rnpc + 8; | |
5588 | ENDDEF | |
5589 | ||
5590 | #endif /* } */ | |
5591 | ||
5592 | ||
5593 | ||
5594 | ||
5595 | ||
5596 | /* | |
5597 | * Instruction: sparcv9_fbne_fccN_an | |
5598 | */ | |
5599 | ||
5600 | #if !defined(HAS_NATIVE_sparcv9_fbne_fccN_an) /* { */ | |
5601 | ||
5602 | IMPL(fbne_fccN_an) | |
5603 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5604 | FP_EXEC_FPU_ON_CHECK; | |
5605 | if ( cc != V9_fcc_e ) { | |
5606 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5607 | Rpc = Rnpc; | |
5608 | Rnpc = tpc; | |
5609 | return; | |
5610 | } | |
5611 | Rpc = Rnpc + 4; | |
5612 | Rnpc = Rnpc + 8; | |
5613 | ENDDEF | |
5614 | ||
5615 | #endif /* } */ | |
5616 | ||
5617 | ||
5618 | ||
5619 | ||
5620 | ||
5621 | /* | |
5622 | * Instruction: sparcv9_fbug_fccN_an | |
5623 | */ | |
5624 | ||
5625 | #if !defined(HAS_NATIVE_sparcv9_fbug_fccN_an) /* { */ | |
5626 | ||
5627 | IMPL(fbug_fccN_an) | |
5628 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5629 | FP_EXEC_FPU_ON_CHECK; | |
5630 | if ( cc == V9_fcc_u || cc == V9_fcc_g ) { | |
5631 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5632 | Rpc = Rnpc; | |
5633 | Rnpc = tpc; | |
5634 | return; | |
5635 | } | |
5636 | Rpc = Rnpc + 4; | |
5637 | Rnpc = Rnpc + 8; | |
5638 | ENDDEF | |
5639 | ||
5640 | #endif /* } */ | |
5641 | ||
5642 | ||
5643 | ||
5644 | ||
5645 | ||
5646 | /* | |
5647 | * Instruction: sparcv9_fbul_fccN_an | |
5648 | */ | |
5649 | ||
5650 | #if !defined(HAS_NATIVE_sparcv9_fbul_fccN_an) /* { */ | |
5651 | ||
5652 | IMPL(fbul_fccN_an) | |
5653 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5654 | FP_EXEC_FPU_ON_CHECK; | |
5655 | if ( cc == V9_fcc_u || cc == V9_fcc_l ) { | |
5656 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5657 | Rpc = Rnpc; | |
5658 | Rnpc = tpc; | |
5659 | return; | |
5660 | } | |
5661 | Rpc = Rnpc + 4; | |
5662 | Rnpc = Rnpc + 8; | |
5663 | ENDDEF | |
5664 | ||
5665 | #endif /* } */ | |
5666 | ||
5667 | ||
5668 | ||
5669 | ||
5670 | ||
5671 | /* | |
5672 | * Instruction: sparcv9_fbue_fccN_an | |
5673 | */ | |
5674 | ||
5675 | #if !defined(HAS_NATIVE_sparcv9_fbue_fccN_an) /* { */ | |
5676 | ||
5677 | IMPL(fbue_fccN_an) | |
5678 | int cc = V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc); | |
5679 | FP_EXEC_FPU_ON_CHECK; | |
5680 | if ( cc == V9_fcc_u || cc == V9_fcc_e ) { | |
5681 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5682 | Rpc = Rnpc; | |
5683 | Rnpc = tpc; | |
5684 | return; | |
5685 | } | |
5686 | Rpc = Rnpc + 4; | |
5687 | Rnpc = Rnpc + 8; | |
5688 | ENDDEF | |
5689 | ||
5690 | #endif /* } */ | |
5691 | ||
5692 | ||
5693 | ||
5694 | ||
5695 | ||
5696 | /* | |
5697 | * Instruction: sparcv9_fbe_fccN_an | |
5698 | */ | |
5699 | ||
5700 | #if !defined(HAS_NATIVE_sparcv9_fbe_fccN_an) /* { */ | |
5701 | ||
5702 | IMPL(fbe_fccN_an) | |
5703 | FP_EXEC_FPU_ON_CHECK; | |
5704 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_e ) { | |
5705 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5706 | Rpc = Rnpc; | |
5707 | Rnpc = tpc; | |
5708 | return; | |
5709 | } | |
5710 | Rpc = Rnpc + 4; | |
5711 | Rnpc = Rnpc + 8; | |
5712 | ENDDEF | |
5713 | ||
5714 | #endif /* } */ | |
5715 | ||
5716 | ||
5717 | ||
5718 | ||
5719 | ||
5720 | /* | |
5721 | * Instruction: sparcv9_fbo_fccN_an | |
5722 | */ | |
5723 | ||
5724 | #if !defined(HAS_NATIVE_sparcv9_fbo_fccN_an) /* { */ | |
5725 | ||
5726 | IMPL(fbo_fccN_an) | |
5727 | FP_EXEC_FPU_ON_CHECK; | |
5728 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) != V9_fcc_u ) { | |
5729 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5730 | Rpc = Rnpc; | |
5731 | Rnpc = tpc; | |
5732 | return; | |
5733 | } | |
5734 | Rpc = Rnpc + 4; | |
5735 | Rnpc = Rnpc + 8; | |
5736 | ENDDEF | |
5737 | ||
5738 | #endif /* } */ | |
5739 | ||
5740 | ||
5741 | ||
5742 | ||
5743 | ||
5744 | /* | |
5745 | * Instruction: sparcv9_fbu_fccN_an | |
5746 | */ | |
5747 | ||
5748 | #if !defined(HAS_NATIVE_sparcv9_fbu_fccN_an) /* { */ | |
5749 | ||
5750 | IMPL(fbu_fccN_an) | |
5751 | FP_EXEC_FPU_ON_CHECK; | |
5752 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_u ) { | |
5753 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5754 | Rpc = Rnpc; | |
5755 | Rnpc = tpc; | |
5756 | return; | |
5757 | } | |
5758 | Rpc = Rnpc + 4; | |
5759 | Rnpc = Rnpc + 8; | |
5760 | ENDDEF | |
5761 | ||
5762 | #endif /* } */ | |
5763 | ||
5764 | ||
5765 | ||
5766 | ||
5767 | ||
5768 | /* | |
5769 | * Instruction: sparcv9_fbuge_fccN_an | |
5770 | */ | |
5771 | ||
5772 | #if !defined(HAS_NATIVE_sparcv9_fbuge_fccN_an) /* { */ | |
5773 | ||
5774 | IMPL(fbuge_fccN_an) | |
5775 | FP_EXEC_FPU_ON_CHECK; | |
5776 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) != V9_fcc_l ) { | |
5777 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5778 | Rpc = Rnpc; | |
5779 | Rnpc = tpc; | |
5780 | return; | |
5781 | } | |
5782 | Rpc = Rnpc + 4; | |
5783 | Rnpc = Rnpc + 8; | |
5784 | ENDDEF | |
5785 | ||
5786 | #endif /* } */ | |
5787 | ||
5788 | ||
5789 | ||
5790 | ||
5791 | ||
5792 | /* | |
5793 | * Instruction: sparcv9_fbl_fccN_an | |
5794 | */ | |
5795 | ||
5796 | #if !defined(HAS_NATIVE_sparcv9_fbl_fccN_an) /* { */ | |
5797 | ||
5798 | IMPL(fbl_fccN_an) | |
5799 | FP_EXEC_FPU_ON_CHECK; | |
5800 | if ( V9_FSR_FCCN(sp->v9_fsr_ctrl, SBRfcc) == V9_fcc_l ) { | |
5801 | tvaddr_t tpc = Rpc + SBRoffset32; | |
5802 | Rpc = Rnpc; | |
5803 | Rnpc = tpc; | |
5804 | return; | |
5805 | } | |
5806 | Rpc = Rnpc + 4; | |
5807 | Rnpc = Rnpc + 8; | |
5808 | ENDDEF | |
5809 | ||
5810 | #endif /* } */ | |
5811 | ||
5812 | ||
5813 | IMPL(illtrap) | |
5814 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
5815 | ||
5816 | v9p->post_precise_trap(sp, Sparcv9_trap_illegal_instruction); | |
5817 | ENDDEF | |
5818 | ||
5819 | /* ------------------------------------------------------------ */ | |
5820 | ||
5821 | /* Miscellaneous stuff ... not real instructions, but executed that way */ | |
5822 | ||
5823 | ||
5824 | IMPL(illegal_instruction) | |
5825 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
5826 | #define IBUF 160 | |
5827 | char ibuf[IBUF]; | |
5828 | /* | |
5829 | * Hack to limit spewing of warnings about invalid instructions. Each | |
5830 | * opcode (as determined by bits 24-19) has a limit of 40 (actually | |
5831 | * INV_INST_LIMIT) error reports. Feel free to replace this code with | |
5832 | * something better. | |
5833 | */ | |
5834 | #define INV_INST_LIMIT 40 | |
5835 | #define INV_INST_SIZE 64 | |
5836 | #define OP_EXTR(inst) (((inst) >> 19) & 0x3f) | |
5837 | /* statics init to zero */ | |
5838 | static int invalid_instruction_count[INV_INST_SIZE]; | |
5839 | ||
5840 | DBGILLINST( | |
5841 | sparcv9_idis(ibuf, IBUF, FE_INSTN(xcip->rawi), sp->pc); | |
5842 | if (invalid_instruction_count[OP_EXTR(FE_INSTN(xcip->rawi))]++ < INV_INST_LIMIT) { | |
5843 | lprintf(sp->gid, "illegal instruction pc=0x%llx " | |
5844 | "instn=%08x: %s\n", sp->pc, FE_INSTN(xcip->rawi), ibuf); | |
5845 | } | |
5846 | ); | |
5847 | v9p->post_precise_trap(sp, Sparcv9_trap_illegal_instruction); | |
5848 | #undef IBUF | |
5849 | ENDDEF | |
5850 | ||
5851 | ||
5852 | IMPL(fp_unimplemented_instruction) | |
5853 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
5854 | #define IBUF 160 | |
5855 | char ibuf[IBUF]; | |
5856 | ||
5857 | if (!v9p->fpu_on) { | |
5858 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled); | |
5859 | return; | |
5860 | } | |
5861 | ||
5862 | DBGILLINST( | |
5863 | sparcv9_idis(ibuf, IBUF, FE_INSTN(xcip->rawi), sp->pc); | |
5864 | lprintf(sp->gid, "unimplemented fp op pc=0x%llx instn=%08x: %s\n", sp->pc, FE_INSTN(xcip->rawi), ibuf); | |
5865 | ); | |
5866 | ||
5867 | sp->v9_fsr_ctrl &= ~V9_FSR_FTT_MASK; | |
5868 | sp->v9_fsr_ctrl |= SPARCv9_FTT_unimplemented_FPop << V9_FSR_FTT_SHIFT; | |
5869 | ||
5870 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_exception_other); | |
5871 | #undef IBUF | |
5872 | ||
5873 | ENDDEF | |
5874 | ||
5875 | ||
5876 | #ifdef PROCESSOR_SUPPORTS_QUADFP /* { */ | |
5877 | ||
5878 | IMPL(fp_invalidreg_instruction) | |
5879 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
5880 | #define IBUF 160 | |
5881 | char ibuf[IBUF]; | |
5882 | ||
5883 | if (!v9p->fpu_on) { | |
5884 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_disabled); | |
5885 | return; | |
5886 | } | |
5887 | ||
5888 | DBGILLINST( | |
5889 | sparcv9_idis(ibuf, IBUF, FE_INSTN(xcip->rawi), sp->pc); | |
5890 | lprintf(sp->gid, "invalid fp register pc=0x%llx instn=%08x: %s\n", sp->pc, FE_INSTN(xcip->rawi), ibuf); | |
5891 | ); | |
5892 | ||
5893 | sp->v9_fsr_ctrl &= ~V9_FSR_FTT_MASK; | |
5894 | sp->v9_fsr_ctrl |= SPARCv9_FTT_invalid_fp_register << V9_FSR_FTT_SHIFT; | |
5895 | ||
5896 | v9p->post_precise_trap(sp, Sparcv9_trap_fp_exception_other); | |
5897 | #undef IBUF | |
5898 | ||
5899 | ENDDEF | |
5900 | ||
5901 | #endif /* } */ | |
5902 | ||
5903 | ||
5904 | #if !defined(HAS_NATIVE_sparcv9_fcmps_fcc0) /* { */ | |
5905 | IMPL(fcmps_fcc0) | |
5906 | FP_EXEC_FPU_ON_CHECK | |
5907 | FPU_NOT_IMPLEMENTED("fcmps_fcc0") | |
5908 | ENDDEF | |
5909 | #endif /* } */ | |
5910 | ||
5911 | #if !defined(HAS_NATIVE_sparcv9_fcmps_fcc1) /* { */ | |
5912 | IMPL(fcmps_fcc1) | |
5913 | FP_EXEC_FPU_ON_CHECK | |
5914 | FPU_NOT_IMPLEMENTED("fcmps_fcc1") | |
5915 | ENDDEF | |
5916 | #endif /* } */ | |
5917 | ||
5918 | #if !defined(HAS_NATIVE_sparcv9_fcmps_fcc2) /* { */ | |
5919 | IMPL(fcmps_fcc2) | |
5920 | FP_EXEC_FPU_ON_CHECK | |
5921 | FPU_NOT_IMPLEMENTED("fcmps_fcc2") | |
5922 | ENDDEF | |
5923 | #endif /* } */ | |
5924 | ||
5925 | #if !defined(HAS_NATIVE_sparcv9_fcmps_fcc3) /* { */ | |
5926 | IMPL(fcmps_fcc3) | |
5927 | FP_EXEC_FPU_ON_CHECK | |
5928 | FPU_NOT_IMPLEMENTED("fcmps_fcc3") | |
5929 | ENDDEF | |
5930 | #endif /* } */ | |
5931 | ||
5932 | #if !defined(HAS_NATIVE_sparcv9_fcmpd_fcc0) /* { */ | |
5933 | IMPL(fcmpd_fcc0) | |
5934 | FP_EXEC_FPU_ON_CHECK | |
5935 | FPU_NOT_IMPLEMENTED("fcmpd_fcc0") | |
5936 | ENDDEF | |
5937 | #endif /* } */ | |
5938 | ||
5939 | #if !defined(HAS_NATIVE_sparcv9_fcmpd_fcc1) /* { */ | |
5940 | IMPL(fcmpd_fcc1) | |
5941 | FP_EXEC_FPU_ON_CHECK | |
5942 | FPU_NOT_IMPLEMENTED("fcmpd_fcc1") | |
5943 | ENDDEF | |
5944 | #endif /* } */ | |
5945 | ||
5946 | #if !defined(HAS_NATIVE_sparcv9_fcmpd_fcc2) /* { */ | |
5947 | IMPL(fcmpd_fcc2) | |
5948 | FP_EXEC_FPU_ON_CHECK | |
5949 | FPU_NOT_IMPLEMENTED("fcmpd_fcc2") | |
5950 | ENDDEF | |
5951 | #endif /* } */ | |
5952 | ||
5953 | #if !defined(HAS_NATIVE_sparcv9_fcmpd_fcc3) /* { */ | |
5954 | IMPL(fcmpd_fcc3) | |
5955 | FP_EXEC_FPU_ON_CHECK | |
5956 | FPU_NOT_IMPLEMENTED("fcmpd_fcc3") | |
5957 | ENDDEF | |
5958 | #endif /* } */ | |
5959 | ||
5960 | #if !defined(HAS_NATIVE_sparcv9_fcmpes_fcc0) /* { */ | |
5961 | IMPL(fcmpes_fcc0) | |
5962 | FP_EXEC_FPU_ON_CHECK | |
5963 | FPU_NOT_IMPLEMENTED("fcmpes_fcc0") | |
5964 | ENDDEF | |
5965 | #endif /* } */ | |
5966 | ||
5967 | #if !defined(HAS_NATIVE_sparcv9_fcmpes_fcc1) /* { */ | |
5968 | IMPL(fcmpes_fcc1) | |
5969 | FP_EXEC_FPU_ON_CHECK | |
5970 | FPU_NOT_IMPLEMENTED("fcmpes_fcc1") | |
5971 | ENDDEF | |
5972 | #endif /* } */ | |
5973 | ||
5974 | #if !defined(HAS_NATIVE_sparcv9_fcmpes_fcc2) /* { */ | |
5975 | IMPL(fcmpes_fcc2) | |
5976 | FP_EXEC_FPU_ON_CHECK | |
5977 | FPU_NOT_IMPLEMENTED("fcmpes_fcc2") | |
5978 | ENDDEF | |
5979 | #endif /* } */ | |
5980 | ||
5981 | #if !defined(HAS_NATIVE_sparcv9_fcmpes_fcc3) /* { */ | |
5982 | IMPL(fcmpes_fcc3) | |
5983 | FP_EXEC_FPU_ON_CHECK | |
5984 | FPU_NOT_IMPLEMENTED("fcmpes_fcc3") | |
5985 | ENDDEF | |
5986 | #endif /* } */ | |
5987 | ||
5988 | #if !defined(HAS_NATIVE_sparcv9_fcmped_fcc0) /* { */ | |
5989 | IMPL(fcmped_fcc0) | |
5990 | FP_EXEC_FPU_ON_CHECK | |
5991 | FPU_NOT_IMPLEMENTED("fcmped_fcc0") | |
5992 | ENDDEF | |
5993 | #endif /* } */ | |
5994 | ||
5995 | #if !defined(HAS_NATIVE_sparcv9_fcmped_fcc1) /* { */ | |
5996 | IMPL(fcmped_fcc1) | |
5997 | FP_EXEC_FPU_ON_CHECK | |
5998 | FPU_NOT_IMPLEMENTED("fcmped_fcc1") | |
5999 | ENDDEF | |
6000 | #endif /* } */ | |
6001 | ||
6002 | #if !defined(HAS_NATIVE_sparcv9_fcmped_fcc2) /* { */ | |
6003 | IMPL(fcmped_fcc2) | |
6004 | FP_EXEC_FPU_ON_CHECK | |
6005 | FPU_NOT_IMPLEMENTED("fcmped_fcc2") | |
6006 | ENDDEF | |
6007 | #endif /* } */ | |
6008 | ||
6009 | #if !defined(HAS_NATIVE_sparcv9_fcmped_fcc3) /* { */ | |
6010 | IMPL(fcmped_fcc3) | |
6011 | FP_EXEC_FPU_ON_CHECK | |
6012 | FPU_NOT_IMPLEMENTED("fcmped_fcc3") | |
6013 | ENDDEF | |
6014 | #endif /* } */ | |
6015 | ||
6016 | ||
6017 | /* | |
6018 | * Tagged add and subtract. | |
6019 | */ | |
6020 | ||
6021 | #if !defined(HAS_NATIVE_sparcv9_tadd_co_imm) /* { */ | |
6022 | ||
6023 | IMPL(tadd_co_imm) | |
6024 | int64_t s1 = Rsrc1; | |
6025 | int64_t s2 = Simm16; | |
6026 | int64_t d; | |
6027 | uint64_t v, c; | |
6028 | uint32_t icc_v; | |
6029 | ||
6030 | d = s1 + s2; | |
6031 | ||
6032 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
6033 | c = (s1 & s2) | (~d & (s1 | s2)); | |
6034 | ||
6035 | icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1); | |
6036 | ||
6037 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
6038 | sp->v9_ccr |= V9_icc_v(icc_v); | |
6039 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
6040 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
6041 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
6042 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
6043 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
6044 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
6045 | if (!Zero_Reg(Rdest_num)) | |
6046 | Rdest = d; | |
6047 | ENDI | |
6048 | ||
6049 | #endif /* } */ | |
6050 | ||
6051 | #if !defined(HAS_NATIVE_sparcv9_tadd_co_rrr) /* { */ | |
6052 | ||
6053 | IMPL(tadd_co_rrr) | |
6054 | uint64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
6055 | uint64_t v, c; | |
6056 | uint32_t icc_v; | |
6057 | ||
6058 | d = Rsrc1 + Rsrc2; | |
6059 | ||
6060 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
6061 | c = (s1 & s2) | (~d & (s1 | s2)); | |
6062 | ||
6063 | icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1); | |
6064 | ||
6065 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
6066 | sp->v9_ccr |= V9_icc_v(icc_v); | |
6067 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
6068 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
6069 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
6070 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
6071 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
6072 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
6073 | if (!Zero_Reg(Rdest_num)) | |
6074 | Rdest = d; | |
6075 | ENDI | |
6076 | ||
6077 | #endif /* } */ | |
6078 | ||
6079 | ||
6080 | #if !defined(HAS_NATIVE_sparcv9_tadd_co_tv_imm) /* { */ | |
6081 | ||
6082 | IMPL(tadd_co_tv_imm) | |
6083 | int64_t s1 = Rsrc1; | |
6084 | int64_t s2 = Simm16; | |
6085 | int64_t d; | |
6086 | uint64_t v, c; | |
6087 | uint32_t icc_v; | |
6088 | ||
6089 | d = s1 + s2; | |
6090 | ||
6091 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
6092 | c = (s1 & s2) | (~d & (s1 | s2)); | |
6093 | ||
6094 | icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1); | |
6095 | ||
6096 | if (icc_v) { | |
6097 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
6098 | ||
6099 | v9p->post_precise_trap(sp, Sparcv9_trap_tag_overflow); | |
6100 | return; | |
6101 | } | |
6102 | ||
6103 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
6104 | sp->v9_ccr |= V9_icc_v(icc_v); | |
6105 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
6106 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
6107 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
6108 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
6109 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
6110 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
6111 | if (!Zero_Reg(Rdest_num)) | |
6112 | Rdest = d; | |
6113 | ENDI | |
6114 | ||
6115 | #endif /* } */ | |
6116 | ||
6117 | #if !defined(HAS_NATIVE_sparcv9_tadd_co_tv_rrr) /* { */ | |
6118 | ||
6119 | IMPL(tadd_co_tv_rrr) | |
6120 | uint64_t s1 = Rsrc1, s2 = Rsrc2, d; | |
6121 | uint64_t v, c; | |
6122 | uint32_t icc_v; | |
6123 | ||
6124 | d = Rsrc1 + Rsrc2; | |
6125 | ||
6126 | v = (s1 & s2 & ~d) | (~s1 & ~s2 & d); | |
6127 | c = (s1 & s2) | (~d & (s1 | s2)); | |
6128 | ||
6129 | icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1); | |
6130 | ||
6131 | if (icc_v) { | |
6132 | sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp); | |
6133 | ||
6134 | v9p->post_precise_trap(sp, Sparcv9_trap_tag_overflow); | |
6135 | return; | |
6136 | } | |
6137 | ||
6138 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
6139 | sp->v9_ccr |= V9_icc_v(icc_v); | |
6140 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
6141 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
6142 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
6143 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
6144 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
6145 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
6146 | if (!Zero_Reg(Rdest_num)) | |
6147 | Rdest = d; | |
6148 | ENDI | |
6149 | ||
6150 | #endif /* } */ | |
6151 | ||
6152 | ||
#if !defined(HAS_NATIVE_sparcv9_tsub_co_imm)	/* { */

/*
 * TSUBcc (immediate form): tagged subtract setting icc/xcc.
 * icc.V is set on 32-bit overflow OR when either operand carries a
 * non-zero tag in bits 1:0.
 */
IMPL(tsub_co_imm)
	int64_t s1 = Rsrc1;
	int64_t s2 = Simm16;		/* sign-extended immediate */
	int64_t d;
	uint64_t v, c;			/* per-bit overflow / borrow vectors */
	uint32_t icc_v;

	d = s1 - s2;

	/* standard bit-wise subtract overflow / borrow recurrences */
	v = (s1 & ~s2 & ~d) | (~s1 & s2 & d);
	c = (~s1 & s2) | (d & (~s1 | s2));

	/* 32-bit overflow OR tag bits 1:0 of either operand */
	icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1);

	sp->v9_ccr = V9_xcc_v((v >> 63) & 1);
	sp->v9_ccr |= V9_icc_v(icc_v);
	sp->v9_ccr |= V9_xcc_c((c >> 63) & 1);
	sp->v9_ccr |= V9_icc_c((c >> 31) & 1);
	sp->v9_ccr |= V9_xcc_n((d >> 63) & 1);
	sp->v9_ccr |= V9_icc_n((d >> 31) & 1);
	sp->v9_ccr |= V9_xcc_z(d ? 0 : 1);
	sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1);
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI

#endif /* } */
6182 | ||
#if !defined(HAS_NATIVE_sparcv9_tsub_co_rrr)	/* { */

/*
 * TSUBcc (register form): tagged subtract setting icc/xcc.
 * icc.V is set on 32-bit overflow OR when either operand carries a
 * non-zero tag in bits 1:0.
 */
IMPL(tsub_co_rrr)
	int64_t s1 = Rsrc1;
	int64_t s2 = Rsrc2;
	int64_t d;
	uint64_t v, c;			/* per-bit overflow / borrow vectors */
	uint32_t icc_v;

	d = s1 - s2;

	/* standard bit-wise subtract overflow / borrow recurrences */
	v = (s1 & ~s2 & ~d) | (~s1 & s2 & d);
	c = (~s1 & s2) | (d & (~s1 | s2));

	/* 32-bit overflow OR tag bits 1:0 of either operand */
	icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1);

	sp->v9_ccr = V9_xcc_v((v >> 63) & 1);
	sp->v9_ccr |= V9_icc_v(icc_v);
	sp->v9_ccr |= V9_xcc_c((c >> 63) & 1);
	sp->v9_ccr |= V9_icc_c((c >> 31) & 1);
	sp->v9_ccr |= V9_xcc_n((d >> 63) & 1);
	sp->v9_ccr |= V9_icc_n((d >> 31) & 1);
	sp->v9_ccr |= V9_xcc_z(d ? 0 : 1);
	sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1);
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI

#endif /* } */
6212 | ||
6213 | ||
#if !defined(HAS_NATIVE_sparcv9_tsub_co_tv_imm)	/* { */

/*
 * TSUBccTV (immediate form): as TSUBcc, but if icc.V would be set
 * (32-bit overflow or a non-zero operand tag) the instruction traps
 * with tag_overflow and leaves CCR and rd unmodified.
 */
IMPL(tsub_co_tv_imm)
	int64_t s1 = Rsrc1;
	int64_t s2 = Simm16;		/* sign-extended immediate */
	int64_t d;
	uint64_t v, c;			/* per-bit overflow / borrow vectors */
	uint32_t icc_v;

	d = s1 - s2;

	/* standard bit-wise subtract overflow / borrow recurrences */
	v = (s1 & ~s2 & ~d) | (~s1 & s2 & d);
	c = (~s1 & s2) | (d & (~s1 | s2));

	/* 32-bit overflow OR tag bits 1:0 of either operand */
	icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1);

	if (icc_v) {
		sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);

		v9p->post_precise_trap(sp, Sparcv9_trap_tag_overflow);
		return;
	}

	sp->v9_ccr = V9_xcc_v((v >> 63) & 1);
	sp->v9_ccr |= V9_icc_v(icc_v);
	sp->v9_ccr |= V9_xcc_c((c >> 63) & 1);
	sp->v9_ccr |= V9_icc_c((c >> 31) & 1);
	sp->v9_ccr |= V9_xcc_n((d >> 63) & 1);
	sp->v9_ccr |= V9_icc_n((d >> 31) & 1);
	sp->v9_ccr |= V9_xcc_z(d ? 0 : 1);
	sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1);
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI

#endif /* } */
6250 | ||
#if !defined(HAS_NATIVE_sparcv9_tsub_co_tv_rrr)	/* { */

/*
 * TSUBccTV (register form): as TSUBcc, but if icc.V would be set
 * (32-bit overflow or a non-zero operand tag) the instruction traps
 * with tag_overflow and leaves CCR and rd unmodified.
 */
IMPL(tsub_co_tv_rrr)
	int64_t s1 = Rsrc1;
	int64_t s2 = Rsrc2;
	int64_t d;
	uint64_t v, c;			/* per-bit overflow / borrow vectors */
	uint32_t icc_v;

	d = s1 - s2;

	/* standard bit-wise subtract overflow / borrow recurrences */
	v = (s1 & ~s2 & ~d) | (~s1 & s2 & d);
	c = (~s1 & s2) | (d & (~s1 | s2));

	/* 32-bit overflow OR tag bits 1:0 of either operand */
	icc_v = ((v >> 31) & 1) | (((s1 | s2) >> 1) & 1) | ((s1 | s2) & 1);

	if (icc_v) {
		sparcv9_cpu_t * v9p = (sparcv9_cpu_t*)(sp->specificp);

		v9p->post_precise_trap(sp, Sparcv9_trap_tag_overflow);
		return;
	}

	sp->v9_ccr = V9_xcc_v((v >> 63) & 1);
	sp->v9_ccr |= V9_icc_v(icc_v);
	sp->v9_ccr |= V9_xcc_c((c >> 63) & 1);
	sp->v9_ccr |= V9_icc_c((c >> 31) & 1);
	sp->v9_ccr |= V9_xcc_n((d >> 63) & 1);
	sp->v9_ccr |= V9_icc_n((d >> 31) & 1);
	sp->v9_ccr |= V9_xcc_z(d ? 0 : 1);
	sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1);
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI

#endif /* } */
6287 | ||
6288 | ||
/*
 * SIAM (VIS2): Set Interval Arithmetic Mode - loads the 3-bit mode
 * field from the immediate into GSR.
 * NOTE(review): both IM and IRND masks are cleared and (mode & 7) is
 * shifted by IRND_SHIFT only - this assumes GSR.IM sits immediately
 * above the 2-bit GSR.IRND field so mode bit 2 lands in IM; confirm
 * against the V9_GSR_* definitions.
 */
IMPL(siam)
	uint64_t mode;

	FP_EXEC_FPU_ON_CHECK;
	mode = Simm16;
	sp->v9_gsr = (sp->v9_gsr & ~(V9_GSR_IM_MASK|V9_GSR_IRND_MASK)) |
			((mode & 7) << V9_GSR_IRND_SHIFT);
ENDI
6297 | ||
6298 | ||
6299 | ||
/*
 * SIR: software-initiated reset.  From user or privileged mode the
 * instruction is illegal; otherwise it posts the SIR trap.
 */
IMPL( sir )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);

	if (V9_User == v9p->state || V9_Priv == v9p->state) {
		v9p->post_precise_trap(sp, Sparcv9_trap_illegal_instruction);
		return;
	}

	v9p->post_precise_trap(sp, Sparcv9_trap_software_initiated_reset);

ENDDEF
6311 | ||
6312 | IMPL( alignaddr ) | |
6313 | uint64_t d; | |
6314 | ||
6315 | FP_EXEC_FPU_ON_CHECK; | |
6316 | d = Rsrc1 + Rsrc2; | |
6317 | /* gsr.align = lower 3 bits */ | |
6318 | sp->v9_gsr &= ~MASK64(2,0); | |
6319 | sp->v9_gsr |= (d & MASK64(2,0)); | |
6320 | d &= ~MASK64(2,0); /* zero lower 3 bits */ | |
6321 | if (!Zero_Reg(Rdest_num)) { | |
6322 | Rdest = d; | |
6323 | } | |
6324 | ENDI | |
6325 | ||
6326 | IMPL( alignaddrl ) | |
6327 | uint64_t d; | |
6328 | ||
6329 | FP_EXEC_FPU_ON_CHECK; | |
6330 | d = Rsrc1 + Rsrc2; | |
6331 | /* gsr.align = two's complement of lower 3 bits */ | |
6332 | sp->v9_gsr &= ~MASK64(2,0); | |
6333 | sp->v9_gsr |= (0x8 - (d & MASK64(2,0))); | |
6334 | d &= ~MASK64(2,0); /* zero lower 3 bits */ | |
6335 | if (!Zero_Reg(Rdest_num)) { | |
6336 | Rdest = d; | |
6337 | } | |
6338 | ENDI | |
6339 | ||
/*
 * BMASK (VIS2): rd = rs1 + rs2; the low 32 bits of the sum are also
 * stored into GSR.mask (bits 63:32) for a later BSHUFFLE.
 */
IMPL( bmask )
	uint64_t d;

	FP_EXEC_FPU_ON_CHECK;
	d = Rsrc1 + Rsrc2;
	if (!Zero_Reg(Rdest_num)) {
		Rdest = d;
	}
	/* gsr.mask = lower 32 bits */
	sp->v9_gsr &= ~MASK64(63,32);
	sp->v9_gsr |= (d << 32);

ENDI
6353 | ||
/*
 * BSHUFFLE (VIS2): permute bytes of the rs1:rs2 pair under GSR.mask.
 * Each 4-bit nibble of the mask (MSB nibble first) selects the source
 * byte for destination byte 'idx': values 0-7 pick a byte of rs1,
 * 8-15 pick a byte of rs2 (0 = most significant byte).
 */
IMPL( bshuffle )
	uint64_t d = 0x0ull;
	uint32_t mask;
	uint_t idx, byte;

	FP_EXEC_FPU_ON_CHECK;
	mask = (uint32_t) (sp->v9_gsr >> 32);	/* GSR.mask field */

	for (idx = 0; idx < 8; idx++) {
		/* nibble idx (from the top) names the source byte */
		byte = (mask >> (28 - (idx*4)) & MASK64(3,0));
		if (byte < 8) {
			/* shift selected rs1 byte into destination lane idx */
			if (idx <= byte)
				d |= ((F64src1 & MASK64(63-(8*byte),56-(8*byte)))<<((byte-idx)*8));
			else
				d |= ((F64src1 & MASK64(63-(8*byte),56-(8*byte)))>>((idx-byte)*8));
		} else {
			byte -= 8;
			if (idx <= byte)
				d |= ((F64src2 & MASK64(63-(8*byte),56-(8*byte)))<<((byte-idx)*8));
			else
				d |= ((F64src2 & MASK64(63-(8*byte),56-(8*byte)))>>((idx-byte)*8));
		}
	}
	F64dest = d;
ENDI
6379 | ||
/*
 * FALIGNDATA: concatenate rs1:rs2 and extract 8 bytes starting at the
 * byte offset previously stored in GSR.align by ALIGNADDRESS.
 */
IMPL( faligndata )
	uint64_t d;

	FP_EXEC_FPU_ON_CHECK;
	/* align data based on GSR.align field */
	d = (F64src1 << ((sp->v9_gsr & MASK64(2,0)) * 8));
	if ((sp->v9_gsr & MASK64(2,0)) != 0) {	/* prevent Rsrc2 >> 64 */
		d |= (F64src2 >> ((8 - (sp->v9_gsr & MASK64(2,0))) * 8));
	}
	F64dest = d;

ENDI
6392 | ||
/*
 * FPACK32: scale the two signed 32-bit fields of rs2 by GSR.scale,
 * truncate at the implicit binary point (bit 23) and saturate to an
 * unsigned byte; the bytes land in lanes 4 and 0 of rd while the
 * bytes of rs1 shift up one lane to fill the remaining six.
 */
IMPL( fpack32 )
	uint64_t s1, s2;
	uint_t gsr_scale;
	uint64_t d;
	int64_t i;

	FP_EXEC_FPU_ON_CHECK;
	s1 = F64src1;
	s2 = F64src2;
	gsr_scale = (sp->v9_gsr >> 3) & 0x1f;	/* 5-bit GSR.scale */
	d = 0;

	/* upper 32-bit field: scale, drop fraction, saturate to 0..0xff */
	i = (s2 >> 32) & 0xffffffff;
	SIGN_EXT(i, 32);
	i <<= gsr_scale;
	i >>= 23;
	if (i > 0xff)
		i = 0xff;
	else
	if (i < 0)
		i = 0;
	d |= (i & 0xff) << 32;

	/* lower 32-bit field: same treatment, packed into byte lane 0 */
	i = (s2 >> 0) & 0xffffffff;
	SIGN_EXT(i, 32);
	i <<= gsr_scale;
	i >>= 23;
	if (i > 0xff)
		i = 0xff;
	else
	if (i < 0)
		i = 0;
	d |= (i & 0xff) << 0;

	/* rs1 bytes move up one lane; lanes 4 and 0 hold the packed bytes */
	s1 <<= 8;
	d |= s1 & 0xffffff00ffffff00ull;
	F64dest = d;
ENDI
6431 | ||
/*
 * FPACK16: scale each signed 16-bit field of rs2 by GSR.scale (bit 4
 * of scale is ignored), truncate at the implicit binary point (bit 7)
 * and saturate to an unsigned byte; the four bytes form the 32-bit rd.
 */
IMPL( fpack16 )
	uint64_t s2;
	uint_t gsr_scale;
	int64_t i;
	uint32_t d;

	FP_EXEC_FPU_ON_CHECK;
	s2 = F64src2;
	/* fpack16 ignores gsr.scale[4] */
	gsr_scale = (sp->v9_gsr >> 3) & 0x0f;
	d = 0;

	/* field 3 (bits 63:48): scale, drop fraction, saturate to 0..0xff */
	i = (s2 >> 48) & 0xffff;
	SIGN_EXT(i, 16);
	i <<= gsr_scale;
	i >>= 7;
	if (i > 0xff)
		i = 0xff;
	else
	if (i < 0)
		i = 0;
	d |= (i & 0xff) << 24;

	/* field 2 (bits 47:32) */
	i = (s2 >> 32) & 0xffff;
	SIGN_EXT(i, 16);
	i <<= gsr_scale;
	i >>= 7;
	if (i > 0xff)
		i = 0xff;
	else
	if (i < 0)
		i = 0;
	d |= (i & 0xff) << 16;

	/* field 1 (bits 31:16) */
	i = (s2 >> 16) & 0xffff;
	SIGN_EXT(i, 16);
	i <<= gsr_scale;
	i >>= 7;
	if (i > 0xff)
		i = 0xff;
	else
	if (i < 0)
		i = 0;
	d |= (i & 0xff) << 8;

	/* field 0 (bits 15:0) */
	i = (s2 >> 0) & 0xffff;
	SIGN_EXT(i, 16);
	i <<= gsr_scale;
	i >>= 7;
	if (i > 0xff)
		i = 0xff;
	else
	if (i < 0)
		i = 0;
	d |= (i & 0xff) << 0;

	F32dest = d;
ENDI
6490 | ||
/*
 * FPACKFIX: scale the two signed 32-bit fields of rs2 by GSR.scale,
 * truncate at bit 16 and saturate to signed 16-bit; the two halfwords
 * form the 32-bit rd.
 */
IMPL( fpackfix )
	uint64_t s2;
	uint_t gsr_scale;
	int64_t i;
	uint32_t d;

	FP_EXEC_FPU_ON_CHECK;
	s2 = F64src2;
	gsr_scale = (sp->v9_gsr >> 3) & 0x1f;	/* 5-bit GSR.scale */
	d = 0;

	/* upper field: scale, drop fraction, saturate to int16 range */
	i = (s2 >> 32) & 0xffffffffull;
	SIGN_EXT(i, 32);
	i <<= gsr_scale;
	i >>= 16;
	if (i > 0x7fff)
		i = 0x7fff;
	else
	if (i < -0x8000)
		i = -0x8000;
	d |= (i & 0xffff) << 16;

	/* lower field */
	i = (s2 >> 0) & 0xffffffffull;
	SIGN_EXT(i, 32);
	i <<= gsr_scale;
	i >>= 16;
	if (i > 0x7fff)
		i = 0x7fff;
	else
	if (i < -0x8000)
		i = -0x8000;
	d |= (i & 0xffff) << 0;

	F32dest = d;
ENDI
6526 | ||
6527 | IMPL( pdist ) | |
6528 | uint64_t s1, s2; | |
6529 | uint64_t d; | |
6530 | uint_t idx; | |
6531 | ||
6532 | FP_EXEC_FPU_ON_CHECK; | |
6533 | s1 = F64src1; | |
6534 | s2 = F64src2; | |
6535 | d = 0; | |
6536 | for (idx = 0; idx < 8; idx++) { | |
6537 | if ((s1 & 0xff) > (s2 & 0xff)) | |
6538 | d += (s1 & 0xff) - (s2 & 0xff); | |
6539 | else | |
6540 | d += (s2 & 0xff) - (s1 & 0xff); | |
6541 | s1 >>= 8; | |
6542 | s2 >>= 8; | |
6543 | } | |
6544 | F64dest += d; | |
6545 | ENDI | |
6546 | ||
/*
 * PDISTN: sum of absolute differences over the eight byte lanes of
 * rs1 and rs2; unlike PDIST the result replaces (not accumulates
 * into) the integer destination register.
 */
IMPL( pdistn )
	uint64_t s1, s2;
	uint64_t d;
	uint_t idx;

	FP_EXEC_FPU_ON_CHECK;
	s1 = F64src1;
	s2 = F64src2;
	d = 0;
	for (idx = 0; idx < 8; idx++) {
		/* |s1.byte - s2.byte| without signed arithmetic */
		if ((s1 & 0xff) > (s2 & 0xff))
			d += (s1 & 0xff) - (s2 & 0xff);
		else
			d += (s2 & 0xff) - (s1 & 0xff);
		s1 >>= 8;
		s2 >>= 8;
	}
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI
6567 | ||
6568 | IMPL( fpmerge ) | |
6569 | uint64_t s1, s2; | |
6570 | ||
6571 | FP_EXEC_FPU_ON_CHECK; | |
6572 | s1 = F32src1; | |
6573 | s2 = F32src2; | |
6574 | F64dest = ((s1 & 0xff000000) << 32) | | |
6575 | ((s1 & 0xff0000) << 24) | | |
6576 | ((s1 & 0xff00) << 16) | | |
6577 | ((s1 & 0xff) << 8) | | |
6578 | ((s2 & 0xff000000) << 24) | | |
6579 | ((s2 & 0xff0000) << 16) | | |
6580 | ((s2 & 0xff00) << 8) | | |
6581 | ((s2 & 0xff) << 0); | |
6582 | ENDI | |
6583 | ||
/*
 * FEXPAND: widen each unsigned byte of the 32-bit rs2 into a 16-bit
 * fixed-point field of rd, left-shifted 4 (value * 16).
 */
IMPL( fexpand )
	uint64_t s2;

	FP_EXEC_FPU_ON_CHECK;
	s2 = F32src2;
	F64dest = ((s2 & 0xff000000) << 28) |
		((s2 & 0xff0000) << 20) |
		((s2 & 0xff00) << 12) |
		((s2 & 0xff) << 4);
ENDI
6594 | ||
/*
 * ARRAY16: convert the blocked 3-D coordinates packed in rs1 into a
 * linear memory offset for 16-bit texels.  rs2 (clamped to 5) selects
 * the power-of-two X/Y dimension; the final <<1 scales byte offsets
 * for 2-byte elements.
 */
IMPL( array16 )
	uint64_t s1, n, d;

	s1 = Rsrc1;
	n = Rsrc2;
	n &= 7;
	if (n > 5)
		n = 5;		/* architectural clamp on the size field */
	d = 0;
	/* low-order interleaved X/Y/Z block bits */
	d |= (s1 >> 11) & 3;
	d |= ((s1 >> 33) & 3) << 2;
	d |= ((s1 >> 55) & 1) << 4;
	d |= ((s1 >> 13) & 0xf) << 5;
	d |= ((s1 >> 35) & 0xf) << 9;
	d |= ((s1 >> 56) & 0xf) << 13;
	/* n variable-width upper X and Y fields */
	if (n != 0) {
		d |= ((s1 >> 17) & ((1<<n)-1)) << 17;
		d |= ((s1 >> 39) & ((1<<n)-1)) << (17+n);
	}
	d |= ((s1 >> 60) & 0xf) << (17+(2*n));
	d <<= 1;		/* scale for 16-bit elements */
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI
6619 | ||
/*
 * ARRAY32: identical addressing computation to ARRAY16 but the final
 * <<2 scales byte offsets for 4-byte elements.
 */
IMPL( array32 )
	uint64_t s1, n, d;

	s1 = Rsrc1;
	n = Rsrc2;
	n &= 7;
	if (n > 5)
		n = 5;		/* architectural clamp on the size field */
	d = 0;
	/* low-order interleaved X/Y/Z block bits */
	d |= (s1 >> 11) & 3;
	d |= ((s1 >> 33) & 3) << 2;
	d |= ((s1 >> 55) & 1) << 4;
	d |= ((s1 >> 13) & 0xf) << 5;
	d |= ((s1 >> 35) & 0xf) << 9;
	d |= ((s1 >> 56) & 0xf) << 13;
	/* n variable-width upper X and Y fields */
	if (n != 0) {
		d |= ((s1 >> 17) & ((1<<n)-1)) << 17;
		d |= ((s1 >> 39) & ((1<<n)-1)) << (17+n);
	}
	d |= ((s1 >> 60) & 0xf) << (17+(2*n));
	d <<= 2;		/* scale for 32-bit elements */
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI
6644 | ||
/*
 * ARRAY8: identical addressing computation to ARRAY16/32 with no
 * final scaling (1-byte elements).
 */
IMPL( array8 )
	uint64_t s1, n, d;

	s1 = Rsrc1;
	n = Rsrc2;
	n &= 7;
	if (n > 5)
		n = 5;		/* architectural clamp on the size field */
	d = 0;
	/* low-order interleaved X/Y/Z block bits */
	d |= (s1 >> 11) & 3;
	d |= ((s1 >> 33) & 3) << 2;
	d |= ((s1 >> 55) & 1) << 4;
	d |= ((s1 >> 13) & 0xf) << 5;
	d |= ((s1 >> 35) & 0xf) << 9;
	d |= ((s1 >> 56) & 0xf) << 13;
	/* n variable-width upper X and Y fields */
	if (n != 0) {
		d |= ((s1 >> 17) & ((1<<n)-1)) << 17;
		d |= ((s1 >> 39) & ((1<<n)-1)) << (17+n);
	}
	d |= ((s1 >> 60) & 0xf) << (17+(2*n));
	if (!Zero_Reg(Rdest_num))
		Rdest = d;
ENDI
6668 | ||
6669 | /* | |
6670 | * sim_edge: simulates all 12 EDGE opcodes: | |
6671 | edge8cc,edge8lcc,edge16cc,edge16lcc,edge32cc,edge32lcc (VIS1) | |
6672 | edge8n, edge8ln, edge16n, edge16ln, edge32n, edge32ln (VIS2) | |
6673 | ||
6674 | No code is provided here for setting the condition code for | |
6675 | the cc-setting variants (opcode bit5=0). It must be computed | |
6676 | externally to this function by computing (op1 - op2) and | |
6677 | setting the condition code bits as for the SUBCC instruction. | |
6678 | */ | |
6679 | ||
static uint64_t				/* returns edge result */
sim_edge (
	uint32_t instr,			/* instruction opcode */
	uint64_t op1,			/* operand1 */
	uint64_t op2,			/* operand2 */
	bool_t am_flag			/* pstate.AM */
)
{
	uint64_t res_mask;		/* result mask: 8/4/2 bits */
	uint64_t ledge;			/* left edge */
	uint64_t redge;			/* right edge */

	/* convert opcode bits 8:7 (8/16/32) to shift counts */
	uint64_t sh_edg = (instr >> 7) & 3;	/* edge shift (0/1/2) */
	uint64_t sh_res = 8 >> sh_edg;		/* result shift (8/4/2) */

	/* element index of each address within its 8-byte word */
	uint64_t lsize = (op1 & 7) >> sh_edg;	/* left edge size */
	uint64_t rsize = (op2 & 7) >> sh_edg;	/* right edge size */

	/* compare address bits for equality */
	uint64_t adr_diff = op1 ^ op2;
	/* in 32 bit mode ignore miscompares in bits 63:32 */
	if(am_flag)	/* test AM bit */
	{
		adr_diff = (uint32_t)adr_diff;
	}
	/* non-zero here means op1/op2 lie in different 8-byte words,
	 * in which case only the left edge constrains the mask */
	adr_diff >>= 3;	/* address-equality flag for later */

	/* BIG or LITTLE endian instruction variant? (opcode bit 6) */
	if(instr & 0x40)	/* 1=little-endian */
	{
		/* 0xFF = starting left mask (byte0) */
		/* 0x1FE = starting right mask (byte1) and extra ones */

		res_mask = (0xFFu << sh_res) >> 8;	/* low sh_res bits */
		ledge = (0xFFu << lsize) & res_mask;	/* elements >= lsize */

		if(0==adr_diff)
		{
			/* same word: intersect with elements <= rsize */
			redge = ((0x1FEu << rsize) >> 8) & res_mask;
			ledge &= redge;
		}
	}
	else	/* 0=big-endian */
	{
		/* 0xFF = starting left mask (byte0) */
		/* 0x7F80 = starting right mask (byte0) and extra ones */

		res_mask = (0xFF00u >> sh_res) & 0xFF;	/* high sh_res bits */
		int rjust = 8 - sh_res;			/* right-justify amount */
		ledge = ((0xFFu >> lsize) & res_mask) >> rjust;

		if(0==adr_diff)
		{
			/* same word: intersect with the right-edge mask */
			redge = ((0x7F80u >> rsize) & res_mask) >> rjust;
			ledge &= redge;
		}
	}

	return ledge;
	/* Don't forget to calculate the CC from op1-op2 */
}
6742 | ||
6743 | static void | |
6744 | sim_edge_cc(simcpu_t *sp, uint64_t op1, uint64_t op2) | |
6745 | { | |
6746 | int64_t s1 = op1; | |
6747 | int64_t s2 = op1; | |
6748 | int64_t d; | |
6749 | uint64_t v, c; | |
6750 | ||
6751 | d = s1 - s2; | |
6752 | ||
6753 | v = (s1 & ~s2 & ~d) | (~s1 & s2 & d); | |
6754 | c = (~s1 & s2) | (d & (~s1 | s2)); | |
6755 | ||
6756 | sp->v9_ccr = V9_xcc_v((v >> 63) & 1); | |
6757 | sp->v9_ccr |= V9_icc_v((v >> 31) & 1); | |
6758 | sp->v9_ccr |= V9_xcc_c((c >> 63) & 1); | |
6759 | sp->v9_ccr |= V9_icc_c((c >> 31) & 1); | |
6760 | sp->v9_ccr |= V9_xcc_n((d >> 63) & 1); | |
6761 | sp->v9_ccr |= V9_icc_n((d >> 31) & 1); | |
6762 | sp->v9_ccr |= V9_xcc_z(d ? 0 : 1); | |
6763 | sp->v9_ccr |= V9_icc_z((d & MASK64(31,0)) ? 0 : 1); | |
6764 | } | |
6765 | ||
/* EDGE16cc: size/endianness decoded from the raw opcode by sim_edge;
 * also sets icc/xcc from rs1 - rs2. */
IMPL( edge16 )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	sim_edge_cc(sp, Rsrc1, Rsrc2);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6773 | ||
/* EDGE16Lcc: little-endian variant; also sets icc/xcc from rs1 - rs2. */
IMPL( edge16l )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	sim_edge_cc(sp, Rsrc1, Rsrc2);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6781 | ||
/* EDGE16LN (VIS2): little-endian, no condition codes. */
IMPL( edge16ln )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6788 | ||
/* EDGE16N (VIS2): big-endian, no condition codes. */
IMPL( edge16n )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6795 | ||
/* EDGE32cc: word-element edge mask; also sets icc/xcc from rs1 - rs2. */
IMPL( edge32 )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	sim_edge_cc(sp, Rsrc1, Rsrc2);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6803 | ||
/* EDGE32Lcc: little-endian variant; also sets icc/xcc from rs1 - rs2. */
IMPL( edge32l )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	sim_edge_cc(sp, Rsrc1, Rsrc2);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6811 | ||
/* EDGE32LN (VIS2): little-endian, no condition codes. */
IMPL( edge32ln )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6818 | ||
/* EDGE32N (VIS2): big-endian, no condition codes. */
IMPL( edge32n )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6825 | ||
/* EDGE8cc: byte-element edge mask; also sets icc/xcc from rs1 - rs2. */
IMPL( edge8 )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	sim_edge_cc(sp, Rsrc1, Rsrc2);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6833 | ||
/* EDGE8Lcc: little-endian variant; also sets icc/xcc from rs1 - rs2. */
IMPL( edge8l )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	sim_edge_cc(sp, Rsrc1, Rsrc2);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6841 | ||
/* EDGE8LN (VIS2): little-endian, no condition codes. */
IMPL( edge8ln )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6848 | ||
/* EDGE8N (VIS2): big-endian, no condition codes. */
IMPL( edge8n )
	sparcv9_cpu_t * v9p = (sparcv9_cpu_t *)(sp->specificp);
	if (!Zero_Reg(Rdest_num))
		Rdest = sim_edge(xcip->rawi, Rsrc1, Rsrc2,
			v9p->pstate.addr_mask);
ENDI
6855 | ||
/*
 * Partitioned compare of the four signed 16-bit fields of rs1/rs2;
 * bit i of the result reflects field i (least-significant field is
 * bit 0).  BUGFIX: the %g0 guard was inverted - the result must be
 * written only when rd is NOT %g0, matching every other instruction
 * implementation in this file.
 */
#define FCMPx16(_cond_) \
	int32_t s1, s2; \
	uint64_t fs1, fs2, res; \
	uint_t i; \
	fs1 = F64src1; \
	fs2 = F64src2; \
	res = 0; \
	for (i = 0; i < 4; i++) { \
		s1 = fs1 & 0xffff; \
		s2 = fs2 & 0xffff; \
		SIGN_EXT(s1, 16); \
		SIGN_EXT(s2, 16); \
		if (s1 _cond_ s2) \
			res |= 1 << i; \
		fs1 >>= 16; \
		fs2 >>= 16; \
	} \
	if (!Zero_Reg(Rdest_num)) \
		Rdest = res;
6875 | ||
/* FCMPEQ16: four-way 16-bit partitioned compare for equality. */
IMPL( fcmpeq16 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx16(==)
ENDI
6880 | ||
/* FCMPGT16: four-way 16-bit partitioned signed greater-than compare. */
IMPL( fcmpgt16 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx16(>)
ENDI
6885 | ||
/* FCMPLE16: four-way 16-bit partitioned signed less-or-equal compare. */
IMPL( fcmple16 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx16(<=)
ENDI
6890 | ||
/* FCMPNE16: four-way 16-bit partitioned compare for inequality. */
IMPL( fcmpne16 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx16(!=)
ENDI
6895 | ||
/*
 * Partitioned compare of the two signed 32-bit fields of rs1/rs2;
 * bit i of the result reflects field i (least-significant field is
 * bit 0).  BUGFIX: the %g0 guard was inverted - the result must be
 * written only when rd is NOT %g0, matching every other instruction
 * implementation in this file.
 */
#define FCMPx32(_cond_) \
	int64_t s1, s2; \
	uint64_t fs1, fs2, res; \
	uint_t i; \
	fs1 = F64src1; \
	fs2 = F64src2; \
	res = 0; \
	for (i = 0; i < 2; i++) { \
		s1 = fs1 & 0xffffffff; \
		s2 = fs2 & 0xffffffff; \
		SIGN_EXT(s1, 32); \
		SIGN_EXT(s2, 32); \
		if (s1 _cond_ s2) \
			res |= 1 << i; \
		fs1 >>= 32; \
		fs2 >>= 32; \
	} \
	if (!Zero_Reg(Rdest_num)) \
		Rdest = res;
6915 | ||
/* FCMPEQ32: two-way 32-bit partitioned compare for equality. */
IMPL( fcmpeq32 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx32(==)
ENDI
6920 | ||
/* FCMPGT32: two-way 32-bit partitioned signed greater-than compare. */
IMPL( fcmpgt32 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx32(>)
ENDI
6925 | ||
/* FCMPLE32: two-way 32-bit partitioned signed less-or-equal compare. */
IMPL( fcmple32 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx32(<=)
ENDI
6930 | ||
/* FCMPNE32: two-way 32-bit partitioned compare for inequality. */
IMPL( fcmpne32 )
	FP_EXEC_FPU_ON_CHECK;
	FCMPx32(!=)
ENDI
6935 | ||
/*
 * FMUL8x16: multiply each unsigned byte of the 32-bit rs1 by the
 * corresponding signed 16-bit field of rs2; round (+0x80) and keep
 * the upper 16 bits of each 24-bit product.
 */
IMPL( fmul8x16 )
	FP_EXEC_FPU_ON_CHECK;
	uint16_t s1;
	int32_t s2;
	int32_t d;
	uint32_t fs1;
	uint64_t fs2, res;
	uint_t i;

	fs1 = F32src1;
	fs2 = F64src2;
	res = 0;
	for (i = 0; i < 4; i++) {
		s1 = (uint8_t) fs1;
		s2 = (int16_t) fs2;
		d = s1 * s2;
		d += 0x80;		/* round */
		d >>= 8;		/* upper 16 bits of 24-bit product */
		res >>= 16;		/* assemble result field by field */
		res |= (uint64_t)d << 48;
		fs1 >>= 8;
		fs2 >>= 16;
	}
	F64dest = res;
ENDI
6961 | ||
/*
 * FMUL8x16AL: multiply each unsigned byte of the 32-bit rs1 by the
 * LOWER signed 16-bit field of the 32-bit rs2 (a single scalar),
 * rounding and truncating as FMUL8x16.
 */
IMPL( fmul8x16al )
	FP_EXEC_FPU_ON_CHECK;
	uint16_t s1;
	int32_t s2;
	int32_t d;
	uint32_t fs1, fs2;
	uint64_t res;
	uint_t i;

	fs1 = F32src1;
	fs2 = F32src2;
	res = 0;
	s2 = (int16_t) fs2;		/* scalar: low halfword of rs2 */
	for (i = 0; i < 4; i++) {
		s1 = (uint8_t) fs1;
		d = s1 * s2;
		d += 0x80;		/* round */
		d >>= 8;		/* upper 16 bits of 24-bit product */
		res >>= 16;
		res |= (uint64_t)d << 48;
		fs1 >>= 8;
	}
	F64dest = res;
ENDI
6986 | ||
/*
 * FMUL8x16AU: as FMUL8x16AL but the scalar multiplier is the UPPER
 * signed 16-bit field of the 32-bit rs2.
 */
IMPL( fmul8x16au )
	FP_EXEC_FPU_ON_CHECK;
	uint16_t s1;
	int32_t s2;
	int32_t d;
	uint32_t fs1, fs2;
	uint64_t res;
	uint_t i;

	fs1 = F32src1;
	fs2 = F32src2;
	res = 0;
	s2 = (int16_t) (fs2 >> 16);	/* scalar: high halfword of rs2 */
	for (i = 0; i < 4; i++) {
		s1 = (uint8_t) fs1;
		d = s1 * s2;
		d += 0x80;		/* round */
		d >>= 8;		/* upper 16 bits of 24-bit product */
		res >>= 16;
		res |= (uint64_t)d << 48;
		fs1 >>= 8;
	}
	F64dest = res;
ENDI
7011 | ||
/*
 * FMUL8SUx16: multiply the SIGNED upper byte of each 16-bit field of
 * rs1 by the corresponding signed 16-bit field of rs2; round (+0x80)
 * and keep the upper 16 bits of each product.
 */
IMPL( fmul8sux16 )
	FP_EXEC_FPU_ON_CHECK;
	int16_t s1;
	int32_t s2;
	int32_t d;
	uint64_t fs1, fs2, res;
	uint_t i;

	fs1 = F64src1;
	fs2 = F64src2;
	res = 0;
	fs1 >>= 8;			/* position upper byte of field 0 */
	for (i = 0; i < 4; i++) {
		s1 = (int8_t) fs1;	/* signed upper byte */
		s2 = (int16_t) fs2;
		d = s1 * s2;
		d += 0x80;		/* round */
		d >>= 8;
		res >>= 16;
		res |= (uint64_t)d << 48;
		fs1 >>= 16;
		fs2 >>= 16;
	}
	F64dest = res;
ENDI
7037 | ||
/*
 * FMUL8ULx16: multiply the UNSIGNED lower byte of each 16-bit field
 * of rs1 by the corresponding signed 16-bit field of rs2; round
 * (+0x8000) and keep the sign-extended upper 8 bits of each product.
 */
IMPL( fmul8ulx16 )
	FP_EXEC_FPU_ON_CHECK;
	uint16_t s1;
	int32_t s2;
	int32_t d;
	uint64_t fs1, fs2, res;
	uint_t i;

	fs1 = F64src1;
	fs2 = F64src2;
	res = 0;
	for (i = 0; i < 4; i++) {
		s1 = (uint8_t) fs1;	/* unsigned lower byte */
		s2 = (int16_t) fs2;
		d = s1 * s2;
		d += 0x8000;		/* round */
		d >>= 16;
		res >>= 16;
		res |= (uint64_t)d << 48;
		fs1 >>= 16;
		fs2 >>= 16;
	}
	F64dest = res;
ENDI
7062 | ||
/*
 * FMULD8SUx16: multiply the SIGNED upper byte of each 16-bit field of
 * the 32-bit rs1 by the corresponding signed 16-bit field of the
 * 32-bit rs2, producing two 32-bit results (product shifted left 8).
 */
IMPL( fmuld8sux16 )
	FP_EXEC_FPU_ON_CHECK;
	int16_t s1;
	int32_t s2;
	int32_t d;
	uint32_t fs1, fs2;
	uint64_t res;
	uint_t i;

	fs1 = F32src1;
	fs2 = F32src2;
	res = 0;
	fs1 >>= 8;			/* position upper byte of field 0 */
	for (i = 0; i < 2; i++) {
		s1 = (int8_t) fs1;	/* signed upper byte */
		s2 = (int16_t) fs2;
		d = s1 * s2;
		d <<= 8;		/* widen product into 32-bit lane */
		res >>= 32;
		res |= (uint64_t)d << 32;
		fs1 >>= 16;
		fs2 >>= 16;
	}
	F64dest = res;
ENDI
7088 | ||
/*
 * FMULD8ULx16: multiply the UNSIGNED lower byte of each 16-bit field
 * of the 32-bit rs1 by the corresponding signed 16-bit field of the
 * 32-bit rs2, producing two sign-extended 32-bit results.
 */
IMPL( fmuld8ulx16 )
	FP_EXEC_FPU_ON_CHECK;
	uint16_t s1;
	int32_t s2;
	int32_t d;
	uint32_t fs1, fs2;
	uint64_t res;
	uint_t i;

	fs1 = F32src1;
	fs2 = F32src2;
	res = 0;
	for (i = 0; i < 2; i++) {
		s1 = (uint8_t) fs1;	/* unsigned lower byte */
		s2 = (int16_t) fs2;
		d = s1 * s2;
		SIGN_EXT(d, 24);	/* product is at most 24 bits wide */
		res >>= 32;
		res |= (uint64_t)d << 32;
		fs1 >>= 16;
		fs2 >>= 16;
	}
	F64dest = res;
ENDI
7113 | ||
7114 | ||
7115 | /*************************************************************/ | |
7116 | ||
7117 | /* | |
7118 | * Floating point status register(s) ... | |
7119 | * | |
7120 | * sp->v9_fsr_ctrl holds all the FP control bits (only) | |
 *	tem, cexc and aexc fields are zeroed.
7122 | * This value is used for setup before execution .. and holds | |
7123 | * the current control bits and the current condition codes. | |
7124 | * The error and trap enable bits should always be zeroed. | |
7125 | * sp->v9_fsr_exc holds the exception bits (current and accumulated). | |
7126 | * in the same bits positions as they occur in the fsr. | |
7127 | * sp->v9_fsr_tem holds the trap enable bits NOTE: these bits are shifted | |
7128 | * down to start at bit0 in this register - this is to make | |
7129 | * masking with fsr_exc one instruction faster in the | |
7130 | * fpop execution common case. | |
7131 | */ | |
7132 | ||
/*
 * Replace only the lower 32 bits of the architectural FSR, keeping
 * the current upper word (taken from the ctrl shadow register).
 */
void v9_set_fsr_lower(simcpu_t * sp, uint64_t val)
{
	v9_set_fsr(sp, (val & MASK64(31,0)) | (sp->v9_fsr_ctrl & ~MASK64(31,0)));
}
7137 | ||
/*
 * Write the full architectural FSR, splitting it into the three
 * shadow fields (see the block comment above): control bits in
 * v9_fsr_ctrl, trap enables (shifted to bit 0) in v9_fsr_tem, and
 * current/accrued exceptions in v9_fsr_exc.  FTT is preserved, not
 * writable by this path.
 */
void v9_set_fsr(simcpu_t * sp, uint64_t val)
{
	uint64_t oldval;

	DBGFSR( oldval = v9_get_fsr(sp); );
#ifdef NIAGARA1
	val &= V9_FSR_REG_MASK;
#else	/* NIAGARA1 */
	/* non-Niagara1 chips also implement the NS (nonstandard) bit */
	val &= V9_FSR_REG_MASK | V9_FSR_NS_MASK;
#endif	/* NIAGARA1 */
	val |= sp->v9_fsr_ctrl & V9_FSR_FTT_MASK;	/* FTT is sticky here */
	sp->v9_fsr_ctrl = val & ~(V9_FSR_TEM_MASK | V9_FSR_AEXC_MASK | V9_FSR_CEXC_MASK);
	sp->v9_fsr_tem = (val & V9_FSR_TEM_MASK)>>V9_FSR_TEM_BIT;
	sp->v9_fsr_exc = val & (V9_FSR_AEXC_MASK | V9_FSR_CEXC_MASK);
	DBGFSR( lprintf(sp->gid, "v9_set_fsr: pc=0x%llx, fsr=0x%llx (was 0x%llx)\n", sp->pc, v9_get_fsr(sp), oldval); );
}
7154 | ||
7155 | uint64_t v9_get_fsr(simcpu_t * sp) | |
7156 | { | |
7157 | return sp->v9_fsr_ctrl | (sp->v9_fsr_tem<<V9_FSR_TEM_BIT) | sp->v9_fsr_exc; | |
7158 | } | |
7159 | ||
/*
 * Post-fpop FSR update: install the new FTT and the current/accrued
 * exception bits; control bits and trap enables are left untouched.
 */
void v9_set_fsr_fp_op (simcpu_t * sp, uint64_t val)
{
	uint64_t oldval;

	DBGFSR( oldval = v9_get_fsr(sp); );
	sp->v9_fsr_ctrl &= ~(V9_FSR_FTT_MASK);
	sp->v9_fsr_ctrl |= (val & V9_FSR_FTT_MASK);
	sp->v9_fsr_exc = val & (V9_FSR_AEXC_MASK | V9_FSR_CEXC_MASK);
	DBGFSR( lprintf(sp->gid, "v9_set_fsr_fp_op: pc=0x%llx, fsr=0x%llx (was 0x%llx)\n", sp->pc, v9_get_fsr(sp), oldval); );
}
7170 | ||
7171 | ||
7172 | /*-------------------------- OLD CODE ------------------------*/ | |
7173 | ||
7174 | ||
7175 | ||
7176 | ||
7177 | #if 0 /* { */ | |
7178 | -- | |
7179 | -- /**/ | |
7180 | -- /* We still get crappy code from these from the Sun*/ | |
7181 | -- /* compiler - eventually well re-write these entirely*/ | |
7182 | -- /* as host native routines in assembler ...*/ | |
7183 | -- /**/ | |
7184 | -- | |
7185 | -- /* BTW: cico = condition codes in+out ... not just*/ | |
7186 | -- /* carry in+carry out ... note addcc sets *all* cond codes*/ | |
7187 | -- | |
7188 | -- IMPL( add_ci_imm ) | |
7189 | -- Rdest = Rsrc1 + Simm16 + (Rccr & 1LL); | |
7190 | -- ENDI | |
7191 | -- | |
7192 | -- IMPL( add_co_imm ) | |
7193 | -- Rdest = il_add_co( Rsrc1, Simm16, &Rccr ); | |
7194 | -- ENDI | |
7195 | -- | |
7196 | -- IMPL( add_cico_imm ) | |
7197 | -- Rdest = il_add_cico( Rsrc1, Simm16, &Rccr ); | |
7198 | -- ENDI | |
7199 | -- | |
7200 | -- IMPL( add_co_imm_rd0 ) | |
7201 | -- (void)il_add_co( Rsrc1, Simm16, &Rccr ); | |
7202 | -- ENDI | |
7203 | -- | |
7204 | -- IMPL( add_cico_imm_rd0 ) | |
7205 | -- (void)il_add_cico( Rsrc1, Simm16, &Rccr ); | |
7206 | -- ENDI | |
7207 | -- | |
7208 | -- IMPL( add_ci_rrr ) | |
7209 | -- Rdest = Rsrc1 + Rsrc2 + (Rccr & 1LL); | |
7210 | -- ENDI | |
7211 | -- | |
7212 | -- IMPL( add_co_rrr ) | |
7213 | -- Rdest = il_add_co( Rsrc1, Rsrc2, &Rccr ); | |
7214 | -- ENDI | |
7215 | -- | |
7216 | -- IMPL( add_cico_rrr ) | |
7217 | -- Rdest = il_add_cico( Rsrc1, Rsrc2, &Rccr ); | |
7218 | -- ENDI | |
7219 | -- | |
7220 | -- IMPL( add_co_rrr_rd0 ) | |
7221 | -- (void)il_add_co( Rsrc1, Rsrc2, &Rccr ); | |
7222 | -- ENDI | |
7223 | -- | |
7224 | -- IMPL( add_cico_rrr_rd0 ) | |
7225 | -- (void)il_add_cico( Rsrc1, Rsrc2, &Rccr ); | |
7226 | -- ENDI | |
7227 | -- | |
7228 | -- | |
7229 | -- | |
7230 | -- | |
7231 | -- | |
/*
 * Subtract instructions.
 * Same suffix scheme as the add group: _ci = borrow-in from Rccr bit 0,
 * _co / _cico = il_sub_co()/il_sub_cico() update condition codes through
 * &Rccr, _rd0 = result discarded (only the Rccr update is kept).
 */

IMPL( sub_ci_imm )
    Rdest = Rsrc1 - Simm16 - (Rccr & 1LL);  /* borrow-in is Rccr bit 0 */
ENDI

IMPL( sub_co_imm )
    Rdest = il_sub_co( Rsrc1, Simm16, &Rccr );
ENDI

IMPL( sub_co_imm_rd0 )
    (void)il_sub_co( Rsrc1, Simm16, &Rccr );   /* executed for Rccr side effect only */
ENDI

IMPL( sub_cico_imm )
    Rdest = il_sub_cico( Rsrc1, Simm16, &Rccr );
ENDI

IMPL( sub_cico_imm_rd0 )
    (void)il_sub_cico( Rsrc1, Simm16, &Rccr );
ENDI

IMPL( sub_ci_rrr )
    Rdest = Rsrc1 - Rsrc2 - (Rccr & 1LL);   /* borrow-in is Rccr bit 0 */
ENDI

IMPL( sub_co_rrr )
    Rdest = il_sub_co( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( sub_co_rrr_rd0 )
    (void)il_sub_co( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( sub_cico_rrr )
    Rdest = il_sub_cico( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( sub_cico_rrr_rd0 )
    (void)il_sub_cico( Rsrc1, Rsrc2, &Rccr );
ENDI
7271 | -- | |
7272 | -- | |
7273 | -- | |
/**/
/* Logic instructions */
/**/
7277 | -- | |
/*
 * AND / ANDN (and-not) instructions.
 * _cc variants update condition codes through &Rccr via the il_*_cc()
 * helpers; _rd0 variants discard the result and keep only that update.
 */

IMPL( and_imm )
    Rdest = Rsrc1 & Simm16;
ENDI

IMPL( and_cc_imm )
    Rdest = il_and_cc( Rsrc1, Simm16, &Rccr );
ENDI

IMPL( and_cc_imm_rd0 )
    (void)il_and_cc( Rsrc1, Simm16, &Rccr );   /* executed for Rccr side effect only */
ENDI

IMPL( and_rrr )
    Rdest = Rsrc1 & Rsrc2;
ENDI

IMPL( and_cc_rrr )
    Rdest = il_and_cc( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( and_cc_rrr_rd0 )
    (void)il_and_cc( Rsrc1, Rsrc2, &Rccr );
ENDI


IMPL( andn_rrr )
    Rdest = Rsrc1 & ~Rsrc2;     /* and with complemented second operand */
ENDI

IMPL( andn_cc_rrr )
    Rdest = il_andn_cc( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( andn_cc_rrr_rd0 )
    (void)il_andn_cc( Rsrc1, Rsrc2, &Rccr );
ENDI
7314 | -- | |
7315 | -- | |
7316 | -- | |
/*
 * OR / ORN (or-not) instructions.
 * Same pattern as the AND group: _cc variants update Rccr through the
 * il_*_cc() helpers, _rd0 variants keep only that update.
 */

IMPL( or_imm )
    Rdest = Rsrc1 | Simm16;
ENDI

IMPL( or_cc_imm )
    Rdest = il_or_cc( Rsrc1, Simm16, &Rccr );
ENDI

IMPL( or_cc_imm_rd0 )
    (void)il_or_cc( Rsrc1, Simm16, &Rccr );    /* executed for Rccr side effect only */
ENDI

IMPL( or_rrr )
    Rdest = Rsrc1 | Rsrc2;
ENDI

IMPL( or_cc_rrr )
    Rdest = il_or_cc( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( or_cc_rrr_rd0 )
    (void)il_or_cc( Rsrc1, Rsrc2, &Rccr );
ENDI


IMPL( orn_rrr )
    Rdest = Rsrc1 | ~Rsrc2;     /* or with complemented second operand */
ENDI

IMPL( orn_cc_rrr )
    Rdest = il_orn_cc( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( orn_cc_rrr_rd0 )
    (void)il_orn_cc( Rsrc1, Rsrc2, &Rccr );
ENDI
7354 | -- | |
7355 | -- | |
7356 | -- | |
/*
 * XOR / XNOR instructions.
 * Same pattern as the AND/OR groups: _cc variants update Rccr through
 * the il_*_cc() helpers, _rd0 variants keep only that update.
 */

IMPL( xor_imm )
    Rdest = Rsrc1 ^ Simm16;
ENDI

IMPL( xor_cc_imm )
    Rdest = il_xor_cc( Rsrc1, Simm16, &Rccr );
ENDI

IMPL( xor_cc_imm_rd0 )
    (void)il_xor_cc( Rsrc1, Simm16, &Rccr );   /* executed for Rccr side effect only */
ENDI

IMPL( xor_rrr )
    Rdest = Rsrc1 ^ Rsrc2;
ENDI

IMPL( xor_cc_rrr )
    Rdest = il_xor_cc( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( xor_cc_rrr_rd0 )
    (void)il_xor_cc( Rsrc1, Rsrc2, &Rccr );
ENDI


IMPL( xnor_rrr )
    Rdest = ~(Rsrc1 ^ Rsrc2);   /* exclusive-nor */
ENDI

IMPL( xnor_cc_rrr )
    Rdest = il_xnor_cc( Rsrc1, Rsrc2, &Rccr );
ENDI

IMPL( xnor_cc_rrr_rd0 )
    (void)il_xnor_cc( Rsrc1, Rsrc2, &Rccr );
ENDI
7394 | -- | |
7395 | -- | |
7396 | -- | |
7397 | -- | |
7398 | -- /* branch with no annulled delay slot */ | |
7399 | -- IMPL( brcond_ds ) | |
7400 | -- uint64_t xpc; | |
7401 | -- | |
7402 | -- cpup->cti_executed = TRUE; | |
7403 | -- | |
7404 | -- if (COMPUTE_TAKEN(Rccr)) { | |
7405 | -- xpc = Rpc + SBRoffset32; | |
7406 | -- } else { | |
7407 | -- xpc = Rnpc + 4; | |
7408 | -- } | |
7409 | -- Rpc = Rnpc; | |
7410 | -- Rnpc = xpc; | |
7411 | -- ENDI | |
7412 | -- | |
7413 | -- | |
7414 | -- /* branch with annulled delay slot */ | |
7415 | -- IMPL( brcond_ds_annul ) | |
7416 | -- uint64_t xpc; | |
7417 | -- | |
7418 | -- cpup->cti_executed = TRUE; | |
7419 | -- | |
7420 | -- if (COMPUTE_TAKEN(Rccr)) { | |
7421 | -- xpc = Rpc + SBRoffset32; | |
7422 | -- } else { | |
7423 | -- cpup->annul = TRUE; | |
7424 | -- xpc = Rnpc + 4; | |
7425 | -- } | |
7426 | -- Rpc = Rnpc; | |
7427 | -- Rnpc = xpc; | |
7428 | -- ENDI | |
7429 | -- | |
7430 | -- | |
/* branch with no annulled delay slot */
/*
 * BG/BLE: taken condition is Z | (N ^ V).
 * Per the inline comment, the CCR shuffle leaves that condition in
 * bits 0 (icc) and 4 (xcc), where COMPUTE_TAKEN expects to find it;
 * this matches the SPARC V9 CCR layout c=bit0, v=bit1, z=bit2, n=bit3
 * (icc) with xcc in bits 4-7.
 */
IMPL( br_g_le_ds )
    uint64_t xpc;
    uint64_t func;

    func = Rccr;
    func = ((func>>1) ^ (func>>3))|(func>>2); /* get Z|(N^V) in bits 0,4 */

    cpup->cti_executed = TRUE;

    if (COMPUTE_TAKEN(func)) {
        xpc = Rpc + SBRoffset32;    /* taken: PC-relative target */
    } else {
        xpc = Rnpc + 4;             /* untaken: fall through (delay slot still runs) */
    }
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7449 | -- | |
7450 | -- | |
/* branch with annulled delay slot */
/*
 * BG/BLE with the annul bit: same Z | (N ^ V) condition as br_g_le_ds,
 * but an untaken branch also annuls the delay-slot instruction.
 */
IMPL( br_g_le_ds_annul )
    uint64_t xpc;
    uint64_t func;

    func = Rccr;
    func = ((func>>1) ^ (func>>3))|(func>>2); /* get Z|(N^V) in bits 0,4 */

    cpup->cti_executed = TRUE;

    if (COMPUTE_TAKEN(func)) {
        xpc = Rpc + SBRoffset32;    /* taken: PC-relative target */
    } else {
        cpup->annul = TRUE;         /* untaken: squash the delay slot */
        xpc = Rnpc + 4;
    }
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7470 | -- | |
7471 | -- | |
/* branch with no annulled delay slot */
/*
 * BGE/BL: taken condition is (N ^ V), deposited in bits 0 (icc) and
 * 4 (xcc) for COMPUTE_TAKEN by xoring the V (bit 1) and N (bit 3)
 * positions of the CCR.
 */
IMPL( br_ge_l_ds )
    uint64_t xpc;
    uint64_t func;

    func = Rccr;
    func = (func>>1) ^ (func>>3); /* get (N^V) in bits 0,4 */

    cpup->cti_executed = TRUE;

    if (COMPUTE_TAKEN(func)) {
        xpc = Rpc + SBRoffset32;    /* taken: PC-relative target */
    } else {
        xpc = Rnpc + 4;             /* untaken: fall through */
    }
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7490 | -- | |
7491 | -- | |
/* branch with annulled delay slot */
/*
 * BGE/BL with the annul bit: same (N ^ V) condition as br_ge_l_ds,
 * but an untaken branch also annuls the delay-slot instruction.
 */
IMPL( br_ge_l_ds_annul )
    uint64_t xpc;
    uint64_t func;

    func = Rccr;
    func = (func>>1) ^ (func>>3); /* get (N^V) in bits 0,4 */

    cpup->cti_executed = TRUE;

    if (COMPUTE_TAKEN(func)) {
        xpc = Rpc + SBRoffset32;    /* taken: PC-relative target */
    } else {
        cpup->annul = TRUE;         /* untaken: squash the delay slot */
        xpc = Rnpc + 4;
    }
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7511 | -- | |
7512 | -- | |
/* branch with no annulled delay slot */
/*
 * BGU/BLEU (unsigned): taken condition is (Z | C), formed in bits
 * 0 (icc) and 4 (xcc) by oring the Z position (bit 2) down onto the
 * C position (bit 0).
 */
IMPL( br_gu_leu_ds )
    uint64_t xpc;
    uint64_t func;

    func = Rccr;
    func = (func>>2) | func; /* get (Z|C) in bits 0,4 */

    cpup->cti_executed = TRUE;

    if (COMPUTE_TAKEN(func)) {
        xpc = Rpc + SBRoffset32;    /* taken: PC-relative target */
    } else {
        xpc = Rnpc + 4;             /* untaken: fall through */
    }
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7531 | -- | |
7532 | -- | |
/* branch with annulled delay slot */
/*
 * BGU/BLEU with the annul bit: same (Z | C) condition as br_gu_leu_ds,
 * but an untaken branch also annuls the delay-slot instruction.
 */
IMPL( br_gu_leu_ds_annul )
    uint64_t xpc;
    uint64_t func;

    func = Rccr;
    func = (func>>2) | func; /* get (Z|C) in bits 0,4 */

    cpup->cti_executed = TRUE;

    if (COMPUTE_TAKEN(func)) {
        xpc = Rpc + SBRoffset32;    /* taken: PC-relative target */
    } else {
        cpup->annul = TRUE;         /* untaken: squash the delay slot */
        xpc = Rnpc + 4;
    }
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7552 | -- | |
7553 | -- | |
7554 | -- | |
7555 | -- IMPL( bralways_ds ) | |
7556 | -- uint64_t xpc; | |
7557 | -- cpup->cti_executed = TRUE; | |
7558 | -- | |
7559 | -- xpc = Rpc + SBRoffset32; | |
7560 | -- Rpc = Rnpc; | |
7561 | -- Rnpc = xpc; | |
7562 | -- ENDI | |
7563 | -- | |
7564 | -- | |
/* FIXME: prob not worth burning an instn on its own
 * combine with bralways_ds above, and carry annul
 * bit in the decoded form
 */
/*
 * Branch-always with the annul bit set: the delay-slot instruction is
 * always annulled, then control transfers to the PC-relative target.
 */
IMPL( bralways_ds_annul )
    uint64_t xpc;
    cpup->cti_executed = TRUE;  /* record the control-transfer */

    cpup->annul = TRUE;         /* always squash the delay slot */
    xpc = Rpc + SBRoffset32;    /* PC-relative target */
    Rpc = Rnpc;
    Rnpc = xpc;
ENDI
7578 | -- | |
7579 | -- | |
7580 | -- | |
7581 | -- IMPL( brnever_ds_annul ) | |
7582 | -- cpup->cti_executed = TRUE; | |
7583 | -- | |
7584 | -- cpup->annul = TRUE; | |
7585 | -- Rpc = Rnpc; | |
7586 | -- Rnpc += 4; | |
7587 | -- ENDI | |
7588 | -- | |
7589 | -- | |
7590 | #endif /* } */ | |
7591 |