filename | src/sh4/sh4x86.c |
changeset | 386:6fb10951326a |
prev | 381:aade6c9aca4d |
next | 388:13bae2fb0373 |
author | nkeynes |
date | Sun Sep 16 07:03:23 2007 +0000 (14 years ago) |
permissions | -rw-r--r-- |
last change | Implement MAC.W, MAC.L and DIV1; correct SHAD/SHLD; fix privilege and slot-illegal checks on LDC/STC opcodes; fix various other small bugs |
1 /**
2 * $Id: sh4x86.c,v 1.9 2007-09-16 07:03:23 nkeynes Exp $
3 *
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
7 *
8 * Copyright (c) 2007 Nathan Keynes.
9 *
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
14 *
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
19 */
21 #include <assert.h>
23 #ifndef NDEBUG
24 #define DEBUG_JUMPS 1
25 #endif
27 #include "sh4/sh4core.h"
28 #include "sh4/sh4trans.h"
29 #include "sh4/x86op.h"
30 #include "clock.h"
32 #define DEFAULT_BACKPATCH_SIZE 4096
34 /**
35 * Struct to manage internal translation state. This state is not saved -
36 * it is only valid between calls to sh4_translate_begin_block() and
37 * sh4_translate_end_block()
38 */
39 struct sh4_x86_state {
40 gboolean in_delay_slot;
41 gboolean priv_checked; /* true if we've already checked the cpu mode. */
42 gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
44 /* Allocated memory for the (block-wide) back-patch list */
45 uint32_t **backpatch_list;
46 uint32_t backpatch_posn;
47 uint32_t backpatch_size;
48 };
50 #define EXIT_DATA_ADDR_READ 0
51 #define EXIT_DATA_ADDR_WRITE 7
52 #define EXIT_ILLEGAL 14
53 #define EXIT_SLOT_ILLEGAL 21
54 #define EXIT_FPU_DISABLED 28
55 #define EXIT_SLOT_FPU_DISABLED 35
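/*
 * Note: these EXIT_* values are byte offsets into the exception stub that
 * sh4_translate_end_block() emits after the normal block epilogue. Each stub
 * entry there is a 5-byte PUSH imm32 (the SH4 exception code) followed by a
 * 2-byte short JMP to the common raise-exception tail, which gives the
 * stride of 7 between consecutive exit points.
 */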
57 static struct sh4_x86_state sh4_x86;
59 void signsat48( void )
60 {
61 if( ((int64_t)sh4r.mac) < (int64_t)0xFFFF800000000000LL )
62 sh4r.mac = 0xFFFF800000000000LL;
63 else if( ((int64_t)sh4r.mac) > (int64_t)0x00007FFFFFFFFFFFLL )
64 sh4r.mac = 0x00007FFFFFFFFFFFLL;
65 }
68 void sh4_x86_init()
69 {
70 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
71 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
72 }
75 static void sh4_x86_add_backpatch( uint8_t *ptr )
76 {
77 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
78 sh4_x86.backpatch_size <<= 1;
79 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
80 assert( sh4_x86.backpatch_list != NULL );
81 }
82 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
83 }
85 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
86 {
87 unsigned int i;
88 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
89 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
90 }
91 }
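/*
 * Backpatch sketch (derived from the arithmetic above): each entry in
 * backpatch_list points at the 32-bit rel32 field of a forward jump,
 * presumably recorded by the JE_exit()/JNE_exit()/JMP_exit() macros in
 * x86op.h, and that field initially holds an EXIT_* byte offset. Adding
 * (reloc_base - field - 4) converts it into a proper PC-relative
 * displacement, so the jump lands at reloc_base + EXIT_*, i.e. the
 * matching entry in the block's exception stub.
 */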
93 /**
94 * Emit an instruction to load an SH4 reg into a real register
95 */
96 static inline void load_reg( int x86reg, int sh4reg )
97 {
98 /* mov [bp+n], reg */
99 OP(0x8B);
100 OP(0x45 + (x86reg<<3));
101 OP(REG_OFFSET(r[sh4reg]));
102 }
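/*
 * For illustration: load_reg(R_EAX, 3) emits 8B 45 <disp8>, i.e.
 * MOV EAX, [EBP + REG_OFFSET(r[3])]. EBP is pointed at &sh4r by
 * sh4_translate_begin_block(), and the mod=01 encoding assumes the
 * register file lies within a signed 8-bit displacement of the
 * structure base.
 */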
104 static inline void load_reg16s( int x86reg, int sh4reg )
105 {
106 OP(0x0F);
107 OP(0xBF);
108 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
109 }
111 static inline void load_reg16u( int x86reg, int sh4reg )
112 {
113 OP(0x0F);
114 OP(0xB7);
115 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
117 }
119 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
120 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
121 /**
122 * Emit an instruction to load an immediate value into a register
123 */
124 static inline void load_imm32( int x86reg, uint32_t value ) {
125 /* mov #value, reg */
126 OP(0xB8 + x86reg);
127 OP32(value);
128 }
130 /**
131 * Emit an instruction to store an SH4 reg (RN)
132 */
133 static inline void store_reg( int x86reg, int sh4reg ) {
134 /* mov reg, [bp+n] */
135 OP(0x89);
136 OP(0x45 + (x86reg<<3));
137 OP(REG_OFFSET(r[sh4reg]));
138 }
140 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
142 /**
143 * Load an FR register (single-precision floating point) into an integer x86
144 * register (eg for register-to-register moves)
145 */
146 static inline void load_fr( int bankreg, int x86reg, int frm )
147 {
148 OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
149 }
151 /**
152  * Store an integer x86 register into an FR register (single-precision
153  * floating point), eg for register-to-register moves
154 */
155 static inline void store_fr( int bankreg, int x86reg, int frn )
156 {
157 OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
158 }
161 /**
162  * Load a pointer to the back floating-point bank into the specified x86 register. The
163 * bankreg must have been previously loaded with FPSCR.
164 * NB: 10 bytes
165 */
166 static inline void load_xf_bank( int bankreg )
167 {
168 NOT_r32( bankreg );
169 SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
170 AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
171 OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
172 }
174 /**
175 * Update the fr_bank pointer based on the current fpscr value.
176 */
177 static inline void update_fr_bank( int fpscrreg )
178 {
179 SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
180 AND_imm8s_r32( 0x40, fpscrreg ); // Complete extraction
181 OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
182 store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
183 }
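/*
 * Both helpers above derive a bank pointer from FPSCR.FR (bit 21): shifting
 * right by 15 moves bit 21 to bit 6, and masking with 0x40 yields either 0
 * or 64 - assuming sh4r.fr is laid out as two banks of 16 single-precision
 * floats (2 x 64 bytes). The extra NOT in load_xf_bank() inverts the bit
 * first, so it selects the bank that is currently *not* the front bank.
 */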
184 /**
185 * Push FPUL (as a 32-bit float) onto the FPU stack
186 */
187 static inline void push_fpul( )
188 {
189 OP(0xD9); OP(0x45); OP(R_FPUL);
190 }
192 /**
193 * Pop FPUL (as a 32-bit float) from the FPU stack
194 */
195 static inline void pop_fpul( )
196 {
197 OP(0xD9); OP(0x5D); OP(R_FPUL);
198 }
200 /**
201 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
202 * with the location of the current fp bank.
203 */
204 static inline void push_fr( int bankreg, int frm )
205 {
206 OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
207 }
209 /**
210 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
211 * with bankreg previously loaded with the location of the current fp bank.
212 */
213 static inline void pop_fr( int bankreg, int frm )
214 {
215 OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
216 }
218 /**
219 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
220 * with the location of the current fp bank.
221 */
222 static inline void push_dr( int bankreg, int frm )
223 {
224 OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
225 }
227 static inline void pop_dr( int bankreg, int frm )
228 {
229 OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
230 }
232 /**
233 * Note: clobbers EAX to make the indirect call - this isn't usually
234 * a problem since the callee will usually clobber it anyway.
235 */
236 static inline void call_func0( void *ptr )
237 {
238 load_imm32(R_EAX, (uint32_t)ptr);
239 CALL_r32(R_EAX);
240 }
242 static inline void call_func1( void *ptr, int arg1 )
243 {
244 PUSH_r32(arg1);
245 call_func0(ptr);
246 ADD_imm8s_r32( 4, R_ESP );
247 }
249 static inline void call_func2( void *ptr, int arg1, int arg2 )
250 {
251 PUSH_r32(arg2);
252 PUSH_r32(arg1);
253 call_func0(ptr);
254 ADD_imm8s_r32( 8, R_ESP );
255 }
257 /**
258 * Write a double (64-bit) value into memory, with the first word in arg2a, and
259 * the second in arg2b
260 * NB: 30 bytes
261 */
262 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
263 {
264 ADD_imm8s_r32( 4, addr );
265 PUSH_r32(arg2b);
266 PUSH_r32(addr);
267 ADD_imm8s_r32( -4, addr );
268 PUSH_r32(arg2a);
269 PUSH_r32(addr);
270 call_func0(sh4_write_long);
271 ADD_imm8s_r32( 8, R_ESP );
272 call_func0(sh4_write_long);
273 ADD_imm8s_r32( 8, R_ESP );
274 }
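/*
 * Both cdecl argument pairs are pushed up front: the first call consumes
 * (addr, arg2a) and the following ADD 8,ESP discards them, leaving
 * (addr+4, arg2b) already in place for the second sh4_write_long(). Net
 * effect: arg2a is written to addr and arg2b to addr+4.
 */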
276 /**
277 * Read a double (64-bit) value from memory, writing the first word into arg2a
278 * and the second into arg2b. The addr must not be in EAX
279 * NB: 27 bytes
280 */
281 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
282 {
283 PUSH_r32(addr);
284 call_func0(sh4_read_long);
285 POP_r32(addr);
286 PUSH_r32(R_EAX);
287 ADD_imm8s_r32( 4, addr );
288 PUSH_r32(addr);
289 call_func0(sh4_read_long);
290 ADD_imm8s_r32( 4, R_ESP );
291 MOV_r32_r32( R_EAX, arg2b );
292 POP_r32(arg2a);
293 }
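/*
 * Why addr must not be EAX: sh4_read_long() returns its result in EAX,
 * and the POP that restores addr happens before that result is pushed;
 * with addr == EAX the first word read would be overwritten by the
 * restored address.
 */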
295 /* Exception checks - Note that all exception checks will clobber EAX */
296 static void check_priv( )
297 {
298 if( !sh4_x86.priv_checked ) {
299 sh4_x86.priv_checked = TRUE;
300 load_spreg( R_EAX, R_SR );
301 AND_imm32_r32( SR_MD, R_EAX );
302 if( sh4_x86.in_delay_slot ) {
303 JE_exit( EXIT_SLOT_ILLEGAL );
304 } else {
305 JE_exit( EXIT_ILLEGAL );
306 }
307 }
308 }
310 static void check_fpuen( )
311 {
312 if( !sh4_x86.fpuen_checked ) {
313 sh4_x86.fpuen_checked = TRUE;
314 load_spreg( R_EAX, R_SR );
315 AND_imm32_r32( SR_FD, R_EAX );
316 if( sh4_x86.in_delay_slot ) {
317 JNE_exit(EXIT_SLOT_FPU_DISABLED);
318 } else {
319 JNE_exit(EXIT_FPU_DISABLED);
320 }
321 }
322 }
324 static void check_ralign16( int x86reg )
325 {
326 TEST_imm32_r32( 0x00000001, x86reg );
327 JNE_exit(EXIT_DATA_ADDR_READ);
328 }
330 static void check_walign16( int x86reg )
331 {
332 TEST_imm32_r32( 0x00000001, x86reg );
333 JNE_exit(EXIT_DATA_ADDR_WRITE);
334 }
336 static void check_ralign32( int x86reg )
337 {
338 TEST_imm32_r32( 0x00000003, x86reg );
339 JNE_exit(EXIT_DATA_ADDR_READ);
340 }
341 static void check_walign32( int x86reg )
342 {
343 TEST_imm32_r32( 0x00000003, x86reg );
344 JNE_exit(EXIT_DATA_ADDR_WRITE);
345 }
347 static inline void raise_exception( int exc )
348 {
349 PUSH_imm32(exc);
350 call_func0(sh4_raise_exception);
351 ADD_imm8s_r32( 4, R_ESP );
352 sh4_x86.in_delay_slot = FALSE;
353 }
355 #define UNDEF()
356 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
357 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
358 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
359 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
360 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
361 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
362 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
364 #define RAISE_EXCEPTION( exc ) raise_exception(exc); return 1;
365 #define SLOTILLEGAL() JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
369 /**
370  * Emit the 'start of block' assembly. Sets up the stack frame and saves
371 * SI/DI as required
372 */
373 void sh4_translate_begin_block()
374 {
375 PUSH_r32(R_EBP);
376 /* mov &sh4r, ebp */
377 load_imm32( R_EBP, (uint32_t)&sh4r );
378 PUSH_r32(R_EDI);
379 PUSH_r32(R_ESI);
380 XOR_r32_r32(R_ESI, R_ESI);
382 sh4_x86.in_delay_slot = FALSE;
383 sh4_x86.priv_checked = FALSE;
384 sh4_x86.fpuen_checked = FALSE;
385 sh4_x86.backpatch_posn = 0;
386 }
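/*
 * Register conventions within a translated block (as set up above and used
 * by exit_block()/sh4_translate_end_block()): EBP holds &sh4r for the
 * [EBP+disp] accesses, ESI appears to count the SH4 instructions executed
 * so far (it is multiplied by sh4_cpu_period to advance slice_cycle, and
 * doubled to recover the PC on the exception path), and EDI carries the
 * new PC value that is stored back on exit.
 */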
388 /**
389 * Exit the block early (ie branch out), conditionally or otherwise
390 */
391 void exit_block( )
392 {
393 store_spreg( R_EDI, REG_OFFSET(pc) );
394 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
395 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
396 MUL_r32( R_ESI );
397 ADD_r32_r32( R_EAX, R_ECX );
398 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
399 load_imm32( R_EAX, 1 );
400 POP_r32(R_ESI);
401 POP_r32(R_EDI);
402 POP_r32(R_EBP);
403 RET();
404 }
406 /**
407  * Flush any open regs back to memory, restore SI/DI, update PC, etc.
408 */
409 void sh4_translate_end_block( sh4addr_t pc ) {
410 assert( !sh4_x86.in_delay_slot ); // should never stop here
411 // Normal termination - save PC, cycle count
412 exit_block( );
414 uint8_t *end_ptr = xlat_output;
415 // Exception termination. Jump block for various exception codes:
416 PUSH_imm32( EXC_DATA_ADDR_READ );
417 JMP_rel8( 33, target1 );
418 PUSH_imm32( EXC_DATA_ADDR_WRITE );
419 JMP_rel8( 26, target2 );
420 PUSH_imm32( EXC_ILLEGAL );
421 JMP_rel8( 19, target3 );
422 PUSH_imm32( EXC_SLOT_ILLEGAL );
423 JMP_rel8( 12, target4 );
424 PUSH_imm32( EXC_FPU_DISABLED );
425 JMP_rel8( 5, target5 );
426 PUSH_imm32( EXC_SLOT_FPU_DISABLED );
427 // Jump targets for the stub entries above
428 JMP_TARGET(target1);
429 JMP_TARGET(target2);
430 JMP_TARGET(target3);
431 JMP_TARGET(target4);
432 JMP_TARGET(target5);
433 load_spreg( R_ECX, REG_OFFSET(pc) );
434 ADD_r32_r32( R_ESI, R_ECX );
435 ADD_r32_r32( R_ESI, R_ECX );
436 store_spreg( R_ECX, REG_OFFSET(pc) );
437 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
438 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
439 MUL_r32( R_ESI );
440 ADD_r32_r32( R_EAX, R_ECX );
441 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
443 load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
444 CALL_r32( R_EAX ); // 2
445 ADD_imm8s_r32( 4, R_ESP );
446 POP_r32(R_ESI);
447 POP_r32(R_EDI);
448 POP_r32(R_EBP);
449 RET();
451 sh4_x86_do_backpatch( end_ptr );
452 }
454 /**
455 * Translate a single instruction. Delayed branches are handled specially
456  * by translating both the branch and its delay-slot instruction as a
457  * single unit.
458  *
459  * @return true if the instruction marks the end of a basic block
460  * (eg a branch or an instruction that raises an exception).
461 */
462 uint32_t sh4_x86_translate_instruction( uint32_t pc )
463 {
464 uint16_t ir = sh4_read_word( pc );
466 switch( (ir&0xF000) >> 12 ) {
467 case 0x0:
468 switch( ir&0xF ) {
469 case 0x2:
470 switch( (ir&0x80) >> 7 ) {
471 case 0x0:
472 switch( (ir&0x70) >> 4 ) {
473 case 0x0:
474 { /* STC SR, Rn */
475 uint32_t Rn = ((ir>>8)&0xF);
476 check_priv();
477 call_func0(sh4_read_sr);
478 store_reg( R_EAX, Rn );
479 }
480 break;
481 case 0x1:
482 { /* STC GBR, Rn */
483 uint32_t Rn = ((ir>>8)&0xF);
484 load_spreg( R_EAX, R_GBR );
485 store_reg( R_EAX, Rn );
486 }
487 break;
488 case 0x2:
489 { /* STC VBR, Rn */
490 uint32_t Rn = ((ir>>8)&0xF);
491 check_priv();
492 load_spreg( R_EAX, R_VBR );
493 store_reg( R_EAX, Rn );
494 }
495 break;
496 case 0x3:
497 { /* STC SSR, Rn */
498 uint32_t Rn = ((ir>>8)&0xF);
499 check_priv();
500 load_spreg( R_EAX, R_SSR );
501 store_reg( R_EAX, Rn );
502 }
503 break;
504 case 0x4:
505 { /* STC SPC, Rn */
506 uint32_t Rn = ((ir>>8)&0xF);
507 check_priv();
508 load_spreg( R_EAX, R_SPC );
509 store_reg( R_EAX, Rn );
510 }
511 break;
512 default:
513 UNDEF();
514 break;
515 }
516 break;
517 case 0x1:
518 { /* STC Rm_BANK, Rn */
519 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
520 check_priv();
521 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
522 store_reg( R_EAX, Rn );
523 }
524 break;
525 }
526 break;
527 case 0x3:
528 switch( (ir&0xF0) >> 4 ) {
529 case 0x0:
530 { /* BSRF Rn */
531 uint32_t Rn = ((ir>>8)&0xF);
532 if( sh4_x86.in_delay_slot ) {
533 SLOTILLEGAL();
534 } else {
535 load_imm32( R_EAX, pc + 4 );
536 store_spreg( R_EAX, R_PR );
537 load_reg( R_EDI, Rn );
538 ADD_r32_r32( R_EAX, R_EDI );
539 sh4_x86.in_delay_slot = TRUE;
540 return 0;
541 }
542 }
543 break;
544 case 0x2:
545 { /* BRAF Rn */
546 uint32_t Rn = ((ir>>8)&0xF);
547 if( sh4_x86.in_delay_slot ) {
548 SLOTILLEGAL();
549 } else {
550 load_reg( R_EDI, Rn );
551 ADD_imm32_r32( pc + 4, R_EDI );
552 sh4_x86.in_delay_slot = TRUE;
553 return 0;
554 }
555 }
556 break;
557 case 0x8:
558 { /* PREF @Rn */
559 uint32_t Rn = ((ir>>8)&0xF);
560 load_reg( R_EAX, Rn );
561 PUSH_r32( R_EAX );
562 AND_imm32_r32( 0xFC000000, R_EAX );
563 CMP_imm32_r32( 0xE0000000, R_EAX );
564 JNE_rel8(7, end);
565 call_func0( sh4_flush_store_queue );
566 JMP_TARGET(end);
567 ADD_imm8s_r32( 4, R_ESP );
568 }
569 break;
570 case 0x9:
571 { /* OCBI @Rn */
572 uint32_t Rn = ((ir>>8)&0xF);
573 }
574 break;
575 case 0xA:
576 { /* OCBP @Rn */
577 uint32_t Rn = ((ir>>8)&0xF);
578 }
579 break;
580 case 0xB:
581 { /* OCBWB @Rn */
582 uint32_t Rn = ((ir>>8)&0xF);
583 }
584 break;
585 case 0xC:
586 { /* MOVCA.L R0, @Rn */
587 uint32_t Rn = ((ir>>8)&0xF);
588 load_reg( R_EAX, 0 );
589 load_reg( R_ECX, Rn );
590 check_walign32( R_ECX );
591 MEM_WRITE_LONG( R_ECX, R_EAX );
592 }
593 break;
594 default:
595 UNDEF();
596 break;
597 }
598 break;
599 case 0x4:
600 { /* MOV.B Rm, @(R0, Rn) */
601 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
602 load_reg( R_EAX, 0 );
603 load_reg( R_ECX, Rn );
604 ADD_r32_r32( R_EAX, R_ECX );
605 load_reg( R_EAX, Rm );
606 MEM_WRITE_BYTE( R_ECX, R_EAX );
607 }
608 break;
609 case 0x5:
610 { /* MOV.W Rm, @(R0, Rn) */
611 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
612 load_reg( R_EAX, 0 );
613 load_reg( R_ECX, Rn );
614 ADD_r32_r32( R_EAX, R_ECX );
615 check_walign16( R_ECX );
616 load_reg( R_EAX, Rm );
617 MEM_WRITE_WORD( R_ECX, R_EAX );
618 }
619 break;
620 case 0x6:
621 { /* MOV.L Rm, @(R0, Rn) */
622 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
623 load_reg( R_EAX, 0 );
624 load_reg( R_ECX, Rn );
625 ADD_r32_r32( R_EAX, R_ECX );
626 check_walign32( R_ECX );
627 load_reg( R_EAX, Rm );
628 MEM_WRITE_LONG( R_ECX, R_EAX );
629 }
630 break;
631 case 0x7:
632 { /* MUL.L Rm, Rn */
633 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
634 load_reg( R_EAX, Rm );
635 load_reg( R_ECX, Rn );
636 MUL_r32( R_ECX );
637 store_spreg( R_EAX, R_MACL );
638 }
639 break;
640 case 0x8:
641 switch( (ir&0xFF0) >> 4 ) {
642 case 0x0:
643 { /* CLRT */
644 CLC();
645 SETC_t();
646 }
647 break;
648 case 0x1:
649 { /* SETT */
650 STC();
651 SETC_t();
652 }
653 break;
654 case 0x2:
655 { /* CLRMAC */
656 XOR_r32_r32(R_EAX, R_EAX);
657 store_spreg( R_EAX, R_MACL );
658 store_spreg( R_EAX, R_MACH );
659 }
660 break;
661 case 0x3:
662 { /* LDTLB */
663 }
664 break;
665 case 0x4:
666 { /* CLRS */
667 CLC();
668 SETC_sh4r(R_S);
669 }
670 break;
671 case 0x5:
672 { /* SETS */
673 STC();
674 SETC_sh4r(R_S);
675 }
676 break;
677 default:
678 UNDEF();
679 break;
680 }
681 break;
682 case 0x9:
683 switch( (ir&0xF0) >> 4 ) {
684 case 0x0:
685 { /* NOP */
686 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
687 }
688 break;
689 case 0x1:
690 { /* DIV0U */
691 XOR_r32_r32( R_EAX, R_EAX );
692 store_spreg( R_EAX, R_Q );
693 store_spreg( R_EAX, R_M );
694 store_spreg( R_EAX, R_T );
695 }
696 break;
697 case 0x2:
698 { /* MOVT Rn */
699 uint32_t Rn = ((ir>>8)&0xF);
700 load_spreg( R_EAX, R_T );
701 store_reg( R_EAX, Rn );
702 }
703 break;
704 default:
705 UNDEF();
706 break;
707 }
708 break;
709 case 0xA:
710 switch( (ir&0xF0) >> 4 ) {
711 case 0x0:
712 { /* STS MACH, Rn */
713 uint32_t Rn = ((ir>>8)&0xF);
714 load_spreg( R_EAX, R_MACH );
715 store_reg( R_EAX, Rn );
716 }
717 break;
718 case 0x1:
719 { /* STS MACL, Rn */
720 uint32_t Rn = ((ir>>8)&0xF);
721 load_spreg( R_EAX, R_MACL );
722 store_reg( R_EAX, Rn );
723 }
724 break;
725 case 0x2:
726 { /* STS PR, Rn */
727 uint32_t Rn = ((ir>>8)&0xF);
728 load_spreg( R_EAX, R_PR );
729 store_reg( R_EAX, Rn );
730 }
731 break;
732 case 0x3:
733 { /* STC SGR, Rn */
734 uint32_t Rn = ((ir>>8)&0xF);
735 check_priv();
736 load_spreg( R_EAX, R_SGR );
737 store_reg( R_EAX, Rn );
738 }
739 break;
740 case 0x5:
741 { /* STS FPUL, Rn */
742 uint32_t Rn = ((ir>>8)&0xF);
743 load_spreg( R_EAX, R_FPUL );
744 store_reg( R_EAX, Rn );
745 }
746 break;
747 case 0x6:
748 { /* STS FPSCR, Rn */
749 uint32_t Rn = ((ir>>8)&0xF);
750 load_spreg( R_EAX, R_FPSCR );
751 store_reg( R_EAX, Rn );
752 }
753 break;
754 case 0xF:
755 { /* STC DBR, Rn */
756 uint32_t Rn = ((ir>>8)&0xF);
757 check_priv();
758 load_spreg( R_EAX, R_DBR );
759 store_reg( R_EAX, Rn );
760 }
761 break;
762 default:
763 UNDEF();
764 break;
765 }
766 break;
767 case 0xB:
768 switch( (ir&0xFF0) >> 4 ) {
769 case 0x0:
770 { /* RTS */
771 if( sh4_x86.in_delay_slot ) {
772 SLOTILLEGAL();
773 } else {
774 load_spreg( R_EDI, R_PR );
775 sh4_x86.in_delay_slot = TRUE;
776 return 0;
777 }
778 }
779 break;
780 case 0x1:
781 { /* SLEEP */
782 /* TODO */
783 }
784 break;
785 case 0x2:
786 { /* RTE */
787 check_priv();
788 if( sh4_x86.in_delay_slot ) {
789 SLOTILLEGAL();
790 } else {
791 load_spreg( R_EDI, R_SPC );
792 load_spreg( R_EAX, R_SSR );
793 call_func1( sh4_write_sr, R_EAX );
794 sh4_x86.in_delay_slot = TRUE;
795 sh4_x86.priv_checked = FALSE;
796 sh4_x86.fpuen_checked = FALSE;
797 return 0;
798 }
799 }
800 break;
801 default:
802 UNDEF();
803 break;
804 }
805 break;
806 case 0xC:
807 { /* MOV.B @(R0, Rm), Rn */
808 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
809 load_reg( R_EAX, 0 );
810 load_reg( R_ECX, Rm );
811 ADD_r32_r32( R_EAX, R_ECX );
812 MEM_READ_BYTE( R_ECX, R_EAX );
813 store_reg( R_EAX, Rn );
814 }
815 break;
816 case 0xD:
817 { /* MOV.W @(R0, Rm), Rn */
818 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
819 load_reg( R_EAX, 0 );
820 load_reg( R_ECX, Rm );
821 ADD_r32_r32( R_EAX, R_ECX );
822 check_ralign16( R_ECX );
823 MEM_READ_WORD( R_ECX, R_EAX );
824 store_reg( R_EAX, Rn );
825 }
826 break;
827 case 0xE:
828 { /* MOV.L @(R0, Rm), Rn */
829 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
830 load_reg( R_EAX, 0 );
831 load_reg( R_ECX, Rm );
832 ADD_r32_r32( R_EAX, R_ECX );
833 check_ralign32( R_ECX );
834 MEM_READ_LONG( R_ECX, R_EAX );
835 store_reg( R_EAX, Rn );
836 }
837 break;
838 case 0xF:
839 { /* MAC.L @Rm+, @Rn+ */
840 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
841 load_reg( R_ECX, Rm );
842 check_ralign32( R_ECX );
843 load_reg( R_ECX, Rn );
844 check_ralign32( R_ECX );
845 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
846 MEM_READ_LONG( R_ECX, R_EAX );
847 PUSH_r32( R_EAX );
848 load_reg( R_ECX, Rm );
849 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
850 MEM_READ_LONG( R_ECX, R_EAX );
851 POP_r32( R_ECX );
852 IMUL_r32( R_ECX );
853 ADD_r32_sh4r( R_EAX, R_MACL );
854 ADC_r32_sh4r( R_EDX, R_MACH );
856 load_spreg( R_ECX, R_S );
857 TEST_r32_r32(R_ECX, R_ECX);
858 JE_rel8( 7, nosat );
859 call_func0( signsat48 );
860 JMP_TARGET( nosat );
861 }
862 break;
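/*
 * MAC.L: the two longs fetched from @Rm+ and @Rn+ are multiplied as a
 * signed 64-bit product (IMUL) and accumulated into MACH:MACL; when the
 * S flag is set the accumulator is then clamped to 48 bits by the
 * signsat48() helper above.
 */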
863 default:
864 UNDEF();
865 break;
866 }
867 break;
868 case 0x1:
869 { /* MOV.L Rm, @(disp, Rn) */
870 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
871 load_reg( R_ECX, Rn );
872 load_reg( R_EAX, Rm );
873 ADD_imm32_r32( disp, R_ECX );
874 check_walign32( R_ECX );
875 MEM_WRITE_LONG( R_ECX, R_EAX );
876 }
877 break;
878 case 0x2:
879 switch( ir&0xF ) {
880 case 0x0:
881 { /* MOV.B Rm, @Rn */
882 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
883 load_reg( R_EAX, Rm );
884 load_reg( R_ECX, Rn );
885 MEM_WRITE_BYTE( R_ECX, R_EAX );
886 }
887 break;
888 case 0x1:
889 { /* MOV.W Rm, @Rn */
890 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
891 load_reg( R_ECX, Rn );
892 check_walign16( R_ECX );
893 load_reg( R_EAX, Rm );
894 MEM_WRITE_WORD( R_ECX, R_EAX );
895 }
896 break;
897 case 0x2:
898 { /* MOV.L Rm, @Rn */
899 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
900 load_reg( R_EAX, Rm );
901 load_reg( R_ECX, Rn );
902 check_walign32(R_ECX);
903 MEM_WRITE_LONG( R_ECX, R_EAX );
904 }
905 break;
906 case 0x4:
907 { /* MOV.B Rm, @-Rn */
908 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
909 load_reg( R_EAX, Rm );
910 load_reg( R_ECX, Rn );
911 ADD_imm8s_r32( -1, R_ECX );
912 store_reg( R_ECX, Rn );
913 MEM_WRITE_BYTE( R_ECX, R_EAX );
914 }
915 break;
916 case 0x5:
917 { /* MOV.W Rm, @-Rn */
918 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
919 load_reg( R_ECX, Rn );
920 check_walign16( R_ECX );
921 load_reg( R_EAX, Rm );
922 ADD_imm8s_r32( -2, R_ECX );
923 store_reg( R_ECX, Rn );
924 MEM_WRITE_WORD( R_ECX, R_EAX );
925 }
926 break;
927 case 0x6:
928 { /* MOV.L Rm, @-Rn */
929 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
930 load_reg( R_EAX, Rm );
931 load_reg( R_ECX, Rn );
932 check_walign32( R_ECX );
933 ADD_imm8s_r32( -4, R_ECX );
934 store_reg( R_ECX, Rn );
935 MEM_WRITE_LONG( R_ECX, R_EAX );
936 }
937 break;
938 case 0x7:
939 { /* DIV0S Rm, Rn */
940 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
941 load_reg( R_EAX, Rm );
942 load_reg( R_ECX, Rn );
943 SHR_imm8_r32( 31, R_EAX );
944 SHR_imm8_r32( 31, R_ECX );
945 store_spreg( R_EAX, R_M );
946 store_spreg( R_ECX, R_Q );
947 CMP_r32_r32( R_EAX, R_ECX );
948 SETNE_t();
949 }
950 break;
951 case 0x8:
952 { /* TST Rm, Rn */
953 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
954 load_reg( R_EAX, Rm );
955 load_reg( R_ECX, Rn );
956 TEST_r32_r32( R_EAX, R_ECX );
957 SETE_t();
958 }
959 break;
960 case 0x9:
961 { /* AND Rm, Rn */
962 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
963 load_reg( R_EAX, Rm );
964 load_reg( R_ECX, Rn );
965 AND_r32_r32( R_EAX, R_ECX );
966 store_reg( R_ECX, Rn );
967 }
968 break;
969 case 0xA:
970 { /* XOR Rm, Rn */
971 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
972 load_reg( R_EAX, Rm );
973 load_reg( R_ECX, Rn );
974 XOR_r32_r32( R_EAX, R_ECX );
975 store_reg( R_ECX, Rn );
976 }
977 break;
978 case 0xB:
979 { /* OR Rm, Rn */
980 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
981 load_reg( R_EAX, Rm );
982 load_reg( R_ECX, Rn );
983 OR_r32_r32( R_EAX, R_ECX );
984 store_reg( R_ECX, Rn );
985 }
986 break;
987 case 0xC:
988 { /* CMP/STR Rm, Rn */
989 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
990 load_reg( R_EAX, Rm );
991 load_reg( R_ECX, Rn );
992 XOR_r32_r32( R_ECX, R_EAX );
993 TEST_r8_r8( R_AL, R_AL );
994 JE_rel8(13, target1);
995 TEST_r8_r8( R_AH, R_AH ); // 2
996 JE_rel8(9, target2);
997 SHR_imm8_r32( 16, R_EAX ); // 3
998 TEST_r8_r8( R_AL, R_AL ); // 2
999 JE_rel8(2, target3);
1000 TEST_r8_r8( R_AH, R_AH ); // 2
1001 JMP_TARGET(target1);
1002 JMP_TARGET(target2);
1003 JMP_TARGET(target3);
1004 SETE_t();
1005 }
1006 break;
1007 case 0xD:
1008 { /* XTRCT Rm, Rn */
1009 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1010 load_reg( R_EAX, Rm );
1011 MOV_r32_r32( R_EAX, R_ECX );
1012 SHR_imm8_r32( 16, R_EAX );
1013 SHL_imm8_r32( 16, R_ECX );
1014 OR_r32_r32( R_EAX, R_ECX );
1015 store_reg( R_ECX, Rn );
1016 }
1017 break;
1018 case 0xE:
1019 { /* MULU.W Rm, Rn */
1020 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1021 load_reg16u( R_EAX, Rm );
1022 load_reg16u( R_ECX, Rn );
1023 MUL_r32( R_ECX );
1024 store_spreg( R_EAX, R_MACL );
1025 }
1026 break;
1027 case 0xF:
1028 { /* MULS.W Rm, Rn */
1029 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1030 load_reg16s( R_EAX, Rm );
1031 load_reg16s( R_ECX, Rn );
1032 MUL_r32( R_ECX );
1033 store_spreg( R_EAX, R_MACL );
1034 }
1035 break;
1036 default:
1037 UNDEF();
1038 break;
1039 }
1040 break;
1041 case 0x3:
1042 switch( ir&0xF ) {
1043 case 0x0:
1044 { /* CMP/EQ Rm, Rn */
1045 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1046 load_reg( R_EAX, Rm );
1047 load_reg( R_ECX, Rn );
1048 CMP_r32_r32( R_EAX, R_ECX );
1049 SETE_t();
1050 }
1051 break;
1052 case 0x2:
1053 { /* CMP/HS Rm, Rn */
1054 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1055 load_reg( R_EAX, Rm );
1056 load_reg( R_ECX, Rn );
1057 CMP_r32_r32( R_EAX, R_ECX );
1058 SETAE_t();
1059 }
1060 break;
1061 case 0x3:
1062 { /* CMP/GE Rm, Rn */
1063 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1064 load_reg( R_EAX, Rm );
1065 load_reg( R_ECX, Rn );
1066 CMP_r32_r32( R_EAX, R_ECX );
1067 SETGE_t();
1068 }
1069 break;
1070 case 0x4:
1071 { /* DIV1 Rm, Rn */
1072 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1073 load_spreg( R_ECX, R_M );
1074 load_reg( R_EAX, Rn );
1075 LDC_t();
1076 RCL1_r32( R_EAX );
1077 SETC_r8( R_DL ); // Q'
1078 CMP_sh4r_r32( R_Q, R_ECX );
1079 JE_rel8(5, mqequal);
1080 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
1081 JMP_rel8(3, end);
1082 JMP_TARGET(mqequal);
1083 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
1084 JMP_TARGET(end);
1085 store_reg( R_EAX, Rn ); // Done with Rn now
1086 SETC_r8(R_AL); // tmp1
1087 XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
1088 XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
1089 store_spreg( R_ECX, R_Q );
1090 XOR_imm8s_r32( 1, R_AL ); // T = !Q'
1091 MOVZX_r8_r32( R_AL, R_EAX );
1092 store_spreg( R_EAX, R_T );
1093 }
1094 break;
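/*
 * DIV1 performs one step of the SH4's 1-bit non-restoring division,
 * roughly: the bit rotated out of Rn (via RCL through T) becomes the
 * provisional new Q, Rm is subtracted when the old Q equals M and added
 * otherwise, and the carry, the rotated-out bit and M are XOR-folded
 * into the new Q and T (see the inline comments). Repeated DIV1 steps
 * (normally 16 or 32, preceded by DIV0S/DIV0U) generate the quotient
 * one bit at a time.
 */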
1095 case 0x5:
1096 { /* DMULU.L Rm, Rn */
1097 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1098 load_reg( R_EAX, Rm );
1099 load_reg( R_ECX, Rn );
1100 MUL_r32(R_ECX);
1101 store_spreg( R_EDX, R_MACH );
1102 store_spreg( R_EAX, R_MACL );
1103 }
1104 break;
1105 case 0x6:
1106 { /* CMP/HI Rm, Rn */
1107 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1108 load_reg( R_EAX, Rm );
1109 load_reg( R_ECX, Rn );
1110 CMP_r32_r32( R_EAX, R_ECX );
1111 SETA_t();
1112 }
1113 break;
1114 case 0x7:
1115 { /* CMP/GT Rm, Rn */
1116 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1117 load_reg( R_EAX, Rm );
1118 load_reg( R_ECX, Rn );
1119 CMP_r32_r32( R_EAX, R_ECX );
1120 SETG_t();
1121 }
1122 break;
1123 case 0x8:
1124 { /* SUB Rm, Rn */
1125 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1126 load_reg( R_EAX, Rm );
1127 load_reg( R_ECX, Rn );
1128 SUB_r32_r32( R_EAX, R_ECX );
1129 store_reg( R_ECX, Rn );
1130 }
1131 break;
1132 case 0xA:
1133 { /* SUBC Rm, Rn */
1134 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1135 load_reg( R_EAX, Rm );
1136 load_reg( R_ECX, Rn );
1137 LDC_t();
1138 SBB_r32_r32( R_EAX, R_ECX );
1139 store_reg( R_ECX, Rn );
1140 }
1141 break;
1142 case 0xB:
1143 { /* SUBV Rm, Rn */
1144 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1145 load_reg( R_EAX, Rm );
1146 load_reg( R_ECX, Rn );
1147 SUB_r32_r32( R_EAX, R_ECX );
1148 store_reg( R_ECX, Rn );
1149 SETO_t();
1150 }
1151 break;
1152 case 0xC:
1153 { /* ADD Rm, Rn */
1154 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1155 load_reg( R_EAX, Rm );
1156 load_reg( R_ECX, Rn );
1157 ADD_r32_r32( R_EAX, R_ECX );
1158 store_reg( R_ECX, Rn );
1159 }
1160 break;
1161 case 0xD:
1162 { /* DMULS.L Rm, Rn */
1163 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1164 load_reg( R_EAX, Rm );
1165 load_reg( R_ECX, Rn );
1166 IMUL_r32(R_ECX);
1167 store_spreg( R_EDX, R_MACH );
1168 store_spreg( R_EAX, R_MACL );
1169 }
1170 break;
1171 case 0xE:
1172 { /* ADDC Rm, Rn */
1173 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1174 load_reg( R_EAX, Rm );
1175 load_reg( R_ECX, Rn );
1176 LDC_t();
1177 ADC_r32_r32( R_EAX, R_ECX );
1178 store_reg( R_ECX, Rn );
1179 SETC_t();
1180 }
1181 break;
1182 case 0xF:
1183 { /* ADDV Rm, Rn */
1184 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1185 load_reg( R_EAX, Rm );
1186 load_reg( R_ECX, Rn );
1187 ADD_r32_r32( R_EAX, R_ECX );
1188 store_reg( R_ECX, Rn );
1189 SETO_t();
1190 }
1191 break;
1192 default:
1193 UNDEF();
1194 break;
1195 }
1196 break;
1197 case 0x4:
1198 switch( ir&0xF ) {
1199 case 0x0:
1200 switch( (ir&0xF0) >> 4 ) {
1201 case 0x0:
1202 { /* SHLL Rn */
1203 uint32_t Rn = ((ir>>8)&0xF);
1204 load_reg( R_EAX, Rn );
1205 SHL1_r32( R_EAX );
1206 store_reg( R_EAX, Rn );
1207 }
1208 break;
1209 case 0x1:
1210 { /* DT Rn */
1211 uint32_t Rn = ((ir>>8)&0xF);
1212 load_reg( R_EAX, Rn );
1213 ADD_imm8s_r32( -1, R_EAX );
1214 store_reg( R_EAX, Rn );
1215 SETE_t();
1216 }
1217 break;
1218 case 0x2:
1219 { /* SHAL Rn */
1220 uint32_t Rn = ((ir>>8)&0xF);
1221 load_reg( R_EAX, Rn );
1222 SHL1_r32( R_EAX );
1223 store_reg( R_EAX, Rn );
1224 }
1225 break;
1226 default:
1227 UNDEF();
1228 break;
1229 }
1230 break;
1231 case 0x1:
1232 switch( (ir&0xF0) >> 4 ) {
1233 case 0x0:
1234 { /* SHLR Rn */
1235 uint32_t Rn = ((ir>>8)&0xF);
1236 load_reg( R_EAX, Rn );
1237 SHR1_r32( R_EAX );
1238 store_reg( R_EAX, Rn );
1239 }
1240 break;
1241 case 0x1:
1242 { /* CMP/PZ Rn */
1243 uint32_t Rn = ((ir>>8)&0xF);
1244 load_reg( R_EAX, Rn );
1245 CMP_imm8s_r32( 0, R_EAX );
1246 SETGE_t();
1247 }
1248 break;
1249 case 0x2:
1250 { /* SHAR Rn */
1251 uint32_t Rn = ((ir>>8)&0xF);
1252 load_reg( R_EAX, Rn );
1253 SAR1_r32( R_EAX );
1254 store_reg( R_EAX, Rn );
1255 }
1256 break;
1257 default:
1258 UNDEF();
1259 break;
1260 }
1261 break;
1262 case 0x2:
1263 switch( (ir&0xF0) >> 4 ) {
1264 case 0x0:
1265 { /* STS.L MACH, @-Rn */
1266 uint32_t Rn = ((ir>>8)&0xF);
1267 load_reg( R_ECX, Rn );
1268 ADD_imm8s_r32( -4, R_ECX );
1269 store_reg( R_ECX, Rn );
1270 load_spreg( R_EAX, R_MACH );
1271 MEM_WRITE_LONG( R_ECX, R_EAX );
1272 }
1273 break;
1274 case 0x1:
1275 { /* STS.L MACL, @-Rn */
1276 uint32_t Rn = ((ir>>8)&0xF);
1277 load_reg( R_ECX, Rn );
1278 ADD_imm8s_r32( -4, R_ECX );
1279 store_reg( R_ECX, Rn );
1280 load_spreg( R_EAX, R_MACL );
1281 MEM_WRITE_LONG( R_ECX, R_EAX );
1282 }
1283 break;
1284 case 0x2:
1285 { /* STS.L PR, @-Rn */
1286 uint32_t Rn = ((ir>>8)&0xF);
1287 load_reg( R_ECX, Rn );
1288 ADD_imm8s_r32( -4, R_ECX );
1289 store_reg( R_ECX, Rn );
1290 load_spreg( R_EAX, R_PR );
1291 MEM_WRITE_LONG( R_ECX, R_EAX );
1292 }
1293 break;
1294 case 0x3:
1295 { /* STC.L SGR, @-Rn */
1296 uint32_t Rn = ((ir>>8)&0xF);
1297 check_priv();
1298 load_reg( R_ECX, Rn );
1299 ADD_imm8s_r32( -4, R_ECX );
1300 store_reg( R_ECX, Rn );
1301 load_spreg( R_EAX, R_SGR );
1302 MEM_WRITE_LONG( R_ECX, R_EAX );
1303 }
1304 break;
1305 case 0x5:
1306 { /* STS.L FPUL, @-Rn */
1307 uint32_t Rn = ((ir>>8)&0xF);
1308 load_reg( R_ECX, Rn );
1309 ADD_imm8s_r32( -4, R_ECX );
1310 store_reg( R_ECX, Rn );
1311 load_spreg( R_EAX, R_FPUL );
1312 MEM_WRITE_LONG( R_ECX, R_EAX );
1313 }
1314 break;
1315 case 0x6:
1316 { /* STS.L FPSCR, @-Rn */
1317 uint32_t Rn = ((ir>>8)&0xF);
1318 load_reg( R_ECX, Rn );
1319 ADD_imm8s_r32( -4, R_ECX );
1320 store_reg( R_ECX, Rn );
1321 load_spreg( R_EAX, R_FPSCR );
1322 MEM_WRITE_LONG( R_ECX, R_EAX );
1323 }
1324 break;
1325 case 0xF:
1326 { /* STC.L DBR, @-Rn */
1327 uint32_t Rn = ((ir>>8)&0xF);
1328 check_priv();
1329 load_reg( R_ECX, Rn );
1330 ADD_imm8s_r32( -4, R_ECX );
1331 store_reg( R_ECX, Rn );
1332 load_spreg( R_EAX, R_DBR );
1333 MEM_WRITE_LONG( R_ECX, R_EAX );
1334 }
1335 break;
1336 default:
1337 UNDEF();
1338 break;
1339 }
1340 break;
1341 case 0x3:
1342 switch( (ir&0x80) >> 7 ) {
1343 case 0x0:
1344 switch( (ir&0x70) >> 4 ) {
1345 case 0x0:
1346 { /* STC.L SR, @-Rn */
1347 uint32_t Rn = ((ir>>8)&0xF);
1348 check_priv();
1349 load_reg( R_ECX, Rn );
1350 ADD_imm8s_r32( -4, R_ECX );
1351 store_reg( R_ECX, Rn );
1352 call_func0( sh4_read_sr );
1353 MEM_WRITE_LONG( R_ECX, R_EAX );
1354 }
1355 break;
1356 case 0x1:
1357 { /* STC.L GBR, @-Rn */
1358 uint32_t Rn = ((ir>>8)&0xF);
1359 load_reg( R_ECX, Rn );
1360 ADD_imm8s_r32( -4, R_ECX );
1361 store_reg( R_ECX, Rn );
1362 load_spreg( R_EAX, R_GBR );
1363 MEM_WRITE_LONG( R_ECX, R_EAX );
1364 }
1365 break;
1366 case 0x2:
1367 { /* STC.L VBR, @-Rn */
1368 uint32_t Rn = ((ir>>8)&0xF);
1369 check_priv();
1370 load_reg( R_ECX, Rn );
1371 ADD_imm8s_r32( -4, R_ECX );
1372 store_reg( R_ECX, Rn );
1373 load_spreg( R_EAX, R_VBR );
1374 MEM_WRITE_LONG( R_ECX, R_EAX );
1375 }
1376 break;
1377 case 0x3:
1378 { /* STC.L SSR, @-Rn */
1379 uint32_t Rn = ((ir>>8)&0xF);
1380 check_priv();
1381 load_reg( R_ECX, Rn );
1382 ADD_imm8s_r32( -4, R_ECX );
1383 store_reg( R_ECX, Rn );
1384 load_spreg( R_EAX, R_SSR );
1385 MEM_WRITE_LONG( R_ECX, R_EAX );
1386 }
1387 break;
1388 case 0x4:
1389 { /* STC.L SPC, @-Rn */
1390 uint32_t Rn = ((ir>>8)&0xF);
1391 check_priv();
1392 load_reg( R_ECX, Rn );
1393 ADD_imm8s_r32( -4, R_ECX );
1394 store_reg( R_ECX, Rn );
1395 load_spreg( R_EAX, R_SPC );
1396 MEM_WRITE_LONG( R_ECX, R_EAX );
1397 }
1398 break;
1399 default:
1400 UNDEF();
1401 break;
1402 }
1403 break;
1404 case 0x1:
1405 { /* STC.L Rm_BANK, @-Rn */
1406 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1407 check_priv();
1408 load_reg( R_ECX, Rn );
1409 ADD_imm8s_r32( -4, R_ECX );
1410 store_reg( R_ECX, Rn );
1411 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1412 MEM_WRITE_LONG( R_ECX, R_EAX );
1413 }
1414 break;
1415 }
1416 break;
1417 case 0x4:
1418 switch( (ir&0xF0) >> 4 ) {
1419 case 0x0:
1420 { /* ROTL Rn */
1421 uint32_t Rn = ((ir>>8)&0xF);
1422 load_reg( R_EAX, Rn );
1423 ROL1_r32( R_EAX );
1424 store_reg( R_EAX, Rn );
1425 SETC_t();
1426 }
1427 break;
1428 case 0x2:
1429 { /* ROTCL Rn */
1430 uint32_t Rn = ((ir>>8)&0xF);
1431 load_reg( R_EAX, Rn );
1432 LDC_t();
1433 RCL1_r32( R_EAX );
1434 store_reg( R_EAX, Rn );
1435 SETC_t();
1436 }
1437 break;
1438 default:
1439 UNDEF();
1440 break;
1441 }
1442 break;
1443 case 0x5:
1444 switch( (ir&0xF0) >> 4 ) {
1445 case 0x0:
1446 { /* ROTR Rn */
1447 uint32_t Rn = ((ir>>8)&0xF);
1448 load_reg( R_EAX, Rn );
1449 ROR1_r32( R_EAX );
1450 store_reg( R_EAX, Rn );
1451 SETC_t();
1452 }
1453 break;
1454 case 0x1:
1455 { /* CMP/PL Rn */
1456 uint32_t Rn = ((ir>>8)&0xF);
1457 load_reg( R_EAX, Rn );
1458 CMP_imm8s_r32( 0, R_EAX );
1459 SETG_t();
1460 }
1461 break;
1462 case 0x2:
1463 { /* ROTCR Rn */
1464 uint32_t Rn = ((ir>>8)&0xF);
1465 load_reg( R_EAX, Rn );
1466 LDC_t();
1467 RCR1_r32( R_EAX );
1468 store_reg( R_EAX, Rn );
1469 SETC_t();
1470 }
1471 break;
1472 default:
1473 UNDEF();
1474 break;
1475 }
1476 break;
1477 case 0x6:
1478 switch( (ir&0xF0) >> 4 ) {
1479 case 0x0:
1480 { /* LDS.L @Rm+, MACH */
1481 uint32_t Rm = ((ir>>8)&0xF);
1482 load_reg( R_EAX, Rm );
1483 MOV_r32_r32( R_EAX, R_ECX );
1484 ADD_imm8s_r32( 4, R_EAX );
1485 store_reg( R_EAX, Rm );
1486 MEM_READ_LONG( R_ECX, R_EAX );
1487 store_spreg( R_EAX, R_MACH );
1488 }
1489 break;
1490 case 0x1:
1491 { /* LDS.L @Rm+, MACL */
1492 uint32_t Rm = ((ir>>8)&0xF);
1493 load_reg( R_EAX, Rm );
1494 MOV_r32_r32( R_EAX, R_ECX );
1495 ADD_imm8s_r32( 4, R_EAX );
1496 store_reg( R_EAX, Rm );
1497 MEM_READ_LONG( R_ECX, R_EAX );
1498 store_spreg( R_EAX, R_MACL );
1499 }
1500 break;
1501 case 0x2:
1502 { /* LDS.L @Rm+, PR */
1503 uint32_t Rm = ((ir>>8)&0xF);
1504 load_reg( R_EAX, Rm );
1505 MOV_r32_r32( R_EAX, R_ECX );
1506 ADD_imm8s_r32( 4, R_EAX );
1507 store_reg( R_EAX, Rm );
1508 MEM_READ_LONG( R_ECX, R_EAX );
1509 store_spreg( R_EAX, R_PR );
1510 }
1511 break;
1512 case 0x3:
1513 { /* LDC.L @Rm+, SGR */
1514 uint32_t Rm = ((ir>>8)&0xF);
1515 check_priv();
1516 load_reg( R_EAX, Rm );
1517 MOV_r32_r32( R_EAX, R_ECX );
1518 ADD_imm8s_r32( 4, R_EAX );
1519 store_reg( R_EAX, Rm );
1520 MEM_READ_LONG( R_ECX, R_EAX );
1521 store_spreg( R_EAX, R_SGR );
1522 }
1523 break;
1524 case 0x5:
1525 { /* LDS.L @Rm+, FPUL */
1526 uint32_t Rm = ((ir>>8)&0xF);
1527 load_reg( R_EAX, Rm );
1528 MOV_r32_r32( R_EAX, R_ECX );
1529 ADD_imm8s_r32( 4, R_EAX );
1530 store_reg( R_EAX, Rm );
1531 MEM_READ_LONG( R_ECX, R_EAX );
1532 store_spreg( R_EAX, R_FPUL );
1533 }
1534 break;
1535 case 0x6:
1536 { /* LDS.L @Rm+, FPSCR */
1537 uint32_t Rm = ((ir>>8)&0xF);
1538 load_reg( R_EAX, Rm );
1539 MOV_r32_r32( R_EAX, R_ECX );
1540 ADD_imm8s_r32( 4, R_EAX );
1541 store_reg( R_EAX, Rm );
1542 MEM_READ_LONG( R_ECX, R_EAX );
1543 store_spreg( R_EAX, R_FPSCR );
1544 update_fr_bank( R_EAX );
1545 }
1546 break;
1547 case 0xF:
1548 { /* LDC.L @Rm+, DBR */
1549 uint32_t Rm = ((ir>>8)&0xF);
1550 check_priv();
1551 load_reg( R_EAX, Rm );
1552 MOV_r32_r32( R_EAX, R_ECX );
1553 ADD_imm8s_r32( 4, R_EAX );
1554 store_reg( R_EAX, Rm );
1555 MEM_READ_LONG( R_ECX, R_EAX );
1556 store_spreg( R_EAX, R_DBR );
1557 }
1558 break;
1559 default:
1560 UNDEF();
1561 break;
1562 }
1563 break;
1564 case 0x7:
1565 switch( (ir&0x80) >> 7 ) {
1566 case 0x0:
1567 switch( (ir&0x70) >> 4 ) {
1568 case 0x0:
1569 { /* LDC.L @Rm+, SR */
1570 uint32_t Rm = ((ir>>8)&0xF);
1571 if( sh4_x86.in_delay_slot ) {
1572 SLOTILLEGAL();
1573 } else {
1574 check_priv();
1575 load_reg( R_EAX, Rm );
1576 MOV_r32_r32( R_EAX, R_ECX );
1577 ADD_imm8s_r32( 4, R_EAX );
1578 store_reg( R_EAX, Rm );
1579 MEM_READ_LONG( R_ECX, R_EAX );
1580 call_func1( sh4_write_sr, R_EAX );
1581 sh4_x86.priv_checked = FALSE;
1582 sh4_x86.fpuen_checked = FALSE;
1583 }
1584 }
1585 break;
1586 case 0x1:
1587 { /* LDC.L @Rm+, GBR */
1588 uint32_t Rm = ((ir>>8)&0xF);
1589 load_reg( R_EAX, Rm );
1590 MOV_r32_r32( R_EAX, R_ECX );
1591 ADD_imm8s_r32( 4, R_EAX );
1592 store_reg( R_EAX, Rm );
1593 MEM_READ_LONG( R_ECX, R_EAX );
1594 store_spreg( R_EAX, R_GBR );
1595 }
1596 break;
1597 case 0x2:
1598 { /* LDC.L @Rm+, VBR */
1599 uint32_t Rm = ((ir>>8)&0xF);
1600 check_priv();
1601 load_reg( R_EAX, Rm );
1602 MOV_r32_r32( R_EAX, R_ECX );
1603 ADD_imm8s_r32( 4, R_EAX );
1604 store_reg( R_EAX, Rm );
1605 MEM_READ_LONG( R_ECX, R_EAX );
1606 store_spreg( R_EAX, R_VBR );
1607 }
1608 break;
1609 case 0x3:
1610 { /* LDC.L @Rm+, SSR */
1611 uint32_t Rm = ((ir>>8)&0xF);
1612 check_priv();
1613 load_reg( R_EAX, Rm );
1614 MOV_r32_r32( R_EAX, R_ECX );
1615 ADD_imm8s_r32( 4, R_EAX );
1616 store_reg( R_EAX, Rm );
1617 MEM_READ_LONG( R_ECX, R_EAX );
1618 store_spreg( R_EAX, R_SSR );
1619 }
1620 break;
1621 case 0x4:
1622 { /* LDC.L @Rm+, SPC */
1623 uint32_t Rm = ((ir>>8)&0xF);
1624 check_priv();
1625 load_reg( R_EAX, Rm );
1626 MOV_r32_r32( R_EAX, R_ECX );
1627 ADD_imm8s_r32( 4, R_EAX );
1628 store_reg( R_EAX, Rm );
1629 MEM_READ_LONG( R_ECX, R_EAX );
1630 store_spreg( R_EAX, R_SPC );
1631 }
1632 break;
1633 default:
1634 UNDEF();
1635 break;
1636 }
1637 break;
1638 case 0x1:
1639 { /* LDC.L @Rm+, Rn_BANK */
1640 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1641 check_priv();
1642 load_reg( R_EAX, Rm );
1643 MOV_r32_r32( R_EAX, R_ECX );
1644 ADD_imm8s_r32( 4, R_EAX );
1645 store_reg( R_EAX, Rm );
1646 MEM_READ_LONG( R_ECX, R_EAX );
1647 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1648 }
1649 break;
1650 }
1651 break;
1652 case 0x8:
1653 switch( (ir&0xF0) >> 4 ) {
1654 case 0x0:
1655 { /* SHLL2 Rn */
1656 uint32_t Rn = ((ir>>8)&0xF);
1657 load_reg( R_EAX, Rn );
1658 SHL_imm8_r32( 2, R_EAX );
1659 store_reg( R_EAX, Rn );
1660 }
1661 break;
1662 case 0x1:
1663 { /* SHLL8 Rn */
1664 uint32_t Rn = ((ir>>8)&0xF);
1665 load_reg( R_EAX, Rn );
1666 SHL_imm8_r32( 8, R_EAX );
1667 store_reg( R_EAX, Rn );
1668 }
1669 break;
1670 case 0x2:
1671 { /* SHLL16 Rn */
1672 uint32_t Rn = ((ir>>8)&0xF);
1673 load_reg( R_EAX, Rn );
1674 SHL_imm8_r32( 16, R_EAX );
1675 store_reg( R_EAX, Rn );
1676 }
1677 break;
1678 default:
1679 UNDEF();
1680 break;
1681 }
1682 break;
1683 case 0x9:
1684 switch( (ir&0xF0) >> 4 ) {
1685 case 0x0:
1686 { /* SHLR2 Rn */
1687 uint32_t Rn = ((ir>>8)&0xF);
1688 load_reg( R_EAX, Rn );
1689 SHR_imm8_r32( 2, R_EAX );
1690 store_reg( R_EAX, Rn );
1691 }
1692 break;
1693 case 0x1:
1694 { /* SHLR8 Rn */
1695 uint32_t Rn = ((ir>>8)&0xF);
1696 load_reg( R_EAX, Rn );
1697 SHR_imm8_r32( 8, R_EAX );
1698 store_reg( R_EAX, Rn );
1699 }
1700 break;
1701 case 0x2:
1702 { /* SHLR16 Rn */
1703 uint32_t Rn = ((ir>>8)&0xF);
1704 load_reg( R_EAX, Rn );
1705 SHR_imm8_r32( 16, R_EAX );
1706 store_reg( R_EAX, Rn );
1707 }
1708 break;
1709 default:
1710 UNDEF();
1711 break;
1712 }
1713 break;
1714 case 0xA:
1715 switch( (ir&0xF0) >> 4 ) {
1716 case 0x0:
1717 { /* LDS Rm, MACH */
1718 uint32_t Rm = ((ir>>8)&0xF);
1719 load_reg( R_EAX, Rm );
1720 store_spreg( R_EAX, R_MACH );
1721 }
1722 break;
1723 case 0x1:
1724 { /* LDS Rm, MACL */
1725 uint32_t Rm = ((ir>>8)&0xF);
1726 load_reg( R_EAX, Rm );
1727 store_spreg( R_EAX, R_MACL );
1728 }
1729 break;
1730 case 0x2:
1731 { /* LDS Rm, PR */
1732 uint32_t Rm = ((ir>>8)&0xF);
1733 load_reg( R_EAX, Rm );
1734 store_spreg( R_EAX, R_PR );
1735 }
1736 break;
1737 case 0x3:
1738 { /* LDC Rm, SGR */
1739 uint32_t Rm = ((ir>>8)&0xF);
1740 check_priv();
1741 load_reg( R_EAX, Rm );
1742 store_spreg( R_EAX, R_SGR );
1743 }
1744 break;
1745 case 0x5:
1746 { /* LDS Rm, FPUL */
1747 uint32_t Rm = ((ir>>8)&0xF);
1748 load_reg( R_EAX, Rm );
1749 store_spreg( R_EAX, R_FPUL );
1750 }
1751 break;
1752 case 0x6:
1753 { /* LDS Rm, FPSCR */
1754 uint32_t Rm = ((ir>>8)&0xF);
1755 load_reg( R_EAX, Rm );
1756 store_spreg( R_EAX, R_FPSCR );
1757 update_fr_bank( R_EAX );
1758 }
1759 break;
1760 case 0xF:
1761 { /* LDC Rm, DBR */
1762 uint32_t Rm = ((ir>>8)&0xF);
1763 check_priv();
1764 load_reg( R_EAX, Rm );
1765 store_spreg( R_EAX, R_DBR );
1766 }
1767 break;
1768 default:
1769 UNDEF();
1770 break;
1771 }
1772 break;
1773 case 0xB:
1774 switch( (ir&0xF0) >> 4 ) {
1775 case 0x0:
1776 { /* JSR @Rn */
1777 uint32_t Rn = ((ir>>8)&0xF);
1778 if( sh4_x86.in_delay_slot ) {
1779 SLOTILLEGAL();
1780 } else {
1781 load_imm32( R_EAX, pc + 4 );
1782 store_spreg( R_EAX, R_PR );
1783 load_reg( R_EDI, Rn );
1784 sh4_x86.in_delay_slot = TRUE;
1785 return 0;
1786 }
1787 }
1788 break;
1789 case 0x1:
1790 { /* TAS.B @Rn */
1791 uint32_t Rn = ((ir>>8)&0xF);
1792 load_reg( R_ECX, Rn );
1793 MEM_READ_BYTE( R_ECX, R_EAX );
1794 TEST_r8_r8( R_AL, R_AL );
1795 SETE_t();
1796 OR_imm8_r8( 0x80, R_AL );
1797 load_reg( R_ECX, Rn );
1798 MEM_WRITE_BYTE( R_ECX, R_EAX );
1799 }
1800 break;
1801 case 0x2:
1802 { /* JMP @Rn */
1803 uint32_t Rn = ((ir>>8)&0xF);
1804 if( sh4_x86.in_delay_slot ) {
1805 SLOTILLEGAL();
1806 } else {
1807 load_reg( R_EDI, Rn );
1808 sh4_x86.in_delay_slot = TRUE;
1809 return 0;
1810 }
1811 }
1812 break;
1813 default:
1814 UNDEF();
1815 break;
1816 }
1817 break;
1818 case 0xC:
1819 { /* SHAD Rm, Rn */
1820 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1821 /* Annoyingly enough, not directly convertible */
1822 load_reg( R_EAX, Rn );
1823 load_reg( R_ECX, Rm );
1824 CMP_imm32_r32( 0, R_ECX );
1825 JGE_rel8(16, doshl);
1827 NEG_r32( R_ECX ); // 2
1828 AND_imm8_r8( 0x1F, R_CL ); // 3
1829 JE_rel8( 4, emptysar); // 2
1830 SAR_r32_CL( R_EAX ); // 2
1831 JMP_rel8(10, end); // 2
1833 JMP_TARGET(emptysar);
1834 SAR_imm8_r32(31, R_EAX ); // 3
1835 JMP_rel8(5, end2);
1837 JMP_TARGET(doshl);
1838 AND_imm8_r8( 0x1F, R_CL ); // 3
1839 SHL_r32_CL( R_EAX ); // 2
1840 JMP_TARGET(end);
1841 JMP_TARGET(end2);
1842 store_reg( R_EAX, Rn );
1843 }
1844 break;
1845 case 0xD:
1846 { /* SHLD Rm, Rn */
1847 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1848 load_reg( R_EAX, Rn );
1849 load_reg( R_ECX, Rm );
1850 CMP_imm32_r32( 0, R_ECX );
1851 JGE_rel8(15, doshl);
1853 NEG_r32( R_ECX ); // 2
1854 AND_imm8_r8( 0x1F, R_CL ); // 3
1855 JE_rel8( 4, emptyshr );
1856 SHR_r32_CL( R_EAX ); // 2
1857 JMP_rel8(9, end); // 2
1859 JMP_TARGET(emptyshr);
1860 XOR_r32_r32( R_EAX, R_EAX );
1861 JMP_rel8(5, end2);
1863 JMP_TARGET(doshl);
1864 AND_imm8_r8( 0x1F, R_CL ); // 3
1865 SHL_r32_CL( R_EAX ); // 2
1866 JMP_TARGET(end);
1867 JMP_TARGET(end2);
1868 store_reg( R_EAX, Rn );
1869 }
1870 break;
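/*
 * SHAD/SHLD take their shift count from Rm at runtime: a non-negative Rm
 * shifts Rn left by (Rm & 31); a negative Rm shifts right by 32 - (Rm & 31),
 * arithmetically for SHAD and logically for SHLD. The emptysar/emptyshr
 * paths above handle the (Rm & 31) == 0 case of a negative count, which the
 * SH4 defines as a full 32-bit shift (sign fill for SHAD, zero for SHLD)
 * and which an x86 shift by CL cannot express directly.
 */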
1871 case 0xE:
1872 switch( (ir&0x80) >> 7 ) {
1873 case 0x0:
1874 switch( (ir&0x70) >> 4 ) {
1875 case 0x0:
1876 { /* LDC Rm, SR */
1877 uint32_t Rm = ((ir>>8)&0xF);
1878 if( sh4_x86.in_delay_slot ) {
1879 SLOTILLEGAL();
1880 } else {
1881 check_priv();
1882 load_reg( R_EAX, Rm );
1883 call_func1( sh4_write_sr, R_EAX );
1884 sh4_x86.priv_checked = FALSE;
1885 sh4_x86.fpuen_checked = FALSE;
1886 }
1887 }
1888 break;
1889 case 0x1:
1890 { /* LDC Rm, GBR */
1891 uint32_t Rm = ((ir>>8)&0xF);
1892 load_reg( R_EAX, Rm );
1893 store_spreg( R_EAX, R_GBR );
1894 }
1895 break;
1896 case 0x2:
1897 { /* LDC Rm, VBR */
1898 uint32_t Rm = ((ir>>8)&0xF);
1899 check_priv();
1900 load_reg( R_EAX, Rm );
1901 store_spreg( R_EAX, R_VBR );
1902 }
1903 break;
1904 case 0x3:
1905 { /* LDC Rm, SSR */
1906 uint32_t Rm = ((ir>>8)&0xF);
1907 check_priv();
1908 load_reg( R_EAX, Rm );
1909 store_spreg( R_EAX, R_SSR );
1910 }
1911 break;
1912 case 0x4:
1913 { /* LDC Rm, SPC */
1914 uint32_t Rm = ((ir>>8)&0xF);
1915 check_priv();
1916 load_reg( R_EAX, Rm );
1917 store_spreg( R_EAX, R_SPC );
1918 }
1919 break;
1920 default:
1921 UNDEF();
1922 break;
1923 }
1924 break;
1925 case 0x1:
1926 { /* LDC Rm, Rn_BANK */
1927 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1928 check_priv();
1929 load_reg( R_EAX, Rm );
1930 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1931 }
1932 break;
1933 }
1934 break;
1935 case 0xF:
1936 { /* MAC.W @Rm+, @Rn+ */
1937 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1938 load_reg( R_ECX, Rm );
1939 check_ralign16( R_ECX );
1940 load_reg( R_ECX, Rn );
1941 check_ralign16( R_ECX );
1942 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
1943 MEM_READ_WORD( R_ECX, R_EAX );
1944 PUSH_r32( R_EAX );
1945 load_reg( R_ECX, Rm );
1946 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
1947 MEM_READ_WORD( R_ECX, R_EAX );
1948 POP_r32( R_ECX );
1949 IMUL_r32( R_ECX );
1951 load_spreg( R_ECX, R_S );
1952 TEST_r32_r32( R_ECX, R_ECX );
1953 JE_rel8( 47, nosat );
1955 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
1956 JNO_rel8( 51, end ); // 2
1957 load_imm32( R_EDX, 1 ); // 5
1958 store_spreg( R_EDX, R_MACH ); // 6
1959 JS_rel8( 13, positive ); // 2
1960 load_imm32( R_EAX, 0x80000000 );// 5
1961 store_spreg( R_EAX, R_MACL ); // 6
1962 JMP_rel8( 25, end2 ); // 2
1964 JMP_TARGET(positive);
1965 load_imm32( R_EAX, 0x7FFFFFFF );// 5
1966 store_spreg( R_EAX, R_MACL ); // 6
1967 JMP_rel8( 12, end3); // 2
1969 JMP_TARGET(nosat);
1970 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
1971 ADC_r32_sh4r( R_EDX, R_MACH ); // 6
1972 JMP_TARGET(end);
1973 JMP_TARGET(end2);
1974 JMP_TARGET(end3);
1975 }
1976 break;
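/*
 * MAC.W: the signed 16x16 product is accumulated into the 64-bit
 * MACH:MACL pair when S is clear; with S set only MACL is accumulated,
 * and on overflow it saturates to 0x80000000/0x7FFFFFFF with MACH set
 * to 1 as the overflow flag, matching the SH4 definition.
 */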
1977 }
1978 break;
1979 case 0x5:
1980 { /* MOV.L @(disp, Rm), Rn */
1981 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
1982 load_reg( R_ECX, Rm );
1983 ADD_imm8s_r32( disp, R_ECX );
1984 check_ralign32( R_ECX );
1985 MEM_READ_LONG( R_ECX, R_EAX );
1986 store_reg( R_EAX, Rn );
1987 }
1988 break;
1989 case 0x6:
1990 switch( ir&0xF ) {
1991 case 0x0:
1992 { /* MOV.B @Rm, Rn */
1993 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1994 load_reg( R_ECX, Rm );
1995 MEM_READ_BYTE( R_ECX, R_EAX );
1996 store_reg( R_EAX, Rn );
1997 }
1998 break;
1999 case 0x1:
2000 { /* MOV.W @Rm, Rn */
2001 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2002 load_reg( R_ECX, Rm );
2003 check_ralign16( R_ECX );
2004 MEM_READ_WORD( R_ECX, R_EAX );
2005 store_reg( R_EAX, Rn );
2006 }
2007 break;
2008 case 0x2:
2009 { /* MOV.L @Rm, Rn */
2010 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2011 load_reg( R_ECX, Rm );
2012 check_ralign32( R_ECX );
2013 MEM_READ_LONG( R_ECX, R_EAX );
2014 store_reg( R_EAX, Rn );
2015 }
2016 break;
2017 case 0x3:
2018 { /* MOV Rm, Rn */
2019 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2020 load_reg( R_EAX, Rm );
2021 store_reg( R_EAX, Rn );
2022 }
2023 break;
2024 case 0x4:
2025 { /* MOV.B @Rm+, Rn */
2026 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2027 load_reg( R_ECX, Rm );
2028 MOV_r32_r32( R_ECX, R_EAX );
2029 ADD_imm8s_r32( 1, R_EAX );
2030 store_reg( R_EAX, Rm );
2031 MEM_READ_BYTE( R_ECX, R_EAX );
2032 store_reg( R_EAX, Rn );
2033 }
2034 break;
2035 case 0x5:
2036 { /* MOV.W @Rm+, Rn */
2037 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2038 load_reg( R_EAX, Rm );
2039 check_ralign16( R_EAX );
2040 MOV_r32_r32( R_EAX, R_ECX );
2041 ADD_imm8s_r32( 2, R_EAX );
2042 store_reg( R_EAX, Rm );
2043 MEM_READ_WORD( R_ECX, R_EAX );
2044 store_reg( R_EAX, Rn );
2045 }
2046 break;
2047 case 0x6:
2048 { /* MOV.L @Rm+, Rn */
2049 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2050 load_reg( R_EAX, Rm );
2051 check_ralign32( R_EAX );
2052 MOV_r32_r32( R_EAX, R_ECX );
2053 ADD_imm8s_r32( 4, R_EAX );
2054 store_reg( R_EAX, Rm );
2055 MEM_READ_LONG( R_ECX, R_EAX );
2056 store_reg( R_EAX, Rn );
2057 }
2058 break;
2059 case 0x7:
2060 { /* NOT Rm, Rn */
2061 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2062 load_reg( R_EAX, Rm );
2063 NOT_r32( R_EAX );
2064 store_reg( R_EAX, Rn );
2065 }
2066 break;
2067 case 0x8:
2068 { /* SWAP.B Rm, Rn */
2069 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2070 load_reg( R_EAX, Rm );
2071 XCHG_r8_r8( R_AL, R_AH );
2072 store_reg( R_EAX, Rn );
2073 }
2074 break;
2075 case 0x9:
2076 { /* SWAP.W Rm, Rn */
2077 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2078 load_reg( R_EAX, Rm );
2079 MOV_r32_r32( R_EAX, R_ECX );
2080 SHL_imm8_r32( 16, R_ECX );
2081 SHR_imm8_r32( 16, R_EAX );
2082 OR_r32_r32( R_EAX, R_ECX );
2083 store_reg( R_ECX, Rn );
2084 }
2085 break;
2086 case 0xA:
2087 { /* NEGC Rm, Rn */
2088 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2089 load_reg( R_EAX, Rm );
2090 XOR_r32_r32( R_ECX, R_ECX );
2091 LDC_t();
2092 SBB_r32_r32( R_EAX, R_ECX );
2093 store_reg( R_ECX, Rn );
2094 SETC_t();
2095 }
2096 break;
2097 case 0xB:
2098 { /* NEG Rm, Rn */
2099 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2100 load_reg( R_EAX, Rm );
2101 NEG_r32( R_EAX );
2102 store_reg( R_EAX, Rn );
2103 }
2104 break;
2105 case 0xC:
2106 { /* EXTU.B Rm, Rn */
2107 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2108 load_reg( R_EAX, Rm );
2109 MOVZX_r8_r32( R_EAX, R_EAX );
2110 store_reg( R_EAX, Rn );
2111 }
2112 break;
2113 case 0xD:
2114 { /* EXTU.W Rm, Rn */
2115 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2116 load_reg( R_EAX, Rm );
2117 MOVZX_r16_r32( R_EAX, R_EAX );
2118 store_reg( R_EAX, Rn );
2119 }
2120 break;
2121 case 0xE:
2122 { /* EXTS.B Rm, Rn */
2123 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2124 load_reg( R_EAX, Rm );
2125 MOVSX_r8_r32( R_EAX, R_EAX );
2126 store_reg( R_EAX, Rn );
2127 }
2128 break;
2129 case 0xF:
2130 { /* EXTS.W Rm, Rn */
2131 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2132 load_reg( R_EAX, Rm );
2133 MOVSX_r16_r32( R_EAX, R_EAX );
2134 store_reg( R_EAX, Rn );
2135 }
2136 break;
2137 }
2138 break;
2139 case 0x7:
2140 { /* ADD #imm, Rn */
2141 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2142 load_reg( R_EAX, Rn );
2143 ADD_imm8s_r32( imm, R_EAX );
2144 store_reg( R_EAX, Rn );
2145 }
2146 break;
2147 case 0x8:
2148 switch( (ir&0xF00) >> 8 ) {
2149 case 0x0:
2150 { /* MOV.B R0, @(disp, Rn) */
2151 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2152 load_reg( R_EAX, 0 );
2153 load_reg( R_ECX, Rn );
2154 ADD_imm32_r32( disp, R_ECX );
2155 MEM_WRITE_BYTE( R_ECX, R_EAX );
2156 }
2157 break;
2158 case 0x1:
2159 { /* MOV.W R0, @(disp, Rn) */
2160 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2161 load_reg( R_ECX, Rn );
2162 load_reg( R_EAX, 0 );
2163 ADD_imm32_r32( disp, R_ECX );
2164 check_walign16( R_ECX );
2165 MEM_WRITE_WORD( R_ECX, R_EAX );
2166 }
2167 break;
2168 case 0x4:
2169 { /* MOV.B @(disp, Rm), R0 */
2170 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2171 load_reg( R_ECX, Rm );
2172 ADD_imm32_r32( disp, R_ECX );
2173 MEM_READ_BYTE( R_ECX, R_EAX );
2174 store_reg( R_EAX, 0 );
2175 }
2176 break;
2177 case 0x5:
2178 { /* MOV.W @(disp, Rm), R0 */
2179 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2180 load_reg( R_ECX, Rm );
2181 ADD_imm32_r32( disp, R_ECX );
2182 check_ralign16( R_ECX );
2183 MEM_READ_WORD( R_ECX, R_EAX );
2184 store_reg( R_EAX, 0 );
2185 }
2186 break;
2187 case 0x8:
2188 { /* CMP/EQ #imm, R0 */
2189 int32_t imm = SIGNEXT8(ir&0xFF);
2190 load_reg( R_EAX, 0 );
2191 CMP_imm8s_r32(imm, R_EAX);
2192 SETE_t();
2193 }
2194 break;
2195 case 0x9:
2196 { /* BT disp */
2197 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2198 if( sh4_x86.in_delay_slot ) {
2199 SLOTILLEGAL();
2200 } else {
2201 load_imm32( R_EDI, pc + 2 );
2202 CMP_imm8s_sh4r( 0, R_T );
2203 JE_rel8( 5, nottaken );
2204 load_imm32( R_EDI, disp + pc + 4 );
2205 JMP_TARGET(nottaken);
2206 INC_r32(R_ESI);
2207 return 1;
2208 }
2209 }
2210 break;
2211 case 0xB:
2212 { /* BF disp */
2213 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2214 if( sh4_x86.in_delay_slot ) {
2215 SLOTILLEGAL();
2216 } else {
2217 load_imm32( R_EDI, pc + 2 );
2218 CMP_imm8s_sh4r( 0, R_T );
2219 JNE_rel8( 5, nottaken );
2220 load_imm32( R_EDI, disp + pc + 4 );
2221 JMP_TARGET(nottaken);
2222 INC_r32(R_ESI);
2223 return 1;
2224 }
2225 }
2226 break;
2227 case 0xD:
2228 { /* BT/S disp */
2229 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2230 if( sh4_x86.in_delay_slot ) {
2231 SLOTILLEGAL();
2232 } else {
2233 load_imm32( R_EDI, pc + 4 );
2234 CMP_imm8s_sh4r( 0, R_T );
2235 JE_rel8( 5, nottaken );
2236 load_imm32( R_EDI, disp + pc + 4 );
2237 JMP_TARGET(nottaken);
2238 sh4_x86.in_delay_slot = TRUE;
2239 return 0;
2240 }
2241 }
2242 break;
2243 case 0xF:
2244 { /* BF/S disp */
2245 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2246 if( sh4_x86.in_delay_slot ) {
2247 SLOTILLEGAL();
2248 } else {
2249 load_imm32( R_EDI, pc + 4 );
2250 CMP_imm8s_sh4r( 0, R_T );
2251 JNE_rel8( 5, nottaken );
2252 load_imm32( R_EDI, disp + pc + 4 );
2253 JMP_TARGET(nottaken);
2254 sh4_x86.in_delay_slot = TRUE;
2255 return 0;
2256 }
2257 }
2258 break;
2259 default:
2260 UNDEF();
2261 break;
2262 }
2263 break;
2264 case 0x9:
2265 { /* MOV.W @(disp, PC), Rn */
2266 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1;
2267 if( sh4_x86.in_delay_slot ) {
2268 SLOTILLEGAL();
2269 } else {
2270 load_imm32( R_ECX, pc + disp + 4 );
2271 MEM_READ_WORD( R_ECX, R_EAX );
2272 store_reg( R_EAX, Rn );
2273 }
2274 }
2275 break;
2276 case 0xA:
2277 { /* BRA disp */
2278 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2279 if( sh4_x86.in_delay_slot ) {
2280 SLOTILLEGAL();
2281 } else {
2282 load_imm32( R_EDI, disp + pc + 4 );
2283 sh4_x86.in_delay_slot = TRUE;
2284 return 0;
2285 }
2286 }
2287 break;
2288 case 0xB:
2289 { /* BSR disp */
2290 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2291 if( sh4_x86.in_delay_slot ) {
2292 SLOTILLEGAL();
2293 } else {
2294 load_imm32( R_EAX, pc + 4 );
2295 store_spreg( R_EAX, R_PR );
2296 load_imm32( R_EDI, disp + pc + 4 );
2297 sh4_x86.in_delay_slot = TRUE;
2298 return 0;
2299 }
2300 }
2301 break;
2302 case 0xC:
2303 switch( (ir&0xF00) >> 8 ) {
2304 case 0x0:
2305 { /* MOV.B R0, @(disp, GBR) */
2306 uint32_t disp = (ir&0xFF);
2307 load_reg( R_EAX, 0 );
2308 load_spreg( R_ECX, R_GBR );
2309 ADD_imm32_r32( disp, R_ECX );
2310 MEM_WRITE_BYTE( R_ECX, R_EAX );
2311 }
2312 break;
2313 case 0x1:
2314 { /* MOV.W R0, @(disp, GBR) */
2315 uint32_t disp = (ir&0xFF)<<1;
2316 load_spreg( R_ECX, R_GBR );
2317 load_reg( R_EAX, 0 );
2318 ADD_imm32_r32( disp, R_ECX );
2319 check_walign16( R_ECX );
2320 MEM_WRITE_WORD( R_ECX, R_EAX );
2321 }
2322 break;
2323 case 0x2:
2324 { /* MOV.L R0, @(disp, GBR) */
2325 uint32_t disp = (ir&0xFF)<<2;
2326 load_spreg( R_ECX, R_GBR );
2327 load_reg( R_EAX, 0 );
2328 ADD_imm32_r32( disp, R_ECX );
2329 check_walign32( R_ECX );
2330 MEM_WRITE_LONG( R_ECX, R_EAX );
2331 }
2332 break;
2333 case 0x3:
2334 { /* TRAPA #imm */
2335 uint32_t imm = (ir&0xFF);
2336 if( sh4_x86.in_delay_slot ) {
2337 SLOTILLEGAL();
2338 } else {
2339 // TODO: write imm<<2 to the TRA register before raising the exception
2340 RAISE_EXCEPTION(EXC_TRAP);
2341 }
2342 }
2343 break;
2344 case 0x4:
2345 { /* MOV.B @(disp, GBR), R0 */
2346 uint32_t disp = (ir&0xFF);
2347 load_spreg( R_ECX, R_GBR );
2348 ADD_imm32_r32( disp, R_ECX );
2349 MEM_READ_BYTE( R_ECX, R_EAX );
2350 store_reg( R_EAX, 0 );
2351 }
2352 break;
2353 case 0x5:
2354 { /* MOV.W @(disp, GBR), R0 */
2355 uint32_t disp = (ir&0xFF)<<1;
2356 load_spreg( R_ECX, R_GBR );
2357 ADD_imm32_r32( disp, R_ECX );
2358 check_ralign16( R_ECX );
2359 MEM_READ_WORD( R_ECX, R_EAX );
2360 store_reg( R_EAX, 0 );
2361 }
2362 break;
2363 case 0x6:
2364 { /* MOV.L @(disp, GBR), R0 */
2365 uint32_t disp = (ir&0xFF)<<2;
2366 load_spreg( R_ECX, R_GBR );
2367 ADD_imm32_r32( disp, R_ECX );
2368 check_ralign32( R_ECX );
2369 MEM_READ_LONG( R_ECX, R_EAX );
2370 store_reg( R_EAX, 0 );
2371 }
2372 break;
2373 case 0x7:
2374 { /* MOVA @(disp, PC), R0 */
2375 uint32_t disp = (ir&0xFF)<<2;
2376 if( sh4_x86.in_delay_slot ) {
2377 SLOTILLEGAL();
2378 } else {
2379 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2380 store_reg( R_ECX, 0 );
2381 }
2382 }
2383 break;
2384 case 0x8:
2385 { /* TST #imm, R0 */
2386 uint32_t imm = (ir&0xFF);
2387 load_reg( R_EAX, 0 );
2388 TEST_imm32_r32( imm, R_EAX );
2389 SETE_t();
2390 }
2391 break;
2392 case 0x9:
2393 { /* AND #imm, R0 */
2394 uint32_t imm = (ir&0xFF);
2395 load_reg( R_EAX, 0 );
2396 AND_imm32_r32(imm, R_EAX);
2397 store_reg( R_EAX, 0 );
2398 }
2399 break;
2400 case 0xA:
2401 { /* XOR #imm, R0 */
2402 uint32_t imm = (ir&0xFF);
2403 load_reg( R_EAX, 0 );
2404 XOR_imm32_r32( imm, R_EAX );
2405 store_reg( R_EAX, 0 );
2406 }
2407 break;
2408 case 0xB:
2409 { /* OR #imm, R0 */
2410 uint32_t imm = (ir&0xFF);
2411 load_reg( R_EAX, 0 );
2412 OR_imm32_r32(imm, R_EAX);
2413 store_reg( R_EAX, 0 );
2414 }
2415 break;
2416 case 0xC:
2417 { /* TST.B #imm, @(R0, GBR) */
2418 uint32_t imm = (ir&0xFF);
2419 load_reg( R_EAX, 0);
2420 load_spreg( R_ECX, R_GBR );
2421 ADD_r32_r32( R_EAX, R_ECX );
2422 MEM_READ_BYTE( R_ECX, R_EAX );
2423 TEST_imm8_r8( imm, R_EAX );
2424 SETE_t();
2425 }
2426 break;
2427 case 0xD:
2428 { /* AND.B #imm, @(R0, GBR) */
2429 uint32_t imm = (ir&0xFF);
2430 load_reg( R_EAX, 0 );
2431 load_spreg( R_ECX, R_GBR );
2432 ADD_r32_r32( R_EAX, R_ECX );
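/* Read-modify-write on @(R0,GBR): the pushed effective address presumably doubles as
 * the (cdecl) argument to sh4_read_byte and is popped back into ECX so the same
 * address can be reused for the write-back. */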
2433 PUSH_r32(R_ECX);
2434 call_func0(sh4_read_byte);
2435 POP_r32(R_ECX);
2436 AND_imm32_r32(imm, R_EAX );
2437 MEM_WRITE_BYTE( R_ECX, R_EAX );
2438 }
2439 break;
2440 case 0xE:
2441 { /* XOR.B #imm, @(R0, GBR) */
2442 uint32_t imm = (ir&0xFF);
2443 load_reg( R_EAX, 0 );
2444 load_spreg( R_ECX, R_GBR );
2445 ADD_r32_r32( R_EAX, R_ECX );
2446 PUSH_r32(R_ECX);
2447 call_func0(sh4_read_byte);
2448 POP_r32(R_ECX);
2449 XOR_imm32_r32( imm, R_EAX );
2450 MEM_WRITE_BYTE( R_ECX, R_EAX );
2451 }
2452 break;
2453 case 0xF:
2454 { /* OR.B #imm, @(R0, GBR) */
2455 uint32_t imm = (ir&0xFF);
2456 load_reg( R_EAX, 0 );
2457 load_spreg( R_ECX, R_GBR );
2458 ADD_r32_r32( R_EAX, R_ECX );
2459 PUSH_r32(R_ECX);
2460 call_func0(sh4_read_byte);
2461 POP_r32(R_ECX);
2462 OR_imm32_r32(imm, R_EAX );
2463 MEM_WRITE_BYTE( R_ECX, R_EAX );
2464 }
2465 break;
2466 }
2467 break;
2468 case 0xD:
2469 { /* MOV.L @(disp, PC), Rn */
2470 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2;
2471 if( sh4_x86.in_delay_slot ) {
2472 SLOTILLEGAL();
2473 } else {
2474 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2475 MEM_READ_LONG( R_ECX, R_EAX );
2476 store_reg( R_EAX, Rn );
2477 }
2478 }
2479 break;
2480 case 0xE:
2481 { /* MOV #imm, Rn */
2482 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2483 load_imm32( R_EAX, imm );
2484 store_reg( R_EAX, Rn );
2485 }
2486 break;
2487 case 0xF:
2488 switch( ir&0xF ) {
2489 case 0x0:
2490 { /* FADD FRm, FRn */
2491 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
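/* FPSCR.PR is tested at runtime: the JNE skips over the single-precision sequence
 * when PR is set; the rel8 values are hand-counted byte lengths of the emitted code. */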
2492 check_fpuen();
2493 load_spreg( R_ECX, R_FPSCR );
2494 TEST_imm32_r32( FPSCR_PR, R_ECX );
2495 load_fr_bank( R_EDX );
2496 JNE_rel8(13,doubleprec);
2497 push_fr(R_EDX, FRm);
2498 push_fr(R_EDX, FRn);
2499 FADDP_st(1);
2500 pop_fr(R_EDX, FRn);
2501 JMP_rel8(11,end);
2502 JMP_TARGET(doubleprec);
2503 push_dr(R_EDX, FRm);
2504 push_dr(R_EDX, FRn);
2505 FADDP_st(1);
2506 pop_dr(R_EDX, FRn);
2507 JMP_TARGET(end);
2508 }
2509 break;
2510 case 0x1:
2511 { /* FSUB FRm, FRn */
2512 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2513 check_fpuen();
2514 load_spreg( R_ECX, R_FPSCR );
2515 TEST_imm32_r32( FPSCR_PR, R_ECX );
2516 load_fr_bank( R_EDX );
2517 JNE_rel8(13, doubleprec);
2518 push_fr(R_EDX, FRn);
2519 push_fr(R_EDX, FRm);
2520 FSUBP_st(1);
2521 pop_fr(R_EDX, FRn);
2522 JMP_rel8(11, end);
2523 JMP_TARGET(doubleprec);
2524 push_dr(R_EDX, FRn);
2525 push_dr(R_EDX, FRm);
2526 FSUBP_st(1);
2527 pop_dr(R_EDX, FRn);
2528 JMP_TARGET(end);
2529 }
2530 break;
2531 case 0x2:
2532 { /* FMUL FRm, FRn */
2533 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2534 check_fpuen();
2535 load_spreg( R_ECX, R_FPSCR );
2536 TEST_imm32_r32( FPSCR_PR, R_ECX );
2537 load_fr_bank( R_EDX );
2538 JNE_rel8(13, doubleprec);
2539 push_fr(R_EDX, FRm);
2540 push_fr(R_EDX, FRn);
2541 FMULP_st(1);
2542 pop_fr(R_EDX, FRn);
2543 JMP_rel8(11, end);
2544 JMP_TARGET(doubleprec);
2545 push_dr(R_EDX, FRm);
2546 push_dr(R_EDX, FRn);
2547 FMULP_st(1);
2548 pop_dr(R_EDX, FRn);
2549 JMP_TARGET(end);
2550 }
2551 break;
2552 case 0x3:
2553 { /* FDIV FRm, FRn */
2554 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2555 check_fpuen();
2556 load_spreg( R_ECX, R_FPSCR );
2557 TEST_imm32_r32( FPSCR_PR, R_ECX );
2558 load_fr_bank( R_EDX );
2559 JNE_rel8(13, doubleprec);
2560 push_fr(R_EDX, FRn);
2561 push_fr(R_EDX, FRm);
2562 FDIVP_st(1);
2563 pop_fr(R_EDX, FRn);
2564 JMP_rel8(11, end);
2565 JMP_TARGET(doubleprec);
2566 push_dr(R_EDX, FRn);
2567 push_dr(R_EDX, FRm);
2568 FDIVP_st(1);
2569 pop_dr(R_EDX, FRn);
2570 JMP_TARGET(end);
2571 }
2572 break;
2573 case 0x4:
2574 { /* FCMP/EQ FRm, FRn */
2575 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
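/* Both precision paths simply leave FRm/FRn (or DRm/DRn) on the x87 stack; the
 * FCOMIP/SETcc/FPOP sequence after the join point is shared between them. */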
2576 check_fpuen();
2577 load_spreg( R_ECX, R_FPSCR );
2578 TEST_imm32_r32( FPSCR_PR, R_ECX );
2579 load_fr_bank( R_EDX );
2580 JNE_rel8(8, doubleprec);
2581 push_fr(R_EDX, FRm);
2582 push_fr(R_EDX, FRn);
2583 JMP_rel8(6, end);
2584 JMP_TARGET(doubleprec);
2585 push_dr(R_EDX, FRm);
2586 push_dr(R_EDX, FRn);
2587 JMP_TARGET(end);
2588 FCOMIP_st(1);
2589 SETE_t();
2590 FPOP_st();
2591 }
2592 break;
2593 case 0x5:
2594 { /* FCMP/GT FRm, FRn */
2595 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2596 check_fpuen();
2597 load_spreg( R_ECX, R_FPSCR );
2598 TEST_imm32_r32( FPSCR_PR, R_ECX );
2599 load_fr_bank( R_EDX );
2600 JNE_rel8(8, doubleprec);
2601 push_fr(R_EDX, FRm);
2602 push_fr(R_EDX, FRn);
2603 JMP_rel8(6, end);
2604 JMP_TARGET(doubleprec);
2605 push_dr(R_EDX, FRm);
2606 push_dr(R_EDX, FRn);
2607 JMP_TARGET(end);
2608 FCOMIP_st(1);
2609 SETA_t();
2610 FPOP_st();
2611 }
2612 break;
2613 case 0x6:
2614 { /* FMOV @(R0, Rm), FRn */
2615 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
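/* FPSCR.SZ selects a 32-bit or 64-bit transfer at runtime; in the 64-bit case an odd
 * FRn addresses the XD registers (XF bank), an even FRn the pair FRn/FRn+1. */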
2616 check_fpuen();
2617 load_reg( R_EDX, Rm );
2618 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2619 check_ralign32( R_EDX );
2620 load_spreg( R_ECX, R_FPSCR );
2621 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2622 JNE_rel8(19, doublesize);
2623 MEM_READ_LONG( R_EDX, R_EAX );
2624 load_fr_bank( R_ECX );
2625 store_fr( R_ECX, R_EAX, FRn );
2626 if( FRn&1 ) {
2627 JMP_rel8(48, end);
2628 JMP_TARGET(doublesize);
2629 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2630 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2631 load_xf_bank( R_ECX );
2632 store_fr( R_ECX, R_EAX, FRn&0x0E );
2633 store_fr( R_ECX, R_EDX, FRn|0x01 );
2634 JMP_TARGET(end);
2635 } else {
2636 JMP_rel8(36, end);
2637 JMP_TARGET(doublesize);
2638 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2639 load_fr_bank( R_ECX );
2640 store_fr( R_ECX, R_EAX, FRn&0x0E );
2641 store_fr( R_ECX, R_EDX, FRn|0x01 );
2642 JMP_TARGET(end);
2643 }
2644 }
2645 break;
2646 case 0x7:
2647 { /* FMOV FRm, @(R0, Rn) */
2648 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2649 check_fpuen();
2650 load_reg( R_EDX, Rn );
2651 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2652 check_walign32( R_EDX );
2653 load_spreg( R_ECX, R_FPSCR );
2654 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2655 JNE_rel8(20, doublesize);
2656 load_fr_bank( R_ECX );
2657 load_fr( R_ECX, R_EAX, FRm );
2658 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2659 if( FRm&1 ) {
2660 JMP_rel8( 48, end );
2661 JMP_TARGET(doublesize);
2662 load_xf_bank( R_ECX );
2663 load_fr( R_ECX, R_EAX, FRm&0x0E );
2664 load_fr( R_ECX, R_ECX, FRm|0x01 );
2665 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2666 JMP_TARGET(end);
2667 } else {
2668 JMP_rel8( 39, end );
2669 JMP_TARGET(doublesize);
2670 load_fr_bank( R_ECX );
2671 load_fr( R_ECX, R_EAX, FRm&0x0E );
2672 load_fr( R_ECX, R_ECX, FRm|0x01 );
2673 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2674 JMP_TARGET(end);
2675 }
2676 }
2677 break;
2678 case 0x8:
2679 { /* FMOV @Rm, FRn */
2680 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2681 check_fpuen();
2682 load_reg( R_EDX, Rm );
2683 check_ralign32( R_EDX );
2684 load_spreg( R_ECX, R_FPSCR );
2685 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2686 JNE_rel8(19, doublesize);
2687 MEM_READ_LONG( R_EDX, R_EAX );
2688 load_fr_bank( R_ECX );
2689 store_fr( R_ECX, R_EAX, FRn );
2690 if( FRn&1 ) {
2691 JMP_rel8(48, end);
2692 JMP_TARGET(doublesize);
2693 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2694 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2695 load_xf_bank( R_ECX );
2696 store_fr( R_ECX, R_EAX, FRn&0x0E );
2697 store_fr( R_ECX, R_EDX, FRn|0x01 );
2698 JMP_TARGET(end);
2699 } else {
2700 JMP_rel8(36, end);
2701 JMP_TARGET(doublesize);
2702 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2703 load_fr_bank( R_ECX );
2704 store_fr( R_ECX, R_EAX, FRn&0x0E );
2705 store_fr( R_ECX, R_EDX, FRn|0x01 );
2706 JMP_TARGET(end);
2707 }
2708 }
2709 break;
2710 case 0x9:
2711 { /* FMOV @Rm+, FRn */
2712 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
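/* Post-increment form: Rm is advanced by 4 (SZ=0) or 8 (SZ=1), while the read itself
 * uses the original address kept in EDX. */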
2713 check_fpuen();
2714 load_reg( R_EDX, Rm );
2715 check_ralign32( R_EDX );
2716 MOV_r32_r32( R_EDX, R_EAX );
2717 load_spreg( R_ECX, R_FPSCR );
2718 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2719 JNE_rel8(25, doublesize);
2720 ADD_imm8s_r32( 4, R_EAX );
2721 store_reg( R_EAX, Rm );
2722 MEM_READ_LONG( R_EDX, R_EAX );
2723 load_fr_bank( R_ECX );
2724 store_fr( R_ECX, R_EAX, FRn );
2725 if( FRn&1 ) {
2726 JMP_rel8(54, end);
2727 JMP_TARGET(doublesize);
2728 ADD_imm8s_r32( 8, R_EAX );
2729 store_reg(R_EAX, Rm);
2730 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2731 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2732 load_xf_bank( R_ECX );
2733 store_fr( R_ECX, R_EAX, FRn&0x0E );
2734 store_fr( R_ECX, R_EDX, FRn|0x01 );
2735 JMP_TARGET(end);
2736 } else {
2737 JMP_rel8(42, end); JMP_TARGET(doublesize);
2738 ADD_imm8s_r32( 8, R_EAX );
2739 store_reg(R_EAX, Rm);
2740 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2741 load_fr_bank( R_ECX );
2742 store_fr( R_ECX, R_EAX, FRn&0x0E );
2743 store_fr( R_ECX, R_EDX, FRn|0x01 );
2744 JMP_TARGET(end);
2745 }
2746 }
2747 break;
2748 case 0xA:
2749 { /* FMOV FRm, @Rn */
2750 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2751 check_fpuen();
2752 load_reg( R_EDX, Rn );
2753 check_walign32( R_EDX );
2754 load_spreg( R_ECX, R_FPSCR );
2755 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2756 JNE_rel8(20, doublesize);
2757 load_fr_bank( R_ECX );
2758 load_fr( R_ECX, R_EAX, FRm );
2759 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2760 if( FRm&1 ) {
2761 JMP_rel8( 48, end );
2762 JMP_TARGET(doublesize);
2763 load_xf_bank( R_ECX );
2764 load_fr( R_ECX, R_EAX, FRm&0x0E );
2765 load_fr( R_ECX, R_ECX, FRm|0x01 );
2766 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2767 JMP_TARGET(end);
2768 } else {
2769 JMP_rel8( 39, end );
2770 JMP_TARGET(doublesize);
2771 load_fr_bank( R_ECX );
2772 load_fr( R_ECX, R_EAX, FRm&0x0E );
2773 load_fr( R_ECX, R_ECX, FRm|0x01 );
2774 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2775 JMP_TARGET(end);
2776 }
2777 }
2778 break;
2779 case 0xB:
2780 { /* FMOV FRm, @-Rn */
2781 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2782 check_fpuen();
2783 load_reg( R_EDX, Rn );
2784 check_walign32( R_EDX );
2785 load_spreg( R_ECX, R_FPSCR );
2786 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2787 JNE_rel8(26, doublesize);
2788 load_fr_bank( R_ECX );
2789 load_fr( R_ECX, R_EAX, FRm );
2790 ADD_imm8s_r32(-4,R_EDX);
2791 store_reg( R_EDX, Rn );
2792 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2793 if( FRm&1 ) {
2794 JMP_rel8( 54, end );
2795 JMP_TARGET(doublesize);
2796 load_xf_bank( R_ECX );
2797 load_fr( R_ECX, R_EAX, FRm&0x0E );
2798 load_fr( R_ECX, R_ECX, FRm|0x01 );
2799 ADD_imm8s_r32(-8,R_EDX);
2800 store_reg( R_EDX, Rn );
2801 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2802 JMP_TARGET(end);
2803 } else {
2804 JMP_rel8( 45, end );
2805 JMP_TARGET(doublesize);
2806 load_fr_bank( R_ECX );
2807 load_fr( R_ECX, R_EAX, FRm&0x0E );
2808 load_fr( R_ECX, R_ECX, FRm|0x01 );
2809 ADD_imm8s_r32(-8,R_EDX);
2810 store_reg( R_EDX, Rn );
2811 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2812 JMP_TARGET(end);
2813 }
2814 }
2815 break;
2816 case 0xC:
2817 { /* FMOV FRm, FRn */
2818 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2819 /* As horrible as this looks, it's actually covering 5 separate cases:
2820 * 1. 32-bit fr-to-fr (PR=0)
2821 * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
2822 * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
2823 * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
2824 * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
2825 */
2826 check_fpuen();
2827 load_spreg( R_ECX, R_FPSCR );
2828 load_fr_bank( R_EDX );
2829 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2830 JNE_rel8(8, doublesize);
2831 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
2832 store_fr( R_EDX, R_EAX, FRn );
2833 if( FRm&1 ) {
2834 JMP_rel8(24, end);
2835 JMP_TARGET(doublesize);
2836 load_xf_bank( R_ECX );
2837 load_fr( R_ECX, R_EAX, FRm-1 );
2838 if( FRn&1 ) {
2839 load_fr( R_ECX, R_EDX, FRm );
2840 store_fr( R_ECX, R_EAX, FRn-1 );
2841 store_fr( R_ECX, R_EDX, FRn );
2842 } else /* FRn&1 == 0 */ {
2843 load_fr( R_ECX, R_ECX, FRm );
2844 store_fr( R_EDX, R_EAX, FRn );
2845 store_fr( R_EDX, R_ECX, FRn+1 );
2846 }
2847 JMP_TARGET(end);
2848 } else /* FRm&1 == 0 */ {
2849 if( FRn&1 ) {
2850 JMP_rel8(24, end); JMP_TARGET(doublesize);
2851 load_xf_bank( R_ECX );
2852 load_fr( R_EDX, R_EAX, FRm );
2853 load_fr( R_EDX, R_EDX, FRm+1 );
2854 store_fr( R_ECX, R_EAX, FRn-1 );
2855 store_fr( R_ECX, R_EDX, FRn );
2856 JMP_TARGET(end);
2857 } else /* FRn&1 == 0 */ {
2858 JMP_rel8(12, end); JMP_TARGET(doublesize);
2859 load_fr( R_EDX, R_EAX, FRm );
2860 load_fr( R_EDX, R_ECX, FRm+1 );
2861 store_fr( R_EDX, R_EAX, FRn );
2862 store_fr( R_EDX, R_ECX, FRn+1 );
2863 JMP_TARGET(end);
2864 }
2865 }
2866 }
2867 break;
2868 case 0xD:
2869 switch( (ir&0xF0) >> 4 ) {
2870 case 0x0:
2871 { /* FSTS FPUL, FRn */
2872 uint32_t FRn = ((ir>>8)&0xF);
2873 check_fpuen();
2874 load_fr_bank( R_ECX );
2875 load_spreg( R_EAX, R_FPUL );
2876 store_fr( R_ECX, R_EAX, FRn );
2877 }
2878 break;
2879 case 0x1:
2880 { /* FLDS FRm, FPUL */
2881 uint32_t FRm = ((ir>>8)&0xF);
2882 check_fpuen();
2883 load_fr_bank( R_ECX );
2884 load_fr( R_ECX, R_EAX, FRm );
2885 store_spreg( R_EAX, R_FPUL );
2886 }
2887 break;
2888 case 0x2:
2889 { /* FLOAT FPUL, FRn */
2890 uint32_t FRn = ((ir>>8)&0xF);
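/* FILD pushes the 32-bit integer in FPUL onto the x87 stack; FPSCR.PR then decides
 * whether the result is stored back as a single or a double. */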
2891 check_fpuen();
2892 load_spreg( R_ECX, R_FPSCR );
2893 load_spreg(R_EDX, REG_OFFSET(fr_bank));
2894 FILD_sh4r(R_FPUL);
2895 TEST_imm32_r32( FPSCR_PR, R_ECX );
2896 JNE_rel8(5, doubleprec);
2897 pop_fr( R_EDX, FRn );
2898 JMP_rel8(3, end);
2899 JMP_TARGET(doubleprec);
2900 pop_dr( R_EDX, FRn );
2901 JMP_TARGET(end);
2902 }
2903 break;
2904 case 0x3:
2905 { /* FTRC FRm, FPUL */
2906 uint32_t FRm = ((ir>>8)&0xF);
2907 check_fpuen();
2908 // TODO: truncate FRm (single or double, per FPSCR.PR) to a signed 32-bit integer in FPUL
2909 }
2910 break;
2911 case 0x4:
2912 { /* FNEG FRn */
2913 uint32_t FRn = ((ir>>8)&0xF);
2914 check_fpuen();
2915 load_spreg( R_ECX, R_FPSCR );
2916 TEST_imm32_r32( FPSCR_PR, R_ECX );
2917 load_fr_bank( R_EDX );
2918 JNE_rel8(10, doubleprec);
2919 push_fr(R_EDX, FRn);
2920 FCHS_st0();
2921 pop_fr(R_EDX, FRn);
2922 JMP_rel8(8, end);
2923 JMP_TARGET(doubleprec);
2924 push_dr(R_EDX, FRn);
2925 FCHS_st0();
2926 pop_dr(R_EDX, FRn);
2927 JMP_TARGET(end);
2928 }
2929 break;
2930 case 0x5:
2931 { /* FABS FRn */
2932 uint32_t FRn = ((ir>>8)&0xF);
2933 check_fpuen();
2934 load_spreg( R_ECX, R_FPSCR );
2935 load_fr_bank( R_EDX );
2936 TEST_imm32_r32( FPSCR_PR, R_ECX );
2937 JNE_rel8(10, doubleprec);
2938 push_fr(R_EDX, FRn); // 3
2939 FABS_st0(); // 2
2940 pop_fr( R_EDX, FRn); //3
2941 JMP_rel8(8,end); // 2
2942 JMP_TARGET(doubleprec);
2943 push_dr(R_EDX, FRn);
2944 FABS_st0();
2945 pop_dr(R_EDX, FRn);
2946 JMP_TARGET(end);
2947 }
2948 break;
2949 case 0x6:
2950 { /* FSQRT FRn */
2951 uint32_t FRn = ((ir>>8)&0xF);
2952 check_fpuen();
2953 load_spreg( R_ECX, R_FPSCR );
2954 TEST_imm32_r32( FPSCR_PR, R_ECX );
2955 load_fr_bank( R_EDX );
2956 JNE_rel8(10, doubleprec);
2957 push_fr(R_EDX, FRn);
2958 FSQRT_st0();
2959 pop_fr(R_EDX, FRn);
2960 JMP_rel8(8, end);
2961 JMP_TARGET(doubleprec);
2962 push_dr(R_EDX, FRn);
2963 FSQRT_st0();
2964 pop_dr(R_EDX, FRn);
2965 JMP_TARGET(end);
2966 }
2967 break;
2968 case 0x7:
2969 { /* FSRRA FRn */
2970 uint32_t FRn = ((ir>>8)&0xF);
2971 check_fpuen();
2972 load_spreg( R_ECX, R_FPSCR );
2973 TEST_imm32_r32( FPSCR_PR, R_ECX );
2974 load_fr_bank( R_EDX );
2975 JNE_rel8(12, end); // PR=0 only
2976 FLD1_st0();
2977 push_fr(R_EDX, FRn);
2978 FSQRT_st0();
2979 FDIVP_st(1);
2980 pop_fr(R_EDX, FRn);
2981 JMP_TARGET(end);
2982 }
2983 break;
2984 case 0x8:
2985 { /* FLDI0 FRn */
2986 uint32_t FRn = ((ir>>8)&0xF);
2987 /* IFF PR=0 */
2988 check_fpuen();
2989 load_spreg( R_ECX, R_FPSCR );
2990 TEST_imm32_r32( FPSCR_PR, R_ECX );
2991 JNE_rel8(8, end);
2992 XOR_r32_r32( R_EAX, R_EAX );
2993 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2994 store_fr( R_ECX, R_EAX, FRn );
2995 JMP_TARGET(end);
2996 }
2997 break;
2998 case 0x9:
2999 { /* FLDI1 FRn */
3000 uint32_t FRn = ((ir>>8)&0xF);
3001 /* IFF PR=0 */
3002 check_fpuen();
3003 load_spreg( R_ECX, R_FPSCR );
3004 TEST_imm32_r32( FPSCR_PR, R_ECX );
3005 JNE_rel8(11, end);
3006 load_imm32(R_EAX, 0x3F800000); // IEEE754 single-precision 1.0
3007 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
3008 store_fr( R_ECX, R_EAX, FRn );
3009 JMP_TARGET(end);
3010 }
3011 break;
3012 case 0xA:
3013 { /* FCNVSD FPUL, FRn */
3014 uint32_t FRn = ((ir>>8)&0xF);
3015 check_fpuen();
3017 load_spreg( R_ECX, R_FPSCR );
3018 TEST_imm32_r32( FPSCR_PR, R_ECX );
3019 JE_rel8(9, end); // only when PR=1
3020 load_fr_bank( R_ECX );
3021 push_fpul();
3022 pop_dr( R_ECX, FRn );
3023 JMP_TARGET(end);
3024 }
3025 break;
3026 case 0xB:
3027 { /* FCNVDS FRm, FPUL */
3028 uint32_t FRm = ((ir>>8)&0xF);
3029 check_fpuen();
3030 load_spreg( R_ECX, R_FPSCR );
3031 TEST_imm32_r32( FPSCR_PR, R_ECX );
3032 JE_rel8(9, end); // only when PR=1
3033 load_fr_bank( R_ECX );
3034 push_dr( R_ECX, FRm );
3035 pop_fpul();
3036 JMP_TARGET(end);
3037 }
3038 break;
3039 case 0xE:
3040 { /* FIPR FVm, FVn */
3041 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3);
3042 check_fpuen();
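/* FIPR is not yet implemented here (nor are FSCA/FTRV below); only the FPU-enable
 * check is emitted, so the operation currently behaves as a no-op. */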
3043 }
3044 break;
3045 case 0xF:
3046 switch( (ir&0x100) >> 8 ) {
3047 case 0x0:
3048 { /* FSCA FPUL, FRn */
3049 uint32_t FRn = ((ir>>9)&0x7)<<1;
3050 check_fpuen();
3051 }
3052 break;
3053 case 0x1:
3054 switch( (ir&0x200) >> 9 ) {
3055 case 0x0:
3056 { /* FTRV XMTRX, FVn */
3057 uint32_t FVn = ((ir>>10)&0x3);
3058 check_fpuen();
3059 }
3060 break;
3061 case 0x1:
3062 switch( (ir&0xC00) >> 10 ) {
3063 case 0x0:
3064 { /* FSCHG */
3065 check_fpuen();
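/* FSCHG toggles FPSCR.SZ; FRCHG below additionally toggles FPSCR.FR and refreshes
 * the cached fr_bank pointer via update_fr_bank. */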
3066 load_spreg( R_ECX, R_FPSCR );
3067 XOR_imm32_r32( FPSCR_SZ, R_ECX );
3068 store_spreg( R_ECX, R_FPSCR );
3069 }
3070 break;
3071 case 0x2:
3072 { /* FRCHG */
3073 check_fpuen();
3074 load_spreg( R_ECX, R_FPSCR );
3075 XOR_imm32_r32( FPSCR_FR, R_ECX );
3076 store_spreg( R_ECX, R_FPSCR );
3077 update_fr_bank( R_ECX );
3078 }
3079 break;
3080 case 0x3:
3081 { /* UNDEF */
3082 if( sh4_x86.in_delay_slot ) {
3083 SLOTILLEGAL();
3084 } else {
3085 JMP_exit(EXIT_ILLEGAL);
3086 return 1;
3087 }
3088 }
3089 break;
3090 default:
3091 UNDEF();
3092 break;
3093 }
3094 break;
3095 }
3096 break;
3097 }
3098 break;
3099 default:
3100 UNDEF();
3101 break;
3102 }
3103 break;
3104 case 0xE:
3105 { /* FMAC FR0, FRm, FRn */
3106 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
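/* Computes FRn += FR0 * FRm on the x87 stack; a parallel double-precision sequence is
 * emitted for the PR=1 case. */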
3107 check_fpuen();
3108 load_spreg( R_ECX, R_FPSCR );
3109 load_spreg( R_EDX, REG_OFFSET(fr_bank));
3110 TEST_imm32_r32( FPSCR_PR, R_ECX );
3111 JNE_rel8(18, doubleprec);
3112 push_fr( R_EDX, 0 );
3113 push_fr( R_EDX, FRm );
3114 FMULP_st(1);
3115 push_fr( R_EDX, FRn );
3116 FADDP_st(1);
3117 pop_fr( R_EDX, FRn );
3118 JMP_rel8(16, end);
3119 JMP_TARGET(doubleprec);
3120 push_dr( R_EDX, 0 );
3121 push_dr( R_EDX, FRm );
3122 FMULP_st(1);
3123 push_dr( R_EDX, FRn );
3124 FADDP_st(1);
3125 pop_dr( R_EDX, FRn );
3126 JMP_TARGET(end);
3127 }
3128 break;
3129 default:
3130 UNDEF();
3131 break;
3132 }
3133 break;
3134 }
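/* Common epilogue: returning 1 closes the current translation block, returning 0
 * continues with the next instruction. R_ESI appears to accumulate the number of SH4
 * instructions executed (a completed branch-plus-delay-slot pair adds 2). */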
3136 if( sh4_x86.in_delay_slot ) {
3137 ADD_imm8s_r32(2,R_ESI);
3138 sh4_x86.in_delay_slot = FALSE;
3139 return 1;
3140 } else {
3141 INC_r32(R_ESI);
3142 }
3143 return 0;
3144 }