/**
 * $Id: sh4x86.c,v 1.5 2007-09-11 21:23:48 nkeynes Exp $
 *
 * SH4 => x86 translation. This version does no real optimization, it just
 * outputs straight-line x86 code - it mainly exists to provide a baseline
 * to test the optimizing versions against.
 *
 * Copyright (c) 2007 Nathan Keynes.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <assert.h>

#include "sh4/sh4core.h"
#include "sh4/sh4trans.h"
#include "sh4/x86op.h"
#define DEFAULT_BACKPATCH_SIZE 4096

/**
 * Struct to manage internal translation state. This state is not saved -
 * it is only valid between calls to sh4_translate_begin_block() and
 * sh4_translate_end_block()
 */
struct sh4_x86_state {
    gboolean in_delay_slot;
    gboolean priv_checked; /* true if we've already checked the cpu mode. */
    gboolean fpuen_checked; /* true if we've already checked fpu enabled. */

    /* Allocated memory for the (block-wide) back-patch list */
    uint32_t **backpatch_list;
    uint32_t backpatch_posn;
    uint32_t backpatch_size;
};
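/* The EXIT_* values below are byte offsets into the exception stub block that
 * sh4_translate_end_block() emits after the normal epilogue. The 7-byte
 * spacing assumes each stub is a 5-byte PUSH imm32 of the exception code
 * followed by a 2-byte short jump to the common exception-raising tail.
 */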
#define EXIT_DATA_ADDR_READ 0
#define EXIT_DATA_ADDR_WRITE 7
#define EXIT_ILLEGAL 14
#define EXIT_SLOT_ILLEGAL 21
#define EXIT_FPU_DISABLED 28
#define EXIT_SLOT_FPU_DISABLED 35

static struct sh4_x86_state sh4_x86;
57 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
58 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
62 static void sh4_x86_add_backpatch( uint8_t *ptr )
64 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
65 sh4_x86.backpatch_size <<= 1;
66 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
67 assert( sh4_x86.backpatch_list != NULL );
69 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
72 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
75 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
76 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
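/* Relocation arithmetic, worked through: a rel32 displacement is measured from
 * the end of its 4-byte field, so adding (reloc_base - site - 4) to whatever
 * value was stored at the site makes the branch land that many bytes past
 * reloc_base. E.g. if a branch site at output offset 0x20 was seeded with
 * EXIT_ILLEGAL (14) and reloc_base is at offset 0x80, the site ends up
 * holding 0x80 - 0x20 - 4 + 14 = 0x6A, which targets 0x24 + 0x6A = 0x8E,
 * i.e. exactly 14 bytes into the stub block.
 */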
81 #define MARK_JMP(x,n) uint8_t *_mark_jmp_##x = xlat_output + n
82 #define CHECK_JMP(x) assert( _mark_jmp_##x == xlat_output )
/* Emit an instruction to load an SH4 reg into a real register */
static inline void load_reg( int x86reg, int sh4reg )
{
    /* mov [bp+n], reg */
    OP(0x8B);
    OP(0x45 + (x86reg<<3));
    OP(REG_OFFSET(r[sh4reg]));
}
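/* Encoding note: 0x45 is the ModRM byte mod=01, rm=101 (EBP-relative with an
 * 8-bit displacement); adding (x86reg<<3) fills in the destination register
 * field, and the final byte is the displacement of r[sh4reg] within struct
 * sh4r. This relies on EBP holding &sh4r (set up in
 * sh4_translate_begin_block) and on the offset fitting in a signed byte.
 */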
100 static inline void load_reg16s( int x86reg, int sh4reg )
104 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
107 static inline void load_reg16u( int x86reg, int sh4reg )
111 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
115 static inline void load_spreg( int x86reg, int regoffset )
117 /* mov [bp+n], reg */
119 OP(0x45 + (x86reg<<3));
124 * Emit an instruction to load an immediate value into a register
126 static inline void load_imm32( int x86reg, uint32_t value ) {
127 /* mov #value, reg */
133 * Emit an instruction to store an SH4 reg (RN)
135 void static inline store_reg( int x86reg, int sh4reg ) {
136 /* mov reg, [bp+n] */
138 OP(0x45 + (x86reg<<3));
139 OP(REG_OFFSET(r[sh4reg]));
141 void static inline store_spreg( int x86reg, int regoffset ) {
142 /* mov reg, [bp+n] */
144 OP(0x45 + (x86reg<<3));
#define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))

/* Load an FR register (single-precision floating point) into an integer x86
 * register (eg for register-to-register moves)
 */
void static inline load_fr( int bankreg, int x86reg, int frm )
{
    OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
}

/* Store the contents of an integer x86 register into an FR register
 * (single-precision floating point), eg for register-to-register moves
 */
void static inline store_fr( int bankreg, int x86reg, int frn )
{
    OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
}
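/* The (frm^1) in the single-precision helpers suggests the FR registers are
 * kept pairwise word-swapped in sh4r, presumably so that each DRn pair reads
 * as a correctly-ordered little-endian 64-bit value on x86; the
 * double-precision helpers below accordingly use frm*4 without the swap.
 */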
/* Load a pointer to the back fp bank into the specified x86 register. The
 * bankreg must have been previously loaded with FPSCR.
 */
static inline void load_xf_bank( int bankreg )
{
    SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
    AND_imm8s_r32( 0x40, bankreg );    // Complete extraction
    OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
}
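/* Worked through: FPSCR.FR is bit 21. Shifting right by 15 (21-6) moves it to
 * bit 6, so after masking with 0x40 the register holds either 0 or 64 - and
 * 64 bytes (16 registers * 4 bytes) is exactly one bank of FP registers. The
 * LEA then yields a pointer to either fr[0] or fr[1] within sh4r.
 */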
183 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
184 * with the location of the current fp bank.
186 static inline void push_fr( int bankreg, int frm )
188 OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
192 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
193 * with bankreg previously loaded with the location of the current fp bank.
195 static inline void pop_fr( int bankreg, int frm )
197 OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
201 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
202 * with the location of the current fp bank.
204 static inline void push_dr( int bankreg, int frm )
207 // this is technically undefined, but it seems to work consistently - high 32 bits
// loaded from FRm (32-bits), low 32 bits are 0.
209 OP(0xFF); OP(0x70 + bankreg); OP((frm^1)<<2); // PUSH [bankreg + frm^1]
214 OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
218 static inline void pop_dr( int bankreg, int frm )
222 OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
/* Note: clobbers EAX to make the indirect call - this isn't usually
 * a problem since the callee will usually clobber it anyway.
 */
static inline void call_func0( void *ptr )
{
    load_imm32(R_EAX, (uint32_t)ptr);
    CALL_r32(R_EAX);
}
236 static inline void call_func1( void *ptr, int arg1 )
ADD_imm8s_r32( 4, R_ESP );
243 static inline void call_func2( void *ptr, int arg1, int arg2 )
ADD_imm8s_r32( 8, R_ESP );
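/* The elided bodies of call_func1/call_func2 presumably push their arguments
 * (last argument first) before calling through call_func0; since the callees
 * are ordinary cdecl C functions, the caller must pop the arguments again
 * afterwards, which is what the ADD to R_ESP after the call is for. A rough
 * sketch of what call_func1(sh4_read_byte, R_ECX) is expected to emit:
 *     push ecx
 *     mov  eax, sh4_read_byte
 *     call eax
 *     add  esp, 4
 */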
252 * Write a double (64-bit) value into memory, with the first word in arg2a, and
253 * the second in arg2b
256 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
258 ADD_imm8s_r32( 4, addr );
261 ADD_imm8s_r32( -4, addr );
264 call_func0(sh4_write_long);
ADD_imm8s_r32( 8, R_ESP );
266 call_func0(sh4_write_long);
ADD_imm8s_r32( 8, R_ESP );
271 * Read a double (64-bit) value from memory, writing the first word into arg2a
272 * and the second into arg2b. The addr must not be in EAX
275 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
278 call_func0(sh4_read_long);
281 ADD_imm8s_r32( 4, addr );
283 call_func0(sh4_read_long);
ADD_imm8s_r32( 4, R_ESP );
285 MOV_r32_r32( R_EAX, arg2b );
289 /* Exception checks - Note that all exception checks will clobber EAX */
290 static void check_priv( )
292 if( !sh4_x86.priv_checked ) {
293 sh4_x86.priv_checked = TRUE;
294 load_spreg( R_EAX, R_SR );
295 AND_imm32_r32( SR_MD, R_EAX );
296 if( sh4_x86.in_delay_slot ) {
297 JE_exit( EXIT_SLOT_ILLEGAL );
299 JE_exit( EXIT_ILLEGAL );
304 static void check_fpuen( )
306 if( !sh4_x86.fpuen_checked ) {
307 sh4_x86.fpuen_checked = TRUE;
308 load_spreg( R_EAX, R_SR );
309 AND_imm32_r32( SR_FD, R_EAX );
310 if( sh4_x86.in_delay_slot ) {
311 JNE_exit(EXIT_SLOT_FPU_DISABLED);
313 JNE_exit(EXIT_FPU_DISABLED);
318 static void check_ralign16( int x86reg )
320 TEST_imm32_r32( 0x00000001, x86reg );
321 JNE_exit(EXIT_DATA_ADDR_READ);
324 static void check_walign16( int x86reg )
326 TEST_imm32_r32( 0x00000001, x86reg );
327 JNE_exit(EXIT_DATA_ADDR_WRITE);
330 static void check_ralign32( int x86reg )
332 TEST_imm32_r32( 0x00000003, x86reg );
333 JNE_exit(EXIT_DATA_ADDR_READ);
335 static void check_walign32( int x86reg )
337 TEST_imm32_r32( 0x00000003, x86reg );
338 JNE_exit(EXIT_DATA_ADDR_WRITE);
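/* The alignment checks test the low address bits directly: bit 0 must be
 * clear for 16-bit accesses and bits 1:0 for 32-bit accesses. For example a
 * MOV.L store to address 0x8C000002 fails the 0x00000003 test and branches to
 * the EXIT_DATA_ADDR_WRITE stub, which raises the corresponding SH4 address
 * error exception at the end of the block.
 */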
343 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
344 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
345 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
346 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
347 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
348 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
349 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
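/* sh4_read_byte/word/long are plain C functions, so their result comes back
 * in EAX; MEM_RESULT only emits a register-to-register move when the caller
 * asked for the value somewhere other than EAX.
 */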
351 #define RAISE_EXCEPTION( exc ) call_func1(sh4_raise_exception, exc);
352 #define SLOTILLEGAL() RAISE_EXCEPTION(EXC_SLOT_ILLEGAL); return 1
 * Emit the 'start of block' assembly. Sets up the stack frame and saves SI/DI as required.
360 void sh4_translate_begin_block()
364 load_imm32( R_EBP, (uint32_t)&sh4r );
368 sh4_x86.in_delay_slot = FALSE;
369 sh4_x86.priv_checked = FALSE;
370 sh4_x86.fpuen_checked = FALSE;
371 sh4_x86.backpatch_posn = 0;
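/* Register conventions this translator appears to rely on for the duration of
 * a block: EBP holds &sh4r (so every SH4 register access is an [EBP+disp8]
 * operand), EDI is loaded by the branch cases with the target PC, and ESI
 * appears to hold the number of instructions translated so far -
 * sh4_translate_end_block adds it to PC twice, i.e. two bytes per
 * instruction. The priv/fpuen flags are reset so that the first instruction
 * in the block that needs a privilege or FPU-enable check re-emits it.
 */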
375 * Exit the block early (ie branch out), conditionally or otherwise
379 store_spreg( R_EDI, REG_OFFSET(pc) );
380 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
381 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
383 ADD_r32_r32( R_EAX, R_ECX );
384 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
385 XOR_r32_r32( R_EAX, R_EAX );
 * Flush any open regs back to memory, restore SI/DI, update PC, etc
395 void sh4_translate_end_block( sh4addr_t pc ) {
396 assert( !sh4_x86.in_delay_slot ); // should never stop here
397 // Normal termination - save PC, cycle count
400 uint8_t *end_ptr = xlat_output;
401 // Exception termination. Jump block for various exception codes:
402 PUSH_imm32( EXC_DATA_ADDR_READ );
404 PUSH_imm32( EXC_DATA_ADDR_WRITE );
406 PUSH_imm32( EXC_ILLEGAL );
408 PUSH_imm32( EXC_SLOT_ILLEGAL );
410 PUSH_imm32( EXC_FPU_DISABLED );
412 PUSH_imm32( EXC_SLOT_FPU_DISABLED );
414 load_spreg( R_ECX, REG_OFFSET(pc) );
415 ADD_r32_r32( R_ESI, R_ECX );
416 ADD_r32_r32( R_ESI, R_ECX );
417 store_spreg( R_ECX, REG_OFFSET(pc) );
418 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
419 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
421 ADD_r32_r32( R_EAX, R_ECX );
422 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
424 load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
425 CALL_r32( R_EAX ); // 2
429 sh4_x86_do_backpatch( end_ptr );
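/* The stub block emitted above is the target of every JE_exit/JNE_exit
 * recorded during translation: each stub pushes its SH4 exception code and
 * then (presumably via a short jump) reaches the common tail, which
 * recomputes PC for the faulting instruction, charges the cycles executed so
 * far, and calls sh4_raise_exception. sh4_x86_do_backpatch(end_ptr) then
 * rewrites every recorded rel32 so that, offset by the EXIT_* constant stored
 * at the branch site, it lands on the right stub.
 */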
 * Translate a single instruction. Delayed branches are handled specially
 * by translating both branch and delayed instruction as a single unit (as
 * far as the SH4 is concerned).
 *
 * @return true if the instruction marks the end of a basic block
440 uint32_t sh4_x86_translate_instruction( uint32_t pc )
442 uint16_t ir = sh4_read_word( pc );
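/* SH4 instructions are 16 bits wide: the top nibble selects the major group,
 * Rn lives in bits 11:8, Rm in bits 7:4, and the low byte or nibble carries
 * immediates and displacements - hence the (ir>>8)&0xF / (ir>>4)&0xF
 * extractions repeated throughout the decode below.
 */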
444 switch( (ir&0xF000) >> 12 ) {
448 switch( (ir&0x80) >> 7 ) {
450 switch( (ir&0x70) >> 4 ) {
453 uint32_t Rn = ((ir>>8)&0xF);
454 call_func0(sh4_read_sr);
455 store_reg( R_EAX, Rn );
460 uint32_t Rn = ((ir>>8)&0xF);
461 load_spreg( R_EAX, R_GBR );
462 store_reg( R_EAX, Rn );
467 uint32_t Rn = ((ir>>8)&0xF);
468 load_spreg( R_EAX, R_VBR );
469 store_reg( R_EAX, Rn );
474 uint32_t Rn = ((ir>>8)&0xF);
475 load_spreg( R_EAX, R_SSR );
476 store_reg( R_EAX, Rn );
481 uint32_t Rn = ((ir>>8)&0xF);
482 load_spreg( R_EAX, R_SPC );
483 store_reg( R_EAX, Rn );
492 { /* STC Rm_BANK, Rn */
493 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
494 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
495 store_reg( R_EAX, Rn );
501 switch( (ir&0xF0) >> 4 ) {
504 uint32_t Rn = ((ir>>8)&0xF);
505 if( sh4_x86.in_delay_slot ) {
508 load_imm32( R_EAX, pc + 4 );
509 store_spreg( R_EAX, R_PR );
510 load_reg( R_EDI, Rn );
511 ADD_r32_r32( R_EAX, R_EDI );
512 sh4_x86.in_delay_slot = TRUE;
520 uint32_t Rn = ((ir>>8)&0xF);
521 if( sh4_x86.in_delay_slot ) {
524 load_reg( R_EDI, Rn );
525 sh4_x86.in_delay_slot = TRUE;
533 uint32_t Rn = ((ir>>8)&0xF);
534 load_reg( R_EAX, Rn );
536 AND_imm32_r32( 0xFC000000, R_EAX );
537 CMP_imm32_r32( 0xE0000000, R_EAX );
539 call_func0( sh4_flush_store_queue );
ADD_imm8s_r32( 4, R_ESP );
545 uint32_t Rn = ((ir>>8)&0xF);
550 uint32_t Rn = ((ir>>8)&0xF);
555 uint32_t Rn = ((ir>>8)&0xF);
559 { /* MOVCA.L R0, @Rn */
560 uint32_t Rn = ((ir>>8)&0xF);
561 load_reg( R_EAX, 0 );
562 load_reg( R_ECX, Rn );
563 check_walign32( R_ECX );
564 MEM_WRITE_LONG( R_ECX, R_EAX );
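/* Note: MOVCA.L is translated as an ordinary 32-bit store; the cache-line
 * allocation side effect of the real instruction is not modelled by this
 * straight-line translator.
 */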
573 { /* MOV.B Rm, @(R0, Rn) */
574 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
575 load_reg( R_EAX, 0 );
576 load_reg( R_ECX, Rn );
577 ADD_r32_r32( R_EAX, R_ECX );
578 load_reg( R_EAX, Rm );
579 MEM_WRITE_BYTE( R_ECX, R_EAX );
583 { /* MOV.W Rm, @(R0, Rn) */
584 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
585 load_reg( R_EAX, 0 );
586 load_reg( R_ECX, Rn );
587 ADD_r32_r32( R_EAX, R_ECX );
588 check_walign16( R_ECX );
589 load_reg( R_EAX, Rm );
590 MEM_WRITE_WORD( R_ECX, R_EAX );
594 { /* MOV.L Rm, @(R0, Rn) */
595 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
596 load_reg( R_EAX, 0 );
597 load_reg( R_ECX, Rn );
598 ADD_r32_r32( R_EAX, R_ECX );
599 check_walign32( R_ECX );
600 load_reg( R_EAX, Rm );
601 MEM_WRITE_LONG( R_ECX, R_EAX );
606 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
607 load_reg( R_EAX, Rm );
608 load_reg( R_ECX, Rn );
610 store_spreg( R_EAX, R_MACL );
614 switch( (ir&0xFF0) >> 4 ) {
629 XOR_r32_r32(R_EAX, R_EAX);
630 store_spreg( R_EAX, R_MACL );
631 store_spreg( R_EAX, R_MACH );
656 switch( (ir&0xF0) >> 4 ) {
659 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
664 XOR_r32_r32( R_EAX, R_EAX );
665 store_spreg( R_EAX, R_Q );
666 store_spreg( R_EAX, R_M );
667 store_spreg( R_EAX, R_T );
672 uint32_t Rn = ((ir>>8)&0xF);
673 load_spreg( R_EAX, R_T );
674 store_reg( R_EAX, Rn );
683 switch( (ir&0xF0) >> 4 ) {
686 uint32_t Rn = ((ir>>8)&0xF);
687 load_spreg( R_EAX, R_MACH );
688 store_reg( R_EAX, Rn );
693 uint32_t Rn = ((ir>>8)&0xF);
694 load_spreg( R_EAX, R_MACL );
695 store_reg( R_EAX, Rn );
700 uint32_t Rn = ((ir>>8)&0xF);
701 load_spreg( R_EAX, R_PR );
702 store_reg( R_EAX, Rn );
707 uint32_t Rn = ((ir>>8)&0xF);
708 load_spreg( R_EAX, R_SGR );
709 store_reg( R_EAX, Rn );
714 uint32_t Rn = ((ir>>8)&0xF);
715 load_spreg( R_EAX, R_FPUL );
716 store_reg( R_EAX, Rn );
720 { /* STS FPSCR, Rn */
721 uint32_t Rn = ((ir>>8)&0xF);
722 load_spreg( R_EAX, R_FPSCR );
723 store_reg( R_EAX, Rn );
728 uint32_t Rn = ((ir>>8)&0xF);
729 load_spreg( R_EAX, R_DBR );
730 store_reg( R_EAX, Rn );
739 switch( (ir&0xFF0) >> 4 ) {
742 if( sh4_x86.in_delay_slot ) {
745 load_spreg( R_EDI, R_PR );
746 sh4_x86.in_delay_slot = TRUE;
760 if( sh4_x86.in_delay_slot ) {
763 load_spreg( R_EDI, R_PR );
764 load_spreg( R_EAX, R_SSR );
765 call_func1( sh4_write_sr, R_EAX );
766 sh4_x86.in_delay_slot = TRUE;
778 { /* MOV.B @(R0, Rm), Rn */
779 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
780 load_reg( R_EAX, 0 );
781 load_reg( R_ECX, Rm );
782 ADD_r32_r32( R_EAX, R_ECX );
783 MEM_READ_BYTE( R_ECX, R_EAX );
784 store_reg( R_EAX, Rn );
788 { /* MOV.W @(R0, Rm), Rn */
789 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
790 load_reg( R_EAX, 0 );
791 load_reg( R_ECX, Rm );
792 ADD_r32_r32( R_EAX, R_ECX );
793 check_ralign16( R_ECX );
794 MEM_READ_WORD( R_ECX, R_EAX );
795 store_reg( R_EAX, Rn );
799 { /* MOV.L @(R0, Rm), Rn */
800 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
801 load_reg( R_EAX, 0 );
802 load_reg( R_ECX, Rm );
803 ADD_r32_r32( R_EAX, R_ECX );
804 check_ralign32( R_ECX );
805 MEM_READ_LONG( R_ECX, R_EAX );
806 store_reg( R_EAX, Rn );
810 { /* MAC.L @Rm+, @Rn+ */
811 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
820 { /* MOV.L Rm, @(disp, Rn) */
821 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
822 load_reg( R_ECX, Rn );
823 load_reg( R_EAX, Rm );
824 ADD_imm32_r32( disp, R_ECX );
825 check_walign32( R_ECX );
826 MEM_WRITE_LONG( R_ECX, R_EAX );
832 { /* MOV.B Rm, @Rn */
833 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
834 load_reg( R_EAX, Rm );
835 load_reg( R_ECX, Rn );
836 MEM_WRITE_BYTE( R_ECX, R_EAX );
840 { /* MOV.W Rm, @Rn */
841 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
842 load_reg( R_ECX, Rn );
843 check_walign16( R_ECX );
load_reg( R_EAX, Rm );
MEM_WRITE_WORD( R_ECX, R_EAX );
849 { /* MOV.L Rm, @Rn */
850 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
851 load_reg( R_EAX, Rm );
852 load_reg( R_ECX, Rn );
853 check_walign32(R_ECX);
854 MEM_WRITE_LONG( R_ECX, R_EAX );
858 { /* MOV.B Rm, @-Rn */
859 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
860 load_reg( R_EAX, Rm );
861 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -1, R_ECX );
863 store_reg( R_ECX, Rn );
864 MEM_WRITE_BYTE( R_ECX, R_EAX );
868 { /* MOV.W Rm, @-Rn */
869 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
870 load_reg( R_ECX, Rn );
871 check_walign16( R_ECX );
872 load_reg( R_EAX, Rm );
873 ADD_imm8s_r32( -2, R_ECX );
874 MEM_WRITE_WORD( R_ECX, R_EAX );
878 { /* MOV.L Rm, @-Rn */
879 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
880 load_reg( R_EAX, Rm );
881 load_reg( R_ECX, Rn );
882 check_walign32( R_ECX );
883 ADD_imm8s_r32( -4, R_ECX );
884 store_reg( R_ECX, Rn );
885 MEM_WRITE_LONG( R_ECX, R_EAX );
890 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
891 load_reg( R_EAX, Rm );
load_reg( R_ECX, Rn );
893 SHR_imm8_r32( 31, R_EAX );
894 SHR_imm8_r32( 31, R_ECX );
895 store_spreg( R_EAX, R_M );
896 store_spreg( R_ECX, R_Q );
897 CMP_r32_r32( R_EAX, R_ECX );
903 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
904 load_reg( R_EAX, Rm );
905 load_reg( R_ECX, Rn );
906 TEST_r32_r32( R_EAX, R_ECX );
912 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
913 load_reg( R_EAX, Rm );
914 load_reg( R_ECX, Rn );
915 AND_r32_r32( R_EAX, R_ECX );
916 store_reg( R_ECX, Rn );
921 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
922 load_reg( R_EAX, Rm );
923 load_reg( R_ECX, Rn );
924 XOR_r32_r32( R_EAX, R_ECX );
925 store_reg( R_ECX, Rn );
930 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
931 load_reg( R_EAX, Rm );
932 load_reg( R_ECX, Rn );
933 OR_r32_r32( R_EAX, R_ECX );
934 store_reg( R_ECX, Rn );
938 { /* CMP/STR Rm, Rn */
939 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
940 load_reg( R_EAX, Rm );
941 load_reg( R_ECX, Rn );
942 XOR_r32_r32( R_ECX, R_EAX );
943 TEST_r8_r8( R_AL, R_AL );
945 TEST_r8_r8( R_AH, R_AH ); // 2
947 SHR_imm8_r32( 16, R_EAX ); // 3
948 TEST_r8_r8( R_AL, R_AL ); // 2
950 TEST_r8_r8( R_AH, R_AH ); // 2
956 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
957 load_reg( R_EAX, Rm );
load_reg( R_ECX, Rn );
SHL_imm8_r32( 16, R_EAX );
SHR_imm8_r32( 16, R_ECX );
961 OR_r32_r32( R_EAX, R_ECX );
962 store_reg( R_ECX, Rn );
966 { /* MULU.W Rm, Rn */
967 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
968 load_reg16u( R_EAX, Rm );
969 load_reg16u( R_ECX, Rn );
971 store_spreg( R_EAX, R_MACL );
975 { /* MULS.W Rm, Rn */
976 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
977 load_reg16s( R_EAX, Rm );
978 load_reg16s( R_ECX, Rn );
980 store_spreg( R_EAX, R_MACL );
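/* MULU.W/MULS.W are handled by zero- or sign-extending the 16-bit operands
 * with load_reg16u/load_reg16s and then performing a full 32-bit multiply
 * (the elided line is presumably a MUL/IMUL by ECX); only the low 32 bits
 * are kept in MACL, as the SH4 specifies.
 */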
991 { /* CMP/EQ Rm, Rn */
992 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
993 load_reg( R_EAX, Rm );
994 load_reg( R_ECX, Rn );
995 CMP_r32_r32( R_EAX, R_ECX );
1000 { /* CMP/HS Rm, Rn */
1001 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1002 load_reg( R_EAX, Rm );
1003 load_reg( R_ECX, Rn );
1004 CMP_r32_r32( R_EAX, R_ECX );
1009 { /* CMP/GE Rm, Rn */
1010 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1011 load_reg( R_EAX, Rm );
1012 load_reg( R_ECX, Rn );
1013 CMP_r32_r32( R_EAX, R_ECX );
1019 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1020 load_reg( R_ECX, Rn );
1022 RCL1_r32( R_ECX ); // OP2
1023 SETC_r32( R_EDX ); // Q
1024 load_spreg( R_EAX, R_Q );
1025 CMP_sh4r_r32( R_M, R_EAX );
1027 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_ECX );
1029 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_ECX );
1034 { /* DMULU.L Rm, Rn */
1035 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1036 load_reg( R_EAX, Rm );
1037 load_reg( R_ECX, Rn );
1039 store_spreg( R_EDX, R_MACH );
1040 store_spreg( R_EAX, R_MACL );
1044 { /* CMP/HI Rm, Rn */
1045 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1046 load_reg( R_EAX, Rm );
1047 load_reg( R_ECX, Rn );
1048 CMP_r32_r32( R_EAX, R_ECX );
1053 { /* CMP/GT Rm, Rn */
1054 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1055 load_reg( R_EAX, Rm );
1056 load_reg( R_ECX, Rn );
1057 CMP_r32_r32( R_EAX, R_ECX );
1063 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1064 load_reg( R_EAX, Rm );
1065 load_reg( R_ECX, Rn );
1066 SUB_r32_r32( R_EAX, R_ECX );
1067 store_reg( R_ECX, Rn );
1072 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1073 load_reg( R_EAX, Rm );
1074 load_reg( R_ECX, Rn );
1076 SBB_r32_r32( R_EAX, R_ECX );
1077 store_reg( R_ECX, Rn );
1082 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1083 load_reg( R_EAX, Rm );
1084 load_reg( R_ECX, Rn );
1085 SUB_r32_r32( R_EAX, R_ECX );
1086 store_reg( R_ECX, Rn );
1092 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1093 load_reg( R_EAX, Rm );
1094 load_reg( R_ECX, Rn );
1095 ADD_r32_r32( R_EAX, R_ECX );
1096 store_reg( R_ECX, Rn );
1100 { /* DMULS.L Rm, Rn */
1101 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1102 load_reg( R_EAX, Rm );
1103 load_reg( R_ECX, Rn );
1105 store_spreg( R_EDX, R_MACH );
1106 store_spreg( R_EAX, R_MACL );
1111 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1112 load_reg( R_EAX, Rm );
1113 load_reg( R_ECX, Rn );
1115 ADC_r32_r32( R_EAX, R_ECX );
1116 store_reg( R_ECX, Rn );
1122 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1123 load_reg( R_EAX, Rm );
1124 load_reg( R_ECX, Rn );
1125 ADD_r32_r32( R_EAX, R_ECX );
1126 store_reg( R_ECX, Rn );
1138 switch( (ir&0xF0) >> 4 ) {
1141 uint32_t Rn = ((ir>>8)&0xF);
1142 load_reg( R_EAX, Rn );
1144 store_reg( R_EAX, Rn );
1149 uint32_t Rn = ((ir>>8)&0xF);
1150 load_reg( R_EAX, Rn );
ADD_imm8s_r32( -1, R_EAX );
1152 store_reg( R_EAX, Rn );
1158 uint32_t Rn = ((ir>>8)&0xF);
1159 load_reg( R_EAX, Rn );
1161 store_reg( R_EAX, Rn );
1170 switch( (ir&0xF0) >> 4 ) {
1173 uint32_t Rn = ((ir>>8)&0xF);
1174 load_reg( R_EAX, Rn );
1176 store_reg( R_EAX, Rn );
1181 uint32_t Rn = ((ir>>8)&0xF);
1182 load_reg( R_EAX, Rn );
1183 CMP_imm8s_r32( 0, R_EAX );
1189 uint32_t Rn = ((ir>>8)&0xF);
1190 load_reg( R_EAX, Rn );
1192 store_reg( R_EAX, Rn );
1201 switch( (ir&0xF0) >> 4 ) {
1203 { /* STS.L MACH, @-Rn */
1204 uint32_t Rn = ((ir>>8)&0xF);
1205 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1207 store_reg( R_ECX, Rn );
1208 load_spreg( R_EAX, R_MACH );
1209 MEM_WRITE_LONG( R_ECX, R_EAX );
1213 { /* STS.L MACL, @-Rn */
1214 uint32_t Rn = ((ir>>8)&0xF);
1215 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1217 store_reg( R_ECX, Rn );
1218 load_spreg( R_EAX, R_MACL );
1219 MEM_WRITE_LONG( R_ECX, R_EAX );
1223 { /* STS.L PR, @-Rn */
1224 uint32_t Rn = ((ir>>8)&0xF);
1225 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1227 store_reg( R_ECX, Rn );
1228 load_spreg( R_EAX, R_PR );
1229 MEM_WRITE_LONG( R_ECX, R_EAX );
1233 { /* STC.L SGR, @-Rn */
1234 uint32_t Rn = ((ir>>8)&0xF);
1235 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1237 store_reg( R_ECX, Rn );
1238 load_spreg( R_EAX, R_SGR );
1239 MEM_WRITE_LONG( R_ECX, R_EAX );
1243 { /* STS.L FPUL, @-Rn */
1244 uint32_t Rn = ((ir>>8)&0xF);
1245 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1247 store_reg( R_ECX, Rn );
1248 load_spreg( R_EAX, R_FPUL );
1249 MEM_WRITE_LONG( R_ECX, R_EAX );
1253 { /* STS.L FPSCR, @-Rn */
1254 uint32_t Rn = ((ir>>8)&0xF);
1255 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1257 store_reg( R_ECX, Rn );
1258 load_spreg( R_EAX, R_FPSCR );
1259 MEM_WRITE_LONG( R_ECX, R_EAX );
1263 { /* STC.L DBR, @-Rn */
1264 uint32_t Rn = ((ir>>8)&0xF);
1265 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1267 store_reg( R_ECX, Rn );
1268 load_spreg( R_EAX, R_DBR );
1269 MEM_WRITE_LONG( R_ECX, R_EAX );
1278 switch( (ir&0x80) >> 7 ) {
1280 switch( (ir&0x70) >> 4 ) {
1282 { /* STC.L SR, @-Rn */
1283 uint32_t Rn = ((ir>>8)&0xF);
1284 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1286 store_reg( R_ECX, Rn );
1287 call_func0( sh4_read_sr );
1288 MEM_WRITE_LONG( R_ECX, R_EAX );
1292 { /* STC.L GBR, @-Rn */
1293 uint32_t Rn = ((ir>>8)&0xF);
1294 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1296 store_reg( R_ECX, Rn );
1297 load_spreg( R_EAX, R_GBR );
1298 MEM_WRITE_LONG( R_ECX, R_EAX );
1302 { /* STC.L VBR, @-Rn */
1303 uint32_t Rn = ((ir>>8)&0xF);
1304 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1306 store_reg( R_ECX, Rn );
1307 load_spreg( R_EAX, R_VBR );
1308 MEM_WRITE_LONG( R_ECX, R_EAX );
1312 { /* STC.L SSR, @-Rn */
1313 uint32_t Rn = ((ir>>8)&0xF);
1314 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1316 store_reg( R_ECX, Rn );
1317 load_spreg( R_EAX, R_SSR );
1318 MEM_WRITE_LONG( R_ECX, R_EAX );
1322 { /* STC.L SPC, @-Rn */
1323 uint32_t Rn = ((ir>>8)&0xF);
1324 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1326 store_reg( R_ECX, Rn );
1327 load_spreg( R_EAX, R_SPC );
1328 MEM_WRITE_LONG( R_ECX, R_EAX );
1337 { /* STC.L Rm_BANK, @-Rn */
1338 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1339 load_reg( R_ECX, Rn );
ADD_imm8s_r32( -4, R_ECX );
1341 store_reg( R_ECX, Rn );
1342 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1343 MEM_WRITE_LONG( R_ECX, R_EAX );
1349 switch( (ir&0xF0) >> 4 ) {
1352 uint32_t Rn = ((ir>>8)&0xF);
1353 load_reg( R_EAX, Rn );
1355 store_reg( R_EAX, Rn );
1361 uint32_t Rn = ((ir>>8)&0xF);
1362 load_reg( R_EAX, Rn );
1365 store_reg( R_EAX, Rn );
1375 switch( (ir&0xF0) >> 4 ) {
1378 uint32_t Rn = ((ir>>8)&0xF);
1379 load_reg( R_EAX, Rn );
1381 store_reg( R_EAX, Rn );
1387 uint32_t Rn = ((ir>>8)&0xF);
1388 load_reg( R_EAX, Rn );
1389 CMP_imm8s_r32( 0, R_EAX );
1395 uint32_t Rn = ((ir>>8)&0xF);
1396 load_reg( R_EAX, Rn );
1399 store_reg( R_EAX, Rn );
1409 switch( (ir&0xF0) >> 4 ) {
1411 { /* LDS.L @Rm+, MACH */
1412 uint32_t Rm = ((ir>>8)&0xF);
1413 load_reg( R_EAX, Rm );
1414 MOV_r32_r32( R_EAX, R_ECX );
1415 ADD_imm8s_r32( 4, R_EAX );
1416 store_reg( R_EAX, Rm );
1417 MEM_READ_LONG( R_ECX, R_EAX );
1418 store_spreg( R_EAX, R_MACH );
1422 { /* LDS.L @Rm+, MACL */
1423 uint32_t Rm = ((ir>>8)&0xF);
1424 load_reg( R_EAX, Rm );
1425 MOV_r32_r32( R_EAX, R_ECX );
1426 ADD_imm8s_r32( 4, R_EAX );
1427 store_reg( R_EAX, Rm );
1428 MEM_READ_LONG( R_ECX, R_EAX );
1429 store_spreg( R_EAX, R_MACL );
1433 { /* LDS.L @Rm+, PR */
1434 uint32_t Rm = ((ir>>8)&0xF);
1435 load_reg( R_EAX, Rm );
1436 MOV_r32_r32( R_EAX, R_ECX );
1437 ADD_imm8s_r32( 4, R_EAX );
1438 store_reg( R_EAX, Rm );
1439 MEM_READ_LONG( R_ECX, R_EAX );
1440 store_spreg( R_EAX, R_PR );
1444 { /* LDC.L @Rm+, SGR */
1445 uint32_t Rm = ((ir>>8)&0xF);
1446 load_reg( R_EAX, Rm );
1447 MOV_r32_r32( R_EAX, R_ECX );
1448 ADD_imm8s_r32( 4, R_EAX );
1449 store_reg( R_EAX, Rm );
1450 MEM_READ_LONG( R_ECX, R_EAX );
1451 store_spreg( R_EAX, R_SGR );
1455 { /* LDS.L @Rm+, FPUL */
1456 uint32_t Rm = ((ir>>8)&0xF);
1457 load_reg( R_EAX, Rm );
1458 MOV_r32_r32( R_EAX, R_ECX );
1459 ADD_imm8s_r32( 4, R_EAX );
1460 store_reg( R_EAX, Rm );
1461 MEM_READ_LONG( R_ECX, R_EAX );
1462 store_spreg( R_EAX, R_FPUL );
1466 { /* LDS.L @Rm+, FPSCR */
1467 uint32_t Rm = ((ir>>8)&0xF);
1468 load_reg( R_EAX, Rm );
1469 MOV_r32_r32( R_EAX, R_ECX );
1470 ADD_imm8s_r32( 4, R_EAX );
1471 store_reg( R_EAX, Rm );
1472 MEM_READ_LONG( R_ECX, R_EAX );
1473 store_spreg( R_EAX, R_FPSCR );
1477 { /* LDC.L @Rm+, DBR */
1478 uint32_t Rm = ((ir>>8)&0xF);
1479 load_reg( R_EAX, Rm );
1480 MOV_r32_r32( R_EAX, R_ECX );
1481 ADD_imm8s_r32( 4, R_EAX );
1482 store_reg( R_EAX, Rm );
1483 MEM_READ_LONG( R_ECX, R_EAX );
1484 store_spreg( R_EAX, R_DBR );
1493 switch( (ir&0x80) >> 7 ) {
1495 switch( (ir&0x70) >> 4 ) {
1497 { /* LDC.L @Rm+, SR */
1498 uint32_t Rm = ((ir>>8)&0xF);
1499 load_reg( R_EAX, Rm );
1500 MOV_r32_r32( R_EAX, R_ECX );
1501 ADD_imm8s_r32( 4, R_EAX );
1502 store_reg( R_EAX, Rm );
1503 MEM_READ_LONG( R_ECX, R_EAX );
1504 call_func1( sh4_write_sr, R_EAX );
1508 { /* LDC.L @Rm+, GBR */
1509 uint32_t Rm = ((ir>>8)&0xF);
1510 load_reg( R_EAX, Rm );
1511 MOV_r32_r32( R_EAX, R_ECX );
1512 ADD_imm8s_r32( 4, R_EAX );
1513 store_reg( R_EAX, Rm );
1514 MEM_READ_LONG( R_ECX, R_EAX );
1515 store_spreg( R_EAX, R_GBR );
1519 { /* LDC.L @Rm+, VBR */
1520 uint32_t Rm = ((ir>>8)&0xF);
1521 load_reg( R_EAX, Rm );
1522 MOV_r32_r32( R_EAX, R_ECX );
1523 ADD_imm8s_r32( 4, R_EAX );
1524 store_reg( R_EAX, Rm );
1525 MEM_READ_LONG( R_ECX, R_EAX );
1526 store_spreg( R_EAX, R_VBR );
1530 { /* LDC.L @Rm+, SSR */
1531 uint32_t Rm = ((ir>>8)&0xF);
1532 load_reg( R_EAX, Rm );
1533 MOV_r32_r32( R_EAX, R_ECX );
1534 ADD_imm8s_r32( 4, R_EAX );
1535 store_reg( R_EAX, Rm );
1536 MEM_READ_LONG( R_ECX, R_EAX );
1537 store_spreg( R_EAX, R_SSR );
1541 { /* LDC.L @Rm+, SPC */
1542 uint32_t Rm = ((ir>>8)&0xF);
1543 load_reg( R_EAX, Rm );
1544 MOV_r32_r32( R_EAX, R_ECX );
1545 ADD_imm8s_r32( 4, R_EAX );
1546 store_reg( R_EAX, Rm );
1547 MEM_READ_LONG( R_ECX, R_EAX );
1548 store_spreg( R_EAX, R_SPC );
1557 { /* LDC.L @Rm+, Rn_BANK */
1558 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1559 load_reg( R_EAX, Rm );
1560 MOV_r32_r32( R_EAX, R_ECX );
1561 ADD_imm8s_r32( 4, R_EAX );
1562 store_reg( R_EAX, Rm );
1563 MEM_READ_LONG( R_ECX, R_EAX );
1564 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1570 switch( (ir&0xF0) >> 4 ) {
1573 uint32_t Rn = ((ir>>8)&0xF);
1574 load_reg( R_EAX, Rn );
1575 SHL_imm8_r32( 2, R_EAX );
1576 store_reg( R_EAX, Rn );
1581 uint32_t Rn = ((ir>>8)&0xF);
1582 load_reg( R_EAX, Rn );
1583 SHL_imm8_r32( 8, R_EAX );
1584 store_reg( R_EAX, Rn );
1589 uint32_t Rn = ((ir>>8)&0xF);
1590 load_reg( R_EAX, Rn );
1591 SHL_imm8_r32( 16, R_EAX );
1592 store_reg( R_EAX, Rn );
1601 switch( (ir&0xF0) >> 4 ) {
1604 uint32_t Rn = ((ir>>8)&0xF);
1605 load_reg( R_EAX, Rn );
1606 SHR_imm8_r32( 2, R_EAX );
1607 store_reg( R_EAX, Rn );
1612 uint32_t Rn = ((ir>>8)&0xF);
1613 load_reg( R_EAX, Rn );
1614 SHR_imm8_r32( 8, R_EAX );
1615 store_reg( R_EAX, Rn );
1620 uint32_t Rn = ((ir>>8)&0xF);
1621 load_reg( R_EAX, Rn );
1622 SHR_imm8_r32( 16, R_EAX );
1623 store_reg( R_EAX, Rn );
1632 switch( (ir&0xF0) >> 4 ) {
1634 { /* LDS Rm, MACH */
1635 uint32_t Rm = ((ir>>8)&0xF);
1636 load_reg( R_EAX, Rm );
1637 store_spreg( R_EAX, R_MACH );
1641 { /* LDS Rm, MACL */
1642 uint32_t Rm = ((ir>>8)&0xF);
1643 load_reg( R_EAX, Rm );
1644 store_spreg( R_EAX, R_MACL );
1649 uint32_t Rm = ((ir>>8)&0xF);
1650 load_reg( R_EAX, Rm );
1651 store_spreg( R_EAX, R_PR );
1656 uint32_t Rm = ((ir>>8)&0xF);
1657 load_reg( R_EAX, Rm );
1658 store_spreg( R_EAX, R_SGR );
1662 { /* LDS Rm, FPUL */
1663 uint32_t Rm = ((ir>>8)&0xF);
1664 load_reg( R_EAX, Rm );
1665 store_spreg( R_EAX, R_FPUL );
1669 { /* LDS Rm, FPSCR */
1670 uint32_t Rm = ((ir>>8)&0xF);
1671 load_reg( R_EAX, Rm );
1672 store_spreg( R_EAX, R_FPSCR );
1677 uint32_t Rm = ((ir>>8)&0xF);
1678 load_reg( R_EAX, Rm );
1679 store_spreg( R_EAX, R_DBR );
1688 switch( (ir&0xF0) >> 4 ) {
1691 uint32_t Rn = ((ir>>8)&0xF);
1692 if( sh4_x86.in_delay_slot ) {
1695 load_imm32( R_EAX, pc + 4 );
1696 store_spreg( R_EAX, R_PR );
1697 load_reg( R_EDI, Rn );
1698 sh4_x86.in_delay_slot = TRUE;
1706 uint32_t Rn = ((ir>>8)&0xF);
1707 load_reg( R_ECX, Rn );
1708 MEM_READ_BYTE( R_ECX, R_EAX );
1709 TEST_r8_r8( R_AL, R_AL );
1711 OR_imm8_r8( 0x80, R_AL );
1712 MEM_WRITE_BYTE( R_ECX, R_EAX );
1717 uint32_t Rn = ((ir>>8)&0xF);
1718 if( sh4_x86.in_delay_slot ) {
1721 load_reg( R_EDI, Rn );
1722 sh4_x86.in_delay_slot = TRUE;
1735 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1736 /* Annoyingly enough, not directly convertible */
1737 load_reg( R_EAX, Rn );
1738 load_reg( R_ECX, Rm );
1739 CMP_imm32_r32( 0, R_ECX );
1742 NEG_r32( R_ECX ); // 2
1743 AND_imm8_r8( 0x1F, R_CL ); // 3
1744 SAR_r32_CL( R_EAX ); // 2
1747 AND_imm8_r8( 0x1F, R_CL ); // 3
1748 SHL_r32_CL( R_EAX ); // 2
1750 store_reg( R_EAX, Rn );
1755 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1756 load_reg( R_EAX, Rn );
1757 load_reg( R_ECX, Rm );
1759 MOV_r32_r32( R_EAX, R_EDX );
1760 SHL_r32_CL( R_EAX );
1762 SHR_r32_CL( R_EDX );
1763 CMP_imm8s_r32( 0, R_ECX );
1764 CMOVAE_r32_r32( R_EDX, R_EAX );
1765 store_reg( R_EAX, Rn );
1769 switch( (ir&0x80) >> 7 ) {
1771 switch( (ir&0x70) >> 4 ) {
1774 uint32_t Rm = ((ir>>8)&0xF);
1775 load_reg( R_EAX, Rm );
1776 call_func1( sh4_write_sr, R_EAX );
1781 uint32_t Rm = ((ir>>8)&0xF);
1782 load_reg( R_EAX, Rm );
1783 store_spreg( R_EAX, R_GBR );
1788 uint32_t Rm = ((ir>>8)&0xF);
1789 load_reg( R_EAX, Rm );
1790 store_spreg( R_EAX, R_VBR );
1795 uint32_t Rm = ((ir>>8)&0xF);
1796 load_reg( R_EAX, Rm );
1797 store_spreg( R_EAX, R_SSR );
1802 uint32_t Rm = ((ir>>8)&0xF);
1803 load_reg( R_EAX, Rm );
1804 store_spreg( R_EAX, R_SPC );
1813 { /* LDC Rm, Rn_BANK */
1814 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1815 load_reg( R_EAX, Rm );
1816 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1822 { /* MAC.W @Rm+, @Rn+ */
1823 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1829 { /* MOV.L @(disp, Rm), Rn */
1830 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
1831 load_reg( R_ECX, Rm );
1832 ADD_imm8s_r32( disp, R_ECX );
1833 check_ralign32( R_ECX );
1834 MEM_READ_LONG( R_ECX, R_EAX );
1835 store_reg( R_EAX, Rn );
1841 { /* MOV.B @Rm, Rn */
1842 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1843 load_reg( R_ECX, Rm );
1844 MEM_READ_BYTE( R_ECX, R_EAX );
store_reg( R_EAX, Rn );
1849 { /* MOV.W @Rm, Rn */
1850 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1851 load_reg( R_ECX, Rm );
1852 check_ralign16( R_ECX );
1853 MEM_READ_WORD( R_ECX, R_EAX );
1854 store_reg( R_EAX, Rn );
1858 { /* MOV.L @Rm, Rn */
1859 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1860 load_reg( R_ECX, Rm );
1861 check_ralign32( R_ECX );
1862 MEM_READ_LONG( R_ECX, R_EAX );
1863 store_reg( R_EAX, Rn );
1868 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1869 load_reg( R_EAX, Rm );
1870 store_reg( R_EAX, Rn );
1874 { /* MOV.B @Rm+, Rn */
1875 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1876 load_reg( R_ECX, Rm );
1877 MOV_r32_r32( R_ECX, R_EAX );
1878 ADD_imm8s_r32( 1, R_EAX );
1879 store_reg( R_EAX, Rm );
1880 MEM_READ_BYTE( R_ECX, R_EAX );
1881 store_reg( R_EAX, Rn );
1885 { /* MOV.W @Rm+, Rn */
1886 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1887 load_reg( R_EAX, Rm );
1888 check_ralign16( R_EAX );
1889 MOV_r32_r32( R_EAX, R_ECX );
1890 ADD_imm8s_r32( 2, R_EAX );
1891 store_reg( R_EAX, Rm );
1892 MEM_READ_WORD( R_ECX, R_EAX );
1893 store_reg( R_EAX, Rn );
1897 { /* MOV.L @Rm+, Rn */
1898 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1899 load_reg( R_EAX, Rm );
check_ralign32( R_EAX );
1901 MOV_r32_r32( R_EAX, R_ECX );
1902 ADD_imm8s_r32( 4, R_EAX );
1903 store_reg( R_EAX, Rm );
1904 MEM_READ_LONG( R_ECX, R_EAX );
1905 store_reg( R_EAX, Rn );
1910 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1911 load_reg( R_EAX, Rm );
1913 store_reg( R_EAX, Rn );
1917 { /* SWAP.B Rm, Rn */
1918 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1919 load_reg( R_EAX, Rm );
1920 XCHG_r8_r8( R_AL, R_AH );
1921 store_reg( R_EAX, Rn );
1925 { /* SWAP.W Rm, Rn */
1926 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1927 load_reg( R_EAX, Rm );
1928 MOV_r32_r32( R_EAX, R_ECX );
1929 SHL_imm8_r32( 16, R_ECX );
1930 SHR_imm8_r32( 16, R_EAX );
1931 OR_r32_r32( R_EAX, R_ECX );
1932 store_reg( R_ECX, Rn );
1937 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1938 load_reg( R_EAX, Rm );
1939 XOR_r32_r32( R_ECX, R_ECX );
1941 SBB_r32_r32( R_EAX, R_ECX );
1942 store_reg( R_ECX, Rn );
1948 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1949 load_reg( R_EAX, Rm );
1951 store_reg( R_EAX, Rn );
1955 { /* EXTU.B Rm, Rn */
1956 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1957 load_reg( R_EAX, Rm );
1958 MOVZX_r8_r32( R_EAX, R_EAX );
1959 store_reg( R_EAX, Rn );
1963 { /* EXTU.W Rm, Rn */
1964 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1965 load_reg( R_EAX, Rm );
1966 MOVZX_r16_r32( R_EAX, R_EAX );
1967 store_reg( R_EAX, Rn );
1971 { /* EXTS.B Rm, Rn */
1972 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1973 load_reg( R_EAX, Rm );
1974 MOVSX_r8_r32( R_EAX, R_EAX );
1975 store_reg( R_EAX, Rn );
1979 { /* EXTS.W Rm, Rn */
1980 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1981 load_reg( R_EAX, Rm );
1982 MOVSX_r16_r32( R_EAX, R_EAX );
1983 store_reg( R_EAX, Rn );
1989 { /* ADD #imm, Rn */
1990 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
1991 load_reg( R_EAX, Rn );
1992 ADD_imm8s_r32( imm, R_EAX );
1993 store_reg( R_EAX, Rn );
1997 switch( (ir&0xF00) >> 8 ) {
1999 { /* MOV.B R0, @(disp, Rn) */
2000 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2001 load_reg( R_EAX, 0 );
2002 load_reg( R_ECX, Rn );
2003 ADD_imm32_r32( disp, R_ECX );
2004 MEM_WRITE_BYTE( R_ECX, R_EAX );
2008 { /* MOV.W R0, @(disp, Rn) */
2009 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2010 load_reg( R_ECX, Rn );
2011 load_reg( R_EAX, 0 );
2012 ADD_imm32_r32( disp, R_ECX );
2013 check_walign16( R_ECX );
2014 MEM_WRITE_WORD( R_ECX, R_EAX );
2018 { /* MOV.B @(disp, Rm), R0 */
2019 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2020 load_reg( R_ECX, Rm );
2021 ADD_imm32_r32( disp, R_ECX );
2022 MEM_READ_BYTE( R_ECX, R_EAX );
2023 store_reg( R_EAX, 0 );
2027 { /* MOV.W @(disp, Rm), R0 */
2028 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2029 load_reg( R_ECX, Rm );
2030 ADD_imm32_r32( disp, R_ECX );
2031 check_ralign16( R_ECX );
2032 MEM_READ_WORD( R_ECX, R_EAX );
2033 store_reg( R_EAX, 0 );
2037 { /* CMP/EQ #imm, R0 */
2038 int32_t imm = SIGNEXT8(ir&0xFF);
2039 load_reg( R_EAX, 0 );
2040 CMP_imm8s_r32(imm, R_EAX);
2046 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2047 if( sh4_x86.in_delay_slot ) {
2050 load_imm32( R_EDI, pc + 2 );
2051 CMP_imm8s_sh4r( 0, R_T );
2053 load_imm32( R_EDI, disp + pc + 4 );
2061 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2062 if( sh4_x86.in_delay_slot ) {
2065 load_imm32( R_EDI, pc + 2 );
2066 CMP_imm8s_sh4r( 0, R_T );
2068 load_imm32( R_EDI, disp + pc + 4 );
2076 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2077 if( sh4_x86.in_delay_slot ) {
2080 load_imm32( R_EDI, pc + 2 );
2081 CMP_imm8s_sh4r( 0, R_T );
2083 load_imm32( R_EDI, disp + pc + 4 );
2084 sh4_x86.in_delay_slot = TRUE;
2092 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2093 if( sh4_x86.in_delay_slot ) {
2096 load_imm32( R_EDI, pc + 2 );
2097 CMP_imm8s_sh4r( 0, R_T );
2099 load_imm32( R_EDI, disp + pc + 4 );
2100 sh4_x86.in_delay_slot = TRUE;
2112 { /* MOV.W @(disp, PC), Rn */
2113 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1;
2114 if( sh4_x86.in_delay_slot ) {
2117 load_imm32( R_ECX, pc + disp + 4 );
2118 MEM_READ_WORD( R_ECX, R_EAX );
2119 store_reg( R_EAX, Rn );
2125 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2126 if( sh4_x86.in_delay_slot ) {
2129 load_imm32( R_EDI, disp + pc + 4 );
2130 sh4_x86.in_delay_slot = TRUE;
2138 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2139 if( sh4_x86.in_delay_slot ) {
2142 load_imm32( R_EAX, pc + 4 );
2143 store_spreg( R_EAX, R_PR );
2144 load_imm32( R_EDI, disp + pc + 4 );
2145 sh4_x86.in_delay_slot = TRUE;
2152 switch( (ir&0xF00) >> 8 ) {
2154 { /* MOV.B R0, @(disp, GBR) */
2155 uint32_t disp = (ir&0xFF);
2156 load_reg( R_EAX, 0 );
2157 load_spreg( R_ECX, R_GBR );
2158 ADD_imm32_r32( disp, R_ECX );
2159 MEM_WRITE_BYTE( R_ECX, R_EAX );
2163 { /* MOV.W R0, @(disp, GBR) */
2164 uint32_t disp = (ir&0xFF)<<1;
2165 load_spreg( R_ECX, R_GBR );
2166 load_reg( R_EAX, 0 );
2167 ADD_imm32_r32( disp, R_ECX );
2168 check_walign16( R_ECX );
2169 MEM_WRITE_WORD( R_ECX, R_EAX );
2173 { /* MOV.L R0, @(disp, GBR) */
2174 uint32_t disp = (ir&0xFF)<<2;
2175 load_spreg( R_ECX, R_GBR );
2176 load_reg( R_EAX, 0 );
2177 ADD_imm32_r32( disp, R_ECX );
2178 check_walign32( R_ECX );
2179 MEM_WRITE_LONG( R_ECX, R_EAX );
2184 uint32_t imm = (ir&0xFF);
2185 if( sh4_x86.in_delay_slot ) {
2189 RAISE_EXCEPTION(EXC_TRAP);
2194 { /* MOV.B @(disp, GBR), R0 */
2195 uint32_t disp = (ir&0xFF);
2196 load_spreg( R_ECX, R_GBR );
2197 ADD_imm32_r32( disp, R_ECX );
2198 MEM_READ_BYTE( R_ECX, R_EAX );
2199 store_reg( R_EAX, 0 );
2203 { /* MOV.W @(disp, GBR), R0 */
2204 uint32_t disp = (ir&0xFF)<<1;
2205 load_spreg( R_ECX, R_GBR );
2206 ADD_imm32_r32( disp, R_ECX );
2207 check_ralign16( R_ECX );
2208 MEM_READ_WORD( R_ECX, R_EAX );
2209 store_reg( R_EAX, 0 );
2213 { /* MOV.L @(disp, GBR), R0 */
2214 uint32_t disp = (ir&0xFF)<<2;
2215 load_spreg( R_ECX, R_GBR );
2216 ADD_imm32_r32( disp, R_ECX );
2217 check_ralign32( R_ECX );
2218 MEM_READ_LONG( R_ECX, R_EAX );
2219 store_reg( R_EAX, 0 );
2223 { /* MOVA @(disp, PC), R0 */
2224 uint32_t disp = (ir&0xFF)<<2;
2225 if( sh4_x86.in_delay_slot ) {
2228 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2229 store_reg( R_ECX, 0 );
2234 { /* TST #imm, R0 */
2235 uint32_t imm = (ir&0xFF);
2236 load_reg( R_EAX, 0 );
2237 TEST_imm32_r32( imm, R_EAX );
2242 { /* AND #imm, R0 */
2243 uint32_t imm = (ir&0xFF);
2244 load_reg( R_EAX, 0 );
2245 AND_imm32_r32(imm, R_EAX);
2246 store_reg( R_EAX, 0 );
2250 { /* XOR #imm, R0 */
2251 uint32_t imm = (ir&0xFF);
2252 load_reg( R_EAX, 0 );
2253 XOR_imm32_r32( imm, R_EAX );
2254 store_reg( R_EAX, 0 );
2259 uint32_t imm = (ir&0xFF);
2260 load_reg( R_EAX, 0 );
2261 OR_imm32_r32(imm, R_EAX);
2262 store_reg( R_EAX, 0 );
2266 { /* TST.B #imm, @(R0, GBR) */
2267 uint32_t imm = (ir&0xFF);
2268 load_reg( R_EAX, 0);
load_spreg( R_ECX, R_GBR );
2270 ADD_r32_r32( R_EAX, R_ECX );
2271 MEM_READ_BYTE( R_ECX, R_EAX );
2272 TEST_imm8_r8( imm, R_EAX );
2277 { /* AND.B #imm, @(R0, GBR) */
2278 uint32_t imm = (ir&0xFF);
2279 load_reg( R_EAX, 0 );
2280 load_spreg( R_ECX, R_GBR );
2281 ADD_r32_r32( R_EAX, R_ECX );
2282 MEM_READ_BYTE( R_ECX, R_EAX );
AND_imm32_r32(imm, R_EAX );
2284 MEM_WRITE_BYTE( R_ECX, R_EAX );
2288 { /* XOR.B #imm, @(R0, GBR) */
2289 uint32_t imm = (ir&0xFF);
2290 load_reg( R_EAX, 0 );
2291 load_spreg( R_ECX, R_GBR );
2292 ADD_r32_r32( R_EAX, R_ECX );
2293 MEM_READ_BYTE( R_ECX, R_EAX );
2294 XOR_imm32_r32( imm, R_EAX );
2295 MEM_WRITE_BYTE( R_ECX, R_EAX );
2299 { /* OR.B #imm, @(R0, GBR) */
2300 uint32_t imm = (ir&0xFF);
2301 load_reg( R_EAX, 0 );
2302 load_spreg( R_ECX, R_GBR );
2303 ADD_r32_r32( R_EAX, R_ECX );
2304 MEM_READ_BYTE( R_ECX, R_EAX );
OR_imm32_r32(imm, R_EAX );
2306 MEM_WRITE_BYTE( R_ECX, R_EAX );
2312 { /* MOV.L @(disp, PC), Rn */
2313 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2;
2314 if( sh4_x86.in_delay_slot ) {
2317 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2318 MEM_READ_LONG( R_ECX, R_EAX );
store_reg( R_EAX, Rn );
2324 { /* MOV #imm, Rn */
2325 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2326 load_imm32( R_EAX, imm );
2327 store_reg( R_EAX, Rn );
2333 { /* FADD FRm, FRn */
2334 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2338 { /* FSUB FRm, FRn */
2339 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2343 { /* FMUL FRm, FRn */
2344 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2348 { /* FDIV FRm, FRn */
2349 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2353 { /* FCMP/EQ FRm, FRn */
2354 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2358 { /* FCMP/GT FRm, FRn */
2359 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2363 { /* FMOV @(R0, Rm), FRn */
2364 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2368 { /* FMOV FRm, @(R0, Rn) */
2369 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2373 { /* FMOV @Rm, FRn */
2374 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2375 load_reg( R_EDX, Rm );
2376 check_ralign32( R_EDX );
2377 load_spreg( R_ECX, R_FPSCR );
2378 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2380 MEM_READ_LONG( R_EDX, R_EAX );
2381 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2382 store_fr( R_ECX, R_EAX, FRn );
2385 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2386 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2387 load_xf_bank( R_ECX );
2390 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2391 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2393 store_fr( R_ECX, R_EAX, FRn&0x0E );
2394 store_fr( R_ECX, R_EDX, FRn|0x01 );
2398 { /* FMOV @Rm+, FRn */
2399 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2403 { /* FMOV FRm, @Rn */
2404 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2405 load_reg( R_EDX, Rn );
2406 check_walign32( R_EDX );
2407 load_spreg( R_ECX, R_FPSCR );
2408 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2410 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2411 load_fr( R_ECX, R_EAX, FRm );
2412 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2415 load_xf_bank( R_ECX );
2418 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2420 load_fr( R_ECX, R_EAX, FRm&0x0E );
2421 load_fr( R_ECX, R_ECX, FRm|0x01 );
2422 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2426 { /* FMOV FRm, @-Rn */
2427 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2431 { /* FMOV FRm, FRn */
2432 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2433 /* As horrible as this looks, it's actually covering 5 separate cases:
2434 * 1. 32-bit fr-to-fr (PR=0)
2435 * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
2436 * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
2437 * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
2438 * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
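 *
 * In the code below ECX initially holds FPSCR (and is later replaced by the
 * XF bank pointer via load_xf_bank when the XF bank is needed), while EDX
 * holds the FR bank pointer loaded from fr_bank.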
2440 load_spreg( R_ECX, R_FPSCR );
2441 load_spreg( R_EDX, REG_OFFSET(fr_bank) );
2442 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2444 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
2445 store_fr( R_EDX, R_EAX, FRn );
2448 load_xf_bank( R_ECX );
2449 load_fr( R_ECX, R_EAX, FRm-1 );
2451 load_fr( R_ECX, R_EDX, FRm );
2452 store_fr( R_ECX, R_EAX, FRn-1 );
2453 store_fr( R_ECX, R_EDX, FRn );
2454 } else /* FRn&1 == 0 */ {
2455 load_fr( R_ECX, R_ECX, FRm );
2456 store_fr( R_EDX, R_EAX, FRn-1 );
2457 store_fr( R_EDX, R_ECX, FRn );
2459 } else /* FRm&1 == 0 */ {
2462 load_xf_bank( R_ECX );
2463 load_fr( R_EDX, R_EAX, FRm );
2464 load_fr( R_EDX, R_EDX, FRm+1 );
2465 store_fr( R_ECX, R_EAX, FRn-1 );
2466 store_fr( R_ECX, R_EDX, FRn );
2467 } else /* FRn&1 == 0 */ {
2469 load_fr( R_EDX, R_EAX, FRm );
2470 load_fr( R_EDX, R_ECX, FRm+1 );
2471 store_fr( R_EDX, R_EAX, FRn );
2472 store_fr( R_EDX, R_ECX, FRn+1 );
2478 switch( (ir&0xF0) >> 4 ) {
2480 { /* FSTS FPUL, FRn */
2481 uint32_t FRn = ((ir>>8)&0xF);
2485 { /* FLDS FRm, FPUL */
2486 uint32_t FRm = ((ir>>8)&0xF);
2490 { /* FLOAT FPUL, FRn */
2491 uint32_t FRn = ((ir>>8)&0xF);
2495 { /* FTRC FRm, FPUL */
2496 uint32_t FRm = ((ir>>8)&0xF);
2501 uint32_t FRn = ((ir>>8)&0xF);
2506 uint32_t FRn = ((ir>>8)&0xF);
2507 load_spreg( R_ECX, R_FPSCR );
2508 load_spreg( R_EDX, REG_OFFSET(fr_bank) );
2509 TEST_imm32_r32( FPSCR_PR, R_ECX );
2511 push_fr(R_EDX, FRn); // 3
2513 pop_fr( R_EDX, FRn); //3
2515 push_dr(R_EDX, FRn);
2522 uint32_t FRn = ((ir>>8)&0xF);
2527 uint32_t FRn = ((ir>>8)&0xF);
2532 uint32_t FRn = ((ir>>8)&0xF);
2537 uint32_t FRn = ((ir>>8)&0xF);
2541 { /* FCNVSD FPUL, FRn */
2542 uint32_t FRn = ((ir>>8)&0xF);
2546 { /* FCNVDS FRm, FPUL */
2547 uint32_t FRm = ((ir>>8)&0xF);
2551 { /* FIPR FVm, FVn */
2552 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3);
2556 switch( (ir&0x100) >> 8 ) {
2558 { /* FSCA FPUL, FRn */
2559 uint32_t FRn = ((ir>>9)&0x7)<<1;
2563 switch( (ir&0x200) >> 9 ) {
2565 { /* FTRV XMTRX, FVn */
2566 uint32_t FVn = ((ir>>10)&0x3);
2570 switch( (ir&0xC00) >> 10 ) {
2581 if( sh4_x86.in_delay_slot ) {
2582 RAISE_EXCEPTION(EXC_SLOT_ILLEGAL);
2584 RAISE_EXCEPTION(EXC_ILLEGAL);
2604 { /* FMAC FR0, FRm, FRn */
2605 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2616 if( sh4_x86.in_delay_slot ) {
2617 sh4_x86.in_delay_slot = FALSE;
.