2 * $Id: sh4x86.c,v 1.14 2007-09-20 08:37:19 nkeynes Exp $
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
8 * Copyright (c) 2007 Nathan Keynes.
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
28 #include "sh4/sh4core.h"
29 #include "sh4/sh4trans.h"
30 #include "sh4/sh4mmio.h"
31 #include "sh4/x86op.h"
/* Initial allocation, in bytes, for the per-block backpatch list. */
34 #define DEFAULT_BACKPATCH_SIZE 4096
37 * Struct to manage internal translation state. This state is not saved -
38 * it is only valid between calls to sh4_translate_begin_block() and
39 * sh4_translate_end_block()
41 struct sh4_x86_state {
42 gboolean in_delay_slot; /* true while translating the instruction in a branch delay slot */
43 gboolean priv_checked; /* true if we've already checked the cpu mode. */
44 gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
47 /* Allocated memory for the (block-wide) back-patch list */
48 uint32_t **backpatch_list;
49 uint32_t backpatch_posn; /* next free entry in backpatch_list */
50 uint32_t backpatch_size; /* capacity of backpatch_list, in entries (not bytes) */
/* Byte offsets of the exception exit stubs emitted at the end of each
 * translated block (see sh4_translate_end_block: each stub is a
 * PUSH imm32 + JMP rel8 pair). NOTE(review): the 7-byte stub size is
 * inferred from the spacing of these constants - confirm against the
 * emitted stub code. */
53 #define EXIT_DATA_ADDR_READ 0
54 #define EXIT_DATA_ADDR_WRITE 7
55 #define EXIT_ILLEGAL 14
56 #define EXIT_SLOT_ILLEGAL 21
57 #define EXIT_FPU_DISABLED 28
58 #define EXIT_SLOT_FPU_DISABLED 35
/* Translator state for the block currently being translated. */
60 static struct sh4_x86_state sh4_x86;
/* Saturation bounds - presumably used by the FP->int conversion code
 * (not visible in this extract). */
62 static uint32_t max_int = 0x7FFFFFFF;
63 static uint32_t min_int = 0x80000000;
64 static uint32_t save_fcw; /* save value for fpu control word */
65 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
/* (translator init fragment - enclosing function header elided)
 * Allocate the initial backpatch list. NOTE(review): the malloc result is
 * not checked here; a NULL return would only surface later when the list
 * is indexed. */
69 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
/* The size field counts entries (pointers), not bytes. */
70 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
/* Record the address of a rel32 jump operand so it can be fixed up later
 * by sh4_x86_do_backpatch(). Doubles the list capacity when full.
 * NOTE(review): realloc overwrites the list pointer directly, so the old
 * block leaks on realloc failure, and the assert below is compiled out
 * under NDEBUG. */
74 static void sh4_x86_add_backpatch( uint8_t *ptr )
76 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
77 sh4_x86.backpatch_size <<= 1;
78 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
79 assert( sh4_x86.backpatch_list != NULL );
81 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
/* Retarget every recorded rel32 operand at reloc_base. Each entry holds a
 * PC-relative displacement; the -4 accounts for the displacement being
 * relative to the end of the 4-byte operand. */
84 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
87 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
88 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
93 * Emit an instruction to load an SH4 reg into a real register
95 static inline void load_reg( int x86reg, int sh4reg )
/* ModRM 0x45+reg<<3 = [EBP+disp8]; EBP holds &sh4r (set up in
 * sh4_translate_begin_block), so this is MOV x86reg, sh4r.r[sh4reg]. */
99 OP(0x45 + (x86reg<<3));
100 OP(REG_OFFSET(r[sh4reg]));
/* Load the low 16 bits of an SH4 register into x86reg, sign-extended
 * (the MOVSX opcode bytes are elided in this extract). */
103 static inline void load_reg16s( int x86reg, int sh4reg )
107 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
/* Load the low 16 bits of an SH4 register into x86reg, zero-extended
 * (the MOVZX opcode bytes are elided in this extract). */
110 static inline void load_reg16u( int x86reg, int sh4reg )
114 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]))
/* Load/store a special (non-GPR) field of sh4r, identified by its byte
 * offset within the structure. */
118 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
119 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
121 * Emit an instruction to load an immediate value into a register
123 static inline void load_imm32( int x86reg, uint32_t value ) {
124 /* mov #value, reg */
130 * Emit an instruction to store an SH4 reg (RN)
132 void static inline store_reg( int x86reg, int sh4reg ) {
133 /* mov reg, [bp+n] */
/* ModRM 0x45+reg<<3 = [EBP+disp8]: MOV sh4r.r[sh4reg], x86reg */
135 OP(0x45 + (x86reg<<3));
136 OP(REG_OFFSET(r[sh4reg]));
/* Load the current floating-point bank pointer (sh4r.fr_bank) into bankreg. */
139 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
142 * Load an FR register (single-precision floating point) into an integer x86
143 * register (eg for register-to-register moves)
145 void static inline load_fr( int bankreg, int x86reg, int frm )
/* 0x8B = MOV r32, r/m32; [bankreg + (frm^1)*4]. The frm^1 swap matches the
 * in-memory word order of the FR registers (pairs stored high/low). */
147 OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
151 * Store an FR register (single-precision floating point) from an integer x86
152 * register (eg for register-to-register moves)
154 void static inline store_fr( int bankreg, int x86reg, int frn )
/* 0x89 = MOV r/m32, r32; [bankreg + (frn^1)*4], same word swap as load_fr. */
156 OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
161 * Load a pointer to the back fp bank into the specified x86 register. The
162 * bankreg must have been previously loaded with FPSCR.
165 static inline void load_xf_bank( int bankreg )
168 SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
169 AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
170 OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
174 * Update the fr_bank pointer based on the current fpscr value.
/* fpscrreg must hold the new FPSCR value; it is destroyed in the process.
 * Same bit-21 extraction as load_xf_bank, but the result is written back
 * to sh4r.fr_bank rather than kept in a register. */
176 static inline void update_fr_bank( int fpscrreg )
178 SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
179 AND_imm8s_r32( 0x40, fpscrreg ); // Complete extraction
180 OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
181 store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
184 * Push FPUL (as a 32-bit float) onto the FPU stack
186 static inline void push_fpul( )
/* D9 /0 = FLD m32fp, from [EBP + R_FPUL] */
188 OP(0xD9); OP(0x45); OP(R_FPUL);
192 * Pop FPUL (as a 32-bit float) from the FPU stack
194 static inline void pop_fpul( )
/* D9 /3 = FSTP m32fp (store and pop), to [EBP + R_FPUL] */
196 OP(0xD9); OP(0x5D); OP(R_FPUL);
200 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
201 * with the location of the current fp bank.
203 static inline void push_fr( int bankreg, int frm )
205 OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
209 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
210 * with bankreg previously loaded with the location of the current fp bank.
212 static inline void pop_fr( int bankreg, int frm )
/* NOTE(review): ModRM /3 is FSTP (store-and-pop), which matches "pop"
 * despite the FST.S comment below. */
214 OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
218 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
219 * with the location of the current fp bank.
/* DD /0 = FLD m64fp. No word swap here: DR registers are addressed by the
 * even register number (frm*4 bytes into the bank). */
221 static inline void push_dr( int bankreg, int frm )
223 OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
/* Pop a 64-bit double from the FPU stack back into the fp bank.
 * DD /3 = FSTP m64fp (store-and-pop). */
226 static inline void pop_dr( int bankreg, int frm )
228 OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
/* Emit a zero-argument call to the C function at ptr. */
232 * Note: clobbers EAX to make the indirect call - this isn't usually
233 * a problem since the callee will usually clobber it anyway.
235 static inline void call_func0( void *ptr )
/* Load target into EAX; the CALL_r32(R_EAX) line is elided in this extract. */
237 load_imm32(R_EAX, (uint32_t)ptr);
/* Emit a one-argument cdecl call: push arg1, call ptr (elided lines),
 * then pop the argument back off the stack. Clobbers EAX. */
241 static inline void call_func1( void *ptr, int arg1 )
245 ADD_imm8s_r32( 4, R_ESP );
/* Emit a two-argument cdecl call: push arg2 then arg1, call ptr (elided
 * lines), then pop both arguments. Clobbers EAX. */
248 static inline void call_func2( void *ptr, int arg1, int arg2 )
253 ADD_imm8s_r32( 8, R_ESP );
257 * Write a double (64-bit) value into memory, with the first word in arg2a, and
258 * the second in arg2b
/* Emits two sh4_write_long calls with the arguments pushed manually (the
 * PUSH lines are elided in this extract); each ADD 8,ESP pops one call's
 * two arguments. The addr register is bumped to addr+4 for the second
 * word and restored afterwards. */
261 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
263 ADD_imm8s_r32( 4, addr );
266 ADD_imm8s_r32( -4, addr );
269 call_func0(sh4_write_long);
270 ADD_imm8s_r32( 8, R_ESP );
271 call_func0(sh4_write_long);
272 ADD_imm8s_r32( 8, R_ESP );
276 * Read a double (64-bit) value from memory, writing the first word into arg2a
277 * and the second into arg2b. The addr must not be in EAX
/* Two sh4_read_long calls (argument PUSH/result-move lines elided); the
 * addr-must-not-be-EAX restriction exists because the first call returns
 * its result in EAX before the second address is formed. */
280 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
283 call_func0(sh4_read_long);
286 ADD_imm8s_r32( 4, addr );
288 call_func0(sh4_read_long);
289 ADD_imm8s_r32( 4, R_ESP );
290 MOV_r32_r32( R_EAX, arg2b );
294 /* Exception checks - Note that all exception checks will clobber EAX */
/* Emit a privilege check, at most once per block (priv_checked latch):
 * test SR.MD and branch to the illegal (or slot-illegal, when in a delay
 * slot) exit stub if the CPU is in user mode. */
295 static void check_priv( )
297 if( !sh4_x86.priv_checked ) {
298 sh4_x86.priv_checked = TRUE;
299 load_spreg( R_EAX, R_SR );
300 AND_imm32_r32( SR_MD, R_EAX );
301 if( sh4_x86.in_delay_slot ) {
302 JE_exit( EXIT_SLOT_ILLEGAL );
304 JE_exit( EXIT_ILLEGAL );
/* Emit an FPU-enabled check, at most once per block (fpuen_checked latch):
 * test SR.FD and branch to the fpu-disabled (or slot variant) exit stub
 * if the FPU is disabled. Clobbers EAX. */
309 static void check_fpuen( )
311 if( !sh4_x86.fpuen_checked ) {
312 sh4_x86.fpuen_checked = TRUE;
313 load_spreg( R_EAX, R_SR );
314 AND_imm32_r32( SR_FD, R_EAX );
315 if( sh4_x86.in_delay_slot ) {
316 JNE_exit(EXIT_SLOT_FPU_DISABLED);
318 JNE_exit(EXIT_FPU_DISABLED);
/* Emit a 2-byte read-alignment check: exit with a data-address-read error
 * if the address in x86reg has bit 0 set. */
323 static void check_ralign16( int x86reg )
325 TEST_imm32_r32( 0x00000001, x86reg );
326 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 2-byte write-alignment check: exit with a data-address-write
 * error if the address in x86reg has bit 0 set. */
329 static void check_walign16( int x86reg )
331 TEST_imm32_r32( 0x00000001, x86reg );
332 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Emit a 4-byte read-alignment check: exit with a data-address-read error
 * if either of the low two address bits is set. */
335 static void check_ralign32( int x86reg )
337 TEST_imm32_r32( 0x00000003, x86reg );
338 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 4-byte write-alignment check: exit with a data-address-write
 * error if either of the low two address bits is set. */
340 static void check_walign32( int x86reg )
342 TEST_imm32_r32( 0x00000003, x86reg );
343 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Move a memory-read result out of EAX into value_reg (no-op when the
 * caller asked for EAX). Wrapped in do{}while(0) so the macro expands to
 * exactly one statement - the bare if(...){...} form is subject to the
 * dangling-else problem when used under an unbraced if/else. */
#define MEM_RESULT(value_reg) do { if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); } } while(0)
/* Convenience wrappers around the SH4 memory-access shims. Reads leave the
 * value in value_reg (via MEM_RESULT); writes take the address in addr_reg
 * and the data in value_reg. Each is wrapped in do{}while(0) so the
 * multi-statement expansions behave as a single statement under unbraced
 * if/else - the original bare "stmt; stmt" form silently breaks there. */
#define MEM_READ_BYTE( addr_reg, value_reg ) do { call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg); } while(0)
#define MEM_READ_WORD( addr_reg, value_reg ) do { call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg); } while(0)
#define MEM_READ_LONG( addr_reg, value_reg ) do { call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg); } while(0)
#define MEM_WRITE_BYTE( addr_reg, value_reg ) do { call_func2(sh4_write_byte, addr_reg, value_reg); } while(0)
#define MEM_WRITE_WORD( addr_reg, value_reg ) do { call_func2(sh4_write_word, addr_reg, value_reg); } while(0)
#define MEM_WRITE_LONG( addr_reg, value_reg ) do { call_func2(sh4_write_long, addr_reg, value_reg); } while(0)
/* Abort translation of an instruction that is illegal in a delay slot:
 * emit a jump to the slot-illegal exit stub, clear the delay-slot flag,
 * and end the basic block (the return exits the translate function).
 * Wrapped in do{}while(0) so the three statements stay together under an
 * unbraced if - the original bare form would leave the flag-clear and
 * return outside the conditional. */
#define SLOTILLEGAL() do { JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1; } while(0)
360 * Emit the 'start of block' assembly. Sets up the stack frame and save
363 void sh4_translate_begin_block()
/* EBP := &sh4r, the base for all sh4r-relative addressing in this file. */
367 load_imm32( R_EBP, (uint32_t)&sh4r );
/* ESI := 0; sh4_translate_end_block adds ESI twice to the PC, so ESI
 * appears to accumulate the translated instruction count - TODO confirm
 * where it is incremented (not visible in this extract). */
370 XOR_r32_r32(R_ESI, R_ESI);
/* Reset per-block translator state. */
372 sh4_x86.in_delay_slot = FALSE;
373 sh4_x86.priv_checked = FALSE;
374 sh4_x86.fpuen_checked = FALSE;
375 sh4_x86.backpatch_posn = 0;
/* Default exit code - presumably "continue normally"; sh4_sleep sets it
 * to 0 (see the SLEEP case in the translator). */
376 sh4_x86.exit_code = 1;
380 * Exit the block early (ie branch out), conditionally or otherwise
/* (fragment - the function header is elided in this extract)
 * Save the branch target (EDI) as the new PC, add the elapsed cycles
 * (sh4_cpu_period scaled by the instruction count - the MUL line is
 * elided) to slice_cycle, and load the block's exit code for return. */
384 store_spreg( R_EDI, REG_OFFSET(pc) );
385 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
386 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
388 ADD_r32_r32( R_EAX, R_ECX );
389 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
390 load_imm32( R_EAX, sh4_x86.exit_code );
398 * Flush any open regs back to memory, restore SI/DI/, update PC, etc
400 void sh4_translate_end_block( sh4addr_t pc ) {
401 assert( !sh4_x86.in_delay_slot ); // should never stop here
402 // Normal termination - save PC, cycle count
/* If any exception checks were emitted, append the exit stubs they jump
 * to. Each stub pushes its exception code and jumps (rel8) to the shared
 * handler below; the JMP_rel8 distances skip the remaining stubs. Note
 * these stubs are 7 bytes each, matching the EXIT_* offsets. */
405 if( sh4_x86.backpatch_posn != 0 ) {
406 uint8_t *end_ptr = xlat_output;
407 // Exception termination. Jump block for various exception codes:
408 PUSH_imm32( EXC_DATA_ADDR_READ );
409 JMP_rel8( 33, target1 );
410 PUSH_imm32( EXC_DATA_ADDR_WRITE );
411 JMP_rel8( 26, target2 );
412 PUSH_imm32( EXC_ILLEGAL );
413 JMP_rel8( 19, target3 );
414 PUSH_imm32( EXC_SLOT_ILLEGAL );
415 JMP_rel8( 12, target4 );
416 PUSH_imm32( EXC_FPU_DISABLED );
417 JMP_rel8( 5, target5 );
418 PUSH_imm32( EXC_SLOT_FPU_DISABLED );
/* Shared handler: recompute the faulting PC (pc += 2*ESI), charge the
 * elapsed cycles, then raise the pushed exception. */
425 load_spreg( R_ECX, REG_OFFSET(pc) );
426 ADD_r32_r32( R_ESI, R_ECX );
427 ADD_r32_r32( R_ESI, R_ECX );
428 store_spreg( R_ECX, REG_OFFSET(pc) );
429 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
430 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
432 ADD_r32_r32( R_EAX, R_ECX );
433 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
435 load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
436 CALL_r32( R_EAX ); // 2
437 ADD_imm8s_r32( 4, R_ESP );
/* Retarget all recorded exception jumps at the stub block just emitted. */
443 sh4_x86_do_backpatch( end_ptr );
/* Cached host pointer to the current instruction page, and the SH4 page
 * (pc >> 12) it corresponds to; maintained by sh4_x86_translate_instruction
 * to avoid a full memory lookup per fetch. */
449 extern uint16_t *sh4_icache;
450 extern uint32_t sh4_icache_addr;
453 * Translate a single instruction. Delayed branches are handled specially
454 * by translating both branch and delayed instruction as a single unit (as
457 * @return true if the instruction marks the end of a basic block
460 uint32_t sh4_x86_translate_instruction( uint32_t pc )
463 /* Read instruction */
464 uint32_t pageaddr = pc >> 12;
465 if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
466 ir = sh4_icache[(pc&0xFFF)>>1];
468 sh4_icache = (uint16_t *)mem_get_page(pc);
469 if( ((uint32_t)sh4_icache) < MAX_IO_REGIONS ) {
470 /* If someone's actually been so daft as to try to execute out of an IO
471 * region, fallback on the full-blown memory read
474 ir = sh4_read_word(pc);
476 sh4_icache_addr = pageaddr;
477 ir = sh4_icache[(pc&0xFFF)>>1];
481 switch( (ir&0xF000) >> 12 ) {
485 switch( (ir&0x80) >> 7 ) {
487 switch( (ir&0x70) >> 4 ) {
490 uint32_t Rn = ((ir>>8)&0xF);
492 call_func0(sh4_read_sr);
493 store_reg( R_EAX, Rn );
498 uint32_t Rn = ((ir>>8)&0xF);
499 load_spreg( R_EAX, R_GBR );
500 store_reg( R_EAX, Rn );
505 uint32_t Rn = ((ir>>8)&0xF);
507 load_spreg( R_EAX, R_VBR );
508 store_reg( R_EAX, Rn );
513 uint32_t Rn = ((ir>>8)&0xF);
515 load_spreg( R_EAX, R_SSR );
516 store_reg( R_EAX, Rn );
521 uint32_t Rn = ((ir>>8)&0xF);
523 load_spreg( R_EAX, R_SPC );
524 store_reg( R_EAX, Rn );
533 { /* STC Rm_BANK, Rn */
534 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
536 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
537 store_reg( R_EAX, Rn );
543 switch( (ir&0xF0) >> 4 ) {
546 uint32_t Rn = ((ir>>8)&0xF);
547 if( sh4_x86.in_delay_slot ) {
550 load_imm32( R_EAX, pc + 4 );
551 store_spreg( R_EAX, R_PR );
552 load_reg( R_EDI, Rn );
553 ADD_r32_r32( R_EAX, R_EDI );
554 sh4_x86.in_delay_slot = TRUE;
561 uint32_t Rn = ((ir>>8)&0xF);
562 if( sh4_x86.in_delay_slot ) {
565 load_reg( R_EDI, Rn );
566 ADD_imm32_r32( pc + 4, R_EDI );
567 sh4_x86.in_delay_slot = TRUE;
574 uint32_t Rn = ((ir>>8)&0xF);
575 load_reg( R_EAX, Rn );
577 AND_imm32_r32( 0xFC000000, R_EAX );
578 CMP_imm32_r32( 0xE0000000, R_EAX );
580 call_func0( sh4_flush_store_queue );
582 ADD_imm8s_r32( 4, R_ESP );
587 uint32_t Rn = ((ir>>8)&0xF);
592 uint32_t Rn = ((ir>>8)&0xF);
597 uint32_t Rn = ((ir>>8)&0xF);
601 { /* MOVCA.L R0, @Rn */
602 uint32_t Rn = ((ir>>8)&0xF);
603 load_reg( R_EAX, 0 );
604 load_reg( R_ECX, Rn );
605 check_walign32( R_ECX );
606 MEM_WRITE_LONG( R_ECX, R_EAX );
615 { /* MOV.B Rm, @(R0, Rn) */
616 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
617 load_reg( R_EAX, 0 );
618 load_reg( R_ECX, Rn );
619 ADD_r32_r32( R_EAX, R_ECX );
620 load_reg( R_EAX, Rm );
621 MEM_WRITE_BYTE( R_ECX, R_EAX );
625 { /* MOV.W Rm, @(R0, Rn) */
626 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
627 load_reg( R_EAX, 0 );
628 load_reg( R_ECX, Rn );
629 ADD_r32_r32( R_EAX, R_ECX );
630 check_walign16( R_ECX );
631 load_reg( R_EAX, Rm );
632 MEM_WRITE_WORD( R_ECX, R_EAX );
636 { /* MOV.L Rm, @(R0, Rn) */
637 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
638 load_reg( R_EAX, 0 );
639 load_reg( R_ECX, Rn );
640 ADD_r32_r32( R_EAX, R_ECX );
641 check_walign32( R_ECX );
642 load_reg( R_EAX, Rm );
643 MEM_WRITE_LONG( R_ECX, R_EAX );
648 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
649 load_reg( R_EAX, Rm );
650 load_reg( R_ECX, Rn );
652 store_spreg( R_EAX, R_MACL );
656 switch( (ir&0xFF0) >> 4 ) {
671 XOR_r32_r32(R_EAX, R_EAX);
672 store_spreg( R_EAX, R_MACL );
673 store_spreg( R_EAX, R_MACH );
698 switch( (ir&0xF0) >> 4 ) {
701 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
706 XOR_r32_r32( R_EAX, R_EAX );
707 store_spreg( R_EAX, R_Q );
708 store_spreg( R_EAX, R_M );
709 store_spreg( R_EAX, R_T );
714 uint32_t Rn = ((ir>>8)&0xF);
715 load_spreg( R_EAX, R_T );
716 store_reg( R_EAX, Rn );
725 switch( (ir&0xF0) >> 4 ) {
728 uint32_t Rn = ((ir>>8)&0xF);
729 load_spreg( R_EAX, R_MACH );
730 store_reg( R_EAX, Rn );
735 uint32_t Rn = ((ir>>8)&0xF);
736 load_spreg( R_EAX, R_MACL );
737 store_reg( R_EAX, Rn );
742 uint32_t Rn = ((ir>>8)&0xF);
743 load_spreg( R_EAX, R_PR );
744 store_reg( R_EAX, Rn );
749 uint32_t Rn = ((ir>>8)&0xF);
751 load_spreg( R_EAX, R_SGR );
752 store_reg( R_EAX, Rn );
757 uint32_t Rn = ((ir>>8)&0xF);
758 load_spreg( R_EAX, R_FPUL );
759 store_reg( R_EAX, Rn );
763 { /* STS FPSCR, Rn */
764 uint32_t Rn = ((ir>>8)&0xF);
765 load_spreg( R_EAX, R_FPSCR );
766 store_reg( R_EAX, Rn );
771 uint32_t Rn = ((ir>>8)&0xF);
773 load_spreg( R_EAX, R_DBR );
774 store_reg( R_EAX, Rn );
783 switch( (ir&0xFF0) >> 4 ) {
786 if( sh4_x86.in_delay_slot ) {
789 load_spreg( R_EDI, R_PR );
790 sh4_x86.in_delay_slot = TRUE;
798 call_func0( sh4_sleep );
799 sh4_x86.exit_code = 0;
800 sh4_x86.in_delay_slot = FALSE;
808 if( sh4_x86.in_delay_slot ) {
811 load_spreg( R_EDI, R_SPC );
812 load_spreg( R_EAX, R_SSR );
813 call_func1( sh4_write_sr, R_EAX );
814 sh4_x86.in_delay_slot = TRUE;
815 sh4_x86.priv_checked = FALSE;
816 sh4_x86.fpuen_checked = FALSE;
827 { /* MOV.B @(R0, Rm), Rn */
828 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
829 load_reg( R_EAX, 0 );
830 load_reg( R_ECX, Rm );
831 ADD_r32_r32( R_EAX, R_ECX );
832 MEM_READ_BYTE( R_ECX, R_EAX );
833 store_reg( R_EAX, Rn );
837 { /* MOV.W @(R0, Rm), Rn */
838 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
839 load_reg( R_EAX, 0 );
840 load_reg( R_ECX, Rm );
841 ADD_r32_r32( R_EAX, R_ECX );
842 check_ralign16( R_ECX );
843 MEM_READ_WORD( R_ECX, R_EAX );
844 store_reg( R_EAX, Rn );
848 { /* MOV.L @(R0, Rm), Rn */
849 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
850 load_reg( R_EAX, 0 );
851 load_reg( R_ECX, Rm );
852 ADD_r32_r32( R_EAX, R_ECX );
853 check_ralign32( R_ECX );
854 MEM_READ_LONG( R_ECX, R_EAX );
855 store_reg( R_EAX, Rn );
859 { /* MAC.L @Rm+, @Rn+ */
860 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
861 load_reg( R_ECX, Rm );
862 check_ralign32( R_ECX );
863 load_reg( R_ECX, Rn );
864 check_ralign32( R_ECX );
865 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
866 MEM_READ_LONG( R_ECX, R_EAX );
868 load_reg( R_ECX, Rm );
869 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
870 MEM_READ_LONG( R_ECX, R_EAX );
873 ADD_r32_sh4r( R_EAX, R_MACL );
874 ADC_r32_sh4r( R_EDX, R_MACH );
876 load_spreg( R_ECX, R_S );
877 TEST_r32_r32(R_ECX, R_ECX);
879 call_func0( signsat48 );
889 { /* MOV.L Rm, @(disp, Rn) */
890 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
891 load_reg( R_ECX, Rn );
892 load_reg( R_EAX, Rm );
893 ADD_imm32_r32( disp, R_ECX );
894 check_walign32( R_ECX );
895 MEM_WRITE_LONG( R_ECX, R_EAX );
901 { /* MOV.B Rm, @Rn */
902 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
903 load_reg( R_EAX, Rm );
904 load_reg( R_ECX, Rn );
905 MEM_WRITE_BYTE( R_ECX, R_EAX );
909 { /* MOV.W Rm, @Rn */
910 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
911 load_reg( R_ECX, Rn );
912 check_walign16( R_ECX );
913 load_reg( R_EAX, Rm );
914 MEM_WRITE_WORD( R_ECX, R_EAX );
918 { /* MOV.L Rm, @Rn */
919 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
920 load_reg( R_EAX, Rm );
921 load_reg( R_ECX, Rn );
922 check_walign32(R_ECX);
923 MEM_WRITE_LONG( R_ECX, R_EAX );
927 { /* MOV.B Rm, @-Rn */
928 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
929 load_reg( R_EAX, Rm );
930 load_reg( R_ECX, Rn );
931 ADD_imm8s_r32( -1, R_ECX );
932 store_reg( R_ECX, Rn );
933 MEM_WRITE_BYTE( R_ECX, R_EAX );
937 { /* MOV.W Rm, @-Rn */
938 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
939 load_reg( R_ECX, Rn );
940 check_walign16( R_ECX );
941 load_reg( R_EAX, Rm );
942 ADD_imm8s_r32( -2, R_ECX );
943 store_reg( R_ECX, Rn );
944 MEM_WRITE_WORD( R_ECX, R_EAX );
948 { /* MOV.L Rm, @-Rn */
949 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
950 load_reg( R_EAX, Rm );
951 load_reg( R_ECX, Rn );
952 check_walign32( R_ECX );
953 ADD_imm8s_r32( -4, R_ECX );
954 store_reg( R_ECX, Rn );
955 MEM_WRITE_LONG( R_ECX, R_EAX );
960 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
961 load_reg( R_EAX, Rm );
962 load_reg( R_ECX, Rn );
963 SHR_imm8_r32( 31, R_EAX );
964 SHR_imm8_r32( 31, R_ECX );
965 store_spreg( R_EAX, R_M );
966 store_spreg( R_ECX, R_Q );
967 CMP_r32_r32( R_EAX, R_ECX );
973 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
974 load_reg( R_EAX, Rm );
975 load_reg( R_ECX, Rn );
976 TEST_r32_r32( R_EAX, R_ECX );
982 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
983 load_reg( R_EAX, Rm );
984 load_reg( R_ECX, Rn );
985 AND_r32_r32( R_EAX, R_ECX );
986 store_reg( R_ECX, Rn );
991 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
992 load_reg( R_EAX, Rm );
993 load_reg( R_ECX, Rn );
994 XOR_r32_r32( R_EAX, R_ECX );
995 store_reg( R_ECX, Rn );
1000 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1001 load_reg( R_EAX, Rm );
1002 load_reg( R_ECX, Rn );
1003 OR_r32_r32( R_EAX, R_ECX );
1004 store_reg( R_ECX, Rn );
1008 { /* CMP/STR Rm, Rn */
1009 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1010 load_reg( R_EAX, Rm );
1011 load_reg( R_ECX, Rn );
1012 XOR_r32_r32( R_ECX, R_EAX );
1013 TEST_r8_r8( R_AL, R_AL );
1014 JE_rel8(13, target1);
1015 TEST_r8_r8( R_AH, R_AH ); // 2
1016 JE_rel8(9, target2);
1017 SHR_imm8_r32( 16, R_EAX ); // 3
1018 TEST_r8_r8( R_AL, R_AL ); // 2
1019 JE_rel8(2, target3);
1020 TEST_r8_r8( R_AH, R_AH ); // 2
1021 JMP_TARGET(target1);
1022 JMP_TARGET(target2);
1023 JMP_TARGET(target3);
1028 { /* XTRCT Rm, Rn */
1029 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1030 load_reg( R_EAX, Rm );
1031 load_reg( R_ECX, Rn );
1032 SHL_imm8_r32( 16, R_EAX );
1033 SHR_imm8_r32( 16, R_ECX );
1034 OR_r32_r32( R_EAX, R_ECX );
1035 store_reg( R_ECX, Rn );
1039 { /* MULU.W Rm, Rn */
1040 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1041 load_reg16u( R_EAX, Rm );
1042 load_reg16u( R_ECX, Rn );
1044 store_spreg( R_EAX, R_MACL );
1048 { /* MULS.W Rm, Rn */
1049 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1050 load_reg16s( R_EAX, Rm );
1051 load_reg16s( R_ECX, Rn );
1053 store_spreg( R_EAX, R_MACL );
1064 { /* CMP/EQ Rm, Rn */
1065 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1066 load_reg( R_EAX, Rm );
1067 load_reg( R_ECX, Rn );
1068 CMP_r32_r32( R_EAX, R_ECX );
1073 { /* CMP/HS Rm, Rn */
1074 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1075 load_reg( R_EAX, Rm );
1076 load_reg( R_ECX, Rn );
1077 CMP_r32_r32( R_EAX, R_ECX );
1082 { /* CMP/GE Rm, Rn */
1083 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1084 load_reg( R_EAX, Rm );
1085 load_reg( R_ECX, Rn );
1086 CMP_r32_r32( R_EAX, R_ECX );
1092 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1093 load_spreg( R_ECX, R_M );
1094 load_reg( R_EAX, Rn );
1097 SETC_r8( R_DL ); // Q'
1098 CMP_sh4r_r32( R_Q, R_ECX );
1099 JE_rel8(5, mqequal);
1100 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
1102 JMP_TARGET(mqequal);
1103 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
1105 store_reg( R_EAX, Rn ); // Done with Rn now
1106 SETC_r8(R_AL); // tmp1
1107 XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
1108 XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
1109 store_spreg( R_ECX, R_Q );
1110 XOR_imm8s_r32( 1, R_AL ); // T = !Q'
1111 MOVZX_r8_r32( R_AL, R_EAX );
1112 store_spreg( R_EAX, R_T );
1116 { /* DMULU.L Rm, Rn */
1117 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1118 load_reg( R_EAX, Rm );
1119 load_reg( R_ECX, Rn );
1121 store_spreg( R_EDX, R_MACH );
1122 store_spreg( R_EAX, R_MACL );
1126 { /* CMP/HI Rm, Rn */
1127 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1128 load_reg( R_EAX, Rm );
1129 load_reg( R_ECX, Rn );
1130 CMP_r32_r32( R_EAX, R_ECX );
1135 { /* CMP/GT Rm, Rn */
1136 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1137 load_reg( R_EAX, Rm );
1138 load_reg( R_ECX, Rn );
1139 CMP_r32_r32( R_EAX, R_ECX );
1145 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1146 load_reg( R_EAX, Rm );
1147 load_reg( R_ECX, Rn );
1148 SUB_r32_r32( R_EAX, R_ECX );
1149 store_reg( R_ECX, Rn );
1154 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1155 load_reg( R_EAX, Rm );
1156 load_reg( R_ECX, Rn );
1158 SBB_r32_r32( R_EAX, R_ECX );
1159 store_reg( R_ECX, Rn );
1165 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1166 load_reg( R_EAX, Rm );
1167 load_reg( R_ECX, Rn );
1168 SUB_r32_r32( R_EAX, R_ECX );
1169 store_reg( R_ECX, Rn );
1175 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1176 load_reg( R_EAX, Rm );
1177 load_reg( R_ECX, Rn );
1178 ADD_r32_r32( R_EAX, R_ECX );
1179 store_reg( R_ECX, Rn );
1183 { /* DMULS.L Rm, Rn */
1184 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1185 load_reg( R_EAX, Rm );
1186 load_reg( R_ECX, Rn );
1188 store_spreg( R_EDX, R_MACH );
1189 store_spreg( R_EAX, R_MACL );
1194 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1195 load_reg( R_EAX, Rm );
1196 load_reg( R_ECX, Rn );
1198 ADC_r32_r32( R_EAX, R_ECX );
1199 store_reg( R_ECX, Rn );
1205 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1206 load_reg( R_EAX, Rm );
1207 load_reg( R_ECX, Rn );
1208 ADD_r32_r32( R_EAX, R_ECX );
1209 store_reg( R_ECX, Rn );
1221 switch( (ir&0xF0) >> 4 ) {
1224 uint32_t Rn = ((ir>>8)&0xF);
1225 load_reg( R_EAX, Rn );
1228 store_reg( R_EAX, Rn );
1233 uint32_t Rn = ((ir>>8)&0xF);
1234 load_reg( R_EAX, Rn );
1235 ADD_imm8s_r32( -1, R_EAX );
1236 store_reg( R_EAX, Rn );
1242 uint32_t Rn = ((ir>>8)&0xF);
1243 load_reg( R_EAX, Rn );
1246 store_reg( R_EAX, Rn );
1255 switch( (ir&0xF0) >> 4 ) {
1258 uint32_t Rn = ((ir>>8)&0xF);
1259 load_reg( R_EAX, Rn );
1262 store_reg( R_EAX, Rn );
1267 uint32_t Rn = ((ir>>8)&0xF);
1268 load_reg( R_EAX, Rn );
1269 CMP_imm8s_r32( 0, R_EAX );
1275 uint32_t Rn = ((ir>>8)&0xF);
1276 load_reg( R_EAX, Rn );
1279 store_reg( R_EAX, Rn );
1288 switch( (ir&0xF0) >> 4 ) {
1290 { /* STS.L MACH, @-Rn */
1291 uint32_t Rn = ((ir>>8)&0xF);
1292 load_reg( R_ECX, Rn );
1293 check_walign32( R_ECX );
1294 ADD_imm8s_r32( -4, R_ECX );
1295 store_reg( R_ECX, Rn );
1296 load_spreg( R_EAX, R_MACH );
1297 MEM_WRITE_LONG( R_ECX, R_EAX );
1301 { /* STS.L MACL, @-Rn */
1302 uint32_t Rn = ((ir>>8)&0xF);
1303 load_reg( R_ECX, Rn );
1304 check_walign32( R_ECX );
1305 ADD_imm8s_r32( -4, R_ECX );
1306 store_reg( R_ECX, Rn );
1307 load_spreg( R_EAX, R_MACL );
1308 MEM_WRITE_LONG( R_ECX, R_EAX );
1312 { /* STS.L PR, @-Rn */
1313 uint32_t Rn = ((ir>>8)&0xF);
1314 load_reg( R_ECX, Rn );
1315 check_walign32( R_ECX );
1316 ADD_imm8s_r32( -4, R_ECX );
1317 store_reg( R_ECX, Rn );
1318 load_spreg( R_EAX, R_PR );
1319 MEM_WRITE_LONG( R_ECX, R_EAX );
1323 { /* STC.L SGR, @-Rn */
1324 uint32_t Rn = ((ir>>8)&0xF);
1326 load_reg( R_ECX, Rn );
1327 check_walign32( R_ECX );
1328 ADD_imm8s_r32( -4, R_ECX );
1329 store_reg( R_ECX, Rn );
1330 load_spreg( R_EAX, R_SGR );
1331 MEM_WRITE_LONG( R_ECX, R_EAX );
1335 { /* STS.L FPUL, @-Rn */
1336 uint32_t Rn = ((ir>>8)&0xF);
1337 load_reg( R_ECX, Rn );
1338 check_walign32( R_ECX );
1339 ADD_imm8s_r32( -4, R_ECX );
1340 store_reg( R_ECX, Rn );
1341 load_spreg( R_EAX, R_FPUL );
1342 MEM_WRITE_LONG( R_ECX, R_EAX );
1346 { /* STS.L FPSCR, @-Rn */
1347 uint32_t Rn = ((ir>>8)&0xF);
1348 load_reg( R_ECX, Rn );
1349 check_walign32( R_ECX );
1350 ADD_imm8s_r32( -4, R_ECX );
1351 store_reg( R_ECX, Rn );
1352 load_spreg( R_EAX, R_FPSCR );
1353 MEM_WRITE_LONG( R_ECX, R_EAX );
1357 { /* STC.L DBR, @-Rn */
1358 uint32_t Rn = ((ir>>8)&0xF);
1360 load_reg( R_ECX, Rn );
1361 check_walign32( R_ECX );
1362 ADD_imm8s_r32( -4, R_ECX );
1363 store_reg( R_ECX, Rn );
1364 load_spreg( R_EAX, R_DBR );
1365 MEM_WRITE_LONG( R_ECX, R_EAX );
1374 switch( (ir&0x80) >> 7 ) {
1376 switch( (ir&0x70) >> 4 ) {
1378 { /* STC.L SR, @-Rn */
1379 uint32_t Rn = ((ir>>8)&0xF);
1381 call_func0( sh4_read_sr );
1382 load_reg( R_ECX, Rn );
1383 check_walign32( R_ECX );
1384 ADD_imm8s_r32( -4, R_ECX );
1385 store_reg( R_ECX, Rn );
1386 MEM_WRITE_LONG( R_ECX, R_EAX );
1390 { /* STC.L GBR, @-Rn */
1391 uint32_t Rn = ((ir>>8)&0xF);
1392 load_reg( R_ECX, Rn );
1393 check_walign32( R_ECX );
1394 ADD_imm8s_r32( -4, R_ECX );
1395 store_reg( R_ECX, Rn );
1396 load_spreg( R_EAX, R_GBR );
1397 MEM_WRITE_LONG( R_ECX, R_EAX );
1401 { /* STC.L VBR, @-Rn */
1402 uint32_t Rn = ((ir>>8)&0xF);
1404 load_reg( R_ECX, Rn );
1405 check_walign32( R_ECX );
1406 ADD_imm8s_r32( -4, R_ECX );
1407 store_reg( R_ECX, Rn );
1408 load_spreg( R_EAX, R_VBR );
1409 MEM_WRITE_LONG( R_ECX, R_EAX );
1413 { /* STC.L SSR, @-Rn */
1414 uint32_t Rn = ((ir>>8)&0xF);
1416 load_reg( R_ECX, Rn );
1417 check_walign32( R_ECX );
1418 ADD_imm8s_r32( -4, R_ECX );
1419 store_reg( R_ECX, Rn );
1420 load_spreg( R_EAX, R_SSR );
1421 MEM_WRITE_LONG( R_ECX, R_EAX );
1425 { /* STC.L SPC, @-Rn */
1426 uint32_t Rn = ((ir>>8)&0xF);
1428 load_reg( R_ECX, Rn );
1429 check_walign32( R_ECX );
1430 ADD_imm8s_r32( -4, R_ECX );
1431 store_reg( R_ECX, Rn );
1432 load_spreg( R_EAX, R_SPC );
1433 MEM_WRITE_LONG( R_ECX, R_EAX );
1442 { /* STC.L Rm_BANK, @-Rn */
1443 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1445 load_reg( R_ECX, Rn );
1446 check_walign32( R_ECX );
1447 ADD_imm8s_r32( -4, R_ECX );
1448 store_reg( R_ECX, Rn );
1449 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1450 MEM_WRITE_LONG( R_ECX, R_EAX );
1456 switch( (ir&0xF0) >> 4 ) {
1459 uint32_t Rn = ((ir>>8)&0xF);
1460 load_reg( R_EAX, Rn );
1462 store_reg( R_EAX, Rn );
1468 uint32_t Rn = ((ir>>8)&0xF);
1469 load_reg( R_EAX, Rn );
1472 store_reg( R_EAX, Rn );
1482 switch( (ir&0xF0) >> 4 ) {
1485 uint32_t Rn = ((ir>>8)&0xF);
1486 load_reg( R_EAX, Rn );
1488 store_reg( R_EAX, Rn );
1494 uint32_t Rn = ((ir>>8)&0xF);
1495 load_reg( R_EAX, Rn );
1496 CMP_imm8s_r32( 0, R_EAX );
1502 uint32_t Rn = ((ir>>8)&0xF);
1503 load_reg( R_EAX, Rn );
1506 store_reg( R_EAX, Rn );
1516 switch( (ir&0xF0) >> 4 ) {
1518 { /* LDS.L @Rm+, MACH */
1519 uint32_t Rm = ((ir>>8)&0xF);
1520 load_reg( R_EAX, Rm );
1521 check_ralign32( R_EAX );
1522 MOV_r32_r32( R_EAX, R_ECX );
1523 ADD_imm8s_r32( 4, R_EAX );
1524 store_reg( R_EAX, Rm );
1525 MEM_READ_LONG( R_ECX, R_EAX );
1526 store_spreg( R_EAX, R_MACH );
1530 { /* LDS.L @Rm+, MACL */
1531 uint32_t Rm = ((ir>>8)&0xF);
1532 load_reg( R_EAX, Rm );
1533 check_ralign32( R_EAX );
1534 MOV_r32_r32( R_EAX, R_ECX );
1535 ADD_imm8s_r32( 4, R_EAX );
1536 store_reg( R_EAX, Rm );
1537 MEM_READ_LONG( R_ECX, R_EAX );
1538 store_spreg( R_EAX, R_MACL );
1542 { /* LDS.L @Rm+, PR */
1543 uint32_t Rm = ((ir>>8)&0xF);
1544 load_reg( R_EAX, Rm );
1545 check_ralign32( R_EAX );
1546 MOV_r32_r32( R_EAX, R_ECX );
1547 ADD_imm8s_r32( 4, R_EAX );
1548 store_reg( R_EAX, Rm );
1549 MEM_READ_LONG( R_ECX, R_EAX );
1550 store_spreg( R_EAX, R_PR );
1554 { /* LDC.L @Rm+, SGR */
1555 uint32_t Rm = ((ir>>8)&0xF);
1557 load_reg( R_EAX, Rm );
1558 check_ralign32( R_EAX );
1559 MOV_r32_r32( R_EAX, R_ECX );
1560 ADD_imm8s_r32( 4, R_EAX );
1561 store_reg( R_EAX, Rm );
1562 MEM_READ_LONG( R_ECX, R_EAX );
1563 store_spreg( R_EAX, R_SGR );
1567 { /* LDS.L @Rm+, FPUL */
1568 uint32_t Rm = ((ir>>8)&0xF);
1569 load_reg( R_EAX, Rm );
1570 check_ralign32( R_EAX );
1571 MOV_r32_r32( R_EAX, R_ECX );
1572 ADD_imm8s_r32( 4, R_EAX );
1573 store_reg( R_EAX, Rm );
1574 MEM_READ_LONG( R_ECX, R_EAX );
1575 store_spreg( R_EAX, R_FPUL );
1579 { /* LDS.L @Rm+, FPSCR */
1580 uint32_t Rm = ((ir>>8)&0xF);
1581 load_reg( R_EAX, Rm );
1582 check_ralign32( R_EAX );
1583 MOV_r32_r32( R_EAX, R_ECX );
1584 ADD_imm8s_r32( 4, R_EAX );
1585 store_reg( R_EAX, Rm );
1586 MEM_READ_LONG( R_ECX, R_EAX );
1587 store_spreg( R_EAX, R_FPSCR );
1588 update_fr_bank( R_EAX );
1592 { /* LDC.L @Rm+, DBR */
1593 uint32_t Rm = ((ir>>8)&0xF);
1595 load_reg( R_EAX, Rm );
1596 check_ralign32( R_EAX );
1597 MOV_r32_r32( R_EAX, R_ECX );
1598 ADD_imm8s_r32( 4, R_EAX );
1599 store_reg( R_EAX, Rm );
1600 MEM_READ_LONG( R_ECX, R_EAX );
1601 store_spreg( R_EAX, R_DBR );
1610 switch( (ir&0x80) >> 7 ) {
1612 switch( (ir&0x70) >> 4 ) {
1614 { /* LDC.L @Rm+, SR */
1615 uint32_t Rm = ((ir>>8)&0xF);
1616 if( sh4_x86.in_delay_slot ) {
1620 load_reg( R_EAX, Rm );
1621 check_ralign32( R_EAX );
1622 MOV_r32_r32( R_EAX, R_ECX );
1623 ADD_imm8s_r32( 4, R_EAX );
1624 store_reg( R_EAX, Rm );
1625 MEM_READ_LONG( R_ECX, R_EAX );
1626 call_func1( sh4_write_sr, R_EAX );
1627 sh4_x86.priv_checked = FALSE;
1628 sh4_x86.fpuen_checked = FALSE;
1633 { /* LDC.L @Rm+, GBR */
1634 uint32_t Rm = ((ir>>8)&0xF);
1635 load_reg( R_EAX, Rm );
1636 check_ralign32( R_EAX );
1637 MOV_r32_r32( R_EAX, R_ECX );
1638 ADD_imm8s_r32( 4, R_EAX );
1639 store_reg( R_EAX, Rm );
1640 MEM_READ_LONG( R_ECX, R_EAX );
1641 store_spreg( R_EAX, R_GBR );
1645 { /* LDC.L @Rm+, VBR */
1646 uint32_t Rm = ((ir>>8)&0xF);
1648 load_reg( R_EAX, Rm );
1649 check_ralign32( R_EAX );
1650 MOV_r32_r32( R_EAX, R_ECX );
1651 ADD_imm8s_r32( 4, R_EAX );
1652 store_reg( R_EAX, Rm );
1653 MEM_READ_LONG( R_ECX, R_EAX );
1654 store_spreg( R_EAX, R_VBR );
1658 { /* LDC.L @Rm+, SSR */
1659 uint32_t Rm = ((ir>>8)&0xF);
/* NOTE(review): every other LDC.L @Rm+, <sysreg> arm in this decoder
 * (SR, GBR, VBR, SPC, DBR, SGR, Rn_BANK) calls check_ralign32( R_EAX )
 * here, between load_reg and the post-increment sequence. This arm does
 * not, so an unaligned @Rm will silently read instead of raising the
 * address-error exception — looks like a missing alignment check; confirm
 * against the generator (.in) source before changing generated output. */
1661 load_reg( R_EAX, Rm );
1662 MOV_r32_r32( R_EAX, R_ECX );
1663 ADD_imm8s_r32( 4, R_EAX );
1664 store_reg( R_EAX, Rm );
1665 MEM_READ_LONG( R_ECX, R_EAX );
1666 store_spreg( R_EAX, R_SSR );
1670 { /* LDC.L @Rm+, SPC */
1671 uint32_t Rm = ((ir>>8)&0xF);
1673 load_reg( R_EAX, Rm );
1674 check_ralign32( R_EAX );
1675 MOV_r32_r32( R_EAX, R_ECX );
1676 ADD_imm8s_r32( 4, R_EAX );
1677 store_reg( R_EAX, Rm );
1678 MEM_READ_LONG( R_ECX, R_EAX );
1679 store_spreg( R_EAX, R_SPC );
1688 { /* LDC.L @Rm+, Rn_BANK */
1689 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1691 load_reg( R_EAX, Rm );
1692 check_ralign32( R_EAX );
1693 MOV_r32_r32( R_EAX, R_ECX );
1694 ADD_imm8s_r32( 4, R_EAX );
1695 store_reg( R_EAX, Rm );
1696 MEM_READ_LONG( R_ECX, R_EAX );
1697 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1703 switch( (ir&0xF0) >> 4 ) {
1706 uint32_t Rn = ((ir>>8)&0xF);
1707 load_reg( R_EAX, Rn );
1708 SHL_imm8_r32( 2, R_EAX );
1709 store_reg( R_EAX, Rn );
1714 uint32_t Rn = ((ir>>8)&0xF);
1715 load_reg( R_EAX, Rn );
1716 SHL_imm8_r32( 8, R_EAX );
1717 store_reg( R_EAX, Rn );
1722 uint32_t Rn = ((ir>>8)&0xF);
1723 load_reg( R_EAX, Rn );
1724 SHL_imm8_r32( 16, R_EAX );
1725 store_reg( R_EAX, Rn );
1734 switch( (ir&0xF0) >> 4 ) {
1737 uint32_t Rn = ((ir>>8)&0xF);
1738 load_reg( R_EAX, Rn );
1739 SHR_imm8_r32( 2, R_EAX );
1740 store_reg( R_EAX, Rn );
1745 uint32_t Rn = ((ir>>8)&0xF);
1746 load_reg( R_EAX, Rn );
1747 SHR_imm8_r32( 8, R_EAX );
1748 store_reg( R_EAX, Rn );
1753 uint32_t Rn = ((ir>>8)&0xF);
1754 load_reg( R_EAX, Rn );
1755 SHR_imm8_r32( 16, R_EAX );
1756 store_reg( R_EAX, Rn );
1765 switch( (ir&0xF0) >> 4 ) {
1767 { /* LDS Rm, MACH */
1768 uint32_t Rm = ((ir>>8)&0xF);
1769 load_reg( R_EAX, Rm );
1770 store_spreg( R_EAX, R_MACH );
1774 { /* LDS Rm, MACL */
1775 uint32_t Rm = ((ir>>8)&0xF);
1776 load_reg( R_EAX, Rm );
1777 store_spreg( R_EAX, R_MACL );
1782 uint32_t Rm = ((ir>>8)&0xF);
1783 load_reg( R_EAX, Rm );
1784 store_spreg( R_EAX, R_PR );
1789 uint32_t Rm = ((ir>>8)&0xF);
1791 load_reg( R_EAX, Rm );
1792 store_spreg( R_EAX, R_SGR );
1796 { /* LDS Rm, FPUL */
1797 uint32_t Rm = ((ir>>8)&0xF);
1798 load_reg( R_EAX, Rm );
1799 store_spreg( R_EAX, R_FPUL );
1803 { /* LDS Rm, FPSCR */
1804 uint32_t Rm = ((ir>>8)&0xF);
1805 load_reg( R_EAX, Rm );
1806 store_spreg( R_EAX, R_FPSCR );
1807 update_fr_bank( R_EAX );
1812 uint32_t Rm = ((ir>>8)&0xF);
1814 load_reg( R_EAX, Rm );
1815 store_spreg( R_EAX, R_DBR );
1824 switch( (ir&0xF0) >> 4 ) {
1827 uint32_t Rn = ((ir>>8)&0xF);
1828 if( sh4_x86.in_delay_slot ) {
1831 load_imm32( R_EAX, pc + 4 );
1832 store_spreg( R_EAX, R_PR );
1833 load_reg( R_EDI, Rn );
1834 sh4_x86.in_delay_slot = TRUE;
1841 uint32_t Rn = ((ir>>8)&0xF);
1842 load_reg( R_ECX, Rn );
1843 MEM_READ_BYTE( R_ECX, R_EAX );
1844 TEST_r8_r8( R_AL, R_AL );
1846 OR_imm8_r8( 0x80, R_AL );
1847 load_reg( R_ECX, Rn );
1848 MEM_WRITE_BYTE( R_ECX, R_EAX );
1853 uint32_t Rn = ((ir>>8)&0xF);
1854 if( sh4_x86.in_delay_slot ) {
1857 load_reg( R_EDI, Rn );
1858 sh4_x86.in_delay_slot = TRUE;
1870 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1871 /* Annoyingly enough, not directly convertible */
1872 load_reg( R_EAX, Rn );
1873 load_reg( R_ECX, Rm );
1874 CMP_imm32_r32( 0, R_ECX );
1875 JGE_rel8(16, doshl);
1877 NEG_r32( R_ECX ); // 2
1878 AND_imm8_r8( 0x1F, R_CL ); // 3
1879 JE_rel8( 4, emptysar); // 2
1880 SAR_r32_CL( R_EAX ); // 2
1881 JMP_rel8(10, end); // 2
1883 JMP_TARGET(emptysar);
1884 SAR_imm8_r32(31, R_EAX ); // 3
1888 AND_imm8_r8( 0x1F, R_CL ); // 3
1889 SHL_r32_CL( R_EAX ); // 2
1892 store_reg( R_EAX, Rn );
1897 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1898 load_reg( R_EAX, Rn );
1899 load_reg( R_ECX, Rm );
1900 CMP_imm32_r32( 0, R_ECX );
1901 JGE_rel8(15, doshl);
1903 NEG_r32( R_ECX ); // 2
1904 AND_imm8_r8( 0x1F, R_CL ); // 3
1905 JE_rel8( 4, emptyshr );
1906 SHR_r32_CL( R_EAX ); // 2
1907 JMP_rel8(9, end); // 2
1909 JMP_TARGET(emptyshr);
1910 XOR_r32_r32( R_EAX, R_EAX );
1914 AND_imm8_r8( 0x1F, R_CL ); // 3
1915 SHL_r32_CL( R_EAX ); // 2
1918 store_reg( R_EAX, Rn );
1922 switch( (ir&0x80) >> 7 ) {
1924 switch( (ir&0x70) >> 4 ) {
1927 uint32_t Rm = ((ir>>8)&0xF);
1928 if( sh4_x86.in_delay_slot ) {
1932 load_reg( R_EAX, Rm );
1933 call_func1( sh4_write_sr, R_EAX );
1934 sh4_x86.priv_checked = FALSE;
1935 sh4_x86.fpuen_checked = FALSE;
1941 uint32_t Rm = ((ir>>8)&0xF);
1942 load_reg( R_EAX, Rm );
1943 store_spreg( R_EAX, R_GBR );
1948 uint32_t Rm = ((ir>>8)&0xF);
1950 load_reg( R_EAX, Rm );
1951 store_spreg( R_EAX, R_VBR );
1956 uint32_t Rm = ((ir>>8)&0xF);
1958 load_reg( R_EAX, Rm );
1959 store_spreg( R_EAX, R_SSR );
1964 uint32_t Rm = ((ir>>8)&0xF);
1966 load_reg( R_EAX, Rm );
1967 store_spreg( R_EAX, R_SPC );
1976 { /* LDC Rm, Rn_BANK */
1977 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1979 load_reg( R_EAX, Rm );
1980 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1986 { /* MAC.W @Rm+, @Rn+ */
1987 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1988 load_reg( R_ECX, Rm );
1989 check_ralign16( R_ECX );
1990 load_reg( R_ECX, Rn );
1991 check_ralign16( R_ECX );
1992 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
1993 MEM_READ_WORD( R_ECX, R_EAX );
1995 load_reg( R_ECX, Rm );
1996 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
1997 MEM_READ_WORD( R_ECX, R_EAX );
2001 load_spreg( R_ECX, R_S );
2002 TEST_r32_r32( R_ECX, R_ECX );
2003 JE_rel8( 47, nosat );
2005 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
2006 JNO_rel8( 51, end ); // 2
2007 load_imm32( R_EDX, 1 ); // 5
2008 store_spreg( R_EDX, R_MACH ); // 6
2009 JS_rel8( 13, positive ); // 2
2010 load_imm32( R_EAX, 0x80000000 );// 5
2011 store_spreg( R_EAX, R_MACL ); // 6
2012 JMP_rel8( 25, end2 ); // 2
2014 JMP_TARGET(positive);
2015 load_imm32( R_EAX, 0x7FFFFFFF );// 5
2016 store_spreg( R_EAX, R_MACL ); // 6
2017 JMP_rel8( 12, end3); // 2
2020 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
2021 ADC_r32_sh4r( R_EDX, R_MACH ); // 6
2030 { /* MOV.L @(disp, Rm), Rn */
2031 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
2032 load_reg( R_ECX, Rm );
2033 ADD_imm8s_r32( disp, R_ECX );
2034 check_ralign32( R_ECX );
2035 MEM_READ_LONG( R_ECX, R_EAX );
2036 store_reg( R_EAX, Rn );
2042 { /* MOV.B @Rm, Rn */
2043 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2044 load_reg( R_ECX, Rm );
2045 MEM_READ_BYTE( R_ECX, R_EAX );
2046 store_reg( R_EAX, Rn );
2050 { /* MOV.W @Rm, Rn */
2051 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2052 load_reg( R_ECX, Rm );
2053 check_ralign16( R_ECX );
2054 MEM_READ_WORD( R_ECX, R_EAX );
2055 store_reg( R_EAX, Rn );
2059 { /* MOV.L @Rm, Rn */
2060 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2061 load_reg( R_ECX, Rm );
2062 check_ralign32( R_ECX );
2063 MEM_READ_LONG( R_ECX, R_EAX );
2064 store_reg( R_EAX, Rn );
2069 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2070 load_reg( R_EAX, Rm );
2071 store_reg( R_EAX, Rn );
2075 { /* MOV.B @Rm+, Rn */
2076 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2077 load_reg( R_ECX, Rm );
2078 MOV_r32_r32( R_ECX, R_EAX );
2079 ADD_imm8s_r32( 1, R_EAX );
2080 store_reg( R_EAX, Rm );
2081 MEM_READ_BYTE( R_ECX, R_EAX );
2082 store_reg( R_EAX, Rn );
2086 { /* MOV.W @Rm+, Rn */
2087 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2088 load_reg( R_EAX, Rm );
2089 check_ralign16( R_EAX );
2090 MOV_r32_r32( R_EAX, R_ECX );
2091 ADD_imm8s_r32( 2, R_EAX );
2092 store_reg( R_EAX, Rm );
2093 MEM_READ_WORD( R_ECX, R_EAX );
2094 store_reg( R_EAX, Rn );
2098 { /* MOV.L @Rm+, Rn */
2099 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2100 load_reg( R_EAX, Rm );
2101 check_ralign32( R_EAX );
2102 MOV_r32_r32( R_EAX, R_ECX );
2103 ADD_imm8s_r32( 4, R_EAX );
2104 store_reg( R_EAX, Rm );
2105 MEM_READ_LONG( R_ECX, R_EAX );
2106 store_reg( R_EAX, Rn );
2111 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2112 load_reg( R_EAX, Rm );
2114 store_reg( R_EAX, Rn );
2118 { /* SWAP.B Rm, Rn */
2119 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2120 load_reg( R_EAX, Rm );
2121 XCHG_r8_r8( R_AL, R_AH );
2122 store_reg( R_EAX, Rn );
2126 { /* SWAP.W Rm, Rn */
2127 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2128 load_reg( R_EAX, Rm );
2129 MOV_r32_r32( R_EAX, R_ECX );
2130 SHL_imm8_r32( 16, R_ECX );
2131 SHR_imm8_r32( 16, R_EAX );
2132 OR_r32_r32( R_EAX, R_ECX );
2133 store_reg( R_ECX, Rn );
2138 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2139 load_reg( R_EAX, Rm );
2140 XOR_r32_r32( R_ECX, R_ECX );
2142 SBB_r32_r32( R_EAX, R_ECX );
2143 store_reg( R_ECX, Rn );
2149 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2150 load_reg( R_EAX, Rm );
2152 store_reg( R_EAX, Rn );
2156 { /* EXTU.B Rm, Rn */
2157 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2158 load_reg( R_EAX, Rm );
2159 MOVZX_r8_r32( R_EAX, R_EAX );
2160 store_reg( R_EAX, Rn );
2164 { /* EXTU.W Rm, Rn */
2165 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2166 load_reg( R_EAX, Rm );
2167 MOVZX_r16_r32( R_EAX, R_EAX );
2168 store_reg( R_EAX, Rn );
2172 { /* EXTS.B Rm, Rn */
2173 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2174 load_reg( R_EAX, Rm );
2175 MOVSX_r8_r32( R_EAX, R_EAX );
2176 store_reg( R_EAX, Rn );
2180 { /* EXTS.W Rm, Rn */
2181 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2182 load_reg( R_EAX, Rm );
2183 MOVSX_r16_r32( R_EAX, R_EAX );
2184 store_reg( R_EAX, Rn );
2190 { /* ADD #imm, Rn */
2191 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2192 load_reg( R_EAX, Rn );
2193 ADD_imm8s_r32( imm, R_EAX );
2194 store_reg( R_EAX, Rn );
2198 switch( (ir&0xF00) >> 8 ) {
2200 { /* MOV.B R0, @(disp, Rn) */
2201 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2202 load_reg( R_EAX, 0 );
2203 load_reg( R_ECX, Rn );
2204 ADD_imm32_r32( disp, R_ECX );
2205 MEM_WRITE_BYTE( R_ECX, R_EAX );
2209 { /* MOV.W R0, @(disp, Rn) */
2210 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2211 load_reg( R_ECX, Rn );
2212 load_reg( R_EAX, 0 );
2213 ADD_imm32_r32( disp, R_ECX );
2214 check_walign16( R_ECX );
2215 MEM_WRITE_WORD( R_ECX, R_EAX );
2219 { /* MOV.B @(disp, Rm), R0 */
2220 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2221 load_reg( R_ECX, Rm );
2222 ADD_imm32_r32( disp, R_ECX );
2223 MEM_READ_BYTE( R_ECX, R_EAX );
2224 store_reg( R_EAX, 0 );
2228 { /* MOV.W @(disp, Rm), R0 */
2229 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2230 load_reg( R_ECX, Rm );
2231 ADD_imm32_r32( disp, R_ECX );
2232 check_ralign16( R_ECX );
2233 MEM_READ_WORD( R_ECX, R_EAX );
2234 store_reg( R_EAX, 0 );
2238 { /* CMP/EQ #imm, R0 */
2239 int32_t imm = SIGNEXT8(ir&0xFF);
2240 load_reg( R_EAX, 0 );
2241 CMP_imm8s_r32(imm, R_EAX);
2247 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2248 if( sh4_x86.in_delay_slot ) {
2251 load_imm32( R_EDI, pc + 2 );
2252 CMP_imm8s_sh4r( 0, R_T );
2253 JE_rel8( 5, nottaken );
2254 load_imm32( R_EDI, disp + pc + 4 );
2255 JMP_TARGET(nottaken);
2263 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2264 if( sh4_x86.in_delay_slot ) {
2267 load_imm32( R_EDI, pc + 2 );
2268 CMP_imm8s_sh4r( 0, R_T );
2269 JNE_rel8( 5, nottaken );
2270 load_imm32( R_EDI, disp + pc + 4 );
2271 JMP_TARGET(nottaken);
2279 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2280 if( sh4_x86.in_delay_slot ) {
2283 load_imm32( R_EDI, pc + 4 );
2284 CMP_imm8s_sh4r( 0, R_T );
2285 JE_rel8( 5, nottaken );
2286 load_imm32( R_EDI, disp + pc + 4 );
2287 JMP_TARGET(nottaken);
2288 sh4_x86.in_delay_slot = TRUE;
2295 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2296 if( sh4_x86.in_delay_slot ) {
2299 load_imm32( R_EDI, pc + 4 );
2300 CMP_imm8s_sh4r( 0, R_T );
2301 JNE_rel8( 5, nottaken );
2302 load_imm32( R_EDI, disp + pc + 4 );
2303 JMP_TARGET(nottaken);
2304 sh4_x86.in_delay_slot = TRUE;
2315 { /* MOV.W @(disp, PC), Rn */
2316 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1;
2317 if( sh4_x86.in_delay_slot ) {
2320 load_imm32( R_ECX, pc + disp + 4 );
2321 MEM_READ_WORD( R_ECX, R_EAX );
2322 store_reg( R_EAX, Rn );
2328 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2329 if( sh4_x86.in_delay_slot ) {
2332 load_imm32( R_EDI, disp + pc + 4 );
2333 sh4_x86.in_delay_slot = TRUE;
2340 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2341 if( sh4_x86.in_delay_slot ) {
2344 load_imm32( R_EAX, pc + 4 );
2345 store_spreg( R_EAX, R_PR );
2346 load_imm32( R_EDI, disp + pc + 4 );
2347 sh4_x86.in_delay_slot = TRUE;
2353 switch( (ir&0xF00) >> 8 ) {
2355 { /* MOV.B R0, @(disp, GBR) */
2356 uint32_t disp = (ir&0xFF);
2357 load_reg( R_EAX, 0 );
2358 load_spreg( R_ECX, R_GBR );
2359 ADD_imm32_r32( disp, R_ECX );
2360 MEM_WRITE_BYTE( R_ECX, R_EAX );
2364 { /* MOV.W R0, @(disp, GBR) */
2365 uint32_t disp = (ir&0xFF)<<1;
2366 load_spreg( R_ECX, R_GBR );
2367 load_reg( R_EAX, 0 );
2368 ADD_imm32_r32( disp, R_ECX );
2369 check_walign16( R_ECX );
2370 MEM_WRITE_WORD( R_ECX, R_EAX );
2374 { /* MOV.L R0, @(disp, GBR) */
2375 uint32_t disp = (ir&0xFF)<<2;
2376 load_spreg( R_ECX, R_GBR );
2377 load_reg( R_EAX, 0 );
2378 ADD_imm32_r32( disp, R_ECX );
2379 check_walign32( R_ECX );
2380 MEM_WRITE_LONG( R_ECX, R_EAX );
2385 uint32_t imm = (ir&0xFF);
2386 if( sh4_x86.in_delay_slot ) {
2390 call_func0( sh4_raise_trap );
2391 ADD_imm8s_r32( 4, R_ESP );
2396 { /* MOV.B @(disp, GBR), R0 */
2397 uint32_t disp = (ir&0xFF);
2398 load_spreg( R_ECX, R_GBR );
2399 ADD_imm32_r32( disp, R_ECX );
2400 MEM_READ_BYTE( R_ECX, R_EAX );
2401 store_reg( R_EAX, 0 );
2405 { /* MOV.W @(disp, GBR), R0 */
2406 uint32_t disp = (ir&0xFF)<<1;
2407 load_spreg( R_ECX, R_GBR );
2408 ADD_imm32_r32( disp, R_ECX );
2409 check_ralign16( R_ECX );
2410 MEM_READ_WORD( R_ECX, R_EAX );
2411 store_reg( R_EAX, 0 );
2415 { /* MOV.L @(disp, GBR), R0 */
2416 uint32_t disp = (ir&0xFF)<<2;
2417 load_spreg( R_ECX, R_GBR );
2418 ADD_imm32_r32( disp, R_ECX );
2419 check_ralign32( R_ECX );
2420 MEM_READ_LONG( R_ECX, R_EAX );
2421 store_reg( R_EAX, 0 );
2425 { /* MOVA @(disp, PC), R0 */
2426 uint32_t disp = (ir&0xFF)<<2;
2427 if( sh4_x86.in_delay_slot ) {
2430 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2431 store_reg( R_ECX, 0 );
2436 { /* TST #imm, R0 */
2437 uint32_t imm = (ir&0xFF);
2438 load_reg( R_EAX, 0 );
2439 TEST_imm32_r32( imm, R_EAX );
2444 { /* AND #imm, R0 */
2445 uint32_t imm = (ir&0xFF);
2446 load_reg( R_EAX, 0 );
2447 AND_imm32_r32(imm, R_EAX);
2448 store_reg( R_EAX, 0 );
2452 { /* XOR #imm, R0 */
2453 uint32_t imm = (ir&0xFF);
2454 load_reg( R_EAX, 0 );
2455 XOR_imm32_r32( imm, R_EAX );
2456 store_reg( R_EAX, 0 );
2461 uint32_t imm = (ir&0xFF);
2462 load_reg( R_EAX, 0 );
2463 OR_imm32_r32(imm, R_EAX);
2464 store_reg( R_EAX, 0 );
2468 { /* TST.B #imm, @(R0, GBR) */
2469 uint32_t imm = (ir&0xFF);
2470 load_reg( R_EAX, 0);
/* NOTE(review): likely bug — this should be load_spreg( R_ECX, R_GBR ),
 * as in the sibling AND.B/XOR.B/OR.B #imm, @(R0,GBR) arms below.
 * R_GBR is a system-register struct offset, not a general-register
 * index, so load_reg here addresses the wrong slot in sh4r. Fix belongs
 * in the decoder generator source, not this generated file. */
2471 load_reg( R_ECX, R_GBR);
2472 ADD_r32_r32( R_EAX, R_ECX );
2473 MEM_READ_BYTE( R_ECX, R_EAX );
2474 TEST_imm8_r8( imm, R_AL );
2479 { /* AND.B #imm, @(R0, GBR) */
2480 uint32_t imm = (ir&0xFF);
2481 load_reg( R_EAX, 0 );
2482 load_spreg( R_ECX, R_GBR );
2483 ADD_r32_r32( R_EAX, R_ECX );
2485 call_func0(sh4_read_byte);
2487 AND_imm32_r32(imm, R_EAX );
2488 MEM_WRITE_BYTE( R_ECX, R_EAX );
2492 { /* XOR.B #imm, @(R0, GBR) */
2493 uint32_t imm = (ir&0xFF);
2494 load_reg( R_EAX, 0 );
2495 load_spreg( R_ECX, R_GBR );
2496 ADD_r32_r32( R_EAX, R_ECX );
2498 call_func0(sh4_read_byte);
2500 XOR_imm32_r32( imm, R_EAX );
2501 MEM_WRITE_BYTE( R_ECX, R_EAX );
2505 { /* OR.B #imm, @(R0, GBR) */
2506 uint32_t imm = (ir&0xFF);
2507 load_reg( R_EAX, 0 );
2508 load_spreg( R_ECX, R_GBR );
2509 ADD_r32_r32( R_EAX, R_ECX );
2511 call_func0(sh4_read_byte);
2513 OR_imm32_r32(imm, R_EAX );
2514 MEM_WRITE_BYTE( R_ECX, R_EAX );
2520 { /* MOV.L @(disp, PC), Rn */
2521 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2;
2522 if( sh4_x86.in_delay_slot ) {
2525 uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
2526 char *ptr = mem_get_region(target);
2528 MOV_moff32_EAX( (uint32_t)ptr );
2530 load_imm32( R_ECX, target );
2531 MEM_READ_LONG( R_ECX, R_EAX );
2533 store_reg( R_EAX, Rn );
2538 { /* MOV #imm, Rn */
2539 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2540 load_imm32( R_EAX, imm );
2541 store_reg( R_EAX, Rn );
2547 { /* FADD FRm, FRn */
2548 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2550 load_spreg( R_ECX, R_FPSCR );
2551 TEST_imm32_r32( FPSCR_PR, R_ECX );
2552 load_fr_bank( R_EDX );
2553 JNE_rel8(13,doubleprec);
2554 push_fr(R_EDX, FRm);
2555 push_fr(R_EDX, FRn);
2559 JMP_TARGET(doubleprec);
2560 push_dr(R_EDX, FRm);
2561 push_dr(R_EDX, FRn);
2568 { /* FSUB FRm, FRn */
2569 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2571 load_spreg( R_ECX, R_FPSCR );
2572 TEST_imm32_r32( FPSCR_PR, R_ECX );
2573 load_fr_bank( R_EDX );
2574 JNE_rel8(13, doubleprec);
2575 push_fr(R_EDX, FRn);
2576 push_fr(R_EDX, FRm);
2580 JMP_TARGET(doubleprec);
2581 push_dr(R_EDX, FRn);
2582 push_dr(R_EDX, FRm);
2589 { /* FMUL FRm, FRn */
2590 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2592 load_spreg( R_ECX, R_FPSCR );
2593 TEST_imm32_r32( FPSCR_PR, R_ECX );
2594 load_fr_bank( R_EDX );
2595 JNE_rel8(13, doubleprec);
2596 push_fr(R_EDX, FRm);
2597 push_fr(R_EDX, FRn);
2601 JMP_TARGET(doubleprec);
2602 push_dr(R_EDX, FRm);
2603 push_dr(R_EDX, FRn);
2610 { /* FDIV FRm, FRn */
2611 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2613 load_spreg( R_ECX, R_FPSCR );
2614 TEST_imm32_r32( FPSCR_PR, R_ECX );
2615 load_fr_bank( R_EDX );
2616 JNE_rel8(13, doubleprec);
2617 push_fr(R_EDX, FRn);
2618 push_fr(R_EDX, FRm);
2622 JMP_TARGET(doubleprec);
2623 push_dr(R_EDX, FRn);
2624 push_dr(R_EDX, FRm);
2631 { /* FCMP/EQ FRm, FRn */
2632 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2634 load_spreg( R_ECX, R_FPSCR );
2635 TEST_imm32_r32( FPSCR_PR, R_ECX );
2636 load_fr_bank( R_EDX );
2637 JNE_rel8(8, doubleprec);
2638 push_fr(R_EDX, FRm);
2639 push_fr(R_EDX, FRn);
2641 JMP_TARGET(doubleprec);
2642 push_dr(R_EDX, FRm);
2643 push_dr(R_EDX, FRn);
2651 { /* FCMP/GT FRm, FRn */
2652 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2654 load_spreg( R_ECX, R_FPSCR );
2655 TEST_imm32_r32( FPSCR_PR, R_ECX );
2656 load_fr_bank( R_EDX );
2657 JNE_rel8(8, doubleprec);
2658 push_fr(R_EDX, FRm);
2659 push_fr(R_EDX, FRn);
2661 JMP_TARGET(doubleprec);
2662 push_dr(R_EDX, FRm);
2663 push_dr(R_EDX, FRn);
2671 { /* FMOV @(R0, Rm), FRn */
2672 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2674 load_reg( R_EDX, Rm );
2675 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2676 check_ralign32( R_EDX );
2677 load_spreg( R_ECX, R_FPSCR );
2678 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2679 JNE_rel8(19, doublesize);
2680 MEM_READ_LONG( R_EDX, R_EAX );
2681 load_fr_bank( R_ECX );
2682 store_fr( R_ECX, R_EAX, FRn );
2685 JMP_TARGET(doublesize);
2686 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2687 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2688 load_xf_bank( R_ECX );
2689 store_fr( R_ECX, R_EAX, FRn&0x0E );
2690 store_fr( R_ECX, R_EDX, FRn|0x01 );
2694 JMP_TARGET(doublesize);
2695 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2696 load_fr_bank( R_ECX );
2697 store_fr( R_ECX, R_EAX, FRn&0x0E );
2698 store_fr( R_ECX, R_EDX, FRn|0x01 );
2704 { /* FMOV FRm, @(R0, Rn) */
2705 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2707 load_reg( R_EDX, Rn );
2708 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2709 check_walign32( R_EDX );
2710 load_spreg( R_ECX, R_FPSCR );
2711 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2712 JNE_rel8(20, doublesize);
2713 load_fr_bank( R_ECX );
2714 load_fr( R_ECX, R_EAX, FRm );
2715 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2717 JMP_rel8( 48, end );
2718 JMP_TARGET(doublesize);
2719 load_xf_bank( R_ECX );
2720 load_fr( R_ECX, R_EAX, FRm&0x0E );
2721 load_fr( R_ECX, R_ECX, FRm|0x01 );
2722 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2725 JMP_rel8( 39, end );
2726 JMP_TARGET(doublesize);
2727 load_fr_bank( R_ECX );
2728 load_fr( R_ECX, R_EAX, FRm&0x0E );
2729 load_fr( R_ECX, R_ECX, FRm|0x01 );
2730 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2736 { /* FMOV @Rm, FRn */
2737 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2739 load_reg( R_EDX, Rm );
2740 check_ralign32( R_EDX );
2741 load_spreg( R_ECX, R_FPSCR );
2742 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2743 JNE_rel8(19, doublesize);
2744 MEM_READ_LONG( R_EDX, R_EAX );
2745 load_fr_bank( R_ECX );
2746 store_fr( R_ECX, R_EAX, FRn );
2749 JMP_TARGET(doublesize);
2750 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2751 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2752 load_xf_bank( R_ECX );
2753 store_fr( R_ECX, R_EAX, FRn&0x0E );
2754 store_fr( R_ECX, R_EDX, FRn|0x01 );
2758 JMP_TARGET(doublesize);
2759 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2760 load_fr_bank( R_ECX );
2761 store_fr( R_ECX, R_EAX, FRn&0x0E );
2762 store_fr( R_ECX, R_EDX, FRn|0x01 );
2768 { /* FMOV @Rm+, FRn */
2769 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2771 load_reg( R_EDX, Rm );
2772 check_ralign32( R_EDX );
2773 MOV_r32_r32( R_EDX, R_EAX );
2774 load_spreg( R_ECX, R_FPSCR );
2775 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2776 JNE_rel8(25, doublesize);
2777 ADD_imm8s_r32( 4, R_EAX );
2778 store_reg( R_EAX, Rm );
2779 MEM_READ_LONG( R_EDX, R_EAX );
2780 load_fr_bank( R_ECX );
2781 store_fr( R_ECX, R_EAX, FRn );
2784 JMP_TARGET(doublesize);
2785 ADD_imm8s_r32( 8, R_EAX );
2786 store_reg(R_EAX, Rm);
2787 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2788 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2789 load_xf_bank( R_ECX );
2790 store_fr( R_ECX, R_EAX, FRn&0x0E );
2791 store_fr( R_ECX, R_EDX, FRn|0x01 );
2795 ADD_imm8s_r32( 8, R_EAX );
2796 store_reg(R_EAX, Rm);
2797 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2798 load_fr_bank( R_ECX );
2799 store_fr( R_ECX, R_EAX, FRn&0x0E );
2800 store_fr( R_ECX, R_EDX, FRn|0x01 );
2806 { /* FMOV FRm, @Rn */
2807 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2809 load_reg( R_EDX, Rn );
2810 check_walign32( R_EDX );
2811 load_spreg( R_ECX, R_FPSCR );
2812 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2813 JNE_rel8(20, doublesize);
2814 load_fr_bank( R_ECX );
2815 load_fr( R_ECX, R_EAX, FRm );
2816 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2818 JMP_rel8( 48, end );
2819 JMP_TARGET(doublesize);
2820 load_xf_bank( R_ECX );
2821 load_fr( R_ECX, R_EAX, FRm&0x0E );
2822 load_fr( R_ECX, R_ECX, FRm|0x01 );
2823 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2826 JMP_rel8( 39, end );
2827 JMP_TARGET(doublesize);
2828 load_fr_bank( R_ECX );
2829 load_fr( R_ECX, R_EAX, FRm&0x0E );
2830 load_fr( R_ECX, R_ECX, FRm|0x01 );
2831 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2837 { /* FMOV FRm, @-Rn */
2838 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2840 load_reg( R_EDX, Rn );
2841 check_walign32( R_EDX );
2842 load_spreg( R_ECX, R_FPSCR );
2843 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2844 JNE_rel8(26, doublesize);
2845 load_fr_bank( R_ECX );
2846 load_fr( R_ECX, R_EAX, FRm );
2847 ADD_imm8s_r32(-4,R_EDX);
2848 store_reg( R_EDX, Rn );
2849 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2851 JMP_rel8( 54, end );
2852 JMP_TARGET(doublesize);
2853 load_xf_bank( R_ECX );
2854 load_fr( R_ECX, R_EAX, FRm&0x0E );
2855 load_fr( R_ECX, R_ECX, FRm|0x01 );
2856 ADD_imm8s_r32(-8,R_EDX);
2857 store_reg( R_EDX, Rn );
2858 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2861 JMP_rel8( 45, end );
2862 JMP_TARGET(doublesize);
2863 load_fr_bank( R_ECX );
2864 load_fr( R_ECX, R_EAX, FRm&0x0E );
2865 load_fr( R_ECX, R_ECX, FRm|0x01 );
2866 ADD_imm8s_r32(-8,R_EDX);
2867 store_reg( R_EDX, Rn );
2868 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2874 { /* FMOV FRm, FRn */
2875 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2876 /* As horrible as this looks, it's actually covering 5 separate cases:
2877 * 1. 32-bit fr-to-fr (PR=0)
2878 * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
2879 * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
2880 * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
2881 * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
2884 load_spreg( R_ECX, R_FPSCR );
2885 load_fr_bank( R_EDX );
2886 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2887 JNE_rel8(8, doublesize);
2888 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
2889 store_fr( R_EDX, R_EAX, FRn );
2892 JMP_TARGET(doublesize);
2893 load_xf_bank( R_ECX );
2894 load_fr( R_ECX, R_EAX, FRm-1 );
2896 load_fr( R_ECX, R_EDX, FRm );
2897 store_fr( R_ECX, R_EAX, FRn-1 );
2898 store_fr( R_ECX, R_EDX, FRn );
2899 } else /* FRn&1 == 0 */ {
2900 load_fr( R_ECX, R_ECX, FRm );
2901 store_fr( R_EDX, R_EAX, FRn );
2902 store_fr( R_EDX, R_ECX, FRn+1 );
2905 } else /* FRm&1 == 0 */ {
2908 load_xf_bank( R_ECX );
2909 load_fr( R_EDX, R_EAX, FRm );
2910 load_fr( R_EDX, R_EDX, FRm+1 );
2911 store_fr( R_ECX, R_EAX, FRn-1 );
2912 store_fr( R_ECX, R_EDX, FRn );
2914 } else /* FRn&1 == 0 */ {
2916 load_fr( R_EDX, R_EAX, FRm );
2917 load_fr( R_EDX, R_ECX, FRm+1 );
2918 store_fr( R_EDX, R_EAX, FRn );
2919 store_fr( R_EDX, R_ECX, FRn+1 );
2926 switch( (ir&0xF0) >> 4 ) {
2928 { /* FSTS FPUL, FRn */
2929 uint32_t FRn = ((ir>>8)&0xF);
2931 load_fr_bank( R_ECX );
2932 load_spreg( R_EAX, R_FPUL );
2933 store_fr( R_ECX, R_EAX, FRn );
2937 { /* FLDS FRm, FPUL */
2938 uint32_t FRm = ((ir>>8)&0xF);
2940 load_fr_bank( R_ECX );
2941 load_fr( R_ECX, R_EAX, FRm );
2942 store_spreg( R_EAX, R_FPUL );
2946 { /* FLOAT FPUL, FRn */
2947 uint32_t FRn = ((ir>>8)&0xF);
2949 load_spreg( R_ECX, R_FPSCR );
2950 load_spreg(R_EDX, REG_OFFSET(fr_bank));
2952 TEST_imm32_r32( FPSCR_PR, R_ECX );
2953 JNE_rel8(5, doubleprec);
2954 pop_fr( R_EDX, FRn );
2956 JMP_TARGET(doubleprec);
2957 pop_dr( R_EDX, FRn );
2962 { /* FTRC FRm, FPUL */
2963 uint32_t FRm = ((ir>>8)&0xF);
2965 load_spreg( R_ECX, R_FPSCR );
2966 load_fr_bank( R_EDX );
2967 TEST_imm32_r32( FPSCR_PR, R_ECX );
2968 JNE_rel8(5, doubleprec);
2969 push_fr( R_EDX, FRm );
2971 JMP_TARGET(doubleprec);
2972 push_dr( R_EDX, FRm );
2974 load_imm32( R_ECX, (uint32_t)&max_int );
2975 FILD_r32ind( R_ECX );
2977 JNA_rel8( 32, sat );
2978 load_imm32( R_ECX, (uint32_t)&min_int ); // 5
2979 FILD_r32ind( R_ECX ); // 2
2981 JAE_rel8( 21, sat2 ); // 2
2982 load_imm32( R_EAX, (uint32_t)&save_fcw );
2983 FNSTCW_r32ind( R_EAX );
2984 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
2985 FLDCW_r32ind( R_EDX );
2986 FISTP_sh4r(R_FPUL); // 3
2987 FLDCW_r32ind( R_EAX );
2988 JMP_rel8( 9, end ); // 2
2992 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
2993 store_spreg( R_ECX, R_FPUL );
3000 uint32_t FRn = ((ir>>8)&0xF);
3002 load_spreg( R_ECX, R_FPSCR );
3003 TEST_imm32_r32( FPSCR_PR, R_ECX );
3004 load_fr_bank( R_EDX );
3005 JNE_rel8(10, doubleprec);
3006 push_fr(R_EDX, FRn);
3010 JMP_TARGET(doubleprec);
3011 push_dr(R_EDX, FRn);
3019 uint32_t FRn = ((ir>>8)&0xF);
3021 load_spreg( R_ECX, R_FPSCR );
3022 load_fr_bank( R_EDX );
3023 TEST_imm32_r32( FPSCR_PR, R_ECX );
3024 JNE_rel8(10, doubleprec);
3025 push_fr(R_EDX, FRn); // 3
3027 pop_fr( R_EDX, FRn); //3
3028 JMP_rel8(8,end); // 2
3029 JMP_TARGET(doubleprec);
3030 push_dr(R_EDX, FRn);
3038 uint32_t FRn = ((ir>>8)&0xF);
3040 load_spreg( R_ECX, R_FPSCR );
3041 TEST_imm32_r32( FPSCR_PR, R_ECX );
3042 load_fr_bank( R_EDX );
3043 JNE_rel8(10, doubleprec);
3044 push_fr(R_EDX, FRn);
3048 JMP_TARGET(doubleprec);
3049 push_dr(R_EDX, FRn);
3057 uint32_t FRn = ((ir>>8)&0xF);
3059 load_spreg( R_ECX, R_FPSCR );
3060 TEST_imm32_r32( FPSCR_PR, R_ECX );
3061 load_fr_bank( R_EDX );
3062 JNE_rel8(12, end); // PR=0 only
3064 push_fr(R_EDX, FRn);
3073 uint32_t FRn = ((ir>>8)&0xF);
3076 load_spreg( R_ECX, R_FPSCR );
3077 TEST_imm32_r32( FPSCR_PR, R_ECX );
3079 XOR_r32_r32( R_EAX, R_EAX );
3080 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
3081 store_fr( R_ECX, R_EAX, FRn );
3087 uint32_t FRn = ((ir>>8)&0xF);
3090 load_spreg( R_ECX, R_FPSCR );
3091 TEST_imm32_r32( FPSCR_PR, R_ECX );
3093 load_imm32(R_EAX, 0x3F800000);
3094 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
3095 store_fr( R_ECX, R_EAX, FRn );
3100 { /* FCNVSD FPUL, FRn */
3101 uint32_t FRn = ((ir>>8)&0xF);
3103 load_spreg( R_ECX, R_FPSCR );
3104 TEST_imm32_r32( FPSCR_PR, R_ECX );
3105 JE_rel8(9, end); // only when PR=1
3106 load_fr_bank( R_ECX );
3108 pop_dr( R_ECX, FRn );
3113 { /* FCNVDS FRm, FPUL */
3114 uint32_t FRm = ((ir>>8)&0xF);
3116 load_spreg( R_ECX, R_FPSCR );
3117 TEST_imm32_r32( FPSCR_PR, R_ECX );
3118 JE_rel8(9, end); // only when PR=1
3119 load_fr_bank( R_ECX );
3120 push_dr( R_ECX, FRm );
3126 { /* FIPR FVm, FVn */
3127 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3);
3129 load_spreg( R_ECX, R_FPSCR );
3130 TEST_imm32_r32( FPSCR_PR, R_ECX );
3131 JNE_rel8(44, doubleprec);
3133 load_fr_bank( R_ECX );
3134 push_fr( R_ECX, FVm<<2 );
3135 push_fr( R_ECX, FVn<<2 );
3137 push_fr( R_ECX, (FVm<<2)+1);
3138 push_fr( R_ECX, (FVn<<2)+1);
3141 push_fr( R_ECX, (FVm<<2)+2);
3142 push_fr( R_ECX, (FVn<<2)+2);
3145 push_fr( R_ECX, (FVm<<2)+3);
3146 push_fr( R_ECX, (FVn<<2)+3);
3149 pop_fr( R_ECX, (FVn<<2)+3);
3150 JMP_TARGET(doubleprec);
3154 switch( (ir&0x100) >> 8 ) {
3156 { /* FSCA FPUL, FRn */
3157 uint32_t FRn = ((ir>>9)&0x7)<<1;
3159 load_spreg( R_ECX, R_FPSCR );
3160 TEST_imm32_r32( FPSCR_PR, R_ECX );
3161 JNE_rel8( 21, doubleprec );
3162 load_fr_bank( R_ECX );
3163 ADD_imm8s_r32( (FRn&0x0E)<<2, R_ECX );
3164 load_spreg( R_EDX, R_FPUL );
3165 call_func2( sh4_fsca, R_EDX, R_ECX );
3166 JMP_TARGET(doubleprec);
3170 switch( (ir&0x200) >> 9 ) {
3172 { /* FTRV XMTRX, FVn */
3173 uint32_t FVn = ((ir>>10)&0x3);
3175 load_spreg( R_ECX, R_FPSCR );
3176 TEST_imm32_r32( FPSCR_PR, R_ECX );
3177 JNE_rel8( 30, doubleprec );
3178 load_fr_bank( R_EDX ); // 3
3179 ADD_imm8s_r32( FVn<<4, R_EDX ); // 3
3180 load_xf_bank( R_ECX ); // 12
3181 call_func2( sh4_ftrv, R_EDX, R_ECX ); // 12
3182 JMP_TARGET(doubleprec);
3186 switch( (ir&0xC00) >> 10 ) {
3190 load_spreg( R_ECX, R_FPSCR );
3191 XOR_imm32_r32( FPSCR_SZ, R_ECX );
3192 store_spreg( R_ECX, R_FPSCR );
3198 load_spreg( R_ECX, R_FPSCR );
3199 XOR_imm32_r32( FPSCR_FR, R_ECX );
3200 store_spreg( R_ECX, R_FPSCR );
3201 update_fr_bank( R_ECX );
3206 if( sh4_x86.in_delay_slot ) {
3209 JMP_exit(EXIT_ILLEGAL);
3229 { /* FMAC FR0, FRm, FRn */
3230 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
3232 load_spreg( R_ECX, R_FPSCR );
3233 load_spreg( R_EDX, REG_OFFSET(fr_bank));
3234 TEST_imm32_r32( FPSCR_PR, R_ECX );
3235 JNE_rel8(18, doubleprec);
3236 push_fr( R_EDX, 0 );
3237 push_fr( R_EDX, FRm );
3239 push_fr( R_EDX, FRn );
3241 pop_fr( R_EDX, FRn );
3243 JMP_TARGET(doubleprec);
3244 push_dr( R_EDX, 0 );
3245 push_dr( R_EDX, FRm );
3247 push_dr( R_EDX, FRn );
3249 pop_dr( R_EDX, FRn );
3260 if( sh4_x86.in_delay_slot ) {
3261 ADD_imm8s_r32(2,R_ESI);
3262 sh4_x86.in_delay_slot = FALSE;
.