2 * $Id: sh4x86.in,v 1.20 2007-11-08 11:54:16 nkeynes Exp $
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
8 * Copyright (c) 2007 Nathan Keynes.
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
28 #include "sh4/xltcache.h"
29 #include "sh4/sh4core.h"
30 #include "sh4/sh4trans.h"
31 #include "sh4/sh4mmio.h"
32 #include "sh4/x86op.h"
35 #define DEFAULT_BACKPATCH_SIZE 4096 /* initial back-patch list allocation, in bytes */
/*
38 * Struct to manage internal translation state. This state is not saved -
39 * it is only valid between calls to sh4_translate_begin_block() and
40 * sh4_translate_end_block()
 */
42 struct sh4_x86_state {
43 gboolean in_delay_slot; /* true while translating a branch delay-slot instruction */
44 gboolean priv_checked; /* true if we've already checked the cpu mode. */
45 gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
46 gboolean branch_taken; /* true if we branched unconditionally */
47 uint32_t block_start_pc; /* SH4 PC of the first instruction in the current block */
48 uint32_t stack_posn; /* Trace stack height for alignment purposes */
51 /* Allocated memory for the (block-wide) back-patch list */
52 uint32_t **backpatch_list;
53 uint32_t backpatch_posn; /* number of entries currently used in backpatch_list */
54 uint32_t backpatch_size; /* capacity of backpatch_list, in entries (not bytes) */
57 #define TSTATE_NONE -1
/* sh4_x86.tstate tracks which x86 condition code currently mirrors the SH4 T
 * flag; TSTATE_NONE means no flag is cached and T must be re-read from
 * sh4r.t. When set, the value is the low nibble of the matching x86 Jcc
 * opcode, so OP(0x70+tstate) emits "jump if T set" and OP(0x70+(tstate^1))
 * the inverted branch (x86 Jcc opcodes pair condition/!condition on bit 0). */
67 /** Branch if T is set (either in the current cflags, or in sh4r.t) */
68 #define JT_rel8(rel8,label) if( sh4_x86.tstate == TSTATE_NONE ) { \
69 CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
70 OP(0x70+sh4_x86.tstate); OP(rel8); \
72 /** Branch if T is clear (either in the current cflags or in sh4r.t) */
73 #define JF_rel8(rel8,label) if( sh4_x86.tstate == TSTATE_NONE ) { \
74 CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
75 OP(0x70+ (sh4_x86.tstate^1)); OP(rel8); \
79 #define EXIT_DATA_ADDR_READ 0 /* exit-stub offsets, spaced 7 apart -- presumably the per-stub byte size; confirm against the block epilogue emitter */
80 #define EXIT_DATA_ADDR_WRITE 7
81 #define EXIT_ILLEGAL 14
82 #define EXIT_SLOT_ILLEGAL 21
83 #define EXIT_FPU_DISABLED 28
84 #define EXIT_SLOT_FPU_DISABLED 35
/* Single translator instance -- only one block is translated at a time. */
86 static struct sh4_x86_state sh4_x86;
/* FPU constants/scratch; presumably referenced by address from emitted x87
 * code (e.g. FTRC saturation and control-word switching) -- confirm at uses. */
88 static uint32_t max_int = 0x7FFFFFFF;
89 static uint32_t min_int = 0x80000000;
90 static uint32_t save_fcw; /* save value for fpu control word */
91 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
/* (translator init) allocate the initial back-patch list; note size is
 * tracked in pointer entries while malloc is given a byte count.
 * NOTE(review): malloc result is not checked here -- confirm policy. */
95 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
96 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
/*
 * Record the address of a 32-bit relative displacement in the output stream
 * that must be fixed up once the block's exit address is known (applied by
 * sh4_x86_do_backpatch below). The list grows by doubling when full.
 */
100 static void sh4_x86_add_backpatch( uint8_t *ptr )
102 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
103 sh4_x86.backpatch_size <<= 1;
/* NOTE(review): realloc result overwrites the pointer directly -- on failure
 * the old list leaks, and with NDEBUG the assert below is compiled out,
 * leading to a NULL dereference. Prefer a temporary pointer. */
104 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
105 assert( sh4_x86.backpatch_list != NULL );
107 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
/*
 * Fix up every recorded rel32 displacement to target reloc_base. Each list
 * entry points at the 4-byte displacement field of an emitted jump/call; the
 * stored value is adjusted by (reloc_base - address_after_displacement),
 * i.e. the standard x86 rel32 encoding (hence the -4).
 */
110 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
113 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
114 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
/*
119 * Emit an instruction to load an SH4 reg into a real register
 */
121 static inline void load_reg( int x86reg, int sh4reg )
123 /* mov [bp+n], reg */
/* ModRM 0x45+reg<<3: mod=01 (disp8), reg=x86reg, rm=101 (ebp-relative);
 * ebp holds the sh4r base pointer throughout translated code. */
125 OP(0x45 + (x86reg<<3));
126 OP(REG_OFFSET(r[sh4reg]));
/* 16-bit load of an SH4 GPR -- presumably sign-extending ("16s"); the
 * opcode bytes preceding the ModRM are not visible in this fragment. */
129 static inline void load_reg16s( int x86reg, int sh4reg )
133 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
/* 16-bit load of an SH4 GPR -- presumably zero-extending ("16u"). */
136 static inline void load_reg16u( int x86reg, int sh4reg )
140 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
/* Load/store a special (non-GPR) sh4r field, addressed by byte offset. */
144 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
145 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
/*
147 * Emit an instruction to load an immediate value into a register
 */
149 static inline void load_imm32( int x86reg, uint32_t value ) {
150 /* mov #value, reg */
/*
156 * Load an immediate 64-bit quantity (note: x86-64 only)
 */
/* NOTE(review): the parameter is declared uint32_t although the comment
 * promises a 64-bit quantity -- the upper 32 bits can never be nonzero.
 * Should almost certainly be uint64_t. */
158 static inline void load_imm64( int x86reg, uint32_t value ) {
159 /* mov #value, reg */
/*
167 * Emit an instruction to store an SH4 reg (RN)
 */
/* NOTE(review): "void static inline" is legal but unconventional ordering;
 * the sibling helpers use "static inline void". */
169 void static inline store_reg( int x86reg, int sh4reg ) {
170 /* mov reg, [bp+n] */
/* Same ebp-relative disp8 ModRM encoding as load_reg, direction reversed. */
172 OP(0x45 + (x86reg<<3));
173 OP(REG_OFFSET(r[sh4reg]));
/* Load the cached current-FP-bank pointer (sh4r.fr_bank) into bankreg. */
176 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
/*
179 * Load an FR register (single-precision floating point) into an integer x86
180 * register (eg for register-to-register moves)
 */
/* 0x8B = MOV r32, r/m32; disp8 = (frm^1)*4 -- singles are held pair-swapped
 * within the bank (same ^1 convention as push_fr/pop_fr below). */
182 void static inline load_fr( int bankreg, int x86reg, int frm )
184 OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
/*
188 * Store an FR register (single-precision floating point) into an integer x86
189 * register (eg for register-to-register moves)
 */
/* 0x89 = MOV r/m32, r32; mirror image of load_fr. */
191 void static inline store_fr( int bankreg, int x86reg, int frn )
193 OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
/*
198 * Load a pointer to the back (XF) floating-point bank into the specified x86
199 * register. The bankreg must have been previously loaded with FPSCR.
 */
202 static inline void load_xf_bank( int bankreg )
205 SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
206 AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
/* 0x8D = LEA; computes &sh4r.fr[0] + (FPSCR.FR ? 64 : 0) bytes. */
207 OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
/*
211 * Update the fr_bank pointer based on the current fpscr value.
 */
/* Same FR-bit extraction/LEA sequence as load_xf_bank, but the resulting
 * bank pointer is written back to sh4r.fr_bank. Clobbers fpscrreg. */
213 static inline void update_fr_bank( int fpscrreg )
215 SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
216 AND_imm8s_r32( 0x40, fpscrreg ); // Complete extraction
217 OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
218 store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
/*
221 * Push FPUL (as a 32-bit float) onto the FPU stack
 */
223 static inline void push_fpul( )
/* 0xD9 /0 with [ebp+disp8]: FLD.S [ebp+R_FPUL] */
225 OP(0xD9); OP(0x45); OP(R_FPUL);
/*
229 * Pop FPUL (as a 32-bit float) from the FPU stack
 */
231 static inline void pop_fpul( )
/* 0xD9 /3 with [ebp+disp8]: FSTP.S [ebp+R_FPUL] (pops ST0) */
233 OP(0xD9); OP(0x5D); OP(R_FPUL);
/*
237 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
238 * with the location of the current fp bank.
 */
/* frm^1: singles are stored pair-swapped in the bank (see load_fr). */
240 static inline void push_fr( int bankreg, int frm )
242 OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
/*
246 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
247 * with bankreg previously loaded with the location of the current fp bank.
 */
249 static inline void pop_fr( int bankreg, int frm )
251 OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
/*
255 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
256 * with the location of the current fp bank.
 */
/* No ^1 here: doubles occupy an aligned pair, addressed directly. */
258 static inline void push_dr( int bankreg, int frm )
260 OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
/* Pop a 64-bit double from the FPU stack back into the current fp bank. */
263 static inline void pop_dr( int bankreg, int frm )
265 OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
268 /* Exception checks - Note that all exception checks will clobber EAX */
/* precheck(): load into EDX the offset (in instruction words) of the current
 * pc from the block start, adjusted back by one slot when inside a delay
 * slot -- presumably consumed by the exit stubs to recover the faulting PC. */
269 #define precheck() load_imm32(R_EDX, (pc-sh4_x86.block_start_pc-(sh4_x86.in_delay_slot?2:0))>>1)
/* check_priv(): emit a once-per-block privilege check -- test SR.MD and exit
 * through EXIT_SLOT_ILLEGAL (in a delay slot) or EXIT_ILLEGAL when clear.
 * The priv_checked latch suppresses duplicate checks within a block. */
271 #define check_priv( ) \
272 if( !sh4_x86.priv_checked ) { \
273 sh4_x86.priv_checked = TRUE;\
275 load_spreg( R_EAX, R_SR );\
276 AND_imm32_r32( SR_MD, R_EAX );\
277 if( sh4_x86.in_delay_slot ) {\
278 JE_exit( EXIT_SLOT_ILLEGAL );\
280 JE_exit( EXIT_ILLEGAL );\
285 static void check_priv_no_precheck() /* as check_priv, for callers that emit precheck() themselves -- confirm */
287 if( !sh4_x86.priv_checked ) {
288 sh4_x86.priv_checked = TRUE;
289 load_spreg( R_EAX, R_SR );
290 AND_imm32_r32( SR_MD, R_EAX );
291 if( sh4_x86.in_delay_slot ) {
292 JE_exit( EXIT_SLOT_ILLEGAL );
294 JE_exit( EXIT_ILLEGAL );
/* check_fpuen(): once-per-block FPU-enabled check -- test SR.FD and exit via
 * the FPU-disabled stubs when the disable bit is set (JNE: FD != 0). */
299 #define check_fpuen( ) \
300 if( !sh4_x86.fpuen_checked ) {\
301 sh4_x86.fpuen_checked = TRUE;\
303 load_spreg( R_EAX, R_SR );\
304 AND_imm32_r32( SR_FD, R_EAX );\
305 if( sh4_x86.in_delay_slot ) {\
306 JNE_exit(EXIT_SLOT_FPU_DISABLED);\
308 JNE_exit(EXIT_FPU_DISABLED);\
312 static void check_fpuen_no_precheck() /* as check_fpuen, for callers that emit precheck() themselves -- confirm */
314 if( !sh4_x86.fpuen_checked ) {
315 sh4_x86.fpuen_checked = TRUE;
316 load_spreg( R_EAX, R_SR );
317 AND_imm32_r32( SR_FD, R_EAX );
318 if( sh4_x86.in_delay_slot ) {
319 JNE_exit(EXIT_SLOT_FPU_DISABLED);
321 JNE_exit(EXIT_FPU_DISABLED);
/* Emit a 16-bit read-alignment check: exit through the data-address-error
 * read stub if bit 0 of the address register is set. */
327 static void check_ralign16( int x86reg )
329 TEST_imm32_r32( 0x00000001, x86reg );
330 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 16-bit write-alignment check (address-error write exit). */
333 static void check_walign16( int x86reg )
335 TEST_imm32_r32( 0x00000001, x86reg );
336 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Emit a 32-bit read-alignment check: low two address bits must be clear. */
339 static void check_ralign32( int x86reg )
341 TEST_imm32_r32( 0x00000003, x86reg );
342 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 32-bit write-alignment check (address-error write exit). */
344 static void check_walign32( int x86reg )
346 TEST_imm32_r32( 0x00000003, x86reg );
347 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Memory access wrappers: each read/write is a call out to the sh4_read_* /
 * sh4_write_* helpers, so results arrive in EAX per the calling convention.
 * MEM_RESULT moves the returned value into the requested register when the
 * caller did not ask for EAX itself. */
351 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
352 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
353 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
354 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
355 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
356 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
357 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
/* Abort translation of an instruction that is illegal in a delay slot:
 * emit the slot-illegal exit and end the basic block (return 1). */
359 #define SLOTILLEGAL() precheck(); JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
361 extern uint16_t *sh4_icache;
362 extern uint32_t sh4_icache_addr;
364 /****** Import appropriate calling conventions ******/
365 #if SH4_TRANSLATOR == TARGET_X86_64
366 #include "sh4/ia64abi.h"
367 #else /* SH4_TRANSLATOR == TARGET_X86 */
369 #include "sh4/ia32mac.h"
371 #include "sh4/ia32abi.h"
377 * Translate a single instruction. Delayed branches are handled specially
378 * by translating both branch and delayed instruction as a single unit (as
381 * @return true if the instruction marks the end of a basic block
384 uint32_t sh4_translate_instruction( sh4addr_t pc )
387 /* Read instruction */
388 uint32_t pageaddr = pc >> 12;
389 if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
390 ir = sh4_icache[(pc&0xFFF)>>1];
392 sh4_icache = (uint16_t *)mem_get_page(pc);
393 if( ((uintptr_t)sh4_icache) < MAX_IO_REGIONS ) {
394 /* If someone's actually been so daft as to try to execute out of an IO
395 * region, fallback on the full-blown memory read
398 ir = sh4_read_word(pc);
400 sh4_icache_addr = pageaddr;
401 ir = sh4_icache[(pc&0xFFF)>>1];
405 switch( (ir&0xF000) >> 12 ) {
409 switch( (ir&0x80) >> 7 ) {
411 switch( (ir&0x70) >> 4 ) {
414 uint32_t Rn = ((ir>>8)&0xF);
416 call_func0(sh4_read_sr);
417 store_reg( R_EAX, Rn );
418 sh4_x86.tstate = TSTATE_NONE;
423 uint32_t Rn = ((ir>>8)&0xF);
424 load_spreg( R_EAX, R_GBR );
425 store_reg( R_EAX, Rn );
430 uint32_t Rn = ((ir>>8)&0xF);
432 load_spreg( R_EAX, R_VBR );
433 store_reg( R_EAX, Rn );
434 sh4_x86.tstate = TSTATE_NONE;
439 uint32_t Rn = ((ir>>8)&0xF);
441 load_spreg( R_EAX, R_SSR );
442 store_reg( R_EAX, Rn );
443 sh4_x86.tstate = TSTATE_NONE;
448 uint32_t Rn = ((ir>>8)&0xF);
450 load_spreg( R_EAX, R_SPC );
451 store_reg( R_EAX, Rn );
452 sh4_x86.tstate = TSTATE_NONE;
461 { /* STC Rm_BANK, Rn */
462 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
464 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
465 store_reg( R_EAX, Rn );
466 sh4_x86.tstate = TSTATE_NONE;
472 switch( (ir&0xF0) >> 4 ) {
475 uint32_t Rn = ((ir>>8)&0xF);
476 if( sh4_x86.in_delay_slot ) {
479 load_imm32( R_ECX, pc + 4 );
480 store_spreg( R_ECX, R_PR );
481 ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_ECX );
482 store_spreg( R_ECX, REG_OFFSET(pc) );
483 sh4_x86.in_delay_slot = TRUE;
484 sh4_x86.tstate = TSTATE_NONE;
485 sh4_translate_instruction( pc + 2 );
486 exit_block_pcset(pc+2);
487 sh4_x86.branch_taken = TRUE;
494 uint32_t Rn = ((ir>>8)&0xF);
495 if( sh4_x86.in_delay_slot ) {
498 load_reg( R_EAX, Rn );
499 ADD_imm32_r32( pc + 4, R_EAX );
500 store_spreg( R_EAX, REG_OFFSET(pc) );
501 sh4_x86.in_delay_slot = TRUE;
502 sh4_x86.tstate = TSTATE_NONE;
503 sh4_translate_instruction( pc + 2 );
504 exit_block_pcset(pc+2);
505 sh4_x86.branch_taken = TRUE;
512 uint32_t Rn = ((ir>>8)&0xF);
513 load_reg( R_EAX, Rn );
514 MOV_r32_r32( R_EAX, R_ECX );
515 AND_imm32_r32( 0xFC000000, R_EAX );
516 CMP_imm32_r32( 0xE0000000, R_EAX );
517 JNE_rel8(CALL_FUNC1_SIZE, end);
518 call_func1( sh4_flush_store_queue, R_ECX );
520 sh4_x86.tstate = TSTATE_NONE;
525 uint32_t Rn = ((ir>>8)&0xF);
530 uint32_t Rn = ((ir>>8)&0xF);
535 uint32_t Rn = ((ir>>8)&0xF);
539 { /* MOVCA.L R0, @Rn */
540 uint32_t Rn = ((ir>>8)&0xF);
541 load_reg( R_EAX, 0 );
542 load_reg( R_ECX, Rn );
544 check_walign32( R_ECX );
545 MEM_WRITE_LONG( R_ECX, R_EAX );
546 sh4_x86.tstate = TSTATE_NONE;
555 { /* MOV.B Rm, @(R0, Rn) */
556 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
557 load_reg( R_EAX, 0 );
558 load_reg( R_ECX, Rn );
559 ADD_r32_r32( R_EAX, R_ECX );
560 load_reg( R_EAX, Rm );
561 MEM_WRITE_BYTE( R_ECX, R_EAX );
562 sh4_x86.tstate = TSTATE_NONE;
566 { /* MOV.W Rm, @(R0, Rn) */
567 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
568 load_reg( R_EAX, 0 );
569 load_reg( R_ECX, Rn );
570 ADD_r32_r32( R_EAX, R_ECX );
572 check_walign16( R_ECX );
573 load_reg( R_EAX, Rm );
574 MEM_WRITE_WORD( R_ECX, R_EAX );
575 sh4_x86.tstate = TSTATE_NONE;
579 { /* MOV.L Rm, @(R0, Rn) */
580 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
581 load_reg( R_EAX, 0 );
582 load_reg( R_ECX, Rn );
583 ADD_r32_r32( R_EAX, R_ECX );
585 check_walign32( R_ECX );
586 load_reg( R_EAX, Rm );
587 MEM_WRITE_LONG( R_ECX, R_EAX );
588 sh4_x86.tstate = TSTATE_NONE;
593 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
594 load_reg( R_EAX, Rm );
595 load_reg( R_ECX, Rn );
597 store_spreg( R_EAX, R_MACL );
598 sh4_x86.tstate = TSTATE_NONE;
602 switch( (ir&0xFF0) >> 4 ) {
607 sh4_x86.tstate = TSTATE_C;
614 sh4_x86.tstate = TSTATE_C;
619 XOR_r32_r32(R_EAX, R_EAX);
620 store_spreg( R_EAX, R_MACL );
621 store_spreg( R_EAX, R_MACH );
622 sh4_x86.tstate = TSTATE_NONE;
633 sh4_x86.tstate = TSTATE_C;
640 sh4_x86.tstate = TSTATE_C;
649 switch( (ir&0xF0) >> 4 ) {
652 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
657 XOR_r32_r32( R_EAX, R_EAX );
658 store_spreg( R_EAX, R_Q );
659 store_spreg( R_EAX, R_M );
660 store_spreg( R_EAX, R_T );
661 sh4_x86.tstate = TSTATE_C; // works for DIV1
666 uint32_t Rn = ((ir>>8)&0xF);
667 load_spreg( R_EAX, R_T );
668 store_reg( R_EAX, Rn );
677 switch( (ir&0xF0) >> 4 ) {
680 uint32_t Rn = ((ir>>8)&0xF);
681 load_spreg( R_EAX, R_MACH );
682 store_reg( R_EAX, Rn );
687 uint32_t Rn = ((ir>>8)&0xF);
688 load_spreg( R_EAX, R_MACL );
689 store_reg( R_EAX, Rn );
694 uint32_t Rn = ((ir>>8)&0xF);
695 load_spreg( R_EAX, R_PR );
696 store_reg( R_EAX, Rn );
701 uint32_t Rn = ((ir>>8)&0xF);
703 load_spreg( R_EAX, R_SGR );
704 store_reg( R_EAX, Rn );
705 sh4_x86.tstate = TSTATE_NONE;
710 uint32_t Rn = ((ir>>8)&0xF);
711 load_spreg( R_EAX, R_FPUL );
712 store_reg( R_EAX, Rn );
716 { /* STS FPSCR, Rn */
717 uint32_t Rn = ((ir>>8)&0xF);
718 load_spreg( R_EAX, R_FPSCR );
719 store_reg( R_EAX, Rn );
724 uint32_t Rn = ((ir>>8)&0xF);
726 load_spreg( R_EAX, R_DBR );
727 store_reg( R_EAX, Rn );
728 sh4_x86.tstate = TSTATE_NONE;
737 switch( (ir&0xFF0) >> 4 ) {
740 if( sh4_x86.in_delay_slot ) {
743 load_spreg( R_ECX, R_PR );
744 store_spreg( R_ECX, REG_OFFSET(pc) );
745 sh4_x86.in_delay_slot = TRUE;
746 sh4_translate_instruction(pc+2);
747 exit_block_pcset(pc+2);
748 sh4_x86.branch_taken = TRUE;
756 call_func0( sh4_sleep );
757 sh4_x86.tstate = TSTATE_NONE;
758 sh4_x86.in_delay_slot = FALSE;
764 if( sh4_x86.in_delay_slot ) {
768 load_spreg( R_ECX, R_SPC );
769 store_spreg( R_ECX, REG_OFFSET(pc) );
770 load_spreg( R_EAX, R_SSR );
771 call_func1( sh4_write_sr, R_EAX );
772 sh4_x86.in_delay_slot = TRUE;
773 sh4_x86.priv_checked = FALSE;
774 sh4_x86.fpuen_checked = FALSE;
775 sh4_x86.tstate = TSTATE_NONE;
776 sh4_translate_instruction(pc+2);
777 exit_block_pcset(pc+2);
778 sh4_x86.branch_taken = TRUE;
789 { /* MOV.B @(R0, Rm), Rn */
790 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
791 load_reg( R_EAX, 0 );
792 load_reg( R_ECX, Rm );
793 ADD_r32_r32( R_EAX, R_ECX );
794 MEM_READ_BYTE( R_ECX, R_EAX );
795 store_reg( R_EAX, Rn );
796 sh4_x86.tstate = TSTATE_NONE;
800 { /* MOV.W @(R0, Rm), Rn */
801 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
802 load_reg( R_EAX, 0 );
803 load_reg( R_ECX, Rm );
804 ADD_r32_r32( R_EAX, R_ECX );
806 check_ralign16( R_ECX );
807 MEM_READ_WORD( R_ECX, R_EAX );
808 store_reg( R_EAX, Rn );
809 sh4_x86.tstate = TSTATE_NONE;
813 { /* MOV.L @(R0, Rm), Rn */
814 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
815 load_reg( R_EAX, 0 );
816 load_reg( R_ECX, Rm );
817 ADD_r32_r32( R_EAX, R_ECX );
819 check_ralign32( R_ECX );
820 MEM_READ_LONG( R_ECX, R_EAX );
821 store_reg( R_EAX, Rn );
822 sh4_x86.tstate = TSTATE_NONE;
826 { /* MAC.L @Rm+, @Rn+ */
827 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
828 load_reg( R_ECX, Rm );
830 check_ralign32( R_ECX );
831 load_reg( R_ECX, Rn );
832 check_ralign32( R_ECX );
833 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
834 MEM_READ_LONG( R_ECX, R_EAX );
836 load_reg( R_ECX, Rm );
837 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
838 MEM_READ_LONG( R_ECX, R_EAX );
841 ADD_r32_sh4r( R_EAX, R_MACL );
842 ADC_r32_sh4r( R_EDX, R_MACH );
844 load_spreg( R_ECX, R_S );
845 TEST_r32_r32(R_ECX, R_ECX);
846 JE_rel8( CALL_FUNC0_SIZE, nosat );
847 call_func0( signsat48 );
849 sh4_x86.tstate = TSTATE_NONE;
858 { /* MOV.L Rm, @(disp, Rn) */
859 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
860 load_reg( R_ECX, Rn );
861 load_reg( R_EAX, Rm );
862 ADD_imm32_r32( disp, R_ECX );
864 check_walign32( R_ECX );
865 MEM_WRITE_LONG( R_ECX, R_EAX );
866 sh4_x86.tstate = TSTATE_NONE;
872 { /* MOV.B Rm, @Rn */
873 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
874 load_reg( R_EAX, Rm );
875 load_reg( R_ECX, Rn );
876 MEM_WRITE_BYTE( R_ECX, R_EAX );
877 sh4_x86.tstate = TSTATE_NONE;
881 { /* MOV.W Rm, @Rn */
882 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
883 load_reg( R_ECX, Rn );
885 check_walign16( R_ECX );
886 load_reg( R_EAX, Rm );
887 MEM_WRITE_WORD( R_ECX, R_EAX );
888 sh4_x86.tstate = TSTATE_NONE;
892 { /* MOV.L Rm, @Rn */
893 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
894 load_reg( R_EAX, Rm );
895 load_reg( R_ECX, Rn );
897 check_walign32(R_ECX);
898 MEM_WRITE_LONG( R_ECX, R_EAX );
899 sh4_x86.tstate = TSTATE_NONE;
903 { /* MOV.B Rm, @-Rn */
904 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
905 load_reg( R_EAX, Rm );
906 load_reg( R_ECX, Rn );
907 ADD_imm8s_r32( -1, R_ECX );
908 store_reg( R_ECX, Rn );
909 MEM_WRITE_BYTE( R_ECX, R_EAX );
910 sh4_x86.tstate = TSTATE_NONE;
914 { /* MOV.W Rm, @-Rn */
915 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
916 load_reg( R_ECX, Rn );
918 check_walign16( R_ECX );
919 load_reg( R_EAX, Rm );
920 ADD_imm8s_r32( -2, R_ECX );
921 store_reg( R_ECX, Rn );
922 MEM_WRITE_WORD( R_ECX, R_EAX );
923 sh4_x86.tstate = TSTATE_NONE;
927 { /* MOV.L Rm, @-Rn */
928 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
929 load_reg( R_EAX, Rm );
930 load_reg( R_ECX, Rn );
932 check_walign32( R_ECX );
933 ADD_imm8s_r32( -4, R_ECX );
934 store_reg( R_ECX, Rn );
935 MEM_WRITE_LONG( R_ECX, R_EAX );
936 sh4_x86.tstate = TSTATE_NONE;
941 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
942 load_reg( R_EAX, Rm );
943 load_reg( R_ECX, Rn );
944 SHR_imm8_r32( 31, R_EAX );
945 SHR_imm8_r32( 31, R_ECX );
946 store_spreg( R_EAX, R_M );
947 store_spreg( R_ECX, R_Q );
948 CMP_r32_r32( R_EAX, R_ECX );
950 sh4_x86.tstate = TSTATE_NE;
955 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
956 load_reg( R_EAX, Rm );
957 load_reg( R_ECX, Rn );
958 TEST_r32_r32( R_EAX, R_ECX );
960 sh4_x86.tstate = TSTATE_E;
965 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
966 load_reg( R_EAX, Rm );
967 load_reg( R_ECX, Rn );
968 AND_r32_r32( R_EAX, R_ECX );
969 store_reg( R_ECX, Rn );
970 sh4_x86.tstate = TSTATE_NONE;
975 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
976 load_reg( R_EAX, Rm );
977 load_reg( R_ECX, Rn );
978 XOR_r32_r32( R_EAX, R_ECX );
979 store_reg( R_ECX, Rn );
980 sh4_x86.tstate = TSTATE_NONE;
985 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
986 load_reg( R_EAX, Rm );
987 load_reg( R_ECX, Rn );
988 OR_r32_r32( R_EAX, R_ECX );
989 store_reg( R_ECX, Rn );
990 sh4_x86.tstate = TSTATE_NONE;
994 { /* CMP/STR Rm, Rn */
995 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
996 load_reg( R_EAX, Rm );
997 load_reg( R_ECX, Rn );
998 XOR_r32_r32( R_ECX, R_EAX );
999 TEST_r8_r8( R_AL, R_AL );
1000 JE_rel8(13, target1);
1001 TEST_r8_r8( R_AH, R_AH ); // 2
1002 JE_rel8(9, target2);
1003 SHR_imm8_r32( 16, R_EAX ); // 3
1004 TEST_r8_r8( R_AL, R_AL ); // 2
1005 JE_rel8(2, target3);
1006 TEST_r8_r8( R_AH, R_AH ); // 2
1007 JMP_TARGET(target1);
1008 JMP_TARGET(target2);
1009 JMP_TARGET(target3);
1011 sh4_x86.tstate = TSTATE_E;
1015 { /* XTRCT Rm, Rn */
1016 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1017 load_reg( R_EAX, Rm );
1018 load_reg( R_ECX, Rn );
1019 SHL_imm8_r32( 16, R_EAX );
1020 SHR_imm8_r32( 16, R_ECX );
1021 OR_r32_r32( R_EAX, R_ECX );
1022 store_reg( R_ECX, Rn );
1023 sh4_x86.tstate = TSTATE_NONE;
1027 { /* MULU.W Rm, Rn */
1028 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1029 load_reg16u( R_EAX, Rm );
1030 load_reg16u( R_ECX, Rn );
1032 store_spreg( R_EAX, R_MACL );
1033 sh4_x86.tstate = TSTATE_NONE;
1037 { /* MULS.W Rm, Rn */
1038 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1039 load_reg16s( R_EAX, Rm );
1040 load_reg16s( R_ECX, Rn );
1042 store_spreg( R_EAX, R_MACL );
1043 sh4_x86.tstate = TSTATE_NONE;
1054 { /* CMP/EQ Rm, Rn */
1055 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1056 load_reg( R_EAX, Rm );
1057 load_reg( R_ECX, Rn );
1058 CMP_r32_r32( R_EAX, R_ECX );
1060 sh4_x86.tstate = TSTATE_E;
1064 { /* CMP/HS Rm, Rn */
1065 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1066 load_reg( R_EAX, Rm );
1067 load_reg( R_ECX, Rn );
1068 CMP_r32_r32( R_EAX, R_ECX );
1070 sh4_x86.tstate = TSTATE_AE;
1074 { /* CMP/GE Rm, Rn */
1075 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1076 load_reg( R_EAX, Rm );
1077 load_reg( R_ECX, Rn );
1078 CMP_r32_r32( R_EAX, R_ECX );
1080 sh4_x86.tstate = TSTATE_GE;
1085 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1086 load_spreg( R_ECX, R_M );
1087 load_reg( R_EAX, Rn );
1088 if( sh4_x86.tstate != TSTATE_C ) {
1092 SETC_r8( R_DL ); // Q'
1093 CMP_sh4r_r32( R_Q, R_ECX );
1094 JE_rel8(5, mqequal);
1095 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
1097 JMP_TARGET(mqequal);
1098 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
1100 store_reg( R_EAX, Rn ); // Done with Rn now
1101 SETC_r8(R_AL); // tmp1
1102 XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
1103 XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
1104 store_spreg( R_ECX, R_Q );
1105 XOR_imm8s_r32( 1, R_AL ); // T = !Q'
1106 MOVZX_r8_r32( R_AL, R_EAX );
1107 store_spreg( R_EAX, R_T );
1108 sh4_x86.tstate = TSTATE_NONE;
1112 { /* DMULU.L Rm, Rn */
1113 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1114 load_reg( R_EAX, Rm );
1115 load_reg( R_ECX, Rn );
1117 store_spreg( R_EDX, R_MACH );
1118 store_spreg( R_EAX, R_MACL );
1119 sh4_x86.tstate = TSTATE_NONE;
1123 { /* CMP/HI Rm, Rn */
1124 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1125 load_reg( R_EAX, Rm );
1126 load_reg( R_ECX, Rn );
1127 CMP_r32_r32( R_EAX, R_ECX );
1129 sh4_x86.tstate = TSTATE_A;
1133 { /* CMP/GT Rm, Rn */
1134 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1135 load_reg( R_EAX, Rm );
1136 load_reg( R_ECX, Rn );
1137 CMP_r32_r32( R_EAX, R_ECX );
1139 sh4_x86.tstate = TSTATE_G;
1144 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1145 load_reg( R_EAX, Rm );
1146 load_reg( R_ECX, Rn );
1147 SUB_r32_r32( R_EAX, R_ECX );
1148 store_reg( R_ECX, Rn );
1149 sh4_x86.tstate = TSTATE_NONE;
1154 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1155 load_reg( R_EAX, Rm );
1156 load_reg( R_ECX, Rn );
1157 if( sh4_x86.tstate != TSTATE_C ) {
1160 SBB_r32_r32( R_EAX, R_ECX );
1161 store_reg( R_ECX, Rn );
1163 sh4_x86.tstate = TSTATE_C;
1168 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1169 load_reg( R_EAX, Rm );
1170 load_reg( R_ECX, Rn );
1171 SUB_r32_r32( R_EAX, R_ECX );
1172 store_reg( R_ECX, Rn );
1174 sh4_x86.tstate = TSTATE_O;
1179 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1180 load_reg( R_EAX, Rm );
1181 load_reg( R_ECX, Rn );
1182 ADD_r32_r32( R_EAX, R_ECX );
1183 store_reg( R_ECX, Rn );
1184 sh4_x86.tstate = TSTATE_NONE;
1188 { /* DMULS.L Rm, Rn */
1189 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1190 load_reg( R_EAX, Rm );
1191 load_reg( R_ECX, Rn );
1193 store_spreg( R_EDX, R_MACH );
1194 store_spreg( R_EAX, R_MACL );
1195 sh4_x86.tstate = TSTATE_NONE;
1200 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1201 if( sh4_x86.tstate != TSTATE_C ) {
1204 load_reg( R_EAX, Rm );
1205 load_reg( R_ECX, Rn );
1206 ADC_r32_r32( R_EAX, R_ECX );
1207 store_reg( R_ECX, Rn );
1209 sh4_x86.tstate = TSTATE_C;
1214 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1215 load_reg( R_EAX, Rm );
1216 load_reg( R_ECX, Rn );
1217 ADD_r32_r32( R_EAX, R_ECX );
1218 store_reg( R_ECX, Rn );
1220 sh4_x86.tstate = TSTATE_O;
1231 switch( (ir&0xF0) >> 4 ) {
1234 uint32_t Rn = ((ir>>8)&0xF);
1235 load_reg( R_EAX, Rn );
1238 store_reg( R_EAX, Rn );
1239 sh4_x86.tstate = TSTATE_C;
1244 uint32_t Rn = ((ir>>8)&0xF);
1245 load_reg( R_EAX, Rn );
1246 ADD_imm8s_r32( -1, R_EAX );
1247 store_reg( R_EAX, Rn );
1249 sh4_x86.tstate = TSTATE_E;
1254 uint32_t Rn = ((ir>>8)&0xF);
1255 load_reg( R_EAX, Rn );
1258 store_reg( R_EAX, Rn );
1259 sh4_x86.tstate = TSTATE_C;
1268 switch( (ir&0xF0) >> 4 ) {
1271 uint32_t Rn = ((ir>>8)&0xF);
1272 load_reg( R_EAX, Rn );
1275 store_reg( R_EAX, Rn );
1276 sh4_x86.tstate = TSTATE_C;
1281 uint32_t Rn = ((ir>>8)&0xF);
1282 load_reg( R_EAX, Rn );
1283 CMP_imm8s_r32( 0, R_EAX );
1285 sh4_x86.tstate = TSTATE_GE;
1290 uint32_t Rn = ((ir>>8)&0xF);
1291 load_reg( R_EAX, Rn );
1294 store_reg( R_EAX, Rn );
1295 sh4_x86.tstate = TSTATE_C;
1304 switch( (ir&0xF0) >> 4 ) {
1306 { /* STS.L MACH, @-Rn */
1307 uint32_t Rn = ((ir>>8)&0xF);
1308 load_reg( R_ECX, Rn );
1310 check_walign32( R_ECX );
1311 ADD_imm8s_r32( -4, R_ECX );
1312 store_reg( R_ECX, Rn );
1313 load_spreg( R_EAX, R_MACH );
1314 MEM_WRITE_LONG( R_ECX, R_EAX );
1315 sh4_x86.tstate = TSTATE_NONE;
1319 { /* STS.L MACL, @-Rn */
1320 uint32_t Rn = ((ir>>8)&0xF);
1321 load_reg( R_ECX, Rn );
1323 check_walign32( R_ECX );
1324 ADD_imm8s_r32( -4, R_ECX );
1325 store_reg( R_ECX, Rn );
1326 load_spreg( R_EAX, R_MACL );
1327 MEM_WRITE_LONG( R_ECX, R_EAX );
1328 sh4_x86.tstate = TSTATE_NONE;
1332 { /* STS.L PR, @-Rn */
1333 uint32_t Rn = ((ir>>8)&0xF);
1334 load_reg( R_ECX, Rn );
1336 check_walign32( R_ECX );
1337 ADD_imm8s_r32( -4, R_ECX );
1338 store_reg( R_ECX, Rn );
1339 load_spreg( R_EAX, R_PR );
1340 MEM_WRITE_LONG( R_ECX, R_EAX );
1341 sh4_x86.tstate = TSTATE_NONE;
1345 { /* STC.L SGR, @-Rn */
1346 uint32_t Rn = ((ir>>8)&0xF);
1348 check_priv_no_precheck();
1349 load_reg( R_ECX, Rn );
1350 check_walign32( R_ECX );
1351 ADD_imm8s_r32( -4, R_ECX );
1352 store_reg( R_ECX, Rn );
1353 load_spreg( R_EAX, R_SGR );
1354 MEM_WRITE_LONG( R_ECX, R_EAX );
1355 sh4_x86.tstate = TSTATE_NONE;
1359 { /* STS.L FPUL, @-Rn */
1360 uint32_t Rn = ((ir>>8)&0xF);
1361 load_reg( R_ECX, Rn );
1363 check_walign32( R_ECX );
1364 ADD_imm8s_r32( -4, R_ECX );
1365 store_reg( R_ECX, Rn );
1366 load_spreg( R_EAX, R_FPUL );
1367 MEM_WRITE_LONG( R_ECX, R_EAX );
1368 sh4_x86.tstate = TSTATE_NONE;
1372 { /* STS.L FPSCR, @-Rn */
1373 uint32_t Rn = ((ir>>8)&0xF);
1374 load_reg( R_ECX, Rn );
1376 check_walign32( R_ECX );
1377 ADD_imm8s_r32( -4, R_ECX );
1378 store_reg( R_ECX, Rn );
1379 load_spreg( R_EAX, R_FPSCR );
1380 MEM_WRITE_LONG( R_ECX, R_EAX );
1381 sh4_x86.tstate = TSTATE_NONE;
1385 { /* STC.L DBR, @-Rn */
1386 uint32_t Rn = ((ir>>8)&0xF);
1388 check_priv_no_precheck();
1389 load_reg( R_ECX, Rn );
1390 check_walign32( R_ECX );
1391 ADD_imm8s_r32( -4, R_ECX );
1392 store_reg( R_ECX, Rn );
1393 load_spreg( R_EAX, R_DBR );
1394 MEM_WRITE_LONG( R_ECX, R_EAX );
1395 sh4_x86.tstate = TSTATE_NONE;
1404 switch( (ir&0x80) >> 7 ) {
1406 switch( (ir&0x70) >> 4 ) {
1408 { /* STC.L SR, @-Rn */
1409 uint32_t Rn = ((ir>>8)&0xF);
1411 check_priv_no_precheck();
1412 call_func0( sh4_read_sr );
1413 load_reg( R_ECX, Rn );
1414 check_walign32( R_ECX );
1415 ADD_imm8s_r32( -4, R_ECX );
1416 store_reg( R_ECX, Rn );
1417 MEM_WRITE_LONG( R_ECX, R_EAX );
1418 sh4_x86.tstate = TSTATE_NONE;
1422 { /* STC.L GBR, @-Rn */
1423 uint32_t Rn = ((ir>>8)&0xF);
1424 load_reg( R_ECX, Rn );
1426 check_walign32( R_ECX );
1427 ADD_imm8s_r32( -4, R_ECX );
1428 store_reg( R_ECX, Rn );
1429 load_spreg( R_EAX, R_GBR );
1430 MEM_WRITE_LONG( R_ECX, R_EAX );
1431 sh4_x86.tstate = TSTATE_NONE;
1435 { /* STC.L VBR, @-Rn */
1436 uint32_t Rn = ((ir>>8)&0xF);
1438 check_priv_no_precheck();
1439 load_reg( R_ECX, Rn );
1440 check_walign32( R_ECX );
1441 ADD_imm8s_r32( -4, R_ECX );
1442 store_reg( R_ECX, Rn );
1443 load_spreg( R_EAX, R_VBR );
1444 MEM_WRITE_LONG( R_ECX, R_EAX );
1445 sh4_x86.tstate = TSTATE_NONE;
1449 { /* STC.L SSR, @-Rn */
1450 uint32_t Rn = ((ir>>8)&0xF);
1452 check_priv_no_precheck();
1453 load_reg( R_ECX, Rn );
1454 check_walign32( R_ECX );
1455 ADD_imm8s_r32( -4, R_ECX );
1456 store_reg( R_ECX, Rn );
1457 load_spreg( R_EAX, R_SSR );
1458 MEM_WRITE_LONG( R_ECX, R_EAX );
1459 sh4_x86.tstate = TSTATE_NONE;
1463 { /* STC.L SPC, @-Rn */
1464 uint32_t Rn = ((ir>>8)&0xF);
1466 check_priv_no_precheck();
1467 load_reg( R_ECX, Rn );
1468 check_walign32( R_ECX );
1469 ADD_imm8s_r32( -4, R_ECX );
1470 store_reg( R_ECX, Rn );
1471 load_spreg( R_EAX, R_SPC );
1472 MEM_WRITE_LONG( R_ECX, R_EAX );
1473 sh4_x86.tstate = TSTATE_NONE;
1482 { /* STC.L Rm_BANK, @-Rn */
1483 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1485 check_priv_no_precheck();
1486 load_reg( R_ECX, Rn );
1487 check_walign32( R_ECX );
1488 ADD_imm8s_r32( -4, R_ECX );
1489 store_reg( R_ECX, Rn );
1490 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1491 MEM_WRITE_LONG( R_ECX, R_EAX );
1492 sh4_x86.tstate = TSTATE_NONE;
1498 switch( (ir&0xF0) >> 4 ) {
1501 uint32_t Rn = ((ir>>8)&0xF);
1502 load_reg( R_EAX, Rn );
1504 store_reg( R_EAX, Rn );
1506 sh4_x86.tstate = TSTATE_C;
1511 uint32_t Rn = ((ir>>8)&0xF);
1512 load_reg( R_EAX, Rn );
1513 if( sh4_x86.tstate != TSTATE_C ) {
1517 store_reg( R_EAX, Rn );
1519 sh4_x86.tstate = TSTATE_C;
1528 switch( (ir&0xF0) >> 4 ) {
1531 uint32_t Rn = ((ir>>8)&0xF);
1532 load_reg( R_EAX, Rn );
1534 store_reg( R_EAX, Rn );
1536 sh4_x86.tstate = TSTATE_C;
1541 uint32_t Rn = ((ir>>8)&0xF);
1542 load_reg( R_EAX, Rn );
1543 CMP_imm8s_r32( 0, R_EAX );
1545 sh4_x86.tstate = TSTATE_G;
1550 uint32_t Rn = ((ir>>8)&0xF);
1551 load_reg( R_EAX, Rn );
1552 if( sh4_x86.tstate != TSTATE_C ) {
1556 store_reg( R_EAX, Rn );
1558 sh4_x86.tstate = TSTATE_C;
1567 switch( (ir&0xF0) >> 4 ) {
1569 { /* LDS.L @Rm+, MACH */
1570 uint32_t Rm = ((ir>>8)&0xF);
1571 load_reg( R_EAX, Rm );
1573 check_ralign32( R_EAX );
1574 MOV_r32_r32( R_EAX, R_ECX );
1575 ADD_imm8s_r32( 4, R_EAX );
1576 store_reg( R_EAX, Rm );
1577 MEM_READ_LONG( R_ECX, R_EAX );
1578 store_spreg( R_EAX, R_MACH );
1579 sh4_x86.tstate = TSTATE_NONE;
1583 { /* LDS.L @Rm+, MACL */
1584 uint32_t Rm = ((ir>>8)&0xF);
1585 load_reg( R_EAX, Rm );
1587 check_ralign32( R_EAX );
1588 MOV_r32_r32( R_EAX, R_ECX );
1589 ADD_imm8s_r32( 4, R_EAX );
1590 store_reg( R_EAX, Rm );
1591 MEM_READ_LONG( R_ECX, R_EAX );
1592 store_spreg( R_EAX, R_MACL );
1593 sh4_x86.tstate = TSTATE_NONE;
1597 { /* LDS.L @Rm+, PR */
1598 uint32_t Rm = ((ir>>8)&0xF);
1599 load_reg( R_EAX, Rm );
1601 check_ralign32( R_EAX );
1602 MOV_r32_r32( R_EAX, R_ECX );
1603 ADD_imm8s_r32( 4, R_EAX );
1604 store_reg( R_EAX, Rm );
1605 MEM_READ_LONG( R_ECX, R_EAX );
1606 store_spreg( R_EAX, R_PR );
1607 sh4_x86.tstate = TSTATE_NONE;
1611 { /* LDC.L @Rm+, SGR */
1612 uint32_t Rm = ((ir>>8)&0xF);
1614 check_priv_no_precheck();
1615 load_reg( R_EAX, Rm );
1616 check_ralign32( R_EAX );
1617 MOV_r32_r32( R_EAX, R_ECX );
1618 ADD_imm8s_r32( 4, R_EAX );
1619 store_reg( R_EAX, Rm );
1620 MEM_READ_LONG( R_ECX, R_EAX );
1621 store_spreg( R_EAX, R_SGR );
1622 sh4_x86.tstate = TSTATE_NONE;
1626 { /* LDS.L @Rm+, FPUL */
1627 uint32_t Rm = ((ir>>8)&0xF);
1628 load_reg( R_EAX, Rm );
1630 check_ralign32( R_EAX );
1631 MOV_r32_r32( R_EAX, R_ECX );
1632 ADD_imm8s_r32( 4, R_EAX );
1633 store_reg( R_EAX, Rm );
1634 MEM_READ_LONG( R_ECX, R_EAX );
1635 store_spreg( R_EAX, R_FPUL );
1636 sh4_x86.tstate = TSTATE_NONE;
1640 { /* LDS.L @Rm+, FPSCR */
1641 uint32_t Rm = ((ir>>8)&0xF);
1642 load_reg( R_EAX, Rm );
1644 check_ralign32( R_EAX );
1645 MOV_r32_r32( R_EAX, R_ECX );
1646 ADD_imm8s_r32( 4, R_EAX );
1647 store_reg( R_EAX, Rm );
1648 MEM_READ_LONG( R_ECX, R_EAX );
1649 store_spreg( R_EAX, R_FPSCR );
1650 update_fr_bank( R_EAX );
1651 sh4_x86.tstate = TSTATE_NONE;
1655 { /* LDC.L @Rm+, DBR */
1656 uint32_t Rm = ((ir>>8)&0xF);
1658 check_priv_no_precheck();
1659 load_reg( R_EAX, Rm );
1660 check_ralign32( R_EAX );
1661 MOV_r32_r32( R_EAX, R_ECX );
1662 ADD_imm8s_r32( 4, R_EAX );
1663 store_reg( R_EAX, Rm );
1664 MEM_READ_LONG( R_ECX, R_EAX );
1665 store_spreg( R_EAX, R_DBR );
1666 sh4_x86.tstate = TSTATE_NONE;
1675 switch( (ir&0x80) >> 7 ) {
1677 switch( (ir&0x70) >> 4 ) {
1679 { /* LDC.L @Rm+, SR */
1680 uint32_t Rm = ((ir>>8)&0xF);
1681 if( sh4_x86.in_delay_slot ) {
1685 check_priv_no_precheck();
1686 load_reg( R_EAX, Rm );
1687 check_ralign32( R_EAX );
1688 MOV_r32_r32( R_EAX, R_ECX );
1689 ADD_imm8s_r32( 4, R_EAX );
1690 store_reg( R_EAX, Rm );
1691 MEM_READ_LONG( R_ECX, R_EAX );
1692 call_func1( sh4_write_sr, R_EAX );
1693 sh4_x86.priv_checked = FALSE;
1694 sh4_x86.fpuen_checked = FALSE;
1695 sh4_x86.tstate = TSTATE_NONE;
1700 { /* LDC.L @Rm+, GBR */
1701 uint32_t Rm = ((ir>>8)&0xF);
1702 load_reg( R_EAX, Rm );
1704 check_ralign32( R_EAX );
1705 MOV_r32_r32( R_EAX, R_ECX );
1706 ADD_imm8s_r32( 4, R_EAX );
1707 store_reg( R_EAX, Rm );
1708 MEM_READ_LONG( R_ECX, R_EAX );
1709 store_spreg( R_EAX, R_GBR );
1710 sh4_x86.tstate = TSTATE_NONE;
1714 { /* LDC.L @Rm+, VBR */
1715 uint32_t Rm = ((ir>>8)&0xF);
1717 check_priv_no_precheck();
1718 load_reg( R_EAX, Rm );
1719 check_ralign32( R_EAX );
1720 MOV_r32_r32( R_EAX, R_ECX );
1721 ADD_imm8s_r32( 4, R_EAX );
1722 store_reg( R_EAX, Rm );
1723 MEM_READ_LONG( R_ECX, R_EAX );
1724 store_spreg( R_EAX, R_VBR );
1725 sh4_x86.tstate = TSTATE_NONE;
1729 { /* LDC.L @Rm+, SSR */
1730 uint32_t Rm = ((ir>>8)&0xF);
1732 check_priv_no_precheck();
1733 load_reg( R_EAX, Rm );
1734 check_ralign32( R_EAX );
1735 MOV_r32_r32( R_EAX, R_ECX );
1736 ADD_imm8s_r32( 4, R_EAX );
1737 store_reg( R_EAX, Rm );
1738 MEM_READ_LONG( R_ECX, R_EAX );
1739 store_spreg( R_EAX, R_SSR );
1740 sh4_x86.tstate = TSTATE_NONE;
1744 { /* LDC.L @Rm+, SPC */
1745 uint32_t Rm = ((ir>>8)&0xF);
1747 check_priv_no_precheck();
1748 load_reg( R_EAX, Rm );
1749 check_ralign32( R_EAX );
1750 MOV_r32_r32( R_EAX, R_ECX );
1751 ADD_imm8s_r32( 4, R_EAX );
1752 store_reg( R_EAX, Rm );
1753 MEM_READ_LONG( R_ECX, R_EAX );
1754 store_spreg( R_EAX, R_SPC );
1755 sh4_x86.tstate = TSTATE_NONE;
1764 { /* LDC.L @Rm+, Rn_BANK */
1765 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1767 check_priv_no_precheck();
1768 load_reg( R_EAX, Rm );
1769 check_ralign32( R_EAX );
1770 MOV_r32_r32( R_EAX, R_ECX );
1771 ADD_imm8s_r32( 4, R_EAX );
1772 store_reg( R_EAX, Rm );
1773 MEM_READ_LONG( R_ECX, R_EAX );
1774 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1775 sh4_x86.tstate = TSTATE_NONE;
1781 switch( (ir&0xF0) >> 4 ) {
1784 uint32_t Rn = ((ir>>8)&0xF);
1785 load_reg( R_EAX, Rn );
1786 SHL_imm8_r32( 2, R_EAX );
1787 store_reg( R_EAX, Rn );
1788 sh4_x86.tstate = TSTATE_NONE;
1793 uint32_t Rn = ((ir>>8)&0xF);
1794 load_reg( R_EAX, Rn );
1795 SHL_imm8_r32( 8, R_EAX );
1796 store_reg( R_EAX, Rn );
1797 sh4_x86.tstate = TSTATE_NONE;
1802 uint32_t Rn = ((ir>>8)&0xF);
1803 load_reg( R_EAX, Rn );
1804 SHL_imm8_r32( 16, R_EAX );
1805 store_reg( R_EAX, Rn );
1806 sh4_x86.tstate = TSTATE_NONE;
1815 switch( (ir&0xF0) >> 4 ) {
1818 uint32_t Rn = ((ir>>8)&0xF);
1819 load_reg( R_EAX, Rn );
1820 SHR_imm8_r32( 2, R_EAX );
1821 store_reg( R_EAX, Rn );
1822 sh4_x86.tstate = TSTATE_NONE;
1827 uint32_t Rn = ((ir>>8)&0xF);
1828 load_reg( R_EAX, Rn );
1829 SHR_imm8_r32( 8, R_EAX );
1830 store_reg( R_EAX, Rn );
1831 sh4_x86.tstate = TSTATE_NONE;
1836 uint32_t Rn = ((ir>>8)&0xF);
1837 load_reg( R_EAX, Rn );
1838 SHR_imm8_r32( 16, R_EAX );
1839 store_reg( R_EAX, Rn );
1840 sh4_x86.tstate = TSTATE_NONE;
1849 switch( (ir&0xF0) >> 4 ) {
1851 { /* LDS Rm, MACH */
1852 uint32_t Rm = ((ir>>8)&0xF);
1853 load_reg( R_EAX, Rm );
1854 store_spreg( R_EAX, R_MACH );
1858 { /* LDS Rm, MACL */
1859 uint32_t Rm = ((ir>>8)&0xF);
1860 load_reg( R_EAX, Rm );
1861 store_spreg( R_EAX, R_MACL );
1866 uint32_t Rm = ((ir>>8)&0xF);
1867 load_reg( R_EAX, Rm );
1868 store_spreg( R_EAX, R_PR );
1873 uint32_t Rm = ((ir>>8)&0xF);
1875 load_reg( R_EAX, Rm );
1876 store_spreg( R_EAX, R_SGR );
1877 sh4_x86.tstate = TSTATE_NONE;
1881 { /* LDS Rm, FPUL */
1882 uint32_t Rm = ((ir>>8)&0xF);
1883 load_reg( R_EAX, Rm );
1884 store_spreg( R_EAX, R_FPUL );
1888 { /* LDS Rm, FPSCR */
1889 uint32_t Rm = ((ir>>8)&0xF);
1890 load_reg( R_EAX, Rm );
1891 store_spreg( R_EAX, R_FPSCR );
1892 update_fr_bank( R_EAX );
1893 sh4_x86.tstate = TSTATE_NONE;
1898 uint32_t Rm = ((ir>>8)&0xF);
1900 load_reg( R_EAX, Rm );
1901 store_spreg( R_EAX, R_DBR );
1902 sh4_x86.tstate = TSTATE_NONE;
1911 switch( (ir&0xF0) >> 4 ) {
1914 uint32_t Rn = ((ir>>8)&0xF);
1915 if( sh4_x86.in_delay_slot ) {
1918 load_imm32( R_EAX, pc + 4 );
1919 store_spreg( R_EAX, R_PR );
1920 load_reg( R_ECX, Rn );
1921 store_spreg( R_ECX, REG_OFFSET(pc) );
1922 sh4_x86.in_delay_slot = TRUE;
1923 sh4_translate_instruction(pc+2);
1924 exit_block_pcset(pc+2);
1925 sh4_x86.branch_taken = TRUE;
1932 uint32_t Rn = ((ir>>8)&0xF);
1933 load_reg( R_ECX, Rn );
1934 MEM_READ_BYTE( R_ECX, R_EAX );
1935 TEST_r8_r8( R_AL, R_AL );
1937 OR_imm8_r8( 0x80, R_AL );
1938 load_reg( R_ECX, Rn );
1939 MEM_WRITE_BYTE( R_ECX, R_EAX );
1940 sh4_x86.tstate = TSTATE_NONE;
1945 uint32_t Rn = ((ir>>8)&0xF);
1946 if( sh4_x86.in_delay_slot ) {
1949 load_reg( R_ECX, Rn );
1950 store_spreg( R_ECX, REG_OFFSET(pc) );
1951 sh4_x86.in_delay_slot = TRUE;
1952 sh4_translate_instruction(pc+2);
1953 exit_block_pcset(pc+2);
1954 sh4_x86.branch_taken = TRUE;
1966 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1967 /* Annoyingly enough, not directly convertible */
1968 load_reg( R_EAX, Rn );
1969 load_reg( R_ECX, Rm );
1970 CMP_imm32_r32( 0, R_ECX );
1971 JGE_rel8(16, doshl);
1973 NEG_r32( R_ECX ); // 2
1974 AND_imm8_r8( 0x1F, R_CL ); // 3
1975 JE_rel8( 4, emptysar); // 2
1976 SAR_r32_CL( R_EAX ); // 2
1977 JMP_rel8(10, end); // 2
1979 JMP_TARGET(emptysar);
1980 SAR_imm8_r32(31, R_EAX ); // 3
1984 AND_imm8_r8( 0x1F, R_CL ); // 3
1985 SHL_r32_CL( R_EAX ); // 2
1988 store_reg( R_EAX, Rn );
1989 sh4_x86.tstate = TSTATE_NONE;
1994 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1995 load_reg( R_EAX, Rn );
1996 load_reg( R_ECX, Rm );
1997 CMP_imm32_r32( 0, R_ECX );
1998 JGE_rel8(15, doshl);
2000 NEG_r32( R_ECX ); // 2
2001 AND_imm8_r8( 0x1F, R_CL ); // 3
2002 JE_rel8( 4, emptyshr );
2003 SHR_r32_CL( R_EAX ); // 2
2004 JMP_rel8(9, end); // 2
2006 JMP_TARGET(emptyshr);
2007 XOR_r32_r32( R_EAX, R_EAX );
2011 AND_imm8_r8( 0x1F, R_CL ); // 3
2012 SHL_r32_CL( R_EAX ); // 2
2015 store_reg( R_EAX, Rn );
2016 sh4_x86.tstate = TSTATE_NONE;
2020 switch( (ir&0x80) >> 7 ) {
2022 switch( (ir&0x70) >> 4 ) {
2025 uint32_t Rm = ((ir>>8)&0xF);
2026 if( sh4_x86.in_delay_slot ) {
2030 load_reg( R_EAX, Rm );
2031 call_func1( sh4_write_sr, R_EAX );
2032 sh4_x86.priv_checked = FALSE;
2033 sh4_x86.fpuen_checked = FALSE;
2034 sh4_x86.tstate = TSTATE_NONE;
2040 uint32_t Rm = ((ir>>8)&0xF);
2041 load_reg( R_EAX, Rm );
2042 store_spreg( R_EAX, R_GBR );
2047 uint32_t Rm = ((ir>>8)&0xF);
2049 load_reg( R_EAX, Rm );
2050 store_spreg( R_EAX, R_VBR );
2051 sh4_x86.tstate = TSTATE_NONE;
2056 uint32_t Rm = ((ir>>8)&0xF);
2058 load_reg( R_EAX, Rm );
2059 store_spreg( R_EAX, R_SSR );
2060 sh4_x86.tstate = TSTATE_NONE;
2065 uint32_t Rm = ((ir>>8)&0xF);
2067 load_reg( R_EAX, Rm );
2068 store_spreg( R_EAX, R_SPC );
2069 sh4_x86.tstate = TSTATE_NONE;
2078 { /* LDC Rm, Rn_BANK */
2079 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
2081 load_reg( R_EAX, Rm );
2082 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
2083 sh4_x86.tstate = TSTATE_NONE;
2089 { /* MAC.W @Rm+, @Rn+ */
2090 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2091 load_reg( R_ECX, Rm );
2093 check_ralign16( R_ECX );
2094 load_reg( R_ECX, Rn );
2095 check_ralign16( R_ECX );
2096 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
2097 MEM_READ_WORD( R_ECX, R_EAX );
2099 load_reg( R_ECX, Rm );
2100 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
2101 MEM_READ_WORD( R_ECX, R_EAX );
2105 load_spreg( R_ECX, R_S );
2106 TEST_r32_r32( R_ECX, R_ECX );
2107 JE_rel8( 47, nosat );
2109 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
2110 JNO_rel8( 51, end ); // 2
2111 load_imm32( R_EDX, 1 ); // 5
2112 store_spreg( R_EDX, R_MACH ); // 6
2113 JS_rel8( 13, positive ); // 2
2114 load_imm32( R_EAX, 0x80000000 );// 5
2115 store_spreg( R_EAX, R_MACL ); // 6
2116 JMP_rel8( 25, end2 ); // 2
2118 JMP_TARGET(positive);
2119 load_imm32( R_EAX, 0x7FFFFFFF );// 5
2120 store_spreg( R_EAX, R_MACL ); // 6
2121 JMP_rel8( 12, end3); // 2
2124 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
2125 ADC_r32_sh4r( R_EDX, R_MACH ); // 6
2129 sh4_x86.tstate = TSTATE_NONE;
2135 { /* MOV.L @(disp, Rm), Rn */
2136 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
2137 load_reg( R_ECX, Rm );
2138 ADD_imm8s_r32( disp, R_ECX );
2140 check_ralign32( R_ECX );
2141 MEM_READ_LONG( R_ECX, R_EAX );
2142 store_reg( R_EAX, Rn );
2143 sh4_x86.tstate = TSTATE_NONE;
2149 { /* MOV.B @Rm, Rn */
2150 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2151 load_reg( R_ECX, Rm );
2152 MEM_READ_BYTE( R_ECX, R_EAX );
2153 store_reg( R_EAX, Rn );
2154 sh4_x86.tstate = TSTATE_NONE;
2158 { /* MOV.W @Rm, Rn */
2159 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2160 load_reg( R_ECX, Rm );
2162 check_ralign16( R_ECX );
2163 MEM_READ_WORD( R_ECX, R_EAX );
2164 store_reg( R_EAX, Rn );
2165 sh4_x86.tstate = TSTATE_NONE;
2169 { /* MOV.L @Rm, Rn */
2170 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2171 load_reg( R_ECX, Rm );
2173 check_ralign32( R_ECX );
2174 MEM_READ_LONG( R_ECX, R_EAX );
2175 store_reg( R_EAX, Rn );
2176 sh4_x86.tstate = TSTATE_NONE;
2181 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2182 load_reg( R_EAX, Rm );
2183 store_reg( R_EAX, Rn );
2187 { /* MOV.B @Rm+, Rn */
2188 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2189 load_reg( R_ECX, Rm );
2190 MOV_r32_r32( R_ECX, R_EAX );
2191 ADD_imm8s_r32( 1, R_EAX );
2192 store_reg( R_EAX, Rm );
2193 MEM_READ_BYTE( R_ECX, R_EAX );
2194 store_reg( R_EAX, Rn );
2195 sh4_x86.tstate = TSTATE_NONE;
2199 { /* MOV.W @Rm+, Rn */
2200 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2201 load_reg( R_EAX, Rm );
2203 check_ralign16( R_EAX );
2204 MOV_r32_r32( R_EAX, R_ECX );
2205 ADD_imm8s_r32( 2, R_EAX );
2206 store_reg( R_EAX, Rm );
2207 MEM_READ_WORD( R_ECX, R_EAX );
2208 store_reg( R_EAX, Rn );
2209 sh4_x86.tstate = TSTATE_NONE;
2213 { /* MOV.L @Rm+, Rn */
2214 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2215 load_reg( R_EAX, Rm );
2217 check_ralign32( R_EAX );
2218 MOV_r32_r32( R_EAX, R_ECX );
2219 ADD_imm8s_r32( 4, R_EAX );
2220 store_reg( R_EAX, Rm );
2221 MEM_READ_LONG( R_ECX, R_EAX );
2222 store_reg( R_EAX, Rn );
2223 sh4_x86.tstate = TSTATE_NONE;
2228 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2229 load_reg( R_EAX, Rm );
2231 store_reg( R_EAX, Rn );
2232 sh4_x86.tstate = TSTATE_NONE;
2236 { /* SWAP.B Rm, Rn */
2237 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2238 load_reg( R_EAX, Rm );
2239 XCHG_r8_r8( R_AL, R_AH );
2240 store_reg( R_EAX, Rn );
2244 { /* SWAP.W Rm, Rn */
2245 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2246 load_reg( R_EAX, Rm );
2247 MOV_r32_r32( R_EAX, R_ECX );
2248 SHL_imm8_r32( 16, R_ECX );
2249 SHR_imm8_r32( 16, R_EAX );
2250 OR_r32_r32( R_EAX, R_ECX );
2251 store_reg( R_ECX, Rn );
2252 sh4_x86.tstate = TSTATE_NONE;
2257 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2258 load_reg( R_EAX, Rm );
2259 XOR_r32_r32( R_ECX, R_ECX );
2261 SBB_r32_r32( R_EAX, R_ECX );
2262 store_reg( R_ECX, Rn );
2264 sh4_x86.tstate = TSTATE_C;
2269 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2270 load_reg( R_EAX, Rm );
2272 store_reg( R_EAX, Rn );
2273 sh4_x86.tstate = TSTATE_NONE;
2277 { /* EXTU.B Rm, Rn */
2278 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2279 load_reg( R_EAX, Rm );
2280 MOVZX_r8_r32( R_EAX, R_EAX );
2281 store_reg( R_EAX, Rn );
2285 { /* EXTU.W Rm, Rn */
2286 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2287 load_reg( R_EAX, Rm );
2288 MOVZX_r16_r32( R_EAX, R_EAX );
2289 store_reg( R_EAX, Rn );
2293 { /* EXTS.B Rm, Rn */
2294 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2295 load_reg( R_EAX, Rm );
2296 MOVSX_r8_r32( R_EAX, R_EAX );
2297 store_reg( R_EAX, Rn );
2301 { /* EXTS.W Rm, Rn */
2302 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2303 load_reg( R_EAX, Rm );
2304 MOVSX_r16_r32( R_EAX, R_EAX );
2305 store_reg( R_EAX, Rn );
2311 { /* ADD #imm, Rn */
2312 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2313 load_reg( R_EAX, Rn );
2314 ADD_imm8s_r32( imm, R_EAX );
2315 store_reg( R_EAX, Rn );
2316 sh4_x86.tstate = TSTATE_NONE;
2320 switch( (ir&0xF00) >> 8 ) {
2322 { /* MOV.B R0, @(disp, Rn) */
2323 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2324 load_reg( R_EAX, 0 );
2325 load_reg( R_ECX, Rn );
2326 ADD_imm32_r32( disp, R_ECX );
2327 MEM_WRITE_BYTE( R_ECX, R_EAX );
2328 sh4_x86.tstate = TSTATE_NONE;
2332 { /* MOV.W R0, @(disp, Rn) */
2333 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2334 load_reg( R_ECX, Rn );
2335 load_reg( R_EAX, 0 );
2336 ADD_imm32_r32( disp, R_ECX );
2338 check_walign16( R_ECX );
2339 MEM_WRITE_WORD( R_ECX, R_EAX );
2340 sh4_x86.tstate = TSTATE_NONE;
2344 { /* MOV.B @(disp, Rm), R0 */
2345 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2346 load_reg( R_ECX, Rm );
2347 ADD_imm32_r32( disp, R_ECX );
2348 MEM_READ_BYTE( R_ECX, R_EAX );
2349 store_reg( R_EAX, 0 );
2350 sh4_x86.tstate = TSTATE_NONE;
2354 { /* MOV.W @(disp, Rm), R0 */
2355 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2356 load_reg( R_ECX, Rm );
2357 ADD_imm32_r32( disp, R_ECX );
2359 check_ralign16( R_ECX );
2360 MEM_READ_WORD( R_ECX, R_EAX );
2361 store_reg( R_EAX, 0 );
2362 sh4_x86.tstate = TSTATE_NONE;
2366 { /* CMP/EQ #imm, R0 */
2367 int32_t imm = SIGNEXT8(ir&0xFF);
2368 load_reg( R_EAX, 0 );
2369 CMP_imm8s_r32(imm, R_EAX);
2371 sh4_x86.tstate = TSTATE_E;
2376 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2377 if( sh4_x86.in_delay_slot ) {
2380 JF_rel8( EXIT_BLOCK_SIZE, nottaken );
2381 exit_block( disp + pc + 4, pc+2 );
2382 JMP_TARGET(nottaken);
2389 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2390 if( sh4_x86.in_delay_slot ) {
2393 JT_rel8( EXIT_BLOCK_SIZE, nottaken );
2394 exit_block( disp + pc + 4, pc+2 );
2395 JMP_TARGET(nottaken);
2402 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2403 if( sh4_x86.in_delay_slot ) {
2406 sh4_x86.in_delay_slot = TRUE;
2407 if( sh4_x86.tstate == TSTATE_NONE ) {
2408 CMP_imm8s_sh4r( 1, R_T );
2409 sh4_x86.tstate = TSTATE_E;
2411 OP(0x0F); OP(0x80+(sh4_x86.tstate^1)); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JE rel32
2412 sh4_translate_instruction(pc+2);
2413 exit_block( disp + pc + 4, pc+4 );
2415 *patch = (xlat_output - ((uint8_t *)patch)) - 4;
2416 sh4_translate_instruction(pc+2);
2423 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2424 if( sh4_x86.in_delay_slot ) {
2427 sh4_x86.in_delay_slot = TRUE;
2428 if( sh4_x86.tstate == TSTATE_NONE ) {
2429 CMP_imm8s_sh4r( 1, R_T );
2430 sh4_x86.tstate = TSTATE_E;
2432 OP(0x0F); OP(0x80+sh4_x86.tstate); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JNE rel32
2433 sh4_translate_instruction(pc+2);
2434 exit_block( disp + pc + 4, pc+4 );
2436 *patch = (xlat_output - ((uint8_t *)patch)) - 4;
2437 sh4_translate_instruction(pc+2);
2448 { /* MOV.W @(disp, PC), Rn */
2449 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1;
2450 if( sh4_x86.in_delay_slot ) {
2453 load_imm32( R_ECX, pc + disp + 4 );
2454 MEM_READ_WORD( R_ECX, R_EAX );
2455 store_reg( R_EAX, Rn );
2456 sh4_x86.tstate = TSTATE_NONE;
2462 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2463 if( sh4_x86.in_delay_slot ) {
2466 sh4_x86.in_delay_slot = TRUE;
2467 sh4_translate_instruction( pc + 2 );
2468 exit_block( disp + pc + 4, pc+4 );
2469 sh4_x86.branch_taken = TRUE;
2476 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2477 if( sh4_x86.in_delay_slot ) {
2480 load_imm32( R_EAX, pc + 4 );
2481 store_spreg( R_EAX, R_PR );
2482 sh4_x86.in_delay_slot = TRUE;
2483 sh4_translate_instruction( pc + 2 );
2484 exit_block( disp + pc + 4, pc+4 );
2485 sh4_x86.branch_taken = TRUE;
2491 switch( (ir&0xF00) >> 8 ) {
2493 { /* MOV.B R0, @(disp, GBR) */
2494 uint32_t disp = (ir&0xFF);
2495 load_reg( R_EAX, 0 );
2496 load_spreg( R_ECX, R_GBR );
2497 ADD_imm32_r32( disp, R_ECX );
2498 MEM_WRITE_BYTE( R_ECX, R_EAX );
2499 sh4_x86.tstate = TSTATE_NONE;
2503 { /* MOV.W R0, @(disp, GBR) */
2504 uint32_t disp = (ir&0xFF)<<1;
2505 load_spreg( R_ECX, R_GBR );
2506 load_reg( R_EAX, 0 );
2507 ADD_imm32_r32( disp, R_ECX );
2509 check_walign16( R_ECX );
2510 MEM_WRITE_WORD( R_ECX, R_EAX );
2511 sh4_x86.tstate = TSTATE_NONE;
2515 { /* MOV.L R0, @(disp, GBR) */
2516 uint32_t disp = (ir&0xFF)<<2;
2517 load_spreg( R_ECX, R_GBR );
2518 load_reg( R_EAX, 0 );
2519 ADD_imm32_r32( disp, R_ECX );
2521 check_walign32( R_ECX );
2522 MEM_WRITE_LONG( R_ECX, R_EAX );
2523 sh4_x86.tstate = TSTATE_NONE;
2528 uint32_t imm = (ir&0xFF);
2529 if( sh4_x86.in_delay_slot ) {
2532 load_imm32( R_ECX, pc+2 );
2533 store_spreg( R_ECX, REG_OFFSET(pc) );
2534 load_imm32( R_EAX, imm );
2535 call_func1( sh4_raise_trap, R_EAX );
2536 sh4_x86.tstate = TSTATE_NONE;
2537 exit_block_pcset(pc);
2538 sh4_x86.branch_taken = TRUE;
2544 { /* MOV.B @(disp, GBR), R0 */
2545 uint32_t disp = (ir&0xFF);
2546 load_spreg( R_ECX, R_GBR );
2547 ADD_imm32_r32( disp, R_ECX );
2548 MEM_READ_BYTE( R_ECX, R_EAX );
2549 store_reg( R_EAX, 0 );
2550 sh4_x86.tstate = TSTATE_NONE;
2554 { /* MOV.W @(disp, GBR), R0 */
2555 uint32_t disp = (ir&0xFF)<<1;
2556 load_spreg( R_ECX, R_GBR );
2557 ADD_imm32_r32( disp, R_ECX );
2559 check_ralign16( R_ECX );
2560 MEM_READ_WORD( R_ECX, R_EAX );
2561 store_reg( R_EAX, 0 );
2562 sh4_x86.tstate = TSTATE_NONE;
2566 { /* MOV.L @(disp, GBR), R0 */
2567 uint32_t disp = (ir&0xFF)<<2;
2568 load_spreg( R_ECX, R_GBR );
2569 ADD_imm32_r32( disp, R_ECX );
2571 check_ralign32( R_ECX );
2572 MEM_READ_LONG( R_ECX, R_EAX );
2573 store_reg( R_EAX, 0 );
2574 sh4_x86.tstate = TSTATE_NONE;
2578 { /* MOVA @(disp, PC), R0 */
2579 uint32_t disp = (ir&0xFF)<<2;
2580 if( sh4_x86.in_delay_slot ) {
2583 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2584 store_reg( R_ECX, 0 );
2589 { /* TST #imm, R0 */
2590 uint32_t imm = (ir&0xFF);
2591 load_reg( R_EAX, 0 );
2592 TEST_imm32_r32( imm, R_EAX );
2594 sh4_x86.tstate = TSTATE_E;
2598 { /* AND #imm, R0 */
2599 uint32_t imm = (ir&0xFF);
2600 load_reg( R_EAX, 0 );
2601 AND_imm32_r32(imm, R_EAX);
2602 store_reg( R_EAX, 0 );
2603 sh4_x86.tstate = TSTATE_NONE;
2607 { /* XOR #imm, R0 */
2608 uint32_t imm = (ir&0xFF);
2609 load_reg( R_EAX, 0 );
2610 XOR_imm32_r32( imm, R_EAX );
2611 store_reg( R_EAX, 0 );
2612 sh4_x86.tstate = TSTATE_NONE;
2617 uint32_t imm = (ir&0xFF);
2618 load_reg( R_EAX, 0 );
2619 OR_imm32_r32(imm, R_EAX);
2620 store_reg( R_EAX, 0 );
2621 sh4_x86.tstate = TSTATE_NONE;
2625 { /* TST.B #imm, @(R0, GBR) */
2626 uint32_t imm = (ir&0xFF);
2627 load_reg( R_EAX, 0);
2628 load_reg( R_ECX, R_GBR);
2629 ADD_r32_r32( R_EAX, R_ECX );
2630 MEM_READ_BYTE( R_ECX, R_EAX );
2631 TEST_imm8_r8( imm, R_AL );
2633 sh4_x86.tstate = TSTATE_E;
2637 { /* AND.B #imm, @(R0, GBR) */
2638 uint32_t imm = (ir&0xFF);
2639 load_reg( R_EAX, 0 );
2640 load_spreg( R_ECX, R_GBR );
2641 ADD_r32_r32( R_EAX, R_ECX );
2643 MEM_READ_BYTE( R_ECX, R_EAX );
2645 AND_imm32_r32(imm, R_EAX );
2646 MEM_WRITE_BYTE( R_ECX, R_EAX );
2647 sh4_x86.tstate = TSTATE_NONE;
2651 { /* XOR.B #imm, @(R0, GBR) */
2652 uint32_t imm = (ir&0xFF);
2653 load_reg( R_EAX, 0 );
2654 load_spreg( R_ECX, R_GBR );
2655 ADD_r32_r32( R_EAX, R_ECX );
2657 MEM_READ_BYTE(R_ECX, R_EAX);
2659 XOR_imm32_r32( imm, R_EAX );
2660 MEM_WRITE_BYTE( R_ECX, R_EAX );
2661 sh4_x86.tstate = TSTATE_NONE;
2665 { /* OR.B #imm, @(R0, GBR) */
2666 uint32_t imm = (ir&0xFF);
2667 load_reg( R_EAX, 0 );
2668 load_spreg( R_ECX, R_GBR );
2669 ADD_r32_r32( R_EAX, R_ECX );
2671 MEM_READ_BYTE( R_ECX, R_EAX );
2673 OR_imm32_r32(imm, R_EAX );
2674 MEM_WRITE_BYTE( R_ECX, R_EAX );
2675 sh4_x86.tstate = TSTATE_NONE;
2681 { /* MOV.L @(disp, PC), Rn */
2682 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2;
2683 if( sh4_x86.in_delay_slot ) {
2686 uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
2687 sh4ptr_t ptr = mem_get_region(target);
2689 MOV_moff32_EAX( ptr );
2691 load_imm32( R_ECX, target );
2692 MEM_READ_LONG( R_ECX, R_EAX );
2694 store_reg( R_EAX, Rn );
2695 sh4_x86.tstate = TSTATE_NONE;
2700 { /* MOV #imm, Rn */
2701 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2702 load_imm32( R_EAX, imm );
2703 store_reg( R_EAX, Rn );
2709 { /* FADD FRm, FRn */
2710 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2712 load_spreg( R_ECX, R_FPSCR );
2713 TEST_imm32_r32( FPSCR_PR, R_ECX );
2714 load_fr_bank( R_EDX );
2715 JNE_rel8(13,doubleprec);
2716 push_fr(R_EDX, FRm);
2717 push_fr(R_EDX, FRn);
2721 JMP_TARGET(doubleprec);
2722 push_dr(R_EDX, FRm);
2723 push_dr(R_EDX, FRn);
2727 sh4_x86.tstate = TSTATE_NONE;
2731 { /* FSUB FRm, FRn */
2732 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2734 load_spreg( R_ECX, R_FPSCR );
2735 TEST_imm32_r32( FPSCR_PR, R_ECX );
2736 load_fr_bank( R_EDX );
2737 JNE_rel8(13, doubleprec);
2738 push_fr(R_EDX, FRn);
2739 push_fr(R_EDX, FRm);
2743 JMP_TARGET(doubleprec);
2744 push_dr(R_EDX, FRn);
2745 push_dr(R_EDX, FRm);
2749 sh4_x86.tstate = TSTATE_NONE;
2753 { /* FMUL FRm, FRn */
2754 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2756 load_spreg( R_ECX, R_FPSCR );
2757 TEST_imm32_r32( FPSCR_PR, R_ECX );
2758 load_fr_bank( R_EDX );
2759 JNE_rel8(13, doubleprec);
2760 push_fr(R_EDX, FRm);
2761 push_fr(R_EDX, FRn);
2765 JMP_TARGET(doubleprec);
2766 push_dr(R_EDX, FRm);
2767 push_dr(R_EDX, FRn);
2771 sh4_x86.tstate = TSTATE_NONE;
2775 { /* FDIV FRm, FRn */
2776 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2778 load_spreg( R_ECX, R_FPSCR );
2779 TEST_imm32_r32( FPSCR_PR, R_ECX );
2780 load_fr_bank( R_EDX );
2781 JNE_rel8(13, doubleprec);
2782 push_fr(R_EDX, FRn);
2783 push_fr(R_EDX, FRm);
2787 JMP_TARGET(doubleprec);
2788 push_dr(R_EDX, FRn);
2789 push_dr(R_EDX, FRm);
2793 sh4_x86.tstate = TSTATE_NONE;
2797 { /* FCMP/EQ FRm, FRn */
2798 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2800 load_spreg( R_ECX, R_FPSCR );
2801 TEST_imm32_r32( FPSCR_PR, R_ECX );
2802 load_fr_bank( R_EDX );
2803 JNE_rel8(8, doubleprec);
2804 push_fr(R_EDX, FRm);
2805 push_fr(R_EDX, FRn);
2807 JMP_TARGET(doubleprec);
2808 push_dr(R_EDX, FRm);
2809 push_dr(R_EDX, FRn);
2814 sh4_x86.tstate = TSTATE_NONE;
2818 { /* FCMP/GT FRm, FRn */
2819 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2821 load_spreg( R_ECX, R_FPSCR );
2822 TEST_imm32_r32( FPSCR_PR, R_ECX );
2823 load_fr_bank( R_EDX );
2824 JNE_rel8(8, doubleprec);
2825 push_fr(R_EDX, FRm);
2826 push_fr(R_EDX, FRn);
2828 JMP_TARGET(doubleprec);
2829 push_dr(R_EDX, FRm);
2830 push_dr(R_EDX, FRn);
2835 sh4_x86.tstate = TSTATE_NONE;
2839 { /* FMOV @(R0, Rm), FRn */
2840 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2842 check_fpuen_no_precheck();
2843 load_reg( R_ECX, Rm );
2844 ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
2845 check_ralign32( R_ECX );
2846 load_spreg( R_EDX, R_FPSCR );
2847 TEST_imm32_r32( FPSCR_SZ, R_EDX );
2848 JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
2849 MEM_READ_LONG( R_ECX, R_EAX );
2850 load_fr_bank( R_EDX );
2851 store_fr( R_EDX, R_EAX, FRn );
2853 JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
2854 JMP_TARGET(doublesize);
2855 MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
2856 load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
2857 load_xf_bank( R_EDX );
2858 store_fr( R_EDX, R_EAX, FRn&0x0E );
2859 store_fr( R_EDX, R_ECX, FRn|0x01 );
2862 JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
2863 JMP_TARGET(doublesize);
2864 MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
2865 load_fr_bank( R_EDX );
2866 store_fr( R_EDX, R_EAX, FRn&0x0E );
2867 store_fr( R_EDX, R_ECX, FRn|0x01 );
2870 sh4_x86.tstate = TSTATE_NONE;
2874 { /* FMOV FRm, @(R0, Rn) */
2875 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2877 check_fpuen_no_precheck();
2878 load_reg( R_ECX, Rn );
2879 ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
2880 check_walign32( R_ECX );
2881 load_spreg( R_EDX, R_FPSCR );
2882 TEST_imm32_r32( FPSCR_SZ, R_EDX );
2883 JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
2884 load_fr_bank( R_EDX );
2885 load_fr( R_EDX, R_EAX, FRm );
2886 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
2888 JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
2889 JMP_TARGET(doublesize);
2890 load_xf_bank( R_EDX );
2891 load_fr( R_EDX, R_EAX, FRm&0x0E );
2892 load_fr( R_EDX, R_EDX, FRm|0x01 );
2893 MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
2896 JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
2897 JMP_TARGET(doublesize);
2898 load_fr_bank( R_EDX );
2899 load_fr( R_EDX, R_EAX, FRm&0x0E );
2900 load_fr( R_EDX, R_EDX, FRm|0x01 );
2901 MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
2904 sh4_x86.tstate = TSTATE_NONE;
2908 { /* FMOV @Rm, FRn */
2909 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2911 check_fpuen_no_precheck();
2912 load_reg( R_ECX, Rm );
2913 check_ralign32( R_ECX );
2914 load_spreg( R_EDX, R_FPSCR );
2915 TEST_imm32_r32( FPSCR_SZ, R_EDX );
2916 JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
2917 MEM_READ_LONG( R_ECX, R_EAX );
2918 load_fr_bank( R_EDX );
2919 store_fr( R_EDX, R_EAX, FRn );
2921 JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
2922 JMP_TARGET(doublesize);
2923 MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
2924 load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
2925 load_xf_bank( R_EDX );
2926 store_fr( R_EDX, R_EAX, FRn&0x0E );
2927 store_fr( R_EDX, R_ECX, FRn|0x01 );
2930 JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
2931 JMP_TARGET(doublesize);
2932 MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
2933 load_fr_bank( R_EDX );
2934 store_fr( R_EDX, R_EAX, FRn&0x0E );
2935 store_fr( R_EDX, R_ECX, FRn|0x01 );
2938 sh4_x86.tstate = TSTATE_NONE;
2942 { /* FMOV @Rm+, FRn */
2943 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2945 check_fpuen_no_precheck();
2946 load_reg( R_ECX, Rm );
2947 check_ralign32( R_ECX );
2948 MOV_r32_r32( R_ECX, R_EAX );
2949 load_spreg( R_EDX, R_FPSCR );
2950 TEST_imm32_r32( FPSCR_SZ, R_EDX );
2951 JNE_rel8(14 + CALL_FUNC1_SIZE, doublesize);
2952 ADD_imm8s_r32( 4, R_EAX );
2953 store_reg( R_EAX, Rm );
2954 MEM_READ_LONG( R_ECX, R_EAX );
2955 load_fr_bank( R_EDX );
2956 store_fr( R_EDX, R_EAX, FRn );
2958 JMP_rel8(27 + MEM_READ_DOUBLE_SIZE, end);
2959 JMP_TARGET(doublesize);
2960 ADD_imm8s_r32( 8, R_EAX );
2961 store_reg(R_EAX, Rm);
2962 MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
2963 load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
2964 load_xf_bank( R_EDX );
2965 store_fr( R_EDX, R_EAX, FRn&0x0E );
2966 store_fr( R_EDX, R_ECX, FRn|0x01 );
2969 JMP_rel8(15 + MEM_READ_DOUBLE_SIZE, end);
2970 ADD_imm8s_r32( 8, R_EAX );
2971 store_reg(R_EAX, Rm);
2972 MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
2973 load_fr_bank( R_EDX );
2974 store_fr( R_EDX, R_EAX, FRn&0x0E );
2975 store_fr( R_EDX, R_ECX, FRn|0x01 );
2978 sh4_x86.tstate = TSTATE_NONE;
2982 { /* FMOV FRm, @Rn */
2983 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2985 check_fpuen_no_precheck();
2986 load_reg( R_ECX, Rn );
2987 check_walign32( R_ECX );
2988 load_spreg( R_EDX, R_FPSCR );
2989 TEST_imm32_r32( FPSCR_SZ, R_EDX );
2990 JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
2991 load_fr_bank( R_EDX );
2992 load_fr( R_EDX, R_EAX, FRm );
2993 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
2995 JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
2996 JMP_TARGET(doublesize);
2997 load_xf_bank( R_EDX );
2998 load_fr( R_EDX, R_EAX, FRm&0x0E );
2999 load_fr( R_EDX, R_EDX, FRm|0x01 );
3000 MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
3003 JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
3004 JMP_TARGET(doublesize);
3005 load_fr_bank( R_EDX );
3006 load_fr( R_EDX, R_EAX, FRm&0x0E );
3007 load_fr( R_EDX, R_EDX, FRm|0x01 );
3008 MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
3011 sh4_x86.tstate = TSTATE_NONE;
3015 { /* FMOV FRm, @-Rn */
3016 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
3018 check_fpuen_no_precheck();
3019 load_reg( R_ECX, Rn );
3020 check_walign32( R_ECX );
3021 load_spreg( R_EDX, R_FPSCR );
3022 TEST_imm32_r32( FPSCR_SZ, R_EDX );
3023 JNE_rel8(14 + CALL_FUNC2_SIZE, doublesize);
3024 load_fr_bank( R_EDX );
3025 load_fr( R_EDX, R_EAX, FRm );
3026 ADD_imm8s_r32(-4,R_ECX);
3027 store_reg( R_ECX, Rn );
3028 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
3030 JMP_rel8( 24 + MEM_WRITE_DOUBLE_SIZE, end );
3031 JMP_TARGET(doublesize);
3032 load_xf_bank( R_EDX );
3033 load_fr( R_EDX, R_EAX, FRm&0x0E );
3034 load_fr( R_EDX, R_EDX, FRm|0x01 );
3035 ADD_imm8s_r32(-8,R_ECX);
3036 store_reg( R_ECX, Rn );
3037 MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
3040 JMP_rel8( 15 + MEM_WRITE_DOUBLE_SIZE, end );
3041 JMP_TARGET(doublesize);
3042 load_fr_bank( R_EDX );
3043 load_fr( R_EDX, R_EAX, FRm&0x0E );
3044 load_fr( R_EDX, R_EDX, FRm|0x01 );
3045 ADD_imm8s_r32(-8,R_ECX);
3046 store_reg( R_ECX, Rn );
3047 MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
3050 sh4_x86.tstate = TSTATE_NONE;
3054 { /* FMOV FRm, FRn */
3055 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
3056 /* As horrible as this looks, it's actually covering 5 separate cases:
3057 * 1. 32-bit fr-to-fr (PR=0)
3058 * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
3059 * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
3060 * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
3061 * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
3064 load_spreg( R_ECX, R_FPSCR );
3065 load_fr_bank( R_EDX );
3066 TEST_imm32_r32( FPSCR_SZ, R_ECX );
3067 JNE_rel8(8, doublesize);
3068 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
3069 store_fr( R_EDX, R_EAX, FRn );
3072 JMP_TARGET(doublesize);
3073 load_xf_bank( R_ECX );
3074 load_fr( R_ECX, R_EAX, FRm-1 );
3076 load_fr( R_ECX, R_EDX, FRm );
3077 store_fr( R_ECX, R_EAX, FRn-1 );
3078 store_fr( R_ECX, R_EDX, FRn );
3079 } else /* FRn&1 == 0 */ {
3080 load_fr( R_ECX, R_ECX, FRm );
3081 store_fr( R_EDX, R_EAX, FRn );
3082 store_fr( R_EDX, R_ECX, FRn+1 );
3085 } else /* FRm&1 == 0 */ {
3088 load_xf_bank( R_ECX );
3089 load_fr( R_EDX, R_EAX, FRm );
3090 load_fr( R_EDX, R_EDX, FRm+1 );
3091 store_fr( R_ECX, R_EAX, FRn-1 );
3092 store_fr( R_ECX, R_EDX, FRn );
3094 } else /* FRn&1 == 0 */ {
3096 load_fr( R_EDX, R_EAX, FRm );
3097 load_fr( R_EDX, R_ECX, FRm+1 );
3098 store_fr( R_EDX, R_EAX, FRn );
3099 store_fr( R_EDX, R_ECX, FRn+1 );
3103 sh4_x86.tstate = TSTATE_NONE;
3107 switch( (ir&0xF0) >> 4 ) {
3109 { /* FSTS FPUL, FRn */
3110 uint32_t FRn = ((ir>>8)&0xF);
3112 load_fr_bank( R_ECX );
3113 load_spreg( R_EAX, R_FPUL );
3114 store_fr( R_ECX, R_EAX, FRn );
3115 sh4_x86.tstate = TSTATE_NONE;
3119 { /* FLDS FRm, FPUL */
3120 uint32_t FRm = ((ir>>8)&0xF);
3122 load_fr_bank( R_ECX );
3123 load_fr( R_ECX, R_EAX, FRm );
3124 store_spreg( R_EAX, R_FPUL );
3125 sh4_x86.tstate = TSTATE_NONE;
3129 { /* FLOAT FPUL, FRn */
3130 uint32_t FRn = ((ir>>8)&0xF);
3132 load_spreg( R_ECX, R_FPSCR );
3133 load_spreg(R_EDX, REG_OFFSET(fr_bank));
3135 TEST_imm32_r32( FPSCR_PR, R_ECX );
3136 JNE_rel8(5, doubleprec);
3137 pop_fr( R_EDX, FRn );
3139 JMP_TARGET(doubleprec);
3140 pop_dr( R_EDX, FRn );
3142 sh4_x86.tstate = TSTATE_NONE;
3146 { /* FTRC FRm, FPUL */
3147 uint32_t FRm = ((ir>>8)&0xF);
3149 load_spreg( R_ECX, R_FPSCR );
3150 load_fr_bank( R_EDX );
3151 TEST_imm32_r32( FPSCR_PR, R_ECX );
3152 JNE_rel8(5, doubleprec);
3153 push_fr( R_EDX, FRm );
3155 JMP_TARGET(doubleprec);
3156 push_dr( R_EDX, FRm );
3158 load_imm32( R_ECX, (uint32_t)&max_int );
3159 FILD_r32ind( R_ECX );
3161 JNA_rel8( 32, sat );
3162 load_imm32( R_ECX, (uint32_t)&min_int ); // 5
3163 FILD_r32ind( R_ECX ); // 2
3165 JAE_rel8( 21, sat2 ); // 2
3166 load_imm32( R_EAX, (uint32_t)&save_fcw );
3167 FNSTCW_r32ind( R_EAX );
3168 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
3169 FLDCW_r32ind( R_EDX );
3170 FISTP_sh4r(R_FPUL); // 3
3171 FLDCW_r32ind( R_EAX );
3172 JMP_rel8( 9, end ); // 2
3176 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
3177 store_spreg( R_ECX, R_FPUL );
3180 sh4_x86.tstate = TSTATE_NONE;
3185 uint32_t FRn = ((ir>>8)&0xF);
3187 load_spreg( R_ECX, R_FPSCR );
3188 TEST_imm32_r32( FPSCR_PR, R_ECX );
3189 load_fr_bank( R_EDX );
3190 JNE_rel8(10, doubleprec);
3191 push_fr(R_EDX, FRn);
3195 JMP_TARGET(doubleprec);
3196 push_dr(R_EDX, FRn);
3200 sh4_x86.tstate = TSTATE_NONE;
3205 uint32_t FRn = ((ir>>8)&0xF);
3207 load_spreg( R_ECX, R_FPSCR );
3208 load_fr_bank( R_EDX );
3209 TEST_imm32_r32( FPSCR_PR, R_ECX );
3210 JNE_rel8(10, doubleprec);
3211 push_fr(R_EDX, FRn); // 3
3213 pop_fr( R_EDX, FRn); //3
3214 JMP_rel8(8,end); // 2
3215 JMP_TARGET(doubleprec);
3216 push_dr(R_EDX, FRn);
3220 sh4_x86.tstate = TSTATE_NONE;
3225 uint32_t FRn = ((ir>>8)&0xF);
3227 load_spreg( R_ECX, R_FPSCR );
3228 TEST_imm32_r32( FPSCR_PR, R_ECX );
3229 load_fr_bank( R_EDX );
3230 JNE_rel8(10, doubleprec);
3231 push_fr(R_EDX, FRn);
3235 JMP_TARGET(doubleprec);
3236 push_dr(R_EDX, FRn);
3240 sh4_x86.tstate = TSTATE_NONE;
3245 uint32_t FRn = ((ir>>8)&0xF);
3247 load_spreg( R_ECX, R_FPSCR );
3248 TEST_imm32_r32( FPSCR_PR, R_ECX );
3249 load_fr_bank( R_EDX );
3250 JNE_rel8(12, end); // PR=0 only
3252 push_fr(R_EDX, FRn);
3257 sh4_x86.tstate = TSTATE_NONE;
3262 uint32_t FRn = ((ir>>8)&0xF);
3265 load_spreg( R_ECX, R_FPSCR );
3266 TEST_imm32_r32( FPSCR_PR, R_ECX );
3268 XOR_r32_r32( R_EAX, R_EAX );
3269 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
3270 store_fr( R_ECX, R_EAX, FRn );
3272 sh4_x86.tstate = TSTATE_NONE;
3277 uint32_t FRn = ((ir>>8)&0xF);
3280 load_spreg( R_ECX, R_FPSCR );
3281 TEST_imm32_r32( FPSCR_PR, R_ECX );
3283 load_imm32(R_EAX, 0x3F800000);
3284 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
3285 store_fr( R_ECX, R_EAX, FRn );
3287 sh4_x86.tstate = TSTATE_NONE;
3291 { /* FCNVSD FPUL, FRn */
3292 uint32_t FRn = ((ir>>8)&0xF);
3294 load_spreg( R_ECX, R_FPSCR );
3295 TEST_imm32_r32( FPSCR_PR, R_ECX );
3296 JE_rel8(9, end); // only when PR=1
3297 load_fr_bank( R_ECX );
3299 pop_dr( R_ECX, FRn );
3301 sh4_x86.tstate = TSTATE_NONE;
3305 { /* FCNVDS FRm, FPUL */
3306 uint32_t FRm = ((ir>>8)&0xF);
3308 load_spreg( R_ECX, R_FPSCR );
3309 TEST_imm32_r32( FPSCR_PR, R_ECX );
3310 JE_rel8(9, end); // only when PR=1
3311 load_fr_bank( R_ECX );
3312 push_dr( R_ECX, FRm );
3315 sh4_x86.tstate = TSTATE_NONE;
3319 { /* FIPR FVm, FVn */
3320 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3);
3322 load_spreg( R_ECX, R_FPSCR );
3323 TEST_imm32_r32( FPSCR_PR, R_ECX );
3324 JNE_rel8(44, doubleprec);
3326 load_fr_bank( R_ECX );
3327 push_fr( R_ECX, FVm<<2 );
3328 push_fr( R_ECX, FVn<<2 );
3330 push_fr( R_ECX, (FVm<<2)+1);
3331 push_fr( R_ECX, (FVn<<2)+1);
3334 push_fr( R_ECX, (FVm<<2)+2);
3335 push_fr( R_ECX, (FVn<<2)+2);
3338 push_fr( R_ECX, (FVm<<2)+3);
3339 push_fr( R_ECX, (FVn<<2)+3);
3342 pop_fr( R_ECX, (FVn<<2)+3);
3343 JMP_TARGET(doubleprec);
3344 sh4_x86.tstate = TSTATE_NONE;
3348 switch( (ir&0x100) >> 8 ) {
3350 { /* FSCA FPUL, FRn */
3351 uint32_t FRn = ((ir>>9)&0x7)<<1;
3353 load_spreg( R_ECX, R_FPSCR );
3354 TEST_imm32_r32( FPSCR_PR, R_ECX );
3355 JNE_rel8( CALL_FUNC2_SIZE + 9, doubleprec );
3356 load_fr_bank( R_ECX );
3357 ADD_imm8s_r32( (FRn&0x0E)<<2, R_ECX );
3358 load_spreg( R_EDX, R_FPUL );
3359 call_func2( sh4_fsca, R_EDX, R_ECX );
3360 JMP_TARGET(doubleprec);
3361 sh4_x86.tstate = TSTATE_NONE;
3365 switch( (ir&0x200) >> 9 ) {
3367 { /* FTRV XMTRX, FVn */
3368 uint32_t FVn = ((ir>>10)&0x3);
3370 load_spreg( R_ECX, R_FPSCR );
3371 TEST_imm32_r32( FPSCR_PR, R_ECX );
3372 JNE_rel8( 18 + CALL_FUNC2_SIZE, doubleprec );
3373 load_fr_bank( R_EDX ); // 3
3374 ADD_imm8s_r32( FVn<<4, R_EDX ); // 3
3375 load_xf_bank( R_ECX ); // 12
3376 call_func2( sh4_ftrv, R_EDX, R_ECX ); // 12
3377 JMP_TARGET(doubleprec);
3378 sh4_x86.tstate = TSTATE_NONE;
3382 switch( (ir&0xC00) >> 10 ) {
3386 load_spreg( R_ECX, R_FPSCR );
3387 XOR_imm32_r32( FPSCR_SZ, R_ECX );
3388 store_spreg( R_ECX, R_FPSCR );
3389 sh4_x86.tstate = TSTATE_NONE;
3395 load_spreg( R_ECX, R_FPSCR );
3396 XOR_imm32_r32( FPSCR_FR, R_ECX );
3397 store_spreg( R_ECX, R_FPSCR );
3398 update_fr_bank( R_ECX );
3399 sh4_x86.tstate = TSTATE_NONE;
3404 if( sh4_x86.in_delay_slot ) {
3408 JMP_exit(EXIT_ILLEGAL);
3428 { /* FMAC FR0, FRm, FRn */
3429 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
3431 load_spreg( R_ECX, R_FPSCR );
3432 load_spreg( R_EDX, REG_OFFSET(fr_bank));
3433 TEST_imm32_r32( FPSCR_PR, R_ECX );
3434 JNE_rel8(18, doubleprec);
3435 push_fr( R_EDX, 0 );
3436 push_fr( R_EDX, FRm );
3438 push_fr( R_EDX, FRn );
3440 pop_fr( R_EDX, FRn );
3442 JMP_TARGET(doubleprec);
3443 push_dr( R_EDX, 0 );
3444 push_dr( R_EDX, FRm );
3446 push_dr( R_EDX, FRn );
3448 pop_dr( R_EDX, FRn );
3450 sh4_x86.tstate = TSTATE_NONE;
3460 sh4_x86.in_delay_slot = FALSE;
.