Search
lxdream.org :: lxdream/src/sh4/sh4x86.c
lxdream 0.9.1
released Jun 29
Download Now
filename src/sh4/sh4x86.c
changeset 733:633ee022f52e
prev 732:f05753bbe723
author nkeynes
date Mon Jul 14 07:44:42 2008 +0000 (13 years ago)
permissions -rw-r--r--
last change Re-indent everything consistently
Fix include guards for consistency as well
view annotate diff log raw
     1 /**
     2  * $Id$
     3  * 
     4  * SH4 => x86 translation. This version does no real optimization, it just
     5  * outputs straight-line x86 code - it mainly exists to provide a baseline
     6  * to test the optimizing versions against.
     7  *
     8  * Copyright (c) 2007 Nathan Keynes.
     9  *
    10  * This program is free software; you can redistribute it and/or modify
    11  * it under the terms of the GNU General Public License as published by
    12  * the Free Software Foundation; either version 2 of the License, or
    13  * (at your option) any later version.
    14  *
    15  * This program is distributed in the hope that it will be useful,
    16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
    17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    18  * GNU General Public License for more details.
    19  */
    21 #include <assert.h>
    22 #include <math.h>
    24 #ifndef NDEBUG
    25 #define DEBUG_JUMPS 1
    26 #endif
    28 #include "sh4/xltcache.h"
    29 #include "sh4/sh4core.h"
    30 #include "sh4/sh4trans.h"
    31 #include "sh4/sh4stat.h"
    32 #include "sh4/sh4mmio.h"
    33 #include "sh4/x86op.h"
    34 #include "clock.h"
    36 #define DEFAULT_BACKPATCH_SIZE 4096
/* One deferred exception fixup, recorded while emitting a block and applied
 * when the block is finalised. */
struct backpatch_record {
    uint32_t fixup_offset;  /* byte offset of the patch site within the block's code */
    uint32_t fixup_icount;  /* SH4 instruction count from block start ((pc - start)>>1) */
    int32_t exc_code;       /* exception code to raise; -1 used by the MMU-translate
                             * paths (see JE_exc(-1)) — presumably "exception already
                             * raised by the helper", TODO confirm */
};
    44 #define DELAY_NONE 0
    45 #define DELAY_PC 1
    46 #define DELAY_PC_PR 2
    48 /** 
    49  * Struct to manage internal translation state. This state is not saved -
    50  * it is only valid between calls to sh4_translate_begin_block() and
    51  * sh4_translate_end_block()
    52  */
struct sh4_x86_state {
    int in_delay_slot;     /* DELAY_NONE / DELAY_PC / DELAY_PC_PR (see defines above) */
    gboolean priv_checked; /* true if we've already checked the cpu mode. */
    gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
    gboolean branch_taken; /* true if we branched unconditionally */
    uint32_t block_start_pc; /* SH4 VMA of the first instruction in the block */
    uint32_t stack_posn;   /* Trace stack height for alignment purposes */
    int tstate;            /* x86 condition code currently mirroring SH4 T, or
                            * TSTATE_NONE if T is only valid in sh4r.t (see JT_rel8) */

    /* mode flags */
    gboolean tlb_on; /* True if tlb translation is active */

    /* Allocated memory for the (block-wide) back-patch list */
    struct backpatch_record *backpatch_list;
    uint32_t backpatch_posn; /* number of records currently in the list */
    uint32_t backpatch_size; /* capacity of the list, in records (not bytes) */
};
    71 #define TSTATE_NONE -1
    72 #define TSTATE_O    0
    73 #define TSTATE_C    2
    74 #define TSTATE_E    4
    75 #define TSTATE_NE   5
    76 #define TSTATE_G    0xF
    77 #define TSTATE_GE   0xD
    78 #define TSTATE_A    7
    79 #define TSTATE_AE   3
    81 #ifdef ENABLE_SH4STATS
    82 #define COUNT_INST(id) load_imm32(R_EAX,id); call_func1(sh4_stats_add, R_EAX); sh4_x86.tstate = TSTATE_NONE
    83 #else
    84 #define COUNT_INST(id)
    85 #endif
    87 /** Branch if T is set (either in the current cflags, or in sh4r.t) */
    88 #define JT_rel8(label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    89 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    90     OP(0x70+sh4_x86.tstate); MARK_JMP8(label); OP(-1)
    92 /** Branch if T is clear (either in the current cflags or in sh4r.t) */
    93 #define JF_rel8(label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    94 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    95     OP(0x70+ (sh4_x86.tstate^1)); MARK_JMP8(label); OP(-1)
    97 static struct sh4_x86_state sh4_x86;
    99 static uint32_t max_int = 0x7FFFFFFF;
   100 static uint32_t min_int = 0x80000000;
   101 static uint32_t save_fcw; /* save value for fpu control word */
   102 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
   104 void sh4_translate_init(void)
   105 {
   106     sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
   107     sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(struct backpatch_record);
   108 }
   111 static void sh4_x86_add_backpatch( uint8_t *fixup_addr, uint32_t fixup_pc, uint32_t exc_code )
   112 {
   113     if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
   114 	sh4_x86.backpatch_size <<= 1;
   115 	sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, 
   116 					  sh4_x86.backpatch_size * sizeof(struct backpatch_record));
   117 	assert( sh4_x86.backpatch_list != NULL );
   118     }
   119     if( sh4_x86.in_delay_slot ) {
   120 	fixup_pc -= 2;
   121     }
   122     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_offset = 
   123 	((uint8_t *)fixup_addr) - ((uint8_t *)xlat_current_block->code);
   124     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_icount = (fixup_pc - sh4_x86.block_start_pc)>>1;
   125     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].exc_code = exc_code;
   126     sh4_x86.backpatch_posn++;
   127 }
   129 /**
   130  * Emit an instruction to load an SH4 reg into a real register
   131  */
/**
 * Emit an instruction to load an SH4 reg into a real register
 */
static inline void load_reg( int x86reg, int sh4reg ) 
{
    /* mov [bp+n], reg */
    OP(0x8B);                     /* MOV r32, r/m32 */
    OP(0x45 + (x86reg<<3));       /* ModRM: mod=01 (disp8), reg=x86reg, rm=EBP */
    OP(REG_OFFSET(r[sh4reg]));    /* disp8 = offset of r[sh4reg] in sh4r */
}
/**
 * Emit a sign-extending 16-bit load of an SH4 reg's low half into an x86
 * register (MOVSX r32, r/m16).
 */
static inline void load_reg16s( int x86reg, int sh4reg )
{
    OP(0x0F);
    OP(0xBF);   /* 0F BF = MOVSX r32, r/m16 */
    MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
}
/**
 * Emit a zero-extending 16-bit load of an SH4 reg's low half into an x86
 * register (MOVZX r32, r/m16).
 */
static inline void load_reg16u( int x86reg, int sh4reg )
{
    OP(0x0F);
    OP(0xB7);   /* 0F B7 = MOVZX r32, r/m16 */
    MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));

}
   155 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
   156 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
   157 /**
   158  * Emit an instruction to load an immediate value into a register
   159  */
/**
 * Emit an instruction to load an immediate value into a register
 */
static inline void load_imm32( int x86reg, uint32_t value ) {
    /* mov #value, reg : B8+r imm32 */
    OP(0xB8 + x86reg);
    OP32(value);
}
   166 /**
   167  * Load an immediate 64-bit quantity (note: x86-64 only)
   168  */
   169 static inline void load_imm64( int x86reg, uint32_t value ) {
   170     /* mov #value, reg */
   171     REXW();
   172     OP(0xB8 + x86reg);
   173     OP64(value);
   174 }
   176 /**
   177  * Emit an instruction to store an SH4 reg (RN)
   178  */
   179 void static inline store_reg( int x86reg, int sh4reg ) {
   180     /* mov reg, [bp+n] */
   181     OP(0x89);
   182     OP(0x45 + (x86reg<<3));
   183     OP(REG_OFFSET(r[sh4reg]));
   184 }
   186 /**
   187  * Load an FR register (single-precision floating point) into an integer x86
   188  * register (eg for register-to-register moves)
   189  */
   190 #define load_fr(reg,frm)  OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[0][(frm)^1]) )
   191 #define load_xf(reg,frm)  OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[1][(frm)^1]) )
   193 /**
   194  * Load the low half of a DR register (DR or XD) into an integer x86 register 
   195  */
   196 #define load_dr0(reg,frm) OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[frm&1][frm|0x01]) )
   197 #define load_dr1(reg,frm) OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[frm&1][frm&0x0E]) )
   199 /**
   200  * Store an FR register (single-precision floating point) from an integer x86+
   201  * register (eg for register-to-register moves)
   202  */
   203 #define store_fr(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[0][(frm)^1]) )
   204 #define store_xf(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[1][(frm)^1]) )
   206 #define store_dr0(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[frm&1][frm|0x01]) )
   207 #define store_dr1(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[frm&1][frm&0x0E]) )
   210 #define push_fpul()  FLDF_sh4r(R_FPUL)
   211 #define pop_fpul()   FSTPF_sh4r(R_FPUL)
   212 #define push_fr(frm) FLDF_sh4r( REG_OFFSET(fr[0][(frm)^1]) )
   213 #define pop_fr(frm)  FSTPF_sh4r( REG_OFFSET(fr[0][(frm)^1]) )
   214 #define push_xf(frm) FLDF_sh4r( REG_OFFSET(fr[1][(frm)^1]) )
   215 #define pop_xf(frm)  FSTPF_sh4r( REG_OFFSET(fr[1][(frm)^1]) )
   216 #define push_dr(frm) FLDD_sh4r( REG_OFFSET(fr[0][(frm)&0x0E]) )
   217 #define pop_dr(frm)  FSTPD_sh4r( REG_OFFSET(fr[0][(frm)&0x0E]) )
   218 #define push_xdr(frm) FLDD_sh4r( REG_OFFSET(fr[1][(frm)&0x0E]) )
   219 #define pop_xdr(frm)  FSTPD_sh4r( REG_OFFSET(fr[1][(frm)&0x0E]) )
   223 /* Exception checks - Note that all exception checks will clobber EAX */
   225 #define check_priv( ) \
   226     if( !sh4_x86.priv_checked ) { \
   227 	sh4_x86.priv_checked = TRUE;\
   228 	load_spreg( R_EAX, R_SR );\
   229 	AND_imm32_r32( SR_MD, R_EAX );\
   230 	if( sh4_x86.in_delay_slot ) {\
   231 	    JE_exc( EXC_SLOT_ILLEGAL );\
   232 	} else {\
   233 	    JE_exc( EXC_ILLEGAL );\
   234 	}\
   235     }\
   237 #define check_fpuen( ) \
   238     if( !sh4_x86.fpuen_checked ) {\
   239 	sh4_x86.fpuen_checked = TRUE;\
   240 	load_spreg( R_EAX, R_SR );\
   241 	AND_imm32_r32( SR_FD, R_EAX );\
   242 	if( sh4_x86.in_delay_slot ) {\
   243 	    JNE_exc(EXC_SLOT_FPU_DISABLED);\
   244 	} else {\
   245 	    JNE_exc(EXC_FPU_DISABLED);\
   246 	}\
   247     }
   249 #define check_ralign16( x86reg ) \
   250     TEST_imm32_r32( 0x00000001, x86reg ); \
   251     JNE_exc(EXC_DATA_ADDR_READ)
   253 #define check_walign16( x86reg ) \
   254     TEST_imm32_r32( 0x00000001, x86reg ); \
   255     JNE_exc(EXC_DATA_ADDR_WRITE);
   257 #define check_ralign32( x86reg ) \
   258     TEST_imm32_r32( 0x00000003, x86reg ); \
   259     JNE_exc(EXC_DATA_ADDR_READ)
   261 #define check_walign32( x86reg ) \
   262     TEST_imm32_r32( 0x00000003, x86reg ); \
   263     JNE_exc(EXC_DATA_ADDR_WRITE);
   265 #define check_ralign64( x86reg ) \
   266     TEST_imm32_r32( 0x00000007, x86reg ); \
   267     JNE_exc(EXC_DATA_ADDR_READ)
   269 #define check_walign64( x86reg ) \
   270     TEST_imm32_r32( 0x00000007, x86reg ); \
   271     JNE_exc(EXC_DATA_ADDR_WRITE);
   273 #define UNDEF()
   274 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
   275 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
   276 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
   277 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
   278 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
   279 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
   280 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
   282 /**
   283  * Perform MMU translation on the address in addr_reg for a read operation, iff the TLB is turned 
   284  * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
   285  */
   286 #define MMU_TRANSLATE_READ( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
   288 #define MMU_TRANSLATE_READ_EXC( addr_reg, exc_code ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(exc_code); MEM_RESULT(addr_reg) }
   289 /**
   290  * Perform MMU translation on the address in addr_reg for a write operation, iff the TLB is turned 
   291  * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
   292  */
   293 #define MMU_TRANSLATE_WRITE( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
   295 #define MEM_READ_SIZE (CALL_FUNC1_SIZE)
   296 #define MEM_WRITE_SIZE (CALL_FUNC2_SIZE)
   297 #define MMU_TRANSLATE_SIZE (sh4_x86.tlb_on ? (CALL_FUNC1_SIZE + 12) : 0 )
   299 #define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = DELAY_NONE; return 1;
   301 /****** Import appropriate calling conventions ******/
   302 #if SIZEOF_VOID_P == 8
   303 #include "sh4/ia64abi.h"
   304 #else /* 32-bit system */
   305 #ifdef APPLE_BUILD
   306 #include "sh4/ia32mac.h"
   307 #else
   308 #include "sh4/ia32abi.h"
   309 #endif
   310 #endif
   312 uint32_t sh4_translate_end_block_size()
   313 {
   314     if( sh4_x86.backpatch_posn <= 3 ) {
   315 	return EPILOGUE_SIZE + (sh4_x86.backpatch_posn*12);
   316     } else {
   317 	return EPILOGUE_SIZE + 48 + (sh4_x86.backpatch_posn-3)*15;
   318     }
   319 }
   322 /**
   323  * Embed a breakpoint into the generated code
   324  */
/**
 * Embed a breakpoint into the generated code
 *
 * Emits code that, at runtime, loads the breakpoint's SH4 PC into EAX and
 * calls sh4_translate_breakpoint_hit with it.
 */
void sh4_translate_emit_breakpoint( sh4vma_t pc )
{
    load_imm32( R_EAX, pc );
    call_func1( sh4_translate_breakpoint_hit, R_EAX );
}
   332 #define UNTRANSLATABLE(pc) !IS_IN_ICACHE(pc)
   334 /**
   335  * Embed a call to sh4_execute_instruction for situations that we
   336  * can't translate (just page-crossing delay slots at the moment).
   337  * Caller is responsible for setting new_pc before calling this function.
   338  *
   339  * Performs:
   340  *   Set PC = endpc
   341  *   Set sh4r.in_delay_slot = sh4_x86.in_delay_slot
   342  *   Update slice_cycle for endpc+2 (single step doesn't update slice_cycle)
   343  *   Call sh4_execute_instruction
   344  *   Call xlat_get_code_by_vma / xlat_get_code as for normal exit
   345  */
void exit_block_emu( sh4vma_t endpc )
{
    /* PC += (endpc - block_start) */
    load_imm32( R_ECX, endpc - sh4_x86.block_start_pc );   // 5
    ADD_r32_sh4r( R_ECX, R_PC );

    /* slice_cycle += cycles for the instructions up to and including endpc
     * (single step doesn't update slice_cycle itself - see header comment) */
    load_imm32( R_ECX, (((endpc - sh4_x86.block_start_pc)>>1)+1)*sh4_cpu_period ); // 5
    ADD_r32_sh4r( R_ECX, REG_OFFSET(slice_cycle) );     // 6
    /* Propagate the translator's delay-slot flag into sh4r so the emulation
     * core executes the instruction with the right semantics */
    load_imm32( R_ECX, sh4_x86.in_delay_slot ? 1 : 0 );
    store_spreg( R_ECX, REG_OFFSET(in_delay_slot) );

    /* Single-step the untranslatable instruction in the emulator core */
    call_func0( sh4_execute_instruction );    
    /* Then look up the next code block from the resulting PC, exactly as a
     * normal block exit would (VMA lookup iff the TLB is on) */
    load_spreg( R_EAX, R_PC );
    if( sh4_x86.tlb_on ) {
	call_func1(xlat_get_code_by_vma,R_EAX);
    } else {
	call_func1(xlat_get_code,R_EAX);
    }
    /* Mask the low 2 bits of the returned code pointer — presumably tag bits
     * in the xlat cache entry; TODO confirm against xltcache */
    AND_imm8s_rptr( 0xFC, R_EAX );
    /* Standard epilogue: restore frame pointer and return to the driver */
    POP_r32(R_EBP);
    RET();
} 
/**
 * Translate a single instruction. Delayed branches are handled specially
 * by translating both branch and delayed instruction as a single unit (as
 * the delay-slot instruction cannot be executed on its own).
 *
 * The instruction MUST be in the icache (assert check)
 *
 * @return true if the instruction marks the end of a basic block
 * (eg a branch or an untranslatable delay slot).
 */
   377 uint32_t sh4_translate_instruction( sh4vma_t pc )
   378 {
   379     uint32_t ir;
   380     /* Read instruction from icache */
   381     assert( IS_IN_ICACHE(pc) );
   382     ir = *(uint16_t *)GET_ICACHE_PTR(pc);
   384 	/* PC is not in the current icache - this usually means we're running
   385 	 * with MMU on, and we've gone past the end of the page. And since 
   386 	 * sh4_translate_block is pretty careful about this, it means we're
   387 	 * almost certainly in a delay slot.
   388 	 *
   389 	 * Since we can't assume the page is present (and we can't fault it in
   390 	 * at this point, inline a call to sh4_execute_instruction (with a few
   391 	 * small repairs to cope with the different environment).
   392 	 */
   394     if( !sh4_x86.in_delay_slot ) {
   395 	sh4_translate_add_recovery( (pc - sh4_x86.block_start_pc)>>1 );
   396     }
   397         switch( (ir&0xF000) >> 12 ) {
   398             case 0x0:
   399                 switch( ir&0xF ) {
   400                     case 0x2:
   401                         switch( (ir&0x80) >> 7 ) {
   402                             case 0x0:
   403                                 switch( (ir&0x70) >> 4 ) {
   404                                     case 0x0:
   405                                         { /* STC SR, Rn */
   406                                         uint32_t Rn = ((ir>>8)&0xF); 
   407                                         COUNT_INST(I_STCSR);
   408                                         check_priv();
   409                                         call_func0(sh4_read_sr);
   410                                         store_reg( R_EAX, Rn );
   411                                         sh4_x86.tstate = TSTATE_NONE;
   412                                         }
   413                                         break;
   414                                     case 0x1:
   415                                         { /* STC GBR, Rn */
   416                                         uint32_t Rn = ((ir>>8)&0xF); 
   417                                         COUNT_INST(I_STC);
   418                                         load_spreg( R_EAX, R_GBR );
   419                                         store_reg( R_EAX, Rn );
   420                                         }
   421                                         break;
   422                                     case 0x2:
   423                                         { /* STC VBR, Rn */
   424                                         uint32_t Rn = ((ir>>8)&0xF); 
   425                                         COUNT_INST(I_STC);
   426                                         check_priv();
   427                                         load_spreg( R_EAX, R_VBR );
   428                                         store_reg( R_EAX, Rn );
   429                                         sh4_x86.tstate = TSTATE_NONE;
   430                                         }
   431                                         break;
   432                                     case 0x3:
   433                                         { /* STC SSR, Rn */
   434                                         uint32_t Rn = ((ir>>8)&0xF); 
   435                                         COUNT_INST(I_STC);
   436                                         check_priv();
   437                                         load_spreg( R_EAX, R_SSR );
   438                                         store_reg( R_EAX, Rn );
   439                                         sh4_x86.tstate = TSTATE_NONE;
   440                                         }
   441                                         break;
   442                                     case 0x4:
   443                                         { /* STC SPC, Rn */
   444                                         uint32_t Rn = ((ir>>8)&0xF); 
   445                                         COUNT_INST(I_STC);
   446                                         check_priv();
   447                                         load_spreg( R_EAX, R_SPC );
   448                                         store_reg( R_EAX, Rn );
   449                                         sh4_x86.tstate = TSTATE_NONE;
   450                                         }
   451                                         break;
   452                                     default:
   453                                         UNDEF();
   454                                         break;
   455                                 }
   456                                 break;
   457                             case 0x1:
   458                                 { /* STC Rm_BANK, Rn */
   459                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
   460                                 COUNT_INST(I_STC);
   461                                 check_priv();
   462                                 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
   463                                 store_reg( R_EAX, Rn );
   464                                 sh4_x86.tstate = TSTATE_NONE;
   465                                 }
   466                                 break;
   467                         }
   468                         break;
   469                     case 0x3:
   470                         switch( (ir&0xF0) >> 4 ) {
   471                             case 0x0:
   472                                 { /* BSRF Rn */
   473                                 uint32_t Rn = ((ir>>8)&0xF); 
   474                                 COUNT_INST(I_BSRF);
   475                                 if( sh4_x86.in_delay_slot ) {
   476                             	SLOTILLEGAL();
   477                                 } else {
   478                             	load_spreg( R_EAX, R_PC );
   479                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
   480                             	store_spreg( R_EAX, R_PR );
   481                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_EAX );
   482                             	store_spreg( R_EAX, R_NEW_PC );
   484                             	sh4_x86.in_delay_slot = DELAY_PC;
   485                             	sh4_x86.tstate = TSTATE_NONE;
   486                             	sh4_x86.branch_taken = TRUE;
   487                             	if( UNTRANSLATABLE(pc+2) ) {
   488                             	    exit_block_emu(pc+2);
   489                             	    return 2;
   490                             	} else {
   491                             	    sh4_translate_instruction( pc + 2 );
   492                             	    exit_block_newpcset(pc+2);
   493                             	    return 4;
   494                             	}
   495                                 }
   496                                 }
   497                                 break;
   498                             case 0x2:
   499                                 { /* BRAF Rn */
   500                                 uint32_t Rn = ((ir>>8)&0xF); 
   501                                 COUNT_INST(I_BRAF);
   502                                 if( sh4_x86.in_delay_slot ) {
   503                             	SLOTILLEGAL();
   504                                 } else {
   505                             	load_spreg( R_EAX, R_PC );
   506                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
   507                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_EAX );
   508                             	store_spreg( R_EAX, R_NEW_PC );
   509                             	sh4_x86.in_delay_slot = DELAY_PC;
   510                             	sh4_x86.tstate = TSTATE_NONE;
   511                             	sh4_x86.branch_taken = TRUE;
   512                             	if( UNTRANSLATABLE(pc+2) ) {
   513                             	    exit_block_emu(pc+2);
   514                             	    return 2;
   515                             	} else {
   516                             	    sh4_translate_instruction( pc + 2 );
   517                             	    exit_block_newpcset(pc+2);
   518                             	    return 4;
   519                             	}
   520                                 }
   521                                 }
   522                                 break;
   523                             case 0x8:
   524                                 { /* PREF @Rn */
   525                                 uint32_t Rn = ((ir>>8)&0xF); 
   526                                 COUNT_INST(I_PREF);
   527                                 load_reg( R_EAX, Rn );
   528                                 MOV_r32_r32( R_EAX, R_ECX );
   529                                 AND_imm32_r32( 0xFC000000, R_EAX );
   530                                 CMP_imm32_r32( 0xE0000000, R_EAX );
   531                                 JNE_rel8(end);
   532                                 call_func1( sh4_flush_store_queue, R_ECX );
   533                                 TEST_r32_r32( R_EAX, R_EAX );
   534                                 JE_exc(-1);
   535                                 JMP_TARGET(end);
   536                                 sh4_x86.tstate = TSTATE_NONE;
   537                                 }
   538                                 break;
   539                             case 0x9:
   540                                 { /* OCBI @Rn */
   541                                 uint32_t Rn = ((ir>>8)&0xF); 
   542                                 COUNT_INST(I_OCBI);
   543                                 }
   544                                 break;
   545                             case 0xA:
   546                                 { /* OCBP @Rn */
   547                                 uint32_t Rn = ((ir>>8)&0xF); 
   548                                 COUNT_INST(I_OCBP);
   549                                 }
   550                                 break;
   551                             case 0xB:
   552                                 { /* OCBWB @Rn */
   553                                 uint32_t Rn = ((ir>>8)&0xF); 
   554                                 COUNT_INST(I_OCBWB);
   555                                 }
   556                                 break;
   557                             case 0xC:
   558                                 { /* MOVCA.L R0, @Rn */
   559                                 uint32_t Rn = ((ir>>8)&0xF); 
   560                                 COUNT_INST(I_MOVCA);
   561                                 load_reg( R_EAX, Rn );
   562                                 check_walign32( R_EAX );
   563                                 MMU_TRANSLATE_WRITE( R_EAX );
   564                                 load_reg( R_EDX, 0 );
   565                                 MEM_WRITE_LONG( R_EAX, R_EDX );
   566                                 sh4_x86.tstate = TSTATE_NONE;
   567                                 }
   568                                 break;
   569                             default:
   570                                 UNDEF();
   571                                 break;
   572                         }
   573                         break;
   574                     case 0x4:
   575                         { /* MOV.B Rm, @(R0, Rn) */
   576                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   577                         COUNT_INST(I_MOVB);
   578                         load_reg( R_EAX, 0 );
   579                         load_reg( R_ECX, Rn );
   580                         ADD_r32_r32( R_ECX, R_EAX );
   581                         MMU_TRANSLATE_WRITE( R_EAX );
   582                         load_reg( R_EDX, Rm );
   583                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   584                         sh4_x86.tstate = TSTATE_NONE;
   585                         }
   586                         break;
   587                     case 0x5:
   588                         { /* MOV.W Rm, @(R0, Rn) */
   589                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   590                         COUNT_INST(I_MOVW);
   591                         load_reg( R_EAX, 0 );
   592                         load_reg( R_ECX, Rn );
   593                         ADD_r32_r32( R_ECX, R_EAX );
   594                         check_walign16( R_EAX );
   595                         MMU_TRANSLATE_WRITE( R_EAX );
   596                         load_reg( R_EDX, Rm );
   597                         MEM_WRITE_WORD( R_EAX, R_EDX );
   598                         sh4_x86.tstate = TSTATE_NONE;
   599                         }
   600                         break;
   601                     case 0x6:
   602                         { /* MOV.L Rm, @(R0, Rn) */
   603                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   604                         COUNT_INST(I_MOVL);
   605                         load_reg( R_EAX, 0 );
   606                         load_reg( R_ECX, Rn );
   607                         ADD_r32_r32( R_ECX, R_EAX );
   608                         check_walign32( R_EAX );
   609                         MMU_TRANSLATE_WRITE( R_EAX );
   610                         load_reg( R_EDX, Rm );
   611                         MEM_WRITE_LONG( R_EAX, R_EDX );
   612                         sh4_x86.tstate = TSTATE_NONE;
   613                         }
   614                         break;
   615                     case 0x7:
   616                         { /* MUL.L Rm, Rn */
   617                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   618                         COUNT_INST(I_MULL);
   619                         load_reg( R_EAX, Rm );
   620                         load_reg( R_ECX, Rn );
   621                         MUL_r32( R_ECX );
   622                         store_spreg( R_EAX, R_MACL );
   623                         sh4_x86.tstate = TSTATE_NONE;
   624                         }
   625                         break;
   626                     case 0x8:
   627                         switch( (ir&0xFF0) >> 4 ) {
   628                             case 0x0:
   629                                 { /* CLRT */
   630                                 COUNT_INST(I_CLRT);
   631                                 CLC();
   632                                 SETC_t();
   633                                 sh4_x86.tstate = TSTATE_C;
   634                                 }
   635                                 break;
   636                             case 0x1:
   637                                 { /* SETT */
   638                                 COUNT_INST(I_SETT);
   639                                 STC();
   640                                 SETC_t();
   641                                 sh4_x86.tstate = TSTATE_C;
   642                                 }
   643                                 break;
   644                             case 0x2:
   645                                 { /* CLRMAC */
   646                                 COUNT_INST(I_CLRMAC);
   647                                 XOR_r32_r32(R_EAX, R_EAX);
   648                                 store_spreg( R_EAX, R_MACL );
   649                                 store_spreg( R_EAX, R_MACH );
   650                                 sh4_x86.tstate = TSTATE_NONE;
   651                                 }
   652                                 break;
   653                             case 0x3:
   654                                 { /* LDTLB */
   655                                 COUNT_INST(I_LDTLB);
   656                                 call_func0( MMU_ldtlb );
   657                                 }
   658                                 break;
   659                             case 0x4:
   660                                 { /* CLRS */
   661                                 COUNT_INST(I_CLRS);
   662                                 CLC();
   663                                 SETC_sh4r(R_S);
   664                                 sh4_x86.tstate = TSTATE_C;
   665                                 }
   666                                 break;
   667                             case 0x5:
   668                                 { /* SETS */
   669                                 COUNT_INST(I_SETS);
   670                                 STC();
   671                                 SETC_sh4r(R_S);
   672                                 sh4_x86.tstate = TSTATE_C;
   673                                 }
   674                                 break;
   675                             default:
   676                                 UNDEF();
   677                                 break;
   678                         }
   679                         break;
                    case 0x9:
                        /* 0x0n09 family: NOP / DIV0U / MOVT Rn, selected by bits 7:4 */
                        switch( (ir&0xF0) >> 4 ) {
                            case 0x0:
                                { /* NOP */
                                COUNT_INST(I_NOP);
                                /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
                                }
                                break;
                            case 0x1:
                                { /* DIV0U */
                                COUNT_INST(I_DIV0U);
                                /* Clear Q, M and T to initialise an unsigned divide step sequence */
                                XOR_r32_r32( R_EAX, R_EAX );
                                store_spreg( R_EAX, R_Q );
                                store_spreg( R_EAX, R_M );
                                store_spreg( R_EAX, R_T );
                                sh4_x86.tstate = TSTATE_C; // works for DIV1
                                }
                                break;
                            case 0x2:
                                { /* MOVT Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_MOVT);
                                /* Rn := T */
                                load_spreg( R_EAX, R_T );
                                store_reg( R_EAX, Rn );
                                }
                                break;
                            default:
                                UNDEF();
                                break;
                        }
                        break;
                    case 0xA:
                        /* 0x0n0A family: move a system/control register into Rn,
                         * selected by bits 7:4. SGR/DBR are privileged (check_priv);
                         * FPUL/FPSCR require the FPU to be enabled (check_fpuen). */
                        switch( (ir&0xF0) >> 4 ) {
                            case 0x0:
                                { /* STS MACH, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STS);
                                load_spreg( R_EAX, R_MACH );
                                store_reg( R_EAX, Rn );
                                }
                                break;
                            case 0x1:
                                { /* STS MACL, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STS);
                                load_spreg( R_EAX, R_MACL );
                                store_reg( R_EAX, Rn );
                                }
                                break;
                            case 0x2:
                                { /* STS PR, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STS);
                                load_spreg( R_EAX, R_PR );
                                store_reg( R_EAX, Rn );
                                }
                                break;
                            case 0x3:
                                { /* STC SGR, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STC);
                                check_priv();   // SGR is only accessible in privileged mode
                                load_spreg( R_EAX, R_SGR );
                                store_reg( R_EAX, Rn );
                                sh4_x86.tstate = TSTATE_NONE;
                                }
                                break;
                            case 0x5:
                                { /* STS FPUL, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STS);
                                check_fpuen();
                                load_spreg( R_EAX, R_FPUL );
                                store_reg( R_EAX, Rn );
                                }
                                break;
                            case 0x6:
                                { /* STS FPSCR, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STSFPSCR);
                                check_fpuen();
                                load_spreg( R_EAX, R_FPSCR );
                                store_reg( R_EAX, Rn );
                                }
                                break;
                            case 0xF:
                                { /* STC DBR, Rn */
                                uint32_t Rn = ((ir>>8)&0xF);
                                COUNT_INST(I_STC);
                                check_priv();   // DBR is only accessible in privileged mode
                                load_spreg( R_EAX, R_DBR );
                                store_reg( R_EAX, Rn );
                                sh4_x86.tstate = TSTATE_NONE;
                                }
                                break;
                            default:
                                UNDEF();
                                break;
                        }
                        break;
                    case 0xB:
                        /* 0x0n0B family: RTS / SLEEP / RTE, selected by bits 11:4.
                         * These are block-ending instructions: each path returns an
                         * instruction-cycle count instead of falling through. */
                        switch( (ir&0xFF0) >> 4 ) {
                            case 0x0:
                                { /* RTS */
                                COUNT_INST(I_RTS);
                                if( sh4_x86.in_delay_slot ) {
                                    SLOTILLEGAL();   // branch in a delay slot is illegal
                                } else {
                                    /* New PC comes from PR; then translate the delay-slot
                                     * instruction before the block exit takes effect. */
                                    load_spreg( R_ECX, R_PR );
                                    store_spreg( R_ECX, R_NEW_PC );
                                    sh4_x86.in_delay_slot = DELAY_PC;
                                    sh4_x86.branch_taken = TRUE;
                                    if( UNTRANSLATABLE(pc+2) ) {
                                        exit_block_emu(pc+2);   // delay slot must run under emulation
                                        return 2;
                                    } else {
                                        sh4_translate_instruction(pc+2);
                                        exit_block_newpcset(pc+2);
                                        return 4;
                                    }
                                }
                                }
                                break;
                            case 0x1:
                                { /* SLEEP */
                                COUNT_INST(I_SLEEP);
                                check_priv();   // SLEEP is a privileged instruction
                                call_func0( sh4_sleep );
                                sh4_x86.tstate = TSTATE_NONE;
                                sh4_x86.in_delay_slot = DELAY_NONE;
                                return 2;
                                }
                                break;
                            case 0x2:
                                { /* RTE */
                                COUNT_INST(I_RTE);
                                if( sh4_x86.in_delay_slot ) {
                                    SLOTILLEGAL();
                                } else {
                                    check_priv();
                                    /* PC := SPC, SR := SSR (via sh4_write_sr helper) */
                                    load_spreg( R_ECX, R_SPC );
                                    store_spreg( R_ECX, R_NEW_PC );
                                    load_spreg( R_EAX, R_SSR );
                                    call_func1( sh4_write_sr, R_EAX );
                                    sh4_x86.in_delay_slot = DELAY_PC;
                                    /* SR changed: cached privilege/FPU-enable checks are stale */
                                    sh4_x86.priv_checked = FALSE;
                                    sh4_x86.fpuen_checked = FALSE;
                                    sh4_x86.tstate = TSTATE_NONE;
                                    sh4_x86.branch_taken = TRUE;
                                    if( UNTRANSLATABLE(pc+2) ) {
                                        exit_block_emu(pc+2);
                                        return 2;
                                    } else {
                                        sh4_translate_instruction(pc+2);
                                        exit_block_newpcset(pc+2);
                                        return 4;
                                    }
                                }
                                }
                                break;
                            default:
                                UNDEF();
                                break;
                        }
                        break;
   845                     case 0xC:
   846                         { /* MOV.B @(R0, Rm), Rn */
   847                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   848                         COUNT_INST(I_MOVB);
   849                         load_reg( R_EAX, 0 );
   850                         load_reg( R_ECX, Rm );
   851                         ADD_r32_r32( R_ECX, R_EAX );
   852                         MMU_TRANSLATE_READ( R_EAX )
   853                         MEM_READ_BYTE( R_EAX, R_EAX );
   854                         store_reg( R_EAX, Rn );
   855                         sh4_x86.tstate = TSTATE_NONE;
   856                         }
   857                         break;
            case 0xD:
                { /* MOV.W @(R0, Rm), Rn */
                uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
                COUNT_INST(I_MOVW);
                /* EA = R0 + Rm; 16-bit load into Rn, with alignment check */
                load_reg( R_EAX, 0 );
                load_reg( R_ECX, Rm );
                ADD_r32_r32( R_ECX, R_EAX );
                check_ralign16( R_EAX );
                MMU_TRANSLATE_READ( R_EAX );
                MEM_READ_WORD( R_EAX, R_EAX );
                store_reg( R_EAX, Rn );
                sh4_x86.tstate = TSTATE_NONE;
                }
                break;
            case 0xE:
                { /* MOV.L @(R0, Rm), Rn */
                uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
                COUNT_INST(I_MOVL);
                /* EA = R0 + Rm; 32-bit load into Rn, with alignment check */
                load_reg( R_EAX, 0 );
                load_reg( R_ECX, Rm );
                ADD_r32_r32( R_ECX, R_EAX );
                check_ralign32( R_EAX );
                MMU_TRANSLATE_READ( R_EAX );
                MEM_READ_LONG( R_EAX, R_EAX );
                store_reg( R_EAX, Rn );
                sh4_x86.tstate = TSTATE_NONE;
                }
                break;
            case 0xF:
                { /* MAC.L @Rm+, @Rn+ */
                uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
                COUNT_INST(I_MACL);
                /* Translate both source addresses up front (so any TLB miss is
                 * raised before either register is post-incremented), then read
                 * the two longwords, multiply signed 64-bit, and accumulate into
                 * MACH:MACL. */
                if( Rm == Rn ) {
                    /* Same register: reads @Rn and @Rn+4, Rn advances by 8 */
                    load_reg( R_EAX, Rm );
                    check_ralign32( R_EAX );
                    MMU_TRANSLATE_READ( R_EAX );
                    PUSH_realigned_r32( R_EAX );
                    load_reg( R_EAX, Rn );
                    ADD_imm8s_r32( 4, R_EAX );
                    MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
                    ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rn]) );
                    // Note translate twice in case of page boundaries. Maybe worth
                    // adding a page-boundary check to skip the second translation
                } else {
                    load_reg( R_EAX, Rm );
                    check_ralign32( R_EAX );
                    MMU_TRANSLATE_READ( R_EAX );
                    load_reg( R_ECX, Rn );
                    check_ralign32( R_ECX );
                    PUSH_realigned_r32( R_EAX );
                    MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
                    MOV_r32_r32( R_ECX, R_EAX );
                    ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
                    ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
                }
                MEM_READ_LONG( R_EAX, R_EAX );
                POP_r32( R_ECX );
                PUSH_r32( R_EAX );
                MEM_READ_LONG( R_ECX, R_EAX );
                POP_realigned_r32( R_ECX );
                /* 64-bit signed multiply-accumulate: EDX:EAX = EAX * ECX */
                IMUL_r32( R_ECX );
                ADD_r32_sh4r( R_EAX, R_MACL );
                ADC_r32_sh4r( R_EDX, R_MACH );
                /* If the S flag is set, saturate the accumulator to 48 bits */
                load_spreg( R_ECX, R_S );
                TEST_r32_r32(R_ECX, R_ECX);
                JE_rel8( nosat );
                call_func0( signsat48 );
                JMP_TARGET( nosat );
                sh4_x86.tstate = TSTATE_NONE;
                }
                break;
   931                     default:
   932                         UNDEF();
   933                         break;
   934                 }
   935                 break;
            case 0x1:
                { /* MOV.L Rm, @(disp, Rn) */
                uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
                COUNT_INST(I_MOVL);
                /* EA = Rn + disp (disp is the 4-bit field scaled by 4) */
                load_reg( R_EAX, Rn );
                ADD_imm32_r32( disp, R_EAX );
                check_walign32( R_EAX );
                MMU_TRANSLATE_WRITE( R_EAX );
                load_reg( R_EDX, Rm );
                MEM_WRITE_LONG( R_EAX, R_EDX );
                sh4_x86.tstate = TSTATE_NONE;
                }
                break;
   949             case 0x2:
   950                 switch( ir&0xF ) {
   951                     case 0x0:
   952                         { /* MOV.B Rm, @Rn */
   953                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   954                         COUNT_INST(I_MOVB);
   955                         load_reg( R_EAX, Rn );
   956                         MMU_TRANSLATE_WRITE( R_EAX );
   957                         load_reg( R_EDX, Rm );
   958                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   959                         sh4_x86.tstate = TSTATE_NONE;
   960                         }
   961                         break;
   962                     case 0x1:
   963                         { /* MOV.W Rm, @Rn */
   964                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   965                         COUNT_INST(I_MOVW);
   966                         load_reg( R_EAX, Rn );
   967                         check_walign16( R_EAX );
   968                         MMU_TRANSLATE_WRITE( R_EAX )
   969                         load_reg( R_EDX, Rm );
   970                         MEM_WRITE_WORD( R_EAX, R_EDX );
   971                         sh4_x86.tstate = TSTATE_NONE;
   972                         }
   973                         break;
   974                     case 0x2:
   975                         { /* MOV.L Rm, @Rn */
   976                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   977                         COUNT_INST(I_MOVL);
   978                         load_reg( R_EAX, Rn );
   979                         check_walign32(R_EAX);
   980                         MMU_TRANSLATE_WRITE( R_EAX );
   981                         load_reg( R_EDX, Rm );
   982                         MEM_WRITE_LONG( R_EAX, R_EDX );
   983                         sh4_x86.tstate = TSTATE_NONE;
   984                         }
   985                         break;
   986                     case 0x4:
   987                         { /* MOV.B Rm, @-Rn */
   988                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   989                         COUNT_INST(I_MOVB);
   990                         load_reg( R_EAX, Rn );
   991                         ADD_imm8s_r32( -1, R_EAX );
   992                         MMU_TRANSLATE_WRITE( R_EAX );
   993                         load_reg( R_EDX, Rm );
   994                         ADD_imm8s_sh4r( -1, REG_OFFSET(r[Rn]) );
   995                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   996                         sh4_x86.tstate = TSTATE_NONE;
   997                         }
   998                         break;
   999                     case 0x5:
  1000                         { /* MOV.W Rm, @-Rn */
  1001                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1002                         COUNT_INST(I_MOVW);
  1003                         load_reg( R_EAX, Rn );
  1004                         ADD_imm8s_r32( -2, R_EAX );
  1005                         check_walign16( R_EAX );
  1006                         MMU_TRANSLATE_WRITE( R_EAX );
  1007                         load_reg( R_EDX, Rm );
  1008                         ADD_imm8s_sh4r( -2, REG_OFFSET(r[Rn]) );
  1009                         MEM_WRITE_WORD( R_EAX, R_EDX );
  1010                         sh4_x86.tstate = TSTATE_NONE;
  1012                         break;
  1013                     case 0x6:
  1014                         { /* MOV.L Rm, @-Rn */
  1015                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1016                         COUNT_INST(I_MOVL);
  1017                         load_reg( R_EAX, Rn );
  1018                         ADD_imm8s_r32( -4, R_EAX );
  1019                         check_walign32( R_EAX );
  1020                         MMU_TRANSLATE_WRITE( R_EAX );
  1021                         load_reg( R_EDX, Rm );
  1022                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1023                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1024                         sh4_x86.tstate = TSTATE_NONE;
  1026                         break;
  1027                     case 0x7:
  1028                         { /* DIV0S Rm, Rn */
  1029                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1030                         COUNT_INST(I_DIV0S);
  1031                         load_reg( R_EAX, Rm );
  1032                         load_reg( R_ECX, Rn );
  1033                         SHR_imm8_r32( 31, R_EAX );
  1034                         SHR_imm8_r32( 31, R_ECX );
  1035                         store_spreg( R_EAX, R_M );
  1036                         store_spreg( R_ECX, R_Q );
  1037                         CMP_r32_r32( R_EAX, R_ECX );
  1038                         SETNE_t();
  1039                         sh4_x86.tstate = TSTATE_NE;
  1041                         break;
  1042                     case 0x8:
  1043                         { /* TST Rm, Rn */
  1044                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1045                         COUNT_INST(I_TST);
  1046                         load_reg( R_EAX, Rm );
  1047                         load_reg( R_ECX, Rn );
  1048                         TEST_r32_r32( R_EAX, R_ECX );
  1049                         SETE_t();
  1050                         sh4_x86.tstate = TSTATE_E;
  1052                         break;
  1053                     case 0x9:
  1054                         { /* AND Rm, Rn */
  1055                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1056                         COUNT_INST(I_AND);
  1057                         load_reg( R_EAX, Rm );
  1058                         load_reg( R_ECX, Rn );
  1059                         AND_r32_r32( R_EAX, R_ECX );
  1060                         store_reg( R_ECX, Rn );
  1061                         sh4_x86.tstate = TSTATE_NONE;
  1063                         break;
  1064                     case 0xA:
  1065                         { /* XOR Rm, Rn */
  1066                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1067                         COUNT_INST(I_XOR);
  1068                         load_reg( R_EAX, Rm );
  1069                         load_reg( R_ECX, Rn );
  1070                         XOR_r32_r32( R_EAX, R_ECX );
  1071                         store_reg( R_ECX, Rn );
  1072                         sh4_x86.tstate = TSTATE_NONE;
  1074                         break;
  1075                     case 0xB:
  1076                         { /* OR Rm, Rn */
  1077                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1078                         COUNT_INST(I_OR);
  1079                         load_reg( R_EAX, Rm );
  1080                         load_reg( R_ECX, Rn );
  1081                         OR_r32_r32( R_EAX, R_ECX );
  1082                         store_reg( R_ECX, Rn );
  1083                         sh4_x86.tstate = TSTATE_NONE;
  1085                         break;
  1086                     case 0xC:
  1087                         { /* CMP/STR Rm, Rn */
  1088                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1089                         COUNT_INST(I_CMPSTR);
  1090                         load_reg( R_EAX, Rm );
  1091                         load_reg( R_ECX, Rn );
  1092                         XOR_r32_r32( R_ECX, R_EAX );
  1093                         TEST_r8_r8( R_AL, R_AL );
  1094                         JE_rel8(target1);
  1095                         TEST_r8_r8( R_AH, R_AH );
  1096                         JE_rel8(target2);
  1097                         SHR_imm8_r32( 16, R_EAX );
  1098                         TEST_r8_r8( R_AL, R_AL );
  1099                         JE_rel8(target3);
  1100                         TEST_r8_r8( R_AH, R_AH );
  1101                         JMP_TARGET(target1);
  1102                         JMP_TARGET(target2);
  1103                         JMP_TARGET(target3);
  1104                         SETE_t();
  1105                         sh4_x86.tstate = TSTATE_E;
  1107                         break;
  1108                     case 0xD:
  1109                         { /* XTRCT Rm, Rn */
  1110                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1111                         COUNT_INST(I_XTRCT);
  1112                         load_reg( R_EAX, Rm );
  1113                         load_reg( R_ECX, Rn );
  1114                         SHL_imm8_r32( 16, R_EAX );
  1115                         SHR_imm8_r32( 16, R_ECX );
  1116                         OR_r32_r32( R_EAX, R_ECX );
  1117                         store_reg( R_ECX, Rn );
  1118                         sh4_x86.tstate = TSTATE_NONE;
  1120                         break;
  1121                     case 0xE:
  1122                         { /* MULU.W Rm, Rn */
  1123                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1124                         COUNT_INST(I_MULUW);
  1125                         load_reg16u( R_EAX, Rm );
  1126                         load_reg16u( R_ECX, Rn );
  1127                         MUL_r32( R_ECX );
  1128                         store_spreg( R_EAX, R_MACL );
  1129                         sh4_x86.tstate = TSTATE_NONE;
  1131                         break;
  1132                     case 0xF:
  1133                         { /* MULS.W Rm, Rn */
  1134                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1135                         COUNT_INST(I_MULSW);
  1136                         load_reg16s( R_EAX, Rm );
  1137                         load_reg16s( R_ECX, Rn );
  1138                         MUL_r32( R_ECX );
  1139                         store_spreg( R_EAX, R_MACL );
  1140                         sh4_x86.tstate = TSTATE_NONE;
  1142                         break;
  1143                     default:
  1144                         UNDEF();
  1145                         break;
  1147                 break;
  1148             case 0x3:
  1149                 switch( ir&0xF ) {
  1150                     case 0x0:
  1151                         { /* CMP/EQ Rm, Rn */
  1152                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1153                         COUNT_INST(I_CMPEQ);
  1154                         load_reg( R_EAX, Rm );
  1155                         load_reg( R_ECX, Rn );
  1156                         CMP_r32_r32( R_EAX, R_ECX );
  1157                         SETE_t();
  1158                         sh4_x86.tstate = TSTATE_E;
  1160                         break;
  1161                     case 0x2:
  1162                         { /* CMP/HS Rm, Rn */
  1163                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1164                         COUNT_INST(I_CMPHS);
  1165                         load_reg( R_EAX, Rm );
  1166                         load_reg( R_ECX, Rn );
  1167                         CMP_r32_r32( R_EAX, R_ECX );
  1168                         SETAE_t();
  1169                         sh4_x86.tstate = TSTATE_AE;
  1171                         break;
  1172                     case 0x3:
  1173                         { /* CMP/GE Rm, Rn */
  1174                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1175                         COUNT_INST(I_CMPGE);
  1176                         load_reg( R_EAX, Rm );
  1177                         load_reg( R_ECX, Rn );
  1178                         CMP_r32_r32( R_EAX, R_ECX );
  1179                         SETGE_t();
  1180                         sh4_x86.tstate = TSTATE_GE;
  1182                         break;
  1183                     case 0x4:
  1184                         { /* DIV1 Rm, Rn */
  1185                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1186                         COUNT_INST(I_DIV1);
  1187                         load_spreg( R_ECX, R_M );
  1188                         load_reg( R_EAX, Rn );
  1189                         if( sh4_x86.tstate != TSTATE_C ) {
  1190                     	LDC_t();
  1192                         RCL1_r32( R_EAX );
  1193                         SETC_r8( R_DL ); // Q'
  1194                         CMP_sh4r_r32( R_Q, R_ECX );
  1195                         JE_rel8(mqequal);
  1196                         ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1197                         JMP_rel8(end);
  1198                         JMP_TARGET(mqequal);
  1199                         SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1200                         JMP_TARGET(end);
  1201                         store_reg( R_EAX, Rn ); // Done with Rn now
  1202                         SETC_r8(R_AL); // tmp1
  1203                         XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
  1204                         XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
  1205                         store_spreg( R_ECX, R_Q );
  1206                         XOR_imm8s_r32( 1, R_AL );   // T = !Q'
  1207                         MOVZX_r8_r32( R_AL, R_EAX );
  1208                         store_spreg( R_EAX, R_T );
  1209                         sh4_x86.tstate = TSTATE_NONE;
  1211                         break;
  1212                     case 0x5:
  1213                         { /* DMULU.L Rm, Rn */
  1214                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1215                         COUNT_INST(I_DMULU);
  1216                         load_reg( R_EAX, Rm );
  1217                         load_reg( R_ECX, Rn );
  1218                         MUL_r32(R_ECX);
  1219                         store_spreg( R_EDX, R_MACH );
  1220                         store_spreg( R_EAX, R_MACL );    
  1221                         sh4_x86.tstate = TSTATE_NONE;
  1223                         break;
  1224                     case 0x6:
  1225                         { /* CMP/HI Rm, Rn */
  1226                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1227                         COUNT_INST(I_CMPHI);
  1228                         load_reg( R_EAX, Rm );
  1229                         load_reg( R_ECX, Rn );
  1230                         CMP_r32_r32( R_EAX, R_ECX );
  1231                         SETA_t();
  1232                         sh4_x86.tstate = TSTATE_A;
  1234                         break;
  1235                     case 0x7:
  1236                         { /* CMP/GT Rm, Rn */
  1237                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1238                         COUNT_INST(I_CMPGT);
  1239                         load_reg( R_EAX, Rm );
  1240                         load_reg( R_ECX, Rn );
  1241                         CMP_r32_r32( R_EAX, R_ECX );
  1242                         SETG_t();
  1243                         sh4_x86.tstate = TSTATE_G;
  1245                         break;
  1246                     case 0x8:
  1247                         { /* SUB Rm, Rn */
  1248                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1249                         COUNT_INST(I_SUB);
  1250                         load_reg( R_EAX, Rm );
  1251                         load_reg( R_ECX, Rn );
  1252                         SUB_r32_r32( R_EAX, R_ECX );
  1253                         store_reg( R_ECX, Rn );
  1254                         sh4_x86.tstate = TSTATE_NONE;
  1256                         break;
  1257                     case 0xA:
                        /* SUBC Rm, Rn: Rn = Rn - Rm - T, T = borrow out.  Mapped onto
                         * x86 SBB with the SH4 T bit carried in the x86 CF; LDC_t()
                         * loads T into CF only when the cached condition state
                         * (sh4_x86.tstate) says CF does not already mirror T.
                         * NOTE(review): this annotate-view listing elides some physical
                         * lines (numbering jumps, e.g. 1265, 1270) -- the closing braces
                         * of the if/case bodies live on those elided lines in the real
                         * file; the code here is NOT missing braces.
                         */
  1258                         { /* SUBC Rm, Rn */
  1259                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1260                         COUNT_INST(I_SUBC);
  1261                         load_reg( R_EAX, Rm );
  1262                         load_reg( R_ECX, Rn );
  1263                         if( sh4_x86.tstate != TSTATE_C ) {
  1264                     	LDC_t();
  1266                         SBB_r32_r32( R_EAX, R_ECX );
  1267                         store_reg( R_ECX, Rn );
  1268                         SETC_t();
  1269                         sh4_x86.tstate = TSTATE_C;
  1271                         break;
  1272                     case 0xB:
                        /* SUBV Rm, Rn: subtract, T = signed overflow (x86 OF). */
  1273                         { /* SUBV Rm, Rn */
  1274                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1275                         COUNT_INST(I_SUBV);
  1276                         load_reg( R_EAX, Rm );
  1277                         load_reg( R_ECX, Rn );
  1278                         SUB_r32_r32( R_EAX, R_ECX );
  1279                         store_reg( R_ECX, Rn );
  1280                         SETO_t();
  1281                         sh4_x86.tstate = TSTATE_O;
  1283                         break;
  1284                     case 0xC:
  1285                         { /* ADD Rm, Rn */
  1286                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1287                         COUNT_INST(I_ADD);
  1288                         load_reg( R_EAX, Rm );
  1289                         load_reg( R_ECX, Rn );
  1290                         ADD_r32_r32( R_EAX, R_ECX );
  1291                         store_reg( R_ECX, Rn );
                        /* ADD does not touch T, so no x86 flag usefully mirrors it. */
  1292                         sh4_x86.tstate = TSTATE_NONE;
  1294                         break;
  1295                     case 0xD:
                        /* DMULS.L: 32x32 -> 64-bit signed multiply.  x86 IMUL leaves
                         * the product in EDX:EAX, stored to MACH:MACL respectively. */
  1296                         { /* DMULS.L Rm, Rn */
  1297                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1298                         COUNT_INST(I_DMULS);
  1299                         load_reg( R_EAX, Rm );
  1300                         load_reg( R_ECX, Rn );
  1301                         IMUL_r32(R_ECX);
  1302                         store_spreg( R_EDX, R_MACH );
  1303                         store_spreg( R_EAX, R_MACL );
  1304                         sh4_x86.tstate = TSTATE_NONE;
  1306                         break;
  1307                     case 0xE:
                        /* ADDC Rm, Rn: Rn = Rn + Rm + T, T = carry out (x86 ADC). */
  1308                         { /* ADDC Rm, Rn */
  1309                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1310                         COUNT_INST(I_ADDC);
  1311                         if( sh4_x86.tstate != TSTATE_C ) {
  1312                     	LDC_t();
  1314                         load_reg( R_EAX, Rm );
  1315                         load_reg( R_ECX, Rn );
  1316                         ADC_r32_r32( R_EAX, R_ECX );
  1317                         store_reg( R_ECX, Rn );
  1318                         SETC_t();
  1319                         sh4_x86.tstate = TSTATE_C;
  1321                         break;
  1322                     case 0xF:
                        /* ADDV Rm, Rn: add, T = signed overflow (x86 OF). */
  1323                         { /* ADDV Rm, Rn */
  1324                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1325                         COUNT_INST(I_ADDV);
  1326                         load_reg( R_EAX, Rm );
  1327                         load_reg( R_ECX, Rn );
  1328                         ADD_r32_r32( R_EAX, R_ECX );
  1329                         store_reg( R_ECX, Rn );
  1330                         SETO_t();
  1331                         sh4_x86.tstate = TSTATE_O;
  1333                         break;
  1334                     default:
  1335                         UNDEF();
  1336                         break;
  1338                 break;
  1339             case 0x4:
                /* Opcode group 0x4: single-register shifts/rotates, DT, CMP/PZ-PL,
                 * and the @-Rn / @Rm+ system-register load/store forms.
                 * Sub-decoded first on the low nibble of ir, then on bits 7..4. */
  1340                 switch( ir&0xF ) {
  1341                     case 0x0:
  1342                         switch( (ir&0xF0) >> 4 ) {
  1343                             case 0x0:
                                /* SHLL Rn: logical shift left by 1; bit 31 -> T via CF. */
  1344                                 { /* SHLL Rn */
  1345                                 uint32_t Rn = ((ir>>8)&0xF); 
  1346                                 COUNT_INST(I_SHLL);
  1347                                 load_reg( R_EAX, Rn );
  1348                                 SHL1_r32( R_EAX );
  1349                                 SETC_t();
  1350                                 store_reg( R_EAX, Rn );
  1351                                 sh4_x86.tstate = TSTATE_C;
  1353                                 break;
  1354                             case 0x1:
                                /* DT Rn: decrement Rn, T = (result == 0) via x86 ZF. */
  1355                                 { /* DT Rn */
  1356                                 uint32_t Rn = ((ir>>8)&0xF); 
  1357                                 COUNT_INST(I_DT);
  1358                                 load_reg( R_EAX, Rn );
  1359                                 ADD_imm8s_r32( -1, R_EAX );
  1360                                 store_reg( R_EAX, Rn );
  1361                                 SETE_t();
  1362                                 sh4_x86.tstate = TSTATE_E;
  1364                                 break;
  1365                             case 0x2:
                                /* SHAL Rn: arithmetic shift left -- same bit result as
                                 * SHLL, hence the identical code sequence. */
  1366                                 { /* SHAL Rn */
  1367                                 uint32_t Rn = ((ir>>8)&0xF); 
  1368                                 COUNT_INST(I_SHAL);
  1369                                 load_reg( R_EAX, Rn );
  1370                                 SHL1_r32( R_EAX );
  1371                                 SETC_t();
  1372                                 store_reg( R_EAX, Rn );
  1373                                 sh4_x86.tstate = TSTATE_C;
  1375                                 break;
  1376                             default:
  1377                                 UNDEF();
  1378                                 break;
  1380                         break;
  1381                     case 0x1:
  1382                         switch( (ir&0xF0) >> 4 ) {
  1383                             case 0x0:
                                /* SHLR Rn: logical shift right by 1; bit 0 -> T via CF. */
  1384                                 { /* SHLR Rn */
  1385                                 uint32_t Rn = ((ir>>8)&0xF); 
  1386                                 COUNT_INST(I_SHLR);
  1387                                 load_reg( R_EAX, Rn );
  1388                                 SHR1_r32( R_EAX );
  1389                                 SETC_t();
  1390                                 store_reg( R_EAX, Rn );
  1391                                 sh4_x86.tstate = TSTATE_C;
  1393                                 break;
  1394                             case 0x1:
                                /* CMP/PZ Rn: T = (Rn >= 0), signed compare against 0. */
  1395                                 { /* CMP/PZ Rn */
  1396                                 uint32_t Rn = ((ir>>8)&0xF); 
  1397                                 COUNT_INST(I_CMPPZ);
  1398                                 load_reg( R_EAX, Rn );
  1399                                 CMP_imm8s_r32( 0, R_EAX );
  1400                                 SETGE_t();
  1401                                 sh4_x86.tstate = TSTATE_GE;
  1403                                 break;
  1404                             case 0x2:
                                /* SHAR Rn: arithmetic (sign-preserving) shift right;
                                 * bit 0 -> T via CF. */
  1405                                 { /* SHAR Rn */
  1406                                 uint32_t Rn = ((ir>>8)&0xF); 
  1407                                 COUNT_INST(I_SHAR);
  1408                                 load_reg( R_EAX, Rn );
  1409                                 SAR1_r32( R_EAX );
  1410                                 SETC_t();
  1411                                 store_reg( R_EAX, Rn );
  1412                                 sh4_x86.tstate = TSTATE_C;
  1414                                 break;
  1415                             default:
  1416                                 UNDEF();
  1417                                 break;
  1419                         break;
  1420                     case 0x2:
                        /* Pre-decrement stores: STS.L/STC.L <reg>, @-Rn.  Common shape:
                         *   1. compute Rn-4 in EAX and MMU-translate it for write --
                         *      this can raise an exception, so it is done FIRST;
                         *   2. only after translation succeeds, commit the decrement
                         *      to the in-memory Rn (ADD_imm8s_sh4r);
                         *   3. perform the 32-bit write of the source register.
                         * Thus Rn is unchanged if the address faults. */
  1421                         switch( (ir&0xF0) >> 4 ) {
  1422                             case 0x0:
  1423                                 { /* STS.L MACH, @-Rn */
  1424                                 uint32_t Rn = ((ir>>8)&0xF); 
  1425                                 COUNT_INST(I_STSM);
  1426                                 load_reg( R_EAX, Rn );
  1427                                 check_walign32( R_EAX );
  1428                                 ADD_imm8s_r32( -4, R_EAX );
  1429                                 MMU_TRANSLATE_WRITE( R_EAX );
  1430                                 load_spreg( R_EDX, R_MACH );
  1431                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1432                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1433                                 sh4_x86.tstate = TSTATE_NONE;
  1435                                 break;
  1436                             case 0x1:
  1437                                 { /* STS.L MACL, @-Rn */
  1438                                 uint32_t Rn = ((ir>>8)&0xF); 
  1439                                 COUNT_INST(I_STSM);
  1440                                 load_reg( R_EAX, Rn );
  1441                                 check_walign32( R_EAX );
  1442                                 ADD_imm8s_r32( -4, R_EAX );
  1443                                 MMU_TRANSLATE_WRITE( R_EAX );
  1444                                 load_spreg( R_EDX, R_MACL );
  1445                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1446                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1447                                 sh4_x86.tstate = TSTATE_NONE;
  1449                                 break;
  1450                             case 0x2:
  1451                                 { /* STS.L PR, @-Rn */
  1452                                 uint32_t Rn = ((ir>>8)&0xF); 
  1453                                 COUNT_INST(I_STSM);
  1454                                 load_reg( R_EAX, Rn );
  1455                                 check_walign32( R_EAX );
  1456                                 ADD_imm8s_r32( -4, R_EAX );
  1457                                 MMU_TRANSLATE_WRITE( R_EAX );
  1458                                 load_spreg( R_EDX, R_PR );
  1459                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1460                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1461                                 sh4_x86.tstate = TSTATE_NONE;
  1463                                 break;
  1464                             case 0x3:
                                /* SGR is privileged: emit the privilege check first. */
  1465                                 { /* STC.L SGR, @-Rn */
  1466                                 uint32_t Rn = ((ir>>8)&0xF); 
  1467                                 COUNT_INST(I_STCM);
  1468                                 check_priv();
  1469                                 load_reg( R_EAX, Rn );
  1470                                 check_walign32( R_EAX );
  1471                                 ADD_imm8s_r32( -4, R_EAX );
  1472                                 MMU_TRANSLATE_WRITE( R_EAX );
  1473                                 load_spreg( R_EDX, R_SGR );
  1474                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1475                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1476                                 sh4_x86.tstate = TSTATE_NONE;
  1478                                 break;
  1479                             case 0x5:
                                /* FPU registers: emit the FPU-enabled check first. */
  1480                                 { /* STS.L FPUL, @-Rn */
  1481                                 uint32_t Rn = ((ir>>8)&0xF); 
  1482                                 COUNT_INST(I_STSM);
  1483                                 check_fpuen();
  1484                                 load_reg( R_EAX, Rn );
  1485                                 check_walign32( R_EAX );
  1486                                 ADD_imm8s_r32( -4, R_EAX );
  1487                                 MMU_TRANSLATE_WRITE( R_EAX );
  1488                                 load_spreg( R_EDX, R_FPUL );
  1489                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1490                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1491                                 sh4_x86.tstate = TSTATE_NONE;
  1493                                 break;
  1494                             case 0x6:
  1495                                 { /* STS.L FPSCR, @-Rn */
  1496                                 uint32_t Rn = ((ir>>8)&0xF); 
  1497                                 COUNT_INST(I_STSFPSCRM);
  1498                                 check_fpuen();
  1499                                 load_reg( R_EAX, Rn );
  1500                                 check_walign32( R_EAX );
  1501                                 ADD_imm8s_r32( -4, R_EAX );
  1502                                 MMU_TRANSLATE_WRITE( R_EAX );
  1503                                 load_spreg( R_EDX, R_FPSCR );
  1504                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1505                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1506                                 sh4_x86.tstate = TSTATE_NONE;
  1508                                 break;
  1509                             case 0xF:
  1510                                 { /* STC.L DBR, @-Rn */
  1511                                 uint32_t Rn = ((ir>>8)&0xF); 
  1512                                 COUNT_INST(I_STCM);
  1513                                 check_priv();
  1514                                 load_reg( R_EAX, Rn );
  1515                                 check_walign32( R_EAX );
  1516                                 ADD_imm8s_r32( -4, R_EAX );
  1517                                 MMU_TRANSLATE_WRITE( R_EAX );
  1518                                 load_spreg( R_EDX, R_DBR );
  1519                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1520                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1521                                 sh4_x86.tstate = TSTATE_NONE;
  1523                                 break;
  1524                             default:
  1525                                 UNDEF();
  1526                                 break;
  1528                         break;
  1529                     case 0x3:
                        /* STC.L <control-reg>, @-Rn.  Bit 7 of ir selects between the
                         * fixed control registers (SR/GBR/VBR/SSR/SPC, bits 6..4) and
                         * the banked-register form (Rm_BANK, bits 6..4 = bank index). */
  1530                         switch( (ir&0x80) >> 7 ) {
  1531                             case 0x0:
  1532                                 switch( (ir&0x70) >> 4 ) {
  1533                                     case 0x0:
                                        /* SR has no single backing slot: its value is
                                         * assembled by sh4_read_sr().  The translated
                                         * address is pushed before the call and popped
                                         * into ECX afterwards; the call's return value
                                         * (EAX) is the SR word, hence the
                                         * MEM_WRITE_LONG( R_ECX, R_EAX ) operand order. */
  1534                                         { /* STC.L SR, @-Rn */
  1535                                         uint32_t Rn = ((ir>>8)&0xF); 
  1536                                         COUNT_INST(I_STCSRM);
  1537                                         check_priv();
  1538                                         load_reg( R_EAX, Rn );
  1539                                         check_walign32( R_EAX );
  1540                                         ADD_imm8s_r32( -4, R_EAX );
  1541                                         MMU_TRANSLATE_WRITE( R_EAX );
  1542                                         PUSH_realigned_r32( R_EAX );
  1543                                         call_func0( sh4_read_sr );
  1544                                         POP_realigned_r32( R_ECX );
  1545                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1546                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1547                                         sh4_x86.tstate = TSTATE_NONE;
  1549                                         break;
  1550                                     case 0x1:
                                        /* GBR is user-accessible: no check_priv() here. */
  1551                                         { /* STC.L GBR, @-Rn */
  1552                                         uint32_t Rn = ((ir>>8)&0xF); 
  1553                                         COUNT_INST(I_STCM);
  1554                                         load_reg( R_EAX, Rn );
  1555                                         check_walign32( R_EAX );
  1556                                         ADD_imm8s_r32( -4, R_EAX );
  1557                                         MMU_TRANSLATE_WRITE( R_EAX );
  1558                                         load_spreg( R_EDX, R_GBR );
  1559                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1560                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1561                                         sh4_x86.tstate = TSTATE_NONE;
  1563                                         break;
  1564                                     case 0x2:
  1565                                         { /* STC.L VBR, @-Rn */
  1566                                         uint32_t Rn = ((ir>>8)&0xF); 
  1567                                         COUNT_INST(I_STCM);
  1568                                         check_priv();
  1569                                         load_reg( R_EAX, Rn );
  1570                                         check_walign32( R_EAX );
  1571                                         ADD_imm8s_r32( -4, R_EAX );
  1572                                         MMU_TRANSLATE_WRITE( R_EAX );
  1573                                         load_spreg( R_EDX, R_VBR );
  1574                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1575                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1576                                         sh4_x86.tstate = TSTATE_NONE;
  1578                                         break;
  1579                                     case 0x3:
  1580                                         { /* STC.L SSR, @-Rn */
  1581                                         uint32_t Rn = ((ir>>8)&0xF); 
  1582                                         COUNT_INST(I_STCM);
  1583                                         check_priv();
  1584                                         load_reg( R_EAX, Rn );
  1585                                         check_walign32( R_EAX );
  1586                                         ADD_imm8s_r32( -4, R_EAX );
  1587                                         MMU_TRANSLATE_WRITE( R_EAX );
  1588                                         load_spreg( R_EDX, R_SSR );
  1589                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1590                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1591                                         sh4_x86.tstate = TSTATE_NONE;
  1593                                         break;
  1594                                     case 0x4:
  1595                                         { /* STC.L SPC, @-Rn */
  1596                                         uint32_t Rn = ((ir>>8)&0xF); 
  1597                                         COUNT_INST(I_STCM);
  1598                                         check_priv();
  1599                                         load_reg( R_EAX, Rn );
  1600                                         check_walign32( R_EAX );
  1601                                         ADD_imm8s_r32( -4, R_EAX );
  1602                                         MMU_TRANSLATE_WRITE( R_EAX );
  1603                                         load_spreg( R_EDX, R_SPC );
  1604                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1605                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1606                                         sh4_x86.tstate = TSTATE_NONE;
  1608                                         break;
  1609                                     default:
  1610                                         UNDEF();
  1611                                         break;
  1613                                 break;
  1614                             case 0x1:
                                /* Banked-register store: source is r_bank[Rm_BANK]. */
  1615                                 { /* STC.L Rm_BANK, @-Rn */
  1616                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
  1617                                 COUNT_INST(I_STCM);
  1618                                 check_priv();
  1619                                 load_reg( R_EAX, Rn );
  1620                                 check_walign32( R_EAX );
  1621                                 ADD_imm8s_r32( -4, R_EAX );
  1622                                 MMU_TRANSLATE_WRITE( R_EAX );
  1623                                 load_spreg( R_EDX, REG_OFFSET(r_bank[Rm_BANK]) );
  1624                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1625                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1626                                 sh4_x86.tstate = TSTATE_NONE;
  1628                                 break;
  1630                         break;
  1631                     case 0x4:
  1632                         switch( (ir&0xF0) >> 4 ) {
  1633                             case 0x0:
                                /* ROTL Rn: rotate left by 1; bit rotated out -> T. */
  1634                                 { /* ROTL Rn */
  1635                                 uint32_t Rn = ((ir>>8)&0xF); 
  1636                                 COUNT_INST(I_ROTL);
  1637                                 load_reg( R_EAX, Rn );
  1638                                 ROL1_r32( R_EAX );
  1639                                 store_reg( R_EAX, Rn );
  1640                                 SETC_t();
  1641                                 sh4_x86.tstate = TSTATE_C;
  1643                                 break;
  1644                             case 0x2:
                                /* ROTCL Rn: 33-bit rotate left through T, using x86 RCL
                                 * with T held in CF.  LDC_t() reloads CF from T only
                                 * when the cached tstate says CF is stale. */
  1645                                 { /* ROTCL Rn */
  1646                                 uint32_t Rn = ((ir>>8)&0xF); 
  1647                                 COUNT_INST(I_ROTCL);
  1648                                 load_reg( R_EAX, Rn );
  1649                                 if( sh4_x86.tstate != TSTATE_C ) {
  1650                             	LDC_t();
  1652                                 RCL1_r32( R_EAX );
  1653                                 store_reg( R_EAX, Rn );
  1654                                 SETC_t();
  1655                                 sh4_x86.tstate = TSTATE_C;
  1657                                 break;
  1658                             default:
  1659                                 UNDEF();
  1660                                 break;
  1662                         break;
  1663                     case 0x5:
  1664                         switch( (ir&0xF0) >> 4 ) {
  1665                             case 0x0:
                                /* ROTR Rn: rotate right by 1; bit rotated out -> T. */
  1666                                 { /* ROTR Rn */
  1667                                 uint32_t Rn = ((ir>>8)&0xF); 
  1668                                 COUNT_INST(I_ROTR);
  1669                                 load_reg( R_EAX, Rn );
  1670                                 ROR1_r32( R_EAX );
  1671                                 store_reg( R_EAX, Rn );
  1672                                 SETC_t();
  1673                                 sh4_x86.tstate = TSTATE_C;
  1675                                 break;
  1676                             case 0x1:
                                /* CMP/PL Rn: T = (Rn > 0), strictly-positive signed test. */
  1677                                 { /* CMP/PL Rn */
  1678                                 uint32_t Rn = ((ir>>8)&0xF); 
  1679                                 COUNT_INST(I_CMPPL);
  1680                                 load_reg( R_EAX, Rn );
  1681                                 CMP_imm8s_r32( 0, R_EAX );
  1682                                 SETG_t();
  1683                                 sh4_x86.tstate = TSTATE_G;
  1685                                 break;
  1686                             case 0x2:
                                /* ROTCR Rn: 33-bit rotate right through T (x86 RCR),
                                 * same T-in-CF caching scheme as ROTCL. */
  1687                                 { /* ROTCR Rn */
  1688                                 uint32_t Rn = ((ir>>8)&0xF); 
  1689                                 COUNT_INST(I_ROTCR);
  1690                                 load_reg( R_EAX, Rn );
  1691                                 if( sh4_x86.tstate != TSTATE_C ) {
  1692                             	LDC_t();
  1694                                 RCR1_r32( R_EAX );
  1695                                 store_reg( R_EAX, Rn );
  1696                                 SETC_t();
  1697                                 sh4_x86.tstate = TSTATE_C;
  1699                                 break;
  1700                             default:
  1701                                 UNDEF();
  1702                                 break;
  1704                         break;
  1705                     case 0x6:
                        /* Post-increment loads: LDS.L/LDC.L @Rm+, <reg>.  Common shape:
                         *   1. MMU-translate Rm for read (may raise, so done first);
                         *   2. after translation succeeds, commit Rm += 4 in memory;
                         *   3. read the long and store it to the destination register.
                         * So Rm is unchanged if the address faults. */
  1706                         switch( (ir&0xF0) >> 4 ) {
  1707                             case 0x0:
  1708                                 { /* LDS.L @Rm+, MACH */
  1709                                 uint32_t Rm = ((ir>>8)&0xF); 
  1710                                 COUNT_INST(I_LDSM);
  1711                                 load_reg( R_EAX, Rm );
  1712                                 check_ralign32( R_EAX );
  1713                                 MMU_TRANSLATE_READ( R_EAX );
  1714                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1715                                 MEM_READ_LONG( R_EAX, R_EAX );
  1716                                 store_spreg( R_EAX, R_MACH );
  1717                                 sh4_x86.tstate = TSTATE_NONE;
  1719                                 break;
  1720                             case 0x1:
  1721                                 { /* LDS.L @Rm+, MACL */
  1722                                 uint32_t Rm = ((ir>>8)&0xF); 
  1723                                 COUNT_INST(I_LDSM);
  1724                                 load_reg( R_EAX, Rm );
  1725                                 check_ralign32( R_EAX );
  1726                                 MMU_TRANSLATE_READ( R_EAX );
  1727                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1728                                 MEM_READ_LONG( R_EAX, R_EAX );
  1729                                 store_spreg( R_EAX, R_MACL );
  1730                                 sh4_x86.tstate = TSTATE_NONE;
  1732                                 break;
  1733                             case 0x2:
  1734                                 { /* LDS.L @Rm+, PR */
  1735                                 uint32_t Rm = ((ir>>8)&0xF); 
  1736                                 COUNT_INST(I_LDSM);
  1737                                 load_reg( R_EAX, Rm );
  1738                                 check_ralign32( R_EAX );
  1739                                 MMU_TRANSLATE_READ( R_EAX );
  1740                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1741                                 MEM_READ_LONG( R_EAX, R_EAX );
  1742                                 store_spreg( R_EAX, R_PR );
  1743                                 sh4_x86.tstate = TSTATE_NONE;
  1745                                 break;
  1746                             case 0x3:
                                /* SGR is privileged: privilege check emitted first. */
  1747                                 { /* LDC.L @Rm+, SGR */
  1748                                 uint32_t Rm = ((ir>>8)&0xF); 
  1749                                 COUNT_INST(I_LDCM);
  1750                                 check_priv();
  1751                                 load_reg( R_EAX, Rm );
  1752                                 check_ralign32( R_EAX );
  1753                                 MMU_TRANSLATE_READ( R_EAX );
  1754                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1755                                 MEM_READ_LONG( R_EAX, R_EAX );
  1756                                 store_spreg( R_EAX, R_SGR );
  1757                                 sh4_x86.tstate = TSTATE_NONE;
  1759                                 break;
  1760                             case 0x5:
  1761                                 { /* LDS.L @Rm+, FPUL */
  1762                                 uint32_t Rm = ((ir>>8)&0xF); 
  1763                                 COUNT_INST(I_LDSM);
  1764                                 check_fpuen();
  1765                                 load_reg( R_EAX, Rm );
  1766                                 check_ralign32( R_EAX );
  1767                                 MMU_TRANSLATE_READ( R_EAX );
  1768                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1769                                 MEM_READ_LONG( R_EAX, R_EAX );
  1770                                 store_spreg( R_EAX, R_FPUL );
  1771                                 sh4_x86.tstate = TSTATE_NONE;
  1773                                 break;
  1774                             case 0x6:
                                /* FPSCR is not stored directly: it goes through
                                 * sh4_write_fpscr(), which applies the new mode bits. */
  1775                                 { /* LDS.L @Rm+, FPSCR */
  1776                                 uint32_t Rm = ((ir>>8)&0xF); 
  1777                                 COUNT_INST(I_LDSFPSCRM);
  1778                                 check_fpuen();
  1779                                 load_reg( R_EAX, Rm );
  1780                                 check_ralign32( R_EAX );
  1781                                 MMU_TRANSLATE_READ( R_EAX );
  1782                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1783                                 MEM_READ_LONG( R_EAX, R_EAX );
  1784                                 call_func1( sh4_write_fpscr, R_EAX );
  1785                                 sh4_x86.tstate = TSTATE_NONE;
  1787                                 break;
  1788                             case 0xF:
  1789                                 { /* LDC.L @Rm+, DBR */
  1790                                 uint32_t Rm = ((ir>>8)&0xF); 
  1791                                 COUNT_INST(I_LDCM);
  1792                                 check_priv();
  1793                                 load_reg( R_EAX, Rm );
  1794                                 check_ralign32( R_EAX );
  1795                                 MMU_TRANSLATE_READ( R_EAX );
  1796                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1797                                 MEM_READ_LONG( R_EAX, R_EAX );
  1798                                 store_spreg( R_EAX, R_DBR );
  1799                                 sh4_x86.tstate = TSTATE_NONE;
  1801                                 break;
  1802                             default:
  1803                                 UNDEF();
  1804                                 break;
  1806                         break;
  1807                     case 0x7:
                        /* LDC.L @Rm+, <control-reg>.  Bit 7 of ir selects fixed control
                         * registers (bits 6..4) vs the banked form (Rn_BANK). */
  1808                         switch( (ir&0x80) >> 7 ) {
  1809                             case 0x0:
  1810                                 switch( (ir&0x70) >> 4 ) {
  1811                                     case 0x0:
                                        /* LDC.L @Rm+, SR is illegal in a delay slot
                                         * (SLOTILLEGAL path).  Writing SR goes through
                                         * sh4_write_sr() and can change processor mode,
                                         * so the translator's cached privilege and
                                         * FPU-enable checks are invalidated afterwards. */
  1812                                         { /* LDC.L @Rm+, SR */
  1813                                         uint32_t Rm = ((ir>>8)&0xF); 
  1814                                         COUNT_INST(I_LDCSRM);
  1815                                         if( sh4_x86.in_delay_slot ) {
  1816                                     	SLOTILLEGAL();
  1817                                         } else {
  1818                                     	check_priv();
  1819                                     	load_reg( R_EAX, Rm );
  1820                                     	check_ralign32( R_EAX );
  1821                                     	MMU_TRANSLATE_READ( R_EAX );
  1822                                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1823                                     	MEM_READ_LONG( R_EAX, R_EAX );
  1824                                     	call_func1( sh4_write_sr, R_EAX );
  1825                                     	sh4_x86.priv_checked = FALSE;
  1826                                     	sh4_x86.fpuen_checked = FALSE;
  1827                                     	sh4_x86.tstate = TSTATE_NONE;
  1830                                         break;
  1831                                     case 0x1:
                                        /* GBR is user-accessible: no check_priv() here. */
  1832                                         { /* LDC.L @Rm+, GBR */
  1833                                         uint32_t Rm = ((ir>>8)&0xF); 
  1834                                         COUNT_INST(I_LDCM);
  1835                                         load_reg( R_EAX, Rm );
  1836                                         check_ralign32( R_EAX );
  1837                                         MMU_TRANSLATE_READ( R_EAX );
  1838                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1839                                         MEM_READ_LONG( R_EAX, R_EAX );
  1840                                         store_spreg( R_EAX, R_GBR );
  1841                                         sh4_x86.tstate = TSTATE_NONE;
  1843                                         break;
  1844                                     case 0x2:
  1845                                         { /* LDC.L @Rm+, VBR */
  1846                                         uint32_t Rm = ((ir>>8)&0xF); 
  1847                                         COUNT_INST(I_LDCM);
  1848                                         check_priv();
  1849                                         load_reg( R_EAX, Rm );
  1850                                         check_ralign32( R_EAX );
  1851                                         MMU_TRANSLATE_READ( R_EAX );
  1852                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1853                                         MEM_READ_LONG( R_EAX, R_EAX );
  1854                                         store_spreg( R_EAX, R_VBR );
  1855                                         sh4_x86.tstate = TSTATE_NONE;
  1857                                         break;
  1858                                     case 0x3:
  1859                                         { /* LDC.L @Rm+, SSR */
  1860                                         uint32_t Rm = ((ir>>8)&0xF); 
  1861                                         COUNT_INST(I_LDCM);
  1862                                         check_priv();
  1863                                         load_reg( R_EAX, Rm );
  1864                                         check_ralign32( R_EAX );
  1865                                         MMU_TRANSLATE_READ( R_EAX );
  1866                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1867                                         MEM_READ_LONG( R_EAX, R_EAX );
  1868                                         store_spreg( R_EAX, R_SSR );
  1869                                         sh4_x86.tstate = TSTATE_NONE;
  1871                                         break;
  1872                                     case 0x4:
  1873                                         { /* LDC.L @Rm+, SPC */
  1874                                         uint32_t Rm = ((ir>>8)&0xF); 
  1875                                         COUNT_INST(I_LDCM);
  1876                                         check_priv();
  1877                                         load_reg( R_EAX, Rm );
  1878                                         check_ralign32( R_EAX );
  1879                                         MMU_TRANSLATE_READ( R_EAX );
  1880                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1881                                         MEM_READ_LONG( R_EAX, R_EAX );
  1882                                         store_spreg( R_EAX, R_SPC );
  1883                                         sh4_x86.tstate = TSTATE_NONE;
  1885                                         break;
  1886                                     default:
  1887                                         UNDEF();
  1888                                         break;
  1890                                 break;
  1891                             case 0x1:
                                /* Banked-register load: destination is r_bank[Rn_BANK]. */
  1892                                 { /* LDC.L @Rm+, Rn_BANK */
  1893                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  1894                                 COUNT_INST(I_LDCM);
  1895                                 check_priv();
  1896                                 load_reg( R_EAX, Rm );
  1897                                 check_ralign32( R_EAX );
  1898                                 MMU_TRANSLATE_READ( R_EAX );
  1899                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1900                                 MEM_READ_LONG( R_EAX, R_EAX );
                                /* NOTE(review): this listing is truncated here -- the
                                 * store to r_bank[Rn_BANK] and the rest of this case
                                 * continue past the end of the visible chunk. */
  1901                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  1902                                 sh4_x86.tstate = TSTATE_NONE;
  1904                                 break;
  1906                         break;
  1907                     case 0x8:
  1908                         switch( (ir&0xF0) >> 4 ) {
  1909                             case 0x0:
  1910                                 { /* SHLL2 Rn */
  1911                                 uint32_t Rn = ((ir>>8)&0xF); 
  1912                                 COUNT_INST(I_SHLL);
  1913                                 load_reg( R_EAX, Rn );
  1914                                 SHL_imm8_r32( 2, R_EAX );
  1915                                 store_reg( R_EAX, Rn );
  1916                                 sh4_x86.tstate = TSTATE_NONE;
  1918                                 break;
  1919                             case 0x1:
  1920                                 { /* SHLL8 Rn */
  1921                                 uint32_t Rn = ((ir>>8)&0xF); 
  1922                                 COUNT_INST(I_SHLL);
  1923                                 load_reg( R_EAX, Rn );
  1924                                 SHL_imm8_r32( 8, R_EAX );
  1925                                 store_reg( R_EAX, Rn );
  1926                                 sh4_x86.tstate = TSTATE_NONE;
  1928                                 break;
  1929                             case 0x2:
  1930                                 { /* SHLL16 Rn */
  1931                                 uint32_t Rn = ((ir>>8)&0xF); 
  1932                                 COUNT_INST(I_SHLL);
  1933                                 load_reg( R_EAX, Rn );
  1934                                 SHL_imm8_r32( 16, R_EAX );
  1935                                 store_reg( R_EAX, Rn );
  1936                                 sh4_x86.tstate = TSTATE_NONE;
  1938                                 break;
  1939                             default:
  1940                                 UNDEF();
  1941                                 break;
  1943                         break;
  1944                     case 0x9:
  1945                         switch( (ir&0xF0) >> 4 ) {
  1946                             case 0x0:
  1947                                 { /* SHLR2 Rn */
  1948                                 uint32_t Rn = ((ir>>8)&0xF); 
  1949                                 COUNT_INST(I_SHLR);
  1950                                 load_reg( R_EAX, Rn );
  1951                                 SHR_imm8_r32( 2, R_EAX );
  1952                                 store_reg( R_EAX, Rn );
  1953                                 sh4_x86.tstate = TSTATE_NONE;
  1955                                 break;
  1956                             case 0x1:
  1957                                 { /* SHLR8 Rn */
  1958                                 uint32_t Rn = ((ir>>8)&0xF); 
  1959                                 COUNT_INST(I_SHLR);
  1960                                 load_reg( R_EAX, Rn );
  1961                                 SHR_imm8_r32( 8, R_EAX );
  1962                                 store_reg( R_EAX, Rn );
  1963                                 sh4_x86.tstate = TSTATE_NONE;
  1965                                 break;
  1966                             case 0x2:
  1967                                 { /* SHLR16 Rn */
  1968                                 uint32_t Rn = ((ir>>8)&0xF); 
  1969                                 COUNT_INST(I_SHLR);
  1970                                 load_reg( R_EAX, Rn );
  1971                                 SHR_imm8_r32( 16, R_EAX );
  1972                                 store_reg( R_EAX, Rn );
  1973                                 sh4_x86.tstate = TSTATE_NONE;
  1975                                 break;
  1976                             default:
  1977                                 UNDEF();
  1978                                 break;
  1980                         break;
  1981                     case 0xA:
  1982                         switch( (ir&0xF0) >> 4 ) {
  1983                             case 0x0:
  1984                                 { /* LDS Rm, MACH */
  1985                                 uint32_t Rm = ((ir>>8)&0xF); 
  1986                                 COUNT_INST(I_LDS);
  1987                                 load_reg( R_EAX, Rm );
  1988                                 store_spreg( R_EAX, R_MACH );
  1990                                 break;
  1991                             case 0x1:
  1992                                 { /* LDS Rm, MACL */
  1993                                 uint32_t Rm = ((ir>>8)&0xF); 
  1994                                 COUNT_INST(I_LDS);
  1995                                 load_reg( R_EAX, Rm );
  1996                                 store_spreg( R_EAX, R_MACL );
  1998                                 break;
  1999                             case 0x2:
  2000                                 { /* LDS Rm, PR */
  2001                                 uint32_t Rm = ((ir>>8)&0xF); 
  2002                                 COUNT_INST(I_LDS);
  2003                                 load_reg( R_EAX, Rm );
  2004                                 store_spreg( R_EAX, R_PR );
  2006                                 break;
  2007                             case 0x3:
  2008                                 { /* LDC Rm, SGR */
  2009                                 uint32_t Rm = ((ir>>8)&0xF); 
  2010                                 COUNT_INST(I_LDC);
  2011                                 check_priv();
  2012                                 load_reg( R_EAX, Rm );
  2013                                 store_spreg( R_EAX, R_SGR );
  2014                                 sh4_x86.tstate = TSTATE_NONE;
  2016                                 break;
  2017                             case 0x5:
  2018                                 { /* LDS Rm, FPUL */
  2019                                 uint32_t Rm = ((ir>>8)&0xF); 
  2020                                 COUNT_INST(I_LDS);
  2021                                 check_fpuen();
  2022                                 load_reg( R_EAX, Rm );
  2023                                 store_spreg( R_EAX, R_FPUL );
  2025                                 break;
  2026                             case 0x6:
  2027                                 { /* LDS Rm, FPSCR */
  2028                                 uint32_t Rm = ((ir>>8)&0xF); 
  2029                                 COUNT_INST(I_LDSFPSCR);
  2030                                 check_fpuen();
  2031                                 load_reg( R_EAX, Rm );
  2032                                 call_func1( sh4_write_fpscr, R_EAX );
  2033                                 sh4_x86.tstate = TSTATE_NONE;
  2035                                 break;
  2036                             case 0xF:
  2037                                 { /* LDC Rm, DBR */
  2038                                 uint32_t Rm = ((ir>>8)&0xF); 
  2039                                 COUNT_INST(I_LDC);
  2040                                 check_priv();
  2041                                 load_reg( R_EAX, Rm );
  2042                                 store_spreg( R_EAX, R_DBR );
  2043                                 sh4_x86.tstate = TSTATE_NONE;
  2045                                 break;
  2046                             default:
  2047                                 UNDEF();
  2048                                 break;
  2050                         break;
  2051                     case 0xB:
  2052                         switch( (ir&0xF0) >> 4 ) {
  2053                             case 0x0:
  2054                                 { /* JSR @Rn */
  2055                                 uint32_t Rn = ((ir>>8)&0xF); 
  2056                                 COUNT_INST(I_JSR);
  2057                                 if( sh4_x86.in_delay_slot ) {
  2058                             	SLOTILLEGAL();
  2059                                 } else {
  2060                             	load_spreg( R_EAX, R_PC );
  2061                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
  2062                             	store_spreg( R_EAX, R_PR );
  2063                             	load_reg( R_ECX, Rn );
  2064                             	store_spreg( R_ECX, R_NEW_PC );
  2065                             	sh4_x86.in_delay_slot = DELAY_PC;
  2066                             	sh4_x86.branch_taken = TRUE;
  2067                             	sh4_x86.tstate = TSTATE_NONE;
  2068                             	if( UNTRANSLATABLE(pc+2) ) {
  2069                             	    exit_block_emu(pc+2);
  2070                             	    return 2;
  2071                             	} else {
  2072                             	    sh4_translate_instruction(pc+2);
  2073                             	    exit_block_newpcset(pc+2);
  2074                             	    return 4;
  2078                                 break;
  2079                             case 0x1:
  2080                                 { /* TAS.B @Rn */
  2081                                 uint32_t Rn = ((ir>>8)&0xF); 
  2082                                 COUNT_INST(I_TASB);
  2083                                 load_reg( R_EAX, Rn );
  2084                                 MMU_TRANSLATE_WRITE( R_EAX );
  2085                                 PUSH_realigned_r32( R_EAX );
  2086                                 MEM_READ_BYTE( R_EAX, R_EAX );
  2087                                 TEST_r8_r8( R_AL, R_AL );
  2088                                 SETE_t();
  2089                                 OR_imm8_r8( 0x80, R_AL );
  2090                                 POP_realigned_r32( R_ECX );
  2091                                 MEM_WRITE_BYTE( R_ECX, R_EAX );
  2092                                 sh4_x86.tstate = TSTATE_NONE;
  2094                                 break;
  2095                             case 0x2:
  2096                                 { /* JMP @Rn */
  2097                                 uint32_t Rn = ((ir>>8)&0xF); 
  2098                                 COUNT_INST(I_JMP);
  2099                                 if( sh4_x86.in_delay_slot ) {
  2100                             	SLOTILLEGAL();
  2101                                 } else {
  2102                             	load_reg( R_ECX, Rn );
  2103                             	store_spreg( R_ECX, R_NEW_PC );
  2104                             	sh4_x86.in_delay_slot = DELAY_PC;
  2105                             	sh4_x86.branch_taken = TRUE;
  2106                             	if( UNTRANSLATABLE(pc+2) ) {
  2107                             	    exit_block_emu(pc+2);
  2108                             	    return 2;
  2109                             	} else {
  2110                             	    sh4_translate_instruction(pc+2);
  2111                             	    exit_block_newpcset(pc+2);
  2112                             	    return 4;
  2116                                 break;
  2117                             default:
  2118                                 UNDEF();
  2119                                 break;
  2121                         break;
  2122                     case 0xC:
  2123                         { /* SHAD Rm, Rn */
  2124                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2125                         COUNT_INST(I_SHAD);
  2126                         /* Annoyingly enough, not directly convertible */
  2127                         load_reg( R_EAX, Rn );
  2128                         load_reg( R_ECX, Rm );
  2129                         CMP_imm32_r32( 0, R_ECX );
  2130                         JGE_rel8(doshl);
  2132                         NEG_r32( R_ECX );      // 2
  2133                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2134                         JE_rel8(emptysar);     // 2
  2135                         SAR_r32_CL( R_EAX );       // 2
  2136                         JMP_rel8(end);          // 2
  2138                         JMP_TARGET(emptysar);
  2139                         SAR_imm8_r32(31, R_EAX );  // 3
  2140                         JMP_rel8(end2);
  2142                         JMP_TARGET(doshl);
  2143                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2144                         SHL_r32_CL( R_EAX );       // 2
  2145                         JMP_TARGET(end);
  2146                         JMP_TARGET(end2);
  2147                         store_reg( R_EAX, Rn );
  2148                         sh4_x86.tstate = TSTATE_NONE;
  2150                         break;
  2151                     case 0xD:
  2152                         { /* SHLD Rm, Rn */
  2153                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2154                         COUNT_INST(I_SHLD);
  2155                         load_reg( R_EAX, Rn );
  2156                         load_reg( R_ECX, Rm );
  2157                         CMP_imm32_r32( 0, R_ECX );
  2158                         JGE_rel8(doshl);
  2160                         NEG_r32( R_ECX );      // 2
  2161                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2162                         JE_rel8(emptyshr );
  2163                         SHR_r32_CL( R_EAX );       // 2
  2164                         JMP_rel8(end);          // 2
  2166                         JMP_TARGET(emptyshr);
  2167                         XOR_r32_r32( R_EAX, R_EAX );
  2168                         JMP_rel8(end2);
  2170                         JMP_TARGET(doshl);
  2171                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2172                         SHL_r32_CL( R_EAX );       // 2
  2173                         JMP_TARGET(end);
  2174                         JMP_TARGET(end2);
  2175                         store_reg( R_EAX, Rn );
  2176                         sh4_x86.tstate = TSTATE_NONE;
  2178                         break;
  2179                     case 0xE:
  2180                         switch( (ir&0x80) >> 7 ) {
  2181                             case 0x0:
  2182                                 switch( (ir&0x70) >> 4 ) {
  2183                                     case 0x0:
  2184                                         { /* LDC Rm, SR */
  2185                                         uint32_t Rm = ((ir>>8)&0xF); 
  2186                                         COUNT_INST(I_LDCSR);
  2187                                         if( sh4_x86.in_delay_slot ) {
  2188                                     	SLOTILLEGAL();
  2189                                         } else {
  2190                                     	check_priv();
  2191                                     	load_reg( R_EAX, Rm );
  2192                                     	call_func1( sh4_write_sr, R_EAX );
  2193                                     	sh4_x86.priv_checked = FALSE;
  2194                                     	sh4_x86.fpuen_checked = FALSE;
  2195                                     	sh4_x86.tstate = TSTATE_NONE;
  2198                                         break;
  2199                                     case 0x1:
  2200                                         { /* LDC Rm, GBR */
  2201                                         uint32_t Rm = ((ir>>8)&0xF); 
  2202                                         COUNT_INST(I_LDC);
  2203                                         load_reg( R_EAX, Rm );
  2204                                         store_spreg( R_EAX, R_GBR );
  2206                                         break;
  2207                                     case 0x2:
  2208                                         { /* LDC Rm, VBR */
  2209                                         uint32_t Rm = ((ir>>8)&0xF); 
  2210                                         COUNT_INST(I_LDC);
  2211                                         check_priv();
  2212                                         load_reg( R_EAX, Rm );
  2213                                         store_spreg( R_EAX, R_VBR );
  2214                                         sh4_x86.tstate = TSTATE_NONE;
  2216                                         break;
  2217                                     case 0x3:
  2218                                         { /* LDC Rm, SSR */
  2219                                         uint32_t Rm = ((ir>>8)&0xF); 
  2220                                         COUNT_INST(I_LDC);
  2221                                         check_priv();
  2222                                         load_reg( R_EAX, Rm );
  2223                                         store_spreg( R_EAX, R_SSR );
  2224                                         sh4_x86.tstate = TSTATE_NONE;
  2226                                         break;
  2227                                     case 0x4:
  2228                                         { /* LDC Rm, SPC */
  2229                                         uint32_t Rm = ((ir>>8)&0xF); 
  2230                                         COUNT_INST(I_LDC);
  2231                                         check_priv();
  2232                                         load_reg( R_EAX, Rm );
  2233                                         store_spreg( R_EAX, R_SPC );
  2234                                         sh4_x86.tstate = TSTATE_NONE;
  2236                                         break;
  2237                                     default:
  2238                                         UNDEF();
  2239                                         break;
  2241                                 break;
  2242                             case 0x1:
  2243                                 { /* LDC Rm, Rn_BANK */
  2244                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  2245                                 COUNT_INST(I_LDC);
  2246                                 check_priv();
  2247                                 load_reg( R_EAX, Rm );
  2248                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  2249                                 sh4_x86.tstate = TSTATE_NONE;
  2251                                 break;
  2253                         break;
  2254                     case 0xF:
  2255                         { /* MAC.W @Rm+, @Rn+ */
  2256                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2257                         COUNT_INST(I_MACW);
  2258                         if( Rm == Rn ) {
  2259                     	load_reg( R_EAX, Rm );
  2260                     	check_ralign16( R_EAX );
  2261                     	MMU_TRANSLATE_READ( R_EAX );
  2262                     	PUSH_realigned_r32( R_EAX );
  2263                     	load_reg( R_EAX, Rn );
  2264                     	ADD_imm8s_r32( 2, R_EAX );
  2265                     	MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
  2266                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
  2267                     	// Note translate twice in case of page boundaries. Maybe worth
  2268                     	// adding a page-boundary check to skip the second translation
  2269                         } else {
  2270                     	load_reg( R_EAX, Rm );
  2271                     	check_ralign16( R_EAX );
  2272                     	MMU_TRANSLATE_READ( R_EAX );
  2273                     	load_reg( R_ECX, Rn );
  2274                     	check_ralign16( R_ECX );
  2275                     	PUSH_realigned_r32( R_EAX );
  2276                     	MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
  2277                     	MOV_r32_r32( R_ECX, R_EAX );
  2278                     	ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
  2279                     	ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
  2281                         MEM_READ_WORD( R_EAX, R_EAX );
  2282                         POP_r32( R_ECX );
  2283                         PUSH_r32( R_EAX );
  2284                         MEM_READ_WORD( R_ECX, R_EAX );
  2285                         POP_realigned_r32( R_ECX );
  2286                         IMUL_r32( R_ECX );
  2288                         load_spreg( R_ECX, R_S );
  2289                         TEST_r32_r32( R_ECX, R_ECX );
  2290                         JE_rel8( nosat );
  2292                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2293                         JNO_rel8( end );            // 2
  2294                         load_imm32( R_EDX, 1 );         // 5
  2295                         store_spreg( R_EDX, R_MACH );   // 6
  2296                         JS_rel8( positive );        // 2
  2297                         load_imm32( R_EAX, 0x80000000 );// 5
  2298                         store_spreg( R_EAX, R_MACL );   // 6
  2299                         JMP_rel8(end2);           // 2
  2301                         JMP_TARGET(positive);
  2302                         load_imm32( R_EAX, 0x7FFFFFFF );// 5
  2303                         store_spreg( R_EAX, R_MACL );   // 6
  2304                         JMP_rel8(end3);            // 2
  2306                         JMP_TARGET(nosat);
  2307                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2308                         ADC_r32_sh4r( R_EDX, R_MACH );  // 6
  2309                         JMP_TARGET(end);
  2310                         JMP_TARGET(end2);
  2311                         JMP_TARGET(end3);
  2312                         sh4_x86.tstate = TSTATE_NONE;
  2314                         break;
  2316                 break;
  2317             case 0x5:
  2318                 { /* MOV.L @(disp, Rm), Rn */
  2319                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
  2320                 COUNT_INST(I_MOVL);
  2321                 load_reg( R_EAX, Rm );
  2322                 ADD_imm8s_r32( disp, R_EAX );
  2323                 check_ralign32( R_EAX );
  2324                 MMU_TRANSLATE_READ( R_EAX );
  2325                 MEM_READ_LONG( R_EAX, R_EAX );
  2326                 store_reg( R_EAX, Rn );
  2327                 sh4_x86.tstate = TSTATE_NONE;
  2329                 break;
  2330             case 0x6:
  2331                 switch( ir&0xF ) {
  2332                     case 0x0:
  2333                         { /* MOV.B @Rm, Rn */
  2334                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2335                         COUNT_INST(I_MOVB);
  2336                         load_reg( R_EAX, Rm );
  2337                         MMU_TRANSLATE_READ( R_EAX );
  2338                         MEM_READ_BYTE( R_EAX, R_EAX );
  2339                         store_reg( R_EAX, Rn );
  2340                         sh4_x86.tstate = TSTATE_NONE;
  2342                         break;
  2343                     case 0x1:
  2344                         { /* MOV.W @Rm, Rn */
  2345                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2346                         COUNT_INST(I_MOVW);
  2347                         load_reg( R_EAX, Rm );
  2348                         check_ralign16( R_EAX );
  2349                         MMU_TRANSLATE_READ( R_EAX );
  2350                         MEM_READ_WORD( R_EAX, R_EAX );
  2351                         store_reg( R_EAX, Rn );
  2352                         sh4_x86.tstate = TSTATE_NONE;
  2354                         break;
  2355                     case 0x2:
  2356                         { /* MOV.L @Rm, Rn */
  2357                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2358                         COUNT_INST(I_MOVL);
  2359                         load_reg( R_EAX, Rm );
  2360                         check_ralign32( R_EAX );
  2361                         MMU_TRANSLATE_READ( R_EAX );
  2362                         MEM_READ_LONG( R_EAX, R_EAX );
  2363                         store_reg( R_EAX, Rn );
  2364                         sh4_x86.tstate = TSTATE_NONE;
  2366                         break;
  2367                     case 0x3:
  2368                         { /* MOV Rm, Rn */
  2369                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2370                         COUNT_INST(I_MOV);
  2371                         load_reg( R_EAX, Rm );
  2372                         store_reg( R_EAX, Rn );
  2374                         break;
  2375                     case 0x4:
  2376                         { /* MOV.B @Rm+, Rn */
  2377                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2378                         COUNT_INST(I_MOVB);
  2379                         load_reg( R_EAX, Rm );
  2380                         MMU_TRANSLATE_READ( R_EAX );
  2381                         ADD_imm8s_sh4r( 1, REG_OFFSET(r[Rm]) );
  2382                         MEM_READ_BYTE( R_EAX, R_EAX );
  2383                         store_reg( R_EAX, Rn );
  2384                         sh4_x86.tstate = TSTATE_NONE;
  2386                         break;
  2387                     case 0x5:
  2388                         { /* MOV.W @Rm+, Rn */
  2389                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2390                         COUNT_INST(I_MOVW);
  2391                         load_reg( R_EAX, Rm );
  2392                         check_ralign16( R_EAX );
  2393                         MMU_TRANSLATE_READ( R_EAX );
  2394                         ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
  2395                         MEM_READ_WORD( R_EAX, R_EAX );
  2396                         store_reg( R_EAX, Rn );
  2397                         sh4_x86.tstate = TSTATE_NONE;
  2399                         break;
  2400                     case 0x6:
  2401                         { /* MOV.L @Rm+, Rn */
  2402                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2403                         COUNT_INST(I_MOVL);
  2404                         load_reg( R_EAX, Rm );
  2405                         check_ralign32( R_EAX );
  2406                         MMU_TRANSLATE_READ( R_EAX );
  2407                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  2408                         MEM_READ_LONG( R_EAX, R_EAX );
  2409                         store_reg( R_EAX, Rn );
  2410                         sh4_x86.tstate = TSTATE_NONE;
  2412                         break;
  2413                     case 0x7:
  2414                         { /* NOT Rm, Rn */
  2415                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2416                         COUNT_INST(I_NOT);
  2417                         load_reg( R_EAX, Rm );
  2418                         NOT_r32( R_EAX );
  2419                         store_reg( R_EAX, Rn );
  2420                         sh4_x86.tstate = TSTATE_NONE;
  2422                         break;
  2423                     case 0x8:
  2424                         { /* SWAP.B Rm, Rn */
  2425                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2426                         COUNT_INST(I_SWAPB);
  2427                         load_reg( R_EAX, Rm );
  2428                         XCHG_r8_r8( R_AL, R_AH ); // NB: does not touch EFLAGS
  2429                         store_reg( R_EAX, Rn );
  2431                         break;
  2432                     case 0x9:
  2433                         { /* SWAP.W Rm, Rn */
  2434                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2435                         COUNT_INST(I_SWAPB);
  2436                         load_reg( R_EAX, Rm );
  2437                         MOV_r32_r32( R_EAX, R_ECX );
  2438                         SHL_imm8_r32( 16, R_ECX );
  2439                         SHR_imm8_r32( 16, R_EAX );
  2440                         OR_r32_r32( R_EAX, R_ECX );
  2441                         store_reg( R_ECX, Rn );
  2442                         sh4_x86.tstate = TSTATE_NONE;
  2444                         break;
  2445                     case 0xA:
  2446                         { /* NEGC Rm, Rn */
  2447                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2448                         COUNT_INST(I_NEGC);
  2449                         load_reg( R_EAX, Rm );
  2450                         XOR_r32_r32( R_ECX, R_ECX );
  2451                         LDC_t();
  2452                         SBB_r32_r32( R_EAX, R_ECX );
  2453                         store_reg( R_ECX, Rn );
  2454                         SETC_t();
  2455                         sh4_x86.tstate = TSTATE_C;
  2457                         break;
  2458                     case 0xB:
  2459                         { /* NEG Rm, Rn */
  2460                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2461                         COUNT_INST(I_NEG);
  2462                         load_reg( R_EAX, Rm );
  2463                         NEG_r32( R_EAX );
  2464                         store_reg( R_EAX, Rn );
  2465                         sh4_x86.tstate = TSTATE_NONE;
  2467                         break;
  2468                     case 0xC:
  2469                         { /* EXTU.B Rm, Rn */
  2470                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2471                         COUNT_INST(I_EXTUB);
  2472                         load_reg( R_EAX, Rm );
  2473                         MOVZX_r8_r32( R_EAX, R_EAX );
  2474                         store_reg( R_EAX, Rn );
  2476                         break;
  2477                     case 0xD:
  2478                         { /* EXTU.W Rm, Rn */
  2479                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2480                         COUNT_INST(I_EXTUW);
  2481                         load_reg( R_EAX, Rm );
  2482                         MOVZX_r16_r32( R_EAX, R_EAX );
  2483                         store_reg( R_EAX, Rn );
  2485                         break;
  2486                     case 0xE:
  2487                         { /* EXTS.B Rm, Rn */
  2488                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2489                         COUNT_INST(I_EXTSB);
  2490                         load_reg( R_EAX, Rm );
  2491                         MOVSX_r8_r32( R_EAX, R_EAX );
  2492                         store_reg( R_EAX, Rn );
  2494                         break;
  2495                     case 0xF:
  2496                         { /* EXTS.W Rm, Rn */
  2497                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2498                         COUNT_INST(I_EXTSW);
  2499                         load_reg( R_EAX, Rm );
  2500                         MOVSX_r16_r32( R_EAX, R_EAX );
  2501                         store_reg( R_EAX, Rn );
  2503                         break;
  2505                 break;
  2506             case 0x7:
  2507                 { /* ADD #imm, Rn */
  2508                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  2509                 COUNT_INST(I_ADDI);
  2510                 load_reg( R_EAX, Rn );
  2511                 ADD_imm8s_r32( imm, R_EAX );
  2512                 store_reg( R_EAX, Rn );
  2513                 sh4_x86.tstate = TSTATE_NONE;
  2515                 break;
  2516             case 0x8:
  2517                 switch( (ir&0xF00) >> 8 ) {
  2518                     case 0x0:
  2519                         { /* MOV.B R0, @(disp, Rn) */
  2520                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2521                         COUNT_INST(I_MOVB);
  2522                         load_reg( R_EAX, Rn );
  2523                         ADD_imm32_r32( disp, R_EAX );
  2524                         MMU_TRANSLATE_WRITE( R_EAX );
  2525                         load_reg( R_EDX, 0 );
  2526                         MEM_WRITE_BYTE( R_EAX, R_EDX );
  2527                         sh4_x86.tstate = TSTATE_NONE;
  2529                         break;
  2530                     case 0x1:
  2531                         { /* MOV.W R0, @(disp, Rn) */
  2532                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2533                         COUNT_INST(I_MOVW);
  2534                         load_reg( R_EAX, Rn );
  2535                         ADD_imm32_r32( disp, R_EAX );
  2536                         check_walign16( R_EAX );
  2537                         MMU_TRANSLATE_WRITE( R_EAX );
  2538                         load_reg( R_EDX, 0 );
  2539                         MEM_WRITE_WORD( R_EAX, R_EDX );
  2540                         sh4_x86.tstate = TSTATE_NONE;
  2542                         break;
  2543                     case 0x4:
  2544                         { /* MOV.B @(disp, Rm), R0 */
  2545                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2546                         COUNT_INST(I_MOVB);
  2547                         load_reg( R_EAX, Rm );
  2548                         ADD_imm32_r32( disp, R_EAX );
  2549                         MMU_TRANSLATE_READ( R_EAX );
  2550                         MEM_READ_BYTE( R_EAX, R_EAX );
  2551                         store_reg( R_EAX, 0 );
  2552                         sh4_x86.tstate = TSTATE_NONE;
  2554                         break;
  2555                     case 0x5:
  2556                         { /* MOV.W @(disp, Rm), R0 */
  2557                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2558                         COUNT_INST(I_MOVW);
  2559                         load_reg( R_EAX, Rm );
  2560                         ADD_imm32_r32( disp, R_EAX );
  2561                         check_ralign16( R_EAX );
  2562                         MMU_TRANSLATE_READ( R_EAX );
  2563                         MEM_READ_WORD( R_EAX, R_EAX );
  2564                         store_reg( R_EAX, 0 );
  2565                         sh4_x86.tstate = TSTATE_NONE;
  2567                         break;
  2568                     case 0x8:
  2569                         { /* CMP/EQ #imm, R0 */
  2570                         int32_t imm = SIGNEXT8(ir&0xFF); 
  2571                         COUNT_INST(I_CMPEQI);
  2572                         load_reg( R_EAX, 0 );
  2573                         CMP_imm8s_r32(imm, R_EAX);
  2574                         SETE_t();
  2575                         sh4_x86.tstate = TSTATE_E;
  2577                         break;
  2578                     case 0x9:
  2579                         { /* BT disp */
  2580                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2581                         COUNT_INST(I_BT);
  2582                         if( sh4_x86.in_delay_slot ) {
  2583                     	SLOTILLEGAL();
  2584                         } else {
  2585                     	sh4vma_t target = disp + pc + 4;
  2586                     	JF_rel8( nottaken );
  2587                     	exit_block_rel(target, pc+2 );
  2588                     	JMP_TARGET(nottaken);
  2589                     	return 2;
  2592                         break;
  2593                     case 0xB:
  2594                         { /* BF disp */
  2595                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2596                         COUNT_INST(I_BF);
  2597                         if( sh4_x86.in_delay_slot ) {
  2598                     	SLOTILLEGAL();
  2599                         } else {
  2600                     	sh4vma_t target = disp + pc + 4;
  2601                     	JT_rel8( nottaken );
  2602                     	exit_block_rel(target, pc+2 );
  2603                     	JMP_TARGET(nottaken);
  2604                     	return 2;
  2607                         break;
  2608                     case 0xD:
  2609                         { /* BT/S disp */
  2610                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2611                         COUNT_INST(I_BTS);
  2612                         if( sh4_x86.in_delay_slot ) {
  2613                     	SLOTILLEGAL();
  2614                         } else {
  2615                     	sh4_x86.in_delay_slot = DELAY_PC;
  2616                     	if( UNTRANSLATABLE(pc+2) ) {
  2617                     	    load_imm32( R_EAX, pc + 4 - sh4_x86.block_start_pc );
  2618                     	    JF_rel8(nottaken);
  2619                     	    ADD_imm32_r32( disp, R_EAX );
  2620                     	    JMP_TARGET(nottaken);
  2621                     	    ADD_sh4r_r32( R_PC, R_EAX );
  2622                     	    store_spreg( R_EAX, R_NEW_PC );
  2623                     	    exit_block_emu(pc+2);
  2624                     	    sh4_x86.branch_taken = TRUE;
  2625                     	    return 2;
  2626                     	} else {
  2627                     	    if( sh4_x86.tstate == TSTATE_NONE ) {
  2628                     		CMP_imm8s_sh4r( 1, R_T );
  2629                     		sh4_x86.tstate = TSTATE_E;
  2631                     	    OP(0x0F); OP(0x80+(sh4_x86.tstate^1)); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JF rel32
  2632                     	    sh4_translate_instruction(pc+2);
  2633                     	    exit_block_rel( disp + pc + 4, pc+4 );
  2634                     	    // not taken
  2635                     	    *patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2636                     	    sh4_translate_instruction(pc+2);
  2637                     	    return 4;
  2641                         break;
  2642                     case 0xF:
  2643                         { /* BF/S disp */
  2644                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2645                         COUNT_INST(I_BFS);
  2646                         if( sh4_x86.in_delay_slot ) {
  2647                     	SLOTILLEGAL();
  2648                         } else {
  2649                     	sh4_x86.in_delay_slot = DELAY_PC;
  2650                     	if( UNTRANSLATABLE(pc+2) ) {
  2651                     	    load_imm32( R_EAX, pc + 4 - sh4_x86.block_start_pc );
  2652                     	    JT_rel8(nottaken);
  2653                     	    ADD_imm32_r32( disp, R_EAX );
  2654                     	    JMP_TARGET(nottaken);
  2655                     	    ADD_sh4r_r32( R_PC, R_EAX );
  2656                     	    store_spreg( R_EAX, R_NEW_PC );
  2657                     	    exit_block_emu(pc+2);
  2658                     	    sh4_x86.branch_taken = TRUE;
  2659                     	    return 2;
  2660                     	} else {
  2661                     	    if( sh4_x86.tstate == TSTATE_NONE ) {
  2662                     		CMP_imm8s_sh4r( 1, R_T );
  2663                     		sh4_x86.tstate = TSTATE_E;
  2665                     	    sh4vma_t target = disp + pc + 4;
  2666                     	    OP(0x0F); OP(0x80+sh4_x86.tstate); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JT rel32
  2667                     	    sh4_translate_instruction(pc+2);
  2668                     	    exit_block_rel( target, pc+4 );
  2670                     	    // not taken
  2671                     	    *patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2672                     	    sh4_translate_instruction(pc+2);
  2673                     	    return 4;
  2677                         break;
  2678                     default:
  2679                         UNDEF();
  2680                         break;
  2682                 break;
  2683             case 0x9:
  2684                 { /* MOV.W @(disp, PC), Rn */
  2685                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1; 
  2686                 COUNT_INST(I_MOVW);
  2687                 if( sh4_x86.in_delay_slot ) {
  2688             	SLOTILLEGAL();
  2689                 } else {
  2690             	// See comments for MOV.L @(disp, PC), Rn
  2691             	uint32_t target = pc + disp + 4;
  2692             	if( IS_IN_ICACHE(target) ) {
  2693             	    sh4ptr_t ptr = GET_ICACHE_PTR(target);
  2694             	    MOV_moff32_EAX( ptr );
  2695             	    MOVSX_r16_r32( R_EAX, R_EAX );
  2696             	} else {
  2697             	    load_imm32( R_EAX, (pc - sh4_x86.block_start_pc) + disp + 4 );
  2698             	    ADD_sh4r_r32( R_PC, R_EAX );
  2699             	    MMU_TRANSLATE_READ( R_EAX );
  2700             	    MEM_READ_WORD( R_EAX, R_EAX );
  2701             	    sh4_x86.tstate = TSTATE_NONE;
  2703             	store_reg( R_EAX, Rn );
  2706                 break;
  2707             case 0xA:
  2708                 { /* BRA disp */
  2709                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
  2710                 COUNT_INST(I_BRA);
  2711                 if( sh4_x86.in_delay_slot ) {
  2712             	SLOTILLEGAL();
  2713                 } else {
  2714             	sh4_x86.in_delay_slot = DELAY_PC;
  2715             	sh4_x86.branch_taken = TRUE;
  2716             	if( UNTRANSLATABLE(pc+2) ) {
  2717             	    load_spreg( R_EAX, R_PC );
  2718             	    ADD_imm32_r32( pc + disp + 4 - sh4_x86.block_start_pc, R_EAX );
  2719             	    store_spreg( R_EAX, R_NEW_PC );
  2720             	    exit_block_emu(pc+2);
  2721             	    return 2;
  2722             	} else {
  2723             	    sh4_translate_instruction( pc + 2 );
  2724             	    exit_block_rel( disp + pc + 4, pc+4 );
  2725             	    return 4;
  2729                 break;
  2730             case 0xB:
  2731                 { /* BSR disp */
  2732                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
  2733                 COUNT_INST(I_BSR);
  2734                 if( sh4_x86.in_delay_slot ) {
  2735             	SLOTILLEGAL();
  2736                 } else {
  2737             	load_spreg( R_EAX, R_PC );
  2738             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
  2739             	store_spreg( R_EAX, R_PR );
  2740             	sh4_x86.in_delay_slot = DELAY_PC;
  2741             	sh4_x86.branch_taken = TRUE;
  2742             	sh4_x86.tstate = TSTATE_NONE;
  2743             	if( UNTRANSLATABLE(pc+2) ) {
  2744             	    ADD_imm32_r32( disp, R_EAX );
  2745             	    store_spreg( R_EAX, R_NEW_PC );
  2746             	    exit_block_emu(pc+2);
  2747             	    return 2;
  2748             	} else {
  2749             	    sh4_translate_instruction( pc + 2 );
  2750             	    exit_block_rel( disp + pc + 4, pc+4 );
  2751             	    return 4;
  2755                 break;
  2756             case 0xC:
  2757                 switch( (ir&0xF00) >> 8 ) {
  2758                     case 0x0:
  2759                         { /* MOV.B R0, @(disp, GBR) */
  2760                         uint32_t disp = (ir&0xFF); 
  2761                         COUNT_INST(I_MOVB);
  2762                         load_spreg( R_EAX, R_GBR );
  2763                         ADD_imm32_r32( disp, R_EAX );
  2764                         MMU_TRANSLATE_WRITE( R_EAX );
  2765                         load_reg( R_EDX, 0 );
  2766                         MEM_WRITE_BYTE( R_EAX, R_EDX );
  2767                         sh4_x86.tstate = TSTATE_NONE;
  2769                         break;
  2770                     case 0x1:
  2771                         { /* MOV.W R0, @(disp, GBR) */
  2772                         uint32_t disp = (ir&0xFF)<<1; 
  2773                         COUNT_INST(I_MOVW);
  2774                         load_spreg( R_EAX, R_GBR );
  2775                         ADD_imm32_r32( disp, R_EAX );
  2776                         check_walign16( R_EAX );
  2777                         MMU_TRANSLATE_WRITE( R_EAX );
  2778                         load_reg( R_EDX, 0 );
  2779                         MEM_WRITE_WORD( R_EAX, R_EDX );
  2780                         sh4_x86.tstate = TSTATE_NONE;
  2782                         break;
  2783                     case 0x2:
  2784                         { /* MOV.L R0, @(disp, GBR) */
  2785                         uint32_t disp = (ir&0xFF)<<2; 
  2786                         COUNT_INST(I_MOVL);
  2787                         load_spreg( R_EAX, R_GBR );
  2788                         ADD_imm32_r32( disp, R_EAX );
  2789                         check_walign32( R_EAX );
  2790                         MMU_TRANSLATE_WRITE( R_EAX );
  2791                         load_reg( R_EDX, 0 );
  2792                         MEM_WRITE_LONG( R_EAX, R_EDX );
  2793                         sh4_x86.tstate = TSTATE_NONE;
  2795                         break;
  2796                     case 0x3:
  2797                         { /* TRAPA #imm */
  2798                         uint32_t imm = (ir&0xFF); 
  2799                         COUNT_INST(I_TRAPA);
  2800                         if( sh4_x86.in_delay_slot ) {
  2801                     	SLOTILLEGAL();
  2802                         } else {
  2803                     	load_imm32( R_ECX, pc+2 - sh4_x86.block_start_pc );   // 5
  2804                     	ADD_r32_sh4r( R_ECX, R_PC );
  2805                     	load_imm32( R_EAX, imm );
  2806                     	call_func1( sh4_raise_trap, R_EAX );
  2807                     	sh4_x86.tstate = TSTATE_NONE;
  2808                     	exit_block_pcset(pc);
  2809                     	sh4_x86.branch_taken = TRUE;
  2810                     	return 2;
  2813                         break;
  2814                     case 0x4:
  2815                         { /* MOV.B @(disp, GBR), R0 */
  2816                         uint32_t disp = (ir&0xFF); 
  2817                         COUNT_INST(I_MOVB);
  2818                         load_spreg( R_EAX, R_GBR );
  2819                         ADD_imm32_r32( disp, R_EAX );
  2820                         MMU_TRANSLATE_READ( R_EAX );
  2821                         MEM_READ_BYTE( R_EAX, R_EAX );
  2822                         store_reg( R_EAX, 0 );
  2823                         sh4_x86.tstate = TSTATE_NONE;
  2825                         break;
  2826                     case 0x5:
  2827                         { /* MOV.W @(disp, GBR), R0 */
  2828                         uint32_t disp = (ir&0xFF)<<1; 
  2829                         COUNT_INST(I_MOVW);
  2830                         load_spreg( R_EAX, R_GBR );
  2831                         ADD_imm32_r32( disp, R_EAX );
  2832                         check_ralign16( R_EAX );
  2833                         MMU_TRANSLATE_READ( R_EAX );
  2834                         MEM_READ_WORD( R_EAX, R_EAX );
  2835                         store_reg( R_EAX, 0 );
  2836                         sh4_x86.tstate = TSTATE_NONE;
  2838                         break;
  2839                     case 0x6:
  2840                         { /* MOV.L @(disp, GBR), R0 */
  2841                         uint32_t disp = (ir&0xFF)<<2; 
  2842                         COUNT_INST(I_MOVL);
  2843                         load_spreg( R_EAX, R_GBR );
  2844                         ADD_imm32_r32( disp, R_EAX );
  2845                         check_ralign32( R_EAX );
  2846                         MMU_TRANSLATE_READ( R_EAX );
  2847                         MEM_READ_LONG( R_EAX, R_EAX );
  2848                         store_reg( R_EAX, 0 );
  2849                         sh4_x86.tstate = TSTATE_NONE;
  2851                         break;
  2852                     case 0x7:
  2853                         { /* MOVA @(disp, PC), R0 */
  2854                         uint32_t disp = (ir&0xFF)<<2; 
  2855                         COUNT_INST(I_MOVA);
  2856                         if( sh4_x86.in_delay_slot ) {
  2857                     	SLOTILLEGAL();
  2858                         } else {
  2859                     	load_imm32( R_ECX, (pc - sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
  2860                     	ADD_sh4r_r32( R_PC, R_ECX );
  2861                     	store_reg( R_ECX, 0 );
  2862                     	sh4_x86.tstate = TSTATE_NONE;
  2865                         break;
  2866                     case 0x8:
  2867                         { /* TST #imm, R0 */
  2868                         uint32_t imm = (ir&0xFF); 
  2869                         COUNT_INST(I_TSTI);
  2870                         load_reg( R_EAX, 0 );
  2871                         TEST_imm32_r32( imm, R_EAX );
  2872                         SETE_t();
  2873                         sh4_x86.tstate = TSTATE_E;
  2875                         break;
  2876                     case 0x9:
  2877                         { /* AND #imm, R0 */
  2878                         uint32_t imm = (ir&0xFF); 
  2879                         COUNT_INST(I_ANDI);
  2880                         load_reg( R_EAX, 0 );
  2881                         AND_imm32_r32(imm, R_EAX); 
  2882                         store_reg( R_EAX, 0 );
  2883                         sh4_x86.tstate = TSTATE_NONE;
  2885                         break;
  2886                     case 0xA:
  2887                         { /* XOR #imm, R0 */
  2888                         uint32_t imm = (ir&0xFF); 
  2889                         COUNT_INST(I_XORI);
  2890                         load_reg( R_EAX, 0 );
  2891                         XOR_imm32_r32( imm, R_EAX );
  2892                         store_reg( R_EAX, 0 );
  2893                         sh4_x86.tstate = TSTATE_NONE;
  2895                         break;
  2896                     case 0xB:
  2897                         { /* OR #imm, R0 */
  2898                         uint32_t imm = (ir&0xFF); 
  2899                         COUNT_INST(I_ORI);
  2900                         load_reg( R_EAX, 0 );
  2901                         OR_imm32_r32(imm, R_EAX);
  2902                         store_reg( R_EAX, 0 );
  2903                         sh4_x86.tstate = TSTATE_NONE;
  2905                         break;
  2906                     case 0xC:
  2907                         { /* TST.B #imm, @(R0, GBR) */
  2908                         uint32_t imm = (ir&0xFF); 
  2909                         COUNT_INST(I_TSTB);
  2910                         load_reg( R_EAX, 0);
  2911                         load_reg( R_ECX, R_GBR);
  2912                         ADD_r32_r32( R_ECX, R_EAX );
  2913                         MMU_TRANSLATE_READ( R_EAX );
  2914                         MEM_READ_BYTE( R_EAX, R_EAX );
  2915                         TEST_imm8_r8( imm, R_AL );
  2916                         SETE_t();
  2917                         sh4_x86.tstate = TSTATE_E;
  2919                         break;
  2920                     case 0xD:
  2921                         { /* AND.B #imm, @(R0, GBR) */
  2922                         uint32_t imm = (ir&0xFF); 
  2923                         COUNT_INST(I_ANDB);
  2924                         load_reg( R_EAX, 0 );
  2925                         load_spreg( R_ECX, R_GBR );
  2926                         ADD_r32_r32( R_ECX, R_EAX );
  2927                         MMU_TRANSLATE_WRITE( R_EAX );
  2928                         PUSH_realigned_r32(R_EAX);
  2929                         MEM_READ_BYTE( R_EAX, R_EAX );
  2930                         POP_realigned_r32(R_ECX);
  2931                         AND_imm32_r32(imm, R_EAX );
  2932                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2933                         sh4_x86.tstate = TSTATE_NONE;
  2935                         break;
  2936                     case 0xE:
  2937                         { /* XOR.B #imm, @(R0, GBR) */
  2938                         uint32_t imm = (ir&0xFF); 
  2939                         COUNT_INST(I_XORB);
  2940                         load_reg( R_EAX, 0 );
  2941                         load_spreg( R_ECX, R_GBR );
  2942                         ADD_r32_r32( R_ECX, R_EAX );
  2943                         MMU_TRANSLATE_WRITE( R_EAX );
  2944                         PUSH_realigned_r32(R_EAX);
  2945                         MEM_READ_BYTE(R_EAX, R_EAX);
  2946                         POP_realigned_r32(R_ECX);
  2947                         XOR_imm32_r32( imm, R_EAX );
  2948                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2949                         sh4_x86.tstate = TSTATE_NONE;
  2951                         break;
  2952                     case 0xF:
  2953                         { /* OR.B #imm, @(R0, GBR) */
  2954                         uint32_t imm = (ir&0xFF); 
  2955                         COUNT_INST(I_ORB);
  2956                         load_reg( R_EAX, 0 );
  2957                         load_spreg( R_ECX, R_GBR );
  2958                         ADD_r32_r32( R_ECX, R_EAX );
  2959                         MMU_TRANSLATE_WRITE( R_EAX );
  2960                         PUSH_realigned_r32(R_EAX);
  2961                         MEM_READ_BYTE( R_EAX, R_EAX );
  2962                         POP_realigned_r32(R_ECX);
  2963                         OR_imm32_r32(imm, R_EAX );
  2964                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2965                         sh4_x86.tstate = TSTATE_NONE;
  2967                         break;
  2969                 break;
  2970             case 0xD:
  2971                 { /* MOV.L @(disp, PC), Rn */
  2972                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2; 
  2973                 COUNT_INST(I_MOVLPC);
  2974                 if( sh4_x86.in_delay_slot ) {
  2975             	SLOTILLEGAL();
  2976                 } else {
  2977             	uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
  2978             	if( IS_IN_ICACHE(target) ) {
  2979             	    // If the target address is in the same page as the code, it's
  2980             	    // pretty safe to just ref it directly and circumvent the whole
  2981             	    // memory subsystem. (this is a big performance win)
  2983             	    // FIXME: There's a corner-case that's not handled here when
  2984             	    // the current code-page is in the ITLB but not in the UTLB.
  2985             	    // (should generate a TLB miss although need to test SH4 
  2986             	    // behaviour to confirm) Unlikely to be anyone depending on this
  2987             	    // behaviour though.
  2988             	    sh4ptr_t ptr = GET_ICACHE_PTR(target);
  2989             	    MOV_moff32_EAX( ptr );
  2990             	} else {
  2991             	    // Note: we use sh4r.pc for the calc as we could be running at a
  2992             	    // different virtual address than the translation was done with,
  2993             	    // but we can safely assume that the low bits are the same.
  2994             	    load_imm32( R_EAX, (pc-sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
  2995             	    ADD_sh4r_r32( R_PC, R_EAX );
  2996             	    MMU_TRANSLATE_READ( R_EAX );
  2997             	    MEM_READ_LONG( R_EAX, R_EAX );
  2998             	    sh4_x86.tstate = TSTATE_NONE;
  3000             	store_reg( R_EAX, Rn );
  3003                 break;
  3004             case 0xE:
  3005                 { /* MOV #imm, Rn */
  3006                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  3007                 COUNT_INST(I_MOVI);
  3008                 load_imm32( R_EAX, imm );
  3009                 store_reg( R_EAX, Rn );
  3011                 break;
  3012             case 0xF:
  3013                 switch( ir&0xF ) {
  3014                     case 0x0:
  3015                         { /* FADD FRm, FRn */
  3016                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3017                         COUNT_INST(I_FADD);
  3018                         check_fpuen();
  3019                         load_spreg( R_ECX, R_FPSCR );
  3020                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3021                         JNE_rel8(doubleprec);
  3022                         push_fr(FRm);
  3023                         push_fr(FRn);
  3024                         FADDP_st(1);
  3025                         pop_fr(FRn);
  3026                         JMP_rel8(end);
  3027                         JMP_TARGET(doubleprec);
  3028                         push_dr(FRm);
  3029                         push_dr(FRn);
  3030                         FADDP_st(1);
  3031                         pop_dr(FRn);
  3032                         JMP_TARGET(end);
  3033                         sh4_x86.tstate = TSTATE_NONE;
  3035                         break;
  3036                     case 0x1:
  3037                         { /* FSUB FRm, FRn */
  3038                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3039                         COUNT_INST(I_FSUB);
  3040                         check_fpuen();
  3041                         load_spreg( R_ECX, R_FPSCR );
  3042                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3043                         JNE_rel8(doubleprec);
  3044                         push_fr(FRn);
  3045                         push_fr(FRm);
  3046                         FSUBP_st(1);
  3047                         pop_fr(FRn);
  3048                         JMP_rel8(end);
  3049                         JMP_TARGET(doubleprec);
  3050                         push_dr(FRn);
  3051                         push_dr(FRm);
  3052                         FSUBP_st(1);
  3053                         pop_dr(FRn);
  3054                         JMP_TARGET(end);
  3055                         sh4_x86.tstate = TSTATE_NONE;
  3057                         break;
  3058                     case 0x2:
  3059                         { /* FMUL FRm, FRn */
  3060                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3061                         COUNT_INST(I_FMUL);
  3062                         check_fpuen();
  3063                         load_spreg( R_ECX, R_FPSCR );
  3064                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3065                         JNE_rel8(doubleprec);
  3066                         push_fr(FRm);
  3067                         push_fr(FRn);
  3068                         FMULP_st(1);
  3069                         pop_fr(FRn);
  3070                         JMP_rel8(end);
  3071                         JMP_TARGET(doubleprec);
  3072                         push_dr(FRm);
  3073                         push_dr(FRn);
  3074                         FMULP_st(1);
  3075                         pop_dr(FRn);
  3076                         JMP_TARGET(end);
  3077                         sh4_x86.tstate = TSTATE_NONE;
  3079                         break;
  3080                     case 0x3:
  3081                         { /* FDIV FRm, FRn */
  3082                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3083                         COUNT_INST(I_FDIV);
  3084                         check_fpuen();
  3085                         load_spreg( R_ECX, R_FPSCR );
  3086                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3087                         JNE_rel8(doubleprec);
  3088                         push_fr(FRn);
  3089                         push_fr(FRm);
  3090                         FDIVP_st(1);
  3091                         pop_fr(FRn);
  3092                         JMP_rel8(end);
  3093                         JMP_TARGET(doubleprec);
  3094                         push_dr(FRn);
  3095                         push_dr(FRm);
  3096                         FDIVP_st(1);
  3097                         pop_dr(FRn);
  3098                         JMP_TARGET(end);
  3099                         sh4_x86.tstate = TSTATE_NONE;
  3101                         break;
  3102                     case 0x4:
  3103                         { /* FCMP/EQ FRm, FRn */
  3104                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3105                         COUNT_INST(I_FCMPEQ);
  3106                         check_fpuen();
  3107                         load_spreg( R_ECX, R_FPSCR );
  3108                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3109                         JNE_rel8(doubleprec);
  3110                         push_fr(FRm);
  3111                         push_fr(FRn);
  3112                         JMP_rel8(end);
  3113                         JMP_TARGET(doubleprec);
  3114                         push_dr(FRm);
  3115                         push_dr(FRn);
  3116                         JMP_TARGET(end);
  3117                         FCOMIP_st(1);
  3118                         SETE_t();
  3119                         FPOP_st();
  3120                         sh4_x86.tstate = TSTATE_NONE;
  3122                         break;
  3123                     case 0x5:
  3124                         { /* FCMP/GT FRm, FRn */
  3125                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3126                         COUNT_INST(I_FCMPGT);
  3127                         check_fpuen();
  3128                         load_spreg( R_ECX, R_FPSCR );
  3129                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3130                         JNE_rel8(doubleprec);
  3131                         push_fr(FRm);
  3132                         push_fr(FRn);
  3133                         JMP_rel8(end);
  3134                         JMP_TARGET(doubleprec);
  3135                         push_dr(FRm);
  3136                         push_dr(FRn);
  3137                         JMP_TARGET(end);
  3138                         FCOMIP_st(1);
  3139                         SETA_t();
  3140                         FPOP_st();
  3141                         sh4_x86.tstate = TSTATE_NONE;
  3143                         break;
  3144                     case 0x6:
  3145                         { /* FMOV @(R0, Rm), FRn */
  3146                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3147                         COUNT_INST(I_FMOV7);
  3148                         check_fpuen();
  3149                         load_reg( R_EAX, Rm );
  3150                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
  3151                         load_spreg( R_EDX, R_FPSCR );
  3152                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3153                         JNE_rel8(doublesize);
  3155                         check_ralign32( R_EAX );
  3156                         MMU_TRANSLATE_READ( R_EAX );
  3157                         MEM_READ_LONG( R_EAX, R_EAX );
  3158                         store_fr( R_EAX, FRn );
  3159                         JMP_rel8(end);
  3161                         JMP_TARGET(doublesize);
  3162                         check_ralign64( R_EAX );
  3163                         MMU_TRANSLATE_READ( R_EAX );
  3164                         MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3165                         store_dr0( R_ECX, FRn );
  3166                         store_dr1( R_EAX, FRn );
  3167                         JMP_TARGET(end);
  3169                         sh4_x86.tstate = TSTATE_NONE;
  3171                         break;
  3172                     case 0x7:
  3173                         { /* FMOV FRm, @(R0, Rn) */
  3174                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3175                         COUNT_INST(I_FMOV4);
  3176                         check_fpuen();
  3177                         load_reg( R_EAX, Rn );
  3178                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
  3179                         load_spreg( R_EDX, R_FPSCR );
  3180                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3181                         JNE_rel8(doublesize);
  3183                         check_walign32( R_EAX );
  3184                         MMU_TRANSLATE_WRITE( R_EAX );
  3185                         load_fr( R_ECX, FRm );
  3186                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3187                         JMP_rel8(end);
  3189                         JMP_TARGET(doublesize);
  3190                         check_walign64( R_EAX );
  3191                         MMU_TRANSLATE_WRITE( R_EAX );
  3192                         load_dr0( R_ECX, FRm );
  3193                         load_dr1( R_EDX, FRm );
  3194                         MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3195                         JMP_TARGET(end);
  3197                         sh4_x86.tstate = TSTATE_NONE;
  3199                         break;
  3200                     case 0x8:
  3201                         { /* FMOV @Rm, FRn */
  3202                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3203                         COUNT_INST(I_FMOV5);
  3204                         check_fpuen();
  3205                         load_reg( R_EAX, Rm );
  3206                         load_spreg( R_EDX, R_FPSCR );
  3207                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3208                         JNE_rel8(doublesize);
  3210                         check_ralign32( R_EAX );
  3211                         MMU_TRANSLATE_READ( R_EAX );
  3212                         MEM_READ_LONG( R_EAX, R_EAX );
  3213                         store_fr( R_EAX, FRn );
  3214                         JMP_rel8(end);
  3216                         JMP_TARGET(doublesize);
  3217                         check_ralign64( R_EAX );
  3218                         MMU_TRANSLATE_READ( R_EAX );
  3219                         MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3220                         store_dr0( R_ECX, FRn );
  3221                         store_dr1( R_EAX, FRn );
  3222                         JMP_TARGET(end);
  3223                         sh4_x86.tstate = TSTATE_NONE;
  3225                         break;
  3226                     case 0x9:
  3227                         { /* FMOV @Rm+, FRn */
  3228                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3229                         COUNT_INST(I_FMOV6);
  3230                         check_fpuen();
  3231                         load_reg( R_EAX, Rm );
  3232                         load_spreg( R_EDX, R_FPSCR );
  3233                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3234                         JNE_rel8(doublesize);
  3236                         check_ralign32( R_EAX );
  3237                         MMU_TRANSLATE_READ( R_EAX );
  3238                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  3239                         MEM_READ_LONG( R_EAX, R_EAX );
  3240                         store_fr( R_EAX, FRn );
  3241                         JMP_rel8(end);
  3243                         JMP_TARGET(doublesize);
  3244                         check_ralign64( R_EAX );
  3245                         MMU_TRANSLATE_READ( R_EAX );
  3246                         ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
  3247                         MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3248                         store_dr0( R_ECX, FRn );
  3249                         store_dr1( R_EAX, FRn );
  3250                         JMP_TARGET(end);
  3252                         sh4_x86.tstate = TSTATE_NONE;
  3254                         break;
  3255                     case 0xA:
  3256                         { /* FMOV FRm, @Rn */
  3257                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3258                         COUNT_INST(I_FMOV2);
  3259                         check_fpuen();
  3260                         load_reg( R_EAX, Rn );
  3261                         load_spreg( R_EDX, R_FPSCR );
  3262                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3263                         JNE_rel8(doublesize);
  3265                         check_walign32( R_EAX );
  3266                         MMU_TRANSLATE_WRITE( R_EAX );
  3267                         load_fr( R_ECX, FRm );
  3268                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3269                         JMP_rel8(end);
  3271                         JMP_TARGET(doublesize);
  3272                         check_walign64( R_EAX );
  3273                         MMU_TRANSLATE_WRITE( R_EAX );
  3274                         load_dr0( R_ECX, FRm );
  3275                         load_dr1( R_EDX, FRm );
  3276                         MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3277                         JMP_TARGET(end);
  3278                         sh4_x86.tstate = TSTATE_NONE;
  3280                         break;
  3281                     case 0xB:
  3282                         { /* FMOV FRm, @-Rn */
  3283                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3284                         COUNT_INST(I_FMOV3);
  3285                         check_fpuen();
  3286                         load_reg( R_EAX, Rn );
  3287                         load_spreg( R_EDX, R_FPSCR );
  3288                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3289                         JNE_rel8(doublesize);
  3291                         check_walign32( R_EAX );
  3292                         ADD_imm8s_r32( -4, R_EAX );
  3293                         MMU_TRANSLATE_WRITE( R_EAX );
  3294                         load_fr( R_ECX, FRm );
  3295                         ADD_imm8s_sh4r(-4,REG_OFFSET(r[Rn]));
  3296                         MEM_WRITE_LONG( R_EAX, R_ECX );
  3297                         JMP_rel8(end);
  3299                         JMP_TARGET(doublesize);
  3300                         check_walign64( R_EAX );
  3301                         ADD_imm8s_r32(-8,R_EAX);
  3302                         MMU_TRANSLATE_WRITE( R_EAX );
  3303                         load_dr0( R_ECX, FRm );
  3304                         load_dr1( R_EDX, FRm );
  3305                         ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
  3306                         MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3307                         JMP_TARGET(end);
  3309                         sh4_x86.tstate = TSTATE_NONE;
  3311                         break;
  3312                     case 0xC:
  3313                         { /* FMOV FRm, FRn */
  3314                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3315                         COUNT_INST(I_FMOV1);
  3316                         check_fpuen();
  3317                         load_spreg( R_ECX, R_FPSCR );
  3318                         TEST_imm32_r32( FPSCR_SZ, R_ECX );
  3319                         JNE_rel8(doublesize);
  3320                         load_fr( R_EAX, FRm ); // SZ=0 branch
  3321                         store_fr( R_EAX, FRn );
  3322                         JMP_rel8(end);
  3323                         JMP_TARGET(doublesize);
  3324                         load_dr0( R_EAX, FRm );
  3325                         load_dr1( R_ECX, FRm );
  3326                         store_dr0( R_EAX, FRn );
  3327                         store_dr1( R_ECX, FRn );
  3328                         JMP_TARGET(end);
  3329                         sh4_x86.tstate = TSTATE_NONE;
  3331                         break;
  3332                     case 0xD:
  3333                         switch( (ir&0xF0) >> 4 ) {
  3334                             case 0x0:
  3335                                 { /* FSTS FPUL, FRn */
  3336                                 uint32_t FRn = ((ir>>8)&0xF); 
  3337                                 COUNT_INST(I_FSTS);
  3338                                 check_fpuen();
  3339                                 load_spreg( R_EAX, R_FPUL );
  3340                                 store_fr( R_EAX, FRn );
  3341                                 sh4_x86.tstate = TSTATE_NONE;
  3343                                 break;
  3344                             case 0x1:
  3345                                 { /* FLDS FRm, FPUL */
  3346                                 uint32_t FRm = ((ir>>8)&0xF); 
  3347                                 COUNT_INST(I_FLDS);
  3348                                 check_fpuen();
  3349                                 load_fr( R_EAX, FRm );
  3350                                 store_spreg( R_EAX, R_FPUL );
  3351                                 sh4_x86.tstate = TSTATE_NONE;
  3353                                 break;
  3354                             case 0x2:
  3355                                 { /* FLOAT FPUL, FRn */
  3356                                 uint32_t FRn = ((ir>>8)&0xF); 
  3357                                 COUNT_INST(I_FLOAT);
  3358                                 check_fpuen();
  3359                                 load_spreg( R_ECX, R_FPSCR );
  3360                                 FILD_sh4r(R_FPUL);
  3361                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3362                                 JNE_rel8(doubleprec);
  3363                                 pop_fr( FRn );
  3364                                 JMP_rel8(end);
  3365                                 JMP_TARGET(doubleprec);
  3366                                 pop_dr( FRn );
  3367                                 JMP_TARGET(end);
  3368                                 sh4_x86.tstate = TSTATE_NONE;
  3370                                 break;
  3371                             case 0x3:
  3372                                 { /* FTRC FRm, FPUL */
  3373                                 uint32_t FRm = ((ir>>8)&0xF); 
  3374                                 COUNT_INST(I_FTRC);
  3375                                 check_fpuen();
  3376                                 load_spreg( R_ECX, R_FPSCR );
  3377                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3378                                 JNE_rel8(doubleprec);
  3379                                 push_fr( FRm );
  3380                                 JMP_rel8(doop);
  3381                                 JMP_TARGET(doubleprec);
  3382                                 push_dr( FRm );
  3383                                 JMP_TARGET( doop );
  3384                                 load_imm32( R_ECX, (uint32_t)&max_int );
  3385                                 FILD_r32ind( R_ECX );
  3386                                 FCOMIP_st(1);
  3387                                 JNA_rel8( sat );
  3388                                 load_imm32( R_ECX, (uint32_t)&min_int );  // 5
  3389                                 FILD_r32ind( R_ECX );           // 2
  3390                                 FCOMIP_st(1);                   // 2
  3391                                 JAE_rel8( sat2 );            // 2
  3392                                 load_imm32( R_EAX, (uint32_t)&save_fcw );
  3393                                 FNSTCW_r32ind( R_EAX );
  3394                                 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
  3395                                 FLDCW_r32ind( R_EDX );
  3396                                 FISTP_sh4r(R_FPUL);             // 3
  3397                                 FLDCW_r32ind( R_EAX );
  3398                                 JMP_rel8(end);             // 2
  3400                                 JMP_TARGET(sat);
  3401                                 JMP_TARGET(sat2);
  3402                                 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
  3403                                 store_spreg( R_ECX, R_FPUL );
  3404                                 FPOP_st();
  3405                                 JMP_TARGET(end);
  3406                                 sh4_x86.tstate = TSTATE_NONE;
  3408                                 break;
  3409                             case 0x4:
  3410                                 { /* FNEG FRn */
  3411                                 uint32_t FRn = ((ir>>8)&0xF); 
  3412                                 COUNT_INST(I_FNEG);
  3413                                 check_fpuen();
  3414                                 load_spreg( R_ECX, R_FPSCR );
  3415                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3416                                 JNE_rel8(doubleprec);
  3417                                 push_fr(FRn);
  3418                                 FCHS_st0();
  3419                                 pop_fr(FRn);
  3420                                 JMP_rel8(end);
  3421                                 JMP_TARGET(doubleprec);
  3422                                 push_dr(FRn);
  3423                                 FCHS_st0();
  3424                                 pop_dr(FRn);
  3425                                 JMP_TARGET(end);
  3426                                 sh4_x86.tstate = TSTATE_NONE;
  3428                                 break;
  3429                             case 0x5:
  3430                                 { /* FABS FRn */
  3431                                 uint32_t FRn = ((ir>>8)&0xF); 
  3432                                 COUNT_INST(I_FABS);
  3433                                 check_fpuen();
  3434                                 load_spreg( R_ECX, R_FPSCR );
  3435                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3436                                 JNE_rel8(doubleprec);
  3437                                 push_fr(FRn); // 6
  3438                                 FABS_st0(); // 2
  3439                                 pop_fr(FRn); //6
  3440                                 JMP_rel8(end); // 2
  3441                                 JMP_TARGET(doubleprec);
  3442                                 push_dr(FRn);
  3443                                 FABS_st0();
  3444                                 pop_dr(FRn);
  3445                                 JMP_TARGET(end);
  3446                                 sh4_x86.tstate = TSTATE_NONE;
  3448                                 break;
  3449                             case 0x6:
  3450                                 { /* FSQRT FRn */
  3451                                 uint32_t FRn = ((ir>>8)&0xF); 
  3452                                 COUNT_INST(I_FSQRT);
  3453                                 check_fpuen();
  3454                                 load_spreg( R_ECX, R_FPSCR );
  3455                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3456                                 JNE_rel8(doubleprec);
  3457                                 push_fr(FRn);
  3458                                 FSQRT_st0();
  3459                                 pop_fr(FRn);
  3460                                 JMP_rel8(end);
  3461                                 JMP_TARGET(doubleprec);
  3462                                 push_dr(FRn);
  3463                                 FSQRT_st0();
  3464                                 pop_dr(FRn);
  3465                                 JMP_TARGET(end);
  3466                                 sh4_x86.tstate = TSTATE_NONE;
  3468                                 break;
  3469                             case 0x7:
  3470                                 { /* FSRRA FRn */
  3471                                 uint32_t FRn = ((ir>>8)&0xF); 
  3472                                 COUNT_INST(I_FSRRA);
  3473                                 check_fpuen();
  3474                                 load_spreg( R_ECX, R_FPSCR );
  3475                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3476                                 JNE_rel8(end); // PR=0 only
  3477                                 FLD1_st0();
  3478                                 push_fr(FRn);
  3479                                 FSQRT_st0();
  3480                                 FDIVP_st(1);
  3481                                 pop_fr(FRn);
  3482                                 JMP_TARGET(end);
  3483                                 sh4_x86.tstate = TSTATE_NONE;
  3485                                 break;
  3486                             case 0x8:
  3487                                 { /* FLDI0 FRn */
  3488                                 uint32_t FRn = ((ir>>8)&0xF); 
  3489                                 /* IFF PR=0 */
  3490                                   COUNT_INST(I_FLDI0);
  3491                                   check_fpuen();
  3492                                   load_spreg( R_ECX, R_FPSCR );
  3493                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3494                                   JNE_rel8(end);
  3495                                   XOR_r32_r32( R_EAX, R_EAX );
  3496                                   store_fr( R_EAX, FRn );
  3497                                   JMP_TARGET(end);
  3498                                   sh4_x86.tstate = TSTATE_NONE;
  3500                                 break;
  3501                             case 0x9:
  3502                                 { /* FLDI1 FRn */
  3503                                 uint32_t FRn = ((ir>>8)&0xF); 
  3504                                 /* IFF PR=0 */
  3505                                   COUNT_INST(I_FLDI1);
  3506                                   check_fpuen();
  3507                                   load_spreg( R_ECX, R_FPSCR );
  3508                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3509                                   JNE_rel8(end);
  3510                                   load_imm32(R_EAX, 0x3F800000);
  3511                                   store_fr( R_EAX, FRn );
  3512                                   JMP_TARGET(end);
  3513                                   sh4_x86.tstate = TSTATE_NONE;
  3515                                 break;
  3516                             case 0xA:
  3517                                 { /* FCNVSD FPUL, FRn */
  3518                                 uint32_t FRn = ((ir>>8)&0xF); 
  3519                                 COUNT_INST(I_FCNVSD);
  3520                                 check_fpuen();
  3521                                 load_spreg( R_ECX, R_FPSCR );
  3522                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3523                                 JE_rel8(end); // only when PR=1
  3524                                 push_fpul();
  3525                                 pop_dr( FRn );
  3526                                 JMP_TARGET(end);
  3527                                 sh4_x86.tstate = TSTATE_NONE;
  3529                                 break;
  3530                             case 0xB:
  3531                                 { /* FCNVDS FRm, FPUL */
  3532                                 uint32_t FRm = ((ir>>8)&0xF); 
  3533                                 COUNT_INST(I_FCNVDS);
  3534                                 check_fpuen();
  3535                                 load_spreg( R_ECX, R_FPSCR );
  3536                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3537                                 JE_rel8(end); // only when PR=1
  3538                                 push_dr( FRm );
  3539                                 pop_fpul();
  3540                                 JMP_TARGET(end);
  3541                                 sh4_x86.tstate = TSTATE_NONE;
  3543                                 break;
  3544                             case 0xE:
  3545                                 { /* FIPR FVm, FVn */
  3546                                 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3); 
  3547                                 COUNT_INST(I_FIPR);
  3548                                 check_fpuen();
  3549                                 load_spreg( R_ECX, R_FPSCR );
  3550                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3551                                 JNE_rel8( doubleprec);
  3553                                 push_fr( FVm<<2 );
  3554                                 push_fr( FVn<<2 );
  3555                                 FMULP_st(1);
  3556                                 push_fr( (FVm<<2)+1);
  3557                                 push_fr( (FVn<<2)+1);
  3558                                 FMULP_st(1);
  3559                                 FADDP_st(1);
  3560                                 push_fr( (FVm<<2)+2);
  3561                                 push_fr( (FVn<<2)+2);
  3562                                 FMULP_st(1);
  3563                                 FADDP_st(1);
  3564                                 push_fr( (FVm<<2)+3);
  3565                                 push_fr( (FVn<<2)+3);
  3566                                 FMULP_st(1);
  3567                                 FADDP_st(1);
  3568                                 pop_fr( (FVn<<2)+3);
  3569                                 JMP_TARGET(doubleprec);
  3570                                 sh4_x86.tstate = TSTATE_NONE;
  3572                                 break;
  3573                             case 0xF:
  3574                                 switch( (ir&0x100) >> 8 ) {
  3575                                     case 0x0:
  3576                                         { /* FSCA FPUL, FRn */
  3577                                         uint32_t FRn = ((ir>>9)&0x7)<<1; 
  3578                                         COUNT_INST(I_FSCA);
  3579                                         check_fpuen();
  3580                                         load_spreg( R_ECX, R_FPSCR );
  3581                                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3582                                         JNE_rel8(doubleprec );
  3583                                         LEA_sh4r_r32( REG_OFFSET(fr[0][FRn&0x0E]), R_ECX );
  3584                                         load_spreg( R_EDX, R_FPUL );
  3585                                         call_func2( sh4_fsca, R_EDX, R_ECX );
  3586                                         JMP_TARGET(doubleprec);
  3587                                         sh4_x86.tstate = TSTATE_NONE;
  3589                                         break;
  3590                                     case 0x1:
  3591                                         switch( (ir&0x200) >> 9 ) {
  3592                                             case 0x0:
  3593                                                 { /* FTRV XMTRX, FVn */
  3594                                                 uint32_t FVn = ((ir>>10)&0x3); 
  3595                                                 COUNT_INST(I_FTRV);
  3596                                                 check_fpuen();
  3597                                                 load_spreg( R_ECX, R_FPSCR );
  3598                                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3599                                                 JNE_rel8( doubleprec );
  3600                                                 LEA_sh4r_r32( REG_OFFSET(fr[0][FVn<<2]), R_EDX );
  3601                                                 call_func1( sh4_ftrv, R_EDX );  // 12
  3602                                                 JMP_TARGET(doubleprec);
  3603                                                 sh4_x86.tstate = TSTATE_NONE;
  3605                                                 break;
  3606                                             case 0x1:
  3607                                                 switch( (ir&0xC00) >> 10 ) {
  3608                                                     case 0x0:
  3609                                                         { /* FSCHG */
  3610                                                         COUNT_INST(I_FSCHG);
  3611                                                         check_fpuen();
  3612                                                         load_spreg( R_ECX, R_FPSCR );
  3613                                                         XOR_imm32_r32( FPSCR_SZ, R_ECX );
  3614                                                         store_spreg( R_ECX, R_FPSCR );
  3615                                                         sh4_x86.tstate = TSTATE_NONE;
  3617                                                         break;
  3618                                                     case 0x2:
  3619                                                         { /* FRCHG */
  3620                                                         COUNT_INST(I_FRCHG);
  3621                                                         check_fpuen();
  3622                                                         load_spreg( R_ECX, R_FPSCR );
  3623                                                         XOR_imm32_r32( FPSCR_FR, R_ECX );
  3624                                                         store_spreg( R_ECX, R_FPSCR );
  3625                                                         call_func0( sh4_switch_fr_banks );
  3626                                                         sh4_x86.tstate = TSTATE_NONE;
  3628                                                         break;
  3629                                                     case 0x3:
  3630                                                         { /* UNDEF */
  3631                                                         COUNT_INST(I_UNDEF);
  3632                                                         if( sh4_x86.in_delay_slot ) {
  3633                                                     	SLOTILLEGAL();
  3634                                                         } else {
  3635                                                     	JMP_exc(EXC_ILLEGAL);
  3636                                                     	return 2;
  3639                                                         break;
  3640                                                     default:
  3641                                                         UNDEF();
  3642                                                         break;
  3644                                                 break;
  3646                                         break;
  3648                                 break;
  3649                             default:
  3650                                 UNDEF();
  3651                                 break;
  3653                         break;
  3654                     case 0xE:
  3655                         { /* FMAC FR0, FRm, FRn */
  3656                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3657                         COUNT_INST(I_FMAC);
  3658                         check_fpuen();
  3659                         load_spreg( R_ECX, R_FPSCR );
  3660                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3661                         JNE_rel8(doubleprec);
  3662                         push_fr( 0 );
  3663                         push_fr( FRm );
  3664                         FMULP_st(1);
  3665                         push_fr( FRn );
  3666                         FADDP_st(1);
  3667                         pop_fr( FRn );
  3668                         JMP_rel8(end);
  3669                         JMP_TARGET(doubleprec);
  3670                         push_dr( 0 );
  3671                         push_dr( FRm );
  3672                         FMULP_st(1);
  3673                         push_dr( FRn );
  3674                         FADDP_st(1);
  3675                         pop_dr( FRn );
  3676                         JMP_TARGET(end);
  3677                         sh4_x86.tstate = TSTATE_NONE;
  3679                         break;
  3680                     default:
  3681                         UNDEF();
  3682                         break;
  3684                 break;
  3687     sh4_x86.in_delay_slot = DELAY_NONE;
  3688     return 0;
.