lxdream.org :: lxdream/src/sh4/sh4x86.c
filename src/sh4/sh4x86.c
changeset 675:b97020f9af1c
prev 673:44c579439d73
next 732:f05753bbe723
author nkeynes
date Wed Jun 25 10:03:28 2008 +0000
permissions -rw-r--r--
last change Add sh4_dump_region convenience function
     1 /**
     2  * $Id$
     3  * 
     4  * SH4 => x86 translation. This version does no real optimization, it just
     5  * outputs straight-line x86 code - it mainly exists to provide a baseline
     6  * to test the optimizing versions against.
     7  *
     8  * Copyright (c) 2007 Nathan Keynes.
     9  *
    10  * This program is free software; you can redistribute it and/or modify
    11  * it under the terms of the GNU General Public License as published by
    12  * the Free Software Foundation; either version 2 of the License, or
    13  * (at your option) any later version.
    14  *
    15  * This program is distributed in the hope that it will be useful,
    16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
    17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    18  * GNU General Public License for more details.
    19  */
    21 #include <assert.h>
    22 #include <math.h>
    24 #ifndef NDEBUG
    25 #define DEBUG_JUMPS 1
    26 #endif
    28 #include "sh4/xltcache.h"
    29 #include "sh4/sh4core.h"
    30 #include "sh4/sh4trans.h"
    31 #include "sh4/sh4stat.h"
    32 #include "sh4/sh4mmio.h"
    33 #include "sh4/x86op.h"
    34 #include "clock.h"
    36 #define DEFAULT_BACKPATCH_SIZE 4096
    38 struct backpatch_record {
    39     uint32_t fixup_offset;
    40     uint32_t fixup_icount;
    41     int32_t exc_code;
    42 };
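/* fixup_offset is the byte offset of the patch site within the translated block,
 * fixup_icount the number of SH4 instructions from the start of the block (used
 * to recover the faulting PC), and exc_code the exception to raise - or -1 in
 * the MMU cases below, where mmu_vma_to_phys_read/write has presumably already
 * determined the exception to deliver. */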
    44 #define MAX_RECOVERY_SIZE 2048
    46 #define DELAY_NONE 0
    47 #define DELAY_PC 1
    48 #define DELAY_PC_PR 2
    50 /** 
    51  * Struct to manage internal translation state. This state is not saved -
    52  * it is only valid between calls to sh4_translate_begin_block() and
    53  * sh4_translate_end_block()
    54  */
    55 struct sh4_x86_state {
    56     int in_delay_slot;
    57     gboolean priv_checked; /* true if we've already checked the cpu mode. */
    58     gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
    59     gboolean branch_taken; /* true if we branched unconditionally */
    60     uint32_t block_start_pc;
    61     uint32_t stack_posn;   /* Trace stack height for alignment purposes */
    62     int tstate;
    64     /* mode flags */
    65     gboolean tlb_on; /* True if tlb translation is active */
    67     /* Allocated memory for the (block-wide) back-patch list */
    68     struct backpatch_record *backpatch_list;
    69     uint32_t backpatch_posn;
    70     uint32_t backpatch_size;
    71 };
    73 #define TSTATE_NONE -1
    74 #define TSTATE_O    0
    75 #define TSTATE_C    2
    76 #define TSTATE_E    4
    77 #define TSTATE_NE   5
    78 #define TSTATE_G    0xF
    79 #define TSTATE_GE   0xD
    80 #define TSTATE_A    7
    81 #define TSTATE_AE   3
    83 #ifdef ENABLE_SH4STATS
    84 #define COUNT_INST(id) load_imm32(R_EAX,id); call_func1(sh4_stats_add, R_EAX); sh4_x86.tstate = TSTATE_NONE
    85 #else
    86 #define COUNT_INST(id)
    87 #endif
    89 /** Branch if T is set (either in the current cflags, or in sh4r.t) */
    90 #define JT_rel8(label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    91 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    92     OP(0x70+sh4_x86.tstate); MARK_JMP8(label); OP(-1)
    94 /** Branch if T is clear (either in the current cflags or in sh4r.t) */
    95 #define JF_rel8(label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    96 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    97     OP(0x70+ (sh4_x86.tstate^1)); MARK_JMP8(label); OP(-1)
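/* The TSTATE_* values above are x86 condition codes: adding them to 0x70 gives
 * the short-form Jcc opcode (0x70+4 = 0x74 je, 0x70+5 = 0x75 jne, 0x70+2 = 0x72 jb,
 * 0x70+3 = 0x73 jae, 0x70+0xF = 0x7F jg, 0x70+0xD = 0x7D jge, 0x70+7 = 0x77 ja,
 * 0x70+0 = 0x70 jo), and XORing the code with 1 (as JF_rel8 does) inverts the
 * condition. OP(-1) emits a placeholder displacement byte, which is presumably
 * fixed up when the matching JMP_TARGET(label) is reached. */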
    99 static struct sh4_x86_state sh4_x86;
   101 static uint32_t max_int = 0x7FFFFFFF;
   102 static uint32_t min_int = 0x80000000;
   103 static uint32_t save_fcw; /* save value for fpu control word */
   104 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
   106 void sh4_translate_init(void)
   107 {
   108     sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
   109     sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(struct backpatch_record);
   110 }
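/* Note that backpatch_size counts records rather than bytes: the initial malloc
 * of DEFAULT_BACKPATCH_SIZE bytes holds backpatch_size records, and
 * sh4_x86_add_backpatch() below doubles the record count (and reallocs to match)
 * whenever the list fills. */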
   113 static void sh4_x86_add_backpatch( uint8_t *fixup_addr, uint32_t fixup_pc, uint32_t exc_code )
   114 {
   115     if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
   116 	sh4_x86.backpatch_size <<= 1;
   117 	sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, 
   118 					  sh4_x86.backpatch_size * sizeof(struct backpatch_record));
   119 	assert( sh4_x86.backpatch_list != NULL );
   120     }
   121     if( sh4_x86.in_delay_slot ) {
   122 	fixup_pc -= 2;
   123     }
   124     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_offset = 
   125 	((uint8_t *)fixup_addr) - ((uint8_t *)xlat_current_block->code);
   126     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_icount = (fixup_pc - sh4_x86.block_start_pc)>>1;
   127     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].exc_code = exc_code;
   128     sh4_x86.backpatch_posn++;
   129 }
   131 /**
   132  * Emit an instruction to load an SH4 reg into a real register
   133  */
   134 static inline void load_reg( int x86reg, int sh4reg ) 
   135 {
   136     /* mov [bp+n], reg */
   137     OP(0x8B);
   138     OP(0x45 + (x86reg<<3));
   139     OP(REG_OFFSET(r[sh4reg]));
   140 }
   142 static inline void load_reg16s( int x86reg, int sh4reg )
   143 {
   144     OP(0x0F);
   145     OP(0xBF);
   146     MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
   147 }
   149 static inline void load_reg16u( int x86reg, int sh4reg )
   150 {
   151     OP(0x0F);
   152     OP(0xB7);
   153     MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
   155 }
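/* 0F BF /r is MOVSX r32,r/m16 and 0F B7 /r is MOVZX r32,r/m16, so the two
 * helpers above perform sign- and zero-extended 16-bit loads of an SH4 register
 * (used by MULS.W and MULU.W below). */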
   157 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
   158 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
   159 /**
   160  * Emit an instruction to load an immediate value into a register
   161  */
   162 static inline void load_imm32( int x86reg, uint32_t value ) {
   163     /* mov #value, reg */
   164     OP(0xB8 + x86reg);
   165     OP32(value);
   166 }
   168 /**
   169  * Load an immediate 64-bit quantity (note: x86-64 only)
   170  */
   171 static inline void load_imm64( int x86reg, uint32_t value ) {
   172     /* mov #value, reg */
   173     REXW();
   174     OP(0xB8 + x86reg);
   175     OP64(value);
   176 }
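/* Note that value is declared uint32_t, so the 8-byte immediate emitted by
 * OP64() is always the zero-extended 32-bit value. */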
   178 /**
   179  * Emit an instruction to store an SH4 reg (RN)
   180  */
   181 void static inline store_reg( int x86reg, int sh4reg ) {
   182     /* mov reg, [bp+n] */
   183     OP(0x89);
   184     OP(0x45 + (x86reg<<3));
   185     OP(REG_OFFSET(r[sh4reg]));
   186 }
   188 /**
   189  * Load an FR register (single-precision floating point) into an integer x86
   190  * register (eg for register-to-register moves)
   191  */
   192 #define load_fr(reg,frm)  OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[0][(frm)^1]) )
   193 #define load_xf(reg,frm)  OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[1][(frm)^1]) )
   195 /**
   196  * Load the low half of a DR register (DR or XD) into an integer x86 register 
   197  */
   198 #define load_dr0(reg,frm) OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[frm&1][frm|0x01]) )
   199 #define load_dr1(reg,frm) OP(0x8B); MODRM_r32_ebp32(reg, REG_OFFSET(fr[frm&1][frm&0x0E]) )
   201 /**
    202  * Store an FR register (single-precision floating point) from an integer x86
   203  * register (eg for register-to-register moves)
   204  */
   205 #define store_fr(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[0][(frm)^1]) )
   206 #define store_xf(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[1][(frm)^1]) )
   208 #define store_dr0(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[frm&1][frm|0x01]) )
   209 #define store_dr1(reg,frm) OP(0x89); MODRM_r32_ebp32( reg, REG_OFFSET(fr[frm&1][frm&0x0E]) )
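/* The index arithmetic above reflects the FR pair layout: within fr[bank][],
 * the low word of a DR register sits at the odd index (frm|0x01) and the high
 * word at the even index (frm&0x0E), which is also why the single-precision
 * accessors XOR the FR number with 1. */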
   212 #define push_fpul()  FLDF_sh4r(R_FPUL)
   213 #define pop_fpul()   FSTPF_sh4r(R_FPUL)
   214 #define push_fr(frm) FLDF_sh4r( REG_OFFSET(fr[0][(frm)^1]) )
   215 #define pop_fr(frm)  FSTPF_sh4r( REG_OFFSET(fr[0][(frm)^1]) )
   216 #define push_xf(frm) FLDF_sh4r( REG_OFFSET(fr[1][(frm)^1]) )
   217 #define pop_xf(frm)  FSTPF_sh4r( REG_OFFSET(fr[1][(frm)^1]) )
   218 #define push_dr(frm) FLDD_sh4r( REG_OFFSET(fr[0][(frm)&0x0E]) )
   219 #define pop_dr(frm)  FSTPD_sh4r( REG_OFFSET(fr[0][(frm)&0x0E]) )
   220 #define push_xdr(frm) FLDD_sh4r( REG_OFFSET(fr[1][(frm)&0x0E]) )
   221 #define pop_xdr(frm)  FSTPD_sh4r( REG_OFFSET(fr[1][(frm)&0x0E]) )
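/* The push/pop macros above work through the x87 stack: FLDF_sh4r/FLDD_sh4r
 * appear to emit single/double-precision x87 loads from the sh4r structure and
 * FSTPF/FSTPD the matching store-and-pop, so translated FP arithmetic is
 * performed on the x87 stack. */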
   225 /* Exception checks - Note that all exception checks will clobber EAX */
   227 #define check_priv( ) \
   228     if( !sh4_x86.priv_checked ) { \
   229 	sh4_x86.priv_checked = TRUE;\
   230 	load_spreg( R_EAX, R_SR );\
   231 	AND_imm32_r32( SR_MD, R_EAX );\
   232 	if( sh4_x86.in_delay_slot ) {\
   233 	    JE_exc( EXC_SLOT_ILLEGAL );\
   234 	} else {\
   235 	    JE_exc( EXC_ILLEGAL );\
   236 	}\
   237     }\
   239 #define check_fpuen( ) \
   240     if( !sh4_x86.fpuen_checked ) {\
   241 	sh4_x86.fpuen_checked = TRUE;\
   242 	load_spreg( R_EAX, R_SR );\
   243 	AND_imm32_r32( SR_FD, R_EAX );\
   244 	if( sh4_x86.in_delay_slot ) {\
   245 	    JNE_exc(EXC_SLOT_FPU_DISABLED);\
   246 	} else {\
   247 	    JNE_exc(EXC_FPU_DISABLED);\
   248 	}\
   249     }
   251 #define check_ralign16( x86reg ) \
   252     TEST_imm32_r32( 0x00000001, x86reg ); \
   253     JNE_exc(EXC_DATA_ADDR_READ)
   255 #define check_walign16( x86reg ) \
   256     TEST_imm32_r32( 0x00000001, x86reg ); \
   257     JNE_exc(EXC_DATA_ADDR_WRITE);
   259 #define check_ralign32( x86reg ) \
   260     TEST_imm32_r32( 0x00000003, x86reg ); \
   261     JNE_exc(EXC_DATA_ADDR_READ)
   263 #define check_walign32( x86reg ) \
   264     TEST_imm32_r32( 0x00000003, x86reg ); \
   265     JNE_exc(EXC_DATA_ADDR_WRITE);
   267 #define UNDEF()
   268 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
   269 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
   270 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
   271 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
   272 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
   273 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
   274 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
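/* All of the MEM_* macros dispatch to the sh4_read_xxx/sh4_write_xxx helpers
 * via the ABI's call_func1/call_func2, so every guest memory access is a C
 * function call; MEM_RESULT just copies the return value out of EAX when the
 * caller wants it in a different register. */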
   276 /**
   277  * Perform MMU translation on the address in addr_reg for a read operation, iff the TLB is turned 
   278  * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
   279  */
   280 #define MMU_TRANSLATE_READ( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
   282 #define MMU_TRANSLATE_READ_EXC( addr_reg, exc_code ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(exc_code); MEM_RESULT(addr_reg) }
   283 /**
   284  * Perform MMU translation on the address in addr_reg for a write operation, iff the TLB is turned 
   285  * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
   286  */
   287 #define MMU_TRANSLATE_WRITE( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
   289 #define MEM_READ_SIZE (CALL_FUNC1_SIZE)
   290 #define MEM_WRITE_SIZE (CALL_FUNC2_SIZE)
   291 #define MMU_TRANSLATE_SIZE (sh4_x86.tlb_on ? (CALL_FUNC1_SIZE + 12) : 0 )
   293 #define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = DELAY_NONE; return 1;
   295 /****** Import appropriate calling conventions ******/
   296 #if SIZEOF_VOID_P == 8
   297 #include "sh4/ia64abi.h"
   298 #else /* 32-bit system */
   299 #ifdef APPLE_BUILD
   300 #include "sh4/ia32mac.h"
   301 #else
   302 #include "sh4/ia32abi.h"
   303 #endif
   304 #endif
   306 uint32_t sh4_translate_end_block_size()
   307 {
   308     if( sh4_x86.backpatch_posn <= 3 ) {
   309 	return EPILOGUE_SIZE + (sh4_x86.backpatch_posn*12);
   310     } else {
   311 	return EPILOGUE_SIZE + 48 + (sh4_x86.backpatch_posn-3)*15;
   312     }
   313 }
   316 /**
   317  * Embed a breakpoint into the generated code
   318  */
   319 void sh4_translate_emit_breakpoint( sh4vma_t pc )
   320 {
   321     load_imm32( R_EAX, pc );
   322     call_func1( sh4_translate_breakpoint_hit, R_EAX );
   323 }
   326 #define UNTRANSLATABLE(pc) !IS_IN_ICACHE(pc)
   328 /**
   329  * Embed a call to sh4_execute_instruction for situations that we
   330  * can't translate (just page-crossing delay slots at the moment).
   331  * Caller is responsible for setting new_pc before calling this function.
   332  *
   333  * Performs:
   334  *   Set PC = endpc
   335  *   Set sh4r.in_delay_slot = sh4_x86.in_delay_slot
   336  *   Update slice_cycle for endpc+2 (single step doesn't update slice_cycle)
   337  *   Call sh4_execute_instruction
   338  *   Call xlat_get_code_by_vma / xlat_get_code as for normal exit
   339  */
   340 void exit_block_emu( sh4vma_t endpc )
   341 {
   342     load_imm32( R_ECX, endpc - sh4_x86.block_start_pc );   // 5
   343     ADD_r32_sh4r( R_ECX, R_PC );
   345     load_imm32( R_ECX, (((endpc - sh4_x86.block_start_pc)>>1)+1)*sh4_cpu_period ); // 5
   346     ADD_r32_sh4r( R_ECX, REG_OFFSET(slice_cycle) );     // 6
   347     load_imm32( R_ECX, sh4_x86.in_delay_slot ? 1 : 0 );
   348     store_spreg( R_ECX, REG_OFFSET(in_delay_slot) );
   350     call_func0( sh4_execute_instruction );    
   351     load_spreg( R_EAX, R_PC );
   352     if( sh4_x86.tlb_on ) {
   353 	call_func1(xlat_get_code_by_vma,R_EAX);
   354     } else {
   355 	call_func1(xlat_get_code,R_EAX);
   356     }
   357     AND_imm8s_rptr( 0xFC, R_EAX );
   358     POP_r32(R_EBP);
   359     RET();
   360 } 
   362 /**
   363  * Translate a single instruction. Delayed branches are handled specially
    364  * by translating both branch and delayed instruction as a single unit (as
    365  * the delay-slot instruction executes before the branch takes effect).
   366  * The instruction MUST be in the icache (assert check)
   367  *
   368  * @return true if the instruction marks the end of a basic block
    369  * (eg a branch or other instruction that ends the block).
   370  */
   371 uint32_t sh4_translate_instruction( sh4vma_t pc )
   372 {
   373     uint32_t ir;
   374     /* Read instruction from icache */
   375     assert( IS_IN_ICACHE(pc) );
   376     ir = *(uint16_t *)GET_ICACHE_PTR(pc);
   378 	/* PC is not in the current icache - this usually means we're running
   379 	 * with MMU on, and we've gone past the end of the page. And since 
   380 	 * sh4_translate_block is pretty careful about this, it means we're
   381 	 * almost certainly in a delay slot.
   382 	 *
   383 	 * Since we can't assume the page is present (and we can't fault it in
    384  * at this point), inline a call to sh4_execute_instruction (with a few
   385 	 * small repairs to cope with the different environment).
   386 	 */
   388     if( !sh4_x86.in_delay_slot ) {
   389 	sh4_translate_add_recovery( (pc - sh4_x86.block_start_pc)>>1 );
   390     }
   391         switch( (ir&0xF000) >> 12 ) {
   392             case 0x0:
   393                 switch( ir&0xF ) {
   394                     case 0x2:
   395                         switch( (ir&0x80) >> 7 ) {
   396                             case 0x0:
   397                                 switch( (ir&0x70) >> 4 ) {
   398                                     case 0x0:
   399                                         { /* STC SR, Rn */
   400                                         uint32_t Rn = ((ir>>8)&0xF); 
   401                                         COUNT_INST(I_STCSR);
   402                                         check_priv();
   403                                         call_func0(sh4_read_sr);
   404                                         store_reg( R_EAX, Rn );
   405                                         sh4_x86.tstate = TSTATE_NONE;
   406                                         }
   407                                         break;
   408                                     case 0x1:
   409                                         { /* STC GBR, Rn */
   410                                         uint32_t Rn = ((ir>>8)&0xF); 
   411                                         COUNT_INST(I_STC);
   412                                         load_spreg( R_EAX, R_GBR );
   413                                         store_reg( R_EAX, Rn );
   414                                         }
   415                                         break;
   416                                     case 0x2:
   417                                         { /* STC VBR, Rn */
   418                                         uint32_t Rn = ((ir>>8)&0xF); 
   419                                         COUNT_INST(I_STC);
   420                                         check_priv();
   421                                         load_spreg( R_EAX, R_VBR );
   422                                         store_reg( R_EAX, Rn );
   423                                         sh4_x86.tstate = TSTATE_NONE;
   424                                         }
   425                                         break;
   426                                     case 0x3:
   427                                         { /* STC SSR, Rn */
   428                                         uint32_t Rn = ((ir>>8)&0xF); 
   429                                         COUNT_INST(I_STC);
   430                                         check_priv();
   431                                         load_spreg( R_EAX, R_SSR );
   432                                         store_reg( R_EAX, Rn );
   433                                         sh4_x86.tstate = TSTATE_NONE;
   434                                         }
   435                                         break;
   436                                     case 0x4:
   437                                         { /* STC SPC, Rn */
   438                                         uint32_t Rn = ((ir>>8)&0xF); 
   439                                         COUNT_INST(I_STC);
   440                                         check_priv();
   441                                         load_spreg( R_EAX, R_SPC );
   442                                         store_reg( R_EAX, Rn );
   443                                         sh4_x86.tstate = TSTATE_NONE;
   444                                         }
   445                                         break;
   446                                     default:
   447                                         UNDEF();
   448                                         break;
   449                                 }
   450                                 break;
   451                             case 0x1:
   452                                 { /* STC Rm_BANK, Rn */
   453                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
   454                                 COUNT_INST(I_STC);
   455                                 check_priv();
   456                                 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
   457                                 store_reg( R_EAX, Rn );
   458                                 sh4_x86.tstate = TSTATE_NONE;
   459                                 }
   460                                 break;
   461                         }
   462                         break;
   463                     case 0x3:
   464                         switch( (ir&0xF0) >> 4 ) {
   465                             case 0x0:
   466                                 { /* BSRF Rn */
   467                                 uint32_t Rn = ((ir>>8)&0xF); 
   468                                 COUNT_INST(I_BSRF);
   469                                 if( sh4_x86.in_delay_slot ) {
   470                             	SLOTILLEGAL();
   471                                 } else {
   472                             	load_spreg( R_EAX, R_PC );
   473                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
   474                             	store_spreg( R_EAX, R_PR );
   475                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_EAX );
   476                             	store_spreg( R_EAX, R_NEW_PC );
   478                             	sh4_x86.in_delay_slot = DELAY_PC;
   479                             	sh4_x86.tstate = TSTATE_NONE;
   480                             	sh4_x86.branch_taken = TRUE;
   481                             	if( UNTRANSLATABLE(pc+2) ) {
   482                             	    exit_block_emu(pc+2);
   483                             	    return 2;
   484                             	} else {
   485                             	    sh4_translate_instruction( pc + 2 );
   486                             	    exit_block_newpcset(pc+2);
   487                             	    return 4;
   488                             	}
   489                                 }
   490                                 }
   491                                 break;
   492                             case 0x2:
   493                                 { /* BRAF Rn */
   494                                 uint32_t Rn = ((ir>>8)&0xF); 
   495                                 COUNT_INST(I_BRAF);
   496                                 if( sh4_x86.in_delay_slot ) {
   497                             	SLOTILLEGAL();
   498                                 } else {
   499                             	load_spreg( R_EAX, R_PC );
   500                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
   501                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_EAX );
   502                             	store_spreg( R_EAX, R_NEW_PC );
   503                             	sh4_x86.in_delay_slot = DELAY_PC;
   504                             	sh4_x86.tstate = TSTATE_NONE;
   505                             	sh4_x86.branch_taken = TRUE;
   506                             	if( UNTRANSLATABLE(pc+2) ) {
   507                             	    exit_block_emu(pc+2);
   508                             	    return 2;
   509                             	} else {
   510                             	    sh4_translate_instruction( pc + 2 );
   511                             	    exit_block_newpcset(pc+2);
   512                             	    return 4;
   513                             	}
   514                                 }
   515                                 }
   516                                 break;
   517                             case 0x8:
   518                                 { /* PREF @Rn */
   519                                 uint32_t Rn = ((ir>>8)&0xF); 
   520                                 COUNT_INST(I_PREF);
   521                                 load_reg( R_EAX, Rn );
   522                                 MOV_r32_r32( R_EAX, R_ECX );
   523                                 AND_imm32_r32( 0xFC000000, R_EAX );
   524                                 CMP_imm32_r32( 0xE0000000, R_EAX );
   525                                 JNE_rel8(end);
   526                                 call_func1( sh4_flush_store_queue, R_ECX );
   527                                 TEST_r32_r32( R_EAX, R_EAX );
   528                                 JE_exc(-1);
   529                                 JMP_TARGET(end);
   530                                 sh4_x86.tstate = TSTATE_NONE;
   531                                 }
   532                                 break;
   533                             case 0x9:
   534                                 { /* OCBI @Rn */
   535                                 uint32_t Rn = ((ir>>8)&0xF); 
   536                                 COUNT_INST(I_OCBI);
   537                                 }
   538                                 break;
   539                             case 0xA:
   540                                 { /* OCBP @Rn */
   541                                 uint32_t Rn = ((ir>>8)&0xF); 
   542                                 COUNT_INST(I_OCBP);
   543                                 }
   544                                 break;
   545                             case 0xB:
   546                                 { /* OCBWB @Rn */
   547                                 uint32_t Rn = ((ir>>8)&0xF); 
   548                                 COUNT_INST(I_OCBWB);
   549                                 }
   550                                 break;
   551                             case 0xC:
   552                                 { /* MOVCA.L R0, @Rn */
   553                                 uint32_t Rn = ((ir>>8)&0xF); 
   554                                 COUNT_INST(I_MOVCA);
   555                                 load_reg( R_EAX, Rn );
   556                                 check_walign32( R_EAX );
   557                                 MMU_TRANSLATE_WRITE( R_EAX );
   558                                 load_reg( R_EDX, 0 );
   559                                 MEM_WRITE_LONG( R_EAX, R_EDX );
   560                                 sh4_x86.tstate = TSTATE_NONE;
   561                                 }
   562                                 break;
   563                             default:
   564                                 UNDEF();
   565                                 break;
   566                         }
   567                         break;
   568                     case 0x4:
   569                         { /* MOV.B Rm, @(R0, Rn) */
   570                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   571                         COUNT_INST(I_MOVB);
   572                         load_reg( R_EAX, 0 );
   573                         load_reg( R_ECX, Rn );
   574                         ADD_r32_r32( R_ECX, R_EAX );
   575                         MMU_TRANSLATE_WRITE( R_EAX );
   576                         load_reg( R_EDX, Rm );
   577                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   578                         sh4_x86.tstate = TSTATE_NONE;
   579                         }
   580                         break;
   581                     case 0x5:
   582                         { /* MOV.W Rm, @(R0, Rn) */
   583                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   584                         COUNT_INST(I_MOVW);
   585                         load_reg( R_EAX, 0 );
   586                         load_reg( R_ECX, Rn );
   587                         ADD_r32_r32( R_ECX, R_EAX );
   588                         check_walign16( R_EAX );
   589                         MMU_TRANSLATE_WRITE( R_EAX );
   590                         load_reg( R_EDX, Rm );
   591                         MEM_WRITE_WORD( R_EAX, R_EDX );
   592                         sh4_x86.tstate = TSTATE_NONE;
   593                         }
   594                         break;
   595                     case 0x6:
   596                         { /* MOV.L Rm, @(R0, Rn) */
   597                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   598                         COUNT_INST(I_MOVL);
   599                         load_reg( R_EAX, 0 );
   600                         load_reg( R_ECX, Rn );
   601                         ADD_r32_r32( R_ECX, R_EAX );
   602                         check_walign32( R_EAX );
   603                         MMU_TRANSLATE_WRITE( R_EAX );
   604                         load_reg( R_EDX, Rm );
   605                         MEM_WRITE_LONG( R_EAX, R_EDX );
   606                         sh4_x86.tstate = TSTATE_NONE;
   607                         }
   608                         break;
   609                     case 0x7:
   610                         { /* MUL.L Rm, Rn */
   611                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   612                         COUNT_INST(I_MULL);
   613                         load_reg( R_EAX, Rm );
   614                         load_reg( R_ECX, Rn );
   615                         MUL_r32( R_ECX );
   616                         store_spreg( R_EAX, R_MACL );
   617                         sh4_x86.tstate = TSTATE_NONE;
   618                         }
   619                         break;
   620                     case 0x8:
   621                         switch( (ir&0xFF0) >> 4 ) {
   622                             case 0x0:
   623                                 { /* CLRT */
   624                                 COUNT_INST(I_CLRT);
   625                                 CLC();
   626                                 SETC_t();
   627                                 sh4_x86.tstate = TSTATE_C;
   628                                 }
   629                                 break;
   630                             case 0x1:
   631                                 { /* SETT */
   632                                 COUNT_INST(I_SETT);
   633                                 STC();
   634                                 SETC_t();
   635                                 sh4_x86.tstate = TSTATE_C;
   636                                 }
   637                                 break;
   638                             case 0x2:
   639                                 { /* CLRMAC */
   640                                 COUNT_INST(I_CLRMAC);
   641                                 XOR_r32_r32(R_EAX, R_EAX);
   642                                 store_spreg( R_EAX, R_MACL );
   643                                 store_spreg( R_EAX, R_MACH );
   644                                 sh4_x86.tstate = TSTATE_NONE;
   645                                 }
   646                                 break;
   647                             case 0x3:
   648                                 { /* LDTLB */
   649                                 COUNT_INST(I_LDTLB);
   650                                 call_func0( MMU_ldtlb );
   651                                 }
   652                                 break;
   653                             case 0x4:
   654                                 { /* CLRS */
   655                                 COUNT_INST(I_CLRS);
   656                                 CLC();
   657                                 SETC_sh4r(R_S);
   658                                 sh4_x86.tstate = TSTATE_C;
   659                                 }
   660                                 break;
   661                             case 0x5:
   662                                 { /* SETS */
   663                                 COUNT_INST(I_SETS);
   664                                 STC();
   665                                 SETC_sh4r(R_S);
   666                                 sh4_x86.tstate = TSTATE_C;
   667                                 }
   668                                 break;
   669                             default:
   670                                 UNDEF();
   671                                 break;
   672                         }
   673                         break;
   674                     case 0x9:
   675                         switch( (ir&0xF0) >> 4 ) {
   676                             case 0x0:
   677                                 { /* NOP */
   678                                 COUNT_INST(I_NOP);
   679                                 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
   680                                 }
   681                                 break;
   682                             case 0x1:
   683                                 { /* DIV0U */
   684                                 COUNT_INST(I_DIV0U);
   685                                 XOR_r32_r32( R_EAX, R_EAX );
   686                                 store_spreg( R_EAX, R_Q );
   687                                 store_spreg( R_EAX, R_M );
   688                                 store_spreg( R_EAX, R_T );
   689                                 sh4_x86.tstate = TSTATE_C; // works for DIV1
   690                                 }
   691                                 break;
   692                             case 0x2:
   693                                 { /* MOVT Rn */
   694                                 uint32_t Rn = ((ir>>8)&0xF); 
   695                                 COUNT_INST(I_MOVT);
   696                                 load_spreg( R_EAX, R_T );
   697                                 store_reg( R_EAX, Rn );
   698                                 }
   699                                 break;
   700                             default:
   701                                 UNDEF();
   702                                 break;
   703                         }
   704                         break;
   705                     case 0xA:
   706                         switch( (ir&0xF0) >> 4 ) {
   707                             case 0x0:
   708                                 { /* STS MACH, Rn */
   709                                 uint32_t Rn = ((ir>>8)&0xF); 
   710                                 COUNT_INST(I_STS);
   711                                 load_spreg( R_EAX, R_MACH );
   712                                 store_reg( R_EAX, Rn );
   713                                 }
   714                                 break;
   715                             case 0x1:
   716                                 { /* STS MACL, Rn */
   717                                 uint32_t Rn = ((ir>>8)&0xF); 
   718                                 COUNT_INST(I_STS);
   719                                 load_spreg( R_EAX, R_MACL );
   720                                 store_reg( R_EAX, Rn );
   721                                 }
   722                                 break;
   723                             case 0x2:
   724                                 { /* STS PR, Rn */
   725                                 uint32_t Rn = ((ir>>8)&0xF); 
   726                                 COUNT_INST(I_STS);
   727                                 load_spreg( R_EAX, R_PR );
   728                                 store_reg( R_EAX, Rn );
   729                                 }
   730                                 break;
   731                             case 0x3:
   732                                 { /* STC SGR, Rn */
   733                                 uint32_t Rn = ((ir>>8)&0xF); 
   734                                 COUNT_INST(I_STC);
   735                                 check_priv();
   736                                 load_spreg( R_EAX, R_SGR );
   737                                 store_reg( R_EAX, Rn );
   738                                 sh4_x86.tstate = TSTATE_NONE;
   739                                 }
   740                                 break;
   741                             case 0x5:
   742                                 { /* STS FPUL, Rn */
   743                                 uint32_t Rn = ((ir>>8)&0xF); 
   744                                 COUNT_INST(I_STS);
   745                                 check_fpuen();
   746                                 load_spreg( R_EAX, R_FPUL );
   747                                 store_reg( R_EAX, Rn );
   748                                 }
   749                                 break;
   750                             case 0x6:
   751                                 { /* STS FPSCR, Rn */
   752                                 uint32_t Rn = ((ir>>8)&0xF); 
   753                                 COUNT_INST(I_STSFPSCR);
   754                                 check_fpuen();
   755                                 load_spreg( R_EAX, R_FPSCR );
   756                                 store_reg( R_EAX, Rn );
   757                                 }
   758                                 break;
   759                             case 0xF:
   760                                 { /* STC DBR, Rn */
   761                                 uint32_t Rn = ((ir>>8)&0xF); 
   762                                 COUNT_INST(I_STC);
   763                                 check_priv();
   764                                 load_spreg( R_EAX, R_DBR );
   765                                 store_reg( R_EAX, Rn );
   766                                 sh4_x86.tstate = TSTATE_NONE;
   767                                 }
   768                                 break;
   769                             default:
   770                                 UNDEF();
   771                                 break;
   772                         }
   773                         break;
   774                     case 0xB:
   775                         switch( (ir&0xFF0) >> 4 ) {
   776                             case 0x0:
   777                                 { /* RTS */
   778                                 COUNT_INST(I_RTS);
   779                                 if( sh4_x86.in_delay_slot ) {
   780                             	SLOTILLEGAL();
   781                                 } else {
   782                             	load_spreg( R_ECX, R_PR );
   783                             	store_spreg( R_ECX, R_NEW_PC );
   784                             	sh4_x86.in_delay_slot = DELAY_PC;
   785                             	sh4_x86.branch_taken = TRUE;
   786                             	if( UNTRANSLATABLE(pc+2) ) {
   787                             	    exit_block_emu(pc+2);
   788                             	    return 2;
   789                             	} else {
   790                             	    sh4_translate_instruction(pc+2);
   791                             	    exit_block_newpcset(pc+2);
   792                             	    return 4;
   793                             	}
   794                                 }
   795                                 }
   796                                 break;
   797                             case 0x1:
   798                                 { /* SLEEP */
   799                                 COUNT_INST(I_SLEEP);
   800                                 check_priv();
   801                                 call_func0( sh4_sleep );
   802                                 sh4_x86.tstate = TSTATE_NONE;
   803                                 sh4_x86.in_delay_slot = DELAY_NONE;
   804                                 return 2;
   805                                 }
   806                                 break;
   807                             case 0x2:
   808                                 { /* RTE */
   809                                 COUNT_INST(I_RTE);
   810                                 if( sh4_x86.in_delay_slot ) {
   811                             	SLOTILLEGAL();
   812                                 } else {
   813                             	check_priv();
   814                             	load_spreg( R_ECX, R_SPC );
   815                             	store_spreg( R_ECX, R_NEW_PC );
   816                             	load_spreg( R_EAX, R_SSR );
   817                             	call_func1( sh4_write_sr, R_EAX );
   818                             	sh4_x86.in_delay_slot = DELAY_PC;
   819                             	sh4_x86.priv_checked = FALSE;
   820                             	sh4_x86.fpuen_checked = FALSE;
   821                             	sh4_x86.tstate = TSTATE_NONE;
   822                             	sh4_x86.branch_taken = TRUE;
   823                             	if( UNTRANSLATABLE(pc+2) ) {
   824                             	    exit_block_emu(pc+2);
   825                             	    return 2;
   826                             	} else {
   827                             	    sh4_translate_instruction(pc+2);
   828                             	    exit_block_newpcset(pc+2);
   829                             	    return 4;
   830                             	}
   831                                 }
   832                                 }
   833                                 break;
   834                             default:
   835                                 UNDEF();
   836                                 break;
   837                         }
   838                         break;
   839                     case 0xC:
   840                         { /* MOV.B @(R0, Rm), Rn */
   841                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   842                         COUNT_INST(I_MOVB);
   843                         load_reg( R_EAX, 0 );
   844                         load_reg( R_ECX, Rm );
   845                         ADD_r32_r32( R_ECX, R_EAX );
    846                         MMU_TRANSLATE_READ( R_EAX );
   847                         MEM_READ_BYTE( R_EAX, R_EAX );
   848                         store_reg( R_EAX, Rn );
   849                         sh4_x86.tstate = TSTATE_NONE;
   850                         }
   851                         break;
   852                     case 0xD:
   853                         { /* MOV.W @(R0, Rm), Rn */
   854                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   855                         COUNT_INST(I_MOVW);
   856                         load_reg( R_EAX, 0 );
   857                         load_reg( R_ECX, Rm );
   858                         ADD_r32_r32( R_ECX, R_EAX );
   859                         check_ralign16( R_EAX );
   860                         MMU_TRANSLATE_READ( R_EAX );
   861                         MEM_READ_WORD( R_EAX, R_EAX );
   862                         store_reg( R_EAX, Rn );
   863                         sh4_x86.tstate = TSTATE_NONE;
   864                         }
   865                         break;
   866                     case 0xE:
   867                         { /* MOV.L @(R0, Rm), Rn */
   868                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   869                         COUNT_INST(I_MOVL);
   870                         load_reg( R_EAX, 0 );
   871                         load_reg( R_ECX, Rm );
   872                         ADD_r32_r32( R_ECX, R_EAX );
   873                         check_ralign32( R_EAX );
   874                         MMU_TRANSLATE_READ( R_EAX );
   875                         MEM_READ_LONG( R_EAX, R_EAX );
   876                         store_reg( R_EAX, Rn );
   877                         sh4_x86.tstate = TSTATE_NONE;
   878                         }
   879                         break;
   880                     case 0xF:
   881                         { /* MAC.L @Rm+, @Rn+ */
   882                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   883                         COUNT_INST(I_MACL);
   884                         if( Rm == Rn ) {
   885                     	load_reg( R_EAX, Rm );
   886                     	check_ralign32( R_EAX );
   887                     	MMU_TRANSLATE_READ( R_EAX );
   888                     	PUSH_realigned_r32( R_EAX );
   889                     	load_reg( R_EAX, Rn );
   890                     	ADD_imm8s_r32( 4, R_EAX );
   891                     	MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
   892                     	ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rn]) );
   893                     	// Note translate twice in case of page boundaries. Maybe worth
   894                     	// adding a page-boundary check to skip the second translation
   895                         } else {
   896                     	load_reg( R_EAX, Rm );
   897                     	check_ralign32( R_EAX );
   898                     	MMU_TRANSLATE_READ( R_EAX );
   899                     	load_reg( R_ECX, Rn );
   900                     	check_ralign32( R_ECX );
   901                     	PUSH_realigned_r32( R_EAX );
   902                     	MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
   903                     	MOV_r32_r32( R_ECX, R_EAX );
   904                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
   905                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
   906                         }
   907                         MEM_READ_LONG( R_EAX, R_EAX );
   908                         POP_r32( R_ECX );
   909                         PUSH_r32( R_EAX );
   910                         MEM_READ_LONG( R_ECX, R_EAX );
   911                         POP_realigned_r32( R_ECX );
   913                         IMUL_r32( R_ECX );
   914                         ADD_r32_sh4r( R_EAX, R_MACL );
   915                         ADC_r32_sh4r( R_EDX, R_MACH );
   917                         load_spreg( R_ECX, R_S );
   918                         TEST_r32_r32(R_ECX, R_ECX);
   919                         JE_rel8( nosat );
   920                         call_func0( signsat48 );
   921                         JMP_TARGET( nosat );
   922                         sh4_x86.tstate = TSTATE_NONE;
   923                         }
   924                         break;
   925                     default:
   926                         UNDEF();
   927                         break;
   928                 }
   929                 break;
   930             case 0x1:
   931                 { /* MOV.L Rm, @(disp, Rn) */
   932                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
   933                 COUNT_INST(I_MOVL);
   934                 load_reg( R_EAX, Rn );
   935                 ADD_imm32_r32( disp, R_EAX );
   936                 check_walign32( R_EAX );
   937                 MMU_TRANSLATE_WRITE( R_EAX );
   938                 load_reg( R_EDX, Rm );
   939                 MEM_WRITE_LONG( R_EAX, R_EDX );
   940                 sh4_x86.tstate = TSTATE_NONE;
   941                 }
   942                 break;
   943             case 0x2:
   944                 switch( ir&0xF ) {
   945                     case 0x0:
   946                         { /* MOV.B Rm, @Rn */
   947                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   948                         COUNT_INST(I_MOVB);
   949                         load_reg( R_EAX, Rn );
   950                         MMU_TRANSLATE_WRITE( R_EAX );
   951                         load_reg( R_EDX, Rm );
   952                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   953                         sh4_x86.tstate = TSTATE_NONE;
   954                         }
   955                         break;
   956                     case 0x1:
   957                         { /* MOV.W Rm, @Rn */
   958                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   959                         COUNT_INST(I_MOVW);
   960                         load_reg( R_EAX, Rn );
   961                         check_walign16( R_EAX );
    962                         MMU_TRANSLATE_WRITE( R_EAX );
   963                         load_reg( R_EDX, Rm );
   964                         MEM_WRITE_WORD( R_EAX, R_EDX );
   965                         sh4_x86.tstate = TSTATE_NONE;
   966                         }
   967                         break;
   968                     case 0x2:
   969                         { /* MOV.L Rm, @Rn */
   970                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   971                         COUNT_INST(I_MOVL);
   972                         load_reg( R_EAX, Rn );
   973                         check_walign32(R_EAX);
   974                         MMU_TRANSLATE_WRITE( R_EAX );
   975                         load_reg( R_EDX, Rm );
   976                         MEM_WRITE_LONG( R_EAX, R_EDX );
   977                         sh4_x86.tstate = TSTATE_NONE;
   978                         }
   979                         break;
   980                     case 0x4:
   981                         { /* MOV.B Rm, @-Rn */
   982                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   983                         COUNT_INST(I_MOVB);
   984                         load_reg( R_EAX, Rn );
   985                         ADD_imm8s_r32( -1, R_EAX );
   986                         MMU_TRANSLATE_WRITE( R_EAX );
   987                         load_reg( R_EDX, Rm );
   988                         ADD_imm8s_sh4r( -1, REG_OFFSET(r[Rn]) );
   989                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   990                         sh4_x86.tstate = TSTATE_NONE;
   991                         }
   992                         break;
   993                     case 0x5:
   994                         { /* MOV.W Rm, @-Rn */
   995                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   996                         COUNT_INST(I_MOVW);
   997                         load_reg( R_EAX, Rn );
   998                         ADD_imm8s_r32( -2, R_EAX );
   999                         check_walign16( R_EAX );
  1000                         MMU_TRANSLATE_WRITE( R_EAX );
  1001                         load_reg( R_EDX, Rm );
  1002                         ADD_imm8s_sh4r( -2, REG_OFFSET(r[Rn]) );
  1003                         MEM_WRITE_WORD( R_EAX, R_EDX );
  1004                         sh4_x86.tstate = TSTATE_NONE;
   1005                         }
   1006                         break;
  1007                     case 0x6:
  1008                         { /* MOV.L Rm, @-Rn */
  1009                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1010                         COUNT_INST(I_MOVL);
  1011                         load_reg( R_EAX, Rn );
  1012                         ADD_imm8s_r32( -4, R_EAX );
  1013                         check_walign32( R_EAX );
  1014                         MMU_TRANSLATE_WRITE( R_EAX );
  1015                         load_reg( R_EDX, Rm );
  1016                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1017                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1018                         sh4_x86.tstate = TSTATE_NONE;
   1019                         }
   1020                         break;
  1021                     case 0x7:
  1022                         { /* DIV0S Rm, Rn */
  1023                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1024                         COUNT_INST(I_DIV0S);
  1025                         load_reg( R_EAX, Rm );
  1026                         load_reg( R_ECX, Rn );
  1027                         SHR_imm8_r32( 31, R_EAX );
  1028                         SHR_imm8_r32( 31, R_ECX );
  1029                         store_spreg( R_EAX, R_M );
  1030                         store_spreg( R_ECX, R_Q );
  1031                         CMP_r32_r32( R_EAX, R_ECX );
  1032                         SETNE_t();
  1033                         sh4_x86.tstate = TSTATE_NE;
   1034                         }
   1035                         break;
  1036                     case 0x8:
  1037                         { /* TST Rm, Rn */
  1038                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1039                         COUNT_INST(I_TST);
  1040                         load_reg( R_EAX, Rm );
  1041                         load_reg( R_ECX, Rn );
  1042                         TEST_r32_r32( R_EAX, R_ECX );
  1043                         SETE_t();
  1044                         sh4_x86.tstate = TSTATE_E;
   1045                         }
   1046                         break;
  1047                     case 0x9:
  1048                         { /* AND Rm, Rn */
  1049                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1050                         COUNT_INST(I_AND);
  1051                         load_reg( R_EAX, Rm );
  1052                         load_reg( R_ECX, Rn );
  1053                         AND_r32_r32( R_EAX, R_ECX );
  1054                         store_reg( R_ECX, Rn );
  1055                         sh4_x86.tstate = TSTATE_NONE;
   1056                         }
   1057                         break;
  1058                     case 0xA:
  1059                         { /* XOR Rm, Rn */
  1060                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1061                         COUNT_INST(I_XOR);
  1062                         load_reg( R_EAX, Rm );
  1063                         load_reg( R_ECX, Rn );
  1064                         XOR_r32_r32( R_EAX, R_ECX );
  1065                         store_reg( R_ECX, Rn );
  1066                         sh4_x86.tstate = TSTATE_NONE;
   1067                         }
   1068                         break;
  1069                     case 0xB:
  1070                         { /* OR Rm, Rn */
  1071                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1072                         COUNT_INST(I_OR);
  1073                         load_reg( R_EAX, Rm );
  1074                         load_reg( R_ECX, Rn );
  1075                         OR_r32_r32( R_EAX, R_ECX );
  1076                         store_reg( R_ECX, Rn );
  1077                         sh4_x86.tstate = TSTATE_NONE;
   1078                         }
   1079                         break;
  1080                     case 0xC:
  1081                         { /* CMP/STR Rm, Rn */
  1082                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1083                         COUNT_INST(I_CMPSTR);
  1084                         load_reg( R_EAX, Rm );
  1085                         load_reg( R_ECX, Rn );
  1086                         XOR_r32_r32( R_ECX, R_EAX );
  1087                         TEST_r8_r8( R_AL, R_AL );
  1088                         JE_rel8(target1);
  1089                         TEST_r8_r8( R_AH, R_AH );
  1090                         JE_rel8(target2);
  1091                         SHR_imm8_r32( 16, R_EAX );
  1092                         TEST_r8_r8( R_AL, R_AL );
  1093                         JE_rel8(target3);
  1094                         TEST_r8_r8( R_AH, R_AH );
  1095                         JMP_TARGET(target1);
  1096                         JMP_TARGET(target2);
  1097                         JMP_TARGET(target3);
  1098                         SETE_t();
  1099                         sh4_x86.tstate = TSTATE_E;
   1100                         }
   1101                         break;
  1102                     case 0xD:
  1103                         { /* XTRCT Rm, Rn */
  1104                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1105                         COUNT_INST(I_XTRCT);
  1106                         load_reg( R_EAX, Rm );
  1107                         load_reg( R_ECX, Rn );
  1108                         SHL_imm8_r32( 16, R_EAX );
  1109                         SHR_imm8_r32( 16, R_ECX );
  1110                         OR_r32_r32( R_EAX, R_ECX );
  1111                         store_reg( R_ECX, Rn );
  1112                         sh4_x86.tstate = TSTATE_NONE;
   1113                         }
   1114                         break;
  1115                     case 0xE:
  1116                         { /* MULU.W Rm, Rn */
  1117                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1118                         COUNT_INST(I_MULUW);
  1119                         load_reg16u( R_EAX, Rm );
  1120                         load_reg16u( R_ECX, Rn );
  1121                         MUL_r32( R_ECX );
  1122                         store_spreg( R_EAX, R_MACL );
  1123                         sh4_x86.tstate = TSTATE_NONE;
   1124                         }
   1125                         break;
  1126                     case 0xF:
  1127                         { /* MULS.W Rm, Rn */
  1128                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1129                         COUNT_INST(I_MULSW);
  1130                         load_reg16s( R_EAX, Rm );
  1131                         load_reg16s( R_ECX, Rn );
  1132                         MUL_r32( R_ECX );
  1133                         store_spreg( R_EAX, R_MACL );
  1134                         sh4_x86.tstate = TSTATE_NONE;
   1135                         }
   1136                         break;
  1137                     default:
  1138                         UNDEF();
  1139                         break;
   1140                 }
   1141                 break;
  1142             case 0x3:
  1143                 switch( ir&0xF ) {
  1144                     case 0x0:
  1145                         { /* CMP/EQ Rm, Rn */
  1146                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1147                         COUNT_INST(I_CMPEQ);
  1148                         load_reg( R_EAX, Rm );
  1149                         load_reg( R_ECX, Rn );
  1150                         CMP_r32_r32( R_EAX, R_ECX );
  1151                         SETE_t();
  1152                         sh4_x86.tstate = TSTATE_E;
  1154                         break;
  1155                     case 0x2:
  1156                         { /* CMP/HS Rm, Rn */
  1157                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1158                         COUNT_INST(I_CMPHS);
  1159                         load_reg( R_EAX, Rm );
  1160                         load_reg( R_ECX, Rn );
  1161                         CMP_r32_r32( R_EAX, R_ECX );
  1162                         SETAE_t();
  1163                         sh4_x86.tstate = TSTATE_AE;
  1165                         break;
  1166                     case 0x3:
  1167                         { /* CMP/GE Rm, Rn */
  1168                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1169                         COUNT_INST(I_CMPGE);
  1170                         load_reg( R_EAX, Rm );
  1171                         load_reg( R_ECX, Rn );
  1172                         CMP_r32_r32( R_EAX, R_ECX );
  1173                         SETGE_t();
  1174                         sh4_x86.tstate = TSTATE_GE;
  1176                         break;
  1177                     case 0x4:
  1178                         { /* DIV1 Rm, Rn */
  1179                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1180                         COUNT_INST(I_DIV1);
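                               // One step of the SH4's 1-bit non-restoring division: RCL shifts T into the
                               // low bit of Rn (the old MSB becomes the trial quotient bit, captured in DL),
                               // Rm is subtracted when the saved Q equals M and added otherwise, and the new
                               // Q and T values are then derived from the carry out and M.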
  1181                         load_spreg( R_ECX, R_M );
  1182                         load_reg( R_EAX, Rn );
  1183                         if( sh4_x86.tstate != TSTATE_C ) {
  1184                     	LDC_t();
  1185                         }
  1186                         RCL1_r32( R_EAX );
  1187                         SETC_r8( R_DL ); // Q'
  1188                         CMP_sh4r_r32( R_Q, R_ECX );
  1189                         JE_rel8(mqequal);
  1190                         ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1191                         JMP_rel8(end);
  1192                         JMP_TARGET(mqequal);
  1193                         SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1194                         JMP_TARGET(end);
  1195                         store_reg( R_EAX, Rn ); // Done with Rn now
  1196                         SETC_r8(R_AL); // tmp1
  1197                         XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
  1198                         XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
  1199                         store_spreg( R_ECX, R_Q );
  1200                         XOR_imm8s_r32( 1, R_AL );   // T = !Q'
  1201                         MOVZX_r8_r32( R_AL, R_EAX );
  1202                         store_spreg( R_EAX, R_T );
  1203                         sh4_x86.tstate = TSTATE_NONE;
  1205                         break;
  1206                     case 0x5:
  1207                         { /* DMULU.L Rm, Rn */
  1208                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1209                         COUNT_INST(I_DMULU);
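                               // x86 MUL produces the full 64-bit unsigned product in EDX:EAX, which maps
                               // directly onto MACH:MACL.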
  1210                         load_reg( R_EAX, Rm );
  1211                         load_reg( R_ECX, Rn );
  1212                         MUL_r32(R_ECX);
  1213                         store_spreg( R_EDX, R_MACH );
  1214                         store_spreg( R_EAX, R_MACL );    
  1215                         sh4_x86.tstate = TSTATE_NONE;
  1217                         break;
  1218                     case 0x6:
  1219                         { /* CMP/HI Rm, Rn */
  1220                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1221                         COUNT_INST(I_CMPHI);
  1222                         load_reg( R_EAX, Rm );
  1223                         load_reg( R_ECX, Rn );
  1224                         CMP_r32_r32( R_EAX, R_ECX );
  1225                         SETA_t();
  1226                         sh4_x86.tstate = TSTATE_A;
  1228                         break;
  1229                     case 0x7:
  1230                         { /* CMP/GT Rm, Rn */
  1231                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1232                         COUNT_INST(I_CMPGT);
  1233                         load_reg( R_EAX, Rm );
  1234                         load_reg( R_ECX, Rn );
  1235                         CMP_r32_r32( R_EAX, R_ECX );
  1236                         SETG_t();
  1237                         sh4_x86.tstate = TSTATE_G;
  1239                         break;
  1240                     case 0x8:
  1241                         { /* SUB Rm, Rn */
  1242                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1243                         COUNT_INST(I_SUB);
  1244                         load_reg( R_EAX, Rm );
  1245                         load_reg( R_ECX, Rn );
  1246                         SUB_r32_r32( R_EAX, R_ECX );
  1247                         store_reg( R_ECX, Rn );
  1248                         sh4_x86.tstate = TSTATE_NONE;
  1250                         break;
  1251                     case 0xA:
  1252                         { /* SUBC Rm, Rn */
  1253                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1254                         COUNT_INST(I_SUBC);
  1255                         load_reg( R_EAX, Rm );
  1256                         load_reg( R_ECX, Rn );
  1257                         if( sh4_x86.tstate != TSTATE_C ) {
  1258                     	LDC_t();
  1259                         }
  1260                         SBB_r32_r32( R_EAX, R_ECX );
  1261                         store_reg( R_ECX, Rn );
  1262                         SETC_t();
  1263                         sh4_x86.tstate = TSTATE_C;
  1265                         break;
  1266                     case 0xB:
  1267                         { /* SUBV Rm, Rn */
  1268                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1269                         COUNT_INST(I_SUBV);
  1270                         load_reg( R_EAX, Rm );
  1271                         load_reg( R_ECX, Rn );
  1272                         SUB_r32_r32( R_EAX, R_ECX );
  1273                         store_reg( R_ECX, Rn );
  1274                         SETO_t();
  1275                         sh4_x86.tstate = TSTATE_O;
  1277                         break;
  1278                     case 0xC:
  1279                         { /* ADD Rm, Rn */
  1280                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1281                         COUNT_INST(I_ADD);
  1282                         load_reg( R_EAX, Rm );
  1283                         load_reg( R_ECX, Rn );
  1284                         ADD_r32_r32( R_EAX, R_ECX );
  1285                         store_reg( R_ECX, Rn );
  1286                         sh4_x86.tstate = TSTATE_NONE;
  1288                         break;
  1289                     case 0xD:
  1290                         { /* DMULS.L Rm, Rn */
  1291                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1292                         COUNT_INST(I_DMULS);
  1293                         load_reg( R_EAX, Rm );
  1294                         load_reg( R_ECX, Rn );
  1295                         IMUL_r32(R_ECX);
  1296                         store_spreg( R_EDX, R_MACH );
  1297                         store_spreg( R_EAX, R_MACL );
  1298                         sh4_x86.tstate = TSTATE_NONE;
  1300                         break;
  1301                     case 0xE:
  1302                         { /* ADDC Rm, Rn */
  1303                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1304                         COUNT_INST(I_ADDC);
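                               // Reload T into the x86 carry flag unless it is already there
                               // (tstate == TSTATE_C); a single ADC then computes Rn + Rm + T and
                               // SETC_t() writes the resulting carry back to T.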
  1305                         if( sh4_x86.tstate != TSTATE_C ) {
  1306                     	LDC_t();
  1307                         }
  1308                         load_reg( R_EAX, Rm );
  1309                         load_reg( R_ECX, Rn );
  1310                         ADC_r32_r32( R_EAX, R_ECX );
  1311                         store_reg( R_ECX, Rn );
  1312                         SETC_t();
  1313                         sh4_x86.tstate = TSTATE_C;
  1315                         break;
  1316                     case 0xF:
  1317                         { /* ADDV Rm, Rn */
  1318                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1319                         COUNT_INST(I_ADDV);
  1320                         load_reg( R_EAX, Rm );
  1321                         load_reg( R_ECX, Rn );
  1322                         ADD_r32_r32( R_EAX, R_ECX );
  1323                         store_reg( R_ECX, Rn );
  1324                         SETO_t();
  1325                         sh4_x86.tstate = TSTATE_O;
  1327                         break;
  1328                     default:
  1329                         UNDEF();
  1330                         break;
  1332                 break;
  1333             case 0x4:
  1334                 switch( ir&0xF ) {
  1335                     case 0x0:
  1336                         switch( (ir&0xF0) >> 4 ) {
  1337                             case 0x0:
  1338                                 { /* SHLL Rn */
  1339                                 uint32_t Rn = ((ir>>8)&0xF); 
  1340                                 COUNT_INST(I_SHLL);
  1341                                 load_reg( R_EAX, Rn );
  1342                                 SHL1_r32( R_EAX );
  1343                                 SETC_t();
  1344                                 store_reg( R_EAX, Rn );
  1345                                 sh4_x86.tstate = TSTATE_C;
  1347                                 break;
  1348                             case 0x1:
  1349                                 { /* DT Rn */
  1350                                 uint32_t Rn = ((ir>>8)&0xF); 
  1351                                 COUNT_INST(I_DT);
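                                       // DT: decrement Rn and set T when the result reaches zero; the SETE
                                       // picks up the ZF left by the ADD of -1.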
  1352                                 load_reg( R_EAX, Rn );
  1353                                 ADD_imm8s_r32( -1, R_EAX );
  1354                                 store_reg( R_EAX, Rn );
  1355                                 SETE_t();
  1356                                 sh4_x86.tstate = TSTATE_E;
  1358                                 break;
  1359                             case 0x2:
  1360                                 { /* SHAL Rn */
  1361                                 uint32_t Rn = ((ir>>8)&0xF); 
  1362                                 COUNT_INST(I_SHAL);
  1363                                 load_reg( R_EAX, Rn );
  1364                                 SHL1_r32( R_EAX );
  1365                                 SETC_t();
  1366                                 store_reg( R_EAX, Rn );
  1367                                 sh4_x86.tstate = TSTATE_C;
  1369                                 break;
  1370                             default:
  1371                                 UNDEF();
  1372                                 break;
  1374                         break;
  1375                     case 0x1:
  1376                         switch( (ir&0xF0) >> 4 ) {
  1377                             case 0x0:
  1378                                 { /* SHLR Rn */
  1379                                 uint32_t Rn = ((ir>>8)&0xF); 
  1380                                 COUNT_INST(I_SHLR);
  1381                                 load_reg( R_EAX, Rn );
  1382                                 SHR1_r32( R_EAX );
  1383                                 SETC_t();
  1384                                 store_reg( R_EAX, Rn );
  1385                                 sh4_x86.tstate = TSTATE_C;
  1387                                 break;
  1388                             case 0x1:
  1389                                 { /* CMP/PZ Rn */
  1390                                 uint32_t Rn = ((ir>>8)&0xF); 
  1391                                 COUNT_INST(I_CMPPZ);
  1392                                 load_reg( R_EAX, Rn );
  1393                                 CMP_imm8s_r32( 0, R_EAX );
  1394                                 SETGE_t();
  1395                                 sh4_x86.tstate = TSTATE_GE;
  1397                                 break;
  1398                             case 0x2:
  1399                                 { /* SHAR Rn */
  1400                                 uint32_t Rn = ((ir>>8)&0xF); 
  1401                                 COUNT_INST(I_SHAR);
  1402                                 load_reg( R_EAX, Rn );
  1403                                 SAR1_r32( R_EAX );
  1404                                 SETC_t();
  1405                                 store_reg( R_EAX, Rn );
  1406                                 sh4_x86.tstate = TSTATE_C;
  1408                                 break;
  1409                             default:
  1410                                 UNDEF();
  1411                                 break;
  1413                         break;
  1414                     case 0x2:
  1415                         switch( (ir&0xF0) >> 4 ) {
  1416                             case 0x0:
  1417                                 { /* STS.L MACH, @-Rn */
  1418                                 uint32_t Rn = ((ir>>8)&0xF); 
  1419                                 COUNT_INST(I_STSM);
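                                       // Pre-decrement store pattern shared by the STS.L/STC.L forms below:
                                       // Rn is checked for 32-bit alignment, the decremented address is
                                       // translated, and r[Rn] is only updated once translation succeeds,
                                       // so an MMU exception leaves Rn unmodified.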
  1420                                 load_reg( R_EAX, Rn );
  1421                                 check_walign32( R_EAX );
  1422                                 ADD_imm8s_r32( -4, R_EAX );
  1423                                 MMU_TRANSLATE_WRITE( R_EAX );
  1424                                 load_spreg( R_EDX, R_MACH );
  1425                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1426                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1427                                 sh4_x86.tstate = TSTATE_NONE;
  1429                                 break;
  1430                             case 0x1:
  1431                                 { /* STS.L MACL, @-Rn */
  1432                                 uint32_t Rn = ((ir>>8)&0xF); 
  1433                                 COUNT_INST(I_STSM);
  1434                                 load_reg( R_EAX, Rn );
  1435                                 check_walign32( R_EAX );
  1436                                 ADD_imm8s_r32( -4, R_EAX );
  1437                                 MMU_TRANSLATE_WRITE( R_EAX );
  1438                                 load_spreg( R_EDX, R_MACL );
  1439                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1440                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1441                                 sh4_x86.tstate = TSTATE_NONE;
  1443                                 break;
  1444                             case 0x2:
  1445                                 { /* STS.L PR, @-Rn */
  1446                                 uint32_t Rn = ((ir>>8)&0xF); 
  1447                                 COUNT_INST(I_STSM);
  1448                                 load_reg( R_EAX, Rn );
  1449                                 check_walign32( R_EAX );
  1450                                 ADD_imm8s_r32( -4, R_EAX );
  1451                                 MMU_TRANSLATE_WRITE( R_EAX );
  1452                                 load_spreg( R_EDX, R_PR );
  1453                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1454                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1455                                 sh4_x86.tstate = TSTATE_NONE;
  1457                                 break;
  1458                             case 0x3:
  1459                                 { /* STC.L SGR, @-Rn */
  1460                                 uint32_t Rn = ((ir>>8)&0xF); 
  1461                                 COUNT_INST(I_STCM);
  1462                                 check_priv();
  1463                                 load_reg( R_EAX, Rn );
  1464                                 check_walign32( R_EAX );
  1465                                 ADD_imm8s_r32( -4, R_EAX );
  1466                                 MMU_TRANSLATE_WRITE( R_EAX );
  1467                                 load_spreg( R_EDX, R_SGR );
  1468                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1469                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1470                                 sh4_x86.tstate = TSTATE_NONE;
  1472                                 break;
  1473                             case 0x5:
  1474                                 { /* STS.L FPUL, @-Rn */
  1475                                 uint32_t Rn = ((ir>>8)&0xF); 
  1476                                 COUNT_INST(I_STSM);
  1477                                 check_fpuen();
  1478                                 load_reg( R_EAX, Rn );
  1479                                 check_walign32( R_EAX );
  1480                                 ADD_imm8s_r32( -4, R_EAX );
  1481                                 MMU_TRANSLATE_WRITE( R_EAX );
  1482                                 load_spreg( R_EDX, R_FPUL );
  1483                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1484                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1485                                 sh4_x86.tstate = TSTATE_NONE;
  1487                                 break;
  1488                             case 0x6:
  1489                                 { /* STS.L FPSCR, @-Rn */
  1490                                 uint32_t Rn = ((ir>>8)&0xF); 
  1491                                 COUNT_INST(I_STSFPSCRM);
  1492                                 check_fpuen();
  1493                                 load_reg( R_EAX, Rn );
  1494                                 check_walign32( R_EAX );
  1495                                 ADD_imm8s_r32( -4, R_EAX );
  1496                                 MMU_TRANSLATE_WRITE( R_EAX );
  1497                                 load_spreg( R_EDX, R_FPSCR );
  1498                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1499                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1500                                 sh4_x86.tstate = TSTATE_NONE;
  1502                                 break;
  1503                             case 0xF:
  1504                                 { /* STC.L DBR, @-Rn */
  1505                                 uint32_t Rn = ((ir>>8)&0xF); 
  1506                                 COUNT_INST(I_STCM);
  1507                                 check_priv();
  1508                                 load_reg( R_EAX, Rn );
  1509                                 check_walign32( R_EAX );
  1510                                 ADD_imm8s_r32( -4, R_EAX );
  1511                                 MMU_TRANSLATE_WRITE( R_EAX );
  1512                                 load_spreg( R_EDX, R_DBR );
  1513                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1514                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1515                                 sh4_x86.tstate = TSTATE_NONE;
  1517                                 break;
  1518                             default:
  1519                                 UNDEF();
  1520                                 break;
  1522                         break;
  1523                     case 0x3:
  1524                         switch( (ir&0x80) >> 7 ) {
  1525                             case 0x0:
  1526                                 switch( (ir&0x70) >> 4 ) {
  1527                                     case 0x0:
  1528                                         { /* STC.L SR, @-Rn */
  1529                                         uint32_t Rn = ((ir>>8)&0xF); 
  1530                                         COUNT_INST(I_STCSRM);
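                                               // SR is assembled by sh4_read_sr() (bits such as T live in
                                               // separate fields); the translated write address is kept on
                                               // the stack across the call and the returned value is stored.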
  1531                                         check_priv();
  1532                                         load_reg( R_EAX, Rn );
  1533                                         check_walign32( R_EAX );
  1534                                         ADD_imm8s_r32( -4, R_EAX );
  1535                                         MMU_TRANSLATE_WRITE( R_EAX );
  1536                                         PUSH_realigned_r32( R_EAX );
  1537                                         call_func0( sh4_read_sr );
  1538                                         POP_realigned_r32( R_ECX );
  1539                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1540                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1541                                         sh4_x86.tstate = TSTATE_NONE;
  1543                                         break;
  1544                                     case 0x1:
  1545                                         { /* STC.L GBR, @-Rn */
  1546                                         uint32_t Rn = ((ir>>8)&0xF); 
  1547                                         COUNT_INST(I_STCM);
  1548                                         load_reg( R_EAX, Rn );
  1549                                         check_walign32( R_EAX );
  1550                                         ADD_imm8s_r32( -4, R_EAX );
  1551                                         MMU_TRANSLATE_WRITE( R_EAX );
  1552                                         load_spreg( R_EDX, R_GBR );
  1553                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1554                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1555                                         sh4_x86.tstate = TSTATE_NONE;
  1557                                         break;
  1558                                     case 0x2:
  1559                                         { /* STC.L VBR, @-Rn */
  1560                                         uint32_t Rn = ((ir>>8)&0xF); 
  1561                                         COUNT_INST(I_STCM);
  1562                                         check_priv();
  1563                                         load_reg( R_EAX, Rn );
  1564                                         check_walign32( R_EAX );
  1565                                         ADD_imm8s_r32( -4, R_EAX );
  1566                                         MMU_TRANSLATE_WRITE( R_EAX );
  1567                                         load_spreg( R_EDX, R_VBR );
  1568                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1569                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1570                                         sh4_x86.tstate = TSTATE_NONE;
  1572                                         break;
  1573                                     case 0x3:
  1574                                         { /* STC.L SSR, @-Rn */
  1575                                         uint32_t Rn = ((ir>>8)&0xF); 
  1576                                         COUNT_INST(I_STCM);
  1577                                         check_priv();
  1578                                         load_reg( R_EAX, Rn );
  1579                                         check_walign32( R_EAX );
  1580                                         ADD_imm8s_r32( -4, R_EAX );
  1581                                         MMU_TRANSLATE_WRITE( R_EAX );
  1582                                         load_spreg( R_EDX, R_SSR );
  1583                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1584                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1585                                         sh4_x86.tstate = TSTATE_NONE;
  1587                                         break;
  1588                                     case 0x4:
  1589                                         { /* STC.L SPC, @-Rn */
  1590                                         uint32_t Rn = ((ir>>8)&0xF); 
  1591                                         COUNT_INST(I_STCM);
  1592                                         check_priv();
  1593                                         load_reg( R_EAX, Rn );
  1594                                         check_walign32( R_EAX );
  1595                                         ADD_imm8s_r32( -4, R_EAX );
  1596                                         MMU_TRANSLATE_WRITE( R_EAX );
  1597                                         load_spreg( R_EDX, R_SPC );
  1598                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1599                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1600                                         sh4_x86.tstate = TSTATE_NONE;
  1602                                         break;
  1603                                     default:
  1604                                         UNDEF();
  1605                                         break;
  1607                                 break;
  1608                             case 0x1:
  1609                                 { /* STC.L Rm_BANK, @-Rn */
  1610                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
  1611                                 COUNT_INST(I_STCM);
  1612                                 check_priv();
  1613                                 load_reg( R_EAX, Rn );
  1614                                 check_walign32( R_EAX );
  1615                                 ADD_imm8s_r32( -4, R_EAX );
  1616                                 MMU_TRANSLATE_WRITE( R_EAX );
  1617                                 load_spreg( R_EDX, REG_OFFSET(r_bank[Rm_BANK]) );
  1618                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1619                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1620                                 sh4_x86.tstate = TSTATE_NONE;
  1622                                 break;
  1624                         break;
  1625                     case 0x4:
  1626                         switch( (ir&0xF0) >> 4 ) {
  1627                             case 0x0:
  1628                                 { /* ROTL Rn */
  1629                                 uint32_t Rn = ((ir>>8)&0xF); 
  1630                                 COUNT_INST(I_ROTL);
  1631                                 load_reg( R_EAX, Rn );
  1632                                 ROL1_r32( R_EAX );
  1633                                 store_reg( R_EAX, Rn );
  1634                                 SETC_t();
  1635                                 sh4_x86.tstate = TSTATE_C;
  1637                                 break;
  1638                             case 0x2:
  1639                                 { /* ROTCL Rn */
  1640                                 uint32_t Rn = ((ir>>8)&0xF); 
  1641                                 COUNT_INST(I_ROTCL);
  1642                                 load_reg( R_EAX, Rn );
  1643                                 if( sh4_x86.tstate != TSTATE_C ) {
  1644                             	LDC_t();
  1645                                 }
  1646                                 RCL1_r32( R_EAX );
  1647                                 store_reg( R_EAX, Rn );
  1648                                 SETC_t();
  1649                                 sh4_x86.tstate = TSTATE_C;
  1651                                 break;
  1652                             default:
  1653                                 UNDEF();
  1654                                 break;
  1656                         break;
  1657                     case 0x5:
  1658                         switch( (ir&0xF0) >> 4 ) {
  1659                             case 0x0:
  1660                                 { /* ROTR Rn */
  1661                                 uint32_t Rn = ((ir>>8)&0xF); 
  1662                                 COUNT_INST(I_ROTR);
  1663                                 load_reg( R_EAX, Rn );
  1664                                 ROR1_r32( R_EAX );
  1665                                 store_reg( R_EAX, Rn );
  1666                                 SETC_t();
  1667                                 sh4_x86.tstate = TSTATE_C;
  1669                                 break;
  1670                             case 0x1:
  1671                                 { /* CMP/PL Rn */
  1672                                 uint32_t Rn = ((ir>>8)&0xF); 
  1673                                 COUNT_INST(I_CMPPL);
  1674                                 load_reg( R_EAX, Rn );
  1675                                 CMP_imm8s_r32( 0, R_EAX );
  1676                                 SETG_t();
  1677                                 sh4_x86.tstate = TSTATE_G;
  1679                                 break;
  1680                             case 0x2:
  1681                                 { /* ROTCR Rn */
  1682                                 uint32_t Rn = ((ir>>8)&0xF); 
  1683                                 COUNT_INST(I_ROTCR);
  1684                                 load_reg( R_EAX, Rn );
  1685                                 if( sh4_x86.tstate != TSTATE_C ) {
  1686                             	LDC_t();
  1687                                 }
  1688                                 RCR1_r32( R_EAX );
  1689                                 store_reg( R_EAX, Rn );
  1690                                 SETC_t();
  1691                                 sh4_x86.tstate = TSTATE_C;
  1693                                 break;
  1694                             default:
  1695                                 UNDEF();
  1696                                 break;
  1698                         break;
  1699                     case 0x6:
  1700                         switch( (ir&0xF0) >> 4 ) {
  1701                             case 0x0:
  1702                                 { /* LDS.L @Rm+, MACH */
  1703                                 uint32_t Rm = ((ir>>8)&0xF); 
  1704                                 COUNT_INST(I_LDSM);
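                                       // Post-increment load pattern shared by the LDS.L/LDC.L forms below:
                                       // the address in Rm is alignment-checked and translated, r[Rm] is
                                       // bumped by 4 only after translation succeeds, and the long is then
                                       // read through the already-translated address.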
  1705                                 load_reg( R_EAX, Rm );
  1706                                 check_ralign32( R_EAX );
  1707                                 MMU_TRANSLATE_READ( R_EAX );
  1708                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1709                                 MEM_READ_LONG( R_EAX, R_EAX );
  1710                                 store_spreg( R_EAX, R_MACH );
  1711                                 sh4_x86.tstate = TSTATE_NONE;
  1713                                 break;
  1714                             case 0x1:
  1715                                 { /* LDS.L @Rm+, MACL */
  1716                                 uint32_t Rm = ((ir>>8)&0xF); 
  1717                                 COUNT_INST(I_LDSM);
  1718                                 load_reg( R_EAX, Rm );
  1719                                 check_ralign32( R_EAX );
  1720                                 MMU_TRANSLATE_READ( R_EAX );
  1721                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1722                                 MEM_READ_LONG( R_EAX, R_EAX );
  1723                                 store_spreg( R_EAX, R_MACL );
  1724                                 sh4_x86.tstate = TSTATE_NONE;
  1726                                 break;
  1727                             case 0x2:
  1728                                 { /* LDS.L @Rm+, PR */
  1729                                 uint32_t Rm = ((ir>>8)&0xF); 
  1730                                 COUNT_INST(I_LDSM);
  1731                                 load_reg( R_EAX, Rm );
  1732                                 check_ralign32( R_EAX );
  1733                                 MMU_TRANSLATE_READ( R_EAX );
  1734                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1735                                 MEM_READ_LONG( R_EAX, R_EAX );
  1736                                 store_spreg( R_EAX, R_PR );
  1737                                 sh4_x86.tstate = TSTATE_NONE;
  1739                                 break;
  1740                             case 0x3:
  1741                                 { /* LDC.L @Rm+, SGR */
  1742                                 uint32_t Rm = ((ir>>8)&0xF); 
  1743                                 COUNT_INST(I_LDCM);
  1744                                 check_priv();
  1745                                 load_reg( R_EAX, Rm );
  1746                                 check_ralign32( R_EAX );
  1747                                 MMU_TRANSLATE_READ( R_EAX );
  1748                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1749                                 MEM_READ_LONG( R_EAX, R_EAX );
  1750                                 store_spreg( R_EAX, R_SGR );
  1751                                 sh4_x86.tstate = TSTATE_NONE;
  1753                                 break;
  1754                             case 0x5:
  1755                                 { /* LDS.L @Rm+, FPUL */
  1756                                 uint32_t Rm = ((ir>>8)&0xF); 
  1757                                 COUNT_INST(I_LDSM);
  1758                                 check_fpuen();
  1759                                 load_reg( R_EAX, Rm );
  1760                                 check_ralign32( R_EAX );
  1761                                 MMU_TRANSLATE_READ( R_EAX );
  1762                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1763                                 MEM_READ_LONG( R_EAX, R_EAX );
  1764                                 store_spreg( R_EAX, R_FPUL );
  1765                                 sh4_x86.tstate = TSTATE_NONE;
  1767                                 break;
  1768                             case 0x6:
  1769                                 { /* LDS.L @Rm+, FPSCR */
  1770                                 uint32_t Rm = ((ir>>8)&0xF); 
  1771                                 COUNT_INST(I_LDSFPSCRM);
  1772                                 check_fpuen();
  1773                                 load_reg( R_EAX, Rm );
  1774                                 check_ralign32( R_EAX );
  1775                                 MMU_TRANSLATE_READ( R_EAX );
  1776                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1777                                 MEM_READ_LONG( R_EAX, R_EAX );
  1778                                 call_func1( sh4_write_fpscr, R_EAX );
  1779                                 sh4_x86.tstate = TSTATE_NONE;
  1781                                 break;
  1782                             case 0xF:
  1783                                 { /* LDC.L @Rm+, DBR */
  1784                                 uint32_t Rm = ((ir>>8)&0xF); 
  1785                                 COUNT_INST(I_LDCM);
  1786                                 check_priv();
  1787                                 load_reg( R_EAX, Rm );
  1788                                 check_ralign32( R_EAX );
  1789                                 MMU_TRANSLATE_READ( R_EAX );
  1790                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1791                                 MEM_READ_LONG( R_EAX, R_EAX );
  1792                                 store_spreg( R_EAX, R_DBR );
  1793                                 sh4_x86.tstate = TSTATE_NONE;
  1795                                 break;
  1796                             default:
  1797                                 UNDEF();
  1798                                 break;
  1800                         break;
  1801                     case 0x7:
  1802                         switch( (ir&0x80) >> 7 ) {
  1803                             case 0x0:
  1804                                 switch( (ir&0x70) >> 4 ) {
  1805                                     case 0x0:
  1806                                         { /* LDC.L @Rm+, SR */
  1807                                         uint32_t Rm = ((ir>>8)&0xF); 
  1808                                         COUNT_INST(I_LDCSRM);
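                                               // Loading SR is illegal in a delay slot and privileged
                                               // otherwise; since the new SR may switch register banks or
                                               // change privilege/FPU-disable state, the cached priv/fpuen
                                               // checks are invalidated after sh4_write_sr().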
  1809                                         if( sh4_x86.in_delay_slot ) {
  1810                                     	SLOTILLEGAL();
  1811                                         } else {
  1812                                     	check_priv();
  1813                                     	load_reg( R_EAX, Rm );
  1814                                     	check_ralign32( R_EAX );
  1815                                     	MMU_TRANSLATE_READ( R_EAX );
  1816                                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1817                                     	MEM_READ_LONG( R_EAX, R_EAX );
  1818                                     	call_func1( sh4_write_sr, R_EAX );
  1819                                     	sh4_x86.priv_checked = FALSE;
  1820                                     	sh4_x86.fpuen_checked = FALSE;
  1821                                     	sh4_x86.tstate = TSTATE_NONE;
  1822                                         }
  1823                                         }
  1824                                         break;
  1825                                     case 0x1:
  1826                                         { /* LDC.L @Rm+, GBR */
  1827                                         uint32_t Rm = ((ir>>8)&0xF); 
  1828                                         COUNT_INST(I_LDCM);
  1829                                         load_reg( R_EAX, Rm );
  1830                                         check_ralign32( R_EAX );
  1831                                         MMU_TRANSLATE_READ( R_EAX );
  1832                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1833                                         MEM_READ_LONG( R_EAX, R_EAX );
  1834                                         store_spreg( R_EAX, R_GBR );
  1835                                         sh4_x86.tstate = TSTATE_NONE;
  1837                                         break;
  1838                                     case 0x2:
  1839                                         { /* LDC.L @Rm+, VBR */
  1840                                         uint32_t Rm = ((ir>>8)&0xF); 
  1841                                         COUNT_INST(I_LDCM);
  1842                                         check_priv();
  1843                                         load_reg( R_EAX, Rm );
  1844                                         check_ralign32( R_EAX );
  1845                                         MMU_TRANSLATE_READ( R_EAX );
  1846                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1847                                         MEM_READ_LONG( R_EAX, R_EAX );
  1848                                         store_spreg( R_EAX, R_VBR );
  1849                                         sh4_x86.tstate = TSTATE_NONE;
  1851                                         break;
  1852                                     case 0x3:
  1853                                         { /* LDC.L @Rm+, SSR */
  1854                                         uint32_t Rm = ((ir>>8)&0xF); 
  1855                                         COUNT_INST(I_LDCM);
  1856                                         check_priv();
  1857                                         load_reg( R_EAX, Rm );
  1858                                         check_ralign32( R_EAX );
  1859                                         MMU_TRANSLATE_READ( R_EAX );
  1860                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1861                                         MEM_READ_LONG( R_EAX, R_EAX );
  1862                                         store_spreg( R_EAX, R_SSR );
  1863                                         sh4_x86.tstate = TSTATE_NONE;
  1865                                         break;
  1866                                     case 0x4:
  1867                                         { /* LDC.L @Rm+, SPC */
  1868                                         uint32_t Rm = ((ir>>8)&0xF); 
  1869                                         COUNT_INST(I_LDCM);
  1870                                         check_priv();
  1871                                         load_reg( R_EAX, Rm );
  1872                                         check_ralign32( R_EAX );
  1873                                         MMU_TRANSLATE_READ( R_EAX );
  1874                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1875                                         MEM_READ_LONG( R_EAX, R_EAX );
  1876                                         store_spreg( R_EAX, R_SPC );
  1877                                         sh4_x86.tstate = TSTATE_NONE;
  1879                                         break;
  1880                                     default:
  1881                                         UNDEF();
  1882                                         break;
  1884                                 break;
  1885                             case 0x1:
  1886                                 { /* LDC.L @Rm+, Rn_BANK */
  1887                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  1888                                 COUNT_INST(I_LDCM);
  1889                                 check_priv();
  1890                                 load_reg( R_EAX, Rm );
  1891                                 check_ralign32( R_EAX );
  1892                                 MMU_TRANSLATE_READ( R_EAX );
  1893                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1894                                 MEM_READ_LONG( R_EAX, R_EAX );
  1895                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  1896                                 sh4_x86.tstate = TSTATE_NONE;
  1898                                 break;
  1900                         break;
  1901                     case 0x8:
  1902                         switch( (ir&0xF0) >> 4 ) {
  1903                             case 0x0:
  1904                                 { /* SHLL2 Rn */
  1905                                 uint32_t Rn = ((ir>>8)&0xF); 
  1906                                 COUNT_INST(I_SHLL);
  1907                                 load_reg( R_EAX, Rn );
  1908                                 SHL_imm8_r32( 2, R_EAX );
  1909                                 store_reg( R_EAX, Rn );
  1910                                 sh4_x86.tstate = TSTATE_NONE;
  1912                                 break;
  1913                             case 0x1:
  1914                                 { /* SHLL8 Rn */
  1915                                 uint32_t Rn = ((ir>>8)&0xF); 
  1916                                 COUNT_INST(I_SHLL);
  1917                                 load_reg( R_EAX, Rn );
  1918                                 SHL_imm8_r32( 8, R_EAX );
  1919                                 store_reg( R_EAX, Rn );
  1920                                 sh4_x86.tstate = TSTATE_NONE;
  1922                                 break;
  1923                             case 0x2:
  1924                                 { /* SHLL16 Rn */
  1925                                 uint32_t Rn = ((ir>>8)&0xF); 
  1926                                 COUNT_INST(I_SHLL);
  1927                                 load_reg( R_EAX, Rn );
  1928                                 SHL_imm8_r32( 16, R_EAX );
  1929                                 store_reg( R_EAX, Rn );
  1930                                 sh4_x86.tstate = TSTATE_NONE;
  1932                                 break;
  1933                             default:
  1934                                 UNDEF();
  1935                                 break;
  1937                         break;
  1938                     case 0x9:
  1939                         switch( (ir&0xF0) >> 4 ) {
  1940                             case 0x0:
  1941                                 { /* SHLR2 Rn */
  1942                                 uint32_t Rn = ((ir>>8)&0xF); 
  1943                                 COUNT_INST(I_SHLR);
  1944                                 load_reg( R_EAX, Rn );
  1945                                 SHR_imm8_r32( 2, R_EAX );
  1946                                 store_reg( R_EAX, Rn );
  1947                                 sh4_x86.tstate = TSTATE_NONE;
  1949                                 break;
  1950                             case 0x1:
  1951                                 { /* SHLR8 Rn */
  1952                                 uint32_t Rn = ((ir>>8)&0xF); 
  1953                                 COUNT_INST(I_SHLR);
  1954                                 load_reg( R_EAX, Rn );
  1955                                 SHR_imm8_r32( 8, R_EAX );
  1956                                 store_reg( R_EAX, Rn );
  1957                                 sh4_x86.tstate = TSTATE_NONE;
  1959                                 break;
  1960                             case 0x2:
  1961                                 { /* SHLR16 Rn */
  1962                                 uint32_t Rn = ((ir>>8)&0xF); 
  1963                                 COUNT_INST(I_SHLR);
  1964                                 load_reg( R_EAX, Rn );
  1965                                 SHR_imm8_r32( 16, R_EAX );
  1966                                 store_reg( R_EAX, Rn );
  1967                                 sh4_x86.tstate = TSTATE_NONE;
  1969                                 break;
  1970                             default:
  1971                                 UNDEF();
  1972                                 break;
  1974                         break;
  1975                     case 0xA:
  1976                         switch( (ir&0xF0) >> 4 ) {
  1977                             case 0x0:
  1978                                 { /* LDS Rm, MACH */
  1979                                 uint32_t Rm = ((ir>>8)&0xF); 
  1980                                 COUNT_INST(I_LDS);
  1981                                 load_reg( R_EAX, Rm );
  1982                                 store_spreg( R_EAX, R_MACH );
  1984                                 break;
  1985                             case 0x1:
  1986                                 { /* LDS Rm, MACL */
  1987                                 uint32_t Rm = ((ir>>8)&0xF); 
  1988                                 COUNT_INST(I_LDS);
  1989                                 load_reg( R_EAX, Rm );
  1990                                 store_spreg( R_EAX, R_MACL );
  1992                                 break;
  1993                             case 0x2:
  1994                                 { /* LDS Rm, PR */
  1995                                 uint32_t Rm = ((ir>>8)&0xF); 
  1996                                 COUNT_INST(I_LDS);
  1997                                 load_reg( R_EAX, Rm );
  1998                                 store_spreg( R_EAX, R_PR );
  2000                                 break;
  2001                             case 0x3:
  2002                                 { /* LDC Rm, SGR */
  2003                                 uint32_t Rm = ((ir>>8)&0xF); 
  2004                                 COUNT_INST(I_LDC);
  2005                                 check_priv();
  2006                                 load_reg( R_EAX, Rm );
  2007                                 store_spreg( R_EAX, R_SGR );
  2008                                 sh4_x86.tstate = TSTATE_NONE;
  2010                                 break;
  2011                             case 0x5:
  2012                                 { /* LDS Rm, FPUL */
  2013                                 uint32_t Rm = ((ir>>8)&0xF); 
  2014                                 COUNT_INST(I_LDS);
  2015                                 check_fpuen();
  2016                                 load_reg( R_EAX, Rm );
  2017                                 store_spreg( R_EAX, R_FPUL );
  2019                                 break;
  2020                             case 0x6:
  2021                                 { /* LDS Rm, FPSCR */
  2022                                 uint32_t Rm = ((ir>>8)&0xF); 
  2023                                 COUNT_INST(I_LDSFPSCR);
  2024                                 check_fpuen();
  2025                                 load_reg( R_EAX, Rm );
  2026                                 call_func1( sh4_write_fpscr, R_EAX );
  2027                                 sh4_x86.tstate = TSTATE_NONE;
  2029                                 break;
  2030                             case 0xF:
  2031                                 { /* LDC Rm, DBR */
  2032                                 uint32_t Rm = ((ir>>8)&0xF); 
  2033                                 COUNT_INST(I_LDC);
  2034                                 check_priv();
  2035                                 load_reg( R_EAX, Rm );
  2036                                 store_spreg( R_EAX, R_DBR );
  2037                                 sh4_x86.tstate = TSTATE_NONE;
  2039                                 break;
  2040                             default:
  2041                                 UNDEF();
  2042                                 break;
  2044                         break;
  2045                     case 0xB:
  2046                         switch( (ir&0xF0) >> 4 ) {
  2047                             case 0x0:
  2048                                 { /* JSR @Rn */
  2049                                 uint32_t Rn = ((ir>>8)&0xF); 
  2050                                 COUNT_INST(I_JSR);
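                                       // JSR: PR gets the address of the instruction after the delay slot
                                       // (runtime PC plus the static offset pc+4 - block_start_pc), the
                                       // branch target goes to NEW_PC, and the delay-slot instruction is
                                       // then translated inline before the block is exited.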
  2051                                 if( sh4_x86.in_delay_slot ) {
  2052                             	SLOTILLEGAL();
  2053                                 } else {
  2054                             	load_spreg( R_EAX, R_PC );
  2055                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
  2056                             	store_spreg( R_EAX, R_PR );
  2057                             	load_reg( R_ECX, Rn );
  2058                             	store_spreg( R_ECX, R_NEW_PC );
  2059                             	sh4_x86.in_delay_slot = DELAY_PC;
  2060                             	sh4_x86.branch_taken = TRUE;
  2061                             	sh4_x86.tstate = TSTATE_NONE;
  2062                             	if( UNTRANSLATABLE(pc+2) ) {
  2063                             	    exit_block_emu(pc+2);
  2064                             	    return 2;
  2065                             	} else {
  2066                             	    sh4_translate_instruction(pc+2);
  2067                             	    exit_block_newpcset(pc+2);
  2068                             	    return 4;
  2069                             	}
  2070                                 }
  2071                                 }
  2072                                 break;
  2073                             case 0x1:
  2074                                 { /* TAS.B @Rn */
  2075                                 uint32_t Rn = ((ir>>8)&0xF); 
  2076                                 COUNT_INST(I_TASB);
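                                       // TAS.B: read the byte at @Rn, set T if it was zero, then write it
                                       // back with bit 7 (0x80) set; the translated address is preserved on
                                       // the stack across the read.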
  2077                                 load_reg( R_EAX, Rn );
  2078                                 MMU_TRANSLATE_WRITE( R_EAX );
  2079                                 PUSH_realigned_r32( R_EAX );
  2080                                 MEM_READ_BYTE( R_EAX, R_EAX );
  2081                                 TEST_r8_r8( R_AL, R_AL );
  2082                                 SETE_t();
  2083                                 OR_imm8_r8( 0x80, R_AL );
  2084                                 POP_realigned_r32( R_ECX );
  2085                                 MEM_WRITE_BYTE( R_ECX, R_EAX );
  2086                                 sh4_x86.tstate = TSTATE_NONE;
  2088                                 break;
  2089                             case 0x2:
  2090                                 { /* JMP @Rn */
  2091                                 uint32_t Rn = ((ir>>8)&0xF); 
  2092                                 COUNT_INST(I_JMP);
  2093                                 if( sh4_x86.in_delay_slot ) {
  2094                             	SLOTILLEGAL();
  2095                                 } else {
  2096                             	load_reg( R_ECX, Rn );
  2097                             	store_spreg( R_ECX, R_NEW_PC );
  2098                             	sh4_x86.in_delay_slot = DELAY_PC;
  2099                             	sh4_x86.branch_taken = TRUE;
  2100                             	if( UNTRANSLATABLE(pc+2) ) {
  2101                             	    exit_block_emu(pc+2);
  2102                             	    return 2;
  2103                             	} else {
  2104                             	    sh4_translate_instruction(pc+2);
  2105                             	    exit_block_newpcset(pc+2);
  2106                             	    return 4;
  2107                             	}
  2108                                 }
  2109                                 }
  2110                                 break;
  2111                             default:
  2112                                 UNDEF();
  2113                                 break;
  2115                         break;
  2116                     case 0xC:
  2117                         { /* SHAD Rm, Rn */
  2118                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2119                         COUNT_INST(I_SHAD);
  2120                         /* Annoyingly enough, not directly convertible */
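                               // SHAD shifts left by (Rm & 0x1F) when Rm >= 0 and arithmetic-right by
                               // (-Rm & 0x1F) when Rm < 0; a negative count whose low five bits are zero
                               // means a full 32-bit arithmetic shift, which x86 SAR cannot express with a
                               // count of 0, hence the emptysar branch that does SAR by 31 (sign fill).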
  2121                         load_reg( R_EAX, Rn );
  2122                         load_reg( R_ECX, Rm );
  2123                         CMP_imm32_r32( 0, R_ECX );
  2124                         JGE_rel8(doshl);
  2126                         NEG_r32( R_ECX );      // 2
  2127                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2128                         JE_rel8(emptysar);     // 2
  2129                         SAR_r32_CL( R_EAX );       // 2
  2130                         JMP_rel8(end);          // 2
  2132                         JMP_TARGET(emptysar);
  2133                         SAR_imm8_r32(31, R_EAX );  // 3
  2134                         JMP_rel8(end2);
  2136                         JMP_TARGET(doshl);
  2137                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2138                         SHL_r32_CL( R_EAX );       // 2
  2139                         JMP_TARGET(end);
  2140                         JMP_TARGET(end2);
  2141                         store_reg( R_EAX, Rn );
  2142                         sh4_x86.tstate = TSTATE_NONE;
  2144                         break;
  2145                     case 0xD:
  2146                         { /* SHLD Rm, Rn */
  2147                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2148                         COUNT_INST(I_SHLD);
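                               // Same shape as SHAD above, but with logical shifts: the negative count
                               // with zero low bits must produce 0, hence the XOR on the emptyshr path.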
  2149                         load_reg( R_EAX, Rn );
  2150                         load_reg( R_ECX, Rm );
  2151                         CMP_imm32_r32( 0, R_ECX );
  2152                         JGE_rel8(doshl);
  2154                         NEG_r32( R_ECX );      // 2
  2155                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2156                         JE_rel8(emptyshr );
  2157                         SHR_r32_CL( R_EAX );       // 2
  2158                         JMP_rel8(end);          // 2
  2160                         JMP_TARGET(emptyshr);
  2161                         XOR_r32_r32( R_EAX, R_EAX );
  2162                         JMP_rel8(end2);
  2164                         JMP_TARGET(doshl);
  2165                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2166                         SHL_r32_CL( R_EAX );       // 2
  2167                         JMP_TARGET(end);
  2168                         JMP_TARGET(end2);
  2169                         store_reg( R_EAX, Rn );
  2170                         sh4_x86.tstate = TSTATE_NONE;
  2172                         break;
  2173                     case 0xE:
  2174                         switch( (ir&0x80) >> 7 ) {
  2175                             case 0x0:
  2176                                 switch( (ir&0x70) >> 4 ) {
  2177                                     case 0x0:
  2178                                         { /* LDC Rm, SR */
  2179                                         uint32_t Rm = ((ir>>8)&0xF); 
  2180                                         COUNT_INST(I_LDCSR);
  2181                                         if( sh4_x86.in_delay_slot ) {
  2182                                     	SLOTILLEGAL();
  2183                                         } else {
  2184                                     	check_priv();
  2185                                     	load_reg( R_EAX, Rm );
  2186                                     	call_func1( sh4_write_sr, R_EAX );
  2187                                     	sh4_x86.priv_checked = FALSE;
  2188                                     	sh4_x86.fpuen_checked = FALSE;
  2189                                     	sh4_x86.tstate = TSTATE_NONE;
  2190                                         }
  2191                                         }
  2192                                         break;
  2193                                     case 0x1:
  2194                                         { /* LDC Rm, GBR */
  2195                                         uint32_t Rm = ((ir>>8)&0xF); 
  2196                                         COUNT_INST(I_LDC);
  2197                                         load_reg( R_EAX, Rm );
  2198                                         store_spreg( R_EAX, R_GBR );
  2200                                         break;
  2201                                     case 0x2:
  2202                                         { /* LDC Rm, VBR */
  2203                                         uint32_t Rm = ((ir>>8)&0xF); 
  2204                                         COUNT_INST(I_LDC);
  2205                                         check_priv();
  2206                                         load_reg( R_EAX, Rm );
  2207                                         store_spreg( R_EAX, R_VBR );
  2208                                         sh4_x86.tstate = TSTATE_NONE;
  2210                                         break;
  2211                                     case 0x3:
  2212                                         { /* LDC Rm, SSR */
  2213                                         uint32_t Rm = ((ir>>8)&0xF); 
  2214                                         COUNT_INST(I_LDC);
  2215                                         check_priv();
  2216                                         load_reg( R_EAX, Rm );
  2217                                         store_spreg( R_EAX, R_SSR );
  2218                                         sh4_x86.tstate = TSTATE_NONE;
  2220                                         break;
  2221                                     case 0x4:
  2222                                         { /* LDC Rm, SPC */
  2223                                         uint32_t Rm = ((ir>>8)&0xF); 
  2224                                         COUNT_INST(I_LDC);
  2225                                         check_priv();
  2226                                         load_reg( R_EAX, Rm );
  2227                                         store_spreg( R_EAX, R_SPC );
  2228                                         sh4_x86.tstate = TSTATE_NONE;
  2230                                         break;
  2231                                     default:
  2232                                         UNDEF();
  2233                                         break;
  2235                                 break;
  2236                             case 0x1:
  2237                                 { /* LDC Rm, Rn_BANK */
  2238                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  2239                                 COUNT_INST(I_LDC);
  2240                                 check_priv();
  2241                                 load_reg( R_EAX, Rm );
  2242                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  2243                                 sh4_x86.tstate = TSTATE_NONE;
  2245                                 break;
  2247                         break;
  2248                     case 0xF:
  2249                         { /* MAC.W @Rm+, @Rn+ */
  2250                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2251                         COUNT_INST(I_MACW);
  2252                         if( Rm == Rn ) {
  2253                     	load_reg( R_EAX, Rm );
  2254                     	check_ralign16( R_EAX );
  2255                     	MMU_TRANSLATE_READ( R_EAX );
  2256                     	PUSH_realigned_r32( R_EAX );
  2257                     	load_reg( R_EAX, Rn );
  2258                     	ADD_imm8s_r32( 2, R_EAX );
  2259                     	MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
  2260                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
   2261                     	// Note: both addresses are translated up front in case the two accesses
   2262                     	// fall on different pages; a page-boundary check could skip the second translation.
  2263                         } else {
  2264                     	load_reg( R_EAX, Rm );
  2265                     	check_ralign16( R_EAX );
  2266                     	MMU_TRANSLATE_READ( R_EAX );
  2267                     	load_reg( R_ECX, Rn );
  2268                     	check_ralign16( R_ECX );
  2269                     	PUSH_realigned_r32( R_EAX );
  2270                     	MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
  2271                     	MOV_r32_r32( R_ECX, R_EAX );
  2272                     	ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
  2273                     	ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
  2275                         MEM_READ_WORD( R_EAX, R_EAX );
  2276                         POP_r32( R_ECX );
  2277                         PUSH_r32( R_EAX );
  2278                         MEM_READ_WORD( R_ECX, R_EAX );
  2279                         POP_realigned_r32( R_ECX );
  2280                         IMUL_r32( R_ECX );
  2282                         load_spreg( R_ECX, R_S );
  2283                         TEST_r32_r32( R_ECX, R_ECX );
  2284                         JE_rel8( nosat );
  2286                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2287                         JNO_rel8( end );            // 2
  2288                         load_imm32( R_EDX, 1 );         // 5
  2289                         store_spreg( R_EDX, R_MACH );   // 6
  2290                         JS_rel8( positive );        // 2
  2291                         load_imm32( R_EAX, 0x80000000 );// 5
  2292                         store_spreg( R_EAX, R_MACL );   // 6
  2293                         JMP_rel8(end2);           // 2
  2295                         JMP_TARGET(positive);
  2296                         load_imm32( R_EAX, 0x7FFFFFFF );// 5
  2297                         store_spreg( R_EAX, R_MACL );   // 6
  2298                         JMP_rel8(end3);            // 2
  2300                         JMP_TARGET(nosat);
  2301                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2302                         ADC_r32_sh4r( R_EDX, R_MACH );  // 6
  2303                         JMP_TARGET(end);
  2304                         JMP_TARGET(end2);
  2305                         JMP_TARGET(end3);
  2306                         sh4_x86.tstate = TSTATE_NONE;
  2308                         break;
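                        /* MAC.W note: both operands are fetched as 16-bit words and multiplied
                         * with IMUL, leaving the product in EDX:EAX.  With the S flag clear the
                         * whole product is accumulated into MACH:MACL via ADD/ADC; with S set
                         * only MACL is accumulated, saturating to 0x80000000 / 0x7FFFFFFF on
                         * signed overflow and writing 1 to MACH to record that saturation
                         * occurred.
                         */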
  2310                 break;
  2311             case 0x5:
  2312                 { /* MOV.L @(disp, Rm), Rn */
  2313                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
  2314                 COUNT_INST(I_MOVL);
  2315                 load_reg( R_EAX, Rm );
  2316                 ADD_imm8s_r32( disp, R_EAX );
  2317                 check_ralign32( R_EAX );
  2318                 MMU_TRANSLATE_READ( R_EAX );
  2319                 MEM_READ_LONG( R_EAX, R_EAX );
  2320                 store_reg( R_EAX, Rn );
  2321                 sh4_x86.tstate = TSTATE_NONE;
  2323                 break;
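                /* The memory-access cases all share the same emitted-code shape: compute the
                 * effective address in EAX, check alignment where the access size requires it,
                 * pass the address through MMU_TRANSLATE_READ / MMU_TRANSLATE_WRITE (which
                 * takes care of TLB misses when address translation is enabled), then perform
                 * the access with the MEM_READ_xxx / MEM_WRITE_xxx helpers and store the
                 * result back to the SH4 register file.
                 */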
  2324             case 0x6:
  2325                 switch( ir&0xF ) {
  2326                     case 0x0:
  2327                         { /* MOV.B @Rm, Rn */
  2328                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2329                         COUNT_INST(I_MOVB);
  2330                         load_reg( R_EAX, Rm );
  2331                         MMU_TRANSLATE_READ( R_EAX );
  2332                         MEM_READ_BYTE( R_EAX, R_EAX );
  2333                         store_reg( R_EAX, Rn );
  2334                         sh4_x86.tstate = TSTATE_NONE;
  2336                         break;
  2337                     case 0x1:
  2338                         { /* MOV.W @Rm, Rn */
  2339                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2340                         COUNT_INST(I_MOVW);
  2341                         load_reg( R_EAX, Rm );
  2342                         check_ralign16( R_EAX );
  2343                         MMU_TRANSLATE_READ( R_EAX );
  2344                         MEM_READ_WORD( R_EAX, R_EAX );
  2345                         store_reg( R_EAX, Rn );
  2346                         sh4_x86.tstate = TSTATE_NONE;
  2348                         break;
  2349                     case 0x2:
  2350                         { /* MOV.L @Rm, Rn */
  2351                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2352                         COUNT_INST(I_MOVL);
  2353                         load_reg( R_EAX, Rm );
  2354                         check_ralign32( R_EAX );
  2355                         MMU_TRANSLATE_READ( R_EAX );
  2356                         MEM_READ_LONG( R_EAX, R_EAX );
  2357                         store_reg( R_EAX, Rn );
  2358                         sh4_x86.tstate = TSTATE_NONE;
  2360                         break;
  2361                     case 0x3:
  2362                         { /* MOV Rm, Rn */
  2363                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2364                         COUNT_INST(I_MOV);
  2365                         load_reg( R_EAX, Rm );
  2366                         store_reg( R_EAX, Rn );
  2368                         break;
  2369                     case 0x4:
  2370                         { /* MOV.B @Rm+, Rn */
  2371                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2372                         COUNT_INST(I_MOVB);
  2373                         load_reg( R_EAX, Rm );
  2374                         MMU_TRANSLATE_READ( R_EAX );
  2375                         ADD_imm8s_sh4r( 1, REG_OFFSET(r[Rm]) );
  2376                         MEM_READ_BYTE( R_EAX, R_EAX );
  2377                         store_reg( R_EAX, Rn );
  2378                         sh4_x86.tstate = TSTATE_NONE;
  2380                         break;
  2381                     case 0x5:
  2382                         { /* MOV.W @Rm+, Rn */
  2383                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2384                         COUNT_INST(I_MOVW);
  2385                         load_reg( R_EAX, Rm );
  2386                         check_ralign16( R_EAX );
  2387                         MMU_TRANSLATE_READ( R_EAX );
  2388                         ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
  2389                         MEM_READ_WORD( R_EAX, R_EAX );
  2390                         store_reg( R_EAX, Rn );
  2391                         sh4_x86.tstate = TSTATE_NONE;
  2393                         break;
  2394                     case 0x6:
  2395                         { /* MOV.L @Rm+, Rn */
  2396                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2397                         COUNT_INST(I_MOVL);
  2398                         load_reg( R_EAX, Rm );
  2399                         check_ralign32( R_EAX );
  2400                         MMU_TRANSLATE_READ( R_EAX );
  2401                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  2402                         MEM_READ_LONG( R_EAX, R_EAX );
  2403                         store_reg( R_EAX, Rn );
  2404                         sh4_x86.tstate = TSTATE_NONE;
  2406                         break;
  2407                     case 0x7:
  2408                         { /* NOT Rm, Rn */
  2409                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2410                         COUNT_INST(I_NOT);
  2411                         load_reg( R_EAX, Rm );
  2412                         NOT_r32( R_EAX );
  2413                         store_reg( R_EAX, Rn );
  2414                         sh4_x86.tstate = TSTATE_NONE;
  2416                         break;
  2417                     case 0x8:
  2418                         { /* SWAP.B Rm, Rn */
  2419                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2420                         COUNT_INST(I_SWAPB);
  2421                         load_reg( R_EAX, Rm );
  2422                         XCHG_r8_r8( R_AL, R_AH ); // NB: does not touch EFLAGS
  2423                         store_reg( R_EAX, Rn );
  2425                         break;
  2426                     case 0x9:
  2427                         { /* SWAP.W Rm, Rn */
  2428                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2429                         COUNT_INST(I_SWAPB);
  2430                         load_reg( R_EAX, Rm );
  2431                         MOV_r32_r32( R_EAX, R_ECX );
  2432                         SHL_imm8_r32( 16, R_ECX );
  2433                         SHR_imm8_r32( 16, R_EAX );
  2434                         OR_r32_r32( R_EAX, R_ECX );
  2435                         store_reg( R_ECX, Rn );
  2436                         sh4_x86.tstate = TSTATE_NONE;
  2438                         break;
  2439                     case 0xA:
  2440                         { /* NEGC Rm, Rn */
  2441                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2442                         COUNT_INST(I_NEGC);
  2443                         load_reg( R_EAX, Rm );
  2444                         XOR_r32_r32( R_ECX, R_ECX );
  2445                         LDC_t();
  2446                         SBB_r32_r32( R_EAX, R_ECX );
  2447                         store_reg( R_ECX, Rn );
  2448                         SETC_t();
  2449                         sh4_x86.tstate = TSTATE_C;
  2451                         break;
  2452                     case 0xB:
  2453                         { /* NEG Rm, Rn */
  2454                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2455                         COUNT_INST(I_NEG);
  2456                         load_reg( R_EAX, Rm );
  2457                         NEG_r32( R_EAX );
  2458                         store_reg( R_EAX, Rn );
  2459                         sh4_x86.tstate = TSTATE_NONE;
  2461                         break;
  2462                     case 0xC:
  2463                         { /* EXTU.B Rm, Rn */
  2464                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2465                         COUNT_INST(I_EXTUB);
  2466                         load_reg( R_EAX, Rm );
  2467                         MOVZX_r8_r32( R_EAX, R_EAX );
  2468                         store_reg( R_EAX, Rn );
  2470                         break;
  2471                     case 0xD:
  2472                         { /* EXTU.W Rm, Rn */
  2473                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2474                         COUNT_INST(I_EXTUW);
  2475                         load_reg( R_EAX, Rm );
  2476                         MOVZX_r16_r32( R_EAX, R_EAX );
  2477                         store_reg( R_EAX, Rn );
  2479                         break;
  2480                     case 0xE:
  2481                         { /* EXTS.B Rm, Rn */
  2482                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2483                         COUNT_INST(I_EXTSB);
  2484                         load_reg( R_EAX, Rm );
  2485                         MOVSX_r8_r32( R_EAX, R_EAX );
  2486                         store_reg( R_EAX, Rn );
  2488                         break;
  2489                     case 0xF:
  2490                         { /* EXTS.W Rm, Rn */
  2491                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2492                         COUNT_INST(I_EXTSW);
  2493                         load_reg( R_EAX, Rm );
  2494                         MOVSX_r16_r32( R_EAX, R_EAX );
  2495                         store_reg( R_EAX, Rn );
  2497                         break;
  2499                 break;
  2500             case 0x7:
  2501                 { /* ADD #imm, Rn */
  2502                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  2503                 COUNT_INST(I_ADDI);
  2504                 load_reg( R_EAX, Rn );
  2505                 ADD_imm8s_r32( imm, R_EAX );
  2506                 store_reg( R_EAX, Rn );
  2507                 sh4_x86.tstate = TSTATE_NONE;
  2509                 break;
  2510             case 0x8:
  2511                 switch( (ir&0xF00) >> 8 ) {
  2512                     case 0x0:
  2513                         { /* MOV.B R0, @(disp, Rn) */
  2514                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2515                         COUNT_INST(I_MOVB);
  2516                         load_reg( R_EAX, Rn );
  2517                         ADD_imm32_r32( disp, R_EAX );
  2518                         MMU_TRANSLATE_WRITE( R_EAX );
  2519                         load_reg( R_EDX, 0 );
  2520                         MEM_WRITE_BYTE( R_EAX, R_EDX );
  2521                         sh4_x86.tstate = TSTATE_NONE;
  2523                         break;
  2524                     case 0x1:
  2525                         { /* MOV.W R0, @(disp, Rn) */
  2526                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2527                         COUNT_INST(I_MOVW);
  2528                         load_reg( R_EAX, Rn );
  2529                         ADD_imm32_r32( disp, R_EAX );
  2530                         check_walign16( R_EAX );
  2531                         MMU_TRANSLATE_WRITE( R_EAX );
  2532                         load_reg( R_EDX, 0 );
  2533                         MEM_WRITE_WORD( R_EAX, R_EDX );
  2534                         sh4_x86.tstate = TSTATE_NONE;
  2536                         break;
  2537                     case 0x4:
  2538                         { /* MOV.B @(disp, Rm), R0 */
  2539                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2540                         COUNT_INST(I_MOVB);
  2541                         load_reg( R_EAX, Rm );
  2542                         ADD_imm32_r32( disp, R_EAX );
  2543                         MMU_TRANSLATE_READ( R_EAX );
  2544                         MEM_READ_BYTE( R_EAX, R_EAX );
  2545                         store_reg( R_EAX, 0 );
  2546                         sh4_x86.tstate = TSTATE_NONE;
  2548                         break;
  2549                     case 0x5:
  2550                         { /* MOV.W @(disp, Rm), R0 */
  2551                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2552                         COUNT_INST(I_MOVW);
  2553                         load_reg( R_EAX, Rm );
  2554                         ADD_imm32_r32( disp, R_EAX );
  2555                         check_ralign16( R_EAX );
  2556                         MMU_TRANSLATE_READ( R_EAX );
  2557                         MEM_READ_WORD( R_EAX, R_EAX );
  2558                         store_reg( R_EAX, 0 );
  2559                         sh4_x86.tstate = TSTATE_NONE;
  2561                         break;
  2562                     case 0x8:
  2563                         { /* CMP/EQ #imm, R0 */
  2564                         int32_t imm = SIGNEXT8(ir&0xFF); 
  2565                         COUNT_INST(I_CMPEQI);
  2566                         load_reg( R_EAX, 0 );
  2567                         CMP_imm8s_r32(imm, R_EAX);
  2568                         SETE_t();
  2569                         sh4_x86.tstate = TSTATE_E;
  2571                         break;
  2572                     case 0x9:
  2573                         { /* BT disp */
  2574                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2575                         COUNT_INST(I_BT);
  2576                         if( sh4_x86.in_delay_slot ) {
  2577                     	SLOTILLEGAL();
  2578                         } else {
  2579                     	sh4vma_t target = disp + pc + 4;
  2580                     	JF_rel8( nottaken );
  2581                     	exit_block_rel(target, pc+2 );
  2582                     	JMP_TARGET(nottaken);
  2583                     	return 2;
  2586                         break;
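                        /* BT and BF do not terminate the block: a short conditional jump skips
                         * an exit_block_rel() for the taken target, and the not-taken path
                         * falls through into the next translated instruction.  The return value
                         * (2 here, 4 for the delayed forms below) is the number of SH4 bytes
                         * consumed by this translation.
                         */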
  2587                     case 0xB:
  2588                         { /* BF disp */
  2589                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2590                         COUNT_INST(I_BF);
  2591                         if( sh4_x86.in_delay_slot ) {
  2592                     	SLOTILLEGAL();
  2593                         } else {
  2594                     	sh4vma_t target = disp + pc + 4;
  2595                     	JT_rel8( nottaken );
  2596                     	exit_block_rel(target, pc+2 );
  2597                     	JMP_TARGET(nottaken);
  2598                     	return 2;
  2601                         break;
  2602                     case 0xD:
  2603                         { /* BT/S disp */
  2604                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2605                         COUNT_INST(I_BTS);
  2606                         if( sh4_x86.in_delay_slot ) {
  2607                     	SLOTILLEGAL();
  2608                         } else {
  2609                     	sh4_x86.in_delay_slot = DELAY_PC;
  2610                     	if( UNTRANSLATABLE(pc+2) ) {
  2611                     	    load_imm32( R_EAX, pc + 4 - sh4_x86.block_start_pc );
  2612                     	    JF_rel8(nottaken);
  2613                     	    ADD_imm32_r32( disp, R_EAX );
  2614                     	    JMP_TARGET(nottaken);
  2615                     	    ADD_sh4r_r32( R_PC, R_EAX );
  2616                     	    store_spreg( R_EAX, R_NEW_PC );
  2617                     	    exit_block_emu(pc+2);
  2618                     	    sh4_x86.branch_taken = TRUE;
  2619                     	    return 2;
  2620                     	} else {
  2621                     	    if( sh4_x86.tstate == TSTATE_NONE ) {
  2622                     		CMP_imm8s_sh4r( 1, R_T );
  2623                     		sh4_x86.tstate = TSTATE_E;
  2625                     	    OP(0x0F); OP(0x80+(sh4_x86.tstate^1)); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JF rel32
  2626                     	    sh4_translate_instruction(pc+2);
  2627                     	    exit_block_rel( disp + pc + 4, pc+4 );
  2628                     	    // not taken
  2629                     	    *patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2630                     	    sh4_translate_instruction(pc+2);
  2631                     	    return 4;
  2635                         break;
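                        /* BT/S and BF/S use two strategies.  If the delay slot cannot be
                         * translated (UNTRANSLATABLE(pc+2)), the target is computed into
                         * sh4r.new_pc and the block exits to the emulation core to execute the
                         * slot and complete the branch.  Otherwise a rel32 conditional jump is
                         * emitted over the taken path and back-patched once the fall-through
                         * address is known, and the delay-slot instruction is translated twice
                         * (once on each side of the branch) since it executes on both paths.
                         */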
  2636                     case 0xF:
  2637                         { /* BF/S disp */
  2638                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2639                         COUNT_INST(I_BFS);
  2640                         if( sh4_x86.in_delay_slot ) {
  2641                     	SLOTILLEGAL();
  2642                         } else {
  2643                     	sh4_x86.in_delay_slot = DELAY_PC;
  2644                     	if( UNTRANSLATABLE(pc+2) ) {
  2645                     	    load_imm32( R_EAX, pc + 4 - sh4_x86.block_start_pc );
  2646                     	    JT_rel8(nottaken);
  2647                     	    ADD_imm32_r32( disp, R_EAX );
  2648                     	    JMP_TARGET(nottaken);
  2649                     	    ADD_sh4r_r32( R_PC, R_EAX );
  2650                     	    store_spreg( R_EAX, R_NEW_PC );
  2651                     	    exit_block_emu(pc+2);
  2652                     	    sh4_x86.branch_taken = TRUE;
  2653                     	    return 2;
  2654                     	} else {
  2655                     	    if( sh4_x86.tstate == TSTATE_NONE ) {
  2656                     		CMP_imm8s_sh4r( 1, R_T );
  2657                     		sh4_x86.tstate = TSTATE_E;
  2659                     	    sh4vma_t target = disp + pc + 4;
  2660                     	    OP(0x0F); OP(0x80+sh4_x86.tstate); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JT rel32
  2661                     	    sh4_translate_instruction(pc+2);
  2662                     	    exit_block_rel( target, pc+4 );
  2664                     	    // not taken
  2665                     	    *patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2666                     	    sh4_translate_instruction(pc+2);
  2667                     	    return 4;
  2671                         break;
  2672                     default:
  2673                         UNDEF();
  2674                         break;
  2676                 break;
  2677             case 0x9:
  2678                 { /* MOV.W @(disp, PC), Rn */
  2679                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1; 
  2680                 COUNT_INST(I_MOVW);
  2681                 if( sh4_x86.in_delay_slot ) {
  2682             	SLOTILLEGAL();
  2683                 } else {
  2684             	// See comments for MOV.L @(disp, PC), Rn
  2685             	uint32_t target = pc + disp + 4;
  2686             	if( IS_IN_ICACHE(target) ) {
  2687             	    sh4ptr_t ptr = GET_ICACHE_PTR(target);
  2688             	    MOV_moff32_EAX( ptr );
  2689             	    MOVSX_r16_r32( R_EAX, R_EAX );
  2690             	} else {
  2691             	    load_imm32( R_EAX, (pc - sh4_x86.block_start_pc) + disp + 4 );
  2692             	    ADD_sh4r_r32( R_PC, R_EAX );
  2693             	    MMU_TRANSLATE_READ( R_EAX );
  2694             	    MEM_READ_WORD( R_EAX, R_EAX );
  2695             	    sh4_x86.tstate = TSTATE_NONE;
  2697             	store_reg( R_EAX, Rn );
  2700                 break;
  2701             case 0xA:
  2702                 { /* BRA disp */
  2703                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
  2704                 COUNT_INST(I_BRA);
  2705                 if( sh4_x86.in_delay_slot ) {
  2706             	SLOTILLEGAL();
  2707                 } else {
  2708             	sh4_x86.in_delay_slot = DELAY_PC;
  2709             	sh4_x86.branch_taken = TRUE;
  2710             	if( UNTRANSLATABLE(pc+2) ) {
  2711             	    load_spreg( R_EAX, R_PC );
  2712             	    ADD_imm32_r32( pc + disp + 4 - sh4_x86.block_start_pc, R_EAX );
  2713             	    store_spreg( R_EAX, R_NEW_PC );
  2714             	    exit_block_emu(pc+2);
  2715             	    return 2;
  2716             	} else {
  2717             	    sh4_translate_instruction( pc + 2 );
  2718             	    exit_block_rel( disp + pc + 4, pc+4 );
  2719             	    return 4;
  2723                 break;
  2724             case 0xB:
  2725                 { /* BSR disp */
  2726                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
  2727                 COUNT_INST(I_BSR);
  2728                 if( sh4_x86.in_delay_slot ) {
  2729             	SLOTILLEGAL();
  2730                 } else {
  2731             	load_spreg( R_EAX, R_PC );
  2732             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
  2733             	store_spreg( R_EAX, R_PR );
  2734             	sh4_x86.in_delay_slot = DELAY_PC;
  2735             	sh4_x86.branch_taken = TRUE;
  2736             	sh4_x86.tstate = TSTATE_NONE;
  2737             	if( UNTRANSLATABLE(pc+2) ) {
  2738             	    ADD_imm32_r32( disp, R_EAX );
  2739             	    store_spreg( R_EAX, R_NEW_PC );
  2740             	    exit_block_emu(pc+2);
  2741             	    return 2;
  2742             	} else {
  2743             	    sh4_translate_instruction( pc + 2 );
  2744             	    exit_block_rel( disp + pc + 4, pc+4 );
  2745             	    return 4;
  2749                 break;
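                /* BRA and BSR compute their targets relative to the runtime value of sh4r.pc
                 * rather than embedding an absolute address: the emitted code loads R_PC and
                 * adds the translation-time constant offset from the block start (BSR stores
                 * pc+4 into PR the same way first).  This keeps the block correct even if it
                 * is entered at a different virtual address than it was translated at, as the
                 * MOV.L @(disp, PC) comment below explains.
                 */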
  2750             case 0xC:
  2751                 switch( (ir&0xF00) >> 8 ) {
  2752                     case 0x0:
  2753                         { /* MOV.B R0, @(disp, GBR) */
  2754                         uint32_t disp = (ir&0xFF); 
  2755                         COUNT_INST(I_MOVB);
  2756                         load_spreg( R_EAX, R_GBR );
  2757                         ADD_imm32_r32( disp, R_EAX );
  2758                         MMU_TRANSLATE_WRITE( R_EAX );
  2759                         load_reg( R_EDX, 0 );
  2760                         MEM_WRITE_BYTE( R_EAX, R_EDX );
  2761                         sh4_x86.tstate = TSTATE_NONE;
  2763                         break;
  2764                     case 0x1:
  2765                         { /* MOV.W R0, @(disp, GBR) */
  2766                         uint32_t disp = (ir&0xFF)<<1; 
  2767                         COUNT_INST(I_MOVW);
  2768                         load_spreg( R_EAX, R_GBR );
  2769                         ADD_imm32_r32( disp, R_EAX );
  2770                         check_walign16( R_EAX );
  2771                         MMU_TRANSLATE_WRITE( R_EAX );
  2772                         load_reg( R_EDX, 0 );
  2773                         MEM_WRITE_WORD( R_EAX, R_EDX );
  2774                         sh4_x86.tstate = TSTATE_NONE;
  2776                         break;
  2777                     case 0x2:
  2778                         { /* MOV.L R0, @(disp, GBR) */
  2779                         uint32_t disp = (ir&0xFF)<<2; 
  2780                         COUNT_INST(I_MOVL);
  2781                         load_spreg( R_EAX, R_GBR );
  2782                         ADD_imm32_r32( disp, R_EAX );
  2783                         check_walign32( R_EAX );
  2784                         MMU_TRANSLATE_WRITE( R_EAX );
  2785                         load_reg( R_EDX, 0 );
  2786                         MEM_WRITE_LONG( R_EAX, R_EDX );
  2787                         sh4_x86.tstate = TSTATE_NONE;
  2789                         break;
  2790                     case 0x3:
  2791                         { /* TRAPA #imm */
  2792                         uint32_t imm = (ir&0xFF); 
  2793                         COUNT_INST(I_TRAPA);
  2794                         if( sh4_x86.in_delay_slot ) {
  2795                     	SLOTILLEGAL();
  2796                         } else {
  2797                     	load_imm32( R_ECX, pc+2 - sh4_x86.block_start_pc );   // 5
  2798                     	ADD_r32_sh4r( R_ECX, R_PC );
  2799                     	load_imm32( R_EAX, imm );
  2800                     	call_func1( sh4_raise_trap, R_EAX );
  2801                     	sh4_x86.tstate = TSTATE_NONE;
  2802                     	exit_block_pcset(pc);
  2803                     	sh4_x86.branch_taken = TRUE;
  2804                     	return 2;
  2807                         break;
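                        /* TRAPA commits the updated PC to sh4r.pc before calling
                         * sh4_raise_trap(), then leaves through exit_block_pcset() since the
                         * trap has already established the new PC; like the other unconditional
                         * control transfers it sets branch_taken so no further instructions are
                         * translated into this block.
                         */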
  2808                     case 0x4:
  2809                         { /* MOV.B @(disp, GBR), R0 */
  2810                         uint32_t disp = (ir&0xFF); 
  2811                         COUNT_INST(I_MOVB);
  2812                         load_spreg( R_EAX, R_GBR );
  2813                         ADD_imm32_r32( disp, R_EAX );
  2814                         MMU_TRANSLATE_READ( R_EAX );
  2815                         MEM_READ_BYTE( R_EAX, R_EAX );
  2816                         store_reg( R_EAX, 0 );
  2817                         sh4_x86.tstate = TSTATE_NONE;
  2819                         break;
  2820                     case 0x5:
  2821                         { /* MOV.W @(disp, GBR), R0 */
  2822                         uint32_t disp = (ir&0xFF)<<1; 
  2823                         COUNT_INST(I_MOVW);
  2824                         load_spreg( R_EAX, R_GBR );
  2825                         ADD_imm32_r32( disp, R_EAX );
  2826                         check_ralign16( R_EAX );
  2827                         MMU_TRANSLATE_READ( R_EAX );
  2828                         MEM_READ_WORD( R_EAX, R_EAX );
  2829                         store_reg( R_EAX, 0 );
  2830                         sh4_x86.tstate = TSTATE_NONE;
  2832                         break;
  2833                     case 0x6:
  2834                         { /* MOV.L @(disp, GBR), R0 */
  2835                         uint32_t disp = (ir&0xFF)<<2; 
  2836                         COUNT_INST(I_MOVL);
  2837                         load_spreg( R_EAX, R_GBR );
  2838                         ADD_imm32_r32( disp, R_EAX );
  2839                         check_ralign32( R_EAX );
  2840                         MMU_TRANSLATE_READ( R_EAX );
  2841                         MEM_READ_LONG( R_EAX, R_EAX );
  2842                         store_reg( R_EAX, 0 );
  2843                         sh4_x86.tstate = TSTATE_NONE;
  2845                         break;
  2846                     case 0x7:
  2847                         { /* MOVA @(disp, PC), R0 */
  2848                         uint32_t disp = (ir&0xFF)<<2; 
  2849                         COUNT_INST(I_MOVA);
  2850                         if( sh4_x86.in_delay_slot ) {
  2851                     	SLOTILLEGAL();
  2852                         } else {
  2853                     	load_imm32( R_ECX, (pc - sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
  2854                     	ADD_sh4r_r32( R_PC, R_ECX );
  2855                     	store_reg( R_ECX, 0 );
  2856                     	sh4_x86.tstate = TSTATE_NONE;
  2859                         break;
  2860                     case 0x8:
  2861                         { /* TST #imm, R0 */
  2862                         uint32_t imm = (ir&0xFF); 
  2863                         COUNT_INST(I_TSTI);
  2864                         load_reg( R_EAX, 0 );
  2865                         TEST_imm32_r32( imm, R_EAX );
  2866                         SETE_t();
  2867                         sh4_x86.tstate = TSTATE_E;
  2869                         break;
  2870                     case 0x9:
  2871                         { /* AND #imm, R0 */
  2872                         uint32_t imm = (ir&0xFF); 
  2873                         COUNT_INST(I_ANDI);
  2874                         load_reg( R_EAX, 0 );
  2875                         AND_imm32_r32(imm, R_EAX); 
  2876                         store_reg( R_EAX, 0 );
  2877                         sh4_x86.tstate = TSTATE_NONE;
  2879                         break;
  2880                     case 0xA:
  2881                         { /* XOR #imm, R0 */
  2882                         uint32_t imm = (ir&0xFF); 
  2883                         COUNT_INST(I_XORI);
  2884                         load_reg( R_EAX, 0 );
  2885                         XOR_imm32_r32( imm, R_EAX );
  2886                         store_reg( R_EAX, 0 );
  2887                         sh4_x86.tstate = TSTATE_NONE;
  2889                         break;
  2890                     case 0xB:
  2891                         { /* OR #imm, R0 */
  2892                         uint32_t imm = (ir&0xFF); 
  2893                         COUNT_INST(I_ORI);
  2894                         load_reg( R_EAX, 0 );
  2895                         OR_imm32_r32(imm, R_EAX);
  2896                         store_reg( R_EAX, 0 );
  2897                         sh4_x86.tstate = TSTATE_NONE;
  2899                         break;
  2900                     case 0xC:
  2901                         { /* TST.B #imm, @(R0, GBR) */
  2902                         uint32_t imm = (ir&0xFF); 
  2903                         COUNT_INST(I_TSTB);
  2904                         load_reg( R_EAX, 0);
   2905                         load_spreg( R_ECX, R_GBR );   // GBR is a system register: use load_spreg, as in the other @(R0, GBR) ops below
  2906                         ADD_r32_r32( R_ECX, R_EAX );
  2907                         MMU_TRANSLATE_READ( R_EAX );
  2908                         MEM_READ_BYTE( R_EAX, R_EAX );
  2909                         TEST_imm8_r8( imm, R_AL );
  2910                         SETE_t();
  2911                         sh4_x86.tstate = TSTATE_E;
  2913                         break;
  2914                     case 0xD:
  2915                         { /* AND.B #imm, @(R0, GBR) */
  2916                         uint32_t imm = (ir&0xFF); 
  2917                         COUNT_INST(I_ANDB);
  2918                         load_reg( R_EAX, 0 );
  2919                         load_spreg( R_ECX, R_GBR );
  2920                         ADD_r32_r32( R_ECX, R_EAX );
  2921                         MMU_TRANSLATE_WRITE( R_EAX );
  2922                         PUSH_realigned_r32(R_EAX);
  2923                         MEM_READ_BYTE( R_EAX, R_EAX );
  2924                         POP_realigned_r32(R_ECX);
  2925                         AND_imm32_r32(imm, R_EAX );
  2926                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2927                         sh4_x86.tstate = TSTATE_NONE;
  2929                         break;
  2930                     case 0xE:
  2931                         { /* XOR.B #imm, @(R0, GBR) */
  2932                         uint32_t imm = (ir&0xFF); 
  2933                         COUNT_INST(I_XORB);
  2934                         load_reg( R_EAX, 0 );
  2935                         load_spreg( R_ECX, R_GBR );
  2936                         ADD_r32_r32( R_ECX, R_EAX );
  2937                         MMU_TRANSLATE_WRITE( R_EAX );
  2938                         PUSH_realigned_r32(R_EAX);
  2939                         MEM_READ_BYTE(R_EAX, R_EAX);
  2940                         POP_realigned_r32(R_ECX);
  2941                         XOR_imm32_r32( imm, R_EAX );
  2942                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2943                         sh4_x86.tstate = TSTATE_NONE;
  2945                         break;
  2946                     case 0xF:
  2947                         { /* OR.B #imm, @(R0, GBR) */
  2948                         uint32_t imm = (ir&0xFF); 
  2949                         COUNT_INST(I_ORB);
  2950                         load_reg( R_EAX, 0 );
  2951                         load_spreg( R_ECX, R_GBR );
  2952                         ADD_r32_r32( R_ECX, R_EAX );
  2953                         MMU_TRANSLATE_WRITE( R_EAX );
  2954                         PUSH_realigned_r32(R_EAX);
  2955                         MEM_READ_BYTE( R_EAX, R_EAX );
  2956                         POP_realigned_r32(R_ECX);
  2957                         OR_imm32_r32(imm, R_EAX );
  2958                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2959                         sh4_x86.tstate = TSTATE_NONE;
  2961                         break;
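                        /* AND.B/XOR.B/OR.B on @(R0, GBR) are read-modify-write: the address is
                         * translated once with MMU_TRANSLATE_WRITE, preserved across the byte
                         * read with PUSH_realigned_r32/POP_realigned_r32 (the _realigned
                         * variants also track stack alignment around the memory calls), and the
                         * modified byte is then written back to the same translated address.
                         */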
  2963                 break;
  2964             case 0xD:
  2965                 { /* MOV.L @(disp, PC), Rn */
  2966                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2; 
  2967                 COUNT_INST(I_MOVLPC);
  2968                 if( sh4_x86.in_delay_slot ) {
  2969             	SLOTILLEGAL();
  2970                 } else {
  2971             	uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
  2972             	if( IS_IN_ICACHE(target) ) {
  2973             	    // If the target address is in the same page as the code, it's
  2974             	    // pretty safe to just ref it directly and circumvent the whole
  2975             	    // memory subsystem. (this is a big performance win)
   2977             	    // FIXME: There's a corner-case that isn't handled here: when the
   2978             	    // current code-page is in the ITLB but not in the UTLB, this should
   2979             	    // probably generate a TLB miss, although the real SH4's behaviour
   2980             	    // needs to be tested to confirm that. It's unlikely that anything
   2981             	    // depends on this behaviour, though.
  2982             	    sh4ptr_t ptr = GET_ICACHE_PTR(target);
  2983             	    MOV_moff32_EAX( ptr );
  2984             	} else {
  2985             	    // Note: we use sh4r.pc for the calc as we could be running at a
  2986             	    // different virtual address than the translation was done with,
  2987             	    // but we can safely assume that the low bits are the same.
  2988             	    load_imm32( R_EAX, (pc-sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
  2989             	    ADD_sh4r_r32( R_PC, R_EAX );
  2990             	    MMU_TRANSLATE_READ( R_EAX );
  2991             	    MEM_READ_LONG( R_EAX, R_EAX );
  2992             	    sh4_x86.tstate = TSTATE_NONE;
  2994             	store_reg( R_EAX, Rn );
  2997                 break;
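            /* PC-relative literal loads: the target of MOV.L @(disp, PC) is
             * (pc & ~3) + 4 + disp, which almost always points at a literal pool in the
             * same page as the code, so when IS_IN_ICACHE(target) holds the value is read
             * directly through the cached host pointer (MOV_moff32_EAX) and the memory
             * subsystem is bypassed entirely.  The fallback path recomputes the address
             * from the runtime sh4r.pc, as the comment above explains.
             */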
  2998             case 0xE:
  2999                 { /* MOV #imm, Rn */
  3000                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  3001                 COUNT_INST(I_MOVI);
  3002                 load_imm32( R_EAX, imm );
  3003                 store_reg( R_EAX, Rn );
  3005                 break;
  3006             case 0xF:
  3007                 switch( ir&0xF ) {
  3008                     case 0x0:
  3009                         { /* FADD FRm, FRn */
  3010                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3011                         COUNT_INST(I_FADD);
  3012                         check_fpuen();
  3013                         load_spreg( R_ECX, R_FPSCR );
  3014                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3015                         JNE_rel8(doubleprec);
  3016                         push_fr(FRm);
  3017                         push_fr(FRn);
  3018                         FADDP_st(1);
  3019                         pop_fr(FRn);
  3020                         JMP_rel8(end);
  3021                         JMP_TARGET(doubleprec);
  3022                         push_dr(FRm);
  3023                         push_dr(FRn);
  3024                         FADDP_st(1);
  3025                         pop_dr(FRn);
  3026                         JMP_TARGET(end);
  3027                         sh4_x86.tstate = TSTATE_NONE;
  3029                         break;
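                        /* The FP arithmetic ops go through the x87 stack.  Each tests FPSCR.PR
                         * at runtime: the single-precision path loads operands with push_fr()
                         * and writes the result with pop_fr(), the double-precision path uses
                         * push_dr()/pop_dr() on the register pairs, and both share the same
                         * FADDP/FSUBP/FMULP/FDIVP in between.
                         */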
  3030                     case 0x1:
  3031                         { /* FSUB FRm, FRn */
  3032                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3033                         COUNT_INST(I_FSUB);
  3034                         check_fpuen();
  3035                         load_spreg( R_ECX, R_FPSCR );
  3036                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3037                         JNE_rel8(doubleprec);
  3038                         push_fr(FRn);
  3039                         push_fr(FRm);
  3040                         FSUBP_st(1);
  3041                         pop_fr(FRn);
  3042                         JMP_rel8(end);
  3043                         JMP_TARGET(doubleprec);
  3044                         push_dr(FRn);
  3045                         push_dr(FRm);
  3046                         FSUBP_st(1);
  3047                         pop_dr(FRn);
  3048                         JMP_TARGET(end);
  3049                         sh4_x86.tstate = TSTATE_NONE;
  3051                         break;
  3052                     case 0x2:
  3053                         { /* FMUL FRm, FRn */
  3054                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3055                         COUNT_INST(I_FMUL);
  3056                         check_fpuen();
  3057                         load_spreg( R_ECX, R_FPSCR );
  3058                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3059                         JNE_rel8(doubleprec);
  3060                         push_fr(FRm);
  3061                         push_fr(FRn);
  3062                         FMULP_st(1);
  3063                         pop_fr(FRn);
  3064                         JMP_rel8(end);
  3065                         JMP_TARGET(doubleprec);
  3066                         push_dr(FRm);
  3067                         push_dr(FRn);
  3068                         FMULP_st(1);
  3069                         pop_dr(FRn);
  3070                         JMP_TARGET(end);
  3071                         sh4_x86.tstate = TSTATE_NONE;
  3073                         break;
  3074                     case 0x3:
  3075                         { /* FDIV FRm, FRn */
  3076                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3077                         COUNT_INST(I_FDIV);
  3078                         check_fpuen();
  3079                         load_spreg( R_ECX, R_FPSCR );
  3080                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3081                         JNE_rel8(doubleprec);
  3082                         push_fr(FRn);
  3083                         push_fr(FRm);
  3084                         FDIVP_st(1);
  3085                         pop_fr(FRn);
  3086                         JMP_rel8(end);
  3087                         JMP_TARGET(doubleprec);
  3088                         push_dr(FRn);
  3089                         push_dr(FRm);
  3090                         FDIVP_st(1);
  3091                         pop_dr(FRn);
  3092                         JMP_TARGET(end);
  3093                         sh4_x86.tstate = TSTATE_NONE;
  3095                         break;
  3096                     case 0x4:
  3097                         { /* FCMP/EQ FRm, FRn */
  3098                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3099                         COUNT_INST(I_FCMPEQ);
  3100                         check_fpuen();
  3101                         load_spreg( R_ECX, R_FPSCR );
  3102                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3103                         JNE_rel8(doubleprec);
  3104                         push_fr(FRm);
  3105                         push_fr(FRn);
  3106                         JMP_rel8(end);
  3107                         JMP_TARGET(doubleprec);
  3108                         push_dr(FRm);
  3109                         push_dr(FRn);
  3110                         JMP_TARGET(end);
  3111                         FCOMIP_st(1);
  3112                         SETE_t();
  3113                         FPOP_st();
  3114                         sh4_x86.tstate = TSTATE_NONE;
  3116                         break;
  3117                     case 0x5:
  3118                         { /* FCMP/GT FRm, FRn */
  3119                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3120                         COUNT_INST(I_FCMPGT);
  3121                         check_fpuen();
  3122                         load_spreg( R_ECX, R_FPSCR );
  3123                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3124                         JNE_rel8(doubleprec);
  3125                         push_fr(FRm);
  3126                         push_fr(FRn);
  3127                         JMP_rel8(end);
  3128                         JMP_TARGET(doubleprec);
  3129                         push_dr(FRm);
  3130                         push_dr(FRn);
  3131                         JMP_TARGET(end);
  3132                         FCOMIP_st(1);
  3133                         SETA_t();
  3134                         FPOP_st();
  3135                         sh4_x86.tstate = TSTATE_NONE;
  3137                         break;
  3138                     case 0x6:
  3139                         { /* FMOV @(R0, Rm), FRn */
  3140                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3141                         COUNT_INST(I_FMOV7);
  3142                         check_fpuen();
  3143                         load_reg( R_EAX, Rm );
  3144                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
  3145                         check_ralign32( R_EAX );
  3146                         MMU_TRANSLATE_READ( R_EAX );
  3147                         load_spreg( R_EDX, R_FPSCR );
  3148                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3149                         JNE_rel8(doublesize);
  3151                         MEM_READ_LONG( R_EAX, R_EAX );
  3152                         store_fr( R_EAX, FRn );
  3153                         JMP_rel8(end);
  3155                         JMP_TARGET(doublesize);
  3156                         MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3157                         store_dr0( R_ECX, FRn );
  3158                         store_dr1( R_EAX, FRn );
  3159                         JMP_TARGET(end);
  3161                         sh4_x86.tstate = TSTATE_NONE;
  3163                         break;
  3164                     case 0x7:
  3165                         { /* FMOV FRm, @(R0, Rn) */
  3166                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3167                         COUNT_INST(I_FMOV4);
  3168                         check_fpuen();
  3169                         load_reg( R_EAX, Rn );
  3170                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
  3171                         check_walign32( R_EAX );
  3172                         MMU_TRANSLATE_WRITE( R_EAX );
  3173                         load_spreg( R_EDX, R_FPSCR );
  3174                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3175                         JNE_rel8(doublesize);
  3177                         load_fr( R_ECX, FRm );
  3178                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3179                         JMP_rel8(end);
  3181                         JMP_TARGET(doublesize);
  3182                         load_dr0( R_ECX, FRm );
  3183                         load_dr1( R_EDX, FRm );
  3184                         MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3185                         JMP_TARGET(end);
  3187                         sh4_x86.tstate = TSTATE_NONE;
  3189                         break;
  3190                     case 0x8:
  3191                         { /* FMOV @Rm, FRn */
  3192                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3193                         COUNT_INST(I_FMOV5);
  3194                         check_fpuen();
  3195                         load_reg( R_EAX, Rm );
  3196                         check_ralign32( R_EAX );
  3197                         MMU_TRANSLATE_READ( R_EAX );
  3198                         load_spreg( R_EDX, R_FPSCR );
  3199                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3200                         JNE_rel8(doublesize);
  3202                         MEM_READ_LONG( R_EAX, R_EAX );
  3203                         store_fr( R_EAX, FRn );
  3204                         JMP_rel8(end);
  3206                         JMP_TARGET(doublesize);
  3207                         MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3208                         store_dr0( R_ECX, FRn );
  3209                         store_dr1( R_EAX, FRn );
  3210                         JMP_TARGET(end);
  3211                         sh4_x86.tstate = TSTATE_NONE;
  3213                         break;
  3214                     case 0x9:
  3215                         { /* FMOV @Rm+, FRn */
  3216                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3217                         COUNT_INST(I_FMOV6);
  3218                         check_fpuen();
  3219                         load_reg( R_EAX, Rm );
  3220                         check_ralign32( R_EAX );
  3221                         MMU_TRANSLATE_READ( R_EAX );
  3222                         load_spreg( R_EDX, R_FPSCR );
  3223                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3224                         JNE_rel8(doublesize);
  3226                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  3227                         MEM_READ_LONG( R_EAX, R_EAX );
  3228                         store_fr( R_EAX, FRn );
  3229                         JMP_rel8(end);
  3231                         JMP_TARGET(doublesize);
  3232                         ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
  3233                         MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3234                         store_dr0( R_ECX, FRn );
  3235                         store_dr1( R_EAX, FRn );
  3236                         JMP_TARGET(end);
  3238                         sh4_x86.tstate = TSTATE_NONE;
  3240                         break;
  3241                     case 0xA:
  3242                         { /* FMOV FRm, @Rn */
  3243                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3244                         COUNT_INST(I_FMOV2);
  3245                         check_fpuen();
  3246                         load_reg( R_EAX, Rn );
  3247                         check_walign32( R_EAX );
  3248                         MMU_TRANSLATE_WRITE( R_EAX );
  3249                         load_spreg( R_EDX, R_FPSCR );
  3250                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3251                         JNE_rel8(doublesize);
  3253                         load_fr( R_ECX, FRm );
  3254                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3255                         JMP_rel8(end);
  3257                         JMP_TARGET(doublesize);
  3258                         load_dr0( R_ECX, FRm );
  3259                         load_dr1( R_EDX, FRm );
  3260                         MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3261                         JMP_TARGET(end);
  3262                         sh4_x86.tstate = TSTATE_NONE;
  3264                         break;
  3265                     case 0xB:
  3266                         { /* FMOV FRm, @-Rn */
  3267                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3268                         COUNT_INST(I_FMOV3);
  3269                         check_fpuen();
  3270                         load_reg( R_EAX, Rn );
  3271                         check_walign32( R_EAX );
  3272                         load_spreg( R_EDX, R_FPSCR );
  3273                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3274                         JNE_rel8(doublesize);
  3276                         ADD_imm8s_r32( -4, R_EAX );
  3277                         MMU_TRANSLATE_WRITE( R_EAX );
  3278                         load_fr( R_ECX, FRm );
  3279                         ADD_imm8s_sh4r(-4,REG_OFFSET(r[Rn]));
  3280                         MEM_WRITE_LONG( R_EAX, R_ECX );
  3281                         JMP_rel8(end);
  3283                         JMP_TARGET(doublesize);
  3284                         ADD_imm8s_r32(-8,R_EAX);
  3285                         MMU_TRANSLATE_WRITE( R_EAX );
  3286                         load_dr0( R_ECX, FRm );
  3287                         load_dr1( R_EDX, FRm );
  3288                         ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
  3289                         MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3290                         JMP_TARGET(end);
  3292                         sh4_x86.tstate = TSTATE_NONE;
  3294                         break;
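                        /* The FMOV load/store forms test FPSCR.SZ to choose between a 32-bit
                         * transfer and a 64-bit register-pair transfer.  In the pre-decrement
                         * form the address is translated only after subtracting 4 or 8, and Rn
                         * itself is only decremented once translation has succeeded, so a TLB
                         * exception taken here leaves Rn unmodified.
                         */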
  3295                     case 0xC:
  3296                         { /* FMOV FRm, FRn */
  3297                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3298                         COUNT_INST(I_FMOV1);
  3299                         check_fpuen();
  3300                         load_spreg( R_ECX, R_FPSCR );
  3301                         TEST_imm32_r32( FPSCR_SZ, R_ECX );
  3302                         JNE_rel8(doublesize);
  3303                         load_fr( R_EAX, FRm ); // SZ=0 branch
  3304                         store_fr( R_EAX, FRn );
  3305                         JMP_rel8(end);
  3306                         JMP_TARGET(doublesize);
  3307                         load_dr0( R_EAX, FRm );
  3308                         load_dr1( R_ECX, FRm );
  3309                         store_dr0( R_EAX, FRn );
  3310                         store_dr1( R_ECX, FRn );
  3311                         JMP_TARGET(end);
  3312                         sh4_x86.tstate = TSTATE_NONE;
  3314                         break;
  3315                     case 0xD:
  3316                         switch( (ir&0xF0) >> 4 ) {
  3317                             case 0x0:
  3318                                 { /* FSTS FPUL, FRn */
  3319                                 uint32_t FRn = ((ir>>8)&0xF); 
  3320                                 COUNT_INST(I_FSTS);
  3321                                 check_fpuen();
  3322                                 load_spreg( R_EAX, R_FPUL );
  3323                                 store_fr( R_EAX, FRn );
  3324                                 sh4_x86.tstate = TSTATE_NONE;
  3326                                 break;
  3327                             case 0x1:
  3328                                 { /* FLDS FRm, FPUL */
  3329                                 uint32_t FRm = ((ir>>8)&0xF); 
  3330                                 COUNT_INST(I_FLDS);
  3331                                 check_fpuen();
  3332                                 load_fr( R_EAX, FRm );
  3333                                 store_spreg( R_EAX, R_FPUL );
  3334                                 sh4_x86.tstate = TSTATE_NONE;
  3336                                 break;
  3337                             case 0x2:
  3338                                 { /* FLOAT FPUL, FRn */
  3339                                 uint32_t FRn = ((ir>>8)&0xF); 
  3340                                 COUNT_INST(I_FLOAT);
  3341                                 check_fpuen();
  3342                                 load_spreg( R_ECX, R_FPSCR );
  3343                                 FILD_sh4r(R_FPUL);
  3344                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3345                                 JNE_rel8(doubleprec);
  3346                                 pop_fr( FRn );
  3347                                 JMP_rel8(end);
  3348                                 JMP_TARGET(doubleprec);
  3349                                 pop_dr( FRn );
  3350                                 JMP_TARGET(end);
  3351                                 sh4_x86.tstate = TSTATE_NONE;
  3353                                 break;
  3354                             case 0x3:
  3355                                 { /* FTRC FRm, FPUL */
  3356                                 uint32_t FRm = ((ir>>8)&0xF); 
  3357                                 COUNT_INST(I_FTRC);
  3358                                 check_fpuen();
  3359                                 load_spreg( R_ECX, R_FPSCR );
  3360                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3361                                 JNE_rel8(doubleprec);
  3362                                 push_fr( FRm );
  3363                                 JMP_rel8(doop);
  3364                                 JMP_TARGET(doubleprec);
  3365                                 push_dr( FRm );
  3366                                 JMP_TARGET( doop );
  3367                                 load_imm32( R_ECX, (uint32_t)&max_int );
  3368                                 FILD_r32ind( R_ECX );
  3369                                 FCOMIP_st(1);
  3370                                 JNA_rel8( sat );
  3371                                 load_imm32( R_ECX, (uint32_t)&min_int );  // 5
  3372                                 FILD_r32ind( R_ECX );           // 2
  3373                                 FCOMIP_st(1);                   // 2
  3374                                 JAE_rel8( sat2 );            // 2
  3375                                 load_imm32( R_EAX, (uint32_t)&save_fcw );
  3376                                 FNSTCW_r32ind( R_EAX );
  3377                                 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
  3378                                 FLDCW_r32ind( R_EDX );
  3379                                 FISTP_sh4r(R_FPUL);             // 3
  3380                                 FLDCW_r32ind( R_EAX );
  3381                                 JMP_rel8(end);             // 2
  3383                                 JMP_TARGET(sat);
  3384                                 JMP_TARGET(sat2);
  3385                                 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
  3386                                 store_spreg( R_ECX, R_FPUL );
  3387                                 FPOP_st();
  3388                                 JMP_TARGET(end);
  3389                                 sh4_x86.tstate = TSTATE_NONE;
  3390                                 }
  3391                                 break;
  3392                             case 0x4:
  3393                                 { /* FNEG FRn */
  3394                                 uint32_t FRn = ((ir>>8)&0xF); 
  3395                                 COUNT_INST(I_FNEG);
  3396                                 check_fpuen();
  3397                                 load_spreg( R_ECX, R_FPSCR );
  3398                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3399                                 JNE_rel8(doubleprec);
  3400                                 push_fr(FRn);
  3401                                 FCHS_st0();
  3402                                 pop_fr(FRn);
  3403                                 JMP_rel8(end);
  3404                                 JMP_TARGET(doubleprec);
  3405                                 push_dr(FRn);
  3406                                 FCHS_st0();
  3407                                 pop_dr(FRn);
  3408                                 JMP_TARGET(end);
  3409                                 sh4_x86.tstate = TSTATE_NONE;
  3410                                 }
  3411                                 break;
  3412                             case 0x5:
  3413                                 { /* FABS FRn */
  3414                                 uint32_t FRn = ((ir>>8)&0xF); 
  3415                                 COUNT_INST(I_FABS);
  3416                                 check_fpuen();
  3417                                 load_spreg( R_ECX, R_FPSCR );
  3418                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3419                                 JNE_rel8(doubleprec);
  3420                                 push_fr(FRn); // 6
  3421                                 FABS_st0(); // 2
  3422                                 pop_fr(FRn); //6
  3423                                 JMP_rel8(end); // 2
  3424                                 JMP_TARGET(doubleprec);
  3425                                 push_dr(FRn);
  3426                                 FABS_st0();
  3427                                 pop_dr(FRn);
  3428                                 JMP_TARGET(end);
  3429                                 sh4_x86.tstate = TSTATE_NONE;
  3430                                 }
  3431                                 break;
  3432                             case 0x6:
  3433                                 { /* FSQRT FRn */
  3434                                 uint32_t FRn = ((ir>>8)&0xF); 
  3435                                 COUNT_INST(I_FSQRT);
  3436                                 check_fpuen();
  3437                                 load_spreg( R_ECX, R_FPSCR );
  3438                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3439                                 JNE_rel8(doubleprec);
  3440                                 push_fr(FRn);
  3441                                 FSQRT_st0();
  3442                                 pop_fr(FRn);
  3443                                 JMP_rel8(end);
  3444                                 JMP_TARGET(doubleprec);
  3445                                 push_dr(FRn);
  3446                                 FSQRT_st0();
  3447                                 pop_dr(FRn);
  3448                                 JMP_TARGET(end);
  3449                                 sh4_x86.tstate = TSTATE_NONE;
  3450                                 }
  3451                                 break;
  3452                             case 0x7:
  3453                                 { /* FSRRA FRn */
  3454                                 uint32_t FRn = ((ir>>8)&0xF); 
  3455                                 COUNT_INST(I_FSRRA);
  3456                                 check_fpuen();
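                                       // Compute 1.0/sqrt(FRn) on the x87 stack (defined for PR=0 only)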
  3457                                 load_spreg( R_ECX, R_FPSCR );
  3458                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3459                                 JNE_rel8(end); // PR=0 only
  3460                                 FLD1_st0();
  3461                                 push_fr(FRn);
  3462                                 FSQRT_st0();
  3463                                 FDIVP_st(1);
  3464                                 pop_fr(FRn);
  3465                                 JMP_TARGET(end);
  3466                                 sh4_x86.tstate = TSTATE_NONE;
  3467                                 }
  3468                                 break;
  3469                             case 0x8:
  3470                                 { /* FLDI0 FRn */
  3471                                 uint32_t FRn = ((ir>>8)&0xF); 
  3472                                 /* IFF PR=0 */
  3473                                   COUNT_INST(I_FLDI0);
  3474                                   check_fpuen();
  3475                                   load_spreg( R_ECX, R_FPSCR );
  3476                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3477                                   JNE_rel8(end);
  3478                                   XOR_r32_r32( R_EAX, R_EAX );
  3479                                   store_fr( R_EAX, FRn );
  3480                                   JMP_TARGET(end);
  3481                                   sh4_x86.tstate = TSTATE_NONE;
  3482                                 }
  3483                                 break;
  3484                             case 0x9:
  3485                                 { /* FLDI1 FRn */
  3486                                 uint32_t FRn = ((ir>>8)&0xF); 
  3487                                 /* IFF PR=0 */
  3488                                   COUNT_INST(I_FLDI1);
  3489                                   check_fpuen();
  3490                                   load_spreg( R_ECX, R_FPSCR );
  3491                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3492                                   JNE_rel8(end);
  3493                                   load_imm32(R_EAX, 0x3F800000); // IEEE-754 single-precision 1.0
  3494                                   store_fr( R_EAX, FRn );
  3495                                   JMP_TARGET(end);
  3496                                   sh4_x86.tstate = TSTATE_NONE;
  3497                                 }
  3498                                 break;
  3499                             case 0xA:
  3500                                 { /* FCNVSD FPUL, FRn */
  3501                                 uint32_t FRn = ((ir>>8)&0xF); 
  3502                                 COUNT_INST(I_FCNVSD);
  3503                                 check_fpuen();
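                                       // PR=1 only: convert the single-precision value in FPUL to double precision in DRn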
  3504                                 load_spreg( R_ECX, R_FPSCR );
  3505                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3506                                 JE_rel8(end); // only when PR=1
  3507                                 push_fpul();
  3508                                 pop_dr( FRn );
  3509                                 JMP_TARGET(end);
  3510                                 sh4_x86.tstate = TSTATE_NONE;
  3511                                 }
  3512                                 break;
  3513                             case 0xB:
  3514                                 { /* FCNVDS FRm, FPUL */
  3515                                 uint32_t FRm = ((ir>>8)&0xF); 
  3516                                 COUNT_INST(I_FCNVDS);
  3517                                 check_fpuen();
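                                       // PR=1 only: round DRm to single precision and store the result in FPUL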
  3518                                 load_spreg( R_ECX, R_FPSCR );
  3519                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3520                                 JE_rel8(end); // only when PR=1
  3521                                 push_dr( FRm );
  3522                                 pop_fpul();
  3523                                 JMP_TARGET(end);
  3524                                 sh4_x86.tstate = TSTATE_NONE;
  3525                                 }
  3526                                 break;
  3527                             case 0xE:
  3528                                 { /* FIPR FVm, FVn */
  3529                                 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3); 
  3530                                 COUNT_INST(I_FIPR);
  3531                                 check_fpuen();
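                                       // PR=0 only: sum the 4-element dot product of FVm and FVn on the x87 stack into FR[(FVn<<2)+3]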
  3532                                 load_spreg( R_ECX, R_FPSCR );
  3533                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3534                                 JNE_rel8( doubleprec);
  3536                                 push_fr( FVm<<2 );
  3537                                 push_fr( FVn<<2 );
  3538                                 FMULP_st(1);
  3539                                 push_fr( (FVm<<2)+1);
  3540                                 push_fr( (FVn<<2)+1);
  3541                                 FMULP_st(1);
  3542                                 FADDP_st(1);
  3543                                 push_fr( (FVm<<2)+2);
  3544                                 push_fr( (FVn<<2)+2);
  3545                                 FMULP_st(1);
  3546                                 FADDP_st(1);
  3547                                 push_fr( (FVm<<2)+3);
  3548                                 push_fr( (FVn<<2)+3);
  3549                                 FMULP_st(1);
  3550                                 FADDP_st(1);
  3551                                 pop_fr( (FVn<<2)+3);
  3552                                 JMP_TARGET(doubleprec);
  3553                                 sh4_x86.tstate = TSTATE_NONE;
  3554                                 }
  3555                                 break;
  3556                             case 0xF:
  3557                                 switch( (ir&0x100) >> 8 ) {
  3558                                     case 0x0:
  3559                                         { /* FSCA FPUL, FRn */
  3560                                         uint32_t FRn = ((ir>>9)&0x7)<<1; 
  3561                                         COUNT_INST(I_FSCA);
  3562                                         check_fpuen();
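                                               // PR=0 only: sh4_fsca() writes the sine and cosine of the FPUL angle into the FRn register pair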
  3563                                         load_spreg( R_ECX, R_FPSCR );
  3564                                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3565                                         JNE_rel8(doubleprec );
  3566                                         LEA_sh4r_r32( REG_OFFSET(fr[0][FRn&0x0E]), R_ECX );
  3567                                         load_spreg( R_EDX, R_FPUL );
  3568                                         call_func2( sh4_fsca, R_EDX, R_ECX );
  3569                                         JMP_TARGET(doubleprec);
  3570                                         sh4_x86.tstate = TSTATE_NONE;
  3571                                         }
  3572                                         break;
  3573                                     case 0x1:
  3574                                         switch( (ir&0x200) >> 9 ) {
  3575                                             case 0x0:
  3576                                                 { /* FTRV XMTRX, FVn */
  3577                                                 uint32_t FVn = ((ir>>10)&0x3); 
  3578                                                 COUNT_INST(I_FTRV);
  3579                                                 check_fpuen();
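                                                       // PR=0 only: sh4_ftrv() transforms vector FVn by the XMTRX matrix in place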
  3580                                                 load_spreg( R_ECX, R_FPSCR );
  3581                                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3582                                                 JNE_rel8( doubleprec );
  3583                                                 LEA_sh4r_r32( REG_OFFSET(fr[0][FVn<<2]), R_EDX );
  3584                                                 call_func1( sh4_ftrv, R_EDX );  // 12
  3585                                                 JMP_TARGET(doubleprec);
  3586                                                 sh4_x86.tstate = TSTATE_NONE;
  3587                                                 }
  3588                                                 break;
  3589                                             case 0x1:
  3590                                                 switch( (ir&0xC00) >> 10 ) {
  3591                                                     case 0x0:
  3592                                                         { /* FSCHG */
  3593                                                         COUNT_INST(I_FSCHG);
  3594                                                         check_fpuen();
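                                                               // Toggle the FPSCR.SZ transfer-size bit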
  3595                                                         load_spreg( R_ECX, R_FPSCR );
  3596                                                         XOR_imm32_r32( FPSCR_SZ, R_ECX );
  3597                                                         store_spreg( R_ECX, R_FPSCR );
  3598                                                         sh4_x86.tstate = TSTATE_NONE;
  3599                                                         }
  3600                                                         break;
  3601                                                     case 0x2:
  3602                                                         { /* FRCHG */
  3603                                                         COUNT_INST(I_FRCHG);
  3604                                                         check_fpuen();
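                                                               // Toggle FPSCR.FR and swap the floating-point register banks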
  3605                                                         load_spreg( R_ECX, R_FPSCR );
  3606                                                         XOR_imm32_r32( FPSCR_FR, R_ECX );
  3607                                                         store_spreg( R_ECX, R_FPSCR );
  3608                                                         call_func0( sh4_switch_fr_banks );
  3609                                                         sh4_x86.tstate = TSTATE_NONE;
  3610                                                         }
  3611                                                         break;
  3612                                                     case 0x3:
  3613                                                         { /* UNDEF */
  3614                                                         COUNT_INST(I_UNDEF);
  3615                                                         if( sh4_x86.in_delay_slot ) {
  3616                                                     	SLOTILLEGAL();
  3617                                                         } else {
  3618                                                     	JMP_exc(EXC_ILLEGAL);
  3619                                                     	return 2;
  3620                                                         }
  3621                                                         }
  3622                                                         break;
  3623                                                     default:
  3624                                                         UNDEF();
  3625                                                         break;
  3626                                                 }
  3627                                                 break;
  3628                                         }
  3629                                         break;
  3630                                 }
  3631                                 break;
  3632                             default:
  3633                                 UNDEF();
  3634                                 break;
  3635                         }
  3636                         break;
  3637                     case 0xE:
  3638                         { /* FMAC FR0, FRm, FRn */
  3639                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3640                         COUNT_INST(I_FMAC);
  3641                         check_fpuen();
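                               // FRn += FR0 * FRm on the x87 stack; FPSCR.PR selects single or double precision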
  3642                         load_spreg( R_ECX, R_FPSCR );
  3643                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3644                         JNE_rel8(doubleprec);
  3645                         push_fr( 0 );
  3646                         push_fr( FRm );
  3647                         FMULP_st(1);
  3648                         push_fr( FRn );
  3649                         FADDP_st(1);
  3650                         pop_fr( FRn );
  3651                         JMP_rel8(end);
  3652                         JMP_TARGET(doubleprec);
  3653                         push_dr( 0 );
  3654                         push_dr( FRm );
  3655                         FMULP_st(1);
  3656                         push_dr( FRn );
  3657                         FADDP_st(1);
  3658                         pop_dr( FRn );
  3659                         JMP_TARGET(end);
  3660                         sh4_x86.tstate = TSTATE_NONE;
  3661                         }
  3662                         break;
  3663                     default:
  3664                         UNDEF();
  3665                         break;
  3666                 }
  3667                 break;
  3668         }
  3669
  3670     sh4_x86.in_delay_slot = DELAY_NONE;
  3671     return 0;