lxdream.org :: lxdream/src/sh4/sh4x86.c
filename src/sh4/sh4x86.c
changeset 604:1024c3a9cb88
prev 601:d8d1af0d133c
next 626:a010e30a30e9
author nkeynes
date Sat Jan 26 02:45:27 2008 +0000 (16 years ago)
permissions -rw-r--r--
last change Bug #50: Implement mouse and keyboard
     1 /**
     2  * $Id$
     3  * 
     4  * SH4 => x86 translation. This version does no real optimization, it just
     5  * outputs straight-line x86 code - it mainly exists to provide a baseline
     6  * to test the optimizing versions against.
     7  *
     8  * Copyright (c) 2007 Nathan Keynes.
     9  *
    10  * This program is free software; you can redistribute it and/or modify
    11  * it under the terms of the GNU General Public License as published by
    12  * the Free Software Foundation; either version 2 of the License, or
    13  * (at your option) any later version.
    14  *
    15  * This program is distributed in the hope that it will be useful,
    16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
    17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    18  * GNU General Public License for more details.
    19  */
    21 #include <assert.h>
    22 #include <math.h>
    24 #ifndef NDEBUG
    25 #define DEBUG_JUMPS 1
    26 #endif
    28 #include "sh4/xltcache.h"
    29 #include "sh4/sh4core.h"
    30 #include "sh4/sh4trans.h"
    31 #include "sh4/sh4mmio.h"
    32 #include "sh4/x86op.h"
    33 #include "clock.h"
    35 #define DEFAULT_BACKPATCH_SIZE 4096
    37 struct backpatch_record {
    38     uint32_t fixup_offset;
    39     uint32_t fixup_icount;
    40     int32_t exc_code;
    41 };
    43 #define MAX_RECOVERY_SIZE 2048
    45 #define DELAY_NONE 0
    46 #define DELAY_PC 1
    47 #define DELAY_PC_PR 2
    49 /** 
    50  * Struct to manage internal translation state. This state is not saved -
    51  * it is only valid between calls to sh4_translate_begin_block() and
    52  * sh4_translate_end_block()
    53  */
    54 struct sh4_x86_state {
    55     int in_delay_slot;
    56     gboolean priv_checked; /* true if we've already checked the cpu mode. */
    57     gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
    58     gboolean branch_taken; /* true if we branched unconditionally */
    59     uint32_t block_start_pc;
    60     uint32_t stack_posn;   /* Trace stack height for alignment purposes */
    61     int tstate;
    63     /* mode flags */
    64     gboolean tlb_on; /* True if tlb translation is active */
    66     /* Allocated memory for the (block-wide) back-patch list */
    67     struct backpatch_record *backpatch_list;
    68     uint32_t backpatch_posn;
    69     uint32_t backpatch_size;
    70 };
    72 #define TSTATE_NONE -1
    73 #define TSTATE_O    0
    74 #define TSTATE_C    2
    75 #define TSTATE_E    4
    76 #define TSTATE_NE   5
    77 #define TSTATE_G    0xF
    78 #define TSTATE_GE   0xD
    79 #define TSTATE_A    7
    80 #define TSTATE_AE   3
    82 /** Branch if T is set (either in the current cflags, or in sh4r.t) */
    83 #define JT_rel8(rel8,label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    84 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    85     OP(0x70+sh4_x86.tstate); OP(rel8); \
    86     MARK_JMP(rel8,label)
    87 /** Branch if T is clear (either in the current cflags or in sh4r.t) */
    88 #define JF_rel8(rel8,label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    89 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    90     OP(0x70+ (sh4_x86.tstate^1)); OP(rel8); \
    91     MARK_JMP(rel8, label)
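        /* The TSTATE_* values are the x86 condition-code numbers, so
         * OP(0x70+sh4_x86.tstate) in JT_rel8 emits the matching Jcc rel8 directly
         * (e.g. TSTATE_E==4 -> JE 0x74, TSTATE_GE==0xD -> JGE 0x7D), and XOR-ing
         * the code with 1, as JF_rel8 does, selects the inverse condition
         * (JGE -> JL, JE -> JNE, etc).  TSTATE_NONE means the emulated T bit is
         * not currently mirrored in the host flags, so the macros first re-test
         * sh4r.t with a CMP.  For example, with tstate==TSTATE_GE, JT_rel8(2,lbl)
         * emits the two bytes 7D 02. */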
    93 static struct sh4_x86_state sh4_x86;
    95 static uint32_t max_int = 0x7FFFFFFF;
    96 static uint32_t min_int = 0x80000000;
    97 static uint32_t save_fcw; /* save value for fpu control word */
    98 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
   100 void sh4_x86_init()
   101 {
   102     sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
   103     sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(struct backpatch_record);
   104 }
   107 static void sh4_x86_add_backpatch( uint8_t *fixup_addr, uint32_t fixup_pc, uint32_t exc_code )
   108 {
   109     if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
   110 	sh4_x86.backpatch_size <<= 1;
   111 	sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, 
   112 					  sh4_x86.backpatch_size * sizeof(struct backpatch_record));
   113 	assert( sh4_x86.backpatch_list != NULL );
   114     }
   115     if( sh4_x86.in_delay_slot ) {
   116 	fixup_pc -= 2;
   117     }
   118     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_offset = 
   119 	((uint8_t *)fixup_addr) - ((uint8_t *)xlat_current_block->code);
   120     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_icount = (fixup_pc - sh4_x86.block_start_pc)>>1;
   121     sh4_x86.backpatch_list[sh4_x86.backpatch_posn].exc_code = exc_code;
   122     sh4_x86.backpatch_posn++;
   123 }
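        /* Each backpatch record describes one forward branch to an exception exit:
         * fixup_offset is the branch's byte offset within the generated block,
         * fixup_icount is the index of the SH4 instruction it belongs to (in 2-byte
         * steps from block_start_pc, stepped back by one instruction when called
         * inside a delay slot), and exc_code selects the exception to raise.  The
         * records are presumably resolved when the block epilogue is emitted at
         * sh4_translate_end_block() time. */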
   125 /**
   126  * Emit an instruction to load an SH4 reg into a real register
   127  */
   128 static inline void load_reg( int x86reg, int sh4reg ) 
   129 {
   130     /* mov [bp+n], reg */
   131     OP(0x8B);
   132     OP(0x45 + (x86reg<<3));
   133     OP(REG_OFFSET(r[sh4reg]));
   134 }
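        /* Example: assuming the usual x86 register numbering from x86op.h
         * (EAX=0, ECX=1, EDX=2, ...), load_reg( R_ECX, 3 ) emits
         *   8B 4D <REG_OFFSET(r[3])>    ; mov ecx, [ebp + REG_OFFSET(r[3])]
         * since the ModRM byte 0x45+(1<<3) == 0x4D selects ecx, [ebp+disp8]. */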
   136 static inline void load_reg16s( int x86reg, int sh4reg )
   137 {
   138     OP(0x0F);
   139     OP(0xBF);
   140     MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
   141 }
   143 static inline void load_reg16u( int x86reg, int sh4reg )
   144 {
   145     OP(0x0F);
   146     OP(0xB7);
   147     MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
   149 }
   151 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
   152 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
   153 /**
   154  * Emit an instruction to load an immediate value into a register
   155  */
   156 static inline void load_imm32( int x86reg, uint32_t value ) {
   157     /* mov #value, reg */
   158     OP(0xB8 + x86reg);
   159     OP32(value);
   160 }
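        /* Example: load_imm32( R_EAX, 0x12345678 ) emits B8 78 56 34 12,
         * i.e. mov eax, 0x12345678 with the immediate written little-endian. */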
   162 /**
   163  * Load an immediate 64-bit quantity (note: x86-64 only)
   164  */
    165 static inline void load_imm64( int x86reg, uint64_t value ) {
   166     /* mov #value, reg */
   167     REXW();
   168     OP(0xB8 + x86reg);
   169     OP64(value);
   170 }
   173 /**
   174  * Emit an instruction to store an SH4 reg (RN)
   175  */
   176 void static inline store_reg( int x86reg, int sh4reg ) {
   177     /* mov reg, [bp+n] */
   178     OP(0x89);
   179     OP(0x45 + (x86reg<<3));
   180     OP(REG_OFFSET(r[sh4reg]));
   181 }
   183 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
   185 /**
   186  * Load an FR register (single-precision floating point) into an integer x86
   187  * register (eg for register-to-register moves)
   188  */
   189 void static inline load_fr( int bankreg, int x86reg, int frm )
   190 {
   191     OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
   192 }
   194 /**
    195  * Store an FR register (single-precision floating point) from an integer x86
   196  * register (eg for register-to-register moves)
   197  */
   198 void static inline store_fr( int bankreg, int x86reg, int frn )
   199 {
   200     OP(0x89);  OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
   201 }
   204 /**
    205  * Load a pointer to the back fp bank into the specified x86 register. The
   206  * bankreg must have been previously loaded with FPSCR.
   207  * NB: 12 bytes
   208  */
   209 static inline void load_xf_bank( int bankreg )
   210 {
   211     NOT_r32( bankreg );
   212     SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
   213     AND_imm8s_r32( 0x40, bankreg );    // Complete extraction
   214     OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
   215 }
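        /* Worked example: FPSCR.FR is bit 21.  NOT inverts it, SHR by 15 moves it
         * down to bit 6 (weight 0x40 == 64 bytes == one bank of 16 single-precision
         * registers), and the AND masks off everything else.  The LEA then yields
         * the bank at offset 0 or offset 64 from REG_OFFSET(fr), whichever is
         * currently the back (XF) bank, assuming the two banks are stored
         * contiguously starting at REG_OFFSET(fr). */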
   217 /**
   218  * Update the fr_bank pointer based on the current fpscr value.
   219  */
   220 static inline void update_fr_bank( int fpscrreg )
   221 {
   222     SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
   223     AND_imm8s_r32( 0x40, fpscrreg );    // Complete extraction
   224     OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
   225     store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
   226 }
   227 /**
   228  * Push FPUL (as a 32-bit float) onto the FPU stack
   229  */
   230 static inline void push_fpul( )
   231 {
   232     OP(0xD9); OP(0x45); OP(R_FPUL);
   233 }
   235 /**
   236  * Pop FPUL (as a 32-bit float) from the FPU stack
   237  */
   238 static inline void pop_fpul( )
   239 {
   240     OP(0xD9); OP(0x5D); OP(R_FPUL);
   241 }
   243 /**
   244  * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
   245  * with the location of the current fp bank.
   246  */
   247 static inline void push_fr( int bankreg, int frm ) 
   248 {
   249     OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2);  // FLD.S [bankreg + frm^1*4]
   250 }
   252 /**
   253  * Pop a 32-bit float from the FPU stack and store it back into the fp bank, 
   254  * with bankreg previously loaded with the location of the current fp bank.
   255  */
   256 static inline void pop_fr( int bankreg, int frm )
   257 {
   258     OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
   259 }
   261 /**
   262  * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
   263  * with the location of the current fp bank.
   264  */
   265 static inline void push_dr( int bankreg, int frm )
   266 {
   267     OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
   268 }
   270 static inline void pop_dr( int bankreg, int frm )
   271 {
   272     OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
   273 }
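        /* The ^1 in the single-precision accessors (load_fr/store_fr/push_fr/pop_fr)
         * swaps adjacent FR registers, while the double-precision push_dr/pop_dr use
         * the register number directly.  This suggests the FP register file is kept
         * as little-endian 64-bit pairs so that a DRn can be loaded at offset n*4
         * without byte swapping, with the two single halves at the pair-swapped
         * offsets; the actual layout is defined with the rest of sh4r. */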
   275 /* Exception checks - Note that all exception checks will clobber EAX */
   277 #define check_priv( ) \
   278     if( !sh4_x86.priv_checked ) { \
   279 	sh4_x86.priv_checked = TRUE;\
   280 	load_spreg( R_EAX, R_SR );\
   281 	AND_imm32_r32( SR_MD, R_EAX );\
   282 	if( sh4_x86.in_delay_slot ) {\
   283 	    JE_exc( EXC_SLOT_ILLEGAL );\
   284 	} else {\
   285 	    JE_exc( EXC_ILLEGAL );\
   286 	}\
   287     }\
   289 #define check_fpuen( ) \
   290     if( !sh4_x86.fpuen_checked ) {\
   291 	sh4_x86.fpuen_checked = TRUE;\
   292 	load_spreg( R_EAX, R_SR );\
   293 	AND_imm32_r32( SR_FD, R_EAX );\
   294 	if( sh4_x86.in_delay_slot ) {\
   295 	    JNE_exc(EXC_SLOT_FPU_DISABLED);\
   296 	} else {\
   297 	    JNE_exc(EXC_FPU_DISABLED);\
   298 	}\
   299     }
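        /* Both checks test a bit of SR (SR_MD for privilege, SR_FD for FPU-disable)
         * and branch to an exception exit when the check fails.  The priv_checked /
         * fpuen_checked flags ensure each test is emitted at most once per translated
         * block, and inside a delay slot the corresponding SLOT_* exception is raised
         * instead, matching SH4 behaviour. */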
   301 #define check_ralign16( x86reg ) \
   302     TEST_imm32_r32( 0x00000001, x86reg ); \
   303     JNE_exc(EXC_DATA_ADDR_READ)
   305 #define check_walign16( x86reg ) \
   306     TEST_imm32_r32( 0x00000001, x86reg ); \
   307     JNE_exc(EXC_DATA_ADDR_WRITE);
   309 #define check_ralign32( x86reg ) \
   310     TEST_imm32_r32( 0x00000003, x86reg ); \
   311     JNE_exc(EXC_DATA_ADDR_READ)
   313 #define check_walign32( x86reg ) \
   314     TEST_imm32_r32( 0x00000003, x86reg ); \
   315     JNE_exc(EXC_DATA_ADDR_WRITE);
   317 #define UNDEF()
   318 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
   319 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
   320 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
   321 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
   322 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
   323 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
   324 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
   326 /**
   327  * Perform MMU translation on the address in addr_reg for a read operation, iff the TLB is turned 
   328  * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
   329  */
   330 #define MMU_TRANSLATE_READ( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
   332 #define MMU_TRANSLATE_READ_EXC( addr_reg, exc_code ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(exc_code); MEM_RESULT(addr_reg) }
   333 /**
   334  * Perform MMU translation on the address in addr_reg for a write operation, iff the TLB is turned 
   335  * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
   336  */
   337 #define MMU_TRANSLATE_WRITE( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
   339 #define MEM_READ_SIZE (CALL_FUNC1_SIZE)
   340 #define MEM_WRITE_SIZE (CALL_FUNC2_SIZE)
   341 #define MMU_TRANSLATE_SIZE (sh4_x86.tlb_on ? (CALL_FUNC1_SIZE + 12) : 0 )
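        /* Typical emission pattern for a guest memory access, as used throughout the
         * decoder below:
         *
         *     load_reg( R_EAX, Rn );          // effective address -> EAX
         *     check_walign32( R_EAX );        // raise an address error if misaligned
         *     MMU_TRANSLATE_WRITE( R_EAX );   // VMA -> PMA when the TLB is on
         *     load_reg( R_EDX, Rm );          // value to store
         *     MEM_WRITE_LONG( R_EAX, R_EDX ); // call out to sh4_write_long()
         *
         * The 12 extra bytes counted by MMU_TRANSLATE_SIZE presumably cover the
         * CMP/JE (and result move) emitted around the translation call. */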
   343 #define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = DELAY_NONE; return 1;
   345 /****** Import appropriate calling conventions ******/
   346 #if SH4_TRANSLATOR == TARGET_X86_64
   347 #include "sh4/ia64abi.h"
   348 #else /* SH4_TRANSLATOR == TARGET_X86 */
   349 #ifdef APPLE_BUILD
   350 #include "sh4/ia32mac.h"
   351 #else
   352 #include "sh4/ia32abi.h"
   353 #endif
   354 #endif
   356 uint32_t sh4_translate_end_block_size()
   357 {
   358     if( sh4_x86.backpatch_posn <= 3 ) {
   359 	return EPILOGUE_SIZE + (sh4_x86.backpatch_posn*12);
   360     } else {
   361 	return EPILOGUE_SIZE + 48 + (sh4_x86.backpatch_posn-3)*15;
   362     }
   363 }
   366 /**
   367  * Embed a breakpoint into the generated code
   368  */
   369 void sh4_translate_emit_breakpoint( sh4vma_t pc )
   370 {
   371     load_imm32( R_EAX, pc );
   372     call_func1( sh4_translate_breakpoint_hit, R_EAX );
   373 }
   376 #define UNTRANSLATABLE(pc) !IS_IN_ICACHE(pc)
   378 /**
   379  * Embed a call to sh4_execute_instruction for situations that we
   380  * can't translate (just page-crossing delay slots at the moment).
   381  * Caller is responsible for setting new_pc before calling this function.
   382  *
   383  * Performs:
   384  *   Set PC = endpc
   385  *   Set sh4r.in_delay_slot = sh4_x86.in_delay_slot
   386  *   Update slice_cycle for endpc+2 (single step doesn't update slice_cycle)
   387  *   Call sh4_execute_instruction
   388  *   Call xlat_get_code_by_vma / xlat_get_code as for normal exit
   389  */
   390 void exit_block_emu( sh4vma_t endpc )
   391 {
   392     load_imm32( R_ECX, endpc - sh4_x86.block_start_pc );   // 5
   393     ADD_r32_sh4r( R_ECX, R_PC );
   395     load_imm32( R_ECX, (((endpc - sh4_x86.block_start_pc)>>1)+1)*sh4_cpu_period ); // 5
   396     ADD_r32_sh4r( R_ECX, REG_OFFSET(slice_cycle) );     // 6
   397     load_imm32( R_ECX, sh4_x86.in_delay_slot ? 1 : 0 );
   398     store_spreg( R_ECX, REG_OFFSET(in_delay_slot) );
   400     call_func0( sh4_execute_instruction );    
   401     load_spreg( R_EAX, R_PC );
   402     if( sh4_x86.tlb_on ) {
   403 	call_func1(xlat_get_code_by_vma,R_EAX);
   404     } else {
   405 	call_func1(xlat_get_code,R_EAX);
   406     }
   407     AND_imm8s_rptr( 0xFC, R_EAX );
   408     POP_r32(R_EBP);
   409     RET();
   410 } 
   412 /**
   413  * Translate a single instruction. Delayed branches are handled specially
   414  * by translating both branch and delayed instruction as a single unit (as
    415  * the delay-slot instruction is executed before the branch takes effect).
   416  * The instruction MUST be in the icache (assert check)
   417  *
   418  * @return true if the instruction marks the end of a basic block
    419  * (eg a branch or an instruction that otherwise ends the block).
   420  */
   421 uint32_t sh4_translate_instruction( sh4vma_t pc )
   422 {
   423     uint32_t ir;
   424     /* Read instruction from icache */
   425     assert( IS_IN_ICACHE(pc) );
   426     ir = *(uint16_t *)GET_ICACHE_PTR(pc);
   428 	/* PC is not in the current icache - this usually means we're running
   429 	 * with MMU on, and we've gone past the end of the page. And since 
   430 	 * sh4_translate_block is pretty careful about this, it means we're
   431 	 * almost certainly in a delay slot.
   432 	 *
   433 	 * Since we can't assume the page is present (and we can't fault it in
    434 	 * at this point), inline a call to sh4_execute_instruction (with a few
   435 	 * small repairs to cope with the different environment).
   436 	 */
   438     if( !sh4_x86.in_delay_slot ) {
   439 	sh4_translate_add_recovery( (pc - sh4_x86.block_start_pc)>>1 );
   440     }
   441         switch( (ir&0xF000) >> 12 ) {
   442             case 0x0:
   443                 switch( ir&0xF ) {
   444                     case 0x2:
   445                         switch( (ir&0x80) >> 7 ) {
   446                             case 0x0:
   447                                 switch( (ir&0x70) >> 4 ) {
   448                                     case 0x0:
   449                                         { /* STC SR, Rn */
   450                                         uint32_t Rn = ((ir>>8)&0xF); 
   451                                         check_priv();
   452                                         call_func0(sh4_read_sr);
   453                                         store_reg( R_EAX, Rn );
   454                                         sh4_x86.tstate = TSTATE_NONE;
   455                                         }
   456                                         break;
   457                                     case 0x1:
   458                                         { /* STC GBR, Rn */
   459                                         uint32_t Rn = ((ir>>8)&0xF); 
   460                                         load_spreg( R_EAX, R_GBR );
   461                                         store_reg( R_EAX, Rn );
   462                                         }
   463                                         break;
   464                                     case 0x2:
   465                                         { /* STC VBR, Rn */
   466                                         uint32_t Rn = ((ir>>8)&0xF); 
   467                                         check_priv();
   468                                         load_spreg( R_EAX, R_VBR );
   469                                         store_reg( R_EAX, Rn );
   470                                         sh4_x86.tstate = TSTATE_NONE;
   471                                         }
   472                                         break;
   473                                     case 0x3:
   474                                         { /* STC SSR, Rn */
   475                                         uint32_t Rn = ((ir>>8)&0xF); 
   476                                         check_priv();
   477                                         load_spreg( R_EAX, R_SSR );
   478                                         store_reg( R_EAX, Rn );
   479                                         sh4_x86.tstate = TSTATE_NONE;
   480                                         }
   481                                         break;
   482                                     case 0x4:
   483                                         { /* STC SPC, Rn */
   484                                         uint32_t Rn = ((ir>>8)&0xF); 
   485                                         check_priv();
   486                                         load_spreg( R_EAX, R_SPC );
   487                                         store_reg( R_EAX, Rn );
   488                                         sh4_x86.tstate = TSTATE_NONE;
   489                                         }
   490                                         break;
   491                                     default:
   492                                         UNDEF();
   493                                         break;
   494                                 }
   495                                 break;
   496                             case 0x1:
   497                                 { /* STC Rm_BANK, Rn */
   498                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
   499                                 check_priv();
   500                                 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
   501                                 store_reg( R_EAX, Rn );
   502                                 sh4_x86.tstate = TSTATE_NONE;
   503                                 }
   504                                 break;
   505                         }
   506                         break;
   507                     case 0x3:
   508                         switch( (ir&0xF0) >> 4 ) {
   509                             case 0x0:
   510                                 { /* BSRF Rn */
   511                                 uint32_t Rn = ((ir>>8)&0xF); 
   512                                 if( sh4_x86.in_delay_slot ) {
   513                             	SLOTILLEGAL();
   514                                 } else {
   515                             	load_spreg( R_EAX, R_PC );
   516                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
   517                             	store_spreg( R_EAX, R_PR );
   518                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_EAX );
   519                             	store_spreg( R_EAX, R_NEW_PC );
   521                             	sh4_x86.in_delay_slot = DELAY_PC;
   522                             	sh4_x86.tstate = TSTATE_NONE;
   523                             	sh4_x86.branch_taken = TRUE;
   524                             	if( UNTRANSLATABLE(pc+2) ) {
   525                             	    exit_block_emu(pc+2);
   526                             	    return 2;
   527                             	} else {
   528                             	    sh4_translate_instruction( pc + 2 );
   529                             	    exit_block_newpcset(pc+2);
   530                             	    return 4;
   531                             	}
   532                                 }
   533                                 }
   534                                 break;
   535                             case 0x2:
   536                                 { /* BRAF Rn */
   537                                 uint32_t Rn = ((ir>>8)&0xF); 
   538                                 if( sh4_x86.in_delay_slot ) {
   539                             	SLOTILLEGAL();
   540                                 } else {
   541                             	load_spreg( R_EAX, R_PC );
   542                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
   543                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_EAX );
   544                             	store_spreg( R_EAX, R_NEW_PC );
   545                             	sh4_x86.in_delay_slot = DELAY_PC;
   546                             	sh4_x86.tstate = TSTATE_NONE;
   547                             	sh4_x86.branch_taken = TRUE;
   548                             	if( UNTRANSLATABLE(pc+2) ) {
   549                             	    exit_block_emu(pc+2);
   550                             	    return 2;
   551                             	} else {
   552                             	    sh4_translate_instruction( pc + 2 );
   553                             	    exit_block_newpcset(pc+2);
   554                             	    return 4;
   555                             	}
   556                                 }
   557                                 }
   558                                 break;
   559                             case 0x8:
   560                                 { /* PREF @Rn */
   561                                 uint32_t Rn = ((ir>>8)&0xF); 
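                                        /* A PREF whose target lies in the store-queue region
                                         * (0xE0000000-0xE3FFFFFF, hence the AND/CMP below)
                                         * triggers a store-queue burst write via
                                         * sh4_flush_store_queue(); a zero return takes the
                                         * exception exit.  PREF to any other address is just
                                         * a prefetch hint and is treated as a no-op here. */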
   562                                 load_reg( R_EAX, Rn );
   563                                 MOV_r32_r32( R_EAX, R_ECX );
   564                                 AND_imm32_r32( 0xFC000000, R_EAX );
   565                                 CMP_imm32_r32( 0xE0000000, R_EAX );
   566                                 JNE_rel8(8+CALL_FUNC1_SIZE, end);
   567                                 call_func1( sh4_flush_store_queue, R_ECX );
   568                                 TEST_r32_r32( R_EAX, R_EAX );
   569                                 JE_exc(-1);
   570                                 JMP_TARGET(end);
   571                                 sh4_x86.tstate = TSTATE_NONE;
   572                                 }
   573                                 break;
   574                             case 0x9:
   575                                 { /* OCBI @Rn */
   576                                 uint32_t Rn = ((ir>>8)&0xF); 
   577                                 }
   578                                 break;
   579                             case 0xA:
   580                                 { /* OCBP @Rn */
   581                                 uint32_t Rn = ((ir>>8)&0xF); 
   582                                 }
   583                                 break;
   584                             case 0xB:
   585                                 { /* OCBWB @Rn */
   586                                 uint32_t Rn = ((ir>>8)&0xF); 
   587                                 }
   588                                 break;
   589                             case 0xC:
   590                                 { /* MOVCA.L R0, @Rn */
   591                                 uint32_t Rn = ((ir>>8)&0xF); 
   592                                 load_reg( R_EAX, Rn );
   593                                 check_walign32( R_EAX );
   594                                 MMU_TRANSLATE_WRITE( R_EAX );
   595                                 load_reg( R_EDX, 0 );
   596                                 MEM_WRITE_LONG( R_EAX, R_EDX );
   597                                 sh4_x86.tstate = TSTATE_NONE;
   598                                 }
   599                                 break;
   600                             default:
   601                                 UNDEF();
   602                                 break;
   603                         }
   604                         break;
   605                     case 0x4:
   606                         { /* MOV.B Rm, @(R0, Rn) */
   607                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   608                         load_reg( R_EAX, 0 );
   609                         load_reg( R_ECX, Rn );
   610                         ADD_r32_r32( R_ECX, R_EAX );
   611                         MMU_TRANSLATE_WRITE( R_EAX );
   612                         load_reg( R_EDX, Rm );
   613                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   614                         sh4_x86.tstate = TSTATE_NONE;
   615                         }
   616                         break;
   617                     case 0x5:
   618                         { /* MOV.W Rm, @(R0, Rn) */
   619                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   620                         load_reg( R_EAX, 0 );
   621                         load_reg( R_ECX, Rn );
   622                         ADD_r32_r32( R_ECX, R_EAX );
   623                         check_walign16( R_EAX );
   624                         MMU_TRANSLATE_WRITE( R_EAX );
   625                         load_reg( R_EDX, Rm );
   626                         MEM_WRITE_WORD( R_EAX, R_EDX );
   627                         sh4_x86.tstate = TSTATE_NONE;
   628                         }
   629                         break;
   630                     case 0x6:
   631                         { /* MOV.L Rm, @(R0, Rn) */
   632                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   633                         load_reg( R_EAX, 0 );
   634                         load_reg( R_ECX, Rn );
   635                         ADD_r32_r32( R_ECX, R_EAX );
   636                         check_walign32( R_EAX );
   637                         MMU_TRANSLATE_WRITE( R_EAX );
   638                         load_reg( R_EDX, Rm );
   639                         MEM_WRITE_LONG( R_EAX, R_EDX );
   640                         sh4_x86.tstate = TSTATE_NONE;
   641                         }
   642                         break;
   643                     case 0x7:
   644                         { /* MUL.L Rm, Rn */
   645                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   646                         load_reg( R_EAX, Rm );
   647                         load_reg( R_ECX, Rn );
   648                         MUL_r32( R_ECX );
   649                         store_spreg( R_EAX, R_MACL );
   650                         sh4_x86.tstate = TSTATE_NONE;
   651                         }
   652                         break;
   653                     case 0x8:
   654                         switch( (ir&0xFF0) >> 4 ) {
   655                             case 0x0:
   656                                 { /* CLRT */
   657                                 CLC();
   658                                 SETC_t();
   659                                 sh4_x86.tstate = TSTATE_C;
   660                                 }
   661                                 break;
   662                             case 0x1:
   663                                 { /* SETT */
   664                                 STC();
   665                                 SETC_t();
   666                                 sh4_x86.tstate = TSTATE_C;
   667                                 }
   668                                 break;
   669                             case 0x2:
   670                                 { /* CLRMAC */
   671                                 XOR_r32_r32(R_EAX, R_EAX);
   672                                 store_spreg( R_EAX, R_MACL );
   673                                 store_spreg( R_EAX, R_MACH );
   674                                 sh4_x86.tstate = TSTATE_NONE;
   675                                 }
   676                                 break;
   677                             case 0x3:
   678                                 { /* LDTLB */
   679                                 call_func0( MMU_ldtlb );
   680                                 }
   681                                 break;
   682                             case 0x4:
   683                                 { /* CLRS */
   684                                 CLC();
   685                                 SETC_sh4r(R_S);
   686                                 sh4_x86.tstate = TSTATE_C;
   687                                 }
   688                                 break;
   689                             case 0x5:
   690                                 { /* SETS */
   691                                 STC();
   692                                 SETC_sh4r(R_S);
   693                                 sh4_x86.tstate = TSTATE_C;
   694                                 }
   695                                 break;
   696                             default:
   697                                 UNDEF();
   698                                 break;
   699                         }
   700                         break;
   701                     case 0x9:
   702                         switch( (ir&0xF0) >> 4 ) {
   703                             case 0x0:
   704                                 { /* NOP */
   705                                 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
   706                                 }
   707                                 break;
   708                             case 0x1:
   709                                 { /* DIV0U */
   710                                 XOR_r32_r32( R_EAX, R_EAX );
   711                                 store_spreg( R_EAX, R_Q );
   712                                 store_spreg( R_EAX, R_M );
   713                                 store_spreg( R_EAX, R_T );
   714                                 sh4_x86.tstate = TSTATE_C; // works for DIV1
   715                                 }
   716                                 break;
   717                             case 0x2:
   718                                 { /* MOVT Rn */
   719                                 uint32_t Rn = ((ir>>8)&0xF); 
   720                                 load_spreg( R_EAX, R_T );
   721                                 store_reg( R_EAX, Rn );
   722                                 }
   723                                 break;
   724                             default:
   725                                 UNDEF();
   726                                 break;
   727                         }
   728                         break;
   729                     case 0xA:
   730                         switch( (ir&0xF0) >> 4 ) {
   731                             case 0x0:
   732                                 { /* STS MACH, Rn */
   733                                 uint32_t Rn = ((ir>>8)&0xF); 
   734                                 load_spreg( R_EAX, R_MACH );
   735                                 store_reg( R_EAX, Rn );
   736                                 }
   737                                 break;
   738                             case 0x1:
   739                                 { /* STS MACL, Rn */
   740                                 uint32_t Rn = ((ir>>8)&0xF); 
   741                                 load_spreg( R_EAX, R_MACL );
   742                                 store_reg( R_EAX, Rn );
   743                                 }
   744                                 break;
   745                             case 0x2:
   746                                 { /* STS PR, Rn */
   747                                 uint32_t Rn = ((ir>>8)&0xF); 
   748                                 load_spreg( R_EAX, R_PR );
   749                                 store_reg( R_EAX, Rn );
   750                                 }
   751                                 break;
   752                             case 0x3:
   753                                 { /* STC SGR, Rn */
   754                                 uint32_t Rn = ((ir>>8)&0xF); 
   755                                 check_priv();
   756                                 load_spreg( R_EAX, R_SGR );
   757                                 store_reg( R_EAX, Rn );
   758                                 sh4_x86.tstate = TSTATE_NONE;
   759                                 }
   760                                 break;
   761                             case 0x5:
   762                                 { /* STS FPUL, Rn */
   763                                 uint32_t Rn = ((ir>>8)&0xF); 
   764                                 load_spreg( R_EAX, R_FPUL );
   765                                 store_reg( R_EAX, Rn );
   766                                 }
   767                                 break;
   768                             case 0x6:
   769                                 { /* STS FPSCR, Rn */
   770                                 uint32_t Rn = ((ir>>8)&0xF); 
   771                                 load_spreg( R_EAX, R_FPSCR );
   772                                 store_reg( R_EAX, Rn );
   773                                 }
   774                                 break;
   775                             case 0xF:
   776                                 { /* STC DBR, Rn */
   777                                 uint32_t Rn = ((ir>>8)&0xF); 
   778                                 check_priv();
   779                                 load_spreg( R_EAX, R_DBR );
   780                                 store_reg( R_EAX, Rn );
   781                                 sh4_x86.tstate = TSTATE_NONE;
   782                                 }
   783                                 break;
   784                             default:
   785                                 UNDEF();
   786                                 break;
   787                         }
   788                         break;
   789                     case 0xB:
   790                         switch( (ir&0xFF0) >> 4 ) {
   791                             case 0x0:
   792                                 { /* RTS */
   793                                 if( sh4_x86.in_delay_slot ) {
   794                             	SLOTILLEGAL();
   795                                 } else {
   796                             	load_spreg( R_ECX, R_PR );
   797                             	store_spreg( R_ECX, R_NEW_PC );
   798                             	sh4_x86.in_delay_slot = DELAY_PC;
   799                             	sh4_x86.branch_taken = TRUE;
   800                             	if( UNTRANSLATABLE(pc+2) ) {
   801                             	    exit_block_emu(pc+2);
   802                             	    return 2;
   803                             	} else {
   804                             	    sh4_translate_instruction(pc+2);
   805                             	    exit_block_newpcset(pc+2);
   806                             	    return 4;
   807                             	}
   808                                 }
   809                                 }
   810                                 break;
   811                             case 0x1:
   812                                 { /* SLEEP */
   813                                 check_priv();
   814                                 call_func0( sh4_sleep );
   815                                 sh4_x86.tstate = TSTATE_NONE;
   816                                 sh4_x86.in_delay_slot = DELAY_NONE;
   817                                 return 2;
   818                                 }
   819                                 break;
   820                             case 0x2:
   821                                 { /* RTE */
   822                                 if( sh4_x86.in_delay_slot ) {
   823                             	SLOTILLEGAL();
   824                                 } else {
   825                             	check_priv();
   826                             	load_spreg( R_ECX, R_SPC );
   827                             	store_spreg( R_ECX, R_NEW_PC );
   828                             	load_spreg( R_EAX, R_SSR );
   829                             	call_func1( sh4_write_sr, R_EAX );
   830                             	sh4_x86.in_delay_slot = DELAY_PC;
   831                             	sh4_x86.priv_checked = FALSE;
   832                             	sh4_x86.fpuen_checked = FALSE;
   833                             	sh4_x86.tstate = TSTATE_NONE;
   834                             	sh4_x86.branch_taken = TRUE;
   835                             	if( UNTRANSLATABLE(pc+2) ) {
   836                             	    exit_block_emu(pc+2);
   837                             	    return 2;
   838                             	} else {
   839                             	    sh4_translate_instruction(pc+2);
   840                             	    exit_block_newpcset(pc+2);
   841                             	    return 4;
   842                             	}
   843                                 }
   844                                 }
   845                                 break;
   846                             default:
   847                                 UNDEF();
   848                                 break;
   849                         }
   850                         break;
   851                     case 0xC:
   852                         { /* MOV.B @(R0, Rm), Rn */
   853                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   854                         load_reg( R_EAX, 0 );
   855                         load_reg( R_ECX, Rm );
   856                         ADD_r32_r32( R_ECX, R_EAX );
    857                         MMU_TRANSLATE_READ( R_EAX );
   858                         MEM_READ_BYTE( R_EAX, R_EAX );
   859                         store_reg( R_EAX, Rn );
   860                         sh4_x86.tstate = TSTATE_NONE;
   861                         }
   862                         break;
   863                     case 0xD:
   864                         { /* MOV.W @(R0, Rm), Rn */
   865                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   866                         load_reg( R_EAX, 0 );
   867                         load_reg( R_ECX, Rm );
   868                         ADD_r32_r32( R_ECX, R_EAX );
   869                         check_ralign16( R_EAX );
   870                         MMU_TRANSLATE_READ( R_EAX );
   871                         MEM_READ_WORD( R_EAX, R_EAX );
   872                         store_reg( R_EAX, Rn );
   873                         sh4_x86.tstate = TSTATE_NONE;
   874                         }
   875                         break;
   876                     case 0xE:
   877                         { /* MOV.L @(R0, Rm), Rn */
   878                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   879                         load_reg( R_EAX, 0 );
   880                         load_reg( R_ECX, Rm );
   881                         ADD_r32_r32( R_ECX, R_EAX );
   882                         check_ralign32( R_EAX );
   883                         MMU_TRANSLATE_READ( R_EAX );
   884                         MEM_READ_LONG( R_EAX, R_EAX );
   885                         store_reg( R_EAX, Rn );
   886                         sh4_x86.tstate = TSTATE_NONE;
   887                         }
   888                         break;
   889                     case 0xF:
   890                         { /* MAC.L @Rm+, @Rn+ */
   891                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
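                                /* MAC.L: fetch a longword from @Rm+ and @Rn+ (with alignment
                                 * checks and MMU translation), multiply them as signed 32-bit
                                 * values (IMUL leaves the 64-bit product in EDX:EAX), add the
                                 * product into MACL/MACH with ADD/ADC, and, when the S flag is
                                 * set, call signsat48() to saturate the accumulator to 48 bits
                                 * as the SH4 does in saturating mode. */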
   892                         if( Rm == Rn ) {
   893                     	load_reg( R_EAX, Rm );
   894                     	check_ralign32( R_EAX );
   895                     	MMU_TRANSLATE_READ( R_EAX );
   896                     	PUSH_realigned_r32( R_EAX );
   897                     	load_reg( R_EAX, Rn );
   898                     	ADD_imm8s_r32( 4, R_EAX );
   899                     	MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
   900                     	ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rn]) );
    901                     	// Note: translate both addresses up front in case the two accesses
    902                     	// straddle a page boundary. It may be worth adding a page-boundary
    902                     	// check to skip the second translation.
   903                         } else {
   904                     	load_reg( R_EAX, Rm );
   905                     	check_ralign32( R_EAX );
   906                     	MMU_TRANSLATE_READ( R_EAX );
   907                     	load_reg( R_ECX, Rn );
   908                     	check_ralign32( R_ECX );
   909                     	PUSH_realigned_r32( R_EAX );
   910                     	MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
   911                     	MOV_r32_r32( R_ECX, R_EAX );
   912                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
   913                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
   914                         }
   915                         MEM_READ_LONG( R_EAX, R_EAX );
   916                         POP_r32( R_ECX );
   917                         PUSH_r32( R_EAX );
   918                         MEM_READ_LONG( R_ECX, R_EAX );
   919                         POP_realigned_r32( R_ECX );
   921                         IMUL_r32( R_ECX );
   922                         ADD_r32_sh4r( R_EAX, R_MACL );
   923                         ADC_r32_sh4r( R_EDX, R_MACH );
   925                         load_spreg( R_ECX, R_S );
   926                         TEST_r32_r32(R_ECX, R_ECX);
   927                         JE_rel8( CALL_FUNC0_SIZE, nosat );
   928                         call_func0( signsat48 );
   929                         JMP_TARGET( nosat );
   930                         sh4_x86.tstate = TSTATE_NONE;
   931                         }
   932                         break;
   933                     default:
   934                         UNDEF();
   935                         break;
   936                 }
   937                 break;
   938             case 0x1:
   939                 { /* MOV.L Rm, @(disp, Rn) */
   940                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
   941                 load_reg( R_EAX, Rn );
   942                 ADD_imm32_r32( disp, R_EAX );
   943                 check_walign32( R_EAX );
   944                 MMU_TRANSLATE_WRITE( R_EAX );
   945                 load_reg( R_EDX, Rm );
   946                 MEM_WRITE_LONG( R_EAX, R_EDX );
   947                 sh4_x86.tstate = TSTATE_NONE;
   948                 }
   949                 break;
   950             case 0x2:
   951                 switch( ir&0xF ) {
   952                     case 0x0:
   953                         { /* MOV.B Rm, @Rn */
   954                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   955                         load_reg( R_EAX, Rn );
   956                         MMU_TRANSLATE_WRITE( R_EAX );
   957                         load_reg( R_EDX, Rm );
   958                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   959                         sh4_x86.tstate = TSTATE_NONE;
   960                         }
   961                         break;
   962                     case 0x1:
   963                         { /* MOV.W Rm, @Rn */
   964                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   965                         load_reg( R_EAX, Rn );
   966                         check_walign16( R_EAX );
    967                         MMU_TRANSLATE_WRITE( R_EAX );
   968                         load_reg( R_EDX, Rm );
   969                         MEM_WRITE_WORD( R_EAX, R_EDX );
   970                         sh4_x86.tstate = TSTATE_NONE;
   971                         }
   972                         break;
   973                     case 0x2:
   974                         { /* MOV.L Rm, @Rn */
   975                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   976                         load_reg( R_EAX, Rn );
   977                         check_walign32(R_EAX);
   978                         MMU_TRANSLATE_WRITE( R_EAX );
   979                         load_reg( R_EDX, Rm );
   980                         MEM_WRITE_LONG( R_EAX, R_EDX );
   981                         sh4_x86.tstate = TSTATE_NONE;
   982                         }
   983                         break;
   984                     case 0x4:
   985                         { /* MOV.B Rm, @-Rn */
   986                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   987                         load_reg( R_EAX, Rn );
   988                         ADD_imm8s_r32( -1, R_EAX );
   989                         MMU_TRANSLATE_WRITE( R_EAX );
   990                         load_reg( R_EDX, Rm );
   991                         ADD_imm8s_sh4r( -1, REG_OFFSET(r[Rn]) );
   992                         MEM_WRITE_BYTE( R_EAX, R_EDX );
   993                         sh4_x86.tstate = TSTATE_NONE;
   994                         }
   995                         break;
   996                     case 0x5:
   997                         { /* MOV.W Rm, @-Rn */
   998                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   999                         load_reg( R_EAX, Rn );
  1000                         ADD_imm8s_r32( -2, R_EAX );
  1001                         check_walign16( R_EAX );
  1002                         MMU_TRANSLATE_WRITE( R_EAX );
  1003                         load_reg( R_EDX, Rm );
  1004                         ADD_imm8s_sh4r( -2, REG_OFFSET(r[Rn]) );
  1005                         MEM_WRITE_WORD( R_EAX, R_EDX );
  1006                         sh4_x86.tstate = TSTATE_NONE;
   1007                         }
   1008                         break;
  1009                     case 0x6:
  1010                         { /* MOV.L Rm, @-Rn */
  1011                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1012                         load_reg( R_EAX, Rn );
  1013                         ADD_imm8s_r32( -4, R_EAX );
  1014                         check_walign32( R_EAX );
  1015                         MMU_TRANSLATE_WRITE( R_EAX );
  1016                         load_reg( R_EDX, Rm );
  1017                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1018                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1019                         sh4_x86.tstate = TSTATE_NONE;
   1020                         }
   1021                         break;
  1022                     case 0x7:
  1023                         { /* DIV0S Rm, Rn */
  1024                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1025                         load_reg( R_EAX, Rm );
  1026                         load_reg( R_ECX, Rn );
  1027                         SHR_imm8_r32( 31, R_EAX );
  1028                         SHR_imm8_r32( 31, R_ECX );
  1029                         store_spreg( R_EAX, R_M );
  1030                         store_spreg( R_ECX, R_Q );
  1031                         CMP_r32_r32( R_EAX, R_ECX );
  1032                         SETNE_t();
  1033                         sh4_x86.tstate = TSTATE_NE;
   1034                         }
   1035                         break;
  1036                     case 0x8:
  1037                         { /* TST Rm, Rn */
  1038                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1039                         load_reg( R_EAX, Rm );
  1040                         load_reg( R_ECX, Rn );
  1041                         TEST_r32_r32( R_EAX, R_ECX );
  1042                         SETE_t();
  1043                         sh4_x86.tstate = TSTATE_E;
   1044                         }
   1045                         break;
  1046                     case 0x9:
  1047                         { /* AND Rm, Rn */
  1048                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1049                         load_reg( R_EAX, Rm );
  1050                         load_reg( R_ECX, Rn );
  1051                         AND_r32_r32( R_EAX, R_ECX );
  1052                         store_reg( R_ECX, Rn );
  1053                         sh4_x86.tstate = TSTATE_NONE;
   1054                         }
   1055                         break;
  1056                     case 0xA:
  1057                         { /* XOR Rm, Rn */
  1058                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1059                         load_reg( R_EAX, Rm );
  1060                         load_reg( R_ECX, Rn );
  1061                         XOR_r32_r32( R_EAX, R_ECX );
  1062                         store_reg( R_ECX, Rn );
  1063                         sh4_x86.tstate = TSTATE_NONE;
   1064                         }
   1065                         break;
  1066                     case 0xB:
  1067                         { /* OR Rm, Rn */
  1068                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1069                         load_reg( R_EAX, Rm );
  1070                         load_reg( R_ECX, Rn );
  1071                         OR_r32_r32( R_EAX, R_ECX );
  1072                         store_reg( R_ECX, Rn );
  1073                         sh4_x86.tstate = TSTATE_NONE;
   1074                         }
   1075                         break;
  1076                     case 0xC:
  1077                         { /* CMP/STR Rm, Rn */
  1078                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1079                         load_reg( R_EAX, Rm );
  1080                         load_reg( R_ECX, Rn );
  1081                         XOR_r32_r32( R_ECX, R_EAX );
  1082                         TEST_r8_r8( R_AL, R_AL );
  1083                         JE_rel8(13, target1);
  1084                         TEST_r8_r8( R_AH, R_AH ); // 2
  1085                         JE_rel8(9, target2);
  1086                         SHR_imm8_r32( 16, R_EAX ); // 3
  1087                         TEST_r8_r8( R_AL, R_AL ); // 2
  1088                         JE_rel8(2, target3);
  1089                         TEST_r8_r8( R_AH, R_AH ); // 2
  1090                         JMP_TARGET(target1);
  1091                         JMP_TARGET(target2);
  1092                         JMP_TARGET(target3);
  1093                         SETE_t();
  1094                         sh4_x86.tstate = TSTATE_E;
   1095                         }
   1096                         break;
  1097                     case 0xD:
  1098                         { /* XTRCT Rm, Rn */
  1099                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1100                         load_reg( R_EAX, Rm );
  1101                         load_reg( R_ECX, Rn );
  1102                         SHL_imm8_r32( 16, R_EAX );
  1103                         SHR_imm8_r32( 16, R_ECX );
  1104                         OR_r32_r32( R_EAX, R_ECX );
  1105                         store_reg( R_ECX, Rn );
  1106                         sh4_x86.tstate = TSTATE_NONE;
   1107                         }
   1108                         break;
  1109                     case 0xE:
  1110                         { /* MULU.W Rm, Rn */
  1111                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1112                         load_reg16u( R_EAX, Rm );
  1113                         load_reg16u( R_ECX, Rn );
  1114                         MUL_r32( R_ECX );
  1115                         store_spreg( R_EAX, R_MACL );
  1116                         sh4_x86.tstate = TSTATE_NONE;
   1117                         }
   1118                         break;
  1119                     case 0xF:
  1120                         { /* MULS.W Rm, Rn */
  1121                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1122                         load_reg16s( R_EAX, Rm );
  1123                         load_reg16s( R_ECX, Rn );
  1124                         MUL_r32( R_ECX );
  1125                         store_spreg( R_EAX, R_MACL );
  1126                         sh4_x86.tstate = TSTATE_NONE;
   1127                         }
   1128                         break;
  1129                     default:
  1130                         UNDEF();
  1131                         break;
   1132                 }
   1133                 break;
  1134             case 0x3:
  1135                 switch( ir&0xF ) {
  1136                     case 0x0:
  1137                         { /* CMP/EQ Rm, Rn */
  1138                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1139                         load_reg( R_EAX, Rm );
  1140                         load_reg( R_ECX, Rn );
  1141                         CMP_r32_r32( R_EAX, R_ECX );
  1142                         SETE_t();
  1143                         sh4_x86.tstate = TSTATE_E;
   1144                         }
   1145                         break;
  1146                     case 0x2:
  1147                         { /* CMP/HS Rm, Rn */
  1148                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1149                         load_reg( R_EAX, Rm );
  1150                         load_reg( R_ECX, Rn );
  1151                         CMP_r32_r32( R_EAX, R_ECX );
  1152                         SETAE_t();
  1153                         sh4_x86.tstate = TSTATE_AE;
   1154                         }
   1155                         break;
  1156                     case 0x3:
  1157                         { /* CMP/GE Rm, Rn */
  1158                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1159                         load_reg( R_EAX, Rm );
  1160                         load_reg( R_ECX, Rn );
  1161                         CMP_r32_r32( R_EAX, R_ECX );
  1162                         SETGE_t();
  1163                         sh4_x86.tstate = TSTATE_GE;
   1164                         }
   1165                         break;
  1166                     case 0x4:
  1167                         { /* DIV1 Rm, Rn */
  1168                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
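                                /* DIV1 performs one step of the SH4's 1-bit division sequence
                                 * (set up by DIV0S/DIV0U): rotate Rn left through T, then add
                                 * or subtract the divisor Rm depending on whether Q == M, and
                                 * recompute Q and T from the resulting carry so that repeated
                                 * DIV1 steps accumulate the quotient.  The code below keeps M
                                 * in ECX and the shifted-out bit (Q') in DL. */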
  1169                         load_spreg( R_ECX, R_M );
  1170                         load_reg( R_EAX, Rn );
  1171                         if( sh4_x86.tstate != TSTATE_C ) {
  1172                     	LDC_t();
   1173                         }
   1174                         RCL1_r32( R_EAX );
  1175                         SETC_r8( R_DL ); // Q'
  1176                         CMP_sh4r_r32( R_Q, R_ECX );
  1177                         JE_rel8(5, mqequal);
  1178                         ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1179                         JMP_rel8(3, end);
  1180                         JMP_TARGET(mqequal);
  1181                         SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1182                         JMP_TARGET(end);
  1183                         store_reg( R_EAX, Rn ); // Done with Rn now
  1184                         SETC_r8(R_AL); // tmp1
  1185                         XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
  1186                         XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
  1187                         store_spreg( R_ECX, R_Q );
  1188                         XOR_imm8s_r32( 1, R_AL );   // T = !Q'
  1189                         MOVZX_r8_r32( R_AL, R_EAX );
  1190                         store_spreg( R_EAX, R_T );
  1191                         sh4_x86.tstate = TSTATE_NONE;
   1192                         }
   1193                         break;
  1194                     case 0x5:
  1195                         { /* DMULU.L Rm, Rn */
  1196                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
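                                /* 32x32->64 unsigned multiply: x86 MUL leaves the full product in
                                 * EDX:EAX, which maps directly onto MACH:MACL. */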
  1197                         load_reg( R_EAX, Rm );
  1198                         load_reg( R_ECX, Rn );
  1199                         MUL_r32(R_ECX);
  1200                         store_spreg( R_EDX, R_MACH );
  1201                         store_spreg( R_EAX, R_MACL );    
  1202                         sh4_x86.tstate = TSTATE_NONE;
   1203                         }
   1204                         break;
  1205                     case 0x6:
  1206                         { /* CMP/HI Rm, Rn */
  1207                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1208                         load_reg( R_EAX, Rm );
  1209                         load_reg( R_ECX, Rn );
  1210                         CMP_r32_r32( R_EAX, R_ECX );
  1211                         SETA_t();
  1212                         sh4_x86.tstate = TSTATE_A;
   1213                         }
   1214                         break;
  1215                     case 0x7:
  1216                         { /* CMP/GT Rm, Rn */
  1217                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1218                         load_reg( R_EAX, Rm );
  1219                         load_reg( R_ECX, Rn );
  1220                         CMP_r32_r32( R_EAX, R_ECX );
  1221                         SETG_t();
  1222                         sh4_x86.tstate = TSTATE_G;
   1223                         }
   1224                         break;
  1225                     case 0x8:
  1226                         { /* SUB Rm, Rn */
  1227                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1228                         load_reg( R_EAX, Rm );
  1229                         load_reg( R_ECX, Rn );
  1230                         SUB_r32_r32( R_EAX, R_ECX );
  1231                         store_reg( R_ECX, Rn );
  1232                         sh4_x86.tstate = TSTATE_NONE;
   1233                         }
   1234                         break;
  1235                     case 0xA:
  1236                         { /* SUBC Rm, Rn */
  1237                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
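                                /* SUBC/ADDC keep T in the x86 carry flag: LDC_t() reloads CF from sh4r.t
                                 * only when the flag is not already live (tstate != TSTATE_C), SBB/ADC
                                 * applies the borrow/carry, and SETC_t() writes the result back to T. */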
  1238                         load_reg( R_EAX, Rm );
  1239                         load_reg( R_ECX, Rn );
  1240                         if( sh4_x86.tstate != TSTATE_C ) {
  1241                     	LDC_t();
   1242                         }
   1243                         SBB_r32_r32( R_EAX, R_ECX );
  1244                         store_reg( R_ECX, Rn );
  1245                         SETC_t();
  1246                         sh4_x86.tstate = TSTATE_C;
   1247                         }
   1248                         break;
  1249                     case 0xB:
  1250                         { /* SUBV Rm, Rn */
  1251                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1252                         load_reg( R_EAX, Rm );
  1253                         load_reg( R_ECX, Rn );
  1254                         SUB_r32_r32( R_EAX, R_ECX );
  1255                         store_reg( R_ECX, Rn );
  1256                         SETO_t();
  1257                         sh4_x86.tstate = TSTATE_O;
   1258                         }
   1259                         break;
  1260                     case 0xC:
  1261                         { /* ADD Rm, Rn */
  1262                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1263                         load_reg( R_EAX, Rm );
  1264                         load_reg( R_ECX, Rn );
  1265                         ADD_r32_r32( R_EAX, R_ECX );
  1266                         store_reg( R_ECX, Rn );
  1267                         sh4_x86.tstate = TSTATE_NONE;
   1268                         }
   1269                         break;
  1270                     case 0xD:
  1271                         { /* DMULS.L Rm, Rn */
  1272                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1273                         load_reg( R_EAX, Rm );
  1274                         load_reg( R_ECX, Rn );
  1275                         IMUL_r32(R_ECX);
  1276                         store_spreg( R_EDX, R_MACH );
  1277                         store_spreg( R_EAX, R_MACL );
  1278                         sh4_x86.tstate = TSTATE_NONE;
   1279                         }
   1280                         break;
  1281                     case 0xE:
  1282                         { /* ADDC Rm, Rn */
  1283                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1284                         if( sh4_x86.tstate != TSTATE_C ) {
  1285                     	LDC_t();
   1286                         }
   1287                         load_reg( R_EAX, Rm );
  1288                         load_reg( R_ECX, Rn );
  1289                         ADC_r32_r32( R_EAX, R_ECX );
  1290                         store_reg( R_ECX, Rn );
  1291                         SETC_t();
  1292                         sh4_x86.tstate = TSTATE_C;
   1293                         }
   1294                         break;
  1295                     case 0xF:
  1296                         { /* ADDV Rm, Rn */
  1297                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1298                         load_reg( R_EAX, Rm );
  1299                         load_reg( R_ECX, Rn );
  1300                         ADD_r32_r32( R_EAX, R_ECX );
  1301                         store_reg( R_ECX, Rn );
  1302                         SETO_t();
  1303                         sh4_x86.tstate = TSTATE_O;
   1304                         }
   1305                         break;
  1306                     default:
  1307                         UNDEF();
  1308                         break;
   1309                 }
   1310                 break;
  1311             case 0x4:
  1312                 switch( ir&0xF ) {
  1313                     case 0x0:
  1314                         switch( (ir&0xF0) >> 4 ) {
  1315                             case 0x0:
  1316                                 { /* SHLL Rn */
  1317                                 uint32_t Rn = ((ir>>8)&0xF); 
  1318                                 load_reg( R_EAX, Rn );
  1319                                 SHL1_r32( R_EAX );
  1320                                 SETC_t();
  1321                                 store_reg( R_EAX, Rn );
  1322                                 sh4_x86.tstate = TSTATE_C;
   1323                                 }
   1324                                 break;
  1325                             case 0x1:
  1326                                 { /* DT Rn */
  1327                                 uint32_t Rn = ((ir>>8)&0xF); 
  1328                                 load_reg( R_EAX, Rn );
  1329                                 ADD_imm8s_r32( -1, R_EAX );
  1330                                 store_reg( R_EAX, Rn );
  1331                                 SETE_t();
  1332                                 sh4_x86.tstate = TSTATE_E;
   1333                                 }
   1334                                 break;
  1335                             case 0x2:
  1336                                 { /* SHAL Rn */
  1337                                 uint32_t Rn = ((ir>>8)&0xF); 
  1338                                 load_reg( R_EAX, Rn );
  1339                                 SHL1_r32( R_EAX );
  1340                                 SETC_t();
  1341                                 store_reg( R_EAX, Rn );
  1342                                 sh4_x86.tstate = TSTATE_C;
   1343                                 }
   1344                                 break;
  1345                             default:
  1346                                 UNDEF();
  1347                                 break;
   1348                         }
   1349                         break;
  1350                     case 0x1:
  1351                         switch( (ir&0xF0) >> 4 ) {
  1352                             case 0x0:
  1353                                 { /* SHLR Rn */
  1354                                 uint32_t Rn = ((ir>>8)&0xF); 
  1355                                 load_reg( R_EAX, Rn );
  1356                                 SHR1_r32( R_EAX );
  1357                                 SETC_t();
  1358                                 store_reg( R_EAX, Rn );
  1359                                 sh4_x86.tstate = TSTATE_C;
   1360                                 }
   1361                                 break;
  1362                             case 0x1:
  1363                                 { /* CMP/PZ Rn */
  1364                                 uint32_t Rn = ((ir>>8)&0xF); 
  1365                                 load_reg( R_EAX, Rn );
  1366                                 CMP_imm8s_r32( 0, R_EAX );
  1367                                 SETGE_t();
  1368                                 sh4_x86.tstate = TSTATE_GE;
   1369                                 }
   1370                                 break;
  1371                             case 0x2:
  1372                                 { /* SHAR Rn */
  1373                                 uint32_t Rn = ((ir>>8)&0xF); 
  1374                                 load_reg( R_EAX, Rn );
  1375                                 SAR1_r32( R_EAX );
  1376                                 SETC_t();
  1377                                 store_reg( R_EAX, Rn );
  1378                                 sh4_x86.tstate = TSTATE_C;
   1379                                 }
   1380                                 break;
  1381                             default:
  1382                                 UNDEF();
  1383                                 break;
   1384                         }
   1385                         break;
  1386                     case 0x2:
  1387                         switch( (ir&0xF0) >> 4 ) {
  1388                             case 0x0:
  1389                                 { /* STS.L MACH, @-Rn */
  1390                                 uint32_t Rn = ((ir>>8)&0xF); 
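                                        /* Pre-decrement store template shared by the STS.L/STC.L cases
                                         * below: the decremented address is translated first, and only
                                         * then is the -4 committed to r[Rn], presumably so that an MMU
                                         * exception leaves Rn unmodified. */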
  1391                                 load_reg( R_EAX, Rn );
  1392                                 check_walign32( R_EAX );
  1393                                 ADD_imm8s_r32( -4, R_EAX );
  1394                                 MMU_TRANSLATE_WRITE( R_EAX );
  1395                                 load_spreg( R_EDX, R_MACH );
  1396                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1397                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1398                                 sh4_x86.tstate = TSTATE_NONE;
   1399                                 }
   1400                                 break;
  1401                             case 0x1:
  1402                                 { /* STS.L MACL, @-Rn */
  1403                                 uint32_t Rn = ((ir>>8)&0xF); 
  1404                                 load_reg( R_EAX, Rn );
  1405                                 check_walign32( R_EAX );
  1406                                 ADD_imm8s_r32( -4, R_EAX );
  1407                                 MMU_TRANSLATE_WRITE( R_EAX );
  1408                                 load_spreg( R_EDX, R_MACL );
  1409                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1410                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1411                                 sh4_x86.tstate = TSTATE_NONE;
   1412                                 }
   1413                                 break;
  1414                             case 0x2:
  1415                                 { /* STS.L PR, @-Rn */
  1416                                 uint32_t Rn = ((ir>>8)&0xF); 
  1417                                 load_reg( R_EAX, Rn );
  1418                                 check_walign32( R_EAX );
  1419                                 ADD_imm8s_r32( -4, R_EAX );
  1420                                 MMU_TRANSLATE_WRITE( R_EAX );
  1421                                 load_spreg( R_EDX, R_PR );
  1422                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1423                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1424                                 sh4_x86.tstate = TSTATE_NONE;
   1425                                 }
   1426                                 break;
  1427                             case 0x3:
  1428                                 { /* STC.L SGR, @-Rn */
  1429                                 uint32_t Rn = ((ir>>8)&0xF); 
  1430                                 check_priv();
  1431                                 load_reg( R_EAX, Rn );
  1432                                 check_walign32( R_EAX );
  1433                                 ADD_imm8s_r32( -4, R_EAX );
  1434                                 MMU_TRANSLATE_WRITE( R_EAX );
  1435                                 load_spreg( R_EDX, R_SGR );
  1436                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1437                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1438                                 sh4_x86.tstate = TSTATE_NONE;
   1439                                 }
   1440                                 break;
  1441                             case 0x5:
  1442                                 { /* STS.L FPUL, @-Rn */
  1443                                 uint32_t Rn = ((ir>>8)&0xF); 
  1444                                 load_reg( R_EAX, Rn );
  1445                                 check_walign32( R_EAX );
  1446                                 ADD_imm8s_r32( -4, R_EAX );
  1447                                 MMU_TRANSLATE_WRITE( R_EAX );
  1448                                 load_spreg( R_EDX, R_FPUL );
  1449                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1450                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1451                                 sh4_x86.tstate = TSTATE_NONE;
   1452                                 }
   1453                                 break;
  1454                             case 0x6:
  1455                                 { /* STS.L FPSCR, @-Rn */
  1456                                 uint32_t Rn = ((ir>>8)&0xF); 
  1457                                 load_reg( R_EAX, Rn );
  1458                                 check_walign32( R_EAX );
  1459                                 ADD_imm8s_r32( -4, R_EAX );
  1460                                 MMU_TRANSLATE_WRITE( R_EAX );
  1461                                 load_spreg( R_EDX, R_FPSCR );
  1462                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1463                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1464                                 sh4_x86.tstate = TSTATE_NONE;
   1465                                 }
   1466                                 break;
  1467                             case 0xF:
  1468                                 { /* STC.L DBR, @-Rn */
  1469                                 uint32_t Rn = ((ir>>8)&0xF); 
  1470                                 check_priv();
  1471                                 load_reg( R_EAX, Rn );
  1472                                 check_walign32( R_EAX );
  1473                                 ADD_imm8s_r32( -4, R_EAX );
  1474                                 MMU_TRANSLATE_WRITE( R_EAX );
  1475                                 load_spreg( R_EDX, R_DBR );
  1476                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1477                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1478                                 sh4_x86.tstate = TSTATE_NONE;
   1479                                 }
   1480                                 break;
  1481                             default:
  1482                                 UNDEF();
  1483                                 break;
   1484                         }
   1485                         break;
  1486                     case 0x3:
  1487                         switch( (ir&0x80) >> 7 ) {
  1488                             case 0x0:
  1489                                 switch( (ir&0x70) >> 4 ) {
  1490                                     case 0x0:
  1491                                         { /* STC.L SR, @-Rn */
  1492                                         uint32_t Rn = ((ir>>8)&0xF); 
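                                                /* SR is not held as a single word, so it is assembled by
                                                 * calling sh4_read_sr(); the PUSH/POP keep the translated
                                                 * store address live across the call. */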
  1493                                         check_priv();
  1494                                         load_reg( R_EAX, Rn );
  1495                                         check_walign32( R_EAX );
  1496                                         ADD_imm8s_r32( -4, R_EAX );
  1497                                         MMU_TRANSLATE_WRITE( R_EAX );
  1498                                         PUSH_realigned_r32( R_EAX );
  1499                                         call_func0( sh4_read_sr );
  1500                                         POP_realigned_r32( R_ECX );
  1501                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1502                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1503                                         sh4_x86.tstate = TSTATE_NONE;
   1504                                         }
   1505                                         break;
  1506                                     case 0x1:
  1507                                         { /* STC.L GBR, @-Rn */
  1508                                         uint32_t Rn = ((ir>>8)&0xF); 
  1509                                         load_reg( R_EAX, Rn );
  1510                                         check_walign32( R_EAX );
  1511                                         ADD_imm8s_r32( -4, R_EAX );
  1512                                         MMU_TRANSLATE_WRITE( R_EAX );
  1513                                         load_spreg( R_EDX, R_GBR );
  1514                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1515                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1516                                         sh4_x86.tstate = TSTATE_NONE;
   1517                                         }
   1518                                         break;
  1519                                     case 0x2:
  1520                                         { /* STC.L VBR, @-Rn */
  1521                                         uint32_t Rn = ((ir>>8)&0xF); 
  1522                                         check_priv();
  1523                                         load_reg( R_EAX, Rn );
  1524                                         check_walign32( R_EAX );
  1525                                         ADD_imm8s_r32( -4, R_EAX );
  1526                                         MMU_TRANSLATE_WRITE( R_EAX );
  1527                                         load_spreg( R_EDX, R_VBR );
  1528                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1529                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1530                                         sh4_x86.tstate = TSTATE_NONE;
   1531                                         }
   1532                                         break;
  1533                                     case 0x3:
  1534                                         { /* STC.L SSR, @-Rn */
  1535                                         uint32_t Rn = ((ir>>8)&0xF); 
  1536                                         check_priv();
  1537                                         load_reg( R_EAX, Rn );
  1538                                         check_walign32( R_EAX );
  1539                                         ADD_imm8s_r32( -4, R_EAX );
  1540                                         MMU_TRANSLATE_WRITE( R_EAX );
  1541                                         load_spreg( R_EDX, R_SSR );
  1542                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1543                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1544                                         sh4_x86.tstate = TSTATE_NONE;
   1545                                         }
   1546                                         break;
  1547                                     case 0x4:
  1548                                         { /* STC.L SPC, @-Rn */
  1549                                         uint32_t Rn = ((ir>>8)&0xF); 
  1550                                         check_priv();
  1551                                         load_reg( R_EAX, Rn );
  1552                                         check_walign32( R_EAX );
  1553                                         ADD_imm8s_r32( -4, R_EAX );
  1554                                         MMU_TRANSLATE_WRITE( R_EAX );
  1555                                         load_spreg( R_EDX, R_SPC );
  1556                                         ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1557                                         MEM_WRITE_LONG( R_EAX, R_EDX );
  1558                                         sh4_x86.tstate = TSTATE_NONE;
   1559                                         }
   1560                                         break;
  1561                                     default:
  1562                                         UNDEF();
  1563                                         break;
   1564                                 }
   1565                                 break;
  1566                             case 0x1:
  1567                                 { /* STC.L Rm_BANK, @-Rn */
  1568                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
  1569                                 check_priv();
  1570                                 load_reg( R_EAX, Rn );
  1571                                 check_walign32( R_EAX );
  1572                                 ADD_imm8s_r32( -4, R_EAX );
  1573                                 MMU_TRANSLATE_WRITE( R_EAX );
  1574                                 load_spreg( R_EDX, REG_OFFSET(r_bank[Rm_BANK]) );
  1575                                 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
  1576                                 MEM_WRITE_LONG( R_EAX, R_EDX );
  1577                                 sh4_x86.tstate = TSTATE_NONE;
   1578                                 }
   1579                                 break;
   1580                         }
   1581                         break;
  1582                     case 0x4:
  1583                         switch( (ir&0xF0) >> 4 ) {
  1584                             case 0x0:
  1585                                 { /* ROTL Rn */
  1586                                 uint32_t Rn = ((ir>>8)&0xF); 
  1587                                 load_reg( R_EAX, Rn );
  1588                                 ROL1_r32( R_EAX );
  1589                                 store_reg( R_EAX, Rn );
  1590                                 SETC_t();
  1591                                 sh4_x86.tstate = TSTATE_C;
   1592                                 }
   1593                                 break;
  1594                             case 0x2:
  1595                                 { /* ROTCL Rn */
  1596                                 uint32_t Rn = ((ir>>8)&0xF); 
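                                        /* Rotate through carry: T is loaded into CF if it is not already
                                         * there, RCL rotates it into bit 0 of Rn, and the bit rotated out
                                         * becomes the new T. */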
  1597                                 load_reg( R_EAX, Rn );
  1598                                 if( sh4_x86.tstate != TSTATE_C ) {
  1599                             	LDC_t();
   1600                                 }
   1601                                 RCL1_r32( R_EAX );
  1602                                 store_reg( R_EAX, Rn );
  1603                                 SETC_t();
  1604                                 sh4_x86.tstate = TSTATE_C;
   1605                                 }
   1606                                 break;
  1607                             default:
  1608                                 UNDEF();
  1609                                 break;
   1610                         }
   1611                         break;
  1612                     case 0x5:
  1613                         switch( (ir&0xF0) >> 4 ) {
  1614                             case 0x0:
  1615                                 { /* ROTR Rn */
  1616                                 uint32_t Rn = ((ir>>8)&0xF); 
  1617                                 load_reg( R_EAX, Rn );
  1618                                 ROR1_r32( R_EAX );
  1619                                 store_reg( R_EAX, Rn );
  1620                                 SETC_t();
  1621                                 sh4_x86.tstate = TSTATE_C;
   1622                                 }
   1623                                 break;
  1624                             case 0x1:
  1625                                 { /* CMP/PL Rn */
  1626                                 uint32_t Rn = ((ir>>8)&0xF); 
  1627                                 load_reg( R_EAX, Rn );
  1628                                 CMP_imm8s_r32( 0, R_EAX );
  1629                                 SETG_t();
  1630                                 sh4_x86.tstate = TSTATE_G;
   1631                                 }
   1632                                 break;
  1633                             case 0x2:
  1634                                 { /* ROTCR Rn */
  1635                                 uint32_t Rn = ((ir>>8)&0xF); 
  1636                                 load_reg( R_EAX, Rn );
  1637                                 if( sh4_x86.tstate != TSTATE_C ) {
  1638                             	LDC_t();
   1639                                 }
   1640                                 RCR1_r32( R_EAX );
  1641                                 store_reg( R_EAX, Rn );
  1642                                 SETC_t();
  1643                                 sh4_x86.tstate = TSTATE_C;
   1644                                 }
   1645                                 break;
  1646                             default:
  1647                                 UNDEF();
  1648                                 break;
   1649                         }
   1650                         break;
  1651                     case 0x6:
  1652                         switch( (ir&0xF0) >> 4 ) {
  1653                             case 0x0:
  1654                                 { /* LDS.L @Rm+, MACH */
  1655                                 uint32_t Rm = ((ir>>8)&0xF); 
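                                        /* Post-increment load template shared by the LDS.L/LDC.L cases
                                         * below: the address is translated before the +4 is committed to
                                         * r[Rm], apparently mirroring the pre-decrement stores above so a
                                         * TLB miss leaves Rm unchanged. */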
  1656                                 load_reg( R_EAX, Rm );
  1657                                 check_ralign32( R_EAX );
  1658                                 MMU_TRANSLATE_READ( R_EAX );
  1659                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1660                                 MEM_READ_LONG( R_EAX, R_EAX );
  1661                                 store_spreg( R_EAX, R_MACH );
  1662                                 sh4_x86.tstate = TSTATE_NONE;
   1663                                 }
   1664                                 break;
  1665                             case 0x1:
  1666                                 { /* LDS.L @Rm+, MACL */
  1667                                 uint32_t Rm = ((ir>>8)&0xF); 
  1668                                 load_reg( R_EAX, Rm );
  1669                                 check_ralign32( R_EAX );
  1670                                 MMU_TRANSLATE_READ( R_EAX );
  1671                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1672                                 MEM_READ_LONG( R_EAX, R_EAX );
  1673                                 store_spreg( R_EAX, R_MACL );
  1674                                 sh4_x86.tstate = TSTATE_NONE;
   1675                                 }
   1676                                 break;
  1677                             case 0x2:
  1678                                 { /* LDS.L @Rm+, PR */
  1679                                 uint32_t Rm = ((ir>>8)&0xF); 
  1680                                 load_reg( R_EAX, Rm );
  1681                                 check_ralign32( R_EAX );
  1682                                 MMU_TRANSLATE_READ( R_EAX );
  1683                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1684                                 MEM_READ_LONG( R_EAX, R_EAX );
  1685                                 store_spreg( R_EAX, R_PR );
  1686                                 sh4_x86.tstate = TSTATE_NONE;
   1687                                 }
   1688                                 break;
  1689                             case 0x3:
  1690                                 { /* LDC.L @Rm+, SGR */
  1691                                 uint32_t Rm = ((ir>>8)&0xF); 
  1692                                 check_priv();
  1693                                 load_reg( R_EAX, Rm );
  1694                                 check_ralign32( R_EAX );
  1695                                 MMU_TRANSLATE_READ( R_EAX );
  1696                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1697                                 MEM_READ_LONG( R_EAX, R_EAX );
  1698                                 store_spreg( R_EAX, R_SGR );
  1699                                 sh4_x86.tstate = TSTATE_NONE;
   1700                                 }
   1701                                 break;
  1702                             case 0x5:
  1703                                 { /* LDS.L @Rm+, FPUL */
  1704                                 uint32_t Rm = ((ir>>8)&0xF); 
  1705                                 load_reg( R_EAX, Rm );
  1706                                 check_ralign32( R_EAX );
  1707                                 MMU_TRANSLATE_READ( R_EAX );
  1708                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1709                                 MEM_READ_LONG( R_EAX, R_EAX );
  1710                                 store_spreg( R_EAX, R_FPUL );
  1711                                 sh4_x86.tstate = TSTATE_NONE;
   1712                                 }
   1713                                 break;
  1714                             case 0x6:
  1715                                 { /* LDS.L @Rm+, FPSCR */
  1716                                 uint32_t Rm = ((ir>>8)&0xF); 
  1717                                 load_reg( R_EAX, Rm );
  1718                                 check_ralign32( R_EAX );
  1719                                 MMU_TRANSLATE_READ( R_EAX );
  1720                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1721                                 MEM_READ_LONG( R_EAX, R_EAX );
  1722                                 store_spreg( R_EAX, R_FPSCR );
  1723                                 update_fr_bank( R_EAX );
  1724                                 sh4_x86.tstate = TSTATE_NONE;
   1725                                 }
   1726                                 break;
  1727                             case 0xF:
  1728                                 { /* LDC.L @Rm+, DBR */
  1729                                 uint32_t Rm = ((ir>>8)&0xF); 
  1730                                 check_priv();
  1731                                 load_reg( R_EAX, Rm );
  1732                                 check_ralign32( R_EAX );
  1733                                 MMU_TRANSLATE_READ( R_EAX );
  1734                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1735                                 MEM_READ_LONG( R_EAX, R_EAX );
  1736                                 store_spreg( R_EAX, R_DBR );
  1737                                 sh4_x86.tstate = TSTATE_NONE;
   1738                                 }
   1739                                 break;
  1740                             default:
  1741                                 UNDEF();
  1742                                 break;
   1743                         }
   1744                         break;
  1745                     case 0x7:
  1746                         switch( (ir&0x80) >> 7 ) {
  1747                             case 0x0:
  1748                                 switch( (ir&0x70) >> 4 ) {
  1749                                     case 0x0:
  1750                                         { /* LDC.L @Rm+, SR */
  1751                                         uint32_t Rm = ((ir>>8)&0xF); 
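                                                /* Loading SR can change the register bank, privilege level
                                                 * and interrupt mask, so the value is routed through
                                                 * sh4_write_sr() and the cached priv/fpuen checks are
                                                 * invalidated; the instruction is also illegal in a delay
                                                 * slot. */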
  1752                                         if( sh4_x86.in_delay_slot ) {
  1753                                     	SLOTILLEGAL();
  1754                                         } else {
  1755                                     	check_priv();
  1756                                     	load_reg( R_EAX, Rm );
  1757                                     	check_ralign32( R_EAX );
  1758                                     	MMU_TRANSLATE_READ( R_EAX );
  1759                                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1760                                     	MEM_READ_LONG( R_EAX, R_EAX );
  1761                                     	call_func1( sh4_write_sr, R_EAX );
  1762                                     	sh4_x86.priv_checked = FALSE;
  1763                                     	sh4_x86.fpuen_checked = FALSE;
  1764                                     	sh4_x86.tstate = TSTATE_NONE;
   1765                                         }
   1766                                         }
   1767                                         break;
  1768                                     case 0x1:
  1769                                         { /* LDC.L @Rm+, GBR */
  1770                                         uint32_t Rm = ((ir>>8)&0xF); 
  1771                                         load_reg( R_EAX, Rm );
  1772                                         check_ralign32( R_EAX );
  1773                                         MMU_TRANSLATE_READ( R_EAX );
  1774                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1775                                         MEM_READ_LONG( R_EAX, R_EAX );
  1776                                         store_spreg( R_EAX, R_GBR );
  1777                                         sh4_x86.tstate = TSTATE_NONE;
   1778                                         }
   1779                                         break;
  1780                                     case 0x2:
  1781                                         { /* LDC.L @Rm+, VBR */
  1782                                         uint32_t Rm = ((ir>>8)&0xF); 
  1783                                         check_priv();
  1784                                         load_reg( R_EAX, Rm );
  1785                                         check_ralign32( R_EAX );
  1786                                         MMU_TRANSLATE_READ( R_EAX );
  1787                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1788                                         MEM_READ_LONG( R_EAX, R_EAX );
  1789                                         store_spreg( R_EAX, R_VBR );
  1790                                         sh4_x86.tstate = TSTATE_NONE;
   1791                                         }
   1792                                         break;
  1793                                     case 0x3:
  1794                                         { /* LDC.L @Rm+, SSR */
  1795                                         uint32_t Rm = ((ir>>8)&0xF); 
  1796                                         check_priv();
  1797                                         load_reg( R_EAX, Rm );
  1798                                         check_ralign32( R_EAX );
  1799                                         MMU_TRANSLATE_READ( R_EAX );
  1800                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1801                                         MEM_READ_LONG( R_EAX, R_EAX );
  1802                                         store_spreg( R_EAX, R_SSR );
  1803                                         sh4_x86.tstate = TSTATE_NONE;
   1804                                         }
   1805                                         break;
  1806                                     case 0x4:
  1807                                         { /* LDC.L @Rm+, SPC */
  1808                                         uint32_t Rm = ((ir>>8)&0xF); 
  1809                                         check_priv();
  1810                                         load_reg( R_EAX, Rm );
  1811                                         check_ralign32( R_EAX );
  1812                                         MMU_TRANSLATE_READ( R_EAX );
  1813                                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1814                                         MEM_READ_LONG( R_EAX, R_EAX );
  1815                                         store_spreg( R_EAX, R_SPC );
  1816                                         sh4_x86.tstate = TSTATE_NONE;
   1817                                         }
   1818                                         break;
  1819                                     default:
  1820                                         UNDEF();
  1821                                         break;
   1822                                 }
   1823                                 break;
  1824                             case 0x1:
  1825                                 { /* LDC.L @Rm+, Rn_BANK */
  1826                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  1827                                 check_priv();
  1828                                 load_reg( R_EAX, Rm );
  1829                                 check_ralign32( R_EAX );
  1830                                 MMU_TRANSLATE_READ( R_EAX );
  1831                                 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1832                                 MEM_READ_LONG( R_EAX, R_EAX );
  1833                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  1834                                 sh4_x86.tstate = TSTATE_NONE;
   1835                                 }
   1836                                 break;
   1837                         }
   1838                         break;
  1839                     case 0x8:
  1840                         switch( (ir&0xF0) >> 4 ) {
  1841                             case 0x0:
  1842                                 { /* SHLL2 Rn */
  1843                                 uint32_t Rn = ((ir>>8)&0xF); 
  1844                                 load_reg( R_EAX, Rn );
  1845                                 SHL_imm8_r32( 2, R_EAX );
  1846                                 store_reg( R_EAX, Rn );
  1847                                 sh4_x86.tstate = TSTATE_NONE;
   1848                                 }
   1849                                 break;
  1850                             case 0x1:
  1851                                 { /* SHLL8 Rn */
  1852                                 uint32_t Rn = ((ir>>8)&0xF); 
  1853                                 load_reg( R_EAX, Rn );
  1854                                 SHL_imm8_r32( 8, R_EAX );
  1855                                 store_reg( R_EAX, Rn );
  1856                                 sh4_x86.tstate = TSTATE_NONE;
   1857                                 }
   1858                                 break;
  1859                             case 0x2:
  1860                                 { /* SHLL16 Rn */
  1861                                 uint32_t Rn = ((ir>>8)&0xF); 
  1862                                 load_reg( R_EAX, Rn );
  1863                                 SHL_imm8_r32( 16, R_EAX );
  1864                                 store_reg( R_EAX, Rn );
  1865                                 sh4_x86.tstate = TSTATE_NONE;
   1866                                 }
   1867                                 break;
  1868                             default:
  1869                                 UNDEF();
  1870                                 break;
   1871                         }
   1872                         break;
  1873                     case 0x9:
  1874                         switch( (ir&0xF0) >> 4 ) {
  1875                             case 0x0:
  1876                                 { /* SHLR2 Rn */
  1877                                 uint32_t Rn = ((ir>>8)&0xF); 
  1878                                 load_reg( R_EAX, Rn );
  1879                                 SHR_imm8_r32( 2, R_EAX );
  1880                                 store_reg( R_EAX, Rn );
  1881                                 sh4_x86.tstate = TSTATE_NONE;
   1882                                 }
   1883                                 break;
  1884                             case 0x1:
  1885                                 { /* SHLR8 Rn */
  1886                                 uint32_t Rn = ((ir>>8)&0xF); 
  1887                                 load_reg( R_EAX, Rn );
  1888                                 SHR_imm8_r32( 8, R_EAX );
  1889                                 store_reg( R_EAX, Rn );
  1890                                 sh4_x86.tstate = TSTATE_NONE;
   1891                                 }
   1892                                 break;
  1893                             case 0x2:
  1894                                 { /* SHLR16 Rn */
  1895                                 uint32_t Rn = ((ir>>8)&0xF); 
  1896                                 load_reg( R_EAX, Rn );
  1897                                 SHR_imm8_r32( 16, R_EAX );
  1898                                 store_reg( R_EAX, Rn );
  1899                                 sh4_x86.tstate = TSTATE_NONE;
   1900                                 }
   1901                                 break;
  1902                             default:
  1903                                 UNDEF();
  1904                                 break;
   1905                         }
   1906                         break;
  1907                     case 0xA:
  1908                         switch( (ir&0xF0) >> 4 ) {
  1909                             case 0x0:
  1910                                 { /* LDS Rm, MACH */
  1911                                 uint32_t Rm = ((ir>>8)&0xF); 
  1912                                 load_reg( R_EAX, Rm );
  1913                                 store_spreg( R_EAX, R_MACH );
   1914                                 }
   1915                                 break;
  1916                             case 0x1:
  1917                                 { /* LDS Rm, MACL */
  1918                                 uint32_t Rm = ((ir>>8)&0xF); 
  1919                                 load_reg( R_EAX, Rm );
  1920                                 store_spreg( R_EAX, R_MACL );
   1921                                 }
   1922                                 break;
  1923                             case 0x2:
  1924                                 { /* LDS Rm, PR */
  1925                                 uint32_t Rm = ((ir>>8)&0xF); 
  1926                                 load_reg( R_EAX, Rm );
  1927                                 store_spreg( R_EAX, R_PR );
   1928                                 }
   1929                                 break;
  1930                             case 0x3:
  1931                                 { /* LDC Rm, SGR */
  1932                                 uint32_t Rm = ((ir>>8)&0xF); 
  1933                                 check_priv();
  1934                                 load_reg( R_EAX, Rm );
  1935                                 store_spreg( R_EAX, R_SGR );
  1936                                 sh4_x86.tstate = TSTATE_NONE;
   1937                                 }
   1938                                 break;
  1939                             case 0x5:
  1940                                 { /* LDS Rm, FPUL */
  1941                                 uint32_t Rm = ((ir>>8)&0xF); 
  1942                                 load_reg( R_EAX, Rm );
  1943                                 store_spreg( R_EAX, R_FPUL );
   1944                                 }
   1945                                 break;
  1946                             case 0x6:
  1947                                 { /* LDS Rm, FPSCR */
  1948                                 uint32_t Rm = ((ir>>8)&0xF); 
  1949                                 load_reg( R_EAX, Rm );
  1950                                 store_spreg( R_EAX, R_FPSCR );
  1951                                 update_fr_bank( R_EAX );
  1952                                 sh4_x86.tstate = TSTATE_NONE;
   1953                                 }
   1954                                 break;
  1955                             case 0xF:
  1956                                 { /* LDC Rm, DBR */
  1957                                 uint32_t Rm = ((ir>>8)&0xF); 
  1958                                 check_priv();
  1959                                 load_reg( R_EAX, Rm );
  1960                                 store_spreg( R_EAX, R_DBR );
  1961                                 sh4_x86.tstate = TSTATE_NONE;
   1962                                 }
   1963                                 break;
  1964                             default:
  1965                                 UNDEF();
  1966                                 break;
   1967                         }
   1968                         break;
  1969                     case 0xB:
  1970                         switch( (ir&0xF0) >> 4 ) {
  1971                             case 0x0:
  1972                                 { /* JSR @Rn */
  1973                                 uint32_t Rn = ((ir>>8)&0xF); 
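                                        /* Delayed branch: PR is set to pc+4 (the instruction after the
                                         * delay slot), the target goes into R_NEW_PC, and the delay-slot
                                         * instruction is translated inline before the block exits with the
                                         * new PC; if the slot can't be translated, the block exits via
                                         * exit_block_emu() instead. */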
  1974                                 if( sh4_x86.in_delay_slot ) {
  1975                             	SLOTILLEGAL();
  1976                                 } else {
  1977                             	load_spreg( R_EAX, R_PC );
  1978                             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
  1979                             	store_spreg( R_EAX, R_PR );
  1980                             	load_reg( R_ECX, Rn );
  1981                             	store_spreg( R_ECX, R_NEW_PC );
  1982                             	sh4_x86.in_delay_slot = DELAY_PC;
  1983                             	sh4_x86.branch_taken = TRUE;
  1984                             	sh4_x86.tstate = TSTATE_NONE;
  1985                             	if( UNTRANSLATABLE(pc+2) ) {
  1986                             	    exit_block_emu(pc+2);
  1987                             	    return 2;
  1988                             	} else {
  1989                             	    sh4_translate_instruction(pc+2);
  1990                             	    exit_block_newpcset(pc+2);
  1991                             	    return 4;
   1992                                     }
   1993                                 }
   1994                                 }
   1995                                 break;
  1996                             case 0x1:
  1997                                 { /* TAS.B @Rn */
  1998                                 uint32_t Rn = ((ir>>8)&0xF); 
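                                        /* Test-and-set: the byte is read, T is set if it was zero, and it
                                         * is written back with bit 7 forced on; the translated address is
                                         * preserved on the stack across the read. */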
  1999                                 load_reg( R_EAX, Rn );
  2000                                 MMU_TRANSLATE_WRITE( R_EAX );
  2001                                 PUSH_realigned_r32( R_EAX );
  2002                                 MEM_READ_BYTE( R_EAX, R_EAX );
  2003                                 TEST_r8_r8( R_AL, R_AL );
  2004                                 SETE_t();
  2005                                 OR_imm8_r8( 0x80, R_AL );
  2006                                 POP_realigned_r32( R_ECX );
  2007                                 MEM_WRITE_BYTE( R_ECX, R_EAX );
  2008                                 sh4_x86.tstate = TSTATE_NONE;
   2009                                 }
   2010                                 break;
  2011                             case 0x2:
  2012                                 { /* JMP @Rn */
  2013                                 uint32_t Rn = ((ir>>8)&0xF); 
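                                        /* Same delay-slot sequence as JSR above, but without saving PR. */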
  2014                                 if( sh4_x86.in_delay_slot ) {
  2015                             	SLOTILLEGAL();
  2016                                 } else {
  2017                             	load_reg( R_ECX, Rn );
  2018                             	store_spreg( R_ECX, R_NEW_PC );
  2019                             	sh4_x86.in_delay_slot = DELAY_PC;
  2020                             	sh4_x86.branch_taken = TRUE;
  2021                             	if( UNTRANSLATABLE(pc+2) ) {
  2022                             	    exit_block_emu(pc+2);
  2023                             	    return 2;
  2024                             	} else {
  2025                             	    sh4_translate_instruction(pc+2);
  2026                             	    exit_block_newpcset(pc+2);
  2027                             	    return 4;
   2028                                     }
   2029                                 }
   2030                                 }
   2031                                 break;
  2032                             default:
  2033                                 UNDEF();
  2034                                 break;
   2035                         }
   2036                         break;
  2037                     case 0xC:
  2038                         { /* SHAD Rm, Rn */
  2039                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2040                         /* Annoyingly enough, not directly convertible */
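                                /* SHAD shifts left for a non-negative Rm and arithmetically right for a
                                 * negative one, and a right shift by 32 must still fill Rn with its sign
                                 * bit.  x86 masks the CL shift count to 5 bits, hence the separate
                                 * "emptysar" path that uses SAR by 31 instead. */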
  2041                         load_reg( R_EAX, Rn );
  2042                         load_reg( R_ECX, Rm );
  2043                         CMP_imm32_r32( 0, R_ECX );
  2044                         JGE_rel8(16, doshl);
  2046                         NEG_r32( R_ECX );      // 2
  2047                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2048                         JE_rel8( 4, emptysar);     // 2
  2049                         SAR_r32_CL( R_EAX );       // 2
  2050                         JMP_rel8(10, end);          // 2
  2052                         JMP_TARGET(emptysar);
  2053                         SAR_imm8_r32(31, R_EAX );  // 3
  2054                         JMP_rel8(5, end2);
  2056                         JMP_TARGET(doshl);
  2057                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2058                         SHL_r32_CL( R_EAX );       // 2
  2059                         JMP_TARGET(end);
  2060                         JMP_TARGET(end2);
  2061                         store_reg( R_EAX, Rn );
  2062                         sh4_x86.tstate = TSTATE_NONE;
   2063                         }
   2064                         break;
  2065                     case 0xD:
  2066                         { /* SHLD Rm, Rn */
  2067                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
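                                /* SHLD is the logical counterpart of SHAD: a negative Rm shifts right
                                 * logically, and a right shift by 32 must produce zero, which the
                                 * "emptyshr" path supplies since x86 would shift by 0. */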
  2068                         load_reg( R_EAX, Rn );
  2069                         load_reg( R_ECX, Rm );
  2070                         CMP_imm32_r32( 0, R_ECX );
  2071                         JGE_rel8(15, doshl);
  2073                         NEG_r32( R_ECX );      // 2
  2074                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2075                         JE_rel8( 4, emptyshr );
  2076                         SHR_r32_CL( R_EAX );       // 2
  2077                         JMP_rel8(9, end);          // 2
  2079                         JMP_TARGET(emptyshr);
  2080                         XOR_r32_r32( R_EAX, R_EAX );
  2081                         JMP_rel8(5, end2);
  2083                         JMP_TARGET(doshl);
  2084                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2085                         SHL_r32_CL( R_EAX );       // 2
  2086                         JMP_TARGET(end);
  2087                         JMP_TARGET(end2);
  2088                         store_reg( R_EAX, Rn );
  2089                         sh4_x86.tstate = TSTATE_NONE;
   2090                         }
   2091                         break;
  2092                     case 0xE:
  2093                         switch( (ir&0x80) >> 7 ) {
  2094                             case 0x0:
  2095                                 switch( (ir&0x70) >> 4 ) {
  2096                                     case 0x0:
  2097                                         { /* LDC Rm, SR */
  2098                                         uint32_t Rm = ((ir>>8)&0xF); 
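                                                /* As with LDC.L ...,SR above: SR writes go through
                                                 * sh4_write_sr() and invalidate the cached priv/fpuen
                                                 * state; illegal in a delay slot. */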
  2099                                         if( sh4_x86.in_delay_slot ) {
  2100                                     	SLOTILLEGAL();
  2101                                         } else {
  2102                                     	check_priv();
  2103                                     	load_reg( R_EAX, Rm );
  2104                                     	call_func1( sh4_write_sr, R_EAX );
  2105                                     	sh4_x86.priv_checked = FALSE;
  2106                                     	sh4_x86.fpuen_checked = FALSE;
  2107                                     	sh4_x86.tstate = TSTATE_NONE;
   2108                                         }
   2109                                         }
   2110                                         break;
  2111                                     case 0x1:
  2112                                         { /* LDC Rm, GBR */
  2113                                         uint32_t Rm = ((ir>>8)&0xF); 
  2114                                         load_reg( R_EAX, Rm );
  2115                                         store_spreg( R_EAX, R_GBR );
   2116                                         }
   2117                                         break;
  2118                                     case 0x2:
  2119                                         { /* LDC Rm, VBR */
  2120                                         uint32_t Rm = ((ir>>8)&0xF); 
  2121                                         check_priv();
  2122                                         load_reg( R_EAX, Rm );
  2123                                         store_spreg( R_EAX, R_VBR );
  2124                                         sh4_x86.tstate = TSTATE_NONE;
   2125                                         }
   2126                                         break;
  2127                                     case 0x3:
  2128                                         { /* LDC Rm, SSR */
  2129                                         uint32_t Rm = ((ir>>8)&0xF); 
  2130                                         check_priv();
  2131                                         load_reg( R_EAX, Rm );
  2132                                         store_spreg( R_EAX, R_SSR );
  2133                                         sh4_x86.tstate = TSTATE_NONE;
   2134                                         }
   2135                                         break;
  2136                                     case 0x4:
  2137                                         { /* LDC Rm, SPC */
  2138                                         uint32_t Rm = ((ir>>8)&0xF); 
  2139                                         check_priv();
  2140                                         load_reg( R_EAX, Rm );
  2141                                         store_spreg( R_EAX, R_SPC );
  2142                                         sh4_x86.tstate = TSTATE_NONE;
   2143                                         }
   2144                                         break;
  2145                                     default:
  2146                                         UNDEF();
  2147                                         break;
   2148                                 }
   2149                                 break;
  2150                             case 0x1:
  2151                                 { /* LDC Rm, Rn_BANK */
  2152                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  2153                                 check_priv();
  2154                                 load_reg( R_EAX, Rm );
  2155                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  2156                                 sh4_x86.tstate = TSTATE_NONE;
   2157                                 }
   2158                                 break;
   2159                         }
   2160                         break;
  2161                     case 0xF:
  2162                         { /* MAC.W @Rm+, @Rn+ */
  2163                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
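                                /* When Rm == Rn the same register supplies both operands: the two reads
                                 * come from Rn and Rn+2, and the register is stepped by 4 in a single
                                 * update. */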
  2164                         if( Rm == Rn ) {
  2165                     	load_reg( R_EAX, Rm );
  2166                     	check_ralign16( R_EAX );
  2167                     	MMU_TRANSLATE_READ( R_EAX );
  2168                     	PUSH_realigned_r32( R_EAX );
  2169                     	load_reg( R_EAX, Rn );
  2170                     	ADD_imm8s_r32( 2, R_EAX );
  2171                     	MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
  2172                     	ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
   2173                     	// Note: the address is translated twice in case the two reads fall on
   2174                     	// different pages; a page-boundary check could skip the second translation.
  2175                         } else {
  2176                     	load_reg( R_EAX, Rm );
  2177                     	check_ralign16( R_EAX );
  2178                     	MMU_TRANSLATE_READ( R_EAX );
  2179                     	load_reg( R_ECX, Rn );
  2180                     	check_ralign16( R_ECX );
  2181                     	PUSH_realigned_r32( R_EAX );
  2182                     	MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
  2183                     	MOV_r32_r32( R_ECX, R_EAX );
  2184                     	ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
  2185                     	ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
   2186                         }
   2187                         MEM_READ_WORD( R_EAX, R_EAX );
  2188                         POP_r32( R_ECX );
  2189                         PUSH_r32( R_EAX );
  2190                         MEM_READ_WORD( R_ECX, R_EAX );
  2191                         POP_realigned_r32( R_ECX );
  2192                         IMUL_r32( R_ECX );
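                                /* With the S flag set the accumulation saturates to 32 bits: MACL is
                                 * clamped to 0x80000000/0x7FFFFFFF and MACH is set to 1 to flag the
                                 * overflow; otherwise the product is accumulated into the full 64-bit
                                 * MACH:MACL pair. */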
  2194                         load_spreg( R_ECX, R_S );
  2195                         TEST_r32_r32( R_ECX, R_ECX );
  2196                         JE_rel8( 47, nosat );
  2198                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2199                         JNO_rel8( 51, end );            // 2
  2200                         load_imm32( R_EDX, 1 );         // 5
  2201                         store_spreg( R_EDX, R_MACH );   // 6
  2202                         JS_rel8( 13, positive );        // 2
  2203                         load_imm32( R_EAX, 0x80000000 );// 5
  2204                         store_spreg( R_EAX, R_MACL );   // 6
  2205                         JMP_rel8( 25, end2 );           // 2
  2207                         JMP_TARGET(positive);
  2208                         load_imm32( R_EAX, 0x7FFFFFFF );// 5
  2209                         store_spreg( R_EAX, R_MACL );   // 6
  2210                         JMP_rel8( 12, end3);            // 2
  2212                         JMP_TARGET(nosat);
  2213                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2214                         ADC_r32_sh4r( R_EDX, R_MACH );  // 6
  2215                         JMP_TARGET(end);
  2216                         JMP_TARGET(end2);
  2217                         JMP_TARGET(end3);
  2218                         sh4_x86.tstate = TSTATE_NONE;
   2219                         }
   2220                         break;
   2221                 }
   2222                 break;
  2223             case 0x5:
  2224                 { /* MOV.L @(disp, Rm), Rn */
  2225                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
  2226                 load_reg( R_EAX, Rm );
  2227                 ADD_imm8s_r32( disp, R_EAX );
  2228                 check_ralign32( R_EAX );
  2229                 MMU_TRANSLATE_READ( R_EAX );
  2230                 MEM_READ_LONG( R_EAX, R_EAX );
  2231                 store_reg( R_EAX, Rn );
  2232                 sh4_x86.tstate = TSTATE_NONE;
   2233                 }
   2234                 break;
  2235             case 0x6:
  2236                 switch( ir&0xF ) {
  2237                     case 0x0:
  2238                         { /* MOV.B @Rm, Rn */
  2239                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2240                         load_reg( R_EAX, Rm );
  2241                         MMU_TRANSLATE_READ( R_EAX );
  2242                         MEM_READ_BYTE( R_EAX, R_EAX );
  2243                         store_reg( R_EAX, Rn );
  2244                         sh4_x86.tstate = TSTATE_NONE;
  2246                         break;
  2247                     case 0x1:
  2248                         { /* MOV.W @Rm, Rn */
  2249                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2250                         load_reg( R_EAX, Rm );
  2251                         check_ralign16( R_EAX );
  2252                         MMU_TRANSLATE_READ( R_EAX );
  2253                         MEM_READ_WORD( R_EAX, R_EAX );
  2254                         store_reg( R_EAX, Rn );
  2255                         sh4_x86.tstate = TSTATE_NONE;
  2257                         break;
  2258                     case 0x2:
  2259                         { /* MOV.L @Rm, Rn */
  2260                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2261                         load_reg( R_EAX, Rm );
  2262                         check_ralign32( R_EAX );
  2263                         MMU_TRANSLATE_READ( R_EAX );
  2264                         MEM_READ_LONG( R_EAX, R_EAX );
  2265                         store_reg( R_EAX, Rn );
  2266                         sh4_x86.tstate = TSTATE_NONE;
  2268                         break;
  2269                     case 0x3:
  2270                         { /* MOV Rm, Rn */
  2271                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2272                         load_reg( R_EAX, Rm );
  2273                         store_reg( R_EAX, Rn );
  2275                         break;
  2276                     case 0x4:
  2277                         { /* MOV.B @Rm+, Rn */
  2278                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2279                         load_reg( R_EAX, Rm );
  2280                         MMU_TRANSLATE_READ( R_EAX );
  2281                         ADD_imm8s_sh4r( 1, REG_OFFSET(r[Rm]) );
  2282                         MEM_READ_BYTE( R_EAX, R_EAX );
  2283                         store_reg( R_EAX, Rn );
  2284                         sh4_x86.tstate = TSTATE_NONE;
  2286                         break;
  2287                     case 0x5:
  2288                         { /* MOV.W @Rm+, Rn */
  2289                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2290                         load_reg( R_EAX, Rm );
  2291                         check_ralign16( R_EAX );
  2292                         MMU_TRANSLATE_READ( R_EAX );
  2293                         ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
  2294                         MEM_READ_WORD( R_EAX, R_EAX );
  2295                         store_reg( R_EAX, Rn );
  2296                         sh4_x86.tstate = TSTATE_NONE;
  2298                         break;
  2299                     case 0x6:
  2300                         { /* MOV.L @Rm+, Rn */
  2301                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2302                         load_reg( R_EAX, Rm );
  2303                         check_ralign32( R_EAX );
  2304                         MMU_TRANSLATE_READ( R_EAX );
  2305                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  2306                         MEM_READ_LONG( R_EAX, R_EAX );
  2307                         store_reg( R_EAX, Rn );
  2308                         sh4_x86.tstate = TSTATE_NONE;
  2310                         break;
  2311                     case 0x7:
  2312                         { /* NOT Rm, Rn */
  2313                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2314                         load_reg( R_EAX, Rm );
  2315                         NOT_r32( R_EAX );
  2316                         store_reg( R_EAX, Rn );
  2317                         sh4_x86.tstate = TSTATE_NONE;
  2319                         break;
  2320                     case 0x8:
  2321                         { /* SWAP.B Rm, Rn */
  2322                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2323                         load_reg( R_EAX, Rm );
  2324                         XCHG_r8_r8( R_AL, R_AH ); // NB: does not touch EFLAGS
  2325                         store_reg( R_EAX, Rn );
  2327                         break;
  2328                     case 0x9:
  2329                         { /* SWAP.W Rm, Rn */
  2330                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2331                         load_reg( R_EAX, Rm );
  2332                         MOV_r32_r32( R_EAX, R_ECX );
  2333                         SHL_imm8_r32( 16, R_ECX );
  2334                         SHR_imm8_r32( 16, R_EAX );
  2335                         OR_r32_r32( R_EAX, R_ECX );
  2336                         store_reg( R_ECX, Rn );
  2337                         sh4_x86.tstate = TSTATE_NONE;
  2339                         break;
  2340                     case 0xA:
  2341                         { /* NEGC Rm, Rn */
  2342                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2343                         load_reg( R_EAX, Rm );
  2344                         XOR_r32_r32( R_ECX, R_ECX );
  2345                         LDC_t();
  2346                         SBB_r32_r32( R_EAX, R_ECX );
  2347                         store_reg( R_ECX, Rn );
  2348                         SETC_t();
  2349                         sh4_x86.tstate = TSTATE_C;
  2351                         break;
  2352                     case 0xB:
  2353                         { /* NEG Rm, Rn */
  2354                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2355                         load_reg( R_EAX, Rm );
  2356                         NEG_r32( R_EAX );
  2357                         store_reg( R_EAX, Rn );
  2358                         sh4_x86.tstate = TSTATE_NONE;
  2360                         break;
  2361                     case 0xC:
  2362                         { /* EXTU.B Rm, Rn */
  2363                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2364                         load_reg( R_EAX, Rm );
  2365                         MOVZX_r8_r32( R_EAX, R_EAX );
  2366                         store_reg( R_EAX, Rn );
  2368                         break;
  2369                     case 0xD:
  2370                         { /* EXTU.W Rm, Rn */
  2371                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2372                         load_reg( R_EAX, Rm );
  2373                         MOVZX_r16_r32( R_EAX, R_EAX );
  2374                         store_reg( R_EAX, Rn );
  2376                         break;
  2377                     case 0xE:
  2378                         { /* EXTS.B Rm, Rn */
  2379                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2380                         load_reg( R_EAX, Rm );
  2381                         MOVSX_r8_r32( R_EAX, R_EAX );
  2382                         store_reg( R_EAX, Rn );
  2384                         break;
  2385                     case 0xF:
  2386                         { /* EXTS.W Rm, Rn */
  2387                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2388                         load_reg( R_EAX, Rm );
  2389                         MOVSX_r16_r32( R_EAX, R_EAX );
  2390                         store_reg( R_EAX, Rn );
  2392                         break;
  2394                 break;
  2395             case 0x7:
  2396                 { /* ADD #imm, Rn */
  2397                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  2398                 load_reg( R_EAX, Rn );
  2399                 ADD_imm8s_r32( imm, R_EAX );
  2400                 store_reg( R_EAX, Rn );
  2401                 sh4_x86.tstate = TSTATE_NONE;
  2403                 break;
  2404             case 0x8:
  2405                 switch( (ir&0xF00) >> 8 ) {
  2406                     case 0x0:
  2407                         { /* MOV.B R0, @(disp, Rn) */
  2408                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2409                         load_reg( R_EAX, Rn );
  2410                         ADD_imm32_r32( disp, R_EAX );
  2411                         MMU_TRANSLATE_WRITE( R_EAX );
  2412                         load_reg( R_EDX, 0 );
  2413                         MEM_WRITE_BYTE( R_EAX, R_EDX );
  2414                         sh4_x86.tstate = TSTATE_NONE;
  2416                         break;
  2417                     case 0x1:
  2418                         { /* MOV.W R0, @(disp, Rn) */
  2419                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2420                         load_reg( R_EAX, Rn );
  2421                         ADD_imm32_r32( disp, R_EAX );
  2422                         check_walign16( R_EAX );
  2423                         MMU_TRANSLATE_WRITE( R_EAX );
  2424                         load_reg( R_EDX, 0 );
  2425                         MEM_WRITE_WORD( R_EAX, R_EDX );
  2426                         sh4_x86.tstate = TSTATE_NONE;
  2428                         break;
  2429                     case 0x4:
  2430                         { /* MOV.B @(disp, Rm), R0 */
  2431                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2432                         load_reg( R_EAX, Rm );
  2433                         ADD_imm32_r32( disp, R_EAX );
  2434                         MMU_TRANSLATE_READ( R_EAX );
  2435                         MEM_READ_BYTE( R_EAX, R_EAX );
  2436                         store_reg( R_EAX, 0 );
  2437                         sh4_x86.tstate = TSTATE_NONE;
  2439                         break;
  2440                     case 0x5:
  2441                         { /* MOV.W @(disp, Rm), R0 */
  2442                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2443                         load_reg( R_EAX, Rm );
  2444                         ADD_imm32_r32( disp, R_EAX );
  2445                         check_ralign16( R_EAX );
  2446                         MMU_TRANSLATE_READ( R_EAX );
  2447                         MEM_READ_WORD( R_EAX, R_EAX );
  2448                         store_reg( R_EAX, 0 );
  2449                         sh4_x86.tstate = TSTATE_NONE;
  2451                         break;
  2452                     case 0x8:
  2453                         { /* CMP/EQ #imm, R0 */
  2454                         int32_t imm = SIGNEXT8(ir&0xFF); 
  2455                         load_reg( R_EAX, 0 );
  2456                         CMP_imm8s_r32(imm, R_EAX);
  2457                         SETE_t();
  2458                         sh4_x86.tstate = TSTATE_E;
  2460                         break;
  2461                     case 0x9:
  2462                         { /* BT disp */
  2463                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
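                        /* BT: emit a jump on the inverted condition (JF) over an
                         * exit_block_rel stub, so execution falls through when T is clear
                         * and leaves the block for the branch target when T is set.
                         * EXIT_BLOCK_REL_SIZE is the byte length of the stub being
                         * skipped.  BF below is the mirror image, using JT. */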
  2464                         if( sh4_x86.in_delay_slot ) {
  2465                     	SLOTILLEGAL();
  2466                         } else {
  2467                     	sh4vma_t target = disp + pc + 4;
  2468                     	JF_rel8( EXIT_BLOCK_REL_SIZE(target), nottaken );
  2469                     	exit_block_rel(target, pc+2 );
  2470                     	JMP_TARGET(nottaken);
  2471                     	return 2;
  2474                         break;
  2475                     case 0xB:
  2476                         { /* BF disp */
  2477                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2478                         if( sh4_x86.in_delay_slot ) {
  2479                     	SLOTILLEGAL();
  2480                         } else {
  2481                     	sh4vma_t target = disp + pc + 4;
  2482                     	JT_rel8( EXIT_BLOCK_REL_SIZE(target), nottaken );
  2483                     	exit_block_rel(target, pc+2 );
  2484                     	JMP_TARGET(nottaken);
  2485                     	return 2;
  2488                         break;
  2489                     case 0xD:
  2490                         { /* BT/S disp */
  2491                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
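                        /* BT/S: conditional branch with a delay slot.  If the delay-slot
                         * instruction cannot be translated in-line, the target (or the
                         * fall-through pc+4) is computed into new_pc and the block exits
                         * to the emulator core, which executes the slot itself.
                         * Otherwise a conditional rel32 jump is emitted with a zero
                         * placeholder, the delay slot and the taken-branch exit are
                         * translated, the rel32 is back-patched to point here, and the
                         * delay slot is translated a second time for the not-taken
                         * fall-through path. */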
  2492                         if( sh4_x86.in_delay_slot ) {
  2493                     	SLOTILLEGAL();
  2494                         } else {
  2495                     	sh4_x86.in_delay_slot = DELAY_PC;
  2496                     	if( UNTRANSLATABLE(pc+2) ) {
  2497                     	    load_imm32( R_EAX, pc + 4 - sh4_x86.block_start_pc );
  2498                     	    JF_rel8(6,nottaken);
  2499                     	    ADD_imm32_r32( disp, R_EAX );
  2500                     	    JMP_TARGET(nottaken);
  2501                     	    ADD_sh4r_r32( R_PC, R_EAX );
  2502                     	    store_spreg( R_EAX, R_NEW_PC );
  2503                     	    exit_block_emu(pc+2);
  2504                     	    sh4_x86.branch_taken = TRUE;
  2505                     	    return 2;
  2506                     	} else {
  2507                     	    if( sh4_x86.tstate == TSTATE_NONE ) {
  2508                     		CMP_imm8s_sh4r( 1, R_T );
  2509                     		sh4_x86.tstate = TSTATE_E;
  2510                     	    }
  2511                     	    OP(0x0F); OP(0x80+(sh4_x86.tstate^1)); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JF rel32
  2512                     	    sh4_translate_instruction(pc+2);
  2513                     	    exit_block_rel( disp + pc + 4, pc+4 );
  2514                     	    // not taken
  2515                     	    *patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2516                     	    sh4_translate_instruction(pc+2);
  2517                     	    return 4;
  2521                         break;
  2522                     case 0xF:
  2523                         { /* BF/S disp */
  2524                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2525                         if( sh4_x86.in_delay_slot ) {
  2526                     	SLOTILLEGAL();
  2527                         } else {
  2528                     	sh4_x86.in_delay_slot = DELAY_PC;
  2529                     	if( UNTRANSLATABLE(pc+2) ) {
  2530                     	    load_imm32( R_EAX, pc + 4 - sh4_x86.block_start_pc );
  2531                     	    JT_rel8(6,nottaken);
  2532                     	    ADD_imm32_r32( disp, R_EAX );
  2533                     	    JMP_TARGET(nottaken);
  2534                     	    ADD_sh4r_r32( R_PC, R_EAX );
  2535                     	    store_spreg( R_EAX, R_NEW_PC );
  2536                     	    exit_block_emu(pc+2);
  2537                     	    sh4_x86.branch_taken = TRUE;
  2538                     	    return 2;
  2539                     	} else {
  2540                     	    if( sh4_x86.tstate == TSTATE_NONE ) {
  2541                     		CMP_imm8s_sh4r( 1, R_T );
  2542                     		sh4_x86.tstate = TSTATE_E;
  2543                     	    }
  2544                     	    sh4vma_t target = disp + pc + 4;
  2545                     	    OP(0x0F); OP(0x80+sh4_x86.tstate); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JT rel32
  2546                     	    sh4_translate_instruction(pc+2);
  2547                     	    exit_block_rel( target, pc+4 );
  2549                     	    // not taken
  2550                     	    *patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2551                     	    sh4_translate_instruction(pc+2);
  2552                     	    return 4;
  2556                         break;
  2557                     default:
  2558                         UNDEF();
  2559                         break;
  2561                 break;
  2562             case 0x9:
  2563                 { /* MOV.W @(disp, PC), Rn */
  2564                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1; 
  2565                 if( sh4_x86.in_delay_slot ) {
  2566             	SLOTILLEGAL();
  2567                 } else {
  2568             	// See comments for MOV.L @(disp, PC), Rn
  2569             	uint32_t target = pc + disp + 4;
  2570             	if( IS_IN_ICACHE(target) ) {
  2571             	    sh4ptr_t ptr = GET_ICACHE_PTR(target);
  2572             	    MOV_moff32_EAX( ptr );
  2573             	    MOVSX_r16_r32( R_EAX, R_EAX );
  2574             	} else {
  2575             	    load_imm32( R_EAX, (pc - sh4_x86.block_start_pc) + disp + 4 );
  2576             	    ADD_sh4r_r32( R_PC, R_EAX );
  2577             	    MMU_TRANSLATE_READ( R_EAX );
  2578             	    MEM_READ_WORD( R_EAX, R_EAX );
  2579             	    sh4_x86.tstate = TSTATE_NONE;
  2581             	store_reg( R_EAX, Rn );
  2584                 break;
  2585             case 0xA:
  2586                 { /* BRA disp */
  2587                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
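                /* BRA: unconditional pc-relative branch with a delay slot.  If the slot
                 * cannot be translated in-line, the target goes into new_pc and the
                 * block exits to the emulator to run the slot; otherwise the slot is
                 * translated here and the block ends with a direct exit to the target. */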
  2588                 if( sh4_x86.in_delay_slot ) {
  2589             	SLOTILLEGAL();
  2590                 } else {
  2591             	sh4_x86.in_delay_slot = DELAY_PC;
  2592             	sh4_x86.branch_taken = TRUE;
  2593             	if( UNTRANSLATABLE(pc+2) ) {
  2594             	    load_spreg( R_EAX, R_PC );
  2595             	    ADD_imm32_r32( pc + disp + 4 - sh4_x86.block_start_pc, R_EAX );
  2596             	    store_spreg( R_EAX, R_NEW_PC );
  2597             	    exit_block_emu(pc+2);
  2598             	    return 2;
  2599             	} else {
  2600             	    sh4_translate_instruction( pc + 2 );
  2601             	    exit_block_rel( disp + pc + 4, pc+4 );
  2602             	    return 4;
  2606                 break;
  2607             case 0xB:
  2608                 { /* BSR disp */
  2609                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
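                /* BSR: as BRA, but PR is first set to the return address (pc + 4),
                 * computed from the runtime sh4r.pc so it remains correct if the block
                 * executes at a different virtual address than it was translated at. */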
  2610                 if( sh4_x86.in_delay_slot ) {
  2611             	SLOTILLEGAL();
  2612                 } else {
  2613             	load_spreg( R_EAX, R_PC );
  2614             	ADD_imm32_r32( pc + 4 - sh4_x86.block_start_pc, R_EAX );
  2615             	store_spreg( R_EAX, R_PR );
  2616             	sh4_x86.in_delay_slot = DELAY_PC;
  2617             	sh4_x86.branch_taken = TRUE;
  2618             	sh4_x86.tstate = TSTATE_NONE;
  2619             	if( UNTRANSLATABLE(pc+2) ) {
  2620             	    ADD_imm32_r32( disp, R_EAX );
  2621             	    store_spreg( R_EAX, R_NEW_PC );
  2622             	    exit_block_emu(pc+2);
  2623             	    return 2;
  2624             	} else {
  2625             	    sh4_translate_instruction( pc + 2 );
  2626             	    exit_block_rel( disp + pc + 4, pc+4 );
  2627             	    return 4;
  2631                 break;
  2632             case 0xC:
  2633                 switch( (ir&0xF00) >> 8 ) {
  2634                     case 0x0:
  2635                         { /* MOV.B R0, @(disp, GBR) */
  2636                         uint32_t disp = (ir&0xFF); 
  2637                         load_spreg( R_EAX, R_GBR );
  2638                         ADD_imm32_r32( disp, R_EAX );
  2639                         MMU_TRANSLATE_WRITE( R_EAX );
  2640                         load_reg( R_EDX, 0 );
  2641                         MEM_WRITE_BYTE( R_EAX, R_EDX );
  2642                         sh4_x86.tstate = TSTATE_NONE;
  2644                         break;
  2645                     case 0x1:
  2646                         { /* MOV.W R0, @(disp, GBR) */
  2647                         uint32_t disp = (ir&0xFF)<<1; 
  2648                         load_spreg( R_EAX, R_GBR );
  2649                         ADD_imm32_r32( disp, R_EAX );
  2650                         check_walign16( R_EAX );
  2651                         MMU_TRANSLATE_WRITE( R_EAX );
  2652                         load_reg( R_EDX, 0 );
  2653                         MEM_WRITE_WORD( R_EAX, R_EDX );
  2654                         sh4_x86.tstate = TSTATE_NONE;
  2656                         break;
  2657                     case 0x2:
  2658                         { /* MOV.L R0, @(disp, GBR) */
  2659                         uint32_t disp = (ir&0xFF)<<2; 
  2660                         load_spreg( R_EAX, R_GBR );
  2661                         ADD_imm32_r32( disp, R_EAX );
  2662                         check_walign32( R_EAX );
  2663                         MMU_TRANSLATE_WRITE( R_EAX );
  2664                         load_reg( R_EDX, 0 );
  2665                         MEM_WRITE_LONG( R_EAX, R_EDX );
  2666                         sh4_x86.tstate = TSTATE_NONE;
  2668                         break;
  2669                     case 0x3:
  2670                         { /* TRAPA #imm */
  2671                         uint32_t imm = (ir&0xFF); 
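                        /* TRAPA: advance sh4r.pc to the instruction after the trap, call
                         * sh4_raise_trap() with the immediate, then leave the block with
                         * the PC already set up by the trap handler (exit_block_pcset). */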
  2672                         if( sh4_x86.in_delay_slot ) {
  2673                     	SLOTILLEGAL();
  2674                         } else {
  2675                     	load_imm32( R_ECX, pc+2 - sh4_x86.block_start_pc );   // 5
  2676                     	ADD_r32_sh4r( R_ECX, R_PC );
  2677                     	load_imm32( R_EAX, imm );
  2678                     	call_func1( sh4_raise_trap, R_EAX );
  2679                     	sh4_x86.tstate = TSTATE_NONE;
  2680                     	exit_block_pcset(pc);
  2681                     	sh4_x86.branch_taken = TRUE;
  2682                     	return 2;
  2685                         break;
  2686                     case 0x4:
  2687                         { /* MOV.B @(disp, GBR), R0 */
  2688                         uint32_t disp = (ir&0xFF); 
  2689                         load_spreg( R_EAX, R_GBR );
  2690                         ADD_imm32_r32( disp, R_EAX );
  2691                         MMU_TRANSLATE_READ( R_EAX );
  2692                         MEM_READ_BYTE( R_EAX, R_EAX );
  2693                         store_reg( R_EAX, 0 );
  2694                         sh4_x86.tstate = TSTATE_NONE;
  2696                         break;
  2697                     case 0x5:
  2698                         { /* MOV.W @(disp, GBR), R0 */
  2699                         uint32_t disp = (ir&0xFF)<<1; 
  2700                         load_spreg( R_EAX, R_GBR );
  2701                         ADD_imm32_r32( disp, R_EAX );
  2702                         check_ralign16( R_EAX );
  2703                         MMU_TRANSLATE_READ( R_EAX );
  2704                         MEM_READ_WORD( R_EAX, R_EAX );
  2705                         store_reg( R_EAX, 0 );
  2706                         sh4_x86.tstate = TSTATE_NONE;
  2708                         break;
  2709                     case 0x6:
  2710                         { /* MOV.L @(disp, GBR), R0 */
  2711                         uint32_t disp = (ir&0xFF)<<2; 
  2712                         load_spreg( R_EAX, R_GBR );
  2713                         ADD_imm32_r32( disp, R_EAX );
  2714                         check_ralign32( R_EAX );
  2715                         MMU_TRANSLATE_READ( R_EAX );
  2716                         MEM_READ_LONG( R_EAX, R_EAX );
  2717                         store_reg( R_EAX, 0 );
  2718                         sh4_x86.tstate = TSTATE_NONE;
  2720                         break;
  2721                     case 0x7:
  2722                         { /* MOVA @(disp, PC), R0 */
  2723                         uint32_t disp = (ir&0xFF)<<2; 
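                        /* MOVA: R0 = (PC & ~3) + disp + 4.  The immediate folds in the
                         * block-relative offset and the low bits of the translation-time
                         * pc; adding the runtime sh4r.pc keeps the result correct if the
                         * block runs at a different virtual address (the low bits are
                         * assumed unchanged, as for MOV.L @(disp, PC) below). */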
  2724                         if( sh4_x86.in_delay_slot ) {
  2725                     	SLOTILLEGAL();
  2726                         } else {
  2727                     	load_imm32( R_ECX, (pc - sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
  2728                     	ADD_sh4r_r32( R_PC, R_ECX );
  2729                     	store_reg( R_ECX, 0 );
  2730                     	sh4_x86.tstate = TSTATE_NONE;
  2733                         break;
  2734                     case 0x8:
  2735                         { /* TST #imm, R0 */
  2736                         uint32_t imm = (ir&0xFF); 
  2737                         load_reg( R_EAX, 0 );
  2738                         TEST_imm32_r32( imm, R_EAX );
  2739                         SETE_t();
  2740                         sh4_x86.tstate = TSTATE_E;
  2742                         break;
  2743                     case 0x9:
  2744                         { /* AND #imm, R0 */
  2745                         uint32_t imm = (ir&0xFF); 
  2746                         load_reg( R_EAX, 0 );
  2747                         AND_imm32_r32(imm, R_EAX); 
  2748                         store_reg( R_EAX, 0 );
  2749                         sh4_x86.tstate = TSTATE_NONE;
  2751                         break;
  2752                     case 0xA:
  2753                         { /* XOR #imm, R0 */
  2754                         uint32_t imm = (ir&0xFF); 
  2755                         load_reg( R_EAX, 0 );
  2756                         XOR_imm32_r32( imm, R_EAX );
  2757                         store_reg( R_EAX, 0 );
  2758                         sh4_x86.tstate = TSTATE_NONE;
  2760                         break;
  2761                     case 0xB:
  2762                         { /* OR #imm, R0 */
  2763                         uint32_t imm = (ir&0xFF); 
  2764                         load_reg( R_EAX, 0 );
  2765                         OR_imm32_r32(imm, R_EAX);
  2766                         store_reg( R_EAX, 0 );
  2767                         sh4_x86.tstate = TSTATE_NONE;
  2769                         break;
  2770                     case 0xC:
  2771                         { /* TST.B #imm, @(R0, GBR) */
  2772                         uint32_t imm = (ir&0xFF); 
  2773                         load_reg( R_EAX, 0 );
  2774                         load_spreg( R_ECX, R_GBR );
  2775                         ADD_r32_r32( R_ECX, R_EAX );
  2776                         MMU_TRANSLATE_READ( R_EAX );
  2777                         MEM_READ_BYTE( R_EAX, R_EAX );
  2778                         TEST_imm8_r8( imm, R_AL );
  2779                         SETE_t();
  2780                         sh4_x86.tstate = TSTATE_E;
  2782                         break;
  2783                     case 0xD:
  2784                         { /* AND.B #imm, @(R0, GBR) */
  2785                         uint32_t imm = (ir&0xFF); 
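                        /* AND.B/XOR.B/OR.B @(R0, GBR) are read-modify-write: the
                         * effective address is translated once for write (the result is
                         * written back), preserved across the MEM_READ_BYTE call by the
                         * realigned push/pop (which also keeps the stack aligned for the
                         * call), and then reused for the write-back of the modified
                         * byte. */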
  2786                         load_reg( R_EAX, 0 );
  2787                         load_spreg( R_ECX, R_GBR );
  2788                         ADD_r32_r32( R_ECX, R_EAX );
  2789                         MMU_TRANSLATE_WRITE( R_EAX );
  2790                         PUSH_realigned_r32(R_EAX);
  2791                         MEM_READ_BYTE( R_EAX, R_EAX );
  2792                         POP_realigned_r32(R_ECX);
  2793                         AND_imm32_r32(imm, R_EAX );
  2794                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2795                         sh4_x86.tstate = TSTATE_NONE;
  2797                         break;
  2798                     case 0xE:
  2799                         { /* XOR.B #imm, @(R0, GBR) */
  2800                         uint32_t imm = (ir&0xFF); 
  2801                         load_reg( R_EAX, 0 );
  2802                         load_spreg( R_ECX, R_GBR );
  2803                         ADD_r32_r32( R_ECX, R_EAX );
  2804                         MMU_TRANSLATE_WRITE( R_EAX );
  2805                         PUSH_realigned_r32(R_EAX);
  2806                         MEM_READ_BYTE(R_EAX, R_EAX);
  2807                         POP_realigned_r32(R_ECX);
  2808                         XOR_imm32_r32( imm, R_EAX );
  2809                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2810                         sh4_x86.tstate = TSTATE_NONE;
  2812                         break;
  2813                     case 0xF:
  2814                         { /* OR.B #imm, @(R0, GBR) */
  2815                         uint32_t imm = (ir&0xFF); 
  2816                         load_reg( R_EAX, 0 );
  2817                         load_spreg( R_ECX, R_GBR );
  2818                         ADD_r32_r32( R_ECX, R_EAX );
  2819                         MMU_TRANSLATE_WRITE( R_EAX );
  2820                         PUSH_realigned_r32(R_EAX);
  2821                         MEM_READ_BYTE( R_EAX, R_EAX );
  2822                         POP_realigned_r32(R_ECX);
  2823                         OR_imm32_r32(imm, R_EAX );
  2824                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2825                         sh4_x86.tstate = TSTATE_NONE;
  2827                         break;
  2829                 break;
  2830             case 0xD:
  2831                 { /* MOV.L @(disp, PC), Rn */
  2832                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2; 
  2833                 if( sh4_x86.in_delay_slot ) {
  2834             	SLOTILLEGAL();
  2835                 } else {
  2836             	uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
  2837             	if( IS_IN_ICACHE(target) ) {
  2838             	    // If the target address is in the same page as the code, it's
  2839             	    // pretty safe to just ref it directly and circumvent the whole
  2840             	    // memory subsystem. (this is a big performance win)
  2842             	    // FIXME: There's a corner-case that's not handled here when
  2843             	    // the current code-page is in the ITLB but not in the UTLB.
  2844             	    // (should generate a TLB miss, although we'd need to test real SH4
  2845             	    // behaviour to confirm).  Unlikely that anyone depends on this
  2846             	    // behaviour, though.
  2847             	    sh4ptr_t ptr = GET_ICACHE_PTR(target);
  2848             	    MOV_moff32_EAX( ptr );
  2849             	} else {
  2850             	    // Note: we use sh4r.pc for the calc as we could be running at a
  2851             	    // different virtual address than the translation was done with,
  2852             	    // but we can safely assume that the low bits are the same.
  2853             	    load_imm32( R_EAX, (pc-sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
  2854             	    ADD_sh4r_r32( R_PC, R_EAX );
  2855             	    MMU_TRANSLATE_READ( R_EAX );
  2856             	    MEM_READ_LONG( R_EAX, R_EAX );
  2857             	    sh4_x86.tstate = TSTATE_NONE;
  2859             	store_reg( R_EAX, Rn );
  2862                 break;
  2863             case 0xE:
  2864                 { /* MOV #imm, Rn */
  2865                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  2866                 load_imm32( R_EAX, imm );
  2867                 store_reg( R_EAX, Rn );
  2869                 break;
  2870             case 0xF:
  2871                 switch( ir&0xF ) {
  2872                     case 0x0:
  2873                         { /* FADD FRm, FRn */
  2874                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
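                        /* The arithmetic FP ops test FPSCR.PR at runtime: PR=0 operates
                         * on the single-precision FRm/FRn, PR=1 on the double-precision
                         * pairs.  Both paths go through the x87 stack (push the operands,
                         * FADDP/FSUBP/FMULP/FDIVP, pop the result back into the register
                         * file).  The JNE_rel8/JMP_rel8 literals are hand-counted byte
                         * lengths of the code being skipped. */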
  2875                         check_fpuen();
  2876                         load_spreg( R_ECX, R_FPSCR );
  2877                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  2878                         load_fr_bank( R_EDX );
  2879                         JNE_rel8(13,doubleprec);
  2880                         push_fr(R_EDX, FRm);
  2881                         push_fr(R_EDX, FRn);
  2882                         FADDP_st(1);
  2883                         pop_fr(R_EDX, FRn);
  2884                         JMP_rel8(11,end);
  2885                         JMP_TARGET(doubleprec);
  2886                         push_dr(R_EDX, FRm);
  2887                         push_dr(R_EDX, FRn);
  2888                         FADDP_st(1);
  2889                         pop_dr(R_EDX, FRn);
  2890                         JMP_TARGET(end);
  2891                         sh4_x86.tstate = TSTATE_NONE;
  2893                         break;
  2894                     case 0x1:
  2895                         { /* FSUB FRm, FRn */
  2896                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  2897                         check_fpuen();
  2898                         load_spreg( R_ECX, R_FPSCR );
  2899                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  2900                         load_fr_bank( R_EDX );
  2901                         JNE_rel8(13, doubleprec);
  2902                         push_fr(R_EDX, FRn);
  2903                         push_fr(R_EDX, FRm);
  2904                         FSUBP_st(1);
  2905                         pop_fr(R_EDX, FRn);
  2906                         JMP_rel8(11, end);
  2907                         JMP_TARGET(doubleprec);
  2908                         push_dr(R_EDX, FRn);
  2909                         push_dr(R_EDX, FRm);
  2910                         FSUBP_st(1);
  2911                         pop_dr(R_EDX, FRn);
  2912                         JMP_TARGET(end);
  2913                         sh4_x86.tstate = TSTATE_NONE;
  2915                         break;
  2916                     case 0x2:
  2917                         { /* FMUL FRm, FRn */
  2918                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  2919                         check_fpuen();
  2920                         load_spreg( R_ECX, R_FPSCR );
  2921                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  2922                         load_fr_bank( R_EDX );
  2923                         JNE_rel8(13, doubleprec);
  2924                         push_fr(R_EDX, FRm);
  2925                         push_fr(R_EDX, FRn);
  2926                         FMULP_st(1);
  2927                         pop_fr(R_EDX, FRn);
  2928                         JMP_rel8(11, end);
  2929                         JMP_TARGET(doubleprec);
  2930                         push_dr(R_EDX, FRm);
  2931                         push_dr(R_EDX, FRn);
  2932                         FMULP_st(1);
  2933                         pop_dr(R_EDX, FRn);
  2934                         JMP_TARGET(end);
  2935                         sh4_x86.tstate = TSTATE_NONE;
  2937                         break;
  2938                     case 0x3:
  2939                         { /* FDIV FRm, FRn */
  2940                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  2941                         check_fpuen();
  2942                         load_spreg( R_ECX, R_FPSCR );
  2943                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  2944                         load_fr_bank( R_EDX );
  2945                         JNE_rel8(13, doubleprec);
  2946                         push_fr(R_EDX, FRn);
  2947                         push_fr(R_EDX, FRm);
  2948                         FDIVP_st(1);
  2949                         pop_fr(R_EDX, FRn);
  2950                         JMP_rel8(11, end);
  2951                         JMP_TARGET(doubleprec);
  2952                         push_dr(R_EDX, FRn);
  2953                         push_dr(R_EDX, FRm);
  2954                         FDIVP_st(1);
  2955                         pop_dr(R_EDX, FRn);
  2956                         JMP_TARGET(end);
  2957                         sh4_x86.tstate = TSTATE_NONE;
  2959                         break;
  2960                     case 0x4:
  2961                         { /* FCMP/EQ FRm, FRn */
  2962                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
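                        /* FCMP/EQ and FCMP/GT push both operands (single or double,
                         * per FPSCR.PR), compare with FCOMIP (which sets the host flags
                         * and pops ST(0)), latch the result into T via SETE/SETA, and
                         * finally pop the remaining x87 register. */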
  2963                         check_fpuen();
  2964                         load_spreg( R_ECX, R_FPSCR );
  2965                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  2966                         load_fr_bank( R_EDX );
  2967                         JNE_rel8(8, doubleprec);
  2968                         push_fr(R_EDX, FRm);
  2969                         push_fr(R_EDX, FRn);
  2970                         JMP_rel8(6, end);
  2971                         JMP_TARGET(doubleprec);
  2972                         push_dr(R_EDX, FRm);
  2973                         push_dr(R_EDX, FRn);
  2974                         JMP_TARGET(end);
  2975                         FCOMIP_st(1);
  2976                         SETE_t();
  2977                         FPOP_st();
  2978                         sh4_x86.tstate = TSTATE_NONE;
  2980                         break;
  2981                     case 0x5:
  2982                         { /* FCMP/GT FRm, FRn */
  2983                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  2984                         check_fpuen();
  2985                         load_spreg( R_ECX, R_FPSCR );
  2986                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  2987                         load_fr_bank( R_EDX );
  2988                         JNE_rel8(8, doubleprec);
  2989                         push_fr(R_EDX, FRm);
  2990                         push_fr(R_EDX, FRn);
  2991                         JMP_rel8(6, end);
  2992                         JMP_TARGET(doubleprec);
  2993                         push_dr(R_EDX, FRm);
  2994                         push_dr(R_EDX, FRn);
  2995                         JMP_TARGET(end);
  2996                         FCOMIP_st(1);
  2997                         SETA_t();
  2998                         FPOP_st();
  2999                         sh4_x86.tstate = TSTATE_NONE;
  3001                         break;
  3002                     case 0x6:
  3003                         { /* FMOV @(R0, Rm), FRn */
  3004                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
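                        /* FMOV transfers 32 or 64 bits depending on FPSCR.SZ, tested at
                         * runtime.  For 64-bit moves an odd FRn selects the XD bank, and
                         * the value is handled as the register pair {FRn&~1, FRn|1}.
                         * MEM_READ_SIZE and friends are the byte sizes of the emitted
                         * memory-access stubs, used to compute the skip distances. */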
  3005                         check_fpuen();
  3006                         load_reg( R_EAX, Rm );
  3007                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
  3008                         check_ralign32( R_EAX );
  3009                         MMU_TRANSLATE_READ( R_EAX );
  3010                         load_spreg( R_EDX, R_FPSCR );
  3011                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3012                         JNE_rel8(8 + MEM_READ_SIZE, doublesize);
  3013                         MEM_READ_LONG( R_EAX, R_EAX );
  3014                         load_fr_bank( R_EDX );
  3015                         store_fr( R_EDX, R_EAX, FRn );
  3016                         if( FRn&1 ) {
  3017                     	JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
  3018                     	JMP_TARGET(doublesize);
  3019                     	MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3020                     	load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
  3021                     	load_xf_bank( R_EDX );
  3022                     	store_fr( R_EDX, R_ECX, FRn&0x0E );
  3023                     	store_fr( R_EDX, R_EAX, FRn|0x01 );
  3024                     	JMP_TARGET(end);
  3025                         } else {
  3026                     	JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
  3027                     	JMP_TARGET(doublesize);
  3028                     	MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3029                     	load_fr_bank( R_EDX );
  3030                     	store_fr( R_EDX, R_ECX, FRn&0x0E );
  3031                     	store_fr( R_EDX, R_EAX, FRn|0x01 );
  3032                     	JMP_TARGET(end);
  3034                         sh4_x86.tstate = TSTATE_NONE;
  3036                         break;
  3037                     case 0x7:
  3038                         { /* FMOV FRm, @(R0, Rn) */
  3039                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3040                         check_fpuen();
  3041                         load_reg( R_EAX, Rn );
  3042                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
  3043                         check_walign32( R_EAX );
  3044                         MMU_TRANSLATE_WRITE( R_EAX );
  3045                         load_spreg( R_EDX, R_FPSCR );
  3046                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3047                         JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
  3048                         load_fr_bank( R_EDX );
  3049                         load_fr( R_EDX, R_ECX, FRm );
  3050                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3051                         if( FRm&1 ) {
  3052                     	JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
  3053                     	JMP_TARGET(doublesize);
  3054                     	load_xf_bank( R_EDX );
  3055                     	load_fr( R_EDX, R_ECX, FRm&0x0E );
  3056                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3057                     	MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3058                     	JMP_TARGET(end);
  3059                         } else {
  3060                     	JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
  3061                     	JMP_TARGET(doublesize);
  3062                     	load_fr_bank( R_EDX );
  3063                     	load_fr( R_EDX, R_ECX, FRm&0x0E );
  3064                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3065                     	MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3066                     	JMP_TARGET(end);
  3068                         sh4_x86.tstate = TSTATE_NONE;
  3070                         break;
  3071                     case 0x8:
  3072                         { /* FMOV @Rm, FRn */
  3073                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3074                         check_fpuen();
  3075                         load_reg( R_EAX, Rm );
  3076                         check_ralign32( R_EAX );
  3077                         MMU_TRANSLATE_READ( R_EAX );
  3078                         load_spreg( R_EDX, R_FPSCR );
  3079                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3080                         JNE_rel8(8 + MEM_READ_SIZE, doublesize);
  3081                         MEM_READ_LONG( R_EAX, R_EAX );
  3082                         load_fr_bank( R_EDX );
  3083                         store_fr( R_EDX, R_EAX, FRn );
  3084                         if( FRn&1 ) {
  3085                     	JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
  3086                     	JMP_TARGET(doublesize);
  3087                     	MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3088                     	load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
  3089                     	load_xf_bank( R_EDX );
  3090                     	store_fr( R_EDX, R_ECX, FRn&0x0E );
  3091                     	store_fr( R_EDX, R_EAX, FRn|0x01 );
  3092                     	JMP_TARGET(end);
  3093                         } else {
  3094                     	JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
  3095                     	JMP_TARGET(doublesize);
  3096                     	MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3097                     	load_fr_bank( R_EDX );
  3098                     	store_fr( R_EDX, R_ECX, FRn&0x0E );
  3099                     	store_fr( R_EDX, R_EAX, FRn|0x01 );
  3100                     	JMP_TARGET(end);
  3102                         sh4_x86.tstate = TSTATE_NONE;
  3104                         break;
  3105                     case 0x9:
  3106                         { /* FMOV @Rm+, FRn */
  3107                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3108                         check_fpuen();
  3109                         load_reg( R_EAX, Rm );
  3110                         check_ralign32( R_EAX );
  3111                         MMU_TRANSLATE_READ( R_EAX );
  3112                         load_spreg( R_EDX, R_FPSCR );
  3113                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3114                         JNE_rel8(12 + MEM_READ_SIZE, doublesize);
  3115                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  3116                         MEM_READ_LONG( R_EAX, R_EAX );
  3117                         load_fr_bank( R_EDX );
  3118                         store_fr( R_EDX, R_EAX, FRn );
  3119                         if( FRn&1 ) {
  3120                     	JMP_rel8(25 + MEM_READ_DOUBLE_SIZE, end);
  3121                     	JMP_TARGET(doublesize);
  3122                     	ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
  3123                     	MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3124                     	load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
  3125                     	load_xf_bank( R_EDX );
  3126                     	store_fr( R_EDX, R_ECX, FRn&0x0E );
  3127                     	store_fr( R_EDX, R_EAX, FRn|0x01 );
  3128                     	JMP_TARGET(end);
  3129                         } else {
  3130                     	JMP_rel8(13 + MEM_READ_DOUBLE_SIZE, end);
                    	JMP_TARGET(doublesize);
  3131                     	ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
  3132                     	MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
  3133                     	load_fr_bank( R_EDX );
  3134                     	store_fr( R_EDX, R_ECX, FRn&0x0E );
  3135                     	store_fr( R_EDX, R_EAX, FRn|0x01 );
  3136                     	JMP_TARGET(end);
  3138                         sh4_x86.tstate = TSTATE_NONE;
  3140                         break;
  3141                     case 0xA:
  3142                         { /* FMOV FRm, @Rn */
  3143                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3144                         check_fpuen();
  3145                         load_reg( R_EAX, Rn );
  3146                         check_walign32( R_EAX );
  3147                         MMU_TRANSLATE_WRITE( R_EAX );
  3148                         load_spreg( R_EDX, R_FPSCR );
  3149                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3150                         JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
  3151                         load_fr_bank( R_EDX );
  3152                         load_fr( R_EDX, R_ECX, FRm );
  3153                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3154                         if( FRm&1 ) {
  3155                     	JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
  3156                     	JMP_TARGET(doublesize);
  3157                     	load_xf_bank( R_EDX );
  3158                     	load_fr( R_EDX, R_ECX, FRm&0x0E );
  3159                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3160                     	MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3161                     	JMP_TARGET(end);
  3162                         } else {
  3163                     	JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
  3164                     	JMP_TARGET(doublesize);
  3165                     	load_fr_bank( R_EDX );
  3166                     	load_fr( R_EDX, R_ECX, FRm&0x0E );
  3167                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3168                     	MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3169                     	JMP_TARGET(end);
  3171                         sh4_x86.tstate = TSTATE_NONE;
  3173                         break;
  3174                     case 0xB:
  3175                         { /* FMOV FRm, @-Rn */
  3176                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
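                        /* Pre-decrement store: the decrement of Rn is only emitted after
                         * MMU_TRANSLATE_WRITE has succeeded, so a TLB miss or protection
                         * fault leaves Rn unmodified for the restarted instruction. */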
  3177                         check_fpuen();
  3178                         load_reg( R_EAX, Rn );
  3179                         check_walign32( R_EAX );
  3180                         load_spreg( R_EDX, R_FPSCR );
  3181                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3182                         JNE_rel8(15 + MEM_WRITE_SIZE + MMU_TRANSLATE_SIZE, doublesize);
  3183                         ADD_imm8s_r32( -4, R_EAX );
  3184                         MMU_TRANSLATE_WRITE( R_EAX );
  3185                         load_fr_bank( R_EDX );
  3186                         load_fr( R_EDX, R_ECX, FRm );
  3187                         ADD_imm8s_sh4r(-4,REG_OFFSET(r[Rn]));
  3188                         MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
  3189                         if( FRm&1 ) {
  3190                     	JMP_rel8( 25 + MEM_WRITE_DOUBLE_SIZE + MMU_TRANSLATE_SIZE, end );
  3191                     	JMP_TARGET(doublesize);
  3192                     	ADD_imm8s_r32(-8,R_EAX);
  3193                     	MMU_TRANSLATE_WRITE( R_EAX );
  3194                     	load_xf_bank( R_EDX );
  3195                     	load_fr( R_EDX, R_ECX, FRm&0x0E );
  3196                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3197                     	ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
  3198                     	MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3199                     	JMP_TARGET(end);
  3200                         } else {
  3201                     	JMP_rel8( 16 + MEM_WRITE_DOUBLE_SIZE + MMU_TRANSLATE_SIZE, end );
  3202                     	JMP_TARGET(doublesize);
  3203                     	ADD_imm8s_r32(-8,R_EAX);
  3204                     	MMU_TRANSLATE_WRITE( R_EAX );
  3205                     	load_fr_bank( R_EDX );
  3206                     	load_fr( R_EDX, R_ECX, FRm&0x0E );
  3207                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3208                     	ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
  3209                     	MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
  3210                     	JMP_TARGET(end);
  3212                         sh4_x86.tstate = TSTATE_NONE;
  3214                         break;
  3215                     case 0xC:
  3216                         { /* FMOV FRm, FRn */
  3217                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3218                         /* As horrible as this looks, it's actually covering 5 separate cases:
  3219                          * 1. 32-bit fr-to-fr (PR=0)
  3220                          * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
  3221                          * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
  3222                          * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
  3223                          * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
  3224                          */
  3225                         check_fpuen();
  3226                         load_spreg( R_ECX, R_FPSCR );
  3227                         load_fr_bank( R_EDX );
  3228                         TEST_imm32_r32( FPSCR_SZ, R_ECX );
  3229                         JNE_rel8(8, doublesize);
  3230                         load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
  3231                         store_fr( R_EDX, R_EAX, FRn );
  3232                         if( FRm&1 ) {
  3233                     	JMP_rel8(24, end);
  3234                     	JMP_TARGET(doublesize);
  3235                     	load_xf_bank( R_ECX ); 
  3236                     	load_fr( R_ECX, R_EAX, FRm-1 );
  3237                     	if( FRn&1 ) {
  3238                     	    load_fr( R_ECX, R_EDX, FRm );
  3239                     	    store_fr( R_ECX, R_EAX, FRn-1 );
  3240                     	    store_fr( R_ECX, R_EDX, FRn );
  3241                     	} else /* FRn&1 == 0 */ {
  3242                     	    load_fr( R_ECX, R_ECX, FRm );
  3243                     	    store_fr( R_EDX, R_EAX, FRn );
  3244                     	    store_fr( R_EDX, R_ECX, FRn+1 );
  3246                     	JMP_TARGET(end);
  3247                         } else /* FRm&1 == 0 */ {
  3248                     	if( FRn&1 ) {
  3249                     	    JMP_rel8(24, end);
  3250                     	    load_xf_bank( R_ECX );
  3251                     	    load_fr( R_EDX, R_EAX, FRm );
  3252                     	    load_fr( R_EDX, R_EDX, FRm+1 );
  3253                     	    store_fr( R_ECX, R_EAX, FRn-1 );
  3254                     	    store_fr( R_ECX, R_EDX, FRn );
  3255                     	    JMP_TARGET(end);
  3256                     	} else /* FRn&1 == 0 */ {
  3257                     	    JMP_rel8(12, end);
  3258                     	    load_fr( R_EDX, R_EAX, FRm );
  3259                     	    load_fr( R_EDX, R_ECX, FRm+1 );
  3260                     	    store_fr( R_EDX, R_EAX, FRn );
  3261                     	    store_fr( R_EDX, R_ECX, FRn+1 );
  3262                     	    JMP_TARGET(end);
  3265                         sh4_x86.tstate = TSTATE_NONE;
  3267                         break;
  3268                     case 0xD:
  3269                         switch( (ir&0xF0) >> 4 ) {
  3270                             case 0x0:
  3271                                 { /* FSTS FPUL, FRn */
  3272                                 uint32_t FRn = ((ir>>8)&0xF); 
  3273                                 check_fpuen();
  3274                                 load_fr_bank( R_ECX );
  3275                                 load_spreg( R_EAX, R_FPUL );
  3276                                 store_fr( R_ECX, R_EAX, FRn );
  3277                                 sh4_x86.tstate = TSTATE_NONE;
  3279                                 break;
  3280                             case 0x1:
  3281                                 { /* FLDS FRm, FPUL */
  3282                                 uint32_t FRm = ((ir>>8)&0xF); 
  3283                                 check_fpuen();
  3284                                 load_fr_bank( R_ECX );
  3285                                 load_fr( R_ECX, R_EAX, FRm );
  3286                                 store_spreg( R_EAX, R_FPUL );
  3287                                 sh4_x86.tstate = TSTATE_NONE;
  3289                                 break;
  3290                             case 0x2:
  3291                                 { /* FLOAT FPUL, FRn */
  3292                                 uint32_t FRn = ((ir>>8)&0xF); 
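                                /* FLOAT: FILD pushes the 32-bit integer in FPUL onto the
                                 * x87 stack; it is popped back out as a single (PR=0) or
                                 * a double (PR=1). */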
  3293                                 check_fpuen();
  3294                                 load_spreg( R_ECX, R_FPSCR );
  3295                                 load_spreg(R_EDX, REG_OFFSET(fr_bank));
  3296                                 FILD_sh4r(R_FPUL);
  3297                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3298                                 JNE_rel8(5, doubleprec);
  3299                                 pop_fr( R_EDX, FRn );
  3300                                 JMP_rel8(3, end);
  3301                                 JMP_TARGET(doubleprec);
  3302                                 pop_dr( R_EDX, FRn );
  3303                                 JMP_TARGET(end);
  3304                                 sh4_x86.tstate = TSTATE_NONE;
  3306                                 break;
  3307                             case 0x3:
  3308                                 { /* FTRC FRm, FPUL */
  3309                                 uint32_t FRm = ((ir>>8)&0xF); 
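                                /* FTRC: the operand is clamped against max_int/min_int
                                 * (out-of-range values store the limit directly), then
                                 * converted with the x87 control word temporarily set to
                                 * trunc_fcw (round toward zero) and restored from
                                 * save_fcw, since SH4's FTRC always truncates regardless
                                 * of the current x87 rounding mode. */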
  3310                                 check_fpuen();
  3311                                 load_spreg( R_ECX, R_FPSCR );
  3312                                 load_fr_bank( R_EDX );
  3313                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3314                                 JNE_rel8(5, doubleprec);
  3315                                 push_fr( R_EDX, FRm );
  3316                                 JMP_rel8(3, doop);
  3317                                 JMP_TARGET(doubleprec);
  3318                                 push_dr( R_EDX, FRm );
  3319                                 JMP_TARGET( doop );
  3320                                 load_imm32( R_ECX, (uint32_t)&max_int );
  3321                                 FILD_r32ind( R_ECX );
  3322                                 FCOMIP_st(1);
  3323                                 JNA_rel8( 32, sat );
  3324                                 load_imm32( R_ECX, (uint32_t)&min_int );  // 5
  3325                                 FILD_r32ind( R_ECX );           // 2
  3326                                 FCOMIP_st(1);                   // 2
  3327                                 JAE_rel8( 21, sat2 );            // 2
  3328                                 load_imm32( R_EAX, (uint32_t)&save_fcw );
  3329                                 FNSTCW_r32ind( R_EAX );
  3330                                 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
  3331                                 FLDCW_r32ind( R_EDX );
  3332                                 FISTP_sh4r(R_FPUL);             // 3
  3333                                 FLDCW_r32ind( R_EAX );
  3334                                 JMP_rel8( 9, end );             // 2
  3336                                 JMP_TARGET(sat);
  3337                                 JMP_TARGET(sat2);
  3338                                 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
  3339                                 store_spreg( R_ECX, R_FPUL );
  3340                                 FPOP_st();
  3341                                 JMP_TARGET(end);
  3342                                 sh4_x86.tstate = TSTATE_NONE;
  3344                                 break;
  3345                             case 0x4:
  3346                                 { /* FNEG FRn */
  3347                                 uint32_t FRn = ((ir>>8)&0xF); 
  3348                                 check_fpuen();
  3349                                 load_spreg( R_ECX, R_FPSCR );
  3350                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3351                                 load_fr_bank( R_EDX );
  3352                                 JNE_rel8(10, doubleprec);
  3353                                 push_fr(R_EDX, FRn);
  3354                                 FCHS_st0();
  3355                                 pop_fr(R_EDX, FRn);
  3356                                 JMP_rel8(8, end);
  3357                                 JMP_TARGET(doubleprec);
  3358                                 push_dr(R_EDX, FRn);
  3359                                 FCHS_st0();
  3360                                 pop_dr(R_EDX, FRn);
  3361                                 JMP_TARGET(end);
  3362                                 sh4_x86.tstate = TSTATE_NONE;
  3363                                 }
  3364                                 break;
  3365                             case 0x5:
  3366                                 { /* FABS FRn */
  3367                                 uint32_t FRn = ((ir>>8)&0xF); 
  3368                                 check_fpuen();
  3369                                 load_spreg( R_ECX, R_FPSCR );
  3370                                 load_fr_bank( R_EDX );
  3371                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3372                                 JNE_rel8(10, doubleprec);
  3373                                 push_fr(R_EDX, FRn); // 3
  3374                                 FABS_st0(); // 2
  3375                                 pop_fr( R_EDX, FRn); //3
  3376                                 JMP_rel8(8,end); // 2
  3377                                 JMP_TARGET(doubleprec);
  3378                                 push_dr(R_EDX, FRn);
  3379                                 FABS_st0();
  3380                                 pop_dr(R_EDX, FRn);
  3381                                 JMP_TARGET(end);
  3382                                 sh4_x86.tstate = TSTATE_NONE;
  3383                                 }
  3384                                 break;
  3385                             case 0x6:
  3386                                 { /* FSQRT FRn */
  3387                                 uint32_t FRn = ((ir>>8)&0xF); 
  3388                                 check_fpuen();
  3389                                 load_spreg( R_ECX, R_FPSCR );
  3390                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3391                                 load_fr_bank( R_EDX );
  3392                                 JNE_rel8(10, doubleprec);
  3393                                 push_fr(R_EDX, FRn);
  3394                                 FSQRT_st0();
  3395                                 pop_fr(R_EDX, FRn);
  3396                                 JMP_rel8(8, end);
  3397                                 JMP_TARGET(doubleprec);
  3398                                 push_dr(R_EDX, FRn);
  3399                                 FSQRT_st0();
  3400                                 pop_dr(R_EDX, FRn);
  3401                                 JMP_TARGET(end);
  3402                                 sh4_x86.tstate = TSTATE_NONE;
  3403                                 }
  3404                                 break;
  3405                             case 0x7:
  3406                                 { /* FSRRA FRn */
  3407                                 uint32_t FRn = ((ir>>8)&0xF); 
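                                       /* FSRRA approximates 1/sqrt(FRn) and is defined for single
                                        * precision only, hence the early exit when PR=1.  Here it is
                                        * generated as an exact 1.0/sqrt(FRn) on the x87 stack. */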
  3408                                 check_fpuen();
  3409                                 load_spreg( R_ECX, R_FPSCR );
  3410                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3411                                 load_fr_bank( R_EDX );
  3412                                 JNE_rel8(12, end); // PR=0 only
  3413                                 FLD1_st0();
  3414                                 push_fr(R_EDX, FRn);
  3415                                 FSQRT_st0();
  3416                                 FDIVP_st(1);
  3417                                 pop_fr(R_EDX, FRn);
  3418                                 JMP_TARGET(end);
  3419                                 sh4_x86.tstate = TSTATE_NONE;
  3420                                 }
  3421                                 break;
  3422                             case 0x8:
  3423                                 { /* FLDI0 FRn */
  3424                                 uint32_t FRn = ((ir>>8)&0xF); 
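                                         /* FLDI0 loads the constant +0.0f into FRn (raw bit pattern
                                          * 0x00000000); it is only defined when PR=0. */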
  3425                                 /* IFF PR=0 */
  3426                                   check_fpuen();
  3427                                   load_spreg( R_ECX, R_FPSCR );
  3428                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3429                                   JNE_rel8(8, end);
  3430                                   XOR_r32_r32( R_EAX, R_EAX );
  3431                                   load_spreg( R_ECX, REG_OFFSET(fr_bank) );
  3432                                   store_fr( R_ECX, R_EAX, FRn );
  3433                                   JMP_TARGET(end);
  3434                                   sh4_x86.tstate = TSTATE_NONE;
  3435                                 }
  3436                                 break;
  3437                             case 0x9:
  3438                                 { /* FLDI1 FRn */
  3439                                 uint32_t FRn = ((ir>>8)&0xF); 
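                                         /* FLDI1 loads +1.0f into FRn; 0x3F800000 is the IEEE-754
                                          * single-precision encoding of 1.0.  PR=0 only. */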
  3440                                 /* IFF PR=0 */
  3441                                   check_fpuen();
  3442                                   load_spreg( R_ECX, R_FPSCR );
  3443                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3444                                   JNE_rel8(11, end);
  3445                                   load_imm32(R_EAX, 0x3F800000);
  3446                                   load_spreg( R_ECX, REG_OFFSET(fr_bank) );
  3447                                   store_fr( R_ECX, R_EAX, FRn );
  3448                                   JMP_TARGET(end);
  3449                                   sh4_x86.tstate = TSTATE_NONE;
  3450                                 }
  3451                                 break;
  3452                             case 0xA:
  3453                                 { /* FCNVSD FPUL, FRn */
  3454                                 uint32_t FRn = ((ir>>8)&0xF); 
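                                       /* FCNVSD widens the single-precision value in FPUL to a double
                                        * in DRn; it is only defined when PR=1.  The x87 load/store
                                        * pair performs the conversion. */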
  3455                                 check_fpuen();
  3456                                 load_spreg( R_ECX, R_FPSCR );
  3457                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3458                                 JE_rel8(9, end); // only when PR=1
  3459                                 load_fr_bank( R_ECX );
  3460                                 push_fpul();
  3461                                 pop_dr( R_ECX, FRn );
  3462                                 JMP_TARGET(end);
  3463                                 sh4_x86.tstate = TSTATE_NONE;
  3464                                 }
  3465                                 break;
  3466                             case 0xB:
  3467                                 { /* FCNVDS FRm, FPUL */
  3468                                 uint32_t FRm = ((ir>>8)&0xF); 
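                                       /* FCNVDS narrows double-precision DRm to a single-precision
                                        * value in FPUL; PR=1 only. */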
  3469                                 check_fpuen();
  3470                                 load_spreg( R_ECX, R_FPSCR );
  3471                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3472                                 JE_rel8(9, end); // only when PR=1
  3473                                 load_fr_bank( R_ECX );
  3474                                 push_dr( R_ECX, FRm );
  3475                                 pop_fpul();
  3476                                 JMP_TARGET(end);
  3477                                 sh4_x86.tstate = TSTATE_NONE;
  3478                                 }
  3479                                 break;
  3480                             case 0xE:
  3481                                 { /* FIPR FVm, FVn */
  3482                                 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3); 
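                                       /* FIPR computes the 4-element inner product of vectors FVm and
                                        * FVn and stores the result in the last element of FVn
                                        * (FR[(FVn<<2)+3]).  Single precision only; when PR=1 the
                                        * whole block is skipped. */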
  3483                                 check_fpuen();
  3484                                 load_spreg( R_ECX, R_FPSCR );
  3485                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3486                                 JNE_rel8(44, doubleprec);
  3488                                 load_fr_bank( R_ECX );
  3489                                 push_fr( R_ECX, FVm<<2 );
  3490                                 push_fr( R_ECX, FVn<<2 );
  3491                                 FMULP_st(1);
  3492                                 push_fr( R_ECX, (FVm<<2)+1);
  3493                                 push_fr( R_ECX, (FVn<<2)+1);
  3494                                 FMULP_st(1);
  3495                                 FADDP_st(1);
  3496                                 push_fr( R_ECX, (FVm<<2)+2);
  3497                                 push_fr( R_ECX, (FVn<<2)+2);
  3498                                 FMULP_st(1);
  3499                                 FADDP_st(1);
  3500                                 push_fr( R_ECX, (FVm<<2)+3);
  3501                                 push_fr( R_ECX, (FVn<<2)+3);
  3502                                 FMULP_st(1);
  3503                                 FADDP_st(1);
  3504                                 pop_fr( R_ECX, (FVn<<2)+3);
  3505                                 JMP_TARGET(doubleprec);
  3506                                 sh4_x86.tstate = TSTATE_NONE;
  3507                                 }
  3508                                 break;
  3509                             case 0xF:
  3510                                 switch( (ir&0x100) >> 8 ) {
  3511                                     case 0x0:
  3512                                         { /* FSCA FPUL, FRn */
  3513                                         uint32_t FRn = ((ir>>9)&0x7)<<1; 
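                                               /* FSCA writes sin and cos of the angle in FPUL to the
                                                * even/odd pair FRn/FRn+1; FPUL holds a fixed-point
                                                * angle where 0x10000 corresponds to one full turn.
                                                * The work is done by the sh4_fsca helper, which gets
                                                * the angle and a pointer into the register bank. */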
  3514                                         check_fpuen();
  3515                                         load_spreg( R_ECX, R_FPSCR );
  3516                                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3517                                         JNE_rel8( CALL_FUNC2_SIZE + 9, doubleprec );
  3518                                         load_fr_bank( R_ECX );
  3519                                         ADD_imm8s_r32( (FRn&0x0E)<<2, R_ECX );
  3520                                         load_spreg( R_EDX, R_FPUL );
  3521                                         call_func2( sh4_fsca, R_EDX, R_ECX );
  3522                                         JMP_TARGET(doubleprec);
  3523                                         sh4_x86.tstate = TSTATE_NONE;
  3524                                         }
  3525                                         break;
  3526                                     case 0x1:
  3527                                         switch( (ir&0x200) >> 9 ) {
  3528                                             case 0x0:
  3529                                                 { /* FTRV XMTRX, FVn */
  3530                                                 uint32_t FVn = ((ir>>10)&0x3); 
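                                                       /* FTRV multiplies vector FVn by the 4x4 matrix
                                                        * XMTRX held in the other register bank (hence
                                                        * load_xf_bank) via the sh4_ftrv helper.
                                                        * PR=0 only. */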
  3531                                                 check_fpuen();
  3532                                                 load_spreg( R_ECX, R_FPSCR );
  3533                                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3534                                                 JNE_rel8( 18 + CALL_FUNC2_SIZE, doubleprec );
  3535                                                 load_fr_bank( R_EDX );                 // 3
  3536                                                 ADD_imm8s_r32( FVn<<4, R_EDX );        // 3
  3537                                                 load_xf_bank( R_ECX );                 // 12
  3538                                                 call_func2( sh4_ftrv, R_EDX, R_ECX );  // 12
  3539                                                 JMP_TARGET(doubleprec);
  3540                                                 sh4_x86.tstate = TSTATE_NONE;
  3541                                                 }
  3542                                                 break;
  3543                                             case 0x1:
  3544                                                 switch( (ir&0xC00) >> 10 ) {
  3545                                                     case 0x0:
  3546                                                         { /* FSCHG */
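                                                               /* FSCHG toggles FPSCR.SZ, switching
                                                                * FMOV between 32-bit single and
                                                                * 64-bit pair transfers. */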
  3547                                                         check_fpuen();
  3548                                                         load_spreg( R_ECX, R_FPSCR );
  3549                                                         XOR_imm32_r32( FPSCR_SZ, R_ECX );
  3550                                                         store_spreg( R_ECX, R_FPSCR );
  3551                                                         sh4_x86.tstate = TSTATE_NONE;
  3552                                                         }
  3553                                                         break;
  3554                                                     case 0x2:
  3555                                                         { /* FRCHG */
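                                                               /* FRCHG toggles FPSCR.FR, swapping the
                                                                * front and back register banks;
                                                                * update_fr_bank recomputes the cached
                                                                * fr_bank pointer from the new value. */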
  3556                                                         check_fpuen();
  3557                                                         load_spreg( R_ECX, R_FPSCR );
  3558                                                         XOR_imm32_r32( FPSCR_FR, R_ECX );
  3559                                                         store_spreg( R_ECX, R_FPSCR );
  3560                                                         update_fr_bank( R_ECX );
  3561                                                         sh4_x86.tstate = TSTATE_NONE;
  3562                                                         }
  3563                                                         break;
  3564                                                     case 0x3:
  3565                                                         { /* UNDEF */
  3566                                                         if( sh4_x86.in_delay_slot ) {
  3567                                                     	SLOTILLEGAL();
  3568                                                         } else {
  3569                                                     	JMP_exc(EXC_ILLEGAL);
  3570                                                     	return 2;
  3571                                                     }
  3572                                                         }
  3573                                                         break;
  3574                                                     default:
  3575                                                         UNDEF();
  3576                                                         break;
  3577                                                 }
  3578                                                 break;
  3579                                         }
  3580                                         break;
  3581                                 }
  3582                                 break;
  3583                             default:
  3584                                 UNDEF();
  3585                                 break;
  3586                         }
  3587                         break;
  3588                     case 0xE:
  3589                         { /* FMAC FR0, FRm, FRn */
  3590                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
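                               /* FMAC computes FRn := FR0 * FRm + FRn on the x87 stack; the second
                                * path handles the PR=1 case using the DR pairs. */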
  3591                         check_fpuen();
  3592                         load_spreg( R_ECX, R_FPSCR );
  3593                         load_spreg( R_EDX, REG_OFFSET(fr_bank));
  3594                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3595                         JNE_rel8(18, doubleprec);
  3596                         push_fr( R_EDX, 0 );
  3597                         push_fr( R_EDX, FRm );
  3598                         FMULP_st(1);
  3599                         push_fr( R_EDX, FRn );
  3600                         FADDP_st(1);
  3601                         pop_fr( R_EDX, FRn );
  3602                         JMP_rel8(16, end);
  3603                         JMP_TARGET(doubleprec);
  3604                         push_dr( R_EDX, 0 );
  3605                         push_dr( R_EDX, FRm );
  3606                         FMULP_st(1);
  3607                         push_dr( R_EDX, FRn );
  3608                         FADDP_st(1);
  3609                         pop_dr( R_EDX, FRn );
  3610                         JMP_TARGET(end);
  3611                         sh4_x86.tstate = TSTATE_NONE;
  3612                         }
  3613                         break;
  3614                     default:
  3615                         UNDEF();
  3616                         break;
  3617                 }
  3618                 break;
  3619         }
  3621     sh4_x86.in_delay_slot = DELAY_NONE;
  3622     return 0;
  3623 }