lxdream.org :: lxdream/src/sh4/sh4x86.c
filename src/sh4/sh4x86.c
changeset 533:9764673fd4a5
prev 532:43653e748030
next 539:75f3e594d4a7
author nkeynes
date Tue Nov 20 08:31:34 2007 +0000
permissions -rw-r--r--
last change Fix TRAPA (translator) and add test case
     1 /**
     2  * $Id: sh4x86.in,v 1.20 2007-11-08 11:54:16 nkeynes Exp $
     3  * 
     4  * SH4 => x86 translation. This version does no real optimization, it just
     5  * outputs straight-line x86 code - it mainly exists to provide a baseline
     6  * to test the optimizing versions against.
     7  *
     8  * Copyright (c) 2007 Nathan Keynes.
     9  *
    10  * This program is free software; you can redistribute it and/or modify
    11  * it under the terms of the GNU General Public License as published by
    12  * the Free Software Foundation; either version 2 of the License, or
    13  * (at your option) any later version.
    14  *
    15  * This program is distributed in the hope that it will be useful,
    16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
    17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    18  * GNU General Public License for more details.
    19  */
    21 #include <assert.h>
    22 #include <math.h>
    24 #ifndef NDEBUG
    25 #define DEBUG_JUMPS 1
    26 #endif
    28 #include "sh4/xltcache.h"
    29 #include "sh4/sh4core.h"
    30 #include "sh4/sh4trans.h"
    31 #include "sh4/sh4mmio.h"
    32 #include "sh4/x86op.h"
    33 #include "clock.h"
    35 #define DEFAULT_BACKPATCH_SIZE 4096
    37 /** 
    38  * Struct to manage internal translation state. This state is not saved -
    39  * it is only valid between calls to sh4_translate_begin_block() and
    40  * sh4_translate_end_block()
    41  */
    42 struct sh4_x86_state {
    43     gboolean in_delay_slot;
    44     gboolean priv_checked; /* true if we've already checked the cpu mode. */
    45     gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
    46     gboolean branch_taken; /* true if we branched unconditionally */
    47     uint32_t block_start_pc;
    48     int tstate;
    50     /* Allocated memory for the (block-wide) back-patch list */
    51     uint32_t **backpatch_list;
    52     uint32_t backpatch_posn;
    53     uint32_t backpatch_size;
    54 };
    56 #define TSTATE_NONE -1
    57 #define TSTATE_O    0
    58 #define TSTATE_C    2
    59 #define TSTATE_E    4
    60 #define TSTATE_NE   5
    61 #define TSTATE_G    0xF
    62 #define TSTATE_GE   0xD
    63 #define TSTATE_A    7
    64 #define TSTATE_AE   3
    66 /** Branch if T is set (either in the current cflags, or in sh4r.t) */
    67 #define JT_rel8(rel8,label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    68 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    69     OP(0x70+sh4_x86.tstate); OP(rel8); \
    70     MARK_JMP(rel8,label)
    71 /** Branch if T is clear (either in the current cflags or in sh4r.t) */
    72 #define JF_rel8(rel8,label) if( sh4_x86.tstate == TSTATE_NONE ) { \
    73 	CMP_imm8s_sh4r( 1, R_T ); sh4_x86.tstate = TSTATE_E; } \
    74     OP(0x70+ (sh4_x86.tstate^1)); OP(rel8); \
    75     MARK_JMP(rel8, label)
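       /* Editorial note (not part of the original file): the TSTATE_* values above are
        * exactly the x86 condition-code nibbles, so OP(0x70 + tstate) emits the matching
        * Jcc rel8 opcode, and XOR-ing the low bit (as JF_rel8 does) negates the condition.
        * A minimal check of the two opcodes the macros fall back to after comparing
        * sh4r.t against 1:
        */
       enum {
           EXAMPLE_JE_REL8_OPCODE  = 0x70 + TSTATE_E,       /* 0x74 = JE rel8  */
           EXAMPLE_JNE_REL8_OPCODE = 0x70 + (TSTATE_E ^ 1)  /* 0x75 = JNE rel8 */
       };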
    78 #define EXIT_DATA_ADDR_READ 0
    79 #define EXIT_DATA_ADDR_WRITE 7
    80 #define EXIT_ILLEGAL 14
    81 #define EXIT_SLOT_ILLEGAL 21
    82 #define EXIT_FPU_DISABLED 28
    83 #define EXIT_SLOT_FPU_DISABLED 35
    85 static struct sh4_x86_state sh4_x86;
    87 static uint32_t max_int = 0x7FFFFFFF;
    88 static uint32_t min_int = 0x80000000;
    89 static uint32_t save_fcw; /* save value for fpu control word */
    90 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
    92 void sh4_x86_init()
    93 {
    94     sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
    95     sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
    96 }
    99 static void sh4_x86_add_backpatch( uint8_t *ptr )
   100 {
   101     if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
   102 	sh4_x86.backpatch_size <<= 1;
   103 	sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
   104 	assert( sh4_x86.backpatch_list != NULL );
   105     }
   106     sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
   107 }
   109 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
   110 {
   111     unsigned int i;
   112     for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
   113 	*sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
   114     }
   115 }
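       /* Editorial sketch (not part of the original file): how the fix-up above is meant
        * to work. Each exception-exit jump site (JE_exit() and friends, presumably defined
        * in x86op.h) appears to store one of the EXIT_* byte offsets defined above as its
        * provisional rel32 operand; adding (reloc_base - site - 4) turns that into a real
        * displacement, since the CPU resolves a rel32 jump as target = site + 4 + rel32,
        * which then lands at reloc_base + EXIT_* offset.
        */
       static inline int32_t example_final_rel32( uint8_t *site, uint8_t *reloc_base,
                                                   int32_t exit_offset )
       {
           return exit_offset + (int32_t)(reloc_base - site - 4);
       }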
   117 /**
   118  * Emit an instruction to load an SH4 reg into a real register
   119  */
   120 static inline void load_reg( int x86reg, int sh4reg ) 
   121 {
   122     /* mov [bp+n], reg */
   123     OP(0x8B);
   124     OP(0x45 + (x86reg<<3));
   125     OP(REG_OFFSET(r[sh4reg]));
   126 }
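       /* Editorial sketch (not part of the original file): the byte-level shape of what
        * load_reg() emits -- "mov r32, [ebp + disp8]" -- with the ModR/M fields spelled
        * out. The disp8 is whatever REG_OFFSET(r[sh4reg]) evaluates to.
        */
       static inline void example_encode_load_reg( uint8_t out[3], int x86reg, uint8_t disp8 )
       {
           out[0] = 0x8B;                  /* MOV r32, r/m32                                   */
           out[1] = 0x45 + (x86reg << 3);  /* ModR/M: mod=01, reg=x86reg, rm=101 (EBP) + disp8 */
           out[2] = disp8;                 /* REG_OFFSET(r[sh4reg])                            */
       }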
   128 static inline void load_reg16s( int x86reg, int sh4reg )
   129 {
   130     OP(0x0F);
   131     OP(0xBF);
   132     MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
   133 }
   135 static inline void load_reg16u( int x86reg, int sh4reg )
   136 {
   137     OP(0x0F);
   138     OP(0xB7);
   139     MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
   141 }
   143 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
   144 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
   145 /**
   146  * Emit an instruction to load an immediate value into a register
   147  */
   148 static inline void load_imm32( int x86reg, uint32_t value ) {
   149     /* mov #value, reg */
   150     OP(0xB8 + x86reg);
   151     OP32(value);
   152 }
   154 /**
   155  * Load an immediate 64-bit quantity (note: x86-64 only)
   156  */
    157 static inline void load_imm64( int x86reg, uint64_t value ) {
   158     /* mov #value, reg */
   159     REXW();
   160     OP(0xB8 + x86reg);
   161     OP64(value);
   162 }
   165 /**
   166  * Emit an instruction to store an SH4 reg (RN)
   167  */
    168 static inline void store_reg( int x86reg, int sh4reg ) {
   169     /* mov reg, [bp+n] */
   170     OP(0x89);
   171     OP(0x45 + (x86reg<<3));
   172     OP(REG_OFFSET(r[sh4reg]));
   173 }
   175 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
   177 /**
   178  * Load an FR register (single-precision floating point) into an integer x86
   179  * register (eg for register-to-register moves)
   180  */
    181 static inline void load_fr( int bankreg, int x86reg, int frm )
   182 {
   183     OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
   184 }
   186 /**
    187  * Store an integer x86 register into an FR register (single-precision
    188  * floating point), eg for register-to-register moves
   189  */
    190 static inline void store_fr( int bankreg, int x86reg, int frn )
   191 {
   192     OP(0x89);  OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
   193 }
   196 /**
    197  * Load a pointer to the back fp bank into the specified x86 register. The
   198  * bankreg must have been previously loaded with FPSCR.
   199  * NB: 12 bytes
   200  */
   201 static inline void load_xf_bank( int bankreg )
   202 {
   203     NOT_r32( bankreg );
   204     SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
   205     AND_imm8s_r32( 0x40, bankreg );    // Complete extraction
   206     OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
   207 }
   209 /**
   210  * Update the fr_bank pointer based on the current fpscr value.
   211  */
   212 static inline void update_fr_bank( int fpscrreg )
   213 {
   214     SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
   215     AND_imm8s_r32( 0x40, fpscrreg );    // Complete extraction
   216     OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
   217     store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
   218 }
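       /* Editorial sketch (not part of the original file): the arithmetic the two routines
        * above emit. FPSCR bit 21 (FR) selects which 64-byte register bank is the front
        * bank; shifting it down to bit 6 turns it directly into a byte offset of 0 or 0x40
        * from sh4r.fr, and the extra NOT in load_xf_bank() picks the opposite (back/XF) bank.
        */
       static inline uint32_t example_front_bank_offset( uint32_t fpscr )
       {
           return (fpscr >> (21 - 6)) & 0x40;   /* as update_fr_bank() computes */
       }
       static inline uint32_t example_back_bank_offset( uint32_t fpscr )
       {
           return (~fpscr >> (21 - 6)) & 0x40;  /* as load_xf_bank() computes   */
       }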
   219 /**
   220  * Push FPUL (as a 32-bit float) onto the FPU stack
   221  */
   222 static inline void push_fpul( )
   223 {
   224     OP(0xD9); OP(0x45); OP(R_FPUL);
   225 }
   227 /**
   228  * Pop FPUL (as a 32-bit float) from the FPU stack
   229  */
   230 static inline void pop_fpul( )
   231 {
   232     OP(0xD9); OP(0x5D); OP(R_FPUL);
   233 }
   235 /**
   236  * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
   237  * with the location of the current fp bank.
   238  */
   239 static inline void push_fr( int bankreg, int frm ) 
   240 {
   241     OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2);  // FLD.S [bankreg + frm^1*4]
   242 }
   244 /**
   245  * Pop a 32-bit float from the FPU stack and store it back into the fp bank, 
   246  * with bankreg previously loaded with the location of the current fp bank.
   247  */
   248 static inline void pop_fr( int bankreg, int frm )
   249 {
   250     OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
   251 }
   253 /**
   254  * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
   255  * with the location of the current fp bank.
   256  */
   257 static inline void push_dr( int bankreg, int frm )
   258 {
   259     OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
   260 }
   262 static inline void pop_dr( int bankreg, int frm )
   263 {
   264     OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
   265 }
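       /* Editorial note (not part of the original file): the byte offsets implied by the
        * frm^1 trick above. Within each 8-byte pair the two singles are swapped, which
        * lets push_dr()/pop_dr() address the same storage with an un-swapped index,
        * presumably so each pair reads as a little-endian double.
        */
       static inline int example_fr_byte_offset( int frn ) { return (frn ^ 1) << 2; } /* FR0->4, FR1->0, FR2->12, ... */
       static inline int example_dr_byte_offset( int drn ) { return drn << 2; }       /* DR0->0, DR2->8,  DR4->16, ... */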
   267 #if SH4_TRANSLATOR == TARGET_X86_64
   268 /* X86-64 has different calling conventions... */
   270 #define load_ptr( reg, ptr ) load_imm64( reg, (uint64_t)ptr );
   272 /**
   273  * Note: clobbers EAX to make the indirect call - this isn't usually
   274  * a problem since the callee will usually clobber it anyway.
   275  * Size: 12 bytes
   276  */
   277 #define CALL_FUNC0_SIZE 12
   278 static inline void call_func0( void *ptr )
   279 {
   280     load_imm64(R_EAX, (uint64_t)ptr);
   281     CALL_r32(R_EAX);
   282 }
   284 #define CALL_FUNC1_SIZE 14
   285 static inline void call_func1( void *ptr, int arg1 )
   286 {
   287     MOV_r32_r32(arg1, R_EDI);
   288     call_func0(ptr);
   289 }
   291 #define CALL_FUNC2_SIZE 16
   292 static inline void call_func2( void *ptr, int arg1, int arg2 )
   293 {
   294     MOV_r32_r32(arg1, R_EDI);
   295     MOV_r32_r32(arg2, R_ESI);
   296     call_func0(ptr);
   297 }
   299 #define MEM_WRITE_DOUBLE_SIZE 39
   300 /**
   301  * Write a double (64-bit) value into memory, with the first word in arg2a, and
   302  * the second in arg2b
   303  */
   304 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
   305 {
   306 /*
   307     MOV_r32_r32( addr, R_EDI );
   308     MOV_r32_r32( arg2b, R_ESI );
   309     REXW(); SHL_imm8_r32( 32, R_ESI );
   310     REXW(); MOVZX_r16_r32( arg2a, arg2a );
   311     REXW(); OR_r32_r32( arg2a, R_ESI );
   312     call_func0(sh4_write_quad);
   313 */
   314     PUSH_r32(arg2b);
   315     PUSH_r32(addr);
   316     call_func2(sh4_write_long, addr, arg2a);
   317     POP_r32(addr);
   318     POP_r32(arg2b);
   319     ADD_imm8s_r32(4, addr);
   320     call_func2(sh4_write_long, addr, arg2b);
   321 }
   323 #define MEM_READ_DOUBLE_SIZE 35
   324 /**
   325  * Read a double (64-bit) value from memory, writing the first word into arg2a
   326  * and the second into arg2b. The addr must not be in EAX
   327  */
   328 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
   329 {
   330 /*
   331     MOV_r32_r32( addr, R_EDI );
   332     call_func0(sh4_read_quad);
   333     REXW(); MOV_r32_r32( R_EAX, arg2a );
   334     REXW(); MOV_r32_r32( R_EAX, arg2b );
   335     REXW(); SHR_imm8_r32( 32, arg2b );
   336 */
   337     PUSH_r32(addr);
   338     call_func1(sh4_read_long, addr);
   339     POP_r32(R_EDI);
   340     PUSH_r32(R_EAX);
   341     ADD_imm8s_r32(4, R_EDI);
   342     call_func0(sh4_read_long);
   343     MOV_r32_r32(R_EAX, arg2b);
   344     POP_r32(arg2a);
   345 }
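       /* Editorial sketch (not part of the original file): the net effect of
        * MEM_WRITE_DOUBLE above written as plain C, assuming the sh4_write_long(addr, value)
        * signature implied by the call sequence. The PUSH/POP pairs in the emitted code only
        * exist to keep addr and the second operand alive across the first call, since the
        * callee may clobber the argument registers.
        */
       static inline void example_write_double( uint32_t addr, uint32_t word0, uint32_t word1 )
       {
           sh4_write_long( addr, word0 );      /* first word at addr    */
           sh4_write_long( addr + 4, word1 );  /* second word at addr+4 */
       }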
   347 #define EXIT_BLOCK_SIZE 35
   348 /**
   349  * Exit the block to an absolute PC
   350  */
   351 void exit_block( sh4addr_t pc, sh4addr_t endpc )
   352 {
   353     load_imm32( R_ECX, pc );                            // 5
   354     store_spreg( R_ECX, REG_OFFSET(pc) );               // 3
   355     REXW(); MOV_moff32_EAX( xlat_get_lut_entry(pc) );
   356     REXW(); AND_imm8s_r32( 0xFC, R_EAX ); // 3
   357     load_imm32( R_ECX, ((endpc - sh4_x86.block_start_pc)>>1)*sh4_cpu_period ); // 5
   358     ADD_r32_sh4r( R_ECX, REG_OFFSET(slice_cycle) );     // 6
   359     POP_r32(R_EBP);
   360     RET();
   361 }
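       /* Editorial note (not part of the original file): the cycle accounting above.
        * SH4 instructions are 2 bytes each, so (endpc - block_start_pc) >> 1 is the number
        * of instructions in the block, and the cost added to sh4r.slice_cycle is that
        * count multiplied by sh4_cpu_period.
        */
       static inline uint32_t example_block_cycle_cost( uint32_t block_start_pc,
                                                        uint32_t endpc,
                                                        uint32_t cpu_period )
       {
           return ((endpc - block_start_pc) >> 1) * cpu_period;
       }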
   364 /**
   365  * Write the block trailer (exception handling block)
   366  */
   367 void sh4_translate_end_block( sh4addr_t pc ) {
   368     if( sh4_x86.branch_taken == FALSE ) {
   369 	// Didn't exit unconditionally already, so write the termination here
   370 	exit_block( pc, pc );
   371     }
   372     if( sh4_x86.backpatch_posn != 0 ) {
   373 	uint8_t *end_ptr = xlat_output;
   374 	// Exception termination. Jump block for various exception codes:
   375 	load_imm32( R_EDI, EXC_DATA_ADDR_READ );
   376 	JMP_rel8( 33, target1 );
   377 	load_imm32( R_EDI, EXC_DATA_ADDR_WRITE );
   378 	JMP_rel8( 26, target2 );
   379 	load_imm32( R_EDI, EXC_ILLEGAL );
   380 	JMP_rel8( 19, target3 );
   381 	load_imm32( R_EDI, EXC_SLOT_ILLEGAL ); 
   382 	JMP_rel8( 12, target4 );
   383 	load_imm32( R_EDI, EXC_FPU_DISABLED ); 
   384 	JMP_rel8( 5, target5 );
   385 	load_imm32( R_EDI, EXC_SLOT_FPU_DISABLED );
   386 	// target
   387 	JMP_TARGET(target1);
   388 	JMP_TARGET(target2);
   389 	JMP_TARGET(target3);
   390 	JMP_TARGET(target4);
   391 	JMP_TARGET(target5);
   392 	// Raise exception
   393 	load_spreg( R_ECX, REG_OFFSET(pc) );
   394 	ADD_r32_r32( R_EDX, R_ECX );
   395 	ADD_r32_r32( R_EDX, R_ECX );
   396 	store_spreg( R_ECX, REG_OFFSET(pc) );
   397 	MOV_moff32_EAX( &sh4_cpu_period );
   398 	MUL_r32( R_EDX );
   399 	ADD_r32_sh4r( R_EAX, REG_OFFSET(slice_cycle) );
   401 	call_func0( sh4_raise_exception );
   402 	load_spreg( R_EAX, REG_OFFSET(pc) );
   403 	call_func1(xlat_get_code,R_EAX);
   404 	POP_r32(R_EBP);
   405 	RET();
   407 	sh4_x86_do_backpatch( end_ptr );
   408     }
   409 }
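       /* Editorial note (not part of the original file): the rel8 distances in the
        * exception ladder above (33, 26, 19, 12, 5) follow from the encoding sizes: each
        * load_imm32 into EDI is 5 bytes and each JMP rel8 is 2 bytes, so jumping over k
        * remaining (load + jmp) pairs plus the final load skips 7*k + 5 bytes, i.e.
        * 33, 26, 19, 12 and 5 for k = 4..0. On entry EDX is expected to hold the faulting
        * instruction's index within the block (loaded by precheck(), below), which is why
        * pc is advanced by 2*EDX and slice_cycle by EDX*sh4_cpu_period before the
        * exception is raised.
        */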
   411 #else /* SH4_TRANSLATOR == TARGET_X86 */
   413 #define load_ptr( reg, ptr ) load_imm32( reg, (uint32_t)ptr );
   415 /**
   416  * Note: clobbers EAX to make the indirect call - this isn't usually
   417  * a problem since the callee will usually clobber it anyway.
   418  */
   419 #define CALL_FUNC0_SIZE 7
   420 static inline void call_func0( void *ptr )
   421 {
   422     load_imm32(R_EAX, (uint32_t)ptr);
   423     CALL_r32(R_EAX);
   424 }
   426 #define CALL_FUNC1_SIZE 11
   427 static inline void call_func1( void *ptr, int arg1 )
   428 {
   429     PUSH_r32(arg1);
   430     call_func0(ptr);
   431     ADD_imm8s_r32( 4, R_ESP );
   432 }
   434 #define CALL_FUNC2_SIZE 12
   435 static inline void call_func2( void *ptr, int arg1, int arg2 )
   436 {
   437     PUSH_r32(arg2);
   438     PUSH_r32(arg1);
   439     call_func0(ptr);
   440     ADD_imm8s_r32( 8, R_ESP );
   441 }
   443 /**
   444  * Write a double (64-bit) value into memory, with the first word in arg2a, and
   445  * the second in arg2b
   446  * NB: 30 bytes
   447  */
   448 #define MEM_WRITE_DOUBLE_SIZE 30
   449 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
   450 {
   451     ADD_imm8s_r32( 4, addr );
   452     PUSH_r32(arg2b);
   453     PUSH_r32(addr);
   454     ADD_imm8s_r32( -4, addr );
   455     PUSH_r32(arg2a);
   456     PUSH_r32(addr);
   457     call_func0(sh4_write_long);
   458     ADD_imm8s_r32( 8, R_ESP );
   459     call_func0(sh4_write_long);
   460     ADD_imm8s_r32( 8, R_ESP );
   461 }
   463 /**
   464  * Read a double (64-bit) value from memory, writing the first word into arg2a
   465  * and the second into arg2b. The addr must not be in EAX
   466  * NB: 27 bytes
   467  */
   468 #define MEM_READ_DOUBLE_SIZE 27
   469 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
   470 {
   471     PUSH_r32(addr);
   472     call_func0(sh4_read_long);
   473     POP_r32(addr);
   474     PUSH_r32(R_EAX);
   475     ADD_imm8s_r32( 4, addr );
   476     PUSH_r32(addr);
   477     call_func0(sh4_read_long);
   478     ADD_imm8s_r32( 4, R_ESP );
   479     MOV_r32_r32( R_EAX, arg2b );
   480     POP_r32(arg2a);
   481 }
   483 #define EXIT_BLOCK_SIZE 29
   484 /**
   485  * Exit the block to an absolute PC
   486  */
   487 void exit_block( sh4addr_t pc, sh4addr_t endpc )
   488 {
   489     load_imm32( R_ECX, pc );                            // 5
   490     store_spreg( R_ECX, REG_OFFSET(pc) );               // 3
   491     MOV_moff32_EAX( xlat_get_lut_entry(pc) ); // 5
   492     AND_imm8s_r32( 0xFC, R_EAX ); // 3
   493     load_imm32( R_ECX, ((endpc - sh4_x86.block_start_pc)>>1)*sh4_cpu_period ); // 5
   494     ADD_r32_sh4r( R_ECX, REG_OFFSET(slice_cycle) );     // 6
   495     POP_r32(R_EBP);
   496     RET();
   497 }
   499 /**
   500  * Write the block trailer (exception handling block)
   501  */
   502 void sh4_translate_end_block( sh4addr_t pc ) {
   503     if( sh4_x86.branch_taken == FALSE ) {
   504 	// Didn't exit unconditionally already, so write the termination here
   505 	exit_block( pc, pc );
   506     }
   507     if( sh4_x86.backpatch_posn != 0 ) {
   508 	uint8_t *end_ptr = xlat_output;
   509 	// Exception termination. Jump block for various exception codes:
   510 	PUSH_imm32( EXC_DATA_ADDR_READ );
   511 	JMP_rel8( 33, target1 );
   512 	PUSH_imm32( EXC_DATA_ADDR_WRITE );
   513 	JMP_rel8( 26, target2 );
   514 	PUSH_imm32( EXC_ILLEGAL );
   515 	JMP_rel8( 19, target3 );
   516 	PUSH_imm32( EXC_SLOT_ILLEGAL ); 
   517 	JMP_rel8( 12, target4 );
   518 	PUSH_imm32( EXC_FPU_DISABLED ); 
   519 	JMP_rel8( 5, target5 );
   520 	PUSH_imm32( EXC_SLOT_FPU_DISABLED );
   521 	// target
   522 	JMP_TARGET(target1);
   523 	JMP_TARGET(target2);
   524 	JMP_TARGET(target3);
   525 	JMP_TARGET(target4);
   526 	JMP_TARGET(target5);
   527 	// Raise exception
   528 	load_spreg( R_ECX, REG_OFFSET(pc) );
   529 	ADD_r32_r32( R_EDX, R_ECX );
   530 	ADD_r32_r32( R_EDX, R_ECX );
   531 	store_spreg( R_ECX, REG_OFFSET(pc) );
   532 	MOV_moff32_EAX( &sh4_cpu_period );
   533 	MUL_r32( R_EDX );
   534 	ADD_r32_sh4r( R_EAX, REG_OFFSET(slice_cycle) );
   536 	call_func0( sh4_raise_exception );
   537 	ADD_imm8s_r32( 4, R_ESP );
   538 	load_spreg( R_EAX, REG_OFFSET(pc) );
   539 	call_func1(xlat_get_code,R_EAX);
   540 	POP_r32(R_EBP);
   541 	RET();
   543 	sh4_x86_do_backpatch( end_ptr );
   544     }
   545 }
   546 #endif
    548 /* Exception checks - Note that all exception checks will clobber EAX, and precheck() also clobbers EDX */
   549 #define precheck() load_imm32(R_EDX, (pc-sh4_x86.block_start_pc-(sh4_x86.in_delay_slot?2:0))>>1)
   551 #define check_priv( ) \
   552     if( !sh4_x86.priv_checked ) { \
   553 	sh4_x86.priv_checked = TRUE;\
   554 	precheck();\
   555 	load_spreg( R_EAX, R_SR );\
   556 	AND_imm32_r32( SR_MD, R_EAX );\
   557 	if( sh4_x86.in_delay_slot ) {\
   558 	    JE_exit( EXIT_SLOT_ILLEGAL );\
   559 	} else {\
   560 	    JE_exit( EXIT_ILLEGAL );\
   561 	}\
   562     }\
   565 static void check_priv_no_precheck()
   566 {
   567     if( !sh4_x86.priv_checked ) {
   568 	sh4_x86.priv_checked = TRUE;
   569 	load_spreg( R_EAX, R_SR );
   570 	AND_imm32_r32( SR_MD, R_EAX );
   571 	if( sh4_x86.in_delay_slot ) {
   572 	    JE_exit( EXIT_SLOT_ILLEGAL );
   573 	} else {
   574 	    JE_exit( EXIT_ILLEGAL );
   575 	}
   576     }
   577 }
   579 #define check_fpuen( ) \
   580     if( !sh4_x86.fpuen_checked ) {\
   581 	sh4_x86.fpuen_checked = TRUE;\
   582 	precheck();\
   583 	load_spreg( R_EAX, R_SR );\
   584 	AND_imm32_r32( SR_FD, R_EAX );\
   585 	if( sh4_x86.in_delay_slot ) {\
   586 	    JNE_exit(EXIT_SLOT_FPU_DISABLED);\
   587 	} else {\
   588 	    JNE_exit(EXIT_FPU_DISABLED);\
   589 	}\
   590     }
   592 static void check_fpuen_no_precheck()
   593 {
   594     if( !sh4_x86.fpuen_checked ) {
   595 	sh4_x86.fpuen_checked = TRUE;
   596 	load_spreg( R_EAX, R_SR );
   597 	AND_imm32_r32( SR_FD, R_EAX );
   598 	if( sh4_x86.in_delay_slot ) {
   599 	    JNE_exit(EXIT_SLOT_FPU_DISABLED);
   600 	} else {
   601 	    JNE_exit(EXIT_FPU_DISABLED);
   602 	}
   603     }
   605 }
   607 static void check_ralign16( int x86reg )
   608 {
   609     TEST_imm32_r32( 0x00000001, x86reg );
   610     JNE_exit(EXIT_DATA_ADDR_READ);
   611 }
   613 static void check_walign16( int x86reg )
   614 {
   615     TEST_imm32_r32( 0x00000001, x86reg );
   616     JNE_exit(EXIT_DATA_ADDR_WRITE);
   617 }
   619 static void check_ralign32( int x86reg )
   620 {
   621     TEST_imm32_r32( 0x00000003, x86reg );
   622     JNE_exit(EXIT_DATA_ADDR_READ);
   623 }
   624 static void check_walign32( int x86reg )
   625 {
   626     TEST_imm32_r32( 0x00000003, x86reg );
   627     JNE_exit(EXIT_DATA_ADDR_WRITE);
   628 }
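       /* Editorial sketch (not part of the original file): the alignment checks above just
        * test the low address bits -- 16-bit accesses must be 2-byte aligned and 32-bit
        * accesses 4-byte aligned -- and branch to the data-address-error exits on failure.
        */
       static inline int example_is_aligned( uint32_t addr, uint32_t access_size )
       {
           return (addr & (access_size - 1)) == 0;  /* access_size is 2 or 4 here */
       }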
   630 #define UNDEF()
   631 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
   632 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
   633 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
   634 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
   635 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
   636 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
   637 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
   639 #define SLOTILLEGAL() precheck(); JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
   643 /**
    644  * Emit the 'start of block' assembly. Sets up the stack frame and saves
    645  * SI/DI as required
   646  */
   647 void sh4_translate_begin_block( sh4addr_t pc ) 
   648 {
   649     PUSH_r32(R_EBP);
   650     /* mov &sh4r, ebp */
   651     load_ptr( R_EBP, &sh4r );
   653     sh4_x86.in_delay_slot = FALSE;
   654     sh4_x86.priv_checked = FALSE;
   655     sh4_x86.fpuen_checked = FALSE;
   656     sh4_x86.branch_taken = FALSE;
   657     sh4_x86.backpatch_posn = 0;
   658     sh4_x86.block_start_pc = pc;
   659     sh4_x86.tstate = TSTATE_NONE;
   660 }
   662 /**
   663  * Exit the block with sh4r.pc already written
   664  * Bytes: 15
   665  */
    666 void exit_block_pcset( sh4addr_t pc )
   667 {
   668     load_imm32( R_ECX, ((pc - sh4_x86.block_start_pc)>>1)*sh4_cpu_period ); // 5
   669     ADD_r32_sh4r( R_ECX, REG_OFFSET(slice_cycle) );    // 6
   670     load_spreg( R_EAX, REG_OFFSET(pc) );
   671     call_func1(xlat_get_code,R_EAX);
   672     POP_r32(R_EBP);
   673     RET();
   674 }
   676 extern uint16_t *sh4_icache;
   677 extern uint32_t sh4_icache_addr;
   679 /**
   680  * Translate a single instruction. Delayed branches are handled specially
    681  * by translating both branch and delayed instruction as a single unit (as
    682  * the delay-slot instruction executes before the branch takes effect).
    683  *
    684  * @return true if the instruction marks the end of a basic block
    685  * (eg a branch or another instruction that must terminate the block)
   686  */
   687 uint32_t sh4_translate_instruction( sh4addr_t pc )
   688 {
   689     uint32_t ir;
   690     /* Read instruction */
   691     uint32_t pageaddr = pc >> 12;
   692     if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
   693 	ir = sh4_icache[(pc&0xFFF)>>1];
   694     } else {
   695 	sh4_icache = (uint16_t *)mem_get_page(pc);
   696 	if( ((uintptr_t)sh4_icache) < MAX_IO_REGIONS ) {
   697 	    /* If someone's actually been so daft as to try to execute out of an IO
   698 	     * region, fallback on the full-blown memory read
   699 	     */
   700 	    sh4_icache = NULL;
   701 	    ir = sh4_read_word(pc);
   702 	} else {
   703 	    sh4_icache_addr = pageaddr;
   704 	    ir = sh4_icache[(pc&0xFFF)>>1];
   705 	}
   706     }
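           /* Editorial note (not part of the original file): the fast path above keeps a
            * pointer to the most recently fetched 4KB instruction page in
            * sh4_icache/sh4_icache_addr, so consecutive fetches from the same page cost a
            * single array index; only a page miss, or a page that resolves to an I/O
            * region, falls back to the full sh4_read_word() lookup. */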
   708         switch( (ir&0xF000) >> 12 ) {
   709             case 0x0:
   710                 switch( ir&0xF ) {
   711                     case 0x2:
   712                         switch( (ir&0x80) >> 7 ) {
   713                             case 0x0:
   714                                 switch( (ir&0x70) >> 4 ) {
   715                                     case 0x0:
   716                                         { /* STC SR, Rn */
   717                                         uint32_t Rn = ((ir>>8)&0xF); 
   718                                         check_priv();
   719                                         call_func0(sh4_read_sr);
   720                                         store_reg( R_EAX, Rn );
   721                                         sh4_x86.tstate = TSTATE_NONE;
   722                                         }
   723                                         break;
   724                                     case 0x1:
   725                                         { /* STC GBR, Rn */
   726                                         uint32_t Rn = ((ir>>8)&0xF); 
   727                                         load_spreg( R_EAX, R_GBR );
   728                                         store_reg( R_EAX, Rn );
   729                                         }
   730                                         break;
   731                                     case 0x2:
   732                                         { /* STC VBR, Rn */
   733                                         uint32_t Rn = ((ir>>8)&0xF); 
   734                                         check_priv();
   735                                         load_spreg( R_EAX, R_VBR );
   736                                         store_reg( R_EAX, Rn );
   737                                         sh4_x86.tstate = TSTATE_NONE;
   738                                         }
   739                                         break;
   740                                     case 0x3:
   741                                         { /* STC SSR, Rn */
   742                                         uint32_t Rn = ((ir>>8)&0xF); 
   743                                         check_priv();
   744                                         load_spreg( R_EAX, R_SSR );
   745                                         store_reg( R_EAX, Rn );
   746                                         sh4_x86.tstate = TSTATE_NONE;
   747                                         }
   748                                         break;
   749                                     case 0x4:
   750                                         { /* STC SPC, Rn */
   751                                         uint32_t Rn = ((ir>>8)&0xF); 
   752                                         check_priv();
   753                                         load_spreg( R_EAX, R_SPC );
   754                                         store_reg( R_EAX, Rn );
   755                                         sh4_x86.tstate = TSTATE_NONE;
   756                                         }
   757                                         break;
   758                                     default:
   759                                         UNDEF();
   760                                         break;
   761                                 }
   762                                 break;
   763                             case 0x1:
   764                                 { /* STC Rm_BANK, Rn */
   765                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
   766                                 check_priv();
   767                                 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
   768                                 store_reg( R_EAX, Rn );
   769                                 sh4_x86.tstate = TSTATE_NONE;
   770                                 }
   771                                 break;
   772                         }
   773                         break;
   774                     case 0x3:
   775                         switch( (ir&0xF0) >> 4 ) {
   776                             case 0x0:
   777                                 { /* BSRF Rn */
   778                                 uint32_t Rn = ((ir>>8)&0xF); 
   779                                 if( sh4_x86.in_delay_slot ) {
   780                             	SLOTILLEGAL();
   781                                 } else {
   782                             	load_imm32( R_ECX, pc + 4 );
   783                             	store_spreg( R_ECX, R_PR );
   784                             	ADD_sh4r_r32( REG_OFFSET(r[Rn]), R_ECX );
   785                             	store_spreg( R_ECX, REG_OFFSET(pc) );
   786                             	sh4_x86.in_delay_slot = TRUE;
   787                             	sh4_x86.tstate = TSTATE_NONE;
   788                             	sh4_translate_instruction( pc + 2 );
   789                             	exit_block_pcset(pc+2);
   790                             	sh4_x86.branch_taken = TRUE;
   791                             	return 4;
   792                                 }
   793                                 }
   794                                 break;
   795                             case 0x2:
   796                                 { /* BRAF Rn */
   797                                 uint32_t Rn = ((ir>>8)&0xF); 
   798                                 if( sh4_x86.in_delay_slot ) {
   799                             	SLOTILLEGAL();
   800                                 } else {
   801                             	load_reg( R_EAX, Rn );
   802                             	ADD_imm32_r32( pc + 4, R_EAX );
   803                             	store_spreg( R_EAX, REG_OFFSET(pc) );
   804                             	sh4_x86.in_delay_slot = TRUE;
   805                             	sh4_x86.tstate = TSTATE_NONE;
   806                             	sh4_translate_instruction( pc + 2 );
   807                             	exit_block_pcset(pc+2);
   808                             	sh4_x86.branch_taken = TRUE;
   809                             	return 4;
   810                                 }
   811                                 }
   812                                 break;
   813                             case 0x8:
   814                                 { /* PREF @Rn */
   815                                 uint32_t Rn = ((ir>>8)&0xF); 
   816                                 load_reg( R_EAX, Rn );
   817                                 MOV_r32_r32( R_EAX, R_ECX );
   818                                 AND_imm32_r32( 0xFC000000, R_EAX );
   819                                 CMP_imm32_r32( 0xE0000000, R_EAX );
   820                                 JNE_rel8(CALL_FUNC1_SIZE, end);
   821                                 call_func1( sh4_flush_store_queue, R_ECX );
   822                                 JMP_TARGET(end);
   823                                 sh4_x86.tstate = TSTATE_NONE;
   824                                 }
   825                                 break;
   826                             case 0x9:
   827                                 { /* OCBI @Rn */
   828                                 uint32_t Rn = ((ir>>8)&0xF); 
   829                                 }
   830                                 break;
   831                             case 0xA:
   832                                 { /* OCBP @Rn */
   833                                 uint32_t Rn = ((ir>>8)&0xF); 
   834                                 }
   835                                 break;
   836                             case 0xB:
   837                                 { /* OCBWB @Rn */
   838                                 uint32_t Rn = ((ir>>8)&0xF); 
   839                                 }
   840                                 break;
   841                             case 0xC:
   842                                 { /* MOVCA.L R0, @Rn */
   843                                 uint32_t Rn = ((ir>>8)&0xF); 
   844                                 load_reg( R_EAX, 0 );
   845                                 load_reg( R_ECX, Rn );
   846                                 precheck();
   847                                 check_walign32( R_ECX );
   848                                 MEM_WRITE_LONG( R_ECX, R_EAX );
   849                                 sh4_x86.tstate = TSTATE_NONE;
   850                                 }
   851                                 break;
   852                             default:
   853                                 UNDEF();
   854                                 break;
   855                         }
   856                         break;
   857                     case 0x4:
   858                         { /* MOV.B Rm, @(R0, Rn) */
   859                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   860                         load_reg( R_EAX, 0 );
   861                         load_reg( R_ECX, Rn );
   862                         ADD_r32_r32( R_EAX, R_ECX );
   863                         load_reg( R_EAX, Rm );
   864                         MEM_WRITE_BYTE( R_ECX, R_EAX );
   865                         sh4_x86.tstate = TSTATE_NONE;
   866                         }
   867                         break;
   868                     case 0x5:
   869                         { /* MOV.W Rm, @(R0, Rn) */
   870                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   871                         load_reg( R_EAX, 0 );
   872                         load_reg( R_ECX, Rn );
   873                         ADD_r32_r32( R_EAX, R_ECX );
   874                         precheck();
   875                         check_walign16( R_ECX );
   876                         load_reg( R_EAX, Rm );
   877                         MEM_WRITE_WORD( R_ECX, R_EAX );
   878                         sh4_x86.tstate = TSTATE_NONE;
   879                         }
   880                         break;
   881                     case 0x6:
   882                         { /* MOV.L Rm, @(R0, Rn) */
   883                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   884                         load_reg( R_EAX, 0 );
   885                         load_reg( R_ECX, Rn );
   886                         ADD_r32_r32( R_EAX, R_ECX );
   887                         precheck();
   888                         check_walign32( R_ECX );
   889                         load_reg( R_EAX, Rm );
   890                         MEM_WRITE_LONG( R_ECX, R_EAX );
   891                         sh4_x86.tstate = TSTATE_NONE;
   892                         }
   893                         break;
   894                     case 0x7:
   895                         { /* MUL.L Rm, Rn */
   896                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
   897                         load_reg( R_EAX, Rm );
   898                         load_reg( R_ECX, Rn );
   899                         MUL_r32( R_ECX );
   900                         store_spreg( R_EAX, R_MACL );
   901                         sh4_x86.tstate = TSTATE_NONE;
   902                         }
   903                         break;
   904                     case 0x8:
   905                         switch( (ir&0xFF0) >> 4 ) {
   906                             case 0x0:
   907                                 { /* CLRT */
   908                                 CLC();
   909                                 SETC_t();
   910                                 sh4_x86.tstate = TSTATE_C;
   911                                 }
   912                                 break;
   913                             case 0x1:
   914                                 { /* SETT */
   915                                 STC();
   916                                 SETC_t();
   917                                 sh4_x86.tstate = TSTATE_C;
   918                                 }
   919                                 break;
   920                             case 0x2:
   921                                 { /* CLRMAC */
   922                                 XOR_r32_r32(R_EAX, R_EAX);
   923                                 store_spreg( R_EAX, R_MACL );
   924                                 store_spreg( R_EAX, R_MACH );
   925                                 sh4_x86.tstate = TSTATE_NONE;
   926                                 }
   927                                 break;
   928                             case 0x3:
   929                                 { /* LDTLB */
   930                                 }
   931                                 break;
   932                             case 0x4:
   933                                 { /* CLRS */
   934                                 CLC();
   935                                 SETC_sh4r(R_S);
   936                                 sh4_x86.tstate = TSTATE_C;
   937                                 }
   938                                 break;
   939                             case 0x5:
   940                                 { /* SETS */
   941                                 STC();
   942                                 SETC_sh4r(R_S);
   943                                 sh4_x86.tstate = TSTATE_C;
   944                                 }
   945                                 break;
   946                             default:
   947                                 UNDEF();
   948                                 break;
   949                         }
   950                         break;
   951                     case 0x9:
   952                         switch( (ir&0xF0) >> 4 ) {
   953                             case 0x0:
   954                                 { /* NOP */
   955                                 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
   956                                 }
   957                                 break;
   958                             case 0x1:
   959                                 { /* DIV0U */
   960                                 XOR_r32_r32( R_EAX, R_EAX );
   961                                 store_spreg( R_EAX, R_Q );
   962                                 store_spreg( R_EAX, R_M );
   963                                 store_spreg( R_EAX, R_T );
   964                                 sh4_x86.tstate = TSTATE_C; // works for DIV1
   965                                 }
   966                                 break;
   967                             case 0x2:
   968                                 { /* MOVT Rn */
   969                                 uint32_t Rn = ((ir>>8)&0xF); 
   970                                 load_spreg( R_EAX, R_T );
   971                                 store_reg( R_EAX, Rn );
   972                                 }
   973                                 break;
   974                             default:
   975                                 UNDEF();
   976                                 break;
   977                         }
   978                         break;
   979                     case 0xA:
   980                         switch( (ir&0xF0) >> 4 ) {
   981                             case 0x0:
   982                                 { /* STS MACH, Rn */
   983                                 uint32_t Rn = ((ir>>8)&0xF); 
   984                                 load_spreg( R_EAX, R_MACH );
   985                                 store_reg( R_EAX, Rn );
   986                                 }
   987                                 break;
   988                             case 0x1:
   989                                 { /* STS MACL, Rn */
   990                                 uint32_t Rn = ((ir>>8)&0xF); 
   991                                 load_spreg( R_EAX, R_MACL );
   992                                 store_reg( R_EAX, Rn );
   993                                 }
   994                                 break;
   995                             case 0x2:
   996                                 { /* STS PR, Rn */
   997                                 uint32_t Rn = ((ir>>8)&0xF); 
   998                                 load_spreg( R_EAX, R_PR );
   999                                 store_reg( R_EAX, Rn );
  1000                                 }
  1001                                 break;
  1002                             case 0x3:
  1003                                 { /* STC SGR, Rn */
  1004                                 uint32_t Rn = ((ir>>8)&0xF); 
  1005                                 check_priv();
  1006                                 load_spreg( R_EAX, R_SGR );
  1007                                 store_reg( R_EAX, Rn );
  1008                                 sh4_x86.tstate = TSTATE_NONE;
  1009                                 }
  1010                                 break;
  1011                             case 0x5:
  1012                                 { /* STS FPUL, Rn */
  1013                                 uint32_t Rn = ((ir>>8)&0xF); 
  1014                                 load_spreg( R_EAX, R_FPUL );
  1015                                 store_reg( R_EAX, Rn );
  1016                                 }
  1017                                 break;
  1018                             case 0x6:
  1019                                 { /* STS FPSCR, Rn */
  1020                                 uint32_t Rn = ((ir>>8)&0xF); 
  1021                                 load_spreg( R_EAX, R_FPSCR );
  1022                                 store_reg( R_EAX, Rn );
  1023                                 }
  1024                                 break;
  1025                             case 0xF:
  1026                                 { /* STC DBR, Rn */
  1027                                 uint32_t Rn = ((ir>>8)&0xF); 
  1028                                 check_priv();
  1029                                 load_spreg( R_EAX, R_DBR );
  1030                                 store_reg( R_EAX, Rn );
  1031                                 sh4_x86.tstate = TSTATE_NONE;
  1032                                 }
  1033                                 break;
  1034                             default:
  1035                                 UNDEF();
  1036                                 break;
  1037                         }
  1038                         break;
  1039                     case 0xB:
  1040                         switch( (ir&0xFF0) >> 4 ) {
  1041                             case 0x0:
  1042                                 { /* RTS */
  1043                                 if( sh4_x86.in_delay_slot ) {
  1044                             	SLOTILLEGAL();
  1045                                 } else {
  1046                             	load_spreg( R_ECX, R_PR );
  1047                             	store_spreg( R_ECX, REG_OFFSET(pc) );
  1048                             	sh4_x86.in_delay_slot = TRUE;
  1049                             	sh4_translate_instruction(pc+2);
  1050                             	exit_block_pcset(pc+2);
  1051                             	sh4_x86.branch_taken = TRUE;
  1052                             	return 4;
  1053                                 }
  1054                                 }
  1055                                 break;
  1056                             case 0x1:
  1057                                 { /* SLEEP */
  1058                                 check_priv();
  1059                                 call_func0( sh4_sleep );
  1060                                 sh4_x86.tstate = TSTATE_NONE;
  1061                                 sh4_x86.in_delay_slot = FALSE;
  1062                                 return 2;
  1063                                 }
  1064                                 break;
  1065                             case 0x2:
  1066                                 { /* RTE */
  1067                                 if( sh4_x86.in_delay_slot ) {
  1068                             	SLOTILLEGAL();
  1069                                 } else {
  1070                             	check_priv();
  1071                             	load_spreg( R_ECX, R_SPC );
  1072                             	store_spreg( R_ECX, REG_OFFSET(pc) );
  1073                             	load_spreg( R_EAX, R_SSR );
  1074                             	call_func1( sh4_write_sr, R_EAX );
  1075                             	sh4_x86.in_delay_slot = TRUE;
  1076                             	sh4_x86.priv_checked = FALSE;
  1077                             	sh4_x86.fpuen_checked = FALSE;
  1078                             	sh4_x86.tstate = TSTATE_NONE;
  1079                             	sh4_translate_instruction(pc+2);
  1080                             	exit_block_pcset(pc+2);
  1081                             	sh4_x86.branch_taken = TRUE;
  1082                             	return 4;
  1083                                 }
  1084                                 }
  1085                                 break;
  1086                             default:
  1087                                 UNDEF();
  1088                                 break;
  1089                         }
  1090                         break;
  1091                     case 0xC:
  1092                         { /* MOV.B @(R0, Rm), Rn */
  1093                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1094                         load_reg( R_EAX, 0 );
  1095                         load_reg( R_ECX, Rm );
  1096                         ADD_r32_r32( R_EAX, R_ECX );
  1097                         MEM_READ_BYTE( R_ECX, R_EAX );
  1098                         store_reg( R_EAX, Rn );
  1099                         sh4_x86.tstate = TSTATE_NONE;
  1100                         }
  1101                         break;
  1102                     case 0xD:
  1103                         { /* MOV.W @(R0, Rm), Rn */
  1104                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1105                         load_reg( R_EAX, 0 );
  1106                         load_reg( R_ECX, Rm );
  1107                         ADD_r32_r32( R_EAX, R_ECX );
  1108                         precheck();
  1109                         check_ralign16( R_ECX );
  1110                         MEM_READ_WORD( R_ECX, R_EAX );
  1111                         store_reg( R_EAX, Rn );
  1112                         sh4_x86.tstate = TSTATE_NONE;
  1113                         }
  1114                         break;
  1115                     case 0xE:
  1116                         { /* MOV.L @(R0, Rm), Rn */
  1117                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1118                         load_reg( R_EAX, 0 );
  1119                         load_reg( R_ECX, Rm );
  1120                         ADD_r32_r32( R_EAX, R_ECX );
  1121                         precheck();
  1122                         check_ralign32( R_ECX );
  1123                         MEM_READ_LONG( R_ECX, R_EAX );
  1124                         store_reg( R_EAX, Rn );
  1125                         sh4_x86.tstate = TSTATE_NONE;
  1126                         }
  1127                         break;
  1128                     case 0xF:
  1129                         { /* MAC.L @Rm+, @Rn+ */
  1130                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1131                         load_reg( R_ECX, Rm );
  1132                         precheck();
  1133                         check_ralign32( R_ECX );
  1134                         load_reg( R_ECX, Rn );
  1135                         check_ralign32( R_ECX );
  1136                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
  1137                         MEM_READ_LONG( R_ECX, R_EAX );
  1138                         PUSH_r32( R_EAX );
  1139                         load_reg( R_ECX, Rm );
  1140                         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
  1141                         MEM_READ_LONG( R_ECX, R_EAX );
  1142                         POP_r32( R_ECX );
  1143                         IMUL_r32( R_ECX );
  1144                         ADD_r32_sh4r( R_EAX, R_MACL );
  1145                         ADC_r32_sh4r( R_EDX, R_MACH );
  1147                         load_spreg( R_ECX, R_S );
  1148                         TEST_r32_r32(R_ECX, R_ECX);
  1149                         JE_rel8( CALL_FUNC0_SIZE, nosat );
  1150                         call_func0( signsat48 );
  1151                         JMP_TARGET( nosat );
  1152                         sh4_x86.tstate = TSTATE_NONE;
  1153                         }
  1154                         break;
  1155                     default:
  1156                         UNDEF();
  1157                         break;
  1158                 }
  1159                 break;
  1160             case 0x1:
  1161                 { /* MOV.L Rm, @(disp, Rn) */
  1162                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
  1163                 load_reg( R_ECX, Rn );
  1164                 load_reg( R_EAX, Rm );
  1165                 ADD_imm32_r32( disp, R_ECX );
  1166                 precheck();
  1167                 check_walign32( R_ECX );
  1168                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1169                 sh4_x86.tstate = TSTATE_NONE;
  1170                 }
  1171                 break;
  1172             case 0x2:
  1173                 switch( ir&0xF ) {
  1174                     case 0x0:
  1175                         { /* MOV.B Rm, @Rn */
  1176                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1177                         load_reg( R_EAX, Rm );
  1178                         load_reg( R_ECX, Rn );
  1179                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  1180                         sh4_x86.tstate = TSTATE_NONE;
  1181                         }
  1182                         break;
  1183                     case 0x1:
  1184                         { /* MOV.W Rm, @Rn */
  1185                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1186                         load_reg( R_ECX, Rn );
  1187                         precheck();
  1188                         check_walign16( R_ECX );
  1189                         load_reg( R_EAX, Rm );
  1190                         MEM_WRITE_WORD( R_ECX, R_EAX );
  1191                         sh4_x86.tstate = TSTATE_NONE;
  1192                         }
  1193                         break;
  1194                     case 0x2:
  1195                         { /* MOV.L Rm, @Rn */
  1196                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1197                         load_reg( R_EAX, Rm );
  1198                         load_reg( R_ECX, Rn );
  1199                         precheck();
  1200                         check_walign32(R_ECX);
  1201                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1202                         sh4_x86.tstate = TSTATE_NONE;
  1203                         }
  1204                         break;
  1205                     case 0x4:
  1206                         { /* MOV.B Rm, @-Rn */
  1207                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1208                         load_reg( R_EAX, Rm );
  1209                         load_reg( R_ECX, Rn );
  1210                         ADD_imm8s_r32( -1, R_ECX );
  1211                         store_reg( R_ECX, Rn );
  1212                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  1213                         sh4_x86.tstate = TSTATE_NONE;
  1214                         }
  1215                         break;
  1216                     case 0x5:
  1217                         { /* MOV.W Rm, @-Rn */
  1218                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1219                         load_reg( R_ECX, Rn );
  1220                         precheck();
  1221                         check_walign16( R_ECX );
  1222                         load_reg( R_EAX, Rm );
  1223                         ADD_imm8s_r32( -2, R_ECX );
  1224                         store_reg( R_ECX, Rn );
  1225                         MEM_WRITE_WORD( R_ECX, R_EAX );
  1226                         sh4_x86.tstate = TSTATE_NONE;
  1227                         }
  1228                         break;
  1229                     case 0x6:
  1230                         { /* MOV.L Rm, @-Rn */
  1231                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1232                         load_reg( R_EAX, Rm );
  1233                         load_reg( R_ECX, Rn );
  1234                         precheck();
  1235                         check_walign32( R_ECX );
  1236                         ADD_imm8s_r32( -4, R_ECX );
  1237                         store_reg( R_ECX, Rn );
  1238                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1239                         sh4_x86.tstate = TSTATE_NONE;
  1240                         }
  1241                         break;
  1242                     case 0x7:
  1243                         { /* DIV0S Rm, Rn */
  1244                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1245                         load_reg( R_EAX, Rm );
  1246                         load_reg( R_ECX, Rn );
  1247                         SHR_imm8_r32( 31, R_EAX );
  1248                         SHR_imm8_r32( 31, R_ECX );
  1249                         store_spreg( R_EAX, R_M );
  1250                         store_spreg( R_ECX, R_Q );
  1251                         CMP_r32_r32( R_EAX, R_ECX );
  1252                         SETNE_t();
  1253                         sh4_x86.tstate = TSTATE_NE;
  1254                         }
  1255                         break;
  1256                     case 0x8:
  1257                         { /* TST Rm, Rn */
  1258                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1259                         load_reg( R_EAX, Rm );
  1260                         load_reg( R_ECX, Rn );
  1261                         TEST_r32_r32( R_EAX, R_ECX );
  1262                         SETE_t();
  1263                         sh4_x86.tstate = TSTATE_E;
  1264                         }
  1265                         break;
  1266                     case 0x9:
  1267                         { /* AND Rm, Rn */
  1268                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1269                         load_reg( R_EAX, Rm );
  1270                         load_reg( R_ECX, Rn );
  1271                         AND_r32_r32( R_EAX, R_ECX );
  1272                         store_reg( R_ECX, Rn );
  1273                         sh4_x86.tstate = TSTATE_NONE;
  1274                         }
  1275                         break;
  1276                     case 0xA:
  1277                         { /* XOR Rm, Rn */
  1278                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1279                         load_reg( R_EAX, Rm );
  1280                         load_reg( R_ECX, Rn );
  1281                         XOR_r32_r32( R_EAX, R_ECX );
  1282                         store_reg( R_ECX, Rn );
  1283                         sh4_x86.tstate = TSTATE_NONE;
  1284                         }
  1285                         break;
  1286                     case 0xB:
  1287                         { /* OR Rm, Rn */
  1288                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1289                         load_reg( R_EAX, Rm );
  1290                         load_reg( R_ECX, Rn );
  1291                         OR_r32_r32( R_EAX, R_ECX );
  1292                         store_reg( R_ECX, Rn );
  1293                         sh4_x86.tstate = TSTATE_NONE;
  1294                         }
  1295                         break;
  1296                     case 0xC:
  1297                         { /* CMP/STR Rm, Rn */
  1298                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1299                         load_reg( R_EAX, Rm );
  1300                         load_reg( R_ECX, Rn );
  1301                         XOR_r32_r32( R_ECX, R_EAX );
  1302                         TEST_r8_r8( R_AL, R_AL );
  1303                         JE_rel8(13, target1);
  1304                         TEST_r8_r8( R_AH, R_AH ); // 2
  1305                         JE_rel8(9, target2);
  1306                         SHR_imm8_r32( 16, R_EAX ); // 3
  1307                         TEST_r8_r8( R_AL, R_AL ); // 2
  1308                         JE_rel8(2, target3);
  1309                         TEST_r8_r8( R_AH, R_AH ); // 2
  1310                         JMP_TARGET(target1);
  1311                         JMP_TARGET(target2);
  1312                         JMP_TARGET(target3);
  1313                         SETE_t();
  1314                         sh4_x86.tstate = TSTATE_E;
  1315                         }
  1316                         break;
  1317                     case 0xD:
  1318                         { /* XTRCT Rm, Rn */
  1319                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1320                         load_reg( R_EAX, Rm );
  1321                         load_reg( R_ECX, Rn );
  1322                         SHL_imm8_r32( 16, R_EAX );
  1323                         SHR_imm8_r32( 16, R_ECX );
  1324                         OR_r32_r32( R_EAX, R_ECX );
  1325                         store_reg( R_ECX, Rn );
  1326                         sh4_x86.tstate = TSTATE_NONE;
  1327                         }
  1328                         break;
  1329                     case 0xE:
  1330                         { /* MULU.W Rm, Rn */
  1331                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1332                         load_reg16u( R_EAX, Rm );
  1333                         load_reg16u( R_ECX, Rn );
  1334                         MUL_r32( R_ECX );
  1335                         store_spreg( R_EAX, R_MACL );
  1336                         sh4_x86.tstate = TSTATE_NONE;
  1337                         }
  1338                         break;
  1339                     case 0xF:
  1340                         { /* MULS.W Rm, Rn */
  1341                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1342                         load_reg16s( R_EAX, Rm );
  1343                         load_reg16s( R_ECX, Rn );
  1344                         MUL_r32( R_ECX );
  1345                         store_spreg( R_EAX, R_MACL );
  1346                         sh4_x86.tstate = TSTATE_NONE;
  1347                         }
  1348                         break;
  1349                     default:
  1350                         UNDEF();
  1351                         break;
  1352                 }
  1353                 break;
  1354             case 0x3:
  1355                 switch( ir&0xF ) {
  1356                     case 0x0:
  1357                         { /* CMP/EQ Rm, Rn */
  1358                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1359                         load_reg( R_EAX, Rm );
  1360                         load_reg( R_ECX, Rn );
  1361                         CMP_r32_r32( R_EAX, R_ECX );
  1362                         SETE_t();
  1363                         sh4_x86.tstate = TSTATE_E;
  1364                         }
  1365                         break;
  1366                     case 0x2:
  1367                         { /* CMP/HS Rm, Rn */
  1368                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1369                         load_reg( R_EAX, Rm );
  1370                         load_reg( R_ECX, Rn );
  1371                         CMP_r32_r32( R_EAX, R_ECX );
  1372                         SETAE_t();
  1373                         sh4_x86.tstate = TSTATE_AE;
  1374                         }
  1375                         break;
  1376                     case 0x3:
  1377                         { /* CMP/GE Rm, Rn */
  1378                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1379                         load_reg( R_EAX, Rm );
  1380                         load_reg( R_ECX, Rn );
  1381                         CMP_r32_r32( R_EAX, R_ECX );
  1382                         SETGE_t();
  1383                         sh4_x86.tstate = TSTATE_GE;
  1384                         }
  1385                         break;
  1386                     case 0x4:
  1387                         { /* DIV1 Rm, Rn */
  1388                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
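                                /* DIV1 performs one step of the SH4's 1-bit non-restoring division:
                                 * T is rotated into the low bit of Rn, Rm is added or subtracted
                                 * depending on whether Q == M, and the new Q and T are derived from
                                 * the resulting carry. The LDC_t() reload of T into the host carry
                                 * flag is skipped when the previous instruction already left T there
                                 * (sh4_x86.tstate == TSTATE_C).
                                 */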
  1389                         load_spreg( R_ECX, R_M );
  1390                         load_reg( R_EAX, Rn );
  1391                         if( sh4_x86.tstate != TSTATE_C ) {
  1392                     	LDC_t();
  1393                         }
  1394                         RCL1_r32( R_EAX );
  1395                         SETC_r8( R_DL ); // Q'
  1396                         CMP_sh4r_r32( R_Q, R_ECX );
  1397                         JE_rel8(5, mqequal);
  1398                         ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1399                         JMP_rel8(3, end);
  1400                         JMP_TARGET(mqequal);
  1401                         SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
  1402                         JMP_TARGET(end);
  1403                         store_reg( R_EAX, Rn ); // Done with Rn now
  1404                         SETC_r8(R_AL); // tmp1
  1405                         XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
  1406                         XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
  1407                         store_spreg( R_ECX, R_Q );
  1408                         XOR_imm8s_r32( 1, R_AL );   // T = !Q'
  1409                         MOVZX_r8_r32( R_AL, R_EAX );
  1410                         store_spreg( R_EAX, R_T );
  1411                         sh4_x86.tstate = TSTATE_NONE;
  1412                         }
  1413                         break;
  1414                     case 0x5:
  1415                         { /* DMULU.L Rm, Rn */
  1416                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1417                         load_reg( R_EAX, Rm );
  1418                         load_reg( R_ECX, Rn );
  1419                         MUL_r32(R_ECX);
  1420                         store_spreg( R_EDX, R_MACH );
  1421                         store_spreg( R_EAX, R_MACL );    
  1422                         sh4_x86.tstate = TSTATE_NONE;
  1423                         }
  1424                         break;
  1425                     case 0x6:
  1426                         { /* CMP/HI Rm, Rn */
  1427                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1428                         load_reg( R_EAX, Rm );
  1429                         load_reg( R_ECX, Rn );
  1430                         CMP_r32_r32( R_EAX, R_ECX );
  1431                         SETA_t();
  1432                         sh4_x86.tstate = TSTATE_A;
  1433                         }
  1434                         break;
  1435                     case 0x7:
  1436                         { /* CMP/GT Rm, Rn */
  1437                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1438                         load_reg( R_EAX, Rm );
  1439                         load_reg( R_ECX, Rn );
  1440                         CMP_r32_r32( R_EAX, R_ECX );
  1441                         SETG_t();
  1442                         sh4_x86.tstate = TSTATE_G;
  1443                         }
  1444                         break;
  1445                     case 0x8:
  1446                         { /* SUB Rm, Rn */
  1447                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1448                         load_reg( R_EAX, Rm );
  1449                         load_reg( R_ECX, Rn );
  1450                         SUB_r32_r32( R_EAX, R_ECX );
  1451                         store_reg( R_ECX, Rn );
  1452                         sh4_x86.tstate = TSTATE_NONE;
  1453                         }
  1454                         break;
  1455                     case 0xA:
  1456                         { /* SUBC Rm, Rn */
  1457                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1458                         load_reg( R_EAX, Rm );
  1459                         load_reg( R_ECX, Rn );
  1460                         if( sh4_x86.tstate != TSTATE_C ) {
  1461                     	LDC_t();
  1462                         }
  1463                         SBB_r32_r32( R_EAX, R_ECX );
  1464                         store_reg( R_ECX, Rn );
  1465                         SETC_t();
  1466                         sh4_x86.tstate = TSTATE_C;
  1467                         }
  1468                         break;
  1469                     case 0xB:
  1470                         { /* SUBV Rm, Rn */
  1471                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1472                         load_reg( R_EAX, Rm );
  1473                         load_reg( R_ECX, Rn );
  1474                         SUB_r32_r32( R_EAX, R_ECX );
  1475                         store_reg( R_ECX, Rn );
  1476                         SETO_t();
  1477                         sh4_x86.tstate = TSTATE_O;
  1478                         }
  1479                         break;
  1480                     case 0xC:
  1481                         { /* ADD Rm, Rn */
  1482                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1483                         load_reg( R_EAX, Rm );
  1484                         load_reg( R_ECX, Rn );
  1485                         ADD_r32_r32( R_EAX, R_ECX );
  1486                         store_reg( R_ECX, Rn );
  1487                         sh4_x86.tstate = TSTATE_NONE;
  1488                         }
  1489                         break;
  1490                     case 0xD:
  1491                         { /* DMULS.L Rm, Rn */
  1492                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1493                         load_reg( R_EAX, Rm );
  1494                         load_reg( R_ECX, Rn );
  1495                         IMUL_r32(R_ECX);
  1496                         store_spreg( R_EDX, R_MACH );
  1497                         store_spreg( R_EAX, R_MACL );
  1498                         sh4_x86.tstate = TSTATE_NONE;
  1499                         }
  1500                         break;
  1501                     case 0xE:
  1502                         { /* ADDC Rm, Rn */
  1503                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
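                                /* ADDC computes Rn = Rn + Rm + T and returns the carry in T. The
                                 * translator reuses the host carry flag directly: when the previous
                                 * instruction already left T in the carry (tstate == TSTATE_C) the
                                 * LDC_t() reload is unnecessary, and ADC/SETC do the rest.
                                 */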
  1504                         if( sh4_x86.tstate != TSTATE_C ) {
  1505                     	LDC_t();
  1506                         }
  1507                         load_reg( R_EAX, Rm );
  1508                         load_reg( R_ECX, Rn );
  1509                         ADC_r32_r32( R_EAX, R_ECX );
  1510                         store_reg( R_ECX, Rn );
  1511                         SETC_t();
  1512                         sh4_x86.tstate = TSTATE_C;
  1513                         }
  1514                         break;
  1515                     case 0xF:
  1516                         { /* ADDV Rm, Rn */
  1517                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  1518                         load_reg( R_EAX, Rm );
  1519                         load_reg( R_ECX, Rn );
  1520                         ADD_r32_r32( R_EAX, R_ECX );
  1521                         store_reg( R_ECX, Rn );
  1522                         SETO_t();
  1523                         sh4_x86.tstate = TSTATE_O;
  1524                         }
  1525                         break;
  1526                     default:
  1527                         UNDEF();
  1528                         break;
  1529                 }
  1530                 break;
  1531             case 0x4:
  1532                 switch( ir&0xF ) {
  1533                     case 0x0:
  1534                         switch( (ir&0xF0) >> 4 ) {
  1535                             case 0x0:
  1536                                 { /* SHLL Rn */
  1537                                 uint32_t Rn = ((ir>>8)&0xF); 
  1538                                 load_reg( R_EAX, Rn );
  1539                                 SHL1_r32( R_EAX );
  1540                                 SETC_t();
  1541                                 store_reg( R_EAX, Rn );
  1542                                 sh4_x86.tstate = TSTATE_C;
  1543                                 }
  1544                                 break;
  1545                             case 0x1:
  1546                                 { /* DT Rn */
  1547                                 uint32_t Rn = ((ir>>8)&0xF); 
  1548                                 load_reg( R_EAX, Rn );
  1549                                 ADD_imm8s_r32( -1, R_EAX );
  1550                                 store_reg( R_EAX, Rn );
  1551                                 SETE_t();
  1552                                 sh4_x86.tstate = TSTATE_E;
  1553                                 }
  1554                                 break;
  1555                             case 0x2:
  1556                                 { /* SHAL Rn */
  1557                                 uint32_t Rn = ((ir>>8)&0xF); 
  1558                                 load_reg( R_EAX, Rn );
  1559                                 SHL1_r32( R_EAX );
  1560                                 SETC_t();
  1561                                 store_reg( R_EAX, Rn );
  1562                                 sh4_x86.tstate = TSTATE_C;
  1563                                 }
  1564                                 break;
  1565                             default:
  1566                                 UNDEF();
  1567                                 break;
  1568                         }
  1569                         break;
  1570                     case 0x1:
  1571                         switch( (ir&0xF0) >> 4 ) {
  1572                             case 0x0:
  1573                                 { /* SHLR Rn */
  1574                                 uint32_t Rn = ((ir>>8)&0xF); 
  1575                                 load_reg( R_EAX, Rn );
  1576                                 SHR1_r32( R_EAX );
  1577                                 SETC_t();
  1578                                 store_reg( R_EAX, Rn );
  1579                                 sh4_x86.tstate = TSTATE_C;
  1580                                 }
  1581                                 break;
  1582                             case 0x1:
  1583                                 { /* CMP/PZ Rn */
  1584                                 uint32_t Rn = ((ir>>8)&0xF); 
  1585                                 load_reg( R_EAX, Rn );
  1586                                 CMP_imm8s_r32( 0, R_EAX );
  1587                                 SETGE_t();
  1588                                 sh4_x86.tstate = TSTATE_GE;
  1589                                 }
  1590                                 break;
  1591                             case 0x2:
  1592                                 { /* SHAR Rn */
  1593                                 uint32_t Rn = ((ir>>8)&0xF); 
  1594                                 load_reg( R_EAX, Rn );
  1595                                 SAR1_r32( R_EAX );
  1596                                 SETC_t();
  1597                                 store_reg( R_EAX, Rn );
  1598                                 sh4_x86.tstate = TSTATE_C;
  1599                                 }
  1600                                 break;
  1601                             default:
  1602                                 UNDEF();
  1603                                 break;
  1604                         }
  1605                         break;
  1606                     case 0x2:
  1607                         switch( (ir&0xF0) >> 4 ) {
  1608                             case 0x0:
  1609                                 { /* STS.L MACH, @-Rn */
  1610                                 uint32_t Rn = ((ir>>8)&0xF); 
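                                        /* Pre-decrement store: Rn is decremented by 4 and written back
                                         * before the 32-bit value is stored to the new address.
                                         * precheck() and check_walign32() appear to emit the address
                                         * error checks for an unaligned 32-bit write; the same pattern
                                         * is used by all of the STS.L/STC.L @-Rn forms below.
                                         */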
  1611                                 load_reg( R_ECX, Rn );
  1612                                 precheck();
  1613                                 check_walign32( R_ECX );
  1614                                 ADD_imm8s_r32( -4, R_ECX );
  1615                                 store_reg( R_ECX, Rn );
  1616                                 load_spreg( R_EAX, R_MACH );
  1617                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1618                                 sh4_x86.tstate = TSTATE_NONE;
  1619                                 }
  1620                                 break;
  1621                             case 0x1:
  1622                                 { /* STS.L MACL, @-Rn */
  1623                                 uint32_t Rn = ((ir>>8)&0xF); 
  1624                                 load_reg( R_ECX, Rn );
  1625                                 precheck();
  1626                                 check_walign32( R_ECX );
  1627                                 ADD_imm8s_r32( -4, R_ECX );
  1628                                 store_reg( R_ECX, Rn );
  1629                                 load_spreg( R_EAX, R_MACL );
  1630                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1631                                 sh4_x86.tstate = TSTATE_NONE;
  1632                                 }
  1633                                 break;
  1634                             case 0x2:
  1635                                 { /* STS.L PR, @-Rn */
  1636                                 uint32_t Rn = ((ir>>8)&0xF); 
  1637                                 load_reg( R_ECX, Rn );
  1638                                 precheck();
  1639                                 check_walign32( R_ECX );
  1640                                 ADD_imm8s_r32( -4, R_ECX );
  1641                                 store_reg( R_ECX, Rn );
  1642                                 load_spreg( R_EAX, R_PR );
  1643                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1644                                 sh4_x86.tstate = TSTATE_NONE;
  1645                                 }
  1646                                 break;
  1647                             case 0x3:
  1648                                 { /* STC.L SGR, @-Rn */
  1649                                 uint32_t Rn = ((ir>>8)&0xF); 
  1650                                 precheck();
  1651                                 check_priv_no_precheck();
  1652                                 load_reg( R_ECX, Rn );
  1653                                 check_walign32( R_ECX );
  1654                                 ADD_imm8s_r32( -4, R_ECX );
  1655                                 store_reg( R_ECX, Rn );
  1656                                 load_spreg( R_EAX, R_SGR );
  1657                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1658                                 sh4_x86.tstate = TSTATE_NONE;
  1659                                 }
  1660                                 break;
  1661                             case 0x5:
  1662                                 { /* STS.L FPUL, @-Rn */
  1663                                 uint32_t Rn = ((ir>>8)&0xF); 
  1664                                 load_reg( R_ECX, Rn );
  1665                                 precheck();
  1666                                 check_walign32( R_ECX );
  1667                                 ADD_imm8s_r32( -4, R_ECX );
  1668                                 store_reg( R_ECX, Rn );
  1669                                 load_spreg( R_EAX, R_FPUL );
  1670                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1671                                 sh4_x86.tstate = TSTATE_NONE;
  1672                                 }
  1673                                 break;
  1674                             case 0x6:
  1675                                 { /* STS.L FPSCR, @-Rn */
  1676                                 uint32_t Rn = ((ir>>8)&0xF); 
  1677                                 load_reg( R_ECX, Rn );
  1678                                 precheck();
  1679                                 check_walign32( R_ECX );
  1680                                 ADD_imm8s_r32( -4, R_ECX );
  1681                                 store_reg( R_ECX, Rn );
  1682                                 load_spreg( R_EAX, R_FPSCR );
  1683                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1684                                 sh4_x86.tstate = TSTATE_NONE;
  1685                                 }
  1686                                 break;
  1687                             case 0xF:
  1688                                 { /* STC.L DBR, @-Rn */
  1689                                 uint32_t Rn = ((ir>>8)&0xF); 
  1690                                 precheck();
  1691                                 check_priv_no_precheck();
  1692                                 load_reg( R_ECX, Rn );
  1693                                 check_walign32( R_ECX );
  1694                                 ADD_imm8s_r32( -4, R_ECX );
  1695                                 store_reg( R_ECX, Rn );
  1696                                 load_spreg( R_EAX, R_DBR );
  1697                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1698                                 sh4_x86.tstate = TSTATE_NONE;
  1699                                 }
  1700                                 break;
  1701                             default:
  1702                                 UNDEF();
  1703                                 break;
  1704                         }
  1705                         break;
  1706                     case 0x3:
  1707                         switch( (ir&0x80) >> 7 ) {
  1708                             case 0x0:
  1709                                 switch( (ir&0x70) >> 4 ) {
  1710                                     case 0x0:
  1711                                         { /* STC.L SR, @-Rn */
  1712                                         uint32_t Rn = ((ir>>8)&0xF); 
  1713                                         precheck();
  1714                                         check_priv_no_precheck();
  1715                                         call_func0( sh4_read_sr );
  1716                                         load_reg( R_ECX, Rn );
  1717                                         check_walign32( R_ECX );
  1718                                         ADD_imm8s_r32( -4, R_ECX );
  1719                                         store_reg( R_ECX, Rn );
  1720                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1721                                         sh4_x86.tstate = TSTATE_NONE;
  1722                                         }
  1723                                         break;
  1724                                     case 0x1:
  1725                                         { /* STC.L GBR, @-Rn */
  1726                                         uint32_t Rn = ((ir>>8)&0xF); 
  1727                                         load_reg( R_ECX, Rn );
  1728                                         precheck();
  1729                                         check_walign32( R_ECX );
  1730                                         ADD_imm8s_r32( -4, R_ECX );
  1731                                         store_reg( R_ECX, Rn );
  1732                                         load_spreg( R_EAX, R_GBR );
  1733                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1734                                         sh4_x86.tstate = TSTATE_NONE;
  1735                                         }
  1736                                         break;
  1737                                     case 0x2:
  1738                                         { /* STC.L VBR, @-Rn */
  1739                                         uint32_t Rn = ((ir>>8)&0xF); 
  1740                                         precheck();
  1741                                         check_priv_no_precheck();
  1742                                         load_reg( R_ECX, Rn );
  1743                                         check_walign32( R_ECX );
  1744                                         ADD_imm8s_r32( -4, R_ECX );
  1745                                         store_reg( R_ECX, Rn );
  1746                                         load_spreg( R_EAX, R_VBR );
  1747                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1748                                         sh4_x86.tstate = TSTATE_NONE;
  1749                                         }
  1750                                         break;
  1751                                     case 0x3:
  1752                                         { /* STC.L SSR, @-Rn */
  1753                                         uint32_t Rn = ((ir>>8)&0xF); 
  1754                                         precheck();
  1755                                         check_priv_no_precheck();
  1756                                         load_reg( R_ECX, Rn );
  1757                                         check_walign32( R_ECX );
  1758                                         ADD_imm8s_r32( -4, R_ECX );
  1759                                         store_reg( R_ECX, Rn );
  1760                                         load_spreg( R_EAX, R_SSR );
  1761                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1762                                         sh4_x86.tstate = TSTATE_NONE;
  1763                                         }
  1764                                         break;
  1765                                     case 0x4:
  1766                                         { /* STC.L SPC, @-Rn */
  1767                                         uint32_t Rn = ((ir>>8)&0xF); 
  1768                                         precheck();
  1769                                         check_priv_no_precheck();
  1770                                         load_reg( R_ECX, Rn );
  1771                                         check_walign32( R_ECX );
  1772                                         ADD_imm8s_r32( -4, R_ECX );
  1773                                         store_reg( R_ECX, Rn );
  1774                                         load_spreg( R_EAX, R_SPC );
  1775                                         MEM_WRITE_LONG( R_ECX, R_EAX );
  1776                                         sh4_x86.tstate = TSTATE_NONE;
  1777                                         }
  1778                                         break;
  1779                                     default:
  1780                                         UNDEF();
  1781                                         break;
  1782                                 }
  1783                                 break;
  1784                             case 0x1:
  1785                                 { /* STC.L Rm_BANK, @-Rn */
  1786                                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7); 
  1787                                 precheck();
  1788                                 check_priv_no_precheck();
  1789                                 load_reg( R_ECX, Rn );
  1790                                 check_walign32( R_ECX );
  1791                                 ADD_imm8s_r32( -4, R_ECX );
  1792                                 store_reg( R_ECX, Rn );
  1793                                 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
  1794                                 MEM_WRITE_LONG( R_ECX, R_EAX );
  1795                                 sh4_x86.tstate = TSTATE_NONE;
  1796                                 }
  1797                                 break;
  1798                         }
  1799                         break;
  1800                     case 0x4:
  1801                         switch( (ir&0xF0) >> 4 ) {
  1802                             case 0x0:
  1803                                 { /* ROTL Rn */
  1804                                 uint32_t Rn = ((ir>>8)&0xF); 
  1805                                 load_reg( R_EAX, Rn );
  1806                                 ROL1_r32( R_EAX );
  1807                                 store_reg( R_EAX, Rn );
  1808                                 SETC_t();
  1809                                 sh4_x86.tstate = TSTATE_C;
  1810                                 }
  1811                                 break;
  1812                             case 0x2:
  1813                                 { /* ROTCL Rn */
  1814                                 uint32_t Rn = ((ir>>8)&0xF); 
  1815                                 load_reg( R_EAX, Rn );
  1816                                 if( sh4_x86.tstate != TSTATE_C ) {
  1817                             	LDC_t();
  1818                                 }
  1819                                 RCL1_r32( R_EAX );
  1820                                 store_reg( R_EAX, Rn );
  1821                                 SETC_t();
  1822                                 sh4_x86.tstate = TSTATE_C;
  1823                                 }
  1824                                 break;
  1825                             default:
  1826                                 UNDEF();
  1827                                 break;
  1828                         }
  1829                         break;
  1830                     case 0x5:
  1831                         switch( (ir&0xF0) >> 4 ) {
  1832                             case 0x0:
  1833                                 { /* ROTR Rn */
  1834                                 uint32_t Rn = ((ir>>8)&0xF); 
  1835                                 load_reg( R_EAX, Rn );
  1836                                 ROR1_r32( R_EAX );
  1837                                 store_reg( R_EAX, Rn );
  1838                                 SETC_t();
  1839                                 sh4_x86.tstate = TSTATE_C;
  1840                                 }
  1841                                 break;
  1842                             case 0x1:
  1843                                 { /* CMP/PL Rn */
  1844                                 uint32_t Rn = ((ir>>8)&0xF); 
  1845                                 load_reg( R_EAX, Rn );
  1846                                 CMP_imm8s_r32( 0, R_EAX );
  1847                                 SETG_t();
  1848                                 sh4_x86.tstate = TSTATE_G;
  1849                                 }
  1850                                 break;
  1851                             case 0x2:
  1852                                 { /* ROTCR Rn */
  1853                                 uint32_t Rn = ((ir>>8)&0xF); 
  1854                                 load_reg( R_EAX, Rn );
  1855                                 if( sh4_x86.tstate != TSTATE_C ) {
  1856                             	LDC_t();
  1857                                 }
  1858                                 RCR1_r32( R_EAX );
  1859                                 store_reg( R_EAX, Rn );
  1860                                 SETC_t();
  1861                                 sh4_x86.tstate = TSTATE_C;
  1862                                 }
  1863                                 break;
  1864                             default:
  1865                                 UNDEF();
  1866                                 break;
  1867                         }
  1868                         break;
  1869                     case 0x6:
  1870                         switch( (ir&0xF0) >> 4 ) {
  1871                             case 0x0:
  1872                                 { /* LDS.L @Rm+, MACH */
  1873                                 uint32_t Rm = ((ir>>8)&0xF); 
  1874                                 load_reg( R_EAX, Rm );
  1875                                 precheck();
  1876                                 check_ralign32( R_EAX );
  1877                                 MOV_r32_r32( R_EAX, R_ECX );
  1878                                 ADD_imm8s_r32( 4, R_EAX );
  1879                                 store_reg( R_EAX, Rm );
  1880                                 MEM_READ_LONG( R_ECX, R_EAX );
  1881                                 store_spreg( R_EAX, R_MACH );
  1882                                 sh4_x86.tstate = TSTATE_NONE;
  1883                                 }
  1884                                 break;
  1885                             case 0x1:
  1886                                 { /* LDS.L @Rm+, MACL */
  1887                                 uint32_t Rm = ((ir>>8)&0xF); 
  1888                                 load_reg( R_EAX, Rm );
  1889                                 precheck();
  1890                                 check_ralign32( R_EAX );
  1891                                 MOV_r32_r32( R_EAX, R_ECX );
  1892                                 ADD_imm8s_r32( 4, R_EAX );
  1893                                 store_reg( R_EAX, Rm );
  1894                                 MEM_READ_LONG( R_ECX, R_EAX );
  1895                                 store_spreg( R_EAX, R_MACL );
  1896                                 sh4_x86.tstate = TSTATE_NONE;
  1897                                 }
  1898                                 break;
  1899                             case 0x2:
  1900                                 { /* LDS.L @Rm+, PR */
  1901                                 uint32_t Rm = ((ir>>8)&0xF); 
  1902                                 load_reg( R_EAX, Rm );
  1903                                 precheck();
  1904                                 check_ralign32( R_EAX );
  1905                                 MOV_r32_r32( R_EAX, R_ECX );
  1906                                 ADD_imm8s_r32( 4, R_EAX );
  1907                                 store_reg( R_EAX, Rm );
  1908                                 MEM_READ_LONG( R_ECX, R_EAX );
  1909                                 store_spreg( R_EAX, R_PR );
  1910                                 sh4_x86.tstate = TSTATE_NONE;
  1911                                 }
  1912                                 break;
  1913                             case 0x3:
  1914                                 { /* LDC.L @Rm+, SGR */
  1915                                 uint32_t Rm = ((ir>>8)&0xF); 
  1916                                 precheck();
  1917                                 check_priv_no_precheck();
  1918                                 load_reg( R_EAX, Rm );
  1919                                 check_ralign32( R_EAX );
  1920                                 MOV_r32_r32( R_EAX, R_ECX );
  1921                                 ADD_imm8s_r32( 4, R_EAX );
  1922                                 store_reg( R_EAX, Rm );
  1923                                 MEM_READ_LONG( R_ECX, R_EAX );
  1924                                 store_spreg( R_EAX, R_SGR );
  1925                                 sh4_x86.tstate = TSTATE_NONE;
  1926                                 }
  1927                                 break;
  1928                             case 0x5:
  1929                                 { /* LDS.L @Rm+, FPUL */
  1930                                 uint32_t Rm = ((ir>>8)&0xF); 
  1931                                 load_reg( R_EAX, Rm );
  1932                                 precheck();
  1933                                 check_ralign32( R_EAX );
  1934                                 MOV_r32_r32( R_EAX, R_ECX );
  1935                                 ADD_imm8s_r32( 4, R_EAX );
  1936                                 store_reg( R_EAX, Rm );
  1937                                 MEM_READ_LONG( R_ECX, R_EAX );
  1938                                 store_spreg( R_EAX, R_FPUL );
  1939                                 sh4_x86.tstate = TSTATE_NONE;
  1940                                 }
  1941                                 break;
  1942                             case 0x6:
  1943                                 { /* LDS.L @Rm+, FPSCR */
  1944                                 uint32_t Rm = ((ir>>8)&0xF); 
  1945                                 load_reg( R_EAX, Rm );
  1946                                 precheck();
  1947                                 check_ralign32( R_EAX );
  1948                                 MOV_r32_r32( R_EAX, R_ECX );
  1949                                 ADD_imm8s_r32( 4, R_EAX );
  1950                                 store_reg( R_EAX, Rm );
  1951                                 MEM_READ_LONG( R_ECX, R_EAX );
  1952                                 store_spreg( R_EAX, R_FPSCR );
  1953                                 update_fr_bank( R_EAX );
  1954                                 sh4_x86.tstate = TSTATE_NONE;
  1955                                 }
  1956                                 break;
  1957                             case 0xF:
  1958                                 { /* LDC.L @Rm+, DBR */
  1959                                 uint32_t Rm = ((ir>>8)&0xF); 
  1960                                 precheck();
  1961                                 check_priv_no_precheck();
  1962                                 load_reg( R_EAX, Rm );
  1963                                 check_ralign32( R_EAX );
  1964                                 MOV_r32_r32( R_EAX, R_ECX );
  1965                                 ADD_imm8s_r32( 4, R_EAX );
  1966                                 store_reg( R_EAX, Rm );
  1967                                 MEM_READ_LONG( R_ECX, R_EAX );
  1968                                 store_spreg( R_EAX, R_DBR );
  1969                                 sh4_x86.tstate = TSTATE_NONE;
  1970                                 }
  1971                                 break;
  1972                             default:
  1973                                 UNDEF();
  1974                                 break;
  1975                         }
  1976                         break;
  1977                     case 0x7:
  1978                         switch( (ir&0x80) >> 7 ) {
  1979                             case 0x0:
  1980                                 switch( (ir&0x70) >> 4 ) {
  1981                                     case 0x0:
  1982                                         { /* LDC.L @Rm+, SR */
  1983                                         uint32_t Rm = ((ir>>8)&0xF); 
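                                                /* Loading SR can switch privilege level, register banks and
                                                 * the FPU enable bit, so the value is passed through
                                                 * sh4_write_sr() rather than stored directly, and the cached
                                                 * priv_checked/fpuen_checked flags are invalidated. Like the
                                                 * other SR writes it is illegal in a delay slot.
                                                 */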
  1984                                         if( sh4_x86.in_delay_slot ) {
  1985                                     	SLOTILLEGAL();
  1986                                         } else {
  1987                                     	precheck();
  1988                                     	check_priv_no_precheck();
  1989                                     	load_reg( R_EAX, Rm );
  1990                                     	check_ralign32( R_EAX );
  1991                                     	MOV_r32_r32( R_EAX, R_ECX );
  1992                                     	ADD_imm8s_r32( 4, R_EAX );
  1993                                     	store_reg( R_EAX, Rm );
  1994                                     	MEM_READ_LONG( R_ECX, R_EAX );
  1995                                     	call_func1( sh4_write_sr, R_EAX );
  1996                                     	sh4_x86.priv_checked = FALSE;
  1997                                     	sh4_x86.fpuen_checked = FALSE;
  1998                                     	sh4_x86.tstate = TSTATE_NONE;
  1999                                         }
  2000                                         }
  2001                                         break;
  2002                                     case 0x1:
  2003                                         { /* LDC.L @Rm+, GBR */
  2004                                         uint32_t Rm = ((ir>>8)&0xF); 
  2005                                         load_reg( R_EAX, Rm );
  2006                                         precheck();
  2007                                         check_ralign32( R_EAX );
  2008                                         MOV_r32_r32( R_EAX, R_ECX );
  2009                                         ADD_imm8s_r32( 4, R_EAX );
  2010                                         store_reg( R_EAX, Rm );
  2011                                         MEM_READ_LONG( R_ECX, R_EAX );
  2012                                         store_spreg( R_EAX, R_GBR );
  2013                                         sh4_x86.tstate = TSTATE_NONE;
  2014                                         }
  2015                                         break;
  2016                                     case 0x2:
  2017                                         { /* LDC.L @Rm+, VBR */
  2018                                         uint32_t Rm = ((ir>>8)&0xF); 
  2019                                         precheck();
  2020                                         check_priv_no_precheck();
  2021                                         load_reg( R_EAX, Rm );
  2022                                         check_ralign32( R_EAX );
  2023                                         MOV_r32_r32( R_EAX, R_ECX );
  2024                                         ADD_imm8s_r32( 4, R_EAX );
  2025                                         store_reg( R_EAX, Rm );
  2026                                         MEM_READ_LONG( R_ECX, R_EAX );
  2027                                         store_spreg( R_EAX, R_VBR );
  2028                                         sh4_x86.tstate = TSTATE_NONE;
  2029                                         }
  2030                                         break;
  2031                                     case 0x3:
  2032                                         { /* LDC.L @Rm+, SSR */
  2033                                         uint32_t Rm = ((ir>>8)&0xF); 
  2034                                         precheck();
  2035                                         check_priv_no_precheck();
  2036                                         load_reg( R_EAX, Rm );
  2037                                         check_ralign32( R_EAX );
  2038                                         MOV_r32_r32( R_EAX, R_ECX );
  2039                                         ADD_imm8s_r32( 4, R_EAX );
  2040                                         store_reg( R_EAX, Rm );
  2041                                         MEM_READ_LONG( R_ECX, R_EAX );
  2042                                         store_spreg( R_EAX, R_SSR );
  2043                                         sh4_x86.tstate = TSTATE_NONE;
  2044                                         }
  2045                                         break;
  2046                                     case 0x4:
  2047                                         { /* LDC.L @Rm+, SPC */
  2048                                         uint32_t Rm = ((ir>>8)&0xF); 
  2049                                         precheck();
  2050                                         check_priv_no_precheck();
  2051                                         load_reg( R_EAX, Rm );
  2052                                         check_ralign32( R_EAX );
  2053                                         MOV_r32_r32( R_EAX, R_ECX );
  2054                                         ADD_imm8s_r32( 4, R_EAX );
  2055                                         store_reg( R_EAX, Rm );
  2056                                         MEM_READ_LONG( R_ECX, R_EAX );
  2057                                         store_spreg( R_EAX, R_SPC );
  2058                                         sh4_x86.tstate = TSTATE_NONE;
  2059                                         }
  2060                                         break;
  2061                                     default:
  2062                                         UNDEF();
  2063                                         break;
  2064                                 }
  2065                                 break;
  2066                             case 0x1:
  2067                                 { /* LDC.L @Rm+, Rn_BANK */
  2068                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  2069                                 precheck();
  2070                                 check_priv_no_precheck();
  2071                                 load_reg( R_EAX, Rm );
  2072                                 check_ralign32( R_EAX );
  2073                                 MOV_r32_r32( R_EAX, R_ECX );
  2074                                 ADD_imm8s_r32( 4, R_EAX );
  2075                                 store_reg( R_EAX, Rm );
  2076                                 MEM_READ_LONG( R_ECX, R_EAX );
  2077                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  2078                                 sh4_x86.tstate = TSTATE_NONE;
  2079                                 }
  2080                                 break;
  2081                         }
  2082                         break;
  2083                     case 0x8:
  2084                         switch( (ir&0xF0) >> 4 ) {
  2085                             case 0x0:
  2086                                 { /* SHLL2 Rn */
  2087                                 uint32_t Rn = ((ir>>8)&0xF); 
  2088                                 load_reg( R_EAX, Rn );
  2089                                 SHL_imm8_r32( 2, R_EAX );
  2090                                 store_reg( R_EAX, Rn );
  2091                                 sh4_x86.tstate = TSTATE_NONE;
  2092                                 }
  2093                                 break;
  2094                             case 0x1:
  2095                                 { /* SHLL8 Rn */
  2096                                 uint32_t Rn = ((ir>>8)&0xF); 
  2097                                 load_reg( R_EAX, Rn );
  2098                                 SHL_imm8_r32( 8, R_EAX );
  2099                                 store_reg( R_EAX, Rn );
  2100                                 sh4_x86.tstate = TSTATE_NONE;
  2101                                 }
  2102                                 break;
  2103                             case 0x2:
  2104                                 { /* SHLL16 Rn */
  2105                                 uint32_t Rn = ((ir>>8)&0xF); 
  2106                                 load_reg( R_EAX, Rn );
  2107                                 SHL_imm8_r32( 16, R_EAX );
  2108                                 store_reg( R_EAX, Rn );
  2109                                 sh4_x86.tstate = TSTATE_NONE;
  2110                                 }
  2111                                 break;
  2112                             default:
  2113                                 UNDEF();
  2114                                 break;
  2115                         }
  2116                         break;
  2117                     case 0x9:
  2118                         switch( (ir&0xF0) >> 4 ) {
  2119                             case 0x0:
  2120                                 { /* SHLR2 Rn */
  2121                                 uint32_t Rn = ((ir>>8)&0xF); 
  2122                                 load_reg( R_EAX, Rn );
  2123                                 SHR_imm8_r32( 2, R_EAX );
  2124                                 store_reg( R_EAX, Rn );
  2125                                 sh4_x86.tstate = TSTATE_NONE;
  2126                                 }
  2127                                 break;
  2128                             case 0x1:
  2129                                 { /* SHLR8 Rn */
  2130                                 uint32_t Rn = ((ir>>8)&0xF); 
  2131                                 load_reg( R_EAX, Rn );
  2132                                 SHR_imm8_r32( 8, R_EAX );
  2133                                 store_reg( R_EAX, Rn );
  2134                                 sh4_x86.tstate = TSTATE_NONE;
  2135                                 }
  2136                                 break;
  2137                             case 0x2:
  2138                                 { /* SHLR16 Rn */
  2139                                 uint32_t Rn = ((ir>>8)&0xF); 
  2140                                 load_reg( R_EAX, Rn );
  2141                                 SHR_imm8_r32( 16, R_EAX );
  2142                                 store_reg( R_EAX, Rn );
  2143                                 sh4_x86.tstate = TSTATE_NONE;
  2144                                 }
  2145                                 break;
  2146                             default:
  2147                                 UNDEF();
  2148                                 break;
  2149                         }
  2150                         break;
  2151                     case 0xA:
  2152                         switch( (ir&0xF0) >> 4 ) {
  2153                             case 0x0:
  2154                                 { /* LDS Rm, MACH */
  2155                                 uint32_t Rm = ((ir>>8)&0xF); 
  2156                                 load_reg( R_EAX, Rm );
  2157                                 store_spreg( R_EAX, R_MACH );
  2158                                 }
  2159                                 break;
  2160                             case 0x1:
  2161                                 { /* LDS Rm, MACL */
  2162                                 uint32_t Rm = ((ir>>8)&0xF); 
  2163                                 load_reg( R_EAX, Rm );
  2164                                 store_spreg( R_EAX, R_MACL );
  2165                                 }
  2166                                 break;
  2167                             case 0x2:
  2168                                 { /* LDS Rm, PR */
  2169                                 uint32_t Rm = ((ir>>8)&0xF); 
  2170                                 load_reg( R_EAX, Rm );
  2171                                 store_spreg( R_EAX, R_PR );
  2172                                 }
  2173                                 break;
  2174                             case 0x3:
  2175                                 { /* LDC Rm, SGR */
  2176                                 uint32_t Rm = ((ir>>8)&0xF); 
  2177                                 check_priv();
  2178                                 load_reg( R_EAX, Rm );
  2179                                 store_spreg( R_EAX, R_SGR );
  2180                                 sh4_x86.tstate = TSTATE_NONE;
  2181                                 }
  2182                                 break;
  2183                             case 0x5:
  2184                                 { /* LDS Rm, FPUL */
  2185                                 uint32_t Rm = ((ir>>8)&0xF); 
  2186                                 load_reg( R_EAX, Rm );
  2187                                 store_spreg( R_EAX, R_FPUL );
  2188                                 }
  2189                                 break;
  2190                             case 0x6:
  2191                                 { /* LDS Rm, FPSCR */
  2192                                 uint32_t Rm = ((ir>>8)&0xF); 
  2193                                 load_reg( R_EAX, Rm );
  2194                                 store_spreg( R_EAX, R_FPSCR );
  2195                                 update_fr_bank( R_EAX );
  2196                                 sh4_x86.tstate = TSTATE_NONE;
  2197                                 }
  2198                                 break;
  2199                             case 0xF:
  2200                                 { /* LDC Rm, DBR */
  2201                                 uint32_t Rm = ((ir>>8)&0xF); 
  2202                                 check_priv();
  2203                                 load_reg( R_EAX, Rm );
  2204                                 store_spreg( R_EAX, R_DBR );
  2205                                 sh4_x86.tstate = TSTATE_NONE;
  2206                                 }
  2207                                 break;
  2208                             default:
  2209                                 UNDEF();
  2210                                 break;
  2211                         }
  2212                         break;
  2213                     case 0xB:
  2214                         switch( (ir&0xF0) >> 4 ) {
  2215                             case 0x0:
  2216                                 { /* JSR @Rn */
  2217                                 uint32_t Rn = ((ir>>8)&0xF); 
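                                        /* Delayed branch: PR receives the return address (pc+4, the
                                         * instruction after the delay slot), the branch target goes into
                                         * sh4r.pc, the delay-slot instruction at pc+2 is translated
                                         * inline, and the block is closed off with exit_block_pcset().
                                         * A branch inside another delay slot is rejected with
                                         * SLOTILLEGAL().
                                         */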
  2218                                 if( sh4_x86.in_delay_slot ) {
  2219                             	SLOTILLEGAL();
  2220                                 } else {
  2221                             	load_imm32( R_EAX, pc + 4 );
  2222                             	store_spreg( R_EAX, R_PR );
  2223                             	load_reg( R_ECX, Rn );
  2224                             	store_spreg( R_ECX, REG_OFFSET(pc) );
  2225                             	sh4_x86.in_delay_slot = TRUE;
  2226                             	sh4_translate_instruction(pc+2);
  2227                             	exit_block_pcset(pc+2);
  2228                             	sh4_x86.branch_taken = TRUE;
  2229                             	return 4;
  2230                                 }
  2231                                 }
  2232                                 break;
  2233                             case 0x1:
  2234                                 { /* TAS.B @Rn */
  2235                                 uint32_t Rn = ((ir>>8)&0xF); 
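                                        /* TAS.B: read the byte at @Rn, set T if it was zero, then write
                                         * it back with bit 7 set. The second load_reg() re-fetches Rn,
                                         * presumably because R_ECX is clobbered by the memory read call.
                                         */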
  2236                                 load_reg( R_ECX, Rn );
  2237                                 MEM_READ_BYTE( R_ECX, R_EAX );
  2238                                 TEST_r8_r8( R_AL, R_AL );
  2239                                 SETE_t();
  2240                                 OR_imm8_r8( 0x80, R_AL );
  2241                                 load_reg( R_ECX, Rn );
  2242                                 MEM_WRITE_BYTE( R_ECX, R_EAX );
  2243                                 sh4_x86.tstate = TSTATE_NONE;
  2244                                 }
  2245                                 break;
  2246                             case 0x2:
  2247                                 { /* JMP @Rn */
  2248                                 uint32_t Rn = ((ir>>8)&0xF); 
  2249                                 if( sh4_x86.in_delay_slot ) {
  2250                             	SLOTILLEGAL();
  2251                                 } else {
  2252                             	load_reg( R_ECX, Rn );
  2253                             	store_spreg( R_ECX, REG_OFFSET(pc) );
  2254                             	sh4_x86.in_delay_slot = TRUE;
  2255                             	sh4_translate_instruction(pc+2);
  2256                             	exit_block_pcset(pc+2);
  2257                             	sh4_x86.branch_taken = TRUE;
  2258                             	return 4;
  2259                                 }
  2260                                 }
  2261                                 break;
  2262                             default:
  2263                                 UNDEF();
  2264                                 break;
  2265                         }
  2266                         break;
  2267                     case 0xC:
  2268                         { /* SHAD Rm, Rn */
  2269                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2270                         /* Annoyingly enough, not directly convertible */
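                                /* SHAD shifts Rn left by Rm when Rm is non-negative, and shifts it
                                 * arithmetically right by the masked, negated count when Rm is
                                 * negative; a negative count that masks to zero corresponds to a
                                 * shift by 32, implemented as SAR by 31 (all sign bits). x86 has no
                                 * signed-count shift, so the sign of Rm selects between the SHL and
                                 * SAR sequences below.
                                 */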
  2271                         load_reg( R_EAX, Rn );
  2272                         load_reg( R_ECX, Rm );
  2273                         CMP_imm32_r32( 0, R_ECX );
  2274                         JGE_rel8(16, doshl);
  2276                         NEG_r32( R_ECX );      // 2
  2277                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2278                         JE_rel8( 4, emptysar);     // 2
  2279                         SAR_r32_CL( R_EAX );       // 2
  2280                         JMP_rel8(10, end);          // 2
  2282                         JMP_TARGET(emptysar);
  2283                         SAR_imm8_r32(31, R_EAX );  // 3
  2284                         JMP_rel8(5, end2);
  2286                         JMP_TARGET(doshl);
  2287                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2288                         SHL_r32_CL( R_EAX );       // 2
  2289                         JMP_TARGET(end);
  2290                         JMP_TARGET(end2);
  2291                         store_reg( R_EAX, Rn );
  2292                         sh4_x86.tstate = TSTATE_NONE;
  2293                         }
  2294                         break;
  2295                     case 0xD:
  2296                         { /* SHLD Rm, Rn */
  2297                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
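                                /* SHLD is the logical counterpart of SHAD above: non-negative Rm
                                 * shifts left, negative Rm shifts right by the masked, negated count,
                                 * and a right shift whose count masks to zero (a shift by 32) clears
                                 * Rn entirely, hence the XOR_r32_r32 path.
                                 */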
  2298                         load_reg( R_EAX, Rn );
  2299                         load_reg( R_ECX, Rm );
  2300                         CMP_imm32_r32( 0, R_ECX );
  2301                         JGE_rel8(15, doshl);
  2303                         NEG_r32( R_ECX );      // 2
  2304                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2305                         JE_rel8( 4, emptyshr );
  2306                         SHR_r32_CL( R_EAX );       // 2
  2307                         JMP_rel8(9, end);          // 2
  2309                         JMP_TARGET(emptyshr);
  2310                         XOR_r32_r32( R_EAX, R_EAX );
  2311                         JMP_rel8(5, end2);
  2313                         JMP_TARGET(doshl);
  2314                         AND_imm8_r8( 0x1F, R_CL ); // 3
  2315                         SHL_r32_CL( R_EAX );       // 2
  2316                         JMP_TARGET(end);
  2317                         JMP_TARGET(end2);
  2318                         store_reg( R_EAX, Rn );
  2319                         sh4_x86.tstate = TSTATE_NONE;
  2320                         }
  2321                         break;
  2322                     case 0xE:
  2323                         switch( (ir&0x80) >> 7 ) {
  2324                             case 0x0:
  2325                                 switch( (ir&0x70) >> 4 ) {
  2326                                     case 0x0:
  2327                                         { /* LDC Rm, SR */
  2328                                         uint32_t Rm = ((ir>>8)&0xF); 
  2329                                         if( sh4_x86.in_delay_slot ) {
  2330                                     	SLOTILLEGAL();
  2331                                         } else {
  2332                                     	check_priv();
  2333                                     	load_reg( R_EAX, Rm );
  2334                                     	call_func1( sh4_write_sr, R_EAX );
  2335                                     	sh4_x86.priv_checked = FALSE;
  2336                                     	sh4_x86.fpuen_checked = FALSE;
  2337                                     	sh4_x86.tstate = TSTATE_NONE;
  2338                                         }
  2339                                         }
  2340                                         break;
  2341                                     case 0x1:
  2342                                         { /* LDC Rm, GBR */
  2343                                         uint32_t Rm = ((ir>>8)&0xF); 
  2344                                         load_reg( R_EAX, Rm );
  2345                                         store_spreg( R_EAX, R_GBR );
  2346                                         }
  2347                                         break;
  2348                                     case 0x2:
  2349                                         { /* LDC Rm, VBR */
  2350                                         uint32_t Rm = ((ir>>8)&0xF); 
  2351                                         check_priv();
  2352                                         load_reg( R_EAX, Rm );
  2353                                         store_spreg( R_EAX, R_VBR );
  2354                                         sh4_x86.tstate = TSTATE_NONE;
  2355                                         }
  2356                                         break;
  2357                                     case 0x3:
  2358                                         { /* LDC Rm, SSR */
  2359                                         uint32_t Rm = ((ir>>8)&0xF); 
  2360                                         check_priv();
  2361                                         load_reg( R_EAX, Rm );
  2362                                         store_spreg( R_EAX, R_SSR );
  2363                                         sh4_x86.tstate = TSTATE_NONE;
  2364                                         }
  2365                                         break;
  2366                                     case 0x4:
  2367                                         { /* LDC Rm, SPC */
  2368                                         uint32_t Rm = ((ir>>8)&0xF); 
  2369                                         check_priv();
  2370                                         load_reg( R_EAX, Rm );
  2371                                         store_spreg( R_EAX, R_SPC );
  2372                                         sh4_x86.tstate = TSTATE_NONE;
  2373                                         }
  2374                                         break;
  2375                                     default:
  2376                                         UNDEF();
  2377                                         break;
  2378                                 }
  2379                                 break;
  2380                             case 0x1:
  2381                                 { /* LDC Rm, Rn_BANK */
  2382                                 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7); 
  2383                                 check_priv();
  2384                                 load_reg( R_EAX, Rm );
  2385                                 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
  2386                                 sh4_x86.tstate = TSTATE_NONE;
  2387                                 }
  2388                                 break;
  2389                         }
  2390                         break;
  2391                     case 0xF:
  2392                         { /* MAC.W @Rm+, @Rn+ */
  2393                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
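                                /* MAC.W: read a signed 16-bit word from @Rn+ and from @Rm+, multiply
                                 * them, and accumulate the product into MAC. With the S flag clear
                                 * the full 64-bit MACH:MACL accumulation is used (the nosat path);
                                 * with S set the addition saturates MACL to 0x80000000/0x7FFFFFFF and
                                 * MACH is set to 1 to record the overflow.
                                 */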
  2394                         load_reg( R_ECX, Rm );
  2395                         precheck();
  2396                         check_ralign16( R_ECX );
  2397                         load_reg( R_ECX, Rn );
  2398                         check_ralign16( R_ECX );
  2399                         ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
  2400                         MEM_READ_WORD( R_ECX, R_EAX );
  2401                         PUSH_r32( R_EAX );
  2402                         load_reg( R_ECX, Rm );
  2403                         ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
  2404                         MEM_READ_WORD( R_ECX, R_EAX );
  2405                         POP_r32( R_ECX );
  2406                         IMUL_r32( R_ECX );
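                               /* One-operand IMUL leaves the 64-bit product in EDX:EAX. When FPSCR.S
                                * is set only MACL is accumulated, saturating to 0x80000000/0x7FFFFFFF
                                * with MACH set to 1 on overflow; otherwise EDX:EAX is added into
                                * MACH:MACL. The rel8 constants are hand-counted byte lengths of the
                                * emitted fragments (see the trailing // N size notes). */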
  2408                         load_spreg( R_ECX, R_S );
  2409                         TEST_r32_r32( R_ECX, R_ECX );
  2410                         JE_rel8( 47, nosat );
  2412                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2413                         JNO_rel8( 51, end );            // 2
  2414                         load_imm32( R_EDX, 1 );         // 5
  2415                         store_spreg( R_EDX, R_MACH );   // 6
  2416                         JS_rel8( 13, positive );        // 2
  2417                         load_imm32( R_EAX, 0x80000000 );// 5
  2418                         store_spreg( R_EAX, R_MACL );   // 6
  2419                         JMP_rel8( 25, end2 );           // 2
  2421                         JMP_TARGET(positive);
  2422                         load_imm32( R_EAX, 0x7FFFFFFF );// 5
  2423                         store_spreg( R_EAX, R_MACL );   // 6
  2424                         JMP_rel8( 12, end3);            // 2
  2426                         JMP_TARGET(nosat);
  2427                         ADD_r32_sh4r( R_EAX, R_MACL );  // 6
  2428                         ADC_r32_sh4r( R_EDX, R_MACH );  // 6
  2429                         JMP_TARGET(end);
  2430                         JMP_TARGET(end2);
  2431                         JMP_TARGET(end3);
  2432                         sh4_x86.tstate = TSTATE_NONE;
  2434                         break;
  2436                 break;
  2437             case 0x5:
  2438                 { /* MOV.L @(disp, Rm), Rn */
  2439                 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2; 
  2440                 load_reg( R_ECX, Rm );
  2441                 ADD_imm8s_r32( disp, R_ECX );
  2442                 precheck();
  2443                 check_ralign32( R_ECX );
  2444                 MEM_READ_LONG( R_ECX, R_EAX );
  2445                 store_reg( R_EAX, Rn );
  2446                 sh4_x86.tstate = TSTATE_NONE;
  2448                 break;
  2449             case 0x6:
  2450                 switch( ir&0xF ) {
  2451                     case 0x0:
  2452                         { /* MOV.B @Rm, Rn */
  2453                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2454                         load_reg( R_ECX, Rm );
  2455                         MEM_READ_BYTE( R_ECX, R_EAX );
  2456                         store_reg( R_EAX, Rn );
  2457                         sh4_x86.tstate = TSTATE_NONE;
  2459                         break;
  2460                     case 0x1:
  2461                         { /* MOV.W @Rm, Rn */
  2462                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2463                         load_reg( R_ECX, Rm );
  2464                         precheck();
  2465                         check_ralign16( R_ECX );
  2466                         MEM_READ_WORD( R_ECX, R_EAX );
  2467                         store_reg( R_EAX, Rn );
  2468                         sh4_x86.tstate = TSTATE_NONE;
  2470                         break;
  2471                     case 0x2:
  2472                         { /* MOV.L @Rm, Rn */
  2473                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2474                         load_reg( R_ECX, Rm );
  2475                         precheck();
  2476                         check_ralign32( R_ECX );
  2477                         MEM_READ_LONG( R_ECX, R_EAX );
  2478                         store_reg( R_EAX, Rn );
  2479                         sh4_x86.tstate = TSTATE_NONE;
  2481                         break;
  2482                     case 0x3:
  2483                         { /* MOV Rm, Rn */
  2484                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2485                         load_reg( R_EAX, Rm );
  2486                         store_reg( R_EAX, Rn );
  2488                         break;
  2489                     case 0x4:
  2490                         { /* MOV.B @Rm+, Rn */
  2491                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2492                         load_reg( R_ECX, Rm );
  2493                         MOV_r32_r32( R_ECX, R_EAX );
  2494                         ADD_imm8s_r32( 1, R_EAX );
  2495                         store_reg( R_EAX, Rm );
  2496                         MEM_READ_BYTE( R_ECX, R_EAX );
  2497                         store_reg( R_EAX, Rn );
  2498                         sh4_x86.tstate = TSTATE_NONE;
  2500                         break;
  2501                     case 0x5:
  2502                         { /* MOV.W @Rm+, Rn */
  2503                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2504                         load_reg( R_EAX, Rm );
  2505                         precheck();
  2506                         check_ralign16( R_EAX );
  2507                         MOV_r32_r32( R_EAX, R_ECX );
  2508                         ADD_imm8s_r32( 2, R_EAX );
  2509                         store_reg( R_EAX, Rm );
  2510                         MEM_READ_WORD( R_ECX, R_EAX );
  2511                         store_reg( R_EAX, Rn );
  2512                         sh4_x86.tstate = TSTATE_NONE;
  2514                         break;
  2515                     case 0x6:
  2516                         { /* MOV.L @Rm+, Rn */
  2517                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2518                         load_reg( R_EAX, Rm );
  2519                         precheck();
  2520                         check_ralign32( R_EAX );
  2521                         MOV_r32_r32( R_EAX, R_ECX );
  2522                         ADD_imm8s_r32( 4, R_EAX );
  2523                         store_reg( R_EAX, Rm );
  2524                         MEM_READ_LONG( R_ECX, R_EAX );
  2525                         store_reg( R_EAX, Rn );
  2526                         sh4_x86.tstate = TSTATE_NONE;
  2528                         break;
  2529                     case 0x7:
  2530                         { /* NOT Rm, Rn */
  2531                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2532                         load_reg( R_EAX, Rm );
  2533                         NOT_r32( R_EAX );
  2534                         store_reg( R_EAX, Rn );
  2535                         sh4_x86.tstate = TSTATE_NONE;
  2537                         break;
  2538                     case 0x8:
  2539                         { /* SWAP.B Rm, Rn */
  2540                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2541                         load_reg( R_EAX, Rm );
  2542                         XCHG_r8_r8( R_AL, R_AH );
  2543                         store_reg( R_EAX, Rn );
  2545                         break;
  2546                     case 0x9:
  2547                         { /* SWAP.W Rm, Rn */
  2548                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2549                         load_reg( R_EAX, Rm );
  2550                         MOV_r32_r32( R_EAX, R_ECX );
  2551                         SHL_imm8_r32( 16, R_ECX );
  2552                         SHR_imm8_r32( 16, R_EAX );
  2553                         OR_r32_r32( R_EAX, R_ECX );
  2554                         store_reg( R_ECX, Rn );
  2555                         sh4_x86.tstate = TSTATE_NONE;
  2557                         break;
  2558                     case 0xA:
  2559                         { /* NEGC Rm, Rn */
  2560                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2561                         load_reg( R_EAX, Rm );
  2562                         XOR_r32_r32( R_ECX, R_ECX );
  2563                         LDC_t();
  2564                         SBB_r32_r32( R_EAX, R_ECX );
  2565                         store_reg( R_ECX, Rn );
  2566                         SETC_t();
  2567                         sh4_x86.tstate = TSTATE_C;
  2569                         break;
  2570                     case 0xB:
  2571                         { /* NEG Rm, Rn */
  2572                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2573                         load_reg( R_EAX, Rm );
  2574                         NEG_r32( R_EAX );
  2575                         store_reg( R_EAX, Rn );
  2576                         sh4_x86.tstate = TSTATE_NONE;
  2578                         break;
  2579                     case 0xC:
  2580                         { /* EXTU.B Rm, Rn */
  2581                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2582                         load_reg( R_EAX, Rm );
  2583                         MOVZX_r8_r32( R_EAX, R_EAX );
  2584                         store_reg( R_EAX, Rn );
  2586                         break;
  2587                     case 0xD:
  2588                         { /* EXTU.W Rm, Rn */
  2589                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2590                         load_reg( R_EAX, Rm );
  2591                         MOVZX_r16_r32( R_EAX, R_EAX );
  2592                         store_reg( R_EAX, Rn );
  2594                         break;
  2595                     case 0xE:
  2596                         { /* EXTS.B Rm, Rn */
  2597                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2598                         load_reg( R_EAX, Rm );
  2599                         MOVSX_r8_r32( R_EAX, R_EAX );
  2600                         store_reg( R_EAX, Rn );
  2602                         break;
  2603                     case 0xF:
  2604                         { /* EXTS.W Rm, Rn */
  2605                         uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  2606                         load_reg( R_EAX, Rm );
  2607                         MOVSX_r16_r32( R_EAX, R_EAX );
  2608                         store_reg( R_EAX, Rn );
  2610                         break;
  2612                 break;
  2613             case 0x7:
  2614                 { /* ADD #imm, Rn */
  2615                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  2616                 load_reg( R_EAX, Rn );
  2617                 ADD_imm8s_r32( imm, R_EAX );
  2618                 store_reg( R_EAX, Rn );
  2619                 sh4_x86.tstate = TSTATE_NONE;
  2621                 break;
  2622             case 0x8:
  2623                 switch( (ir&0xF00) >> 8 ) {
  2624                     case 0x0:
  2625                         { /* MOV.B R0, @(disp, Rn) */
  2626                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2627                         load_reg( R_EAX, 0 );
  2628                         load_reg( R_ECX, Rn );
  2629                         ADD_imm32_r32( disp, R_ECX );
  2630                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2631                         sh4_x86.tstate = TSTATE_NONE;
  2633                         break;
  2634                     case 0x1:
  2635                         { /* MOV.W R0, @(disp, Rn) */
  2636                         uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2637                         load_reg( R_ECX, Rn );
  2638                         load_reg( R_EAX, 0 );
  2639                         ADD_imm32_r32( disp, R_ECX );
  2640                         precheck();
  2641                         check_walign16( R_ECX );
  2642                         MEM_WRITE_WORD( R_ECX, R_EAX );
  2643                         sh4_x86.tstate = TSTATE_NONE;
  2645                         break;
  2646                     case 0x4:
  2647                         { /* MOV.B @(disp, Rm), R0 */
  2648                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF); 
  2649                         load_reg( R_ECX, Rm );
  2650                         ADD_imm32_r32( disp, R_ECX );
  2651                         MEM_READ_BYTE( R_ECX, R_EAX );
  2652                         store_reg( R_EAX, 0 );
  2653                         sh4_x86.tstate = TSTATE_NONE;
  2655                         break;
  2656                     case 0x5:
  2657                         { /* MOV.W @(disp, Rm), R0 */
  2658                         uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1; 
  2659                         load_reg( R_ECX, Rm );
  2660                         ADD_imm32_r32( disp, R_ECX );
  2661                         precheck();
  2662                         check_ralign16( R_ECX );
  2663                         MEM_READ_WORD( R_ECX, R_EAX );
  2664                         store_reg( R_EAX, 0 );
  2665                         sh4_x86.tstate = TSTATE_NONE;
  2667                         break;
  2668                     case 0x8:
  2669                         { /* CMP/EQ #imm, R0 */
  2670                         int32_t imm = SIGNEXT8(ir&0xFF); 
  2671                         load_reg( R_EAX, 0 );
  2672                         CMP_imm8s_r32(imm, R_EAX);
  2673                         SETE_t();
  2674                         sh4_x86.tstate = TSTATE_E;
  2676                         break;
  2677                     case 0x9:
  2678                         { /* BT disp */
  2679                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2680                         if( sh4_x86.in_delay_slot ) {
  2681                     	SLOTILLEGAL();
  2682                         } else {
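                           	/* Conditional exit: skip the EXIT_BLOCK_SIZE-byte exit stub when T is
                           	 * clear; when T is set the block is left at disp+pc+4. The not-taken
                           	 * path falls through and translation continues at pc+2. */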
  2683                     	JF_rel8( EXIT_BLOCK_SIZE, nottaken );
  2684                     	exit_block( disp + pc + 4, pc+2 );
  2685                     	JMP_TARGET(nottaken);
  2686                     	return 2;
  2689                         break;
  2690                     case 0xB:
  2691                         { /* BF disp */
  2692                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2693                         if( sh4_x86.in_delay_slot ) {
  2694                     	SLOTILLEGAL();
  2695                         } else {
  2696                     	JT_rel8( EXIT_BLOCK_SIZE, nottaken );
  2697                     	exit_block( disp + pc + 4, pc+2 );
  2698                     	JMP_TARGET(nottaken);
  2699                     	return 2;
  2702                         break;
  2703                     case 0xD:
  2704                         { /* BT/S disp */
  2705                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2706                         if( sh4_x86.in_delay_slot ) {
  2707                     	SLOTILLEGAL();
  2708                         } else {
  2709                     	sh4_x86.in_delay_slot = TRUE;
  2710                     	if( sh4_x86.tstate == TSTATE_NONE ) {
  2711                     	    CMP_imm8s_sh4r( 1, R_T );
  2712                     	    sh4_x86.tstate = TSTATE_E;
  2714                     	OP(0x0F); OP(0x80+(sh4_x86.tstate^1)); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // Jcc rel32, inverted condition (skips the taken path when T is clear)
  2715                     	sh4_translate_instruction(pc+2);
  2716                     	exit_block( disp + pc + 4, pc+4 );
  2717                     	// not taken
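                           	// Back-patch the rel32 placeholder now that the taken path (delay slot +
                           	// exit_block) has been emitted; the not-taken path lands here and the
                           	// delay slot is translated a second time for it. BF/S below mirrors this
                           	// with the opposite condition.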
  2718                     	*patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2719                     	sh4_translate_instruction(pc+2);
  2720                     	return 4;
  2723                         break;
  2724                     case 0xF:
  2725                         { /* BF/S disp */
  2726                         int32_t disp = SIGNEXT8(ir&0xFF)<<1; 
  2727                         if( sh4_x86.in_delay_slot ) {
  2728                     	SLOTILLEGAL();
  2729                         } else {
  2730                     	sh4_x86.in_delay_slot = TRUE;
  2731                     	if( sh4_x86.tstate == TSTATE_NONE ) {
  2732                     	    CMP_imm8s_sh4r( 1, R_T );
  2733                     	    sh4_x86.tstate = TSTATE_E;
  2735                     	OP(0x0F); OP(0x80+sh4_x86.tstate); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // Jcc rel32 (skips the taken path when T is set)
  2736                     	sh4_translate_instruction(pc+2);
  2737                     	exit_block( disp + pc + 4, pc+4 );
  2738                     	// not taken
  2739                     	*patch = (xlat_output - ((uint8_t *)patch)) - 4;
  2740                     	sh4_translate_instruction(pc+2);
  2741                     	return 4;
  2744                         break;
  2745                     default:
  2746                         UNDEF();
  2747                         break;
  2749                 break;
  2750             case 0x9:
  2751                 { /* MOV.W @(disp, PC), Rn */
  2752                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1; 
  2753                 if( sh4_x86.in_delay_slot ) {
  2754             	SLOTILLEGAL();
  2755                 } else {
  2756             	load_imm32( R_ECX, pc + disp + 4 );
  2757             	MEM_READ_WORD( R_ECX, R_EAX );
  2758             	store_reg( R_EAX, Rn );
  2759             	sh4_x86.tstate = TSTATE_NONE;
  2762                 break;
  2763             case 0xA:
  2764                 { /* BRA disp */
  2765                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
  2766                 if( sh4_x86.in_delay_slot ) {
  2767             	SLOTILLEGAL();
  2768                 } else {
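                   	/* Unconditional branch: the delay-slot instruction is translated inline,
                   	 * then the block exits to disp+pc+4 and branch_taken records that this
                   	 * block ends in an unconditional branch. */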
  2769             	sh4_x86.in_delay_slot = TRUE;
  2770             	sh4_translate_instruction( pc + 2 );
  2771             	exit_block( disp + pc + 4, pc+4 );
  2772             	sh4_x86.branch_taken = TRUE;
  2773             	return 4;
  2776                 break;
  2777             case 0xB:
  2778                 { /* BSR disp */
  2779                 int32_t disp = SIGNEXT12(ir&0xFFF)<<1; 
  2780                 if( sh4_x86.in_delay_slot ) {
  2781             	SLOTILLEGAL();
  2782                 } else {
  2783             	load_imm32( R_EAX, pc + 4 );
  2784             	store_spreg( R_EAX, R_PR );
  2785             	sh4_x86.in_delay_slot = TRUE;
  2786             	sh4_translate_instruction( pc + 2 );
  2787             	exit_block( disp + pc + 4, pc+4 );
  2788             	sh4_x86.branch_taken = TRUE;
  2789             	return 4;
  2792                 break;
  2793             case 0xC:
  2794                 switch( (ir&0xF00) >> 8 ) {
  2795                     case 0x0:
  2796                         { /* MOV.B R0, @(disp, GBR) */
  2797                         uint32_t disp = (ir&0xFF); 
  2798                         load_reg( R_EAX, 0 );
  2799                         load_spreg( R_ECX, R_GBR );
  2800                         ADD_imm32_r32( disp, R_ECX );
  2801                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2802                         sh4_x86.tstate = TSTATE_NONE;
  2804                         break;
  2805                     case 0x1:
  2806                         { /* MOV.W R0, @(disp, GBR) */
  2807                         uint32_t disp = (ir&0xFF)<<1; 
  2808                         load_spreg( R_ECX, R_GBR );
  2809                         load_reg( R_EAX, 0 );
  2810                         ADD_imm32_r32( disp, R_ECX );
  2811                         precheck();
  2812                         check_walign16( R_ECX );
  2813                         MEM_WRITE_WORD( R_ECX, R_EAX );
  2814                         sh4_x86.tstate = TSTATE_NONE;
  2816                         break;
  2817                     case 0x2:
  2818                         { /* MOV.L R0, @(disp, GBR) */
  2819                         uint32_t disp = (ir&0xFF)<<2; 
  2820                         load_spreg( R_ECX, R_GBR );
  2821                         load_reg( R_EAX, 0 );
  2822                         ADD_imm32_r32( disp, R_ECX );
  2823                         precheck();
  2824                         check_walign32( R_ECX );
  2825                         MEM_WRITE_LONG( R_ECX, R_EAX );
  2826                         sh4_x86.tstate = TSTATE_NONE;
  2828                         break;
  2829                     case 0x3:
  2830                         { /* TRAPA #imm */
  2831                         uint32_t imm = (ir&0xFF); 
  2832                         if( sh4_x86.in_delay_slot ) {
  2833                     	SLOTILLEGAL();
  2834                         } else {
  2835                     	load_imm32( R_ECX, pc+2 );
  2836                     	store_spreg( R_ECX, REG_OFFSET(pc) );
  2837                     	load_imm32( R_EAX, imm );
  2838                     	call_func1( sh4_raise_trap, R_EAX );
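                           	// The return address (pc+2) was committed to sh4r.pc above; after
                           	// sh4_raise_trap runs, exit_block_pcset presumably leaves the block
                           	// using whatever PC the trap handler installed rather than a
                           	// translation-time constant.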
  2839                     	sh4_x86.tstate = TSTATE_NONE;
  2840                     	exit_block_pcset(pc);
  2841                     	sh4_x86.branch_taken = TRUE;
  2842                     	return 2;
  2845                         break;
  2846                     case 0x4:
  2847                         { /* MOV.B @(disp, GBR), R0 */
  2848                         uint32_t disp = (ir&0xFF); 
  2849                         load_spreg( R_ECX, R_GBR );
  2850                         ADD_imm32_r32( disp, R_ECX );
  2851                         MEM_READ_BYTE( R_ECX, R_EAX );
  2852                         store_reg( R_EAX, 0 );
  2853                         sh4_x86.tstate = TSTATE_NONE;
  2855                         break;
  2856                     case 0x5:
  2857                         { /* MOV.W @(disp, GBR), R0 */
  2858                         uint32_t disp = (ir&0xFF)<<1; 
  2859                         load_spreg( R_ECX, R_GBR );
  2860                         ADD_imm32_r32( disp, R_ECX );
  2861                         precheck();
  2862                         check_ralign16( R_ECX );
  2863                         MEM_READ_WORD( R_ECX, R_EAX );
  2864                         store_reg( R_EAX, 0 );
  2865                         sh4_x86.tstate = TSTATE_NONE;
  2867                         break;
  2868                     case 0x6:
  2869                         { /* MOV.L @(disp, GBR), R0 */
  2870                         uint32_t disp = (ir&0xFF)<<2; 
  2871                         load_spreg( R_ECX, R_GBR );
  2872                         ADD_imm32_r32( disp, R_ECX );
  2873                         precheck();
  2874                         check_ralign32( R_ECX );
  2875                         MEM_READ_LONG( R_ECX, R_EAX );
  2876                         store_reg( R_EAX, 0 );
  2877                         sh4_x86.tstate = TSTATE_NONE;
  2879                         break;
  2880                     case 0x7:
  2881                         { /* MOVA @(disp, PC), R0 */
  2882                         uint32_t disp = (ir&0xFF)<<2; 
  2883                         if( sh4_x86.in_delay_slot ) {
  2884                     	SLOTILLEGAL();
  2885                         } else {
  2886                     	load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
  2887                     	store_reg( R_ECX, 0 );
  2890                         break;
  2891                     case 0x8:
  2892                         { /* TST #imm, R0 */
  2893                         uint32_t imm = (ir&0xFF); 
  2894                         load_reg( R_EAX, 0 );
  2895                         TEST_imm32_r32( imm, R_EAX );
  2896                         SETE_t();
  2897                         sh4_x86.tstate = TSTATE_E;
  2899                         break;
  2900                     case 0x9:
  2901                         { /* AND #imm, R0 */
  2902                         uint32_t imm = (ir&0xFF); 
  2903                         load_reg( R_EAX, 0 );
  2904                         AND_imm32_r32(imm, R_EAX); 
  2905                         store_reg( R_EAX, 0 );
  2906                         sh4_x86.tstate = TSTATE_NONE;
  2908                         break;
  2909                     case 0xA:
  2910                         { /* XOR #imm, R0 */
  2911                         uint32_t imm = (ir&0xFF); 
  2912                         load_reg( R_EAX, 0 );
  2913                         XOR_imm32_r32( imm, R_EAX );
  2914                         store_reg( R_EAX, 0 );
  2915                         sh4_x86.tstate = TSTATE_NONE;
  2917                         break;
  2918                     case 0xB:
  2919                         { /* OR #imm, R0 */
  2920                         uint32_t imm = (ir&0xFF); 
  2921                         load_reg( R_EAX, 0 );
  2922                         OR_imm32_r32(imm, R_EAX);
  2923                         store_reg( R_EAX, 0 );
  2924                         sh4_x86.tstate = TSTATE_NONE;
  2926                         break;
  2927                     case 0xC:
  2928                         { /* TST.B #imm, @(R0, GBR) */
  2929                         uint32_t imm = (ir&0xFF); 
  2930                         load_reg( R_EAX, 0);
  2931                         load_spreg( R_ECX, R_GBR );
  2932                         ADD_r32_r32( R_EAX, R_ECX );
  2933                         MEM_READ_BYTE( R_ECX, R_EAX );
  2934                         TEST_imm8_r8( imm, R_AL );
  2935                         SETE_t();
  2936                         sh4_x86.tstate = TSTATE_E;
  2938                         break;
  2939                     case 0xD:
  2940                         { /* AND.B #imm, @(R0, GBR) */
  2941                         uint32_t imm = (ir&0xFF); 
  2942                         load_reg( R_EAX, 0 );
  2943                         load_spreg( R_ECX, R_GBR );
  2944                         ADD_r32_r32( R_EAX, R_ECX );
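                               // The effective address in ECX appears to be clobbered by the
                               // MEM_READ_BYTE call, so it is saved/restored around the read for the
                               // write-back below. XOR.B and OR.B use the same read-modify-write pattern.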
  2945                         PUSH_r32(R_ECX);
  2946                         MEM_READ_BYTE( R_ECX, R_EAX );
  2947                         POP_r32(R_ECX);
  2948                         AND_imm32_r32(imm, R_EAX );
  2949                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2950                         sh4_x86.tstate = TSTATE_NONE;
  2952                         break;
  2953                     case 0xE:
  2954                         { /* XOR.B #imm, @(R0, GBR) */
  2955                         uint32_t imm = (ir&0xFF); 
  2956                         load_reg( R_EAX, 0 );
  2957                         load_spreg( R_ECX, R_GBR );
  2958                         ADD_r32_r32( R_EAX, R_ECX );
  2959                         PUSH_r32(R_ECX);
  2960                         MEM_READ_BYTE(R_ECX, R_EAX);
  2961                         POP_r32(R_ECX);
  2962                         XOR_imm32_r32( imm, R_EAX );
  2963                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2964                         sh4_x86.tstate = TSTATE_NONE;
  2966                         break;
  2967                     case 0xF:
  2968                         { /* OR.B #imm, @(R0, GBR) */
  2969                         uint32_t imm = (ir&0xFF); 
  2970                         load_reg( R_EAX, 0 );
  2971                         load_spreg( R_ECX, R_GBR );
  2972                         ADD_r32_r32( R_EAX, R_ECX );
  2973                         PUSH_r32(R_ECX);
  2974                         MEM_READ_BYTE( R_ECX, R_EAX );
  2975                         POP_r32(R_ECX);
  2976                         OR_imm32_r32(imm, R_EAX );
  2977                         MEM_WRITE_BYTE( R_ECX, R_EAX );
  2978                         sh4_x86.tstate = TSTATE_NONE;
  2980                         break;
  2982                 break;
  2983             case 0xD:
  2984                 { /* MOV.L @(disp, PC), Rn */
  2985                 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2; 
  2986                 if( sh4_x86.in_delay_slot ) {
  2987             	SLOTILLEGAL();
  2988                 } else {
  2989             	uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
  2990             	sh4ptr_t ptr = mem_get_region(target);
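                   	// The load target is a translation-time constant, so when it falls in
                   	// directly mapped memory a plain MOV from the host address is emitted
                   	// instead of the generic MEM_READ_LONG call.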
  2991             	if( ptr != NULL ) {
  2992             	    MOV_moff32_EAX( ptr );
  2993             	} else {
  2994             	    load_imm32( R_ECX, target );
  2995             	    MEM_READ_LONG( R_ECX, R_EAX );
  2997             	store_reg( R_EAX, Rn );
  2998             	sh4_x86.tstate = TSTATE_NONE;
  3001                 break;
  3002             case 0xE:
  3003                 { /* MOV #imm, Rn */
  3004                 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF); 
  3005                 load_imm32( R_EAX, imm );
  3006                 store_reg( R_EAX, Rn );
  3008                 break;
  3009             case 0xF:
  3010                 switch( ir&0xF ) {
  3011                     case 0x0:
  3012                         { /* FADD FRm, FRn */
  3013                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3014                         check_fpuen();
  3015                         load_spreg( R_ECX, R_FPSCR );
  3016                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3017                         load_fr_bank( R_EDX );
  3018                         JNE_rel8(13,doubleprec);
  3019                         push_fr(R_EDX, FRm);
  3020                         push_fr(R_EDX, FRn);
  3021                         FADDP_st(1);
  3022                         pop_fr(R_EDX, FRn);
  3023                         JMP_rel8(11,end);
  3024                         JMP_TARGET(doubleprec);
  3025                         push_dr(R_EDX, FRm);
  3026                         push_dr(R_EDX, FRn);
  3027                         FADDP_st(1);
  3028                         pop_dr(R_EDX, FRn);
  3029                         JMP_TARGET(end);
  3030                         sh4_x86.tstate = TSTATE_NONE;
  3032                         break;
  3033                     case 0x1:
  3034                         { /* FSUB FRm, FRn */
  3035                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3036                         check_fpuen();
  3037                         load_spreg( R_ECX, R_FPSCR );
  3038                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3039                         load_fr_bank( R_EDX );
  3040                         JNE_rel8(13, doubleprec);
  3041                         push_fr(R_EDX, FRn);
  3042                         push_fr(R_EDX, FRm);
  3043                         FSUBP_st(1);
  3044                         pop_fr(R_EDX, FRn);
  3045                         JMP_rel8(11, end);
  3046                         JMP_TARGET(doubleprec);
  3047                         push_dr(R_EDX, FRn);
  3048                         push_dr(R_EDX, FRm);
  3049                         FSUBP_st(1);
  3050                         pop_dr(R_EDX, FRn);
  3051                         JMP_TARGET(end);
  3052                         sh4_x86.tstate = TSTATE_NONE;
  3054                         break;
  3055                     case 0x2:
  3056                         { /* FMUL FRm, FRn */
  3057                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3058                         check_fpuen();
  3059                         load_spreg( R_ECX, R_FPSCR );
  3060                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3061                         load_fr_bank( R_EDX );
  3062                         JNE_rel8(13, doubleprec);
  3063                         push_fr(R_EDX, FRm);
  3064                         push_fr(R_EDX, FRn);
  3065                         FMULP_st(1);
  3066                         pop_fr(R_EDX, FRn);
  3067                         JMP_rel8(11, end);
  3068                         JMP_TARGET(doubleprec);
  3069                         push_dr(R_EDX, FRm);
  3070                         push_dr(R_EDX, FRn);
  3071                         FMULP_st(1);
  3072                         pop_dr(R_EDX, FRn);
  3073                         JMP_TARGET(end);
  3074                         sh4_x86.tstate = TSTATE_NONE;
  3076                         break;
  3077                     case 0x3:
  3078                         { /* FDIV FRm, FRn */
  3079                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3080                         check_fpuen();
  3081                         load_spreg( R_ECX, R_FPSCR );
  3082                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3083                         load_fr_bank( R_EDX );
  3084                         JNE_rel8(13, doubleprec);
  3085                         push_fr(R_EDX, FRn);
  3086                         push_fr(R_EDX, FRm);
  3087                         FDIVP_st(1);
  3088                         pop_fr(R_EDX, FRn);
  3089                         JMP_rel8(11, end);
  3090                         JMP_TARGET(doubleprec);
  3091                         push_dr(R_EDX, FRn);
  3092                         push_dr(R_EDX, FRm);
  3093                         FDIVP_st(1);
  3094                         pop_dr(R_EDX, FRn);
  3095                         JMP_TARGET(end);
  3096                         sh4_x86.tstate = TSTATE_NONE;
  3098                         break;
  3099                     case 0x4:
  3100                         { /* FCMP/EQ FRm, FRn */
  3101                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3102                         check_fpuen();
  3103                         load_spreg( R_ECX, R_FPSCR );
  3104                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3105                         load_fr_bank( R_EDX );
  3106                         JNE_rel8(8, doubleprec);
  3107                         push_fr(R_EDX, FRm);
  3108                         push_fr(R_EDX, FRn);
  3109                         JMP_rel8(6, end);
  3110                         JMP_TARGET(doubleprec);
  3111                         push_dr(R_EDX, FRm);
  3112                         push_dr(R_EDX, FRn);
  3113                         JMP_TARGET(end);
  3114                         FCOMIP_st(1);
  3115                         SETE_t();
  3116                         FPOP_st();
  3117                         sh4_x86.tstate = TSTATE_NONE;
  3119                         break;
  3120                     case 0x5:
  3121                         { /* FCMP/GT FRm, FRn */
  3122                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3123                         check_fpuen();
  3124                         load_spreg( R_ECX, R_FPSCR );
  3125                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3126                         load_fr_bank( R_EDX );
  3127                         JNE_rel8(8, doubleprec);
  3128                         push_fr(R_EDX, FRm);
  3129                         push_fr(R_EDX, FRn);
  3130                         JMP_rel8(6, end);
  3131                         JMP_TARGET(doubleprec);
  3132                         push_dr(R_EDX, FRm);
  3133                         push_dr(R_EDX, FRn);
  3134                         JMP_TARGET(end);
  3135                         FCOMIP_st(1);
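                               // FRn was pushed last, so FCOMIP compares FRn (ST0) against FRm (ST1);
                               // SETA sets T only when FRn > FRm (unordered results clear T).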
  3136                         SETA_t();
  3137                         FPOP_st();
  3138                         sh4_x86.tstate = TSTATE_NONE;
  3140                         break;
  3141                     case 0x6:
  3142                         { /* FMOV @(R0, Rm), FRn */
  3143                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3144                         precheck();
  3145                         check_fpuen_no_precheck();
  3146                         load_reg( R_ECX, Rm );
  3147                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
  3148                         check_ralign32( R_ECX );
  3149                         load_spreg( R_EDX, R_FPSCR );
  3150                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3151                         JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
  3152                         MEM_READ_LONG( R_ECX, R_EAX );
  3153                         load_fr_bank( R_EDX );
  3154                         store_fr( R_EDX, R_EAX, FRn );
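                               // The 32-bit (SZ=0) transfer was emitted above; the doublesize path
                               // below performs a 64-bit read, writing the pair to FRn&0x0E and FRn|1,
                               // and uses the XF bank when FRn is odd (an XD register).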
  3155                         if( FRn&1 ) {
  3156                     	JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
  3157                     	JMP_TARGET(doublesize);
  3158                     	MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
  3159                     	load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
  3160                     	load_xf_bank( R_EDX );
  3161                     	store_fr( R_EDX, R_EAX, FRn&0x0E );
  3162                     	store_fr( R_EDX, R_ECX, FRn|0x01 );
  3163                     	JMP_TARGET(end);
  3164                         } else {
  3165                     	JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
  3166                     	JMP_TARGET(doublesize);
  3167                     	MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
  3168                     	load_fr_bank( R_EDX );
  3169                     	store_fr( R_EDX, R_EAX, FRn&0x0E );
  3170                     	store_fr( R_EDX, R_ECX, FRn|0x01 );
  3171                     	JMP_TARGET(end);
  3173                         sh4_x86.tstate = TSTATE_NONE;
  3175                         break;
  3176                     case 0x7:
  3177                         { /* FMOV FRm, @(R0, Rn) */
  3178                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3179                         precheck();
  3180                         check_fpuen_no_precheck();
  3181                         load_reg( R_ECX, Rn );
  3182                         ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
  3183                         check_walign32( R_ECX );
  3184                         load_spreg( R_EDX, R_FPSCR );
  3185                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3186                         JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
  3187                         load_fr_bank( R_EDX );
  3188                         load_fr( R_EDX, R_EAX, FRm );
  3189                         MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
  3190                         if( FRm&1 ) {
  3191                     	JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
  3192                     	JMP_TARGET(doublesize);
  3193                     	load_xf_bank( R_EDX );
  3194                     	load_fr( R_EDX, R_EAX, FRm&0x0E );
  3195                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3196                     	MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
  3197                     	JMP_TARGET(end);
  3198                         } else {
  3199                     	JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
  3200                     	JMP_TARGET(doublesize);
  3201                     	load_fr_bank( R_EDX );
  3202                     	load_fr( R_EDX, R_EAX, FRm&0x0E );
  3203                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3204                     	MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
  3205                     	JMP_TARGET(end);
  3207                         sh4_x86.tstate = TSTATE_NONE;
  3209                         break;
  3210                     case 0x8:
  3211                         { /* FMOV @Rm, FRn */
  3212                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3213                         precheck();
  3214                         check_fpuen_no_precheck();
  3215                         load_reg( R_ECX, Rm );
  3216                         check_ralign32( R_ECX );
  3217                         load_spreg( R_EDX, R_FPSCR );
  3218                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3219                         JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
  3220                         MEM_READ_LONG( R_ECX, R_EAX );
  3221                         load_fr_bank( R_EDX );
  3222                         store_fr( R_EDX, R_EAX, FRn );
  3223                         if( FRn&1 ) {
  3224                     	JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
  3225                     	JMP_TARGET(doublesize);
  3226                     	MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
  3227                     	load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
  3228                     	load_xf_bank( R_EDX );
  3229                     	store_fr( R_EDX, R_EAX, FRn&0x0E );
  3230                     	store_fr( R_EDX, R_ECX, FRn|0x01 );
  3231                     	JMP_TARGET(end);
  3232                         } else {
  3233                     	JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
  3234                     	JMP_TARGET(doublesize);
  3235                     	MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
  3236                     	load_fr_bank( R_EDX );
  3237                     	store_fr( R_EDX, R_EAX, FRn&0x0E );
  3238                     	store_fr( R_EDX, R_ECX, FRn|0x01 );
  3239                     	JMP_TARGET(end);
  3241                         sh4_x86.tstate = TSTATE_NONE;
  3243                         break;
  3244                     case 0x9:
  3245                         { /* FMOV @Rm+, FRn */
  3246                         uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); 
  3247                         precheck();
  3248                         check_fpuen_no_precheck();
  3249                         load_reg( R_ECX, Rm );
  3250                         check_ralign32( R_ECX );
  3251                         MOV_r32_r32( R_ECX, R_EAX );
  3252                         load_spreg( R_EDX, R_FPSCR );
  3253                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3254                         JNE_rel8(14 + CALL_FUNC1_SIZE, doublesize);
  3255                         ADD_imm8s_r32( 4, R_EAX );
  3256                         store_reg( R_EAX, Rm );
  3257                         MEM_READ_LONG( R_ECX, R_EAX );
  3258                         load_fr_bank( R_EDX );
  3259                         store_fr( R_EDX, R_EAX, FRn );
  3260                         if( FRn&1 ) {
  3261                     	JMP_rel8(27 + MEM_READ_DOUBLE_SIZE, end);
  3262                     	JMP_TARGET(doublesize);
  3263                     	ADD_imm8s_r32( 8, R_EAX );
  3264                     	store_reg(R_EAX, Rm);
  3265                     	MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
  3266                     	load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
  3267                     	load_xf_bank( R_EDX );
  3268                     	store_fr( R_EDX, R_EAX, FRn&0x0E );
  3269                     	store_fr( R_EDX, R_ECX, FRn|0x01 );
  3270                     	JMP_TARGET(end);
  3271                         } else {
  3272                     	JMP_rel8(15 + MEM_READ_DOUBLE_SIZE, end);
  3273                     	ADD_imm8s_r32( 8, R_EAX );
  3274                     	store_reg(R_EAX, Rm);
  3275                     	MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
  3276                     	load_fr_bank( R_EDX );
  3277                     	store_fr( R_EDX, R_EAX, FRn&0x0E );
  3278                     	store_fr( R_EDX, R_ECX, FRn|0x01 );
  3279                     	JMP_TARGET(end);
  3281                         sh4_x86.tstate = TSTATE_NONE;
  3283                         break;
  3284                     case 0xA:
  3285                         { /* FMOV FRm, @Rn */
  3286                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3287                         precheck();
  3288                         check_fpuen_no_precheck();
  3289                         load_reg( R_ECX, Rn );
  3290                         check_walign32( R_ECX );
  3291                         load_spreg( R_EDX, R_FPSCR );
  3292                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3293                         JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
  3294                         load_fr_bank( R_EDX );
  3295                         load_fr( R_EDX, R_EAX, FRm );
  3296                         MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
  3297                         if( FRm&1 ) {
  3298                     	JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
  3299                     	JMP_TARGET(doublesize);
  3300                     	load_xf_bank( R_EDX );
  3301                     	load_fr( R_EDX, R_EAX, FRm&0x0E );
  3302                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3303                     	MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
  3304                     	JMP_TARGET(end);
  3305                         } else {
  3306                     	JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
  3307                     	JMP_TARGET(doublesize);
  3308                     	load_fr_bank( R_EDX );
  3309                     	load_fr( R_EDX, R_EAX, FRm&0x0E );
  3310                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3311                     	MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
  3312                     	JMP_TARGET(end);
  3314                         sh4_x86.tstate = TSTATE_NONE;
  3316                         break;
  3317                     case 0xB:
  3318                         { /* FMOV FRm, @-Rn */
  3319                         uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3320                         precheck();
  3321                         check_fpuen_no_precheck();
  3322                         load_reg( R_ECX, Rn );
  3323                         check_walign32( R_ECX );
  3324                         load_spreg( R_EDX, R_FPSCR );
  3325                         TEST_imm32_r32( FPSCR_SZ, R_EDX );
  3326                         JNE_rel8(14 + CALL_FUNC2_SIZE, doublesize);
  3327                         load_fr_bank( R_EDX );
  3328                         load_fr( R_EDX, R_EAX, FRm );
  3329                         ADD_imm8s_r32(-4,R_ECX);
  3330                         store_reg( R_ECX, Rn );
  3331                         MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
  3332                         if( FRm&1 ) {
  3333                     	JMP_rel8( 24 + MEM_WRITE_DOUBLE_SIZE, end );
  3334                     	JMP_TARGET(doublesize);
  3335                     	load_xf_bank( R_EDX );
  3336                     	load_fr( R_EDX, R_EAX, FRm&0x0E );
  3337                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3338                     	ADD_imm8s_r32(-8,R_ECX);
  3339                     	store_reg( R_ECX, Rn );
  3340                     	MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
  3341                     	JMP_TARGET(end);
  3342                         } else {
  3343                     	JMP_rel8( 15 + MEM_WRITE_DOUBLE_SIZE, end );
  3344                     	JMP_TARGET(doublesize);
  3345                     	load_fr_bank( R_EDX );
  3346                     	load_fr( R_EDX, R_EAX, FRm&0x0E );
  3347                     	load_fr( R_EDX, R_EDX, FRm|0x01 );
  3348                     	ADD_imm8s_r32(-8,R_ECX);
  3349                     	store_reg( R_ECX, Rn );
  3350                     	MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
  3351                     	JMP_TARGET(end);
  3353                         sh4_x86.tstate = TSTATE_NONE;
  3355                         break;
  3356                     case 0xC:
  3357                         { /* FMOV FRm, FRn */
  3358                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
  3359                         /* As horrible as this looks, it's actually covering 5 separate cases:
  3360                          * 1. 32-bit fr-to-fr (SZ=0)
  3361                          * 2. 64-bit dr-to-dr (SZ=1, FRm&1 == 0, FRn&1 == 0 )
  3362                          * 3. 64-bit dr-to-xd (SZ=1, FRm&1 == 0, FRn&1 == 1 )
  3363                          * 4. 64-bit xd-to-dr (SZ=1, FRm&1 == 1, FRn&1 == 0 )
  3364                          * 5. 64-bit xd-to-xd (SZ=1, FRm&1 == 1, FRn&1 == 1 )
  3365                          */
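                               // The FPSCR.SZ test below selects between these paths: SZ=0 takes the
                               // single 32-bit copy, while the SZ=1 code switches to the XF bank via
                               // load_xf_bank() whenever the odd half of an XD pair is named, hence
                               // the +/-1 register index arithmetic.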
  3366                         check_fpuen();
  3367                         load_spreg( R_ECX, R_FPSCR );
  3368                         load_fr_bank( R_EDX );
  3369                         TEST_imm32_r32( FPSCR_SZ, R_ECX );
  3370                         JNE_rel8(8, doublesize);
  3371                         load_fr( R_EDX, R_EAX, FRm ); // SZ=0 (32-bit) branch
  3372                         store_fr( R_EDX, R_EAX, FRn );
  3373                         if( FRm&1 ) {
  3374                     	JMP_rel8(24, end);
  3375                     	JMP_TARGET(doublesize);
  3376                     	load_xf_bank( R_ECX ); 
  3377                     	load_fr( R_ECX, R_EAX, FRm-1 );
  3378                     	if( FRn&1 ) {
  3379                     	    load_fr( R_ECX, R_EDX, FRm );
  3380                     	    store_fr( R_ECX, R_EAX, FRn-1 );
  3381                     	    store_fr( R_ECX, R_EDX, FRn );
  3382                     	} else /* FRn&1 == 0 */ {
  3383                     	    load_fr( R_ECX, R_ECX, FRm );
  3384                     	    store_fr( R_EDX, R_EAX, FRn );
  3385                     	    store_fr( R_EDX, R_ECX, FRn+1 );
  3387                     	JMP_TARGET(end);
  3388                         } else /* FRm&1 == 0 */ {
  3389                     	if( FRn&1 ) {
  3390                     	    JMP_rel8(24, end);
  3391                     	    load_xf_bank( R_ECX );
  3392                     	    load_fr( R_EDX, R_EAX, FRm );
  3393                     	    load_fr( R_EDX, R_EDX, FRm+1 );
  3394                     	    store_fr( R_ECX, R_EAX, FRn-1 );
  3395                     	    store_fr( R_ECX, R_EDX, FRn );
  3396                     	    JMP_TARGET(end);
  3397                     	} else /* FRn&1 == 0 */ {
  3398                     	    JMP_rel8(12, end);
  3399                     	    load_fr( R_EDX, R_EAX, FRm );
  3400                     	    load_fr( R_EDX, R_ECX, FRm+1 );
  3401                     	    store_fr( R_EDX, R_EAX, FRn );
  3402                     	    store_fr( R_EDX, R_ECX, FRn+1 );
  3403                     	    JMP_TARGET(end);
  3406                         sh4_x86.tstate = TSTATE_NONE;
  3408                         break;
  3409                     case 0xD:
  3410                         switch( (ir&0xF0) >> 4 ) {
  3411                             case 0x0:
  3412                                 { /* FSTS FPUL, FRn */
  3413                                 uint32_t FRn = ((ir>>8)&0xF); 
  3414                                 check_fpuen();
  3415                                 load_fr_bank( R_ECX );
  3416                                 load_spreg( R_EAX, R_FPUL );
  3417                                 store_fr( R_ECX, R_EAX, FRn );
  3418                                 sh4_x86.tstate = TSTATE_NONE;
  3420                                 break;
  3421                             case 0x1:
  3422                                 { /* FLDS FRm, FPUL */
  3423                                 uint32_t FRm = ((ir>>8)&0xF); 
  3424                                 check_fpuen();
  3425                                 load_fr_bank( R_ECX );
  3426                                 load_fr( R_ECX, R_EAX, FRm );
  3427                                 store_spreg( R_EAX, R_FPUL );
  3428                                 sh4_x86.tstate = TSTATE_NONE;
  3430                                 break;
  3431                             case 0x2:
  3432                                 { /* FLOAT FPUL, FRn */
  3433                                 uint32_t FRn = ((ir>>8)&0xF); 
  3434                                 check_fpuen();
  3435                                 load_spreg( R_ECX, R_FPSCR );
  3436                                 load_spreg(R_EDX, REG_OFFSET(fr_bank));
  3437                                 FILD_sh4r(R_FPUL);
  3438                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3439                                 JNE_rel8(5, doubleprec);
  3440                                 pop_fr( R_EDX, FRn );
  3441                                 JMP_rel8(3, end);
  3442                                 JMP_TARGET(doubleprec);
  3443                                 pop_dr( R_EDX, FRn );
  3444                                 JMP_TARGET(end);
  3445                                 sh4_x86.tstate = TSTATE_NONE;
  3447                                 break;
  3448                             case 0x3:
  3449                                 { /* FTRC FRm, FPUL */
  3450                                 uint32_t FRm = ((ir>>8)&0xF); 
  3451                                 check_fpuen();
  3452                                 load_spreg( R_ECX, R_FPSCR );
  3453                                 load_fr_bank( R_EDX );
  3454                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3455                                 JNE_rel8(5, doubleprec);
  3456                                 push_fr( R_EDX, FRm );
  3457                                 JMP_rel8(3, doop);
  3458                                 JMP_TARGET(doubleprec);
  3459                                 push_dr( R_EDX, FRm );
  3460                                 JMP_TARGET( doop );
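                                       // Clamp to the representable range before converting: the value
                                       // is compared against max_int and min_int, and on saturation ECX
                                       // still points at the bound that was hit, which is stored to FPUL
                                       // directly. Otherwise the x87 control word is temporarily switched
                                       // to trunc_fcw (round toward zero) so FISTP matches FTRC
                                       // semantics, then restored from save_fcw.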
  3461                                 load_imm32( R_ECX, (uint32_t)&max_int );
  3462                                 FILD_r32ind( R_ECX );
  3463                                 FCOMIP_st(1);
  3464                                 JNA_rel8( 32, sat );
  3465                                 load_imm32( R_ECX, (uint32_t)&min_int );  // 5
  3466                                 FILD_r32ind( R_ECX );           // 2
  3467                                 FCOMIP_st(1);                   // 2
  3468                                 JAE_rel8( 21, sat2 );            // 2
  3469                                 load_imm32( R_EAX, (uint32_t)&save_fcw );
  3470                                 FNSTCW_r32ind( R_EAX );
  3471                                 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
  3472                                 FLDCW_r32ind( R_EDX );
  3473                                 FISTP_sh4r(R_FPUL);             // 3
  3474                                 FLDCW_r32ind( R_EAX );
  3475                                 JMP_rel8( 9, end );             // 2
  3477                                 JMP_TARGET(sat);
  3478                                 JMP_TARGET(sat2);
  3479                                 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
  3480                                 store_spreg( R_ECX, R_FPUL );
  3481                                 FPOP_st();
  3482                                 JMP_TARGET(end);
  3483                                 sh4_x86.tstate = TSTATE_NONE;
  3485                                 break;
  3486                             case 0x4:
  3487                                 { /* FNEG FRn */
  3488                                 uint32_t FRn = ((ir>>8)&0xF); 
  3489                                 check_fpuen();
  3490                                 load_spreg( R_ECX, R_FPSCR );
  3491                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3492                                 load_fr_bank( R_EDX );
  3493                                 JNE_rel8(10, doubleprec);
  3494                                 push_fr(R_EDX, FRn);
  3495                                 FCHS_st0();
  3496                                 pop_fr(R_EDX, FRn);
  3497                                 JMP_rel8(8, end);
  3498                                 JMP_TARGET(doubleprec);
  3499                                 push_dr(R_EDX, FRn);
  3500                                 FCHS_st0();
  3501                                 pop_dr(R_EDX, FRn);
  3502                                 JMP_TARGET(end);
  3503                                 sh4_x86.tstate = TSTATE_NONE;
  3505                                 break;
  3506                             case 0x5:
  3507                                 { /* FABS FRn */
  3508                                 uint32_t FRn = ((ir>>8)&0xF); 
  3509                                 check_fpuen();
  3510                                 load_spreg( R_ECX, R_FPSCR );
  3511                                 load_fr_bank( R_EDX );
  3512                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3513                                 JNE_rel8(10, doubleprec);
  3514                                 push_fr(R_EDX, FRn); // 3
  3515                                 FABS_st0(); // 2
  3516                                 pop_fr( R_EDX, FRn); //3
  3517                                 JMP_rel8(8,end); // 2
  3518                                 JMP_TARGET(doubleprec);
  3519                                 push_dr(R_EDX, FRn);
  3520                                 FABS_st0();
  3521                                 pop_dr(R_EDX, FRn);
  3522                                 JMP_TARGET(end);
  3523                                 sh4_x86.tstate = TSTATE_NONE;
  3524                                 }
  3525                                 break;
  3526                             case 0x6:
  3527                                 { /* FSQRT FRn */
  3528                                 uint32_t FRn = ((ir>>8)&0xF); 
  3529                                 check_fpuen();
  3530                                 load_spreg( R_ECX, R_FPSCR );
  3531                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3532                                 load_fr_bank( R_EDX );
  3533                                 JNE_rel8(10, doubleprec);
  3534                                 push_fr(R_EDX, FRn);
  3535                                 FSQRT_st0();
  3536                                 pop_fr(R_EDX, FRn);
  3537                                 JMP_rel8(8, end);
  3538                                 JMP_TARGET(doubleprec);
  3539                                 push_dr(R_EDX, FRn);
  3540                                 FSQRT_st0();
  3541                                 pop_dr(R_EDX, FRn);
  3542                                 JMP_TARGET(end);
  3543                                 sh4_x86.tstate = TSTATE_NONE;
  3544                                 }
  3545                                 break;
  3546                             case 0x7:
  3547                                 { /* FSRRA FRn */
  3548                                 uint32_t FRn = ((ir>>8)&0xF); 
  3549                                 check_fpuen();
  3550                                 load_spreg( R_ECX, R_FPSCR );
  3551                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3552                                 load_fr_bank( R_EDX );
  3553                                 JNE_rel8(12, end); // PR=0 only
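       /* annotation: FSRRA is defined only for PR=0, hence the skip above when
          FPSCR.PR is set; the sequence below computes 1.0 / sqrt(FRn) on the x87
          stack as the reciprocal-square-root approximation. */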
  3554                                 FLD1_st0();
  3555                                 push_fr(R_EDX, FRn);
  3556                                 FSQRT_st0();
  3557                                 FDIVP_st(1);
  3558                                 pop_fr(R_EDX, FRn);
  3559                                 JMP_TARGET(end);
  3560                                 sh4_x86.tstate = TSTATE_NONE;
  3561                                 }
  3562                                 break;
  3563                             case 0x8:
  3564                                 { /* FLDI0 FRn */
  3565                                 uint32_t FRn = ((ir>>8)&0xF); 
  3566                                 /* IFF PR=0 */
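       /* annotation: FLDI0 stores the single-precision constant 0.0f into FRn
          (FLDI1 below stores 1.0f, bit pattern 0x3F800000); both are defined only
          when PR=0, hence the test on FPSCR.PR. */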
  3567                                   check_fpuen();
  3568                                   load_spreg( R_ECX, R_FPSCR );
  3569                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3570                                   JNE_rel8(8, end);
  3571                                   XOR_r32_r32( R_EAX, R_EAX );
  3572                                   load_spreg( R_ECX, REG_OFFSET(fr_bank) );
  3573                                   store_fr( R_ECX, R_EAX, FRn );
  3574                                   JMP_TARGET(end);
  3575                                   sh4_x86.tstate = TSTATE_NONE;
  3576                                 }
  3577                                 break;
  3578                             case 0x9:
  3579                                 { /* FLDI1 FRn */
  3580                                 uint32_t FRn = ((ir>>8)&0xF); 
  3581                                 /* IFF PR=0 */
  3582                                   check_fpuen();
  3583                                   load_spreg( R_ECX, R_FPSCR );
  3584                                   TEST_imm32_r32( FPSCR_PR, R_ECX );
  3585                                   JNE_rel8(11, end);
  3586                                   load_imm32(R_EAX, 0x3F800000);
  3587                                   load_spreg( R_ECX, REG_OFFSET(fr_bank) );
  3588                                   store_fr( R_ECX, R_EAX, FRn );
  3589                                   JMP_TARGET(end);
  3590                                   sh4_x86.tstate = TSTATE_NONE;
  3591                                 }
  3592                                 break;
  3593                             case 0xA:
  3594                                 { /* FCNVSD FPUL, FRn */
  3595                                 uint32_t FRn = ((ir>>8)&0xF); 
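       /* annotation: FCNVSD widens the single-precision value in FPUL to the
          double-precision pair DRn (PR=1 only); the x87 stack performs the
          conversion between push_fpul() and pop_dr(). FCNVDS below is the
          narrowing counterpart. */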
  3596                                 check_fpuen();
  3597                                 load_spreg( R_ECX, R_FPSCR );
  3598                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3599                                 JE_rel8(9, end); // only when PR=1
  3600                                 load_fr_bank( R_ECX );
  3601                                 push_fpul();
  3602                                 pop_dr( R_ECX, FRn );
  3603                                 JMP_TARGET(end);
  3604                                 sh4_x86.tstate = TSTATE_NONE;
  3605                                 }
  3606                                 break;
  3607                             case 0xB:
  3608                                 { /* FCNVDS FRm, FPUL */
  3609                                 uint32_t FRm = ((ir>>8)&0xF); 
  3610                                 check_fpuen();
  3611                                 load_spreg( R_ECX, R_FPSCR );
  3612                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3613                                 JE_rel8(9, end); // only when PR=1
  3614                                 load_fr_bank( R_ECX );
  3615                                 push_dr( R_ECX, FRm );
  3616                                 pop_fpul();
  3617                                 JMP_TARGET(end);
  3618                                 sh4_x86.tstate = TSTATE_NONE;
  3619                                 }
  3620                                 break;
  3621                             case 0xE:
  3622                                 { /* FIPR FVm, FVn */
  3623                                 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3); 
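       /* annotation: FIPR accumulates the single-precision dot product
          sum(FVm[i]*FVn[i], i=0..3) on the x87 stack and stores it into FVn[3];
          when PR=1 the whole computation is skipped (the doubleprec label sits
          just past it). */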
  3624                                 check_fpuen();
  3625                                 load_spreg( R_ECX, R_FPSCR );
  3626                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3627                                 JNE_rel8(44, doubleprec);
  3629                                 load_fr_bank( R_ECX );
  3630                                 push_fr( R_ECX, FVm<<2 );
  3631                                 push_fr( R_ECX, FVn<<2 );
  3632                                 FMULP_st(1);
  3633                                 push_fr( R_ECX, (FVm<<2)+1);
  3634                                 push_fr( R_ECX, (FVn<<2)+1);
  3635                                 FMULP_st(1);
  3636                                 FADDP_st(1);
  3637                                 push_fr( R_ECX, (FVm<<2)+2);
  3638                                 push_fr( R_ECX, (FVn<<2)+2);
  3639                                 FMULP_st(1);
  3640                                 FADDP_st(1);
  3641                                 push_fr( R_ECX, (FVm<<2)+3);
  3642                                 push_fr( R_ECX, (FVn<<2)+3);
  3643                                 FMULP_st(1);
  3644                                 FADDP_st(1);
  3645                                 pop_fr( R_ECX, (FVn<<2)+3);
  3646                                 JMP_TARGET(doubleprec);
  3647                                 sh4_x86.tstate = TSTATE_NONE;
  3648                                 }
  3649                                 break;
  3650                             case 0xF:
  3651                                 switch( (ir&0x100) >> 8 ) {
  3652                                     case 0x0:
  3653                                         { /* FSCA FPUL, FRn */
  3654                                         uint32_t FRn = ((ir>>9)&0x7)<<1; 
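               /* annotation: FSCA writes the sine and cosine of the FPUL angle
                  (in 1/65536ths of a full turn) to FRn and FRn+1; R_ECX is the
                  destination address and the work is delegated to the sh4_fsca()
                  helper. PR=0 only. */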
  3655                                         check_fpuen();
  3656                                         load_spreg( R_ECX, R_FPSCR );
  3657                                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3658                                         JNE_rel8( CALL_FUNC2_SIZE + 9, doubleprec );
  3659                                         load_fr_bank( R_ECX );
  3660                                         ADD_imm8s_r32( (FRn&0x0E)<<2, R_ECX );
  3661                                         load_spreg( R_EDX, R_FPUL );
  3662                                         call_func2( sh4_fsca, R_EDX, R_ECX );
  3663                                         JMP_TARGET(doubleprec);
  3664                                         sh4_x86.tstate = TSTATE_NONE;
  3665                                         }
  3666                                         break;
  3667                                     case 0x1:
  3668                                         switch( (ir&0x200) >> 9 ) {
  3669                                             case 0x0:
  3670                                                 { /* FTRV XMTRX, FVn */
  3671                                                 uint32_t FVn = ((ir>>10)&0x3); 
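                       /* annotation: FTRV multiplies the 4x4 matrix XMTRX (held in
                          the other floating-point bank, via load_xf_bank) by the
                          vector FVn in place, delegating to the sh4_ftrv() helper.
                          PR=0 only. */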
  3672                                                 check_fpuen();
  3673                                                 load_spreg( R_ECX, R_FPSCR );
  3674                                                 TEST_imm32_r32( FPSCR_PR, R_ECX );
  3675                                                 JNE_rel8( 18 + CALL_FUNC2_SIZE, doubleprec );
  3676                                                 load_fr_bank( R_EDX );                 // 3
  3677                                                 ADD_imm8s_r32( FVn<<4, R_EDX );        // 3
  3678                                                 load_xf_bank( R_ECX );                 // 12
  3679                                                 call_func2( sh4_ftrv, R_EDX, R_ECX );  // 12
  3680                                                 JMP_TARGET(doubleprec);
  3681                                                 sh4_x86.tstate = TSTATE_NONE;
  3682                                                 }
  3683                                                 break;
  3684                                             case 0x1:
  3685                                                 switch( (ir&0xC00) >> 10 ) {
  3686                                                     case 0x0:
  3687                                                         { /* FSCHG */
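                               /* annotation: FSCHG toggles FPSCR.SZ (the FMOV
                                  transfer size); FRCHG below toggles FPSCR.FR and
                                  calls update_fr_bank() so the cached fr_bank
                                  pointer tracks the bank switch. */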
  3688                                                         check_fpuen();
  3689                                                         load_spreg( R_ECX, R_FPSCR );
  3690                                                         XOR_imm32_r32( FPSCR_SZ, R_ECX );
  3691                                                         store_spreg( R_ECX, R_FPSCR );
  3692                                                         sh4_x86.tstate = TSTATE_NONE;
  3693                                                         }
  3694                                                         break;
  3695                                                     case 0x2:
  3696                                                         { /* FRCHG */
  3697                                                         check_fpuen();
  3698                                                         load_spreg( R_ECX, R_FPSCR );
  3699                                                         XOR_imm32_r32( FPSCR_FR, R_ECX );
  3700                                                         store_spreg( R_ECX, R_FPSCR );
  3701                                                         update_fr_bank( R_ECX );
  3702                                                         sh4_x86.tstate = TSTATE_NONE;
  3703                                                         }
  3704                                                         break;
  3705                                                     case 0x3:
  3706                                                         { /* UNDEF */
  3707                                                         if( sh4_x86.in_delay_slot ) {
  3708                                                     	SLOTILLEGAL();
  3709                                                         } else {
  3710                                                     	precheck();
  3711                                                     	JMP_exit(EXIT_ILLEGAL);
  3712                                                     	return 2;
  3713                                                         }
  3714                                                         }
  3715                                                         break;
  3716                                                     default:
  3717                                                         UNDEF();
  3718                                                         break;
  3719                                                 }
  3720                                                 break;
  3721                                         }
  3722                                         break;
  3723                                 }
  3724                                 break;
  3725                             default:
  3726                                 UNDEF();
  3727                                 break;
  3728                         }
  3729                         break;
  3730                     case 0xE:
  3731                         { /* FMAC FR0, FRm, FRn */
  3732                         uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF); 
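       /* annotation: FMAC computes FRn := FR0 * FRm + FRn (the DR equivalents when
          PR=1) entirely on the x87 stack: push FR0 and FRm, FMULP, push FRn,
          FADDP, then pop the result back into FRn. */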
  3733                         check_fpuen();
  3734                         load_spreg( R_ECX, R_FPSCR );
  3735                         load_spreg( R_EDX, REG_OFFSET(fr_bank));
  3736                         TEST_imm32_r32( FPSCR_PR, R_ECX );
  3737                         JNE_rel8(18, doubleprec);
  3738                         push_fr( R_EDX, 0 );
  3739                         push_fr( R_EDX, FRm );
  3740                         FMULP_st(1);
  3741                         push_fr( R_EDX, FRn );
  3742                         FADDP_st(1);
  3743                         pop_fr( R_EDX, FRn );
  3744                         JMP_rel8(16, end);
  3745                         JMP_TARGET(doubleprec);
  3746                         push_dr( R_EDX, 0 );
  3747                         push_dr( R_EDX, FRm );
  3748                         FMULP_st(1);
  3749                         push_dr( R_EDX, FRn );
  3750                         FADDP_st(1);
  3751                         pop_dr( R_EDX, FRn );
  3752                         JMP_TARGET(end);
  3753                         sh4_x86.tstate = TSTATE_NONE;
  3754                         }
  3755                         break;
  3756                     default:
  3757                         UNDEF();
  3758                         break;
  3759                 }
  3760                 break;
  3763     sh4_x86.in_delay_slot = FALSE;
  3764     return 0;