Search
lxdream.org :: lxdream/src/sh4/sh4x86.in
lxdream 0.9.1
released Jun 29
Download Now
filename src/sh4/sh4x86.in
changeset 368:36fac4c42322
prev361:be3de4ecd954
next374:8f80a795513e
author nkeynes
date Tue Sep 04 08:40:23 2007 +0000 (13 years ago)
permissions -rw-r--r--
last change More translator WIP - blocks are approaching something sane
view annotate diff log raw
     1 /**
     2  * $Id: sh4x86.in,v 1.3 2007-09-04 08:40:23 nkeynes Exp $
     3  * 
     4  * SH4 => x86 translation. This version does no real optimization, it just
     5  * outputs straight-line x86 code - it mainly exists to provide a baseline
     6  * to test the optimizing versions against.
     7  *
     8  * Copyright (c) 2007 Nathan Keynes.
     9  *
    10  * This program is free software; you can redistribute it and/or modify
    11  * it under the terms of the GNU General Public License as published by
    12  * the Free Software Foundation; either version 2 of the License, or
    13  * (at your option) any later version.
    14  *
    15  * This program is distributed in the hope that it will be useful,
    16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
    17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    18  * GNU General Public License for more details.
    19  */
    21 #include <assert.h>
    23 #include "sh4/sh4core.h"
    24 #include "sh4/sh4trans.h"
    25 #include "sh4/x86op.h"
    26 #include "clock.h"
    28 #define DEFAULT_BACKPATCH_SIZE 4096
/** 
 * Struct to manage internal translation state. This state is not saved -
 * it is only valid between calls to sh4_translate_begin_block() and
 * sh4_translate_end_block()
 */
struct sh4_x86_state {
    gboolean in_delay_slot;     /* TRUE while translating a branch delay slot */
    gboolean priv_checked; /* true if we've already checked the cpu mode. */
    gboolean fpuen_checked; /* true if we've already checked fpu enabled. */

    /* Allocated memory for the (block-wide) back-patch list */
    uint32_t **backpatch_list;
    uint32_t backpatch_posn;    /* number of entries currently recorded */
    uint32_t backpatch_size;    /* capacity of backpatch_list, in entries */
};
/* Byte offsets of the exception exit stubs emitted at the end of each
 * translated block (see sh4_translate_end_block()).  The stride of 7
 * matches the stub size: a 5-byte PUSH imm32 plus a 2-byte JMP rel8. */
#define EXIT_DATA_ADDR_READ 0
#define EXIT_DATA_ADDR_WRITE 7
#define EXIT_ILLEGAL 14
#define EXIT_SLOT_ILLEGAL 21
#define EXIT_FPU_DISABLED 28
#define EXIT_SLOT_FPU_DISABLED 35
    53 static struct sh4_x86_state sh4_x86;
    55 void sh4_x86_init()
    56 {
    57     sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
    58     sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
    59 }
    62 static void sh4_x86_add_backpatch( uint8_t *ptr )
    63 {
    64     if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
    65 	sh4_x86.backpatch_size <<= 1;
    66 	sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
    67 	assert( sh4_x86.backpatch_list != NULL );
    68     }
    69     sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
    70 }
/**
 * Fix up every recorded rel32 displacement so that it resolves relative
 * to reloc_base (the start of the exception stub area).  Each entry holds
 * an EXIT_* offset — presumably stored by the JE_exit/JNE_exit emitters,
 * TODO confirm — and adding (reloc_base - field_address) rebases that
 * value into a correct PC-relative displacement.
 */
static void sh4_x86_do_backpatch( uint8_t *reloc_base )
{
    unsigned int i;
    for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
	*sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]));
    }
}
/* Debug aids for hand-counted jump distances: MARK_JMP records where a
 * relative jump is expected to land (xlat_output + n), and CHECK_JMP
 * asserts that emission actually arrived there.  Both compile away under
 * NDEBUG. */
#ifndef NDEBUG
#define MARK_JMP(x,n) uint8_t *_mark_jmp_##x = xlat_output + n
#define CHECK_JMP(x) assert( _mark_jmp_##x == xlat_output )
#else
#define MARK_JMP(x,n)
#define CHECK_JMP(x)
#endif
/**
 * Emit an instruction to load an SH4 reg into a real register.
 * Encodes "mov x86reg, disp8[ebp]" — ebp holds &sh4r for the duration of
 * a translated block (see sh4_translate_begin_block()).
 */
static inline void load_reg( int x86reg, int sh4reg ) 
{
    /* mov [bp+n], reg */
    OP(0x8B);                      /* MOV r32, r/m32 */
    OP(0x45 + (x86reg<<3));        /* ModRM: mod=01 reg=x86reg rm=101 (ebp+disp8) */
    OP(REG_OFFSET(r[sh4reg]));     /* disp8 = offset of r[sh4reg] in sh4r */
}
/**
 * Load the SR register into an x86 register
 *
 * SR is kept unpacked: M, Q, S and T live in their own state fields, and
 * R_SR holds the remaining bits (write_sr() masks M/Q/S/T out before
 * storing).  The shift/OR sequence below reassembles the architectural
 * value: M lands in bit 9, Q in bit 8, S in bit 1, T in bit 0.
 */
static inline void read_sr( int x86reg )
{
    MOV_ebp_r32( R_M, x86reg );
    SHL1_r32( x86reg );          /* reg = M<<1 */
    OR_ebp_r32( R_Q, x86reg );   /* reg = M<<1 | Q */
    SHL_imm8_r32( 7, x86reg );   /* reg = M<<8 | Q<<7 */
    OR_ebp_r32( R_S, x86reg );
    SHL1_r32( x86reg );          /* reg = M<<9 | Q<<8 | S<<1 */
    OR_ebp_r32( R_T, x86reg );
    OR_ebp_r32( R_SR, x86reg );  /* merge the non-flag bits */
}
/**
 * Store an x86 register into the (unpacked) SR representation: each of
 * the M/Q/S/T bits is tested and written to its own state field, then
 * the flag bits are masked off before storing the remainder in R_SR.
 * (SR_MQSTMASK presumably clears M/Q/S/T — consistent with read_sr()
 * ORing the parts back in; confirm against the header.)
 * Clobbers x86reg.
 */
static inline void write_sr( int x86reg )
{
    TEST_imm32_r32( SR_M, x86reg );
    SETNE_ebp(R_M);
    TEST_imm32_r32( SR_Q, x86reg );
    SETNE_ebp(R_Q);
    TEST_imm32_r32( SR_S, x86reg );
    SETNE_ebp(R_S);
    TEST_imm32_r32( SR_T, x86reg );
    SETNE_ebp(R_T);
    AND_imm32_r32( SR_MQSTMASK, x86reg );
    MOV_r32_ebp( x86reg, R_SR );
}
/**
 * Emit an instruction to load a CPU state field — identified by its byte
 * offset from &sh4r, e.g. R_GBR — into a real register.  Same encoding
 * as load_reg(), but with a caller-supplied displacement.
 */
static inline void load_spreg( int x86reg, int regoffset )
{
    /* mov [bp+n], reg */
    OP(0x8B);
    OP(0x45 + (x86reg<<3));
    OP(regoffset);
}
/**
 * Emit an instruction to load an immediate value into a register
 */
static inline void load_imm32( int x86reg, uint32_t value ) {
    /* mov #value, reg  (opcode B8+r, imm32) */
    OP(0xB8 + x86reg);
    OP32(value);
}
   147 /**
   148  * Emit an instruction to store an SH4 reg (RN)
   149  */
   150 void static inline store_reg( int x86reg, int sh4reg ) {
   151     /* mov reg, [bp+n] */
   152     OP(0x89);
   153     OP(0x45 + (x86reg<<3));
   154     OP(REG_OFFSET(r[sh4reg]));
   155 }
   156 void static inline store_spreg( int x86reg, int regoffset ) {
   157     /* mov reg, [bp+n] */
   158     OP(0x89);
   159     OP(0x45 + (x86reg<<3));
   160     OP(regoffset);
   161 }
/**
 * Note: clobbers EAX to make the indirect call - this isn't usually
 * a problem since the callee will usually clobber it anyway.
 * Emits "mov eax, ptr; call eax".
 */
static inline void call_func0( void *ptr )
{
    load_imm32(R_EAX, (uint32_t)ptr);
    CALL_r32(R_EAX);
}
   173 static inline void call_func1( void *ptr, int arg1 )
   174 {
   175     PUSH_r32(arg1);
   176     call_func0(ptr);
   177     ADD_imm8s_r32( -4, R_ESP );
   178 }
   180 static inline void call_func2( void *ptr, int arg1, int arg2 )
   181 {
   182     PUSH_r32(arg2);
   183     PUSH_r32(arg1);
   184     call_func0(ptr);
   185     ADD_imm8s_r32( -4, R_ESP );
   186 }
/* Exception checks - Note that all exception checks will clobber EAX */

/**
 * Emit a privileged-mode check, at most once per block (priv_checked
 * caches that the test has already been emitted).  Branches to the
 * (slot-)illegal exit stub when SR.MD is clear, ie user mode.
 */
static void check_priv( )
{
    if( !sh4_x86.priv_checked ) {
	sh4_x86.priv_checked = TRUE;
	load_spreg( R_EAX, R_SR );
	AND_imm32_r32( SR_MD, R_EAX );
	if( sh4_x86.in_delay_slot ) {
	    JE_exit( EXIT_SLOT_ILLEGAL );
	} else {
	    JE_exit( EXIT_ILLEGAL );
	}
    }
}
/**
 * Emit an FPU-enabled check, at most once per block (fpuen_checked caches
 * that the test has been emitted).  Branches to the (slot-)FPU-disabled
 * exit stub when SR.FD is set.  Clobbers EAX.
 */
static void check_fpuen( )
{
    if( !sh4_x86.fpuen_checked ) {
	sh4_x86.fpuen_checked = TRUE;
	load_spreg( R_EAX, R_SR );
	AND_imm32_r32( SR_FD, R_EAX );
	if( sh4_x86.in_delay_slot ) {
	    JNE_exit(EXIT_SLOT_FPU_DISABLED);
	} else {
	    JNE_exit(EXIT_FPU_DISABLED);
	}
    }
}
/**
 * Emit a 16-bit read-alignment check: branch to the data-address-read
 * exit stub if the address in x86reg has its low bit set.
 */
static void check_ralign16( int x86reg )
{
    TEST_imm32_r32( 0x00000001, x86reg );
    JNE_exit(EXIT_DATA_ADDR_READ);
}
/**
 * Emit a 16-bit write-alignment check: branch to the data-address-write
 * exit stub if the address in x86reg has its low bit set.
 */
static void check_walign16( int x86reg )
{
    TEST_imm32_r32( 0x00000001, x86reg );
    JNE_exit(EXIT_DATA_ADDR_WRITE);
}
/**
 * Emit a 32-bit read-alignment check: branch to the data-address-read
 * exit stub if either of the two low address bits is set.
 */
static void check_ralign32( int x86reg )
{
    TEST_imm32_r32( 0x00000003, x86reg );
    JNE_exit(EXIT_DATA_ADDR_READ);
}
/**
 * Emit a 32-bit write-alignment check: branch to the data-address-write
 * exit stub if either of the two low address bits is set.
 */
static void check_walign32( int x86reg )
{
    TEST_imm32_r32( 0x00000003, x86reg );
    JNE_exit(EXIT_DATA_ADDR_WRITE);
}
   241 #define UNDEF()
   242 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
   243 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
   244 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
   245 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
   246 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
   247 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
   248 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
   250 #define RAISE_EXCEPTION( exc ) call_func1(sh4_raise_exception, exc);
   251 #define CHECKSLOTILLEGAL() if(sh4_x86.in_delay_slot) RAISE_EXCEPTION(EXC_SLOT_ILLEGAL)
/**
 * Emit the 'start of block' assembly. Sets up the stack frame and save
 * SI/DI as required
 */
void sh4_translate_begin_block() 
{
    PUSH_r32(R_EBP);
    PUSH_r32(R_ESI);
    /* mov &sh4r, ebp — ebp addresses the CPU state for the whole block */
    load_imm32( R_EBP, (uint32_t)&sh4r );
    PUSH_r32(R_ESI);
    /* NOTE(review): ESI is pushed twice; the header comment promises
     * SI/DI, so the second push was presumably meant to be R_EDI —
     * confirm.  Also note no exit path currently pops these (exit_block()
     * RETs directly), so the stack looks unbalanced. */

    /* Reset per-block translation state */
    sh4_x86.in_delay_slot = FALSE;
    sh4_x86.priv_checked = FALSE;
    sh4_x86.fpuen_checked = FALSE;
    sh4_x86.backpatch_posn = 0;
}
/**
 * Exit the block early (ie branch out), conditionally or otherwise
 *
 * Saves the target PC into the CPU state, accounts elapsed time
 * (slice_cycle += sh4_cpu_period * ESI; ESI appears to carry the block's
 * instruction count — TODO confirm the register convention), and returns
 * 0 to the caller of the translated code.
 */
void exit_block( uint32_t pc )
{
    load_imm32( R_ECX, pc );
    store_spreg( R_ECX, REG_OFFSET(pc) );
    MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
    load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    MUL_r32( R_ESI );                 /* EAX = sh4_cpu_period * ESI */
    ADD_r32_r32( R_EAX, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    XOR_r32_r32( R_EAX, R_EAX );      /* return value 0 */
    RET();
    /* NOTE(review): RET without popping the registers pushed in
     * sh4_translate_begin_block() — looks unbalanced; confirm. */
}
/**
 * Flush any open regs back to memory, restore SI/DI/, update PC, etc
 */
void sh4_translate_end_block( sh4addr_t pc ) {
    assert( !sh4_x86.in_delay_slot ); // should never stop here
    // Normal termination - save PC, cycle count
    exit_block( pc );

    uint8_t *end_ptr = xlat_output;
    // Exception termination. Jump block for various exception codes:
    // Six 7-byte stubs (5-byte PUSH imm32 + 2-byte JMP rel8), matching the
    // EXIT_* offsets; each rel8 displacement is hand-counted to land on the
    // common raise-exception tail below (MARK_JMP/CHECK_JMP would verify
    // these counts in debug builds).
    PUSH_imm32( EXC_DATA_ADDR_READ );
    JMP_rel8( 33 );
    PUSH_imm32( EXC_DATA_ADDR_WRITE );
    JMP_rel8( 26 );
    PUSH_imm32( EXC_ILLEGAL );
    JMP_rel8( 19 );
    PUSH_imm32( EXC_SLOT_ILLEGAL ); 
    JMP_rel8( 12 );
    PUSH_imm32( EXC_FPU_DISABLED ); 
    JMP_rel8( 5 );                 
    PUSH_imm32( EXC_SLOT_FPU_DISABLED );
    // target
    // Common tail: pc += 2*ESI (ESI presumably holds the block's
    // instruction count, 2 bytes per SH4 instruction — confirm), then
    // account elapsed cycles exactly as exit_block() does.
    load_spreg( R_ECX, REG_OFFSET(pc) );
    ADD_r32_r32( R_ESI, R_ECX );
    ADD_r32_r32( R_ESI, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(pc) );
    MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
    load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    MUL_r32( R_ESI );
    ADD_r32_r32( R_EAX, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(slice_cycle) );

    // Raise the pushed exception code (argument is already on the stack)
    load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
    CALL_r32( R_EAX ); // 2
    POP_r32(R_EBP);
    RET();

    // Rebase every conditional-exit displacement recorded during
    // translation so it targets the stub area starting at end_ptr.
    sh4_x86_do_backpatch( end_ptr );
}
/**
 * Translate a single instruction. Delayed branches are handled specially
 * by translating both the branch and the delayed instruction as a single
 * unit (since the branch cannot complete until the delay slot has been
 * executed).
 *
 * @return true if the instruction marks the end of a basic block
 * (eg a branch or other change of control flow).
 */
   337 uint32_t sh4_x86_translate_instruction( uint32_t pc )
   338 {
   339     uint16_t ir = sh4_read_word( pc );
   341 %%
   342 /* ALU operations */
   343 ADD Rm, Rn {:
   344     load_reg( R_EAX, Rm );
   345     load_reg( R_ECX, Rn );
   346     ADD_r32_r32( R_EAX, R_ECX );
   347     store_reg( R_ECX, Rn );
   348 :}
   349 ADD #imm, Rn {:  
   350     load_reg( R_EAX, Rn );
   351     ADD_imm8s_r32( imm, R_EAX );
   352     store_reg( R_EAX, Rn );
   353 :}
   354 ADDC Rm, Rn {:
   355     load_reg( R_EAX, Rm );
   356     load_reg( R_ECX, Rn );
   357     LDC_t();
   358     ADC_r32_r32( R_EAX, R_ECX );
   359     store_reg( R_ECX, Rn );
   360     SETC_t();
   361 :}
   362 ADDV Rm, Rn {:
   363     load_reg( R_EAX, Rm );
   364     load_reg( R_ECX, Rn );
   365     ADD_r32_r32( R_EAX, R_ECX );
   366     store_reg( R_ECX, Rn );
   367     SETO_t();
   368 :}
   369 AND Rm, Rn {:
   370     load_reg( R_EAX, Rm );
   371     load_reg( R_ECX, Rn );
   372     AND_r32_r32( R_EAX, R_ECX );
   373     store_reg( R_ECX, Rn );
   374 :}
   375 AND #imm, R0 {:  
   376     load_reg( R_EAX, 0 );
   377     AND_imm32_r32(imm, R_EAX); 
   378     store_reg( R_EAX, 0 );
   379 :}
   380 AND.B #imm, @(R0, GBR) {: 
   381     load_reg( R_EAX, 0 );
   382     load_spreg( R_ECX, R_GBR );
   383     ADD_r32_r32( R_EAX, R_EBX );
   384     MEM_READ_BYTE( R_ECX, R_EAX );
   385     AND_imm32_r32(imm, R_ECX );
   386     MEM_WRITE_BYTE( R_ECX, R_EAX );
   387 :}
   388 CMP/EQ Rm, Rn {:  
   389     load_reg( R_EAX, Rm );
   390     load_reg( R_ECX, Rn );
   391     CMP_r32_r32( R_EAX, R_ECX );
   392     SETE_t();
   393 :}
   394 CMP/EQ #imm, R0 {:  
   395     load_reg( R_EAX, 0 );
   396     CMP_imm8s_r32(imm, R_EAX);
   397     SETE_t();
   398 :}
   399 CMP/GE Rm, Rn {:  
   400     load_reg( R_EAX, Rm );
   401     load_reg( R_ECX, Rn );
   402     CMP_r32_r32( R_EAX, R_ECX );
   403     SETGE_t();
   404 :}
   405 CMP/GT Rm, Rn {: 
   406     load_reg( R_EAX, Rm );
   407     load_reg( R_ECX, Rn );
   408     CMP_r32_r32( R_EAX, R_ECX );
   409     SETG_t();
   410 :}
   411 CMP/HI Rm, Rn {:  
   412     load_reg( R_EAX, Rm );
   413     load_reg( R_ECX, Rn );
   414     CMP_r32_r32( R_EAX, R_ECX );
   415     SETA_t();
   416 :}
   417 CMP/HS Rm, Rn {: 
   418     load_reg( R_EAX, Rm );
   419     load_reg( R_ECX, Rn );
   420     CMP_r32_r32( R_EAX, R_ECX );
   421     SETAE_t();
   422  :}
   423 CMP/PL Rn {: 
   424     load_reg( R_EAX, Rn );
   425     CMP_imm8s_r32( 0, R_EAX );
   426     SETG_t();
   427 :}
   428 CMP/PZ Rn {:  
   429     load_reg( R_EAX, Rn );
   430     CMP_imm8s_r32( 0, R_EAX );
   431     SETGE_t();
   432 :}
   433 CMP/STR Rm, Rn {:  
   434     load_reg( R_EAX, Rm );
   435     load_reg( R_ECX, Rn );
   436     XOR_r32_r32( R_ECX, R_EAX );
   437     TEST_r8_r8( R_AL, R_AL );
   438     JE_rel8(13);
   439     TEST_r8_r8( R_AH, R_AH ); // 2
   440     JE_rel8(9);
   441     SHR_imm8_r32( 16, R_EAX ); // 3
   442     TEST_r8_r8( R_AL, R_AL ); // 2
   443     JE_rel8(2);
   444     TEST_r8_r8( R_AH, R_AH ); // 2
   445     SETE_t();
   446 :}
   447 DIV0S Rm, Rn {:
   448     load_reg( R_EAX, Rm );
   449     load_reg( R_ECX, Rm );
   450     SHR_imm8_r32( 31, R_EAX );
   451     SHR_imm8_r32( 31, R_ECX );
   452     store_spreg( R_EAX, R_M );
   453     store_spreg( R_ECX, R_Q );
   454     CMP_r32_r32( R_EAX, R_ECX );
   455     SETE_t();
   456 :}
   457 DIV0U {:  
   458     XOR_r32_r32( R_EAX, R_EAX );
   459     store_spreg( R_EAX, R_Q );
   460     store_spreg( R_EAX, R_M );
   461     store_spreg( R_EAX, R_T );
   462 :}
   463 DIV1 Rm, Rn {:  :}
   464 DMULS.L Rm, Rn {:  
   465     load_reg( R_EAX, Rm );
   466     load_reg( R_ECX, Rn );
   467     IMUL_r32(R_ECX);
   468     store_spreg( R_EDX, R_MACH );
   469     store_spreg( R_EAX, R_MACL );
   470 :}
   471 DMULU.L Rm, Rn {:  
   472     load_reg( R_EAX, Rm );
   473     load_reg( R_ECX, Rn );
   474     MUL_r32(R_ECX);
   475     store_spreg( R_EDX, R_MACH );
   476     store_spreg( R_EAX, R_MACL );    
   477 :}
   478 DT Rn {:  
   479     load_reg( R_EAX, Rn );
   480     ADD_imm8s_r32( -1, Rn );
   481     store_reg( R_EAX, Rn );
   482     SETE_t();
   483 :}
   484 EXTS.B Rm, Rn {:  
   485     load_reg( R_EAX, Rm );
   486     MOVSX_r8_r32( R_EAX, R_EAX );
   487     store_reg( R_EAX, Rn );
   488 :}
   489 EXTS.W Rm, Rn {:  
   490     load_reg( R_EAX, Rm );
   491     MOVSX_r16_r32( R_EAX, R_EAX );
   492     store_reg( R_EAX, Rn );
   493 :}
   494 EXTU.B Rm, Rn {:  
   495     load_reg( R_EAX, Rm );
   496     MOVZX_r8_r32( R_EAX, R_EAX );
   497     store_reg( R_EAX, Rn );
   498 :}
   499 EXTU.W Rm, Rn {:  
   500     load_reg( R_EAX, Rm );
   501     MOVZX_r16_r32( R_EAX, R_EAX );
   502     store_reg( R_EAX, Rn );
   503 :}
   504 MAC.L @Rm+, @Rn+ {:  :}
   505 MAC.W @Rm+, @Rn+ {:  :}
   506 MOVT Rn {:  
   507     load_spreg( R_EAX, R_T );
   508     store_reg( R_EAX, Rn );
   509 :}
   510 MUL.L Rm, Rn {:  
   511     load_reg( R_EAX, Rm );
   512     load_reg( R_ECX, Rn );
   513     MUL_r32( R_ECX );
   514     store_spreg( R_EAX, R_MACL );
   515 :}
   516 MULS.W Rm, Rn {:  
   517 :}
   518 MULU.W Rm, Rn {:  :}
   519 NEG Rm, Rn {:
   520     load_reg( R_EAX, Rm );
   521     NEG_r32( R_EAX );
   522     store_reg( R_EAX, Rn );
   523 :}
   524 NEGC Rm, Rn {:  
   525     load_reg( R_EAX, Rm );
   526     XOR_r32_r32( R_ECX, R_ECX );
   527     LDC_t();
   528     SBB_r32_r32( R_EAX, R_ECX );
   529     store_reg( R_ECX, Rn );
   530     SETC_t();
   531 :}
   532 NOT Rm, Rn {:  
   533     load_reg( R_EAX, Rm );
   534     NOT_r32( R_EAX );
   535     store_reg( R_EAX, Rn );
   536 :}
   537 OR Rm, Rn {:  
   538     load_reg( R_EAX, Rm );
   539     load_reg( R_ECX, Rn );
   540     OR_r32_r32( R_EAX, R_ECX );
   541     store_reg( R_ECX, Rn );
   542 :}
   543 OR #imm, R0 {:
   544     load_reg( R_EAX, 0 );
   545     OR_imm32_r32(imm, R_EAX);
   546     store_reg( R_EAX, 0 );
   547 :}
   548 OR.B #imm, @(R0, GBR) {:  :}
   549 ROTCL Rn {:
   550     load_reg( R_EAX, Rn );
   551     LDC_t();
   552     RCL1_r32( R_EAX );
   553     store_reg( R_EAX, Rn );
   554     SETC_t();
   555 :}
   556 ROTCR Rn {:  
   557     load_reg( R_EAX, Rn );
   558     LDC_t();
   559     RCR1_r32( R_EAX );
   560     store_reg( R_EAX, Rn );
   561     SETC_t();
   562 :}
   563 ROTL Rn {:  
   564     load_reg( R_EAX, Rn );
   565     ROL1_r32( R_EAX );
   566     store_reg( R_EAX, Rn );
   567     SETC_t();
   568 :}
   569 ROTR Rn {:  
   570     load_reg( R_EAX, Rn );
   571     ROR1_r32( R_EAX );
   572     store_reg( R_EAX, Rn );
   573     SETC_t();
   574 :}
   575 SHAD Rm, Rn {:
   576     /* Annoyingly enough, not directly convertible */
   577     load_reg( R_EAX, Rn );
   578     load_reg( R_ECX, Rm );
   579     CMP_imm32_r32( 0, R_ECX );
   580     JAE_rel8(9);
   582     NEG_r32( R_ECX );      // 2
   583     AND_imm8_r8( 0x1F, R_CL ); // 3
   584     SAR_r32_CL( R_EAX );       // 2
   585     JMP_rel8(5);               // 2
   587     AND_imm8_r8( 0x1F, R_CL ); // 3
   588     SHL_r32_CL( R_EAX );       // 2
   590     store_reg( R_EAX, Rn );
   591 :}
   592 SHLD Rm, Rn {:  
   593     load_reg( R_EAX, Rn );
   594     load_reg( R_ECX, Rm );
   596     MOV_r32_r32( R_EAX, R_EDX );
   597     SHL_r32_CL( R_EAX );
   598     NEG_r32( R_ECX );
   599     SHR_r32_CL( R_EDX );
   600     CMP_imm8s_r32( 0, R_ECX );
   601     CMOVAE_r32_r32( R_EDX,  R_EAX );
   602     store_reg( R_EAX, Rn );
   603 :}
   604 SHAL Rn {: 
   605     load_reg( R_EAX, Rn );
   606     SHL1_r32( R_EAX );
   607     store_reg( R_EAX, Rn );
   608 :}
   609 SHAR Rn {:  
   610     load_reg( R_EAX, Rn );
   611     SAR1_r32( R_EAX );
   612     store_reg( R_EAX, Rn );
   613 :}
   614 SHLL Rn {:  
   615     load_reg( R_EAX, Rn );
   616     SHL1_r32( R_EAX );
   617     store_reg( R_EAX, Rn );
   618 :}
   619 SHLL2 Rn {:
   620     load_reg( R_EAX, Rn );
   621     SHL_imm8_r32( 2, R_EAX );
   622     store_reg( R_EAX, Rn );
   623 :}
   624 SHLL8 Rn {:  
   625     load_reg( R_EAX, Rn );
   626     SHL_imm8_r32( 8, R_EAX );
   627     store_reg( R_EAX, Rn );
   628 :}
   629 SHLL16 Rn {:  
   630     load_reg( R_EAX, Rn );
   631     SHL_imm8_r32( 16, R_EAX );
   632     store_reg( R_EAX, Rn );
   633 :}
   634 SHLR Rn {:  
   635     load_reg( R_EAX, Rn );
   636     SHR1_r32( R_EAX );
   637     store_reg( R_EAX, Rn );
   638 :}
   639 SHLR2 Rn {:  
   640     load_reg( R_EAX, Rn );
   641     SHR_imm8_r32( 2, R_EAX );
   642     store_reg( R_EAX, Rn );
   643 :}
   644 SHLR8 Rn {:  
   645     load_reg( R_EAX, Rn );
   646     SHR_imm8_r32( 8, R_EAX );
   647     store_reg( R_EAX, Rn );
   648 :}
   649 SHLR16 Rn {:  
   650     load_reg( R_EAX, Rn );
   651     SHR_imm8_r32( 16, R_EAX );
   652     store_reg( R_EAX, Rn );
   653 :}
   654 SUB Rm, Rn {:  
   655     load_reg( R_EAX, Rm );
   656     load_reg( R_ECX, Rn );
   657     SUB_r32_r32( R_EAX, R_ECX );
   658     store_reg( R_ECX, Rn );
   659 :}
   660 SUBC Rm, Rn {:  
   661     load_reg( R_EAX, Rm );
   662     load_reg( R_ECX, Rn );
   663     LDC_t();
   664     SBB_r32_r32( R_EAX, R_ECX );
   665     store_reg( R_ECX, Rn );
   666 :}
   667 SUBV Rm, Rn {:  
   668     load_reg( R_EAX, Rm );
   669     load_reg( R_ECX, Rn );
   670     SUB_r32_r32( R_EAX, R_ECX );
   671     store_reg( R_ECX, Rn );
   672     SETO_t();
   673 :}
   674 SWAP.B Rm, Rn {:  
   675     load_reg( R_EAX, Rm );
   676     XCHG_r8_r8( R_AL, R_AH );
   677     store_reg( R_EAX, Rn );
   678 :}
   679 SWAP.W Rm, Rn {:  
   680     load_reg( R_EAX, Rm );
   681     MOV_r32_r32( R_EAX, R_ECX );
   682     SHL_imm8_r32( 16, R_ECX );
   683     SHR_imm8_r32( 16, R_EAX );
   684     OR_r32_r32( R_EAX, R_ECX );
   685     store_reg( R_ECX, Rn );
   686 :}
   687 TAS.B @Rn {:  
   688     load_reg( R_ECX, Rn );
   689     MEM_READ_BYTE( R_ECX, R_EAX );
   690     TEST_r8_r8( R_AL, R_AL );
   691     SETE_t();
   692     OR_imm8_r8( 0x80, R_AL );
   693     MEM_WRITE_BYTE( R_ECX, R_EAX );
   694 :}
   695 TST Rm, Rn {:  
   696     load_reg( R_EAX, Rm );
   697     load_reg( R_ECX, Rn );
   698     TEST_r32_r32( R_EAX, R_ECX );
   699     SETE_t();
   700 :}
   701 TST #imm, R0 {:  
   702     load_reg( R_EAX, 0 );
   703     TEST_imm32_r32( imm, R_EAX );
   704     SETE_t();
   705 :}
   706 TST.B #imm, @(R0, GBR) {:  
   707     load_reg( R_EAX, 0);
   708     load_reg( R_ECX, R_GBR);
   709     ADD_r32_r32( R_EAX, R_ECX );
   710     MEM_READ_BYTE( R_ECX, R_EAX );
   711     TEST_imm8_r8( imm, R_EAX );
   712     SETE_t();
   713 :}
   714 XOR Rm, Rn {:  
   715     load_reg( R_EAX, Rm );
   716     load_reg( R_ECX, Rn );
   717     XOR_r32_r32( R_EAX, R_ECX );
   718     store_reg( R_ECX, Rn );
   719 :}
   720 XOR #imm, R0 {:  
   721     load_reg( R_EAX, 0 );
   722     XOR_imm32_r32( imm, R_EAX );
   723     store_reg( R_EAX, 0 );
   724 :}
   725 XOR.B #imm, @(R0, GBR) {:  
   726     load_reg( R_EAX, 0 );
   727     load_spreg( R_ECX, R_GBR );
   728     ADD_r32_r32( R_EAX, R_ECX );
   729     MEM_READ_BYTE( R_ECX, R_EAX );
   730     XOR_imm32_r32( imm, R_EAX );
   731     MEM_WRITE_BYTE( R_ECX, R_EAX );
   732 :}
   733 XTRCT Rm, Rn {:
   734     load_reg( R_EAX, Rm );
   735     MOV_r32_r32( R_EAX, R_ECX );
   736     SHR_imm8_r32( 16, R_EAX );
   737     SHL_imm8_r32( 16, R_ECX );
   738     OR_r32_r32( R_EAX, R_ECX );
   739     store_reg( R_ECX, Rn );
   740 :}
   742 /* Data move instructions */
   743 MOV Rm, Rn {:  
   744     load_reg( R_EAX, Rm );
   745     store_reg( R_EAX, Rn );
   746 :}
   747 MOV #imm, Rn {:  
   748     load_imm32( R_EAX, imm );
   749     store_reg( R_EAX, Rn );
   750 :}
   751 MOV.B Rm, @Rn {:  
   752     load_reg( R_EAX, Rm );
   753     load_reg( R_ECX, Rn );
   754     MEM_WRITE_BYTE( R_ECX, R_EAX );
   755 :}
   756 MOV.B Rm, @-Rn {:  
   757     load_reg( R_EAX, Rm );
   758     load_reg( R_ECX, Rn );
   759     ADD_imm8s_r32( -1, Rn );
   760     store_reg( R_ECX, Rn );
   761     MEM_WRITE_BYTE( R_ECX, R_EAX );
   762 :}
   763 MOV.B Rm, @(R0, Rn) {:  
   764     load_reg( R_EAX, 0 );
   765     load_reg( R_ECX, Rn );
   766     ADD_r32_r32( R_EAX, R_ECX );
   767     load_reg( R_EAX, Rm );
   768     MEM_WRITE_BYTE( R_ECX, R_EAX );
   769 :}
   770 MOV.B R0, @(disp, GBR) {:  
   771     load_reg( R_EAX, 0 );
   772     load_spreg( R_ECX, R_GBR );
   773     ADD_imm32_r32( disp, R_ECX );
   774     MEM_WRITE_BYTE( R_ECX, R_EAX );
   775 :}
   776 MOV.B R0, @(disp, Rn) {:  
   777     load_reg( R_EAX, 0 );
   778     load_reg( R_ECX, Rn );
   779     ADD_imm32_r32( disp, R_ECX );
   780     MEM_WRITE_BYTE( R_ECX, R_EAX );
   781 :}
   782 MOV.B @Rm, Rn {:  
   783     load_reg( R_ECX, Rm );
   784     MEM_READ_BYTE( R_ECX, R_EAX );
   785     store_reg( R_ECX, Rn );
   786 :}
   787 MOV.B @Rm+, Rn {:  
   788     load_reg( R_ECX, Rm );
   789     MOV_r32_r32( R_ECX, R_EAX );
   790     ADD_imm8s_r32( 1, R_EAX );
   791     store_reg( R_EAX, Rm );
   792     MEM_READ_BYTE( R_ECX, R_EAX );
   793     store_reg( R_EAX, Rn );
   794 :}
   795 MOV.B @(R0, Rm), Rn {:  
   796     load_reg( R_EAX, 0 );
   797     load_reg( R_ECX, Rm );
   798     ADD_r32_r32( R_EAX, R_ECX );
   799     MEM_READ_BYTE( R_ECX, R_EAX );
   800     store_reg( R_EAX, Rn );
   801 :}
   802 MOV.B @(disp, GBR), R0 {:  
   803     load_spreg( R_ECX, R_GBR );
   804     ADD_imm32_r32( disp, R_ECX );
   805     MEM_READ_BYTE( R_ECX, R_EAX );
   806     store_reg( R_EAX, 0 );
   807 :}
   808 MOV.B @(disp, Rm), R0 {:  
   809     load_reg( R_ECX, Rm );
   810     ADD_imm32_r32( disp, R_ECX );
   811     MEM_READ_BYTE( R_ECX, R_EAX );
   812     store_reg( R_EAX, 0 );
   813 :}
   814 MOV.L Rm, @Rn {:  
   815     load_reg( R_EAX, Rm );
   816     load_reg( R_ECX, Rn );
   817     MEM_WRITE_LONG( R_ECX, R_EAX );
   818 :}
   819 MOV.L Rm, @-Rn {:  
   820     load_reg( R_EAX, Rm );
   821     load_reg( R_ECX, Rn );
   822     ADD_imm8s_r32( -4, R_ECX );
   823     store_reg( R_ECX, Rn );
   824     MEM_WRITE_LONG( R_ECX, R_EAX );
   825 :}
   826 MOV.L Rm, @(R0, Rn) {:  
   827     load_reg( R_EAX, 0 );
   828     load_reg( R_ECX, Rn );
   829     ADD_r32_r32( R_EAX, R_ECX );
   830     load_reg( R_EAX, Rm );
   831     MEM_WRITE_LONG( R_ECX, R_EAX );
   832 :}
   833 MOV.L R0, @(disp, GBR) {:  
   834     load_spreg( R_ECX, R_GBR );
   835     load_reg( R_EAX, 0 );
   836     ADD_imm32_r32( disp, R_ECX );
   837     MEM_WRITE_LONG( R_ECX, R_EAX );
   838 :}
   839 MOV.L Rm, @(disp, Rn) {:  
   840     load_reg( R_ECX, Rn );
   841     load_reg( R_EAX, Rm );
   842     ADD_imm32_r32( disp, R_ECX );
   843     MEM_WRITE_LONG( R_ECX, R_EAX );
   844 :}
   845 MOV.L @Rm, Rn {:  
   846     load_reg( R_ECX, Rm );
   847     MEM_READ_LONG( R_ECX, R_EAX );
   848     store_reg( R_EAX, Rn );
   849 :}
   850 MOV.L @Rm+, Rn {:  
   851     load_reg( R_EAX, Rm );
   852     MOV_r32_r32( R_EAX, R_ECX );
   853     ADD_imm8s_r32( 4, R_EAX );
   854     store_reg( R_EAX, Rm );
   855     MEM_READ_LONG( R_ECX, R_EAX );
   856     store_reg( R_EAX, Rn );
   857 :}
   858 MOV.L @(R0, Rm), Rn {:  
   859     load_reg( R_EAX, 0 );
   860     load_reg( R_ECX, Rm );
   861     ADD_r32_r32( R_EAX, R_ECX );
   862     MEM_READ_LONG( R_ECX, R_EAX );
   863     store_reg( R_EAX, Rn );
   864 :}
   865 MOV.L @(disp, GBR), R0 {:
   866     load_spreg( R_ECX, R_GBR );
   867     ADD_imm32_r32( disp, R_ECX );
   868     MEM_READ_LONG( R_ECX, R_EAX );
   869     store_reg( R_EAX, 0 );
   870 :}
   871 MOV.L @(disp, PC), Rn {:  
   872     load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
   873     MEM_READ_LONG( R_ECX, R_EAX );
   874     store_reg( R_EAX, 0 );
   875 :}
   876 MOV.L @(disp, Rm), Rn {:  
   877     load_reg( R_ECX, Rm );
   878     ADD_imm8s_r32( disp, R_ECX );
   879     MEM_READ_LONG( R_ECX, R_EAX );
   880     store_reg( R_EAX, Rn );
   881 :}
   882 MOV.W Rm, @Rn {:  
   883     load_reg( R_ECX, Rn );
   884     MEM_READ_WORD( R_ECX, R_EAX );
   885     store_reg( R_EAX, Rn );
   886 :}
   887 MOV.W Rm, @-Rn {:  
   888     load_reg( R_ECX, Rn );
   889     load_reg( R_EAX, Rm );
   890     ADD_imm8s_r32( -2, R_ECX );
   891     MEM_WRITE_WORD( R_ECX, R_EAX );
   892 :}
   893 MOV.W Rm, @(R0, Rn) {:  
   894     load_reg( R_EAX, 0 );
   895     load_reg( R_ECX, Rn );
   896     ADD_r32_r32( R_EAX, R_ECX );
   897     load_reg( R_EAX, Rm );
   898     MEM_WRITE_WORD( R_ECX, R_EAX );
   899 :}
   900 MOV.W R0, @(disp, GBR) {:  
   901     load_spreg( R_ECX, R_GBR );
   902     load_reg( R_EAX, 0 );
   903     ADD_imm32_r32( disp, R_ECX );
   904     MEM_WRITE_WORD( R_ECX, R_EAX );
   905 :}
   906 MOV.W R0, @(disp, Rn) {:  
   907     load_reg( R_ECX, Rn );
   908     load_reg( R_EAX, 0 );
   909     ADD_imm32_r32( disp, R_ECX );
   910     MEM_WRITE_WORD( R_ECX, R_EAX );
   911 :}
   912 MOV.W @Rm, Rn {:  
   913     load_reg( R_ECX, Rm );
   914     MEM_READ_WORD( R_ECX, R_EAX );
   915     store_reg( R_EAX, Rn );
   916 :}
   917 MOV.W @Rm+, Rn {:  
   918     load_reg( R_EAX, Rm );
   919     MOV_r32_r32( R_EAX, R_ECX );
   920     ADD_imm8s_r32( 2, R_EAX );
   921     store_reg( R_EAX, Rm );
   922     MEM_READ_WORD( R_ECX, R_EAX );
   923     store_reg( R_EAX, Rn );
   924 :}
   925 MOV.W @(R0, Rm), Rn {:  
   926     load_reg( R_EAX, 0 );
   927     load_reg( R_ECX, Rm );
   928     ADD_r32_r32( R_EAX, R_ECX );
   929     MEM_READ_WORD( R_ECX, R_EAX );
   930     store_reg( R_EAX, Rn );
   931 :}
   932 MOV.W @(disp, GBR), R0 {:  
   933     load_spreg( R_ECX, R_GBR );
   934     ADD_imm32_r32( disp, R_ECX );
   935     MEM_READ_WORD( R_ECX, R_EAX );
   936     store_reg( R_EAX, 0 );
   937 :}
   938 MOV.W @(disp, PC), Rn {:  
   939     load_imm32( R_ECX, pc + disp + 4 );
   940     MEM_READ_WORD( R_ECX, R_EAX );
   941     store_reg( R_EAX, Rn );
   942 :}
   943 MOV.W @(disp, Rm), R0 {:  
   944     load_reg( R_ECX, Rm );
   945     ADD_imm32_r32( disp, R_ECX );
   946     MEM_READ_WORD( R_ECX, R_EAX );
   947     store_reg( R_EAX, 0 );
   948 :}
   949 MOVA @(disp, PC), R0 {:  
   950     load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
   951     store_reg( R_ECX, 0 );
   952 :}
   953 MOVCA.L R0, @Rn {:  
   954     load_reg( R_EAX, 0 );
   955     load_reg( R_ECX, Rn );
   956     MEM_WRITE_LONG( R_ECX, R_EAX );
   957 :}
   959 /* Control transfer instructions */
BF disp {:  
    /* Branch if T is clear: when T is set, skip over the emitted exit. */
    CMP_imm8s_ebp( 0, R_T );
    JNE_rel8( 1 );
    /* NOTE(review): rel8(1) skips a single byte, but exit_block() emits
     * far more than that — this displacement looks wrong; confirm against
     * the emitter (MARK_JMP/CHECK_JMP would catch it in debug builds). */
    exit_block( disp + pc + 4 );
    return 1;
:}
   966 BF/S disp {:  
   967     CMP_imm8s_ebp( 0, R_T );
   968     JNE_rel8( 1 );
   969     exit_block( disp + pc + 4 );
   970     sh4_x86.in_delay_slot = TRUE;
   971 :}
   972 BRA disp {:  
   973     exit_block( disp + pc + 4 );
   974 :}
   975 BRAF Rn {:  :}
   976 BSR disp {:  :}
   977 BSRF Rn {:  :}
   978 BT disp {:  /* If true, result PC += 4 + disp. else result PC = pc+2 */
   979     return pc + 2;
   980 :}
   981 BT/S disp {:
   983     return pc + 4;
   984 :}
   985 JMP @Rn {:  :}
   986 JSR @Rn {:  :}
   987 RTE {:  :}
   988 RTS {:  :}
   989 TRAPA #imm {:  :}
   990 UNDEF {:  :}
   992 CLRMAC {:  :}
   993 CLRS {:  :}
   994 CLRT {:  :}
   995 SETS {:  :}
   996 SETT {:  :}
   998 /* Floating point instructions */
   999 FABS FRn {:  :}
  1000 FADD FRm, FRn {:  :}
  1001 FCMP/EQ FRm, FRn {:  :}
  1002 FCMP/GT FRm, FRn {:  :}
  1003 FCNVDS FRm, FPUL {:  :}
  1004 FCNVSD FPUL, FRn {:  :}
  1005 FDIV FRm, FRn {:  :}
  1006 FIPR FVm, FVn {:  :}
  1007 FLDS FRm, FPUL {:  :}
  1008 FLDI0 FRn {:  :}
  1009 FLDI1 FRn {:  :}
  1010 FLOAT FPUL, FRn {:  :}
  1011 FMAC FR0, FRm, FRn {:  :}
  1012 FMOV FRm, FRn {:  :}
  1013 FMOV FRm, @Rn {:  :}
  1014 FMOV FRm, @-Rn {:  :}
  1015 FMOV FRm, @(R0, Rn) {:  :}
  1016 FMOV @Rm, FRn {:  :}
  1017 FMOV @Rm+, FRn {:  :}
  1018 FMOV @(R0, Rm), FRn {:  :}
  1019 FMUL FRm, FRn {:  :}
  1020 FNEG FRn {:  :}
  1021 FRCHG {:  :}
  1022 FSCA FPUL, FRn {:  :}
  1023 FSCHG {:  :}
  1024 FSQRT FRn {:  :}
  1025 FSRRA FRn {:  :}
  1026 FSTS FPUL, FRn {:  :}
  1027 FSUB FRm, FRn {:  :}
  1028 FTRC FRm, FPUL {:  :}
  1029 FTRV XMTRX, FVn {:  :}
/* Processor control instructions */
/* LDC Rm, <creg>: copy general register Rm into the named control
 * register.  SR goes through write_sr() since writing SR has side
 * effects (handled by that helper, defined elsewhere); the others are
 * a plain store to the named slot in the CPU state structure.
 * NOTE(review): no privilege-mode check is emitted before these
 * privileged writes — confirm whether priv_checked handles this. */
LDC Rm, SR {:
    load_reg( R_EAX, Rm );
    write_sr( R_EAX );
:}
LDC Rm, GBR {: 
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_GBR );
:}
LDC Rm, VBR {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_VBR );
:}
LDC Rm, SSR {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_SSR );
:}
LDC Rm, SGR {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_SGR );
:}
LDC Rm, SPC {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_SPC );
:}
LDC Rm, DBR {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_DBR );
:}
/* TODO(review): banked-register moves are unimplemented stubs. */
LDC Rm, Rn_BANK {:  :}
/* LDC.L @Rm+, <creg>: load a 32-bit value from the address in Rm into
 * the control register, post-incrementing Rm by 4.  Emitted pattern:
 * EAX = Rm; ECX = old Rm (the load address); Rm += 4 written back;
 * then the memory read into EAX and the store to the control slot.
 * NOTE(review): Rm is incremented *before* MEM_READ_LONG — if the read
 * can fault, Rm has already been modified; confirm this is intended. */
LDC.L @Rm+, GBR {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );   /* keep the pre-increment address */
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );        /* commit Rm += 4 */
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_GBR );
:}
LDC.L @Rm+, SR {:
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    write_sr( R_EAX );             /* SR writes need the helper's side effects */
:}
LDC.L @Rm+, VBR {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_VBR );
:}
LDC.L @Rm+, SSR {:
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_SSR );
:}
LDC.L @Rm+, SGR {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_SGR );
:}
LDC.L @Rm+, SPC {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_SPC );
:}
LDC.L @Rm+, DBR {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_DBR );
:}
/* TODO(review): banked-register load unimplemented. */
LDC.L @Rm+, Rn_BANK {:  
:}
/* LDS Rm, <sysreg> / LDS.L @Rm+, <sysreg>: same copy and
 * post-increment-load patterns as LDC/LDC.L, for the system registers
 * FPSCR, FPUL, MACH, MACL and PR.
 * NOTE(review): FPSCR is stored directly with no side-effect handling
 * (FR/SZ/PR bank or mode switches) — confirm against sh4core. */
LDS Rm, FPSCR {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_FPSCR );
:}
LDS.L @Rm+, FPSCR {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );   /* ECX = load address (old Rm) */
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );        /* commit Rm += 4 */
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_FPSCR );
:}
LDS Rm, FPUL {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_FPUL );
:}
LDS.L @Rm+, FPUL {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_FPUL );
:}
LDS Rm, MACH {: 
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_MACH );
:}
LDS.L @Rm+, MACH {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_MACH );
:}
LDS Rm, MACL {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_MACL );
:}
LDS.L @Rm+, MACL {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_MACL );
:}
LDS Rm, PR {:  
    load_reg( R_EAX, Rm );
    store_spreg( R_EAX, R_PR );
:}
LDS.L @Rm+, PR {:  
    load_reg( R_EAX, Rm );
    MOV_r32_r32( R_EAX, R_ECX );
    ADD_imm8s_r32( 4, R_EAX );
    store_reg( R_EAX, Rm );
    MEM_READ_LONG( R_ECX, R_EAX );
    store_spreg( R_EAX, R_PR );
:}
/* TODO(review): TLB load, cache-control ops, PREF and SLEEP are
 * unimplemented stubs. */
LDTLB {:  :}
OCBI @Rn {:  :}
OCBP @Rn {:  :}
OCBWB @Rn {:  :}
PREF @Rn {:  :}
SLEEP {:  :}
/* STC <creg>, Rn: copy a control register into general register Rn.
 * SR is assembled via read_sr() (its bits are reconstructed by that
 * helper); the rest are plain loads from the CPU state structure. */
 STC SR, Rn {:
     read_sr( R_EAX );
     store_reg( R_EAX, Rn );
:}
STC GBR, Rn {:  
    load_spreg( R_EAX, R_GBR );
    store_reg( R_EAX, Rn );
:}
STC VBR, Rn {:  
    load_spreg( R_EAX, R_VBR );
    store_reg( R_EAX, Rn );
:}
STC SSR, Rn {:  
    load_spreg( R_EAX, R_SSR );
    store_reg( R_EAX, Rn );
:}
STC SPC, Rn {:  
    load_spreg( R_EAX, R_SPC );
    store_reg( R_EAX, Rn );
:}
STC SGR, Rn {:  
    load_spreg( R_EAX, R_SGR );
    store_reg( R_EAX, Rn );
:}
STC DBR, Rn {:  
    load_spreg( R_EAX, R_DBR );
    store_reg( R_EAX, Rn );
:}
STC Rm_BANK, Rn {: /* TODO */ 
:}
  1215 STC.L SR, @-Rn {:  /* TODO */
  1216     load_reg( R_ECX, Rn );
  1217     ADD_imm8s_r32( -4, Rn );
  1218     store_reg( R_ECX, Rn );
  1219     read_sr( R_EAX );
  1220     MEM_WRITE_LONG( R_ECX, R_EAX );
  1221 :}
  1222 STC.L VBR, @-Rn {:  
  1223     load_reg( R_ECX, Rn );
  1224     ADD_imm8s_r32( -4, Rn );
  1225     store_reg( R_ECX, Rn );
  1226     load_spreg( R_EAX, R_VBR );
  1227     MEM_WRITE_LONG( R_ECX, R_EAX );
  1228 :}
  1229 STC.L SSR, @-Rn {:  
  1230     load_reg( R_ECX, Rn );
  1231     ADD_imm8s_r32( -4, Rn );
  1232     store_reg( R_ECX, Rn );
  1233     load_spreg( R_EAX, R_SSR );
  1234     MEM_WRITE_LONG( R_ECX, R_EAX );
  1235 :}
  1236 STC.L SPC, @-Rn {:  
  1237     load_reg( R_ECX, Rn );
  1238     ADD_imm8s_r32( -4, Rn );
  1239     store_reg( R_ECX, Rn );
  1240     load_spreg( R_EAX, R_SPC );
  1241     MEM_WRITE_LONG( R_ECX, R_EAX );
  1242 :}
  1243 STC.L SGR, @-Rn {:  
  1244     load_reg( R_ECX, Rn );
  1245     ADD_imm8s_r32( -4, Rn );
  1246     store_reg( R_ECX, Rn );
  1247     load_spreg( R_EAX, R_SGR );
  1248     MEM_WRITE_LONG( R_ECX, R_EAX );
  1249 :}
  1250 STC.L DBR, @-Rn {:  
  1251     load_reg( R_ECX, Rn );
  1252     ADD_imm8s_r32( -4, Rn );
  1253     store_reg( R_ECX, Rn );
  1254     load_spreg( R_EAX, R_DBR );
  1255     MEM_WRITE_LONG( R_ECX, R_EAX );
  1256 :}
  1257 STC.L Rm_BANK, @-Rn {:  :}
  1258 STC.L GBR, @-Rn {:  
  1259     load_reg( R_ECX, Rn );
  1260     ADD_imm8s_r32( -4, Rn );
  1261     store_reg( R_ECX, Rn );
  1262     load_spreg( R_EAX, R_GBR );
  1263     MEM_WRITE_LONG( R_ECX, R_EAX );
  1264 :}
  1265 STS FPSCR, Rn {:  
  1266     load_spreg( R_EAX, R_FPSCR );
  1267     store_reg( R_EAX, Rn );
  1268 :}
  1269 STS.L FPSCR, @-Rn {:  
  1270     load_reg( R_ECX, Rn );
  1271     ADD_imm8s_r32( -4, Rn );
  1272     store_reg( R_ECX, Rn );
  1273     load_spreg( R_EAX, R_FPSCR );
  1274     MEM_WRITE_LONG( R_ECX, R_EAX );
  1275 :}
  1276 STS FPUL, Rn {:  
  1277     load_spreg( R_EAX, R_FPUL );
  1278     store_reg( R_EAX, Rn );
  1279 :}
  1280 STS.L FPUL, @-Rn {:  
  1281     load_reg( R_ECX, Rn );
  1282     ADD_imm8s_r32( -4, Rn );
  1283     store_reg( R_ECX, Rn );
  1284     load_spreg( R_EAX, R_FPUL );
  1285     MEM_WRITE_LONG( R_ECX, R_EAX );
  1286 :}
  1287 STS MACH, Rn {:  
  1288     load_spreg( R_EAX, R_MACH );
  1289     store_reg( R_EAX, Rn );
  1290 :}
  1291 STS.L MACH, @-Rn {:  
  1292     load_reg( R_ECX, Rn );
  1293     ADD_imm8s_r32( -4, Rn );
  1294     store_reg( R_ECX, Rn );
  1295     load_spreg( R_EAX, R_MACH );
  1296     MEM_WRITE_LONG( R_ECX, R_EAX );
  1297 :}
  1298 STS MACL, Rn {:  
  1299     load_spreg( R_EAX, R_MACL );
  1300     store_reg( R_EAX, Rn );
  1301 :}
  1302 STS.L MACL, @-Rn {:  
  1303     load_reg( R_ECX, Rn );
  1304     ADD_imm8s_r32( -4, Rn );
  1305     store_reg( R_ECX, Rn );
  1306     load_spreg( R_EAX, R_MACL );
  1307     MEM_WRITE_LONG( R_ECX, R_EAX );
  1308 :}
  1309 STS PR, Rn {:  
  1310     load_spreg( R_EAX, R_PR );
  1311     store_reg( R_EAX, Rn );
  1312 :}
  1313 STS.L PR, @-Rn {:  
  1314     load_reg( R_ECX, Rn );
  1315     ADD_imm8s_r32( -4, Rn );
  1316     store_reg( R_ECX, Rn );
  1317     load_spreg( R_EAX, R_PR );
  1318     MEM_WRITE_LONG( R_ECX, R_EAX );
  1319 :}
/* NOP: translates to no host code at all. */
NOP {: /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */ :}
  1322 %%
  1323     INC_r32(R_ESI);
  1325     return 0;
.