2 * $Id: sh4x86.in,v 1.11 2007-09-18 08:59:00 nkeynes Exp $
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
8 * Copyright (c) 2007 Nathan Keynes.
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
28 #include "sh4/sh4core.h"
29 #include "sh4/sh4trans.h"
30 #include "sh4/sh4mmio.h"
31 #include "sh4/x86op.h"
34 #define DEFAULT_BACKPATCH_SIZE 4096
37 * Struct to manage internal translation state. This state is not saved -
38 * it is only valid between calls to sh4_translate_begin_block() and
39 * sh4_translate_end_block()
/* Per-translation-block state.  This state is not saved - it is only valid
 * between calls to sh4_translate_begin_block() and sh4_translate_end_block(). */
struct sh4_x86_state {
    gboolean in_delay_slot;    /* currently translating a delay-slot instruction */
    gboolean priv_checked; /* true if we've already checked the cpu mode. */
    gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
    /* Allocated memory for the (block-wide) back-patch list */
    uint32_t **backpatch_list;
    uint32_t backpatch_posn;   /* number of entries currently in use */
    uint32_t backpatch_size;   /* allocated capacity, in entries (not bytes) */

/* Byte offsets of the exception-exit stubs emitted at the end of each block.
 * NOTE(review): the 7-byte stride matches the PUSH imm32 (5 bytes) + JMP rel8
 * (2 bytes) stub pairs emitted in sh4_translate_end_block() - confirm. */
#define EXIT_DATA_ADDR_READ 0
#define EXIT_DATA_ADDR_WRITE 7
#define EXIT_ILLEGAL 14
#define EXIT_SLOT_ILLEGAL 21
#define EXIT_FPU_DISABLED 28
#define EXIT_SLOT_FPU_DISABLED 35

static struct sh4_x86_state sh4_x86;

/* Signed 32-bit saturation bounds (used by the MAC/saturation sequences) */
static uint32_t max_int = 0x7FFFFFFF;
static uint32_t min_int = 0x80000000;
64 void signsat48( void )
66 if( ((int64_t)sh4r.mac) < (int64_t)0xFFFF800000000000LL )
67 sh4r.mac = 0xFFFF800000000000LL;
68 else if( ((int64_t)sh4r.mac) > (int64_t)0x00007FFFFFFFFFFFLL )
69 sh4r.mac = 0x00007FFFFFFFFFFFLL;
/**
 * FSCA helper: compute sin/cos of a fixed-point angle.  Only the low 16 bits
 * of anglei are significant (one full rotation = 0x10000).
 * NOTE(review): the sinf/cosf stores into fr[] are on elided lines of this
 * fragment - confirm against the full source.
 */
void sh4_fsca( uint32_t anglei, float *fr )
    float angle = (((float)(anglei&0xFFFF))/65536.0) * 2 * M_PI;
    /* NOTE(review): fragment of the SLEEP handler - STBCR bit 7 selects
     * standby rather than sleep.  The intervening `} else {` line is elided
     * here; the two assignments below are the two branches. */
    if( MMIO_READ( CPG, STBCR ) & 0x80 ) {
        sh4r.sh4_state = SH4_STATE_STANDBY;
        sh4r.sh4_state = SH4_STATE_SLEEP;
 * Compute the matrix transform of fv given the matrix xf.
90 * Both fv and xf are word-swapped as per the sh4r.fr banks
/**
 * Apply the 4x4 matrix xf to the vector in target, in place.
 * Both target and xf are word-swapped as per the sh4r.fr banks, so the
 * input vector is un-swapped into vec[] first, and xf is indexed so that
 * element (row, col) of the logical matrix is xf[col + 4*row].
 */
void sh4_ftrv( float *target, float *xf )
{
    float vec[4] = { target[1], target[0], target[3], target[2] };
    int col;
    for( col = 0; col < 4; col++ ) {
        target[col] = xf[col] * vec[0] + xf[col+4] * vec[1] +
            xf[col+8] * vec[2] + xf[col+12] * vec[3];
    }
}
    /* NOTE(review): fragment of translator initialisation - allocate the
     * initial back-patch list.  backpatch_size is kept in entries, hence
     * the division by sizeof(uint32_t *). */
    sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
    sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
114 static void sh4_x86_add_backpatch( uint8_t *ptr )
116 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
117 sh4_x86.backpatch_size <<= 1;
118 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
119 assert( sh4_x86.backpatch_list != NULL );
121 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
/**
 * Rewrite every recorded rel32 offset so it targets the exception stubs
 * emitted at reloc_base (the -4 accounts for rel32 being relative to the
 * end of the 4-byte displacement field).
 * NOTE(review): the declaration of `i` is on an elided line of this fragment.
 */
static void sh4_x86_do_backpatch( uint8_t *reloc_base )
    for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
        *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
/**
 * Emit an instruction to load an SH4 reg into a real register
 */
static inline void load_reg( int x86reg, int sh4reg )
    /* mov [bp+n], reg */
    OP(0x45 + (x86reg<<3));      // ModRM: [EBP+disp8] -> x86reg (opcode byte on an elided line)
    OP(REG_OFFSET(r[sh4reg]));   // disp8 = offset of r[sh4reg] within sh4r

/* Load r[sh4reg] into x86reg as a 16-bit value - presumably sign-extended
 * (MOVSX); the opcode bytes are on elided lines - confirm. */
static inline void load_reg16s( int x86reg, int sh4reg )
    MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));

/* Load r[sh4reg] into x86reg as a 16-bit value - presumably zero-extended
 * (MOVZX); the opcode bytes are on elided lines - confirm. */
static inline void load_reg16u( int x86reg, int sh4reg )
    MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));

/* Load/store a special (non-GPR) sh4r field, addressed by structure offset */
#define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
#define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )

/**
 * Emit an instruction to load an immediate value into a register
 */
static inline void load_imm32( int x86reg, uint32_t value ) {
    /* mov #value, reg */

/**
 * Emit an instruction to store an SH4 reg (RN)
 */
void static inline store_reg( int x86reg, int sh4reg ) {
    /* mov reg, [bp+n] */
    OP(0x45 + (x86reg<<3));      // ModRM: x86reg -> [EBP+disp8]
    OP(REG_OFFSET(r[sh4reg]));
/* Load the EBP-relative pointer to the current (front) fp bank */
#define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))

/**
 * Load an FR register (single-precision floating point) into an integer x86
 * register (eg for register-to-register moves)
 */
void static inline load_fr( int bankreg, int x86reg, int frm )
    /* mov [bankreg + (frm^1)*4], x86reg - frm^1 because the fr banks are
     * stored word-swapped */
    OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);

/**
 * Store an FR register (single-precision floating point) into an integer x86
 * register (eg for register-to-register moves)
 */
void static inline store_fr( int bankreg, int x86reg, int frn )
    OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);  // mov x86reg, [bankreg+(frn^1)*4]

/**
 * Load a pointer to the back fp bank into the specified x86 register. The
 * bankreg must have been previously loaded with FPSCR.
 */
static inline void load_xf_bank( int bankreg )
    SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
    AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
    OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg

/**
 * Update the fr_bank pointer based on the current fpscr value (same
 * FPSCR.FR extraction as load_xf_bank, then write the result back to
 * sh4r.fr_bank).  Clobbers fpscrreg.
 */
static inline void update_fr_bank( int fpscrreg )
    SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
    AND_imm8s_r32( 0x40, fpscrreg ); // Complete extraction
    OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
    store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
/**
 * Push FPUL (as a 32-bit float) onto the FPU stack
 */
static inline void push_fpul( )
    OP(0xD9); OP(0x45); OP(R_FPUL);   // FLD.S [EBP + R_FPUL]

/**
 * Pop FPUL (as a 32-bit float) from the FPU stack
 */
static inline void pop_fpul( )
    OP(0xD9); OP(0x5D); OP(R_FPUL);   // FSTP.S [EBP + R_FPUL]

/**
 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
 * with the location of the current fp bank.
 */
static inline void push_fr( int bankreg, int frm )
    OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]

/**
 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
 * with bankreg previously loaded with the location of the current fp bank.
 */
static inline void pop_fr( int bankreg, int frm )
    OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]

/**
 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
 * with the location of the current fp bank.
 * Note: doubles use the natural (non-word-swapped) index, unlike floats.
 */
static inline void push_dr( int bankreg, int frm )
    OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]

/* Pop a 64-bit double from the FPU stack back into the fp bank */
static inline void pop_dr( int bankreg, int frm )
    OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
/**
 * Emit an indirect call to the C function at ptr.
 * Note: clobbers EAX to make the indirect call - this isn't usually
 * a problem since the callee will usually clobber it anyway.
 */
static inline void call_func0( void *ptr )
    load_imm32(R_EAX, (uint32_t)ptr);
    // NOTE(review): the CALL_r32(R_EAX) emission is on an elided line

/* Call a one-argument function.  The 4-byte stack adjustment below implies
 * one argument is pushed (push/call lines elided in this fragment). */
static inline void call_func1( void *ptr, int arg1 )
    ADD_imm8s_r32( 4, R_ESP );

/* Call a two-argument function; 8-byte cleanup implies two pushed args */
static inline void call_func2( void *ptr, int arg1, int arg2 )
    ADD_imm8s_r32( 8, R_ESP );
/**
 * Write a double (64-bit) value into memory, with the first word in arg2a, and
 * the second in arg2b.  Implemented as two sh4_write_long calls (push lines
 * elided in this fragment); each ADD 8,ESP is the cdecl argument cleanup.
 */
static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
    ADD_imm8s_r32( 4, addr );    // address of the second word
    ADD_imm8s_r32( -4, addr );   // restore the original address
    call_func0(sh4_write_long);
    ADD_imm8s_r32( 8, R_ESP );
    call_func0(sh4_write_long);
    ADD_imm8s_r32( 8, R_ESP );

/**
 * Read a double (64-bit) value from memory, writing the first word into arg2a
 * and the second into arg2b. The addr must not be in EAX (EAX is clobbered
 * by the sh4_read_long results).
 */
static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
    call_func0(sh4_read_long);
    ADD_imm8s_r32( 4, addr );    // advance to the second word
    call_func0(sh4_read_long);
    ADD_imm8s_r32( 4, R_ESP );
    MOV_r32_r32( R_EAX, arg2b ); // second read result -> arg2b
/* Exception checks - Note that all exception checks will clobber EAX */

/* Emit a privileged-mode check (at most once per translated block):
 * exits the block with an illegal-instruction code if SR.MD is clear. */
static void check_priv( )
    if( !sh4_x86.priv_checked ) {
        sh4_x86.priv_checked = TRUE;
        load_spreg( R_EAX, R_SR );
        AND_imm32_r32( SR_MD, R_EAX );
        if( sh4_x86.in_delay_slot ) {
            JE_exit( EXIT_SLOT_ILLEGAL );
            JE_exit( EXIT_ILLEGAL );    // else branch (brace lines elided)

/* Emit an FPU-enabled check (at most once per translated block):
 * exits the block with an FPU-disabled code if SR.FD is set. */
static void check_fpuen( )
    if( !sh4_x86.fpuen_checked ) {
        sh4_x86.fpuen_checked = TRUE;
        load_spreg( R_EAX, R_SR );
        AND_imm32_r32( SR_FD, R_EAX );
        if( sh4_x86.in_delay_slot ) {
            JNE_exit(EXIT_SLOT_FPU_DISABLED);
            JNE_exit(EXIT_FPU_DISABLED);    // else branch (brace lines elided)
/* Exit with a read-address error if the address in x86reg is not 2-byte aligned */
static void check_ralign16( int x86reg )
    TEST_imm32_r32( 0x00000001, x86reg );
    JNE_exit(EXIT_DATA_ADDR_READ);

/* Exit with a write-address error if the address is not 2-byte aligned */
static void check_walign16( int x86reg )
    TEST_imm32_r32( 0x00000001, x86reg );
    JNE_exit(EXIT_DATA_ADDR_WRITE);

/* Exit with a read-address error if the address is not 4-byte aligned */
static void check_ralign32( int x86reg )
    TEST_imm32_r32( 0x00000003, x86reg );
    JNE_exit(EXIT_DATA_ADDR_READ);

/* Exit with a write-address error if the address is not 4-byte aligned */
static void check_walign32( int x86reg )
    TEST_imm32_r32( 0x00000003, x86reg );
    JNE_exit(EXIT_DATA_ADDR_WRITE);

/* Move the memory-call result out of EAX into value_reg when they differ */
#define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)

/* Abort the current instruction: an instruction illegal in a delay slot
 * was found there - jump to the slot-illegal exit stub. */
#define SLOTILLEGAL() JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
/**
 * Emit the 'start of block' assembly. Sets up the stack frame and saves
 * registers (prologue lines elided in this fragment), then resets the
 * per-block translation state.
 */
void sh4_translate_begin_block()
    load_imm32( R_EBP, (uint32_t)&sh4r );   // EBP = base of the sh4r register file
    XOR_r32_r32(R_ESI, R_ESI);              // ESI = instruction counter, starts at 0
    sh4_x86.in_delay_slot = FALSE;
    sh4_x86.priv_checked = FALSE;
    sh4_x86.fpuen_checked = FALSE;
    sh4_x86.backpatch_posn = 0;
    sh4_x86.exit_code = 1;   // NOTE(review): exit_code field not visible in the struct fragment above - confirm

/**
 * Exit the block early (ie branch out), conditionally or otherwise.
 * Fragment: saves PC (from EDI), charges elapsed cycles to slice_cycle,
 * and loads the exit code for the epilogue.
 */
    store_spreg( R_EDI, REG_OFFSET(pc) );
    MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
    load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    ADD_r32_r32( R_EAX, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    load_imm32( R_EAX, sh4_x86.exit_code );
/**
 * Flush any open regs back to memory, restore ESI/EDI, update PC, etc.
 * When any back-patch entries were recorded, also emits the exception-exit
 * stub table: six PUSH imm32 / JMP rel8 pairs (7 bytes each, matching the
 * EXIT_* offsets) that funnel into a common sh4_raise_exception call.
 */
void sh4_translate_end_block( sh4addr_t pc ) {
    assert( !sh4_x86.in_delay_slot ); // should never stop here
    // Normal termination - save PC, cycle count
    if( sh4_x86.backpatch_posn != 0 ) {
        uint8_t *end_ptr = xlat_output;
        // Exception termination. Jump block for various exception codes:
        PUSH_imm32( EXC_DATA_ADDR_READ );
        JMP_rel8( 33, target1 );
        PUSH_imm32( EXC_DATA_ADDR_WRITE );
        JMP_rel8( 26, target2 );
        PUSH_imm32( EXC_ILLEGAL );
        JMP_rel8( 19, target3 );
        PUSH_imm32( EXC_SLOT_ILLEGAL );
        JMP_rel8( 12, target4 );
        PUSH_imm32( EXC_FPU_DISABLED );
        JMP_rel8( 5, target5 );
        PUSH_imm32( EXC_SLOT_FPU_DISABLED );
        // Common exception handler: rewind PC by 2*ESI (2 bytes per
        // translated instruction) and charge the elapsed cycles.
        load_spreg( R_ECX, REG_OFFSET(pc) );
        ADD_r32_r32( R_ESI, R_ECX );
        ADD_r32_r32( R_ESI, R_ECX );
        store_spreg( R_ECX, REG_OFFSET(pc) );
        MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
        load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
        ADD_r32_r32( R_EAX, R_ECX );
        store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
        // Call sh4_raise_exception(code) with the pushed exception code
        load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
        CALL_r32( R_EAX ); // 2
        ADD_imm8s_r32( 4, R_ESP );
        // Point all recorded branches at the stub table above
        sh4_x86_do_backpatch( end_ptr );
489 extern uint16_t *sh4_icache;
490 extern uint32_t sh4_icache_addr;
493 * Translate a single instruction. Delayed branches are handled specially
494 * by translating both branch and delayed instruction as a single unit (as
497 * @return true if the instruction marks the end of a basic block
500 uint32_t sh4_x86_translate_instruction( uint32_t pc )
503 /* Read instruction */
504 uint32_t pageaddr = pc >> 12;
505 if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
506 ir = sh4_icache[(pc&0xFFF)>>1];
508 sh4_icache = (uint16_t *)mem_get_page(pc);
509 if( ((uint32_t)sh4_icache) < MAX_IO_REGIONS ) {
510 /* If someone's actually been so daft as to try to execute out of an IO
511 * region, fallback on the full-blown memory read
514 ir = sh4_read_word(pc);
516 sh4_icache_addr = pageaddr;
517 ir = sh4_icache[(pc&0xFFF)>>1];
524 load_reg( R_EAX, Rm );
525 load_reg( R_ECX, Rn );
526 ADD_r32_r32( R_EAX, R_ECX );
527 store_reg( R_ECX, Rn );
530 load_reg( R_EAX, Rn );
531 ADD_imm8s_r32( imm, R_EAX );
532 store_reg( R_EAX, Rn );
535 load_reg( R_EAX, Rm );
536 load_reg( R_ECX, Rn );
538 ADC_r32_r32( R_EAX, R_ECX );
539 store_reg( R_ECX, Rn );
543 load_reg( R_EAX, Rm );
544 load_reg( R_ECX, Rn );
545 ADD_r32_r32( R_EAX, R_ECX );
546 store_reg( R_ECX, Rn );
550 load_reg( R_EAX, Rm );
551 load_reg( R_ECX, Rn );
552 AND_r32_r32( R_EAX, R_ECX );
553 store_reg( R_ECX, Rn );
556 load_reg( R_EAX, 0 );
557 AND_imm32_r32(imm, R_EAX);
558 store_reg( R_EAX, 0 );
560 AND.B #imm, @(R0, GBR) {:
561 load_reg( R_EAX, 0 );
562 load_spreg( R_ECX, R_GBR );
563 ADD_r32_r32( R_EAX, R_ECX );
565 call_func0(sh4_read_byte);
567 AND_imm32_r32(imm, R_EAX );
568 MEM_WRITE_BYTE( R_ECX, R_EAX );
571 load_reg( R_EAX, Rm );
572 load_reg( R_ECX, Rn );
573 CMP_r32_r32( R_EAX, R_ECX );
577 load_reg( R_EAX, 0 );
578 CMP_imm8s_r32(imm, R_EAX);
582 load_reg( R_EAX, Rm );
583 load_reg( R_ECX, Rn );
584 CMP_r32_r32( R_EAX, R_ECX );
588 load_reg( R_EAX, Rm );
589 load_reg( R_ECX, Rn );
590 CMP_r32_r32( R_EAX, R_ECX );
594 load_reg( R_EAX, Rm );
595 load_reg( R_ECX, Rn );
596 CMP_r32_r32( R_EAX, R_ECX );
600 load_reg( R_EAX, Rm );
601 load_reg( R_ECX, Rn );
602 CMP_r32_r32( R_EAX, R_ECX );
606 load_reg( R_EAX, Rn );
607 CMP_imm8s_r32( 0, R_EAX );
611 load_reg( R_EAX, Rn );
612 CMP_imm8s_r32( 0, R_EAX );
616 load_reg( R_EAX, Rm );
617 load_reg( R_ECX, Rn );
618 XOR_r32_r32( R_ECX, R_EAX );
619 TEST_r8_r8( R_AL, R_AL );
620 JE_rel8(13, target1);
621 TEST_r8_r8( R_AH, R_AH ); // 2
623 SHR_imm8_r32( 16, R_EAX ); // 3
624 TEST_r8_r8( R_AL, R_AL ); // 2
626 TEST_r8_r8( R_AH, R_AH ); // 2
633 load_reg( R_EAX, Rm );
634 load_reg( R_ECX, Rn );
635 SHR_imm8_r32( 31, R_EAX );
636 SHR_imm8_r32( 31, R_ECX );
637 store_spreg( R_EAX, R_M );
638 store_spreg( R_ECX, R_Q );
639 CMP_r32_r32( R_EAX, R_ECX );
643 XOR_r32_r32( R_EAX, R_EAX );
644 store_spreg( R_EAX, R_Q );
645 store_spreg( R_EAX, R_M );
646 store_spreg( R_EAX, R_T );
649 load_spreg( R_ECX, R_M );
650 load_reg( R_EAX, Rn );
653 SETC_r8( R_DL ); // Q'
654 CMP_sh4r_r32( R_Q, R_ECX );
656 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
659 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
661 store_reg( R_EAX, Rn ); // Done with Rn now
662 SETC_r8(R_AL); // tmp1
663 XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
664 XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
665 store_spreg( R_ECX, R_Q );
666 XOR_imm8s_r32( 1, R_AL ); // T = !Q'
667 MOVZX_r8_r32( R_AL, R_EAX );
668 store_spreg( R_EAX, R_T );
671 load_reg( R_EAX, Rm );
672 load_reg( R_ECX, Rn );
674 store_spreg( R_EDX, R_MACH );
675 store_spreg( R_EAX, R_MACL );
678 load_reg( R_EAX, Rm );
679 load_reg( R_ECX, Rn );
681 store_spreg( R_EDX, R_MACH );
682 store_spreg( R_EAX, R_MACL );
685 load_reg( R_EAX, Rn );
686 ADD_imm8s_r32( -1, R_EAX );
687 store_reg( R_EAX, Rn );
691 load_reg( R_EAX, Rm );
692 MOVSX_r8_r32( R_EAX, R_EAX );
693 store_reg( R_EAX, Rn );
696 load_reg( R_EAX, Rm );
697 MOVSX_r16_r32( R_EAX, R_EAX );
698 store_reg( R_EAX, Rn );
701 load_reg( R_EAX, Rm );
702 MOVZX_r8_r32( R_EAX, R_EAX );
703 store_reg( R_EAX, Rn );
706 load_reg( R_EAX, Rm );
707 MOVZX_r16_r32( R_EAX, R_EAX );
708 store_reg( R_EAX, Rn );
711 load_reg( R_ECX, Rm );
712 check_ralign32( R_ECX );
713 load_reg( R_ECX, Rn );
714 check_ralign32( R_ECX );
715 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
716 MEM_READ_LONG( R_ECX, R_EAX );
718 load_reg( R_ECX, Rm );
719 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
720 MEM_READ_LONG( R_ECX, R_EAX );
723 ADD_r32_sh4r( R_EAX, R_MACL );
724 ADC_r32_sh4r( R_EDX, R_MACH );
726 load_spreg( R_ECX, R_S );
727 TEST_r32_r32(R_ECX, R_ECX);
729 call_func0( signsat48 );
733 load_reg( R_ECX, Rm );
734 check_ralign16( R_ECX );
735 load_reg( R_ECX, Rn );
736 check_ralign16( R_ECX );
737 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
738 MEM_READ_WORD( R_ECX, R_EAX );
740 load_reg( R_ECX, Rm );
741 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
742 MEM_READ_WORD( R_ECX, R_EAX );
746 load_spreg( R_ECX, R_S );
747 TEST_r32_r32( R_ECX, R_ECX );
748 JE_rel8( 47, nosat );
750 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
751 JNO_rel8( 51, end ); // 2
752 load_imm32( R_EDX, 1 ); // 5
753 store_spreg( R_EDX, R_MACH ); // 6
754 JS_rel8( 13, positive ); // 2
755 load_imm32( R_EAX, 0x80000000 );// 5
756 store_spreg( R_EAX, R_MACL ); // 6
757 JMP_rel8( 25, end2 ); // 2
759 JMP_TARGET(positive);
760 load_imm32( R_EAX, 0x7FFFFFFF );// 5
761 store_spreg( R_EAX, R_MACL ); // 6
762 JMP_rel8( 12, end3); // 2
765 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
766 ADC_r32_sh4r( R_EDX, R_MACH ); // 6
772 load_spreg( R_EAX, R_T );
773 store_reg( R_EAX, Rn );
776 load_reg( R_EAX, Rm );
777 load_reg( R_ECX, Rn );
779 store_spreg( R_EAX, R_MACL );
782 load_reg16s( R_EAX, Rm );
783 load_reg16s( R_ECX, Rn );
785 store_spreg( R_EAX, R_MACL );
788 load_reg16u( R_EAX, Rm );
789 load_reg16u( R_ECX, Rn );
791 store_spreg( R_EAX, R_MACL );
794 load_reg( R_EAX, Rm );
796 store_reg( R_EAX, Rn );
799 load_reg( R_EAX, Rm );
800 XOR_r32_r32( R_ECX, R_ECX );
802 SBB_r32_r32( R_EAX, R_ECX );
803 store_reg( R_ECX, Rn );
807 load_reg( R_EAX, Rm );
809 store_reg( R_EAX, Rn );
812 load_reg( R_EAX, Rm );
813 load_reg( R_ECX, Rn );
814 OR_r32_r32( R_EAX, R_ECX );
815 store_reg( R_ECX, Rn );
818 load_reg( R_EAX, 0 );
819 OR_imm32_r32(imm, R_EAX);
820 store_reg( R_EAX, 0 );
822 OR.B #imm, @(R0, GBR) {:
823 load_reg( R_EAX, 0 );
824 load_spreg( R_ECX, R_GBR );
825 ADD_r32_r32( R_EAX, R_ECX );
827 call_func0(sh4_read_byte);
829 OR_imm32_r32(imm, R_EAX );
830 MEM_WRITE_BYTE( R_ECX, R_EAX );
833 load_reg( R_EAX, Rn );
836 store_reg( R_EAX, Rn );
840 load_reg( R_EAX, Rn );
843 store_reg( R_EAX, Rn );
847 load_reg( R_EAX, Rn );
849 store_reg( R_EAX, Rn );
853 load_reg( R_EAX, Rn );
855 store_reg( R_EAX, Rn );
859 /* Annoyingly enough, not directly convertible */
860 load_reg( R_EAX, Rn );
861 load_reg( R_ECX, Rm );
862 CMP_imm32_r32( 0, R_ECX );
865 NEG_r32( R_ECX ); // 2
866 AND_imm8_r8( 0x1F, R_CL ); // 3
867 JE_rel8( 4, emptysar); // 2
868 SAR_r32_CL( R_EAX ); // 2
869 JMP_rel8(10, end); // 2
871 JMP_TARGET(emptysar);
872 SAR_imm8_r32(31, R_EAX ); // 3
876 AND_imm8_r8( 0x1F, R_CL ); // 3
877 SHL_r32_CL( R_EAX ); // 2
880 store_reg( R_EAX, Rn );
883 load_reg( R_EAX, Rn );
884 load_reg( R_ECX, Rm );
885 CMP_imm32_r32( 0, R_ECX );
888 NEG_r32( R_ECX ); // 2
889 AND_imm8_r8( 0x1F, R_CL ); // 3
890 JE_rel8( 4, emptyshr );
891 SHR_r32_CL( R_EAX ); // 2
892 JMP_rel8(9, end); // 2
894 JMP_TARGET(emptyshr);
895 XOR_r32_r32( R_EAX, R_EAX );
899 AND_imm8_r8( 0x1F, R_CL ); // 3
900 SHL_r32_CL( R_EAX ); // 2
903 store_reg( R_EAX, Rn );
906 load_reg( R_EAX, Rn );
908 store_reg( R_EAX, Rn );
911 load_reg( R_EAX, Rn );
913 store_reg( R_EAX, Rn );
916 load_reg( R_EAX, Rn );
918 store_reg( R_EAX, Rn );
921 load_reg( R_EAX, Rn );
922 SHL_imm8_r32( 2, R_EAX );
923 store_reg( R_EAX, Rn );
926 load_reg( R_EAX, Rn );
927 SHL_imm8_r32( 8, R_EAX );
928 store_reg( R_EAX, Rn );
931 load_reg( R_EAX, Rn );
932 SHL_imm8_r32( 16, R_EAX );
933 store_reg( R_EAX, Rn );
936 load_reg( R_EAX, Rn );
938 store_reg( R_EAX, Rn );
941 load_reg( R_EAX, Rn );
942 SHR_imm8_r32( 2, R_EAX );
943 store_reg( R_EAX, Rn );
946 load_reg( R_EAX, Rn );
947 SHR_imm8_r32( 8, R_EAX );
948 store_reg( R_EAX, Rn );
951 load_reg( R_EAX, Rn );
952 SHR_imm8_r32( 16, R_EAX );
953 store_reg( R_EAX, Rn );
956 load_reg( R_EAX, Rm );
957 load_reg( R_ECX, Rn );
958 SUB_r32_r32( R_EAX, R_ECX );
959 store_reg( R_ECX, Rn );
962 load_reg( R_EAX, Rm );
963 load_reg( R_ECX, Rn );
965 SBB_r32_r32( R_EAX, R_ECX );
966 store_reg( R_ECX, Rn );
969 load_reg( R_EAX, Rm );
970 load_reg( R_ECX, Rn );
971 SUB_r32_r32( R_EAX, R_ECX );
972 store_reg( R_ECX, Rn );
976 load_reg( R_EAX, Rm );
977 XCHG_r8_r8( R_AL, R_AH );
978 store_reg( R_EAX, Rn );
981 load_reg( R_EAX, Rm );
982 MOV_r32_r32( R_EAX, R_ECX );
983 SHL_imm8_r32( 16, R_ECX );
984 SHR_imm8_r32( 16, R_EAX );
985 OR_r32_r32( R_EAX, R_ECX );
986 store_reg( R_ECX, Rn );
989 load_reg( R_ECX, Rn );
990 MEM_READ_BYTE( R_ECX, R_EAX );
991 TEST_r8_r8( R_AL, R_AL );
993 OR_imm8_r8( 0x80, R_AL );
994 load_reg( R_ECX, Rn );
995 MEM_WRITE_BYTE( R_ECX, R_EAX );
998 load_reg( R_EAX, Rm );
999 load_reg( R_ECX, Rn );
1000 TEST_r32_r32( R_EAX, R_ECX );
1004 load_reg( R_EAX, 0 );
1005 TEST_imm32_r32( imm, R_EAX );
1008 TST.B #imm, @(R0, GBR) {:
1009 load_reg( R_EAX, 0);
1010 load_reg( R_ECX, R_GBR);
1011 ADD_r32_r32( R_EAX, R_ECX );
1012 MEM_READ_BYTE( R_ECX, R_EAX );
1013 TEST_imm8_r8( imm, R_EAX );
1017 load_reg( R_EAX, Rm );
1018 load_reg( R_ECX, Rn );
1019 XOR_r32_r32( R_EAX, R_ECX );
1020 store_reg( R_ECX, Rn );
1023 load_reg( R_EAX, 0 );
1024 XOR_imm32_r32( imm, R_EAX );
1025 store_reg( R_EAX, 0 );
1027 XOR.B #imm, @(R0, GBR) {:
1028 load_reg( R_EAX, 0 );
1029 load_spreg( R_ECX, R_GBR );
1030 ADD_r32_r32( R_EAX, R_ECX );
1032 call_func0(sh4_read_byte);
1034 XOR_imm32_r32( imm, R_EAX );
1035 MEM_WRITE_BYTE( R_ECX, R_EAX );
1038 load_reg( R_EAX, Rm );
1039 MOV_r32_r32( R_EAX, R_ECX );
1040 SHR_imm8_r32( 16, R_EAX );
1041 SHL_imm8_r32( 16, R_ECX );
1042 OR_r32_r32( R_EAX, R_ECX );
1043 store_reg( R_ECX, Rn );
1046 /* Data move instructions */
1048 load_reg( R_EAX, Rm );
1049 store_reg( R_EAX, Rn );
1052 load_imm32( R_EAX, imm );
1053 store_reg( R_EAX, Rn );
1056 load_reg( R_EAX, Rm );
1057 load_reg( R_ECX, Rn );
1058 MEM_WRITE_BYTE( R_ECX, R_EAX );
1061 load_reg( R_EAX, Rm );
1062 load_reg( R_ECX, Rn );
1063 ADD_imm8s_r32( -1, R_ECX );
1064 store_reg( R_ECX, Rn );
1065 MEM_WRITE_BYTE( R_ECX, R_EAX );
1067 MOV.B Rm, @(R0, Rn) {:
1068 load_reg( R_EAX, 0 );
1069 load_reg( R_ECX, Rn );
1070 ADD_r32_r32( R_EAX, R_ECX );
1071 load_reg( R_EAX, Rm );
1072 MEM_WRITE_BYTE( R_ECX, R_EAX );
1074 MOV.B R0, @(disp, GBR) {:
1075 load_reg( R_EAX, 0 );
1076 load_spreg( R_ECX, R_GBR );
1077 ADD_imm32_r32( disp, R_ECX );
1078 MEM_WRITE_BYTE( R_ECX, R_EAX );
1080 MOV.B R0, @(disp, Rn) {:
1081 load_reg( R_EAX, 0 );
1082 load_reg( R_ECX, Rn );
1083 ADD_imm32_r32( disp, R_ECX );
1084 MEM_WRITE_BYTE( R_ECX, R_EAX );
1087 load_reg( R_ECX, Rm );
1088 MEM_READ_BYTE( R_ECX, R_EAX );
1089 store_reg( R_EAX, Rn );
1092 load_reg( R_ECX, Rm );
1093 MOV_r32_r32( R_ECX, R_EAX );
1094 ADD_imm8s_r32( 1, R_EAX );
1095 store_reg( R_EAX, Rm );
1096 MEM_READ_BYTE( R_ECX, R_EAX );
1097 store_reg( R_EAX, Rn );
1099 MOV.B @(R0, Rm), Rn {:
1100 load_reg( R_EAX, 0 );
1101 load_reg( R_ECX, Rm );
1102 ADD_r32_r32( R_EAX, R_ECX );
1103 MEM_READ_BYTE( R_ECX, R_EAX );
1104 store_reg( R_EAX, Rn );
1106 MOV.B @(disp, GBR), R0 {:
1107 load_spreg( R_ECX, R_GBR );
1108 ADD_imm32_r32( disp, R_ECX );
1109 MEM_READ_BYTE( R_ECX, R_EAX );
1110 store_reg( R_EAX, 0 );
1112 MOV.B @(disp, Rm), R0 {:
1113 load_reg( R_ECX, Rm );
1114 ADD_imm32_r32( disp, R_ECX );
1115 MEM_READ_BYTE( R_ECX, R_EAX );
1116 store_reg( R_EAX, 0 );
1119 load_reg( R_EAX, Rm );
1120 load_reg( R_ECX, Rn );
1121 check_walign32(R_ECX);
1122 MEM_WRITE_LONG( R_ECX, R_EAX );
1125 load_reg( R_EAX, Rm );
1126 load_reg( R_ECX, Rn );
1127 check_walign32( R_ECX );
1128 ADD_imm8s_r32( -4, R_ECX );
1129 store_reg( R_ECX, Rn );
1130 MEM_WRITE_LONG( R_ECX, R_EAX );
1132 MOV.L Rm, @(R0, Rn) {:
1133 load_reg( R_EAX, 0 );
1134 load_reg( R_ECX, Rn );
1135 ADD_r32_r32( R_EAX, R_ECX );
1136 check_walign32( R_ECX );
1137 load_reg( R_EAX, Rm );
1138 MEM_WRITE_LONG( R_ECX, R_EAX );
1140 MOV.L R0, @(disp, GBR) {:
1141 load_spreg( R_ECX, R_GBR );
1142 load_reg( R_EAX, 0 );
1143 ADD_imm32_r32( disp, R_ECX );
1144 check_walign32( R_ECX );
1145 MEM_WRITE_LONG( R_ECX, R_EAX );
1147 MOV.L Rm, @(disp, Rn) {:
1148 load_reg( R_ECX, Rn );
1149 load_reg( R_EAX, Rm );
1150 ADD_imm32_r32( disp, R_ECX );
1151 check_walign32( R_ECX );
1152 MEM_WRITE_LONG( R_ECX, R_EAX );
1155 load_reg( R_ECX, Rm );
1156 check_ralign32( R_ECX );
1157 MEM_READ_LONG( R_ECX, R_EAX );
1158 store_reg( R_EAX, Rn );
1161 load_reg( R_EAX, Rm );
1162 check_ralign32( R_EAX );
1163 MOV_r32_r32( R_EAX, R_ECX );
1164 ADD_imm8s_r32( 4, R_EAX );
1165 store_reg( R_EAX, Rm );
1166 MEM_READ_LONG( R_ECX, R_EAX );
1167 store_reg( R_EAX, Rn );
1169 MOV.L @(R0, Rm), Rn {:
1170 load_reg( R_EAX, 0 );
1171 load_reg( R_ECX, Rm );
1172 ADD_r32_r32( R_EAX, R_ECX );
1173 check_ralign32( R_ECX );
1174 MEM_READ_LONG( R_ECX, R_EAX );
1175 store_reg( R_EAX, Rn );
1177 MOV.L @(disp, GBR), R0 {:
1178 load_spreg( R_ECX, R_GBR );
1179 ADD_imm32_r32( disp, R_ECX );
1180 check_ralign32( R_ECX );
1181 MEM_READ_LONG( R_ECX, R_EAX );
1182 store_reg( R_EAX, 0 );
1184 MOV.L @(disp, PC), Rn {:
1185 if( sh4_x86.in_delay_slot ) {
1188 uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
1189 char *ptr = mem_get_region(target);
1191 MOV_moff32_EAX( (uint32_t)ptr );
1193 load_imm32( R_ECX, target );
1194 MEM_READ_LONG( R_ECX, R_EAX );
1196 store_reg( R_EAX, Rn );
1199 MOV.L @(disp, Rm), Rn {:
1200 load_reg( R_ECX, Rm );
1201 ADD_imm8s_r32( disp, R_ECX );
1202 check_ralign32( R_ECX );
1203 MEM_READ_LONG( R_ECX, R_EAX );
1204 store_reg( R_EAX, Rn );
1207 load_reg( R_ECX, Rn );
1208 check_walign16( R_ECX );
1209 load_reg( R_EAX, Rm );
1210 MEM_WRITE_WORD( R_ECX, R_EAX );
1213 load_reg( R_ECX, Rn );
1214 check_walign16( R_ECX );
1215 load_reg( R_EAX, Rm );
1216 ADD_imm8s_r32( -2, R_ECX );
1217 store_reg( R_ECX, Rn );
1218 MEM_WRITE_WORD( R_ECX, R_EAX );
1220 MOV.W Rm, @(R0, Rn) {:
1221 load_reg( R_EAX, 0 );
1222 load_reg( R_ECX, Rn );
1223 ADD_r32_r32( R_EAX, R_ECX );
1224 check_walign16( R_ECX );
1225 load_reg( R_EAX, Rm );
1226 MEM_WRITE_WORD( R_ECX, R_EAX );
1228 MOV.W R0, @(disp, GBR) {:
1229 load_spreg( R_ECX, R_GBR );
1230 load_reg( R_EAX, 0 );
1231 ADD_imm32_r32( disp, R_ECX );
1232 check_walign16( R_ECX );
1233 MEM_WRITE_WORD( R_ECX, R_EAX );
1235 MOV.W R0, @(disp, Rn) {:
1236 load_reg( R_ECX, Rn );
1237 load_reg( R_EAX, 0 );
1238 ADD_imm32_r32( disp, R_ECX );
1239 check_walign16( R_ECX );
1240 MEM_WRITE_WORD( R_ECX, R_EAX );
1243 load_reg( R_ECX, Rm );
1244 check_ralign16( R_ECX );
1245 MEM_READ_WORD( R_ECX, R_EAX );
1246 store_reg( R_EAX, Rn );
1249 load_reg( R_EAX, Rm );
1250 check_ralign16( R_EAX );
1251 MOV_r32_r32( R_EAX, R_ECX );
1252 ADD_imm8s_r32( 2, R_EAX );
1253 store_reg( R_EAX, Rm );
1254 MEM_READ_WORD( R_ECX, R_EAX );
1255 store_reg( R_EAX, Rn );
1257 MOV.W @(R0, Rm), Rn {:
1258 load_reg( R_EAX, 0 );
1259 load_reg( R_ECX, Rm );
1260 ADD_r32_r32( R_EAX, R_ECX );
1261 check_ralign16( R_ECX );
1262 MEM_READ_WORD( R_ECX, R_EAX );
1263 store_reg( R_EAX, Rn );
1265 MOV.W @(disp, GBR), R0 {:
1266 load_spreg( R_ECX, R_GBR );
1267 ADD_imm32_r32( disp, R_ECX );
1268 check_ralign16( R_ECX );
1269 MEM_READ_WORD( R_ECX, R_EAX );
1270 store_reg( R_EAX, 0 );
1272 MOV.W @(disp, PC), Rn {:
1273 if( sh4_x86.in_delay_slot ) {
1276 load_imm32( R_ECX, pc + disp + 4 );
1277 MEM_READ_WORD( R_ECX, R_EAX );
1278 store_reg( R_EAX, Rn );
1281 MOV.W @(disp, Rm), R0 {:
1282 load_reg( R_ECX, Rm );
1283 ADD_imm32_r32( disp, R_ECX );
1284 check_ralign16( R_ECX );
1285 MEM_READ_WORD( R_ECX, R_EAX );
1286 store_reg( R_EAX, 0 );
1288 MOVA @(disp, PC), R0 {:
1289 if( sh4_x86.in_delay_slot ) {
1292 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
1293 store_reg( R_ECX, 0 );
1297 load_reg( R_EAX, 0 );
1298 load_reg( R_ECX, Rn );
1299 check_walign32( R_ECX );
1300 MEM_WRITE_LONG( R_ECX, R_EAX );
1303 /* Control transfer instructions */
1305 if( sh4_x86.in_delay_slot ) {
1308 load_imm32( R_EDI, pc + 2 );
1309 CMP_imm8s_sh4r( 0, R_T );
1310 JNE_rel8( 5, nottaken );
1311 load_imm32( R_EDI, disp + pc + 4 );
1312 JMP_TARGET(nottaken);
1318 if( sh4_x86.in_delay_slot ) {
1321 load_imm32( R_EDI, pc + 4 );
1322 CMP_imm8s_sh4r( 0, R_T );
1323 JNE_rel8( 5, nottaken );
1324 load_imm32( R_EDI, disp + pc + 4 );
1325 JMP_TARGET(nottaken);
1326 sh4_x86.in_delay_slot = TRUE;
1331 if( sh4_x86.in_delay_slot ) {
1334 load_imm32( R_EDI, disp + pc + 4 );
1335 sh4_x86.in_delay_slot = TRUE;
1340 if( sh4_x86.in_delay_slot ) {
1343 load_reg( R_EDI, Rn );
1344 ADD_imm32_r32( pc + 4, R_EDI );
1345 sh4_x86.in_delay_slot = TRUE;
1350 if( sh4_x86.in_delay_slot ) {
1353 load_imm32( R_EAX, pc + 4 );
1354 store_spreg( R_EAX, R_PR );
1355 load_imm32( R_EDI, disp + pc + 4 );
1356 sh4_x86.in_delay_slot = TRUE;
1361 if( sh4_x86.in_delay_slot ) {
1364 load_imm32( R_EAX, pc + 4 );
1365 store_spreg( R_EAX, R_PR );
1366 load_reg( R_EDI, Rn );
1367 ADD_r32_r32( R_EAX, R_EDI );
1368 sh4_x86.in_delay_slot = TRUE;
1373 if( sh4_x86.in_delay_slot ) {
1376 load_imm32( R_EDI, pc + 2 );
1377 CMP_imm8s_sh4r( 0, R_T );
1378 JE_rel8( 5, nottaken );
1379 load_imm32( R_EDI, disp + pc + 4 );
1380 JMP_TARGET(nottaken);
1386 if( sh4_x86.in_delay_slot ) {
1389 load_imm32( R_EDI, pc + 4 );
1390 CMP_imm8s_sh4r( 0, R_T );
1391 JE_rel8( 5, nottaken );
1392 load_imm32( R_EDI, disp + pc + 4 );
1393 JMP_TARGET(nottaken);
1394 sh4_x86.in_delay_slot = TRUE;
1399 if( sh4_x86.in_delay_slot ) {
1402 load_reg( R_EDI, Rn );
1403 sh4_x86.in_delay_slot = TRUE;
1408 if( sh4_x86.in_delay_slot ) {
1411 load_imm32( R_EAX, pc + 4 );
1412 store_spreg( R_EAX, R_PR );
1413 load_reg( R_EDI, Rn );
1414 sh4_x86.in_delay_slot = TRUE;
1420 if( sh4_x86.in_delay_slot ) {
1423 load_spreg( R_EDI, R_SPC );
1424 load_spreg( R_EAX, R_SSR );
1425 call_func1( sh4_write_sr, R_EAX );
1426 sh4_x86.in_delay_slot = TRUE;
1427 sh4_x86.priv_checked = FALSE;
1428 sh4_x86.fpuen_checked = FALSE;
1433 if( sh4_x86.in_delay_slot ) {
1436 load_spreg( R_EDI, R_PR );
1437 sh4_x86.in_delay_slot = TRUE;
1442 if( sh4_x86.in_delay_slot ) {
1446 call_func0( sh4_raise_trap );
1447 ADD_imm8s_r32( 4, R_ESP );
1451 if( sh4_x86.in_delay_slot ) {
1454 JMP_exit(EXIT_ILLEGAL);
1460 XOR_r32_r32(R_EAX, R_EAX);
1461 store_spreg( R_EAX, R_MACL );
1462 store_spreg( R_EAX, R_MACH );
1481 /* Floating point moves */
1483 /* As horrible as this looks, it's actually covering 5 separate cases:
1484 * 1. 32-bit fr-to-fr (PR=0)
1485 * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
1486 * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
1487 * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
1488 * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
1491 load_spreg( R_ECX, R_FPSCR );
1492 load_fr_bank( R_EDX );
1493 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1494 JNE_rel8(8, doublesize);
1495 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
1496 store_fr( R_EDX, R_EAX, FRn );
1499 JMP_TARGET(doublesize);
1500 load_xf_bank( R_ECX );
1501 load_fr( R_ECX, R_EAX, FRm-1 );
1503 load_fr( R_ECX, R_EDX, FRm );
1504 store_fr( R_ECX, R_EAX, FRn-1 );
1505 store_fr( R_ECX, R_EDX, FRn );
1506 } else /* FRn&1 == 0 */ {
1507 load_fr( R_ECX, R_ECX, FRm );
1508 store_fr( R_EDX, R_EAX, FRn );
1509 store_fr( R_EDX, R_ECX, FRn+1 );
1512 } else /* FRm&1 == 0 */ {
1515 load_xf_bank( R_ECX );
1516 load_fr( R_EDX, R_EAX, FRm );
1517 load_fr( R_EDX, R_EDX, FRm+1 );
1518 store_fr( R_ECX, R_EAX, FRn-1 );
1519 store_fr( R_ECX, R_EDX, FRn );
1521 } else /* FRn&1 == 0 */ {
1523 load_fr( R_EDX, R_EAX, FRm );
1524 load_fr( R_EDX, R_ECX, FRm+1 );
1525 store_fr( R_EDX, R_EAX, FRn );
1526 store_fr( R_EDX, R_ECX, FRn+1 );
// FMOV FRm, @Rn (fragment): store single (SZ=0) or 64-bit pair (SZ=1) to
// [Rn], after checking 32-bit write alignment of the address.
1533 load_reg( R_EDX, Rn );
1534 check_walign32( R_EDX );
1535 load_spreg( R_ECX, R_FPSCR );
1536 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1537 JNE_rel8(20, doublesize);
1538 load_fr_bank( R_ECX );
1539 load_fr( R_ECX, R_EAX, FRm );
1540 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
1542 JMP_rel8( 48, end );
// Two doublesize targets appear below — they belong to two alternative paths
// split on a condition elided from this excerpt (XF-bank vs FR-bank pair);
// NOTE(review): confirm against the full source.
1543 JMP_TARGET(doublesize);
1544 load_xf_bank( R_ECX );
1545 load_fr( R_ECX, R_EAX, FRm&0x0E );
1546 load_fr( R_ECX, R_ECX, FRm|0x01 );
1547 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
1550 JMP_rel8( 39, end );
1551 JMP_TARGET(doublesize);
1552 load_fr_bank( R_ECX );
1553 load_fr( R_ECX, R_EAX, FRm&0x0E );
1554 load_fr( R_ECX, R_ECX, FRm|0x01 );
1555 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
// FMOV @Rm, FRn (fragment): load single or pair from [Rm] after a 32-bit
// read-alignment check. Even/odd FR slot selection uses FRn&0x0E / FRn|0x01.
1561 load_reg( R_EDX, Rm );
1562 check_ralign32( R_EDX );
1563 load_spreg( R_ECX, R_FPSCR );
1564 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1565 JNE_rel8(19, doublesize);
1566 MEM_READ_LONG( R_EDX, R_EAX );
1567 load_fr_bank( R_ECX );
1568 store_fr( R_ECX, R_EAX, FRn );
1571 JMP_TARGET(doublesize);
1572 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1573 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
1574 load_xf_bank( R_ECX );
1575 store_fr( R_ECX, R_EAX, FRn&0x0E );
1576 store_fr( R_ECX, R_EDX, FRn|0x01 );
1580 JMP_TARGET(doublesize);
1581 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1582 load_fr_bank( R_ECX );
1583 store_fr( R_ECX, R_EAX, FRn&0x0E );
1584 store_fr( R_ECX, R_EDX, FRn|0x01 );
// FMOV FRm, @-Rn (fragment): pre-decrement Rn by 4 (single) or 8 (pair),
// write the decremented address back to Rn, then store to memory.
1590 load_reg( R_EDX, Rn );
1591 check_walign32( R_EDX );
1592 load_spreg( R_ECX, R_FPSCR );
1593 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1594 JNE_rel8(26, doublesize);
1595 load_fr_bank( R_ECX );
1596 load_fr( R_ECX, R_EAX, FRm );
1597 ADD_imm8s_r32(-4,R_EDX);
1598 store_reg( R_EDX, Rn );
1599 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
1601 JMP_rel8( 54, end );
// Pair path, XF-bank variant (selector condition elided from this excerpt).
1602 JMP_TARGET(doublesize);
1603 load_xf_bank( R_ECX );
1604 load_fr( R_ECX, R_EAX, FRm&0x0E );
1605 load_fr( R_ECX, R_ECX, FRm|0x01 );
1606 ADD_imm8s_r32(-8,R_EDX);
1607 store_reg( R_EDX, Rn );
1608 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
1611 JMP_rel8( 45, end );
// Pair path, FR-bank variant.
1612 JMP_TARGET(doublesize);
1613 load_fr_bank( R_ECX );
1614 load_fr( R_ECX, R_EAX, FRm&0x0E );
1615 load_fr( R_ECX, R_ECX, FRm|0x01 );
1616 ADD_imm8s_r32(-8,R_EDX);
1617 store_reg( R_EDX, Rn );
1618 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
// FMOV @Rm+, FRn (fragment): read from [Rm], then write Rm back incremented
// by 4 (single) or 8 (pair). The increment is committed before the read —
// NOTE(review): exception-restart behaviour of that ordering not visible here.
1624 load_reg( R_EDX, Rm );
1625 check_ralign32( R_EDX );
1626 MOV_r32_r32( R_EDX, R_EAX );
1627 load_spreg( R_ECX, R_FPSCR );
1628 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1629 JNE_rel8(25, doublesize);
1630 ADD_imm8s_r32( 4, R_EAX );
1631 store_reg( R_EAX, Rm );
1632 MEM_READ_LONG( R_EDX, R_EAX );
1633 load_fr_bank( R_ECX );
1634 store_fr( R_ECX, R_EAX, FRn );
1637 JMP_TARGET(doublesize);
1638 ADD_imm8s_r32( 8, R_EAX );
1639 store_reg(R_EAX, Rm);
1640 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1641 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
1642 load_xf_bank( R_ECX );
1643 store_fr( R_ECX, R_EAX, FRn&0x0E );
1644 store_fr( R_ECX, R_EDX, FRn|0x01 );
// Second pair variant (FR bank); its JMP_TARGET line is elided here.
1648 ADD_imm8s_r32( 8, R_EAX );
1649 store_reg(R_EAX, Rm);
1650 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1651 load_fr_bank( R_ECX );
1652 store_fr( R_ECX, R_EAX, FRn&0x0E );
1653 store_fr( R_ECX, R_EDX, FRn|0x01 );
// FMOV FRm, @(R0, Rn): indexed store — effective address is Rn + R0.
// Same SZ-dispatched single/pair structure as the @Rn forms above.
// (Fragment: closing ":}" and some interior lines are elided.)
1657 FMOV FRm, @(R0, Rn) {:
1659 load_reg( R_EDX, Rn );
1660 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
1661 check_walign32( R_EDX );
1662 load_spreg( R_ECX, R_FPSCR );
1663 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1664 JNE_rel8(20, doublesize);
1665 load_fr_bank( R_ECX );
1666 load_fr( R_ECX, R_EAX, FRm );
1667 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
1669 JMP_rel8( 48, end );
// Pair path, XF-bank variant (selector condition elided).
1670 JMP_TARGET(doublesize);
1671 load_xf_bank( R_ECX );
1672 load_fr( R_ECX, R_EAX, FRm&0x0E );
1673 load_fr( R_ECX, R_ECX, FRm|0x01 );
1674 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
1677 JMP_rel8( 39, end );
// Pair path, FR-bank variant.
1678 JMP_TARGET(doublesize);
1679 load_fr_bank( R_ECX );
1680 load_fr( R_ECX, R_EAX, FRm&0x0E );
1681 load_fr( R_ECX, R_ECX, FRm|0x01 );
1682 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
// FMOV @(R0, Rm), FRn: indexed load — effective address is Rm + R0.
1686 FMOV @(R0, Rm), FRn {:
1688 load_reg( R_EDX, Rm );
1689 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
1690 check_ralign32( R_EDX );
1691 load_spreg( R_ECX, R_FPSCR );
1692 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1693 JNE_rel8(19, doublesize);
1694 MEM_READ_LONG( R_EDX, R_EAX );
1695 load_fr_bank( R_ECX );
1696 store_fr( R_ECX, R_EAX, FRn );
1699 JMP_TARGET(doublesize);
1700 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1701 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
1702 load_xf_bank( R_ECX );
1703 store_fr( R_ECX, R_EAX, FRn&0x0E );
1704 store_fr( R_ECX, R_EDX, FRn|0x01 );
1708 JMP_TARGET(doublesize);
1709 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1710 load_fr_bank( R_ECX );
1711 store_fr( R_ECX, R_EAX, FRn&0x0E );
1712 store_fr( R_ECX, R_EDX, FRn|0x01 );
// FLDI0 FRn: load +0.0f into FRn; only defined for PR=0 (the PR test's
// branch line is elided from this excerpt).
1716 FLDI0 FRn {: /* IFF PR=0 */
1718 load_spreg( R_ECX, R_FPSCR );
1719 TEST_imm32_r32( FPSCR_PR, R_ECX );
1721 XOR_r32_r32( R_EAX, R_EAX );
1722 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
1723 store_fr( R_ECX, R_EAX, FRn );
// FLDI1 FRn: load +1.0f (IEEE-754 single 0x3F800000) into FRn; PR=0 only.
1726 FLDI1 FRn {: /* IFF PR=0 */
1728 load_spreg( R_ECX, R_FPSCR );
1729 TEST_imm32_r32( FPSCR_PR, R_ECX );
1731 load_imm32(R_EAX, 0x3F800000);
1732 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
1733 store_fr( R_ECX, R_EAX, FRn );
// FLOAT fragment: the FILD of FPUL is on an elided line; pop as single
// (PR=0) or double (PR=1) into FRn.
1739 load_spreg( R_ECX, R_FPSCR );
1740 load_spreg(R_EDX, REG_OFFSET(fr_bank));
1742 TEST_imm32_r32( FPSCR_PR, R_ECX );
1743 JNE_rel8(5, doubleprec);
1744 pop_fr( R_EDX, FRn );
1746 JMP_TARGET(doubleprec);
1747 pop_dr( R_EDX, FRn );
// FTRC fragment: push FRm onto the x87 stack (single or double per PR)...
1752 load_spreg( R_ECX, R_FPSCR );
1753 load_fr_bank( R_EDX );
1754 TEST_imm32_r32( FPSCR_PR, R_ECX );
1755 JNE_rel8(5, doubleprec);
1756 push_fr( R_EDX, FRm );
1758 JMP_TARGET(doubleprec);
1759 push_dr( R_EDX, FRm );
// ...then clamp against max_int/min_int (file-scope saturation constants)
// before FISTP into FPUL; out-of-range values store the clamp value instead.
// The FCOMIP-style compares sit on elided lines — hence JNA/JAE below.
1761 load_imm32( R_ECX, (uint32_t)&max_int );
1762 FILD_r32ind( R_ECX );
1764 JNA_rel8( 16, sat );
1765 load_imm32( R_ECX, (uint32_t)&min_int ); // 5
1766 FILD_r32ind( R_ECX ); // 2
1768 JAE_rel8( 5, sat2 ); // 2
1769 FISTP_sh4r(R_FPUL); // 3
1770 JMP_rel8( 9, end ); // 2
// Saturated case: store the clamp constant ECX points at directly into FPUL.
1774 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
1775 store_spreg( R_ECX, R_FPUL );
// FLDS fragment: raw 32-bit move FRm -> FPUL (no conversion).
1781 load_fr_bank( R_ECX );
1782 load_fr( R_ECX, R_EAX, FRm );
1783 store_spreg( R_EAX, R_FPUL );
// FSTS fragment: raw 32-bit move FPUL -> FRn.
1787 load_fr_bank( R_ECX );
1788 load_spreg( R_EAX, R_FPUL );
1789 store_fr( R_ECX, R_EAX, FRn );
// FCNVDS fragment: double FRm -> single in FPUL; no-op unless PR=1.
1793 load_spreg( R_ECX, R_FPSCR );
1794 TEST_imm32_r32( FPSCR_PR, R_ECX );
1795 JE_rel8(9, end); // only when PR=1
1796 load_fr_bank( R_ECX );
1797 push_dr( R_ECX, FRm );
// FCNVSD fragment: single in FPUL -> double FRn; no-op unless PR=1.
// (The push of FPUL is on an elided line.)
1803 load_spreg( R_ECX, R_FPSCR );
1804 TEST_imm32_r32( FPSCR_PR, R_ECX );
1805 JE_rel8(9, end); // only when PR=1
1806 load_fr_bank( R_ECX );
1808 pop_dr( R_ECX, FRn );
1812 /* Floating point instructions */
// Common shape for the FP arithmetic templates below: test FPSCR.PR, push
// operand(s) as single (push_fr) or double (push_dr), apply an x87 op on an
// elided line, and pop the result back. Which SH4 opcode each fragment
// belongs to is not visible here — NOTE(review): headers elided.
// Unary op on FRn (single-precision branch shown; x87 op line elided).
1815 load_spreg( R_ECX, R_FPSCR );
1816 load_fr_bank( R_EDX );
1817 TEST_imm32_r32( FPSCR_PR, R_ECX );
1818 JNE_rel8(10, doubleprec);
1819 push_fr(R_EDX, FRn); // 3
1821 pop_fr( R_EDX, FRn); //3
1822 JMP_rel8(8,end); // 2
1823 JMP_TARGET(doubleprec);
1824 push_dr(R_EDX, FRn);
// Binary op, operands pushed FRm then FRn.
1831 load_spreg( R_ECX, R_FPSCR );
1832 TEST_imm32_r32( FPSCR_PR, R_ECX );
1833 load_fr_bank( R_EDX );
1834 JNE_rel8(13,doubleprec);
1835 push_fr(R_EDX, FRm);
1836 push_fr(R_EDX, FRn);
1840 JMP_TARGET(doubleprec);
1841 push_dr(R_EDX, FRm);
1842 push_dr(R_EDX, FRn);
// Binary op with reversed push order (FRn then FRm) — operand order matters
// for non-commutative x87 ops such as divide/subtract.
1849 load_spreg( R_ECX, R_FPSCR );
1850 TEST_imm32_r32( FPSCR_PR, R_ECX );
1851 load_fr_bank( R_EDX );
1852 JNE_rel8(13, doubleprec);
1853 push_fr(R_EDX, FRn);
1854 push_fr(R_EDX, FRm);
1858 JMP_TARGET(doubleprec);
1859 push_dr(R_EDX, FRn);
1860 push_dr(R_EDX, FRm);
// FMAC FR0, FRm, FRn: FRn += FR0 * FRm (multiply/add x87 ops elided);
// note the double-precision branch pushes DR pairs for the same registers.
1865 FMAC FR0, FRm, FRn {:
1867 load_spreg( R_ECX, R_FPSCR );
1868 load_spreg( R_EDX, REG_OFFSET(fr_bank));
1869 TEST_imm32_r32( FPSCR_PR, R_ECX );
1870 JNE_rel8(18, doubleprec);
1871 push_fr( R_EDX, 0 );
1872 push_fr( R_EDX, FRm );
1874 push_fr( R_EDX, FRn );
1876 pop_fr( R_EDX, FRn );
1878 JMP_TARGET(doubleprec);
1879 push_dr( R_EDX, 0 );
1880 push_dr( R_EDX, FRm );
1882 push_dr( R_EDX, FRn );
1884 pop_dr( R_EDX, FRn );
// Binary op, FRm then FRn (likely multiply — op line elided).
1890 load_spreg( R_ECX, R_FPSCR );
1891 TEST_imm32_r32( FPSCR_PR, R_ECX );
1892 load_fr_bank( R_EDX );
1893 JNE_rel8(13, doubleprec);
1894 push_fr(R_EDX, FRm);
1895 push_fr(R_EDX, FRn);
1899 JMP_TARGET(doubleprec);
1900 push_dr(R_EDX, FRm);
1901 push_dr(R_EDX, FRn);
// Unary op on FRn (op line elided).
1908 load_spreg( R_ECX, R_FPSCR );
1909 TEST_imm32_r32( FPSCR_PR, R_ECX );
1910 load_fr_bank( R_EDX );
1911 JNE_rel8(10, doubleprec);
1912 push_fr(R_EDX, FRn);
1916 JMP_TARGET(doubleprec);
1917 push_dr(R_EDX, FRn);
// PR=0-only unary op on FRn: the whole template is skipped when PR=1
// (JNE straight to end). Opcode name not visible — likely an estimate-class
// instruction; NOTE(review): confirm against full source.
1924 load_spreg( R_ECX, R_FPSCR );
1925 TEST_imm32_r32( FPSCR_PR, R_ECX );
1926 load_fr_bank( R_EDX );
1927 JNE_rel8(12, end); // PR=0 only
1929 push_fr(R_EDX, FRn);
// Unary op with separate single/double branches (op lines elided).
1937 load_spreg( R_ECX, R_FPSCR );
1938 TEST_imm32_r32( FPSCR_PR, R_ECX );
1939 load_fr_bank( R_EDX );
1940 JNE_rel8(10, doubleprec);
1941 push_fr(R_EDX, FRn);
1945 JMP_TARGET(doubleprec);
1946 push_dr(R_EDX, FRn);
// Binary op pushed FRn-then-FRm (non-commutative operand order).
1953 load_spreg( R_ECX, R_FPSCR );
1954 TEST_imm32_r32( FPSCR_PR, R_ECX );
1955 load_fr_bank( R_EDX );
1956 JNE_rel8(13, doubleprec);
1957 push_fr(R_EDX, FRn);
1958 push_fr(R_EDX, FRm);
1962 JMP_TARGET(doubleprec);
1963 push_dr(R_EDX, FRn);
1964 push_dr(R_EDX, FRm);
// Comparison-shaped fragments: both operands pushed, compare + T-bit update
// on elided lines (two variants, e.g. FCMP/EQ and FCMP/GT).
1972 load_spreg( R_ECX, R_FPSCR );
1973 TEST_imm32_r32( FPSCR_PR, R_ECX );
1974 load_fr_bank( R_EDX );
1975 JNE_rel8(8, doubleprec);
1976 push_fr(R_EDX, FRm);
1977 push_fr(R_EDX, FRn);
1979 JMP_TARGET(doubleprec);
1980 push_dr(R_EDX, FRm);
1981 push_dr(R_EDX, FRn);
1989 load_spreg( R_ECX, R_FPSCR );
1990 TEST_imm32_r32( FPSCR_PR, R_ECX );
1991 load_fr_bank( R_EDX );
1992 JNE_rel8(8, doubleprec);
1993 push_fr(R_EDX, FRm);
1994 push_fr(R_EDX, FRn);
1996 JMP_TARGET(doubleprec);
1997 push_dr(R_EDX, FRm);
1998 push_dr(R_EDX, FRn);
// FSCA-style fragment: PR=0 only; computes sin/cos of the FPUL angle via the
// sh4_fsca helper, writing into the even FR pair at ((FRn&0x0E)<<2) bytes
// into the current bank.
2007 load_spreg( R_ECX, R_FPSCR );
2008 TEST_imm32_r32( FPSCR_PR, R_ECX );
2009 JNE_rel8( 21, doubleprec );
2010 load_fr_bank( R_ECX );
2011 ADD_imm8s_r32( (FRn&0x0E)<<2, R_ECX );
2012 load_spreg( R_EDX, R_FPUL );
2013 call_func2( sh4_fsca, R_EDX, R_ECX );
2014 JMP_TARGET(doubleprec);
// FIPR-style fragment (PR=0 only): pushes the four FVm/FVn component pairs —
// the FMUL/FADD accumulation ops between pushes are elided — and pops the
// inner product into FVn[3] ((FVn<<2)+3).
2018 load_spreg( R_ECX, R_FPSCR );
2019 TEST_imm32_r32( FPSCR_PR, R_ECX );
2020 JNE_rel8(44, doubleprec);
2022 load_fr_bank( R_ECX );
2023 push_fr( R_ECX, FVm<<2 );
2024 push_fr( R_ECX, FVn<<2 );
2026 push_fr( R_ECX, (FVm<<2)+1);
2027 push_fr( R_ECX, (FVn<<2)+1);
2030 push_fr( R_ECX, (FVm<<2)+2);
2031 push_fr( R_ECX, (FVn<<2)+2);
2034 push_fr( R_ECX, (FVm<<2)+3);
2035 push_fr( R_ECX, (FVn<<2)+3);
2038 pop_fr( R_ECX, (FVn<<2)+3);
2039 JMP_TARGET(doubleprec);
// FTRV-style fragment (PR=0 only): matrix-vector transform done by the
// sh4_ftrv helper — FV vector at FVn<<4 bytes into the FR bank, matrix in
// the XF bank. The "// n" notes are the emitted byte counts for the rel8.
2043 load_spreg( R_ECX, R_FPSCR );
2044 TEST_imm32_r32( FPSCR_PR, R_ECX );
2045 JNE_rel8( 30, doubleprec );
2046 load_fr_bank( R_EDX ); // 3
2047 ADD_imm8s_r32( FVn<<4, R_EDX ); // 3
2048 load_xf_bank( R_ECX ); // 12
2049 call_func2( sh4_ftrv, R_EDX, R_ECX ); // 12
2050 JMP_TARGET(doubleprec);
// FRCHG fragment: toggle FPSCR.FR and refresh the cached fr_bank pointer.
2055 load_spreg( R_ECX, R_FPSCR );
2056 XOR_imm32_r32( FPSCR_FR, R_ECX );
2057 store_spreg( R_ECX, R_FPSCR );
2058 update_fr_bank( R_ECX );
// FSCHG fragment: toggle FPSCR.SZ (no bank update needed — SZ only affects
// transfer size, as used by the FMOV templates above).
2062 load_spreg( R_ECX, R_FPSCR );
2063 XOR_imm32_r32( FPSCR_SZ, R_ECX );
2064 store_spreg( R_ECX, R_FPSCR );
2067 /* Processor control instructions */
// LDC Rm, SR: must go through sh4_write_sr (SR writes can flip register
// banks / privilege), so the cached priv/fpuen checks are invalidated.
// Illegal in a delay slot (guard below).
2069 if( sh4_x86.in_delay_slot ) {
2073 load_reg( R_EAX, Rm );
2074 call_func1( sh4_write_sr, R_EAX );
2075 sh4_x86.priv_checked = FALSE;
2076 sh4_x86.fpuen_checked = FALSE;
// LDC Rm, <ctrl-reg> family: plain Rm -> special-register copies.
2080 load_reg( R_EAX, Rm );
2081 store_spreg( R_EAX, R_GBR );
2085 load_reg( R_EAX, Rm );
2086 store_spreg( R_EAX, R_VBR );
2090 load_reg( R_EAX, Rm );
2091 store_spreg( R_EAX, R_SSR );
2095 load_reg( R_EAX, Rm );
2096 store_spreg( R_EAX, R_SGR );
2100 load_reg( R_EAX, Rm );
2101 store_spreg( R_EAX, R_SPC );
2105 load_reg( R_EAX, Rm );
2106 store_spreg( R_EAX, R_DBR );
2110 load_reg( R_EAX, Rm );
2111 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
// LDC.L @Rm+ family: read a long at [Rm], post-increment Rm by 4 (committed
// before the read via the ECX copy), then store into the special register.
2114 load_reg( R_EAX, Rm );
2115 MOV_r32_r32( R_EAX, R_ECX );
2116 ADD_imm8s_r32( 4, R_EAX );
2117 store_reg( R_EAX, Rm );
2118 MEM_READ_LONG( R_ECX, R_EAX );
2119 store_spreg( R_EAX, R_GBR );
// LDC.L @Rm+, SR: same pattern but routed through sh4_write_sr; illegal in
// a delay slot, and invalidates the priv/fpuen caches.
2122 if( sh4_x86.in_delay_slot ) {
2126 load_reg( R_EAX, Rm );
2127 MOV_r32_r32( R_EAX, R_ECX );
2128 ADD_imm8s_r32( 4, R_EAX );
2129 store_reg( R_EAX, Rm );
2130 MEM_READ_LONG( R_ECX, R_EAX );
2131 call_func1( sh4_write_sr, R_EAX );
2132 sh4_x86.priv_checked = FALSE;
2133 sh4_x86.fpuen_checked = FALSE;
2138 load_reg( R_EAX, Rm );
2139 MOV_r32_r32( R_EAX, R_ECX );
2140 ADD_imm8s_r32( 4, R_EAX );
2141 store_reg( R_EAX, Rm );
2142 MEM_READ_LONG( R_ECX, R_EAX );
2143 store_spreg( R_EAX, R_VBR );
2147 load_reg( R_EAX, Rm );
2148 MOV_r32_r32( R_EAX, R_ECX );
2149 ADD_imm8s_r32( 4, R_EAX );
2150 store_reg( R_EAX, Rm );
2151 MEM_READ_LONG( R_ECX, R_EAX );
2152 store_spreg( R_EAX, R_SSR );
2156 load_reg( R_EAX, Rm );
2157 MOV_r32_r32( R_EAX, R_ECX );
2158 ADD_imm8s_r32( 4, R_EAX );
2159 store_reg( R_EAX, Rm );
2160 MEM_READ_LONG( R_ECX, R_EAX );
2161 store_spreg( R_EAX, R_SGR );
2165 load_reg( R_EAX, Rm );
2166 MOV_r32_r32( R_EAX, R_ECX );
2167 ADD_imm8s_r32( 4, R_EAX );
2168 store_reg( R_EAX, Rm );
2169 MEM_READ_LONG( R_ECX, R_EAX );
2170 store_spreg( R_EAX, R_SPC );
2174 load_reg( R_EAX, Rm );
2175 MOV_r32_r32( R_EAX, R_ECX );
2176 ADD_imm8s_r32( 4, R_EAX );
2177 store_reg( R_EAX, Rm );
2178 MEM_READ_LONG( R_ECX, R_EAX );
2179 store_spreg( R_EAX, R_DBR );
2181 LDC.L @Rm+, Rn_BANK {:
2183 load_reg( R_EAX, Rm );
2184 MOV_r32_r32( R_EAX, R_ECX );
2185 ADD_imm8s_r32( 4, R_EAX );
2186 store_reg( R_EAX, Rm );
2187 MEM_READ_LONG( R_ECX, R_EAX );
2188 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
// LDS Rm, FPSCR: FPSCR writes can change the FR/XF bank selection, so the
// cached bank pointer is refreshed via update_fr_bank.
2191 load_reg( R_EAX, Rm );
2192 store_spreg( R_EAX, R_FPSCR );
2193 update_fr_bank( R_EAX );
2195 LDS.L @Rm+, FPSCR {:
2196 load_reg( R_EAX, Rm );
2197 MOV_r32_r32( R_EAX, R_ECX );
2198 ADD_imm8s_r32( 4, R_EAX );
2199 store_reg( R_EAX, Rm );
2200 MEM_READ_LONG( R_ECX, R_EAX );
2201 store_spreg( R_EAX, R_FPSCR );
2202 update_fr_bank( R_EAX );
// LDS / LDS.L for FPUL, MACH, MACL, PR: plain copies / post-inc loads.
2205 load_reg( R_EAX, Rm );
2206 store_spreg( R_EAX, R_FPUL );
2209 load_reg( R_EAX, Rm );
2210 MOV_r32_r32( R_EAX, R_ECX );
2211 ADD_imm8s_r32( 4, R_EAX );
2212 store_reg( R_EAX, Rm );
2213 MEM_READ_LONG( R_ECX, R_EAX );
2214 store_spreg( R_EAX, R_FPUL );
2217 load_reg( R_EAX, Rm );
2218 store_spreg( R_EAX, R_MACH );
2221 load_reg( R_EAX, Rm );
2222 MOV_r32_r32( R_EAX, R_ECX );
2223 ADD_imm8s_r32( 4, R_EAX );
2224 store_reg( R_EAX, Rm );
2225 MEM_READ_LONG( R_ECX, R_EAX );
2226 store_spreg( R_EAX, R_MACH );
2229 load_reg( R_EAX, Rm );
2230 store_spreg( R_EAX, R_MACL );
2233 load_reg( R_EAX, Rm );
2234 MOV_r32_r32( R_EAX, R_ECX );
2235 ADD_imm8s_r32( 4, R_EAX );
2236 store_reg( R_EAX, Rm );
2237 MEM_READ_LONG( R_ECX, R_EAX );
2238 store_spreg( R_EAX, R_MACL );
2241 load_reg( R_EAX, Rm );
2242 store_spreg( R_EAX, R_PR );
2245 load_reg( R_EAX, Rm );
2246 MOV_r32_r32( R_EAX, R_ECX );
2247 ADD_imm8s_r32( 4, R_EAX );
2248 store_reg( R_EAX, Rm );
2249 MEM_READ_LONG( R_ECX, R_EAX );
2250 store_spreg( R_EAX, R_PR );
// PREF-style fragment: if Rn lies in the 0xE0000000 store-queue region
// (top 6 address bits == 0xE0...), flush the store queue via helper and
// pop its argument; ordinary prefetches fall through (branch line elided).
2257 load_reg( R_EAX, Rn );
2259 AND_imm32_r32( 0xFC000000, R_EAX );
2260 CMP_imm32_r32( 0xE0000000, R_EAX );
2262 call_func0( sh4_flush_store_queue );
2264 ADD_imm8s_r32( 4, R_ESP );
// SLEEP fragment: call the sleep helper and terminate the block cleanly.
2268 call_func0( sh4_sleep );
2269 sh4_x86.exit_code = 0;
2270 sh4_x86.in_delay_slot = FALSE;
// STC SR, Rn: SR must be assembled by sh4_read_sr (T/M/Q etc. are kept
// unpacked at runtime); the rest of the STC family are plain spreg reads.
2275 call_func0(sh4_read_sr);
2276 store_reg( R_EAX, Rn );
2279 load_spreg( R_EAX, R_GBR );
2280 store_reg( R_EAX, Rn );
2284 load_spreg( R_EAX, R_VBR );
2285 store_reg( R_EAX, Rn );
2289 load_spreg( R_EAX, R_SSR );
2290 store_reg( R_EAX, Rn );
2294 load_spreg( R_EAX, R_SPC );
2295 store_reg( R_EAX, Rn );
2299 load_spreg( R_EAX, R_SGR );
2300 store_reg( R_EAX, Rn );
2304 load_spreg( R_EAX, R_DBR );
2305 store_reg( R_EAX, Rn );
2309 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
2310 store_reg( R_EAX, Rn );
// STC.L <reg>, @-Rn family: pre-decrement Rn by 4, commit the new Rn, then
// write the special register value to the decremented address.
2314 load_reg( R_ECX, Rn );
2315 ADD_imm8s_r32( -4, R_ECX );
2316 store_reg( R_ECX, Rn );
2317 call_func0( sh4_read_sr );
2318 MEM_WRITE_LONG( R_ECX, R_EAX );
2322 load_reg( R_ECX, Rn );
2323 ADD_imm8s_r32( -4, R_ECX );
2324 store_reg( R_ECX, Rn );
2325 load_spreg( R_EAX, R_VBR );
2326 MEM_WRITE_LONG( R_ECX, R_EAX );
2330 load_reg( R_ECX, Rn );
2331 ADD_imm8s_r32( -4, R_ECX );
2332 store_reg( R_ECX, Rn );
2333 load_spreg( R_EAX, R_SSR );
2334 MEM_WRITE_LONG( R_ECX, R_EAX );
2338 load_reg( R_ECX, Rn );
2339 ADD_imm8s_r32( -4, R_ECX );
2340 store_reg( R_ECX, Rn );
2341 load_spreg( R_EAX, R_SPC );
2342 MEM_WRITE_LONG( R_ECX, R_EAX );
2346 load_reg( R_ECX, Rn );
2347 ADD_imm8s_r32( -4, R_ECX );
2348 store_reg( R_ECX, Rn );
2349 load_spreg( R_EAX, R_SGR );
2350 MEM_WRITE_LONG( R_ECX, R_EAX );
2354 load_reg( R_ECX, Rn );
2355 ADD_imm8s_r32( -4, R_ECX );
2356 store_reg( R_ECX, Rn );
2357 load_spreg( R_EAX, R_DBR );
2358 MEM_WRITE_LONG( R_ECX, R_EAX );
2360 STC.L Rm_BANK, @-Rn {:
2362 load_reg( R_ECX, Rn );
2363 ADD_imm8s_r32( -4, R_ECX );
2364 store_reg( R_ECX, Rn );
2365 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
2366 MEM_WRITE_LONG( R_ECX, R_EAX );
2369 load_reg( R_ECX, Rn );
2370 ADD_imm8s_r32( -4, R_ECX );
2371 store_reg( R_ECX, Rn );
2372 load_spreg( R_EAX, R_GBR );
2373 MEM_WRITE_LONG( R_ECX, R_EAX );
// STS <sys-reg>, Rn and STS.L <sys-reg>, @-Rn: same read/pre-dec-store
// patterns for FPSCR, FPUL, MACH, MACL and PR.
2376 load_spreg( R_EAX, R_FPSCR );
2377 store_reg( R_EAX, Rn );
2379 STS.L FPSCR, @-Rn {:
2380 load_reg( R_ECX, Rn );
2381 ADD_imm8s_r32( -4, R_ECX );
2382 store_reg( R_ECX, Rn );
2383 load_spreg( R_EAX, R_FPSCR );
2384 MEM_WRITE_LONG( R_ECX, R_EAX );
2387 load_spreg( R_EAX, R_FPUL );
2388 store_reg( R_EAX, Rn );
2391 load_reg( R_ECX, Rn );
2392 ADD_imm8s_r32( -4, R_ECX );
2393 store_reg( R_ECX, Rn );
2394 load_spreg( R_EAX, R_FPUL );
2395 MEM_WRITE_LONG( R_ECX, R_EAX );
2398 load_spreg( R_EAX, R_MACH );
2399 store_reg( R_EAX, Rn );
2402 load_reg( R_ECX, Rn );
2403 ADD_imm8s_r32( -4, R_ECX );
2404 store_reg( R_ECX, Rn );
2405 load_spreg( R_EAX, R_MACH );
2406 MEM_WRITE_LONG( R_ECX, R_EAX );
2409 load_spreg( R_EAX, R_MACL );
2410 store_reg( R_EAX, Rn );
2413 load_reg( R_ECX, Rn );
2414 ADD_imm8s_r32( -4, R_ECX );
2415 store_reg( R_ECX, Rn );
2416 load_spreg( R_EAX, R_MACL );
2417 MEM_WRITE_LONG( R_ECX, R_EAX );
2420 load_spreg( R_EAX, R_PR );
2421 store_reg( R_EAX, Rn );
2424 load_reg( R_ECX, Rn );
2425 ADD_imm8s_r32( -4, R_ECX );
2426 store_reg( R_ECX, Rn );
2427 load_spreg( R_EAX, R_PR );
2428 MEM_WRITE_LONG( R_ECX, R_EAX );
2431 NOP {: /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */ :}
// Block epilogue fragment: if translation ended inside a delay slot, bump
// the instruction counter in ESI by one instruction (2 bytes) and clear the
// flag — NOTE(review): ESI's exact role (PC vs. count) is not visible here.
2433 if( sh4_x86.in_delay_slot ) {
2434 ADD_imm8s_r32(2,R_ESI);
2435 sh4_x86.in_delay_slot = FALSE;
.