2 * $Id: sh4x86.in,v 1.15 2007-09-20 08:37:19 nkeynes Exp $
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
8 * Copyright (c) 2007 Nathan Keynes.
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
28 #include "sh4/sh4core.h"
29 #include "sh4/sh4trans.h"
30 #include "sh4/sh4mmio.h"
31 #include "sh4/x86op.h"
34 #define DEFAULT_BACKPATCH_SIZE 4096 /* initial back-patch list allocation, in bytes */
37 * Struct to manage internal translation state. This state is not saved -
38 * it is only valid between calls to sh4_translate_begin_block() and
39 * sh4_translate_end_block()
41 struct sh4_x86_state {
42 gboolean in_delay_slot; /* true while translating a branch delay slot */
43 gboolean priv_checked; /* true if we've already checked the cpu mode. */
44 gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
47 /* Allocated memory for the (block-wide) back-patch list */
48 uint32_t **backpatch_list;
49 uint32_t backpatch_posn; /* number of entries currently recorded */
50 uint32_t backpatch_size; /* capacity of backpatch_list, in entries */
/* NOTE(review): further members (e.g. exit_code, assigned in
 * sh4_translate_begin_block) are on lines elided from this view. */
/* Byte offsets into the exception jump block emitted by
 * sh4_translate_end_block. Each entry there is 7 bytes long (5-byte
 * PUSH imm32 + 2-byte JMP rel8), hence the multiples of 7: a
 * Jcc_exit(EXIT_x) branch is backpatched to land on the matching entry. */
53 #define EXIT_DATA_ADDR_READ 0
54 #define EXIT_DATA_ADDR_WRITE 7
55 #define EXIT_ILLEGAL 14
56 #define EXIT_SLOT_ILLEGAL 21
57 #define EXIT_FPU_DISABLED 28
58 #define EXIT_SLOT_FPU_DISABLED 35
/* Single translator instance - translation is not reentrant. */
60 static struct sh4_x86_state sh4_x86;
/* NOTE(review): max_int/min_int look like saturation bounds for
 * float->int conversion - confirm against the FTRC/FPU templates. */
62 static uint32_t max_int = 0x7FFFFFFF;
63 static uint32_t min_int = 0x80000000;
64 static uint32_t save_fcw; /* save value for fpu control word */
65 static uint32_t trunc_fcw = 0x0F7F; /* fcw value for truncation mode */
/* (One-time translator init - the enclosing function header is elided from
 * this view.) Allocate the initial back-patch list; size is tracked in
 * entries, allocation in bytes.
 * NOTE(review): malloc result is not checked before first use. */
69 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
70 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
/* Record the address of a 32-bit relative branch operand so that
 * sh4_x86_do_backpatch() can retarget it once the exception stub address
 * is known. Capacity doubles when the list is full.
 * NOTE(review): realloc overwrites the original pointer directly and is
 * only checked via assert() - with NDEBUG a failed realloc leaks the old
 * list and then dereferences NULL; prefer the tmp-pointer realloc idiom. */
74 static void sh4_x86_add_backpatch( uint8_t *ptr )
76 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
77 sh4_x86.backpatch_size <<= 1;
78 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
79 assert( sh4_x86.backpatch_list != NULL );
81 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
/* Fix up every recorded rel32 branch operand to target reloc_base:
 * displacement = reloc_base - (end of the 4-byte operand), i.e. the
 * operand address + 4, per x86 relative-branch encoding.
 * (The loop index declaration is on a line elided from this view.) */
84 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
87 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
88 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
93 * Emit an instruction to load an SH4 reg into a real register
/* Emits mov [ebp + REG_OFFSET(r[sh4reg])], x86reg: 0x45 + (reg<<3) is the
 * ModRM byte for [ebp+disp8] with x86reg in the reg field; the opcode byte
 * is emitted on a line elided from this view. */
95 static inline void load_reg( int x86reg, int sh4reg )
99 OP(0x45 + (x86reg<<3));
100 OP(REG_OFFSET(r[sh4reg]));
/* Load the low 16 bits of an SH4 register, sign-extended, into x86reg
 * (opcode bytes are on lines elided from this view). */
103 static inline void load_reg16s( int x86reg, int sh4reg )
107 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
/* Load the low 16 bits of an SH4 register, zero-extended, into x86reg
 * (opcode bytes are on lines elided from this view). */
110 static inline void load_reg16u( int x86reg, int sh4reg )
114 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
/* Move between an x86 register and a special (non-GPR) sh4r field,
 * addressed by its byte offset within struct sh4r. */
118 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
119 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
121 * Emit an instruction to load an immediate value into a register
123 static inline void load_imm32( int x86reg, uint32_t value ) {
124 /* mov #value, reg -- remaining body elided from this view */
130 * Emit an instruction to store an SH4 reg (RN)
/* Emits mov x86reg, [ebp + REG_OFFSET(r[sh4reg])] - the store counterpart
 * of load_reg(); opcode byte emitted on a line elided from this view. */
132 void static inline store_reg( int x86reg, int sh4reg ) {
133 /* mov reg, [bp+n] */
135 OP(0x45 + (x86reg<<3));
136 OP(REG_OFFSET(r[sh4reg]));
/* Load the cached pointer to the current FP register bank into bankreg. */
139 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
142 * Load an FR register (single-precision floating point) into an integer x86
143 * register (eg for register-to-register moves)
/* mov [bankreg + (frm^1)*4], x86reg - the ^1 swaps word order within each
 * pair to match the host layout of the FR bank. */
145 void static inline load_fr( int bankreg, int x86reg, int frm )
147 OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
151 * Store an FR register (single-precision floating point) into an integer x86
152 * register (eg for register-to-register moves)
/* mov x86reg, [bankreg + (frn^1)*4] - inverse of load_fr(). */
154 void static inline store_fr( int bankreg, int x86reg, int frn )
156 OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
161 * Load a pointer to the back fp back into the specified x86 register. The
162 * bankreg must have been previously loaded with FPSCR.
/* FPSCR bit 21 (FR) selects the bank: shifting right by 21-6 leaves that
 * bit at value 0x40 (= 64 bytes, one bank), the AND isolates it, and the
 * LEA adds it to &sh4r.fr to form the bank pointer. */
165 static inline void load_xf_bank( int bankreg )
168 SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
169 AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
170 OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
174 * Update the fr_bank pointer based on the current fpscr value.
/* Same bank computation as load_xf_bank(), but the result is written back
 * to sh4r.fr_bank (clobbering fpscrreg) rather than kept in a register. */
176 static inline void update_fr_bank( int fpscrreg )
178 SHR_imm8_r32( (21 - 6), fpscrreg ); // Extract bit 21 then *64 for bank size
179 AND_imm8s_r32( 0x40, fpscrreg ); // Complete extraction
180 OP(0x8D); OP(0x44+(fpscrreg<<3)); OP(0x28+fpscrreg); OP(REG_OFFSET(fr)); // LEA [ebp+fpscrreg+disp], fpscrreg
181 store_spreg( fpscrreg, REG_OFFSET(fr_bank) );
184 * Push FPUL (as a 32-bit float) onto the FPU stack
/* FLD.S [ebp + R_FPUL] (0xD9 /0, ModRM 0x45 = [ebp+disp8]). */
186 static inline void push_fpul( )
188 OP(0xD9); OP(0x45); OP(R_FPUL);
192 * Pop FPUL (as a 32-bit float) from the FPU stack
/* FSTP.S [ebp + R_FPUL] (0xD9 /3, ModRM 0x5D = [ebp+disp8]). */
194 static inline void pop_fpul( )
196 OP(0xD9); OP(0x5D); OP(R_FPUL);
200 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
201 * with the location of the current fp bank.
203 static inline void push_fr( int bankreg, int frm )
205 OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
209 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
210 * with bankreg previously loaded with the location of the current fp bank.
212 static inline void pop_fr( int bankreg, int frm )
214 OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
218 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
219 * with the location of the current fp bank.
/* Note: no ^1 swizzle here - doubles use the natural pair layout. */
221 static inline void push_dr( int bankreg, int frm )
223 OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
/* Pop a 64-bit double from the FPU stack back into the fp bank. */
226 static inline void pop_dr( int bankreg, int frm )
228 OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
232 * Note: clobbers EAX to make the indirect call - this isn't usually
233 * a problem since the callee will usually clobber it anyway.
/* Emit an indirect call to ptr via EAX (the CALL emission is on a line
 * elided from this view). */
235 static inline void call_func0( void *ptr )
237 load_imm32(R_EAX, (uint32_t)ptr);
/* Call ptr with one 32-bit stack argument (cdecl): push arg1, call, then
 * pop 4 bytes of arguments. Push/call emission elided from this view. */
241 static inline void call_func1( void *ptr, int arg1 )
245 ADD_imm8s_r32( 4, R_ESP );
/* Call ptr with two 32-bit stack arguments (cdecl): push args, call, then
 * pop 8 bytes of arguments. Push/call emission elided from this view. */
248 static inline void call_func2( void *ptr, int arg1, int arg2 )
253 ADD_imm8s_r32( 8, R_ESP );
257 * Write a double (64-bit) value into memory, with the first word in arg2a, and
258 * the second in arg2b
/* Emits two sh4_write_long calls (argument pushes are on elided lines),
 * popping 8 bytes of cdecl arguments after each; addr is temporarily
 * advanced by 4 for the second word and restored afterwards. */
261 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
263 ADD_imm8s_r32( 4, addr );
266 ADD_imm8s_r32( -4, addr );
269 call_func0(sh4_write_long);
270 ADD_imm8s_r32( 8, R_ESP );
271 call_func0(sh4_write_long);
272 ADD_imm8s_r32( 8, R_ESP );
276 * Read a double (64-bit) value from memory, writing the first word into arg2a
277 * and the second into arg2b. The addr must not be in EAX
/* Two sh4_read_long calls; each result comes back in EAX (hence the
 * constraint that addr != EAX) and is moved into the destination register.
 * Argument pushes and the arg2a move are on lines elided from this view. */
280 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
283 call_func0(sh4_read_long);
286 ADD_imm8s_r32( 4, addr );
288 call_func0(sh4_read_long);
289 ADD_imm8s_r32( 4, R_ESP );
290 MOV_r32_r32( R_EAX, arg2b );
294 /* Exception checks - Note that all exception checks will clobber EAX */
/* Emit a privilege check, at most once per block (priv_checked caches it):
 * tests SR.MD and branches to the ILLEGAL exit stub - or SLOT_ILLEGAL when
 * inside a delay slot - if the CPU is in user mode. */
295 static void check_priv( )
297 if( !sh4_x86.priv_checked ) {
298 sh4_x86.priv_checked = TRUE;
299 load_spreg( R_EAX, R_SR );
300 AND_imm32_r32( SR_MD, R_EAX );
301 if( sh4_x86.in_delay_slot ) {
302 JE_exit( EXIT_SLOT_ILLEGAL );
304 JE_exit( EXIT_ILLEGAL );
/* Emit an FPU-enabled check, at most once per block: tests SR.FD (FPU
 * disable) and branches to the FPU_DISABLED exit stub - or the SLOT
 * variant in a delay slot - when the bit is set. Clobbers EAX. */
309 static void check_fpuen( )
311 if( !sh4_x86.fpuen_checked ) {
312 sh4_x86.fpuen_checked = TRUE;
313 load_spreg( R_EAX, R_SR );
314 AND_imm32_r32( SR_FD, R_EAX );
315 if( sh4_x86.in_delay_slot ) {
316 JNE_exit(EXIT_SLOT_FPU_DISABLED);
318 JNE_exit(EXIT_FPU_DISABLED);
/* Emit a 16-bit read-alignment check: low address bit set => branch to the
 * data-address-error (read) exit stub. */
323 static void check_ralign16( int x86reg )
325 TEST_imm32_r32( 0x00000001, x86reg );
326 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 16-bit write-alignment check: low address bit set => branch to
 * the data-address-error (write) exit stub. */
329 static void check_walign16( int x86reg )
331 TEST_imm32_r32( 0x00000001, x86reg );
332 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Emit a 32-bit read-alignment check: either of the two low address bits
 * set => branch to the data-address-error (read) exit stub. */
335 static void check_ralign32( int x86reg )
337 TEST_imm32_r32( 0x00000003, x86reg );
338 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 32-bit write-alignment check: either of the two low address bits
 * set => branch to the data-address-error (write) exit stub. */
340 static void check_walign32( int x86reg )
342 TEST_imm32_r32( 0x00000003, x86reg );
343 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Convenience macros for emitting guest memory accesses through the
 * sh4_read_* / sh4_write_* callbacks. Reads return their result in EAX;
 * MEM_RESULT copies it to value_reg when that differs. All of these may
 * clobber EAX (call_func* loads the target address into it).
 *
 * Each multi-statement macro is wrapped in do { ... } while(0) so it
 * expands to exactly one statement and stays correct inside unbraced
 * if/else bodies (CERT PRE10-C). Behaviour is otherwise unchanged. */
#define MEM_RESULT(value_reg) do { if((value_reg) != R_EAX) { MOV_r32_r32(R_EAX,(value_reg)); } } while(0)
#define MEM_READ_BYTE( addr_reg, value_reg ) do { call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg); } while(0)
#define MEM_READ_WORD( addr_reg, value_reg ) do { call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg); } while(0)
#define MEM_READ_LONG( addr_reg, value_reg ) do { call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg); } while(0)
#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
/* Bail out of translating an instruction that is illegal in a delay slot:
 * emit a jump to the SLOT_ILLEGAL exit stub, clear the delay-slot flag,
 * and return 1 from the enclosing translation function. */
#define SLOTILLEGAL() do { JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1; } while(0)
360 * Emit the 'start of block' assembly. Sets up the stack frame and save
363 void sh4_translate_begin_block()
/* EBP holds &sh4r for the whole block (all sh4r accesses are ebp-relative);
 * ESI is zeroed here and appears to count instruction words for the
 * exception-path PC fixup in sh4_translate_end_block - TODO confirm. */
367 load_imm32( R_EBP, (uint32_t)&sh4r );
370 XOR_r32_r32(R_ESI, R_ESI);
/* Reset the per-block translation state. */
372 sh4_x86.in_delay_slot = FALSE;
373 sh4_x86.priv_checked = FALSE;
374 sh4_x86.fpuen_checked = FALSE;
375 sh4_x86.backpatch_posn = 0;
376 sh4_x86.exit_code = 1;
380 * Exit the block early (ie branch out), conditionally or otherwise
/* (The enclosing function header is elided from this view.) EDI carries
 * the target PC; elapsed cycles (derived from sh4_cpu_period, with an
 * intermediate multiply presumably on an elided line - confirm) are added
 * to slice_cycle, and the block's exit code is loaded into EAX for the
 * return to the run loop. */
384 store_spreg( R_EDI, REG_OFFSET(pc) );
385 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
386 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
388 ADD_r32_r32( R_EAX, R_ECX );
389 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
390 load_imm32( R_EAX, sh4_x86.exit_code );
398 * Flush any open regs back to memory, restore SI/DI/, update PC, etc
400 void sh4_translate_end_block( sh4addr_t pc ) {
401 assert( !sh4_x86.in_delay_slot ); // should never stop here
402 // Normal termination - save PC, cycle count
405 if( sh4_x86.backpatch_posn != 0 ) {
406 uint8_t *end_ptr = xlat_output;
407 // Exception termination. Jump block for various exception codes:
/* Each entry below is 7 bytes (5-byte PUSH imm32 + 2-byte JMP rel8),
 * matching the EXIT_* constants (multiples of 7): a Jcc_exit(EXIT_x)
 * backpatched to end_ptr + EXIT_x lands on its entry. The rel8 jumps all
 * funnel into the common exception-raise code that follows. */
408 PUSH_imm32( EXC_DATA_ADDR_READ );
409 JMP_rel8( 33, target1 );
410 PUSH_imm32( EXC_DATA_ADDR_WRITE );
411 JMP_rel8( 26, target2 );
412 PUSH_imm32( EXC_ILLEGAL );
413 JMP_rel8( 19, target3 );
414 PUSH_imm32( EXC_SLOT_ILLEGAL );
415 JMP_rel8( 12, target4 );
416 PUSH_imm32( EXC_FPU_DISABLED );
417 JMP_rel8( 5, target5 );
418 PUSH_imm32( EXC_SLOT_FPU_DISABLED );
/* Common exception exit: pc += 2*ESI (ESI appears to count instruction
 * words - confirm), account elapsed cycles into slice_cycle, then call
 * sh4_raise_exception with the code pushed above (popped afterwards). */
425 load_spreg( R_ECX, REG_OFFSET(pc) );
426 ADD_r32_r32( R_ESI, R_ECX );
427 ADD_r32_r32( R_ESI, R_ECX );
428 store_spreg( R_ECX, REG_OFFSET(pc) );
429 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
430 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
432 ADD_r32_r32( R_EAX, R_ECX );
433 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
435 load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
436 CALL_r32( R_EAX ); // 2
437 ADD_imm8s_r32( 4, R_ESP );
/* Retarget every recorded exception branch at the jump block above. */
443 sh4_x86_do_backpatch( end_ptr );
/* One-page instruction fetch cache used by sh4_x86_translate_instruction:
 * sh4_icache points at the host page holding the guest page whose
 * (pc >> 12) tag is sh4_icache_addr. */
449 extern uint16_t *sh4_icache;
450 extern uint32_t sh4_icache_addr;
453 * Translate a single instruction. Delayed branches are handled specially
454 * by translating both branch and delayed instruction as a single unit (as
457 * @return true if the instruction marks the end of a basic block
460 uint32_t sh4_x86_translate_instruction( uint32_t pc )
463 /* Read instruction */
464 uint32_t pageaddr = pc >> 12;
465 if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
466 ir = sh4_icache[(pc&0xFFF)>>1];
468 sh4_icache = (uint16_t *)mem_get_page(pc);
469 if( ((uint32_t)sh4_icache) < MAX_IO_REGIONS ) {
470 /* If someone's actually been so daft as to try to execute out of an IO
471 * region, fallback on the full-blown memory read
474 ir = sh4_read_word(pc);
476 sh4_icache_addr = pageaddr;
477 ir = sh4_icache[(pc&0xFFF)>>1];
484 load_reg( R_EAX, Rm );
485 load_reg( R_ECX, Rn );
486 ADD_r32_r32( R_EAX, R_ECX );
487 store_reg( R_ECX, Rn );
490 load_reg( R_EAX, Rn );
491 ADD_imm8s_r32( imm, R_EAX );
492 store_reg( R_EAX, Rn );
495 load_reg( R_EAX, Rm );
496 load_reg( R_ECX, Rn );
498 ADC_r32_r32( R_EAX, R_ECX );
499 store_reg( R_ECX, Rn );
503 load_reg( R_EAX, Rm );
504 load_reg( R_ECX, Rn );
505 ADD_r32_r32( R_EAX, R_ECX );
506 store_reg( R_ECX, Rn );
510 load_reg( R_EAX, Rm );
511 load_reg( R_ECX, Rn );
512 AND_r32_r32( R_EAX, R_ECX );
513 store_reg( R_ECX, Rn );
516 load_reg( R_EAX, 0 );
517 AND_imm32_r32(imm, R_EAX);
518 store_reg( R_EAX, 0 );
520 AND.B #imm, @(R0, GBR) {:
521 load_reg( R_EAX, 0 );
522 load_spreg( R_ECX, R_GBR );
523 ADD_r32_r32( R_EAX, R_ECX );
525 call_func0(sh4_read_byte);
527 AND_imm32_r32(imm, R_EAX );
528 MEM_WRITE_BYTE( R_ECX, R_EAX );
531 load_reg( R_EAX, Rm );
532 load_reg( R_ECX, Rn );
533 CMP_r32_r32( R_EAX, R_ECX );
537 load_reg( R_EAX, 0 );
538 CMP_imm8s_r32(imm, R_EAX);
542 load_reg( R_EAX, Rm );
543 load_reg( R_ECX, Rn );
544 CMP_r32_r32( R_EAX, R_ECX );
548 load_reg( R_EAX, Rm );
549 load_reg( R_ECX, Rn );
550 CMP_r32_r32( R_EAX, R_ECX );
554 load_reg( R_EAX, Rm );
555 load_reg( R_ECX, Rn );
556 CMP_r32_r32( R_EAX, R_ECX );
560 load_reg( R_EAX, Rm );
561 load_reg( R_ECX, Rn );
562 CMP_r32_r32( R_EAX, R_ECX );
566 load_reg( R_EAX, Rn );
567 CMP_imm8s_r32( 0, R_EAX );
571 load_reg( R_EAX, Rn );
572 CMP_imm8s_r32( 0, R_EAX );
576 load_reg( R_EAX, Rm );
577 load_reg( R_ECX, Rn );
578 XOR_r32_r32( R_ECX, R_EAX );
579 TEST_r8_r8( R_AL, R_AL );
580 JE_rel8(13, target1);
581 TEST_r8_r8( R_AH, R_AH ); // 2
583 SHR_imm8_r32( 16, R_EAX ); // 3
584 TEST_r8_r8( R_AL, R_AL ); // 2
586 TEST_r8_r8( R_AH, R_AH ); // 2
593 load_reg( R_EAX, Rm );
594 load_reg( R_ECX, Rn );
595 SHR_imm8_r32( 31, R_EAX );
596 SHR_imm8_r32( 31, R_ECX );
597 store_spreg( R_EAX, R_M );
598 store_spreg( R_ECX, R_Q );
599 CMP_r32_r32( R_EAX, R_ECX );
603 XOR_r32_r32( R_EAX, R_EAX );
604 store_spreg( R_EAX, R_Q );
605 store_spreg( R_EAX, R_M );
606 store_spreg( R_EAX, R_T );
609 load_spreg( R_ECX, R_M );
610 load_reg( R_EAX, Rn );
613 SETC_r8( R_DL ); // Q'
614 CMP_sh4r_r32( R_Q, R_ECX );
616 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
619 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_EAX );
621 store_reg( R_EAX, Rn ); // Done with Rn now
622 SETC_r8(R_AL); // tmp1
623 XOR_r8_r8( R_DL, R_AL ); // Q' = Q ^ tmp1
624 XOR_r8_r8( R_AL, R_CL ); // Q'' = Q' ^ M
625 store_spreg( R_ECX, R_Q );
626 XOR_imm8s_r32( 1, R_AL ); // T = !Q'
627 MOVZX_r8_r32( R_AL, R_EAX );
628 store_spreg( R_EAX, R_T );
631 load_reg( R_EAX, Rm );
632 load_reg( R_ECX, Rn );
634 store_spreg( R_EDX, R_MACH );
635 store_spreg( R_EAX, R_MACL );
638 load_reg( R_EAX, Rm );
639 load_reg( R_ECX, Rn );
641 store_spreg( R_EDX, R_MACH );
642 store_spreg( R_EAX, R_MACL );
645 load_reg( R_EAX, Rn );
646 ADD_imm8s_r32( -1, R_EAX );
647 store_reg( R_EAX, Rn );
651 load_reg( R_EAX, Rm );
652 MOVSX_r8_r32( R_EAX, R_EAX );
653 store_reg( R_EAX, Rn );
656 load_reg( R_EAX, Rm );
657 MOVSX_r16_r32( R_EAX, R_EAX );
658 store_reg( R_EAX, Rn );
661 load_reg( R_EAX, Rm );
662 MOVZX_r8_r32( R_EAX, R_EAX );
663 store_reg( R_EAX, Rn );
666 load_reg( R_EAX, Rm );
667 MOVZX_r16_r32( R_EAX, R_EAX );
668 store_reg( R_EAX, Rn );
671 load_reg( R_ECX, Rm );
672 check_ralign32( R_ECX );
673 load_reg( R_ECX, Rn );
674 check_ralign32( R_ECX );
675 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
676 MEM_READ_LONG( R_ECX, R_EAX );
678 load_reg( R_ECX, Rm );
679 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
680 MEM_READ_LONG( R_ECX, R_EAX );
683 ADD_r32_sh4r( R_EAX, R_MACL );
684 ADC_r32_sh4r( R_EDX, R_MACH );
686 load_spreg( R_ECX, R_S );
687 TEST_r32_r32(R_ECX, R_ECX);
689 call_func0( signsat48 );
693 load_reg( R_ECX, Rm );
694 check_ralign16( R_ECX );
695 load_reg( R_ECX, Rn );
696 check_ralign16( R_ECX );
697 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
698 MEM_READ_WORD( R_ECX, R_EAX );
700 load_reg( R_ECX, Rm );
701 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
702 MEM_READ_WORD( R_ECX, R_EAX );
706 load_spreg( R_ECX, R_S );
707 TEST_r32_r32( R_ECX, R_ECX );
708 JE_rel8( 47, nosat );
710 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
711 JNO_rel8( 51, end ); // 2
712 load_imm32( R_EDX, 1 ); // 5
713 store_spreg( R_EDX, R_MACH ); // 6
714 JS_rel8( 13, positive ); // 2
715 load_imm32( R_EAX, 0x80000000 );// 5
716 store_spreg( R_EAX, R_MACL ); // 6
717 JMP_rel8( 25, end2 ); // 2
719 JMP_TARGET(positive);
720 load_imm32( R_EAX, 0x7FFFFFFF );// 5
721 store_spreg( R_EAX, R_MACL ); // 6
722 JMP_rel8( 12, end3); // 2
725 ADD_r32_sh4r( R_EAX, R_MACL ); // 6
726 ADC_r32_sh4r( R_EDX, R_MACH ); // 6
732 load_spreg( R_EAX, R_T );
733 store_reg( R_EAX, Rn );
736 load_reg( R_EAX, Rm );
737 load_reg( R_ECX, Rn );
739 store_spreg( R_EAX, R_MACL );
742 load_reg16s( R_EAX, Rm );
743 load_reg16s( R_ECX, Rn );
745 store_spreg( R_EAX, R_MACL );
748 load_reg16u( R_EAX, Rm );
749 load_reg16u( R_ECX, Rn );
751 store_spreg( R_EAX, R_MACL );
754 load_reg( R_EAX, Rm );
756 store_reg( R_EAX, Rn );
759 load_reg( R_EAX, Rm );
760 XOR_r32_r32( R_ECX, R_ECX );
762 SBB_r32_r32( R_EAX, R_ECX );
763 store_reg( R_ECX, Rn );
767 load_reg( R_EAX, Rm );
769 store_reg( R_EAX, Rn );
772 load_reg( R_EAX, Rm );
773 load_reg( R_ECX, Rn );
774 OR_r32_r32( R_EAX, R_ECX );
775 store_reg( R_ECX, Rn );
778 load_reg( R_EAX, 0 );
779 OR_imm32_r32(imm, R_EAX);
780 store_reg( R_EAX, 0 );
782 OR.B #imm, @(R0, GBR) {:
783 load_reg( R_EAX, 0 );
784 load_spreg( R_ECX, R_GBR );
785 ADD_r32_r32( R_EAX, R_ECX );
787 call_func0(sh4_read_byte);
789 OR_imm32_r32(imm, R_EAX );
790 MEM_WRITE_BYTE( R_ECX, R_EAX );
793 load_reg( R_EAX, Rn );
796 store_reg( R_EAX, Rn );
800 load_reg( R_EAX, Rn );
803 store_reg( R_EAX, Rn );
807 load_reg( R_EAX, Rn );
809 store_reg( R_EAX, Rn );
813 load_reg( R_EAX, Rn );
815 store_reg( R_EAX, Rn );
819 /* Annoyingly enough, not directly convertible */
820 load_reg( R_EAX, Rn );
821 load_reg( R_ECX, Rm );
822 CMP_imm32_r32( 0, R_ECX );
825 NEG_r32( R_ECX ); // 2
826 AND_imm8_r8( 0x1F, R_CL ); // 3
827 JE_rel8( 4, emptysar); // 2
828 SAR_r32_CL( R_EAX ); // 2
829 JMP_rel8(10, end); // 2
831 JMP_TARGET(emptysar);
832 SAR_imm8_r32(31, R_EAX ); // 3
836 AND_imm8_r8( 0x1F, R_CL ); // 3
837 SHL_r32_CL( R_EAX ); // 2
840 store_reg( R_EAX, Rn );
843 load_reg( R_EAX, Rn );
844 load_reg( R_ECX, Rm );
845 CMP_imm32_r32( 0, R_ECX );
848 NEG_r32( R_ECX ); // 2
849 AND_imm8_r8( 0x1F, R_CL ); // 3
850 JE_rel8( 4, emptyshr );
851 SHR_r32_CL( R_EAX ); // 2
852 JMP_rel8(9, end); // 2
854 JMP_TARGET(emptyshr);
855 XOR_r32_r32( R_EAX, R_EAX );
859 AND_imm8_r8( 0x1F, R_CL ); // 3
860 SHL_r32_CL( R_EAX ); // 2
863 store_reg( R_EAX, Rn );
866 load_reg( R_EAX, Rn );
869 store_reg( R_EAX, Rn );
872 load_reg( R_EAX, Rn );
875 store_reg( R_EAX, Rn );
878 load_reg( R_EAX, Rn );
881 store_reg( R_EAX, Rn );
884 load_reg( R_EAX, Rn );
885 SHL_imm8_r32( 2, R_EAX );
886 store_reg( R_EAX, Rn );
889 load_reg( R_EAX, Rn );
890 SHL_imm8_r32( 8, R_EAX );
891 store_reg( R_EAX, Rn );
894 load_reg( R_EAX, Rn );
895 SHL_imm8_r32( 16, R_EAX );
896 store_reg( R_EAX, Rn );
899 load_reg( R_EAX, Rn );
902 store_reg( R_EAX, Rn );
905 load_reg( R_EAX, Rn );
906 SHR_imm8_r32( 2, R_EAX );
907 store_reg( R_EAX, Rn );
910 load_reg( R_EAX, Rn );
911 SHR_imm8_r32( 8, R_EAX );
912 store_reg( R_EAX, Rn );
915 load_reg( R_EAX, Rn );
916 SHR_imm8_r32( 16, R_EAX );
917 store_reg( R_EAX, Rn );
920 load_reg( R_EAX, Rm );
921 load_reg( R_ECX, Rn );
922 SUB_r32_r32( R_EAX, R_ECX );
923 store_reg( R_ECX, Rn );
926 load_reg( R_EAX, Rm );
927 load_reg( R_ECX, Rn );
929 SBB_r32_r32( R_EAX, R_ECX );
930 store_reg( R_ECX, Rn );
934 load_reg( R_EAX, Rm );
935 load_reg( R_ECX, Rn );
936 SUB_r32_r32( R_EAX, R_ECX );
937 store_reg( R_ECX, Rn );
941 load_reg( R_EAX, Rm );
942 XCHG_r8_r8( R_AL, R_AH );
943 store_reg( R_EAX, Rn );
946 load_reg( R_EAX, Rm );
947 MOV_r32_r32( R_EAX, R_ECX );
948 SHL_imm8_r32( 16, R_ECX );
949 SHR_imm8_r32( 16, R_EAX );
950 OR_r32_r32( R_EAX, R_ECX );
951 store_reg( R_ECX, Rn );
954 load_reg( R_ECX, Rn );
955 MEM_READ_BYTE( R_ECX, R_EAX );
956 TEST_r8_r8( R_AL, R_AL );
958 OR_imm8_r8( 0x80, R_AL );
959 load_reg( R_ECX, Rn );
960 MEM_WRITE_BYTE( R_ECX, R_EAX );
963 load_reg( R_EAX, Rm );
964 load_reg( R_ECX, Rn );
965 TEST_r32_r32( R_EAX, R_ECX );
969 load_reg( R_EAX, 0 );
970 TEST_imm32_r32( imm, R_EAX );
973 TST.B #imm, @(R0, GBR) {:
975 load_reg( R_ECX, R_GBR);
976 ADD_r32_r32( R_EAX, R_ECX );
977 MEM_READ_BYTE( R_ECX, R_EAX );
978 TEST_imm8_r8( imm, R_AL );
982 load_reg( R_EAX, Rm );
983 load_reg( R_ECX, Rn );
984 XOR_r32_r32( R_EAX, R_ECX );
985 store_reg( R_ECX, Rn );
988 load_reg( R_EAX, 0 );
989 XOR_imm32_r32( imm, R_EAX );
990 store_reg( R_EAX, 0 );
992 XOR.B #imm, @(R0, GBR) {:
993 load_reg( R_EAX, 0 );
994 load_spreg( R_ECX, R_GBR );
995 ADD_r32_r32( R_EAX, R_ECX );
997 call_func0(sh4_read_byte);
999 XOR_imm32_r32( imm, R_EAX );
1000 MEM_WRITE_BYTE( R_ECX, R_EAX );
1003 load_reg( R_EAX, Rm );
1004 load_reg( R_ECX, Rn );
1005 SHL_imm8_r32( 16, R_EAX );
1006 SHR_imm8_r32( 16, R_ECX );
1007 OR_r32_r32( R_EAX, R_ECX );
1008 store_reg( R_ECX, Rn );
1011 /* Data move instructions */
1013 load_reg( R_EAX, Rm );
1014 store_reg( R_EAX, Rn );
1017 load_imm32( R_EAX, imm );
1018 store_reg( R_EAX, Rn );
1021 load_reg( R_EAX, Rm );
1022 load_reg( R_ECX, Rn );
1023 MEM_WRITE_BYTE( R_ECX, R_EAX );
1026 load_reg( R_EAX, Rm );
1027 load_reg( R_ECX, Rn );
1028 ADD_imm8s_r32( -1, R_ECX );
1029 store_reg( R_ECX, Rn );
1030 MEM_WRITE_BYTE( R_ECX, R_EAX );
1032 MOV.B Rm, @(R0, Rn) {:
1033 load_reg( R_EAX, 0 );
1034 load_reg( R_ECX, Rn );
1035 ADD_r32_r32( R_EAX, R_ECX );
1036 load_reg( R_EAX, Rm );
1037 MEM_WRITE_BYTE( R_ECX, R_EAX );
1039 MOV.B R0, @(disp, GBR) {:
1040 load_reg( R_EAX, 0 );
1041 load_spreg( R_ECX, R_GBR );
1042 ADD_imm32_r32( disp, R_ECX );
1043 MEM_WRITE_BYTE( R_ECX, R_EAX );
1045 MOV.B R0, @(disp, Rn) {:
1046 load_reg( R_EAX, 0 );
1047 load_reg( R_ECX, Rn );
1048 ADD_imm32_r32( disp, R_ECX );
1049 MEM_WRITE_BYTE( R_ECX, R_EAX );
1052 load_reg( R_ECX, Rm );
1053 MEM_READ_BYTE( R_ECX, R_EAX );
1054 store_reg( R_EAX, Rn );
1057 load_reg( R_ECX, Rm );
1058 MOV_r32_r32( R_ECX, R_EAX );
1059 ADD_imm8s_r32( 1, R_EAX );
1060 store_reg( R_EAX, Rm );
1061 MEM_READ_BYTE( R_ECX, R_EAX );
1062 store_reg( R_EAX, Rn );
1064 MOV.B @(R0, Rm), Rn {:
1065 load_reg( R_EAX, 0 );
1066 load_reg( R_ECX, Rm );
1067 ADD_r32_r32( R_EAX, R_ECX );
1068 MEM_READ_BYTE( R_ECX, R_EAX );
1069 store_reg( R_EAX, Rn );
1071 MOV.B @(disp, GBR), R0 {:
1072 load_spreg( R_ECX, R_GBR );
1073 ADD_imm32_r32( disp, R_ECX );
1074 MEM_READ_BYTE( R_ECX, R_EAX );
1075 store_reg( R_EAX, 0 );
1077 MOV.B @(disp, Rm), R0 {:
1078 load_reg( R_ECX, Rm );
1079 ADD_imm32_r32( disp, R_ECX );
1080 MEM_READ_BYTE( R_ECX, R_EAX );
1081 store_reg( R_EAX, 0 );
1084 load_reg( R_EAX, Rm );
1085 load_reg( R_ECX, Rn );
1086 check_walign32(R_ECX);
1087 MEM_WRITE_LONG( R_ECX, R_EAX );
1090 load_reg( R_EAX, Rm );
1091 load_reg( R_ECX, Rn );
1092 check_walign32( R_ECX );
1093 ADD_imm8s_r32( -4, R_ECX );
1094 store_reg( R_ECX, Rn );
1095 MEM_WRITE_LONG( R_ECX, R_EAX );
1097 MOV.L Rm, @(R0, Rn) {:
1098 load_reg( R_EAX, 0 );
1099 load_reg( R_ECX, Rn );
1100 ADD_r32_r32( R_EAX, R_ECX );
1101 check_walign32( R_ECX );
1102 load_reg( R_EAX, Rm );
1103 MEM_WRITE_LONG( R_ECX, R_EAX );
1105 MOV.L R0, @(disp, GBR) {:
1106 load_spreg( R_ECX, R_GBR );
1107 load_reg( R_EAX, 0 );
1108 ADD_imm32_r32( disp, R_ECX );
1109 check_walign32( R_ECX );
1110 MEM_WRITE_LONG( R_ECX, R_EAX );
1112 MOV.L Rm, @(disp, Rn) {:
1113 load_reg( R_ECX, Rn );
1114 load_reg( R_EAX, Rm );
1115 ADD_imm32_r32( disp, R_ECX );
1116 check_walign32( R_ECX );
1117 MEM_WRITE_LONG( R_ECX, R_EAX );
1120 load_reg( R_ECX, Rm );
1121 check_ralign32( R_ECX );
1122 MEM_READ_LONG( R_ECX, R_EAX );
1123 store_reg( R_EAX, Rn );
1126 load_reg( R_EAX, Rm );
1127 check_ralign32( R_EAX );
1128 MOV_r32_r32( R_EAX, R_ECX );
1129 ADD_imm8s_r32( 4, R_EAX );
1130 store_reg( R_EAX, Rm );
1131 MEM_READ_LONG( R_ECX, R_EAX );
1132 store_reg( R_EAX, Rn );
1134 MOV.L @(R0, Rm), Rn {:
1135 load_reg( R_EAX, 0 );
1136 load_reg( R_ECX, Rm );
1137 ADD_r32_r32( R_EAX, R_ECX );
1138 check_ralign32( R_ECX );
1139 MEM_READ_LONG( R_ECX, R_EAX );
1140 store_reg( R_EAX, Rn );
1142 MOV.L @(disp, GBR), R0 {:
1143 load_spreg( R_ECX, R_GBR );
1144 ADD_imm32_r32( disp, R_ECX );
1145 check_ralign32( R_ECX );
1146 MEM_READ_LONG( R_ECX, R_EAX );
1147 store_reg( R_EAX, 0 );
1149 MOV.L @(disp, PC), Rn {:
1150 if( sh4_x86.in_delay_slot ) {
1153 uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
1154 char *ptr = mem_get_region(target);
1156 MOV_moff32_EAX( (uint32_t)ptr );
1158 load_imm32( R_ECX, target );
1159 MEM_READ_LONG( R_ECX, R_EAX );
1161 store_reg( R_EAX, Rn );
1164 MOV.L @(disp, Rm), Rn {:
1165 load_reg( R_ECX, Rm );
1166 ADD_imm8s_r32( disp, R_ECX );
1167 check_ralign32( R_ECX );
1168 MEM_READ_LONG( R_ECX, R_EAX );
1169 store_reg( R_EAX, Rn );
1172 load_reg( R_ECX, Rn );
1173 check_walign16( R_ECX );
1174 load_reg( R_EAX, Rm );
1175 MEM_WRITE_WORD( R_ECX, R_EAX );
1178 load_reg( R_ECX, Rn );
1179 check_walign16( R_ECX );
1180 load_reg( R_EAX, Rm );
1181 ADD_imm8s_r32( -2, R_ECX );
1182 store_reg( R_ECX, Rn );
1183 MEM_WRITE_WORD( R_ECX, R_EAX );
1185 MOV.W Rm, @(R0, Rn) {:
1186 load_reg( R_EAX, 0 );
1187 load_reg( R_ECX, Rn );
1188 ADD_r32_r32( R_EAX, R_ECX );
1189 check_walign16( R_ECX );
1190 load_reg( R_EAX, Rm );
1191 MEM_WRITE_WORD( R_ECX, R_EAX );
1193 MOV.W R0, @(disp, GBR) {:
1194 load_spreg( R_ECX, R_GBR );
1195 load_reg( R_EAX, 0 );
1196 ADD_imm32_r32( disp, R_ECX );
1197 check_walign16( R_ECX );
1198 MEM_WRITE_WORD( R_ECX, R_EAX );
1200 MOV.W R0, @(disp, Rn) {:
1201 load_reg( R_ECX, Rn );
1202 load_reg( R_EAX, 0 );
1203 ADD_imm32_r32( disp, R_ECX );
1204 check_walign16( R_ECX );
1205 MEM_WRITE_WORD( R_ECX, R_EAX );
1208 load_reg( R_ECX, Rm );
1209 check_ralign16( R_ECX );
1210 MEM_READ_WORD( R_ECX, R_EAX );
1211 store_reg( R_EAX, Rn );
1214 load_reg( R_EAX, Rm );
1215 check_ralign16( R_EAX );
1216 MOV_r32_r32( R_EAX, R_ECX );
1217 ADD_imm8s_r32( 2, R_EAX );
1218 store_reg( R_EAX, Rm );
1219 MEM_READ_WORD( R_ECX, R_EAX );
1220 store_reg( R_EAX, Rn );
1222 MOV.W @(R0, Rm), Rn {:
1223 load_reg( R_EAX, 0 );
1224 load_reg( R_ECX, Rm );
1225 ADD_r32_r32( R_EAX, R_ECX );
1226 check_ralign16( R_ECX );
1227 MEM_READ_WORD( R_ECX, R_EAX );
1228 store_reg( R_EAX, Rn );
1230 MOV.W @(disp, GBR), R0 {:
1231 load_spreg( R_ECX, R_GBR );
1232 ADD_imm32_r32( disp, R_ECX );
1233 check_ralign16( R_ECX );
1234 MEM_READ_WORD( R_ECX, R_EAX );
1235 store_reg( R_EAX, 0 );
1237 MOV.W @(disp, PC), Rn {:
1238 if( sh4_x86.in_delay_slot ) {
1241 load_imm32( R_ECX, pc + disp + 4 );
1242 MEM_READ_WORD( R_ECX, R_EAX );
1243 store_reg( R_EAX, Rn );
1246 MOV.W @(disp, Rm), R0 {:
1247 load_reg( R_ECX, Rm );
1248 ADD_imm32_r32( disp, R_ECX );
1249 check_ralign16( R_ECX );
1250 MEM_READ_WORD( R_ECX, R_EAX );
1251 store_reg( R_EAX, 0 );
1253 MOVA @(disp, PC), R0 {:
1254 if( sh4_x86.in_delay_slot ) {
1257 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
1258 store_reg( R_ECX, 0 );
1262 load_reg( R_EAX, 0 );
1263 load_reg( R_ECX, Rn );
1264 check_walign32( R_ECX );
1265 MEM_WRITE_LONG( R_ECX, R_EAX );
1268 /* Control transfer instructions */
1270 if( sh4_x86.in_delay_slot ) {
1273 load_imm32( R_EDI, pc + 2 );
1274 CMP_imm8s_sh4r( 0, R_T );
1275 JNE_rel8( 5, nottaken );
1276 load_imm32( R_EDI, disp + pc + 4 );
1277 JMP_TARGET(nottaken);
1283 if( sh4_x86.in_delay_slot ) {
1286 load_imm32( R_EDI, pc + 4 );
1287 CMP_imm8s_sh4r( 0, R_T );
1288 JNE_rel8( 5, nottaken );
1289 load_imm32( R_EDI, disp + pc + 4 );
1290 JMP_TARGET(nottaken);
1291 sh4_x86.in_delay_slot = TRUE;
1296 if( sh4_x86.in_delay_slot ) {
1299 load_imm32( R_EDI, disp + pc + 4 );
1300 sh4_x86.in_delay_slot = TRUE;
1305 if( sh4_x86.in_delay_slot ) {
1308 load_reg( R_EDI, Rn );
1309 ADD_imm32_r32( pc + 4, R_EDI );
1310 sh4_x86.in_delay_slot = TRUE;
1315 if( sh4_x86.in_delay_slot ) {
1318 load_imm32( R_EAX, pc + 4 );
1319 store_spreg( R_EAX, R_PR );
1320 load_imm32( R_EDI, disp + pc + 4 );
1321 sh4_x86.in_delay_slot = TRUE;
1326 if( sh4_x86.in_delay_slot ) {
1329 load_imm32( R_EAX, pc + 4 );
1330 store_spreg( R_EAX, R_PR );
1331 load_reg( R_EDI, Rn );
1332 ADD_r32_r32( R_EAX, R_EDI );
1333 sh4_x86.in_delay_slot = TRUE;
1338 if( sh4_x86.in_delay_slot ) {
1341 load_imm32( R_EDI, pc + 2 );
1342 CMP_imm8s_sh4r( 0, R_T );
1343 JE_rel8( 5, nottaken );
1344 load_imm32( R_EDI, disp + pc + 4 );
1345 JMP_TARGET(nottaken);
1351 if( sh4_x86.in_delay_slot ) {
1354 load_imm32( R_EDI, pc + 4 );
1355 CMP_imm8s_sh4r( 0, R_T );
1356 JE_rel8( 5, nottaken );
1357 load_imm32( R_EDI, disp + pc + 4 );
1358 JMP_TARGET(nottaken);
1359 sh4_x86.in_delay_slot = TRUE;
1364 if( sh4_x86.in_delay_slot ) {
1367 load_reg( R_EDI, Rn );
1368 sh4_x86.in_delay_slot = TRUE;
1373 if( sh4_x86.in_delay_slot ) {
1376 load_imm32( R_EAX, pc + 4 );
1377 store_spreg( R_EAX, R_PR );
1378 load_reg( R_EDI, Rn );
1379 sh4_x86.in_delay_slot = TRUE;
1385 if( sh4_x86.in_delay_slot ) {
1388 load_spreg( R_EDI, R_SPC );
1389 load_spreg( R_EAX, R_SSR );
1390 call_func1( sh4_write_sr, R_EAX );
1391 sh4_x86.in_delay_slot = TRUE;
1392 sh4_x86.priv_checked = FALSE;
1393 sh4_x86.fpuen_checked = FALSE;
1398 if( sh4_x86.in_delay_slot ) {
1401 load_spreg( R_EDI, R_PR );
1402 sh4_x86.in_delay_slot = TRUE;
1407 if( sh4_x86.in_delay_slot ) {
1411 call_func0( sh4_raise_trap );
1412 ADD_imm8s_r32( 4, R_ESP );
1416 if( sh4_x86.in_delay_slot ) {
1419 JMP_exit(EXIT_ILLEGAL);
1425 XOR_r32_r32(R_EAX, R_EAX);
1426 store_spreg( R_EAX, R_MACL );
1427 store_spreg( R_EAX, R_MACH );
1446 /* Floating point moves */
1448 /* As horrible as this looks, it's actually covering 5 separate cases:
1449 * 1. 32-bit fr-to-fr (PR=0)
1450 * 2. 64-bit dr-to-dr (PR=1, FRm&1 == 0, FRn&1 == 0 )
1451 * 3. 64-bit dr-to-xd (PR=1, FRm&1 == 0, FRn&1 == 1 )
1452 * 4. 64-bit xd-to-dr (PR=1, FRm&1 == 1, FRn&1 == 0 )
1453 * 5. 64-bit xd-to-xd (PR=1, FRm&1 == 1, FRn&1 == 1 )
1456 load_spreg( R_ECX, R_FPSCR );
1457 load_fr_bank( R_EDX );
1458 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1459 JNE_rel8(8, doublesize);
1460 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
1461 store_fr( R_EDX, R_EAX, FRn );
1464 JMP_TARGET(doublesize);
1465 load_xf_bank( R_ECX );
1466 load_fr( R_ECX, R_EAX, FRm-1 );
1468 load_fr( R_ECX, R_EDX, FRm );
1469 store_fr( R_ECX, R_EAX, FRn-1 );
1470 store_fr( R_ECX, R_EDX, FRn );
1471 } else /* FRn&1 == 0 */ {
1472 load_fr( R_ECX, R_ECX, FRm );
1473 store_fr( R_EDX, R_EAX, FRn );
1474 store_fr( R_EDX, R_ECX, FRn+1 );
1477 } else /* FRm&1 == 0 */ {
1480 load_xf_bank( R_ECX );
1481 load_fr( R_EDX, R_EAX, FRm );
1482 load_fr( R_EDX, R_EDX, FRm+1 );
1483 store_fr( R_ECX, R_EAX, FRn-1 );
1484 store_fr( R_ECX, R_EDX, FRn );
1486 } else /* FRn&1 == 0 */ {
1488 load_fr( R_EDX, R_EAX, FRm );
1489 load_fr( R_EDX, R_ECX, FRm+1 );
1490 store_fr( R_EDX, R_EAX, FRn );
1491 store_fr( R_EDX, R_ECX, FRn+1 );
// --- FMOV memory forms -------------------------------------------------
// Each form emits a single-precision path (FPSCR.SZ=0) and branches to a
// "doublesize" path when FPSCR.SZ=1 (64-bit pair transfer).  Two separate
// doublesize sequences appear per form: one goes through load_xf_bank
// (apparently the odd-register / XD-pair case) and one through
// load_fr_bank (even-register DR pair) -- the selecting if/else lines are
// not visible in this excerpt; TODO confirm against upstream sh4x86.in.
// NOTE(review): the integer arguments to JNE_rel8/JMP_rel8 are hand-counted
// byte lengths of the emitted x86 code being skipped (the trailing "// 12"
// style comments track those counts).  Any change to the emitted sequences
// must keep these offsets in sync.
//
// Store FRm to @Rn (pattern header not visible; presumably FMOV FRm, @Rn).
1498 load_reg( R_EDX, Rn );
1499 check_walign32( R_EDX );
1500 load_spreg( R_ECX, R_FPSCR );
1501 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1502 JNE_rel8(20, doublesize);
// SZ=0: write single FRm as a 32-bit long.
1503 load_fr_bank( R_ECX );
1504 load_fr( R_ECX, R_EAX, FRm );
1505 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
1507 JMP_rel8( 48, end );
// SZ=1, XF-bank variant: write the even/odd pair (FRm&~1, FRm|1).
1508 JMP_TARGET(doublesize);
1509 load_xf_bank( R_ECX );
1510 load_fr( R_ECX, R_EAX, FRm&0x0E );
1511 load_fr( R_ECX, R_ECX, FRm|0x01 );
1512 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
1515 JMP_rel8( 39, end );
// SZ=1, FR-bank variant of the same pair store.
1516 JMP_TARGET(doublesize);
1517 load_fr_bank( R_ECX );
1518 load_fr( R_ECX, R_EAX, FRm&0x0E );
1519 load_fr( R_ECX, R_ECX, FRm|0x01 );
1520 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
// Load @Rm into FRn (presumably FMOV @Rm, FRn).
1526 load_reg( R_EDX, Rm );
1527 check_ralign32( R_EDX );
1528 load_spreg( R_ECX, R_FPSCR );
1529 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1530 JNE_rel8(19, doublesize);
1531 MEM_READ_LONG( R_EDX, R_EAX );
1532 load_fr_bank( R_ECX );
1533 store_fr( R_ECX, R_EAX, FRn );
// SZ=1: 64-bit read returns the pair in EAX/EDX.
1536 JMP_TARGET(doublesize);
1537 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1538 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
1539 load_xf_bank( R_ECX );
1540 store_fr( R_ECX, R_EAX, FRn&0x0E );
1541 store_fr( R_ECX, R_EDX, FRn|0x01 );
1545 JMP_TARGET(doublesize);
1546 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1547 load_fr_bank( R_ECX );
1548 store_fr( R_ECX, R_EAX, FRn&0x0E );
1549 store_fr( R_ECX, R_EDX, FRn|0x01 );
// Pre-decrement store (presumably FMOV FRm, @-Rn): Rn is decremented by
// 4 (single) or 8 (pair) and written back before the memory write.
1555 load_reg( R_EDX, Rn );
1556 check_walign32( R_EDX );
1557 load_spreg( R_ECX, R_FPSCR );
1558 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1559 JNE_rel8(26, doublesize);
1560 load_fr_bank( R_ECX );
1561 load_fr( R_ECX, R_EAX, FRm );
1562 ADD_imm8s_r32(-4,R_EDX);
1563 store_reg( R_EDX, Rn );
1564 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
1566 JMP_rel8( 54, end );
1567 JMP_TARGET(doublesize);
1568 load_xf_bank( R_ECX );
1569 load_fr( R_ECX, R_EAX, FRm&0x0E );
1570 load_fr( R_ECX, R_ECX, FRm|0x01 );
1571 ADD_imm8s_r32(-8,R_EDX);
1572 store_reg( R_EDX, Rn );
1573 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
1576 JMP_rel8( 45, end );
1577 JMP_TARGET(doublesize);
1578 load_fr_bank( R_ECX );
1579 load_fr( R_ECX, R_EAX, FRm&0x0E );
1580 load_fr( R_ECX, R_ECX, FRm|0x01 );
1581 ADD_imm8s_r32(-8,R_EDX);
1582 store_reg( R_EDX, Rn );
1583 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
// Post-increment load (presumably FMOV @Rm+, FRn): Rm advanced by 4 or 8
// before the read; original (pre-increment) address kept in EDX.
1589 load_reg( R_EDX, Rm );
1590 check_ralign32( R_EDX );
1591 MOV_r32_r32( R_EDX, R_EAX );
1592 load_spreg( R_ECX, R_FPSCR );
1593 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1594 JNE_rel8(25, doublesize);
1595 ADD_imm8s_r32( 4, R_EAX );
1596 store_reg( R_EAX, Rm );
1597 MEM_READ_LONG( R_EDX, R_EAX );
1598 load_fr_bank( R_ECX );
1599 store_fr( R_ECX, R_EAX, FRn );
1602 JMP_TARGET(doublesize);
1603 ADD_imm8s_r32( 8, R_EAX );
1604 store_reg(R_EAX, Rm);
1605 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1606 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
1607 load_xf_bank( R_ECX );
1608 store_fr( R_ECX, R_EAX, FRn&0x0E );
1609 store_fr( R_ECX, R_EDX, FRn|0x01 );
1613 ADD_imm8s_r32( 8, R_EAX );
1614 store_reg(R_EAX, Rm);
1615 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1616 load_fr_bank( R_ECX );
1617 store_fr( R_ECX, R_EAX, FRn&0x0E );
1618 store_fr( R_ECX, R_EDX, FRn|0x01 );
// --- FMOV indexed forms ------------------------------------------------
// Same structure as the plain @Rn/@Rm forms, with the effective address
// formed as R0 + Rn (or R0 + Rm).  The JNE/JMP byte offsets are
// hand-counted lengths of the emitted x86 code -- keep them in sync with
// any change to the emitted sequences.
//
// FMOV FRm, @(R0, Rn): store FRm (SZ=0) or the FRm pair (SZ=1).
1622 FMOV FRm, @(R0, Rn) {:
1624 load_reg( R_EDX, Rn );
1625 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
1626 check_walign32( R_EDX );
1627 load_spreg( R_ECX, R_FPSCR );
1628 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1629 JNE_rel8(20, doublesize);
1630 load_fr_bank( R_ECX );
1631 load_fr( R_ECX, R_EAX, FRm );
1632 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
1634 JMP_rel8( 48, end );
// SZ=1, XF-bank pair variant (selector if/else not visible in this view).
1635 JMP_TARGET(doublesize);
1636 load_xf_bank( R_ECX );
1637 load_fr( R_ECX, R_EAX, FRm&0x0E );
1638 load_fr( R_ECX, R_ECX, FRm|0x01 );
1639 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
1642 JMP_rel8( 39, end );
// SZ=1, FR-bank pair variant.
1643 JMP_TARGET(doublesize);
1644 load_fr_bank( R_ECX );
1645 load_fr( R_ECX, R_EAX, FRm&0x0E );
1646 load_fr( R_ECX, R_ECX, FRm|0x01 );
1647 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
// FMOV @(R0, Rm), FRn: load single (SZ=0) or pair (SZ=1).
1651 FMOV @(R0, Rm), FRn {:
1653 load_reg( R_EDX, Rm );
1654 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
1655 check_ralign32( R_EDX );
1656 load_spreg( R_ECX, R_FPSCR );
1657 TEST_imm32_r32( FPSCR_SZ, R_ECX );
1658 JNE_rel8(19, doublesize);
1659 MEM_READ_LONG( R_EDX, R_EAX );
1660 load_fr_bank( R_ECX );
1661 store_fr( R_ECX, R_EAX, FRn );
// SZ=1: 64-bit read returns pair in EAX/EDX; FPSCR reloaded because the
// memory-read call is assumed to clobber ECX.
1664 JMP_TARGET(doublesize);
1665 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1666 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
1667 load_xf_bank( R_ECX );
1668 store_fr( R_ECX, R_EAX, FRn&0x0E );
1669 store_fr( R_ECX, R_EDX, FRn|0x01 );
1673 JMP_TARGET(doublesize);
1674 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
1675 load_fr_bank( R_ECX );
1676 store_fr( R_ECX, R_EAX, FRn&0x0E );
1677 store_fr( R_ECX, R_EDX, FRn|0x01 );
// --- FP immediate loads and conversions --------------------------------
// FLDI0: store 0.0f (EAX zeroed via XOR) into FRn.  Only valid when
// FPSCR.PR=0; the conditional branch consuming the TEST result is not
// visible in this excerpt (line gap 1684->1686).
1681 FLDI0 FRn {: /* IFF PR=0 */
1683 load_spreg( R_ECX, R_FPSCR );
1684 TEST_imm32_r32( FPSCR_PR, R_ECX );
1686 XOR_r32_r32( R_EAX, R_EAX );
1687 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
1688 store_fr( R_ECX, R_EAX, FRn );
// FLDI1: store 1.0f (IEEE-754 single 0x3F800000) into FRn; PR=0 only.
1691 FLDI1 FRn {: /* IFF PR=0 */
1693 load_spreg( R_ECX, R_FPSCR );
1694 TEST_imm32_r32( FPSCR_PR, R_ECX );
1696 load_imm32(R_EAX, 0x3F800000);
1697 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
1698 store_fr( R_ECX, R_EAX, FRn );
// int->float conversion (presumably FLOAT FPUL, FRn): the x87 load of
// FPUL is not visible here; result popped into FRn (PR=0) or DRn (PR=1).
1704 load_spreg( R_ECX, R_FPSCR );
1705 load_spreg(R_EDX, REG_OFFSET(fr_bank));
1707 TEST_imm32_r32( FPSCR_PR, R_ECX );
1708 JNE_rel8(5, doubleprec);
1709 pop_fr( R_EDX, FRn );
1711 JMP_TARGET(doubleprec);
1712 pop_dr( R_EDX, FRn );
// float->int truncation (presumably FTRC FRm, FPUL).  Push FRm (or DRm
// when PR=1) onto the x87 stack...
1717 load_spreg( R_ECX, R_FPSCR );
1718 load_fr_bank( R_EDX );
1719 TEST_imm32_r32( FPSCR_PR, R_ECX );
1720 JNE_rel8(5, doubleprec);
1721 push_fr( R_EDX, FRm );
1723 JMP_TARGET(doubleprec);
1724 push_dr( R_EDX, FRm );
// ...then saturate against INT_MAX/INT_MIN (max_int/min_int statics from
// the file header) before converting.  The compare opcodes between the
// FILDs and the conditional jumps are not visible in this excerpt.
1726 load_imm32( R_ECX, (uint32_t)&max_int );
1727 FILD_r32ind( R_ECX );
1729 JNA_rel8( 32, sat );
1730 load_imm32( R_ECX, (uint32_t)&min_int ); // 5
1731 FILD_r32ind( R_ECX ); // 2
1733 JAE_rel8( 21, sat2 ); // 2
// In-range: temporarily force the x87 control word to truncation mode
// (trunc_fcw), FISTP into FPUL, then restore the saved control word.
1734 load_imm32( R_EAX, (uint32_t)&save_fcw );
1735 FNSTCW_r32ind( R_EAX );
1736 load_imm32( R_EDX, (uint32_t)&trunc_fcw );
1737 FLDCW_r32ind( R_EDX );
1738 FISTP_sh4r(R_FPUL); // 3
1739 FLDCW_r32ind( R_EAX );
1740 JMP_rel8( 9, end ); // 2
// Saturated path: store the clamp value (*ECX, i.e. max_int or min_int)
// directly to FPUL.
1744 MOV_r32ind_r32( R_ECX, R_ECX ); // 2
1745 store_spreg( R_ECX, R_FPUL );
// FLDS FRm, FPUL: raw 32-bit move FRm -> FPUL (no conversion).
1751 load_fr_bank( R_ECX );
1752 load_fr( R_ECX, R_EAX, FRm );
1753 store_spreg( R_EAX, R_FPUL );
// FSTS FPUL, FRn: raw 32-bit move FPUL -> FRn.
1757 load_fr_bank( R_ECX );
1758 load_spreg( R_EAX, R_FPUL );
1759 store_fr( R_ECX, R_EAX, FRn );
// Double->single convert (presumably FCNVDS DRm, FPUL): no-op unless
// PR=1; the pop into FPUL is not visible here.
1763 load_spreg( R_ECX, R_FPSCR );
1764 TEST_imm32_r32( FPSCR_PR, R_ECX );
1765 JE_rel8(9, end); // only when PR=1
1766 load_fr_bank( R_ECX );
1767 push_dr( R_ECX, FRm );
// Single->double convert (presumably FCNVSD FPUL, DRn): no-op unless
// PR=1; the push of FPUL is not visible here.
1773 load_spreg( R_ECX, R_FPSCR );
1774 TEST_imm32_r32( FPSCR_PR, R_ECX );
1775 JE_rel8(9, end); // only when PR=1
1776 load_fr_bank( R_ECX );
1778 pop_dr( R_ECX, FRn );
1782 /* Floating point instructions */
// --- FP arithmetic -----------------------------------------------------
// Common shape: test FPSCR.PR; PR=0 path operates on single FRs via the
// x87 stack (push_fr/pop_fr), PR=1 path on double pairs (push_dr/pop_dr).
// The actual x87 arithmetic opcodes (FADDP/FMULP/etc.) fall in the line
// gaps of this excerpt, so per-fragment instruction identities below are
// inferred from operand order only -- TODO confirm against upstream.
// NOTE(review): JNE_rel8/JMP_rel8 arguments are hand-counted byte lengths
// of emitted code; keep them in sync with any emission change.
//
// Unary op on FRn/DRn (op line not visible).
1785 load_spreg( R_ECX, R_FPSCR );
1786 load_fr_bank( R_EDX );
1787 TEST_imm32_r32( FPSCR_PR, R_ECX );
1788 JNE_rel8(10, doubleprec);
1789 push_fr(R_EDX, FRn); // 3
1791 pop_fr( R_EDX, FRn); //3
1792 JMP_rel8(8,end); // 2
1793 JMP_TARGET(doubleprec);
1794 push_dr(R_EDX, FRn);
// Binary op, operands pushed FRm then FRn.
1801 load_spreg( R_ECX, R_FPSCR );
1802 TEST_imm32_r32( FPSCR_PR, R_ECX );
1803 load_fr_bank( R_EDX );
1804 JNE_rel8(13,doubleprec);
1805 push_fr(R_EDX, FRm);
1806 push_fr(R_EDX, FRn);
1810 JMP_TARGET(doubleprec);
1811 push_dr(R_EDX, FRm);
1812 push_dr(R_EDX, FRn);
// Binary op, operands pushed FRn then FRm.
1819 load_spreg( R_ECX, R_FPSCR );
1820 TEST_imm32_r32( FPSCR_PR, R_ECX );
1821 load_fr_bank( R_EDX );
1822 JNE_rel8(13, doubleprec);
1823 push_fr(R_EDX, FRn);
1824 push_fr(R_EDX, FRm);
1828 JMP_TARGET(doubleprec);
1829 push_dr(R_EDX, FRn);
1830 push_dr(R_EDX, FRm);
// FMAC: FRn = FR0*FRm + FRn (multiply/add opcodes in the line gaps).
1835 FMAC FR0, FRm, FRn {:
1837 load_spreg( R_ECX, R_FPSCR );
1838 load_spreg( R_EDX, REG_OFFSET(fr_bank));
1839 TEST_imm32_r32( FPSCR_PR, R_ECX );
1840 JNE_rel8(18, doubleprec);
1841 push_fr( R_EDX, 0 );
1842 push_fr( R_EDX, FRm );
1844 push_fr( R_EDX, FRn );
1846 pop_fr( R_EDX, FRn );
1848 JMP_TARGET(doubleprec);
1849 push_dr( R_EDX, 0 );
1850 push_dr( R_EDX, FRm );
1852 push_dr( R_EDX, FRn );
1854 pop_dr( R_EDX, FRn );
// Binary op, FRm then FRn.
1860 load_spreg( R_ECX, R_FPSCR );
1861 TEST_imm32_r32( FPSCR_PR, R_ECX );
1862 load_fr_bank( R_EDX );
1863 JNE_rel8(13, doubleprec);
1864 push_fr(R_EDX, FRm);
1865 push_fr(R_EDX, FRn);
1869 JMP_TARGET(doubleprec);
1870 push_dr(R_EDX, FRm);
1871 push_dr(R_EDX, FRn);
// Unary op on FRn/DRn.
1878 load_spreg( R_ECX, R_FPSCR );
1879 TEST_imm32_r32( FPSCR_PR, R_ECX );
1880 load_fr_bank( R_EDX );
1881 JNE_rel8(10, doubleprec);
1882 push_fr(R_EDX, FRn);
1886 JMP_TARGET(doubleprec);
1887 push_dr(R_EDX, FRn);
// Unary op valid only when PR=0 (skipped entirely when PR=1).
1894 load_spreg( R_ECX, R_FPSCR );
1895 TEST_imm32_r32( FPSCR_PR, R_ECX );
1896 load_fr_bank( R_EDX );
1897 JNE_rel8(12, end); // PR=0 only
1899 push_fr(R_EDX, FRn);
// Unary op on FRn/DRn.
1907 load_spreg( R_ECX, R_FPSCR );
1908 TEST_imm32_r32( FPSCR_PR, R_ECX );
1909 load_fr_bank( R_EDX );
1910 JNE_rel8(10, doubleprec);
1911 push_fr(R_EDX, FRn);
1915 JMP_TARGET(doubleprec);
1916 push_dr(R_EDX, FRn);
// Binary op, FRn then FRm.
1923 load_spreg( R_ECX, R_FPSCR );
1924 TEST_imm32_r32( FPSCR_PR, R_ECX );
1925 load_fr_bank( R_EDX );
1926 JNE_rel8(13, doubleprec);
1927 push_fr(R_EDX, FRn);
1928 push_fr(R_EDX, FRm);
1932 JMP_TARGET(doubleprec);
1933 push_dr(R_EDX, FRn);
1934 push_dr(R_EDX, FRm);
// Compare-style op (FRm then FRn; comparison/flag lines in the gaps).
1942 load_spreg( R_ECX, R_FPSCR );
1943 TEST_imm32_r32( FPSCR_PR, R_ECX );
1944 load_fr_bank( R_EDX );
1945 JNE_rel8(8, doubleprec);
1946 push_fr(R_EDX, FRm);
1947 push_fr(R_EDX, FRn);
1949 JMP_TARGET(doubleprec);
1950 push_dr(R_EDX, FRm);
1951 push_dr(R_EDX, FRn);
// Second compare-style op, same shape.
1959 load_spreg( R_ECX, R_FPSCR );
1960 TEST_imm32_r32( FPSCR_PR, R_ECX );
1961 load_fr_bank( R_EDX );
1962 JNE_rel8(8, doubleprec);
1963 push_fr(R_EDX, FRm);
1964 push_fr(R_EDX, FRn);
1966 JMP_TARGET(doubleprec);
1967 push_dr(R_EDX, FRm);
1968 push_dr(R_EDX, FRn);
// FSCA (sine/cosine approximation): PR=0 only.  ECX = &fr[FRn&~1]
// (fr_bank + (FRn&0x0E)*4), EDX = FPUL angle; delegated to C helper.
1977 load_spreg( R_ECX, R_FPSCR );
1978 TEST_imm32_r32( FPSCR_PR, R_ECX );
1979 JNE_rel8( 21, doubleprec );
1980 load_fr_bank( R_ECX );
1981 ADD_imm8s_r32( (FRn&0x0E)<<2, R_ECX );
1982 load_spreg( R_EDX, R_FPUL );
1983 call_func2( sh4_fsca, R_EDX, R_ECX );
1984 JMP_TARGET(doubleprec);
// FIPR (4-element inner product FVm.FVn): PR=0 only.  Four element
// products accumulated on the x87 stack (multiply/add opcodes fall in the
// line gaps), result popped into FR[(FVn<<2)+3].
1988 load_spreg( R_ECX, R_FPSCR );
1989 TEST_imm32_r32( FPSCR_PR, R_ECX );
1990 JNE_rel8(44, doubleprec);
1992 load_fr_bank( R_ECX );
1993 push_fr( R_ECX, FVm<<2 );
1994 push_fr( R_ECX, FVn<<2 );
1996 push_fr( R_ECX, (FVm<<2)+1);
1997 push_fr( R_ECX, (FVn<<2)+1);
2000 push_fr( R_ECX, (FVm<<2)+2);
2001 push_fr( R_ECX, (FVn<<2)+2);
2004 push_fr( R_ECX, (FVm<<2)+3);
2005 push_fr( R_ECX, (FVn<<2)+3);
2008 pop_fr( R_ECX, (FVn<<2)+3);
2009 JMP_TARGET(doubleprec);
// FTRV (vector transform by XMTRX): PR=0 only.  EDX = &FV[FVn] vector,
// ECX = XF (matrix) bank; delegated to C helper.
2013 load_spreg( R_ECX, R_FPSCR );
2014 TEST_imm32_r32( FPSCR_PR, R_ECX );
2015 JNE_rel8( 30, doubleprec );
2016 load_fr_bank( R_EDX ); // 3
2017 ADD_imm8s_r32( FVn<<4, R_EDX ); // 3
2018 load_xf_bank( R_ECX ); // 12
2019 call_func2( sh4_ftrv, R_EDX, R_ECX ); // 12
2020 JMP_TARGET(doubleprec);
// FRCHG: toggle FPSCR.FR (bank swap) and refresh the cached fr_bank ptr.
2025 load_spreg( R_ECX, R_FPSCR );
2026 XOR_imm32_r32( FPSCR_FR, R_ECX );
2027 store_spreg( R_ECX, R_FPSCR );
2028 update_fr_bank( R_ECX );
// FSCHG: toggle FPSCR.SZ (single/pair transfer size).
2032 load_spreg( R_ECX, R_FPSCR );
2033 XOR_imm32_r32( FPSCR_SZ, R_ECX );
2034 store_spreg( R_ECX, R_FPSCR );
2037 /* Processor control instructions */
// --- LDC / LDS: load control & system registers ------------------------
// LDC Rm, SR: illegal in a delay slot (the raising code falls in the line
// gap).  SR writes go through sh4_write_sr(), and the translator's cached
// privilege/FPU-enable checks are invalidated since SR may have changed
// them (see priv_checked/fpuen_checked in struct sh4_x86_state).
2039 if( sh4_x86.in_delay_slot ) {
2043 load_reg( R_EAX, Rm );
2044 call_func1( sh4_write_sr, R_EAX );
2045 sh4_x86.priv_checked = FALSE;
2046 sh4_x86.fpuen_checked = FALSE;
// LDC Rm, <reg>: plain 32-bit moves into GBR/VBR/SSR/SGR/SPC/DBR and the
// alternate register bank.
2050 load_reg( R_EAX, Rm );
2051 store_spreg( R_EAX, R_GBR );
2055 load_reg( R_EAX, Rm );
2056 store_spreg( R_EAX, R_VBR );
2060 load_reg( R_EAX, Rm );
2061 store_spreg( R_EAX, R_SSR );
2065 load_reg( R_EAX, Rm );
2066 store_spreg( R_EAX, R_SGR );
2070 load_reg( R_EAX, Rm );
2071 store_spreg( R_EAX, R_SPC );
2075 load_reg( R_EAX, Rm );
2076 store_spreg( R_EAX, R_DBR );
2080 load_reg( R_EAX, Rm );
2081 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
// LDC.L @Rm+, <reg>: common shape -- check 4-byte alignment, save the
// pre-increment address in ECX, bump Rm by 4, read the long, store it.
2084 load_reg( R_EAX, Rm );
2085 check_ralign32( R_EAX );
2086 MOV_r32_r32( R_EAX, R_ECX );
2087 ADD_imm8s_r32( 4, R_EAX );
2088 store_reg( R_EAX, Rm );
2089 MEM_READ_LONG( R_ECX, R_EAX );
2090 store_spreg( R_EAX, R_GBR );
// LDC.L @Rm+, SR: delay-slot-illegal, then same load shape feeding
// sh4_write_sr and invalidating the cached mode checks.
2093 if( sh4_x86.in_delay_slot ) {
2097 load_reg( R_EAX, Rm );
2098 check_ralign32( R_EAX );
2099 MOV_r32_r32( R_EAX, R_ECX );
2100 ADD_imm8s_r32( 4, R_EAX );
2101 store_reg( R_EAX, Rm );
2102 MEM_READ_LONG( R_ECX, R_EAX );
2103 call_func1( sh4_write_sr, R_EAX );
2104 sh4_x86.priv_checked = FALSE;
2105 sh4_x86.fpuen_checked = FALSE;
2110 load_reg( R_EAX, Rm );
2111 check_ralign32( R_EAX );
2112 MOV_r32_r32( R_EAX, R_ECX );
2113 ADD_imm8s_r32( 4, R_EAX );
2114 store_reg( R_EAX, Rm );
2115 MEM_READ_LONG( R_ECX, R_EAX );
2116 store_spreg( R_EAX, R_VBR );
// NOTE(review): this SSR variant omits check_ralign32(), unlike every
// other LDC.L/LDS.L form here.  The original line numbering (2120-2125)
// is contiguous, so the check is genuinely absent, not merely elided from
// this excerpt -- looks like a bug; confirm against upstream and add the
// alignment check if so.
2120 load_reg( R_EAX, Rm );
2121 MOV_r32_r32( R_EAX, R_ECX );
2122 ADD_imm8s_r32( 4, R_EAX );
2123 store_reg( R_EAX, Rm );
2124 MEM_READ_LONG( R_ECX, R_EAX );
2125 store_spreg( R_EAX, R_SSR );
2129 load_reg( R_EAX, Rm );
2130 check_ralign32( R_EAX );
2131 MOV_r32_r32( R_EAX, R_ECX );
2132 ADD_imm8s_r32( 4, R_EAX );
2133 store_reg( R_EAX, Rm );
2134 MEM_READ_LONG( R_ECX, R_EAX );
2135 store_spreg( R_EAX, R_SGR );
2139 load_reg( R_EAX, Rm );
2140 check_ralign32( R_EAX );
2141 MOV_r32_r32( R_EAX, R_ECX );
2142 ADD_imm8s_r32( 4, R_EAX );
2143 store_reg( R_EAX, Rm );
2144 MEM_READ_LONG( R_ECX, R_EAX );
2145 store_spreg( R_EAX, R_SPC );
2149 load_reg( R_EAX, Rm );
2150 check_ralign32( R_EAX );
2151 MOV_r32_r32( R_EAX, R_ECX );
2152 ADD_imm8s_r32( 4, R_EAX );
2153 store_reg( R_EAX, Rm );
2154 MEM_READ_LONG( R_ECX, R_EAX );
2155 store_spreg( R_EAX, R_DBR );
2157 LDC.L @Rm+, Rn_BANK {:
2159 load_reg( R_EAX, Rm );
2160 check_ralign32( R_EAX );
2161 MOV_r32_r32( R_EAX, R_ECX );
2162 ADD_imm8s_r32( 4, R_EAX );
2163 store_reg( R_EAX, Rm );
2164 MEM_READ_LONG( R_ECX, R_EAX );
2165 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
// LDS Rm, FPSCR / LDS.L @Rm+, FPSCR: FPSCR writes must refresh the
// cached fr_bank pointer via update_fr_bank().
2168 load_reg( R_EAX, Rm );
2169 store_spreg( R_EAX, R_FPSCR );
2170 update_fr_bank( R_EAX );
2172 LDS.L @Rm+, FPSCR {:
2173 load_reg( R_EAX, Rm );
2174 check_ralign32( R_EAX );
2175 MOV_r32_r32( R_EAX, R_ECX );
2176 ADD_imm8s_r32( 4, R_EAX );
2177 store_reg( R_EAX, Rm );
2178 MEM_READ_LONG( R_ECX, R_EAX );
2179 store_spreg( R_EAX, R_FPSCR );
2180 update_fr_bank( R_EAX );
// LDS Rm / LDS.L @Rm+ into FPUL, MACH, MACL, PR -- straightforward moves
// plus the common post-increment load shape.
2183 load_reg( R_EAX, Rm );
2184 store_spreg( R_EAX, R_FPUL );
2187 load_reg( R_EAX, Rm );
2188 check_ralign32( R_EAX );
2189 MOV_r32_r32( R_EAX, R_ECX );
2190 ADD_imm8s_r32( 4, R_EAX );
2191 store_reg( R_EAX, Rm );
2192 MEM_READ_LONG( R_ECX, R_EAX );
2193 store_spreg( R_EAX, R_FPUL );
2196 load_reg( R_EAX, Rm );
2197 store_spreg( R_EAX, R_MACH );
2200 load_reg( R_EAX, Rm );
2201 check_ralign32( R_EAX );
2202 MOV_r32_r32( R_EAX, R_ECX );
2203 ADD_imm8s_r32( 4, R_EAX );
2204 store_reg( R_EAX, Rm );
2205 MEM_READ_LONG( R_ECX, R_EAX );
2206 store_spreg( R_EAX, R_MACH );
2209 load_reg( R_EAX, Rm );
2210 store_spreg( R_EAX, R_MACL );
2213 load_reg( R_EAX, Rm );
2214 check_ralign32( R_EAX );
2215 MOV_r32_r32( R_EAX, R_ECX );
2216 ADD_imm8s_r32( 4, R_EAX );
2217 store_reg( R_EAX, Rm );
2218 MEM_READ_LONG( R_ECX, R_EAX );
2219 store_spreg( R_EAX, R_MACL );
2222 load_reg( R_EAX, Rm );
2223 store_spreg( R_EAX, R_PR );
2226 load_reg( R_EAX, Rm );
2227 check_ralign32( R_EAX );
2228 MOV_r32_r32( R_EAX, R_ECX );
2229 ADD_imm8s_r32( 4, R_EAX );
2230 store_reg( R_EAX, Rm );
2231 MEM_READ_LONG( R_ECX, R_EAX );
2232 store_spreg( R_EAX, R_PR );
// PREF @Rn: only acts when Rn falls in the 0xE0000000 store-queue region
// (top 6 bits == 111000); flushes the store queue via C helper.  The
// ADD 4,ESP pops the helper's stack argument (the push is in a line gap).
2239 load_reg( R_EAX, Rn );
2241 AND_imm32_r32( 0xFC000000, R_EAX );
2242 CMP_imm32_r32( 0xE0000000, R_EAX );
2244 call_func0( sh4_flush_store_queue );
2246 ADD_imm8s_r32( 4, R_ESP );
// SLEEP: drop into the C sleep handler and terminate the block (not in a
// delay slot from the translator's point of view afterwards).
2250 call_func0( sh4_sleep );
2251 sh4_x86.exit_code = 0;
2252 sh4_x86.in_delay_slot = FALSE;
// --- STC / STS: store control & system registers -----------------------
// STC SR, Rn: SR is assembled by the C helper sh4_read_sr() (it is not
// stored as a single word internally); other control regs are plain
// 32-bit moves out of the register file.
2258 call_func0(sh4_read_sr);
2259 store_reg( R_EAX, Rn );
2262 load_spreg( R_EAX, R_GBR );
2263 store_reg( R_EAX, Rn );
2267 load_spreg( R_EAX, R_VBR );
2268 store_reg( R_EAX, Rn );
2272 load_spreg( R_EAX, R_SSR );
2273 store_reg( R_EAX, Rn );
2277 load_spreg( R_EAX, R_SPC );
2278 store_reg( R_EAX, Rn );
2282 load_spreg( R_EAX, R_SGR );
2283 store_reg( R_EAX, Rn );
2287 load_spreg( R_EAX, R_DBR );
2288 store_reg( R_EAX, Rn );
2292 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
2293 store_reg( R_EAX, Rn );
// STC.L <reg>, @-Rn: common pre-decrement store shape -- check 4-byte
// write alignment, decrement Rn by 4 and write it back, then store the
// source value at the new address.
2297 call_func0( sh4_read_sr );
2298 load_reg( R_ECX, Rn );
2299 check_walign32( R_ECX );
2300 ADD_imm8s_r32( -4, R_ECX );
2301 store_reg( R_ECX, Rn );
2302 MEM_WRITE_LONG( R_ECX, R_EAX );
2306 load_reg( R_ECX, Rn );
2307 check_walign32( R_ECX );
2308 ADD_imm8s_r32( -4, R_ECX );
2309 store_reg( R_ECX, Rn );
2310 load_spreg( R_EAX, R_VBR );
2311 MEM_WRITE_LONG( R_ECX, R_EAX );
2315 load_reg( R_ECX, Rn );
2316 check_walign32( R_ECX );
2317 ADD_imm8s_r32( -4, R_ECX );
2318 store_reg( R_ECX, Rn );
2319 load_spreg( R_EAX, R_SSR );
2320 MEM_WRITE_LONG( R_ECX, R_EAX );
2324 load_reg( R_ECX, Rn );
2325 check_walign32( R_ECX );
2326 ADD_imm8s_r32( -4, R_ECX );
2327 store_reg( R_ECX, Rn );
2328 load_spreg( R_EAX, R_SPC );
2329 MEM_WRITE_LONG( R_ECX, R_EAX );
2333 load_reg( R_ECX, Rn );
2334 check_walign32( R_ECX );
2335 ADD_imm8s_r32( -4, R_ECX );
2336 store_reg( R_ECX, Rn );
2337 load_spreg( R_EAX, R_SGR );
2338 MEM_WRITE_LONG( R_ECX, R_EAX );
2342 load_reg( R_ECX, Rn );
2343 check_walign32( R_ECX );
2344 ADD_imm8s_r32( -4, R_ECX );
2345 store_reg( R_ECX, Rn );
2346 load_spreg( R_EAX, R_DBR );
2347 MEM_WRITE_LONG( R_ECX, R_EAX );
2349 STC.L Rm_BANK, @-Rn {:
2351 load_reg( R_ECX, Rn );
2352 check_walign32( R_ECX );
2353 ADD_imm8s_r32( -4, R_ECX );
2354 store_reg( R_ECX, Rn );
2355 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
2356 MEM_WRITE_LONG( R_ECX, R_EAX );
2359 load_reg( R_ECX, Rn );
2360 check_walign32( R_ECX );
2361 ADD_imm8s_r32( -4, R_ECX );
2362 store_reg( R_ECX, Rn );
2363 load_spreg( R_EAX, R_GBR );
2364 MEM_WRITE_LONG( R_ECX, R_EAX );
// STS FPSCR/FPUL/MACH/MACL/PR, Rn and the matching STS.L @-Rn stores;
// same register-move and pre-decrement shapes as above.
2367 load_spreg( R_EAX, R_FPSCR );
2368 store_reg( R_EAX, Rn );
2370 STS.L FPSCR, @-Rn {:
2371 load_reg( R_ECX, Rn );
2372 check_walign32( R_ECX );
2373 ADD_imm8s_r32( -4, R_ECX );
2374 store_reg( R_ECX, Rn );
2375 load_spreg( R_EAX, R_FPSCR );
2376 MEM_WRITE_LONG( R_ECX, R_EAX );
2379 load_spreg( R_EAX, R_FPUL );
2380 store_reg( R_EAX, Rn );
2383 load_reg( R_ECX, Rn );
2384 check_walign32( R_ECX );
2385 ADD_imm8s_r32( -4, R_ECX );
2386 store_reg( R_ECX, Rn );
2387 load_spreg( R_EAX, R_FPUL );
2388 MEM_WRITE_LONG( R_ECX, R_EAX );
2391 load_spreg( R_EAX, R_MACH );
2392 store_reg( R_EAX, Rn );
2395 load_reg( R_ECX, Rn );
2396 check_walign32( R_ECX );
2397 ADD_imm8s_r32( -4, R_ECX );
2398 store_reg( R_ECX, Rn );
2399 load_spreg( R_EAX, R_MACH );
2400 MEM_WRITE_LONG( R_ECX, R_EAX );
2403 load_spreg( R_EAX, R_MACL );
2404 store_reg( R_EAX, Rn );
2407 load_reg( R_ECX, Rn );
2408 check_walign32( R_ECX );
2409 ADD_imm8s_r32( -4, R_ECX );
2410 store_reg( R_ECX, Rn );
2411 load_spreg( R_EAX, R_MACL );
2412 MEM_WRITE_LONG( R_ECX, R_EAX );
2415 load_spreg( R_EAX, R_PR );
2416 store_reg( R_EAX, Rn );
2419 load_reg( R_ECX, Rn );
2420 check_walign32( R_ECX );
2421 ADD_imm8s_r32( -4, R_ECX );
2422 store_reg( R_ECX, Rn );
2423 load_spreg( R_EAX, R_PR );
2424 MEM_WRITE_LONG( R_ECX, R_EAX );
// NOP translates to zero emitted host instructions.
2427 NOP {: /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */ :}
2429 if( sh4_x86.in_delay_slot ) {
2430 ADD_imm8s_r32(2,R_ESI);
2431 sh4_x86.in_delay_slot = FALSE;
.