2 * $Id: sh4x86.in,v 1.3 2007-09-04 08:40:23 nkeynes Exp $
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
8 * Copyright (c) 2007 Nathan Keynes.
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
23 #include "sh4/sh4core.h"
24 #include "sh4/sh4trans.h"
25 #include "sh4/x86op.h"
/* Initial allocation, in bytes, for the per-block back-patch list
 * (grown by doubling in sh4_x86_add_backpatch when full). */
28 #define DEFAULT_BACKPATCH_SIZE 4096
31 * Struct to manage internal translation state. This state is not saved -
32 * it is only valid between calls to sh4_translate_begin_block() and
33 * sh4_translate_end_block()
35 struct sh4_x86_state {
36 gboolean in_delay_slot; /* true while translating the instruction in a branch delay slot */
37 gboolean priv_checked; /* true if we've already checked the cpu mode. */
38 gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
40 /* Allocated memory for the (block-wide) back-patch list */
41 uint32_t **backpatch_list;
42 uint32_t backpatch_posn; /* number of entries currently in backpatch_list */
43 uint32_t backpatch_size; /* capacity of backpatch_list, in entries (not bytes) */
/* NOTE(review): the closing "};" (original line ~44) is missing from this extract. */
/* Byte offsets of the exception jump stubs emitted at the end of each
 * translated block (see sh4_translate_end_block). The offsets are spaced
 * 7 bytes apart, one stub per exception cause. */
46 #define EXIT_DATA_ADDR_READ 0
47 #define EXIT_DATA_ADDR_WRITE 7
48 #define EXIT_ILLEGAL 14
49 #define EXIT_SLOT_ILLEGAL 21
50 #define EXIT_FPU_DISABLED 28
51 #define EXIT_SLOT_FPU_DISABLED 35
/* Single file-scope translation state instance. */
53 static struct sh4_x86_state sh4_x86;
/* NOTE(review): the signature of the enclosing init function (original
 * lines ~54-56) is missing from this extract; the two statements below are
 * its body: allocate the back-patch list and record its capacity in
 * entries. The malloc result is not checked here -- TODO confirm against
 * the full source. */
57 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
58 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
/*
 * Append a code pointer to the block-wide back-patch list, doubling the
 * list's capacity first when it is full.
 * NOTE(review): braces are missing at several points in this extract
 * (truncated listing).
 */
62 static void sh4_x86_add_backpatch( uint8_t *ptr )
64 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
65 sh4_x86.backpatch_size <<= 1;
/* NOTE(review): realloc overwrites the pointer before the NULL check below,
 * so on failure the old list is lost; the assert is the only guard. */
66 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
67 assert( sh4_x86.backpatch_list != NULL );
69 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
/*
 * Walk the back-patch list and adjust each recorded 32-bit slot so that it
 * becomes relative to reloc_base: each slot is incremented by the distance
 * from the slot's own address to reloc_base.
 * NOTE(review): the declaration of loop variable i (original line ~74) and
 * the braces are missing from this extract.
 */
72 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
75 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
76 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]));
/* Record the expected end address of a short forward jump: declares a local
 * marker x pointing n bytes past the current output position. The argument
 * n is parenthesized so that low-precedence expression arguments (e.g.
 * conditional expressions) expand correctly. */
81 #define MARK_JMP(x,n) uint8_t *_mark_jmp_##x = xlat_output + (n)
/* Assert that code emission has reached exactly the address recorded by the
 * matching MARK_JMP(x,n). */
82 #define CHECK_JMP(x) assert( _mark_jmp_##x == xlat_output )
90 * Emit an instruction to load an SH4 reg into a real register
92 static inline void load_reg( int x86reg, int sh4reg )
/* Emit the ModRM byte (0x45 = [ebp+disp8], destination register in bits
 * 3-5) followed by the disp8 offset of r[sh4reg] within the sh4r struct.
 * NOTE(review): the opcode byte emission (original line ~95) and the
 * function braces are missing from this extract. */
96 OP(0x45 + (x86reg<<3));
97 OP(REG_OFFSET(r[sh4reg]));
101 * Load the SR register into an x86 register
/* Rebuild the full SR value in x86reg by merging the separately-stored
 * M/Q/S/T flag fields back into the saved SR word: each field is OR-ed in
 * with shifts in between to place it at its bit position.
 * NOTE(review): several SHL lines (original 104, 106, 110) are missing
 * from this extract, as are the function braces. */
103 static inline void read_sr( int x86reg )
105 MOV_ebp_r32( R_M, x86reg );
107 OR_ebp_r32( R_Q, x86reg );
108 SHL_imm8_r32( 7, x86reg );
109 OR_ebp_r32( R_S, x86reg );
111 OR_ebp_r32( R_T, x86reg );
112 OR_ebp_r32( R_SR, x86reg );
/* Inverse of read_sr: split the SR value in x86reg into the separate
 * M/Q/S/T flag fields, then mask and store the SR word itself.
 * NOTE(review): the store instructions between each TEST (original lines
 * 118, 120, 122, 124 -- presumably SETNE-style flag stores) are missing
 * from this extract; only the TEST/AND/MOV skeleton is visible. */
115 static inline void write_sr( int x86reg )
117 TEST_imm32_r32( SR_M, x86reg );
119 TEST_imm32_r32( SR_Q, x86reg );
121 TEST_imm32_r32( SR_S, x86reg );
123 TEST_imm32_r32( SR_T, x86reg );
125 AND_imm32_r32( SR_MQSTMASK, x86reg );
126 MOV_r32_ebp( x86reg, R_SR );
/* Emit a load of an arbitrary sh4r field (by byte offset) into an x86
 * register, using ebp-relative addressing.
 * NOTE(review): the opcode byte and the final OP(regoffset) emission are
 * missing from this extract, along with the braces. */
130 static inline void load_spreg( int x86reg, int regoffset )
132 /* mov [bp+n], reg */
134 OP(0x45 + (x86reg<<3));
139 * Emit an instruction to load an immediate value into a register
/* NOTE(review): the body (opcode + 32-bit immediate emission, original
 * lines ~143-146) is missing from this extract. */
141 static inline void load_imm32( int x86reg, uint32_t value ) {
142 /* mov #value, reg */
148 * Emit an instruction to store an SH4 reg (RN)
/* Counterpart of load_reg: emit "mov [ebp+REG_OFFSET(r[sh4reg])], reg".
 * NOTE(review): the opcode byte emission (original line ~152) and closing
 * brace are missing from this extract. ("void static inline" ordering is
 * unconventional but legal C.) */
150 void static inline store_reg( int x86reg, int sh4reg ) {
151 /* mov reg, [bp+n] */
153 OP(0x45 + (x86reg<<3));
154 OP(REG_OFFSET(r[sh4reg]));
/* Counterpart of load_spreg: store an x86 register into an arbitrary sh4r
 * field given by byte offset.
 * NOTE(review): the opcode byte, OP(regoffset) emission and closing brace
 * are missing from this extract. */
156 void static inline store_spreg( int x86reg, int regoffset ) {
157 /* mov reg, [bp+n] */
159 OP(0x45 + (x86reg<<3));
164 * Note: clobbers EAX to make the indirect call - this isn't usually
165 * a problem since the callee will usually clobber it anyway.
/* Emit a zero-argument indirect call: load the target address into EAX and
 * call through it. The (uint32_t)ptr cast assumes 32-bit pointers (this is
 * an x86-32 backend).
 * NOTE(review): the CALL emission (original line ~170) and braces are
 * missing from this extract. */
167 static inline void call_func0( void *ptr )
169 load_imm32(R_EAX, (uint32_t)ptr);
/* Emit a one-argument call: the argument is pushed on the stack (cdecl)
 * and the stack pointer restored afterwards -- the visible ADD -4,%esp is
 * part of that stack adjustment.
 * NOTE(review): the argument push, call, and stack cleanup direction
 * (original lines ~174-178) are largely missing from this extract --
 * confirm order against the full source. */
173 static inline void call_func1( void *ptr, int arg1 )
177 ADD_imm8s_r32( -4, R_ESP );
/* Emit a two-argument call; same cdecl stack protocol as call_func1.
 * NOTE(review): most of the body (original lines ~181-186) is missing
 * from this extract. */
180 static inline void call_func2( void *ptr, int arg1, int arg2 )
185 ADD_imm8s_r32( -4, R_ESP );
188 /* Exception checks - Note that all exception checks will clobber EAX */
/* Emit (at most once per translated block, tracked by priv_checked) a
 * runtime check that the CPU is in privileged mode: test SR.MD and branch
 * to the illegal-instruction exit stub if it is clear, choosing the
 * slot-illegal variant inside a delay slot.
 * NOTE(review): the else branch and closing braces (original lines
 * 197, 199-201) are missing from this extract. */
189 static void check_priv( )
191 if( !sh4_x86.priv_checked ) {
192 sh4_x86.priv_checked = TRUE;
193 load_spreg( R_EAX, R_SR );
194 AND_imm32_r32( SR_MD, R_EAX );
195 if( sh4_x86.in_delay_slot ) {
196 JE_exit( EXIT_SLOT_ILLEGAL );
198 JE_exit( EXIT_ILLEGAL );
/* Emit (at most once per block, tracked by fpuen_checked) a runtime check
 * that the FPU is enabled: test SR.FD and branch to the FPU-disabled exit
 * stub if it is set, choosing the slot variant inside a delay slot.
 * NOTE(review): the else branch and closing braces (original lines
 * 211, 213-215) are missing from this extract. */
203 static void check_fpuen( )
205 if( !sh4_x86.fpuen_checked ) {
206 sh4_x86.fpuen_checked = TRUE;
207 load_spreg( R_EAX, R_SR );
208 AND_imm32_r32( SR_FD, R_EAX );
209 if( sh4_x86.in_delay_slot ) {
210 JNE_exit(EXIT_SLOT_FPU_DISABLED);
212 JNE_exit(EXIT_FPU_DISABLED);
/* Emit a 16-bit read-alignment check: if the low address bit is set,
 * branch to the data-address-read exception stub. Braces missing from
 * this extract (truncated listing). */
217 static void check_ralign16( int x86reg )
219 TEST_imm32_r32( 0x00000001, x86reg );
220 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 16-bit write-alignment check: if the low address bit is set,
 * branch to the data-address-write exception stub. Braces missing from
 * this extract (truncated listing). */
223 static void check_walign16( int x86reg )
225 TEST_imm32_r32( 0x00000001, x86reg );
226 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Emit a 32-bit read-alignment check: if either of the two low address
 * bits is set, branch to the data-address-read exception stub. Braces
 * missing from this extract (truncated listing). */
229 static void check_ralign32( int x86reg )
231 TEST_imm32_r32( 0x00000003, x86reg );
232 JNE_exit(EXIT_DATA_ADDR_READ);
/* Emit a 32-bit write-alignment check: if either of the two low address
 * bits is set, branch to the data-address-write exception stub. Braces
 * missing from this extract (truncated listing). */
234 static void check_walign32( int x86reg )
236 TEST_imm32_r32( 0x00000003, x86reg );
238 JNE_exit(EXIT_DATA_ADDR_WRITE);
/* Memory-access helpers. Each read/write macro emits a call to the
 * corresponding sh4_read_*/sh4_write_* function via call_func1/call_func2;
 * MEM_RESULT moves the call's EAX return value into the requested register
 * (a no-op when the caller asked for EAX itself).
 * NOTE(review): these expand to multiple statements without a do/while(0)
 * wrapper, so they are only safe in straight-line contexts -- confirm no
 * call site uses them as an unbraced if/else body. */
242 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
243 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
244 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
245 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
246 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
247 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
248 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
/* Emit a call into sh4_raise_exception with the given exception code.
 * (Note the trailing semicolon in the definition -- call sites should not
 * add a second one.) */
250 #define RAISE_EXCEPTION( exc ) call_func1(sh4_raise_exception, exc);
/* Raise the slot-illegal exception when the current instruction is being
 * translated inside a branch delay slot. */
251 #define CHECKSLOTILLEGAL() if(sh4_x86.in_delay_slot) RAISE_EXCEPTION(EXC_SLOT_ILLEGAL)
256 * Emit the 'start of block' assembly. Sets up the stack frame and save
/* Emit the block prologue and reset the per-block translation state.
 * EBP is pointed at the global sh4r structure so that all register
 * accesses can use ebp-relative addressing (see load_reg/store_reg).
 * NOTE(review): the stack-frame setup instructions (original lines
 * ~260-266, e.g. push/save of callee-saved registers) are missing from
 * this extract. */
259 void sh4_translate_begin_block()
264 load_imm32( R_EBP, (uint32_t)&sh4r );
267 sh4_x86.in_delay_slot = FALSE;
268 sh4_x86.priv_checked = FALSE;
269 sh4_x86.fpuen_checked = FALSE;
270 sh4_x86.backpatch_posn = 0;
274 * Exit the block early (ie branch out), conditionally or otherwise
/* Emit code that leaves the translated block: store the target PC back to
 * sh4r.pc, accumulate the elapsed cycle count into sh4r.slice_cycle
 * (sh4_cpu_period is loaded into EAX; presumably multiplied by the
 * instruction counter on a line missing here -- original line ~282, TODO
 * confirm), and zero EAX as the block's return value.
 * NOTE(review): epilogue/return instructions after line 285 are missing
 * from this extract. */
276 void exit_block( uint32_t pc )
278 load_imm32( R_ECX, pc );
279 store_spreg( R_ECX, REG_OFFSET(pc) );
280 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
281 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
283 ADD_r32_r32( R_EAX, R_ECX );
284 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
285 XOR_r32_r32( R_EAX, R_EAX );
290 * Flush any open regs back to memory, restore SI/DI/, update PC, etc
/* Emit the block epilogue: normal termination first, then a run of
 * exception jump stubs (one PUSH_imm32 of the exception code per cause --
 * their spacing matches the EXIT_* offsets, 7 bytes apart), followed by
 * the common exception handler that recomputes PC (pc + 2 * instruction
 * count held in ESI -- the two ADDs implement the *2), updates the cycle
 * counter, and calls sh4_raise_exception. Finally the recorded branch
 * targets are fixed up relative to end_ptr.
 * NOTE(review): many lines are missing from this extract (the normal-exit
 * code at original 295-296, the jump following each PUSH stub at 300-310,
 * the MUL for cycle accounting at ~317, and the tail between 322 and 326),
 * so the per-stub layout cannot be fully confirmed here. */
292 void sh4_translate_end_block( sh4addr_t pc ) {
293 assert( !sh4_x86.in_delay_slot ); // should never stop here
294 // Normal termination - save PC, cycle count
297 uint8_t *end_ptr = xlat_output;
298 // Exception termination. Jump block for various exception codes:
299 PUSH_imm32( EXC_DATA_ADDR_READ );
301 PUSH_imm32( EXC_DATA_ADDR_WRITE );
303 PUSH_imm32( EXC_ILLEGAL );
305 PUSH_imm32( EXC_SLOT_ILLEGAL );
307 PUSH_imm32( EXC_FPU_DISABLED );
309 PUSH_imm32( EXC_SLOT_FPU_DISABLED );
311 load_spreg( R_ECX, REG_OFFSET(pc) );
312 ADD_r32_r32( R_ESI, R_ECX );
313 ADD_r32_r32( R_ESI, R_ECX );
314 store_spreg( R_ECX, REG_OFFSET(pc) );
315 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
316 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
318 ADD_r32_r32( R_EAX, R_ECX );
319 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
321 load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
322 CALL_r32( R_EAX ); // 2
326 sh4_x86_do_backpatch( end_ptr );
330 * Translate a single instruction. Delayed branches are handled specially
331 * by translating both branch and delayed instruction as a single unit (as
334 * @return true if the instruction marks the end of a basic block
337 uint32_t sh4_x86_translate_instruction( uint32_t pc )
339 uint16_t ir = sh4_read_word( pc );
344 load_reg( R_EAX, Rm );
345 load_reg( R_ECX, Rn );
346 ADD_r32_r32( R_EAX, R_ECX );
347 store_reg( R_ECX, Rn );
350 load_reg( R_EAX, Rn );
351 ADD_imm8s_r32( imm, R_EAX );
352 store_reg( R_EAX, Rn );
355 load_reg( R_EAX, Rm );
356 load_reg( R_ECX, Rn );
358 ADC_r32_r32( R_EAX, R_ECX );
359 store_reg( R_ECX, Rn );
363 load_reg( R_EAX, Rm );
364 load_reg( R_ECX, Rn );
365 ADD_r32_r32( R_EAX, R_ECX );
366 store_reg( R_ECX, Rn );
370 load_reg( R_EAX, Rm );
371 load_reg( R_ECX, Rn );
372 AND_r32_r32( R_EAX, R_ECX );
373 store_reg( R_ECX, Rn );
376 load_reg( R_EAX, 0 );
377 AND_imm32_r32(imm, R_EAX);
378 store_reg( R_EAX, 0 );
380 AND.B #imm, @(R0, GBR) {:
381 load_reg( R_EAX, 0 );
382 load_spreg( R_ECX, R_GBR );
383 ADD_r32_r32( R_EAX, R_EBX );
384 MEM_READ_BYTE( R_ECX, R_EAX );
385 AND_imm32_r32(imm, R_ECX );
386 MEM_WRITE_BYTE( R_ECX, R_EAX );
389 load_reg( R_EAX, Rm );
390 load_reg( R_ECX, Rn );
391 CMP_r32_r32( R_EAX, R_ECX );
395 load_reg( R_EAX, 0 );
396 CMP_imm8s_r32(imm, R_EAX);
400 load_reg( R_EAX, Rm );
401 load_reg( R_ECX, Rn );
402 CMP_r32_r32( R_EAX, R_ECX );
406 load_reg( R_EAX, Rm );
407 load_reg( R_ECX, Rn );
408 CMP_r32_r32( R_EAX, R_ECX );
412 load_reg( R_EAX, Rm );
413 load_reg( R_ECX, Rn );
414 CMP_r32_r32( R_EAX, R_ECX );
418 load_reg( R_EAX, Rm );
419 load_reg( R_ECX, Rn );
420 CMP_r32_r32( R_EAX, R_ECX );
424 load_reg( R_EAX, Rn );
425 CMP_imm8s_r32( 0, R_EAX );
429 load_reg( R_EAX, Rn );
430 CMP_imm8s_r32( 0, R_EAX );
434 load_reg( R_EAX, Rm );
435 load_reg( R_ECX, Rn );
436 XOR_r32_r32( R_ECX, R_EAX );
437 TEST_r8_r8( R_AL, R_AL );
439 TEST_r8_r8( R_AH, R_AH ); // 2
441 SHR_imm8_r32( 16, R_EAX ); // 3
442 TEST_r8_r8( R_AL, R_AL ); // 2
444 TEST_r8_r8( R_AH, R_AH ); // 2
448 load_reg( R_EAX, Rm );
449 load_reg( R_ECX, Rm );
450 SHR_imm8_r32( 31, R_EAX );
451 SHR_imm8_r32( 31, R_ECX );
452 store_spreg( R_EAX, R_M );
453 store_spreg( R_ECX, R_Q );
454 CMP_r32_r32( R_EAX, R_ECX );
458 XOR_r32_r32( R_EAX, R_EAX );
459 store_spreg( R_EAX, R_Q );
460 store_spreg( R_EAX, R_M );
461 store_spreg( R_EAX, R_T );
465 load_reg( R_EAX, Rm );
466 load_reg( R_ECX, Rn );
468 store_spreg( R_EDX, R_MACH );
469 store_spreg( R_EAX, R_MACL );
472 load_reg( R_EAX, Rm );
473 load_reg( R_ECX, Rn );
475 store_spreg( R_EDX, R_MACH );
476 store_spreg( R_EAX, R_MACL );
479 load_reg( R_EAX, Rn );
480 ADD_imm8s_r32( -1, Rn );
481 store_reg( R_EAX, Rn );
485 load_reg( R_EAX, Rm );
486 MOVSX_r8_r32( R_EAX, R_EAX );
487 store_reg( R_EAX, Rn );
490 load_reg( R_EAX, Rm );
491 MOVSX_r16_r32( R_EAX, R_EAX );
492 store_reg( R_EAX, Rn );
495 load_reg( R_EAX, Rm );
496 MOVZX_r8_r32( R_EAX, R_EAX );
497 store_reg( R_EAX, Rn );
500 load_reg( R_EAX, Rm );
501 MOVZX_r16_r32( R_EAX, R_EAX );
502 store_reg( R_EAX, Rn );
504 MAC.L @Rm+, @Rn+ {: :}
505 MAC.W @Rm+, @Rn+ {: :}
507 load_spreg( R_EAX, R_T );
508 store_reg( R_EAX, Rn );
511 load_reg( R_EAX, Rm );
512 load_reg( R_ECX, Rn );
514 store_spreg( R_EAX, R_MACL );
520 load_reg( R_EAX, Rm );
522 store_reg( R_EAX, Rn );
525 load_reg( R_EAX, Rm );
526 XOR_r32_r32( R_ECX, R_ECX );
528 SBB_r32_r32( R_EAX, R_ECX );
529 store_reg( R_ECX, Rn );
533 load_reg( R_EAX, Rm );
535 store_reg( R_EAX, Rn );
538 load_reg( R_EAX, Rm );
539 load_reg( R_ECX, Rn );
540 OR_r32_r32( R_EAX, R_ECX );
541 store_reg( R_ECX, Rn );
544 load_reg( R_EAX, 0 );
545 OR_imm32_r32(imm, R_EAX);
546 store_reg( R_EAX, 0 );
548 OR.B #imm, @(R0, GBR) {: :}
550 load_reg( R_EAX, Rn );
553 store_reg( R_EAX, Rn );
557 load_reg( R_EAX, Rn );
560 store_reg( R_EAX, Rn );
564 load_reg( R_EAX, Rn );
566 store_reg( R_EAX, Rn );
570 load_reg( R_EAX, Rn );
572 store_reg( R_EAX, Rn );
576 /* Annoyingly enough, not directly convertible */
577 load_reg( R_EAX, Rn );
578 load_reg( R_ECX, Rm );
579 CMP_imm32_r32( 0, R_ECX );
582 NEG_r32( R_ECX ); // 2
583 AND_imm8_r8( 0x1F, R_CL ); // 3
584 SAR_r32_CL( R_EAX ); // 2
587 AND_imm8_r8( 0x1F, R_CL ); // 3
588 SHL_r32_CL( R_EAX ); // 2
590 store_reg( R_EAX, Rn );
593 load_reg( R_EAX, Rn );
594 load_reg( R_ECX, Rm );
596 MOV_r32_r32( R_EAX, R_EDX );
600 CMP_imm8s_r32( 0, R_ECX );
601 CMOVAE_r32_r32( R_EDX, R_EAX );
602 store_reg( R_EAX, Rn );
605 load_reg( R_EAX, Rn );
607 store_reg( R_EAX, Rn );
610 load_reg( R_EAX, Rn );
612 store_reg( R_EAX, Rn );
615 load_reg( R_EAX, Rn );
617 store_reg( R_EAX, Rn );
620 load_reg( R_EAX, Rn );
621 SHL_imm8_r32( 2, R_EAX );
622 store_reg( R_EAX, Rn );
625 load_reg( R_EAX, Rn );
626 SHL_imm8_r32( 8, R_EAX );
627 store_reg( R_EAX, Rn );
630 load_reg( R_EAX, Rn );
631 SHL_imm8_r32( 16, R_EAX );
632 store_reg( R_EAX, Rn );
635 load_reg( R_EAX, Rn );
637 store_reg( R_EAX, Rn );
640 load_reg( R_EAX, Rn );
641 SHR_imm8_r32( 2, R_EAX );
642 store_reg( R_EAX, Rn );
645 load_reg( R_EAX, Rn );
646 SHR_imm8_r32( 8, R_EAX );
647 store_reg( R_EAX, Rn );
650 load_reg( R_EAX, Rn );
651 SHR_imm8_r32( 16, R_EAX );
652 store_reg( R_EAX, Rn );
655 load_reg( R_EAX, Rm );
656 load_reg( R_ECX, Rn );
657 SUB_r32_r32( R_EAX, R_ECX );
658 store_reg( R_ECX, Rn );
661 load_reg( R_EAX, Rm );
662 load_reg( R_ECX, Rn );
664 SBB_r32_r32( R_EAX, R_ECX );
665 store_reg( R_ECX, Rn );
668 load_reg( R_EAX, Rm );
669 load_reg( R_ECX, Rn );
670 SUB_r32_r32( R_EAX, R_ECX );
671 store_reg( R_ECX, Rn );
675 load_reg( R_EAX, Rm );
676 XCHG_r8_r8( R_AL, R_AH );
677 store_reg( R_EAX, Rn );
680 load_reg( R_EAX, Rm );
681 MOV_r32_r32( R_EAX, R_ECX );
682 SHL_imm8_r32( 16, R_ECX );
683 SHR_imm8_r32( 16, R_EAX );
684 OR_r32_r32( R_EAX, R_ECX );
685 store_reg( R_ECX, Rn );
688 load_reg( R_ECX, Rn );
689 MEM_READ_BYTE( R_ECX, R_EAX );
690 TEST_r8_r8( R_AL, R_AL );
692 OR_imm8_r8( 0x80, R_AL );
693 MEM_WRITE_BYTE( R_ECX, R_EAX );
696 load_reg( R_EAX, Rm );
697 load_reg( R_ECX, Rn );
698 TEST_r32_r32( R_EAX, R_ECX );
702 load_reg( R_EAX, 0 );
703 TEST_imm32_r32( imm, R_EAX );
706 TST.B #imm, @(R0, GBR) {:
708 load_reg( R_ECX, R_GBR);
709 ADD_r32_r32( R_EAX, R_ECX );
710 MEM_READ_BYTE( R_ECX, R_EAX );
711 TEST_imm8_r8( imm, R_EAX );
715 load_reg( R_EAX, Rm );
716 load_reg( R_ECX, Rn );
717 XOR_r32_r32( R_EAX, R_ECX );
718 store_reg( R_ECX, Rn );
721 load_reg( R_EAX, 0 );
722 XOR_imm32_r32( imm, R_EAX );
723 store_reg( R_EAX, 0 );
725 XOR.B #imm, @(R0, GBR) {:
726 load_reg( R_EAX, 0 );
727 load_spreg( R_ECX, R_GBR );
728 ADD_r32_r32( R_EAX, R_ECX );
729 MEM_READ_BYTE( R_ECX, R_EAX );
730 XOR_imm32_r32( imm, R_EAX );
731 MEM_WRITE_BYTE( R_ECX, R_EAX );
734 load_reg( R_EAX, Rm );
735 MOV_r32_r32( R_EAX, R_ECX );
736 SHR_imm8_r32( 16, R_EAX );
737 SHL_imm8_r32( 16, R_ECX );
738 OR_r32_r32( R_EAX, R_ECX );
739 store_reg( R_ECX, Rn );
742 /* Data move instructions */
744 load_reg( R_EAX, Rm );
745 store_reg( R_EAX, Rn );
748 load_imm32( R_EAX, imm );
749 store_reg( R_EAX, Rn );
752 load_reg( R_EAX, Rm );
753 load_reg( R_ECX, Rn );
754 MEM_WRITE_BYTE( R_ECX, R_EAX );
757 load_reg( R_EAX, Rm );
758 load_reg( R_ECX, Rn );
759 ADD_imm8s_r32( -1, Rn );
760 store_reg( R_ECX, Rn );
761 MEM_WRITE_BYTE( R_ECX, R_EAX );
763 MOV.B Rm, @(R0, Rn) {:
764 load_reg( R_EAX, 0 );
765 load_reg( R_ECX, Rn );
766 ADD_r32_r32( R_EAX, R_ECX );
767 load_reg( R_EAX, Rm );
768 MEM_WRITE_BYTE( R_ECX, R_EAX );
770 MOV.B R0, @(disp, GBR) {:
771 load_reg( R_EAX, 0 );
772 load_spreg( R_ECX, R_GBR );
773 ADD_imm32_r32( disp, R_ECX );
774 MEM_WRITE_BYTE( R_ECX, R_EAX );
776 MOV.B R0, @(disp, Rn) {:
777 load_reg( R_EAX, 0 );
778 load_reg( R_ECX, Rn );
779 ADD_imm32_r32( disp, R_ECX );
780 MEM_WRITE_BYTE( R_ECX, R_EAX );
783 load_reg( R_ECX, Rm );
784 MEM_READ_BYTE( R_ECX, R_EAX );
785 store_reg( R_ECX, Rn );
788 load_reg( R_ECX, Rm );
789 MOV_r32_r32( R_ECX, R_EAX );
790 ADD_imm8s_r32( 1, R_EAX );
791 store_reg( R_EAX, Rm );
792 MEM_READ_BYTE( R_ECX, R_EAX );
793 store_reg( R_EAX, Rn );
795 MOV.B @(R0, Rm), Rn {:
796 load_reg( R_EAX, 0 );
797 load_reg( R_ECX, Rm );
798 ADD_r32_r32( R_EAX, R_ECX );
799 MEM_READ_BYTE( R_ECX, R_EAX );
800 store_reg( R_EAX, Rn );
802 MOV.B @(disp, GBR), R0 {:
803 load_spreg( R_ECX, R_GBR );
804 ADD_imm32_r32( disp, R_ECX );
805 MEM_READ_BYTE( R_ECX, R_EAX );
806 store_reg( R_EAX, 0 );
808 MOV.B @(disp, Rm), R0 {:
809 load_reg( R_ECX, Rm );
810 ADD_imm32_r32( disp, R_ECX );
811 MEM_READ_BYTE( R_ECX, R_EAX );
812 store_reg( R_EAX, 0 );
815 load_reg( R_EAX, Rm );
816 load_reg( R_ECX, Rn );
817 MEM_WRITE_LONG( R_ECX, R_EAX );
820 load_reg( R_EAX, Rm );
821 load_reg( R_ECX, Rn );
822 ADD_imm8s_r32( -4, R_ECX );
823 store_reg( R_ECX, Rn );
824 MEM_WRITE_LONG( R_ECX, R_EAX );
826 MOV.L Rm, @(R0, Rn) {:
827 load_reg( R_EAX, 0 );
828 load_reg( R_ECX, Rn );
829 ADD_r32_r32( R_EAX, R_ECX );
830 load_reg( R_EAX, Rm );
831 MEM_WRITE_LONG( R_ECX, R_EAX );
833 MOV.L R0, @(disp, GBR) {:
834 load_spreg( R_ECX, R_GBR );
835 load_reg( R_EAX, 0 );
836 ADD_imm32_r32( disp, R_ECX );
837 MEM_WRITE_LONG( R_ECX, R_EAX );
839 MOV.L Rm, @(disp, Rn) {:
840 load_reg( R_ECX, Rn );
841 load_reg( R_EAX, Rm );
842 ADD_imm32_r32( disp, R_ECX );
843 MEM_WRITE_LONG( R_ECX, R_EAX );
846 load_reg( R_ECX, Rm );
847 MEM_READ_LONG( R_ECX, R_EAX );
848 store_reg( R_EAX, Rn );
851 load_reg( R_EAX, Rm );
852 MOV_r32_r32( R_EAX, R_ECX );
853 ADD_imm8s_r32( 4, R_EAX );
854 store_reg( R_EAX, Rm );
855 MEM_READ_LONG( R_ECX, R_EAX );
856 store_reg( R_EAX, Rn );
858 MOV.L @(R0, Rm), Rn {:
859 load_reg( R_EAX, 0 );
860 load_reg( R_ECX, Rm );
861 ADD_r32_r32( R_EAX, R_ECX );
862 MEM_READ_LONG( R_ECX, R_EAX );
863 store_reg( R_EAX, Rn );
865 MOV.L @(disp, GBR), R0 {:
866 load_spreg( R_ECX, R_GBR );
867 ADD_imm32_r32( disp, R_ECX );
868 MEM_READ_LONG( R_ECX, R_EAX );
869 store_reg( R_EAX, 0 );
871 MOV.L @(disp, PC), Rn {:
872 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
873 MEM_READ_LONG( R_ECX, R_EAX );
874 store_reg( R_EAX, 0 );
876 MOV.L @(disp, Rm), Rn {:
877 load_reg( R_ECX, Rm );
878 ADD_imm8s_r32( disp, R_ECX );
879 MEM_READ_LONG( R_ECX, R_EAX );
880 store_reg( R_EAX, Rn );
883 load_reg( R_ECX, Rn );
884 MEM_READ_WORD( R_ECX, R_EAX );
885 store_reg( R_EAX, Rn );
888 load_reg( R_ECX, Rn );
889 load_reg( R_EAX, Rm );
890 ADD_imm8s_r32( -2, R_ECX );
891 MEM_WRITE_WORD( R_ECX, R_EAX );
893 MOV.W Rm, @(R0, Rn) {:
894 load_reg( R_EAX, 0 );
895 load_reg( R_ECX, Rn );
896 ADD_r32_r32( R_EAX, R_ECX );
897 load_reg( R_EAX, Rm );
898 MEM_WRITE_WORD( R_ECX, R_EAX );
900 MOV.W R0, @(disp, GBR) {:
901 load_spreg( R_ECX, R_GBR );
902 load_reg( R_EAX, 0 );
903 ADD_imm32_r32( disp, R_ECX );
904 MEM_WRITE_WORD( R_ECX, R_EAX );
906 MOV.W R0, @(disp, Rn) {:
907 load_reg( R_ECX, Rn );
908 load_reg( R_EAX, 0 );
909 ADD_imm32_r32( disp, R_ECX );
910 MEM_WRITE_WORD( R_ECX, R_EAX );
913 load_reg( R_ECX, Rm );
914 MEM_READ_WORD( R_ECX, R_EAX );
915 store_reg( R_EAX, Rn );
918 load_reg( R_EAX, Rm );
919 MOV_r32_r32( R_EAX, R_ECX );
920 ADD_imm8s_r32( 2, R_EAX );
921 store_reg( R_EAX, Rm );
922 MEM_READ_WORD( R_ECX, R_EAX );
923 store_reg( R_EAX, Rn );
925 MOV.W @(R0, Rm), Rn {:
926 load_reg( R_EAX, 0 );
927 load_reg( R_ECX, Rm );
928 ADD_r32_r32( R_EAX, R_ECX );
929 MEM_READ_WORD( R_ECX, R_EAX );
930 store_reg( R_EAX, Rn );
932 MOV.W @(disp, GBR), R0 {:
933 load_spreg( R_ECX, R_GBR );
934 ADD_imm32_r32( disp, R_ECX );
935 MEM_READ_WORD( R_ECX, R_EAX );
936 store_reg( R_EAX, 0 );
938 MOV.W @(disp, PC), Rn {:
939 load_imm32( R_ECX, pc + disp + 4 );
940 MEM_READ_WORD( R_ECX, R_EAX );
941 store_reg( R_EAX, Rn );
943 MOV.W @(disp, Rm), R0 {:
944 load_reg( R_ECX, Rm );
945 ADD_imm32_r32( disp, R_ECX );
946 MEM_READ_WORD( R_ECX, R_EAX );
947 store_reg( R_EAX, 0 );
949 MOVA @(disp, PC), R0 {:
950 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
951 store_reg( R_ECX, 0 );
954 load_reg( R_EAX, 0 );
955 load_reg( R_ECX, Rn );
956 MEM_WRITE_LONG( R_ECX, R_EAX );
959 /* Control transfer instructions */
961 CMP_imm8s_ebp( 0, R_T );
963 exit_block( disp + pc + 4 );
967 CMP_imm8s_ebp( 0, R_T );
969 exit_block( disp + pc + 4 );
970 sh4_x86.in_delay_slot = TRUE;
973 exit_block( disp + pc + 4 );
978 BT disp {: /* If true, result PC += 4 + disp. else result PC = pc+2 */
998 /* Floating point instructions */
1001 FCMP/EQ FRm, FRn {: :}
1002 FCMP/GT FRm, FRn {: :}
1003 FCNVDS FRm, FPUL {: :}
1004 FCNVSD FPUL, FRn {: :}
1007 FLDS FRm, FPUL {: :}
1010 FLOAT FPUL, FRn {: :}
1011 FMAC FR0, FRm, FRn {: :}
1014 FMOV FRm, @-Rn {: :}
1015 FMOV FRm, @(R0, Rn) {: :}
1017 FMOV @Rm+, FRn {: :}
1018 FMOV @(R0, Rm), FRn {: :}
1022 FSCA FPUL, FRn {: :}
1026 FSTS FPUL, FRn {: :}
1028 FTRC FRm, FPUL {: :}
1029 FTRV XMTRX, FVn {: :}
1031 /* Processor control instructions */
1033 load_reg( R_EAX, Rm );
1037 load_reg( R_EAX, Rm );
1038 store_spreg( R_EAX, R_GBR );
1041 load_reg( R_EAX, Rm );
1042 store_spreg( R_EAX, R_VBR );
1045 load_reg( R_EAX, Rm );
1046 store_spreg( R_EAX, R_SSR );
1049 load_reg( R_EAX, Rm );
1050 store_spreg( R_EAX, R_SGR );
1053 load_reg( R_EAX, Rm );
1054 store_spreg( R_EAX, R_SPC );
1057 load_reg( R_EAX, Rm );
1058 store_spreg( R_EAX, R_DBR );
1060 LDC Rm, Rn_BANK {: :}
1062 load_reg( R_EAX, Rm );
1063 MOV_r32_r32( R_EAX, R_ECX );
1064 ADD_imm8s_r32( 4, R_EAX );
1065 store_reg( R_EAX, Rm );
1066 MEM_READ_LONG( R_ECX, R_EAX );
1067 store_spreg( R_EAX, R_GBR );
1070 load_reg( R_EAX, Rm );
1071 MOV_r32_r32( R_EAX, R_ECX );
1072 ADD_imm8s_r32( 4, R_EAX );
1073 store_reg( R_EAX, Rm );
1074 MEM_READ_LONG( R_ECX, R_EAX );
1078 load_reg( R_EAX, Rm );
1079 MOV_r32_r32( R_EAX, R_ECX );
1080 ADD_imm8s_r32( 4, R_EAX );
1081 store_reg( R_EAX, Rm );
1082 MEM_READ_LONG( R_ECX, R_EAX );
1083 store_spreg( R_EAX, R_VBR );
1086 load_reg( R_EAX, Rm );
1087 MOV_r32_r32( R_EAX, R_ECX );
1088 ADD_imm8s_r32( 4, R_EAX );
1089 store_reg( R_EAX, Rm );
1090 MEM_READ_LONG( R_ECX, R_EAX );
1091 store_spreg( R_EAX, R_SSR );
1094 load_reg( R_EAX, Rm );
1095 MOV_r32_r32( R_EAX, R_ECX );
1096 ADD_imm8s_r32( 4, R_EAX );
1097 store_reg( R_EAX, Rm );
1098 MEM_READ_LONG( R_ECX, R_EAX );
1099 store_spreg( R_EAX, R_SGR );
1102 load_reg( R_EAX, Rm );
1103 MOV_r32_r32( R_EAX, R_ECX );
1104 ADD_imm8s_r32( 4, R_EAX );
1105 store_reg( R_EAX, Rm );
1106 MEM_READ_LONG( R_ECX, R_EAX );
1107 store_spreg( R_EAX, R_SPC );
1110 load_reg( R_EAX, Rm );
1111 MOV_r32_r32( R_EAX, R_ECX );
1112 ADD_imm8s_r32( 4, R_EAX );
1113 store_reg( R_EAX, Rm );
1114 MEM_READ_LONG( R_ECX, R_EAX );
1115 store_spreg( R_EAX, R_DBR );
1117 LDC.L @Rm+, Rn_BANK {:
1120 load_reg( R_EAX, Rm );
1121 store_spreg( R_EAX, R_FPSCR );
1123 LDS.L @Rm+, FPSCR {:
1124 load_reg( R_EAX, Rm );
1125 MOV_r32_r32( R_EAX, R_ECX );
1126 ADD_imm8s_r32( 4, R_EAX );
1127 store_reg( R_EAX, Rm );
1128 MEM_READ_LONG( R_ECX, R_EAX );
1129 store_spreg( R_EAX, R_FPSCR );
1132 load_reg( R_EAX, Rm );
1133 store_spreg( R_EAX, R_FPUL );
1136 load_reg( R_EAX, Rm );
1137 MOV_r32_r32( R_EAX, R_ECX );
1138 ADD_imm8s_r32( 4, R_EAX );
1139 store_reg( R_EAX, Rm );
1140 MEM_READ_LONG( R_ECX, R_EAX );
1141 store_spreg( R_EAX, R_FPUL );
1144 load_reg( R_EAX, Rm );
1145 store_spreg( R_EAX, R_MACH );
1148 load_reg( R_EAX, Rm );
1149 MOV_r32_r32( R_EAX, R_ECX );
1150 ADD_imm8s_r32( 4, R_EAX );
1151 store_reg( R_EAX, Rm );
1152 MEM_READ_LONG( R_ECX, R_EAX );
1153 store_spreg( R_EAX, R_MACH );
1156 load_reg( R_EAX, Rm );
1157 store_spreg( R_EAX, R_MACL );
1160 load_reg( R_EAX, Rm );
1161 MOV_r32_r32( R_EAX, R_ECX );
1162 ADD_imm8s_r32( 4, R_EAX );
1163 store_reg( R_EAX, Rm );
1164 MEM_READ_LONG( R_ECX, R_EAX );
1165 store_spreg( R_EAX, R_MACL );
1168 load_reg( R_EAX, Rm );
1169 store_spreg( R_EAX, R_PR );
1172 load_reg( R_EAX, Rm );
1173 MOV_r32_r32( R_EAX, R_ECX );
1174 ADD_imm8s_r32( 4, R_EAX );
1175 store_reg( R_EAX, Rm );
1176 MEM_READ_LONG( R_ECX, R_EAX );
1177 store_spreg( R_EAX, R_PR );
1187 store_reg( R_EAX, Rn );
1190 load_spreg( R_EAX, R_GBR );
1191 store_reg( R_EAX, Rn );
1194 load_spreg( R_EAX, R_VBR );
1195 store_reg( R_EAX, Rn );
1198 load_spreg( R_EAX, R_SSR );
1199 store_reg( R_EAX, Rn );
1202 load_spreg( R_EAX, R_SPC );
1203 store_reg( R_EAX, Rn );
1206 load_spreg( R_EAX, R_SGR );
1207 store_reg( R_EAX, Rn );
1210 load_spreg( R_EAX, R_DBR );
1211 store_reg( R_EAX, Rn );
1213 STC Rm_BANK, Rn {: /* TODO */
1215 STC.L SR, @-Rn {: /* TODO */
1216 load_reg( R_ECX, Rn );
1217 ADD_imm8s_r32( -4, Rn );
1218 store_reg( R_ECX, Rn );
1220 MEM_WRITE_LONG( R_ECX, R_EAX );
1223 load_reg( R_ECX, Rn );
1224 ADD_imm8s_r32( -4, Rn );
1225 store_reg( R_ECX, Rn );
1226 load_spreg( R_EAX, R_VBR );
1227 MEM_WRITE_LONG( R_ECX, R_EAX );
1230 load_reg( R_ECX, Rn );
1231 ADD_imm8s_r32( -4, Rn );
1232 store_reg( R_ECX, Rn );
1233 load_spreg( R_EAX, R_SSR );
1234 MEM_WRITE_LONG( R_ECX, R_EAX );
1237 load_reg( R_ECX, Rn );
1238 ADD_imm8s_r32( -4, Rn );
1239 store_reg( R_ECX, Rn );
1240 load_spreg( R_EAX, R_SPC );
1241 MEM_WRITE_LONG( R_ECX, R_EAX );
1244 load_reg( R_ECX, Rn );
1245 ADD_imm8s_r32( -4, Rn );
1246 store_reg( R_ECX, Rn );
1247 load_spreg( R_EAX, R_SGR );
1248 MEM_WRITE_LONG( R_ECX, R_EAX );
1251 load_reg( R_ECX, Rn );
1252 ADD_imm8s_r32( -4, Rn );
1253 store_reg( R_ECX, Rn );
1254 load_spreg( R_EAX, R_DBR );
1255 MEM_WRITE_LONG( R_ECX, R_EAX );
1257 STC.L Rm_BANK, @-Rn {: :}
1259 load_reg( R_ECX, Rn );
1260 ADD_imm8s_r32( -4, Rn );
1261 store_reg( R_ECX, Rn );
1262 load_spreg( R_EAX, R_GBR );
1263 MEM_WRITE_LONG( R_ECX, R_EAX );
1266 load_spreg( R_EAX, R_FPSCR );
1267 store_reg( R_EAX, Rn );
1269 STS.L FPSCR, @-Rn {:
1270 load_reg( R_ECX, Rn );
1271 ADD_imm8s_r32( -4, Rn );
1272 store_reg( R_ECX, Rn );
1273 load_spreg( R_EAX, R_FPSCR );
1274 MEM_WRITE_LONG( R_ECX, R_EAX );
1277 load_spreg( R_EAX, R_FPUL );
1278 store_reg( R_EAX, Rn );
1281 load_reg( R_ECX, Rn );
1282 ADD_imm8s_r32( -4, Rn );
1283 store_reg( R_ECX, Rn );
1284 load_spreg( R_EAX, R_FPUL );
1285 MEM_WRITE_LONG( R_ECX, R_EAX );
1288 load_spreg( R_EAX, R_MACH );
1289 store_reg( R_EAX, Rn );
1292 load_reg( R_ECX, Rn );
1293 ADD_imm8s_r32( -4, Rn );
1294 store_reg( R_ECX, Rn );
1295 load_spreg( R_EAX, R_MACH );
1296 MEM_WRITE_LONG( R_ECX, R_EAX );
1299 load_spreg( R_EAX, R_MACL );
1300 store_reg( R_EAX, Rn );
1303 load_reg( R_ECX, Rn );
1304 ADD_imm8s_r32( -4, Rn );
1305 store_reg( R_ECX, Rn );
1306 load_spreg( R_EAX, R_MACL );
1307 MEM_WRITE_LONG( R_ECX, R_EAX );
1310 load_spreg( R_EAX, R_PR );
1311 store_reg( R_EAX, Rn );
1314 load_reg( R_ECX, Rn );
1315 ADD_imm8s_r32( -4, Rn );
1316 store_reg( R_ECX, Rn );
1317 load_spreg( R_EAX, R_PR );
1318 MEM_WRITE_LONG( R_ECX, R_EAX );
1321 NOP {: /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */ :}
.