filename | src/sh4/sh4x86.c |
changeset | 377:fa18743f6905 |
prev | 375:4627600f7f8e |
next | 380:2e8166bf6832 |
author | nkeynes |
date | Wed Sep 12 09:20:38 2007 +0000 (16 years ago) |
permissions | -rw-r--r-- |
last change | Start splitting the common SH4 parts into sh4.c, with sh4core.c to become just the emulation core. |
view | annotate | diff | log | raw |
1 /**
2 * $Id: sh4x86.c,v 1.6 2007-09-12 09:17:52 nkeynes Exp $
3 *
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
7 *
8 * Copyright (c) 2007 Nathan Keynes.
9 *
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
14 *
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
19 */
21 #include <assert.h>
23 #include "sh4/sh4core.h"
24 #include "sh4/sh4trans.h"
25 #include "sh4/x86op.h"
26 #include "clock.h"
28 #define DEFAULT_BACKPATCH_SIZE 4096
/**
 * Struct to manage internal translation state. This state is not saved -
 * it is only valid between calls to sh4_translate_begin_block() and
 * sh4_translate_end_block()
 */
struct sh4_x86_state {
    gboolean in_delay_slot;  /* true while translating a branch delay slot */
    gboolean priv_checked; /* true if we've already checked the cpu mode. */
    gboolean fpuen_checked; /* true if we've already checked fpu enabled. */

    /* Allocated memory for the (block-wide) back-patch list */
    uint32_t **backpatch_list;   /* each entry points at an emitted rel32 displacement */
    uint32_t backpatch_posn;     /* number of entries currently in use */
    uint32_t backpatch_size;     /* capacity of backpatch_list, in entries */
};
/* Byte offsets of the exception exit stubs emitted by
 * sh4_translate_end_block(). Each stub is 7 bytes (5-byte PUSH imm32 +
 * 2-byte short JMP), hence the spacing of 7; JE_exit/JNE_exit jumps are
 * backpatched to land on one of these offsets. */
#define EXIT_DATA_ADDR_READ 0
#define EXIT_DATA_ADDR_WRITE 7
#define EXIT_ILLEGAL 14
#define EXIT_SLOT_ILLEGAL 21
#define EXIT_FPU_DISABLED 28
#define EXIT_SLOT_FPU_DISABLED 35

/* Translation state for the block currently being translated */
static struct sh4_x86_state sh4_x86;
55 void sh4_x86_init()
56 {
57 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
58 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
59 }
62 static void sh4_x86_add_backpatch( uint8_t *ptr )
63 {
64 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
65 sh4_x86.backpatch_size <<= 1;
66 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
67 assert( sh4_x86.backpatch_list != NULL );
68 }
69 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
70 }
/**
 * Fix up every recorded jump displacement to point into the exception
 * stub block at reloc_base. Each recorded displacement already holds the
 * desired EXIT_* offset within the stub block; we add the distance from
 * the end of the jump instruction (displacement address + 4) to
 * reloc_base to turn it into a correct rel32.
 */
static void sh4_x86_do_backpatch( uint8_t *reloc_base )
{
    unsigned int i;
    for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
        *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
    }
}
/* Debug helpers for hand-counted short jumps: MARK_JMP records where the
 * output position is expected to be after emitting n more bytes, and
 * CHECK_JMP asserts that xlat_output actually reached that point.
 * Both compile away to nothing under NDEBUG. */
#ifndef NDEBUG
#define MARK_JMP(x,n) uint8_t *_mark_jmp_##x = xlat_output + n
#define CHECK_JMP(x) assert( _mark_jmp_##x == xlat_output )
#else
#define MARK_JMP(x,n)
#define CHECK_JMP(x)
#endif
/**
 * Emit an instruction to load an SH4 reg into a real register
 */
static inline void load_reg( int x86reg, int sh4reg )
{
    /* mov [bp+n], reg - EBP points at sh4r (set up in
     * sh4_translate_begin_block), so this is a disp8 load of r[sh4reg] */
    OP(0x8B);                   // MOV r32, r/m32
    OP(0x45 + (x86reg<<3));     // ModRM: [ebp+disp8], reg = x86reg
    OP(REG_OFFSET(r[sh4reg]));  // disp8 = offset of r[sh4reg] in sh4r
}
/**
 * Emit a sign-extended 16-bit load of an SH4 reg into an x86 register
 * (0F BF = MOVSX r32, r/m16).
 */
static inline void load_reg16s( int x86reg, int sh4reg )
{
    OP(0x0F);
    OP(0xBF);
    MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
}
/**
 * Emit a zero-extended 16-bit load of an SH4 reg into an x86 register
 * (0F B7 = MOVZX r32, r/m16).
 */
static inline void load_reg16u( int x86reg, int sh4reg )
{
    OP(0x0F);
    OP(0xB7);
    MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));

}
/**
 * Emit an instruction to load a special (non-GPR) SH4 register into a
 * real register; regoffset is the byte offset of the field within sh4r
 * (typically one of the R_* constants).
 */
static inline void load_spreg( int x86reg, int regoffset )
{
    /* mov [bp+n], reg */
    OP(0x8B);                // MOV r32, r/m32
    OP(0x45 + (x86reg<<3));  // ModRM: [ebp+disp8]
    OP(regoffset);           // disp8
}
/**
 * Emit an instruction to load an immediate value into a register
 */
static inline void load_imm32( int x86reg, uint32_t value ) {
    /* mov #value, reg : B8+r imm32 */
    OP(0xB8 + x86reg);
    OP32(value);
}
132 /**
133 * Emit an instruction to store an SH4 reg (RN)
134 */
135 void static inline store_reg( int x86reg, int sh4reg ) {
136 /* mov reg, [bp+n] */
137 OP(0x89);
138 OP(0x45 + (x86reg<<3));
139 OP(REG_OFFSET(r[sh4reg]));
140 }
/**
 * Emit an instruction to store a register into a special (non-GPR) SH4
 * register at the given byte offset within sh4r.
 */
static inline void store_spreg( int x86reg, int regoffset ) {
    /* mov reg, [bp+n] */
    OP(0x89);                // MOV r/m32, r32
    OP(0x45 + (x86reg<<3));  // ModRM: [ebp+disp8]
    OP(regoffset);           // disp8
}
/* Load a pointer to the current FP register bank (sh4r.fr_bank) into bankreg */
#define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
/**
 * Load an FR register (single-precision floating point) into an integer x86
 * register (eg for register-to-register moves)
 */
static inline void load_fr( int bankreg, int x86reg, int frm )
{
    /* MOV [bankreg + (frm^1)*4], x86reg. The frm^1 swaps even/odd register
     * numbers within each pair - presumably to match the word ordering of
     * the doubles in the bank (TODO: confirm against sh4r layout). */
    OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
}
/**
 * Store an FR register (single-precision floating point) into an integer x86
 * register (eg for register-to-register moves)
 */
static inline void store_fr( int bankreg, int x86reg, int frn )
{
    /* MOV x86reg, [bankreg + (frn^1)*4] - same frn^1 pair-swap as load_fr */
    OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
}
/**
 * Load a pointer to the back fp bank into the specified x86 register. The
 * bankreg must have been previously loaded with FPSCR.
 * NB: 10 bytes
 */
static inline void load_xf_bank( int bankreg )
{
    /* Bit 21 of FPSCR selects the bank; each bank is 16*4 = 64 bytes,
     * so the bit is shifted down to become a 0 or 0x40 byte offset. */
    SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
    AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
    OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
}
/**
 * Push FPUL (as a 32-bit float) onto the FPU stack
 */
static inline void push_fpul( )
{
    /* FLD dword [ebp + R_FPUL]  (D9 /0) */
    OP(0xD9); OP(0x45); OP(R_FPUL);
}
/**
 * Pop FPUL (as a 32-bit float) from the FPU stack
 */
static inline void pop_fpul( )
{
    /* FSTP dword [ebp + R_FPUL]  (D9 /3) */
    OP(0xD9); OP(0x5D); OP(R_FPUL);
}
/**
 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
 * with the location of the current fp bank.
 */
static inline void push_fr( int bankreg, int frm )
{
    OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
}
/**
 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
 * with bankreg previously loaded with the location of the current fp bank.
 */
static inline void pop_fr( int bankreg, int frm )
{
    OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
}
/**
 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
 * with the location of the current fp bank.
 */
static inline void push_dr( int bankreg, int frm )
{
    OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
}
/**
 * Pop a 64-bit double from the FPU stack and store it back into the fp
 * bank, with bankreg previously loaded with the current fp bank location.
 */
static inline void pop_dr( int bankreg, int frm )
{
    OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
}
/**
 * Emit an indirect call to a zero-argument function.
 * Note: clobbers EAX to make the indirect call - this isn't usually
 * a problem since the callee will usually clobber it anyway.
 */
static inline void call_func0( void *ptr )
{
    load_imm32(R_EAX, (uint32_t)ptr);
    CALL_r32(R_EAX);
}
/**
 * Emit a call to a one-argument cdecl function, passing arg1 on the
 * stack. Clobbers EAX (see call_func0).
 */
static inline void call_func1( void *ptr, int arg1 )
{
    PUSH_r32(arg1);
    call_func0(ptr);
    ADD_imm8s_r32( 4, R_ESP );  // cdecl: caller pops the argument
}
/**
 * Emit a call to a two-argument cdecl function. Arguments are pushed
 * right-to-left so arg1 ends up on top of the stack. Clobbers EAX.
 */
static inline void call_func2( void *ptr, int arg1, int arg2 )
{
    PUSH_r32(arg2);
    PUSH_r32(arg1);
    call_func0(ptr);
    ADD_imm8s_r32( 8, R_ESP );  // cdecl: caller pops both arguments
}
255 /**
256 * Write a double (64-bit) value into memory, with the first word in arg2a, and
257 * the second in arg2b
258 * NB: 30 bytes
259 */
260 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
261 {
262 ADD_imm8s_r32( 4, addr );
263 PUSH_r32(addr);
264 PUSH_r32(arg2b);
265 ADD_imm8s_r32( -4, addr );
266 PUSH_r32(addr);
267 PUSH_r32(arg2a);
268 call_func0(sh4_write_long);
269 ADD_imm8s_r32( 8, R_ESP );
270 call_func0(sh4_write_long);
271 ADD_imm8s_r32( 8, R_ESP );
272 }
/**
 * Read a double (64-bit) value from memory, writing the first word into arg2a
 * and the second into arg2b. The addr must not be in EAX
 * NB: 27 bytes
 */
static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
{
    PUSH_r32(addr);
    call_func0(sh4_read_long);  // EAX = first word
    POP_r32(addr);              // restore addr (clobbered if it were EAX)
    PUSH_r32(R_EAX);            // save first word across the second call
    ADD_imm8s_r32( 4, addr );
    PUSH_r32(addr);
    call_func0(sh4_read_long);  // EAX = second word
    ADD_imm8s_r32( 4, R_ESP );  // discard the pushed address
    MOV_r32_r32( R_EAX, arg2b );
    POP_r32(arg2a);             // recover the saved first word
}
/* Exception checks - Note that all exception checks will clobber EAX */

/**
 * Emit a privileged-mode check: tests the MD bit of SR and branches to
 * the (slot-)illegal exit stub if the CPU is in user mode. Emitted at
 * most once per block - priv_checked suppresses repeats until reset
 * (e.g. by RTE, which can change SR).
 */
static void check_priv( )
{
    if( !sh4_x86.priv_checked ) {
	sh4_x86.priv_checked = TRUE;
	load_spreg( R_EAX, R_SR );
	AND_imm32_r32( SR_MD, R_EAX );
	if( sh4_x86.in_delay_slot ) {
	    JE_exit( EXIT_SLOT_ILLEGAL );
	} else {
	    JE_exit( EXIT_ILLEGAL );
	}
    }
}
/**
 * Emit an FPU-enabled check: tests the FD (FPU disable) bit of SR and
 * branches to the (slot-)FPU-disabled exit stub if it is set. Emitted at
 * most once per block via fpuen_checked. Clobbers EAX.
 */
static void check_fpuen( )
{
    if( !sh4_x86.fpuen_checked ) {
	sh4_x86.fpuen_checked = TRUE;
	load_spreg( R_EAX, R_SR );
	AND_imm32_r32( SR_FD, R_EAX );
	if( sh4_x86.in_delay_slot ) {
	    JNE_exit(EXIT_SLOT_FPU_DISABLED);
	} else {
	    JNE_exit(EXIT_FPU_DISABLED);
	}
    }
}
/**
 * Emit a 16-bit read-alignment check on the address in x86reg: branch to
 * the data-address-read exit stub if the low bit is set. Clobbers flags.
 */
static void check_ralign16( int x86reg )
{
    TEST_imm32_r32( 0x00000001, x86reg );
    JNE_exit(EXIT_DATA_ADDR_READ);
}
/**
 * Emit a 16-bit write-alignment check on the address in x86reg: branch to
 * the data-address-write exit stub if the low bit is set.
 */
static void check_walign16( int x86reg )
{
    TEST_imm32_r32( 0x00000001, x86reg );
    JNE_exit(EXIT_DATA_ADDR_WRITE);
}
/**
 * Emit a 32-bit read-alignment check on the address in x86reg: branch to
 * the data-address-read exit stub if either of the low two bits is set.
 */
static void check_ralign32( int x86reg )
{
    TEST_imm32_r32( 0x00000003, x86reg );
    JNE_exit(EXIT_DATA_ADDR_READ);
}
/**
 * Emit a 32-bit write-alignment check on the address in x86reg: branch to
 * the data-address-write exit stub if either of the low two bits is set.
 */
static void check_walign32( int x86reg )
{
    TEST_imm32_r32( 0x00000003, x86reg );
    JNE_exit(EXIT_DATA_ADDR_WRITE);
}
/* Translation of an undefined/unimplemented opcode - currently a no-op */
#define UNDEF()
/* Move a memory-call result out of EAX into value_reg, if they differ */
#define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
/* Memory access helpers: each emits a call into the sh4 memory core with
 * the address (and value, for writes) taken from the given registers.
 * All of them clobber EAX (and whatever the callee clobbers). */
#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)

/* Emit a call to sh4_raise_exception(exc) */
#define RAISE_EXCEPTION( exc ) call_func1(sh4_raise_exception, exc);
/* Raise a slot-illegal exception and end the basic block (return 1 from
 * sh4_x86_translate_instruction) */
#define SLOTILLEGAL() RAISE_EXCEPTION(EXC_SLOT_ILLEGAL); return 1
/**
 * Emit the 'start of block' assembly. Sets up the stack frame and save
 * SI/DI as required
 */
void sh4_translate_begin_block() 
{
    PUSH_r32(R_EBP);
    /* mov &sh4r, ebp - all sh4r field accesses are ebp-relative from here */
    load_imm32( R_EBP, (uint32_t)&sh4r );
    /* Save EDI/ESI: within a block EDI holds the branch-target PC and ESI
     * the translated-instruction count (see exit_block).
     * NOTE(review): ESI is never zeroed here - presumably initialised by
     * the caller before entering the block; verify in sh4trans.c. */
    PUSH_r32(R_EDI);
    PUSH_r32(R_ESI);
    
    /* Reset per-block translation state */
    sh4_x86.in_delay_slot = FALSE;
    sh4_x86.priv_checked = FALSE;
    sh4_x86.fpuen_checked = FALSE;
    sh4_x86.backpatch_posn = 0;
}
/**
 * Exit the block early (ie branch out), conditionally or otherwise
 */
void exit_block( )
{
    store_spreg( R_EDI, REG_OFFSET(pc) );           // sh4r.pc = EDI (branch target)
    MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );    // EAX = per-instruction cycle cost (TODO confirm units)
    load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    MUL_r32( R_ESI );                               // EDX:EAX = period * instruction count
    ADD_r32_r32( R_EAX, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(slice_cycle) );  // slice_cycle += elapsed
    XOR_r32_r32( R_EAX, R_EAX );                    // block returns 0
    POP_r32(R_ESI);
    POP_r32(R_EDI);
    POP_r32(R_EBP);
    RET();
}
/**
 * Flush any open regs back to memory, restore SI/DI/, update PC, etc
 */
void sh4_translate_end_block( sh4addr_t pc ) {
    assert( !sh4_x86.in_delay_slot ); // should never stop here
    // Normal termination - save PC, cycle count
    exit_block( );

    uint8_t *end_ptr = xlat_output;
    // Exception termination. Jump block for various exception codes:
    // six 7-byte stubs (5-byte PUSH imm32 + 2-byte JMP rel8), one per
    // EXIT_* offset; earlier JE_exit/JNE_exit jumps are backpatched
    // (below) to land on the appropriate stub. Each stub pushes its
    // exception code and jumps to the common handler after the last stub.
    PUSH_imm32( EXC_DATA_ADDR_READ );
    JMP_rel8( 33 );
    PUSH_imm32( EXC_DATA_ADDR_WRITE );
    JMP_rel8( 26 );
    PUSH_imm32( EXC_ILLEGAL );
    JMP_rel8( 19 );
    PUSH_imm32( EXC_SLOT_ILLEGAL ); 
    JMP_rel8( 12 );
    PUSH_imm32( EXC_FPU_DISABLED ); 
    JMP_rel8( 5 );                 
    PUSH_imm32( EXC_SLOT_FPU_DISABLED );
    // target: common handler - advance PC past the translated
    // instructions (2 bytes each, so pc += 2*ESI), charge the cycles,
    // then call sh4_raise_exception with the pushed code.
    load_spreg( R_ECX, REG_OFFSET(pc) );
    ADD_r32_r32( R_ESI, R_ECX );
    ADD_r32_r32( R_ESI, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(pc) );
    MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
    load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
    MUL_r32( R_ESI );
    ADD_r32_r32( R_EAX, R_ECX );
    store_spreg( R_ECX, REG_OFFSET(slice_cycle) );

    load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
    CALL_r32( R_EAX ); // 2
    // NOTE(review): unlike exit_block, only EBP is popped here (no
    // ESI/EDI pop, no stack cleanup of the pushed code) - presumably
    // sh4_raise_exception does not return normally or the caller
    // tolerates this; verify.
    POP_r32(R_EBP);
    RET();

    // Relocate all recorded exception-jump displacements into the stub block
    sh4_x86_do_backpatch( end_ptr );
}
436 /**
437 * Translate a single instruction. Delayed branches are handled specially
438 * by translating both branch and delayed instruction as a single unit (as
439 *
440 *
441 * @return true if the instruction marks the end of a basic block
442 * (eg a branch or
443 */
444 uint32_t sh4_x86_translate_instruction( uint32_t pc )
445 {
446 uint16_t ir = sh4_read_word( pc );
448 switch( (ir&0xF000) >> 12 ) {
449 case 0x0:
450 switch( ir&0xF ) {
451 case 0x2:
452 switch( (ir&0x80) >> 7 ) {
453 case 0x0:
454 switch( (ir&0x70) >> 4 ) {
455 case 0x0:
456 { /* STC SR, Rn */
457 uint32_t Rn = ((ir>>8)&0xF);
458 call_func0(sh4_read_sr);
459 store_reg( R_EAX, Rn );
460 }
461 break;
462 case 0x1:
463 { /* STC GBR, Rn */
464 uint32_t Rn = ((ir>>8)&0xF);
465 load_spreg( R_EAX, R_GBR );
466 store_reg( R_EAX, Rn );
467 }
468 break;
469 case 0x2:
470 { /* STC VBR, Rn */
471 uint32_t Rn = ((ir>>8)&0xF);
472 load_spreg( R_EAX, R_VBR );
473 store_reg( R_EAX, Rn );
474 }
475 break;
476 case 0x3:
477 { /* STC SSR, Rn */
478 uint32_t Rn = ((ir>>8)&0xF);
479 load_spreg( R_EAX, R_SSR );
480 store_reg( R_EAX, Rn );
481 }
482 break;
483 case 0x4:
484 { /* STC SPC, Rn */
485 uint32_t Rn = ((ir>>8)&0xF);
486 load_spreg( R_EAX, R_SPC );
487 store_reg( R_EAX, Rn );
488 }
489 break;
490 default:
491 UNDEF();
492 break;
493 }
494 break;
495 case 0x1:
496 { /* STC Rm_BANK, Rn */
497 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
498 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
499 store_reg( R_EAX, Rn );
500 }
501 break;
502 }
503 break;
504 case 0x3:
505 switch( (ir&0xF0) >> 4 ) {
506 case 0x0:
507 { /* BSRF Rn */
508 uint32_t Rn = ((ir>>8)&0xF);
509 if( sh4_x86.in_delay_slot ) {
510 SLOTILLEGAL();
511 } else {
512 load_imm32( R_EAX, pc + 4 );
513 store_spreg( R_EAX, R_PR );
514 load_reg( R_EDI, Rn );
515 ADD_r32_r32( R_EAX, R_EDI );
516 sh4_x86.in_delay_slot = TRUE;
517 INC_r32(R_ESI);
518 return 0;
519 }
520 }
521 break;
522 case 0x2:
523 { /* BRAF Rn */
524 uint32_t Rn = ((ir>>8)&0xF);
525 if( sh4_x86.in_delay_slot ) {
526 SLOTILLEGAL();
527 } else {
528 load_reg( R_EDI, Rn );
529 sh4_x86.in_delay_slot = TRUE;
530 INC_r32(R_ESI);
531 return 0;
532 }
533 }
534 break;
535 case 0x8:
536 { /* PREF @Rn */
537 uint32_t Rn = ((ir>>8)&0xF);
538 load_reg( R_EAX, Rn );
539 PUSH_r32( R_EAX );
540 AND_imm32_r32( 0xFC000000, R_EAX );
541 CMP_imm32_r32( 0xE0000000, R_EAX );
542 JNE_rel8(8);
543 call_func0( sh4_flush_store_queue );
544 ADD_imm8s_r32( 4, R_ESP );
545 }
546 break;
547 case 0x9:
548 { /* OCBI @Rn */
549 uint32_t Rn = ((ir>>8)&0xF);
550 }
551 break;
552 case 0xA:
553 { /* OCBP @Rn */
554 uint32_t Rn = ((ir>>8)&0xF);
555 }
556 break;
557 case 0xB:
558 { /* OCBWB @Rn */
559 uint32_t Rn = ((ir>>8)&0xF);
560 }
561 break;
562 case 0xC:
563 { /* MOVCA.L R0, @Rn */
564 uint32_t Rn = ((ir>>8)&0xF);
565 load_reg( R_EAX, 0 );
566 load_reg( R_ECX, Rn );
567 check_walign32( R_ECX );
568 MEM_WRITE_LONG( R_ECX, R_EAX );
569 }
570 break;
571 default:
572 UNDEF();
573 break;
574 }
575 break;
576 case 0x4:
577 { /* MOV.B Rm, @(R0, Rn) */
578 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
579 load_reg( R_EAX, 0 );
580 load_reg( R_ECX, Rn );
581 ADD_r32_r32( R_EAX, R_ECX );
582 load_reg( R_EAX, Rm );
583 MEM_WRITE_BYTE( R_ECX, R_EAX );
584 }
585 break;
586 case 0x5:
587 { /* MOV.W Rm, @(R0, Rn) */
588 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
589 load_reg( R_EAX, 0 );
590 load_reg( R_ECX, Rn );
591 ADD_r32_r32( R_EAX, R_ECX );
592 check_walign16( R_ECX );
593 load_reg( R_EAX, Rm );
594 MEM_WRITE_WORD( R_ECX, R_EAX );
595 }
596 break;
597 case 0x6:
598 { /* MOV.L Rm, @(R0, Rn) */
599 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
600 load_reg( R_EAX, 0 );
601 load_reg( R_ECX, Rn );
602 ADD_r32_r32( R_EAX, R_ECX );
603 check_walign32( R_ECX );
604 load_reg( R_EAX, Rm );
605 MEM_WRITE_LONG( R_ECX, R_EAX );
606 }
607 break;
608 case 0x7:
609 { /* MUL.L Rm, Rn */
610 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
611 load_reg( R_EAX, Rm );
612 load_reg( R_ECX, Rn );
613 MUL_r32( R_ECX );
614 store_spreg( R_EAX, R_MACL );
615 }
616 break;
617 case 0x8:
618 switch( (ir&0xFF0) >> 4 ) {
619 case 0x0:
620 { /* CLRT */
621 CLC();
622 SETC_t();
623 }
624 break;
625 case 0x1:
626 { /* SETT */
627 STC();
628 SETC_t();
629 }
630 break;
631 case 0x2:
632 { /* CLRMAC */
633 XOR_r32_r32(R_EAX, R_EAX);
634 store_spreg( R_EAX, R_MACL );
635 store_spreg( R_EAX, R_MACH );
636 }
637 break;
638 case 0x3:
639 { /* LDTLB */
640 }
641 break;
642 case 0x4:
643 { /* CLRS */
644 CLC();
645 SETC_sh4r(R_S);
646 }
647 break;
648 case 0x5:
649 { /* SETS */
650 STC();
651 SETC_sh4r(R_S);
652 }
653 break;
654 default:
655 UNDEF();
656 break;
657 }
658 break;
659 case 0x9:
660 switch( (ir&0xF0) >> 4 ) {
661 case 0x0:
662 { /* NOP */
663 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
664 }
665 break;
666 case 0x1:
667 { /* DIV0U */
668 XOR_r32_r32( R_EAX, R_EAX );
669 store_spreg( R_EAX, R_Q );
670 store_spreg( R_EAX, R_M );
671 store_spreg( R_EAX, R_T );
672 }
673 break;
674 case 0x2:
675 { /* MOVT Rn */
676 uint32_t Rn = ((ir>>8)&0xF);
677 load_spreg( R_EAX, R_T );
678 store_reg( R_EAX, Rn );
679 }
680 break;
681 default:
682 UNDEF();
683 break;
684 }
685 break;
686 case 0xA:
687 switch( (ir&0xF0) >> 4 ) {
688 case 0x0:
689 { /* STS MACH, Rn */
690 uint32_t Rn = ((ir>>8)&0xF);
691 load_spreg( R_EAX, R_MACH );
692 store_reg( R_EAX, Rn );
693 }
694 break;
695 case 0x1:
696 { /* STS MACL, Rn */
697 uint32_t Rn = ((ir>>8)&0xF);
698 load_spreg( R_EAX, R_MACL );
699 store_reg( R_EAX, Rn );
700 }
701 break;
702 case 0x2:
703 { /* STS PR, Rn */
704 uint32_t Rn = ((ir>>8)&0xF);
705 load_spreg( R_EAX, R_PR );
706 store_reg( R_EAX, Rn );
707 }
708 break;
709 case 0x3:
710 { /* STC SGR, Rn */
711 uint32_t Rn = ((ir>>8)&0xF);
712 load_spreg( R_EAX, R_SGR );
713 store_reg( R_EAX, Rn );
714 }
715 break;
716 case 0x5:
717 { /* STS FPUL, Rn */
718 uint32_t Rn = ((ir>>8)&0xF);
719 load_spreg( R_EAX, R_FPUL );
720 store_reg( R_EAX, Rn );
721 }
722 break;
723 case 0x6:
724 { /* STS FPSCR, Rn */
725 uint32_t Rn = ((ir>>8)&0xF);
726 load_spreg( R_EAX, R_FPSCR );
727 store_reg( R_EAX, Rn );
728 }
729 break;
730 case 0xF:
731 { /* STC DBR, Rn */
732 uint32_t Rn = ((ir>>8)&0xF);
733 load_spreg( R_EAX, R_DBR );
734 store_reg( R_EAX, Rn );
735 }
736 break;
737 default:
738 UNDEF();
739 break;
740 }
741 break;
742 case 0xB:
743 switch( (ir&0xFF0) >> 4 ) {
744 case 0x0:
745 { /* RTS */
746 if( sh4_x86.in_delay_slot ) {
747 SLOTILLEGAL();
748 } else {
749 load_spreg( R_EDI, R_PR );
750 sh4_x86.in_delay_slot = TRUE;
751 INC_r32(R_ESI);
752 return 0;
753 }
754 }
755 break;
756 case 0x1:
757 { /* SLEEP */
758 /* TODO */
759 }
760 break;
761 case 0x2:
762 { /* RTE */
763 check_priv();
764 if( sh4_x86.in_delay_slot ) {
765 SLOTILLEGAL();
766 } else {
767 load_spreg( R_EDI, R_PR );
768 load_spreg( R_EAX, R_SSR );
769 call_func1( sh4_write_sr, R_EAX );
770 sh4_x86.in_delay_slot = TRUE;
771 sh4_x86.priv_checked = FALSE;
772 sh4_x86.fpuen_checked = FALSE;
773 INC_r32(R_ESI);
774 return 0;
775 }
776 }
777 break;
778 default:
779 UNDEF();
780 break;
781 }
782 break;
783 case 0xC:
784 { /* MOV.B @(R0, Rm), Rn */
785 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
786 load_reg( R_EAX, 0 );
787 load_reg( R_ECX, Rm );
788 ADD_r32_r32( R_EAX, R_ECX );
789 MEM_READ_BYTE( R_ECX, R_EAX );
790 store_reg( R_EAX, Rn );
791 }
792 break;
793 case 0xD:
794 { /* MOV.W @(R0, Rm), Rn */
795 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
796 load_reg( R_EAX, 0 );
797 load_reg( R_ECX, Rm );
798 ADD_r32_r32( R_EAX, R_ECX );
799 check_ralign16( R_ECX );
800 MEM_READ_WORD( R_ECX, R_EAX );
801 store_reg( R_EAX, Rn );
802 }
803 break;
804 case 0xE:
805 { /* MOV.L @(R0, Rm), Rn */
806 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
807 load_reg( R_EAX, 0 );
808 load_reg( R_ECX, Rm );
809 ADD_r32_r32( R_EAX, R_ECX );
810 check_ralign32( R_ECX );
811 MEM_READ_LONG( R_ECX, R_EAX );
812 store_reg( R_EAX, Rn );
813 }
814 break;
815 case 0xF:
816 { /* MAC.L @Rm+, @Rn+ */
817 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
818 }
819 break;
820 default:
821 UNDEF();
822 break;
823 }
824 break;
825 case 0x1:
826 { /* MOV.L Rm, @(disp, Rn) */
827 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
828 load_reg( R_ECX, Rn );
829 load_reg( R_EAX, Rm );
830 ADD_imm32_r32( disp, R_ECX );
831 check_walign32( R_ECX );
832 MEM_WRITE_LONG( R_ECX, R_EAX );
833 }
834 break;
835 case 0x2:
836 switch( ir&0xF ) {
837 case 0x0:
838 { /* MOV.B Rm, @Rn */
839 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
840 load_reg( R_EAX, Rm );
841 load_reg( R_ECX, Rn );
842 MEM_WRITE_BYTE( R_ECX, R_EAX );
843 }
844 break;
845 case 0x1:
846 { /* MOV.W Rm, @Rn */
847 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
848 load_reg( R_ECX, Rn );
849 check_walign16( R_ECX );
850 MEM_READ_WORD( R_ECX, R_EAX );
851 store_reg( R_EAX, Rn );
852 }
853 break;
854 case 0x2:
855 { /* MOV.L Rm, @Rn */
856 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
857 load_reg( R_EAX, Rm );
858 load_reg( R_ECX, Rn );
859 check_walign32(R_ECX);
860 MEM_WRITE_LONG( R_ECX, R_EAX );
861 }
862 break;
863 case 0x4:
864 { /* MOV.B Rm, @-Rn */
865 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
866 load_reg( R_EAX, Rm );
867 load_reg( R_ECX, Rn );
868 ADD_imm8s_r32( -1, Rn );
869 store_reg( R_ECX, Rn );
870 MEM_WRITE_BYTE( R_ECX, R_EAX );
871 }
872 break;
873 case 0x5:
874 { /* MOV.W Rm, @-Rn */
875 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
876 load_reg( R_ECX, Rn );
877 check_walign16( R_ECX );
878 load_reg( R_EAX, Rm );
879 ADD_imm8s_r32( -2, R_ECX );
880 MEM_WRITE_WORD( R_ECX, R_EAX );
881 }
882 break;
883 case 0x6:
884 { /* MOV.L Rm, @-Rn */
885 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
886 load_reg( R_EAX, Rm );
887 load_reg( R_ECX, Rn );
888 check_walign32( R_ECX );
889 ADD_imm8s_r32( -4, R_ECX );
890 store_reg( R_ECX, Rn );
891 MEM_WRITE_LONG( R_ECX, R_EAX );
892 }
893 break;
894 case 0x7:
895 { /* DIV0S Rm, Rn */
896 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
897 load_reg( R_EAX, Rm );
898 load_reg( R_ECX, Rm );
899 SHR_imm8_r32( 31, R_EAX );
900 SHR_imm8_r32( 31, R_ECX );
901 store_spreg( R_EAX, R_M );
902 store_spreg( R_ECX, R_Q );
903 CMP_r32_r32( R_EAX, R_ECX );
904 SETE_t();
905 }
906 break;
907 case 0x8:
908 { /* TST Rm, Rn */
909 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
910 load_reg( R_EAX, Rm );
911 load_reg( R_ECX, Rn );
912 TEST_r32_r32( R_EAX, R_ECX );
913 SETE_t();
914 }
915 break;
916 case 0x9:
917 { /* AND Rm, Rn */
918 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
919 load_reg( R_EAX, Rm );
920 load_reg( R_ECX, Rn );
921 AND_r32_r32( R_EAX, R_ECX );
922 store_reg( R_ECX, Rn );
923 }
924 break;
925 case 0xA:
926 { /* XOR Rm, Rn */
927 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
928 load_reg( R_EAX, Rm );
929 load_reg( R_ECX, Rn );
930 XOR_r32_r32( R_EAX, R_ECX );
931 store_reg( R_ECX, Rn );
932 }
933 break;
934 case 0xB:
935 { /* OR Rm, Rn */
936 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
937 load_reg( R_EAX, Rm );
938 load_reg( R_ECX, Rn );
939 OR_r32_r32( R_EAX, R_ECX );
940 store_reg( R_ECX, Rn );
941 }
942 break;
943 case 0xC:
944 { /* CMP/STR Rm, Rn */
945 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
946 load_reg( R_EAX, Rm );
947 load_reg( R_ECX, Rn );
948 XOR_r32_r32( R_ECX, R_EAX );
949 TEST_r8_r8( R_AL, R_AL );
950 JE_rel8(13);
951 TEST_r8_r8( R_AH, R_AH ); // 2
952 JE_rel8(9);
953 SHR_imm8_r32( 16, R_EAX ); // 3
954 TEST_r8_r8( R_AL, R_AL ); // 2
955 JE_rel8(2);
956 TEST_r8_r8( R_AH, R_AH ); // 2
957 SETE_t();
958 }
959 break;
960 case 0xD:
961 { /* XTRCT Rm, Rn */
962 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
963 load_reg( R_EAX, Rm );
964 MOV_r32_r32( R_EAX, R_ECX );
965 SHR_imm8_r32( 16, R_EAX );
966 SHL_imm8_r32( 16, R_ECX );
967 OR_r32_r32( R_EAX, R_ECX );
968 store_reg( R_ECX, Rn );
969 }
970 break;
971 case 0xE:
972 { /* MULU.W Rm, Rn */
973 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
974 load_reg16u( R_EAX, Rm );
975 load_reg16u( R_ECX, Rn );
976 MUL_r32( R_ECX );
977 store_spreg( R_EAX, R_MACL );
978 }
979 break;
980 case 0xF:
981 { /* MULS.W Rm, Rn */
982 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
983 load_reg16s( R_EAX, Rm );
984 load_reg16s( R_ECX, Rn );
985 MUL_r32( R_ECX );
986 store_spreg( R_EAX, R_MACL );
987 }
988 break;
989 default:
990 UNDEF();
991 break;
992 }
993 break;
994 case 0x3:
995 switch( ir&0xF ) {
996 case 0x0:
997 { /* CMP/EQ Rm, Rn */
998 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
999 load_reg( R_EAX, Rm );
1000 load_reg( R_ECX, Rn );
1001 CMP_r32_r32( R_EAX, R_ECX );
1002 SETE_t();
1003 }
1004 break;
1005 case 0x2:
1006 { /* CMP/HS Rm, Rn */
1007 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1008 load_reg( R_EAX, Rm );
1009 load_reg( R_ECX, Rn );
1010 CMP_r32_r32( R_EAX, R_ECX );
1011 SETAE_t();
1012 }
1013 break;
1014 case 0x3:
1015 { /* CMP/GE Rm, Rn */
1016 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1017 load_reg( R_EAX, Rm );
1018 load_reg( R_ECX, Rn );
1019 CMP_r32_r32( R_EAX, R_ECX );
1020 SETGE_t();
1021 }
1022 break;
1023 case 0x4:
1024 { /* DIV1 Rm, Rn */
1025 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1026 load_reg( R_ECX, Rn );
1027 LDC_t();
1028 RCL1_r32( R_ECX ); // OP2
1029 SETC_r32( R_EDX ); // Q
1030 load_spreg( R_EAX, R_Q );
1031 CMP_sh4r_r32( R_M, R_EAX );
1032 JE_rel8(8);
1033 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_ECX );
1034 JMP_rel8(3);
1035 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_ECX );
1036 // TODO
1037 }
1038 break;
1039 case 0x5:
1040 { /* DMULU.L Rm, Rn */
1041 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1042 load_reg( R_EAX, Rm );
1043 load_reg( R_ECX, Rn );
1044 MUL_r32(R_ECX);
1045 store_spreg( R_EDX, R_MACH );
1046 store_spreg( R_EAX, R_MACL );
1047 }
1048 break;
1049 case 0x6:
1050 { /* CMP/HI Rm, Rn */
1051 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1052 load_reg( R_EAX, Rm );
1053 load_reg( R_ECX, Rn );
1054 CMP_r32_r32( R_EAX, R_ECX );
1055 SETA_t();
1056 }
1057 break;
1058 case 0x7:
1059 { /* CMP/GT Rm, Rn */
1060 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1061 load_reg( R_EAX, Rm );
1062 load_reg( R_ECX, Rn );
1063 CMP_r32_r32( R_EAX, R_ECX );
1064 SETG_t();
1065 }
1066 break;
1067 case 0x8:
1068 { /* SUB Rm, Rn */
1069 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1070 load_reg( R_EAX, Rm );
1071 load_reg( R_ECX, Rn );
1072 SUB_r32_r32( R_EAX, R_ECX );
1073 store_reg( R_ECX, Rn );
1074 }
1075 break;
1076 case 0xA:
1077 { /* SUBC Rm, Rn */
1078 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1079 load_reg( R_EAX, Rm );
1080 load_reg( R_ECX, Rn );
1081 LDC_t();
1082 SBB_r32_r32( R_EAX, R_ECX );
1083 store_reg( R_ECX, Rn );
1084 }
1085 break;
1086 case 0xB:
1087 { /* SUBV Rm, Rn */
1088 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1089 load_reg( R_EAX, Rm );
1090 load_reg( R_ECX, Rn );
1091 SUB_r32_r32( R_EAX, R_ECX );
1092 store_reg( R_ECX, Rn );
1093 SETO_t();
1094 }
1095 break;
1096 case 0xC:
1097 { /* ADD Rm, Rn */
1098 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1099 load_reg( R_EAX, Rm );
1100 load_reg( R_ECX, Rn );
1101 ADD_r32_r32( R_EAX, R_ECX );
1102 store_reg( R_ECX, Rn );
1103 }
1104 break;
1105 case 0xD:
1106 { /* DMULS.L Rm, Rn */
1107 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1108 load_reg( R_EAX, Rm );
1109 load_reg( R_ECX, Rn );
1110 IMUL_r32(R_ECX);
1111 store_spreg( R_EDX, R_MACH );
1112 store_spreg( R_EAX, R_MACL );
1113 }
1114 break;
1115 case 0xE:
1116 { /* ADDC Rm, Rn */
1117 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1118 load_reg( R_EAX, Rm );
1119 load_reg( R_ECX, Rn );
1120 LDC_t();
1121 ADC_r32_r32( R_EAX, R_ECX );
1122 store_reg( R_ECX, Rn );
1123 SETC_t();
1124 }
1125 break;
1126 case 0xF:
1127 { /* ADDV Rm, Rn */
1128 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1129 load_reg( R_EAX, Rm );
1130 load_reg( R_ECX, Rn );
1131 ADD_r32_r32( R_EAX, R_ECX );
1132 store_reg( R_ECX, Rn );
1133 SETO_t();
1134 }
1135 break;
1136 default:
1137 UNDEF();
1138 break;
1139 }
1140 break;
1141 case 0x4:
1142 switch( ir&0xF ) {
1143 case 0x0:
1144 switch( (ir&0xF0) >> 4 ) {
1145 case 0x0:
1146 { /* SHLL Rn */
1147 uint32_t Rn = ((ir>>8)&0xF);
1148 load_reg( R_EAX, Rn );
1149 SHL1_r32( R_EAX );
1150 store_reg( R_EAX, Rn );
1151 }
1152 break;
1153 case 0x1:
1154 { /* DT Rn */
1155 uint32_t Rn = ((ir>>8)&0xF);
1156 load_reg( R_EAX, Rn );
1157 ADD_imm8s_r32( -1, Rn );
1158 store_reg( R_EAX, Rn );
1159 SETE_t();
1160 }
1161 break;
1162 case 0x2:
1163 { /* SHAL Rn */
1164 uint32_t Rn = ((ir>>8)&0xF);
1165 load_reg( R_EAX, Rn );
1166 SHL1_r32( R_EAX );
1167 store_reg( R_EAX, Rn );
1168 }
1169 break;
1170 default:
1171 UNDEF();
1172 break;
1173 }
1174 break;
1175 case 0x1:
1176 switch( (ir&0xF0) >> 4 ) {
1177 case 0x0:
1178 { /* SHLR Rn */
1179 uint32_t Rn = ((ir>>8)&0xF);
1180 load_reg( R_EAX, Rn );
1181 SHR1_r32( R_EAX );
1182 store_reg( R_EAX, Rn );
1183 }
1184 break;
1185 case 0x1:
1186 { /* CMP/PZ Rn */
1187 uint32_t Rn = ((ir>>8)&0xF);
1188 load_reg( R_EAX, Rn );
1189 CMP_imm8s_r32( 0, R_EAX );
1190 SETGE_t();
1191 }
1192 break;
1193 case 0x2:
1194 { /* SHAR Rn */
1195 uint32_t Rn = ((ir>>8)&0xF);
1196 load_reg( R_EAX, Rn );
1197 SAR1_r32( R_EAX );
1198 store_reg( R_EAX, Rn );
1199 }
1200 break;
1201 default:
1202 UNDEF();
1203 break;
1204 }
1205 break;
1206 case 0x2:
1207 switch( (ir&0xF0) >> 4 ) {
1208 case 0x0:
1209 { /* STS.L MACH, @-Rn */
1210 uint32_t Rn = ((ir>>8)&0xF);
1211 load_reg( R_ECX, Rn );
1212 ADD_imm8s_r32( -4, Rn );
1213 store_reg( R_ECX, Rn );
1214 load_spreg( R_EAX, R_MACH );
1215 MEM_WRITE_LONG( R_ECX, R_EAX );
1216 }
1217 break;
1218 case 0x1:
1219 { /* STS.L MACL, @-Rn */
1220 uint32_t Rn = ((ir>>8)&0xF);
1221 load_reg( R_ECX, Rn );
1222 ADD_imm8s_r32( -4, Rn );
1223 store_reg( R_ECX, Rn );
1224 load_spreg( R_EAX, R_MACL );
1225 MEM_WRITE_LONG( R_ECX, R_EAX );
1226 }
1227 break;
1228 case 0x2:
1229 { /* STS.L PR, @-Rn */
1230 uint32_t Rn = ((ir>>8)&0xF);
1231 load_reg( R_ECX, Rn );
1232 ADD_imm8s_r32( -4, Rn );
1233 store_reg( R_ECX, Rn );
1234 load_spreg( R_EAX, R_PR );
1235 MEM_WRITE_LONG( R_ECX, R_EAX );
1236 }
1237 break;
1238 case 0x3:
1239 { /* STC.L SGR, @-Rn */
1240 uint32_t Rn = ((ir>>8)&0xF);
1241 load_reg( R_ECX, Rn );
1242 ADD_imm8s_r32( -4, Rn );
1243 store_reg( R_ECX, Rn );
1244 load_spreg( R_EAX, R_SGR );
1245 MEM_WRITE_LONG( R_ECX, R_EAX );
1246 }
1247 break;
1248 case 0x5:
1249 { /* STS.L FPUL, @-Rn */
1250 uint32_t Rn = ((ir>>8)&0xF);
1251 load_reg( R_ECX, Rn );
1252 ADD_imm8s_r32( -4, Rn );
1253 store_reg( R_ECX, Rn );
1254 load_spreg( R_EAX, R_FPUL );
1255 MEM_WRITE_LONG( R_ECX, R_EAX );
1256 }
1257 break;
1258 case 0x6:
1259 { /* STS.L FPSCR, @-Rn */
1260 uint32_t Rn = ((ir>>8)&0xF);
1261 load_reg( R_ECX, Rn );
1262 ADD_imm8s_r32( -4, Rn );
1263 store_reg( R_ECX, Rn );
1264 load_spreg( R_EAX, R_FPSCR );
1265 MEM_WRITE_LONG( R_ECX, R_EAX );
1266 }
1267 break;
1268 case 0xF:
1269 { /* STC.L DBR, @-Rn */
1270 uint32_t Rn = ((ir>>8)&0xF);
1271 load_reg( R_ECX, Rn );
1272 ADD_imm8s_r32( -4, Rn );
1273 store_reg( R_ECX, Rn );
1274 load_spreg( R_EAX, R_DBR );
1275 MEM_WRITE_LONG( R_ECX, R_EAX );
1276 }
1277 break;
1278 default:
1279 UNDEF();
1280 break;
1281 }
1282 break;
1283 case 0x3:
1284 switch( (ir&0x80) >> 7 ) {
1285 case 0x0:
1286 switch( (ir&0x70) >> 4 ) {
1287 case 0x0:
1288 { /* STC.L SR, @-Rn */
1289 uint32_t Rn = ((ir>>8)&0xF);
1290 load_reg( R_ECX, Rn );
1291 ADD_imm8s_r32( -4, Rn );
1292 store_reg( R_ECX, Rn );
1293 call_func0( sh4_read_sr );
1294 MEM_WRITE_LONG( R_ECX, R_EAX );
1295 }
1296 break;
1297 case 0x1:
1298 { /* STC.L GBR, @-Rn */
1299 uint32_t Rn = ((ir>>8)&0xF);
1300 load_reg( R_ECX, Rn );
1301 ADD_imm8s_r32( -4, Rn );
1302 store_reg( R_ECX, Rn );
1303 load_spreg( R_EAX, R_GBR );
1304 MEM_WRITE_LONG( R_ECX, R_EAX );
1305 }
1306 break;
1307 case 0x2:
1308 { /* STC.L VBR, @-Rn */
1309 uint32_t Rn = ((ir>>8)&0xF);
1310 load_reg( R_ECX, Rn );
1311 ADD_imm8s_r32( -4, Rn );
1312 store_reg( R_ECX, Rn );
1313 load_spreg( R_EAX, R_VBR );
1314 MEM_WRITE_LONG( R_ECX, R_EAX );
1315 }
1316 break;
1317 case 0x3:
1318 { /* STC.L SSR, @-Rn */
1319 uint32_t Rn = ((ir>>8)&0xF);
1320 load_reg( R_ECX, Rn );
1321 ADD_imm8s_r32( -4, Rn );
1322 store_reg( R_ECX, Rn );
1323 load_spreg( R_EAX, R_SSR );
1324 MEM_WRITE_LONG( R_ECX, R_EAX );
1325 }
1326 break;
1327 case 0x4:
1328 { /* STC.L SPC, @-Rn */
1329 uint32_t Rn = ((ir>>8)&0xF);
1330 load_reg( R_ECX, Rn );
1331 ADD_imm8s_r32( -4, Rn );
1332 store_reg( R_ECX, Rn );
1333 load_spreg( R_EAX, R_SPC );
1334 MEM_WRITE_LONG( R_ECX, R_EAX );
1335 }
1336 break;
1337 default:
1338 UNDEF();
1339 break;
1340 }
1341 break;
1342 case 0x1:
1343 { /* STC.L Rm_BANK, @-Rn */
1344 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1345 load_reg( R_ECX, Rn );
1346 ADD_imm8s_r32( -4, Rn );
1347 store_reg( R_ECX, Rn );
1348 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1349 MEM_WRITE_LONG( R_ECX, R_EAX );
1350 }
1351 break;
1352 }
1353 break;
1354 case 0x4:
1355 switch( (ir&0xF0) >> 4 ) {
1356 case 0x0:
1357 { /* ROTL Rn */
1358 uint32_t Rn = ((ir>>8)&0xF);
1359 load_reg( R_EAX, Rn );
1360 ROL1_r32( R_EAX );
1361 store_reg( R_EAX, Rn );
1362 SETC_t();
1363 }
1364 break;
1365 case 0x2:
1366 { /* ROTCL Rn */
1367 uint32_t Rn = ((ir>>8)&0xF);
1368 load_reg( R_EAX, Rn );
1369 LDC_t();
1370 RCL1_r32( R_EAX );
1371 store_reg( R_EAX, Rn );
1372 SETC_t();
1373 }
1374 break;
1375 default:
1376 UNDEF();
1377 break;
1378 }
1379 break;
1380 case 0x5:
1381 switch( (ir&0xF0) >> 4 ) {
1382 case 0x0:
1383 { /* ROTR Rn */
1384 uint32_t Rn = ((ir>>8)&0xF);
1385 load_reg( R_EAX, Rn );
1386 ROR1_r32( R_EAX );
1387 store_reg( R_EAX, Rn );
1388 SETC_t();
1389 }
1390 break;
1391 case 0x1:
1392 { /* CMP/PL Rn */
1393 uint32_t Rn = ((ir>>8)&0xF);
1394 load_reg( R_EAX, Rn );
1395 CMP_imm8s_r32( 0, R_EAX );
1396 SETG_t();
1397 }
1398 break;
1399 case 0x2:
1400 { /* ROTCR Rn */
1401 uint32_t Rn = ((ir>>8)&0xF);
1402 load_reg( R_EAX, Rn );
1403 LDC_t();
1404 RCR1_r32( R_EAX );
1405 store_reg( R_EAX, Rn );
1406 SETC_t();
1407 }
1408 break;
1409 default:
1410 UNDEF();
1411 break;
1412 }
1413 break;
1414 case 0x6:
1415 switch( (ir&0xF0) >> 4 ) {
1416 case 0x0:
1417 { /* LDS.L @Rm+, MACH */
1418 uint32_t Rm = ((ir>>8)&0xF);
1419 load_reg( R_EAX, Rm );
1420 MOV_r32_r32( R_EAX, R_ECX );
1421 ADD_imm8s_r32( 4, R_EAX );
1422 store_reg( R_EAX, Rm );
1423 MEM_READ_LONG( R_ECX, R_EAX );
1424 store_spreg( R_EAX, R_MACH );
1425 }
1426 break;
1427 case 0x1:
1428 { /* LDS.L @Rm+, MACL */
1429 uint32_t Rm = ((ir>>8)&0xF);
1430 load_reg( R_EAX, Rm );
1431 MOV_r32_r32( R_EAX, R_ECX );
1432 ADD_imm8s_r32( 4, R_EAX );
1433 store_reg( R_EAX, Rm );
1434 MEM_READ_LONG( R_ECX, R_EAX );
1435 store_spreg( R_EAX, R_MACL );
1436 }
1437 break;
1438 case 0x2:
1439 { /* LDS.L @Rm+, PR */
1440 uint32_t Rm = ((ir>>8)&0xF);
1441 load_reg( R_EAX, Rm );
1442 MOV_r32_r32( R_EAX, R_ECX );
1443 ADD_imm8s_r32( 4, R_EAX );
1444 store_reg( R_EAX, Rm );
1445 MEM_READ_LONG( R_ECX, R_EAX );
1446 store_spreg( R_EAX, R_PR );
1447 }
1448 break;
1449 case 0x3:
1450 { /* LDC.L @Rm+, SGR */
1451 uint32_t Rm = ((ir>>8)&0xF);
1452 load_reg( R_EAX, Rm );
1453 MOV_r32_r32( R_EAX, R_ECX );
1454 ADD_imm8s_r32( 4, R_EAX );
1455 store_reg( R_EAX, Rm );
1456 MEM_READ_LONG( R_ECX, R_EAX );
1457 store_spreg( R_EAX, R_SGR );
1458 }
1459 break;
1460 case 0x5:
1461 { /* LDS.L @Rm+, FPUL */
1462 uint32_t Rm = ((ir>>8)&0xF);
1463 load_reg( R_EAX, Rm );
1464 MOV_r32_r32( R_EAX, R_ECX );
1465 ADD_imm8s_r32( 4, R_EAX );
1466 store_reg( R_EAX, Rm );
1467 MEM_READ_LONG( R_ECX, R_EAX );
1468 store_spreg( R_EAX, R_FPUL );
1469 }
1470 break;
1471 case 0x6:
1472 { /* LDS.L @Rm+, FPSCR */
1473 uint32_t Rm = ((ir>>8)&0xF);
1474 load_reg( R_EAX, Rm );
1475 MOV_r32_r32( R_EAX, R_ECX );
1476 ADD_imm8s_r32( 4, R_EAX );
1477 store_reg( R_EAX, Rm );
1478 MEM_READ_LONG( R_ECX, R_EAX );
1479 store_spreg( R_EAX, R_FPSCR );
1480 }
1481 break;
1482 case 0xF:
1483 { /* LDC.L @Rm+, DBR */
1484 uint32_t Rm = ((ir>>8)&0xF);
1485 load_reg( R_EAX, Rm );
1486 MOV_r32_r32( R_EAX, R_ECX );
1487 ADD_imm8s_r32( 4, R_EAX );
1488 store_reg( R_EAX, Rm );
1489 MEM_READ_LONG( R_ECX, R_EAX );
1490 store_spreg( R_EAX, R_DBR );
1491 }
1492 break;
1493 default:
1494 UNDEF();
1495 break;
1496 }
1497 break;
1498 case 0x7:
1499 switch( (ir&0x80) >> 7 ) {
1500 case 0x0:
1501 switch( (ir&0x70) >> 4 ) {
1502 case 0x0:
1503 { /* LDC.L @Rm+, SR */
1504 uint32_t Rm = ((ir>>8)&0xF);
1505 load_reg( R_EAX, Rm );
1506 MOV_r32_r32( R_EAX, R_ECX );
1507 ADD_imm8s_r32( 4, R_EAX );
1508 store_reg( R_EAX, Rm );
1509 MEM_READ_LONG( R_ECX, R_EAX );
1510 call_func1( sh4_write_sr, R_EAX );
1511 sh4_x86.priv_checked = FALSE;
1512 sh4_x86.fpuen_checked = FALSE;
1513 }
1514 break;
1515 case 0x1:
1516 { /* LDC.L @Rm+, GBR */
1517 uint32_t Rm = ((ir>>8)&0xF);
1518 load_reg( R_EAX, Rm );
1519 MOV_r32_r32( R_EAX, R_ECX );
1520 ADD_imm8s_r32( 4, R_EAX );
1521 store_reg( R_EAX, Rm );
1522 MEM_READ_LONG( R_ECX, R_EAX );
1523 store_spreg( R_EAX, R_GBR );
1524 }
1525 break;
1526 case 0x2:
1527 { /* LDC.L @Rm+, VBR */
1528 uint32_t Rm = ((ir>>8)&0xF);
1529 load_reg( R_EAX, Rm );
1530 MOV_r32_r32( R_EAX, R_ECX );
1531 ADD_imm8s_r32( 4, R_EAX );
1532 store_reg( R_EAX, Rm );
1533 MEM_READ_LONG( R_ECX, R_EAX );
1534 store_spreg( R_EAX, R_VBR );
1535 }
1536 break;
1537 case 0x3:
1538 { /* LDC.L @Rm+, SSR */
1539 uint32_t Rm = ((ir>>8)&0xF);
1540 load_reg( R_EAX, Rm );
1541 MOV_r32_r32( R_EAX, R_ECX );
1542 ADD_imm8s_r32( 4, R_EAX );
1543 store_reg( R_EAX, Rm );
1544 MEM_READ_LONG( R_ECX, R_EAX );
1545 store_spreg( R_EAX, R_SSR );
1546 }
1547 break;
1548 case 0x4:
1549 { /* LDC.L @Rm+, SPC */
1550 uint32_t Rm = ((ir>>8)&0xF);
1551 load_reg( R_EAX, Rm );
1552 MOV_r32_r32( R_EAX, R_ECX );
1553 ADD_imm8s_r32( 4, R_EAX );
1554 store_reg( R_EAX, Rm );
1555 MEM_READ_LONG( R_ECX, R_EAX );
1556 store_spreg( R_EAX, R_SPC );
1557 }
1558 break;
1559 default:
1560 UNDEF();
1561 break;
1562 }
1563 break;
1564 case 0x1:
1565 { /* LDC.L @Rm+, Rn_BANK */
1566 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1567 load_reg( R_EAX, Rm );
1568 MOV_r32_r32( R_EAX, R_ECX );
1569 ADD_imm8s_r32( 4, R_EAX );
1570 store_reg( R_EAX, Rm );
1571 MEM_READ_LONG( R_ECX, R_EAX );
1572 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1573 }
1574 break;
1575 }
1576 break;
1577 case 0x8:
1578 switch( (ir&0xF0) >> 4 ) {
1579 case 0x0:
1580 { /* SHLL2 Rn */
1581 uint32_t Rn = ((ir>>8)&0xF);
1582 load_reg( R_EAX, Rn );
1583 SHL_imm8_r32( 2, R_EAX );
1584 store_reg( R_EAX, Rn );
1585 }
1586 break;
1587 case 0x1:
1588 { /* SHLL8 Rn */
1589 uint32_t Rn = ((ir>>8)&0xF);
1590 load_reg( R_EAX, Rn );
1591 SHL_imm8_r32( 8, R_EAX );
1592 store_reg( R_EAX, Rn );
1593 }
1594 break;
1595 case 0x2:
1596 { /* SHLL16 Rn */
1597 uint32_t Rn = ((ir>>8)&0xF);
1598 load_reg( R_EAX, Rn );
1599 SHL_imm8_r32( 16, R_EAX );
1600 store_reg( R_EAX, Rn );
1601 }
1602 break;
1603 default:
1604 UNDEF();
1605 break;
1606 }
1607 break;
1608 case 0x9:
1609 switch( (ir&0xF0) >> 4 ) {
1610 case 0x0:
1611 { /* SHLR2 Rn */
1612 uint32_t Rn = ((ir>>8)&0xF);
1613 load_reg( R_EAX, Rn );
1614 SHR_imm8_r32( 2, R_EAX );
1615 store_reg( R_EAX, Rn );
1616 }
1617 break;
1618 case 0x1:
1619 { /* SHLR8 Rn */
1620 uint32_t Rn = ((ir>>8)&0xF);
1621 load_reg( R_EAX, Rn );
1622 SHR_imm8_r32( 8, R_EAX );
1623 store_reg( R_EAX, Rn );
1624 }
1625 break;
1626 case 0x2:
1627 { /* SHLR16 Rn */
1628 uint32_t Rn = ((ir>>8)&0xF);
1629 load_reg( R_EAX, Rn );
1630 SHR_imm8_r32( 16, R_EAX );
1631 store_reg( R_EAX, Rn );
1632 }
1633 break;
1634 default:
1635 UNDEF();
1636 break;
1637 }
1638 break;
1639 case 0xA:
1640 switch( (ir&0xF0) >> 4 ) {
1641 case 0x0:
1642 { /* LDS Rm, MACH */
1643 uint32_t Rm = ((ir>>8)&0xF);
1644 load_reg( R_EAX, Rm );
1645 store_spreg( R_EAX, R_MACH );
1646 }
1647 break;
1648 case 0x1:
1649 { /* LDS Rm, MACL */
1650 uint32_t Rm = ((ir>>8)&0xF);
1651 load_reg( R_EAX, Rm );
1652 store_spreg( R_EAX, R_MACL );
1653 }
1654 break;
1655 case 0x2:
1656 { /* LDS Rm, PR */
1657 uint32_t Rm = ((ir>>8)&0xF);
1658 load_reg( R_EAX, Rm );
1659 store_spreg( R_EAX, R_PR );
1660 }
1661 break;
1662 case 0x3:
1663 { /* LDC Rm, SGR */
1664 uint32_t Rm = ((ir>>8)&0xF);
1665 load_reg( R_EAX, Rm );
1666 store_spreg( R_EAX, R_SGR );
1667 }
1668 break;
1669 case 0x5:
1670 { /* LDS Rm, FPUL */
1671 uint32_t Rm = ((ir>>8)&0xF);
1672 load_reg( R_EAX, Rm );
1673 store_spreg( R_EAX, R_FPUL );
1674 }
1675 break;
1676 case 0x6:
1677 { /* LDS Rm, FPSCR */
1678 uint32_t Rm = ((ir>>8)&0xF);
1679 load_reg( R_EAX, Rm );
1680 store_spreg( R_EAX, R_FPSCR );
1681 }
1682 break;
1683 case 0xF:
1684 { /* LDC Rm, DBR */
1685 uint32_t Rm = ((ir>>8)&0xF);
1686 load_reg( R_EAX, Rm );
1687 store_spreg( R_EAX, R_DBR );
1688 }
1689 break;
1690 default:
1691 UNDEF();
1692 break;
1693 }
1694 break;
1695 case 0xB:
1696 switch( (ir&0xF0) >> 4 ) {
1697 case 0x0:
1698 { /* JSR @Rn */
1699 uint32_t Rn = ((ir>>8)&0xF);
1700 if( sh4_x86.in_delay_slot ) {
1701 SLOTILLEGAL();
1702 } else {
1703 load_imm32( R_EAX, pc + 4 );
1704 store_spreg( R_EAX, R_PR );
1705 load_reg( R_EDI, Rn );
1706 sh4_x86.in_delay_slot = TRUE;
1707 INC_r32(R_ESI);
1708 return 0;
1709 }
1710 }
1711 break;
1712 case 0x1:
1713 { /* TAS.B @Rn */
1714 uint32_t Rn = ((ir>>8)&0xF);
1715 load_reg( R_ECX, Rn );
1716 MEM_READ_BYTE( R_ECX, R_EAX );
1717 TEST_r8_r8( R_AL, R_AL );
1718 SETE_t();
1719 OR_imm8_r8( 0x80, R_AL );
1720 MEM_WRITE_BYTE( R_ECX, R_EAX );
1721 }
1722 break;
1723 case 0x2:
1724 { /* JMP @Rn */
1725 uint32_t Rn = ((ir>>8)&0xF);
1726 if( sh4_x86.in_delay_slot ) {
1727 SLOTILLEGAL();
1728 } else {
1729 load_reg( R_EDI, Rn );
1730 sh4_x86.in_delay_slot = TRUE;
1731 INC_r32(R_ESI);
1732 return 0;
1733 }
1734 }
1735 break;
1736 default:
1737 UNDEF();
1738 break;
1739 }
1740 break;
1741 case 0xC:
1742 { /* SHAD Rm, Rn */
1743 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1744 /* Annoyingly enough, not directly convertible */
1745 load_reg( R_EAX, Rn );
1746 load_reg( R_ECX, Rm );
1747 CMP_imm32_r32( 0, R_ECX );
1748 JAE_rel8(9);
1750 NEG_r32( R_ECX ); // 2
1751 AND_imm8_r8( 0x1F, R_CL ); // 3
1752 SAR_r32_CL( R_EAX ); // 2
1753 JMP_rel8(5); // 2
1755 AND_imm8_r8( 0x1F, R_CL ); // 3
1756 SHL_r32_CL( R_EAX ); // 2
1758 store_reg( R_EAX, Rn );
1759 }
1760 break;
        case 0xD:
            { /* SHLD Rm, Rn */
            /* Dynamic logical shift: computes both the left-shift (EAX) and the
             * right-shift-by-negated-count (EDX) results, then selects one. */
            uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
            load_reg( R_EAX, Rn );
            load_reg( R_ECX, Rm );
            MOV_r32_r32( R_EAX, R_EDX );
            SHL_r32_CL( R_EAX );   // left-shift candidate
            NEG_r32( R_ECX );      // ECX = -Rm
            SHR_r32_CL( R_EDX );   // right-shift candidate
            CMP_imm8s_r32( 0, R_ECX );
            /* NOTE(review): CMOVAE tests CF, which is always clear after a compare
             * with 0, so the right-shift result appears to be selected
             * unconditionally — the left-shift path looks dead. A signed condition
             * on the ORIGINAL sign of Rm seems intended (care needed for
             * Rm == INT_MIN, where NEG leaves the value negative). Verify against
             * the SH4 SHLD spec before changing. */
            CMOVAE_r32_r32( R_EDX, R_EAX );
            store_reg( R_EAX, Rn );
            }
            break;
1776 case 0xE:
1777 switch( (ir&0x80) >> 7 ) {
1778 case 0x0:
1779 switch( (ir&0x70) >> 4 ) {
1780 case 0x0:
1781 { /* LDC Rm, SR */
1782 uint32_t Rm = ((ir>>8)&0xF);
1783 load_reg( R_EAX, Rm );
1784 call_func1( sh4_write_sr, R_EAX );
1785 sh4_x86.priv_checked = FALSE;
1786 sh4_x86.fpuen_checked = FALSE;
1787 }
1788 break;
1789 case 0x1:
1790 { /* LDC Rm, GBR */
1791 uint32_t Rm = ((ir>>8)&0xF);
1792 load_reg( R_EAX, Rm );
1793 store_spreg( R_EAX, R_GBR );
1794 }
1795 break;
1796 case 0x2:
1797 { /* LDC Rm, VBR */
1798 uint32_t Rm = ((ir>>8)&0xF);
1799 load_reg( R_EAX, Rm );
1800 store_spreg( R_EAX, R_VBR );
1801 }
1802 break;
1803 case 0x3:
1804 { /* LDC Rm, SSR */
1805 uint32_t Rm = ((ir>>8)&0xF);
1806 load_reg( R_EAX, Rm );
1807 store_spreg( R_EAX, R_SSR );
1808 }
1809 break;
1810 case 0x4:
1811 { /* LDC Rm, SPC */
1812 uint32_t Rm = ((ir>>8)&0xF);
1813 load_reg( R_EAX, Rm );
1814 store_spreg( R_EAX, R_SPC );
1815 }
1816 break;
1817 default:
1818 UNDEF();
1819 break;
1820 }
1821 break;
1822 case 0x1:
1823 { /* LDC Rm, Rn_BANK */
1824 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1825 load_reg( R_EAX, Rm );
1826 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1827 }
1828 break;
1829 }
1830 break;
                case 0xF:
                    { /* MAC.W @Rm+, @Rn+ */
                    /* NOTE(review): MAC.W is unimplemented here — the operands are
                     * decoded but no code is emitted, so the instruction silently
                     * becomes a no-op (Rm/Rn are not advanced and MAC is not
                     * updated). Needs an implementation or at least an UNDEF()/
                     * fallback to the interpreter. */
                    uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
                    }
                    break;
1836 }
1837 break;
1838 case 0x5:
1839 { /* MOV.L @(disp, Rm), Rn */
1840 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
1841 load_reg( R_ECX, Rm );
1842 ADD_imm8s_r32( disp, R_ECX );
1843 check_ralign32( R_ECX );
1844 MEM_READ_LONG( R_ECX, R_EAX );
1845 store_reg( R_EAX, Rn );
1846 }
1847 break;
1848 case 0x6:
1849 switch( ir&0xF ) {
1850 case 0x0:
1851 { /* MOV.B @Rm, Rn */
1852 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1853 load_reg( R_ECX, Rm );
1854 MEM_READ_BYTE( R_ECX, R_EAX );
1855 store_reg( R_ECX, Rn );
1856 }
1857 break;
1858 case 0x1:
1859 { /* MOV.W @Rm, Rn */
1860 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1861 load_reg( R_ECX, Rm );
1862 check_ralign16( R_ECX );
1863 MEM_READ_WORD( R_ECX, R_EAX );
1864 store_reg( R_EAX, Rn );
1865 }
1866 break;
1867 case 0x2:
1868 { /* MOV.L @Rm, Rn */
1869 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1870 load_reg( R_ECX, Rm );
1871 check_ralign32( R_ECX );
1872 MEM_READ_LONG( R_ECX, R_EAX );
1873 store_reg( R_EAX, Rn );
1874 }
1875 break;
1876 case 0x3:
1877 { /* MOV Rm, Rn */
1878 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1879 load_reg( R_EAX, Rm );
1880 store_reg( R_EAX, Rn );
1881 }
1882 break;
1883 case 0x4:
1884 { /* MOV.B @Rm+, Rn */
1885 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1886 load_reg( R_ECX, Rm );
1887 MOV_r32_r32( R_ECX, R_EAX );
1888 ADD_imm8s_r32( 1, R_EAX );
1889 store_reg( R_EAX, Rm );
1890 MEM_READ_BYTE( R_ECX, R_EAX );
1891 store_reg( R_EAX, Rn );
1892 }
1893 break;
1894 case 0x5:
1895 { /* MOV.W @Rm+, Rn */
1896 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1897 load_reg( R_EAX, Rm );
1898 check_ralign16( R_EAX );
1899 MOV_r32_r32( R_EAX, R_ECX );
1900 ADD_imm8s_r32( 2, R_EAX );
1901 store_reg( R_EAX, Rm );
1902 MEM_READ_WORD( R_ECX, R_EAX );
1903 store_reg( R_EAX, Rn );
1904 }
1905 break;
1906 case 0x6:
1907 { /* MOV.L @Rm+, Rn */
1908 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1909 load_reg( R_EAX, Rm );
1910 check_ralign32( R_ECX );
1911 MOV_r32_r32( R_EAX, R_ECX );
1912 ADD_imm8s_r32( 4, R_EAX );
1913 store_reg( R_EAX, Rm );
1914 MEM_READ_LONG( R_ECX, R_EAX );
1915 store_reg( R_EAX, Rn );
1916 }
1917 break;
1918 case 0x7:
1919 { /* NOT Rm, Rn */
1920 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1921 load_reg( R_EAX, Rm );
1922 NOT_r32( R_EAX );
1923 store_reg( R_EAX, Rn );
1924 }
1925 break;
1926 case 0x8:
1927 { /* SWAP.B Rm, Rn */
1928 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1929 load_reg( R_EAX, Rm );
1930 XCHG_r8_r8( R_AL, R_AH );
1931 store_reg( R_EAX, Rn );
1932 }
1933 break;
1934 case 0x9:
1935 { /* SWAP.W Rm, Rn */
1936 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1937 load_reg( R_EAX, Rm );
1938 MOV_r32_r32( R_EAX, R_ECX );
1939 SHL_imm8_r32( 16, R_ECX );
1940 SHR_imm8_r32( 16, R_EAX );
1941 OR_r32_r32( R_EAX, R_ECX );
1942 store_reg( R_ECX, Rn );
1943 }
1944 break;
1945 case 0xA:
1946 { /* NEGC Rm, Rn */
1947 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1948 load_reg( R_EAX, Rm );
1949 XOR_r32_r32( R_ECX, R_ECX );
1950 LDC_t();
1951 SBB_r32_r32( R_EAX, R_ECX );
1952 store_reg( R_ECX, Rn );
1953 SETC_t();
1954 }
1955 break;
1956 case 0xB:
1957 { /* NEG Rm, Rn */
1958 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1959 load_reg( R_EAX, Rm );
1960 NEG_r32( R_EAX );
1961 store_reg( R_EAX, Rn );
1962 }
1963 break;
1964 case 0xC:
1965 { /* EXTU.B Rm, Rn */
1966 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1967 load_reg( R_EAX, Rm );
1968 MOVZX_r8_r32( R_EAX, R_EAX );
1969 store_reg( R_EAX, Rn );
1970 }
1971 break;
1972 case 0xD:
1973 { /* EXTU.W Rm, Rn */
1974 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1975 load_reg( R_EAX, Rm );
1976 MOVZX_r16_r32( R_EAX, R_EAX );
1977 store_reg( R_EAX, Rn );
1978 }
1979 break;
1980 case 0xE:
1981 { /* EXTS.B Rm, Rn */
1982 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1983 load_reg( R_EAX, Rm );
1984 MOVSX_r8_r32( R_EAX, R_EAX );
1985 store_reg( R_EAX, Rn );
1986 }
1987 break;
1988 case 0xF:
1989 { /* EXTS.W Rm, Rn */
1990 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1991 load_reg( R_EAX, Rm );
1992 MOVSX_r16_r32( R_EAX, R_EAX );
1993 store_reg( R_EAX, Rn );
1994 }
1995 break;
1996 }
1997 break;
1998 case 0x7:
1999 { /* ADD #imm, Rn */
2000 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2001 load_reg( R_EAX, Rn );
2002 ADD_imm8s_r32( imm, R_EAX );
2003 store_reg( R_EAX, Rn );
2004 }
2005 break;
2006 case 0x8:
2007 switch( (ir&0xF00) >> 8 ) {
2008 case 0x0:
2009 { /* MOV.B R0, @(disp, Rn) */
2010 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2011 load_reg( R_EAX, 0 );
2012 load_reg( R_ECX, Rn );
2013 ADD_imm32_r32( disp, R_ECX );
2014 MEM_WRITE_BYTE( R_ECX, R_EAX );
2015 }
2016 break;
2017 case 0x1:
2018 { /* MOV.W R0, @(disp, Rn) */
2019 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2020 load_reg( R_ECX, Rn );
2021 load_reg( R_EAX, 0 );
2022 ADD_imm32_r32( disp, R_ECX );
2023 check_walign16( R_ECX );
2024 MEM_WRITE_WORD( R_ECX, R_EAX );
2025 }
2026 break;
2027 case 0x4:
2028 { /* MOV.B @(disp, Rm), R0 */
2029 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2030 load_reg( R_ECX, Rm );
2031 ADD_imm32_r32( disp, R_ECX );
2032 MEM_READ_BYTE( R_ECX, R_EAX );
2033 store_reg( R_EAX, 0 );
2034 }
2035 break;
2036 case 0x5:
2037 { /* MOV.W @(disp, Rm), R0 */
2038 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2039 load_reg( R_ECX, Rm );
2040 ADD_imm32_r32( disp, R_ECX );
2041 check_ralign16( R_ECX );
2042 MEM_READ_WORD( R_ECX, R_EAX );
2043 store_reg( R_EAX, 0 );
2044 }
2045 break;
2046 case 0x8:
2047 { /* CMP/EQ #imm, R0 */
2048 int32_t imm = SIGNEXT8(ir&0xFF);
2049 load_reg( R_EAX, 0 );
2050 CMP_imm8s_r32(imm, R_EAX);
2051 SETE_t();
2052 }
2053 break;
2054 case 0x9:
2055 { /* BT disp */
2056 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2057 if( sh4_x86.in_delay_slot ) {
2058 SLOTILLEGAL();
2059 } else {
2060 load_imm32( R_EDI, pc + 2 );
2061 CMP_imm8s_sh4r( 0, R_T );
2062 JE_rel8( 5 );
2063 load_imm32( R_EDI, disp + pc + 4 );
2064 INC_r32(R_ESI);
2065 return 1;
2066 }
2067 }
2068 break;
2069 case 0xB:
2070 { /* BF disp */
2071 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2072 if( sh4_x86.in_delay_slot ) {
2073 SLOTILLEGAL();
2074 } else {
2075 load_imm32( R_EDI, pc + 2 );
2076 CMP_imm8s_sh4r( 0, R_T );
2077 JNE_rel8( 5 );
2078 load_imm32( R_EDI, disp + pc + 4 );
2079 INC_r32(R_ESI);
2080 return 1;
2081 }
2082 }
2083 break;
2084 case 0xD:
2085 { /* BT/S disp */
2086 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2087 if( sh4_x86.in_delay_slot ) {
2088 SLOTILLEGAL();
2089 } else {
2090 load_imm32( R_EDI, pc + 2 );
2091 CMP_imm8s_sh4r( 0, R_T );
2092 JE_rel8( 5 );
2093 load_imm32( R_EDI, disp + pc + 4 );
2094 sh4_x86.in_delay_slot = TRUE;
2095 INC_r32(R_ESI);
2096 return 0;
2097 }
2098 }
2099 break;
2100 case 0xF:
2101 { /* BF/S disp */
2102 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2103 if( sh4_x86.in_delay_slot ) {
2104 SLOTILLEGAL();
2105 } else {
2106 load_imm32( R_EDI, pc + 2 );
2107 CMP_imm8s_sh4r( 0, R_T );
2108 JNE_rel8( 5 );
2109 load_imm32( R_EDI, disp + pc + 4 );
2110 sh4_x86.in_delay_slot = TRUE;
2111 INC_r32(R_ESI);
2112 return 0;
2113 }
2114 }
2115 break;
2116 default:
2117 UNDEF();
2118 break;
2119 }
2120 break;
2121 case 0x9:
2122 { /* MOV.W @(disp, PC), Rn */
2123 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1;
2124 if( sh4_x86.in_delay_slot ) {
2125 SLOTILLEGAL();
2126 } else {
2127 load_imm32( R_ECX, pc + disp + 4 );
2128 MEM_READ_WORD( R_ECX, R_EAX );
2129 store_reg( R_EAX, Rn );
2130 }
2131 }
2132 break;
2133 case 0xA:
2134 { /* BRA disp */
2135 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2136 if( sh4_x86.in_delay_slot ) {
2137 SLOTILLEGAL();
2138 } else {
2139 load_imm32( R_EDI, disp + pc + 4 );
2140 sh4_x86.in_delay_slot = TRUE;
2141 INC_r32(R_ESI);
2142 return 0;
2143 }
2144 }
2145 break;
2146 case 0xB:
2147 { /* BSR disp */
2148 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2149 if( sh4_x86.in_delay_slot ) {
2150 SLOTILLEGAL();
2151 } else {
2152 load_imm32( R_EAX, pc + 4 );
2153 store_spreg( R_EAX, R_PR );
2154 load_imm32( R_EDI, disp + pc + 4 );
2155 sh4_x86.in_delay_slot = TRUE;
2156 INC_r32(R_ESI);
2157 return 0;
2158 }
2159 }
2160 break;
2161 case 0xC:
2162 switch( (ir&0xF00) >> 8 ) {
2163 case 0x0:
2164 { /* MOV.B R0, @(disp, GBR) */
2165 uint32_t disp = (ir&0xFF);
2166 load_reg( R_EAX, 0 );
2167 load_spreg( R_ECX, R_GBR );
2168 ADD_imm32_r32( disp, R_ECX );
2169 MEM_WRITE_BYTE( R_ECX, R_EAX );
2170 }
2171 break;
2172 case 0x1:
2173 { /* MOV.W R0, @(disp, GBR) */
2174 uint32_t disp = (ir&0xFF)<<1;
2175 load_spreg( R_ECX, R_GBR );
2176 load_reg( R_EAX, 0 );
2177 ADD_imm32_r32( disp, R_ECX );
2178 check_walign16( R_ECX );
2179 MEM_WRITE_WORD( R_ECX, R_EAX );
2180 }
2181 break;
2182 case 0x2:
2183 { /* MOV.L R0, @(disp, GBR) */
2184 uint32_t disp = (ir&0xFF)<<2;
2185 load_spreg( R_ECX, R_GBR );
2186 load_reg( R_EAX, 0 );
2187 ADD_imm32_r32( disp, R_ECX );
2188 check_walign32( R_ECX );
2189 MEM_WRITE_LONG( R_ECX, R_EAX );
2190 }
2191 break;
2192 case 0x3:
2193 { /* TRAPA #imm */
2194 uint32_t imm = (ir&0xFF);
2195 if( sh4_x86.in_delay_slot ) {
2196 SLOTILLEGAL();
2197 } else {
2198 // TODO: Write TRA
2199 RAISE_EXCEPTION(EXC_TRAP);
2200 }
2201 }
2202 break;
2203 case 0x4:
2204 { /* MOV.B @(disp, GBR), R0 */
2205 uint32_t disp = (ir&0xFF);
2206 load_spreg( R_ECX, R_GBR );
2207 ADD_imm32_r32( disp, R_ECX );
2208 MEM_READ_BYTE( R_ECX, R_EAX );
2209 store_reg( R_EAX, 0 );
2210 }
2211 break;
2212 case 0x5:
2213 { /* MOV.W @(disp, GBR), R0 */
2214 uint32_t disp = (ir&0xFF)<<1;
2215 load_spreg( R_ECX, R_GBR );
2216 ADD_imm32_r32( disp, R_ECX );
2217 check_ralign16( R_ECX );
2218 MEM_READ_WORD( R_ECX, R_EAX );
2219 store_reg( R_EAX, 0 );
2220 }
2221 break;
2222 case 0x6:
2223 { /* MOV.L @(disp, GBR), R0 */
2224 uint32_t disp = (ir&0xFF)<<2;
2225 load_spreg( R_ECX, R_GBR );
2226 ADD_imm32_r32( disp, R_ECX );
2227 check_ralign32( R_ECX );
2228 MEM_READ_LONG( R_ECX, R_EAX );
2229 store_reg( R_EAX, 0 );
2230 }
2231 break;
2232 case 0x7:
2233 { /* MOVA @(disp, PC), R0 */
2234 uint32_t disp = (ir&0xFF)<<2;
2235 if( sh4_x86.in_delay_slot ) {
2236 SLOTILLEGAL();
2237 } else {
2238 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2239 store_reg( R_ECX, 0 );
2240 }
2241 }
2242 break;
2243 case 0x8:
2244 { /* TST #imm, R0 */
2245 uint32_t imm = (ir&0xFF);
2246 load_reg( R_EAX, 0 );
2247 TEST_imm32_r32( imm, R_EAX );
2248 SETE_t();
2249 }
2250 break;
2251 case 0x9:
2252 { /* AND #imm, R0 */
2253 uint32_t imm = (ir&0xFF);
2254 load_reg( R_EAX, 0 );
2255 AND_imm32_r32(imm, R_EAX);
2256 store_reg( R_EAX, 0 );
2257 }
2258 break;
2259 case 0xA:
2260 { /* XOR #imm, R0 */
2261 uint32_t imm = (ir&0xFF);
2262 load_reg( R_EAX, 0 );
2263 XOR_imm32_r32( imm, R_EAX );
2264 store_reg( R_EAX, 0 );
2265 }
2266 break;
2267 case 0xB:
2268 { /* OR #imm, R0 */
2269 uint32_t imm = (ir&0xFF);
2270 load_reg( R_EAX, 0 );
2271 OR_imm32_r32(imm, R_EAX);
2272 store_reg( R_EAX, 0 );
2273 }
2274 break;
2275 case 0xC:
2276 { /* TST.B #imm, @(R0, GBR) */
2277 uint32_t imm = (ir&0xFF);
2278 load_reg( R_EAX, 0);
2279 load_reg( R_ECX, R_GBR);
2280 ADD_r32_r32( R_EAX, R_ECX );
2281 MEM_READ_BYTE( R_ECX, R_EAX );
2282 TEST_imm8_r8( imm, R_EAX );
2283 SETE_t();
2284 }
2285 break;
2286 case 0xD:
2287 { /* AND.B #imm, @(R0, GBR) */
2288 uint32_t imm = (ir&0xFF);
2289 load_reg( R_EAX, 0 );
2290 load_spreg( R_ECX, R_GBR );
2291 ADD_r32_r32( R_EAX, R_ECX );
2292 MEM_READ_BYTE( R_ECX, R_EAX );
2293 AND_imm32_r32(imm, R_ECX );
2294 MEM_WRITE_BYTE( R_ECX, R_EAX );
2295 }
2296 break;
2297 case 0xE:
2298 { /* XOR.B #imm, @(R0, GBR) */
2299 uint32_t imm = (ir&0xFF);
2300 load_reg( R_EAX, 0 );
2301 load_spreg( R_ECX, R_GBR );
2302 ADD_r32_r32( R_EAX, R_ECX );
2303 MEM_READ_BYTE( R_ECX, R_EAX );
2304 XOR_imm32_r32( imm, R_EAX );
2305 MEM_WRITE_BYTE( R_ECX, R_EAX );
2306 }
2307 break;
2308 case 0xF:
2309 { /* OR.B #imm, @(R0, GBR) */
2310 uint32_t imm = (ir&0xFF);
2311 load_reg( R_EAX, 0 );
2312 load_spreg( R_ECX, R_GBR );
2313 ADD_r32_r32( R_EAX, R_ECX );
2314 MEM_READ_BYTE( R_ECX, R_EAX );
2315 OR_imm32_r32(imm, R_ECX );
2316 MEM_WRITE_BYTE( R_ECX, R_EAX );
2317 }
2318 break;
2319 }
2320 break;
2321 case 0xD:
2322 { /* MOV.L @(disp, PC), Rn */
2323 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2;
2324 if( sh4_x86.in_delay_slot ) {
2325 SLOTILLEGAL();
2326 } else {
2327 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2328 MEM_READ_LONG( R_ECX, R_EAX );
2329 store_reg( R_EAX, 0 );
2330 }
2331 }
2332 break;
2333 case 0xE:
2334 { /* MOV #imm, Rn */
2335 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2336 load_imm32( R_EAX, imm );
2337 store_reg( R_EAX, Rn );
2338 }
2339 break;
2340 case 0xF:
2341 switch( ir&0xF ) {
2342 case 0x0:
2343 { /* FADD FRm, FRn */
2344 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2345 check_fpuen();
2346 load_spreg( R_ECX, R_FPSCR );
2347 TEST_imm32_r32( FPSCR_PR, R_ECX );
2348 load_fr_bank( R_EDX );
2349 JNE_rel8(13);
2350 push_fr(R_EDX, FRm);
2351 push_fr(R_EDX, FRn);
2352 FADDP_st(1);
2353 pop_fr(R_EDX, FRn);
2354 JMP_rel8(11);
2355 push_dr(R_EDX, FRm);
2356 push_dr(R_EDX, FRn);
2357 FADDP_st(1);
2358 pop_dr(R_EDX, FRn);
2359 }
2360 break;
2361 case 0x1:
2362 { /* FSUB FRm, FRn */
2363 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2364 check_fpuen();
2365 load_spreg( R_ECX, R_FPSCR );
2366 TEST_imm32_r32( FPSCR_PR, R_ECX );
2367 load_fr_bank( R_EDX );
2368 JNE_rel8(13);
2369 push_fr(R_EDX, FRn);
2370 push_fr(R_EDX, FRm);
2371 FMULP_st(1);
2372 pop_fr(R_EDX, FRn);
2373 JMP_rel8(11);
2374 push_dr(R_EDX, FRn);
2375 push_dr(R_EDX, FRm);
2376 FMULP_st(1);
2377 pop_dr(R_EDX, FRn);
2378 }
2379 break;
2380 case 0x2:
2381 { /* FMUL FRm, FRn */
2382 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2383 check_fpuen();
2384 load_spreg( R_ECX, R_FPSCR );
2385 TEST_imm32_r32( FPSCR_PR, R_ECX );
2386 load_fr_bank( R_EDX );
2387 JNE_rel8(13);
2388 push_fr(R_EDX, FRm);
2389 push_fr(R_EDX, FRn);
2390 FMULP_st(1);
2391 pop_fr(R_EDX, FRn);
2392 JMP_rel8(11);
2393 push_dr(R_EDX, FRm);
2394 push_dr(R_EDX, FRn);
2395 FMULP_st(1);
2396 pop_dr(R_EDX, FRn);
2397 }
2398 break;
2399 case 0x3:
2400 { /* FDIV FRm, FRn */
2401 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2402 check_fpuen();
2403 load_spreg( R_ECX, R_FPSCR );
2404 TEST_imm32_r32( FPSCR_PR, R_ECX );
2405 load_fr_bank( R_EDX );
2406 JNE_rel8(13);
2407 push_fr(R_EDX, FRn);
2408 push_fr(R_EDX, FRm);
2409 FDIVP_st(1);
2410 pop_fr(R_EDX, FRn);
2411 JMP_rel8(11);
2412 push_dr(R_EDX, FRn);
2413 push_dr(R_EDX, FRm);
2414 FDIVP_st(1);
2415 pop_dr(R_EDX, FRn);
2416 }
2417 break;
2418 case 0x4:
2419 { /* FCMP/EQ FRm, FRn */
2420 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2421 check_fpuen();
2422 load_spreg( R_ECX, R_FPSCR );
2423 TEST_imm32_r32( FPSCR_PR, R_ECX );
2424 load_fr_bank( R_EDX );
2425 JNE_rel8(8);
2426 push_fr(R_EDX, FRm);
2427 push_fr(R_EDX, FRn);
2428 JMP_rel8(6);
2429 push_dr(R_EDX, FRm);
2430 push_dr(R_EDX, FRn);
2431 FCOMIP_st(1);
2432 SETE_t();
2433 FPOP_st();
2434 }
2435 break;
2436 case 0x5:
2437 { /* FCMP/GT FRm, FRn */
2438 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2439 check_fpuen();
2440 load_spreg( R_ECX, R_FPSCR );
2441 TEST_imm32_r32( FPSCR_PR, R_ECX );
2442 load_fr_bank( R_EDX );
2443 JNE_rel8(8);
2444 push_fr(R_EDX, FRm);
2445 push_fr(R_EDX, FRn);
2446 JMP_rel8(6);
2447 push_dr(R_EDX, FRm);
2448 push_dr(R_EDX, FRn);
2449 FCOMIP_st(1);
2450 SETA_t();
2451 FPOP_st();
2452 }
2453 break;
2454 case 0x6:
2455 { /* FMOV @(R0, Rm), FRn */
2456 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2457 check_fpuen();
2458 load_reg( R_EDX, Rm );
2459 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2460 check_ralign32( R_EDX );
2461 load_spreg( R_ECX, R_FPSCR );
2462 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2463 JNE_rel8(19);
2464 MEM_READ_LONG( R_EDX, R_EAX );
2465 load_fr_bank( R_ECX );
2466 store_fr( R_ECX, R_EAX, FRn );
2467 if( FRn&1 ) {
2468 JMP_rel8(46);
2469 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2470 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2471 load_xf_bank( R_ECX );
2472 } else {
2473 JMP_rel8(36);
2474 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2475 load_fr_bank( R_ECX );
2476 }
2477 store_fr( R_ECX, R_EAX, FRn&0x0E );
2478 store_fr( R_ECX, R_EDX, FRn|0x01 );
2479 }
2480 break;
2481 case 0x7:
2482 { /* FMOV FRm, @(R0, Rn) */
2483 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2484 check_fpuen();
2485 load_reg( R_EDX, Rn );
2486 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2487 check_walign32( R_EDX );
2488 load_spreg( R_ECX, R_FPSCR );
2489 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2490 JNE_rel8(20);
2491 load_fr_bank( R_ECX );
2492 load_fr( R_ECX, R_EAX, FRm );
2493 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2494 if( FRm&1 ) {
2495 JMP_rel8( 46 );
2496 load_xf_bank( R_ECX );
2497 } else {
2498 JMP_rel8( 39 );
2499 load_fr_bank( R_ECX );
2500 }
2501 load_fr( R_ECX, R_EAX, FRm&0x0E );
2502 load_fr( R_ECX, R_ECX, FRm|0x01 );
2503 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2504 }
2505 break;
2506 case 0x8:
2507 { /* FMOV @Rm, FRn */
2508 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2509 check_fpuen();
2510 load_reg( R_EDX, Rm );
2511 check_ralign32( R_EDX );
2512 load_spreg( R_ECX, R_FPSCR );
2513 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2514 JNE_rel8(19);
2515 MEM_READ_LONG( R_EDX, R_EAX );
2516 load_fr_bank( R_ECX );
2517 store_fr( R_ECX, R_EAX, FRn );
2518 if( FRn&1 ) {
2519 JMP_rel8(46);
2520 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2521 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2522 load_xf_bank( R_ECX );
2523 } else {
2524 JMP_rel8(36);
2525 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2526 load_fr_bank( R_ECX );
2527 }
2528 store_fr( R_ECX, R_EAX, FRn&0x0E );
2529 store_fr( R_ECX, R_EDX, FRn|0x01 );
2530 }
2531 break;
2532 case 0x9:
2533 { /* FMOV @Rm+, FRn */
2534 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2535 check_fpuen();
2536 load_reg( R_EDX, Rm );
2537 check_ralign32( R_EDX );
2538 MOV_r32_r32( R_EDX, R_EAX );
2539 load_spreg( R_ECX, R_FPSCR );
2540 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2541 JNE_rel8(25);
2542 ADD_imm8s_r32( 4, R_EAX );
2543 store_reg( R_EAX, Rm );
2544 MEM_READ_LONG( R_EDX, R_EAX );
2545 load_fr_bank( R_ECX );
2546 store_fr( R_ECX, R_EAX, FRn );
2547 if( FRn&1 ) {
2548 JMP_rel8(52);
2549 ADD_imm8s_r32( 8, R_EAX );
2550 store_reg(R_EAX, Rm);
2551 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2552 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2553 load_xf_bank( R_ECX );
2554 } else {
2555 JMP_rel8(42);
2556 ADD_imm8s_r32( 8, R_EAX );
2557 store_reg(R_EAX, Rm);
2558 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2559 load_fr_bank( R_ECX );
2560 }
2561 store_fr( R_ECX, R_EAX, FRn&0x0E );
2562 store_fr( R_ECX, R_EDX, FRn|0x01 );
2563 }
2564 break;
2565 case 0xA:
2566 { /* FMOV FRm, @Rn */
// Emit code to store FRm to @Rn: a 32-bit store when FPSCR.SZ=0, a
// 64-bit even/odd pair store (XF bank for odd FRm) when SZ=1.
2567 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2568 check_fpuen();
2569 load_reg( R_EDX, Rn );
2570 check_walign32( R_EDX );
2571 load_spreg( R_ECX, R_FPSCR );
2572 TEST_imm32_r32( FPSCR_SZ, R_ECX );
// Hand-counted branch to the SZ=1 path below.
2573 JNE_rel8(20);
2574 load_fr_bank( R_ECX );
2575 load_fr( R_ECX, R_EAX, FRm );
2576 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2577 if( FRm&1 ) {
// End of single path: skip the double-precision store code.
2578 JMP_rel8( 46 );
2579 load_xf_bank( R_ECX );
2580 } else {
2581 JMP_rel8( 39 );
2582 load_fr_bank( R_ECX );
2583 }
2584 load_fr( R_ECX, R_EAX, FRm&0x0E );
2585 load_fr( R_ECX, R_ECX, FRm|0x01 );
2586 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2587 }
2588 break;
2589 case 0xB:
2590 { /* FMOV FRm, @-Rn */
// Pre-decrement store: Rn is decremented by the access size (4 when
// SZ=0, 8 when SZ=1) and written back before the memory store. The two
// paths are mutually exclusive, so each applies its own full decrement.
2591 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2592 check_fpuen();
2593 load_reg( R_EDX, Rn );
2594 check_walign32( R_EDX );
2595 load_spreg( R_ECX, R_FPSCR );
2596 TEST_imm32_r32( FPSCR_SZ, R_ECX );
// Hand-counted branch to the SZ=1 path below.
2597 JNE_rel8(20);
2598 load_fr_bank( R_ECX );
2599 load_fr( R_ECX, R_EAX, FRm );
2600 ADD_imm8s_r32(-4,R_EDX);
2601 store_reg( R_EDX, Rn );
2602 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2603 if( FRm&1 ) {
// End of single path: skip the double-precision store code; odd FRm
// reads from the XF bank.
2604 JMP_rel8( 46 );
2605 load_xf_bank( R_ECX );
2606 } else {
2607 JMP_rel8( 39 );
2608 load_fr_bank( R_ECX );
2609 }
2610 load_fr( R_ECX, R_EAX, FRm&0x0E );
2611 load_fr( R_ECX, R_ECX, FRm|0x01 );
2612 ADD_imm8s_r32(-8,R_EDX);
2613 store_reg( R_EDX, Rn );
2614 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2615 }
2616 break;
2617 case 0xC:
2618 { /* FMOV FRm, FRn */
2619 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2620 /* As horrible as this looks, it's actually covering 5 separate cases:
2621 * 1. 32-bit fr-to-fr (SZ=0)
2622 * 2. 64-bit dr-to-dr (SZ=1, FRm&1 == 0, FRn&1 == 0 )
2623 * 3. 64-bit dr-to-xd (SZ=1, FRm&1 == 0, FRn&1 == 1 )
2624 * 4. 64-bit xd-to-dr (SZ=1, FRm&1 == 1, FRn&1 == 0 )
2625 * 5. 64-bit xd-to-xd (SZ=1, FRm&1 == 1, FRn&1 == 1 )
 * (NOTE(review): these cases were originally annotated with PR, but the
 * emitted test below is on FPSCR_SZ — the transfer-size bit, which is
 * what actually distinguishes the 32- and 64-bit forms.)
2626 */
2627 check_fpuen();
2628 load_spreg( R_ECX, R_FPSCR );
// R_EDX holds the FR bank pointer throughout; R_ECX is re-used for the
// XF bank where an XD register is involved.
2629 load_fr_bank( R_EDX );
2630 TEST_imm32_r32( FPSCR_SZ, R_ECX );
// Skip the 32-bit move (8 bytes) when SZ is set.
2631 JNE_rel8(8);
2632 load_fr( R_EDX, R_EAX, FRm ); // SZ=0 branch
2633 store_fr( R_EDX, R_EAX, FRn );
2634 if( FRm&1 ) {
// Source is an XD register (odd FRm): fetch the pair from the XF bank.
2635 JMP_rel8(22);
2636 load_xf_bank( R_ECX );
2637 load_fr( R_ECX, R_EAX, FRm-1 );
2638 if( FRn&1 ) {
2639 load_fr( R_ECX, R_EDX, FRm );
2640 store_fr( R_ECX, R_EAX, FRn-1 );
2641 store_fr( R_ECX, R_EDX, FRn );
2642 } else /* FRn&1 == 0 */ {
// Destination is a DR register: store via the FR bank kept in R_EDX.
2643 load_fr( R_ECX, R_ECX, FRm );
2644 store_fr( R_EDX, R_EAX, FRn-1 );
2645 store_fr( R_EDX, R_ECX, FRn );
2646 }
2647 } else /* FRm&1 == 0 */ {
2648 if( FRn&1 ) {
// dr-to-xd: read from FR bank, write to XF bank.
2649 JMP_rel8(22);
2650 load_xf_bank( R_ECX );
2651 load_fr( R_EDX, R_EAX, FRm );
2652 load_fr( R_EDX, R_EDX, FRm+1 );
2653 store_fr( R_ECX, R_EAX, FRn-1 );
2654 store_fr( R_ECX, R_EDX, FRn );
2655 } else /* FRn&1 == 0 */ {
// dr-to-dr: both halves stay in the FR bank.
2656 JMP_rel8(12);
2657 load_fr( R_EDX, R_EAX, FRm );
2658 load_fr( R_EDX, R_ECX, FRm+1 );
2659 store_fr( R_EDX, R_EAX, FRn );
2660 store_fr( R_EDX, R_ECX, FRn+1 );
2661 }
2662 }
2663 }
2664 break;
2665 case 0xD:
// Sub-decode on bits 4..7: miscellaneous FPU operations.
2666 switch( (ir&0xF0) >> 4 ) {
2667 case 0x0:
2668 { /* FSTS FPUL, FRn */
// Copy FPUL into FRn of the current FR bank.
2669 uint32_t FRn = ((ir>>8)&0xF);
2670 check_fpuen();
2671 load_fr_bank( R_ECX );
2672 load_spreg( R_EAX, R_FPUL );
2673 store_fr( R_ECX, R_EAX, FRn );
2674 }
2675 break;
2676 case 0x1:
2677 { /* FLDS FRm, FPUL */
// Copy FRm of the current FR bank into FPUL.
2678 uint32_t FRm = ((ir>>8)&0xF);
2679 check_fpuen();
2680 load_fr_bank( R_ECX );
2681 load_fr( R_ECX, R_EAX, FRm );
2682 store_spreg( R_EAX, R_FPUL );
2683 }
2684 break;
2685 case 0x2:
2686 { /* FLOAT FPUL, FRn */
// Integer-to-float conversion via x87 FILD; FPSCR.PR selects whether
// the result is popped as a single (pop_fr) or a double (pop_dr).
2687 uint32_t FRn = ((ir>>8)&0xF);
2688 check_fpuen();
2689 load_spreg( R_ECX, R_FPSCR );
2690 load_spreg(R_EDX, REG_OFFSET(fr_bank));
2691 FILD_sh4r(R_FPUL);
2692 TEST_imm32_r32( FPSCR_PR, R_ECX );
2693 JNE_rel8(5);
2694 pop_fr( R_EDX, FRn );
2695 JMP_rel8(3);
2696 pop_dr( R_EDX, FRn );
2697 }
2698 break;
2699 case 0x3:
2700 { /* FTRC FRm, FPUL */
2701 uint32_t FRm = ((ir>>8)&0xF);
2702 check_fpuen();
// NOTE(review): FTRC is not implemented yet — only the FPU-enable
// check is emitted; the float-to-int truncation itself is missing.
2703 // TODO
2704 }
2705 break;
2706 case 0x4:
2707 { /* FNEG FRn */
// Negate FRn (DRn when PR=1) via x87 FCHS; PR selects the
// single- or double-precision push/pop pair.
2708 uint32_t FRn = ((ir>>8)&0xF);
2709 check_fpuen();
2710 load_spreg( R_ECX, R_FPSCR );
2711 TEST_imm32_r32( FPSCR_PR, R_ECX );
2712 load_fr_bank( R_EDX );
2713 JNE_rel8(10);
2714 push_fr(R_EDX, FRn);
2715 FCHS_st0();
2716 pop_fr(R_EDX, FRn);
2717 JMP_rel8(8);
2718 push_dr(R_EDX, FRn);
2719 FCHS_st0();
2720 pop_dr(R_EDX, FRn);
2721 }
2722 break;
2723 case 0x5:
2724 { /* FABS FRn */
// Absolute value via x87 FABS; same single/double split as FNEG.
// The trailing numbers are the emitted byte sizes used to derive
// the rel8 offsets above.
2725 uint32_t FRn = ((ir>>8)&0xF);
2726 check_fpuen();
2727 load_spreg( R_ECX, R_FPSCR );
2728 load_fr_bank( R_EDX );
2729 TEST_imm32_r32( FPSCR_PR, R_ECX );
2730 JNE_rel8(10);
2731 push_fr(R_EDX, FRn); // 3
2732 FABS_st0(); // 2
2733 pop_fr( R_EDX, FRn); //3
2734 JMP_rel8(8); // 2
2735 push_dr(R_EDX, FRn);
2736 FABS_st0();
2737 pop_dr(R_EDX, FRn);
2738 }
2739 break;
2740 case 0x6:
2741 { /* FSQRT FRn */
// Square root via x87 FSQRT; same single/double split as FNEG.
2742 uint32_t FRn = ((ir>>8)&0xF);
2743 check_fpuen();
2744 load_spreg( R_ECX, R_FPSCR );
2745 TEST_imm32_r32( FPSCR_PR, R_ECX );
2746 load_fr_bank( R_EDX );
2747 JNE_rel8(10);
2748 push_fr(R_EDX, FRn);
2749 FSQRT_st0();
2750 pop_fr(R_EDX, FRn);
2751 JMP_rel8(8);
2752 push_dr(R_EDX, FRn);
2753 FSQRT_st0();
2754 pop_dr(R_EDX, FRn);
2755 }
2756 break;
2757 case 0x7:
2758 { /* FSRRA FRn */
// 1/sqrt(FRn), emitted as FLD1 / value / FSQRT / FDIVP. The JNE
// makes it a no-op (beyond the checks) when PR=1, where FSRRA is
// not defined.
2759 uint32_t FRn = ((ir>>8)&0xF);
2760 check_fpuen();
2761 load_spreg( R_ECX, R_FPSCR );
2762 TEST_imm32_r32( FPSCR_PR, R_ECX );
2763 load_fr_bank( R_EDX );
2764 JNE_rel8(12); // PR=0 only
2765 FLD1_st0();
2766 push_fr(R_EDX, FRn);
2767 FSQRT_st0();
2768 FDIVP_st(1);
2769 pop_fr(R_EDX, FRn);
2770 }
2771 break;
2772 case 0x8:
2773 { /* FLDI0 FRn */
2774 uint32_t FRn = ((ir>>8)&0xF);
2775 /* IFF PR=0 */
2776 check_fpuen();
2777 load_spreg( R_ECX, R_FPSCR );
2778 TEST_imm32_r32( FPSCR_PR, R_ECX );
2779 JNE_rel8(8);
// +0.0f: the IEEE754 single-precision bit pattern is all zeroes.
2780 XOR_r32_r32( R_EAX, R_EAX );
2781 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2782 store_fr( R_ECX, R_EAX, FRn );
2783 }
2784 break;
2785 case 0x9:
2786 { /* FLDI1 FRn */
2787 uint32_t FRn = ((ir>>8)&0xF);
2788 /* IFF PR=0 */
2789 check_fpuen();
2790 load_spreg( R_ECX, R_FPSCR );
2791 TEST_imm32_r32( FPSCR_PR, R_ECX );
2792 JNE_rel8(11);
// 0x3F800000 is the IEEE754 single-precision encoding of 1.0f.
2793 load_imm32(R_EAX, 0x3F800000);
2794 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2795 store_fr( R_ECX, R_EAX, FRn );
2796 }
2797 break;
2798 case 0xA:
2799 { /* FCNVSD FPUL, FRn */
// Convert the single-precision value in FPUL to a double in DRn via the
// x87 stack. Only defined when FPSCR.PR=1; the JE skips the emitted body
// otherwise.
2800 uint32_t FRn = ((ir>>8)&0xF);
// FIX(review): check_fpuen() was emitted twice here, duplicating the
// FPU-enable guard in the generated code. The duplicate preceded the JE
// below, so removing it does not affect the hand-counted rel8 offset.
2801 check_fpuen();
2803 load_spreg( R_ECX, R_FPSCR );
2804 TEST_imm32_r32( FPSCR_PR, R_ECX );
2805 JE_rel8(9); // only when PR=1
2806 load_fr_bank( R_ECX );
2807 push_fpul();
2808 pop_dr( R_ECX, FRn );
2809 }
2810 break;
2811 case 0xB:
2812 { /* FCNVDS FRm, FPUL */
// Convert DRm to a single in FPUL; only defined when PR=1 (JE skips
// the body otherwise).
2813 uint32_t FRm = ((ir>>8)&0xF);
2814 check_fpuen();
2815 load_spreg( R_ECX, R_FPSCR );
2816 TEST_imm32_r32( FPSCR_PR, R_ECX );
2817 JE_rel8(9); // only when PR=1
2818 load_fr_bank( R_ECX );
2819 push_dr( R_ECX, FRm );
2820 pop_fpul();
2821 }
2822 break;
2823 case 0xE:
2824 { /* FIPR FVm, FVn */
2825 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3);
// NOTE(review): FIPR is not implemented yet — only the FPU-enable
// check is emitted.
2826 check_fpuen();
2827 }
2828 break;
2829 case 0xF:
2830 switch( (ir&0x100) >> 8 ) {
2831 case 0x0:
2832 { /* FSCA FPUL, FRn */
2833 uint32_t FRn = ((ir>>9)&0x7)<<1;
// NOTE(review): FSCA is not implemented yet.
2834 check_fpuen();
2835 }
2836 break;
2837 case 0x1:
2838 switch( (ir&0x200) >> 9 ) {
2839 case 0x0:
2840 { /* FTRV XMTRX, FVn */
2841 uint32_t FVn = ((ir>>10)&0x3);
// NOTE(review): FTRV is not implemented yet.
2842 check_fpuen();
2843 }
2844 break;
2845 case 0x1:
2846 switch( (ir&0xC00) >> 10 ) {
2847 case 0x0:
2848 { /* FSCHG */
// Toggle FPSCR.SZ (single vs paired transfer size).
2849 check_fpuen();
2850 load_spreg( R_ECX, R_FPSCR );
2851 XOR_imm32_r32( FPSCR_SZ, R_ECX );
2852 store_spreg( R_ECX, R_FPSCR );
2853 }
2854 break;
2855 case 0x2:
2856 { /* FRCHG */
// Toggle FPSCR.FR (swap the FR/XF register banks).
2857 check_fpuen();
2858 load_spreg( R_ECX, R_FPSCR );
2859 XOR_imm32_r32( FPSCR_FR, R_ECX );
2860 store_spreg( R_ECX, R_FPSCR );
2861 }
2862 break;
2863 case 0x3:
2864 { /* UNDEF */
// Undefined encoding: raise slot-illegal inside a delay slot,
// general-illegal otherwise, and terminate the block.
2865 if( sh4_x86.in_delay_slot ) {
2866 RAISE_EXCEPTION(EXC_SLOT_ILLEGAL);
2867 } else {
2868 RAISE_EXCEPTION(EXC_ILLEGAL);
2869 }
2870 return 1;
2871 }
2872 break;
2873 default:
2874 UNDEF();
2875 break;
2876 }
2877 break;
2878 }
2879 break;
2880 }
2881 break;
2882 default:
2883 UNDEF();
2884 break;
2885 }
2886 break;
2887 case 0xE:
2888 { /* FMAC FR0, FRm, FRn */
// FRn += FR0 * FRm, computed on the x87 stack; FPSCR.PR selects the
// single- or double-precision push/pop variants. The 18/16 offsets are
// hand-counted byte sizes of the two emitted bodies.
2889 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2890 check_fpuen();
2891 load_spreg( R_ECX, R_FPSCR );
2892 load_spreg( R_EDX, REG_OFFSET(fr_bank));
2893 TEST_imm32_r32( FPSCR_PR, R_ECX );
2894 JNE_rel8(18);
2895 push_fr( R_EDX, 0 );
2896 push_fr( R_EDX, FRm );
2897 FMULP_st(1);
2898 push_fr( R_EDX, FRn );
2899 FADDP_st(1);
2900 pop_fr( R_EDX, FRn );
2901 JMP_rel8(16);
2902 push_dr( R_EDX, 0 );
2903 push_dr( R_EDX, FRm );
2904 FMULP_st(1);
2905 push_dr( R_EDX, FRn );
2906 FADDP_st(1);
2907 pop_dr( R_EDX, FRn );
2908 }
2909 break;
2910 default:
2911 UNDEF();
2912 break;
2913 }
2914 break;
2915 }
// Emit an increment of ESI after each translated instruction — R_ESI
// presumably holds the per-block instruction/cycle counter; TODO confirm
// against the block prologue (outside the visible region).
2917 INC_r32(R_ESI);
// Return 1 to end translation after the instruction in a delay slot;
// 0 continues the block.
2918 if( sh4_x86.in_delay_slot ) {
2919 sh4_x86.in_delay_slot = FALSE;
2920 return 1;
2921 }
2922 return 0;
2923 }
.