filename | src/sh4/sh4x86.c |
changeset | 380:2e8166bf6832 |
prev | 377:fa18743f6905 |
next | 381:aade6c9aca4d |
author | nkeynes |
date | Wed Sep 12 11:31:16 2007 +0000 (15 years ago) |
permissions | -rw-r--r-- |
last change | Fix load_spreg/store_spreg Fix PREF Add jump target debug checking |
1 /**
2 * $Id: sh4x86.c,v 1.7 2007-09-12 11:31:16 nkeynes Exp $
3 *
4 * SH4 => x86 translation. This version does no real optimization, it just
5 * outputs straight-line x86 code - it mainly exists to provide a baseline
6 * to test the optimizing versions against.
7 *
8 * Copyright (c) 2007 Nathan Keynes.
9 *
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
14 *
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
19 */
21 #include <assert.h>
#include <stdint.h>
#include <stdlib.h>
23 #ifndef NDEBUG
24 #define DEBUG_JUMPS 1
25 #endif
27 #include "sh4/sh4core.h"
28 #include "sh4/sh4trans.h"
29 #include "sh4/x86op.h"
30 #include "clock.h"
32 #define DEFAULT_BACKPATCH_SIZE 4096
34 /**
35 * Struct to manage internal translation state. This state is not saved -
36 * it is only valid between calls to sh4_translate_begin_block() and
37 * sh4_translate_end_block()
38 */
39 struct sh4_x86_state {
40 gboolean in_delay_slot;
41 gboolean priv_checked; /* true if we've already checked the cpu mode. */
42 gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
44 /* Allocated memory for the (block-wide) back-patch list */
45 uint32_t **backpatch_list;
46 uint32_t backpatch_posn;
47 uint32_t backpatch_size;
48 };
50 #define EXIT_DATA_ADDR_READ 0
51 #define EXIT_DATA_ADDR_WRITE 7
52 #define EXIT_ILLEGAL 14
53 #define EXIT_SLOT_ILLEGAL 21
54 #define EXIT_FPU_DISABLED 28
55 #define EXIT_SLOT_FPU_DISABLED 35
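/* These values are byte offsets into the exception stub table that
 * sh4_translate_end_block() emits after the normal block epilogue: each stub
 * is a 5-byte PUSH imm32 (the SH4 exception code) followed by a 2-byte short
 * JMP, hence the stride of 7.  The JE_exit/JNE_exit macros (from x86op.h)
 * presumably emit a forward jump whose displacement is later fixed up by
 * sh4_x86_do_backpatch() to land on the corresponding stub.
 */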
57 static struct sh4_x86_state sh4_x86;
59 void sh4_x86_init()
60 {
61 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
62 sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
63 }
66 static void sh4_x86_add_backpatch( uint8_t *ptr )
67 {
68 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
69 sh4_x86.backpatch_size <<= 1;
70 sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
71 assert( sh4_x86.backpatch_list != NULL );
72 }
73 sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
74 }
76 static void sh4_x86_do_backpatch( uint8_t *reloc_base )
77 {
78 unsigned int i;
79 for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
80 *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
81 }
82 }
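/* The fix-up above works because a jcc/jmp rel32 target is computed as
 * (address of the next instruction) + rel32, ie (p + 4) + *p for an operand
 * stored at p.  Adding (reloc_base - p - 4) to a stored EXIT_* offset
 * therefore retargets the jump to reloc_base + EXIT_*.  Below is a minimal
 * sketch (an assumption, not the actual x86op.h definition) of how a
 * conditional exit macro could cooperate with this list.
 */
#if 0   /* illustrative only */
#define JNE_exit(rel) do { \
    OP(0x0F); OP(0x85);                   /* jne rel32 */                  \
    sh4_x86_add_backpatch(xlat_output);   /* remember where rel32 lives */ \
    OP32(rel);                            /* placeholder = EXIT_* offset */ \
} while(0)
#endif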
84 /**
85 * Emit an instruction to load an SH4 reg into a real register
86 */
87 static inline void load_reg( int x86reg, int sh4reg )
88 {
89 /* mov [bp+n], reg */
90 OP(0x8B);
91 OP(0x45 + (x86reg<<3));
92 OP(REG_OFFSET(r[sh4reg]));
93 }
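/* Encoding note: 0x8B is MOV r32,r/m32; the ModRM byte 0x45 + (x86reg<<3)
 * selects mod=01 (disp8), reg=x86reg, r/m=EBP, so the source operand is
 * [ebp + disp8] with disp8 = REG_OFFSET(r[sh4reg]).  This relies on every
 * SH4 general register lying within the first 128 bytes of sh4r, which EBP
 * points to.
 */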
95 static inline void load_reg16s( int x86reg, int sh4reg )
96 {
97 OP(0x0F);
98 OP(0xBF);
99 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
100 }
102 static inline void load_reg16u( int x86reg, int sh4reg )
103 {
104 OP(0x0F);
105 OP(0xB7);
106 MODRM_r32_sh4r(x86reg, REG_OFFSET(r[sh4reg]));
108 }
110 #define load_spreg( x86reg, regoff ) MOV_sh4r_r32( regoff, x86reg )
111 #define store_spreg( x86reg, regoff ) MOV_r32_sh4r( x86reg, regoff )
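/* These delegate to the generic sh4r-relative move emitters in x86op.h;
 * regoff is a byte offset within sh4r (eg R_PR or REG_OFFSET(pc)), so they
 * cover the control/system registers that load_reg/store_reg (which only
 * index r[0..15]) cannot reach.
 */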
112 /**
113 * Emit an instruction to load an immediate value into a register
114 */
115 static inline void load_imm32( int x86reg, uint32_t value ) {
116 /* mov #value, reg */
117 OP(0xB8 + x86reg);
118 OP32(value);
119 }
121 /**
122 * Emit an instruction to store an SH4 reg (RN)
123 */
124 static inline void store_reg( int x86reg, int sh4reg ) {
125 /* mov reg, [bp+n] */
126 OP(0x89);
127 OP(0x45 + (x86reg<<3));
128 OP(REG_OFFSET(r[sh4reg]));
129 }
131 #define load_fr_bank(bankreg) load_spreg( bankreg, REG_OFFSET(fr_bank))
133 /**
134 * Load an FR register (single-precision floating point) into an integer x86
135 * register (eg for register-to-register moves)
136 */
137 static inline void load_fr( int bankreg, int x86reg, int frm )
138 {
139 OP(0x8B); OP(0x40+bankreg+(x86reg<<3)); OP((frm^1)<<2);
140 }
142 /**
143 * Store an integer x86 register into an FR register (single-precision
144 * floating point) (eg for register-to-register moves)
145 */
146 static inline void store_fr( int bankreg, int x86reg, int frn )
147 {
148 OP(0x89); OP(0x40+bankreg+(x86reg<<3)); OP((frn^1)<<2);
149 }
152 /**
153 * Load a pointer to the back (XF) floating-point bank into the specified x86
154 * register. The bankreg must have been previously loaded with FPSCR.
155 * NB: 10 bytes
156 */
157 static inline void load_xf_bank( int bankreg )
158 {
159 SHR_imm8_r32( (21 - 6), bankreg ); // Extract bit 21 then *64 for bank size
160 AND_imm8s_r32( 0x40, bankreg ); // Complete extraction
161 OP(0x8D); OP(0x44+(bankreg<<3)); OP(0x28+bankreg); OP(REG_OFFSET(fr)); // LEA [ebp+bankreg+disp], bankreg
162 }
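/* Worked example: FPSCR.FR is bit 21 (0x00200000).  Shifting right by 15
 * leaves that bit at position 6, ie 0x40 = 64 = 16 registers * 4 bytes --
 * exactly the size of one bank -- so bankreg ends up holding 0 or 64.
 * The LEA then forms ebp + bankreg + REG_OFFSET(fr), selecting one of the
 * two 64-byte fp banks inside sh4r.
 */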
164 /**
165 * Push FPUL (as a 32-bit float) onto the FPU stack
166 */
167 static inline void push_fpul( )
168 {
169 OP(0xD9); OP(0x45); OP(R_FPUL);
170 }
172 /**
173 * Pop FPUL (as a 32-bit float) from the FPU stack
174 */
175 static inline void pop_fpul( )
176 {
177 OP(0xD9); OP(0x5D); OP(R_FPUL);
178 }
180 /**
181 * Push a 32-bit float onto the FPU stack, with bankreg previously loaded
182 * with the location of the current fp bank.
183 */
184 static inline void push_fr( int bankreg, int frm )
185 {
186 OP(0xD9); OP(0x40 + bankreg); OP((frm^1)<<2); // FLD.S [bankreg + frm^1*4]
187 }
189 /**
190 * Pop a 32-bit float from the FPU stack and store it back into the fp bank,
191 * with bankreg previously loaded with the location of the current fp bank.
192 */
193 static inline void pop_fr( int bankreg, int frm )
194 {
195 OP(0xD9); OP(0x58 + bankreg); OP((frm^1)<<2); // FST.S [bankreg + frm^1*4]
196 }
198 /**
199 * Push a 64-bit double onto the FPU stack, with bankreg previously loaded
200 * with the location of the current fp bank.
201 */
202 static inline void push_dr( int bankreg, int frm )
203 {
204 OP(0xDD); OP(0x40 + bankreg); OP(frm<<2); // FLD.D [bankreg + frm*4]
205 }
207 static inline void pop_dr( int bankreg, int frm )
208 {
209 OP(0xDD); OP(0x58 + bankreg); OP(frm<<2); // FST.D [bankreg + frm*4]
210 }
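/* The ^1 in the single-precision accessors above swaps the two words of each
 * aligned pair: the fr banks are presumably stored so that push_dr/pop_dr can
 * load and store 64-bit doubles directly with the x86's little-endian FPU,
 * which puts FRn at byte offset (n^1)*4 rather than n*4 within the bank.
 */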
212 /**
213 * Note: clobbers EAX to make the indirect call - this isn't usually
214 * a problem since the callee will usually clobber it anyway.
215 */
216 static inline void call_func0( void *ptr )
217 {
218 load_imm32(R_EAX, (uint32_t)ptr);
219 CALL_r32(R_EAX);
220 }
222 static inline void call_func1( void *ptr, int arg1 )
223 {
224 PUSH_r32(arg1);
225 call_func0(ptr);
226 ADD_imm8s_r32( 4, R_ESP );
227 }
229 static inline void call_func2( void *ptr, int arg1, int arg2 )
230 {
231 PUSH_r32(arg2);
232 PUSH_r32(arg1);
233 call_func0(ptr);
234 ADD_imm8s_r32( 8, R_ESP );
235 }
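/* For example, call_func2( sh4_write_long, R_ECX, R_EAX ) emits the usual
 * cdecl sequence:
 *     push eax                  ; arg2 (value)
 *     push ecx                  ; arg1 (address)
 *     mov  eax, sh4_write_long
 *     call eax
 *     add  esp, 8               ; caller cleans up its arguments
 * Both helpers rely on the callee preserving EBP/ESI/EDI as cdecl requires.
 */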
237 /**
238 * Write a double (64-bit) value into memory, with the first word in arg2a, and
239 * the second in arg2b
240 * NB: 30 bytes
241 */
242 static inline void MEM_WRITE_DOUBLE( int addr, int arg2a, int arg2b )
243 {
244 ADD_imm8s_r32( 4, addr );
245 PUSH_r32(addr);
246 PUSH_r32(arg2b);
247 ADD_imm8s_r32( -4, addr );
248 PUSH_r32(addr);
249 PUSH_r32(arg2a);
250 call_func0(sh4_write_long);
251 ADD_imm8s_r32( 8, R_ESP );
252 call_func0(sh4_write_long);
253 ADD_imm8s_r32( 8, R_ESP );
254 }
256 /**
257 * Read a double (64-bit) value from memory, writing the first word into arg2a
258 * and the second into arg2b. The addr must not be in EAX
259 * NB: 27 bytes
260 */
261 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
262 {
263 PUSH_r32(addr);
264 call_func0(sh4_read_long);
265 POP_r32(addr);
266 PUSH_r32(R_EAX);
267 ADD_imm8s_r32( 4, addr );
268 PUSH_r32(addr);
269 call_func0(sh4_read_long);
270 ADD_imm8s_r32( 4, R_ESP );
271 MOV_r32_r32( R_EAX, arg2b );
272 POP_r32(arg2a);
273 }
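/* The "must not be in EAX" restriction exists because the first call returns
 * its result in EAX; if addr were EAX, the POP that restores the address
 * would overwrite that result before it has been pushed for safekeeping.
 */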
275 /* Exception checks - Note that all exception checks will clobber EAX */
276 static void check_priv( )
277 {
278 if( !sh4_x86.priv_checked ) {
279 sh4_x86.priv_checked = TRUE;
280 load_spreg( R_EAX, R_SR );
281 AND_imm32_r32( SR_MD, R_EAX );
282 if( sh4_x86.in_delay_slot ) {
283 JE_exit( EXIT_SLOT_ILLEGAL );
284 } else {
285 JE_exit( EXIT_ILLEGAL );
286 }
287 }
288 }
290 static void check_fpuen( )
291 {
292 if( !sh4_x86.fpuen_checked ) {
293 sh4_x86.fpuen_checked = TRUE;
294 load_spreg( R_EAX, R_SR );
295 AND_imm32_r32( SR_FD, R_EAX );
296 if( sh4_x86.in_delay_slot ) {
297 JNE_exit(EXIT_SLOT_FPU_DISABLED);
298 } else {
299 JNE_exit(EXIT_FPU_DISABLED);
300 }
301 }
302 }
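/* priv_checked / fpuen_checked cache the fact that the SR.MD / SR.FD test has
 * already been emitted once in this block, so repeated privileged or FPU
 * instructions only pay for the check once.  They are reset to FALSE wherever
 * the translated code can rewrite SR (LDC ...,SR, LDC.L ...,SR and RTE below),
 * and again at the start of every block.
 */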
304 static void check_ralign16( int x86reg )
305 {
306 TEST_imm32_r32( 0x00000001, x86reg );
307 JNE_exit(EXIT_DATA_ADDR_READ);
308 }
310 static void check_walign16( int x86reg )
311 {
312 TEST_imm32_r32( 0x00000001, x86reg );
313 JNE_exit(EXIT_DATA_ADDR_WRITE);
314 }
316 static void check_ralign32( int x86reg )
317 {
318 TEST_imm32_r32( 0x00000003, x86reg );
319 JNE_exit(EXIT_DATA_ADDR_READ);
320 }
321 static void check_walign32( int x86reg )
322 {
323 TEST_imm32_r32( 0x00000003, x86reg );
324 JNE_exit(EXIT_DATA_ADDR_WRITE);
325 }
328 #define UNDEF()
329 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
330 #define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
331 #define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
332 #define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
333 #define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
334 #define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
335 #define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
337 #define RAISE_EXCEPTION( exc ) load_imm32( R_EAX, exc ); call_func1( sh4_raise_exception, R_EAX );
338 #define SLOTILLEGAL() RAISE_EXCEPTION(EXC_SLOT_ILLEGAL); return 1
342 /**
343 * Emit the 'start of block' assembly. Sets up the stack frame and saves
344 * SI/DI as required.
345 */
346 void sh4_translate_begin_block()
347 {
348 PUSH_r32(R_EBP);
349 /* mov &sh4r, ebp */
350 load_imm32( R_EBP, (uint32_t)&sh4r );
351 PUSH_r32(R_EDI);
352 PUSH_r32(R_ESI);
353 XOR_r32_r32(R_ESI, R_ESI);
355 sh4_x86.in_delay_slot = FALSE;
356 sh4_x86.priv_checked = FALSE;
357 sh4_x86.fpuen_checked = FALSE;
358 sh4_x86.backpatch_posn = 0;
359 }
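/* Register conventions for the generated code, as used by exit_block() and
 * sh4_translate_end_block() below: EBP points at sh4r (so SH4 state is
 * addressed as [ebp+disp]), EDI accumulates the PC value to write back on
 * exit (branches load their target into it), and ESI counts the SH4
 * instructions executed, which is converted to cycles by multiplying with
 * sh4_cpu_period at block exit.
 */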
361 /**
362 * Exit the block early (ie branch out), conditionally or otherwise
363 */
364 void exit_block( )
365 {
366 store_spreg( R_EDI, REG_OFFSET(pc) );
367 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
368 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
369 MUL_r32( R_ESI );
370 ADD_r32_r32( R_EAX, R_ECX );
371 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
372 XOR_r32_r32( R_EAX, R_EAX );
373 POP_r32(R_ESI);
374 POP_r32(R_EDI);
375 POP_r32(R_EBP);
376 RET();
377 }
379 /**
380 * Flush any open regs back to memory, restore SI/DI, update the PC, etc.
381 */
382 void sh4_translate_end_block( sh4addr_t pc ) {
383 assert( !sh4_x86.in_delay_slot ); // should never stop here
384 // Normal termination - save PC, cycle count
385 exit_block( );
387 uint8_t *end_ptr = xlat_output;
388 // Exception termination. Jump block for various exception codes:
389 PUSH_imm32( EXC_DATA_ADDR_READ );
390 JMP_rel8( 33, target1 );
391 PUSH_imm32( EXC_DATA_ADDR_WRITE );
392 JMP_rel8( 26, target2 );
393 PUSH_imm32( EXC_ILLEGAL );
394 JMP_rel8( 19, target3 );
395 PUSH_imm32( EXC_SLOT_ILLEGAL );
396 JMP_rel8( 12, target4 );
397 PUSH_imm32( EXC_FPU_DISABLED );
398 JMP_rel8( 5, target5 );
399 PUSH_imm32( EXC_SLOT_FPU_DISABLED );
400 // target
401 JMP_TARGET(target1);
402 JMP_TARGET(target2);
403 JMP_TARGET(target3);
404 JMP_TARGET(target4);
405 JMP_TARGET(target5);
406 load_spreg( R_ECX, REG_OFFSET(pc) );
407 ADD_r32_r32( R_ESI, R_ECX );
408 ADD_r32_r32( R_ESI, R_ECX );
409 store_spreg( R_ECX, REG_OFFSET(pc) );
410 MOV_moff32_EAX( (uint32_t)&sh4_cpu_period );
411 load_spreg( R_ECX, REG_OFFSET(slice_cycle) );
412 MUL_r32( R_ESI );
413 ADD_r32_r32( R_EAX, R_ECX );
414 store_spreg( R_ECX, REG_OFFSET(slice_cycle) );
416 load_imm32( R_EAX, (uint32_t)sh4_raise_exception ); // 6
417 CALL_r32( R_EAX ); // 2
418 ADD_imm8s_r32( 4, R_ESP ); // discard the pushed exception code
POP_r32(R_ESI);
POP_r32(R_EDI);
POP_r32(R_EBP);
419 RET();
421 sh4_x86_do_backpatch( end_ptr );
422 }
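/* A minimal sketch of how the three entry points above fit together.  This
 * driver is hypothetical (the real caller lives elsewhere, eg sh4trans.c) and
 * glosses over delay-slot handling and output-buffer management.
 */
#if 0   /* illustrative only */
static void * sh4_translate_block_sketch( sh4addr_t start )
{
    void *entry = xlat_output;              /* code is emitted at xlat_output */
    uint32_t pc = start;
    sh4_translate_begin_block();
    /* translate until an instruction reports the end of the basic block */
    while( sh4_x86_translate_instruction( pc ) == 0 ) {
        pc += 2;                            /* SH4 instructions are 16 bits */
    }
    pc += 2;                                /* step past the final instruction */
    sh4_translate_end_block( pc );
    return entry;
}
#endif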
424 /**
425 * Translate a single instruction. Delayed branches are handled specially
426 * by translating both the branch and the delayed instruction as a single
427 * unit (as the delay-slot instruction must execute before the branch takes
428 * effect).
429 * @return true if the instruction marks the end of a basic block
430 * (eg a branch or an instruction that raises an exception).
431 */
432 uint32_t sh4_x86_translate_instruction( uint32_t pc )
433 {
434 uint16_t ir = sh4_read_word( pc );
436 switch( (ir&0xF000) >> 12 ) {
437 case 0x0:
438 switch( ir&0xF ) {
439 case 0x2:
440 switch( (ir&0x80) >> 7 ) {
441 case 0x0:
442 switch( (ir&0x70) >> 4 ) {
443 case 0x0:
444 { /* STC SR, Rn */
445 uint32_t Rn = ((ir>>8)&0xF);
446 call_func0(sh4_read_sr);
447 store_reg( R_EAX, Rn );
448 }
449 break;
450 case 0x1:
451 { /* STC GBR, Rn */
452 uint32_t Rn = ((ir>>8)&0xF);
453 load_spreg( R_EAX, R_GBR );
454 store_reg( R_EAX, Rn );
455 }
456 break;
457 case 0x2:
458 { /* STC VBR, Rn */
459 uint32_t Rn = ((ir>>8)&0xF);
460 load_spreg( R_EAX, R_VBR );
461 store_reg( R_EAX, Rn );
462 }
463 break;
464 case 0x3:
465 { /* STC SSR, Rn */
466 uint32_t Rn = ((ir>>8)&0xF);
467 load_spreg( R_EAX, R_SSR );
468 store_reg( R_EAX, Rn );
469 }
470 break;
471 case 0x4:
472 { /* STC SPC, Rn */
473 uint32_t Rn = ((ir>>8)&0xF);
474 load_spreg( R_EAX, R_SPC );
475 store_reg( R_EAX, Rn );
476 }
477 break;
478 default:
479 UNDEF();
480 break;
481 }
482 break;
483 case 0x1:
484 { /* STC Rm_BANK, Rn */
485 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
486 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
487 store_reg( R_EAX, Rn );
488 }
489 break;
490 }
491 break;
492 case 0x3:
493 switch( (ir&0xF0) >> 4 ) {
494 case 0x0:
495 { /* BSRF Rn */
496 uint32_t Rn = ((ir>>8)&0xF);
497 if( sh4_x86.in_delay_slot ) {
498 SLOTILLEGAL();
499 } else {
500 load_imm32( R_EAX, pc + 4 );
501 store_spreg( R_EAX, R_PR );
502 load_reg( R_EDI, Rn );
503 ADD_r32_r32( R_EAX, R_EDI );
504 sh4_x86.in_delay_slot = TRUE;
505 INC_r32(R_ESI);
506 return 0;
507 }
508 }
509 break;
510 case 0x2:
511 { /* BRAF Rn */
512 uint32_t Rn = ((ir>>8)&0xF);
513 if( sh4_x86.in_delay_slot ) {
514 SLOTILLEGAL();
515 } else {
516 load_reg( R_EDI, Rn );
load_imm32( R_EAX, pc + 4 ); // BRAF target is Rn + PC + 4, as in BSRF above
ADD_r32_r32( R_EAX, R_EDI );
517 sh4_x86.in_delay_slot = TRUE;
518 INC_r32(R_ESI);
519 return 0;
520 }
521 }
522 break;
523 case 0x8:
524 { /* PREF @Rn */
525 uint32_t Rn = ((ir>>8)&0xF);
526 load_reg( R_EAX, Rn );
527 PUSH_r32( R_EAX );
528 AND_imm32_r32( 0xFC000000, R_EAX );
529 CMP_imm32_r32( 0xE0000000, R_EAX );
530 JNE_rel8(7, end);
531 call_func0( sh4_flush_store_queue );
532 JMP_TARGET(end);
533 ADD_imm8s_r32( 4, R_ESP );
534 }
535 break;
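/* Note on PREF above: masking the address with 0xFC000000 and comparing with
 * 0xE0000000 selects the store-queue region 0xE0000000-0xE3FFFFFF, the only
 * case where PREF has an architecturally visible effect (flushing SQ0/SQ1 to
 * memory).  The value pushed before the test is the argument to
 * sh4_flush_store_queue(), and the ADD ESP,4 discards it on both paths.
 */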
536 case 0x9:
537 { /* OCBI @Rn */
538 uint32_t Rn = ((ir>>8)&0xF);
539 }
540 break;
541 case 0xA:
542 { /* OCBP @Rn */
543 uint32_t Rn = ((ir>>8)&0xF);
544 }
545 break;
546 case 0xB:
547 { /* OCBWB @Rn */
548 uint32_t Rn = ((ir>>8)&0xF);
549 }
550 break;
551 case 0xC:
552 { /* MOVCA.L R0, @Rn */
553 uint32_t Rn = ((ir>>8)&0xF);
554 load_reg( R_EAX, 0 );
555 load_reg( R_ECX, Rn );
556 check_walign32( R_ECX );
557 MEM_WRITE_LONG( R_ECX, R_EAX );
558 }
559 break;
560 default:
561 UNDEF();
562 break;
563 }
564 break;
565 case 0x4:
566 { /* MOV.B Rm, @(R0, Rn) */
567 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
568 load_reg( R_EAX, 0 );
569 load_reg( R_ECX, Rn );
570 ADD_r32_r32( R_EAX, R_ECX );
571 load_reg( R_EAX, Rm );
572 MEM_WRITE_BYTE( R_ECX, R_EAX );
573 }
574 break;
575 case 0x5:
576 { /* MOV.W Rm, @(R0, Rn) */
577 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
578 load_reg( R_EAX, 0 );
579 load_reg( R_ECX, Rn );
580 ADD_r32_r32( R_EAX, R_ECX );
581 check_walign16( R_ECX );
582 load_reg( R_EAX, Rm );
583 MEM_WRITE_WORD( R_ECX, R_EAX );
584 }
585 break;
586 case 0x6:
587 { /* MOV.L Rm, @(R0, Rn) */
588 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
589 load_reg( R_EAX, 0 );
590 load_reg( R_ECX, Rn );
591 ADD_r32_r32( R_EAX, R_ECX );
592 check_walign32( R_ECX );
593 load_reg( R_EAX, Rm );
594 MEM_WRITE_LONG( R_ECX, R_EAX );
595 }
596 break;
597 case 0x7:
598 { /* MUL.L Rm, Rn */
599 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
600 load_reg( R_EAX, Rm );
601 load_reg( R_ECX, Rn );
602 MUL_r32( R_ECX );
603 store_spreg( R_EAX, R_MACL );
604 }
605 break;
606 case 0x8:
607 switch( (ir&0xFF0) >> 4 ) {
608 case 0x0:
609 { /* CLRT */
610 CLC();
611 SETC_t();
612 }
613 break;
614 case 0x1:
615 { /* SETT */
616 STC();
617 SETC_t();
618 }
619 break;
620 case 0x2:
621 { /* CLRMAC */
622 XOR_r32_r32(R_EAX, R_EAX);
623 store_spreg( R_EAX, R_MACL );
624 store_spreg( R_EAX, R_MACH );
625 }
626 break;
627 case 0x3:
628 { /* LDTLB */
629 }
630 break;
631 case 0x4:
632 { /* CLRS */
633 CLC();
634 SETC_sh4r(R_S);
635 }
636 break;
637 case 0x5:
638 { /* SETS */
639 STC();
640 SETC_sh4r(R_S);
641 }
642 break;
643 default:
644 UNDEF();
645 break;
646 }
647 break;
648 case 0x9:
649 switch( (ir&0xF0) >> 4 ) {
650 case 0x0:
651 { /* NOP */
652 /* Do nothing. Well, we could emit an 0x90, but what would really be the point? */
653 }
654 break;
655 case 0x1:
656 { /* DIV0U */
657 XOR_r32_r32( R_EAX, R_EAX );
658 store_spreg( R_EAX, R_Q );
659 store_spreg( R_EAX, R_M );
660 store_spreg( R_EAX, R_T );
661 }
662 break;
663 case 0x2:
664 { /* MOVT Rn */
665 uint32_t Rn = ((ir>>8)&0xF);
666 load_spreg( R_EAX, R_T );
667 store_reg( R_EAX, Rn );
668 }
669 break;
670 default:
671 UNDEF();
672 break;
673 }
674 break;
675 case 0xA:
676 switch( (ir&0xF0) >> 4 ) {
677 case 0x0:
678 { /* STS MACH, Rn */
679 uint32_t Rn = ((ir>>8)&0xF);
680 load_spreg( R_EAX, R_MACH );
681 store_reg( R_EAX, Rn );
682 }
683 break;
684 case 0x1:
685 { /* STS MACL, Rn */
686 uint32_t Rn = ((ir>>8)&0xF);
687 load_spreg( R_EAX, R_MACL );
688 store_reg( R_EAX, Rn );
689 }
690 break;
691 case 0x2:
692 { /* STS PR, Rn */
693 uint32_t Rn = ((ir>>8)&0xF);
694 load_spreg( R_EAX, R_PR );
695 store_reg( R_EAX, Rn );
696 }
697 break;
698 case 0x3:
699 { /* STC SGR, Rn */
700 uint32_t Rn = ((ir>>8)&0xF);
701 load_spreg( R_EAX, R_SGR );
702 store_reg( R_EAX, Rn );
703 }
704 break;
705 case 0x5:
706 { /* STS FPUL, Rn */
707 uint32_t Rn = ((ir>>8)&0xF);
708 load_spreg( R_EAX, R_FPUL );
709 store_reg( R_EAX, Rn );
710 }
711 break;
712 case 0x6:
713 { /* STS FPSCR, Rn */
714 uint32_t Rn = ((ir>>8)&0xF);
715 load_spreg( R_EAX, R_FPSCR );
716 store_reg( R_EAX, Rn );
717 }
718 break;
719 case 0xF:
720 { /* STC DBR, Rn */
721 uint32_t Rn = ((ir>>8)&0xF);
722 load_spreg( R_EAX, R_DBR );
723 store_reg( R_EAX, Rn );
724 }
725 break;
726 default:
727 UNDEF();
728 break;
729 }
730 break;
731 case 0xB:
732 switch( (ir&0xFF0) >> 4 ) {
733 case 0x0:
734 { /* RTS */
735 if( sh4_x86.in_delay_slot ) {
736 SLOTILLEGAL();
737 } else {
738 load_spreg( R_EDI, R_PR );
739 sh4_x86.in_delay_slot = TRUE;
740 INC_r32(R_ESI);
741 return 0;
742 }
743 }
744 break;
745 case 0x1:
746 { /* SLEEP */
747 /* TODO */
748 }
749 break;
750 case 0x2:
751 { /* RTE */
752 check_priv();
753 if( sh4_x86.in_delay_slot ) {
754 SLOTILLEGAL();
755 } else {
756 load_spreg( R_EDI, R_SPC ); // RTE returns to SPC, not PR
757 load_spreg( R_EAX, R_SSR );
758 call_func1( sh4_write_sr, R_EAX );
759 sh4_x86.in_delay_slot = TRUE;
760 sh4_x86.priv_checked = FALSE;
761 sh4_x86.fpuen_checked = FALSE;
762 INC_r32(R_ESI);
763 return 0;
764 }
765 }
766 break;
767 default:
768 UNDEF();
769 break;
770 }
771 break;
772 case 0xC:
773 { /* MOV.B @(R0, Rm), Rn */
774 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
775 load_reg( R_EAX, 0 );
776 load_reg( R_ECX, Rm );
777 ADD_r32_r32( R_EAX, R_ECX );
778 MEM_READ_BYTE( R_ECX, R_EAX );
779 store_reg( R_EAX, Rn );
780 }
781 break;
782 case 0xD:
783 { /* MOV.W @(R0, Rm), Rn */
784 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
785 load_reg( R_EAX, 0 );
786 load_reg( R_ECX, Rm );
787 ADD_r32_r32( R_EAX, R_ECX );
788 check_ralign16( R_ECX );
789 MEM_READ_WORD( R_ECX, R_EAX );
790 store_reg( R_EAX, Rn );
791 }
792 break;
793 case 0xE:
794 { /* MOV.L @(R0, Rm), Rn */
795 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
796 load_reg( R_EAX, 0 );
797 load_reg( R_ECX, Rm );
798 ADD_r32_r32( R_EAX, R_ECX );
799 check_ralign32( R_ECX );
800 MEM_READ_LONG( R_ECX, R_EAX );
801 store_reg( R_EAX, Rn );
802 }
803 break;
804 case 0xF:
805 { /* MAC.L @Rm+, @Rn+ */
806 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
807 }
808 break;
809 default:
810 UNDEF();
811 break;
812 }
813 break;
814 case 0x1:
815 { /* MOV.L Rm, @(disp, Rn) */
816 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
817 load_reg( R_ECX, Rn );
818 load_reg( R_EAX, Rm );
819 ADD_imm32_r32( disp, R_ECX );
820 check_walign32( R_ECX );
821 MEM_WRITE_LONG( R_ECX, R_EAX );
822 }
823 break;
824 case 0x2:
825 switch( ir&0xF ) {
826 case 0x0:
827 { /* MOV.B Rm, @Rn */
828 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
829 load_reg( R_EAX, Rm );
830 load_reg( R_ECX, Rn );
831 MEM_WRITE_BYTE( R_ECX, R_EAX );
832 }
833 break;
834 case 0x1:
835 { /* MOV.W Rm, @Rn */
836 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
837 load_reg( R_ECX, Rn );
838 check_walign16( R_ECX );
839 load_reg( R_EAX, Rm );
840 MEM_WRITE_WORD( R_ECX, R_EAX );
841 }
842 break;
843 case 0x2:
844 { /* MOV.L Rm, @Rn */
845 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
846 load_reg( R_EAX, Rm );
847 load_reg( R_ECX, Rn );
848 check_walign32(R_ECX);
849 MEM_WRITE_LONG( R_ECX, R_EAX );
850 }
851 break;
852 case 0x4:
853 { /* MOV.B Rm, @-Rn */
854 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
855 load_reg( R_EAX, Rm );
856 load_reg( R_ECX, Rn );
857 ADD_imm8s_r32( -1, R_ECX );
858 store_reg( R_ECX, Rn );
859 MEM_WRITE_BYTE( R_ECX, R_EAX );
860 }
861 break;
862 case 0x5:
863 { /* MOV.W Rm, @-Rn */
864 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
865 load_reg( R_ECX, Rn );
866 check_walign16( R_ECX );
867 load_reg( R_EAX, Rm );
868 ADD_imm8s_r32( -2, R_ECX );
store_reg( R_ECX, Rn );
869 MEM_WRITE_WORD( R_ECX, R_EAX );
870 }
871 break;
872 case 0x6:
873 { /* MOV.L Rm, @-Rn */
874 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
875 load_reg( R_EAX, Rm );
876 load_reg( R_ECX, Rn );
877 check_walign32( R_ECX );
878 ADD_imm8s_r32( -4, R_ECX );
879 store_reg( R_ECX, Rn );
880 MEM_WRITE_LONG( R_ECX, R_EAX );
881 }
882 break;
883 case 0x7:
884 { /* DIV0S Rm, Rn */
885 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
886 load_reg( R_EAX, Rm );
887 load_reg( R_ECX, Rn );
888 SHR_imm8_r32( 31, R_EAX );
889 SHR_imm8_r32( 31, R_ECX );
890 store_spreg( R_EAX, R_M );
891 store_spreg( R_ECX, R_Q );
892 XOR_r32_r32( R_EAX, R_ECX ); // T = M ^ Q per the SH4 DIV0S definition
893 store_spreg( R_ECX, R_T );
894 }
895 break;
896 case 0x8:
897 { /* TST Rm, Rn */
898 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
899 load_reg( R_EAX, Rm );
900 load_reg( R_ECX, Rn );
901 TEST_r32_r32( R_EAX, R_ECX );
902 SETE_t();
903 }
904 break;
905 case 0x9:
906 { /* AND Rm, Rn */
907 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
908 load_reg( R_EAX, Rm );
909 load_reg( R_ECX, Rn );
910 AND_r32_r32( R_EAX, R_ECX );
911 store_reg( R_ECX, Rn );
912 }
913 break;
914 case 0xA:
915 { /* XOR Rm, Rn */
916 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
917 load_reg( R_EAX, Rm );
918 load_reg( R_ECX, Rn );
919 XOR_r32_r32( R_EAX, R_ECX );
920 store_reg( R_ECX, Rn );
921 }
922 break;
923 case 0xB:
924 { /* OR Rm, Rn */
925 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
926 load_reg( R_EAX, Rm );
927 load_reg( R_ECX, Rn );
928 OR_r32_r32( R_EAX, R_ECX );
929 store_reg( R_ECX, Rn );
930 }
931 break;
932 case 0xC:
933 { /* CMP/STR Rm, Rn */
934 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
935 load_reg( R_EAX, Rm );
936 load_reg( R_ECX, Rn );
937 XOR_r32_r32( R_ECX, R_EAX );
938 TEST_r8_r8( R_AL, R_AL );
939 JE_rel8(13, target1);
940 TEST_r8_r8( R_AH, R_AH ); // 2
941 JE_rel8(9, target2);
942 SHR_imm8_r32( 16, R_EAX ); // 3
943 TEST_r8_r8( R_AL, R_AL ); // 2
944 JE_rel8(2, target3);
945 TEST_r8_r8( R_AH, R_AH ); // 2
946 JMP_TARGET(target1);
947 JMP_TARGET(target2);
948 JMP_TARGET(target3);
949 SETE_t();
950 }
951 break;
952 case 0xD:
953 { /* XTRCT Rm, Rn */
954 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
955 load_reg( R_EAX, Rm );
956 MOV_r32_r32( R_EAX, R_ECX );
957 SHR_imm8_r32( 16, R_EAX );
958 SHL_imm8_r32( 16, R_ECX );
959 OR_r32_r32( R_EAX, R_ECX );
960 store_reg( R_ECX, Rn );
961 }
962 break;
963 case 0xE:
964 { /* MULU.W Rm, Rn */
965 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
966 load_reg16u( R_EAX, Rm );
967 load_reg16u( R_ECX, Rn );
968 MUL_r32( R_ECX );
969 store_spreg( R_EAX, R_MACL );
970 }
971 break;
972 case 0xF:
973 { /* MULS.W Rm, Rn */
974 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
975 load_reg16s( R_EAX, Rm );
976 load_reg16s( R_ECX, Rn );
977 MUL_r32( R_ECX );
978 store_spreg( R_EAX, R_MACL );
979 }
980 break;
981 default:
982 UNDEF();
983 break;
984 }
985 break;
986 case 0x3:
987 switch( ir&0xF ) {
988 case 0x0:
989 { /* CMP/EQ Rm, Rn */
990 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
991 load_reg( R_EAX, Rm );
992 load_reg( R_ECX, Rn );
993 CMP_r32_r32( R_EAX, R_ECX );
994 SETE_t();
995 }
996 break;
997 case 0x2:
998 { /* CMP/HS Rm, Rn */
999 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1000 load_reg( R_EAX, Rm );
1001 load_reg( R_ECX, Rn );
1002 CMP_r32_r32( R_EAX, R_ECX );
1003 SETAE_t();
1004 }
1005 break;
1006 case 0x3:
1007 { /* CMP/GE Rm, Rn */
1008 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1009 load_reg( R_EAX, Rm );
1010 load_reg( R_ECX, Rn );
1011 CMP_r32_r32( R_EAX, R_ECX );
1012 SETGE_t();
1013 }
1014 break;
1015 case 0x4:
1016 { /* DIV1 Rm, Rn */
1017 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1018 load_reg( R_ECX, Rn );
1019 LDC_t();
1020 RCL1_r32( R_ECX ); // OP2
1021 SETC_r32( R_EDX ); // Q
1022 load_spreg( R_EAX, R_Q );
1023 CMP_sh4r_r32( R_M, R_EAX );
1024 JE_rel8(8,mqequal);
1025 ADD_sh4r_r32( REG_OFFSET(r[Rm]), R_ECX );
1026 JMP_rel8(3, mqnotequal);
1027 JMP_TARGET(mqequal);
1028 SUB_sh4r_r32( REG_OFFSET(r[Rm]), R_ECX );
1029 JMP_TARGET(mqnotequal);
1030 // TODO
1031 }
1032 break;
1033 case 0x5:
1034 { /* DMULU.L Rm, Rn */
1035 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1036 load_reg( R_EAX, Rm );
1037 load_reg( R_ECX, Rn );
1038 MUL_r32(R_ECX);
1039 store_spreg( R_EDX, R_MACH );
1040 store_spreg( R_EAX, R_MACL );
1041 }
1042 break;
1043 case 0x6:
1044 { /* CMP/HI Rm, Rn */
1045 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1046 load_reg( R_EAX, Rm );
1047 load_reg( R_ECX, Rn );
1048 CMP_r32_r32( R_EAX, R_ECX );
1049 SETA_t();
1050 }
1051 break;
1052 case 0x7:
1053 { /* CMP/GT Rm, Rn */
1054 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1055 load_reg( R_EAX, Rm );
1056 load_reg( R_ECX, Rn );
1057 CMP_r32_r32( R_EAX, R_ECX );
1058 SETG_t();
1059 }
1060 break;
1061 case 0x8:
1062 { /* SUB Rm, Rn */
1063 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1064 load_reg( R_EAX, Rm );
1065 load_reg( R_ECX, Rn );
1066 SUB_r32_r32( R_EAX, R_ECX );
1067 store_reg( R_ECX, Rn );
1068 }
1069 break;
1070 case 0xA:
1071 { /* SUBC Rm, Rn */
1072 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1073 load_reg( R_EAX, Rm );
1074 load_reg( R_ECX, Rn );
1075 LDC_t();
1076 SBB_r32_r32( R_EAX, R_ECX );
1077 store_reg( R_ECX, Rn );
1078 }
1079 break;
1080 case 0xB:
1081 { /* SUBV Rm, Rn */
1082 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1083 load_reg( R_EAX, Rm );
1084 load_reg( R_ECX, Rn );
1085 SUB_r32_r32( R_EAX, R_ECX );
1086 store_reg( R_ECX, Rn );
1087 SETO_t();
1088 }
1089 break;
1090 case 0xC:
1091 { /* ADD Rm, Rn */
1092 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1093 load_reg( R_EAX, Rm );
1094 load_reg( R_ECX, Rn );
1095 ADD_r32_r32( R_EAX, R_ECX );
1096 store_reg( R_ECX, Rn );
1097 }
1098 break;
1099 case 0xD:
1100 { /* DMULS.L Rm, Rn */
1101 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1102 load_reg( R_EAX, Rm );
1103 load_reg( R_ECX, Rn );
1104 IMUL_r32(R_ECX);
1105 store_spreg( R_EDX, R_MACH );
1106 store_spreg( R_EAX, R_MACL );
1107 }
1108 break;
1109 case 0xE:
1110 { /* ADDC Rm, Rn */
1111 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1112 load_reg( R_EAX, Rm );
1113 load_reg( R_ECX, Rn );
1114 LDC_t();
1115 ADC_r32_r32( R_EAX, R_ECX );
1116 store_reg( R_ECX, Rn );
1117 SETC_t();
1118 }
1119 break;
1120 case 0xF:
1121 { /* ADDV Rm, Rn */
1122 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1123 load_reg( R_EAX, Rm );
1124 load_reg( R_ECX, Rn );
1125 ADD_r32_r32( R_EAX, R_ECX );
1126 store_reg( R_ECX, Rn );
1127 SETO_t();
1128 }
1129 break;
1130 default:
1131 UNDEF();
1132 break;
1133 }
1134 break;
1135 case 0x4:
1136 switch( ir&0xF ) {
1137 case 0x0:
1138 switch( (ir&0xF0) >> 4 ) {
1139 case 0x0:
1140 { /* SHLL Rn */
1141 uint32_t Rn = ((ir>>8)&0xF);
1142 load_reg( R_EAX, Rn );
1143 SHL1_r32( R_EAX );
1144 store_reg( R_EAX, Rn );
1145 }
1146 break;
1147 case 0x1:
1148 { /* DT Rn */
1149 uint32_t Rn = ((ir>>8)&0xF);
1150 load_reg( R_EAX, Rn );
1151 ADD_imm8s_r32( -1, R_EAX );
1152 store_reg( R_EAX, Rn );
1153 SETE_t();
1154 }
1155 break;
1156 case 0x2:
1157 { /* SHAL Rn */
1158 uint32_t Rn = ((ir>>8)&0xF);
1159 load_reg( R_EAX, Rn );
1160 SHL1_r32( R_EAX );
1161 store_reg( R_EAX, Rn );
1162 }
1163 break;
1164 default:
1165 UNDEF();
1166 break;
1167 }
1168 break;
1169 case 0x1:
1170 switch( (ir&0xF0) >> 4 ) {
1171 case 0x0:
1172 { /* SHLR Rn */
1173 uint32_t Rn = ((ir>>8)&0xF);
1174 load_reg( R_EAX, Rn );
1175 SHR1_r32( R_EAX );
1176 store_reg( R_EAX, Rn );
1177 }
1178 break;
1179 case 0x1:
1180 { /* CMP/PZ Rn */
1181 uint32_t Rn = ((ir>>8)&0xF);
1182 load_reg( R_EAX, Rn );
1183 CMP_imm8s_r32( 0, R_EAX );
1184 SETGE_t();
1185 }
1186 break;
1187 case 0x2:
1188 { /* SHAR Rn */
1189 uint32_t Rn = ((ir>>8)&0xF);
1190 load_reg( R_EAX, Rn );
1191 SAR1_r32( R_EAX );
1192 store_reg( R_EAX, Rn );
1193 }
1194 break;
1195 default:
1196 UNDEF();
1197 break;
1198 }
1199 break;
1200 case 0x2:
1201 switch( (ir&0xF0) >> 4 ) {
1202 case 0x0:
1203 { /* STS.L MACH, @-Rn */
1204 uint32_t Rn = ((ir>>8)&0xF);
1205 load_reg( R_ECX, Rn );
1206 ADD_imm8s_r32( -4, R_ECX );
1207 store_reg( R_ECX, Rn );
1208 load_spreg( R_EAX, R_MACH );
1209 MEM_WRITE_LONG( R_ECX, R_EAX );
1210 }
1211 break;
1212 case 0x1:
1213 { /* STS.L MACL, @-Rn */
1214 uint32_t Rn = ((ir>>8)&0xF);
1215 load_reg( R_ECX, Rn );
1216 ADD_imm8s_r32( -4, R_ECX );
1217 store_reg( R_ECX, Rn );
1218 load_spreg( R_EAX, R_MACL );
1219 MEM_WRITE_LONG( R_ECX, R_EAX );
1220 }
1221 break;
1222 case 0x2:
1223 { /* STS.L PR, @-Rn */
1224 uint32_t Rn = ((ir>>8)&0xF);
1225 load_reg( R_ECX, Rn );
1226 ADD_imm8s_r32( -4, R_ECX );
1227 store_reg( R_ECX, Rn );
1228 load_spreg( R_EAX, R_PR );
1229 MEM_WRITE_LONG( R_ECX, R_EAX );
1230 }
1231 break;
1232 case 0x3:
1233 { /* STC.L SGR, @-Rn */
1234 uint32_t Rn = ((ir>>8)&0xF);
1235 load_reg( R_ECX, Rn );
1236 ADD_imm8s_r32( -4, R_ECX );
1237 store_reg( R_ECX, Rn );
1238 load_spreg( R_EAX, R_SGR );
1239 MEM_WRITE_LONG( R_ECX, R_EAX );
1240 }
1241 break;
1242 case 0x5:
1243 { /* STS.L FPUL, @-Rn */
1244 uint32_t Rn = ((ir>>8)&0xF);
1245 load_reg( R_ECX, Rn );
1246 ADD_imm8s_r32( -4, R_ECX );
1247 store_reg( R_ECX, Rn );
1248 load_spreg( R_EAX, R_FPUL );
1249 MEM_WRITE_LONG( R_ECX, R_EAX );
1250 }
1251 break;
1252 case 0x6:
1253 { /* STS.L FPSCR, @-Rn */
1254 uint32_t Rn = ((ir>>8)&0xF);
1255 load_reg( R_ECX, Rn );
1256 ADD_imm8s_r32( -4, R_ECX );
1257 store_reg( R_ECX, Rn );
1258 load_spreg( R_EAX, R_FPSCR );
1259 MEM_WRITE_LONG( R_ECX, R_EAX );
1260 }
1261 break;
1262 case 0xF:
1263 { /* STC.L DBR, @-Rn */
1264 uint32_t Rn = ((ir>>8)&0xF);
1265 load_reg( R_ECX, Rn );
1266 ADD_imm8s_r32( -4, R_ECX );
1267 store_reg( R_ECX, Rn );
1268 load_spreg( R_EAX, R_DBR );
1269 MEM_WRITE_LONG( R_ECX, R_EAX );
1270 }
1271 break;
1272 default:
1273 UNDEF();
1274 break;
1275 }
1276 break;
1277 case 0x3:
1278 switch( (ir&0x80) >> 7 ) {
1279 case 0x0:
1280 switch( (ir&0x70) >> 4 ) {
1281 case 0x0:
1282 { /* STC.L SR, @-Rn */
1283 uint32_t Rn = ((ir>>8)&0xF);
1284 load_reg( R_ECX, Rn );
1285 ADD_imm8s_r32( -4, R_ECX );
1286 store_reg( R_ECX, Rn );
1287 call_func0( sh4_read_sr );
1288 MEM_WRITE_LONG( R_ECX, R_EAX );
1289 }
1290 break;
1291 case 0x1:
1292 { /* STC.L GBR, @-Rn */
1293 uint32_t Rn = ((ir>>8)&0xF);
1294 load_reg( R_ECX, Rn );
1295 ADD_imm8s_r32( -4, R_ECX );
1296 store_reg( R_ECX, Rn );
1297 load_spreg( R_EAX, R_GBR );
1298 MEM_WRITE_LONG( R_ECX, R_EAX );
1299 }
1300 break;
1301 case 0x2:
1302 { /* STC.L VBR, @-Rn */
1303 uint32_t Rn = ((ir>>8)&0xF);
1304 load_reg( R_ECX, Rn );
1305 ADD_imm8s_r32( -4, R_ECX );
1306 store_reg( R_ECX, Rn );
1307 load_spreg( R_EAX, R_VBR );
1308 MEM_WRITE_LONG( R_ECX, R_EAX );
1309 }
1310 break;
1311 case 0x3:
1312 { /* STC.L SSR, @-Rn */
1313 uint32_t Rn = ((ir>>8)&0xF);
1314 load_reg( R_ECX, Rn );
1315 ADD_imm8s_r32( -4, R_ECX );
1316 store_reg( R_ECX, Rn );
1317 load_spreg( R_EAX, R_SSR );
1318 MEM_WRITE_LONG( R_ECX, R_EAX );
1319 }
1320 break;
1321 case 0x4:
1322 { /* STC.L SPC, @-Rn */
1323 uint32_t Rn = ((ir>>8)&0xF);
1324 load_reg( R_ECX, Rn );
1325 ADD_imm8s_r32( -4, R_ECX );
1326 store_reg( R_ECX, Rn );
1327 load_spreg( R_EAX, R_SPC );
1328 MEM_WRITE_LONG( R_ECX, R_EAX );
1329 }
1330 break;
1331 default:
1332 UNDEF();
1333 break;
1334 }
1335 break;
1336 case 0x1:
1337 { /* STC.L Rm_BANK, @-Rn */
1338 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1339 load_reg( R_ECX, Rn );
1340 ADD_imm8s_r32( -4, R_ECX );
1341 store_reg( R_ECX, Rn );
1342 load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1343 MEM_WRITE_LONG( R_ECX, R_EAX );
1344 }
1345 break;
1346 }
1347 break;
1348 case 0x4:
1349 switch( (ir&0xF0) >> 4 ) {
1350 case 0x0:
1351 { /* ROTL Rn */
1352 uint32_t Rn = ((ir>>8)&0xF);
1353 load_reg( R_EAX, Rn );
1354 ROL1_r32( R_EAX );
1355 store_reg( R_EAX, Rn );
1356 SETC_t();
1357 }
1358 break;
1359 case 0x2:
1360 { /* ROTCL Rn */
1361 uint32_t Rn = ((ir>>8)&0xF);
1362 load_reg( R_EAX, Rn );
1363 LDC_t();
1364 RCL1_r32( R_EAX );
1365 store_reg( R_EAX, Rn );
1366 SETC_t();
1367 }
1368 break;
1369 default:
1370 UNDEF();
1371 break;
1372 }
1373 break;
1374 case 0x5:
1375 switch( (ir&0xF0) >> 4 ) {
1376 case 0x0:
1377 { /* ROTR Rn */
1378 uint32_t Rn = ((ir>>8)&0xF);
1379 load_reg( R_EAX, Rn );
1380 ROR1_r32( R_EAX );
1381 store_reg( R_EAX, Rn );
1382 SETC_t();
1383 }
1384 break;
1385 case 0x1:
1386 { /* CMP/PL Rn */
1387 uint32_t Rn = ((ir>>8)&0xF);
1388 load_reg( R_EAX, Rn );
1389 CMP_imm8s_r32( 0, R_EAX );
1390 SETG_t();
1391 }
1392 break;
1393 case 0x2:
1394 { /* ROTCR Rn */
1395 uint32_t Rn = ((ir>>8)&0xF);
1396 load_reg( R_EAX, Rn );
1397 LDC_t();
1398 RCR1_r32( R_EAX );
1399 store_reg( R_EAX, Rn );
1400 SETC_t();
1401 }
1402 break;
1403 default:
1404 UNDEF();
1405 break;
1406 }
1407 break;
1408 case 0x6:
1409 switch( (ir&0xF0) >> 4 ) {
1410 case 0x0:
1411 { /* LDS.L @Rm+, MACH */
1412 uint32_t Rm = ((ir>>8)&0xF);
1413 load_reg( R_EAX, Rm );
1414 MOV_r32_r32( R_EAX, R_ECX );
1415 ADD_imm8s_r32( 4, R_EAX );
1416 store_reg( R_EAX, Rm );
1417 MEM_READ_LONG( R_ECX, R_EAX );
1418 store_spreg( R_EAX, R_MACH );
1419 }
1420 break;
1421 case 0x1:
1422 { /* LDS.L @Rm+, MACL */
1423 uint32_t Rm = ((ir>>8)&0xF);
1424 load_reg( R_EAX, Rm );
1425 MOV_r32_r32( R_EAX, R_ECX );
1426 ADD_imm8s_r32( 4, R_EAX );
1427 store_reg( R_EAX, Rm );
1428 MEM_READ_LONG( R_ECX, R_EAX );
1429 store_spreg( R_EAX, R_MACL );
1430 }
1431 break;
1432 case 0x2:
1433 { /* LDS.L @Rm+, PR */
1434 uint32_t Rm = ((ir>>8)&0xF);
1435 load_reg( R_EAX, Rm );
1436 MOV_r32_r32( R_EAX, R_ECX );
1437 ADD_imm8s_r32( 4, R_EAX );
1438 store_reg( R_EAX, Rm );
1439 MEM_READ_LONG( R_ECX, R_EAX );
1440 store_spreg( R_EAX, R_PR );
1441 }
1442 break;
1443 case 0x3:
1444 { /* LDC.L @Rm+, SGR */
1445 uint32_t Rm = ((ir>>8)&0xF);
1446 load_reg( R_EAX, Rm );
1447 MOV_r32_r32( R_EAX, R_ECX );
1448 ADD_imm8s_r32( 4, R_EAX );
1449 store_reg( R_EAX, Rm );
1450 MEM_READ_LONG( R_ECX, R_EAX );
1451 store_spreg( R_EAX, R_SGR );
1452 }
1453 break;
1454 case 0x5:
1455 { /* LDS.L @Rm+, FPUL */
1456 uint32_t Rm = ((ir>>8)&0xF);
1457 load_reg( R_EAX, Rm );
1458 MOV_r32_r32( R_EAX, R_ECX );
1459 ADD_imm8s_r32( 4, R_EAX );
1460 store_reg( R_EAX, Rm );
1461 MEM_READ_LONG( R_ECX, R_EAX );
1462 store_spreg( R_EAX, R_FPUL );
1463 }
1464 break;
1465 case 0x6:
1466 { /* LDS.L @Rm+, FPSCR */
1467 uint32_t Rm = ((ir>>8)&0xF);
1468 load_reg( R_EAX, Rm );
1469 MOV_r32_r32( R_EAX, R_ECX );
1470 ADD_imm8s_r32( 4, R_EAX );
1471 store_reg( R_EAX, Rm );
1472 MEM_READ_LONG( R_ECX, R_EAX );
1473 store_spreg( R_EAX, R_FPSCR );
1474 }
1475 break;
1476 case 0xF:
1477 { /* LDC.L @Rm+, DBR */
1478 uint32_t Rm = ((ir>>8)&0xF);
1479 load_reg( R_EAX, Rm );
1480 MOV_r32_r32( R_EAX, R_ECX );
1481 ADD_imm8s_r32( 4, R_EAX );
1482 store_reg( R_EAX, Rm );
1483 MEM_READ_LONG( R_ECX, R_EAX );
1484 store_spreg( R_EAX, R_DBR );
1485 }
1486 break;
1487 default:
1488 UNDEF();
1489 break;
1490 }
1491 break;
1492 case 0x7:
1493 switch( (ir&0x80) >> 7 ) {
1494 case 0x0:
1495 switch( (ir&0x70) >> 4 ) {
1496 case 0x0:
1497 { /* LDC.L @Rm+, SR */
1498 uint32_t Rm = ((ir>>8)&0xF);
1499 load_reg( R_EAX, Rm );
1500 MOV_r32_r32( R_EAX, R_ECX );
1501 ADD_imm8s_r32( 4, R_EAX );
1502 store_reg( R_EAX, Rm );
1503 MEM_READ_LONG( R_ECX, R_EAX );
1504 call_func1( sh4_write_sr, R_EAX );
1505 sh4_x86.priv_checked = FALSE;
1506 sh4_x86.fpuen_checked = FALSE;
1507 }
1508 break;
1509 case 0x1:
1510 { /* LDC.L @Rm+, GBR */
1511 uint32_t Rm = ((ir>>8)&0xF);
1512 load_reg( R_EAX, Rm );
1513 MOV_r32_r32( R_EAX, R_ECX );
1514 ADD_imm8s_r32( 4, R_EAX );
1515 store_reg( R_EAX, Rm );
1516 MEM_READ_LONG( R_ECX, R_EAX );
1517 store_spreg( R_EAX, R_GBR );
1518 }
1519 break;
1520 case 0x2:
1521 { /* LDC.L @Rm+, VBR */
1522 uint32_t Rm = ((ir>>8)&0xF);
1523 load_reg( R_EAX, Rm );
1524 MOV_r32_r32( R_EAX, R_ECX );
1525 ADD_imm8s_r32( 4, R_EAX );
1526 store_reg( R_EAX, Rm );
1527 MEM_READ_LONG( R_ECX, R_EAX );
1528 store_spreg( R_EAX, R_VBR );
1529 }
1530 break;
1531 case 0x3:
1532 { /* LDC.L @Rm+, SSR */
1533 uint32_t Rm = ((ir>>8)&0xF);
1534 load_reg( R_EAX, Rm );
1535 MOV_r32_r32( R_EAX, R_ECX );
1536 ADD_imm8s_r32( 4, R_EAX );
1537 store_reg( R_EAX, Rm );
1538 MEM_READ_LONG( R_ECX, R_EAX );
1539 store_spreg( R_EAX, R_SSR );
1540 }
1541 break;
1542 case 0x4:
1543 { /* LDC.L @Rm+, SPC */
1544 uint32_t Rm = ((ir>>8)&0xF);
1545 load_reg( R_EAX, Rm );
1546 MOV_r32_r32( R_EAX, R_ECX );
1547 ADD_imm8s_r32( 4, R_EAX );
1548 store_reg( R_EAX, Rm );
1549 MEM_READ_LONG( R_ECX, R_EAX );
1550 store_spreg( R_EAX, R_SPC );
1551 }
1552 break;
1553 default:
1554 UNDEF();
1555 break;
1556 }
1557 break;
1558 case 0x1:
1559 { /* LDC.L @Rm+, Rn_BANK */
1560 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1561 load_reg( R_EAX, Rm );
1562 MOV_r32_r32( R_EAX, R_ECX );
1563 ADD_imm8s_r32( 4, R_EAX );
1564 store_reg( R_EAX, Rm );
1565 MEM_READ_LONG( R_ECX, R_EAX );
1566 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1567 }
1568 break;
1569 }
1570 break;
1571 case 0x8:
1572 switch( (ir&0xF0) >> 4 ) {
1573 case 0x0:
1574 { /* SHLL2 Rn */
1575 uint32_t Rn = ((ir>>8)&0xF);
1576 load_reg( R_EAX, Rn );
1577 SHL_imm8_r32( 2, R_EAX );
1578 store_reg( R_EAX, Rn );
1579 }
1580 break;
1581 case 0x1:
1582 { /* SHLL8 Rn */
1583 uint32_t Rn = ((ir>>8)&0xF);
1584 load_reg( R_EAX, Rn );
1585 SHL_imm8_r32( 8, R_EAX );
1586 store_reg( R_EAX, Rn );
1587 }
1588 break;
1589 case 0x2:
1590 { /* SHLL16 Rn */
1591 uint32_t Rn = ((ir>>8)&0xF);
1592 load_reg( R_EAX, Rn );
1593 SHL_imm8_r32( 16, R_EAX );
1594 store_reg( R_EAX, Rn );
1595 }
1596 break;
1597 default:
1598 UNDEF();
1599 break;
1600 }
1601 break;
1602 case 0x9:
1603 switch( (ir&0xF0) >> 4 ) {
1604 case 0x0:
1605 { /* SHLR2 Rn */
1606 uint32_t Rn = ((ir>>8)&0xF);
1607 load_reg( R_EAX, Rn );
1608 SHR_imm8_r32( 2, R_EAX );
1609 store_reg( R_EAX, Rn );
1610 }
1611 break;
1612 case 0x1:
1613 { /* SHLR8 Rn */
1614 uint32_t Rn = ((ir>>8)&0xF);
1615 load_reg( R_EAX, Rn );
1616 SHR_imm8_r32( 8, R_EAX );
1617 store_reg( R_EAX, Rn );
1618 }
1619 break;
1620 case 0x2:
1621 { /* SHLR16 Rn */
1622 uint32_t Rn = ((ir>>8)&0xF);
1623 load_reg( R_EAX, Rn );
1624 SHR_imm8_r32( 16, R_EAX );
1625 store_reg( R_EAX, Rn );
1626 }
1627 break;
1628 default:
1629 UNDEF();
1630 break;
1631 }
1632 break;
1633 case 0xA:
1634 switch( (ir&0xF0) >> 4 ) {
1635 case 0x0:
1636 { /* LDS Rm, MACH */
1637 uint32_t Rm = ((ir>>8)&0xF);
1638 load_reg( R_EAX, Rm );
1639 store_spreg( R_EAX, R_MACH );
1640 }
1641 break;
1642 case 0x1:
1643 { /* LDS Rm, MACL */
1644 uint32_t Rm = ((ir>>8)&0xF);
1645 load_reg( R_EAX, Rm );
1646 store_spreg( R_EAX, R_MACL );
1647 }
1648 break;
1649 case 0x2:
1650 { /* LDS Rm, PR */
1651 uint32_t Rm = ((ir>>8)&0xF);
1652 load_reg( R_EAX, Rm );
1653 store_spreg( R_EAX, R_PR );
1654 }
1655 break;
1656 case 0x3:
1657 { /* LDC Rm, SGR */
1658 uint32_t Rm = ((ir>>8)&0xF);
1659 load_reg( R_EAX, Rm );
1660 store_spreg( R_EAX, R_SGR );
1661 }
1662 break;
1663 case 0x5:
1664 { /* LDS Rm, FPUL */
1665 uint32_t Rm = ((ir>>8)&0xF);
1666 load_reg( R_EAX, Rm );
1667 store_spreg( R_EAX, R_FPUL );
1668 }
1669 break;
1670 case 0x6:
1671 { /* LDS Rm, FPSCR */
1672 uint32_t Rm = ((ir>>8)&0xF);
1673 load_reg( R_EAX, Rm );
1674 store_spreg( R_EAX, R_FPSCR );
1675 }
1676 break;
1677 case 0xF:
1678 { /* LDC Rm, DBR */
1679 uint32_t Rm = ((ir>>8)&0xF);
1680 load_reg( R_EAX, Rm );
1681 store_spreg( R_EAX, R_DBR );
1682 }
1683 break;
1684 default:
1685 UNDEF();
1686 break;
1687 }
1688 break;
1689 case 0xB:
1690 switch( (ir&0xF0) >> 4 ) {
1691 case 0x0:
1692 { /* JSR @Rn */
1693 uint32_t Rn = ((ir>>8)&0xF);
1694 if( sh4_x86.in_delay_slot ) {
1695 SLOTILLEGAL();
1696 } else {
1697 load_imm32( R_EAX, pc + 4 );
1698 store_spreg( R_EAX, R_PR );
1699 load_reg( R_EDI, Rn );
1700 sh4_x86.in_delay_slot = TRUE;
1701 INC_r32(R_ESI);
1702 return 0;
1703 }
1704 }
1705 break;
1706 case 0x1:
1707 { /* TAS.B @Rn */
1708 uint32_t Rn = ((ir>>8)&0xF);
1709 load_reg( R_ECX, Rn );
1710 MEM_READ_BYTE( R_ECX, R_EAX );
1711 TEST_r8_r8( R_AL, R_AL );
1712 SETE_t();
1713 OR_imm8_r8( 0x80, R_AL );
1714 MEM_WRITE_BYTE( R_ECX, R_EAX );
1715 }
1716 break;
1717 case 0x2:
1718 { /* JMP @Rn */
1719 uint32_t Rn = ((ir>>8)&0xF);
1720 if( sh4_x86.in_delay_slot ) {
1721 SLOTILLEGAL();
1722 } else {
1723 load_reg( R_EDI, Rn );
1724 sh4_x86.in_delay_slot = TRUE;
1725 INC_r32(R_ESI);
1726 return 0;
1727 }
1728 }
1729 break;
1730 default:
1731 UNDEF();
1732 break;
1733 }
1734 break;
1735 case 0xC:
1736 { /* SHAD Rm, Rn */
1737 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1738 /* Annoyingly enough, not directly convertible */
1739 load_reg( R_EAX, Rn );
1740 load_reg( R_ECX, Rm );
1741 CMP_imm32_r32( 0, R_ECX );
1742 JAE_rel8(9, doshl);
1744 NEG_r32( R_ECX ); // 2
1745 AND_imm8_r8( 0x1F, R_CL ); // 3
1746 SAR_r32_CL( R_EAX ); // 2
1747 JMP_rel8(5, end); // 2
1748 JMP_TARGET(doshl);
1749 AND_imm8_r8( 0x1F, R_CL ); // 3
1750 SHL_r32_CL( R_EAX ); // 2
1751 JMP_TARGET(end);
1752 store_reg( R_EAX, Rn );
1753 }
1754 break;
1755 case 0xD:
1756 { /* SHLD Rm, Rn */
1757 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1758 load_reg( R_EAX, Rn );
1759 load_reg( R_ECX, Rm );
1761 MOV_r32_r32( R_EAX, R_EDX );
1762 SHL_r32_CL( R_EAX );
1763 NEG_r32( R_ECX );
1764 SHR_r32_CL( R_EDX );
1765 CMP_imm8s_r32( 0, R_ECX );
1766 CMOVAE_r32_r32( R_EDX, R_EAX );
1767 store_reg( R_EAX, Rn );
1768 }
1769 break;
1770 case 0xE:
1771 switch( (ir&0x80) >> 7 ) {
1772 case 0x0:
1773 switch( (ir&0x70) >> 4 ) {
1774 case 0x0:
1775 { /* LDC Rm, SR */
1776 uint32_t Rm = ((ir>>8)&0xF);
1777 load_reg( R_EAX, Rm );
1778 call_func1( sh4_write_sr, R_EAX );
1779 sh4_x86.priv_checked = FALSE;
1780 sh4_x86.fpuen_checked = FALSE;
1781 }
1782 break;
1783 case 0x1:
1784 { /* LDC Rm, GBR */
1785 uint32_t Rm = ((ir>>8)&0xF);
1786 load_reg( R_EAX, Rm );
1787 store_spreg( R_EAX, R_GBR );
1788 }
1789 break;
1790 case 0x2:
1791 { /* LDC Rm, VBR */
1792 uint32_t Rm = ((ir>>8)&0xF);
1793 load_reg( R_EAX, Rm );
1794 store_spreg( R_EAX, R_VBR );
1795 }
1796 break;
1797 case 0x3:
1798 { /* LDC Rm, SSR */
1799 uint32_t Rm = ((ir>>8)&0xF);
1800 load_reg( R_EAX, Rm );
1801 store_spreg( R_EAX, R_SSR );
1802 }
1803 break;
1804 case 0x4:
1805 { /* LDC Rm, SPC */
1806 uint32_t Rm = ((ir>>8)&0xF);
1807 load_reg( R_EAX, Rm );
1808 store_spreg( R_EAX, R_SPC );
1809 }
1810 break;
1811 default:
1812 UNDEF();
1813 break;
1814 }
1815 break;
1816 case 0x1:
1817 { /* LDC Rm, Rn_BANK */
1818 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1819 load_reg( R_EAX, Rm );
1820 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1821 }
1822 break;
1823 }
1824 break;
1825 case 0xF:
1826 { /* MAC.W @Rm+, @Rn+ */
1827 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1828 }
1829 break;
1830 }
1831 break;
1832 case 0x5:
1833 { /* MOV.L @(disp, Rm), Rn */
1834 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
1835 load_reg( R_ECX, Rm );
1836 ADD_imm8s_r32( disp, R_ECX );
1837 check_ralign32( R_ECX );
1838 MEM_READ_LONG( R_ECX, R_EAX );
1839 store_reg( R_EAX, Rn );
1840 }
1841 break;
1842 case 0x6:
1843 switch( ir&0xF ) {
1844 case 0x0:
1845 { /* MOV.B @Rm, Rn */
1846 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1847 load_reg( R_ECX, Rm );
1848 MEM_READ_BYTE( R_ECX, R_EAX );
1849 store_reg( R_EAX, Rn );
1850 }
1851 break;
1852 case 0x1:
1853 { /* MOV.W @Rm, Rn */
1854 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1855 load_reg( R_ECX, Rm );
1856 check_ralign16( R_ECX );
1857 MEM_READ_WORD( R_ECX, R_EAX );
1858 store_reg( R_EAX, Rn );
1859 }
1860 break;
1861 case 0x2:
1862 { /* MOV.L @Rm, Rn */
1863 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1864 load_reg( R_ECX, Rm );
1865 check_ralign32( R_ECX );
1866 MEM_READ_LONG( R_ECX, R_EAX );
1867 store_reg( R_EAX, Rn );
1868 }
1869 break;
1870 case 0x3:
1871 { /* MOV Rm, Rn */
1872 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1873 load_reg( R_EAX, Rm );
1874 store_reg( R_EAX, Rn );
1875 }
1876 break;
1877 case 0x4:
1878 { /* MOV.B @Rm+, Rn */
1879 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1880 load_reg( R_ECX, Rm );
1881 MOV_r32_r32( R_ECX, R_EAX );
1882 ADD_imm8s_r32( 1, R_EAX );
1883 store_reg( R_EAX, Rm );
1884 MEM_READ_BYTE( R_ECX, R_EAX );
1885 store_reg( R_EAX, Rn );
1886 }
1887 break;
1888 case 0x5:
1889 { /* MOV.W @Rm+, Rn */
1890 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1891 load_reg( R_EAX, Rm );
1892 check_ralign16( R_EAX );
1893 MOV_r32_r32( R_EAX, R_ECX );
1894 ADD_imm8s_r32( 2, R_EAX );
1895 store_reg( R_EAX, Rm );
1896 MEM_READ_WORD( R_ECX, R_EAX );
1897 store_reg( R_EAX, Rn );
1898 }
1899 break;
1900 case 0x6:
1901 { /* MOV.L @Rm+, Rn */
1902 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1903 load_reg( R_EAX, Rm );
1904 check_ralign32( R_EAX );
1905 MOV_r32_r32( R_EAX, R_ECX );
1906 ADD_imm8s_r32( 4, R_EAX );
1907 store_reg( R_EAX, Rm );
1908 MEM_READ_LONG( R_ECX, R_EAX );
1909 store_reg( R_EAX, Rn );
1910 }
1911 break;
1912 case 0x7:
1913 { /* NOT Rm, Rn */
1914 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1915 load_reg( R_EAX, Rm );
1916 NOT_r32( R_EAX );
1917 store_reg( R_EAX, Rn );
1918 }
1919 break;
1920 case 0x8:
1921 { /* SWAP.B Rm, Rn */
1922 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1923 load_reg( R_EAX, Rm );
1924 XCHG_r8_r8( R_AL, R_AH );
1925 store_reg( R_EAX, Rn );
1926 }
1927 break;
1928 case 0x9:
1929 { /* SWAP.W Rm, Rn */
1930 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1931 load_reg( R_EAX, Rm );
1932 MOV_r32_r32( R_EAX, R_ECX );
1933 SHL_imm8_r32( 16, R_ECX );
1934 SHR_imm8_r32( 16, R_EAX );
1935 OR_r32_r32( R_EAX, R_ECX );
1936 store_reg( R_ECX, Rn );
1937 }
1938 break;
1939 case 0xA:
1940 { /* NEGC Rm, Rn */
1941 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1942 load_reg( R_EAX, Rm );
1943 XOR_r32_r32( R_ECX, R_ECX );
1944 LDC_t();
1945 SBB_r32_r32( R_EAX, R_ECX );
1946 store_reg( R_ECX, Rn );
1947 SETC_t();
1948 }
1949 break;
1950 case 0xB:
1951 { /* NEG Rm, Rn */
1952 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1953 load_reg( R_EAX, Rm );
1954 NEG_r32( R_EAX );
1955 store_reg( R_EAX, Rn );
1956 }
1957 break;
1958 case 0xC:
1959 { /* EXTU.B Rm, Rn */
1960 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1961 load_reg( R_EAX, Rm );
1962 MOVZX_r8_r32( R_EAX, R_EAX );
1963 store_reg( R_EAX, Rn );
1964 }
1965 break;
1966 case 0xD:
1967 { /* EXTU.W Rm, Rn */
1968 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1969 load_reg( R_EAX, Rm );
1970 MOVZX_r16_r32( R_EAX, R_EAX );
1971 store_reg( R_EAX, Rn );
1972 }
1973 break;
1974 case 0xE:
1975 { /* EXTS.B Rm, Rn */
1976 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1977 load_reg( R_EAX, Rm );
1978 MOVSX_r8_r32( R_EAX, R_EAX );
1979 store_reg( R_EAX, Rn );
1980 }
1981 break;
1982 case 0xF:
1983 { /* EXTS.W Rm, Rn */
1984 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1985 load_reg( R_EAX, Rm );
1986 MOVSX_r16_r32( R_EAX, R_EAX );
1987 store_reg( R_EAX, Rn );
1988 }
1989 break;
1990 }
1991 break;
1992 case 0x7:
1993 { /* ADD #imm, Rn */
1994 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
1995 load_reg( R_EAX, Rn );
1996 ADD_imm8s_r32( imm, R_EAX );
1997 store_reg( R_EAX, Rn );
1998 }
1999 break;
2000 case 0x8:
2001 switch( (ir&0xF00) >> 8 ) {
2002 case 0x0:
2003 { /* MOV.B R0, @(disp, Rn) */
2004 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2005 load_reg( R_EAX, 0 );
2006 load_reg( R_ECX, Rn );
2007 ADD_imm32_r32( disp, R_ECX );
2008 MEM_WRITE_BYTE( R_ECX, R_EAX );
2009 }
2010 break;
2011 case 0x1:
2012 { /* MOV.W R0, @(disp, Rn) */
2013 uint32_t Rn = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2014 load_reg( R_ECX, Rn );
2015 load_reg( R_EAX, 0 );
2016 ADD_imm32_r32( disp, R_ECX );
2017 check_walign16( R_ECX );
2018 MEM_WRITE_WORD( R_ECX, R_EAX );
2019 }
2020 break;
2021 case 0x4:
2022 { /* MOV.B @(disp, Rm), R0 */
2023 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF);
2024 load_reg( R_ECX, Rm );
2025 ADD_imm32_r32( disp, R_ECX );
2026 MEM_READ_BYTE( R_ECX, R_EAX );
2027 store_reg( R_EAX, 0 );
2028 }
2029 break;
2030 case 0x5:
2031 { /* MOV.W @(disp, Rm), R0 */
2032 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
2033 load_reg( R_ECX, Rm );
2034 ADD_imm32_r32( disp, R_ECX );
2035 check_ralign16( R_ECX );
2036 MEM_READ_WORD( R_ECX, R_EAX );
2037 store_reg( R_EAX, 0 );
2038 }
2039 break;
2040 case 0x8:
2041 { /* CMP/EQ #imm, R0 */
2042 int32_t imm = SIGNEXT8(ir&0xFF);
2043 load_reg( R_EAX, 0 );
2044 CMP_imm8s_r32(imm, R_EAX);
2045 SETE_t();
2046 }
2047 break;
2048 case 0x9:
2049 { /* BT disp */
2050 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2051 if( sh4_x86.in_delay_slot ) {
2052 SLOTILLEGAL();
2053 } else {
2054 load_imm32( R_EDI, pc + 2 );
2055 CMP_imm8s_sh4r( 0, R_T );
2056 JE_rel8( 5, nottaken );
2057 load_imm32( R_EDI, disp + pc + 4 );
2058 JMP_TARGET(nottaken);
2059 INC_r32(R_ESI);
2060 return 1;
2061 }
2062 }
2063 break;
2064 case 0xB:
2065 { /* BF disp */
2066 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2067 if( sh4_x86.in_delay_slot ) {
2068 SLOTILLEGAL();
2069 } else {
2070 load_imm32( R_EDI, pc + 2 );
2071 CMP_imm8s_sh4r( 0, R_T );
2072 JNE_rel8( 5, nottaken );
2073 load_imm32( R_EDI, disp + pc + 4 );
2074 JMP_TARGET(nottaken);
2075 INC_r32(R_ESI);
2076 return 1;
2077 }
2078 }
2079 break;
2080 case 0xD:
2081 { /* BT/S disp */
2082 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2083 if( sh4_x86.in_delay_slot ) {
2084 SLOTILLEGAL();
2085 } else {
2086 load_imm32( R_EDI, pc + 4 ); // not-taken falls through to the instruction after the delay slot
2087 CMP_imm8s_sh4r( 0, R_T );
2088 JE_rel8( 5, nottaken );
2089 load_imm32( R_EDI, disp + pc + 4 );
2090 JMP_TARGET(nottaken);
2091 sh4_x86.in_delay_slot = TRUE;
2092 INC_r32(R_ESI);
2093 return 0;
2094 }
2095 }
2096 break;
2097 case 0xF:
2098 { /* BF/S disp */
2099 int32_t disp = SIGNEXT8(ir&0xFF)<<1;
2100 if( sh4_x86.in_delay_slot ) {
2101 SLOTILLEGAL();
2102 } else {
2103 load_imm32( R_EDI, pc + 4 ); // not-taken falls through to the instruction after the delay slot
2104 CMP_imm8s_sh4r( 0, R_T );
2105 JNE_rel8( 5, nottaken );
2106 load_imm32( R_EDI, disp + pc + 4 );
2107 JMP_TARGET(nottaken);
2108 sh4_x86.in_delay_slot = TRUE;
2109 INC_r32(R_ESI);
2110 return 0;
2111 }
2112 }
2113 break;
2114 default:
2115 UNDEF();
2116 break;
2117 }
2118 break;
2119 case 0x9:
2120 { /* MOV.W @(disp, PC), Rn */
2121 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<1;
2122 if( sh4_x86.in_delay_slot ) {
2123 SLOTILLEGAL();
2124 } else {
2125 load_imm32( R_ECX, pc + disp + 4 );
2126 MEM_READ_WORD( R_ECX, R_EAX );
2127 store_reg( R_EAX, Rn );
2128 }
2129 }
2130 break;
2131 case 0xA:
2132 { /* BRA disp */
2133 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2134 if( sh4_x86.in_delay_slot ) {
2135 SLOTILLEGAL();
2136 } else {
2137 load_imm32( R_EDI, disp + pc + 4 );
2138 sh4_x86.in_delay_slot = TRUE;
2139 INC_r32(R_ESI);
2140 return 0;
2141 }
2142 }
2143 break;
2144 case 0xB:
2145 { /* BSR disp */
2146 int32_t disp = SIGNEXT12(ir&0xFFF)<<1;
2147 if( sh4_x86.in_delay_slot ) {
2148 SLOTILLEGAL();
2149 } else {
2150 load_imm32( R_EAX, pc + 4 );
2151 store_spreg( R_EAX, R_PR );
2152 load_imm32( R_EDI, disp + pc + 4 );
2153 sh4_x86.in_delay_slot = TRUE;
2154 INC_r32(R_ESI);
2155 return 0;
2156 }
2157 }
2158 break;
2159 case 0xC:
2160 switch( (ir&0xF00) >> 8 ) {
2161 case 0x0:
2162 { /* MOV.B R0, @(disp, GBR) */
2163 uint32_t disp = (ir&0xFF);
2164 load_reg( R_EAX, 0 );
2165 load_spreg( R_ECX, R_GBR );
2166 ADD_imm32_r32( disp, R_ECX );
2167 MEM_WRITE_BYTE( R_ECX, R_EAX );
2168 }
2169 break;
2170 case 0x1:
2171 { /* MOV.W R0, @(disp, GBR) */
2172 uint32_t disp = (ir&0xFF)<<1;
2173 load_spreg( R_ECX, R_GBR );
2174 load_reg( R_EAX, 0 );
2175 ADD_imm32_r32( disp, R_ECX );
2176 check_walign16( R_ECX );
2177 MEM_WRITE_WORD( R_ECX, R_EAX );
2178 }
2179 break;
2180 case 0x2:
2181 { /* MOV.L R0, @(disp, GBR) */
2182 uint32_t disp = (ir&0xFF)<<2;
2183 load_spreg( R_ECX, R_GBR );
2184 load_reg( R_EAX, 0 );
2185 ADD_imm32_r32( disp, R_ECX );
2186 check_walign32( R_ECX );
2187 MEM_WRITE_LONG( R_ECX, R_EAX );
2188 }
2189 break;
2190 case 0x3:
2191 { /* TRAPA #imm */
2192 uint32_t imm = (ir&0xFF);
2193 if( sh4_x86.in_delay_slot ) {
2194 SLOTILLEGAL();
2195 } else {
2196 // TODO: Write TRA
2197 RAISE_EXCEPTION(EXC_TRAP);
2198 }
2199 }
2200 break;
2201 case 0x4:
2202 { /* MOV.B @(disp, GBR), R0 */
2203 uint32_t disp = (ir&0xFF);
2204 load_spreg( R_ECX, R_GBR );
2205 ADD_imm32_r32( disp, R_ECX );
2206 MEM_READ_BYTE( R_ECX, R_EAX );
2207 store_reg( R_EAX, 0 );
2208 }
2209 break;
2210 case 0x5:
2211 { /* MOV.W @(disp, GBR), R0 */
2212 uint32_t disp = (ir&0xFF)<<1;
2213 load_spreg( R_ECX, R_GBR );
2214 ADD_imm32_r32( disp, R_ECX );
2215 check_ralign16( R_ECX );
2216 MEM_READ_WORD( R_ECX, R_EAX );
2217 store_reg( R_EAX, 0 );
2218 }
2219 break;
2220 case 0x6:
2221 { /* MOV.L @(disp, GBR), R0 */
2222 uint32_t disp = (ir&0xFF)<<2;
2223 load_spreg( R_ECX, R_GBR );
2224 ADD_imm32_r32( disp, R_ECX );
2225 check_ralign32( R_ECX );
2226 MEM_READ_LONG( R_ECX, R_EAX );
2227 store_reg( R_EAX, 0 );
2228 }
2229 break;
2230 case 0x7:
2231 { /* MOVA @(disp, PC), R0 */
2232 uint32_t disp = (ir&0xFF)<<2;
2233 if( sh4_x86.in_delay_slot ) {
2234 SLOTILLEGAL();
2235 } else {
2236 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2237 store_reg( R_ECX, 0 );
2238 }
2239 }
2240 break;
2241 case 0x8:
2242 { /* TST #imm, R0 */
2243 uint32_t imm = (ir&0xFF);
2244 load_reg( R_EAX, 0 );
2245 TEST_imm32_r32( imm, R_EAX );
2246 SETE_t();
2247 }
2248 break;
2249 case 0x9:
2250 { /* AND #imm, R0 */
2251 uint32_t imm = (ir&0xFF);
2252 load_reg( R_EAX, 0 );
2253 AND_imm32_r32(imm, R_EAX);
2254 store_reg( R_EAX, 0 );
2255 }
2256 break;
2257 case 0xA:
2258 { /* XOR #imm, R0 */
2259 uint32_t imm = (ir&0xFF);
2260 load_reg( R_EAX, 0 );
2261 XOR_imm32_r32( imm, R_EAX );
2262 store_reg( R_EAX, 0 );
2263 }
2264 break;
2265 case 0xB:
2266 { /* OR #imm, R0 */
2267 uint32_t imm = (ir&0xFF);
2268 load_reg( R_EAX, 0 );
2269 OR_imm32_r32(imm, R_EAX);
2270 store_reg( R_EAX, 0 );
2271 }
2272 break;
2273 case 0xC:
2274 { /* TST.B #imm, @(R0, GBR) */
2275 uint32_t imm = (ir&0xFF);
2276 load_reg( R_EAX, 0);
2277 load_spreg( R_ECX, R_GBR );
2278 ADD_r32_r32( R_EAX, R_ECX );
2279 MEM_READ_BYTE( R_ECX, R_EAX );
2280 TEST_imm8_r8( imm, R_EAX );
2281 SETE_t();
2282 }
2283 break;
2284 case 0xD:
2285 { /* AND.B #imm, @(R0, GBR) */
2286 uint32_t imm = (ir&0xFF);
2287 load_reg( R_EAX, 0 );
2288 load_spreg( R_ECX, R_GBR );
2289 ADD_r32_r32( R_EAX, R_ECX );
2290 MEM_READ_BYTE( R_ECX, R_EAX );
2291 AND_imm32_r32(imm, R_EAX );
2292 MEM_WRITE_BYTE( R_ECX, R_EAX );
2293 }
2294 break;
2295 case 0xE:
2296 { /* XOR.B #imm, @(R0, GBR) */
2297 uint32_t imm = (ir&0xFF);
2298 load_reg( R_EAX, 0 );
2299 load_spreg( R_ECX, R_GBR );
2300 ADD_r32_r32( R_EAX, R_ECX );
2301 MEM_READ_BYTE( R_ECX, R_EAX );
2302 XOR_imm32_r32( imm, R_EAX );
2303 MEM_WRITE_BYTE( R_ECX, R_EAX );
2304 }
2305 break;
2306 case 0xF:
2307 { /* OR.B #imm, @(R0, GBR) */
2308 uint32_t imm = (ir&0xFF);
2309 load_reg( R_EAX, 0 );
2310 load_spreg( R_ECX, R_GBR );
2311 ADD_r32_r32( R_EAX, R_ECX );
2312 MEM_READ_BYTE( R_ECX, R_EAX );
2313 OR_imm32_r32( imm, R_EAX );
2314 MEM_WRITE_BYTE( R_ECX, R_EAX );
2315 }
2316 break;
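/* The four #imm, @(R0,GBR) cases above are byte-wide test / read-modify-write
 * operations on the address GBR + R0. Illustrative pseudo-C of the intended
 * semantics (mem_read_byte, mem_write_byte, T, R0 and GBR are architectural
 * shorthands, not names from this file):
 *
 *     uint8_t v = mem_read_byte( GBR + R0 );
 *     T = ((v & imm) == 0);                    // TST.B: test only, sets T
 *     mem_write_byte( GBR + R0, v | imm );     // OR.B (AND.B/XOR.B analogous)
 */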
2317 }
2318 break;
2319 case 0xD:
2320 { /* MOV.L @(disp, PC), Rn */
2321 uint32_t Rn = ((ir>>8)&0xF); uint32_t disp = (ir&0xFF)<<2;
2322 if( sh4_x86.in_delay_slot ) {
2323 SLOTILLEGAL();
2324 } else {
2325 load_imm32( R_ECX, (pc & 0xFFFFFFFC) + disp + 4 );
2326 MEM_READ_LONG( R_ECX, R_EAX );
2327 store_reg( R_EAX, Rn );
2328 }
2329 }
2330 break;
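/* MOV.L @(disp,PC), Rn is the literal-pool load: the effective address is the
 * longword-aligned PC plus 4 plus the scaled displacement, which is a
 * constant at translation time, so only the memory read itself is emitted.
 * Illustrative pseudo-C (mem_read_long and Rn are architectural shorthands):
 *
 *     Rn = mem_read_long( (pc & ~3) + 4 + disp );   // disp pre-scaled by 4
 */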
2331 case 0xE:
2332 { /* MOV #imm, Rn */
2333 uint32_t Rn = ((ir>>8)&0xF); int32_t imm = SIGNEXT8(ir&0xFF);
2334 load_imm32( R_EAX, imm );
2335 store_reg( R_EAX, Rn );
2336 }
2337 break;
2338 case 0xF:
2339 switch( ir&0xF ) {
2340 case 0x0:
2341 { /* FADD FRm, FRn */
2342 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2343 check_fpuen();
2344 load_spreg( R_ECX, R_FPSCR );
2345 TEST_imm32_r32( FPSCR_PR, R_ECX );
2346 load_fr_bank( R_EDX );
2347 JNE_rel8(13,doubleprec);
2348 push_fr(R_EDX, FRm);
2349 push_fr(R_EDX, FRn);
2350 FADDP_st(1);
2351 pop_fr(R_EDX, FRn);
2352 JMP_rel8(11,end);
2353 JMP_TARGET(doubleprec);
2354 push_dr(R_EDX, FRm);
2355 push_dr(R_EDX, FRn);
2356 FADDP_st(1);
2357 pop_dr(R_EDX, FRn);
2358 JMP_TARGET(end);
2359 }
2360 break;
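/* FADD and the FSUB/FMUL/FDIV cases that follow all use the same pattern:
 * test FPSCR.PR to choose single or double precision, point %edx at the
 * current FR bank, then evaluate on the x87 stack (push the operands, one
 * FxxxP, pop the result back into FRn). The literal JNE/JMP offsets are the
 * byte lengths of the skipped alternative and are verified by JMP_TARGET in
 * debug builds. Illustrative pseudo-C of what the emitted code computes
 * (FR(n)/DR(n) denote the single/double views of the register file and are
 * shorthands, not names from this file):
 *
 *     if( FPSCR & FPSCR_PR )  DR(FRn) = DR(FRn) + DR(FRm);
 *     else                    FR(FRn) = FR(FRn) + FR(FRm);
 */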
2361 case 0x1:
2362 { /* FSUB FRm, FRn */
2363 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2364 check_fpuen();
2365 load_spreg( R_ECX, R_FPSCR );
2366 TEST_imm32_r32( FPSCR_PR, R_ECX );
2367 load_fr_bank( R_EDX );
2368 JNE_rel8(13, doubleprec);
2369 push_fr(R_EDX, FRn);
2370 push_fr(R_EDX, FRm);
2371 FSUBP_st(1);
2372 pop_fr(R_EDX, FRn);
2373 JMP_rel8(11, end);
2374 JMP_TARGET(doubleprec);
2375 push_dr(R_EDX, FRn);
2376 push_dr(R_EDX, FRm);
2377 FSUBP_st(1);
2378 pop_dr(R_EDX, FRn);
2379 JMP_TARGET(end);
2380 }
2381 break;
2382 case 0x2:
2383 { /* FMUL FRm, FRn */
2384 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2385 check_fpuen();
2386 load_spreg( R_ECX, R_FPSCR );
2387 TEST_imm32_r32( FPSCR_PR, R_ECX );
2388 load_fr_bank( R_EDX );
2389 JNE_rel8(13, doubleprec);
2390 push_fr(R_EDX, FRm);
2391 push_fr(R_EDX, FRn);
2392 FMULP_st(1);
2393 pop_fr(R_EDX, FRn);
2394 JMP_rel8(11, end);
2395 JMP_TARGET(doubleprec);
2396 push_dr(R_EDX, FRm);
2397 push_dr(R_EDX, FRn);
2398 FMULP_st(1);
2399 pop_dr(R_EDX, FRn);
2400 JMP_TARGET(end);
2401 }
2402 break;
2403 case 0x3:
2404 { /* FDIV FRm, FRn */
2405 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2406 check_fpuen();
2407 load_spreg( R_ECX, R_FPSCR );
2408 TEST_imm32_r32( FPSCR_PR, R_ECX );
2409 load_fr_bank( R_EDX );
2410 JNE_rel8(13, doubleprec);
2411 push_fr(R_EDX, FRn);
2412 push_fr(R_EDX, FRm);
2413 FDIVP_st(1);
2414 pop_fr(R_EDX, FRn);
2415 JMP_rel8(11, end);
2416 JMP_TARGET(doubleprec);
2417 push_dr(R_EDX, FRn);
2418 push_dr(R_EDX, FRm);
2419 FDIVP_st(1);
2420 pop_dr(R_EDX, FRn);
2421 JMP_TARGET(end);
2422 }
2423 break;
2424 case 0x4:
2425 { /* FCMP/EQ FRm, FRn */
2426 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2427 check_fpuen();
2428 load_spreg( R_ECX, R_FPSCR );
2429 TEST_imm32_r32( FPSCR_PR, R_ECX );
2430 load_fr_bank( R_EDX );
2431 JNE_rel8(8, doubleprec);
2432 push_fr(R_EDX, FRm);
2433 push_fr(R_EDX, FRn);
2434 JMP_rel8(6, end);
2435 JMP_TARGET(doubleprec);
2436 push_dr(R_EDX, FRm);
2437 push_dr(R_EDX, FRn);
2438 JMP_TARGET(end);
2439 FCOMIP_st(1);
2440 SETE_t();
2441 FPOP_st();
2442 }
2443 break;
2444 case 0x5:
2445 { /* FCMP/GT FRm, FRn */
2446 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2447 check_fpuen();
2448 load_spreg( R_ECX, R_FPSCR );
2449 TEST_imm32_r32( FPSCR_PR, R_ECX );
2450 load_fr_bank( R_EDX );
2451 JNE_rel8(8, doubleprec);
2452 push_fr(R_EDX, FRm);
2453 push_fr(R_EDX, FRn);
2454 JMP_rel8(6, end);
2455 JMP_TARGET(doubleprec);
2456 push_dr(R_EDX, FRm);
2457 push_dr(R_EDX, FRn);
2458 JMP_TARGET(end);
2459 FCOMIP_st(1);
2460 SETA_t();
2461 FPOP_st();
2462 }
2463 break;
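/* The two FCMP cases map directly onto FCOMIP plus a SETcc into the T bit:
 * both operands are pushed (single or double precision, selected by
 * FPSCR.PR), FCOMIP sets the host flags from ST(0) versus ST(1), and
 * SETE_t/SETA_t store the result. Illustrative pseudo-C (FR(n) and T are
 * architectural shorthands):
 *
 *     T = (FR(FRn) == FR(FRm));   // FCMP/EQ
 *     T = (FR(FRn) >  FR(FRm));   // FCMP/GT
 */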
2464 case 0x6:
2465 { /* FMOV @(R0, Rm), FRn */
2466 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2467 check_fpuen();
2468 load_reg( R_EDX, Rm );
2469 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2470 check_ralign32( R_EDX );
2471 load_spreg( R_ECX, R_FPSCR );
2472 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2473 JNE_rel8(19, doublesize);
2474 MEM_READ_LONG( R_EDX, R_EAX );
2475 load_fr_bank( R_ECX );
2476 store_fr( R_ECX, R_EAX, FRn );
2477 if( FRn&1 ) {
2478 JMP_rel8(46, end);
2479 JMP_TARGET(doublesize);
2480 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2481 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2482 load_xf_bank( R_ECX );
2483 store_fr( R_ECX, R_EAX, FRn&0x0E );
2484 store_fr( R_ECX, R_EDX, FRn|0x01 );
2485 JMP_TARGET(end);
2486 } else {
2487 JMP_rel8(36, end);
2488 JMP_TARGET(doublesize);
2489 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2490 load_fr_bank( R_ECX );
2491 store_fr( R_ECX, R_EAX, FRn&0x0E );
2492 store_fr( R_ECX, R_EDX, FRn|0x01 );
2493 JMP_TARGET(end);
2494 }
2495 }
2496 break;
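/* The FMOV load/store forms here and below dispatch on FPSCR.SZ: with SZ=0 a
 * single 32-bit value is moved to or from FRn, with SZ=1 a 64-bit pair is
 * moved, and an odd register number selects the XF (back) bank rather than
 * the FR bank. Illustrative pseudo-C for this case (FR/XF are the two banks
 * viewed as arrays of 32-bit words; copy64 and mem_read_long are shorthands,
 * none of these are names from this file):
 *
 *     uint32_t addr = R[Rm] + R[0];
 *     if( !(FPSCR & FPSCR_SZ) ) {
 *         FR[FRn] = mem_read_long( addr );          // 32-bit move
 *     } else {
 *         uint32_t *bank = (FRn & 1) ? XF : FR;     // odd FRn selects XF
 *         copy64( &bank[FRn & 0x0E], addr );        // 64-bit pair move
 *     }
 */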
2497 case 0x7:
2498 { /* FMOV FRm, @(R0, Rn) */
2499 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2500 check_fpuen();
2501 load_reg( R_EDX, Rn );
2502 ADD_sh4r_r32( REG_OFFSET(r[0]), R_EDX );
2503 check_walign32( R_EDX );
2504 load_spreg( R_ECX, R_FPSCR );
2505 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2506 JNE_rel8(20, doublesize);
2507 load_fr_bank( R_ECX );
2508 load_fr( R_ECX, R_EAX, FRm );
2509 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2510 if( FRm&1 ) {
2511 JMP_rel8( 46, end );
2512 JMP_TARGET(doublesize);
2513 load_xf_bank( R_ECX );
2514 load_fr( R_ECX, R_EAX, FRm&0x0E );
2515 load_fr( R_ECX, R_ECX, FRm|0x01 );
2516 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2517 JMP_TARGET(end);
2518 } else {
2519 JMP_rel8( 39, end );
2520 JMP_TARGET(doublesize);
2521 load_fr_bank( R_ECX );
2522 load_fr( R_ECX, R_EAX, FRm&0x0E );
2523 load_fr( R_ECX, R_ECX, FRm|0x01 );
2524 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2525 JMP_TARGET(end);
2526 }
2527 }
2528 break;
2529 case 0x8:
2530 { /* FMOV @Rm, FRn */
2531 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2532 check_fpuen();
2533 load_reg( R_EDX, Rm );
2534 check_ralign32( R_EDX );
2535 load_spreg( R_ECX, R_FPSCR );
2536 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2537 JNE_rel8(19, doublesize);
2538 MEM_READ_LONG( R_EDX, R_EAX );
2539 load_fr_bank( R_ECX );
2540 store_fr( R_ECX, R_EAX, FRn );
2541 if( FRn&1 ) {
2542 JMP_rel8(46, end);
2543 JMP_TARGET(doublesize);
2544 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2545 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2546 load_xf_bank( R_ECX );
2547 store_fr( R_ECX, R_EAX, FRn&0x0E );
2548 store_fr( R_ECX, R_EDX, FRn|0x01 );
2549 JMP_TARGET(end);
2550 } else {
2551 JMP_rel8(36, end);
2552 JMP_TARGET(doublesize);
2553 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2554 load_fr_bank( R_ECX );
2555 store_fr( R_ECX, R_EAX, FRn&0x0E );
2556 store_fr( R_ECX, R_EDX, FRn|0x01 );
2557 JMP_TARGET(end);
2558 }
2559 }
2560 break;
2561 case 0x9:
2562 { /* FMOV @Rm+, FRn */
2563 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
2564 check_fpuen();
2565 load_reg( R_EDX, Rm );
2566 check_ralign32( R_EDX );
2567 MOV_r32_r32( R_EDX, R_EAX );
2568 load_spreg( R_ECX, R_FPSCR );
2569 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2570 JNE_rel8(25, doublesize);
2571 ADD_imm8s_r32( 4, R_EAX );
2572 store_reg( R_EAX, Rm );
2573 MEM_READ_LONG( R_EDX, R_EAX );
2574 load_fr_bank( R_ECX );
2575 store_fr( R_ECX, R_EAX, FRn );
2576 if( FRn&1 ) {
2577 JMP_rel8(52, end);
2578 JMP_TARGET(doublesize);
2579 ADD_imm8s_r32( 8, R_EAX );
2580 store_reg(R_EAX, Rm);
2581 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2582 load_spreg( R_ECX, R_FPSCR ); // assume read_long clobbered it
2583 load_xf_bank( R_ECX );
2584 store_fr( R_ECX, R_EAX, FRn&0x0E );
2585 store_fr( R_ECX, R_EDX, FRn|0x01 );
2586 JMP_TARGET(end);
2587 } else {
2588 JMP_rel8(42, end);
JMP_TARGET(doublesize);
2589 ADD_imm8s_r32( 8, R_EAX );
2590 store_reg(R_EAX, Rm);
2591 MEM_READ_DOUBLE( R_EDX, R_EAX, R_EDX );
2592 load_fr_bank( R_ECX );
2593 store_fr( R_ECX, R_EAX, FRn&0x0E );
2594 store_fr( R_ECX, R_EDX, FRn|0x01 );
2595 JMP_TARGET(end);
2596 }
2597 }
2598 break;
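/* The post-increment form performs the same load as FMOV @Rm, FRn and then
 * advances Rm by the transfer size; the emitted code writes the incremented
 * register back before calling the memory helper, keeping the original
 * address in %edx. Illustrative pseudo-C (load_single_or_pair is a
 * placeholder for the load described in the cases above):
 *
 *     uint32_t addr = R[Rm];
 *     R[Rm] += (FPSCR & FPSCR_SZ) ? 8 : 4;
 *     load_single_or_pair( FRn, addr );
 */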
2599 case 0xA:
2600 { /* FMOV FRm, @Rn */
2601 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2602 check_fpuen();
2603 load_reg( R_EDX, Rn );
2604 check_walign32( R_EDX );
2605 load_spreg( R_ECX, R_FPSCR );
2606 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2607 JNE_rel8(20, doublesize);
2608 load_fr_bank( R_ECX );
2609 load_fr( R_ECX, R_EAX, FRm );
2610 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2611 if( FRm&1 ) {
2612 JMP_rel8( 46, end );
2613 JMP_TARGET(doublesize);
2614 load_xf_bank( R_ECX );
2615 load_fr( R_ECX, R_EAX, FRm&0x0E );
2616 load_fr( R_ECX, R_ECX, FRm|0x01 );
2617 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2618 JMP_TARGET(end);
2619 } else {
2620 JMP_rel8( 39, end );
2621 JMP_TARGET(doublesize);
2622 load_fr_bank( R_ECX );
2623 load_fr( R_ECX, R_EAX, FRm&0x0E );
2624 load_fr( R_ECX, R_ECX, FRm|0x01 );
2625 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2626 JMP_TARGET(end);
2627 }
2628 }
2629 break;
2630 case 0xB:
2631 { /* FMOV FRm, @-Rn */
2632 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2633 check_fpuen();
2634 load_reg( R_EDX, Rn );
2635 check_walign32( R_EDX );
2636 load_spreg( R_ECX, R_FPSCR );
2637 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2638 JNE_rel8(20, doublesize);
2639 load_fr_bank( R_ECX );
2640 load_fr( R_ECX, R_EAX, FRm );
2641 ADD_imm8s_r32(-4,R_EDX);
2642 store_reg( R_EDX, Rn );
2643 MEM_WRITE_LONG( R_EDX, R_EAX ); // 12
2644 if( FRm&1 ) {
2645 JMP_rel8( 46, end );
2646 JMP_TARGET(doublesize);
2647 load_xf_bank( R_ECX );
2648 load_fr( R_ECX, R_EAX, FRm&0x0E );
2649 load_fr( R_ECX, R_ECX, FRm|0x01 );
2650 ADD_imm8s_r32(-8,R_EDX);
2651 store_reg( R_EDX, Rn );
2652 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2653 JMP_TARGET(end);
2654 } else {
2655 JMP_rel8( 39, end );
2656 JMP_TARGET(doublesize);
2657 load_fr_bank( R_ECX );
2658 load_fr( R_ECX, R_EAX, FRm&0x0E );
2659 load_fr( R_ECX, R_ECX, FRm|0x01 );
2660 ADD_imm8s_r32(-8,R_EDX);
2661 store_reg( R_EDX, Rn );
2662 MEM_WRITE_DOUBLE( R_EDX, R_EAX, R_ECX );
2663 JMP_TARGET(end);
2664 }
2665 }
2666 break;
2667 case 0xC:
2668 { /* FMOV FRm, FRn */
2669 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2670 /* As horrible as this looks, it's actually covering 5 separate cases:
2671 * 1. 32-bit fr-to-fr (SZ=0)
2672 * 2. 64-bit dr-to-dr (SZ=1, FRm&1 == 0, FRn&1 == 0 )
2673 * 3. 64-bit dr-to-xd (SZ=1, FRm&1 == 0, FRn&1 == 1 )
2674 * 4. 64-bit xd-to-dr (SZ=1, FRm&1 == 1, FRn&1 == 0 )
2675 * 5. 64-bit xd-to-xd (SZ=1, FRm&1 == 1, FRn&1 == 1 )
2676 */
2677 check_fpuen();
2678 load_spreg( R_ECX, R_FPSCR );
2679 load_fr_bank( R_EDX );
2680 TEST_imm32_r32( FPSCR_SZ, R_ECX );
2681 JNE_rel8(8, doublesize);
2682 load_fr( R_EDX, R_EAX, FRm ); // PR=0 branch
2683 store_fr( R_EDX, R_EAX, FRn );
2684 if( FRm&1 ) {
2685 JMP_rel8(22, end);
2686 JMP_TARGET(doublesize);
2687 load_xf_bank( R_ECX );
2688 load_fr( R_ECX, R_EAX, FRm-1 );
2689 if( FRn&1 ) {
2690 load_fr( R_ECX, R_EDX, FRm );
2691 store_fr( R_ECX, R_EAX, FRn-1 );
2692 store_fr( R_ECX, R_EDX, FRn );
2693 } else /* FRn&1 == 0 */ {
2694 load_fr( R_ECX, R_ECX, FRm );
2695 store_fr( R_EDX, R_EAX, FRn );
2696 store_fr( R_EDX, R_ECX, FRn+1 );
2697 }
2698 JMP_TARGET(end);
2699 } else /* FRm&1 == 0 */ {
2700 if( FRn&1 ) {
2701 JMP_rel8(22, end);
JMP_TARGET(doublesize);
2702 load_xf_bank( R_ECX );
2703 load_fr( R_EDX, R_EAX, FRm );
2704 load_fr( R_EDX, R_EDX, FRm+1 );
2705 store_fr( R_ECX, R_EAX, FRn-1 );
2706 store_fr( R_ECX, R_EDX, FRn );
2707 JMP_TARGET(end);
2708 } else /* FRn&1 == 0 */ {
2709 JMP_rel8(12, end);
JMP_TARGET(doublesize);
2710 load_fr( R_EDX, R_EAX, FRm );
2711 load_fr( R_EDX, R_ECX, FRm+1 );
2712 store_fr( R_EDX, R_EAX, FRn );
2713 store_fr( R_EDX, R_ECX, FRn+1 );
2714 JMP_TARGET(end);
2715 }
2716 }
2717 }
2718 break;
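/* A pseudo-C summary of the five cases enumerated in the comment above
 * (FR/XF are the front/back banks viewed as arrays of 32-bit words and are
 * shorthands, not names from this file):
 *
 *     if( !(FPSCR & FPSCR_SZ) ) {
 *         FR[FRn] = FR[FRm];                          // case 1
 *     } else {
 *         uint32_t *src = (FRm & 1) ? XF : FR;        // cases 2..5
 *         uint32_t *dst = (FRn & 1) ? XF : FR;
 *         dst[FRn & 0x0E] = src[FRm & 0x0E];
 *         dst[FRn | 0x01] = src[FRm | 0x01];
 *     }
 */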
2719 case 0xD:
2720 switch( (ir&0xF0) >> 4 ) {
2721 case 0x0:
2722 { /* FSTS FPUL, FRn */
2723 uint32_t FRn = ((ir>>8)&0xF);
2724 check_fpuen();
2725 load_fr_bank( R_ECX );
2726 load_spreg( R_EAX, R_FPUL );
2727 store_fr( R_ECX, R_EAX, FRn );
2728 }
2729 break;
2730 case 0x1:
2731 { /* FLDS FRm, FPUL */
2732 uint32_t FRm = ((ir>>8)&0xF);
2733 check_fpuen();
2734 load_fr_bank( R_ECX );
2735 load_fr( R_ECX, R_EAX, FRm );
2736 store_spreg( R_EAX, R_FPUL );
2737 }
2738 break;
2739 case 0x2:
2740 { /* FLOAT FPUL, FRn */
2741 uint32_t FRn = ((ir>>8)&0xF);
2742 check_fpuen();
2743 load_spreg( R_ECX, R_FPSCR );
2744 load_spreg(R_EDX, REG_OFFSET(fr_bank));
2745 FILD_sh4r(R_FPUL);
2746 TEST_imm32_r32( FPSCR_PR, R_ECX );
2747 JNE_rel8(5, doubleprec);
2748 pop_fr( R_EDX, FRn );
2749 JMP_rel8(3, end);
2750 JMP_TARGET(doubleprec);
2751 pop_dr( R_EDX, FRn );
2752 JMP_TARGET(end);
2753 }
2754 break;
2755 case 0x3:
2756 { /* FTRC FRm, FPUL */
2757 uint32_t FRm = ((ir>>8)&0xF);
2758 check_fpuen();
2759 // TODO
2760 }
2761 break;
2762 case 0x4:
2763 { /* FNEG FRn */
2764 uint32_t FRn = ((ir>>8)&0xF);
2765 check_fpuen();
2766 load_spreg( R_ECX, R_FPSCR );
2767 TEST_imm32_r32( FPSCR_PR, R_ECX );
2768 load_fr_bank( R_EDX );
2769 JNE_rel8(10, doubleprec);
2770 push_fr(R_EDX, FRn);
2771 FCHS_st0();
2772 pop_fr(R_EDX, FRn);
2773 JMP_rel8(8, end);
2774 JMP_TARGET(doubleprec);
2775 push_dr(R_EDX, FRn);
2776 FCHS_st0();
2777 pop_dr(R_EDX, FRn);
2778 JMP_TARGET(end);
2779 }
2780 break;
2781 case 0x5:
2782 { /* FABS FRn */
2783 uint32_t FRn = ((ir>>8)&0xF);
2784 check_fpuen();
2785 load_spreg( R_ECX, R_FPSCR );
2786 load_fr_bank( R_EDX );
2787 TEST_imm32_r32( FPSCR_PR, R_ECX );
2788 JNE_rel8(10, doubleprec);
2789 push_fr(R_EDX, FRn); // 3
2790 FABS_st0(); // 2
2791 pop_fr( R_EDX, FRn); //3
2792 JMP_rel8(8,end); // 2
2793 JMP_TARGET(doubleprec);
2794 push_dr(R_EDX, FRn);
2795 FABS_st0();
2796 pop_dr(R_EDX, FRn);
2797 JMP_TARGET(end);
2798 }
2799 break;
2800 case 0x6:
2801 { /* FSQRT FRn */
2802 uint32_t FRn = ((ir>>8)&0xF);
2803 check_fpuen();
2804 load_spreg( R_ECX, R_FPSCR );
2805 TEST_imm32_r32( FPSCR_PR, R_ECX );
2806 load_fr_bank( R_EDX );
2807 JNE_rel8(10, doubleprec);
2808 push_fr(R_EDX, FRn);
2809 FSQRT_st0();
2810 pop_fr(R_EDX, FRn);
2811 JMP_rel8(8, end);
2812 JMP_TARGET(doubleprec);
2813 push_dr(R_EDX, FRn);
2814 FSQRT_st0();
2815 pop_dr(R_EDX, FRn);
2816 JMP_TARGET(end);
2817 }
2818 break;
2819 case 0x7:
2820 { /* FSRRA FRn */
2821 uint32_t FRn = ((ir>>8)&0xF);
2822 check_fpuen();
2823 load_spreg( R_ECX, R_FPSCR );
2824 TEST_imm32_r32( FPSCR_PR, R_ECX );
2825 load_fr_bank( R_EDX );
2826 JNE_rel8(12, end); // PR=0 only
2827 FLD1_st0();
2828 push_fr(R_EDX, FRn);
2829 FSQRT_st0();
2830 FDIVP_st(1);
2831 pop_fr(R_EDX, FRn);
2832 JMP_TARGET(end);
2833 }
2834 break;
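/* FSRRA is the approximate reciprocal square root and is only defined for
 * single precision, hence the early JNE to the end label when FPSCR.PR is
 * set. The x87 sequence (FLD1; FLD FRn; FSQRT; FDIVP) computes,
 * illustratively:
 *
 *     FR(FRn) = 1.0f / sqrtf( FR(FRn) );
 */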
2835 case 0x8:
2836 { /* FLDI0 FRn */
2837 uint32_t FRn = ((ir>>8)&0xF);
2838 /* IFF PR=0 */
2839 check_fpuen();
2840 load_spreg( R_ECX, R_FPSCR );
2841 TEST_imm32_r32( FPSCR_PR, R_ECX );
2842 JNE_rel8(8, end);
2843 XOR_r32_r32( R_EAX, R_EAX );
2844 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2845 store_fr( R_ECX, R_EAX, FRn );
2846 JMP_TARGET(end);
2847 }
2848 break;
2849 case 0x9:
2850 { /* FLDI1 FRn */
2851 uint32_t FRn = ((ir>>8)&0xF);
2852 /* IFF PR=0 */
2853 check_fpuen();
2854 load_spreg( R_ECX, R_FPSCR );
2855 TEST_imm32_r32( FPSCR_PR, R_ECX );
2856 JNE_rel8(11, end);
2857 load_imm32(R_EAX, 0x3F800000);
2858 load_spreg( R_ECX, REG_OFFSET(fr_bank) );
2859 store_fr( R_ECX, R_EAX, FRn );
2860 JMP_TARGET(end);
2861 }
2862 break;
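/* FLDI0 and FLDI1 simply store the IEEE-754 single-precision bit patterns
 * for 0.0f and 1.0f into FRn (defined for PR=0 only). Illustratively:
 *
 *     union { uint32_t i; float f; } one = { 0x3F800000 };   // one.f == 1.0f
 */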
2863 case 0xA:
2864 { /* FCNVSD FPUL, FRn */
2865 uint32_t FRn = ((ir>>8)&0xF);
2866 check_fpuen();
2868 load_spreg( R_ECX, R_FPSCR );
2869 TEST_imm32_r32( FPSCR_PR, R_ECX );
2870 JE_rel8(9, end); // only when PR=1
2871 load_fr_bank( R_ECX );
2872 push_fpul();
2873 pop_dr( R_ECX, FRn );
2874 JMP_TARGET(end);
2875 }
2876 break;
2877 case 0xB:
2878 { /* FCNVDS FRm, FPUL */
2879 uint32_t FRm = ((ir>>8)&0xF);
2880 check_fpuen();
2881 load_spreg( R_ECX, R_FPSCR );
2882 TEST_imm32_r32( FPSCR_PR, R_ECX );
2883 JE_rel8(9, end); // only when PR=1
2884 load_fr_bank( R_ECX );
2885 push_dr( R_ECX, FRm );
2886 pop_fpul();
2887 JMP_TARGET(end);
2888 }
2889 break;
2890 case 0xE:
2891 { /* FIPR FVm, FVn */
2892 uint32_t FVn = ((ir>>10)&0x3); uint32_t FVm = ((ir>>8)&0x3);
2893 check_fpuen(); // TODO: FIPR not yet implemented
2894 }
2895 break;
2896 case 0xF:
2897 switch( (ir&0x100) >> 8 ) {
2898 case 0x0:
2899 { /* FSCA FPUL, FRn */
2900 uint32_t FRn = ((ir>>9)&0x7)<<1;
2901 check_fpuen(); // TODO: FSCA not yet implemented
2902 }
2903 break;
2904 case 0x1:
2905 switch( (ir&0x200) >> 9 ) {
2906 case 0x0:
2907 { /* FTRV XMTRX, FVn */
2908 uint32_t FVn = ((ir>>10)&0x3);
2909 check_fpuen(); // TODO: FTRV not yet implemented
2910 }
2911 break;
2912 case 0x1:
2913 switch( (ir&0xC00) >> 10 ) {
2914 case 0x0:
2915 { /* FSCHG */
2916 check_fpuen();
2917 load_spreg( R_ECX, R_FPSCR );
2918 XOR_imm32_r32( FPSCR_SZ, R_ECX );
2919 store_spreg( R_ECX, R_FPSCR );
2920 }
2921 break;
2922 case 0x2:
2923 { /* FRCHG */
2924 check_fpuen();
2925 load_spreg( R_ECX, R_FPSCR );
2926 XOR_imm32_r32( FPSCR_FR, R_ECX );
2927 store_spreg( R_ECX, R_FPSCR );
2928 }
2929 break;
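/* FSCHG and FRCHG are pure FPSCR bit toggles: flipping SZ switches FMOV
 * between 32-bit and 64-bit transfers, flipping FR exchanges the FR/XF
 * register banks. Illustrative pseudo-C:
 *
 *     FPSCR ^= FPSCR_SZ;   // FSCHG
 *     FPSCR ^= FPSCR_FR;   // FRCHG
 */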
2930 case 0x3:
2931 { /* UNDEF */
2932 if( sh4_x86.in_delay_slot ) {
2933 RAISE_EXCEPTION(EXC_SLOT_ILLEGAL);
2934 } else {
2935 RAISE_EXCEPTION(EXC_ILLEGAL);
2936 }
2937 return 1;
2938 }
2939 break;
2940 default:
2941 UNDEF();
2942 break;
2943 }
2944 break;
2945 }
2946 break;
2947 }
2948 break;
2949 default:
2950 UNDEF();
2951 break;
2952 }
2953 break;
2954 case 0xE:
2955 { /* FMAC FR0, FRm, FRn */
2956 uint32_t FRn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
2957 check_fpuen();
2958 load_spreg( R_ECX, R_FPSCR );
2959 load_spreg( R_EDX, REG_OFFSET(fr_bank));
2960 TEST_imm32_r32( FPSCR_PR, R_ECX );
2961 JNE_rel8(18, doubleprec);
2962 push_fr( R_EDX, 0 );
2963 push_fr( R_EDX, FRm );
2964 FMULP_st(1);
2965 push_fr( R_EDX, FRn );
2966 FADDP_st(1);
2967 pop_fr( R_EDX, FRn );
2968 JMP_rel8(16, end);
2969 JMP_TARGET(doubleprec);
2970 push_dr( R_EDX, 0 );
2971 push_dr( R_EDX, FRm );
2972 FMULP_st(1);
2973 push_dr( R_EDX, FRn );
2974 FADDP_st(1);
2975 pop_dr( R_EDX, FRn );
2976 JMP_TARGET(end);
2977 }
2978 break;
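/* FMAC is a multiply-accumulate evaluated on the x87 stack: FR0*FRm is
 * formed first, FRn is added, and the sum is written back (double precision
 * when FPSCR.PR is set). Illustrative pseudo-C (FR(n) is an architectural
 * shorthand):
 *
 *     FR(FRn) = FR(FRn) + FR(0) * FR(FRm);
 */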
2979 default:
2980 UNDEF();
2981 break;
2982 }
2983 break;
2984 }
2986 INC_r32(R_ESI);
2987 if( sh4_x86.in_delay_slot ) {
2988 sh4_x86.in_delay_slot = FALSE;
2989 return 1;
2990 }
2991 return 0;
2992 }