revision 937:81b0c79d9788
summary |
tree |
shortlog |
changelog |
graph |
changeset |
raw | bz2 | zip | gz changeset | 937:81b0c79d9788 |
parent | 936:f394309c399a |
child | 938:e377bd827c54 |
author | nkeynes |
date | Sat Dec 27 03:14:59 2008 +0000 (15 years ago) |
branch | lxdream-mem |
Update sh4x86 to take advantage of SR assumptions. nice 2% there :)
src/sh4/sh4.c | view | annotate | diff | log | ||
src/sh4/sh4x86.in | view | annotate | diff | log |
--- a/src/sh4/sh4.c	Sat Dec 27 02:59:35 2008 +0000
+++ b/src/sh4/sh4.c	Sat Dec 27 03:14:59 2008 +0000
@@ -108,7 +108,7 @@
     sh4r.new_pc= 0xA0000002;
     sh4r.vbr = 0x00000000;
     sh4r.fpscr = 0x00040001;
-    sh4r.sr = 0x700000F0;
+    sh4_write_sr(0x700000F0);
 
     /* Mem reset will do this, but if we want to reset _just_ the SH4... */
     MMIO_WRITE( MMU, EXPEVT, EXC_POWER_RESET );
--- a/src/sh4/sh4x86.in	Sat Dec 27 02:59:35 2008 +0000
+++ b/src/sh4/sh4x86.in	Sat Dec 27 03:14:59 2008 +0000
@@ -53,7 +53,6 @@
  */
 struct sh4_x86_state {
     int in_delay_slot;
-    gboolean priv_checked; /* true if we've already checked the cpu mode. */
     gboolean fpuen_checked; /* true if we've already checked fpu enabled. */
     gboolean branch_taken; /* true if we branched unconditionally */
     gboolean double_prec; /* true if FPU is in double-precision mode */
@@ -238,17 +237,15 @@
 /* Exception checks - Note that all exception checks will clobber EAX */
 
 #define check_priv( ) \
-    if( !sh4_x86.priv_checked ) { \
-        sh4_x86.priv_checked = TRUE;\
-        load_spreg( R_EAX, R_SR );\
-        AND_imm32_r32( SR_MD, R_EAX );\
-        if( sh4_x86.in_delay_slot ) {\
-            JE_exc( EXC_SLOT_ILLEGAL );\
-        } else {\
-            JE_exc( EXC_ILLEGAL );\
-        }\
-        sh4_x86.tstate = TSTATE_NONE; \
-    }\
+    if( (sh4r.xlat_sh4_mode & SR_MD) == 0 ) { \
+        if( sh4_x86.in_delay_slot ) { \
+            JMP_exc(EXC_SLOT_ILLEGAL); \
+        } else { \
+            JMP_exc(EXC_ILLEGAL ); \
+        } \
+        sh4_x86.in_delay_slot = DELAY_NONE; \
+        return 2; \
+    }
 
 #define check_fpuen( ) \
     if( !sh4_x86.fpuen_checked ) {\
@@ -314,7 +311,7 @@
 #define MMU_TRANSLATE_WRITE( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
 #endif
 
-#define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = DELAY_NONE; return 1;
+#define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = DELAY_NONE; return 2;
 
 /****** Import appropriate calling conventions ******/
 #if SIZEOF_VOID_P == 8
@@ -327,7 +324,6 @@
 {
     enter_block();
     sh4_x86.in_delay_slot = FALSE;
-    sh4_x86.priv_checked = FALSE;
     sh4_x86.fpuen_checked = FALSE;
     sh4_x86.branch_taken = FALSE;
     sh4_x86.backpatch_posn = 0;
@@ -1752,7 +1748,6 @@
         load_spreg( R_EAX, R_SSR );
         call_func1( sh4_write_sr, R_EAX );
         sh4_x86.in_delay_slot = DELAY_PC;
-        sh4_x86.priv_checked = FALSE;
         sh4_x86.fpuen_checked = FALSE;
         sh4_x86.tstate = TSTATE_NONE;
         sh4_x86.branch_taken = TRUE;
@@ -2323,9 +2318,9 @@
         check_priv();
         load_reg( R_EAX, Rm );
         call_func1( sh4_write_sr, R_EAX );
-        sh4_x86.priv_checked = FALSE;
         sh4_x86.fpuen_checked = FALSE;
         sh4_x86.tstate = TSTATE_NONE;
+        return 2;
     }
 :}
 LDC Rm, GBR {:
@@ -2397,9 +2392,9 @@
         ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
         MEM_READ_LONG( R_EAX, R_EAX );
         call_func1( sh4_write_sr, R_EAX );
-        sh4_x86.priv_checked = FALSE;
         sh4_x86.fpuen_checked = FALSE;
         sh4_x86.tstate = TSTATE_NONE;
+        return 2;
     }
 :}
 LDC.L @Rm+, VBR {:
.