1.1 --- a/src/sh4/sh4x86.in Thu Jan 17 21:26:58 2008 +0000
1.2 +++ b/src/sh4/sh4x86.in Tue Jan 22 10:06:41 2008 +0000
1.4 struct backpatch_record {
1.5 uint32_t *fixup_addr;
1.6 uint32_t fixup_icount;
1.11 #define MAX_RECOVERY_SIZE 2048
1.13 struct backpatch_record *backpatch_list;
1.14 uint32_t backpatch_posn;
1.15 uint32_t backpatch_size;
1.16 - struct xlat_recovery_record recovery_list[MAX_RECOVERY_SIZE];
1.17 - uint32_t recovery_posn;
1.20 #define TSTATE_NONE -1
1.21 @@ -123,13 +121,6 @@
1.22 sh4_x86.backpatch_posn++;
1.25 -void sh4_x86_add_recovery( uint32_t pc )
1.27 - xlat_recovery[xlat_recovery_posn].xlat_pc = (uintptr_t)xlat_output;
1.28 - xlat_recovery[xlat_recovery_posn].sh4_icount = (pc - sh4_x86.block_start_pc)>>1;
1.29 - xlat_recovery_posn++;
1.33 * Emit an instruction to load an SH4 reg into a real register
1.36 * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
1.38 #define MMU_TRANSLATE_READ( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
1.40 +#define MMU_TRANSLATE_READ_EXC( addr_reg, exc_code ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(exc_code); MEM_RESULT(addr_reg); }
1.42 * Perform MMU translation on the address in addr_reg for a write operation, iff the TLB is turned
1.43 * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
1.44 @@ -361,7 +354,11 @@
1.46 uint32_t sh4_translate_end_block_size()
1.48 - return EPILOGUE_SIZE + (sh4_x86.backpatch_posn*12);
1.49 + if( sh4_x86.backpatch_posn <= 3 ) {
1.50 + return EPILOGUE_SIZE + (sh4_x86.backpatch_posn*12);
1.52 + return EPILOGUE_SIZE + 48 + (sh4_x86.backpatch_posn-3)*15;
1.60 if( !sh4_x86.in_delay_slot ) {
1.61 - sh4_x86_add_recovery(pc);
1.62 + sh4_translate_add_recovery( (pc - sh4_x86.block_start_pc)>>1 );
1.65 /* ALU operations */
1.67 PUSH_realigned_r32( R_EAX );
1.68 load_reg( R_EAX, Rn );
1.69 ADD_imm8s_r32( 4, R_EAX );
1.70 - MMU_TRANSLATE_READ( R_EAX );
1.71 + MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
1.72 ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rn]) );
1.73 // Note translate twice in case of page boundaries. Maybe worth
1.74 // adding a page-boundary check to skip the second translation
1.75 @@ -662,10 +659,11 @@
1.76 load_reg( R_EAX, Rm );
1.77 check_ralign32( R_EAX );
1.78 MMU_TRANSLATE_READ( R_EAX );
1.79 + load_reg( R_ECX, Rn );
1.80 + check_ralign32( R_ECX );
1.81 PUSH_realigned_r32( R_EAX );
1.82 - load_reg( R_EAX, Rn );
1.83 - check_ralign32( R_EAX );
1.84 - MMU_TRANSLATE_READ( R_EAX );
1.85 + MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
1.86 + MOV_r32_r32( R_ECX, R_EAX );
1.87 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
1.88 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.91 PUSH_realigned_r32( R_EAX );
1.92 load_reg( R_EAX, Rn );
1.93 ADD_imm8s_r32( 2, R_EAX );
1.94 - MMU_TRANSLATE_READ( R_EAX );
1.95 + MMU_TRANSLATE_READ_EXC( R_EAX, -5 );
1.96 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
1.97 // Note translate twice in case of page boundaries. Maybe worth
1.98 // adding a page-boundary check to skip the second translation
1.99 @@ -702,10 +700,11 @@
1.100 load_reg( R_EAX, Rm );
1.101 check_ralign16( R_EAX );
1.102 MMU_TRANSLATE_READ( R_EAX );
1.103 + load_reg( R_ECX, Rn );
1.104 + check_ralign16( R_ECX );
1.105 PUSH_realigned_r32( R_EAX );
1.106 - load_reg( R_EAX, Rn );
1.107 - check_ralign16( R_EAX );
1.108 - MMU_TRANSLATE_READ( R_EAX );
1.109 + MMU_TRANSLATE_READ_EXC( R_ECX, -5 );
1.110 + MOV_r32_r32( R_ECX, R_EAX );
1.111 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
1.112 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );