lxdream.org :: lxdream/src/sh4/sh4x86.in :: diff
filename src/sh4/sh4x86.in
changeset 571:9bc09948d0f2
prev 570:d2893980fbf5
next 577:a181aeacd6e8
author nkeynes
date Thu Jan 10 08:28:37 2008 +0000
branch lxdream-mmu
permissions -rw-r--r--
last change More MMU work in progress. Much better now...
1.1 --- a/src/sh4/sh4x86.in Sun Jan 06 12:24:18 2008 +0000
1.2 +++ b/src/sh4/sh4x86.in Thu Jan 10 08:28:37 2008 +0000
1.3 @@ -40,6 +40,8 @@
1.4 uint32_t exc_code;
1.5 };
1.6
1.7 +#define MAX_RECOVERY_SIZE 2048
1.8 +
1.9 /**
1.10 * Struct to manage internal translation state. This state is not saved -
1.11 * it is only valid between calls to sh4_translate_begin_block() and
1.12 @@ -61,6 +63,8 @@
1.13 struct backpatch_record *backpatch_list;
1.14 uint32_t backpatch_posn;
1.15 uint32_t backpatch_size;
1.16 + struct xlat_recovery_record recovery_list[MAX_RECOVERY_SIZE];
1.17 + uint32_t recovery_posn;
1.18 };
1.19
1.20 #define TSTATE_NONE -1
1.21 @@ -115,6 +119,13 @@
1.22 sh4_x86.backpatch_posn++;
1.23 }
1.24
1.25 +void sh4_x86_add_recovery( uint32_t pc )
1.26 +{
1.27 + xlat_recovery[xlat_recovery_posn].xlat_pc = (uintptr_t)xlat_output;
1.28 + xlat_recovery[xlat_recovery_posn].sh4_icount = (pc - sh4_x86.block_start_pc)>>1;
1.29 + xlat_recovery_posn++;
1.30 +}
1.31 +
1.32 /**
1.33 * Emit an instruction to load an SH4 reg into a real register
1.34 */
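
The sh4_x86_add_recovery() hook added above records, for each SH4 instruction translated outside a delay slot, the current native output pointer (xlat_pc) and the instruction's offset from the block start (sh4_icount). A minimal C sketch of how a table of these records can be searched to map a faulting native address back to an SH4 PC; the struct layout is the one implied by the assignments above, while recover_sh4_pc() is a hypothetical helper, not code from this changeset:

    #include <stdint.h>

    struct xlat_recovery_record {
        uintptr_t xlat_pc;     /* native address where the instruction's emitted code begins */
        uint32_t  sh4_icount;  /* (sh4_pc - block_start_pc) >> 1, i.e. instruction index in the block */
    };

    /* Hypothetical lookup: find the last record emitted at or before the faulting
     * native address and rebuild the SH4 PC from the block's start address. */
    static uint32_t recover_sh4_pc( const struct xlat_recovery_record *table, uint32_t count,
                                    uintptr_t native_pc, uint32_t block_start_pc )
    {
        uint32_t i, icount = 0;
        for( i = 0; i < count && table[i].xlat_pc <= native_pc; i++ ) {
            icount = table[i].sh4_icount;
        }
        return block_start_pc + (icount << 1);   /* SH4 instructions are 2 bytes each */
    }
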
1.35 @@ -309,34 +320,27 @@
1.36
1.37 #define UNDEF()
1.38 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
1.39 -#define MEM_READ_BYTE_PHYS( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
1.40 -#define MEM_READ_WORD_PHYS( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
1.41 -#define MEM_READ_LONG_PHYS( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
1.42 -#define MEM_WRITE_BYTE_PHYS( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
1.43 -#define MEM_WRITE_WORD_PHYS( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
1.44 -#define MEM_WRITE_LONG_PHYS( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
1.45 +#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
1.46 +#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
1.47 +#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
1.48 +#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
1.49 +#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
1.50 +#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
1.51
1.52 -#define MEM_READ_BYTE_VMA( addr_reg, value_reg ) call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); call_func1(sh4_read_byte, R_EAX); MEM_RESULT(value_reg)
1.53 -#define MEM_READ_WORD_VMA( addr_reg, value_reg ) call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); call_func1(sh4_read_word, R_EAX); MEM_RESULT(value_reg)
1.54 -#define MEM_READ_LONG_VMA( addr_reg, value_reg ) call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); call_func1(sh4_read_long, R_EAX); MEM_RESULT(value_reg)
1.55 -#define MEM_WRITE_BYTE_VMA( addr_reg, value_reg ) call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); call_func2(sh4_write_byte, R_EAX, value_reg)
1.56 -#define MEM_WRITE_WORD_VMA( addr_reg, value_reg ) call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); call_func2(sh4_write_word, R_EAX, value_reg)
1.57 -#define MEM_WRITE_LONG_VMA( addr_reg, value_reg ) call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); call_func2(sh4_write_long, R_EAX, value_reg)
1.58 +/**
1.59 + * Perform MMU translation on the address in addr_reg for a read operation, iff the TLB is turned
1.60 + * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
1.61 + */
1.62 +#define MMU_TRANSLATE_READ( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_read, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
1.63 +/**
1.64 + * Perform MMU translation on the address in addr_reg for a write operation, iff the TLB is turned
1.65 + * on, otherwise do nothing. Clobbers EAX, ECX and EDX. May raise a TLB exception or address error.
1.66 + */
1.67 +#define MMU_TRANSLATE_WRITE( addr_reg ) if( sh4_x86.tlb_on ) { call_func1(mmu_vma_to_phys_write, addr_reg); CMP_imm32_r32(MMU_VMA_ERROR, R_EAX); JE_exc(-1); MEM_RESULT(addr_reg); }
1.68
1.69 -#define MEM_READ_BYTE( addr_reg, value_reg ) if(sh4_x86.tlb_on){MEM_READ_BYTE_VMA(addr_reg,value_reg);}else{MEM_READ_BYTE_PHYS(addr_reg, value_reg);}
1.70 -#define MEM_READ_WORD( addr_reg, value_reg ) if(sh4_x86.tlb_on){MEM_READ_WORD_VMA(addr_reg,value_reg);}else{MEM_READ_WORD_PHYS(addr_reg, value_reg);}
1.71 -#define MEM_READ_LONG( addr_reg, value_reg ) if(sh4_x86.tlb_on){MEM_READ_LONG_VMA(addr_reg,value_reg);}else{MEM_READ_LONG_PHYS(addr_reg, value_reg);}
1.72 -#define MEM_WRITE_BYTE( addr_reg, value_reg ) if(sh4_x86.tlb_on){MEM_WRITE_BYTE_VMA(addr_reg,value_reg);}else{MEM_WRITE_BYTE_PHYS(addr_reg, value_reg);}
1.73 -#define MEM_WRITE_WORD( addr_reg, value_reg ) if(sh4_x86.tlb_on){MEM_WRITE_WORD_VMA(addr_reg,value_reg);}else{MEM_WRITE_WORD_PHYS(addr_reg, value_reg);}
1.74 -#define MEM_WRITE_LONG( addr_reg, value_reg ) if(sh4_x86.tlb_on){MEM_WRITE_LONG_VMA(addr_reg,value_reg);}else{MEM_WRITE_LONG_PHYS(addr_reg, value_reg);}
1.75 -
1.76 -#define MEM_READ_SIZE_PHYS (CALL_FUNC1_SIZE)
1.77 -#define MEM_WRITE_SIZE_PHYS (CALL_FUNC2_SIZE)
1.78 -#define MEM_READ_SIZE_VMA (CALL_FUNC1_SIZE + CALL_FUNC1_SIZE + 12)
1.79 -#define MEM_WRITE_SIZE_VMA (CALL_FUNC1_SIZE + CALL_FUNC2_SIZE + 12)
1.80 -
1.81 -#define MEM_READ_SIZE (sh4_x86.tlb_on?MEM_READ_SIZE_VMA:MEM_READ_SIZE_PHYS)
1.82 -#define MEM_WRITE_SIZE (sh4_x86.tlb_on?MEM_WRITE_SIZE_VMA:MEM_WRITE_SIZE_PHYS)
1.83 +#define MEM_READ_SIZE (CALL_FUNC1_SIZE)
1.84 +#define MEM_WRITE_SIZE (CALL_FUNC2_SIZE)
1.85 +#define MMU_TRANSLATE_SIZE (sh4_x86.tlb_on ? (CALL_FUNC1_SIZE + 12) : 0 )
1.86
1.87 #define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
1.88
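
The MMU_TRANSLATE_READ/WRITE macros above emit a translate-then-check sequence in front of each memory access whenever the TLB is enabled. The runtime behaviour they implement, written out as plain C for clarity (the extern declarations are simplified, sh4_raise_tlb_exception() is a hypothetical stand-in for the backpatched JE_exc(-1) exit, and the MMU_VMA_ERROR value is a placeholder):

    #include <stdint.h>

    #define MMU_VMA_ERROR 0xFFFFFFFF                     /* placeholder; the real value comes from the MMU headers */

    extern uint32_t mmu_vma_to_phys_read( uint32_t vma ); /* TLB lookup called by the macro */
    extern int32_t  sh4_read_long( uint32_t addr );       /* physical read helper called afterwards */
    extern void     sh4_raise_tlb_exception( void );      /* hypothetical stand-in for JE_exc(-1) */

    /* Equivalent of MMU_TRANSLATE_READ followed by MEM_READ_LONG for one long read. */
    static int32_t emulated_read_long( uint32_t vma, int tlb_on )
    {
        uint32_t addr = vma;
        if( tlb_on ) {                                   /* MMU_TRANSLATE_READ is a no-op when the TLB is off */
            addr = mmu_vma_to_phys_read( vma );
            if( addr == MMU_VMA_ERROR ) {
                sh4_raise_tlb_exception();               /* CMP_imm32_r32 + JE_exc(-1) in the emitted code */
                return 0;
            }
        }
        return sh4_read_long( addr );                    /* call_func1(sh4_read_long, ...) */
    }
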
1.89 @@ -369,6 +373,9 @@
1.90 } else {
1.91 ir = sh4_read_word(pc);
1.92 }
1.93 + if( !sh4_x86.in_delay_slot ) {
1.94 + sh4_x86_add_recovery(pc);
1.95 + }
1.96 %%
1.97 /* ALU operations */
1.98 ADD Rm, Rn {:
1.99 @@ -419,9 +426,10 @@
1.100 AND.B #imm, @(R0, GBR) {:
1.101 load_reg( R_EAX, 0 );
1.102 load_spreg( R_ECX, R_GBR );
1.103 - ADD_r32_r32( R_EAX, R_ECX );
1.104 - PUSH_realigned_r32(R_ECX);
1.105 - MEM_READ_BYTE( R_ECX, R_EAX );
1.106 + ADD_r32_r32( R_ECX, R_EAX );
1.107 + MMU_TRANSLATE_WRITE( R_EAX );
1.108 + PUSH_realigned_r32(R_EAX);
1.109 + MEM_READ_BYTE( R_EAX, R_EAX );
1.110 POP_realigned_r32(R_ECX);
1.111 AND_imm32_r32(imm, R_EAX );
1.112 MEM_WRITE_BYTE( R_ECX, R_EAX );
1.113 @@ -584,18 +592,35 @@
1.114 MOVZX_r16_r32( R_EAX, R_EAX );
1.115 store_reg( R_EAX, Rn );
1.116 :}
1.117 -MAC.L @Rm+, @Rn+ {:
1.118 - load_reg( R_ECX, Rm );
1.119 - check_ralign32( R_ECX );
1.120 - load_reg( R_ECX, Rn );
1.121 - check_ralign32( R_ECX );
1.122 - ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
1.123 - MEM_READ_LONG( R_ECX, R_EAX );
1.124 - PUSH_realigned_r32( R_EAX );
1.125 - load_reg( R_ECX, Rm );
1.126 - ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.127 +MAC.L @Rm+, @Rn+ {:
1.128 + if( Rm == Rn ) {
1.129 + load_reg( R_EAX, Rm );
1.130 + check_ralign32( R_EAX );
1.131 + MMU_TRANSLATE_READ( R_EAX );
1.132 + PUSH_realigned_r32( R_EAX );
1.133 + load_reg( R_EAX, Rn );
1.134 + ADD_imm8s_r32( 4, R_EAX );
1.135 + MMU_TRANSLATE_READ( R_EAX );
1.136 + ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rn]) );
1.137 + // Note translate twice in case of page boundaries. Maybe worth
1.138 + // adding a page-boundary check to skip the second translation
1.139 + } else {
1.140 + load_reg( R_EAX, Rm );
1.141 + check_ralign32( R_EAX );
1.142 + MMU_TRANSLATE_READ( R_EAX );
1.143 + PUSH_realigned_r32( R_EAX );
1.144 + load_reg( R_EAX, Rn );
1.145 + check_ralign32( R_EAX );
1.146 + MMU_TRANSLATE_READ( R_EAX );
1.147 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
1.148 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.149 + }
1.150 + MEM_READ_LONG( R_EAX, R_EAX );
1.151 + POP_r32( R_ECX );
1.152 + PUSH_r32( R_EAX );
1.153 MEM_READ_LONG( R_ECX, R_EAX );
1.154 POP_realigned_r32( R_ECX );
1.155 +
1.156 IMUL_r32( R_ECX );
1.157 ADD_r32_sh4r( R_EAX, R_MACL );
1.158 ADC_r32_sh4r( R_EDX, R_MACH );
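
The note in the MAC.L rewrite above (repeated in the MAC.W rewrite below) translates both operand addresses separately because the second access may land on a different page. The suggested short-cut, checking whether both accesses fall within one page so the second MMU lookup can be skipped, would look roughly like this in C; illustrative only, and the mask assumes the smallest SH4 TLB page size (1KB), which keeps the check conservative:

    #include <stdint.h>

    #define SH4_MIN_PAGE_MASK 0xFFFFFC00u   /* assumption: 1KB, the smallest SH4 TLB page size */

    /* Hypothetical helper for the optimisation suggested in the comment above:
     * if both addresses share a (minimum-size) page, the first translation's
     * result could be reused for the second access. */
    static int same_tlb_page( uint32_t addr_a, uint32_t addr_b )
    {
        return (addr_a & SH4_MIN_PAGE_MASK) == (addr_b & SH4_MIN_PAGE_MASK);
    }
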
1.159 @@ -608,15 +633,31 @@
1.160 sh4_x86.tstate = TSTATE_NONE;
1.161 :}
1.162 MAC.W @Rm+, @Rn+ {:
1.163 - load_reg( R_ECX, Rm );
1.164 - check_ralign16( R_ECX );
1.165 - load_reg( R_ECX, Rn );
1.166 - check_ralign16( R_ECX );
1.167 - ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
1.168 - MEM_READ_WORD( R_ECX, R_EAX );
1.169 - PUSH_realigned_r32( R_EAX );
1.170 - load_reg( R_ECX, Rm );
1.171 - ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
1.172 + if( Rm == Rn ) {
1.173 + load_reg( R_EAX, Rm );
1.174 + check_ralign16( R_EAX );
1.175 + MMU_TRANSLATE_READ( R_EAX );
1.176 + PUSH_realigned_r32( R_EAX );
1.177 + load_reg( R_EAX, Rn );
1.178 + ADD_imm8s_r32( 2, R_EAX );
1.179 + MMU_TRANSLATE_READ( R_EAX );
1.180 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rn]) );
1.181 + // Note translate twice in case of page boundaries. Maybe worth
1.182 + // adding a page-boundary check to skip the second translation
1.183 + } else {
1.184 + load_reg( R_EAX, Rm );
1.185 + check_ralign16( R_EAX );
1.186 + MMU_TRANSLATE_READ( R_EAX );
1.187 + PUSH_realigned_r32( R_EAX );
1.188 + load_reg( R_EAX, Rn );
1.189 + check_ralign16( R_EAX );
1.190 + MMU_TRANSLATE_READ( R_EAX );
1.191 + ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rn]) );
1.192 + ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
1.193 + }
1.194 + MEM_READ_WORD( R_EAX, R_EAX );
1.195 + POP_r32( R_ECX );
1.196 + PUSH_r32( R_EAX );
1.197 MEM_READ_WORD( R_ECX, R_EAX );
1.198 POP_realigned_r32( R_ECX );
1.199 IMUL_r32( R_ECX );
1.200 @@ -709,9 +750,10 @@
1.201 OR.B #imm, @(R0, GBR) {:
1.202 load_reg( R_EAX, 0 );
1.203 load_spreg( R_ECX, R_GBR );
1.204 - ADD_r32_r32( R_EAX, R_ECX );
1.205 - PUSH_realigned_r32(R_ECX);
1.206 - MEM_READ_BYTE( R_ECX, R_EAX );
1.207 + ADD_r32_r32( R_ECX, R_EAX );
1.208 + MMU_TRANSLATE_WRITE( R_EAX );
1.209 + PUSH_realigned_r32(R_EAX);
1.210 + MEM_READ_BYTE( R_EAX, R_EAX );
1.211 POP_realigned_r32(R_ECX);
1.212 OR_imm32_r32(imm, R_EAX );
1.213 MEM_WRITE_BYTE( R_ECX, R_EAX );
1.214 @@ -905,12 +947,14 @@
1.215 sh4_x86.tstate = TSTATE_NONE;
1.216 :}
1.217 TAS.B @Rn {:
1.218 - load_reg( R_ECX, Rn );
1.219 - MEM_READ_BYTE( R_ECX, R_EAX );
1.220 + load_reg( R_EAX, Rn );
1.221 + MMU_TRANSLATE_WRITE( R_EAX );
1.222 + PUSH_realigned_r32( R_EAX );
1.223 + MEM_READ_BYTE( R_EAX, R_EAX );
1.224 TEST_r8_r8( R_AL, R_AL );
1.225 SETE_t();
1.226 OR_imm8_r8( 0x80, R_AL );
1.227 - load_reg( R_ECX, Rn );
1.228 + POP_realigned_r32( R_ECX );
1.229 MEM_WRITE_BYTE( R_ECX, R_EAX );
1.230 sh4_x86.tstate = TSTATE_NONE;
1.231 :}
1.232 @@ -930,8 +974,9 @@
1.233 TST.B #imm, @(R0, GBR) {:
1.234 load_reg( R_EAX, 0);
1.235 load_reg( R_ECX, R_GBR);
1.236 - ADD_r32_r32( R_EAX, R_ECX );
1.237 - MEM_READ_BYTE( R_ECX, R_EAX );
1.238 + ADD_r32_r32( R_ECX, R_EAX );
1.239 + MMU_TRANSLATE_READ( R_EAX );
1.240 + MEM_READ_BYTE( R_EAX, R_EAX );
1.241 TEST_imm8_r8( imm, R_AL );
1.242 SETE_t();
1.243 sh4_x86.tstate = TSTATE_E;
1.244 @@ -952,9 +997,10 @@
1.245 XOR.B #imm, @(R0, GBR) {:
1.246 load_reg( R_EAX, 0 );
1.247 load_spreg( R_ECX, R_GBR );
1.248 - ADD_r32_r32( R_EAX, R_ECX );
1.249 - PUSH_realigned_r32(R_ECX);
1.250 - MEM_READ_BYTE(R_ECX, R_EAX);
1.251 + ADD_r32_r32( R_ECX, R_EAX );
1.252 + MMU_TRANSLATE_WRITE( R_EAX );
1.253 + PUSH_realigned_r32(R_EAX);
1.254 + MEM_READ_BYTE(R_EAX, R_EAX);
1.255 POP_realigned_r32(R_ECX);
1.256 XOR_imm32_r32( imm, R_EAX );
1.257 MEM_WRITE_BYTE( R_ECX, R_EAX );
1.258 @@ -980,150 +1026,165 @@
1.259 store_reg( R_EAX, Rn );
1.260 :}
1.261 MOV.B Rm, @Rn {:
1.262 - load_reg( R_EAX, Rm );
1.263 - load_reg( R_ECX, Rn );
1.264 - MEM_WRITE_BYTE( R_ECX, R_EAX );
1.265 + load_reg( R_EAX, Rn );
1.266 + MMU_TRANSLATE_WRITE( R_EAX );
1.267 + load_reg( R_EDX, Rm );
1.268 + MEM_WRITE_BYTE( R_EAX, R_EDX );
1.269 sh4_x86.tstate = TSTATE_NONE;
1.270 :}
1.271 MOV.B Rm, @-Rn {:
1.272 - load_reg( R_EAX, Rm );
1.273 - load_reg( R_ECX, Rn );
1.274 - ADD_imm8s_r32( -1, R_ECX );
1.275 - store_reg( R_ECX, Rn );
1.276 - MEM_WRITE_BYTE( R_ECX, R_EAX );
1.277 + load_reg( R_EAX, Rn );
1.278 + ADD_imm8s_r32( -1, R_EAX );
1.279 + MMU_TRANSLATE_WRITE( R_EAX );
1.280 + load_reg( R_EDX, Rm );
1.281 + ADD_imm8s_sh4r( -1, REG_OFFSET(r[Rn]) );
1.282 + MEM_WRITE_BYTE( R_EAX, R_EDX );
1.283 sh4_x86.tstate = TSTATE_NONE;
1.284 :}
1.285 MOV.B Rm, @(R0, Rn) {:
1.286 load_reg( R_EAX, 0 );
1.287 load_reg( R_ECX, Rn );
1.288 - ADD_r32_r32( R_EAX, R_ECX );
1.289 - load_reg( R_EAX, Rm );
1.290 - MEM_WRITE_BYTE( R_ECX, R_EAX );
1.291 + ADD_r32_r32( R_ECX, R_EAX );
1.292 + MMU_TRANSLATE_WRITE( R_EAX );
1.293 + load_reg( R_EDX, Rm );
1.294 + MEM_WRITE_BYTE( R_EAX, R_EDX );
1.295 sh4_x86.tstate = TSTATE_NONE;
1.296 :}
1.297 MOV.B R0, @(disp, GBR) {:
1.298 - load_reg( R_EAX, 0 );
1.299 - load_spreg( R_ECX, R_GBR );
1.300 - ADD_imm32_r32( disp, R_ECX );
1.301 - MEM_WRITE_BYTE( R_ECX, R_EAX );
1.302 + load_spreg( R_EAX, R_GBR );
1.303 + ADD_imm32_r32( disp, R_EAX );
1.304 + MMU_TRANSLATE_WRITE( R_EAX );
1.305 + load_reg( R_EDX, 0 );
1.306 + MEM_WRITE_BYTE( R_EAX, R_EDX );
1.307 sh4_x86.tstate = TSTATE_NONE;
1.308 :}
1.309 MOV.B R0, @(disp, Rn) {:
1.310 - load_reg( R_EAX, 0 );
1.311 - load_reg( R_ECX, Rn );
1.312 - ADD_imm32_r32( disp, R_ECX );
1.313 - MEM_WRITE_BYTE( R_ECX, R_EAX );
1.314 + load_reg( R_EAX, Rn );
1.315 + ADD_imm32_r32( disp, R_EAX );
1.316 + MMU_TRANSLATE_WRITE( R_EAX );
1.317 + load_reg( R_EDX, 0 );
1.318 + MEM_WRITE_BYTE( R_EAX, R_EDX );
1.319 sh4_x86.tstate = TSTATE_NONE;
1.320 :}
1.321 MOV.B @Rm, Rn {:
1.322 - load_reg( R_ECX, Rm );
1.323 - MEM_READ_BYTE( R_ECX, R_EAX );
1.324 + load_reg( R_EAX, Rm );
1.325 + MMU_TRANSLATE_READ( R_EAX );
1.326 + MEM_READ_BYTE( R_EAX, R_EAX );
1.327 store_reg( R_EAX, Rn );
1.328 sh4_x86.tstate = TSTATE_NONE;
1.329 :}
1.330 MOV.B @Rm+, Rn {:
1.331 - load_reg( R_ECX, Rm );
1.332 - MOV_r32_r32( R_ECX, R_EAX );
1.333 - ADD_imm8s_r32( 1, R_EAX );
1.334 - store_reg( R_EAX, Rm );
1.335 - MEM_READ_BYTE( R_ECX, R_EAX );
1.336 + load_reg( R_EAX, Rm );
1.337 + MMU_TRANSLATE_READ( R_EAX );
1.338 + ADD_imm8s_sh4r( 1, REG_OFFSET(r[Rm]) );
1.339 + MEM_READ_BYTE( R_EAX, R_EAX );
1.340 store_reg( R_EAX, Rn );
1.341 sh4_x86.tstate = TSTATE_NONE;
1.342 :}
1.343 MOV.B @(R0, Rm), Rn {:
1.344 load_reg( R_EAX, 0 );
1.345 load_reg( R_ECX, Rm );
1.346 - ADD_r32_r32( R_EAX, R_ECX );
1.347 - MEM_READ_BYTE( R_ECX, R_EAX );
1.348 + ADD_r32_r32( R_ECX, R_EAX );
1.349 + MMU_TRANSLATE_READ( R_EAX )
1.350 + MEM_READ_BYTE( R_EAX, R_EAX );
1.351 store_reg( R_EAX, Rn );
1.352 sh4_x86.tstate = TSTATE_NONE;
1.353 :}
1.354 MOV.B @(disp, GBR), R0 {:
1.355 - load_spreg( R_ECX, R_GBR );
1.356 - ADD_imm32_r32( disp, R_ECX );
1.357 - MEM_READ_BYTE( R_ECX, R_EAX );
1.358 + load_spreg( R_EAX, R_GBR );
1.359 + ADD_imm32_r32( disp, R_EAX );
1.360 + MMU_TRANSLATE_READ( R_EAX );
1.361 + MEM_READ_BYTE( R_EAX, R_EAX );
1.362 store_reg( R_EAX, 0 );
1.363 sh4_x86.tstate = TSTATE_NONE;
1.364 :}
1.365 MOV.B @(disp, Rm), R0 {:
1.366 - load_reg( R_ECX, Rm );
1.367 - ADD_imm32_r32( disp, R_ECX );
1.368 - MEM_READ_BYTE( R_ECX, R_EAX );
1.369 + load_reg( R_EAX, Rm );
1.370 + ADD_imm32_r32( disp, R_EAX );
1.371 + MMU_TRANSLATE_READ( R_EAX );
1.372 + MEM_READ_BYTE( R_EAX, R_EAX );
1.373 store_reg( R_EAX, 0 );
1.374 sh4_x86.tstate = TSTATE_NONE;
1.375 :}
1.376 MOV.L Rm, @Rn {:
1.377 - load_reg( R_EAX, Rm );
1.378 - load_reg( R_ECX, Rn );
1.379 - check_walign32(R_ECX);
1.380 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.381 + load_reg( R_EAX, Rn );
1.382 + check_walign32(R_EAX);
1.383 + MMU_TRANSLATE_WRITE( R_EAX );
1.384 + load_reg( R_EDX, Rm );
1.385 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.386 sh4_x86.tstate = TSTATE_NONE;
1.387 :}
1.388 MOV.L Rm, @-Rn {:
1.389 - load_reg( R_EAX, Rm );
1.390 - load_reg( R_ECX, Rn );
1.391 - check_walign32( R_ECX );
1.392 - ADD_imm8s_r32( -4, R_ECX );
1.393 - store_reg( R_ECX, Rn );
1.394 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.395 + load_reg( R_EAX, Rn );
1.396 + ADD_imm8s_r32( -4, R_EAX );
1.397 + check_walign32( R_EAX );
1.398 + MMU_TRANSLATE_WRITE( R_EAX );
1.399 + load_reg( R_EDX, Rm );
1.400 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.401 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.402 sh4_x86.tstate = TSTATE_NONE;
1.403 :}
1.404 MOV.L Rm, @(R0, Rn) {:
1.405 load_reg( R_EAX, 0 );
1.406 load_reg( R_ECX, Rn );
1.407 - ADD_r32_r32( R_EAX, R_ECX );
1.408 - check_walign32( R_ECX );
1.409 - load_reg( R_EAX, Rm );
1.410 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.411 + ADD_r32_r32( R_ECX, R_EAX );
1.412 + check_walign32( R_EAX );
1.413 + MMU_TRANSLATE_WRITE( R_EAX );
1.414 + load_reg( R_EDX, Rm );
1.415 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.416 sh4_x86.tstate = TSTATE_NONE;
1.417 :}
1.418 MOV.L R0, @(disp, GBR) {:
1.419 - load_spreg( R_ECX, R_GBR );
1.420 - load_reg( R_EAX, 0 );
1.421 - ADD_imm32_r32( disp, R_ECX );
1.422 - check_walign32( R_ECX );
1.423 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.424 + load_spreg( R_EAX, R_GBR );
1.425 + ADD_imm32_r32( disp, R_EAX );
1.426 + check_walign32( R_EAX );
1.427 + MMU_TRANSLATE_WRITE( R_EAX );
1.428 + load_reg( R_EDX, 0 );
1.429 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.430 sh4_x86.tstate = TSTATE_NONE;
1.431 :}
1.432 MOV.L Rm, @(disp, Rn) {:
1.433 - load_reg( R_ECX, Rn );
1.434 - load_reg( R_EAX, Rm );
1.435 - ADD_imm32_r32( disp, R_ECX );
1.436 - check_walign32( R_ECX );
1.437 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.438 + load_reg( R_EAX, Rn );
1.439 + ADD_imm32_r32( disp, R_EAX );
1.440 + check_walign32( R_EAX );
1.441 + MMU_TRANSLATE_WRITE( R_EAX );
1.442 + load_reg( R_EDX, Rm );
1.443 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.444 sh4_x86.tstate = TSTATE_NONE;
1.445 :}
1.446 MOV.L @Rm, Rn {:
1.447 - load_reg( R_ECX, Rm );
1.448 - check_ralign32( R_ECX );
1.449 - MEM_READ_LONG( R_ECX, R_EAX );
1.450 + load_reg( R_EAX, Rm );
1.451 + check_ralign32( R_EAX );
1.452 + MMU_TRANSLATE_READ( R_EAX );
1.453 + MEM_READ_LONG( R_EAX, R_EAX );
1.454 store_reg( R_EAX, Rn );
1.455 sh4_x86.tstate = TSTATE_NONE;
1.456 :}
1.457 MOV.L @Rm+, Rn {:
1.458 load_reg( R_EAX, Rm );
1.459 check_ralign32( R_EAX );
1.460 - MOV_r32_r32( R_EAX, R_ECX );
1.461 - ADD_imm8s_r32( 4, R_EAX );
1.462 - store_reg( R_EAX, Rm );
1.463 - MEM_READ_LONG( R_ECX, R_EAX );
1.464 + MMU_TRANSLATE_READ( R_EAX );
1.465 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.466 + MEM_READ_LONG( R_EAX, R_EAX );
1.467 store_reg( R_EAX, Rn );
1.468 sh4_x86.tstate = TSTATE_NONE;
1.469 :}
1.470 MOV.L @(R0, Rm), Rn {:
1.471 load_reg( R_EAX, 0 );
1.472 load_reg( R_ECX, Rm );
1.473 - ADD_r32_r32( R_EAX, R_ECX );
1.474 - check_ralign32( R_ECX );
1.475 - MEM_READ_LONG( R_ECX, R_EAX );
1.476 + ADD_r32_r32( R_ECX, R_EAX );
1.477 + check_ralign32( R_EAX );
1.478 + MMU_TRANSLATE_READ( R_EAX );
1.479 + MEM_READ_LONG( R_EAX, R_EAX );
1.480 store_reg( R_EAX, Rn );
1.481 sh4_x86.tstate = TSTATE_NONE;
1.482 :}
1.483 MOV.L @(disp, GBR), R0 {:
1.484 - load_spreg( R_ECX, R_GBR );
1.485 - ADD_imm32_r32( disp, R_ECX );
1.486 - check_ralign32( R_ECX );
1.487 - MEM_READ_LONG( R_ECX, R_EAX );
1.488 + load_spreg( R_EAX, R_GBR );
1.489 + ADD_imm32_r32( disp, R_EAX );
1.490 + check_ralign32( R_EAX );
1.491 + MMU_TRANSLATE_READ( R_EAX );
1.492 + MEM_READ_LONG( R_EAX, R_EAX );
1.493 store_reg( R_EAX, 0 );
1.494 sh4_x86.tstate = TSTATE_NONE;
1.495 :}
1.496 @@ -1148,94 +1209,103 @@
1.497 // Note: we use sh4r.pc for the calc as we could be running at a
1.498 // different virtual address than the translation was done with,
1.499 // but we can safely assume that the low bits are the same.
1.500 - load_imm32( R_ECX, (pc-sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
1.501 - ADD_sh4r_r32( R_PC, R_ECX );
1.502 - MEM_READ_LONG( R_ECX, R_EAX );
1.503 + load_imm32( R_EAX, (pc-sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
1.504 + ADD_sh4r_r32( R_PC, R_EAX );
1.505 + MMU_TRANSLATE_READ( R_EAX );
1.506 + MEM_READ_LONG( R_EAX, R_EAX );
1.507 sh4_x86.tstate = TSTATE_NONE;
1.508 }
1.509 store_reg( R_EAX, Rn );
1.510 }
1.511 :}
1.512 MOV.L @(disp, Rm), Rn {:
1.513 - load_reg( R_ECX, Rm );
1.514 - ADD_imm8s_r32( disp, R_ECX );
1.515 - check_ralign32( R_ECX );
1.516 - MEM_READ_LONG( R_ECX, R_EAX );
1.517 + load_reg( R_EAX, Rm );
1.518 + ADD_imm8s_r32( disp, R_EAX );
1.519 + check_ralign32( R_EAX );
1.520 + MMU_TRANSLATE_READ( R_EAX );
1.521 + MEM_READ_LONG( R_EAX, R_EAX );
1.522 store_reg( R_EAX, Rn );
1.523 sh4_x86.tstate = TSTATE_NONE;
1.524 :}
1.525 MOV.W Rm, @Rn {:
1.526 - load_reg( R_ECX, Rn );
1.527 - check_walign16( R_ECX );
1.528 - load_reg( R_EAX, Rm );
1.529 - MEM_WRITE_WORD( R_ECX, R_EAX );
1.530 + load_reg( R_EAX, Rn );
1.531 + check_walign16( R_EAX );
1.532 + MMU_TRANSLATE_WRITE( R_EAX )
1.533 + load_reg( R_EDX, Rm );
1.534 + MEM_WRITE_WORD( R_EAX, R_EDX );
1.535 sh4_x86.tstate = TSTATE_NONE;
1.536 :}
1.537 MOV.W Rm, @-Rn {:
1.538 - load_reg( R_ECX, Rn );
1.539 - check_walign16( R_ECX );
1.540 - load_reg( R_EAX, Rm );
1.541 - ADD_imm8s_r32( -2, R_ECX );
1.542 - store_reg( R_ECX, Rn );
1.543 - MEM_WRITE_WORD( R_ECX, R_EAX );
1.544 + load_reg( R_EAX, Rn );
1.545 + ADD_imm8s_r32( -2, R_EAX );
1.546 + check_walign16( R_EAX );
1.547 + MMU_TRANSLATE_WRITE( R_EAX );
1.548 + load_reg( R_EDX, Rm );
1.549 + ADD_imm8s_sh4r( -2, REG_OFFSET(r[Rn]) );
1.550 + MEM_WRITE_WORD( R_EAX, R_EDX );
1.551 sh4_x86.tstate = TSTATE_NONE;
1.552 :}
1.553 MOV.W Rm, @(R0, Rn) {:
1.554 load_reg( R_EAX, 0 );
1.555 load_reg( R_ECX, Rn );
1.556 - ADD_r32_r32( R_EAX, R_ECX );
1.557 - check_walign16( R_ECX );
1.558 - load_reg( R_EAX, Rm );
1.559 - MEM_WRITE_WORD( R_ECX, R_EAX );
1.560 + ADD_r32_r32( R_ECX, R_EAX );
1.561 + check_walign16( R_EAX );
1.562 + MMU_TRANSLATE_WRITE( R_EAX );
1.563 + load_reg( R_EDX, Rm );
1.564 + MEM_WRITE_WORD( R_EAX, R_EDX );
1.565 sh4_x86.tstate = TSTATE_NONE;
1.566 :}
1.567 MOV.W R0, @(disp, GBR) {:
1.568 - load_spreg( R_ECX, R_GBR );
1.569 - load_reg( R_EAX, 0 );
1.570 - ADD_imm32_r32( disp, R_ECX );
1.571 - check_walign16( R_ECX );
1.572 - MEM_WRITE_WORD( R_ECX, R_EAX );
1.573 + load_spreg( R_EAX, R_GBR );
1.574 + ADD_imm32_r32( disp, R_EAX );
1.575 + check_walign16( R_EAX );
1.576 + MMU_TRANSLATE_WRITE( R_EAX );
1.577 + load_reg( R_EDX, 0 );
1.578 + MEM_WRITE_WORD( R_EAX, R_EDX );
1.579 sh4_x86.tstate = TSTATE_NONE;
1.580 :}
1.581 MOV.W R0, @(disp, Rn) {:
1.582 - load_reg( R_ECX, Rn );
1.583 - load_reg( R_EAX, 0 );
1.584 - ADD_imm32_r32( disp, R_ECX );
1.585 - check_walign16( R_ECX );
1.586 - MEM_WRITE_WORD( R_ECX, R_EAX );
1.587 + load_reg( R_EAX, Rn );
1.588 + ADD_imm32_r32( disp, R_EAX );
1.589 + check_walign16( R_EAX );
1.590 + MMU_TRANSLATE_WRITE( R_EAX );
1.591 + load_reg( R_EDX, 0 );
1.592 + MEM_WRITE_WORD( R_EAX, R_EDX );
1.593 sh4_x86.tstate = TSTATE_NONE;
1.594 :}
1.595 MOV.W @Rm, Rn {:
1.596 - load_reg( R_ECX, Rm );
1.597 - check_ralign16( R_ECX );
1.598 - MEM_READ_WORD( R_ECX, R_EAX );
1.599 + load_reg( R_EAX, Rm );
1.600 + check_ralign16( R_EAX );
1.601 + MMU_TRANSLATE_READ( R_EAX );
1.602 + MEM_READ_WORD( R_EAX, R_EAX );
1.603 store_reg( R_EAX, Rn );
1.604 sh4_x86.tstate = TSTATE_NONE;
1.605 :}
1.606 MOV.W @Rm+, Rn {:
1.607 load_reg( R_EAX, Rm );
1.608 check_ralign16( R_EAX );
1.609 - MOV_r32_r32( R_EAX, R_ECX );
1.610 - ADD_imm8s_r32( 2, R_EAX );
1.611 - store_reg( R_EAX, Rm );
1.612 - MEM_READ_WORD( R_ECX, R_EAX );
1.613 + MMU_TRANSLATE_READ( R_EAX );
1.614 + ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
1.615 + MEM_READ_WORD( R_EAX, R_EAX );
1.616 store_reg( R_EAX, Rn );
1.617 sh4_x86.tstate = TSTATE_NONE;
1.618 :}
1.619 MOV.W @(R0, Rm), Rn {:
1.620 load_reg( R_EAX, 0 );
1.621 load_reg( R_ECX, Rm );
1.622 - ADD_r32_r32( R_EAX, R_ECX );
1.623 - check_ralign16( R_ECX );
1.624 - MEM_READ_WORD( R_ECX, R_EAX );
1.625 + ADD_r32_r32( R_ECX, R_EAX );
1.626 + check_ralign16( R_EAX );
1.627 + MMU_TRANSLATE_READ( R_EAX );
1.628 + MEM_READ_WORD( R_EAX, R_EAX );
1.629 store_reg( R_EAX, Rn );
1.630 sh4_x86.tstate = TSTATE_NONE;
1.631 :}
1.632 MOV.W @(disp, GBR), R0 {:
1.633 - load_spreg( R_ECX, R_GBR );
1.634 - ADD_imm32_r32( disp, R_ECX );
1.635 - check_ralign16( R_ECX );
1.636 - MEM_READ_WORD( R_ECX, R_EAX );
1.637 + load_spreg( R_EAX, R_GBR );
1.638 + ADD_imm32_r32( disp, R_EAX );
1.639 + check_ralign16( R_EAX );
1.640 + MMU_TRANSLATE_READ( R_EAX );
1.641 + MEM_READ_WORD( R_EAX, R_EAX );
1.642 store_reg( R_EAX, 0 );
1.643 sh4_x86.tstate = TSTATE_NONE;
1.644 :}
1.645 @@ -1250,19 +1320,21 @@
1.646 MOV_moff32_EAX( ptr );
1.647 MOVSX_r16_r32( R_EAX, R_EAX );
1.648 } else {
1.649 - load_imm32( R_ECX, (pc - sh4_x86.block_start_pc) + disp + 4 );
1.650 - ADD_sh4r_r32( R_PC, R_ECX );
1.651 - MEM_READ_WORD( R_ECX, R_EAX );
1.652 + load_imm32( R_EAX, (pc - sh4_x86.block_start_pc) + disp + 4 );
1.653 + ADD_sh4r_r32( R_PC, R_EAX );
1.654 + MMU_TRANSLATE_READ( R_EAX );
1.655 + MEM_READ_WORD( R_EAX, R_EAX );
1.656 sh4_x86.tstate = TSTATE_NONE;
1.657 }
1.658 store_reg( R_EAX, Rn );
1.659 }
1.660 :}
1.661 MOV.W @(disp, Rm), R0 {:
1.662 - load_reg( R_ECX, Rm );
1.663 - ADD_imm32_r32( disp, R_ECX );
1.664 - check_ralign16( R_ECX );
1.665 - MEM_READ_WORD( R_ECX, R_EAX );
1.666 + load_reg( R_EAX, Rm );
1.667 + ADD_imm32_r32( disp, R_EAX );
1.668 + check_ralign16( R_EAX );
1.669 + MMU_TRANSLATE_READ( R_EAX );
1.670 + MEM_READ_WORD( R_EAX, R_EAX );
1.671 store_reg( R_EAX, 0 );
1.672 sh4_x86.tstate = TSTATE_NONE;
1.673 :}
1.674 @@ -1273,13 +1345,15 @@
1.675 load_imm32( R_ECX, (pc - sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
1.676 ADD_sh4r_r32( R_PC, R_ECX );
1.677 store_reg( R_ECX, 0 );
1.678 + sh4_x86.tstate = TSTATE_NONE;
1.679 }
1.680 :}
1.681 MOVCA.L R0, @Rn {:
1.682 - load_reg( R_EAX, 0 );
1.683 - load_reg( R_ECX, Rn );
1.684 - check_walign32( R_ECX );
1.685 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.686 + load_reg( R_EAX, Rn );
1.687 + check_walign32( R_EAX );
1.688 + MMU_TRANSLATE_WRITE( R_EAX );
1.689 + load_reg( R_EDX, 0 );
1.690 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.691 sh4_x86.tstate = TSTATE_NONE;
1.692 :}
1.693
1.694 @@ -1288,8 +1362,9 @@
1.695 if( sh4_x86.in_delay_slot ) {
1.696 SLOTILLEGAL();
1.697 } else {
1.698 - JT_rel8( EXIT_BLOCK_SIZE, nottaken );
1.699 - exit_block( disp + pc + 4, pc+2 );
1.700 + sh4vma_t target = disp + pc + 4;
1.701 + JT_rel8( EXIT_BLOCK_REL_SIZE(target), nottaken );
1.702 + exit_block_rel(target, pc+2 );
1.703 JMP_TARGET(nottaken);
1.704 return 2;
1.705 }
1.706 @@ -1298,6 +1373,7 @@
1.707 if( sh4_x86.in_delay_slot ) {
1.708 SLOTILLEGAL();
1.709 } else {
1.710 + sh4vma_t target = disp + pc + 4;
1.711 sh4_x86.in_delay_slot = TRUE;
1.712 if( sh4_x86.tstate == TSTATE_NONE ) {
1.713 CMP_imm8s_sh4r( 1, R_T );
1.714 @@ -1305,7 +1381,7 @@
1.715 }
1.716 OP(0x0F); OP(0x80+sh4_x86.tstate); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JNE rel32
1.717 sh4_translate_instruction(pc+2);
1.718 - exit_block( disp + pc + 4, pc+4 );
1.719 + exit_block_rel( target, pc+4 );
1.720 // not taken
1.721 *patch = (xlat_output - ((uint8_t *)patch)) - 4;
1.722 sh4_translate_instruction(pc+2);
1.723 @@ -1318,7 +1394,7 @@
1.724 } else {
1.725 sh4_x86.in_delay_slot = TRUE;
1.726 sh4_translate_instruction( pc + 2 );
1.727 - exit_block( disp + pc + 4, pc+4 );
1.728 + exit_block_rel( disp + pc + 4, pc+4 );
1.729 sh4_x86.branch_taken = TRUE;
1.730 return 4;
1.731 }
1.732 @@ -1346,7 +1422,7 @@
1.733 store_spreg( R_EAX, R_PR );
1.734 sh4_x86.in_delay_slot = TRUE;
1.735 sh4_translate_instruction( pc + 2 );
1.736 - exit_block( disp + pc + 4, pc+4 );
1.737 + exit_block_rel( disp + pc + 4, pc+4 );
1.738 sh4_x86.branch_taken = TRUE;
1.739 return 4;
1.740 }
1.741 @@ -1371,8 +1447,9 @@
1.742 if( sh4_x86.in_delay_slot ) {
1.743 SLOTILLEGAL();
1.744 } else {
1.745 - JF_rel8( EXIT_BLOCK_SIZE, nottaken );
1.746 - exit_block( disp + pc + 4, pc+2 );
1.747 + sh4vma_t target = disp + pc + 4;
1.748 + JF_rel8( EXIT_BLOCK_REL_SIZE(target), nottaken );
1.749 + exit_block_rel(target, pc+2 );
1.750 JMP_TARGET(nottaken);
1.751 return 2;
1.752 }
1.753 @@ -1388,7 +1465,7 @@
1.754 }
1.755 OP(0x0F); OP(0x80+(sh4_x86.tstate^1)); uint32_t *patch = (uint32_t *)xlat_output; OP32(0); // JE rel32
1.756 sh4_translate_instruction(pc+2);
1.757 - exit_block( disp + pc + 4, pc+4 );
1.758 + exit_block_rel( disp + pc + 4, pc+4 );
1.759 // not taken
1.760 *patch = (xlat_output - ((uint8_t *)patch)) - 4;
1.761 sh4_translate_instruction(pc+2);
1.762 @@ -1558,191 +1635,195 @@
1.763 :}
1.764 FMOV FRm, @Rn {:
1.765 check_fpuen();
1.766 - load_reg( R_ECX, Rn );
1.767 - check_walign32( R_ECX );
1.768 + load_reg( R_EAX, Rn );
1.769 + check_walign32( R_EAX );
1.770 + MMU_TRANSLATE_WRITE( R_EAX );
1.771 load_spreg( R_EDX, R_FPSCR );
1.772 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.773 JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
1.774 load_fr_bank( R_EDX );
1.775 - load_fr( R_EDX, R_EAX, FRm );
1.776 - MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.777 + load_fr( R_EDX, R_ECX, FRm );
1.778 + MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
1.779 if( FRm&1 ) {
1.780 JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
1.781 JMP_TARGET(doublesize);
1.782 load_xf_bank( R_EDX );
1.783 - load_fr( R_EDX, R_EAX, FRm&0x0E );
1.784 + load_fr( R_EDX, R_ECX, FRm&0x0E );
1.785 load_fr( R_EDX, R_EDX, FRm|0x01 );
1.786 - MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
1.787 + MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
1.788 JMP_TARGET(end);
1.789 } else {
1.790 JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
1.791 JMP_TARGET(doublesize);
1.792 load_fr_bank( R_EDX );
1.793 - load_fr( R_EDX, R_EAX, FRm&0x0E );
1.794 + load_fr( R_EDX, R_ECX, FRm&0x0E );
1.795 load_fr( R_EDX, R_EDX, FRm|0x01 );
1.796 - MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
1.797 + MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
1.798 JMP_TARGET(end);
1.799 }
1.800 sh4_x86.tstate = TSTATE_NONE;
1.801 :}
1.802 FMOV @Rm, FRn {:
1.803 check_fpuen();
1.804 - load_reg( R_ECX, Rm );
1.805 - check_ralign32( R_ECX );
1.806 + load_reg( R_EAX, Rm );
1.807 + check_ralign32( R_EAX );
1.808 + MMU_TRANSLATE_READ( R_EAX );
1.809 load_spreg( R_EDX, R_FPSCR );
1.810 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.811 JNE_rel8(8 + MEM_READ_SIZE, doublesize);
1.812 - MEM_READ_LONG( R_ECX, R_EAX );
1.813 + MEM_READ_LONG( R_EAX, R_EAX );
1.814 load_fr_bank( R_EDX );
1.815 store_fr( R_EDX, R_EAX, FRn );
1.816 if( FRn&1 ) {
1.817 JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
1.818 JMP_TARGET(doublesize);
1.819 - MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
1.820 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
1.821 load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
1.822 load_xf_bank( R_EDX );
1.823 - store_fr( R_EDX, R_EAX, FRn&0x0E );
1.824 - store_fr( R_EDX, R_ECX, FRn|0x01 );
1.825 + store_fr( R_EDX, R_ECX, FRn&0x0E );
1.826 + store_fr( R_EDX, R_EAX, FRn|0x01 );
1.827 JMP_TARGET(end);
1.828 } else {
1.829 JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
1.830 JMP_TARGET(doublesize);
1.831 - MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
1.832 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
1.833 load_fr_bank( R_EDX );
1.834 - store_fr( R_EDX, R_EAX, FRn&0x0E );
1.835 - store_fr( R_EDX, R_ECX, FRn|0x01 );
1.836 + store_fr( R_EDX, R_ECX, FRn&0x0E );
1.837 + store_fr( R_EDX, R_EAX, FRn|0x01 );
1.838 JMP_TARGET(end);
1.839 }
1.840 sh4_x86.tstate = TSTATE_NONE;
1.841 :}
1.842 FMOV FRm, @-Rn {:
1.843 check_fpuen();
1.844 - load_reg( R_ECX, Rn );
1.845 - check_walign32( R_ECX );
1.846 + load_reg( R_EAX, Rn );
1.847 + check_walign32( R_EAX );
1.848 load_spreg( R_EDX, R_FPSCR );
1.849 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.850 - JNE_rel8(14 + MEM_WRITE_SIZE, doublesize);
1.851 + JNE_rel8(15 + MEM_WRITE_SIZE + MMU_TRANSLATE_SIZE, doublesize);
1.852 + ADD_imm8s_r32( -4, R_EAX );
1.853 + MMU_TRANSLATE_WRITE( R_EAX );
1.854 load_fr_bank( R_EDX );
1.855 - load_fr( R_EDX, R_EAX, FRm );
1.856 - ADD_imm8s_r32(-4,R_ECX);
1.857 - store_reg( R_ECX, Rn );
1.858 - MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.859 + load_fr( R_EDX, R_ECX, FRm );
1.860 + ADD_imm8s_sh4r(-4,REG_OFFSET(r[Rn]));
1.861 + MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
1.862 if( FRm&1 ) {
1.863 - JMP_rel8( 24 + MEM_WRITE_DOUBLE_SIZE, end );
1.864 + JMP_rel8( 25 + MEM_WRITE_DOUBLE_SIZE + MMU_TRANSLATE_SIZE, end );
1.865 JMP_TARGET(doublesize);
1.866 + ADD_imm8s_r32(-8,R_EAX);
1.867 + MMU_TRANSLATE_WRITE( R_EAX );
1.868 load_xf_bank( R_EDX );
1.869 - load_fr( R_EDX, R_EAX, FRm&0x0E );
1.870 + load_fr( R_EDX, R_ECX, FRm&0x0E );
1.871 load_fr( R_EDX, R_EDX, FRm|0x01 );
1.872 - ADD_imm8s_r32(-8,R_ECX);
1.873 - store_reg( R_ECX, Rn );
1.874 - MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
1.875 + ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
1.876 + MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
1.877 JMP_TARGET(end);
1.878 } else {
1.879 - JMP_rel8( 15 + MEM_WRITE_DOUBLE_SIZE, end );
1.880 + JMP_rel8( 16 + MEM_WRITE_DOUBLE_SIZE + MMU_TRANSLATE_SIZE, end );
1.881 JMP_TARGET(doublesize);
1.882 + ADD_imm8s_r32(-8,R_EAX);
1.883 + MMU_TRANSLATE_WRITE( R_EAX );
1.884 load_fr_bank( R_EDX );
1.885 - load_fr( R_EDX, R_EAX, FRm&0x0E );
1.886 + load_fr( R_EDX, R_ECX, FRm&0x0E );
1.887 load_fr( R_EDX, R_EDX, FRm|0x01 );
1.888 - ADD_imm8s_r32(-8,R_ECX);
1.889 - store_reg( R_ECX, Rn );
1.890 - MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
1.891 + ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
1.892 + MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
1.893 JMP_TARGET(end);
1.894 }
1.895 sh4_x86.tstate = TSTATE_NONE;
1.896 :}
1.897 FMOV @Rm+, FRn {:
1.898 check_fpuen();
1.899 - load_reg( R_ECX, Rm );
1.900 - check_ralign32( R_ECX );
1.901 - MOV_r32_r32( R_ECX, R_EAX );
1.902 + load_reg( R_EAX, Rm );
1.903 + check_ralign32( R_EAX );
1.904 + MMU_TRANSLATE_READ( R_EAX );
1.905 load_spreg( R_EDX, R_FPSCR );
1.906 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.907 - JNE_rel8(14 + MEM_READ_SIZE, doublesize);
1.908 - ADD_imm8s_r32( 4, R_EAX );
1.909 - store_reg( R_EAX, Rm );
1.910 - MEM_READ_LONG( R_ECX, R_EAX );
1.911 + JNE_rel8(12 + MEM_READ_SIZE, doublesize);
1.912 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.913 + MEM_READ_LONG( R_EAX, R_EAX );
1.914 load_fr_bank( R_EDX );
1.915 store_fr( R_EDX, R_EAX, FRn );
1.916 if( FRn&1 ) {
1.917 - JMP_rel8(27 + MEM_READ_DOUBLE_SIZE, end);
1.918 + JMP_rel8(25 + MEM_READ_DOUBLE_SIZE, end);
1.919 JMP_TARGET(doublesize);
1.920 - ADD_imm8s_r32( 8, R_EAX );
1.921 - store_reg(R_EAX, Rm);
1.922 - MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
1.923 + ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
1.924 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
1.925 load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
1.926 load_xf_bank( R_EDX );
1.927 - store_fr( R_EDX, R_EAX, FRn&0x0E );
1.928 - store_fr( R_EDX, R_ECX, FRn|0x01 );
1.929 + store_fr( R_EDX, R_ECX, FRn&0x0E );
1.930 + store_fr( R_EDX, R_EAX, FRn|0x01 );
1.931 JMP_TARGET(end);
1.932 } else {
1.933 - JMP_rel8(15 + MEM_READ_DOUBLE_SIZE, end);
1.934 - ADD_imm8s_r32( 8, R_EAX );
1.935 - store_reg(R_EAX, Rm);
1.936 - MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
1.937 + JMP_rel8(13 + MEM_READ_DOUBLE_SIZE, end);
1.938 + ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
1.939 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
1.940 load_fr_bank( R_EDX );
1.941 - store_fr( R_EDX, R_EAX, FRn&0x0E );
1.942 - store_fr( R_EDX, R_ECX, FRn|0x01 );
1.943 + store_fr( R_EDX, R_ECX, FRn&0x0E );
1.944 + store_fr( R_EDX, R_EAX, FRn|0x01 );
1.945 JMP_TARGET(end);
1.946 }
1.947 sh4_x86.tstate = TSTATE_NONE;
1.948 :}
1.949 FMOV FRm, @(R0, Rn) {:
1.950 check_fpuen();
1.951 - load_reg( R_ECX, Rn );
1.952 - ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
1.953 - check_walign32( R_ECX );
1.954 + load_reg( R_EAX, Rn );
1.955 + ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
1.956 + check_walign32( R_EAX );
1.957 + MMU_TRANSLATE_WRITE( R_EAX );
1.958 load_spreg( R_EDX, R_FPSCR );
1.959 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.960 JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
1.961 load_fr_bank( R_EDX );
1.962 - load_fr( R_EDX, R_EAX, FRm );
1.963 - MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.964 + load_fr( R_EDX, R_ECX, FRm );
1.965 + MEM_WRITE_LONG( R_EAX, R_ECX ); // 12
1.966 if( FRm&1 ) {
1.967 JMP_rel8( 18 + MEM_WRITE_DOUBLE_SIZE, end );
1.968 JMP_TARGET(doublesize);
1.969 load_xf_bank( R_EDX );
1.970 - load_fr( R_EDX, R_EAX, FRm&0x0E );
1.971 + load_fr( R_EDX, R_ECX, FRm&0x0E );
1.972 load_fr( R_EDX, R_EDX, FRm|0x01 );
1.973 - MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
1.974 + MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
1.975 JMP_TARGET(end);
1.976 } else {
1.977 JMP_rel8( 9 + MEM_WRITE_DOUBLE_SIZE, end );
1.978 JMP_TARGET(doublesize);
1.979 load_fr_bank( R_EDX );
1.980 - load_fr( R_EDX, R_EAX, FRm&0x0E );
1.981 + load_fr( R_EDX, R_ECX, FRm&0x0E );
1.982 load_fr( R_EDX, R_EDX, FRm|0x01 );
1.983 - MEM_WRITE_DOUBLE( R_ECX, R_EAX, R_EDX );
1.984 + MEM_WRITE_DOUBLE( R_EAX, R_ECX, R_EDX );
1.985 JMP_TARGET(end);
1.986 }
1.987 sh4_x86.tstate = TSTATE_NONE;
1.988 :}
1.989 FMOV @(R0, Rm), FRn {:
1.990 check_fpuen();
1.991 - load_reg( R_ECX, Rm );
1.992 - ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
1.993 - check_ralign32( R_ECX );
1.994 + load_reg( R_EAX, Rm );
1.995 + ADD_sh4r_r32( REG_OFFSET(r[0]), R_EAX );
1.996 + check_ralign32( R_EAX );
1.997 + MMU_TRANSLATE_READ( R_EAX );
1.998 load_spreg( R_EDX, R_FPSCR );
1.999 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.1000 JNE_rel8(8 + MEM_READ_SIZE, doublesize);
1.1001 - MEM_READ_LONG( R_ECX, R_EAX );
1.1002 + MEM_READ_LONG( R_EAX, R_EAX );
1.1003 load_fr_bank( R_EDX );
1.1004 store_fr( R_EDX, R_EAX, FRn );
1.1005 if( FRn&1 ) {
1.1006 JMP_rel8(21 + MEM_READ_DOUBLE_SIZE, end);
1.1007 JMP_TARGET(doublesize);
1.1008 - MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
1.1009 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
1.1010 load_spreg( R_EDX, R_FPSCR ); // assume read_long clobbered it
1.1011 load_xf_bank( R_EDX );
1.1012 - store_fr( R_EDX, R_EAX, FRn&0x0E );
1.1013 - store_fr( R_EDX, R_ECX, FRn|0x01 );
1.1014 + store_fr( R_EDX, R_ECX, FRn&0x0E );
1.1015 + store_fr( R_EDX, R_EAX, FRn|0x01 );
1.1016 JMP_TARGET(end);
1.1017 } else {
1.1018 JMP_rel8(9 + MEM_READ_DOUBLE_SIZE, end);
1.1019 JMP_TARGET(doublesize);
1.1020 - MEM_READ_DOUBLE( R_ECX, R_EAX, R_ECX );
1.1021 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
1.1022 load_fr_bank( R_EDX );
1.1023 - store_fr( R_EDX, R_EAX, FRn&0x0E );
1.1024 - store_fr( R_EDX, R_ECX, FRn|0x01 );
1.1025 + store_fr( R_EDX, R_ECX, FRn&0x0E );
1.1026 + store_fr( R_EDX, R_EAX, FRn|0x01 );
1.1027 JMP_TARGET(end);
1.1028 }
1.1029 sh4_x86.tstate = TSTATE_NONE;
1.1030 @@ -2183,10 +2264,9 @@
1.1031 LDC.L @Rm+, GBR {:
1.1032 load_reg( R_EAX, Rm );
1.1033 check_ralign32( R_EAX );
1.1034 - MOV_r32_r32( R_EAX, R_ECX );
1.1035 - ADD_imm8s_r32( 4, R_EAX );
1.1036 - store_reg( R_EAX, Rm );
1.1037 - MEM_READ_LONG( R_ECX, R_EAX );
1.1038 + MMU_TRANSLATE_READ( R_EAX );
1.1039 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1040 + MEM_READ_LONG( R_EAX, R_EAX );
1.1041 store_spreg( R_EAX, R_GBR );
1.1042 sh4_x86.tstate = TSTATE_NONE;
1.1043 :}
1.1044 @@ -2197,10 +2277,9 @@
1.1045 check_priv();
1.1046 load_reg( R_EAX, Rm );
1.1047 check_ralign32( R_EAX );
1.1048 - MOV_r32_r32( R_EAX, R_ECX );
1.1049 - ADD_imm8s_r32( 4, R_EAX );
1.1050 - store_reg( R_EAX, Rm );
1.1051 - MEM_READ_LONG( R_ECX, R_EAX );
1.1052 + MMU_TRANSLATE_READ( R_EAX );
1.1053 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1054 + MEM_READ_LONG( R_EAX, R_EAX );
1.1055 call_func1( sh4_write_sr, R_EAX );
1.1056 sh4_x86.priv_checked = FALSE;
1.1057 sh4_x86.fpuen_checked = FALSE;
1.1058 @@ -2211,10 +2290,9 @@
1.1059 check_priv();
1.1060 load_reg( R_EAX, Rm );
1.1061 check_ralign32( R_EAX );
1.1062 - MOV_r32_r32( R_EAX, R_ECX );
1.1063 - ADD_imm8s_r32( 4, R_EAX );
1.1064 - store_reg( R_EAX, Rm );
1.1065 - MEM_READ_LONG( R_ECX, R_EAX );
1.1066 + MMU_TRANSLATE_READ( R_EAX );
1.1067 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1068 + MEM_READ_LONG( R_EAX, R_EAX );
1.1069 store_spreg( R_EAX, R_VBR );
1.1070 sh4_x86.tstate = TSTATE_NONE;
1.1071 :}
1.1072 @@ -2222,10 +2300,9 @@
1.1073 check_priv();
1.1074 load_reg( R_EAX, Rm );
1.1075 check_ralign32( R_EAX );
1.1076 - MOV_r32_r32( R_EAX, R_ECX );
1.1077 - ADD_imm8s_r32( 4, R_EAX );
1.1078 - store_reg( R_EAX, Rm );
1.1079 - MEM_READ_LONG( R_ECX, R_EAX );
1.1080 + MMU_TRANSLATE_READ( R_EAX );
1.1081 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1082 + MEM_READ_LONG( R_EAX, R_EAX );
1.1083 store_spreg( R_EAX, R_SSR );
1.1084 sh4_x86.tstate = TSTATE_NONE;
1.1085 :}
1.1086 @@ -2233,10 +2310,9 @@
1.1087 check_priv();
1.1088 load_reg( R_EAX, Rm );
1.1089 check_ralign32( R_EAX );
1.1090 - MOV_r32_r32( R_EAX, R_ECX );
1.1091 - ADD_imm8s_r32( 4, R_EAX );
1.1092 - store_reg( R_EAX, Rm );
1.1093 - MEM_READ_LONG( R_ECX, R_EAX );
1.1094 + MMU_TRANSLATE_READ( R_EAX );
1.1095 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1096 + MEM_READ_LONG( R_EAX, R_EAX );
1.1097 store_spreg( R_EAX, R_SGR );
1.1098 sh4_x86.tstate = TSTATE_NONE;
1.1099 :}
1.1100 @@ -2244,10 +2320,9 @@
1.1101 check_priv();
1.1102 load_reg( R_EAX, Rm );
1.1103 check_ralign32( R_EAX );
1.1104 - MOV_r32_r32( R_EAX, R_ECX );
1.1105 - ADD_imm8s_r32( 4, R_EAX );
1.1106 - store_reg( R_EAX, Rm );
1.1107 - MEM_READ_LONG( R_ECX, R_EAX );
1.1108 + MMU_TRANSLATE_READ( R_EAX );
1.1109 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1110 + MEM_READ_LONG( R_EAX, R_EAX );
1.1111 store_spreg( R_EAX, R_SPC );
1.1112 sh4_x86.tstate = TSTATE_NONE;
1.1113 :}
1.1114 @@ -2255,10 +2330,9 @@
1.1115 check_priv();
1.1116 load_reg( R_EAX, Rm );
1.1117 check_ralign32( R_EAX );
1.1118 - MOV_r32_r32( R_EAX, R_ECX );
1.1119 - ADD_imm8s_r32( 4, R_EAX );
1.1120 - store_reg( R_EAX, Rm );
1.1121 - MEM_READ_LONG( R_ECX, R_EAX );
1.1122 + MMU_TRANSLATE_READ( R_EAX );
1.1123 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1124 + MEM_READ_LONG( R_EAX, R_EAX );
1.1125 store_spreg( R_EAX, R_DBR );
1.1126 sh4_x86.tstate = TSTATE_NONE;
1.1127 :}
1.1128 @@ -2266,10 +2340,9 @@
1.1129 check_priv();
1.1130 load_reg( R_EAX, Rm );
1.1131 check_ralign32( R_EAX );
1.1132 - MOV_r32_r32( R_EAX, R_ECX );
1.1133 - ADD_imm8s_r32( 4, R_EAX );
1.1134 - store_reg( R_EAX, Rm );
1.1135 - MEM_READ_LONG( R_ECX, R_EAX );
1.1136 + MMU_TRANSLATE_READ( R_EAX );
1.1137 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1138 + MEM_READ_LONG( R_EAX, R_EAX );
1.1139 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
1.1140 sh4_x86.tstate = TSTATE_NONE;
1.1141 :}
1.1142 @@ -2282,10 +2355,9 @@
1.1143 LDS.L @Rm+, FPSCR {:
1.1144 load_reg( R_EAX, Rm );
1.1145 check_ralign32( R_EAX );
1.1146 - MOV_r32_r32( R_EAX, R_ECX );
1.1147 - ADD_imm8s_r32( 4, R_EAX );
1.1148 - store_reg( R_EAX, Rm );
1.1149 - MEM_READ_LONG( R_ECX, R_EAX );
1.1150 + MMU_TRANSLATE_READ( R_EAX );
1.1151 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1152 + MEM_READ_LONG( R_EAX, R_EAX );
1.1153 store_spreg( R_EAX, R_FPSCR );
1.1154 update_fr_bank( R_EAX );
1.1155 sh4_x86.tstate = TSTATE_NONE;
1.1156 @@ -2297,10 +2369,9 @@
1.1157 LDS.L @Rm+, FPUL {:
1.1158 load_reg( R_EAX, Rm );
1.1159 check_ralign32( R_EAX );
1.1160 - MOV_r32_r32( R_EAX, R_ECX );
1.1161 - ADD_imm8s_r32( 4, R_EAX );
1.1162 - store_reg( R_EAX, Rm );
1.1163 - MEM_READ_LONG( R_ECX, R_EAX );
1.1164 + MMU_TRANSLATE_READ( R_EAX );
1.1165 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1166 + MEM_READ_LONG( R_EAX, R_EAX );
1.1167 store_spreg( R_EAX, R_FPUL );
1.1168 sh4_x86.tstate = TSTATE_NONE;
1.1169 :}
1.1170 @@ -2311,10 +2382,9 @@
1.1171 LDS.L @Rm+, MACH {:
1.1172 load_reg( R_EAX, Rm );
1.1173 check_ralign32( R_EAX );
1.1174 - MOV_r32_r32( R_EAX, R_ECX );
1.1175 - ADD_imm8s_r32( 4, R_EAX );
1.1176 - store_reg( R_EAX, Rm );
1.1177 - MEM_READ_LONG( R_ECX, R_EAX );
1.1178 + MMU_TRANSLATE_READ( R_EAX );
1.1179 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1180 + MEM_READ_LONG( R_EAX, R_EAX );
1.1181 store_spreg( R_EAX, R_MACH );
1.1182 sh4_x86.tstate = TSTATE_NONE;
1.1183 :}
1.1184 @@ -2325,10 +2395,9 @@
1.1185 LDS.L @Rm+, MACL {:
1.1186 load_reg( R_EAX, Rm );
1.1187 check_ralign32( R_EAX );
1.1188 - MOV_r32_r32( R_EAX, R_ECX );
1.1189 - ADD_imm8s_r32( 4, R_EAX );
1.1190 - store_reg( R_EAX, Rm );
1.1191 - MEM_READ_LONG( R_ECX, R_EAX );
1.1192 + MMU_TRANSLATE_READ( R_EAX );
1.1193 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1194 + MEM_READ_LONG( R_EAX, R_EAX );
1.1195 store_spreg( R_EAX, R_MACL );
1.1196 sh4_x86.tstate = TSTATE_NONE;
1.1197 :}
1.1198 @@ -2339,10 +2408,9 @@
1.1199 LDS.L @Rm+, PR {:
1.1200 load_reg( R_EAX, Rm );
1.1201 check_ralign32( R_EAX );
1.1202 - MOV_r32_r32( R_EAX, R_ECX );
1.1203 - ADD_imm8s_r32( 4, R_EAX );
1.1204 - store_reg( R_EAX, Rm );
1.1205 - MEM_READ_LONG( R_ECX, R_EAX );
1.1206 + MMU_TRANSLATE_READ( R_EAX );
1.1207 + ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
1.1208 + MEM_READ_LONG( R_EAX, R_EAX );
1.1209 store_spreg( R_EAX, R_PR );
1.1210 sh4_x86.tstate = TSTATE_NONE;
1.1211 :}
1.1212 @@ -2417,81 +2485,91 @@
1.1213 :}
1.1214 STC.L SR, @-Rn {:
1.1215 check_priv();
1.1216 + load_reg( R_EAX, Rn );
1.1217 + check_walign32( R_EAX );
1.1218 + ADD_imm8s_r32( -4, R_EAX );
1.1219 + MMU_TRANSLATE_WRITE( R_EAX );
1.1220 + PUSH_realigned_r32( R_EAX );
1.1221 call_func0( sh4_read_sr );
1.1222 - load_reg( R_ECX, Rn );
1.1223 - check_walign32( R_ECX );
1.1224 - ADD_imm8s_r32( -4, R_ECX );
1.1225 - store_reg( R_ECX, Rn );
1.1226 + POP_realigned_r32( R_ECX );
1.1227 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1228 MEM_WRITE_LONG( R_ECX, R_EAX );
1.1229 sh4_x86.tstate = TSTATE_NONE;
1.1230 :}
1.1231 STC.L VBR, @-Rn {:
1.1232 check_priv();
1.1233 - load_reg( R_ECX, Rn );
1.1234 - check_walign32( R_ECX );
1.1235 - ADD_imm8s_r32( -4, R_ECX );
1.1236 - store_reg( R_ECX, Rn );
1.1237 - load_spreg( R_EAX, R_VBR );
1.1238 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1239 + load_reg( R_EAX, Rn );
1.1240 + check_walign32( R_EAX );
1.1241 + ADD_imm8s_r32( -4, R_EAX );
1.1242 + MMU_TRANSLATE_WRITE( R_EAX );
1.1243 + load_spreg( R_EDX, R_VBR );
1.1244 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1245 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1246 sh4_x86.tstate = TSTATE_NONE;
1.1247 :}
1.1248 STC.L SSR, @-Rn {:
1.1249 check_priv();
1.1250 - load_reg( R_ECX, Rn );
1.1251 - check_walign32( R_ECX );
1.1252 - ADD_imm8s_r32( -4, R_ECX );
1.1253 - store_reg( R_ECX, Rn );
1.1254 - load_spreg( R_EAX, R_SSR );
1.1255 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1256 + load_reg( R_EAX, Rn );
1.1257 + check_walign32( R_EAX );
1.1258 + ADD_imm8s_r32( -4, R_EAX );
1.1259 + MMU_TRANSLATE_WRITE( R_EAX );
1.1260 + load_spreg( R_EDX, R_SSR );
1.1261 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1262 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1263 sh4_x86.tstate = TSTATE_NONE;
1.1264 :}
1.1265 STC.L SPC, @-Rn {:
1.1266 check_priv();
1.1267 - load_reg( R_ECX, Rn );
1.1268 - check_walign32( R_ECX );
1.1269 - ADD_imm8s_r32( -4, R_ECX );
1.1270 - store_reg( R_ECX, Rn );
1.1271 - load_spreg( R_EAX, R_SPC );
1.1272 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1273 + load_reg( R_EAX, Rn );
1.1274 + check_walign32( R_EAX );
1.1275 + ADD_imm8s_r32( -4, R_EAX );
1.1276 + MMU_TRANSLATE_WRITE( R_EAX );
1.1277 + load_spreg( R_EDX, R_SPC );
1.1278 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1279 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1280 sh4_x86.tstate = TSTATE_NONE;
1.1281 :}
1.1282 STC.L SGR, @-Rn {:
1.1283 check_priv();
1.1284 - load_reg( R_ECX, Rn );
1.1285 - check_walign32( R_ECX );
1.1286 - ADD_imm8s_r32( -4, R_ECX );
1.1287 - store_reg( R_ECX, Rn );
1.1288 - load_spreg( R_EAX, R_SGR );
1.1289 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1290 + load_reg( R_EAX, Rn );
1.1291 + check_walign32( R_EAX );
1.1292 + ADD_imm8s_r32( -4, R_EAX );
1.1293 + MMU_TRANSLATE_WRITE( R_EAX );
1.1294 + load_spreg( R_EDX, R_SGR );
1.1295 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1296 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1297 sh4_x86.tstate = TSTATE_NONE;
1.1298 :}
1.1299 STC.L DBR, @-Rn {:
1.1300 check_priv();
1.1301 - load_reg( R_ECX, Rn );
1.1302 - check_walign32( R_ECX );
1.1303 - ADD_imm8s_r32( -4, R_ECX );
1.1304 - store_reg( R_ECX, Rn );
1.1305 - load_spreg( R_EAX, R_DBR );
1.1306 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1307 + load_reg( R_EAX, Rn );
1.1308 + check_walign32( R_EAX );
1.1309 + ADD_imm8s_r32( -4, R_EAX );
1.1310 + MMU_TRANSLATE_WRITE( R_EAX );
1.1311 + load_spreg( R_EDX, R_DBR );
1.1312 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1313 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1314 sh4_x86.tstate = TSTATE_NONE;
1.1315 :}
1.1316 STC.L Rm_BANK, @-Rn {:
1.1317 check_priv();
1.1318 - load_reg( R_ECX, Rn );
1.1319 - check_walign32( R_ECX );
1.1320 - ADD_imm8s_r32( -4, R_ECX );
1.1321 - store_reg( R_ECX, Rn );
1.1322 - load_spreg( R_EAX, REG_OFFSET(r_bank[Rm_BANK]) );
1.1323 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1324 + load_reg( R_EAX, Rn );
1.1325 + check_walign32( R_EAX );
1.1326 + ADD_imm8s_r32( -4, R_EAX );
1.1327 + MMU_TRANSLATE_WRITE( R_EAX );
1.1328 + load_spreg( R_EDX, REG_OFFSET(r_bank[Rm_BANK]) );
1.1329 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1330 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1331 sh4_x86.tstate = TSTATE_NONE;
1.1332 :}
1.1333 STC.L GBR, @-Rn {:
1.1334 - load_reg( R_ECX, Rn );
1.1335 - check_walign32( R_ECX );
1.1336 - ADD_imm8s_r32( -4, R_ECX );
1.1337 - store_reg( R_ECX, Rn );
1.1338 - load_spreg( R_EAX, R_GBR );
1.1339 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1340 + load_reg( R_EAX, Rn );
1.1341 + check_walign32( R_EAX );
1.1342 + ADD_imm8s_r32( -4, R_EAX );
1.1343 + MMU_TRANSLATE_WRITE( R_EAX );
1.1344 + load_spreg( R_EDX, R_GBR );
1.1345 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1346 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1347 sh4_x86.tstate = TSTATE_NONE;
1.1348 :}
1.1349 STS FPSCR, Rn {:
1.1350 @@ -2499,12 +2577,13 @@
1.1351 store_reg( R_EAX, Rn );
1.1352 :}
1.1353 STS.L FPSCR, @-Rn {:
1.1354 - load_reg( R_ECX, Rn );
1.1355 - check_walign32( R_ECX );
1.1356 - ADD_imm8s_r32( -4, R_ECX );
1.1357 - store_reg( R_ECX, Rn );
1.1358 - load_spreg( R_EAX, R_FPSCR );
1.1359 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1360 + load_reg( R_EAX, Rn );
1.1361 + check_walign32( R_EAX );
1.1362 + ADD_imm8s_r32( -4, R_EAX );
1.1363 + MMU_TRANSLATE_WRITE( R_EAX );
1.1364 + load_spreg( R_EDX, R_FPSCR );
1.1365 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1366 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1367 sh4_x86.tstate = TSTATE_NONE;
1.1368 :}
1.1369 STS FPUL, Rn {:
1.1370 @@ -2512,12 +2591,13 @@
1.1371 store_reg( R_EAX, Rn );
1.1372 :}
1.1373 STS.L FPUL, @-Rn {:
1.1374 - load_reg( R_ECX, Rn );
1.1375 - check_walign32( R_ECX );
1.1376 - ADD_imm8s_r32( -4, R_ECX );
1.1377 - store_reg( R_ECX, Rn );
1.1378 - load_spreg( R_EAX, R_FPUL );
1.1379 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1380 + load_reg( R_EAX, Rn );
1.1381 + check_walign32( R_EAX );
1.1382 + ADD_imm8s_r32( -4, R_EAX );
1.1383 + MMU_TRANSLATE_WRITE( R_EAX );
1.1384 + load_spreg( R_EDX, R_FPUL );
1.1385 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1386 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1387 sh4_x86.tstate = TSTATE_NONE;
1.1388 :}
1.1389 STS MACH, Rn {:
1.1390 @@ -2525,12 +2605,13 @@
1.1391 store_reg( R_EAX, Rn );
1.1392 :}
1.1393 STS.L MACH, @-Rn {:
1.1394 - load_reg( R_ECX, Rn );
1.1395 - check_walign32( R_ECX );
1.1396 - ADD_imm8s_r32( -4, R_ECX );
1.1397 - store_reg( R_ECX, Rn );
1.1398 - load_spreg( R_EAX, R_MACH );
1.1399 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1400 + load_reg( R_EAX, Rn );
1.1401 + check_walign32( R_EAX );
1.1402 + ADD_imm8s_r32( -4, R_EAX );
1.1403 + MMU_TRANSLATE_WRITE( R_EAX );
1.1404 + load_spreg( R_EDX, R_MACH );
1.1405 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1406 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1407 sh4_x86.tstate = TSTATE_NONE;
1.1408 :}
1.1409 STS MACL, Rn {:
1.1410 @@ -2538,12 +2619,13 @@
1.1411 store_reg( R_EAX, Rn );
1.1412 :}
1.1413 STS.L MACL, @-Rn {:
1.1414 - load_reg( R_ECX, Rn );
1.1415 - check_walign32( R_ECX );
1.1416 - ADD_imm8s_r32( -4, R_ECX );
1.1417 - store_reg( R_ECX, Rn );
1.1418 - load_spreg( R_EAX, R_MACL );
1.1419 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1420 + load_reg( R_EAX, Rn );
1.1421 + check_walign32( R_EAX );
1.1422 + ADD_imm8s_r32( -4, R_EAX );
1.1423 + MMU_TRANSLATE_WRITE( R_EAX );
1.1424 + load_spreg( R_EDX, R_MACL );
1.1425 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1426 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1427 sh4_x86.tstate = TSTATE_NONE;
1.1428 :}
1.1429 STS PR, Rn {:
1.1430 @@ -2551,12 +2633,13 @@
1.1431 store_reg( R_EAX, Rn );
1.1432 :}
1.1433 STS.L PR, @-Rn {:
1.1434 - load_reg( R_ECX, Rn );
1.1435 - check_walign32( R_ECX );
1.1436 - ADD_imm8s_r32( -4, R_ECX );
1.1437 - store_reg( R_ECX, Rn );
1.1438 - load_spreg( R_EAX, R_PR );
1.1439 - MEM_WRITE_LONG( R_ECX, R_EAX );
1.1440 + load_reg( R_EAX, Rn );
1.1441 + check_walign32( R_EAX );
1.1442 + ADD_imm8s_r32( -4, R_EAX );
1.1443 + MMU_TRANSLATE_WRITE( R_EAX );
1.1444 + load_spreg( R_EDX, R_PR );
1.1445 + ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
1.1446 + MEM_WRITE_LONG( R_EAX, R_EDX );
1.1447 sh4_x86.tstate = TSTATE_NONE;
1.1448 :}
1.1449
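
A pattern worth noting across the rewritten auto-increment and pre-decrement forms above (MOV.x Rm, @-Rn, MOV.x @Rm+, Rn, LDC.L/LDS.L, STC.L/STS.L): the effective address is computed and passed through MMU_TRANSLATE_READ/WRITE first, and the register update is only committed to sh4r afterwards via ADD_imm8s_sh4r, so a TLB exception raised during translation leaves the register unchanged and the instruction can be restarted. Condensed to the shape the pre-decrement long stores now share, in the file's own emitter macros (a sketch, not an additional opcode in this changeset):

    load_reg( R_EAX, Rn );                      // EAX = Rn
    ADD_imm8s_r32( -4, R_EAX );                 // EAX = Rn - 4, the effective address
    check_walign32( R_EAX );                    // longword alignment check
    MMU_TRANSLATE_WRITE( R_EAX );               // may raise a TLB exception; Rn still untouched
    load_reg( R_EDX, Rm );                      // value to store
    ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );    // commit the decrement only after translation succeeded
    MEM_WRITE_LONG( R_EAX, R_EDX );             // store via the translated address
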