Search
lxdream.org :: lxdream/src/sh4/sh4x86.in :: diff
lxdream 0.9.1
released Jun 29
Download Now
filename src/sh4/sh4x86.in
changeset 559:06714bc64271
prev 553:4e6166258c22
next 561:533f6b478071
author nkeynes
date Tue Jan 01 04:58:57 2008 +0000 (12 years ago)
branch lxdream-mmu
permissions -rw-r--r--
last change Commit first pass at full TLB support - still needs a lot more work
file annotate diff log raw
1.1 --- a/src/sh4/sh4x86.in Thu Dec 20 09:56:07 2007 +0000
1.2 +++ b/src/sh4/sh4x86.in Tue Jan 01 04:58:57 2008 +0000
1.3 @@ -34,6 +34,12 @@
1.4
1.5 #define DEFAULT_BACKPATCH_SIZE 4096
1.6
1.7 +struct backpatch_record {
1.8 + uint32_t *fixup_addr;
1.9 + uint32_t fixup_icount;
1.10 + uint32_t exc_code;
1.11 +};
1.12 +
1.13 /**
1.14 * Struct to manage internal translation state. This state is not saved -
1.15 * it is only valid between calls to sh4_translate_begin_block() and
1.16 @@ -49,7 +55,7 @@
1.17 int tstate;
1.18
1.19 /* Allocated memory for the (block-wide) back-patch list */
1.20 - uint32_t **backpatch_list;
1.21 + struct backpatch_record *backpatch_list;
1.22 uint32_t backpatch_posn;
1.23 uint32_t backpatch_size;
1.24 };
1.25 @@ -75,14 +81,6 @@
1.26 OP(0x70+ (sh4_x86.tstate^1)); OP(rel8); \
1.27 MARK_JMP(rel8, label)
1.28
1.29 -
1.30 -#define EXIT_DATA_ADDR_READ 0
1.31 -#define EXIT_DATA_ADDR_WRITE 7
1.32 -#define EXIT_ILLEGAL 14
1.33 -#define EXIT_SLOT_ILLEGAL 21
1.34 -#define EXIT_FPU_DISABLED 28
1.35 -#define EXIT_SLOT_FPU_DISABLED 35
1.36 -
1.37 static struct sh4_x86_state sh4_x86;
1.38
1.39 static uint32_t max_int = 0x7FFFFFFF;
1.40 @@ -93,26 +91,25 @@
1.41 void sh4_x86_init()
1.42 {
1.43 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
1.44 - sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
1.45 + sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(struct backpatch_record);
1.46 }
1.47
1.48
1.49 -static void sh4_x86_add_backpatch( uint8_t *ptr )
1.50 +static void sh4_x86_add_backpatch( uint8_t *fixup_addr, uint32_t fixup_pc, uint32_t exc_code )
1.51 {
1.52 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
1.53 sh4_x86.backpatch_size <<= 1;
1.54 - sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
1.55 + sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list,
1.56 + sh4_x86.backpatch_size * sizeof(struct backpatch_record));
1.57 assert( sh4_x86.backpatch_list != NULL );
1.58 }
1.59 - sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
1.60 -}
1.61 -
1.62 -static void sh4_x86_do_backpatch( uint8_t *reloc_base )
1.63 -{
1.64 - unsigned int i;
1.65 - for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
1.66 - *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
1.67 + if( sh4_x86.in_delay_slot ) {
1.68 + fixup_pc -= 2;
1.69 }
1.70 + sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_addr = (uint32_t *)fixup_addr;
1.71 + sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_icount = (fixup_pc - sh4_x86.block_start_pc)>>1;
1.72 + sh4_x86.backpatch_list[sh4_x86.backpatch_posn].exc_code = exc_code;
1.73 + sh4_x86.backpatch_posn++;
1.74 }
1.75
1.76 /**
1.77 @@ -266,97 +263,60 @@
1.78 }
1.79
1.80 /* Exception checks - Note that all exception checks will clobber EAX */
1.81 -#define precheck() load_imm32(R_EDX, (pc-sh4_x86.block_start_pc-(sh4_x86.in_delay_slot?2:0))>>1)
1.82
1.83 #define check_priv( ) \
1.84 if( !sh4_x86.priv_checked ) { \
1.85 sh4_x86.priv_checked = TRUE;\
1.86 - precheck();\
1.87 load_spreg( R_EAX, R_SR );\
1.88 AND_imm32_r32( SR_MD, R_EAX );\
1.89 if( sh4_x86.in_delay_slot ) {\
1.90 - JE_exit( EXIT_SLOT_ILLEGAL );\
1.91 + JE_exc( EXC_SLOT_ILLEGAL );\
1.92 } else {\
1.93 - JE_exit( EXIT_ILLEGAL );\
1.94 + JE_exc( EXC_ILLEGAL );\
1.95 }\
1.96 }\
1.97
1.98 -
1.99 -static void check_priv_no_precheck()
1.100 -{
1.101 - if( !sh4_x86.priv_checked ) {
1.102 - sh4_x86.priv_checked = TRUE;
1.103 - load_spreg( R_EAX, R_SR );
1.104 - AND_imm32_r32( SR_MD, R_EAX );
1.105 - if( sh4_x86.in_delay_slot ) {
1.106 - JE_exit( EXIT_SLOT_ILLEGAL );
1.107 - } else {
1.108 - JE_exit( EXIT_ILLEGAL );
1.109 - }
1.110 - }
1.111 -}
1.112 -
1.113 #define check_fpuen( ) \
1.114 if( !sh4_x86.fpuen_checked ) {\
1.115 sh4_x86.fpuen_checked = TRUE;\
1.116 - precheck();\
1.117 load_spreg( R_EAX, R_SR );\
1.118 AND_imm32_r32( SR_FD, R_EAX );\
1.119 if( sh4_x86.in_delay_slot ) {\
1.120 - JNE_exit(EXIT_SLOT_FPU_DISABLED);\
1.121 + JNE_exc(EXC_SLOT_FPU_DISABLED);\
1.122 } else {\
1.123 - JNE_exit(EXIT_FPU_DISABLED);\
1.124 + JNE_exc(EXC_FPU_DISABLED);\
1.125 }\
1.126 }
1.127
1.128 -static void check_fpuen_no_precheck()
1.129 -{
1.130 - if( !sh4_x86.fpuen_checked ) {
1.131 - sh4_x86.fpuen_checked = TRUE;
1.132 - load_spreg( R_EAX, R_SR );
1.133 - AND_imm32_r32( SR_FD, R_EAX );
1.134 - if( sh4_x86.in_delay_slot ) {
1.135 - JNE_exit(EXIT_SLOT_FPU_DISABLED);
1.136 - } else {
1.137 - JNE_exit(EXIT_FPU_DISABLED);
1.138 - }
1.139 - }
1.140 +#define check_ralign16( x86reg ) \
1.141 + TEST_imm32_r32( 0x00000001, x86reg ); \
1.142 + JNE_exc(EXC_DATA_ADDR_READ)
1.143
1.144 -}
1.145 +#define check_walign16( x86reg ) \
1.146 + TEST_imm32_r32( 0x00000001, x86reg ); \
1.147 + JNE_exc(EXC_DATA_ADDR_WRITE);
1.148
1.149 -static void check_ralign16( int x86reg )
1.150 -{
1.151 - TEST_imm32_r32( 0x00000001, x86reg );
1.152 - JNE_exit(EXIT_DATA_ADDR_READ);
1.153 -}
1.154 +#define check_ralign32( x86reg ) \
1.155 + TEST_imm32_r32( 0x00000003, x86reg ); \
1.156 + JNE_exc(EXC_DATA_ADDR_READ)
1.157
1.158 -static void check_walign16( int x86reg )
1.159 -{
1.160 - TEST_imm32_r32( 0x00000001, x86reg );
1.161 - JNE_exit(EXIT_DATA_ADDR_WRITE);
1.162 -}
1.163 -
1.164 -static void check_ralign32( int x86reg )
1.165 -{
1.166 - TEST_imm32_r32( 0x00000003, x86reg );
1.167 - JNE_exit(EXIT_DATA_ADDR_READ);
1.168 -}
1.169 -static void check_walign32( int x86reg )
1.170 -{
1.171 - TEST_imm32_r32( 0x00000003, x86reg );
1.172 - JNE_exit(EXIT_DATA_ADDR_WRITE);
1.173 -}
1.174 +#define check_walign32( x86reg ) \
1.175 + TEST_imm32_r32( 0x00000003, x86reg ); \
1.176 + JNE_exc(EXC_DATA_ADDR_WRITE);
1.177
1.178 #define UNDEF()
1.179 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
1.180 -#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
1.181 -#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
1.182 -#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
1.183 -#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
1.184 -#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
1.185 -#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
1.186 +#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); TEST_r32_r32( R_EDX, R_EDX ); JNE_exc(-1); MEM_RESULT(value_reg)
1.187 +#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); TEST_r32_r32( R_EDX, R_EDX ); JNE_exc(-1); MEM_RESULT(value_reg)
1.188 +#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); TEST_r32_r32( R_EDX, R_EDX ); JNE_exc(-1); MEM_RESULT(value_reg)
1.189 +#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg); TEST_r32_r32( R_EAX, R_EAX ); JNE_exc(-1);
1.190 +#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg); TEST_r32_r32( R_EAX, R_EAX ); JNE_exc(-1);
1.191 +#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg); TEST_r32_r32( R_EAX, R_EAX ); JNE_exc(-1);
1.192
1.193 -#define SLOTILLEGAL() precheck(); JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
1.194 +#define MEM_READ_SIZE (CALL_FUNC1_SIZE+8)
1.195 +#define MEM_WRITE_SIZE (CALL_FUNC2_SIZE+8)
1.196 +
1.197 +#define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
1.198
1.199 extern uint16_t *sh4_icache;
1.200 extern uint32_t sh4_icache_addr;
1.201 @@ -389,7 +349,8 @@
1.202 if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
1.203 ir = sh4_icache[(pc&0xFFF)>>1];
1.204 } else {
1.205 - sh4_icache = (uint16_t *)mem_get_page(pc);
1.206 + uint64_t phys = mmu_vma_to_phys_exec(pc);
1.207 + sh4_icache = (uint16_t *)mem_get_page((uint32_t)phys);
1.208 if( ((uintptr_t)sh4_icache) < MAX_IO_REGIONS ) {
1.209 /* If someone's actually been so daft as to try to execute out of an IO
1.210 * region, fallback on the full-blown memory read
1.211 @@ -619,7 +580,6 @@
1.212 :}
1.213 MAC.L @Rm+, @Rn+ {:
1.214 load_reg( R_ECX, Rm );
1.215 - precheck();
1.216 check_ralign32( R_ECX );
1.217 load_reg( R_ECX, Rn );
1.218 check_ralign32( R_ECX );
1.219 @@ -643,7 +603,6 @@
1.220 :}
1.221 MAC.W @Rm+, @Rn+ {:
1.222 load_reg( R_ECX, Rm );
1.223 - precheck();
1.224 check_ralign16( R_ECX );
1.225 load_reg( R_ECX, Rn );
1.226 check_ralign16( R_ECX );
1.227 @@ -1090,7 +1049,6 @@
1.228 MOV.L Rm, @Rn {:
1.229 load_reg( R_EAX, Rm );
1.230 load_reg( R_ECX, Rn );
1.231 - precheck();
1.232 check_walign32(R_ECX);
1.233 MEM_WRITE_LONG( R_ECX, R_EAX );
1.234 sh4_x86.tstate = TSTATE_NONE;
1.235 @@ -1098,7 +1056,6 @@
1.236 MOV.L Rm, @-Rn {:
1.237 load_reg( R_EAX, Rm );
1.238 load_reg( R_ECX, Rn );
1.239 - precheck();
1.240 check_walign32( R_ECX );
1.241 ADD_imm8s_r32( -4, R_ECX );
1.242 store_reg( R_ECX, Rn );
1.243 @@ -1109,7 +1066,6 @@
1.244 load_reg( R_EAX, 0 );
1.245 load_reg( R_ECX, Rn );
1.246 ADD_r32_r32( R_EAX, R_ECX );
1.247 - precheck();
1.248 check_walign32( R_ECX );
1.249 load_reg( R_EAX, Rm );
1.250 MEM_WRITE_LONG( R_ECX, R_EAX );
1.251 @@ -1119,7 +1075,6 @@
1.252 load_spreg( R_ECX, R_GBR );
1.253 load_reg( R_EAX, 0 );
1.254 ADD_imm32_r32( disp, R_ECX );
1.255 - precheck();
1.256 check_walign32( R_ECX );
1.257 MEM_WRITE_LONG( R_ECX, R_EAX );
1.258 sh4_x86.tstate = TSTATE_NONE;
1.259 @@ -1128,14 +1083,12 @@
1.260 load_reg( R_ECX, Rn );
1.261 load_reg( R_EAX, Rm );
1.262 ADD_imm32_r32( disp, R_ECX );
1.263 - precheck();
1.264 check_walign32( R_ECX );
1.265 MEM_WRITE_LONG( R_ECX, R_EAX );
1.266 sh4_x86.tstate = TSTATE_NONE;
1.267 :}
1.268 MOV.L @Rm, Rn {:
1.269 load_reg( R_ECX, Rm );
1.270 - precheck();
1.271 check_ralign32( R_ECX );
1.272 MEM_READ_LONG( R_ECX, R_EAX );
1.273 store_reg( R_EAX, Rn );
1.274 @@ -1143,7 +1096,6 @@
1.275 :}
1.276 MOV.L @Rm+, Rn {:
1.277 load_reg( R_EAX, Rm );
1.278 - precheck();
1.279 check_ralign32( R_EAX );
1.280 MOV_r32_r32( R_EAX, R_ECX );
1.281 ADD_imm8s_r32( 4, R_EAX );
1.282 @@ -1156,7 +1108,6 @@
1.283 load_reg( R_EAX, 0 );
1.284 load_reg( R_ECX, Rm );
1.285 ADD_r32_r32( R_EAX, R_ECX );
1.286 - precheck();
1.287 check_ralign32( R_ECX );
1.288 MEM_READ_LONG( R_ECX, R_EAX );
1.289 store_reg( R_EAX, Rn );
1.290 @@ -1165,7 +1116,6 @@
1.291 MOV.L @(disp, GBR), R0 {:
1.292 load_spreg( R_ECX, R_GBR );
1.293 ADD_imm32_r32( disp, R_ECX );
1.294 - precheck();
1.295 check_ralign32( R_ECX );
1.296 MEM_READ_LONG( R_ECX, R_EAX );
1.297 store_reg( R_EAX, 0 );
1.298 @@ -1176,7 +1126,7 @@
1.299 SLOTILLEGAL();
1.300 } else {
1.301 uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
1.302 - sh4ptr_t ptr = mem_get_region(target);
1.303 + sh4ptr_t ptr = sh4_get_region_by_vma(target);
1.304 if( ptr != NULL ) {
1.305 MOV_moff32_EAX( ptr );
1.306 } else {
1.307 @@ -1190,7 +1140,6 @@
1.308 MOV.L @(disp, Rm), Rn {:
1.309 load_reg( R_ECX, Rm );
1.310 ADD_imm8s_r32( disp, R_ECX );
1.311 - precheck();
1.312 check_ralign32( R_ECX );
1.313 MEM_READ_LONG( R_ECX, R_EAX );
1.314 store_reg( R_EAX, Rn );
1.315 @@ -1198,7 +1147,6 @@
1.316 :}
1.317 MOV.W Rm, @Rn {:
1.318 load_reg( R_ECX, Rn );
1.319 - precheck();
1.320 check_walign16( R_ECX );
1.321 load_reg( R_EAX, Rm );
1.322 MEM_WRITE_WORD( R_ECX, R_EAX );
1.323 @@ -1206,7 +1154,6 @@
1.324 :}
1.325 MOV.W Rm, @-Rn {:
1.326 load_reg( R_ECX, Rn );
1.327 - precheck();
1.328 check_walign16( R_ECX );
1.329 load_reg( R_EAX, Rm );
1.330 ADD_imm8s_r32( -2, R_ECX );
1.331 @@ -1218,7 +1165,6 @@
1.332 load_reg( R_EAX, 0 );
1.333 load_reg( R_ECX, Rn );
1.334 ADD_r32_r32( R_EAX, R_ECX );
1.335 - precheck();
1.336 check_walign16( R_ECX );
1.337 load_reg( R_EAX, Rm );
1.338 MEM_WRITE_WORD( R_ECX, R_EAX );
1.339 @@ -1228,7 +1174,6 @@
1.340 load_spreg( R_ECX, R_GBR );
1.341 load_reg( R_EAX, 0 );
1.342 ADD_imm32_r32( disp, R_ECX );
1.343 - precheck();
1.344 check_walign16( R_ECX );
1.345 MEM_WRITE_WORD( R_ECX, R_EAX );
1.346 sh4_x86.tstate = TSTATE_NONE;
1.347 @@ -1237,14 +1182,12 @@
1.348 load_reg( R_ECX, Rn );
1.349 load_reg( R_EAX, 0 );
1.350 ADD_imm32_r32( disp, R_ECX );
1.351 - precheck();
1.352 check_walign16( R_ECX );
1.353 MEM_WRITE_WORD( R_ECX, R_EAX );
1.354 sh4_x86.tstate = TSTATE_NONE;
1.355 :}
1.356 MOV.W @Rm, Rn {:
1.357 load_reg( R_ECX, Rm );
1.358 - precheck();
1.359 check_ralign16( R_ECX );
1.360 MEM_READ_WORD( R_ECX, R_EAX );
1.361 store_reg( R_EAX, Rn );
1.362 @@ -1252,7 +1195,6 @@
1.363 :}
1.364 MOV.W @Rm+, Rn {:
1.365 load_reg( R_EAX, Rm );
1.366 - precheck();
1.367 check_ralign16( R_EAX );
1.368 MOV_r32_r32( R_EAX, R_ECX );
1.369 ADD_imm8s_r32( 2, R_EAX );
1.370 @@ -1265,7 +1207,6 @@
1.371 load_reg( R_EAX, 0 );
1.372 load_reg( R_ECX, Rm );
1.373 ADD_r32_r32( R_EAX, R_ECX );
1.374 - precheck();
1.375 check_ralign16( R_ECX );
1.376 MEM_READ_WORD( R_ECX, R_EAX );
1.377 store_reg( R_EAX, Rn );
1.378 @@ -1274,7 +1215,6 @@
1.379 MOV.W @(disp, GBR), R0 {:
1.380 load_spreg( R_ECX, R_GBR );
1.381 ADD_imm32_r32( disp, R_ECX );
1.382 - precheck();
1.383 check_ralign16( R_ECX );
1.384 MEM_READ_WORD( R_ECX, R_EAX );
1.385 store_reg( R_EAX, 0 );
1.386 @@ -1293,7 +1233,6 @@
1.387 MOV.W @(disp, Rm), R0 {:
1.388 load_reg( R_ECX, Rm );
1.389 ADD_imm32_r32( disp, R_ECX );
1.390 - precheck();
1.391 check_ralign16( R_ECX );
1.392 MEM_READ_WORD( R_ECX, R_EAX );
1.393 store_reg( R_EAX, 0 );
1.394 @@ -1310,7 +1249,6 @@
1.395 MOVCA.L R0, @Rn {:
1.396 load_reg( R_EAX, 0 );
1.397 load_reg( R_ECX, Rn );
1.398 - precheck();
1.399 check_walign32( R_ECX );
1.400 MEM_WRITE_LONG( R_ECX, R_EAX );
1.401 sh4_x86.tstate = TSTATE_NONE;
1.402 @@ -1506,8 +1444,7 @@
1.403 if( sh4_x86.in_delay_slot ) {
1.404 SLOTILLEGAL();
1.405 } else {
1.406 - precheck();
1.407 - JMP_exit(EXIT_ILLEGAL);
1.408 + JMP_exc(EXC_ILLEGAL);
1.409 return 2;
1.410 }
1.411 :}
1.412 @@ -1591,13 +1528,12 @@
1.413 sh4_x86.tstate = TSTATE_NONE;
1.414 :}
1.415 FMOV FRm, @Rn {:
1.416 - precheck();
1.417 - check_fpuen_no_precheck();
1.418 + check_fpuen();
1.419 load_reg( R_ECX, Rn );
1.420 check_walign32( R_ECX );
1.421 load_spreg( R_EDX, R_FPSCR );
1.422 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.423 - JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
1.424 + JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
1.425 load_fr_bank( R_EDX );
1.426 load_fr( R_EDX, R_EAX, FRm );
1.427 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.428 @@ -1621,13 +1557,12 @@
1.429 sh4_x86.tstate = TSTATE_NONE;
1.430 :}
1.431 FMOV @Rm, FRn {:
1.432 - precheck();
1.433 - check_fpuen_no_precheck();
1.434 + check_fpuen();
1.435 load_reg( R_ECX, Rm );
1.436 check_ralign32( R_ECX );
1.437 load_spreg( R_EDX, R_FPSCR );
1.438 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.439 - JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
1.440 + JNE_rel8(8 + MEM_READ_SIZE, doublesize);
1.441 MEM_READ_LONG( R_ECX, R_EAX );
1.442 load_fr_bank( R_EDX );
1.443 store_fr( R_EDX, R_EAX, FRn );
1.444 @@ -1652,13 +1587,12 @@
1.445 sh4_x86.tstate = TSTATE_NONE;
1.446 :}
1.447 FMOV FRm, @-Rn {:
1.448 - precheck();
1.449 - check_fpuen_no_precheck();
1.450 + check_fpuen();
1.451 load_reg( R_ECX, Rn );
1.452 check_walign32( R_ECX );
1.453 load_spreg( R_EDX, R_FPSCR );
1.454 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.455 - JNE_rel8(14 + CALL_FUNC2_SIZE, doublesize);
1.456 + JNE_rel8(14 + MEM_WRITE_SIZE, doublesize);
1.457 load_fr_bank( R_EDX );
1.458 load_fr( R_EDX, R_EAX, FRm );
1.459 ADD_imm8s_r32(-4,R_ECX);
1.460 @@ -1688,14 +1622,13 @@
1.461 sh4_x86.tstate = TSTATE_NONE;
1.462 :}
1.463 FMOV @Rm+, FRn {:
1.464 - precheck();
1.465 - check_fpuen_no_precheck();
1.466 + check_fpuen();
1.467 load_reg( R_ECX, Rm );
1.468 check_ralign32( R_ECX );
1.469 MOV_r32_r32( R_ECX, R_EAX );
1.470 load_spreg( R_EDX, R_FPSCR );
1.471 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.472 - JNE_rel8(14 + CALL_FUNC1_SIZE, doublesize);
1.473 + JNE_rel8(14 + MEM_READ_SIZE, doublesize);
1.474 ADD_imm8s_r32( 4, R_EAX );
1.475 store_reg( R_EAX, Rm );
1.476 MEM_READ_LONG( R_ECX, R_EAX );
1.477 @@ -1725,14 +1658,13 @@
1.478 sh4_x86.tstate = TSTATE_NONE;
1.479 :}
1.480 FMOV FRm, @(R0, Rn) {:
1.481 - precheck();
1.482 - check_fpuen_no_precheck();
1.483 + check_fpuen();
1.484 load_reg( R_ECX, Rn );
1.485 ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
1.486 check_walign32( R_ECX );
1.487 load_spreg( R_EDX, R_FPSCR );
1.488 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.489 - JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
1.490 + JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
1.491 load_fr_bank( R_EDX );
1.492 load_fr( R_EDX, R_EAX, FRm );
1.493 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.494 @@ -1756,14 +1688,13 @@
1.495 sh4_x86.tstate = TSTATE_NONE;
1.496 :}
1.497 FMOV @(R0, Rm), FRn {:
1.498 - precheck();
1.499 - check_fpuen_no_precheck();
1.500 + check_fpuen();
1.501 load_reg( R_ECX, Rm );
1.502 ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
1.503 check_ralign32( R_ECX );
1.504 load_spreg( R_EDX, R_FPSCR );
1.505 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.506 - JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
1.507 + JNE_rel8(8 + MEM_READ_SIZE, doublesize);
1.508 MEM_READ_LONG( R_ECX, R_EAX );
1.509 load_fr_bank( R_EDX );
1.510 store_fr( R_EDX, R_EAX, FRn );
1.511 @@ -2222,7 +2153,6 @@
1.512 :}
1.513 LDC.L @Rm+, GBR {:
1.514 load_reg( R_EAX, Rm );
1.515 - precheck();
1.516 check_ralign32( R_EAX );
1.517 MOV_r32_r32( R_EAX, R_ECX );
1.518 ADD_imm8s_r32( 4, R_EAX );
1.519 @@ -2235,8 +2165,7 @@
1.520 if( sh4_x86.in_delay_slot ) {
1.521 SLOTILLEGAL();
1.522 } else {
1.523 - precheck();
1.524 - check_priv_no_precheck();
1.525 + check_priv();
1.526 load_reg( R_EAX, Rm );
1.527 check_ralign32( R_EAX );
1.528 MOV_r32_r32( R_EAX, R_ECX );
1.529 @@ -2250,8 +2179,7 @@
1.530 }
1.531 :}
1.532 LDC.L @Rm+, VBR {:
1.533 - precheck();
1.534 - check_priv_no_precheck();
1.535 + check_priv();
1.536 load_reg( R_EAX, Rm );
1.537 check_ralign32( R_EAX );
1.538 MOV_r32_r32( R_EAX, R_ECX );
1.539 @@ -2262,8 +2190,7 @@
1.540 sh4_x86.tstate = TSTATE_NONE;
1.541 :}
1.542 LDC.L @Rm+, SSR {:
1.543 - precheck();
1.544 - check_priv_no_precheck();
1.545 + check_priv();
1.546 load_reg( R_EAX, Rm );
1.547 check_ralign32( R_EAX );
1.548 MOV_r32_r32( R_EAX, R_ECX );
1.549 @@ -2274,8 +2201,7 @@
1.550 sh4_x86.tstate = TSTATE_NONE;
1.551 :}
1.552 LDC.L @Rm+, SGR {:
1.553 - precheck();
1.554 - check_priv_no_precheck();
1.555 + check_priv();
1.556 load_reg( R_EAX, Rm );
1.557 check_ralign32( R_EAX );
1.558 MOV_r32_r32( R_EAX, R_ECX );
1.559 @@ -2286,8 +2212,7 @@
1.560 sh4_x86.tstate = TSTATE_NONE;
1.561 :}
1.562 LDC.L @Rm+, SPC {:
1.563 - precheck();
1.564 - check_priv_no_precheck();
1.565 + check_priv();
1.566 load_reg( R_EAX, Rm );
1.567 check_ralign32( R_EAX );
1.568 MOV_r32_r32( R_EAX, R_ECX );
1.569 @@ -2298,8 +2223,7 @@
1.570 sh4_x86.tstate = TSTATE_NONE;
1.571 :}
1.572 LDC.L @Rm+, DBR {:
1.573 - precheck();
1.574 - check_priv_no_precheck();
1.575 + check_priv();
1.576 load_reg( R_EAX, Rm );
1.577 check_ralign32( R_EAX );
1.578 MOV_r32_r32( R_EAX, R_ECX );
1.579 @@ -2310,8 +2234,7 @@
1.580 sh4_x86.tstate = TSTATE_NONE;
1.581 :}
1.582 LDC.L @Rm+, Rn_BANK {:
1.583 - precheck();
1.584 - check_priv_no_precheck();
1.585 + check_priv();
1.586 load_reg( R_EAX, Rm );
1.587 check_ralign32( R_EAX );
1.588 MOV_r32_r32( R_EAX, R_ECX );
1.589 @@ -2329,7 +2252,6 @@
1.590 :}
1.591 LDS.L @Rm+, FPSCR {:
1.592 load_reg( R_EAX, Rm );
1.593 - precheck();
1.594 check_ralign32( R_EAX );
1.595 MOV_r32_r32( R_EAX, R_ECX );
1.596 ADD_imm8s_r32( 4, R_EAX );
1.597 @@ -2345,7 +2267,6 @@
1.598 :}
1.599 LDS.L @Rm+, FPUL {:
1.600 load_reg( R_EAX, Rm );
1.601 - precheck();
1.602 check_ralign32( R_EAX );
1.603 MOV_r32_r32( R_EAX, R_ECX );
1.604 ADD_imm8s_r32( 4, R_EAX );
1.605 @@ -2360,7 +2281,6 @@
1.606 :}
1.607 LDS.L @Rm+, MACH {:
1.608 load_reg( R_EAX, Rm );
1.609 - precheck();
1.610 check_ralign32( R_EAX );
1.611 MOV_r32_r32( R_EAX, R_ECX );
1.612 ADD_imm8s_r32( 4, R_EAX );
1.613 @@ -2375,7 +2295,6 @@
1.614 :}
1.615 LDS.L @Rm+, MACL {:
1.616 load_reg( R_EAX, Rm );
1.617 - precheck();
1.618 check_ralign32( R_EAX );
1.619 MOV_r32_r32( R_EAX, R_ECX );
1.620 ADD_imm8s_r32( 4, R_EAX );
1.621 @@ -2390,7 +2309,6 @@
1.622 :}
1.623 LDS.L @Rm+, PR {:
1.624 load_reg( R_EAX, Rm );
1.625 - precheck();
1.626 check_ralign32( R_EAX );
1.627 MOV_r32_r32( R_EAX, R_ECX );
1.628 ADD_imm8s_r32( 4, R_EAX );
1.629 @@ -2469,8 +2387,7 @@
1.630 sh4_x86.tstate = TSTATE_NONE;
1.631 :}
1.632 STC.L SR, @-Rn {:
1.633 - precheck();
1.634 - check_priv_no_precheck();
1.635 + check_priv();
1.636 call_func0( sh4_read_sr );
1.637 load_reg( R_ECX, Rn );
1.638 check_walign32( R_ECX );
1.639 @@ -2480,8 +2397,7 @@
1.640 sh4_x86.tstate = TSTATE_NONE;
1.641 :}
1.642 STC.L VBR, @-Rn {:
1.643 - precheck();
1.644 - check_priv_no_precheck();
1.645 + check_priv();
1.646 load_reg( R_ECX, Rn );
1.647 check_walign32( R_ECX );
1.648 ADD_imm8s_r32( -4, R_ECX );
1.649 @@ -2491,8 +2407,7 @@
1.650 sh4_x86.tstate = TSTATE_NONE;
1.651 :}
1.652 STC.L SSR, @-Rn {:
1.653 - precheck();
1.654 - check_priv_no_precheck();
1.655 + check_priv();
1.656 load_reg( R_ECX, Rn );
1.657 check_walign32( R_ECX );
1.658 ADD_imm8s_r32( -4, R_ECX );
1.659 @@ -2502,8 +2417,7 @@
1.660 sh4_x86.tstate = TSTATE_NONE;
1.661 :}
1.662 STC.L SPC, @-Rn {:
1.663 - precheck();
1.664 - check_priv_no_precheck();
1.665 + check_priv();
1.666 load_reg( R_ECX, Rn );
1.667 check_walign32( R_ECX );
1.668 ADD_imm8s_r32( -4, R_ECX );
1.669 @@ -2513,8 +2427,7 @@
1.670 sh4_x86.tstate = TSTATE_NONE;
1.671 :}
1.672 STC.L SGR, @-Rn {:
1.673 - precheck();
1.674 - check_priv_no_precheck();
1.675 + check_priv();
1.676 load_reg( R_ECX, Rn );
1.677 check_walign32( R_ECX );
1.678 ADD_imm8s_r32( -4, R_ECX );
1.679 @@ -2524,8 +2437,7 @@
1.680 sh4_x86.tstate = TSTATE_NONE;
1.681 :}
1.682 STC.L DBR, @-Rn {:
1.683 - precheck();
1.684 - check_priv_no_precheck();
1.685 + check_priv();
1.686 load_reg( R_ECX, Rn );
1.687 check_walign32( R_ECX );
1.688 ADD_imm8s_r32( -4, R_ECX );
1.689 @@ -2535,8 +2447,7 @@
1.690 sh4_x86.tstate = TSTATE_NONE;
1.691 :}
1.692 STC.L Rm_BANK, @-Rn {:
1.693 - precheck();
1.694 - check_priv_no_precheck();
1.695 + check_priv();
1.696 load_reg( R_ECX, Rn );
1.697 check_walign32( R_ECX );
1.698 ADD_imm8s_r32( -4, R_ECX );
1.699 @@ -2547,7 +2458,6 @@
1.700 :}
1.701 STC.L GBR, @-Rn {:
1.702 load_reg( R_ECX, Rn );
1.703 - precheck();
1.704 check_walign32( R_ECX );
1.705 ADD_imm8s_r32( -4, R_ECX );
1.706 store_reg( R_ECX, Rn );
1.707 @@ -2561,7 +2471,6 @@
1.708 :}
1.709 STS.L FPSCR, @-Rn {:
1.710 load_reg( R_ECX, Rn );
1.711 - precheck();
1.712 check_walign32( R_ECX );
1.713 ADD_imm8s_r32( -4, R_ECX );
1.714 store_reg( R_ECX, Rn );
1.715 @@ -2575,7 +2484,6 @@
1.716 :}
1.717 STS.L FPUL, @-Rn {:
1.718 load_reg( R_ECX, Rn );
1.719 - precheck();
1.720 check_walign32( R_ECX );
1.721 ADD_imm8s_r32( -4, R_ECX );
1.722 store_reg( R_ECX, Rn );
1.723 @@ -2589,7 +2497,6 @@
1.724 :}
1.725 STS.L MACH, @-Rn {:
1.726 load_reg( R_ECX, Rn );
1.727 - precheck();
1.728 check_walign32( R_ECX );
1.729 ADD_imm8s_r32( -4, R_ECX );
1.730 store_reg( R_ECX, Rn );
1.731 @@ -2603,7 +2510,6 @@
1.732 :}
1.733 STS.L MACL, @-Rn {:
1.734 load_reg( R_ECX, Rn );
1.735 - precheck();
1.736 check_walign32( R_ECX );
1.737 ADD_imm8s_r32( -4, R_ECX );
1.738 store_reg( R_ECX, Rn );
1.739 @@ -2617,7 +2523,6 @@
1.740 :}
1.741 STS.L PR, @-Rn {:
1.742 load_reg( R_ECX, Rn );
1.743 - precheck();
1.744 check_walign32( R_ECX );
1.745 ADD_imm8s_r32( -4, R_ECX );
1.746 store_reg( R_ECX, Rn );
.