lxdream.org :: lxdream/src/sh4/sh4x86.c :: diff
filename src/sh4/sh4x86.c
changeset 559:06714bc64271
prev 553:4e6166258c22
next 561:533f6b478071
author nkeynes
date Tue Jan 01 04:58:57 2008 +0000
branch lxdream-mmu
permissions -rw-r--r--
last change Commit first pass at full TLB support - still needs a lot more work
1.1 --- a/src/sh4/sh4x86.c Thu Dec 20 09:56:07 2007 +0000
1.2 +++ b/src/sh4/sh4x86.c Tue Jan 01 04:58:57 2008 +0000
1.3 @@ -34,6 +34,12 @@
1.4
1.5 #define DEFAULT_BACKPATCH_SIZE 4096
1.6
1.7 +struct backpatch_record {
1.8 + uint32_t *fixup_addr;
1.9 + uint32_t fixup_icount;
1.10 + uint32_t exc_code;
1.11 +};
1.12 +
1.13 /**
1.14 * Struct to manage internal translation state. This state is not saved -
1.15 * it is only valid between calls to sh4_translate_begin_block() and
1.16 @@ -49,7 +55,7 @@
1.17 int tstate;
1.18
1.19 /* Allocated memory for the (block-wide) back-patch list */
1.20 - uint32_t **backpatch_list;
1.21 + struct backpatch_record *backpatch_list;
1.22 uint32_t backpatch_posn;
1.23 uint32_t backpatch_size;
1.24 };
1.25 @@ -75,14 +81,6 @@
1.26 OP(0x70+ (sh4_x86.tstate^1)); OP(rel8); \
1.27 MARK_JMP(rel8, label)
1.28
1.29 -
1.30 -#define EXIT_DATA_ADDR_READ 0
1.31 -#define EXIT_DATA_ADDR_WRITE 7
1.32 -#define EXIT_ILLEGAL 14
1.33 -#define EXIT_SLOT_ILLEGAL 21
1.34 -#define EXIT_FPU_DISABLED 28
1.35 -#define EXIT_SLOT_FPU_DISABLED 35
1.36 -
1.37 static struct sh4_x86_state sh4_x86;
1.38
1.39 static uint32_t max_int = 0x7FFFFFFF;
1.40 @@ -93,26 +91,25 @@
1.41 void sh4_x86_init()
1.42 {
1.43 sh4_x86.backpatch_list = malloc(DEFAULT_BACKPATCH_SIZE);
1.44 - sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(uint32_t *);
1.45 + sh4_x86.backpatch_size = DEFAULT_BACKPATCH_SIZE / sizeof(struct backpatch_record);
1.46 }
1.47
1.48
1.49 -static void sh4_x86_add_backpatch( uint8_t *ptr )
1.50 +static void sh4_x86_add_backpatch( uint8_t *fixup_addr, uint32_t fixup_pc, uint32_t exc_code )
1.51 {
1.52 if( sh4_x86.backpatch_posn == sh4_x86.backpatch_size ) {
1.53 sh4_x86.backpatch_size <<= 1;
1.54 - sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list, sh4_x86.backpatch_size * sizeof(uint32_t *) );
1.55 + sh4_x86.backpatch_list = realloc( sh4_x86.backpatch_list,
1.56 + sh4_x86.backpatch_size * sizeof(struct backpatch_record));
1.57 assert( sh4_x86.backpatch_list != NULL );
1.58 }
1.59 - sh4_x86.backpatch_list[sh4_x86.backpatch_posn++] = (uint32_t *)ptr;
1.60 -}
1.61 -
1.62 -static void sh4_x86_do_backpatch( uint8_t *reloc_base )
1.63 -{
1.64 - unsigned int i;
1.65 - for( i=0; i<sh4_x86.backpatch_posn; i++ ) {
1.66 - *sh4_x86.backpatch_list[i] += (reloc_base - ((uint8_t *)sh4_x86.backpatch_list[i]) - 4);
1.67 + if( sh4_x86.in_delay_slot ) {
1.68 + fixup_pc -= 2;
1.69 }
1.70 + sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_addr = (uint32_t *)fixup_addr;
1.71 + sh4_x86.backpatch_list[sh4_x86.backpatch_posn].fixup_icount = (fixup_pc - sh4_x86.block_start_pc)>>1;
1.72 + sh4_x86.backpatch_list[sh4_x86.backpatch_posn].exc_code = exc_code;
1.73 + sh4_x86.backpatch_posn++;
1.74 }
1.75
1.76 /**
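
The back-patch list now keeps a full record per fixup site (the address to patch in the emitted x86 code, the SH4 instruction index within the block, and the exception code to raise) instead of a bare pointer, and the delay-slot PC adjustment has moved into the add routine itself. Below is a minimal, self-contained sketch of that growable record table: the struct fields and the doubling realloc mirror the hunk above, while the init sizing, the explicit in_delay_slot parameter and the main() harness are illustrative stand-ins rather than lxdream's own code.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct backpatch_record {
    uint32_t *fixup_addr;    /* location in the emitted x86 code to patch */
    uint32_t  fixup_icount;  /* SH4 instruction index within the block */
    uint32_t  exc_code;      /* exception to raise if the patched branch fires */
};

static struct backpatch_record *backpatch_list;
static uint32_t backpatch_posn, backpatch_size;

static void backpatch_init(void)
{
    backpatch_size = 4096 / sizeof(struct backpatch_record);
    backpatch_list = malloc(backpatch_size * sizeof(struct backpatch_record));
    assert(backpatch_list != NULL);
}

static void add_backpatch(uint32_t *fixup_addr, uint32_t fixup_pc,
                          uint32_t block_start_pc, int in_delay_slot,
                          uint32_t exc_code)
{
    if (backpatch_posn == backpatch_size) {
        backpatch_size <<= 1;                       /* double the table when full */
        backpatch_list = realloc(backpatch_list,
                backpatch_size * sizeof(struct backpatch_record));
        assert(backpatch_list != NULL);
    }
    if (in_delay_slot)
        fixup_pc -= 2;                              /* delay slot: count from the branch */
    backpatch_list[backpatch_posn].fixup_addr   = fixup_addr;
    backpatch_list[backpatch_posn].fixup_icount = (fixup_pc - block_start_pc) >> 1;
    backpatch_list[backpatch_posn].exc_code     = exc_code;
    backpatch_posn++;
}

int main(void)
{
    uint32_t dummy;
    backpatch_init();
    add_backpatch(&dummy, 0x8C000004u, 0x8C000000u, 0, 0x0E0 /* illustrative exc code */);
    printf("records: %u, icount of first: %u\n",
           backpatch_posn, backpatch_list[0].fixup_icount);
    return 0;
}
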
1.77 @@ -266,97 +263,60 @@
1.78 }
1.79
1.80 /* Exception checks - Note that all exception checks will clobber EAX */
1.81 -#define precheck() load_imm32(R_EDX, (pc-sh4_x86.block_start_pc-(sh4_x86.in_delay_slot?2:0))>>1)
1.82
1.83 #define check_priv( ) \
1.84 if( !sh4_x86.priv_checked ) { \
1.85 sh4_x86.priv_checked = TRUE;\
1.86 - precheck();\
1.87 load_spreg( R_EAX, R_SR );\
1.88 AND_imm32_r32( SR_MD, R_EAX );\
1.89 if( sh4_x86.in_delay_slot ) {\
1.90 - JE_exit( EXIT_SLOT_ILLEGAL );\
1.91 + JE_exc( EXC_SLOT_ILLEGAL );\
1.92 } else {\
1.93 - JE_exit( EXIT_ILLEGAL );\
1.94 + JE_exc( EXC_ILLEGAL );\
1.95 }\
1.96 }\
1.97
1.98 -
1.99 -static void check_priv_no_precheck()
1.100 -{
1.101 - if( !sh4_x86.priv_checked ) {
1.102 - sh4_x86.priv_checked = TRUE;
1.103 - load_spreg( R_EAX, R_SR );
1.104 - AND_imm32_r32( SR_MD, R_EAX );
1.105 - if( sh4_x86.in_delay_slot ) {
1.106 - JE_exit( EXIT_SLOT_ILLEGAL );
1.107 - } else {
1.108 - JE_exit( EXIT_ILLEGAL );
1.109 - }
1.110 - }
1.111 -}
1.112 -
1.113 #define check_fpuen( ) \
1.114 if( !sh4_x86.fpuen_checked ) {\
1.115 sh4_x86.fpuen_checked = TRUE;\
1.116 - precheck();\
1.117 load_spreg( R_EAX, R_SR );\
1.118 AND_imm32_r32( SR_FD, R_EAX );\
1.119 if( sh4_x86.in_delay_slot ) {\
1.120 - JNE_exit(EXIT_SLOT_FPU_DISABLED);\
1.121 + JNE_exc(EXC_SLOT_FPU_DISABLED);\
1.122 } else {\
1.123 - JNE_exit(EXIT_FPU_DISABLED);\
1.124 + JNE_exc(EXC_FPU_DISABLED);\
1.125 }\
1.126 }
1.127
1.128 -static void check_fpuen_no_precheck()
1.129 -{
1.130 - if( !sh4_x86.fpuen_checked ) {
1.131 - sh4_x86.fpuen_checked = TRUE;
1.132 - load_spreg( R_EAX, R_SR );
1.133 - AND_imm32_r32( SR_FD, R_EAX );
1.134 - if( sh4_x86.in_delay_slot ) {
1.135 - JNE_exit(EXIT_SLOT_FPU_DISABLED);
1.136 - } else {
1.137 - JNE_exit(EXIT_FPU_DISABLED);
1.138 - }
1.139 - }
1.140 +#define check_ralign16( x86reg ) \
1.141 + TEST_imm32_r32( 0x00000001, x86reg ); \
1.142 + JNE_exc(EXC_DATA_ADDR_READ)
1.143
1.144 -}
1.145 +#define check_walign16( x86reg ) \
1.146 + TEST_imm32_r32( 0x00000001, x86reg ); \
1.147 + JNE_exc(EXC_DATA_ADDR_WRITE);
1.148
1.149 -static void check_ralign16( int x86reg )
1.150 -{
1.151 - TEST_imm32_r32( 0x00000001, x86reg );
1.152 - JNE_exit(EXIT_DATA_ADDR_READ);
1.153 -}
1.154 +#define check_ralign32( x86reg ) \
1.155 + TEST_imm32_r32( 0x00000003, x86reg ); \
1.156 + JNE_exc(EXC_DATA_ADDR_READ)
1.157
1.158 -static void check_walign16( int x86reg )
1.159 -{
1.160 - TEST_imm32_r32( 0x00000001, x86reg );
1.161 - JNE_exit(EXIT_DATA_ADDR_WRITE);
1.162 -}
1.163 -
1.164 -static void check_ralign32( int x86reg )
1.165 -{
1.166 - TEST_imm32_r32( 0x00000003, x86reg );
1.167 - JNE_exit(EXIT_DATA_ADDR_READ);
1.168 -}
1.169 -static void check_walign32( int x86reg )
1.170 -{
1.171 - TEST_imm32_r32( 0x00000003, x86reg );
1.172 - JNE_exit(EXIT_DATA_ADDR_WRITE);
1.173 -}
1.174 +#define check_walign32( x86reg ) \
1.175 + TEST_imm32_r32( 0x00000003, x86reg ); \
1.176 + JNE_exc(EXC_DATA_ADDR_WRITE);
1.177
1.178 #define UNDEF()
1.179 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
1.180 -#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
1.181 -#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
1.182 -#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
1.183 -#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
1.184 -#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
1.185 -#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
1.186 +#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); TEST_r32_r32( R_EDX, R_EDX ); JNE_exc(-1); MEM_RESULT(value_reg)
1.187 +#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); TEST_r32_r32( R_EDX, R_EDX ); JNE_exc(-1); MEM_RESULT(value_reg)
1.188 +#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); TEST_r32_r32( R_EDX, R_EDX ); JNE_exc(-1); MEM_RESULT(value_reg)
1.189 +#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg); TEST_r32_r32( R_EAX, R_EAX ); JNE_exc(-1);
1.190 +#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg); TEST_r32_r32( R_EAX, R_EAX ); JNE_exc(-1);
1.191 +#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg); TEST_r32_r32( R_EAX, R_EAX ); JNE_exc(-1);
1.192
1.193 -#define SLOTILLEGAL() precheck(); JMP_exit(EXIT_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
1.194 +#define MEM_READ_SIZE (CALL_FUNC1_SIZE+8)
1.195 +#define MEM_WRITE_SIZE (CALL_FUNC2_SIZE+8)
1.196 +
1.197 +#define SLOTILLEGAL() JMP_exc(EXC_SLOT_ILLEGAL); sh4_x86.in_delay_slot = FALSE; return 1;
1.198
1.199 extern uint16_t *sh4_icache;
1.200 extern uint32_t sh4_icache_addr;
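
The MEM_READ_* / MEM_WRITE_* macros now test a status after each call (EDX for reads, EAX for writes) and branch to the exception path with code -1, which appears to mean "the handler already posted the exception, just unwind". A rough C model of that convention is sketched below; the demo_ names, the read_result struct and the fault trigger are assumptions for illustration, not lxdream's actual accessor API.

#include <stdint.h>
#include <stdio.h>

/* Rough model of the status-returning access convention implied by the new
 * MEM_READ_* macros: a read hands back the loaded value plus a separate
 * "exception pending" flag (EAX / EDX in the generated code). */
typedef struct {
    uint32_t value;    /* loaded data (EAX) */
    int32_t  status;   /* 0 = ok, nonzero = exception already raised (EDX) */
} read_result;

static uint32_t ram[1024];                 /* 4KB of pretend guest memory */

static read_result demo_read_long(uint32_t addr)
{
    read_result r = { 0, 0 };
    if (addr >= sizeof(ram)) {             /* stand-in for a fault raised inside
                                              the handler, e.g. a TLB miss */
        r.status = -1;
        return r;
    }
    r.value = ram[addr >> 2];
    return r;
}

/* MEM_READ_LONG + TEST EDX,EDX + JNE_exc(-1), expressed in C: if the handler
 * flagged an exception, abandon the instruction; -1 tells the epilogue that
 * the exception has already been posted by the callee. */
static int mov_l_load(uint32_t addr, uint32_t *rn)
{
    read_result r = demo_read_long(addr);
    if (r.status != 0)
        return -1;                         /* the JNE_exc(-1) path */
    *rn = r.value;                         /* MEM_RESULT(value_reg) */
    return 0;
}

int main(void)
{
    uint32_t rn = 0;
    printf("in range:     %d\n", mov_l_load(0x10, &rn));
    printf("out of range: %d\n", mov_l_load(0x10000, &rn));
    return 0;
}
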
1.201 @@ -389,7 +349,8 @@
1.202 if( sh4_icache != NULL && pageaddr == sh4_icache_addr ) {
1.203 ir = sh4_icache[(pc&0xFFF)>>1];
1.204 } else {
1.205 - sh4_icache = (uint16_t *)mem_get_page(pc);
1.206 + uint64_t phys = mmu_vma_to_phys_exec(pc);
1.207 + sh4_icache = (uint16_t *)mem_get_page((uint32_t)phys);
1.208 if( ((uintptr_t)sh4_icache) < MAX_IO_REGIONS ) {
1.209 /* If someone's actually been so daft as to try to execute out of an IO
1.210 * region, fallback on the full-blown memory read
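
Instruction fetch on this branch translates the virtual PC through mmu_vma_to_phys_exec() before asking mem_get_page() for the backing page. The sketch below shows a page-cached fetch under that scheme; everything prefixed demo_, the trivial masking translation and the single fake page are assumptions made to keep the example self-contained, not the real lxdream implementation (which, per the surrounding context, also has to cope with IO regions and TLB misses).

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define PAGE_MASK 0xFFFFF000u

static uint16_t  fake_ram[0x1000 / 2];   /* one 4KB page of pretend code */
static uint16_t *icache_page;            /* host pointer to the cached page */
static uint32_t  icache_vma = 0xFFFFFFFFu;

static uint32_t demo_vma_to_phys_exec(uint32_t vma)
{
    return vma & 0x1FFFFFFFu;            /* trivial mapping for the sketch */
}

static uint16_t *demo_get_page(uint32_t phys)
{
    (void)phys;
    return fake_ram;                     /* every page backed by the same buffer */
}

static uint16_t fetch_instruction(uint32_t pc)
{
    uint32_t pageaddr = pc & PAGE_MASK;
    if (icache_page == NULL || pageaddr != icache_vma) {
        uint32_t phys = demo_vma_to_phys_exec(pc);  /* translate first ... */
        icache_page = demo_get_page(phys);          /* ... then find the page */
        icache_vma  = pageaddr;
    }
    return icache_page[(pc & 0xFFF) >> 1];          /* 16-bit SH4 opcodes */
}

int main(void)
{
    fake_ram[0x100 >> 1] = 0x0009;       /* NOP at offset 0x100 */
    printf("%04x\n", fetch_instruction(0x8C000100u));
    return 0;
}
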
1.211 @@ -540,7 +501,6 @@
1.212 uint32_t Rn = ((ir>>8)&0xF);
1.213 load_reg( R_EAX, 0 );
1.214 load_reg( R_ECX, Rn );
1.215 - precheck();
1.216 check_walign32( R_ECX );
1.217 MEM_WRITE_LONG( R_ECX, R_EAX );
1.218 sh4_x86.tstate = TSTATE_NONE;
1.219 @@ -568,7 +528,6 @@
1.220 load_reg( R_EAX, 0 );
1.221 load_reg( R_ECX, Rn );
1.222 ADD_r32_r32( R_EAX, R_ECX );
1.223 - precheck();
1.224 check_walign16( R_ECX );
1.225 load_reg( R_EAX, Rm );
1.226 MEM_WRITE_WORD( R_ECX, R_EAX );
1.227 @@ -581,7 +540,6 @@
1.228 load_reg( R_EAX, 0 );
1.229 load_reg( R_ECX, Rn );
1.230 ADD_r32_r32( R_EAX, R_ECX );
1.231 - precheck();
1.232 check_walign32( R_ECX );
1.233 load_reg( R_EAX, Rm );
1.234 MEM_WRITE_LONG( R_ECX, R_EAX );
1.235 @@ -803,7 +761,6 @@
1.236 load_reg( R_EAX, 0 );
1.237 load_reg( R_ECX, Rm );
1.238 ADD_r32_r32( R_EAX, R_ECX );
1.239 - precheck();
1.240 check_ralign16( R_ECX );
1.241 MEM_READ_WORD( R_ECX, R_EAX );
1.242 store_reg( R_EAX, Rn );
1.243 @@ -816,7 +773,6 @@
1.244 load_reg( R_EAX, 0 );
1.245 load_reg( R_ECX, Rm );
1.246 ADD_r32_r32( R_EAX, R_ECX );
1.247 - precheck();
1.248 check_ralign32( R_ECX );
1.249 MEM_READ_LONG( R_ECX, R_EAX );
1.250 store_reg( R_EAX, Rn );
1.251 @@ -827,7 +783,6 @@
1.252 { /* MAC.L @Rm+, @Rn+ */
1.253 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.254 load_reg( R_ECX, Rm );
1.255 - precheck();
1.256 check_ralign32( R_ECX );
1.257 load_reg( R_ECX, Rn );
1.258 check_ralign32( R_ECX );
1.259 @@ -861,7 +816,6 @@
1.260 load_reg( R_ECX, Rn );
1.261 load_reg( R_EAX, Rm );
1.262 ADD_imm32_r32( disp, R_ECX );
1.263 - precheck();
1.264 check_walign32( R_ECX );
1.265 MEM_WRITE_LONG( R_ECX, R_EAX );
1.266 sh4_x86.tstate = TSTATE_NONE;
1.267 @@ -882,7 +836,6 @@
1.268 { /* MOV.W Rm, @Rn */
1.269 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.270 load_reg( R_ECX, Rn );
1.271 - precheck();
1.272 check_walign16( R_ECX );
1.273 load_reg( R_EAX, Rm );
1.274 MEM_WRITE_WORD( R_ECX, R_EAX );
1.275 @@ -894,7 +847,6 @@
1.276 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.277 load_reg( R_EAX, Rm );
1.278 load_reg( R_ECX, Rn );
1.279 - precheck();
1.280 check_walign32(R_ECX);
1.281 MEM_WRITE_LONG( R_ECX, R_EAX );
1.282 sh4_x86.tstate = TSTATE_NONE;
1.283 @@ -915,7 +867,6 @@
1.284 { /* MOV.W Rm, @-Rn */
1.285 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.286 load_reg( R_ECX, Rn );
1.287 - precheck();
1.288 check_walign16( R_ECX );
1.289 load_reg( R_EAX, Rm );
1.290 ADD_imm8s_r32( -2, R_ECX );
1.291 @@ -929,7 +880,6 @@
1.292 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.293 load_reg( R_EAX, Rm );
1.294 load_reg( R_ECX, Rn );
1.295 - precheck();
1.296 check_walign32( R_ECX );
1.297 ADD_imm8s_r32( -4, R_ECX );
1.298 store_reg( R_ECX, Rn );
1.299 @@ -1307,7 +1257,6 @@
1.300 { /* STS.L MACH, @-Rn */
1.301 uint32_t Rn = ((ir>>8)&0xF);
1.302 load_reg( R_ECX, Rn );
1.303 - precheck();
1.304 check_walign32( R_ECX );
1.305 ADD_imm8s_r32( -4, R_ECX );
1.306 store_reg( R_ECX, Rn );
1.307 @@ -1320,7 +1269,6 @@
1.308 { /* STS.L MACL, @-Rn */
1.309 uint32_t Rn = ((ir>>8)&0xF);
1.310 load_reg( R_ECX, Rn );
1.311 - precheck();
1.312 check_walign32( R_ECX );
1.313 ADD_imm8s_r32( -4, R_ECX );
1.314 store_reg( R_ECX, Rn );
1.315 @@ -1333,7 +1281,6 @@
1.316 { /* STS.L PR, @-Rn */
1.317 uint32_t Rn = ((ir>>8)&0xF);
1.318 load_reg( R_ECX, Rn );
1.319 - precheck();
1.320 check_walign32( R_ECX );
1.321 ADD_imm8s_r32( -4, R_ECX );
1.322 store_reg( R_ECX, Rn );
1.323 @@ -1345,8 +1292,7 @@
1.324 case 0x3:
1.325 { /* STC.L SGR, @-Rn */
1.326 uint32_t Rn = ((ir>>8)&0xF);
1.327 - precheck();
1.328 - check_priv_no_precheck();
1.329 + check_priv();
1.330 load_reg( R_ECX, Rn );
1.331 check_walign32( R_ECX );
1.332 ADD_imm8s_r32( -4, R_ECX );
1.333 @@ -1360,7 +1306,6 @@
1.334 { /* STS.L FPUL, @-Rn */
1.335 uint32_t Rn = ((ir>>8)&0xF);
1.336 load_reg( R_ECX, Rn );
1.337 - precheck();
1.338 check_walign32( R_ECX );
1.339 ADD_imm8s_r32( -4, R_ECX );
1.340 store_reg( R_ECX, Rn );
1.341 @@ -1373,7 +1318,6 @@
1.342 { /* STS.L FPSCR, @-Rn */
1.343 uint32_t Rn = ((ir>>8)&0xF);
1.344 load_reg( R_ECX, Rn );
1.345 - precheck();
1.346 check_walign32( R_ECX );
1.347 ADD_imm8s_r32( -4, R_ECX );
1.348 store_reg( R_ECX, Rn );
1.349 @@ -1385,8 +1329,7 @@
1.350 case 0xF:
1.351 { /* STC.L DBR, @-Rn */
1.352 uint32_t Rn = ((ir>>8)&0xF);
1.353 - precheck();
1.354 - check_priv_no_precheck();
1.355 + check_priv();
1.356 load_reg( R_ECX, Rn );
1.357 check_walign32( R_ECX );
1.358 ADD_imm8s_r32( -4, R_ECX );
1.359 @@ -1408,8 +1351,7 @@
1.360 case 0x0:
1.361 { /* STC.L SR, @-Rn */
1.362 uint32_t Rn = ((ir>>8)&0xF);
1.363 - precheck();
1.364 - check_priv_no_precheck();
1.365 + check_priv();
1.366 call_func0( sh4_read_sr );
1.367 load_reg( R_ECX, Rn );
1.368 check_walign32( R_ECX );
1.369 @@ -1423,7 +1365,6 @@
1.370 { /* STC.L GBR, @-Rn */
1.371 uint32_t Rn = ((ir>>8)&0xF);
1.372 load_reg( R_ECX, Rn );
1.373 - precheck();
1.374 check_walign32( R_ECX );
1.375 ADD_imm8s_r32( -4, R_ECX );
1.376 store_reg( R_ECX, Rn );
1.377 @@ -1435,8 +1376,7 @@
1.378 case 0x2:
1.379 { /* STC.L VBR, @-Rn */
1.380 uint32_t Rn = ((ir>>8)&0xF);
1.381 - precheck();
1.382 - check_priv_no_precheck();
1.383 + check_priv();
1.384 load_reg( R_ECX, Rn );
1.385 check_walign32( R_ECX );
1.386 ADD_imm8s_r32( -4, R_ECX );
1.387 @@ -1449,8 +1389,7 @@
1.388 case 0x3:
1.389 { /* STC.L SSR, @-Rn */
1.390 uint32_t Rn = ((ir>>8)&0xF);
1.391 - precheck();
1.392 - check_priv_no_precheck();
1.393 + check_priv();
1.394 load_reg( R_ECX, Rn );
1.395 check_walign32( R_ECX );
1.396 ADD_imm8s_r32( -4, R_ECX );
1.397 @@ -1463,8 +1402,7 @@
1.398 case 0x4:
1.399 { /* STC.L SPC, @-Rn */
1.400 uint32_t Rn = ((ir>>8)&0xF);
1.401 - precheck();
1.402 - check_priv_no_precheck();
1.403 + check_priv();
1.404 load_reg( R_ECX, Rn );
1.405 check_walign32( R_ECX );
1.406 ADD_imm8s_r32( -4, R_ECX );
1.407 @@ -1482,8 +1420,7 @@
1.408 case 0x1:
1.409 { /* STC.L Rm_BANK, @-Rn */
1.410 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm_BANK = ((ir>>4)&0x7);
1.411 - precheck();
1.412 - check_priv_no_precheck();
1.413 + check_priv();
1.414 load_reg( R_ECX, Rn );
1.415 check_walign32( R_ECX );
1.416 ADD_imm8s_r32( -4, R_ECX );
1.417 @@ -1570,7 +1507,6 @@
1.418 { /* LDS.L @Rm+, MACH */
1.419 uint32_t Rm = ((ir>>8)&0xF);
1.420 load_reg( R_EAX, Rm );
1.421 - precheck();
1.422 check_ralign32( R_EAX );
1.423 MOV_r32_r32( R_EAX, R_ECX );
1.424 ADD_imm8s_r32( 4, R_EAX );
1.425 @@ -1584,7 +1520,6 @@
1.426 { /* LDS.L @Rm+, MACL */
1.427 uint32_t Rm = ((ir>>8)&0xF);
1.428 load_reg( R_EAX, Rm );
1.429 - precheck();
1.430 check_ralign32( R_EAX );
1.431 MOV_r32_r32( R_EAX, R_ECX );
1.432 ADD_imm8s_r32( 4, R_EAX );
1.433 @@ -1598,7 +1533,6 @@
1.434 { /* LDS.L @Rm+, PR */
1.435 uint32_t Rm = ((ir>>8)&0xF);
1.436 load_reg( R_EAX, Rm );
1.437 - precheck();
1.438 check_ralign32( R_EAX );
1.439 MOV_r32_r32( R_EAX, R_ECX );
1.440 ADD_imm8s_r32( 4, R_EAX );
1.441 @@ -1611,8 +1545,7 @@
1.442 case 0x3:
1.443 { /* LDC.L @Rm+, SGR */
1.444 uint32_t Rm = ((ir>>8)&0xF);
1.445 - precheck();
1.446 - check_priv_no_precheck();
1.447 + check_priv();
1.448 load_reg( R_EAX, Rm );
1.449 check_ralign32( R_EAX );
1.450 MOV_r32_r32( R_EAX, R_ECX );
1.451 @@ -1627,7 +1560,6 @@
1.452 { /* LDS.L @Rm+, FPUL */
1.453 uint32_t Rm = ((ir>>8)&0xF);
1.454 load_reg( R_EAX, Rm );
1.455 - precheck();
1.456 check_ralign32( R_EAX );
1.457 MOV_r32_r32( R_EAX, R_ECX );
1.458 ADD_imm8s_r32( 4, R_EAX );
1.459 @@ -1641,7 +1573,6 @@
1.460 { /* LDS.L @Rm+, FPSCR */
1.461 uint32_t Rm = ((ir>>8)&0xF);
1.462 load_reg( R_EAX, Rm );
1.463 - precheck();
1.464 check_ralign32( R_EAX );
1.465 MOV_r32_r32( R_EAX, R_ECX );
1.466 ADD_imm8s_r32( 4, R_EAX );
1.467 @@ -1655,8 +1586,7 @@
1.468 case 0xF:
1.469 { /* LDC.L @Rm+, DBR */
1.470 uint32_t Rm = ((ir>>8)&0xF);
1.471 - precheck();
1.472 - check_priv_no_precheck();
1.473 + check_priv();
1.474 load_reg( R_EAX, Rm );
1.475 check_ralign32( R_EAX );
1.476 MOV_r32_r32( R_EAX, R_ECX );
1.477 @@ -1682,8 +1612,7 @@
1.478 if( sh4_x86.in_delay_slot ) {
1.479 SLOTILLEGAL();
1.480 } else {
1.481 - precheck();
1.482 - check_priv_no_precheck();
1.483 + check_priv();
1.484 load_reg( R_EAX, Rm );
1.485 check_ralign32( R_EAX );
1.486 MOV_r32_r32( R_EAX, R_ECX );
1.487 @@ -1701,7 +1630,6 @@
1.488 { /* LDC.L @Rm+, GBR */
1.489 uint32_t Rm = ((ir>>8)&0xF);
1.490 load_reg( R_EAX, Rm );
1.491 - precheck();
1.492 check_ralign32( R_EAX );
1.493 MOV_r32_r32( R_EAX, R_ECX );
1.494 ADD_imm8s_r32( 4, R_EAX );
1.495 @@ -1714,8 +1642,7 @@
1.496 case 0x2:
1.497 { /* LDC.L @Rm+, VBR */
1.498 uint32_t Rm = ((ir>>8)&0xF);
1.499 - precheck();
1.500 - check_priv_no_precheck();
1.501 + check_priv();
1.502 load_reg( R_EAX, Rm );
1.503 check_ralign32( R_EAX );
1.504 MOV_r32_r32( R_EAX, R_ECX );
1.505 @@ -1729,8 +1656,7 @@
1.506 case 0x3:
1.507 { /* LDC.L @Rm+, SSR */
1.508 uint32_t Rm = ((ir>>8)&0xF);
1.509 - precheck();
1.510 - check_priv_no_precheck();
1.511 + check_priv();
1.512 load_reg( R_EAX, Rm );
1.513 check_ralign32( R_EAX );
1.514 MOV_r32_r32( R_EAX, R_ECX );
1.515 @@ -1744,8 +1670,7 @@
1.516 case 0x4:
1.517 { /* LDC.L @Rm+, SPC */
1.518 uint32_t Rm = ((ir>>8)&0xF);
1.519 - precheck();
1.520 - check_priv_no_precheck();
1.521 + check_priv();
1.522 load_reg( R_EAX, Rm );
1.523 check_ralign32( R_EAX );
1.524 MOV_r32_r32( R_EAX, R_ECX );
1.525 @@ -1764,8 +1689,7 @@
1.526 case 0x1:
1.527 { /* LDC.L @Rm+, Rn_BANK */
1.528 uint32_t Rm = ((ir>>8)&0xF); uint32_t Rn_BANK = ((ir>>4)&0x7);
1.529 - precheck();
1.530 - check_priv_no_precheck();
1.531 + check_priv();
1.532 load_reg( R_EAX, Rm );
1.533 check_ralign32( R_EAX );
1.534 MOV_r32_r32( R_EAX, R_ECX );
1.535 @@ -2090,7 +2014,6 @@
1.536 { /* MAC.W @Rm+, @Rn+ */
1.537 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.538 load_reg( R_ECX, Rm );
1.539 - precheck();
1.540 check_ralign16( R_ECX );
1.541 load_reg( R_ECX, Rn );
1.542 check_ralign16( R_ECX );
1.543 @@ -2137,7 +2060,6 @@
1.544 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<2;
1.545 load_reg( R_ECX, Rm );
1.546 ADD_imm8s_r32( disp, R_ECX );
1.547 - precheck();
1.548 check_ralign32( R_ECX );
1.549 MEM_READ_LONG( R_ECX, R_EAX );
1.550 store_reg( R_EAX, Rn );
1.551 @@ -2159,7 +2081,6 @@
1.552 { /* MOV.W @Rm, Rn */
1.553 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.554 load_reg( R_ECX, Rm );
1.555 - precheck();
1.556 check_ralign16( R_ECX );
1.557 MEM_READ_WORD( R_ECX, R_EAX );
1.558 store_reg( R_EAX, Rn );
1.559 @@ -2170,7 +2091,6 @@
1.560 { /* MOV.L @Rm, Rn */
1.561 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.562 load_reg( R_ECX, Rm );
1.563 - precheck();
1.564 check_ralign32( R_ECX );
1.565 MEM_READ_LONG( R_ECX, R_EAX );
1.566 store_reg( R_EAX, Rn );
1.567 @@ -2200,7 +2120,6 @@
1.568 { /* MOV.W @Rm+, Rn */
1.569 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.570 load_reg( R_EAX, Rm );
1.571 - precheck();
1.572 check_ralign16( R_EAX );
1.573 MOV_r32_r32( R_EAX, R_ECX );
1.574 ADD_imm8s_r32( 2, R_EAX );
1.575 @@ -2214,7 +2133,6 @@
1.576 { /* MOV.L @Rm+, Rn */
1.577 uint32_t Rn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.578 load_reg( R_EAX, Rm );
1.579 - precheck();
1.580 check_ralign32( R_EAX );
1.581 MOV_r32_r32( R_EAX, R_ECX );
1.582 ADD_imm8s_r32( 4, R_EAX );
1.583 @@ -2335,7 +2253,6 @@
1.584 load_reg( R_ECX, Rn );
1.585 load_reg( R_EAX, 0 );
1.586 ADD_imm32_r32( disp, R_ECX );
1.587 - precheck();
1.588 check_walign16( R_ECX );
1.589 MEM_WRITE_WORD( R_ECX, R_EAX );
1.590 sh4_x86.tstate = TSTATE_NONE;
1.591 @@ -2356,7 +2273,6 @@
1.592 uint32_t Rm = ((ir>>4)&0xF); uint32_t disp = (ir&0xF)<<1;
1.593 load_reg( R_ECX, Rm );
1.594 ADD_imm32_r32( disp, R_ECX );
1.595 - precheck();
1.596 check_ralign16( R_ECX );
1.597 MEM_READ_WORD( R_ECX, R_EAX );
1.598 store_reg( R_EAX, 0 );
1.599 @@ -2506,7 +2422,6 @@
1.600 load_spreg( R_ECX, R_GBR );
1.601 load_reg( R_EAX, 0 );
1.602 ADD_imm32_r32( disp, R_ECX );
1.603 - precheck();
1.604 check_walign16( R_ECX );
1.605 MEM_WRITE_WORD( R_ECX, R_EAX );
1.606 sh4_x86.tstate = TSTATE_NONE;
1.607 @@ -2518,7 +2433,6 @@
1.608 load_spreg( R_ECX, R_GBR );
1.609 load_reg( R_EAX, 0 );
1.610 ADD_imm32_r32( disp, R_ECX );
1.611 - precheck();
1.612 check_walign32( R_ECX );
1.613 MEM_WRITE_LONG( R_ECX, R_EAX );
1.614 sh4_x86.tstate = TSTATE_NONE;
1.615 @@ -2556,7 +2470,6 @@
1.616 uint32_t disp = (ir&0xFF)<<1;
1.617 load_spreg( R_ECX, R_GBR );
1.618 ADD_imm32_r32( disp, R_ECX );
1.619 - precheck();
1.620 check_ralign16( R_ECX );
1.621 MEM_READ_WORD( R_ECX, R_EAX );
1.622 store_reg( R_EAX, 0 );
1.623 @@ -2568,7 +2481,6 @@
1.624 uint32_t disp = (ir&0xFF)<<2;
1.625 load_spreg( R_ECX, R_GBR );
1.626 ADD_imm32_r32( disp, R_ECX );
1.627 - precheck();
1.628 check_ralign32( R_ECX );
1.629 MEM_READ_LONG( R_ECX, R_EAX );
1.630 store_reg( R_EAX, 0 );
1.631 @@ -2685,7 +2597,7 @@
1.632 SLOTILLEGAL();
1.633 } else {
1.634 uint32_t target = (pc & 0xFFFFFFFC) + disp + 4;
1.635 - sh4ptr_t ptr = mem_get_region(target);
1.636 + sh4ptr_t ptr = sh4_get_region_by_vma(target);
1.637 if( ptr != NULL ) {
1.638 MOV_moff32_EAX( ptr );
1.639 } else {
1.640 @@ -2839,14 +2751,13 @@
1.641 case 0x6:
1.642 { /* FMOV @(R0, Rm), FRn */
1.643 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.644 - precheck();
1.645 - check_fpuen_no_precheck();
1.646 + check_fpuen();
1.647 load_reg( R_ECX, Rm );
1.648 ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
1.649 check_ralign32( R_ECX );
1.650 load_spreg( R_EDX, R_FPSCR );
1.651 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.652 - JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
1.653 + JNE_rel8(8 + MEM_READ_SIZE, doublesize);
1.654 MEM_READ_LONG( R_ECX, R_EAX );
1.655 load_fr_bank( R_EDX );
1.656 store_fr( R_EDX, R_EAX, FRn );
1.657 @@ -2874,14 +2785,13 @@
1.658 case 0x7:
1.659 { /* FMOV FRm, @(R0, Rn) */
1.660 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
1.661 - precheck();
1.662 - check_fpuen_no_precheck();
1.663 + check_fpuen();
1.664 load_reg( R_ECX, Rn );
1.665 ADD_sh4r_r32( REG_OFFSET(r[0]), R_ECX );
1.666 check_walign32( R_ECX );
1.667 load_spreg( R_EDX, R_FPSCR );
1.668 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.669 - JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
1.670 + JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
1.671 load_fr_bank( R_EDX );
1.672 load_fr( R_EDX, R_EAX, FRm );
1.673 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.674 @@ -2908,13 +2818,12 @@
1.675 case 0x8:
1.676 { /* FMOV @Rm, FRn */
1.677 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.678 - precheck();
1.679 - check_fpuen_no_precheck();
1.680 + check_fpuen();
1.681 load_reg( R_ECX, Rm );
1.682 check_ralign32( R_ECX );
1.683 load_spreg( R_EDX, R_FPSCR );
1.684 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.685 - JNE_rel8(8 + CALL_FUNC1_SIZE, doublesize);
1.686 + JNE_rel8(8 + MEM_READ_SIZE, doublesize);
1.687 MEM_READ_LONG( R_ECX, R_EAX );
1.688 load_fr_bank( R_EDX );
1.689 store_fr( R_EDX, R_EAX, FRn );
1.690 @@ -2942,14 +2851,13 @@
1.691 case 0x9:
1.692 { /* FMOV @Rm+, FRn */
1.693 uint32_t FRn = ((ir>>8)&0xF); uint32_t Rm = ((ir>>4)&0xF);
1.694 - precheck();
1.695 - check_fpuen_no_precheck();
1.696 + check_fpuen();
1.697 load_reg( R_ECX, Rm );
1.698 check_ralign32( R_ECX );
1.699 MOV_r32_r32( R_ECX, R_EAX );
1.700 load_spreg( R_EDX, R_FPSCR );
1.701 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.702 - JNE_rel8(14 + CALL_FUNC1_SIZE, doublesize);
1.703 + JNE_rel8(14 + MEM_READ_SIZE, doublesize);
1.704 ADD_imm8s_r32( 4, R_EAX );
1.705 store_reg( R_EAX, Rm );
1.706 MEM_READ_LONG( R_ECX, R_EAX );
1.707 @@ -2982,13 +2890,12 @@
1.708 case 0xA:
1.709 { /* FMOV FRm, @Rn */
1.710 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
1.711 - precheck();
1.712 - check_fpuen_no_precheck();
1.713 + check_fpuen();
1.714 load_reg( R_ECX, Rn );
1.715 check_walign32( R_ECX );
1.716 load_spreg( R_EDX, R_FPSCR );
1.717 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.718 - JNE_rel8(8 + CALL_FUNC2_SIZE, doublesize);
1.719 + JNE_rel8(8 + MEM_WRITE_SIZE, doublesize);
1.720 load_fr_bank( R_EDX );
1.721 load_fr( R_EDX, R_EAX, FRm );
1.722 MEM_WRITE_LONG( R_ECX, R_EAX ); // 12
1.723 @@ -3015,13 +2922,12 @@
1.724 case 0xB:
1.725 { /* FMOV FRm, @-Rn */
1.726 uint32_t Rn = ((ir>>8)&0xF); uint32_t FRm = ((ir>>4)&0xF);
1.727 - precheck();
1.728 - check_fpuen_no_precheck();
1.729 + check_fpuen();
1.730 load_reg( R_ECX, Rn );
1.731 check_walign32( R_ECX );
1.732 load_spreg( R_EDX, R_FPSCR );
1.733 TEST_imm32_r32( FPSCR_SZ, R_EDX );
1.734 - JNE_rel8(14 + CALL_FUNC2_SIZE, doublesize);
1.735 + JNE_rel8(14 + MEM_WRITE_SIZE, doublesize);
1.736 load_fr_bank( R_EDX );
1.737 load_fr( R_EDX, R_EAX, FRm );
1.738 ADD_imm8s_r32(-4,R_ECX);
1.739 @@ -3405,8 +3311,7 @@
1.740 if( sh4_x86.in_delay_slot ) {
1.741 SLOTILLEGAL();
1.742 } else {
1.743 - precheck();
1.744 - JMP_exit(EXIT_ILLEGAL);
1.745 + JMP_exc(EXC_ILLEGAL);
1.746 return 2;
1.747 }
1.748 }