changeset 930:07e5b11419db lxdream-mem
parent    929:fd8cb0c82f5f
child     931:430048ea8b71
author    nkeynes
date      Mon Dec 22 09:51:11 2008 +0000 (15 years ago)
branch    lxdream-mem
Remove pointer cache and add full address-space map. Much better
src/dreamcast.c
src/mem.c
src/sh4/ia32abi.h
src/sh4/ia64abi.h
src/sh4/sh4.c
src/sh4/sh4.h
src/sh4/sh4core.h
src/sh4/sh4mem.c
src/sh4/sh4x86.in
src/sh4/x86op.h
src/test/testsh4x86.c
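
Note on the change: the diff below drops the per-slot pointer cache in sh4r and instead gives every 4KB page of the SH4 address space its own mem_region_fn vtable pointer, so each access becomes one addr >> 12 table lookup plus an indirect call. The following is a minimal standalone C sketch of that lookup path only; the struct is trimmed to two entries, the table is a plain static array rather than the mmap'd / mem_alloc_pages tables built in src/mem.c and src/sh4/sh4mem.c, and names such as demo_read_long are illustrative, not part of lxdream.

    #include <stddef.h>
    #include <stdint.h>

    typedef uint32_t sh4addr_t;

    /* Per-region dispatch table; the real struct also carries word/byte
       accessors and more, elided here. */
    struct mem_region_fn {
        int32_t (*read_long)( sh4addr_t addr );
        void    (*write_long)( sh4addr_t addr, uint32_t val );
    };
    typedef struct mem_region_fn *mem_region_fn_t;

    /* Fallback region: reads as 0, writes ignored (mirrors mem_region_unmapped). */
    static int32_t unmapped_read_long( sh4addr_t addr ) { (void)addr; return 0; }
    static void unmapped_write_long( sh4addr_t addr, uint32_t val ) { (void)addr; (void)val; }
    static struct mem_region_fn demo_region_unmapped =
        { unmapped_read_long, unmapped_write_long };

    /* One vtable pointer per 4KB page of the 32-bit address space (2^20 entries).
       lxdream allocates this with mmap/mem_alloc_pages; a static array suffices
       for the sketch. */
    static mem_region_fn_t demo_address_space[1 << 20];

    static void demo_mem_init( void )
    {
        /* Point every page at the unmapped region first, so lookups never hit
           a NULL entry; real regions overwrite their slots later. */
        for( size_t i = 0; i < (size_t)(1 << 20); i++ )
            demo_address_space[i] = &demo_region_unmapped;
    }

    static inline int32_t demo_read_long( sh4addr_t addr )
    {
        /* Single table lookup + indirect call; no hit/miss bookkeeping as in
           the old pointer cache. */
        return demo_address_space[addr >> 12]->read_long( addr );
    }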
1.1 --- a/src/dreamcast.c Sat Dec 20 03:01:40 2008 +0000
1.2 +++ b/src/dreamcast.c Mon Dec 22 09:51:11 2008 +0000
1.3 @@ -100,6 +100,7 @@
1.4 dreamcast_register_module( &aica_module );
1.5 dreamcast_register_module( &maple_module );
1.6 dreamcast_register_module( &ide_module );
1.7 + sh4_mem_init();
1.8 }
1.9
1.10 void dreamcast_config_changed(void)
2.1 --- a/src/mem.c Sat Dec 20 03:01:40 2008 +0000
2.2 +++ b/src/mem.c Mon Dec 22 09:51:11 2008 +0000
2.3 @@ -36,6 +36,9 @@
2.4 #include "dreamcast.h"
2.5
2.6 sh4ptr_t *page_map = NULL;
2.7 +mem_region_fn_t *ext_address_space = NULL;
2.8 +
2.9 +extern struct mem_region_fn mem_region_unmapped;
2.10
2.11 int mem_load(FILE *f);
2.12 void mem_save(FILE *f);
2.13 @@ -62,15 +65,24 @@
2.14
2.15 void mem_init( void )
2.16 {
2.17 + int i;
2.18 + mem_region_fn_t *ptr;
2.19 page_map = mmap( NULL, sizeof(sh4ptr_t) * LXDREAM_PAGE_TABLE_ENTRIES,
2.20 PROT_READ|PROT_WRITE, MAP_ANON|MAP_PRIVATE, -1, 0 );
2.21 if( page_map == MAP_FAILED ) {
2.22 - ERROR( "Unable to allocate page map! (%s)", strerror(errno) );
2.23 - page_map = NULL;
2.24 - return;
2.25 + FATAL( "Unable to allocate page map! (%s)", strerror(errno) );
2.26 + }
2.27 + memset( page_map, 0, sizeof(sh4ptr_t) * LXDREAM_PAGE_TABLE_ENTRIES );
2.28 +
2.29 + ext_address_space = mmap( NULL, sizeof(mem_region_fn_t) * LXDREAM_PAGE_TABLE_ENTRIES,
2.30 + PROT_READ|PROT_WRITE, MAP_ANON|MAP_PRIVATE, -1, 0 );
2.31 + if( ext_address_space == MAP_FAILED ) {
2.32 + FATAL( "Unable to allocate external memory map (%s)", strerror(errno) );
2.33 }
2.34
2.35 - memset( page_map, 0, sizeof(sh4ptr_t) * LXDREAM_PAGE_TABLE_ENTRIES );
2.36 + for( ptr = ext_address_space, i = LXDREAM_PAGE_TABLE_ENTRIES; i > 0; ptr++, i-- ) {
2.37 + *ptr = &mem_region_unmapped;
2.38 + }
2.39 }
2.40
2.41 void mem_reset( void )
2.42 @@ -238,8 +250,10 @@
2.43 num_mem_rgns++;
2.44
2.45 do {
2.46 - for( i=0; i<size>>LXDREAM_PAGE_BITS; i++ )
2.47 + for( i=0; i<size>>LXDREAM_PAGE_BITS; i++ ) {
2.48 page_map[(base>>LXDREAM_PAGE_BITS)+i] = mem + (i<<LXDREAM_PAGE_BITS);
2.49 + ext_address_space[(base>>LXDREAM_PAGE_BITS)+i] = fn;
2.50 + }
2.51 base += repeat_offset;
2.52 } while( base <= repeat_until );
2.53
2.54 @@ -336,6 +350,7 @@
2.55 P4_io[(io->base&0x1FFFFFFF)>>19] = io;
2.56 } else {
2.57 page_map[io->base>>12] = (sh4ptr_t)(uintptr_t)num_io_rgns;
2.58 + ext_address_space[io->base>>12] = &io->fn;
2.59 }
2.60 io_rgn[num_io_rgns] = io;
2.61 num_io_rgns++;
3.1 --- a/src/sh4/ia32abi.h Sat Dec 20 03:01:40 2008 +0000
3.2 +++ b/src/sh4/ia32abi.h Mon Dec 22 09:51:11 2008 +0000
3.3 @@ -24,6 +24,13 @@
3.4
3.5 #define load_ptr( reg, ptr ) load_imm32( reg, (uint32_t)ptr );
3.6
3.7 +static inline decode_address( int addr_reg )
3.8 +{
3.9 + MOV_r32_r32( addr_reg, R_ECX );
3.10 + SHR_imm8_r32( 12, R_ECX );
3.11 + MOV_r32disp32x4_r32( R_ECX, (uintptr_t)sh4_address_space, R_ECX );
3.12 +}
3.13 +
3.14 /**
3.15 * Note: clobbers EAX to make the indirect call - this isn't usually
3.16 * a problem since the callee will usually clobber it anyway.
3.17 @@ -50,12 +57,12 @@
3.18 CALL_r32(addr_reg);
3.19 }
3.20
3.21 -static inline void call_func1_r32ind( int preg, uint32_t disp32, int arg1 )
3.22 +static inline void call_func1_r32disp8( int preg, uint32_t disp8, int arg1 )
3.23 {
3.24 if( arg1 != R_EAX ) {
3.25 MOV_r32_r32( arg1, R_EAX );
3.26 }
3.27 - CALL_r32ind(preg, disp32);
3.28 + CALL_r32disp8(preg, disp8);
3.29 }
3.30
3.31 static inline void call_func2( void *ptr, int arg1, int arg2 )
3.32 @@ -80,7 +87,7 @@
3.33 CALL_r32(addr_reg);
3.34 }
3.35
3.36 -static inline void call_func2_r32ind( int preg, uint32_t disp32, int arg1, int arg2 )
3.37 +static inline void call_func2_r32disp8( int preg, uint32_t disp8, int arg1, int arg2 )
3.38 {
3.39 if( arg2 != R_EDX ) {
3.40 MOV_r32_r32( arg2, R_EDX );
3.41 @@ -88,7 +95,7 @@
3.42 if( arg1 != R_EAX ) {
3.43 MOV_r32_r32( arg1, R_EAX );
3.44 }
3.45 - CALL_r32ind(preg, disp32);
3.46 + CALL_r32disp8(preg, disp8);
3.47 }
3.48
3.49
3.50 @@ -122,11 +129,11 @@
3.51 {
3.52 MOV_r32_esp8(addr, 0);
3.53 MOV_r32_esp8(arg2b, 4);
3.54 - call_func2(sh4_write_long, addr, arg2a);
3.55 + MEM_WRITE_LONG(addr, arg2a);
3.56 MOV_esp8_r32(0, R_EAX);
3.57 MOV_esp8_r32(4, R_EDX);
3.58 ADD_imm8s_r32(4, R_EAX);
3.59 - call_func0(sh4_write_long);
3.60 + MEM_WRITE_LONG(R_EAX, R_EDX);
3.61 }
3.62
3.63 /**
3.64 @@ -136,14 +143,11 @@
3.65 static inline void MEM_READ_DOUBLE( int addr, int arg2a, int arg2b )
3.66 {
3.67 MOV_r32_esp8(addr, 0);
3.68 - call_func1(sh4_read_long, addr);
3.69 + MEM_READ_LONG(addr, R_EAX);
3.70 MOV_r32_esp8(R_EAX, 4);
3.71 MOV_esp8_r32(0, R_EAX);
3.72 ADD_imm8s_r32(4, R_EAX);
3.73 - call_func0(sh4_read_long);
3.74 - if( arg2b != R_EAX ) {
3.75 - MOV_r32_r32(R_EAX, arg2b);
3.76 - }
3.77 + MEM_READ_LONG(R_EAX, arg2b );
3.78 MOV_esp8_r32(4, arg2a);
3.79 }
3.80 #else
3.81 @@ -214,14 +218,12 @@
3.82 {
3.83 PUSH_r32(R_EBP);
3.84 load_ptr( R_EBP, ((uint8_t *)&sh4r) + 128 );
3.85 - PUSH_r32(R_EBX);
3.86 - SUB_imm8s_r32( 4, R_ESP );
3.87 + SUB_imm8s_r32( 8, R_ESP );
3.88 }
3.89
3.90 static inline void exit_block( )
3.91 {
3.92 - ADD_imm8s_r32( 4, R_ESP );
3.93 - POP_r32(R_EBX);
3.94 + ADD_imm8s_r32( 8, R_ESP );
3.95 POP_r32(R_EBP);
3.96 RET();
3.97 }
4.1 --- a/src/sh4/ia64abi.h Sat Dec 20 03:01:40 2008 +0000
4.2 +++ b/src/sh4/ia64abi.h Mon Dec 22 09:51:11 2008 +0000
4.3 @@ -24,6 +24,14 @@
4.4
4.5 #define load_ptr( reg, ptr ) load_imm64( reg, (uint64_t)ptr );
4.6
4.7 +static inline decode_address( int addr_reg )
4.8 +{
4.9 + MOV_r32_r32( addr_reg, R_ECX );
4.10 + SHR_imm8_r32( 12, R_ECX );
4.11 + load_ptr( R_EDI, sh4_address_space );
4.12 + REXW(); OP(0x8B); OP(0x0C); OP(0xCF); // mov.q [%rdi + %rcx*8], %rcx
4.13 +}
4.14 +
4.15 /**
4.16 * Note: clobbers EAX to make the indirect call - this isn't usually
4.17 * a problem since the callee will usually clobber it anyway.
4.18 @@ -50,6 +58,12 @@
4.19 call_func0(ptr);
4.20 }
4.21
4.22 +static inline void call_func1_r32disp8( int preg, uint32_t disp8, int arg1 )
4.23 +{
4.24 + REXW(); MOV_r32_r32(arg1, R_EDI);
4.25 + CALL_r32disp8(preg, disp8);
4.26 +}
4.27 +
4.28 #define CALL_FUNC2_SIZE 16
4.29 static inline void call_func2( void *ptr, int arg1, int arg2 )
4.30 {
4.31 @@ -58,6 +72,14 @@
4.32 call_func0(ptr);
4.33 }
4.34
4.35 +static inline void call_func2_r32disp8( int preg, uint32_t disp8, int arg1, int arg2 )
4.36 +{
4.37 + REXW(); MOV_r32_r32(arg1, R_EDI);
4.38 + REXW(); MOV_r32_r32(arg2, R_ESI);
4.39 + CALL_r32disp8(preg, disp8);
4.40 +}
4.41 +
4.42 +
4.43 #define MEM_WRITE_DOUBLE_SIZE 35
4.44 /**
4.45 * Write a double (64-bit) value into memory, with the first word in arg2a, and
5.1 --- a/src/sh4/sh4.c Sat Dec 20 03:01:40 2008 +0000
5.2 +++ b/src/sh4/sh4.c Mon Dec 22 09:51:11 2008 +0000
5.3 @@ -41,7 +41,6 @@
5.4 void sh4_stop( void );
5.5 void sh4_save_state( FILE *f );
5.6 int sh4_load_state( FILE *f );
5.7 -static void sh4_reset_pointer_cache();
5.8
5.9 uint32_t sh4_run_slice( uint32_t );
5.10 uint32_t sh4_xlat_run_slice( uint32_t );
5.11 @@ -109,7 +108,6 @@
5.12
5.13 /* zero everything out, for the sake of having a consistent state. */
5.14 memset( &sh4r, 0, sizeof(sh4r) );
5.15 - sh4_reset_pointer_cache();
5.16
5.17 /* Resume running if we were halted */
5.18 sh4r.sh4_state = SH4_STATE_RUNNING;
5.19 @@ -243,8 +241,7 @@
5.20 sh4r.in_delay_slot = FALSE;
5.21 }
5.22
5.23 - int len = ((char *)&sh4r.pointer_cache) - ((char *)&sh4r);
5.24 - fwrite( &sh4r, len, 1, f );
5.25 + fwrite( &sh4r, sizeof(sh4r), 1, f );
5.26 MMU_save_state( f );
5.27 PMM_save_state( f );
5.28 INTC_save_state( f );
5.29 @@ -257,9 +254,7 @@
5.30 if( sh4_use_translator ) {
5.31 xlat_flush_cache();
5.32 }
5.33 - int len = ((char *)&sh4r.pointer_cache) - ((char *)&sh4r);
5.34 - fread( &sh4r, len, 1, f );
5.35 - sh4_reset_pointer_cache();
5.36 + fread( &sh4r, sizeof(sh4r), 1, f );
5.37 MMU_load_state( f );
5.38 PMM_load_state( f );
5.39 INTC_load_state( f );
5.40 @@ -267,17 +262,6 @@
5.41 return SCIF_load_state( f );
5.42 }
5.43
5.44 -static void sh4_reset_pointer_cache()
5.45 -{
5.46 - int i;
5.47 - for( i=0; i<16; i++ ) {
5.48 - sh4r.pointer_cache[i].page_vma = -1;
5.49 - sh4r.pointer_cache[i].page_mask = 0xFFFFF000;
5.50 - }
5.51 - sh4r.pointer_cache[16].page_vma = -1;
5.52 - sh4r.pointer_cache[16].page_mask = 0xFFFFF000;
5.53 -}
5.54 -
5.55 void sh4_set_breakpoint( uint32_t pc, breakpoint_type_t type )
5.56 {
5.57 sh4_breakpoints[sh4_breakpoint_count].address = pc;
6.1 --- a/src/sh4/sh4.h Sat Dec 20 03:01:40 2008 +0000
6.2 +++ b/src/sh4/sh4.h Mon Dec 22 09:51:11 2008 +0000
6.3 @@ -87,13 +87,6 @@
6.4 * a delay slot (certain rules apply) */
6.5 uint32_t slice_cycle; /* Current nanosecond within the timeslice */
6.6 int sh4_state; /* Current power-on state (one of the SH4_STATE_* values ) */
6.7 -
6.8 - /* lxdream cache structures below this point */
6.9 - struct {
6.10 - uint32_t page_vma;
6.11 - uint32_t page_mask;
6.12 - struct mem_region_fn *page_fn;
6.13 - } pointer_cache[17];
6.14 };
6.15
6.16 extern struct sh4_registers sh4r;
7.1 --- a/src/sh4/sh4core.h Sat Dec 20 03:01:40 2008 +0000
7.2 +++ b/src/sh4/sh4core.h Mon Dec 22 09:51:11 2008 +0000
7.3 @@ -51,6 +51,8 @@
7.4 };
7.5 extern struct sh4_icache_struct sh4_icache;
7.6
7.7 +extern struct mem_region_fn **sh4_address_space;
7.8 +
7.9 /**
7.10 * Test if a given address is contained in the current icache entry
7.11 */
8.1 --- a/src/sh4/sh4mem.c Sat Dec 20 03:01:40 2008 +0000
8.2 +++ b/src/sh4/sh4mem.c Mon Dec 22 09:51:11 2008 +0000
8.3 @@ -50,6 +50,8 @@
8.4
8.5 extern struct mmio_region *P4_io[];
8.6
8.7 +mem_region_fn_t *sh4_address_space;
8.8 +
8.9 /********************* The "unmapped" address space ********************/
8.10 /* Always reads as 0, writes have no effect */
8.11 static int32_t FASTCALL unmapped_read_long( sh4addr_t addr )
8.12 @@ -422,93 +424,89 @@
8.13 int32_t FASTCALL sh4_read_long( sh4addr_t addr )
8.14 {
8.15 rl_count++;
8.16 - uint32_t page = (addr & 0xFFFFF000);
8.17 - if( page == last_page ) {
8.18 - hit_count++;
8.19 - return last_region->read_long(addr);
8.20 - } else {
8.21 - miss_count++;
8.22 - last_page = page;
8.23 - last_region = sh7750_decode_address(addr);
8.24 - return last_region->read_long(addr);
8.25 - }
8.26 + return sh4_address_space[addr>>12]->read_long(addr);
8.27 }
8.28
8.29 int32_t FASTCALL sh4_read_word( sh4addr_t addr )
8.30 {
8.31 rw_count++;
8.32 - uint32_t page = (addr & 0xFFFFF000);
8.33 - if( page == last_page ) {
8.34 - hit_count++;
8.35 - return last_region->read_word(addr);
8.36 - } else {
8.37 - miss_count++;
8.38 - last_page = page;
8.39 - last_region = sh7750_decode_address(addr);
8.40 - return last_region->read_word(addr);
8.41 - }
8.42 + return sh4_address_space[addr>>12]->read_word(addr);
8.43 }
8.44
8.45 int32_t FASTCALL sh4_read_byte( sh4addr_t addr )
8.46 {
8.47 rb_count++;
8.48 - uint32_t page = (addr & 0xFFFFF000);
8.49 - if( page == last_page ) {
8.50 - hit_count++;
8.51 - return last_region->read_byte(addr);
8.52 - } else {
8.53 - miss_count++;
8.54 - last_page = page;
8.55 - last_region = sh7750_decode_address(addr);
8.56 - return last_region->read_byte(addr);
8.57 - }
8.58 + return sh4_address_space[addr>>12]->read_byte(addr);
8.59 }
8.60
8.61 void FASTCALL sh4_write_long( sh4addr_t addr, uint32_t val )
8.62 {
8.63 wl_count++;
8.64 - uint32_t page = (addr & 0xFFFFF000);
8.65 - if( page == last_page ) {
8.66 - hit_count++;
8.67 - last_region->write_long(addr, val);
8.68 - } else {
8.69 - miss_count++;
8.70 - last_page = page;
8.71 - last_region = sh7750_decode_address(addr);
8.72 - last_region->write_long(addr,val);
8.73 - }
8.74 + sh4_address_space[addr>>12]->write_long(addr, val);
8.75 }
8.76
8.77 void FASTCALL sh4_write_word( sh4addr_t addr, uint32_t val )
8.78 {
8.79 ww_count++;
8.80 - uint32_t page = (addr & 0xFFFFF000);
8.81 - if( page == last_page ) {
8.82 - hit_count++;
8.83 - last_region->write_word(addr, val);
8.84 - } else {
8.85 - miss_count++;
8.86 - last_page = page;
8.87 - last_region = sh7750_decode_address(addr);
8.88 - last_region->write_word(addr,val);
8.89 - }
8.90 + sh4_address_space[addr>>12]->write_word(addr,val);
8.91 }
8.92
8.93 void FASTCALL sh4_write_byte( sh4addr_t addr, uint32_t val )
8.94 {
8.95 wb_count++;
8.96 - uint32_t page = (addr & 0xFFFFF000);
8.97 - if( page == last_page ) {
8.98 - hit_count++;
8.99 - last_region->write_byte(addr, val);
8.100 - } else {
8.101 - miss_count++;
8.102 - last_page = page;
8.103 - last_region = sh7750_decode_address(addr);
8.104 - last_region->write_byte(addr,val);
8.105 + sh4_address_space[addr>>12]->write_byte(addr, val);
8.106 +}
8.107 +
8.108 +extern mem_region_fn_t *ext_address_space;
8.109 +
8.110 +static void sh4_register_mem_region( uint32_t start, uint32_t end, mem_region_fn_t fn )
8.111 +{
8.112 + int count = (end - start) >> 12;
8.113 + mem_region_fn_t *ptr = &sh4_address_space[start>>12];
8.114 + while( count-- > 0 ) {
8.115 + *ptr++ = fn;
8.116 }
8.117 }
8.118 +
8.119
8.120 +void sh4_mem_init()
8.121 +{
8.122 + int i;
8.123 + mem_region_fn_t *ptr;
8.124 + sh4_address_space = mem_alloc_pages( sizeof(mem_region_fn_t) * 256 );
8.125 + for( i=0, ptr = sh4_address_space; i<7; i++, ptr += LXDREAM_PAGE_TABLE_ENTRIES ) {
8.126 + memcpy( ptr, ext_address_space, sizeof(mem_region_fn_t) * LXDREAM_PAGE_TABLE_ENTRIES );
8.127 + }
8.128 +
8.129 + /* Setup main P4 regions */
8.130 + sh4_register_mem_region( 0xE0000000, 0xE4000000, &p4_region_storequeue );
8.131 + sh4_register_mem_region( 0xE4000000, 0xF0000000, &mem_region_unmapped );
8.132 + sh4_register_mem_region( 0xF0000000, 0xF1000000, &p4_region_icache_addr );
8.133 + sh4_register_mem_region( 0xF1000000, 0xF2000000, &p4_region_icache_data );
8.134 + sh4_register_mem_region( 0xF2000000, 0xF3000000, &p4_region_itlb_addr );
8.135 + sh4_register_mem_region( 0xF3000000, 0xF4000000, &p4_region_itlb_data );
8.136 + sh4_register_mem_region( 0xF4000000, 0xF5000000, &p4_region_ocache_addr );
8.137 + sh4_register_mem_region( 0xF5000000, 0xF6000000, &p4_region_ocache_data );
8.138 + sh4_register_mem_region( 0xF6000000, 0xF7000000, &p4_region_utlb_addr );
8.139 + sh4_register_mem_region( 0xF7000000, 0xF8000000, &p4_region_utlb_data );
8.140 + sh4_register_mem_region( 0xF8000000, 0x00000000, &mem_region_unmapped );
8.141 +
8.142 + /* Setup P4 control region */
8.143 + sh4_register_mem_region( 0xFF000000, 0xFF001000, &mmio_region_MMU.fn );
8.144 + sh4_register_mem_region( 0xFF100000, 0xFF101000, &mmio_region_PMM.fn );
8.145 + sh4_register_mem_region( 0xFF200000, 0xFF201000, &mmio_region_UBC.fn );
8.146 + sh4_register_mem_region( 0xFF800000, 0xFF801000, &mmio_region_BSC.fn );
8.147 + sh4_register_mem_region( 0xFF900000, 0xFFA00000, &mem_region_unmapped ); // SDMR2 + SDMR3
8.148 + sh4_register_mem_region( 0xFFA00000, 0xFFA01000, &mmio_region_DMAC.fn );
8.149 + sh4_register_mem_region( 0xFFC00000, 0xFFC01000, &mmio_region_CPG.fn );
8.150 + sh4_register_mem_region( 0xFFC80000, 0xFFC81000, &mmio_region_RTC.fn );
8.151 + sh4_register_mem_region( 0xFFD00000, 0xFFD01000, &mmio_region_INTC.fn );
8.152 + sh4_register_mem_region( 0xFFD80000, 0xFFD81000, &mmio_region_TMU.fn );
8.153 + sh4_register_mem_region( 0xFFE00000, 0xFFE01000, &mmio_region_SCI.fn );
8.154 + sh4_register_mem_region( 0xFFE80000, 0xFFE81000, &mmio_region_SCIF.fn );
8.155 + sh4_register_mem_region( 0xFFF00000, 0xFFF01000, &mem_region_unmapped ); // H-UDI
8.156 +}
8.157 +
8.158 void print_sh4mem_stats() {
8.159 printf( "Decodes to p4: %d sq: %d\n", p4_count+sq_count, sq_count );
8.160 printf( "Decodes to sdram: %d\n", decode_sdram );
9.1 --- a/src/sh4/sh4x86.in Sat Dec 20 03:01:40 2008 +0000
9.2 +++ b/src/sh4/sh4x86.in Mon Dec 22 09:51:11 2008 +0000
9.3 @@ -20,7 +20,6 @@
9.4
9.5 #include <assert.h>
9.6 #include <math.h>
9.7 -#include <stddef.h>
9.8
9.9 #ifndef NDEBUG
9.10 #define DEBUG_JUMPS 1
9.11 @@ -289,13 +288,14 @@
9.12 JNE_exc(EXC_DATA_ADDR_WRITE);
9.13
9.14 #define UNDEF(ir)
9.15 +#define MEM_REGION_PTR(name) offsetof( struct mem_region_fn, name )
9.16 #define MEM_RESULT(value_reg) if(value_reg != R_EAX) { MOV_r32_r32(R_EAX,value_reg); }
9.17 -#define MEM_READ_BYTE( addr_reg, value_reg ) call_func1(sh4_read_byte, addr_reg ); MEM_RESULT(value_reg)
9.18 -#define MEM_READ_WORD( addr_reg, value_reg ) call_func1(sh4_read_word, addr_reg ); MEM_RESULT(value_reg)
9.19 -#define MEM_READ_LONG( addr_reg, value_reg ) call_func1(sh4_read_long, addr_reg ); MEM_RESULT(value_reg)
9.20 -#define MEM_WRITE_BYTE( addr_reg, value_reg ) call_func2(sh4_write_byte, addr_reg, value_reg)
9.21 -#define MEM_WRITE_WORD( addr_reg, value_reg ) call_func2(sh4_write_word, addr_reg, value_reg)
9.22 -#define MEM_WRITE_LONG( addr_reg, value_reg ) call_func2(sh4_write_long, addr_reg, value_reg)
9.23 +#define MEM_READ_BYTE( addr_reg, value_reg ) decode_address(addr_reg); call_func1_r32disp8(R_ECX, MEM_REGION_PTR(read_byte), addr_reg ); MEM_RESULT(value_reg)
9.24 +#define MEM_READ_WORD( addr_reg, value_reg ) decode_address(addr_reg); call_func1_r32disp8(R_ECX, MEM_REGION_PTR(read_word), addr_reg ); MEM_RESULT(value_reg)
9.25 +#define MEM_READ_LONG( addr_reg, value_reg ) decode_address(addr_reg); call_func1_r32disp8(R_ECX, MEM_REGION_PTR(read_long), addr_reg ); MEM_RESULT(value_reg)
9.26 +#define MEM_WRITE_BYTE( addr_reg, value_reg ) decode_address(addr_reg); call_func2_r32disp8(R_ECX, MEM_REGION_PTR(write_byte), addr_reg, value_reg)
9.27 +#define MEM_WRITE_WORD( addr_reg, value_reg ) decode_address(addr_reg); call_func2_r32disp8(R_ECX, MEM_REGION_PTR(write_word), addr_reg, value_reg)
9.28 +#define MEM_WRITE_LONG( addr_reg, value_reg ) decode_address(addr_reg); call_func2_r32disp8(R_ECX, MEM_REGION_PTR(write_long), addr_reg, value_reg)
9.29
9.30 #ifdef HAVE_FRAME_ADDRESS
9.31 /**
9.32 @@ -323,111 +323,6 @@
9.33 #include "sh4/ia32abi.h"
9.34 #endif
9.35
9.36 -#define MEM_REGION_PTR(name) offsetof( struct mem_region_fn, name )
9.37 -
9.38 -/**
9.39 - * Given an address in addr_reg and a cache entry, test if the cache is valid
9.40 - * and decode otherwise.
9.41 - * At conclusion of this:
9.42 - * R_EBX will contain the address
9.43 - * R_ECX will contain the memory region vtable
9.44 - * R_EAX, R_EDX (and any other volatiles) are clobbered
9.45 - */
9.46 -static inline void MEM_DECODE_ADDRESS( int addr_reg, int rm )
9.47 -{
9.48 - MOV_r32_r32( addr_reg, R_EBX );
9.49 - AND_sh4r_r32( REG_OFFSET(pointer_cache[rm].page_mask), addr_reg );
9.50 - CMP_sh4r_r32( REG_OFFSET(pointer_cache[rm].page_vma), addr_reg );
9.51 - EXPJE_rel8(uptodate);
9.52 - store_spreg( addr_reg, REG_OFFSET(pointer_cache[rm].page_vma) );
9.53 - call_func1( sh7750_decode_address, addr_reg );
9.54 - store_spreg( R_EAX, REG_OFFSET(pointer_cache[rm].page_fn) );
9.55 - JMP_TARGET(uptodate);
9.56 - load_spreg( R_ECX, REG_OFFSET(pointer_cache[rm].page_fn) );
9.57 -}
9.58 -
9.59 -static inline void MEM_READ_LONG_CACHED( int addr_reg, int value_reg, int rm )
9.60 -{
9.61 - MEM_DECODE_ADDRESS( addr_reg, rm );
9.62 - call_func1_r32ind( R_ECX, MEM_REGION_PTR(read_long), R_EBX );
9.63 - MEM_RESULT(value_reg);
9.64 -}
9.65 -
9.66 -static inline void MEM_READ_WORD_CACHED( int addr_reg, int value_reg, int rm )
9.67 -{
9.68 - MEM_DECODE_ADDRESS( addr_reg, rm );
9.69 - call_func1_r32ind( R_ECX, MEM_REGION_PTR(read_word), R_EBX );
9.70 - MEM_RESULT(value_reg);
9.71 -}
9.72 -
9.73 -static inline void MEM_READ_BYTE_CACHED( int addr_reg, int value_reg, int rm )
9.74 -{
9.75 - MEM_DECODE_ADDRESS( addr_reg, rm );
9.76 - call_func1_r32ind( R_ECX, MEM_REGION_PTR(read_byte), R_EBX );
9.77 - MEM_RESULT(value_reg);
9.78 -}
9.79 -
9.80 -static inline void MEM_WRITE_LONG_CACHED_SP( int addr_reg, int ebpdisp, int rn )
9.81 -{
9.82 - MEM_DECODE_ADDRESS( addr_reg, rn );
9.83 - MOV_sh4r_r32( ebpdisp, R_EDX );
9.84 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_long), R_EBX, R_EDX );
9.85 -}
9.86 -
9.87 -#define MEM_WRITE_LONG_CACHED( addr_reg, value_rm, rn ) MEM_WRITE_LONG_CACHED_SP( addr_reg, REG_OFFSET(r[value_rm]), rn )
9.88 -
9.89 -static inline void MEM_WRITE_WORD_CACHED( int addr_reg, int value_rm, int rn )
9.90 -{
9.91 - MEM_DECODE_ADDRESS( addr_reg, rn );
9.92 - MOVZX_sh4r16_r32( REG_OFFSET(r[value_rm]), R_EDX );
9.93 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_word), R_EBX, R_EDX );
9.94 -}
9.95 -
9.96 -static inline void MEM_WRITE_BYTE_CACHED( int addr_reg, int value_rm, int rn )
9.97 -{
9.98 - MEM_DECODE_ADDRESS( addr_reg, rn );
9.99 - MOVZX_sh4r8_r32( REG_OFFSET(r[value_rm]), R_EDX );
9.100 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_byte), R_EBX, R_EDX );
9.101 -}
9.102 -
9.103 -static inline void MEM_WRITE_BYTE_UNCHECKED( int addr_reg, int value_reg, int rn )
9.104 -{
9.105 - load_spreg( R_ECX, REG_OFFSET(pointer_cache[rn].page_fn) );
9.106 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_byte), addr_reg, R_EDX );
9.107 -}
9.108 -
9.109 -static inline void MEM_WRITE_FLOAT_CACHED( int addr_reg, int value_frm, int rn )
9.110 -{
9.111 - MEM_DECODE_ADDRESS( addr_reg, rn );
9.112 - load_fr( R_EDX, value_frm );
9.113 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_long), R_EBX, R_EDX );
9.114 -}
9.115 -
9.116 -static inline void MEM_READ_DOUBLE_CACHED( int addr_reg, int value_reg1, int value_reg2, int rm )
9.117 -{
9.118 - MEM_DECODE_ADDRESS( addr_reg, rm );
9.119 - call_func1_r32ind( R_ECX, MEM_REGION_PTR(read_long), R_EBX );
9.120 - MOV_r32_esp8( R_EAX, 0 );
9.121 - load_spreg( R_ECX, REG_OFFSET(pointer_cache[rm].page_fn) );
9.122 - LEA_r32disp8_r32( R_EBX, 4, R_EBX );
9.123 - call_func1_r32ind( R_ECX, MEM_REGION_PTR(read_long), R_EBX );
9.124 - MEM_RESULT(value_reg2);
9.125 - MOV_esp8_r32( 0, value_reg1 );
9.126 -}
9.127 -
9.128 -static inline void MEM_WRITE_DOUBLE_CACHED( int addr_reg, int value_frm, int rn )
9.129 -{
9.130 - MEM_DECODE_ADDRESS( addr_reg, rn );
9.131 - load_dr0( R_EDX, value_frm );
9.132 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_long), R_EBX, R_EDX );
9.133 - LEA_r32disp8_r32( R_EBX, 4, R_EBX );
9.134 - load_spreg( R_ECX, REG_OFFSET(pointer_cache[rn].page_fn) );
9.135 - load_dr1( R_EDX, value_frm );
9.136 - call_func2_r32ind( R_ECX, MEM_REGION_PTR(write_long), R_EBX, R_EDX );
9.137 -}
9.138 -
9.139 -
9.140 -
9.141 void sh4_translate_begin_block( sh4addr_t pc )
9.142 {
9.143 enter_block();
9.144 @@ -577,9 +472,11 @@
9.145 load_spreg( R_ECX, R_GBR );
9.146 ADD_r32_r32( R_ECX, R_EAX );
9.147 MMU_TRANSLATE_WRITE( R_EAX );
9.148 - MEM_READ_BYTE_CACHED( R_EAX, R_EDX, 16 );
9.149 + MOV_r32_esp8(R_EAX, 0);
9.150 + MEM_READ_BYTE( R_EAX, R_EDX );
9.151 + MOV_esp8_r32(0, R_EAX);
9.152 AND_imm32_r32(imm, R_EDX );
9.153 - MEM_WRITE_BYTE_UNCHECKED( R_EBX, R_EDX, 16 );
9.154 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.155 sh4_x86.tstate = TSTATE_NONE;
9.156 :}
9.157 CMP/EQ Rm, Rn {:
9.158 @@ -783,10 +680,10 @@
9.159 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.160 }
9.161 MEM_READ_LONG( R_EAX, R_EAX );
9.162 - MOV_r32_r32( R_EAX, R_EBX );
9.163 + MOV_r32_esp8( R_EAX, 4 );
9.164 MOV_esp8_r32( 0, R_EAX );
9.165 MEM_READ_LONG( R_EAX, R_EAX );
9.166 - MOV_r32_r32( R_EBX, R_ECX );
9.167 + MOV_esp8_r32( 4, R_ECX );
9.168
9.169 IMUL_r32( R_ECX );
9.170 ADD_r32_sh4r( R_EAX, R_MACL );
9.171 @@ -824,10 +721,10 @@
9.172 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
9.173 }
9.174 MEM_READ_WORD( R_EAX, R_EAX );
9.175 - MOV_r32_r32( R_EAX, R_EBX );
9.176 + MOV_r32_esp8( R_EAX, 4 );
9.177 MOV_esp8_r32( 0, R_EAX );
9.178 MEM_READ_WORD( R_EAX, R_EAX );
9.179 - MOV_r32_r32( R_EBX, R_ECX );
9.180 + MOV_esp8_r32( 4, R_ECX );
9.181
9.182 IMUL_r32( R_ECX );
9.183 load_spreg( R_ECX, R_S );
9.184 @@ -930,9 +827,11 @@
9.185 load_spreg( R_ECX, R_GBR );
9.186 ADD_r32_r32( R_ECX, R_EAX );
9.187 MMU_TRANSLATE_WRITE( R_EAX );
9.188 - MEM_READ_BYTE_CACHED( R_EAX, R_EDX, 16 );
9.189 + MOV_r32_esp8( R_EAX, 0 );
9.190 + MEM_READ_BYTE( R_EAX, R_EDX );
9.191 + MOV_esp8_r32( 0, R_EAX );
9.192 OR_imm32_r32(imm, R_EDX );
9.193 - MEM_WRITE_BYTE_UNCHECKED( R_EBX, R_EDX, 16 );
9.194 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.195 sh4_x86.tstate = TSTATE_NONE;
9.196 :}
9.197 ROTCL Rn {:
9.198 @@ -1147,11 +1046,13 @@
9.199 COUNT_INST(I_TASB);
9.200 load_reg( R_EAX, Rn );
9.201 MMU_TRANSLATE_WRITE( R_EAX );
9.202 - MEM_READ_BYTE_CACHED( R_EAX, R_EDX, 16 );
9.203 + MOV_r32_esp8( R_EAX, 0 );
9.204 + MEM_READ_BYTE( R_EAX, R_EDX );
9.205 TEST_r8_r8( R_DL, R_DL );
9.206 SETE_t();
9.207 OR_imm8_r8( 0x80, R_DL );
9.208 - MEM_WRITE_BYTE_UNCHECKED( R_EBX, R_EDX, 16 );
9.209 + MOV_esp8_r32( 0, R_EAX );
9.210 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.211 sh4_x86.tstate = TSTATE_NONE;
9.212 :}
9.213 TST Rm, Rn {:
9.214 @@ -1175,7 +1076,7 @@
9.215 load_reg( R_ECX, R_GBR);
9.216 ADD_r32_r32( R_ECX, R_EAX );
9.217 MMU_TRANSLATE_READ( R_EAX );
9.218 - MEM_READ_BYTE_CACHED( R_EAX, R_EAX, 16 );
9.219 + MEM_READ_BYTE( R_EAX, R_EAX );
9.220 TEST_imm8_r8( imm, R_AL );
9.221 SETE_t();
9.222 sh4_x86.tstate = TSTATE_E;
9.223 @@ -1201,9 +1102,11 @@
9.224 load_spreg( R_ECX, R_GBR );
9.225 ADD_r32_r32( R_ECX, R_EAX );
9.226 MMU_TRANSLATE_WRITE( R_EAX );
9.227 - MEM_READ_BYTE_CACHED(R_EAX, R_EDX, 16);
9.228 + MOV_r32_esp8( R_EAX, 0 );
9.229 + MEM_READ_BYTE(R_EAX, R_EDX);
9.230 + MOV_esp8_r32( 0, R_EAX );
9.231 XOR_imm32_r32( imm, R_EDX );
9.232 - MEM_WRITE_BYTE_UNCHECKED( R_EBX, R_EDX, 16 );
9.233 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.234 sh4_x86.tstate = TSTATE_NONE;
9.235 :}
9.236 XTRCT Rm, Rn {:
9.237 @@ -1232,7 +1135,8 @@
9.238 COUNT_INST(I_MOVB);
9.239 load_reg( R_EAX, Rn );
9.240 MMU_TRANSLATE_WRITE( R_EAX );
9.241 - MEM_WRITE_BYTE_CACHED( R_EAX, Rm, Rn );
9.242 + load_reg( R_EDX, Rm );
9.243 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.244 sh4_x86.tstate = TSTATE_NONE;
9.245 :}
9.246 MOV.B Rm, @-Rn {:
9.247 @@ -1240,8 +1144,9 @@
9.248 load_reg( R_EAX, Rn );
9.249 ADD_imm8s_r32( -1, R_EAX );
9.250 MMU_TRANSLATE_WRITE( R_EAX );
9.251 + load_reg( R_EDX, Rm );
9.252 ADD_imm8s_sh4r( -1, REG_OFFSET(r[Rn]) );
9.253 - MEM_WRITE_BYTE_CACHED( R_EAX, Rm, Rn );
9.254 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.255 sh4_x86.tstate = TSTATE_NONE;
9.256 :}
9.257 MOV.B Rm, @(R0, Rn) {:
9.258 @@ -1250,7 +1155,8 @@
9.259 load_reg( R_ECX, Rn );
9.260 ADD_r32_r32( R_ECX, R_EAX );
9.261 MMU_TRANSLATE_WRITE( R_EAX );
9.262 - MEM_WRITE_BYTE_CACHED( R_EAX, Rm, 0 );
9.263 + load_reg( R_EDX, Rm );
9.264 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.265 sh4_x86.tstate = TSTATE_NONE;
9.266 :}
9.267 MOV.B R0, @(disp, GBR) {:
9.268 @@ -1258,7 +1164,8 @@
9.269 load_spreg( R_EAX, R_GBR );
9.270 ADD_imm32_r32( disp, R_EAX );
9.271 MMU_TRANSLATE_WRITE( R_EAX );
9.272 - MEM_WRITE_BYTE_CACHED( R_EAX, 0, 16 );
9.273 + load_reg( R_EDX, 0 );
9.274 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.275 sh4_x86.tstate = TSTATE_NONE;
9.276 :}
9.277 MOV.B R0, @(disp, Rn) {:
9.278 @@ -1266,14 +1173,15 @@
9.279 load_reg( R_EAX, Rn );
9.280 ADD_imm32_r32( disp, R_EAX );
9.281 MMU_TRANSLATE_WRITE( R_EAX );
9.282 - MEM_WRITE_BYTE_CACHED( R_EAX, 0, Rn );
9.283 + load_reg( R_EDX, 0 );
9.284 + MEM_WRITE_BYTE( R_EAX, R_EDX );
9.285 sh4_x86.tstate = TSTATE_NONE;
9.286 :}
9.287 MOV.B @Rm, Rn {:
9.288 COUNT_INST(I_MOVB);
9.289 load_reg( R_EAX, Rm );
9.290 MMU_TRANSLATE_READ( R_EAX );
9.291 - MEM_READ_BYTE_CACHED( R_EAX, R_EAX, Rm );
9.292 + MEM_READ_BYTE( R_EAX, R_EAX );
9.293 store_reg( R_EAX, Rn );
9.294 sh4_x86.tstate = TSTATE_NONE;
9.295 :}
9.296 @@ -1282,7 +1190,7 @@
9.297 load_reg( R_EAX, Rm );
9.298 MMU_TRANSLATE_READ( R_EAX );
9.299 ADD_imm8s_sh4r( 1, REG_OFFSET(r[Rm]) );
9.300 - MEM_READ_BYTE_CACHED( R_EAX, R_EAX, Rm );
9.301 + MEM_READ_BYTE( R_EAX, R_EAX );
9.302 store_reg( R_EAX, Rn );
9.303 sh4_x86.tstate = TSTATE_NONE;
9.304 :}
9.305 @@ -1292,7 +1200,7 @@
9.306 load_reg( R_ECX, Rm );
9.307 ADD_r32_r32( R_ECX, R_EAX );
9.308 MMU_TRANSLATE_READ( R_EAX )
9.309 - MEM_READ_BYTE_CACHED( R_EAX, R_EAX, 0 );
9.310 + MEM_READ_BYTE( R_EAX, R_EAX );
9.311 store_reg( R_EAX, Rn );
9.312 sh4_x86.tstate = TSTATE_NONE;
9.313 :}
9.314 @@ -1301,7 +1209,7 @@
9.315 load_spreg( R_EAX, R_GBR );
9.316 ADD_imm32_r32( disp, R_EAX );
9.317 MMU_TRANSLATE_READ( R_EAX );
9.318 - MEM_READ_BYTE_CACHED( R_EAX, R_EAX, 16 );
9.319 + MEM_READ_BYTE( R_EAX, R_EAX );
9.320 store_reg( R_EAX, 0 );
9.321 sh4_x86.tstate = TSTATE_NONE;
9.322 :}
9.323 @@ -1310,7 +1218,7 @@
9.324 load_reg( R_EAX, Rm );
9.325 ADD_imm32_r32( disp, R_EAX );
9.326 MMU_TRANSLATE_READ( R_EAX );
9.327 - MEM_READ_BYTE_CACHED( R_EAX, R_EAX, Rm );
9.328 + MEM_READ_BYTE( R_EAX, R_EAX );
9.329 store_reg( R_EAX, 0 );
9.330 sh4_x86.tstate = TSTATE_NONE;
9.331 :}
9.332 @@ -1318,8 +1226,19 @@
9.333 COUNT_INST(I_MOVL);
9.334 load_reg( R_EAX, Rn );
9.335 check_walign32(R_EAX);
9.336 + MOV_r32_r32( R_EAX, R_ECX );
9.337 + AND_imm32_r32( 0xFC000000, R_ECX );
9.338 + CMP_imm32_r32( 0xE0000000, R_ECX );
9.339 + JNE_rel8( notsq );
9.340 + AND_imm8s_r32( 0x3C, R_EAX );
9.341 + load_reg( R_EDX, Rm );
9.342 + MOV_r32_ebpr32disp32( R_EDX, R_EAX, REG_OFFSET(store_queue) );
9.343 + JMP_rel8(end);
9.344 + JMP_TARGET(notsq);
9.345 MMU_TRANSLATE_WRITE( R_EAX );
9.346 - MEM_WRITE_LONG_CACHED( R_EAX, Rm, Rn );
9.347 + load_reg( R_EDX, Rm );
9.348 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.349 + JMP_TARGET(end);
9.350 sh4_x86.tstate = TSTATE_NONE;
9.351 :}
9.352 MOV.L Rm, @-Rn {:
9.353 @@ -1328,8 +1247,9 @@
9.354 ADD_imm8s_r32( -4, R_EAX );
9.355 check_walign32( R_EAX );
9.356 MMU_TRANSLATE_WRITE( R_EAX );
9.357 + load_reg( R_EDX, Rm );
9.358 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.359 - MEM_WRITE_LONG_CACHED( R_EAX, Rm, Rn );
9.360 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.361 sh4_x86.tstate = TSTATE_NONE;
9.362 :}
9.363 MOV.L Rm, @(R0, Rn) {:
9.364 @@ -1339,7 +1259,8 @@
9.365 ADD_r32_r32( R_ECX, R_EAX );
9.366 check_walign32( R_EAX );
9.367 MMU_TRANSLATE_WRITE( R_EAX );
9.368 - MEM_WRITE_LONG_CACHED( R_EAX, Rm, 0 );
9.369 + load_reg( R_EDX, Rm );
9.370 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.371 sh4_x86.tstate = TSTATE_NONE;
9.372 :}
9.373 MOV.L R0, @(disp, GBR) {:
9.374 @@ -1348,7 +1269,8 @@
9.375 ADD_imm32_r32( disp, R_EAX );
9.376 check_walign32( R_EAX );
9.377 MMU_TRANSLATE_WRITE( R_EAX );
9.378 - MEM_WRITE_LONG_CACHED( R_EAX, 0, 16 );
9.379 + load_reg( R_EDX, 0 );
9.380 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.381 sh4_x86.tstate = TSTATE_NONE;
9.382 :}
9.383 MOV.L Rm, @(disp, Rn) {:
9.384 @@ -1356,8 +1278,19 @@
9.385 load_reg( R_EAX, Rn );
9.386 ADD_imm32_r32( disp, R_EAX );
9.387 check_walign32( R_EAX );
9.388 + MOV_r32_r32( R_EAX, R_ECX );
9.389 + AND_imm32_r32( 0xFC000000, R_ECX );
9.390 + CMP_imm32_r32( 0xE0000000, R_ECX );
9.391 + JNE_rel8( notsq );
9.392 + AND_imm8s_r32( 0x3C, R_EAX );
9.393 + load_reg( R_EDX, Rm );
9.394 + MOV_r32_ebpr32disp32( R_EDX, R_EAX, REG_OFFSET(store_queue) );
9.395 + JMP_rel8(end);
9.396 + JMP_TARGET(notsq);
9.397 MMU_TRANSLATE_WRITE( R_EAX );
9.398 - MEM_WRITE_LONG_CACHED( R_EAX, Rm, Rn );
9.399 + load_reg( R_EDX, Rm );
9.400 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.401 + JMP_TARGET(end);
9.402 sh4_x86.tstate = TSTATE_NONE;
9.403 :}
9.404 MOV.L @Rm, Rn {:
9.405 @@ -1365,7 +1298,7 @@
9.406 load_reg( R_EAX, Rm );
9.407 check_ralign32( R_EAX );
9.408 MMU_TRANSLATE_READ( R_EAX );
9.409 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.410 + MEM_READ_LONG( R_EAX, R_EAX );
9.411 store_reg( R_EAX, Rn );
9.412 sh4_x86.tstate = TSTATE_NONE;
9.413 :}
9.414 @@ -1375,7 +1308,7 @@
9.415 check_ralign32( R_EAX );
9.416 MMU_TRANSLATE_READ( R_EAX );
9.417 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.418 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.419 + MEM_READ_LONG( R_EAX, R_EAX );
9.420 store_reg( R_EAX, Rn );
9.421 sh4_x86.tstate = TSTATE_NONE;
9.422 :}
9.423 @@ -1386,7 +1319,7 @@
9.424 ADD_r32_r32( R_ECX, R_EAX );
9.425 check_ralign32( R_EAX );
9.426 MMU_TRANSLATE_READ( R_EAX );
9.427 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, 0 );
9.428 + MEM_READ_LONG( R_EAX, R_EAX );
9.429 store_reg( R_EAX, Rn );
9.430 sh4_x86.tstate = TSTATE_NONE;
9.431 :}
9.432 @@ -1396,7 +1329,7 @@
9.433 ADD_imm32_r32( disp, R_EAX );
9.434 check_ralign32( R_EAX );
9.435 MMU_TRANSLATE_READ( R_EAX );
9.436 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, 16 );
9.437 + MEM_READ_LONG( R_EAX, R_EAX );
9.438 store_reg( R_EAX, 0 );
9.439 sh4_x86.tstate = TSTATE_NONE;
9.440 :}
9.441 @@ -1425,7 +1358,7 @@
9.442 load_imm32( R_EAX, (pc-sh4_x86.block_start_pc) + disp + 4 - (pc&0x03) );
9.443 ADD_sh4r_r32( R_PC, R_EAX );
9.444 MMU_TRANSLATE_READ( R_EAX );
9.445 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, 16 );
9.446 + MEM_READ_LONG( R_EAX, R_EAX );
9.447 sh4_x86.tstate = TSTATE_NONE;
9.448 }
9.449 store_reg( R_EAX, Rn );
9.450 @@ -1437,7 +1370,7 @@
9.451 ADD_imm8s_r32( disp, R_EAX );
9.452 check_ralign32( R_EAX );
9.453 MMU_TRANSLATE_READ( R_EAX );
9.454 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.455 + MEM_READ_LONG( R_EAX, R_EAX );
9.456 store_reg( R_EAX, Rn );
9.457 sh4_x86.tstate = TSTATE_NONE;
9.458 :}
9.459 @@ -1446,7 +1379,8 @@
9.460 load_reg( R_EAX, Rn );
9.461 check_walign16( R_EAX );
9.462 MMU_TRANSLATE_WRITE( R_EAX )
9.463 - MEM_WRITE_WORD_CACHED( R_EAX, Rm, Rn );
9.464 + load_reg( R_EDX, Rm );
9.465 + MEM_WRITE_WORD( R_EAX, R_EDX );
9.466 sh4_x86.tstate = TSTATE_NONE;
9.467 :}
9.468 MOV.W Rm, @-Rn {:
9.469 @@ -1455,8 +1389,9 @@
9.470 ADD_imm8s_r32( -2, R_EAX );
9.471 check_walign16( R_EAX );
9.472 MMU_TRANSLATE_WRITE( R_EAX );
9.473 + load_reg( R_EDX, Rm );
9.474 ADD_imm8s_sh4r( -2, REG_OFFSET(r[Rn]) );
9.475 - MEM_WRITE_WORD_CACHED( R_EAX, Rm, Rn );
9.476 + MEM_WRITE_WORD( R_EAX, R_EDX );
9.477 sh4_x86.tstate = TSTATE_NONE;
9.478 :}
9.479 MOV.W Rm, @(R0, Rn) {:
9.480 @@ -1466,7 +1401,8 @@
9.481 ADD_r32_r32( R_ECX, R_EAX );
9.482 check_walign16( R_EAX );
9.483 MMU_TRANSLATE_WRITE( R_EAX );
9.484 - MEM_WRITE_WORD_CACHED( R_EAX, Rm, 0 );
9.485 + load_reg( R_EDX, Rm );
9.486 + MEM_WRITE_WORD( R_EAX, R_EDX );
9.487 sh4_x86.tstate = TSTATE_NONE;
9.488 :}
9.489 MOV.W R0, @(disp, GBR) {:
9.490 @@ -1475,7 +1411,8 @@
9.491 ADD_imm32_r32( disp, R_EAX );
9.492 check_walign16( R_EAX );
9.493 MMU_TRANSLATE_WRITE( R_EAX );
9.494 - MEM_WRITE_WORD_CACHED( R_EAX, 0, 16 );
9.495 + load_reg( R_EDX, 0 );
9.496 + MEM_WRITE_WORD( R_EAX, R_EDX );
9.497 sh4_x86.tstate = TSTATE_NONE;
9.498 :}
9.499 MOV.W R0, @(disp, Rn) {:
9.500 @@ -1484,7 +1421,8 @@
9.501 ADD_imm32_r32( disp, R_EAX );
9.502 check_walign16( R_EAX );
9.503 MMU_TRANSLATE_WRITE( R_EAX );
9.504 - MEM_WRITE_WORD_CACHED( R_EAX, 0, Rn );
9.505 + load_reg( R_EDX, 0 );
9.506 + MEM_WRITE_WORD( R_EAX, R_EDX );
9.507 sh4_x86.tstate = TSTATE_NONE;
9.508 :}
9.509 MOV.W @Rm, Rn {:
9.510 @@ -1492,7 +1430,7 @@
9.511 load_reg( R_EAX, Rm );
9.512 check_ralign16( R_EAX );
9.513 MMU_TRANSLATE_READ( R_EAX );
9.514 - MEM_READ_WORD_CACHED( R_EAX, R_EAX, Rm );
9.515 + MEM_READ_WORD( R_EAX, R_EAX );
9.516 store_reg( R_EAX, Rn );
9.517 sh4_x86.tstate = TSTATE_NONE;
9.518 :}
9.519 @@ -1502,7 +1440,7 @@
9.520 check_ralign16( R_EAX );
9.521 MMU_TRANSLATE_READ( R_EAX );
9.522 ADD_imm8s_sh4r( 2, REG_OFFSET(r[Rm]) );
9.523 - MEM_READ_WORD_CACHED( R_EAX, R_EAX, Rm );
9.524 + MEM_READ_WORD( R_EAX, R_EAX );
9.525 store_reg( R_EAX, Rn );
9.526 sh4_x86.tstate = TSTATE_NONE;
9.527 :}
9.528 @@ -1513,7 +1451,7 @@
9.529 ADD_r32_r32( R_ECX, R_EAX );
9.530 check_ralign16( R_EAX );
9.531 MMU_TRANSLATE_READ( R_EAX );
9.532 - MEM_READ_WORD_CACHED( R_EAX, R_EAX, 0 );
9.533 + MEM_READ_WORD( R_EAX, R_EAX );
9.534 store_reg( R_EAX, Rn );
9.535 sh4_x86.tstate = TSTATE_NONE;
9.536 :}
9.537 @@ -1523,7 +1461,7 @@
9.538 ADD_imm32_r32( disp, R_EAX );
9.539 check_ralign16( R_EAX );
9.540 MMU_TRANSLATE_READ( R_EAX );
9.541 - MEM_READ_WORD_CACHED( R_EAX, R_EAX, 16 );
9.542 + MEM_READ_WORD( R_EAX, R_EAX );
9.543 store_reg( R_EAX, 0 );
9.544 sh4_x86.tstate = TSTATE_NONE;
9.545 :}
9.546 @@ -1554,7 +1492,7 @@
9.547 ADD_imm32_r32( disp, R_EAX );
9.548 check_ralign16( R_EAX );
9.549 MMU_TRANSLATE_READ( R_EAX );
9.550 - MEM_READ_WORD_CACHED( R_EAX, R_EAX, Rm );
9.551 + MEM_READ_WORD( R_EAX, R_EAX );
9.552 store_reg( R_EAX, 0 );
9.553 sh4_x86.tstate = TSTATE_NONE;
9.554 :}
9.555 @@ -1574,7 +1512,8 @@
9.556 load_reg( R_EAX, Rn );
9.557 check_walign32( R_EAX );
9.558 MMU_TRANSLATE_WRITE( R_EAX );
9.559 - MEM_WRITE_LONG_CACHED( R_EAX, 0, Rn );
9.560 + load_reg( R_EDX, 0 );
9.561 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.562 sh4_x86.tstate = TSTATE_NONE;
9.563 :}
9.564
9.565 @@ -1924,11 +1863,14 @@
9.566 if( sh4_x86.double_size ) {
9.567 check_walign64( R_EAX );
9.568 MMU_TRANSLATE_WRITE( R_EAX );
9.569 - MEM_WRITE_DOUBLE_CACHED( R_EAX, FRm, Rn );
9.570 + load_dr0( R_EDX, FRm );
9.571 + load_dr1( R_ECX, FRm );
9.572 + MEM_WRITE_DOUBLE( R_EAX, R_EDX, R_ECX );
9.573 } else {
9.574 check_walign32( R_EAX );
9.575 MMU_TRANSLATE_WRITE( R_EAX );
9.576 - MEM_WRITE_FLOAT_CACHED( R_EAX, FRm, Rn );
9.577 + load_fr( R_EDX, FRm );
9.578 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.579 }
9.580 sh4_x86.tstate = TSTATE_NONE;
9.581 :}
9.582 @@ -1939,13 +1881,13 @@
9.583 if( sh4_x86.double_size ) {
9.584 check_ralign64( R_EAX );
9.585 MMU_TRANSLATE_READ( R_EAX );
9.586 - MEM_READ_DOUBLE_CACHED( R_EAX, R_EDX, R_EAX, Rm );
9.587 + MEM_READ_DOUBLE( R_EAX, R_EDX, R_EAX );
9.588 store_dr0( R_EDX, FRn );
9.589 store_dr1( R_EAX, FRn );
9.590 } else {
9.591 check_ralign32( R_EAX );
9.592 MMU_TRANSLATE_READ( R_EAX );
9.593 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.594 + MEM_READ_LONG( R_EAX, R_EAX );
9.595 store_fr( R_EAX, FRn );
9.596 }
9.597 sh4_x86.tstate = TSTATE_NONE;
9.598 @@ -1956,16 +1898,19 @@
9.599 load_reg( R_EAX, Rn );
9.600 if( sh4_x86.double_size ) {
9.601 check_walign64( R_EAX );
9.602 - LEA_r32disp8_r32( R_EAX, -8, R_EAX );
9.603 + ADD_imm8s_r32(-8,R_EAX);
9.604 MMU_TRANSLATE_WRITE( R_EAX );
9.605 + load_dr0( R_EDX, FRm );
9.606 + load_dr1( R_ECX, FRm );
9.607 ADD_imm8s_sh4r(-8,REG_OFFSET(r[Rn]));
9.608 - MEM_WRITE_DOUBLE_CACHED( R_EAX, FRm, Rn );
9.609 + MEM_WRITE_DOUBLE( R_EAX, R_EDX, R_ECX );
9.610 } else {
9.611 check_walign32( R_EAX );
9.612 - LEA_r32disp8_r32( R_EAX, -4, R_EAX );
9.613 + ADD_imm8s_r32( -4, R_EAX );
9.614 MMU_TRANSLATE_WRITE( R_EAX );
9.615 + load_fr( R_EDX, FRm );
9.616 ADD_imm8s_sh4r(-4,REG_OFFSET(r[Rn]));
9.617 - MEM_WRITE_FLOAT_CACHED( R_EAX, FRm, Rn );
9.618 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.619 }
9.620 sh4_x86.tstate = TSTATE_NONE;
9.621 :}
9.622 @@ -1977,14 +1922,14 @@
9.623 check_ralign64( R_EAX );
9.624 MMU_TRANSLATE_READ( R_EAX );
9.625 ADD_imm8s_sh4r( 8, REG_OFFSET(r[Rm]) );
9.626 - MEM_READ_DOUBLE_CACHED( R_EAX, R_EDX, R_EAX, Rm );
9.627 + MEM_READ_DOUBLE( R_EAX, R_EDX, R_EAX );
9.628 store_dr0( R_EDX, FRn );
9.629 store_dr1( R_EAX, FRn );
9.630 } else {
9.631 check_ralign32( R_EAX );
9.632 MMU_TRANSLATE_READ( R_EAX );
9.633 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.634 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.635 + MEM_READ_LONG( R_EAX, R_EAX );
9.636 store_fr( R_EAX, FRn );
9.637 }
9.638 sh4_x86.tstate = TSTATE_NONE;
9.639 @@ -1997,11 +1942,14 @@
9.640 if( sh4_x86.double_size ) {
9.641 check_walign64( R_EAX );
9.642 MMU_TRANSLATE_WRITE( R_EAX );
9.643 - MEM_WRITE_DOUBLE_CACHED( R_EAX, FRm, 0 );
9.644 + load_dr0( R_EDX, FRm );
9.645 + load_dr1( R_ECX, FRm );
9.646 + MEM_WRITE_DOUBLE( R_EAX, R_EDX, R_ECX );
9.647 } else {
9.648 check_walign32( R_EAX );
9.649 MMU_TRANSLATE_WRITE( R_EAX );
9.650 - MEM_WRITE_FLOAT_CACHED( R_EAX, FRm, 0 );
9.651 + load_fr( R_EDX, FRm );
9.652 + MEM_WRITE_LONG( R_EAX, R_EDX ); // 12
9.653 }
9.654 sh4_x86.tstate = TSTATE_NONE;
9.655 :}
9.656 @@ -2013,13 +1961,13 @@
9.657 if( sh4_x86.double_size ) {
9.658 check_ralign64( R_EAX );
9.659 MMU_TRANSLATE_READ( R_EAX );
9.660 - MEM_READ_DOUBLE_CACHED( R_EAX, R_ECX, R_EAX, 0 );
9.661 + MEM_READ_DOUBLE( R_EAX, R_ECX, R_EAX );
9.662 store_dr0( R_ECX, FRn );
9.663 store_dr1( R_EAX, FRn );
9.664 } else {
9.665 check_ralign32( R_EAX );
9.666 MMU_TRANSLATE_READ( R_EAX );
9.667 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, 0 );
9.668 + MEM_READ_LONG( R_EAX, R_EAX );
9.669 store_fr( R_EAX, FRn );
9.670 }
9.671 sh4_x86.tstate = TSTATE_NONE;
9.672 @@ -2436,7 +2384,7 @@
9.673 check_ralign32( R_EAX );
9.674 MMU_TRANSLATE_READ( R_EAX );
9.675 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.676 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.677 + MEM_READ_LONG( R_EAX, R_EAX );
9.678 store_spreg( R_EAX, R_GBR );
9.679 sh4_x86.tstate = TSTATE_NONE;
9.680 :}
9.681 @@ -2450,7 +2398,7 @@
9.682 check_ralign32( R_EAX );
9.683 MMU_TRANSLATE_READ( R_EAX );
9.684 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.685 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.686 + MEM_READ_LONG( R_EAX, R_EAX );
9.687 call_func1( sh4_write_sr, R_EAX );
9.688 sh4_x86.priv_checked = FALSE;
9.689 sh4_x86.fpuen_checked = FALSE;
9.690 @@ -2464,7 +2412,7 @@
9.691 check_ralign32( R_EAX );
9.692 MMU_TRANSLATE_READ( R_EAX );
9.693 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.694 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.695 + MEM_READ_LONG( R_EAX, R_EAX );
9.696 store_spreg( R_EAX, R_VBR );
9.697 sh4_x86.tstate = TSTATE_NONE;
9.698 :}
9.699 @@ -2475,7 +2423,7 @@
9.700 check_ralign32( R_EAX );
9.701 MMU_TRANSLATE_READ( R_EAX );
9.702 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.703 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.704 + MEM_READ_LONG( R_EAX, R_EAX );
9.705 store_spreg( R_EAX, R_SSR );
9.706 sh4_x86.tstate = TSTATE_NONE;
9.707 :}
9.708 @@ -2486,7 +2434,7 @@
9.709 check_ralign32( R_EAX );
9.710 MMU_TRANSLATE_READ( R_EAX );
9.711 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.712 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.713 + MEM_READ_LONG( R_EAX, R_EAX );
9.714 store_spreg( R_EAX, R_SGR );
9.715 sh4_x86.tstate = TSTATE_NONE;
9.716 :}
9.717 @@ -2497,7 +2445,7 @@
9.718 check_ralign32( R_EAX );
9.719 MMU_TRANSLATE_READ( R_EAX );
9.720 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.721 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.722 + MEM_READ_LONG( R_EAX, R_EAX );
9.723 store_spreg( R_EAX, R_SPC );
9.724 sh4_x86.tstate = TSTATE_NONE;
9.725 :}
9.726 @@ -2508,7 +2456,7 @@
9.727 check_ralign32( R_EAX );
9.728 MMU_TRANSLATE_READ( R_EAX );
9.729 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.730 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.731 + MEM_READ_LONG( R_EAX, R_EAX );
9.732 store_spreg( R_EAX, R_DBR );
9.733 sh4_x86.tstate = TSTATE_NONE;
9.734 :}
9.735 @@ -2519,7 +2467,7 @@
9.736 check_ralign32( R_EAX );
9.737 MMU_TRANSLATE_READ( R_EAX );
9.738 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.739 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.740 + MEM_READ_LONG( R_EAX, R_EAX );
9.741 store_spreg( R_EAX, REG_OFFSET(r_bank[Rn_BANK]) );
9.742 sh4_x86.tstate = TSTATE_NONE;
9.743 :}
9.744 @@ -2538,7 +2486,7 @@
9.745 check_ralign32( R_EAX );
9.746 MMU_TRANSLATE_READ( R_EAX );
9.747 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.748 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.749 + MEM_READ_LONG( R_EAX, R_EAX );
9.750 call_func1( sh4_write_fpscr, R_EAX );
9.751 sh4_x86.tstate = TSTATE_NONE;
9.752 return 2;
9.753 @@ -2556,7 +2504,7 @@
9.754 check_ralign32( R_EAX );
9.755 MMU_TRANSLATE_READ( R_EAX );
9.756 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.757 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.758 + MEM_READ_LONG( R_EAX, R_EAX );
9.759 store_spreg( R_EAX, R_FPUL );
9.760 sh4_x86.tstate = TSTATE_NONE;
9.761 :}
9.762 @@ -2571,7 +2519,7 @@
9.763 check_ralign32( R_EAX );
9.764 MMU_TRANSLATE_READ( R_EAX );
9.765 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.766 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.767 + MEM_READ_LONG( R_EAX, R_EAX );
9.768 store_spreg( R_EAX, R_MACH );
9.769 sh4_x86.tstate = TSTATE_NONE;
9.770 :}
9.771 @@ -2586,7 +2534,7 @@
9.772 check_ralign32( R_EAX );
9.773 MMU_TRANSLATE_READ( R_EAX );
9.774 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.775 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.776 + MEM_READ_LONG( R_EAX, R_EAX );
9.777 store_spreg( R_EAX, R_MACL );
9.778 sh4_x86.tstate = TSTATE_NONE;
9.779 :}
9.780 @@ -2601,7 +2549,7 @@
9.781 check_ralign32( R_EAX );
9.782 MMU_TRANSLATE_READ( R_EAX );
9.783 ADD_imm8s_sh4r( 4, REG_OFFSET(r[Rm]) );
9.784 - MEM_READ_LONG_CACHED( R_EAX, R_EAX, Rm );
9.785 + MEM_READ_LONG( R_EAX, R_EAX );
9.786 store_spreg( R_EAX, R_PR );
9.787 sh4_x86.tstate = TSTATE_NONE;
9.788 :}
9.789 @@ -2705,11 +2653,12 @@
9.790 check_walign32( R_EAX );
9.791 ADD_imm8s_r32( -4, R_EAX );
9.792 MMU_TRANSLATE_WRITE( R_EAX );
9.793 - MOV_r32_r32( R_EAX, R_EBX );
9.794 + MOV_r32_esp8( R_EAX, 0 );
9.795 call_func0( sh4_read_sr );
9.796 MOV_r32_r32( R_EAX, R_EDX );
9.797 + MOV_esp8_r32( 0, R_EAX );
9.798 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.799 - MEM_WRITE_LONG( R_EBX, R_EDX );
9.800 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.801 sh4_x86.tstate = TSTATE_NONE;
9.802 :}
9.803 STC.L VBR, @-Rn {:
9.804 @@ -2719,8 +2668,9 @@
9.805 check_walign32( R_EAX );
9.806 ADD_imm8s_r32( -4, R_EAX );
9.807 MMU_TRANSLATE_WRITE( R_EAX );
9.808 + load_spreg( R_EDX, R_VBR );
9.809 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.810 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_VBR, Rn );
9.811 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.812 sh4_x86.tstate = TSTATE_NONE;
9.813 :}
9.814 STC.L SSR, @-Rn {:
9.815 @@ -2730,8 +2680,9 @@
9.816 check_walign32( R_EAX );
9.817 ADD_imm8s_r32( -4, R_EAX );
9.818 MMU_TRANSLATE_WRITE( R_EAX );
9.819 + load_spreg( R_EDX, R_SSR );
9.820 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.821 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_SSR, Rn );
9.822 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.823 sh4_x86.tstate = TSTATE_NONE;
9.824 :}
9.825 STC.L SPC, @-Rn {:
9.826 @@ -2741,8 +2692,9 @@
9.827 check_walign32( R_EAX );
9.828 ADD_imm8s_r32( -4, R_EAX );
9.829 MMU_TRANSLATE_WRITE( R_EAX );
9.830 + load_spreg( R_EDX, R_SPC );
9.831 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.832 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_SPC, Rn );
9.833 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.834 sh4_x86.tstate = TSTATE_NONE;
9.835 :}
9.836 STC.L SGR, @-Rn {:
9.837 @@ -2752,8 +2704,9 @@
9.838 check_walign32( R_EAX );
9.839 ADD_imm8s_r32( -4, R_EAX );
9.840 MMU_TRANSLATE_WRITE( R_EAX );
9.841 + load_spreg( R_EDX, R_SGR );
9.842 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.843 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_SGR, Rn );
9.844 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.845 sh4_x86.tstate = TSTATE_NONE;
9.846 :}
9.847 STC.L DBR, @-Rn {:
9.848 @@ -2763,8 +2716,9 @@
9.849 check_walign32( R_EAX );
9.850 ADD_imm8s_r32( -4, R_EAX );
9.851 MMU_TRANSLATE_WRITE( R_EAX );
9.852 + load_spreg( R_EDX, R_DBR );
9.853 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.854 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_DBR, Rn );
9.855 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.856 sh4_x86.tstate = TSTATE_NONE;
9.857 :}
9.858 STC.L Rm_BANK, @-Rn {:
9.859 @@ -2774,8 +2728,9 @@
9.860 check_walign32( R_EAX );
9.861 ADD_imm8s_r32( -4, R_EAX );
9.862 MMU_TRANSLATE_WRITE( R_EAX );
9.863 + load_spreg( R_EDX, REG_OFFSET(r_bank[Rm_BANK]) );
9.864 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.865 - MEM_WRITE_LONG_CACHED_SP( R_EAX, REG_OFFSET(r_bank[Rm_BANK]), Rn );
9.866 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.867 sh4_x86.tstate = TSTATE_NONE;
9.868 :}
9.869 STC.L GBR, @-Rn {:
9.870 @@ -2784,8 +2739,9 @@
9.871 check_walign32( R_EAX );
9.872 ADD_imm8s_r32( -4, R_EAX );
9.873 MMU_TRANSLATE_WRITE( R_EAX );
9.874 + load_spreg( R_EDX, R_GBR );
9.875 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.876 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_GBR, Rn );
9.877 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.878 sh4_x86.tstate = TSTATE_NONE;
9.879 :}
9.880 STS FPSCR, Rn {:
9.881 @@ -2801,8 +2757,9 @@
9.882 check_walign32( R_EAX );
9.883 ADD_imm8s_r32( -4, R_EAX );
9.884 MMU_TRANSLATE_WRITE( R_EAX );
9.885 + load_spreg( R_EDX, R_FPSCR );
9.886 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.887 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_FPSCR, Rn );
9.888 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.889 sh4_x86.tstate = TSTATE_NONE;
9.890 :}
9.891 STS FPUL, Rn {:
9.892 @@ -2818,8 +2775,9 @@
9.893 check_walign32( R_EAX );
9.894 ADD_imm8s_r32( -4, R_EAX );
9.895 MMU_TRANSLATE_WRITE( R_EAX );
9.896 + load_spreg( R_EDX, R_FPUL );
9.897 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.898 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_FPUL, Rn );
9.899 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.900 sh4_x86.tstate = TSTATE_NONE;
9.901 :}
9.902 STS MACH, Rn {:
9.903 @@ -2833,8 +2791,9 @@
9.904 check_walign32( R_EAX );
9.905 ADD_imm8s_r32( -4, R_EAX );
9.906 MMU_TRANSLATE_WRITE( R_EAX );
9.907 + load_spreg( R_EDX, R_MACH );
9.908 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.909 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_MACH, Rn );
9.910 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.911 sh4_x86.tstate = TSTATE_NONE;
9.912 :}
9.913 STS MACL, Rn {:
9.914 @@ -2848,8 +2807,9 @@
9.915 check_walign32( R_EAX );
9.916 ADD_imm8s_r32( -4, R_EAX );
9.917 MMU_TRANSLATE_WRITE( R_EAX );
9.918 + load_spreg( R_EDX, R_MACL );
9.919 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.920 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_MACL, Rn );
9.921 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.922 sh4_x86.tstate = TSTATE_NONE;
9.923 :}
9.924 STS PR, Rn {:
9.925 @@ -2863,8 +2823,9 @@
9.926 check_walign32( R_EAX );
9.927 ADD_imm8s_r32( -4, R_EAX );
9.928 MMU_TRANSLATE_WRITE( R_EAX );
9.929 + load_spreg( R_EDX, R_PR );
9.930 ADD_imm8s_sh4r( -4, REG_OFFSET(r[Rn]) );
9.931 - MEM_WRITE_LONG_CACHED_SP( R_EAX, R_PR, Rn );
9.932 + MEM_WRITE_LONG( R_EAX, R_EDX );
9.933 sh4_x86.tstate = TSTATE_NONE;
9.934 :}
9.935
10.1 --- a/src/sh4/x86op.h Sat Dec 20 03:01:40 2008 +0000
10.2 +++ b/src/sh4/x86op.h Mon Dec 22 09:51:11 2008 +0000
10.3 @@ -112,7 +112,7 @@
10.4 /* ebp+disp32 modrm form */
10.5 #define MODRM_r32_ebp32(r1,disp) OP(0x85 | (r1<<3)); OP32(disp)
10.6
10.7 -/* esp+disp32 modrm+sib form */
10.8 +/* esp+disp8 modrm+sib form */
10.9 #define MODRM_r32_esp8(r1,disp) OP(0x44 | (r1<<3)); OP(0x24); OP(disp)
10.10
10.11 #define MODRM_r32_sh4r(r1,disp) if(disp>127){ MODRM_r32_ebp32(r1,disp);}else{ MODRM_r32_ebp8(r1,(unsigned char)disp); }
10.12 @@ -138,7 +138,7 @@
10.13 #define CALL_r32(r1) OP(0xFF); MODRM_rm32_r32(r1,2)
10.14 #define CALL_ptr(ptr) OP(0xE8); OP32( (((char *)ptr) - (char *)xlat_output) - 4)
10.15 #define CALL_sh4r(disp) OP(0xFF); MODRM_r32_sh4r(2, disp)
10.16 -#define CALL_r32ind(r1,disp) OP(0xFF); OP(0x50 + r1); OP(disp)
10.17 +#define CALL_r32disp8(r1,disp) OP(0xFF); OP(0x50 + r1); OP(disp)
10.18 #define CLC() OP(0xF8)
10.19 #define CMC() OP(0xF5)
10.20 #define CMP_sh4r_r32(disp,r1) OP(0x3B); MODRM_r32_sh4r(r1,disp)
10.21 @@ -158,6 +158,10 @@
10.22 #define MOV_sh4r_r32(disp, r1) OP(0x8B); MODRM_r32_sh4r(r1,disp)
10.23 #define MOV_r32_r32ind(r2,r1) OP(0x89); OP(0 + (r2<<3) + r1 )
10.24 #define MOV_r32ind_r32(r1,r2) OP(0x8B); OP(0 + (r2<<3) + r1 )
10.25 +#define MOV_r32_r32disp32(r2,r1,disp) OP(0x89); OP(0x80 + (r2<<3) + r1); OP32(disp)
10.26 +#define MOV_r32_ebpr32disp32(r2,r1,disp) OP(0x89); OP(0x84 + (r2<<3)); OP(0x05 + (r1<<3)); OP32(disp)
10.27 +#define MOV_r32disp32_r32(r1,disp,r2) OP(0x8B); OP(0x80 + (r2<<3) + r1); OP32(disp)
10.28 +#define MOV_r32disp32x4_r32(r1,disp,r2) OP(0x8B); OP(0x04 + (r2<<3)); OP(0x85+(r1<<3)); OP32(disp)
10.29 #define MOV_r32_esp8(r1,disp) OP(0x89); MODRM_r32_esp8(r1,disp)
10.30 #define MOV_esp8_r32(disp,r1) OP(0x8B); MODRM_r32_esp8(r1,disp)
10.31 #define MOVSX_r8_r32(r1,r2) OP(0x0F); OP(0xBE); MODRM_rm32_r32(r1,r2)
11.1 --- a/src/test/testsh4x86.c Sat Dec 20 03:01:40 2008 +0000
11.2 +++ b/src/test/testsh4x86.c Mon Dec 22 09:51:11 2008 +0000
11.3 @@ -35,6 +35,8 @@
11.4
11.5 #define MAX_INS_SIZE 32
11.6
11.7 +
11.8 +struct mem_region_fn **sh4_address_space = (void *)0x12345432;
11.9 char *option_list = "s:o:d:h";
11.10 struct option longopts[1] = { { NULL, 0, 0, 0 } };
11.11
11.12 @@ -54,6 +56,7 @@
11.13 { "sh4_cpu_period", &sh4_cpu_period },
11.14 { "mmu_vma_to_phys_read", mmu_vma_to_phys_read },
11.15 { "mmu_vma_to_phys_write", mmu_vma_to_phys_write },
11.16 + { "sh4_address_space", 0x12345432 },
11.17 { "sh4_write_fpscr", sh4_write_fpscr },
11.18 { "sh4_write_sr", sh4_write_sr },
11.19 { "sh4_read_sr", sh4_read_sr },
11.20 @@ -129,6 +132,7 @@
11.21 uint32_t sh4_sleep_run_slice(uint32_t nanosecs) { return nanosecs; }
11.22 gboolean gui_error_dialog( const char *fmt, ... ) { return TRUE; }
11.23 struct sh4_icache_struct sh4_icache;
11.24 +struct mem_region_fn mem_region_unmapped;
11.25
11.26 void usage()
11.27 {