filename | src/sh4/x86op.h |
changeset | 939:6f2302afeb89 |
prev | 936:f394309c399a |
author | nkeynes |
date | Sat Jan 03 03:30:26 2009 +0000 |
branch | lxdream-mem |
permissions | -rw-r--r-- |
last change | MMU work-in-progress * Move SDRAM out into separate sdram.c * Move all page-table management into mmu.c * Convert UTLB management to use the new page-tables * Rip out all calls to mmu_vma_to_phys_* and replace with direct access |
--- a/src/sh4/x86op.h	Sat Dec 27 02:59:35 2008 +0000
+++ b/src/sh4/x86op.h	Sat Jan 03 03:30:26 2009 +0000
@@ -55,12 +55,18 @@
 #define LEA_sh4r_rptr(disp, r1) REXW(); LEA_sh4r_r32(disp,r1)
 #define MOV_moffptr_EAX(offptr) REXW(); MOV_moff32_EAX( offptr )
 #define load_exc_backpatch( x86reg ) REXW(); OP(0xB8 + x86reg); sh4_x86_add_backpatch( xlat_output, pc, -2 ); OP64( 0 )
+#define MOV_backpatch_esp8( disp ) REXW(); OP(0xC7); MODRM_r32_esp8(0, disp); sh4_x86_add_backpatch( xlat_output, pc, -2); OP64(0)
+
+/* imm64 operations are only defined for x86-64 */
+#define MOV_imm64_r32(i64,r1) REXW(); OP(0xB8+r1); OP64(i64)
+
 #else /* 32-bit system */
 #define OPPTR(x) OP32((uint32_t)(x))
 #define AND_imm8s_rptr(imm, r1) AND_imm8s_r32( imm, r1 )
 #define LEA_sh4r_rptr(disp, r1) LEA_sh4r_r32(disp,r1)
 #define MOV_moffptr_EAX(offptr) MOV_moff32_EAX( offptr )
 #define load_exc_backpatch( x86reg ) OP(0xB8 + x86reg); sh4_x86_add_backpatch( xlat_output, pc, -2 ); OP32( 0 )
+#define MOV_backpatch_esp8( disp ) OP(0xC7); MODRM_r32_esp8(0, disp); sh4_x86_add_backpatch( xlat_output, pc, -2); OP32(0)
 #endif
 #define STACK_ALIGN 16
 #define POP_r32(r1)       OP(0x58 + r1)
@@ -117,6 +123,9 @@
 
 #define MODRM_r32_sh4r(r1,disp) if(disp>127){ MODRM_r32_ebp32(r1,disp);}else{ MODRM_r32_ebp8(r1,(unsigned char)disp); }
 
+/* Absolute displacement (no base) */
+#define MODRM_r32_disp32(r1,disp) OP(0x05 | (r1<<3)); OP32(disp)
+
 #define REXW() OP(0x48)
 
 /* Major opcodes */
@@ -133,6 +142,7 @@
 #define AND_r32_r32(r1,r2)    OP(0x23); MODRM_rm32_r32(r1,r2)
 #define AND_imm8_r8(imm8, r1) OP(0x80); MODRM_rm32_r32(r1,4); OP(imm8)
 #define AND_imm8s_r32(imm8,r1) OP(0x83); MODRM_rm32_r32(r1,4); OP(imm8)
+#define AND_imm8s_sh4r(imm8,disp) OP(0x83); MODRM_r32_sh4r(4,disp); OP(imm8)
 #define AND_imm32_r32(imm,r1) OP(0x81); MODRM_rm32_r32(r1,4); OP32(imm)
 #define AND_sh4r_r32(disp,r1) OP(0x23); MODRM_r32_sh4r(r1, disp)
 #define CALL_r32(r1)        OP(0xFF); MODRM_rm32_r32(r1,2)
@@ -148,10 +158,13 @@
 #define CMP_imm8s_sh4r(imm,disp) OP(0x83); MODRM_r32_sh4r(7,disp) OP(imm)
 #define DEC_r32(r1)         OP(0x48+r1)
 #define IMUL_r32(r1)        OP(0xF7); MODRM_rm32_r32(r1,5)
+#define IMUL_esp8(disp)     OP(0xF7); MODRM_r32_esp8(5,disp)
 #define INC_r32(r1)         OP(0x40+r1)
 #define JMP_rel8(label)     OP(0xEB); MARK_JMP8(label); OP(-1);
+#define JMP_r32disp8(r1,disp) OP(0xFF); OP(0x60 + r1); OP(disp)
 #define LEA_sh4r_r32(disp,r1) OP(0x8D); MODRM_r32_sh4r(r1,disp)
 #define LEA_r32disp8_r32(r1, disp, r2) OP(0x8D); OP( 0x40 + (r2<<3) + r1); OP(disp)
+#define MOV_imm32_r32(i32,r1) OP(0xB8+r1); OP32(i32)
 #define MOV_r32_r32(r1,r2)  OP(0x89); MODRM_r32_rm32(r1,r2)
 #define MOV_r32_sh4r(r1,disp)  OP(0x89); MODRM_r32_sh4r(r1,disp)
 #define MOV_moff32_EAX(off)  OP(0xA1); OPPTR(off)
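
The macros added in this changeset follow the existing x86op.h emitter pattern: each macro appends raw x86 opcode bytes to the translation output stream via OP()/OP32()/OP64(). The sketch below is illustrative only and is not part of x86op.h: it re-creates OP()/OP32() against a hypothetical local buffer (buf/len are assumptions; in lxdream the real macros write to xlat_output) to show the bytes that the new MOV_imm32_r32 macro would emit.

/* Standalone sketch, assuming a local byte buffer instead of xlat_output. */
#include <stdint.h>
#include <stdio.h>

static uint8_t buf[16];
static int len = 0;

#define OP(x)   (buf[len++] = (uint8_t)(x))   /* emit one byte */
#define OP32(x) do { uint32_t v_ = (x); OP(v_); OP(v_>>8); OP(v_>>16); OP(v_>>24); } while(0)

/* Same shape as the macro added in this changeset: MOV r32, imm32 is B8+rd id */
#define MOV_imm32_r32(i32,r1) OP(0xB8+(r1)); OP32(i32)

int main(void)
{
    MOV_imm32_r32(0x12345678, 1);   /* register 1 is ECX, so this is MOV ECX, 0x12345678 */
    for (int i = 0; i < len; i++)
        printf("%02X ", buf[i]);    /* prints: B9 78 56 34 12 (little-endian immediate) */
    printf("\n");
    return 0;
}

On x86-64 the same immediate-load pattern is widened by prefixing REX.W (0x48) and emitting a 64-bit immediate, which is exactly what the new MOV_imm64_r32 macro does in the #ifdef branch above.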