--- a/src/sh4/x86op.h	Sat Dec 27 02:59:35 2008 +0000
+++ b/src/sh4/x86op.h	Sat Jan 03 03:30:26 2009 +0000
@@ -55,12 +55,18 @@
 #define LEA_sh4r_rptr(disp, r1) REXW(); LEA_sh4r_r32(disp,r1)
 #define MOV_moffptr_EAX(offptr) REXW(); MOV_moff32_EAX( offptr )
 #define load_exc_backpatch( x86reg ) REXW(); OP(0xB8 + x86reg); sh4_x86_add_backpatch( xlat_output, pc, -2 ); OP64( 0 )
+#define MOV_backpatch_esp8( disp ) REXW(); OP(0xC7); MODRM_r32_esp8(0, disp); sh4_x86_add_backpatch( xlat_output, pc, -2); OP64(0)
+
+/* imm64 operations are only defined for x86-64 */
+#define MOV_imm64_r32(i64,r1) REXW(); OP(0xB8+r1); OP64(i64)
+
 #else /* 32-bit system */
 #define OPPTR(x) OP32((uint32_t)(x))
 #define AND_imm8s_rptr(imm, r1) AND_imm8s_r32( imm, r1 )
 #define LEA_sh4r_rptr(disp, r1) LEA_sh4r_r32(disp,r1)
 #define MOV_moffptr_EAX(offptr) MOV_moff32_EAX( offptr )
 #define load_exc_backpatch( x86reg ) OP(0xB8 + x86reg); sh4_x86_add_backpatch( xlat_output, pc, -2 ); OP32( 0 )
+#define MOV_backpatch_esp8( disp ) OP(0xC7); MODRM_r32_esp8(0, disp); sh4_x86_add_backpatch( xlat_output, pc, -2); OP32(0)
 #endif
 #define STACK_ALIGN 16
 #define POP_r32(r1) OP(0x58 + r1)
@@ -117,6 +123,9 @@
 
 #define MODRM_r32_sh4r(r1,disp) if(disp>127){ MODRM_r32_ebp32(r1,disp);}else{ MODRM_r32_ebp8(r1,(unsigned char)disp); }
 
+/* Absolute displacement (no base) */
+#define MODRM_r32_disp32(r1,disp) OP(0x05 | (r1<<3)); OP32(disp)
+
 #define REXW() OP(0x48)
 
 /* Major opcodes */
@@ -133,6 +142,7 @@
 #define AND_r32_r32(r1,r2) OP(0x23); MODRM_rm32_r32(r1,r2)
 #define AND_imm8_r8(imm8, r1) OP(0x80); MODRM_rm32_r32(r1,4); OP(imm8)
 #define AND_imm8s_r32(imm8,r1) OP(0x83); MODRM_rm32_r32(r1,4); OP(imm8)
+#define AND_imm8s_sh4r(imm8,disp) OP(0x83); MODRM_r32_sh4r(4,disp); OP(imm8)
 #define AND_imm32_r32(imm,r1) OP(0x81); MODRM_rm32_r32(r1,4); OP32(imm)
 #define AND_sh4r_r32(disp,r1) OP(0x23); MODRM_r32_sh4r(r1, disp)
 #define CALL_r32(r1) OP(0xFF); MODRM_rm32_r32(r1,2)
@@ -148,10 +158,13 @@
 #define CMP_imm8s_sh4r(imm,disp) OP(0x83); MODRM_r32_sh4r(7,disp) OP(imm)
 #define DEC_r32(r1) OP(0x48+r1)
 #define IMUL_r32(r1) OP(0xF7); MODRM_rm32_r32(r1,5)
+#define IMUL_esp8(disp) OP(0xF7); MODRM_r32_esp8(5,disp)
 #define INC_r32(r1) OP(0x40+r1)
 #define JMP_rel8(label) OP(0xEB); MARK_JMP8(label); OP(-1);
+#define JMP_r32disp8(r1,disp) OP(0xFF); OP(0x60 + r1); OP(disp)
 #define LEA_sh4r_r32(disp,r1) OP(0x8D); MODRM_r32_sh4r(r1,disp)
 #define LEA_r32disp8_r32(r1, disp, r2) OP(0x8D); OP( 0x40 + (r2<<3) + r1); OP(disp)
+#define MOV_imm32_r32(i32,r1) OP(0xB8+r1); OP32(i32)
 #define MOV_r32_r32(r1,r2) OP(0x89); MODRM_r32_rm32(r1,r2)
 #define MOV_r32_sh4r(r1,disp) OP(0x89); MODRM_r32_sh4r(r1,disp)
 #define MOV_moff32_EAX(off) OP(0xA1); OPPTR(off)
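
For reference, the following standalone C sketch (not part of the patch) shows the byte sequences two of the newly added emitter macros produce, MOV_imm32_r32 and JMP_r32disp8. The byte buffer and the simplified OP/OP32 helpers here are stand-ins for lxdream's real xlat_output emitter; the macro bodies copy the encodings from the diff above.

/* Illustrative sketch: simplified emitter, same encodings as the patch. */
#include <stdint.h>
#include <stdio.h>

static uint8_t buf[64];
static uint8_t *out = buf;            /* stand-in for xlat_output */

#define OP(x)   (*out++ = (uint8_t)(x))
#define OP32(x) do { uint32_t v = (uint32_t)(x); OP(v); OP(v>>8); OP(v>>16); OP(v>>24); } while(0)

/* B8+r id : MOV r32, imm32 */
#define MOV_imm32_r32(i32,r1)  OP(0xB8+(r1)); OP32(i32)
/* FF /4 with mod=01 : JMP dword ptr [r32 + disp8] */
#define JMP_r32disp8(r1,disp)  OP(0xFF); OP(0x60+(r1)); OP(disp)

int main(void)
{
    MOV_imm32_r32(0x12345678, 0);   /* MOV EAX, 0x12345678 -> B8 78 56 34 12 */
    JMP_r32disp8(3, 0x10);          /* JMP [EBX+0x10]      -> FF 63 10 */
    for (uint8_t *p = buf; p < out; p++)
        printf("%02X ", *p);
    printf("\n");
    return 0;
}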