1.1 --- a/src/xlat/x86/x86op.h Wed Mar 04 23:12:21 2009 +0000
1.2 +++ b/src/xlat/x86/x86op.h Thu Mar 05 21:42:35 2009 +0000
1.7 - * x86/x86-64 Instruction generation macros
1.8 + * x86/x86-64 Instruction generator
1.10 * Copyright (c) 2009 Nathan Keynes.
1.13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
1.14 * GNU General Public License for more details.
1.17 +#ifndef lxdream_x86op_H
1.18 +#define lxdream_x86op_H
1.24 #define PREF_REXR 0x44
1.25 #define PREF_REXW 0x48
1.27 +/* Expands to PREF_REXW when pointer-sized operations need a 64-bit operand (sizeof(void*)==8), otherwise 0 */
1.28 +#define PREF_PTR ((sizeof(void *) == 8) ? PREF_REXW : 0)
1.30 extern unsigned char *xlat_output;
1.32 #define OP(x) *xlat_output++ = (x)
1.34 #define x86_encode_r64_rm64(opcode,rr,rb) x86_encode_reg_rm(PREF_REXW,opcode,rr,rb)
1.35 #define x86_encode_r32_mem32(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm(0,opcode,rr,rb,rx,ss,disp32)
1.36 #define x86_encode_r64_mem64(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm(PREF_REXW,opcode,rr,rb,rx,ss,disp32)
1.37 -#define x86_encode_rptr_memptr(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm( (sizeof(void *)==8) ? PREF_REXW : 0,opcode,rr,rb,rx,ss,disp32)
1.38 +#define x86_encode_rptr_memptr(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm(PREF_PTR,opcode,rr,rb,rx,ss,disp32)
1.39 #define x86_encode_r32_mem32disp32(opcode,rr,rb,disp32) x86_encode_modrm(0,opcode,rr,rb,-1,0,disp32)
1.40 #define x86_encode_r64_mem64disp64(opcode,rr,rb,disp32) x86_encode_modrm(PREF_REXW,opcode,rr,rb,-1,0,disp32)
1.41 #define x86_encode_r32_ripdisp32(opcode,rr,disp32) x86_encode_modrm_rip(0,opcode,rr,disp32)
1.43 #define x86_encode_imms_rm64(opcode8,opcode32,reg,imm,rb) \
1.44 if( IS_INT8(((int32_t)imm)) ) { x86_encode_r64_rm64(opcode8,reg,rb); OP((int8_t)imm); \
1.45 } else { x86_encode_r64_rm64(opcode32,reg,rb); OP32(imm); }
1.46 +#define x86_encode_imms_rmptr(opcode8,opcode32,reg,imm,rb) \
1.47 + if( IS_INT8(((int32_t)imm)) ) { x86_encode_reg_rm( PREF_PTR, opcode8,reg,rb); OP((int8_t)imm); \
1.48 + } else { x86_encode_reg_rm( PREF_PTR, opcode32,reg,rb); OP32(imm); }
1.49 #define x86_encode_imms_rbpdisp32(opcode8,opcode32,reg,imm,disp) \
1.50 if( IS_INT8(((int32_t)imm)) ) { x86_encode_r32_rbpdisp32(opcode8,reg,disp); OP((int8_t)imm); \
1.51 } else { x86_encode_r32_rbpdisp32(opcode32,reg,disp); OP32(imm); }
1.53 #define ANDL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x23, r1, disp)
1.54 #define ANDQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x21, r1, r2)
1.55 #define ANDQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 4, imm, r1)
1.56 +#define ANDP_imms_rptr(imm,r1) x86_encode_imms_rmptr(0x83, 0x81, 4, imm, r1)
1.58 #define CLC() OP(0xF8)
1.59 #define CLD() OP(0xFC)
1.61 #define MOVQ_r64_rspdisp(r1,disp) x86_encode_r64_rspdisp64(0x89, r1, disp)
1.62 #define MOVQ_rbpdisp_r64(disp,r1) x86_encode_r64_rbpdisp64(0x8B, r1, disp)
1.63 #define MOVQ_rspdisp_r64(disp,r1) x86_encode_r64_rspdisp64(0x8B, r1, disp)
1.64 -#define MOVP_immptr_rptr(p,r1) x86_encode_opcodereg( (sizeof(void*)==8 ? PREF_REXW : 0), 0xB8, r1); OPPTR(p)
1.65 +#define MOVP_immptr_rptr(p,r1) x86_encode_opcodereg( PREF_PTR, 0xB8, r1); OPPTR(p)
1.66 #define MOVP_moffptr_rax(p) if( sizeof(void*)==8 ) { OP(PREF_REXW); } OP(0xA1); OPPTR(p)
1.67 +#define MOVP_rptr_rptr(r1,r2) x86_encode_reg_rm(PREF_PTR, 0x89, r1, r2)
1.68 #define MOVP_sib_rptr(ss,ii,bb,d,r1) x86_encode_rptr_memptr(0x8B, r1, bb, ii, ss, d)
1.70 #define MOVSXL_r8_r32(r1,r2) x86_encode_r32_rm32(0x0FBE, r2, r1)
1.71 @@ -766,3 +778,12 @@
1.72 #define MOVSHDUP_xmm_xmm(r1,r2) OP(0xF3); x86_encode_r32_rm32(0x0F16, r2, r1)
1.73 #define MOVSLDUP_rbpdisp_xmm(dsp,r1) OP(0xF3); x86_encode_r32_rbpdisp32(0x0F12, r1, dsp)
1.74 #define MOVSLDUP_xmm_xmm(r1,r2) OP(0xF3); x86_encode_r32_rm32(0x0F12, r2, r1)
1.76 +/************************ Import calling conventions *************************/
1.77 +#if SIZEOF_VOID_P == 8
1.78 +#include "xlat/x86/amd64abi.h"
1.79 +#else /* 32-bit system */
1.80 +#include "xlat/x86/ia32abi.h"
1.83 +#endif /* !lxdream_x86op_H */