filename | src/xlat/x86/x86op.h |
changeset | 1191:12fdf3aafcd4 |
prev | 1186:2dc47c67bb93 |
author | nkeynes |
date | Fri Dec 02 18:18:04 2011 +1000 (12 years ago) |
permissions | -rw-r--r-- |
last change | SH4 shadow-mode tweaks - Fix exceptions generated by the translator to account for the excepting instruction(s) in the cycle counts. - Compare floating point regs bitwise rather than with FP comparisons (otherwise can fail due to nan != nan) - Dump the translated block when we abort with an inconsistency |
view | annotate | diff | log | raw |
1 /**
2 * $Id$
3 *
4 * x86/x86-64 Instruction generator
5 *
6 * Copyright (c) 2009 Nathan Keynes.
7 *
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
12 *
13 * This program is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
17 */
19 #ifndef lxdream_x86op_H
20 #define lxdream_x86op_H
22 #include <stdint.h>
23 #include <assert.h>
25 /******************************** Constants *****************************/
27 #define REG_NONE -1
29 /* 64-bit general-purpose regs */
30 #define REG_RAX 0
31 #define REG_RCX 1
32 #define REG_RDX 2
33 #define REG_RBX 3
34 #define REG_RSP 4
35 #define REG_RBP 5
36 #define REG_RSI 6
37 #define REG_RDI 7
38 #define REG_R8 8
39 #define REG_R9 9
40 #define REG_R10 10
41 #define REG_R11 11
42 #define REG_R12 12
43 #define REG_R13 13
44 #define REG_R14 14
45 #define REG_R15 15
47 /* 32-bit general-purpose regs */
48 #define REG_EAX 0
49 #define REG_ECX 1
50 #define REG_EDX 2
51 #define REG_EBX 3
52 #define REG_ESP 4
53 #define REG_EBP 5
54 #define REG_ESI 6
55 #define REG_EDI 7
56 #define REG_R8D 8
57 #define REG_R9D 9
58 #define REG_R10D 10
59 #define REG_R11D 11
60 #define REG_R12D 12
61 #define REG_R13D 13
62 #define REG_R14D 14
63 #define REG_R15D 15
65 /* 8-bit general-purpose regs (no-rex prefix) */
66 #define REG_AL 0
67 #define REG_CL 1
68 #define REG_DL 2
69 #define REG_BL 3
70 #define REG_AH 4
71 #define REG_CH 5
72 #define REG_DH 6
73 #define REG_BH 7
75 /* 8-bit general-purpose regs (rex-prefix) */
76 #define REG_SPL 4
77 #define REG_BPL 5
78 #define REG_SIL 6
79 #define REG_DIL 7
80 #define REG_R8L 8
81 #define REG_R9L 9
82 #define REG_R10L 10
83 #define REG_R11L 11
84 #define REG_R12L 12
85 #define REG_R13L 13
86 #define REG_R14L 14
87 #define REG_R15L 15
/* Condition flag variants - the low nibble added to the Jcc/SETcc/CMOVcc
 * base opcodes. Values follow the hardware condition-code encoding. */
#define X86_COND_O 0x00 /* OF=1 */
#define X86_COND_NO 0x01 /* OF=0 */
#define X86_COND_B 0x02 /* CF=1 */
#define X86_COND_C 0x02 /* CF=1 */
#define X86_COND_NAE 0x02 /* CF=1 */
#define X86_CONF_NAE X86_COND_NAE /* Deprecated alias: historical typo (CONF) kept for source compatibility */
#define X86_COND_AE 0x03 /* CF=0 */
#define X86_COND_NB 0x03 /* CF=0 */
#define X86_COND_NC 0x03 /* CF=0 */
#define X86_COND_E 0x04 /* ZF=1 */
#define X86_COND_Z 0x04 /* ZF=1 */
#define X86_COND_NE 0x05 /* ZF=0 */
#define X86_COND_NZ 0x05 /* ZF=0 */
#define X86_COND_BE 0x06 /* CF=1 || ZF=1 */
#define X86_COND_NA 0x06 /* CF=1 || ZF=1 */
#define X86_COND_A 0x07 /* CF=0 && ZF=0 */
#define X86_COND_NBE 0x07 /* CF=0 && ZF=0 */
#define X86_COND_S 0x08 /* SF=1 */
#define X86_COND_NS 0x09 /* SF=0 */
#define X86_COND_P 0x0A /* PF=1 */
#define X86_COND_PE 0x0A /* PF=1 */
#define X86_COND_NP 0x0B /* PF=0 */
#define X86_COND_PO 0x0B /* PF=0 */
#define X86_COND_L 0x0C /* SF!=OF */
#define X86_COND_NGE 0x0C /* SF!=OF */
#define X86_COND_GE 0x0D /* SF=OF */
#define X86_COND_NL 0x0D /* SF=OF */
#define X86_COND_LE 0x0E /* ZF=1 || SF!=OF */
#define X86_COND_NG 0x0E /* ZF=1 || SF!=OF */
#define X86_COND_G 0x0F /* ZF=0 && SF=OF */
#define X86_COND_NLE 0x0F /* ZF=0 && SF=OF */
/* SSE floating point comparison variants (imm8 operand for CMPPS and friends) */
#define SSE_CMP_EQ 0x00
#define SSE_CMP_LT 0x01
#define SSE_CMP_LE 0x02
#define SSE_CMP_UNORD 0x03
#define SSE_CMP_NE 0x04
#define SSE_CMP_NLT 0x05
#define SSE_CMP_NLE 0x06
#define SSE_CMP_ORD 0x07

/************************** Internal definitions ***************************/
/* REX prefix byte with individual extension bits set:
 * B extends r/m (or SIB base / opcode-reg), X extends the SIB index,
 * R extends the reg field, W selects 64-bit operand size. */
#define PREF_REXB 0x41
#define PREF_REXX 0x42
#define PREF_REXR 0x44
#define PREF_REXW 0x48

/* PREF_REXW if required for pointer operations, otherwise 0 */
#define PREF_PTR ((sizeof(void *) == 8) ? PREF_REXW : 0)

/* Current code-generation output position; every emitter below advances it.
 * Caller is responsible for ensuring sufficient buffer space. */
extern unsigned char *xlat_output;

/* Raw little-endian emitters. NOTE(review): OP16/OP32/OP64/OPPTR store
 * through a cast pointer, relying on unaligned stores being permitted
 * (true for x86 hosts, which is all this backend targets). */
#define OP(x) *xlat_output++ = (x)
#define OP16(x) *((uint16_t *)xlat_output) = (x); xlat_output+=2
#define OP32(x) *((uint32_t *)xlat_output) = (x); xlat_output+=4
#define OP64(x) *((uint64_t *)xlat_output) = (x); xlat_output+=8
#define OPPTR(x) *((void **)xlat_output) = ((void *)x); xlat_output+=(sizeof(void*))

/* Primary opcode emitter, eg OPCODE(0x0FBE) for MOVSX - emits 1-3 bytes
 * depending on the magnitude of the value */
#define OPCODE(x) if( (x) > 0xFFFF ) { OP((x)>>16); OP(((x)>>8)&0xFF); OP((x)&0xFF); } else if( (x) > 0xFF ) { OP((x)>>8); OP((x)&0xFF); } else { OP(x); }

/* Test if immediate value is representable as a signed 8-bit integer */
#define IS_INT8(imm) ((imm) >= INT8_MIN && (imm) <= INT8_MAX)
154 /**
155 * Encode opcode+reg with no mod/rm (eg MOV imm64, r32)
156 */
157 static inline void x86_encode_opcodereg( int rexw, uint32_t opcode, int reg )
158 {
159 int rex = rexw;
160 reg &= 0x0F;
161 if( reg >= 8 ) {
162 rex |= PREF_REXB;
163 reg -= 8;
164 }
165 if( rex != 0 ) {
166 OP(rex);
167 }
168 OPCODE(opcode + reg);
169 }
171 /**
172 * Encode opcode with mod/rm reg-reg operation.
173 * @param opcode primary instruction opcode
174 * @param rr reg field
175 * @param rb r/m field
176 */
177 static inline void x86_encode_reg_rm( int rexw, uint32_t opcode, int rr, int rb )
178 {
179 int rex = rexw;
180 rr &= 0x0F;
181 rb &= 0x0F;
182 if( rr >= 8 ) {
183 rex |= PREF_REXR;
184 rr -= 8;
185 }
186 if( rb >= 8 ) {
187 rex |= PREF_REXB;
188 rb -= 8;
189 }
190 if( rex != 0 ) {
191 OP(rex);
192 }
193 OPCODE(opcode);
194 OP(0xC0|(rr<<3)|rb);
195 }
197 /**
198 * Encode opcode + 32-bit mod/rm memory address. (RIP-relative not supported here)
199 * @param rexw REX.W prefix is required, otherwise 0
200 * @param rr Reg-field register (required).
201 * @param rb Base (unscaled) register, or -1 for no base register.
202 * @param rx Index (scaled) register, or -1 for no index register
203 * @param ss Scale shift (0..3) applied to index register (ignored if no index register)
204 * @param disp32 Signed displacement (0 for none)
205 */
206 static inline void FORCEINLINE x86_encode_modrm( int rexw, uint32_t opcode, int rr, int rb, int rx, int ss, int32_t disp32 )
207 {
208 /* Construct the rex prefix where necessary */
209 int rex = rexw;
210 rr &= 0x0F;
211 if( rr >= 8 ) {
212 rex |= PREF_REXR;
213 rr -= 8;
214 }
215 if( rb != -1 ) {
216 rb &= 0x0F;
217 if( rb >= 8 ) {
218 rex |= PREF_REXB;
219 rb -= 8;
220 }
221 }
222 if( rx != -1 ) {
223 rx &= 0x0F;
224 if( rx >= 8 ) {
225 rex |= PREF_REXX;
226 rx -= 8;
227 }
228 }
230 if( rex != 0 ) {
231 OP(rex);
232 }
233 OPCODE(opcode);
235 if( rx == -1 ) {
236 if( rb == -1 ) {
237 /* [disp32] displacement only - use SIB form for 64-bit mode safety */
238 OP(0x04|(rr<<3));
239 OP(0x25);
240 OP32(disp32);
241 } else if( rb == REG_ESP ) { /* [%esp + disp32] - SIB is mandatory for %esp/%r12 encodings */
242 if( disp32 == 0 ) {
243 OP(0x04|(rr<<3));
244 OP(0x24);
245 } else if( IS_INT8(disp32) ) {
246 OP(0x44|(rr<<3));
247 OP(0x24);
248 OP((int8_t)disp32);
249 } else {
250 OP(0x84|(rr<<3));
251 OP(0x24);
252 OP32(disp32);
253 }
254 } else {
255 if( disp32 == 0 && rb != REG_EBP ) { /* [%ebp] is encoded as [%ebp+0] */
256 OP((rr<<3)|rb);
257 } else if( IS_INT8(disp32) ) {
258 OP(0x40|(rr<<3)|rb);
259 OP((int8_t)disp32);
260 } else {
261 OP(0x80|(rr<<3)|rb);
262 OP32(disp32);
263 }
264 }
265 } else { /* We have a scaled index. Goody */
266 assert( ((rx != REG_ESP) || (rex&PREF_REXX)) && "Bug: attempt to index through %esp" ); /* Indexing by %esp is impossible */
267 if( rb == -1 ) { /* [disp32 + rx << ss] */
268 OP(0x04|(rr<<3));
269 OP(0x05|(ss<<6)|(rx<<3));
270 OP32(disp32);
271 } else if( disp32 == 0 && rb != REG_EBP ) { /* [rb + rx << ss]. (Again, %ebp needs to be %ebp+0) */
272 OP(0x04|(rr<<3));
273 OP((ss<<6)|(rx<<3)|rb);
274 } else if( IS_INT8(disp32) ) {
275 OP(0x44|(rr<<3));
276 OP((ss<<6)|(rx<<3)|rb);
277 OP((int8_t)disp32);
278 } else {
279 OP(0x84|(rr<<3));
280 OP((ss<<6)|(rx<<3)|rb);
281 OP32(disp32);
282 }
283 }
284 }
286 /**
287 * Encode opcode + RIP-relative mod/rm (64-bit mode only)
288 * @param rexw PREF_REXW or 0
289 * @param opcode primary instruction opcode
290 * @param rr mod/rm reg field
291 * @param disp32 RIP-relative displacement
292 */
293 static inline void x86_encode_modrm_rip(int rexw, uint32_t opcode, int rr, int32_t disp32)
294 {
295 int rex = rexw;
296 rr &= 0x0F;
297 if( rr >= 8 ) {
298 rex |= PREF_REXR;
299 rr -= 8;
300 }
301 if( rex != 0 ) {
302 OP(rex);
303 }
304 OPCODE(opcode);
305 OP(0x05|(rr<<3));
306 OP32(disp32);
307 }
/* 32/64-bit op emitters. 64-bit versions include a rex.w prefix. Note that any
 * other prefixes (mandatory or otherwise) need to be emitted prior to these
 * functions
 */
#define x86_encode_opcode64(opcode,reg) x86_encode_opcodereg(PREF_REXW, opcode,reg)
#define x86_encode_opcode32(opcode,reg) x86_encode_opcodereg(0,opcode,reg)
#define x86_encode_r32_rm32(opcode,rr,rb) x86_encode_reg_rm(0,opcode,rr,rb)
#define x86_encode_r64_rm64(opcode,rr,rb) x86_encode_reg_rm(PREF_REXW,opcode,rr,rb)
#define x86_encode_rptr_rmptr(opcode,rr,rb) x86_encode_reg_rm(PREF_PTR,opcode,rr,rb)
#define x86_encode_r32_mem32(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm(0,opcode,rr,rb,rx,ss,disp32)
#define x86_encode_r64_mem64(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm(PREF_REXW,opcode,rr,rb,rx,ss,disp32)
#define x86_encode_rptr_memptr(opcode,rr,rb,rx,ss,disp32) x86_encode_modrm(PREF_PTR,opcode,rr,rb,rx,ss,disp32)
#define x86_encode_r32_mem32disp32(opcode,rr,rb,disp32) x86_encode_modrm(0,opcode,rr,rb,-1,0,disp32)
#define x86_encode_r64_mem64disp64(opcode,rr,rb,disp32) x86_encode_modrm(PREF_REXW,opcode,rr,rb,-1,0,disp32)
#define x86_encode_rptr_memptrdisp(opcode,rr,rb,disp32) x86_encode_modrm(PREF_PTR,opcode,rr,rb,-1,0,disp32)
#define x86_encode_r32_ripdisp32(opcode,rr,disp32) x86_encode_modrm_rip(0,opcode,rr,disp32)
#define x86_encode_r64_ripdisp64(opcode,rr,disp32) x86_encode_modrm_rip(PREF_REXW,opcode,rr,disp32)

/* Convenience versions for the common rbp/rsp relative displacements */
#define x86_encode_r32_rbpdisp32(opcode,rr,disp32) x86_encode_modrm(0,opcode,rr,REG_RBP,-1,0,disp32)
#define x86_encode_r64_rbpdisp64(opcode,rr,disp32) x86_encode_modrm(PREF_REXW,opcode,rr,REG_RBP,-1,0,disp32)
#define x86_encode_r32_rspdisp32(opcode,rr,disp32) x86_encode_modrm(0,opcode,rr,REG_RSP,-1,0,disp32)
#define x86_encode_r64_rspdisp64(opcode,rr,disp32) x86_encode_modrm(PREF_REXW,opcode,rr,REG_RSP,-1,0,disp32)

/* Immediate-selection variants (for instructions with imm8s/imm32 variants).
 * NOTE(review): these expand to a bare if/else rather than a do{...}while(0)
 * block, so they are hazardous directly under an unbraced if/else at the call
 * site - existing callers appear to use them as plain statements. */
#define x86_encode_imms_rm32(opcode8,opcode32,reg,imm,rb) \
if( IS_INT8(((int32_t)imm)) ) { x86_encode_r32_rm32(opcode8,reg,rb); OP((int8_t)imm); \
} else { x86_encode_r32_rm32(opcode32,reg,rb); OP32(imm); }
#define x86_encode_imms_rm64(opcode8,opcode32,reg,imm,rb) \
if( IS_INT8(((int32_t)imm)) ) { x86_encode_r64_rm64(opcode8,reg,rb); OP((int8_t)imm); \
} else { x86_encode_r64_rm64(opcode32,reg,rb); OP32(imm); }
#define x86_encode_imms_rmptr(opcode8,opcode32,reg,imm,rb) \
if( IS_INT8(((int32_t)imm)) ) { x86_encode_reg_rm( PREF_PTR, opcode8,reg,rb); OP((int8_t)imm); \
} else { x86_encode_reg_rm( PREF_PTR, opcode32,reg,rb); OP32(imm); }
#define x86_encode_imms_rbpdisp32(opcode8,opcode32,reg,imm,disp) \
if( IS_INT8(((int32_t)imm)) ) { x86_encode_r32_rbpdisp32(opcode8,reg,disp); OP((int8_t)imm); \
} else { x86_encode_r32_rbpdisp32(opcode32,reg,disp); OP32(imm); }
#define x86_encode_imms_r32disp32(opcode8,opcode32,reg,imm,rb,disp) \
if( IS_INT8(((int32_t)imm)) ) { x86_encode_r32_mem32disp32(opcode8,reg,rb,disp); OP((int8_t)imm); \
} else { x86_encode_r32_mem32disp32(opcode32,reg,rb,disp); OP32(imm); }
#define x86_encode_imms_rbpdisp64(opcode8,opcode32,reg,imm,disp) \
if( IS_INT8(((int32_t)imm)) ) { x86_encode_r64_rbpdisp64(opcode8,reg,disp); OP((int8_t)imm); \
} else { x86_encode_r64_rbpdisp64(opcode32,reg,disp); OP32(imm); }
353 /*************************** Instruction definitions ***********************/
354 /* Note this does not try to be an exhaustive definition of the instruction -
355 * it generally only has the forms that we actually need here.
356 */
357 /* Core Integer instructions */
358 #define ADCB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 2, r1); OP(imm)
359 #define ADCB_r8_r8(r1,r2) x86_encode_r32_rm32(0x10, r1, r2)
360 #define ADCL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 2, imm, r1)
361 #define ADCL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83, 0x81, 2, imm, disp)
362 #define ADCL_r32_r32(r1,r2) x86_encode_r32_rm32(0x11, r1, r2)
363 #define ADCL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x11, r1, disp)
364 #define ADCL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x13, r1, disp)
365 #define ADCQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 2, imm, r1)
366 #define ADCQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x11, r1, r2)
368 #define ADDB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 0, r1); OP(imm)
369 #define ADDB_r8_r8(r1,r2) x86_encode_r32_rm32(0x00, r1, r2)
370 #define ADDL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 0, imm, r1)
371 #define ADDL_imms_r32disp(imm,rb,d) x86_encode_imms_r32disp32(0x83, 0x81, 0, imm, rb, d)
372 #define ADDL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83, 0x81, 0, imm, disp)
373 #define ADDL_r32_r32(r1,r2) x86_encode_r32_rm32(0x01, r1, r2)
374 #define ADDL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x01, r1, disp)
375 #define ADDL_r32_r32disp(r1,r2,dsp) x86_encode_r32_mem32disp32(0x01, r1, r2, dsp)
376 #define ADDL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x03, r1, disp)
377 #define ADDQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 0, imm, r1)
378 #define ADDQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x01, r1, r2)
380 #define ANDB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 4, r1); OP(imm)
381 #define ANDB_r8_r8(r1,r2) x86_encode_r32_rm32(0x20, r1, r2)
382 #define ANDL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 4, imm, r1)
383 #define ANDL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83,0x81,4,imm,disp)
384 #define ANDL_r32_r32(r1,r2) x86_encode_r32_rm32(0x21, r1, r2)
385 #define ANDL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x21, r1, disp)
386 #define ANDL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x23, r1, disp)
387 #define ANDQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x21, r1, r2)
388 #define ANDQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 4, imm, r1)
389 #define ANDP_imms_rptr(imm,r1) x86_encode_imms_rmptr(0x83, 0x81, 4, imm, r1)
391 #define CLC() OP(0xF8)
392 #define CLD() OP(0xFC)
393 #define CMC() OP(0xF5)
395 #define CMOVCCL_cc_r32_r32(cc,r1,r2) x86_encode_r32_rm32(0x0F40+(cc), r2, r1)
396 #define CMOVCCL_cc_rbpdisp_r32(cc,d,r1) x86_encode_r32_rbpdisp32(0x0F40+(cc), r1, d)
398 #define CMPB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 7, r1); OP(imm)
399 #define CMPB_imms_rbpdisp(imm,disp) x86_encode_r32_rbpdisp32(0x80, 7, disp); OP(imm)
400 #define CMPB_r8_r8(r1,r2) x86_encode_r32_rm32(0x38, r1, r2)
401 #define CMPL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 7, imm, r1)
402 #define CMPL_imms_r32disp(imm,rb,d) x86_encode_imms_r32disp32(0x83, 0x81, 7, imm, rb, d)
403 #define CMPL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83, 0x81, 7, imm, disp)
404 #define CMPL_r32_r32(r1,r2) x86_encode_r32_rm32(0x39, r1, r2)
405 #define CMPL_r32_r32disp(r1,r2,dsp) x86_encode_r32_mem32disp32(0x39, r1, r2, dsp)
406 #define CMPL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x39, r1, disp)
407 #define CMPL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x3B, r1, disp)
408 #define CMPQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 7, imm, r1)
409 #define CMPQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x39, r1, r2)
411 #define IDIVL_r32(r1) x86_encode_r32_rm32(0xF7, 7, r1)
412 #define IDIVL_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xF7, 7, disp)
413 #define IDIVQ_r64(r1) x86_encode_r64_rm64(0xF7, 7, r1)
415 #define IMULL_imms_r32(imm,r1) x86_encode_imms_rm32(0x6B,0x69, r1, imm, r1)
416 #define IMULL_r32(r1) x86_encode_r32_rm32(0xF7, 5, r1)
417 #define IMULL_r32_r32(r1,r2) x86_encode_r32_rm32(0x0FAF, r2, r1)
418 #define IMULL_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xF7, 5, disp)
419 #define IMULL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x0FAF, r1, disp)
420 #define IMULL_rspdisp(disp) x86_encode_r32_rspdisp32(0xF7, 5, disp)
421 #define IMULL_rspdisp_r32(disp,r1) x86_encode_r32_rspdisp32(0x0FAF, r1, disp)
422 #define IMULQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x6B,0x69, r1, imm, r1)
423 #define IMULQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x0FAF, r2, r1)
425 #define INC_r32(r1) x86_encode_r32_rm32(0xFF, 0, r1)
/* Load effective address */
#define LEAL_r32disp_r32(r1,disp,r2) x86_encode_r32_mem32(0x8D, r2, r1, -1, 0, disp)
#define LEAL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x8D, r1, disp)
#define LEAL_sib_r32(ss,ii,bb,d,r1) x86_encode_r32_mem32(0x8D, r1, bb, ii, ss, d)
#define LEAQ_r64disp_r64(r1,disp,r2) x86_encode_r64_mem64(0x8D, r2, r1, -1, 0, disp)
#define LEAQ_rbpdisp_r64(disp,r1) x86_encode_r64_rbpdisp64(0x8D, r1, disp)
#define LEAP_rptrdisp_rptr(r1,d,r2) x86_encode_rptr_memptr(0x8D, r2, r1, -1, 0, d)
#define LEAP_rbpdisp_rptr(disp,r1) x86_encode_rptr_memptr(0x8D, r1, REG_RBP, -1, 0, disp)
#define LEAP_sib_rptr(ss,ii,bb,d,r1) x86_encode_rptr_memptr(0x8D, r1, bb, ii, ss, d)

/* Moves - 0x88/0x89 store, 0x8B load, 0xB8+r load-immediate */
#define MOVB_r8_r8(r1,r2) x86_encode_r32_rm32(0x88, r1, r2)
#define MOVL_imm32_r32(i32,r1) x86_encode_opcode32(0xB8, r1); OP32(i32)
#define MOVL_imm32_rbpdisp(i,disp) x86_encode_r32_rbpdisp32(0xC7,0,disp); OP32(i)
#define MOVL_imm32_rspdisp(i,disp) x86_encode_r32_rspdisp32(0xC7,0,disp); OP32(i)
#define MOVL_moffptr_eax(p) OP(0xA1); OPPTR(p)
#define MOVL_r32_r32(r1,r2) x86_encode_r32_rm32(0x89, r1, r2)
#define MOVL_r32_r32disp(r1,r2,dsp) x86_encode_r32_mem32disp32(0x89, r1, r2, dsp)
#define MOVL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x89, r1, disp)
#define MOVL_r32_rspdisp(r1,disp) x86_encode_r32_rspdisp32(0x89, r1, disp)
#define MOVL_r32_sib(r1,ss,ii,bb,d) x86_encode_r32_mem32(0x89, r1, bb, ii, ss, d)
#define MOVL_r32disp_r32(r1,dsp,r2) x86_encode_r32_mem32disp32(0x8B, r2, r1, dsp)
#define MOVL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x8B, r1, disp)
#define MOVL_rspdisp_r32(disp,r1) x86_encode_r32_rspdisp32(0x8B, r1, disp)
#define MOVL_sib_r32(ss,ii,bb,d,r1) x86_encode_r32_mem32(0x8B, r1, bb, ii, ss, d)
#define MOVQ_imm64_r64(i64,r1) x86_encode_opcode64(0xB8, r1); OP64(i64)
#define MOVQ_moffptr_rax(p) OP(PREF_REXW); OP(0xA1); OPPTR(p)
#define MOVQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x89, r1, r2)
#define MOVQ_r64_rbpdisp(r1,disp) x86_encode_r64_rbpdisp64(0x89, r1, disp)
#define MOVQ_r64_rspdisp(r1,disp) x86_encode_r64_rspdisp64(0x89, r1, disp)
#define MOVQ_rbpdisp_r64(disp,r1) x86_encode_r64_rbpdisp64(0x8B, r1, disp)
#define MOVQ_rspdisp_r64(disp,r1) x86_encode_r64_rspdisp64(0x8B, r1, disp)
/* Pointer-width variants: select 32 or 64-bit form based on the host */
#define MOVP_immptr_rptr(p,r1) x86_encode_opcodereg( PREF_PTR, 0xB8, r1); OPPTR(p)
#define MOVP_moffptr_rax(p) if( sizeof(void*)==8 ) { OP(PREF_REXW); } OP(0xA1); OPPTR(p)
#define MOVP_rptr_rptr(r1,r2) x86_encode_reg_rm(PREF_PTR, 0x89, r1, r2)
#define MOVP_sib_rptr(ss,ii,bb,d,r1) x86_encode_rptr_memptr(0x8B, r1, bb, ii, ss, d)
#define MOVP_rptrdisp_rptr(r1,dsp,r2) x86_encode_rptr_memptrdisp(0x8B, r2, r1, dsp)

/* Sign-extending loads */
#define MOVSXL_r8_r32(r1,r2) x86_encode_r32_rm32(0x0FBE, r2, r1)
#define MOVSXL_r16_r32(r1,r2) x86_encode_r32_rm32(0x0FBF, r2, r1)
#define MOVSXL_rbpdisp8_r32(disp,r1) x86_encode_r32_rbpdisp32(0x0FBE, r1, disp)
#define MOVSXL_rbpdisp16_r32(dsp,r1) x86_encode_r32_rbpdisp32(0x0FBF, r1, dsp)
#define MOVSXQ_imm32_r64(i32,r1) x86_encode_r64_rm64(0xC7, 0, r1); OP32(i32) /* Technically a MOV */
#define MOVSXQ_r8_r64(r1,r2) x86_encode_r64_rm64(0x0FBE, r2, r1)
#define MOVSXQ_r16_r64(r1,r2) x86_encode_r64_rm64(0x0FBF, r2, r1)
#define MOVSXQ_r32_r64(r1,r2) x86_encode_r64_rm64(0x63, r2, r1)
#define MOVSXQ_rbpdisp32_r64(dsp,r1) x86_encode_r64_rbpdisp64(0x63, r1, dsp)

/* Zero-extending loads */
#define MOVZXL_r8_r32(r1,r2) x86_encode_r32_rm32(0x0FB6, r2, r1)
#define MOVZXL_r16_r32(r1,r2) x86_encode_r32_rm32(0x0FB7, r2, r1)
#define MOVZXL_rbpdisp8_r32(disp,r1) x86_encode_r32_rbpdisp32(0x0FB6, r1, disp)
#define MOVZXL_rbpdisp16_r32(dsp,r1) x86_encode_r32_rbpdisp32(0x0FB7, r1, dsp)

/* Unsigned multiply of eax by the operand, into edx:eax */
#define MULL_r32(r1) x86_encode_r32_rm32(0xF7, 4, r1)
#define MULL_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xF7,4,disp)
#define MULL_rspdisp(disp) x86_encode_r32_rspdisp32(0xF7,4,disp)
/* Negate (two's complement) and NOT (one's complement).
 * Fixed: NEGL_rbpdisp/NOTL_rbpdisp previously declared parameter r1 but
 * expanded using the undeclared name disp, and called the nonexistent
 * x86_encode_r32_rbspdisp32 (typo) - either use would fail to compile. */
#define NEGB_r8(r1) x86_encode_r32_rm32(0xF6, 3, r1)
#define NEGL_r32(r1) x86_encode_r32_rm32(0xF7, 3, r1)
#define NEGL_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xF7, 3, disp)
#define NEGQ_r64(r1) x86_encode_r64_rm64(0xF7, 3, r1)

#define NOP() OP(0x90)
#define NOP2() OP(0x66); OP(0x90)   /* two-byte NOP (66 90) for padding */

#define NOTB_r8(r1) x86_encode_r32_rm32(0xF6, 2, r1)
#define NOTL_r32(r1) x86_encode_r32_rm32(0xF7, 2, r1)
#define NOTL_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xF7, 2, disp)
#define NOTQ_r64(r1) x86_encode_r64_rm64(0xF7, 2, r1)
#define ORB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 1, r1); OP(imm)
#define ORB_r8_r8(r1,r2) x86_encode_r32_rm32(0x08, r1, r2)
#define ORL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 1, imm, r1)
#define ORL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83,0x81,1,imm,disp)
#define ORL_r32_r32(r1,r2) x86_encode_r32_rm32(0x09, r1, r2)
#define ORL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x09, r1, disp)
#define ORL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x0B, r1, disp)
#define ORQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 1, imm, r1)
#define ORQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x09, r1, r2)

#define POP_r32(r1) x86_encode_opcode32(0x58, r1)

#define PUSH_imm32(imm) OP(0x68); OP32(imm)
#define PUSH_r32(r1) x86_encode_opcode32(0x50, r1)

/* Rotates and shifts: 0xD1 = by-1 form, 0xC1 = by-imm8, 0xD3 = by %cl.
 * The /digit selects the operation (0=ROL 1=ROR 2=RCL 3=RCR 4=SHL 5=SHR 7=SAR). */
#define RCLL_cl_r32(r1) x86_encode_r32_rm32(0xD3,2,r1)
#define RCLL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,2,r1); } else { x86_encode_r32_rm32(0xC1,2,r1); OP(imm); }
#define RCLQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,2,r1)
#define RCLQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,2,r1); } else { x86_encode_r64_rm64(0xC1,2,r1); OP(imm); }
#define RCRL_cl_r32(r1) x86_encode_r32_rm32(0xD3,3,r1)
#define RCRL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,3,r1); } else { x86_encode_r32_rm32(0xC1,3,r1); OP(imm); }
#define RCRQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,3,r1)
#define RCRQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,3,r1); } else { x86_encode_r64_rm64(0xC1,3,r1); OP(imm); }
#define ROLL_cl_r32(r1) x86_encode_r32_rm32(0xD3,0,r1)
#define ROLL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,0,r1); } else { x86_encode_r32_rm32(0xC1,0,r1); OP(imm); }
#define ROLQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,0,r1)
#define ROLQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,0,r1); } else { x86_encode_r64_rm64(0xC1,0,r1); OP(imm); }
#define RORL_cl_r32(r1) x86_encode_r32_rm32(0xD3,1,r1)
#define RORL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,1,r1); } else { x86_encode_r32_rm32(0xC1,1,r1); OP(imm); }
#define RORQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,1,r1)
#define RORQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,1,r1); } else { x86_encode_r64_rm64(0xC1,1,r1); OP(imm); }

#define SARL_cl_r32(r1) x86_encode_r32_rm32(0xD3,7,r1)
#define SARL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,7,r1); } else { x86_encode_r32_rm32(0xC1,7,r1); OP(imm); }
#define SARQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,7,r1)
#define SARQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,7,r1); } else { x86_encode_r64_rm64(0xC1,7,r1); OP(imm); }
#define SHLL_cl_r32(r1) x86_encode_r32_rm32(0xD3,4,r1)
#define SHLL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,4,r1); } else { x86_encode_r32_rm32(0xC1,4,r1); OP(imm); }
#define SHLQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,4,r1)
#define SHLQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,4,r1); } else { x86_encode_r64_rm64(0xC1,4,r1); OP(imm); }
#define SHRL_cl_r32(r1) x86_encode_r32_rm32(0xD3,5,r1)
#define SHRL_imm_r32(imm,r1) if( imm == 1 ) { x86_encode_r32_rm32(0xD1,5,r1); } else { x86_encode_r32_rm32(0xC1,5,r1); OP(imm); }
#define SHRQ_cl_r64(r1) x86_encode_r64_rm64(0xD3,5,r1)
#define SHRQ_imm_r64(imm,r1) if( imm == 1 ) { x86_encode_r64_rm64(0xD1,5,r1); } else { x86_encode_r64_rm64(0xC1,5,r1); OP(imm); }
#define SBBB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 3, r1); OP(imm)
#define SBBB_r8_r8(r1,r2) x86_encode_r32_rm32(0x18, r1, r2)
#define SBBL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 3, imm, r1)
#define SBBL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83,0x81,3,imm,disp)
#define SBBL_r32_r32(r1,r2) x86_encode_r32_rm32(0x19, r1, r2)
#define SBBL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x19, r1, disp)
#define SBBL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x1B, r1, disp)
#define SBBQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 3, imm, r1)
#define SBBQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x19, r1, r2)

/* Set byte on condition - cc is one of the X86_COND_* values */
#define SETCCB_cc_r8(cc,r1) x86_encode_r32_rm32(0x0F90+(cc), 0, r1)
#define SETCCB_cc_rbpdisp(cc,disp) x86_encode_r32_rbpdisp32(0x0F90+(cc), 0, disp)

#define STC() OP(0xF9)
#define STD() OP(0xFD)

#define SUBB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 5, r1); OP(imm)
#define SUBB_r8_r8(r1,r2) x86_encode_r32_rm32(0x28, r1, r2)
#define SUBL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 5, imm, r1)
#define SUBL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83,0x81,5,imm,disp)
#define SUBL_r32_r32(r1,r2) x86_encode_r32_rm32(0x29, r1, r2)
#define SUBL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x29, r1, disp)
#define SUBL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x2B, r1, disp)
#define SUBQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 5, imm, r1)
#define SUBQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x29, r1, r2)

/* TEST always takes a full-width immediate (no imm8 form for 0xF7/0) */
#define TESTB_imms_r8(imm,r1) x86_encode_r32_rm32(0xF6, 0, r1); OP(imm)
#define TESTB_r8_r8(r1,r2) x86_encode_r32_rm32(0x84, r1, r2)
#define TESTL_imms_r32(imm,r1) x86_encode_r32_rm32(0xF7, 0, r1); OP32(imm)
#define TESTL_imms_rbpdisp(imm,dsp) x86_encode_r32_rbpdisp32(0xF7, 0, dsp); OP32(imm)
#define TESTL_r32_r32(r1,r2) x86_encode_r32_rm32(0x85, r1, r2)
#define TESTL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x85, r1, disp)
#define TESTL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x85, r1, disp) /* Same OP */
#define TESTQ_imms_r64(imm,r1) x86_encode_r64_rm64(0xF7, 0, r1); OP32(imm)
#define TESTQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x85, r1, r2)
#define TESTP_rptr_rptr(r1,r2) x86_encode_rptr_rmptr(0x85, r1, r2)

#define XCHGB_r8_r8(r1,r2) x86_encode_r32_rm32(0x86, r1, r2)
#define XCHGL_r32_r32(r1,r2) x86_encode_r32_rm32(0x87, r1, r2)
#define XCHGQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x87, r1, r2)

#define XORB_imms_r8(imm,r1) x86_encode_r32_rm32(0x80, 6, r1); OP(imm)
#define XORB_r8_r8(r1,r2) x86_encode_r32_rm32(0x30, r1, r2)
#define XORL_imms_r32(imm,r1) x86_encode_imms_rm32(0x83, 0x81, 6, imm, r1)
#define XORL_imms_rbpdisp(imm,disp) x86_encode_imms_rbpdisp32(0x83,0x81,6,imm,disp)
#define XORL_r32_r32(r1,r2) x86_encode_r32_rm32(0x31, r1, r2)
#define XORL_r32_rbpdisp(r1,disp) x86_encode_r32_rbpdisp32(0x31, r1, disp)
#define XORL_rbpdisp_r32(disp,r1) x86_encode_r32_rbpdisp32(0x33, r1, disp)
#define XORQ_imms_r64(imm,r1) x86_encode_imms_rm64(0x83, 0x81, 6, imm, r1)
#define XORQ_r64_r64(r1,r2) x86_encode_r64_rm64(0x31, r1, r2)
/* Control flow. The *_prerel forms take a displacement measured from the
 * START of the instruction and subtract the instruction length themselves. */
#define CALL_rel(rel) OP(0xE8); OP32(rel)
#define CALL_imm32(ptr) x86_encode_r32_mem32disp32(0xFF, 2, -1, ptr)
#define CALL_r32(r1) x86_encode_r32_rm32(0xFF, 2, r1)
#define CALL_r32disp(r1,disp) x86_encode_r32_mem32disp32(0xFF, 2, r1, disp)

#define JCC_cc_rel8(cc,rel) OP(0x70+(cc)); OP(rel)
#define JCC_cc_rel32(cc,rel) OP(0x0F); OP(0x80+(cc)); OP32(rel)
#define JCC_cc_rel(cc,rel) if( IS_INT8(rel) ) { JCC_cc_rel8(cc,(int8_t)rel); } else { JCC_cc_rel32(cc,rel); }
#define JCC_cc_prerel(cc,rel) if( IS_INT8(rel) ) { JCC_cc_rel8(cc,(int8_t)((rel)-2)); } else { JCC_cc_rel32(cc,((rel)-6)); }

#define JMP_rel8(rel) OP(0xEB); OP(rel)
#define JMP_rel32(rel) OP(0xE9); OP32(rel)
#define JMP_rel(rel) if( IS_INT8(rel) ) { JMP_rel8((int8_t)rel); } else { JMP_rel32(rel); }
/* NOTE(review): the 8-bit branch truncates before subtracting
 * (((int8_t)rel)-2 rather than (int8_t)((rel)-2)), unlike JCC_cc_prerel
 * above - looks inconsistent for rel values near the int8 boundary; confirm
 * against callers before changing. */
#define JMP_prerel(rel) if( IS_INT8(((int32_t)rel)-2) ) { JMP_rel8(((int8_t)rel)-2); } else { JMP_rel32(((int32_t)rel)-5); }
#define JMP_rptr(r1) x86_encode_r32_rm32(0xFF, 4, r1)
#define JMP_r32disp(r1,disp) x86_encode_r32_mem32disp32(0xFF, 4, r1, disp)
#define RET() OP(0xC3)
#define RET_imm(imm) OP(0xC2); OP16(imm)
/* x87 Floating point instructions (two-byte escape encodings) */
#define FABS_st0() OP(0xD9); OP(0xE1)
#define FADDP_st(st) OP(0xDE); OP(0xC0+(st))
#define FCHS_st0() OP(0xD9); OP(0xE0)
#define FCOMIP_st(st) OP(0xDF); OP(0xF0+(st))
#define FDIVP_st(st) OP(0xDE); OP(0xF8+(st))
#define FILD_r32disp(r32, disp) x86_encode_r32_mem32disp32(0xDB, 0, r32, disp)
#define FLD0_st0() OP(0xD9); OP(0xEE);
#define FLD1_st0() OP(0xD9); OP(0xE8);
#define FLDCW_r32disp(r32, disp) x86_encode_r32_mem32disp32(0xD9, 5, r32, disp)
#define FMULP_st(st) OP(0xDE); OP(0xC8+(st))
#define FNSTCW_r32disp(r32, disp) x86_encode_r32_mem32disp32(0xD9, 7, r32, disp)
/* Discard st(0): FFREE st(0) followed by FINCSTP */
#define FPOP_st() OP(0xDD); OP(0xC0); OP(0xD9); OP(0xF7)
#define FSUBP_st(st) OP(0xDE); OP(0xE8+(st))
#define FSQRT_st0() OP(0xD9); OP(0xFA)

/* Loads/stores via the %rbp-relative machine-state block */
#define FILD_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xDB, 0, disp)
#define FLDF_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xD9, 0, disp)
#define FLDD_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xDD, 0, disp)
#define FISTP_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xDB, 3, disp)
#define FSTPF_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xD9, 3, disp)
#define FSTPD_rbpdisp(disp) x86_encode_r32_rbpdisp32(0xDD, 3, disp)
/* SSE Packed floating point instructions.
 * *_rbpdisp_xmm forms take an rbp-relative memory operand, *_xmm_xmm forms are
 * register-to-register.  CMPPS emits its imm8 comparison code (cc); SHUFPS
 * leaves the imm8 shuffle selector to the caller to OP() afterwards.
 * Fixes: MULPS/RCPPS rbpdisp forms were missing the 0x0F escape byte in the
 * opcode (0xF59/0xF53 -> 0x0F59/0x0F53), and the UNPCKHPS/UNPCKLPS rbpdisp
 * forms declared the parameter as `dsp` while expanding `disp`, which could
 * not compile at any use site. */
#define ADDPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F58, r1, disp)
#define ADDPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F58, r2, r1)
#define ANDPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F54, r1, disp)
#define ANDPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F54, r2, r1)
#define ANDNPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F55, r1, disp)
#define ANDNPS_xmm_xmm(r1,r2)        x86_encode_r32_rm32(0x0F55, r2, r1)
#define CMPPS_cc_rbpdisp_xmm(cc,d,r) x86_encode_r32_rbpdisp32(0x0FC2, r, d); OP(cc)
#define CMPPS_cc_xmm_xmm(cc,r1,r2)   x86_encode_r32_rm32(0x0FC2, r2, r1); OP(cc)
#define DIVPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F5E, r1, disp)
#define DIVPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F5E, r2, r1)
#define MAXPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F5F, r1, disp)
#define MAXPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F5F, r2, r1)
#define MINPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F5D, r1, disp)
#define MINPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F5D, r2, r1)
#define MOV_xmm_xmm(r1,r2)           x86_encode_r32_rm32(0x0F28, r2, r1) /* MOVAPS reg,reg */
#define MOVAPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F28, r1, disp)
#define MOVAPS_xmm_rbpdisp(r1,disp)  x86_encode_r32_rbpdisp32(0x0F29, r1, disp)
#define MOVHLPS_xmm_xmm(r1,r2)       x86_encode_r32_rm32(0x0F12, r2, r1)
#define MOVHPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F16, r1, disp)
#define MOVHPS_xmm_rbpdisp(r1,disp)  x86_encode_r32_rbpdisp32(0x0F17, r1, disp)
#define MOVLHPS_xmm_xmm(r1,r2)       x86_encode_r32_rm32(0x0F16, r2, r1)
#define MOVLPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F12, r1, disp)
#define MOVLPS_xmm_rbpdisp(r1,disp)  x86_encode_r32_rbpdisp32(0x0F13, r1, disp)
#define MOVUPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F10, r1, disp)
/* NOTE(review): parameter order (disp,r1) below is reversed relative to every
 * sibling *_xmm_rbpdisp macro, which takes (r1,disp).  Left unchanged to avoid
 * breaking existing callers - confirm intent before reordering. */
#define MOVUPS_xmm_rbpdisp(disp,r1)  x86_encode_r32_rbpdisp32(0x0F11, r1, disp)
#define MULPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F59, r2, r1)
#define MULPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F59, r1, disp) /* was 0xF59 */
#define ORPS_rbpdisp_xmm(disp,r1)    x86_encode_r32_rbpdisp32(0x0F56, r1, disp)
#define ORPS_xmm_xmm(r1,r2)          x86_encode_r32_rm32(0x0F56, r2, r1)
#define RCPPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F53, r1, disp) /* was 0xF53 */
#define RCPPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F53, r2, r1)
#define RSQRTPS_rbpdisp_xmm(disp,r1) x86_encode_r32_rbpdisp32(0x0F52, r1, disp)
#define RSQRTPS_xmm_xmm(r1,r2)       x86_encode_r32_rm32(0x0F52, r2, r1)
#define SHUFPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0FC6, r1, disp) /* caller emits imm8 */
#define SHUFPS_xmm_xmm(r1,r2)        x86_encode_r32_rm32(0x0FC6, r2, r1)        /* caller emits imm8 */
#define SQRTPS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F51, r1, disp)
#define SQRTPS_xmm_xmm(r1,r2)        x86_encode_r32_rm32(0x0F51, r2, r1)
#define SUBPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F5C, r1, disp)
#define SUBPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F5C, r2, r1)
#define UNPCKHPS_rbpdisp_xmm(disp,r1) x86_encode_r32_rbpdisp32(0x0F15, r1, disp) /* param was dsp */
#define UNPCKHPS_xmm_xmm(r1,r2)      x86_encode_r32_rm32(0x0F15, r2, r1)
#define UNPCKLPS_rbpdisp_xmm(disp,r1) x86_encode_r32_rbpdisp32(0x0F14, r1, disp) /* param was dsp */
#define UNPCKLPS_xmm_xmm(r1,r2)      x86_encode_r32_rm32(0x0F14, r2, r1)
#define XORPS_rbpdisp_xmm(disp,r1)   x86_encode_r32_rbpdisp32(0x0F57, r1, disp)
#define XORPS_xmm_xmm(r1,r2)         x86_encode_r32_rm32(0x0F57, r2, r1)
/* SSE Scalar floating point instructions.  All take the 0xF3 (scalar single)
 * prefix except COMISS/UCOMISS, which are unprefixed.
 * Fixes: MULSS/RCPSS rbpdisp forms were missing the 0x0F escape byte in the
 * opcode (0xF59/0xF53 -> 0x0F59/0x0F53). */
#define ADDSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F58, r1, disp)
#define ADDSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F58, r2, r1)
#define CMPSS_cc_rbpdisp_xmm(cc,d,r) OP(0xF3); x86_encode_r32_rbpdisp32(0x0FC2, r, d); OP(cc)
#define CMPSS_cc_xmm_xmm(cc,r1,r2)   OP(0xF3); x86_encode_r32_rm32(0x0FC2, r2, r1); OP(cc)
#define COMISS_rbpdisp_xmm(disp,r1)  x86_encode_r32_rbpdisp32(0x0F2F, r1, disp)
#define COMISS_xmm_xmm(r1,r2)        x86_encode_r32_rm32(0x0F2F, r2, r1)
#define DIVSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F5E, r1, disp)
#define DIVSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F5E, r2, r1)
#define MAXSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F5F, r1, disp)
#define MAXSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F5F, r2, r1)
#define MINSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F5D, r1, disp)
#define MINSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F5D, r2, r1)
#define MOVSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F10, r1, disp)
#define MOVSS_xmm_rbpdisp(r1,disp)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F11, r1, disp)
#define MOVSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F10, r2, r1)
#define MULSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F59, r1, disp) /* was 0xF59 */
#define MULSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F59, r2, r1)
#define RCPSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F53, r1, disp) /* was 0xF53 */
#define RCPSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F53, r2, r1)
#define RSQRTSS_rbpdisp_xmm(disp,r1) OP(0xF3); x86_encode_r32_rbpdisp32(0x0F52, r1, disp)
#define RSQRTSS_xmm_xmm(r1,r2)       OP(0xF3); x86_encode_r32_rm32(0x0F52, r2, r1)
#define SQRTSS_rbpdisp_xmm(disp,r1)  OP(0xF3); x86_encode_r32_rbpdisp32(0x0F51, r1, disp)
#define SQRTSS_xmm_xmm(r1,r2)        OP(0xF3); x86_encode_r32_rm32(0x0F51, r2, r1)
#define SUBSS_rbpdisp_xmm(disp,r1)   OP(0xF3); x86_encode_r32_rbpdisp32(0x0F5C, r1, disp)
#define SUBSS_xmm_xmm(r1,r2)         OP(0xF3); x86_encode_r32_rm32(0x0F5C, r2, r1)
#define UCOMISS_rbpdisp_xmm(dsp,r1)  x86_encode_r32_rbpdisp32(0x0F2E, r1, dsp)
#define UCOMISS_xmm_xmm(r1,r2)       x86_encode_r32_rm32(0x0F2E, r2, r1)
/* SSE2 Packed (double-precision) floating point instructions - 0x66 prefix,
 * except CVTPS2PD which is unprefixed.
 * Fixes: CVTPD2PS/CVTPS2PD/UNPCKHPD/UNPCKLPD rbpdisp forms declared the
 * parameter as `dsp` while expanding `disp` (could not compile at any use
 * site); MULPD rbpdisp form was missing the 0x0F escape byte (0xF59 ->
 * 0x0F59). */
#define ADDPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F58, r1, disp)
#define ADDPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F58, r2, r1)
#define ANDPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F54, r1, disp)
#define ANDPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F54, r2, r1)
#define ANDNPD_rbpdisp_xmm(disp,r1)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F55, r1, disp)
#define ANDNPD_xmm_xmm(r1,r2)        OP(0x66); x86_encode_r32_rm32(0x0F55, r2, r1)
#define CMPPD_cc_rbpdisp_xmm(cc,d,r) OP(0x66); x86_encode_r32_rbpdisp32(0x0FC2, r, d); OP(cc)
#define CMPPD_cc_xmm_xmm(cc,r1,r2)   OP(0x66); x86_encode_r32_rm32(0x0FC2, r2, r1); OP(cc)
#define CVTPD2PS_rbpdisp_xmm(disp,r1) OP(0x66); x86_encode_r32_rbpdisp32(0x0F5A, r1, disp) /* param was dsp */
#define CVTPD2PS_xmm_xmm(r1,r2)      OP(0x66); x86_encode_r32_rm32(0x0F5A, r2, r1)
#define CVTPS2PD_rbpdisp_xmm(disp,r1) x86_encode_r32_rbpdisp32(0x0F5A, r1, disp)           /* param was dsp */
#define CVTPS2PD_xmm_xmm(r1,r2)      x86_encode_r32_rm32(0x0F5A, r2, r1)
#define DIVPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F5E, r1, disp)
#define DIVPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F5E, r2, r1)
#define MAXPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F5F, r1, disp)
#define MAXPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F5F, r2, r1)
#define MINPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F5D, r1, disp)
#define MINPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F5D, r2, r1)
#define MOVHPD_rbpdisp_xmm(disp,r1)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F16, r1, disp)
#define MOVHPD_xmm_rbpdisp(r1,disp)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F17, r1, disp)
#define MOVLPD_rbpdisp_xmm(disp,r1)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F12, r1, disp)
#define MOVLPD_xmm_rbpdisp(r1,disp)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F13, r1, disp)
#define MULPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F59, r1, disp)  /* was 0xF59 */
#define MULPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F59, r2, r1)
#define ORPD_rbpdisp_xmm(disp,r1)    OP(0x66); x86_encode_r32_rbpdisp32(0x0F56, r1, disp)
#define ORPD_xmm_xmm(r1,r2)          OP(0x66); x86_encode_r32_rm32(0x0F56, r2, r1)
#define SHUFPD_rbpdisp_xmm(disp,r1)  OP(0x66); x86_encode_r32_rbpdisp32(0x0FC6, r1, disp)  /* caller emits imm8 */
#define SHUFPD_xmm_xmm(r1,r2)        OP(0x66); x86_encode_r32_rm32(0x0FC6, r2, r1)         /* caller emits imm8 */
#define SUBPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F5C, r1, disp)
#define SUBPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F5C, r2, r1)
#define UNPCKHPD_rbpdisp_xmm(disp,r1) OP(0x66); x86_encode_r32_rbpdisp32(0x0F15, r1, disp) /* param was dsp */
#define UNPCKHPD_xmm_xmm(r1,r2)      OP(0x66); x86_encode_r32_rm32(0x0F15, r2, r1)
#define UNPCKLPD_rbpdisp_xmm(disp,r1) OP(0x66); x86_encode_r32_rbpdisp32(0x0F14, r1, disp) /* param was dsp */
#define UNPCKLPD_xmm_xmm(r1,r2)      OP(0x66); x86_encode_r32_rm32(0x0F14, r2, r1)
#define XORPD_rbpdisp_xmm(disp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F57, r1, disp)
#define XORPD_xmm_xmm(r1,r2)         OP(0x66); x86_encode_r32_rm32(0x0F57, r2, r1)
/* SSE2 Scalar (double-precision) floating point instructions - 0xF2 prefix,
 * except COMISD/UCOMISD which take 0x66.
 * Fix: MULSD rbpdisp form was missing the 0x0F escape byte in the opcode
 * (0xF59 -> 0x0F59). */
#define ADDSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F58, r1, disp)
#define ADDSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F58, r2, r1)
#define CMPSD_cc_rbpdisp_xmm(cc,d,r) OP(0xF2); x86_encode_r32_rbpdisp32(0x0FC2, r, d); OP(cc)
#define CMPSD_cc_xmm_xmm(cc,r1,r2)   OP(0xF2); x86_encode_r32_rm32(0x0FC2, r2, r1); OP(cc)
#define COMISD_rbpdisp_xmm(disp,r1)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F2F, r1, disp)
#define COMISD_xmm_xmm(r1,r2)        OP(0x66); x86_encode_r32_rm32(0x0F2F, r2, r1)
#define DIVSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F5E, r1, disp)
#define DIVSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F5E, r2, r1)
#define MAXSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F5F, r1, disp)
#define MAXSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F5F, r2, r1)
#define MINSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F5D, r1, disp)
#define MINSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F5D, r2, r1)
#define MOVSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F10, r1, disp)
#define MOVSD_xmm_rbpdisp(r1,disp)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F11, r1, disp)
#define MOVSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F10, r2, r1)
#define MULSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F59, r1, disp) /* was 0xF59 */
#define MULSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F59, r2, r1)
#define SQRTSD_rbpdisp_xmm(disp,r1)  OP(0xF2); x86_encode_r32_rbpdisp32(0x0F51, r1, disp)
#define SQRTSD_xmm_xmm(r1,r2)        OP(0xF2); x86_encode_r32_rm32(0x0F51, r2, r1)
#define SUBSD_rbpdisp_xmm(disp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F5C, r1, disp)
#define SUBSD_xmm_xmm(r1,r2)         OP(0xF2); x86_encode_r32_rm32(0x0F5C, r2, r1)
#define UCOMISD_rbpdisp_xmm(dsp,r1)  OP(0x66); x86_encode_r32_rbpdisp32(0x0F2E, r1, dsp)
#define UCOMISD_xmm_xmm(r1,r2)       OP(0x66); x86_encode_r32_rm32(0x0F2E, r2, r1)
/* SSE3 floating point instructions */
/* 0x66 prefix selects the packed-double form and 0xF2 the packed-single form
 * of ADDSUB/HADD/HSUB; MOVSHDUP/MOVSLDUP take the 0xF3 prefix. */
#define ADDSUBPD_rbpdisp_xmm(dsp,r1) OP(0x66); x86_encode_r32_rbpdisp32(0x0FD0, r1, dsp)
#define ADDSUBPD_xmm_xmm(r1,r2)      OP(0x66); x86_encode_r32_rm32(0x0FD0, r2, r1)
#define ADDSUBPS_rbpdisp_xmm(dsp,r1) OP(0xF2); x86_encode_r32_rbpdisp32(0x0FD0, r1, dsp)
#define ADDSUBPS_xmm_xmm(r1,r2)      OP(0xF2); x86_encode_r32_rm32(0x0FD0, r2, r1)
#define HADDPD_rbpdisp_xmm(dsp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F7C, r1, dsp)
#define HADDPD_xmm_xmm(r1,r2)        OP(0x66); x86_encode_r32_rm32(0x0F7C, r2, r1)
#define HADDPS_rbpdisp_xmm(dsp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F7C, r1, dsp)
#define HADDPS_xmm_xmm(r1,r2)        OP(0xF2); x86_encode_r32_rm32(0x0F7C, r2, r1)
#define HSUBPD_rbpdisp_xmm(dsp,r1)   OP(0x66); x86_encode_r32_rbpdisp32(0x0F7D, r1, dsp)
#define HSUBPD_xmm_xmm(r1,r2)        OP(0x66); x86_encode_r32_rm32(0x0F7D, r2, r1)
#define HSUBPS_rbpdisp_xmm(dsp,r1)   OP(0xF2); x86_encode_r32_rbpdisp32(0x0F7D, r1, dsp)
#define HSUBPS_xmm_xmm(r1,r2)        OP(0xF2); x86_encode_r32_rm32(0x0F7D, r2, r1)
#define MOVSHDUP_rbpdisp_xmm(dsp,r1) OP(0xF3); x86_encode_r32_rbpdisp32(0x0F16, r1, dsp)
#define MOVSHDUP_xmm_xmm(r1,r2)      OP(0xF3); x86_encode_r32_rm32(0x0F16, r2, r1)
#define MOVSLDUP_rbpdisp_xmm(dsp,r1) OP(0xF3); x86_encode_r32_rbpdisp32(0x0F12, r1, dsp)
#define MOVSLDUP_xmm_xmm(r1,r2)      OP(0xF3); x86_encode_r32_rm32(0x0F12, r2, r1)
/************************ Import calling conventions *************************/
/* NOTE(review): SIZEOF_VOID_P is presumably defined by the build
 * configuration - confirm it is always set, otherwise the 32-bit branch is
 * silently selected. */
#if SIZEOF_VOID_P == 8
#include "xlat/x86/amd64abi.h"
#else /* 32-bit system */
#include "xlat/x86/ia32abi.h"
#endif
801 #endif /* !lxdream_x86op_H */
.