ViewVC Help
View File | Revision Log | Show Annotations | Revision Graph | Root Listing
root/cebix/BasiliskII/src/uae_cpu/compiler/codegen_x86.h
Revision: 1.23
Committed: 2006-07-23T10:20:23Z (17 years, 11 months ago) by gbeauche
Content type: text/plain
Branch: MAIN
CVS Tags: nigel-build-19
Changes since 1.22: +11 -11 lines
Log Message:
icc9.1 & gcc4.1 warning fixes

File Contents

# Content
1 /******************** -*- mode: C; tab-width: 8 -*- ********************
2 *
3 * Run-time assembler for IA-32 and AMD64
4 *
5 ***********************************************************************/
6
7
8 /***********************************************************************
9 *
10 * This file is derived from CCG.
11 *
12 * Copyright 1999, 2000, 2001, 2002, 2003 Ian Piumarta
13 *
14 * Adaptations and enhancements for AMD64 support, Copyright 2003-2006
15 * Gwenole Beauchesne
16 *
17 * Basilisk II (C) 1997-2006 Christian Bauer
18 *
19 * This program is free software; you can redistribute it and/or modify
20 * it under the terms of the GNU General Public License as published by
21 * the Free Software Foundation; either version 2 of the License, or
22 * (at your option) any later version.
23 *
24 * This program is distributed in the hope that it will be useful,
25 * but WITHOUT ANY WARRANTY; without even the implied warranty of
26 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27 * GNU General Public License for more details.
28 *
29 * You should have received a copy of the GNU General Public License
30 * along with this program; if not, write to the Free Software
31 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 *
33 ***********************************************************************/
34
#ifndef X86_RTASM_H
#define X86_RTASM_H

/* NOTES
 *
 * o Best viewed on a 1024x768 screen with fixed-6x10 font ;-)
 *
 * TODO
 *
 * o Fix FIXMEs
 * o i387 FPU instructions
 * o SSE instructions
 * o Optimize for cases where register numbers are not integral constants
 */

/* --- Configuration ------------------------------------------------------- */

/* Define to settle a "flat" register set, i.e. different regno for
   each size variant of the same physical register.  The class nibble
   this encodes enables operand-size checking under _ASM_SAFETY. */
#ifndef X86_FLAT_REGISTERS
#define X86_FLAT_REGISTERS 1
#endif

/* Define to generate x86-64 code (REX prefixes, 64-bit operand sizes). */
#ifndef X86_TARGET_64BIT
#define X86_TARGET_64BIT 0
#endif

/* Define to optimize ALU instructions, e.g. select the short
   accumulator-immediate opcode forms when the destination is AL/AX/EAX/RAX. */
#ifndef X86_OPTIMIZE_ALU
#define X86_OPTIMIZE_ALU 1
#endif

/* Define to optimize rotate/shift instructions. */
#ifndef X86_OPTIMIZE_ROTSHI
#define X86_OPTIMIZE_ROTSHI 1
#endif

/* Define to optimize absolute addresses for RIP relative addressing
   (x86-64 only; taken when the target fits the 32-bit displacement window). */
#ifndef X86_RIP_RELATIVE_ADDR
#define X86_RIP_RELATIVE_ADDR 1
#endif
77
78
79 /* --- Macros -------------------------------------------------------------- */
80
/* Functions used to emit code.  These must be supplied by the including
 * translation unit before this header is used:
 *
 * x86_emit_byte(B)
 * x86_emit_word(W)
 * x86_emit_long(L)
 */

/* Get pointer to current code
 *
 * x86_get_target()
 */

/* Abort assembler, fatal failure.
 *
 * x86_emit_failure(MSG)
 */

/* Wrapper around x86_emit_failure() that also yields the value 0, so it
   can be used as the failing arm of conditional expressions inside the
   encoding macros below (which must evaluate to an integer). */
#define x86_emit_failure0(MSG) (x86_emit_failure(MSG),0)
99
100
101 /* --- Register set -------------------------------------------------------- */
102
103 enum {
104 X86_RIP = -2,
105 #if X86_FLAT_REGISTERS
106 X86_NOREG = 0,
107 X86_Reg8L_Base = 0x10,
108 X86_Reg8H_Base = 0x20,
109 X86_Reg16_Base = 0x30,
110 X86_Reg32_Base = 0x40,
111 X86_Reg64_Base = 0x50,
112 X86_RegMMX_Base = 0x60,
113 X86_RegXMM_Base = 0x70,
114 #else
115 X86_NOREG = -1,
116 X86_Reg8L_Base = 0,
117 X86_Reg8H_Base = 16,
118 X86_Reg16_Base = 0,
119 X86_Reg32_Base = 0,
120 X86_Reg64_Base = 0,
121 X86_RegMMX_Base = 0,
122 X86_RegXMM_Base = 0,
123 #endif
124 };
125
126 enum {
127 X86_AL = X86_Reg8L_Base,
128 X86_CL, X86_DL, X86_BL,
129 X86_SPL, X86_BPL, X86_SIL, X86_DIL,
130 X86_R8B, X86_R9B, X86_R10B, X86_R11B,
131 X86_R12B, X86_R13B, X86_R14B, X86_R15B,
132 X86_AH = X86_Reg8H_Base + 4,
133 X86_CH, X86_DH, X86_BH
134 };
135
136 enum {
137 X86_AX = X86_Reg16_Base,
138 X86_CX, X86_DX, X86_BX,
139 X86_SP, X86_BP, X86_SI, X86_DI,
140 X86_R8W, X86_R9W, X86_R10W, X86_R11W,
141 X86_R12W, X86_R13W, X86_R14W, X86_R15W
142 };
143
144 enum {
145 X86_EAX = X86_Reg32_Base,
146 X86_ECX, X86_EDX, X86_EBX,
147 X86_ESP, X86_EBP, X86_ESI, X86_EDI,
148 X86_R8D, X86_R9D, X86_R10D, X86_R11D,
149 X86_R12D, X86_R13D, X86_R14D, X86_R15D
150 };
151
152 enum {
153 X86_RAX = X86_Reg64_Base,
154 X86_RCX, X86_RDX, X86_RBX,
155 X86_RSP, X86_RBP, X86_RSI, X86_RDI,
156 X86_R8, X86_R9, X86_R10, X86_R11,
157 X86_R12, X86_R13, X86_R14, X86_R15
158 };
159
160 enum {
161 X86_MM0 = X86_RegMMX_Base,
162 X86_MM1, X86_MM2, X86_MM3,
163 X86_MM4, X86_MM5, X86_MM6, X86_MM7,
164 };
165
166 enum {
167 X86_XMM0 = X86_RegXMM_Base,
168 X86_XMM1, X86_XMM2, X86_XMM3,
169 X86_XMM4, X86_XMM5, X86_XMM6, X86_XMM7,
170 X86_XMM8, X86_XMM9, X86_XMM10, X86_XMM11,
171 X86_XMM12, X86_XMM13, X86_XMM14, X86_XMM15
172 };
173
174 /* Register control and access
175 *
176 * _r0P(R) Null register?
177 * _rIP(R) RIP register?
178 * _rXP(R) Extended register?
179 *
180 * _rC(R) Class of register (only valid if X86_FLAT_REGISTERS)
181 * _rR(R) Full register number
182 * _rN(R) Short register number for encoding
183 *
184 * _r1(R) 8-bit register ID
185 * _r2(R) 16-bit register ID
186 * _r4(R) 32-bit register ID
187 * _r8(R) 64-bit register ID
188 * _rM(R) MMX register ID
189 * _rX(R) XMM register ID
190 * _rA(R) Address register ID used for EA calculation
191 */
192
193 #define _r0P(R) ((int)(R) == (int)X86_NOREG)
194 #define _rIP(R) (X86_TARGET_64BIT ? ((int)(R) == (int)X86_RIP) : 0)
195
196 #if X86_FLAT_REGISTERS
197 #define _rC(R) ((R) & 0xf0)
198 #define _rR(R) ((R) & 0x0f)
199 #define _rN(R) ((R) & 0x07)
200 #define _rXP(R) ((R) > 0 && _rR(R) > 7)
201 #else
202 #define _rN(R) ((R) & 0x07)
203 #define _rR(R) (int(R))
204 #define _rXP(R) (_rR(R) > 7 && _rR(R) < 16)
205 #endif
206
207 #if !defined(_ASM_SAFETY) || ! X86_FLAT_REGISTERS
208 #define _r1(R) _rN(R)
209 #define _r2(R) _rN(R)
210 #define _r4(R) _rN(R)
211 #define _r8(R) _rN(R)
212 #define _rA(R) _rN(R)
213 #define _rM(R) _rN(R)
214 #define _rX(R) _rN(R)
215 #else
216 #define _r1(R) ( ((_rC(R) & (X86_Reg8L_Base | X86_Reg8H_Base)) != 0) ? _rN(R) : x86_emit_failure0( "8-bit register required"))
217 #define _r2(R) ( (_rC(R) == X86_Reg16_Base) ? _rN(R) : x86_emit_failure0("16-bit register required"))
218 #define _r4(R) ( (_rC(R) == X86_Reg32_Base) ? _rN(R) : x86_emit_failure0("32-bit register required"))
219 #define _r8(R) ( (_rC(R) == X86_Reg64_Base) ? _rN(R) : x86_emit_failure0("64-bit register required"))
220 #define _rA(R) ( X86_TARGET_64BIT ? \
221 ( (_rC(R) == X86_Reg64_Base) ? _rN(R) : x86_emit_failure0("not a valid 64-bit base/index expression")) : \
222 ( (_rC(R) == X86_Reg32_Base) ? _rN(R) : x86_emit_failure0("not a valid 32-bit base/index expression")) )
223 #define _rM(R) ( (_rC(R) == X86_RegMMX_Base) ? _rN(R) : x86_emit_failure0("MMX register required"))
224 #define _rX(R) ( (_rC(R) == X86_RegXMM_Base) ? _rN(R) : x86_emit_failure0("SSE register required"))
225 #endif
226
227 #define _rSP() (X86_TARGET_64BIT ? (int)X86_RSP : (int)X86_ESP)
228 #define _r1e8lP(R) (int(R) >= X86_SPL && int(R) <= X86_DIL)
229 #define _rbpP(R) (_rR(R) == _rR(X86_RBP))
230 #define _rspP(R) (_rR(R) == _rR(X86_RSP))
231 #define _rbp13P(R) (_rN(R) == _rN(X86_RBP))
232 #define _rsp12P(R) (_rN(R) == _rN(X86_RSP))
233
234
235 /* ========================================================================= */
236 /* --- UTILITY ------------------------------------------------------------- */
237 /* ========================================================================= */
238
/* Short fixed-size integer type names used throughout the encoders. */
typedef signed char	_sc;
typedef unsigned char	_uc;
typedef signed short	_ss;
typedef unsigned short	_us;
typedef signed int	_sl;
typedef unsigned int	_ul;

/* Truncating casts to the various operand sizes; values go through
   unsigned long so that pointers can be passed as well. */
#define _UC(X) ((_uc )(unsigned long)(X))
#define _US(X) ((_us )(unsigned long)(X))
#define _SL(X) ((_sl )(unsigned long)(X))
#define _UL(X) ((_ul )(unsigned long)(X))

/* Reinterpret X as a pointer to the given element width. */
#define _PUC(X) ((_uc *)(X))
#define _PUS(X) ((_us *)(X))
#define _PSL(X) ((_sl *)(X))
#define _PUL(X) ((_ul *)(X))

/* Emit one byte/word/long/quad of code. */
#define _B(B) x86_emit_byte((B))
#define _W(W) x86_emit_word((W))
#define _L(L) x86_emit_long((L))
#define _Q(Q) x86_emit_quad((Q))

/* Bit mask with the low N bits set (1 <= N <= 31).  Shift an *unsigned*
   1: the previous form shifted a signed 1, and 1<<31 (reached via
   _s31/_u31) left-shifts into the sign bit, which is undefined
   behavior in C (C11 6.5.7). */
#define _MASK(N) ((((unsigned)1)<<(N))-1)
/* Does I fit a signed two's-complement N-bit field? */
#define _siP(N,I) (!((((unsigned)(I))^(((unsigned)(I))<<1))&~_MASK(N)))
/* Does I fit an unsigned N-bit field? */
#define _uiP(N,I) (!(((unsigned)(I))&~_MASK(N)))
/* Does I fit either a signed or an unsigned N-bit field?
   (both operands are already 0/1, so bitwise | equals logical ||) */
#define _suiP(N,I) (_siP(N,I) | _uiP(N,I))

/* Field extractors: mask I to W bits, and with _ASM_SAFETY also verify
   it fits as signed (_ck_s), unsigned (_ck_u), either (_ck_su), or as a
   branch displacement (_ck_d). */
#ifndef _ASM_SAFETY
#define _ck_s(W,I) (_UL(I) & _MASK(W))
#define _ck_u(W,I) (_UL(I) & _MASK(W))
#define _ck_su(W,I) (_UL(I) & _MASK(W))
#define _ck_d(W,I) (_UL(I) & _MASK(W))
#else
#define _ck_s(W,I) (_siP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0( "signed integer `"#I"' too large for "#W"-bit field"))
#define _ck_u(W,I) (_uiP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0("unsigned integer `"#I"' too large for "#W"-bit field"))
#define _ck_su(W,I) (_suiP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0( "integer `"#I"' too large for "#W"-bit field"))
#define _ck_d(W,I) (_siP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0( "displacement `"#I"' too large for "#W"-bit field"))
#endif
277
/* Convenience predicates for common immediate widths. */
#define _s0P(I) ((I)==0)
#define _s8P(I) _siP(8,I)
#define _s16P(I) _siP(16,I)
#define _u8P(I) _uiP(8,I)
#define _u16P(I) _uiP(16,I)

/* Checked 8-/16-bit immediates that may be signed or unsigned. */
#define _su8(I) _ck_su(8,I)
#define _su16(I) _ck_su(16,I)

/* Checked signed bit-fields, 1..31 bits wide. */
#define _s1(I) _ck_s( 1,I)
#define _s2(I) _ck_s( 2,I)
#define _s3(I) _ck_s( 3,I)
#define _s4(I) _ck_s( 4,I)
#define _s5(I) _ck_s( 5,I)
#define _s6(I) _ck_s( 6,I)
#define _s7(I) _ck_s( 7,I)
#define _s8(I) _ck_s( 8,I)
#define _s9(I) _ck_s( 9,I)
#define _s10(I) _ck_s(10,I)
#define _s11(I) _ck_s(11,I)
#define _s12(I) _ck_s(12,I)
#define _s13(I) _ck_s(13,I)
#define _s14(I) _ck_s(14,I)
#define _s15(I) _ck_s(15,I)
#define _s16(I) _ck_s(16,I)
#define _s17(I) _ck_s(17,I)
#define _s18(I) _ck_s(18,I)
#define _s19(I) _ck_s(19,I)
#define _s20(I) _ck_s(20,I)
#define _s21(I) _ck_s(21,I)
#define _s22(I) _ck_s(22,I)
#define _s23(I) _ck_s(23,I)
#define _s24(I) _ck_s(24,I)
#define _s25(I) _ck_s(25,I)
#define _s26(I) _ck_s(26,I)
#define _s27(I) _ck_s(27,I)
#define _s28(I) _ck_s(28,I)
#define _s29(I) _ck_s(29,I)
#define _s30(I) _ck_s(30,I)
#define _s31(I) _ck_s(31,I)
/* Checked unsigned bit-fields, 1..31 bits wide. */
#define _u1(I) _ck_u( 1,I)
#define _u2(I) _ck_u( 2,I)
#define _u3(I) _ck_u( 3,I)
#define _u4(I) _ck_u( 4,I)
#define _u5(I) _ck_u( 5,I)
#define _u6(I) _ck_u( 6,I)
#define _u7(I) _ck_u( 7,I)
#define _u8(I) _ck_u( 8,I)
#define _u9(I) _ck_u( 9,I)
#define _u10(I) _ck_u(10,I)
#define _u11(I) _ck_u(11,I)
#define _u12(I) _ck_u(12,I)
#define _u13(I) _ck_u(13,I)
#define _u14(I) _ck_u(14,I)
#define _u15(I) _ck_u(15,I)
#define _u16(I) _ck_u(16,I)
#define _u17(I) _ck_u(17,I)
#define _u18(I) _ck_u(18,I)
#define _u19(I) _ck_u(19,I)
#define _u20(I) _ck_u(20,I)
#define _u21(I) _ck_u(21,I)
#define _u22(I) _ck_u(22,I)
#define _u23(I) _ck_u(23,I)
#define _u24(I) _ck_u(24,I)
#define _u25(I) _ck_u(25,I)
#define _u26(I) _ck_u(26,I)
#define _u27(I) _ck_u(27,I)
#define _u28(I) _ck_u(28,I)
#define _u29(I) _ck_u(29,I)
#define _u30(I) _ck_u(30,I)
#define _u31(I) _ck_u(31,I)
349
350 /* ========================================================================= */
351 /* --- ASSEMBLER ----------------------------------------------------------- */
352 /* ========================================================================= */
353
/* Two- and three-bit binary literals used in ModR/M and SIB fields. */
#define _b00 0
#define _b01 1
#define _b10 2
#define _b11 3

#define _b000 0
#define _b001 1
#define _b010 2
#define _b011 3
#define _b100 4
#define _b101 5
#define _b110 6
#define _b111 7

/* Byte offset from the current emit position to target address D. */
#define _OFF4(D) (_UL(D) - _UL(x86_get_target()))
/* Same, range-checked to fit a signed 8-bit displacement. */
#define _CKD8(D) _ck_d(8, ((_uc) _OFF4(D)) )

/* Emit an 8-/32-bit branch displacement: the slot is emitted first
   (_B(0) / _L(0)) so the offset is computed relative to the *end* of
   the displacement field, then the slot is back-patched. */
#define _D8(D) (_B(0), ((*(_PUC(x86_get_target())-1))= _CKD8(D)))
#define _D32(D) (_L(0), ((*(_PUL(x86_get_target())-1))= _OFF4(D)))

/* Range checks on ModR/M and SIB sub-fields (no-ops without _ASM_SAFETY). */
#ifndef _ASM_SAFETY
# define _M(M) (M)
# define _r(R) (R)
# define _m(M) (M)
# define _s(S) (S)
# define _i(I) (I)
# define _b(B) (B)
#else
# define _M(M) (((M)>3) ? x86_emit_failure0("internal error: mod = " #M) : (M))
# define _r(R) (((R)>7) ? x86_emit_failure0("internal error: reg = " #R) : (R))
# define _m(M) (((M)>7) ? x86_emit_failure0("internal error: r/m = " #M) : (M))
# define _s(S) (((S)>3) ? x86_emit_failure0("internal error: memory scale = " #S) : (S))
# define _i(I) (((I)>7) ? x86_emit_failure0("internal error: memory index = " #I) : (I))
# define _b(B) (((B)>7) ? x86_emit_failure0("internal error: memory base = " #B) : (B))
#endif

/* Emit a ModR/M or SIB byte from its three sub-fields. */
#define _Mrm(Md,R,M) _B((_M(Md)<<6)|(_r(R)<<3)|_m(M))
#define _SIB(Sc,I, B) _B((_s(Sc)<<6)|(_i(I)<<3)|_b(B))

/* Map a scale factor of 1/2/4/8 onto the 2-bit SIB scale encoding. */
#define _SCL(S) ((((S)==1) ? _b00 : \
		 (((S)==2) ? _b01 : \
		 (((S)==4) ? _b10 : \
		 (((S)==8) ? _b11 : x86_emit_failure0("illegal scale: " #S))))))
397
398
399 /* --- Memory subformats - urgh! ------------------------------------------- */
400
/* _r_D() is RIP addressing mode if X86_TARGET_64BIT, use _r_DSIB() instead */
/* Raw effective-address encoders: displacement only, base only,
   8-/32-bit displacement plus base, with optional index*scale. */
#define _r_D( R, D ) (_Mrm(_b00,_rN(R),_b101 ) ,_L((_sl)(D)))
#define _r_DSIB(R, D ) (_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(1),_b100 ,_b101 ),_L((_sl)(D)))
#define _r_0B( R, B ) (_Mrm(_b00,_rN(R),_rA(B)) )
#define _r_0BIS(R, B,I,S) (_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_rA(B)) )
#define _r_1B( R, D,B ) (_Mrm(_b01,_rN(R),_rA(B)) ,_B((_sc)(D)))
#define _r_1BIS(R, D,B,I,S) (_Mrm(_b01,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_rA(B)),_B((_sc)(D)))
#define _r_4B( R, D,B ) (_Mrm(_b10,_rN(R),_rA(B)) ,_L((_sl)(D)))
#define _r_4IS( R, D,I,S) (_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_b101 ),_L((_sl)(D)))
#define _r_4BIS(R, D,B,I,S) (_Mrm(_b10,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_rA(B)),_L((_sl)(D)))

/* Pick the shortest displacement encoding (none / 8-bit / 32-bit); a
   base whose 3-bit encoding equals RBP's (RBP/R13) always needs an
   explicit displacement. */
#define _r_DB( R, D,B ) ((_s0P(D) && (!_rbp13P(B)) ? _r_0B (R, B ) : (_s8P(D) ? _r_1B( R,D,B ) : _r_4B( R,D,B ))))
#define _r_DBIS(R, D,B,I,S) ((_s0P(D) && (!_rbp13P(B)) ? _r_0BIS(R, B,I,S) : (_s8P(D) ? _r_1BIS(R,D,B,I,S) : _r_4BIS(R,D,B,I,S))))

/* Use RIP-addressing in 64-bit mode, if possible */
/* NOTE(review): `uintptr` looks like a project-local typedef (not the
   standard uintptr_t) — defined elsewhere in this source tree. */
#define _x86_RIP_addressing_possible(D,O) (X86_RIP_RELATIVE_ADDR && \
				((uintptr)x86_get_target() + 4 + (O) - (D) <= 0xffffffff))

/* General effective-address dispatcher: R is the reg field, D/B/I/S
   the displacement/base/index/scale, O the number of immediate bytes
   that follow (needed to compute RIP-relative displacements). */
#define _r_X( R, D,B,I,S,O) (_r0P(I) ? (_r0P(B) ? (!X86_TARGET_64BIT ? _r_D(R,D) : \
					         (_x86_RIP_addressing_possible(D, O) ? \
				                  _r_D(R, (D) - ((uintptr)x86_get_target() + 4 + (O))) : \
				                  _r_DSIB(R,D))) : \
				         (_rIP(B) ? _r_D (R,D ) : \
				         (_rsp12P(B) ? _r_DBIS(R,D,_rSP(),_rSP(),1) : \
						_r_DB (R,D, B )))) : \
			     (_r0P(B)	  ? _r_4IS (R,D, I,S) : \
			     (!_rspP(I)	  ? _r_DBIS(R,D, B, I,S) : \
					    x86_emit_failure("illegal index register: %esp"))))
429
430
431 /* --- Instruction formats ------------------------------------------------- */
432
/* Mode gates: restrict an encoding to 32-bit or 64-bit targets, or make
   it a no-op on 32-bit targets (_m64). */
#define _m32only(X) (! X86_TARGET_64BIT ? X : x86_emit_failure("invalid instruction in 64-bit mode"))
#define _m64only(X) ( X86_TARGET_64BIT ? X : x86_emit_failure("invalid instruction in 32-bit mode"))
#define _m64(X) ( X86_TARGET_64BIT ? X : ((void)0) )

/* _format Opcd ModR/M dN(rB,rI,Sc) imm... */

/* Opcode emitters: one-byte (_O), opcode|reg (_Or), two-byte (_OO),
   and sign-extendable-immediate forms (_Os sets bit 1 of the opcode
   when the immediate fits 8 bits; _sW/_sL emit the short immediate,
   _sWO/_sLO give its size in bytes). */
#define _d16() ( _B(0x66 ) )
#define _O( OP ) ( _B( OP ) )
#define _Or( OP,R ) ( _B( (OP)|_r(R)) )
#define _OO( OP ) ( _B((OP)>>8), _B(( (OP) )&0xff) )
#define _OOr( OP,R ) ( _B((OP)>>8), _B(( (OP)|_r(R))&0xff) )
#define _Os( OP,B ) ( _s8P(B) ? _B(((OP)|_b10)) : _B(OP) )
#define _sW( W ) ( _s8P(W) ? _B(W):_W(W) )
#define _sL( L ) ( _s8P(L) ? _B(L):_L(L) )
#define _sWO( W ) ( _s8P(W) ? 1 : 2 )
#define _sLO( L ) ( _s8P(L) ? 1 : 4 )
/* Opcode followed by immediate/displacement. */
#define _O_B( OP ,B ) ( _O ( OP ) ,_B(B) )
#define _O_W( OP ,W ) ( _O ( OP ) ,_W(W) )
#define _O_L( OP ,L ) ( _O ( OP ) ,_L(L) )
#define _OO_L( OP ,L ) ( _OO ( OP ) ,_L(L) )
#define _O_D8( OP ,D ) ( _O ( OP ) ,_D8(D) )
#define _O_D32( OP ,D ) ( _O ( OP ) ,_D32(D) )
#define _OO_D32( OP ,D ) ( _OO ( OP ) ,_D32(D) )
#define _Os_sW( OP ,W ) ( _Os ( OP,W) ,_sW(W) )
#define _Os_sL( OP ,L ) ( _Os ( OP,L) ,_sL(L) )
#define _O_W_B( OP ,W,B) ( _O ( OP ) ,_W(W),_B(B))
#define _Or_B( OP,R ,B ) ( _Or ( OP,R) ,_B(B) )
#define _Or_W( OP,R ,W ) ( _Or ( OP,R) ,_W(W) )
#define _Or_L( OP,R ,L ) ( _Or ( OP,R) ,_L(L) )
#define _Or_Q( OP,R ,Q ) ( _Or ( OP,R) ,_Q(Q) )
/* Opcode plus ModR/M byte (register-direct forms), with optional immediate. */
#define _O_Mrm( OP ,MO,R,M ) ( _O ( OP ),_Mrm(MO,R,M ) )
#define _OO_Mrm( OP ,MO,R,M ) ( _OO ( OP ),_Mrm(MO,R,M ) )
#define _O_Mrm_B( OP ,MO,R,M ,B ) ( _O ( OP ),_Mrm(MO,R,M ) ,_B(B) )
#define _O_Mrm_W( OP ,MO,R,M ,W ) ( _O ( OP ),_Mrm(MO,R,M ) ,_W(W) )
#define _O_Mrm_L( OP ,MO,R,M ,L ) ( _O ( OP ),_Mrm(MO,R,M ) ,_L(L) )
#define _OO_Mrm_B( OP ,MO,R,M ,B ) ( _OO ( OP ),_Mrm(MO,R,M ) ,_B(B) )
#define _Os_Mrm_sW(OP ,MO,R,M ,W ) ( _Os ( OP,W),_Mrm(MO,R,M ),_sW(W) )
#define _Os_Mrm_sL(OP ,MO,R,M ,L ) ( _Os ( OP,L),_Mrm(MO,R,M ),_sL(L) )
/* Opcode plus full effective address; the trailing count passed to _r_X
   is the number of immediate bytes emitted after the EA. */
#define _O_r_X( OP ,R ,MD,MB,MI,MS ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,0) )
#define _OO_r_X( OP ,R ,MD,MB,MI,MS ) ( _OO ( OP ),_r_X( R ,MD,MB,MI,MS,0) )
#define _O_r_X_B( OP ,R ,MD,MB,MI,MS,B ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,1) ,_B(B) )
#define _O_r_X_W( OP ,R ,MD,MB,MI,MS,W ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,2) ,_W(W) )
#define _O_r_X_L( OP ,R ,MD,MB,MI,MS,L ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,4) ,_L(L) )
#define _OO_r_X_B( OP ,R ,MD,MB,MI,MS,B ) ( _OO ( OP ),_r_X( R ,MD,MB,MI,MS,1) ,_B(B) )
#define _Os_r_X_sW(OP ,R ,MD,MB,MI,MS,W ) ( _Os ( OP,W),_r_X( R ,MD,MB,MI,MS,_sWO(W)),_sW(W))
#define _Os_r_X_sL(OP ,R ,MD,MB,MI,MS,L ) ( _Os ( OP,L),_r_X( R ,MD,MB,MI,MS,_sLO(L)),_sL(L))
/* Same, with the ModR/M reg field fixed to 0. */
#define _O_X_B( OP ,MD,MB,MI,MS,B ) ( _O_r_X_B( OP ,0 ,MD,MB,MI,MS ,B) )
#define _O_X_W( OP ,MD,MB,MI,MS,W ) ( _O_r_X_W( OP ,0 ,MD,MB,MI,MS ,W) )
#define _O_X_L( OP ,MD,MB,MI,MS,L ) ( _O_r_X_L( OP ,0 ,MD,MB,MI,MS ,L) )
482
483
484 /* --- REX prefixes -------------------------------------------------------- */
485
/* _VOID(): expression that emits nothing. */
#define _VOID() ((void)0)
/* _BIT(X): normalize any scalar to 0/1 for use as a REX bit. */
#define _BIT(X) (!!(X))
/* Emit a REX prefix byte: 0100WRXB. */
#define _d64(W,R,X,B) (_B(0x40|(W)<<3|(R)<<2|(X)<<1|(B)))

/* Emit REX only when needed: any of W/R/X/B set, or L forces it (L is
   set for the new 8-bit registers SPL/BPL/SIL/DIL, which are only
   addressable with a REX prefix present). */
#define __REXwrxb(L,W,R,X,B) ((W|R|X|B) || (L) ? _d64(W,R,X,B) : _VOID())
#define __REXwrx_(L,W,R,X,MR) (__REXwrxb(L,W,R,X,_BIT(_rIP(MR)?0:_rXP(MR))))
#define __REXw_x_(L,W,R,X,MR) (__REXwrx_(L,W,_BIT(_rXP(R)),X,MR))
/* Fixed: the X argument was written as the octal literal `00`; it is the
   plain value 0, so spell it that way to avoid suggesting a bit pattern. */
#define __REX_reg(RR) (__REXwrxb(0,0,0,0,_BIT(_rXP(RR))))
#define __REX_mem(MB,MI) (__REXwrxb(0,0,0,_BIT(_rXP(MI)),_BIT(_rXP(MB))))

// FIXME: can't mix new (SPL,BPL,SIL,DIL) with (AH,BH,CH,DH)
/* REX for 8-bit operand forms (force REX for SPL..DIL operands). */
#define _REXBrr(RR,MR) _m64(__REXw_x_(_r1e8lP(RR)||_r1e8lP(MR),0,RR,0,MR))
#define _REXBmr(MB,MI,RD) _m64(__REXw_x_(_r1e8lP(RD)||_r1e8lP(MB),0,RD,_BIT(_rXP(MI)),MB))
#define _REXBrm(RS,MB,MI) _REXBmr(MB,MI,RS)

/* REX for 16-/32-bit operand forms (W bit clear). */
#define _REXBLrr(RR,MR) _m64(__REXw_x_(_r1e8lP(MR),0,RR,0,MR))
#define _REXLrr(RR,MR) _m64(__REXw_x_(0,0,RR,0,MR))
#define _REXLmr(MB,MI,RD) _m64(__REXw_x_(0,0,RD,_BIT(_rXP(MI)),MB))
#define _REXLrm(RS,MB,MI) _REXLmr(MB,MI,RS)
#define _REXLr(RR) _m64(__REX_reg(RR))
#define _REXLm(MB,MI) _m64(__REX_mem(MB,MI))

/* REX for 64-bit operand forms (W bit set; 64-bit targets only). */
#define _REXQrr(RR,MR) _m64only(__REXw_x_(0,1,RR,0,MR))
#define _REXQmr(MB,MI,RD) _m64only(__REXw_x_(0,1,RD,_BIT(_rXP(MI)),MB))
#define _REXQrm(RS,MB,MI) _REXQmr(MB,MI,RS)
#define _REXQr(RR) _m64only(__REX_reg(RR))
#define _REXQm(MB,MI) _m64only(__REX_mem(MB,MI))
513
514
515 /* ========================================================================= */
516 /* --- Fully-qualified intrinsic instructions ------------------------------ */
517 /* ========================================================================= */
518
519 /* OPCODE + i = immediate operand
520 * + r = register operand
521 * + m = memory operand (disp,base,index,scale)
522 * + sr/sm = a star preceding a register or memory
523 * + 0 = top of stack register (for FPU instructions)
524 *
525 * NOTE in x86-64 mode: a memory operand with only a valid
526 * displacement value will lead to the expected absolute mode. If
527 * RIP addressing is necessary, X86_RIP shall be used as the base
528 * register argument.
529 */
530
531 /* --- ALU instructions ---------------------------------------------------- */
532
/* ALU operation selectors: the 3-bit value used both as the ModR/M reg
   extension of the 0x80..0x83 immediate forms and (shifted left by 3)
   as the opcode row of the register/memory forms. */
enum {
  X86_ADD = 0,
  X86_OR = 1,
  X86_ADC = 2,
  X86_SBB = 3,
  X86_AND = 4,
  X86_SUB = 5,
  X86_XOR = 6,
  X86_CMP = 7,
};
543
544 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
545
/* Generic ALU encoders, one set per operand size (B/W/L/Q).  Suffixes:
 * rr = reg,reg  mr = mem,reg  rm = reg,mem  ir = imm,reg  im = imm,mem.
 * The *ir forms use the short accumulator opcode when X86_OPTIMIZE_ALU
 * is set and the destination is AL/AX/EAX/RAX. */
#define _ALUBrr(OP,RS, RD) (_REXBrr(RS, RD), _O_Mrm (((OP) << 3) ,_b11,_r1(RS),_r1(RD) ))
#define _ALUBmr(OP, MD, MB, MI, MS, RD) (_REXBmr(MB, MI, RD), _O_r_X (((OP) << 3) + 2 ,_r1(RD) ,MD,MB,MI,MS ))
#define _ALUBrm(OP, RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (((OP) << 3) ,_r1(RS) ,MD,MB,MI,MS ))
#define _ALUBir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_AL) ? \
				(_REXBrr(0, RD), _O_B (((OP) << 3) + 4 ,_su8(IM))) : \
				(_REXBrr(0, RD), _O_Mrm_B (0x80 ,_b11,OP ,_r1(RD) ,_su8(IM))) )
#define _ALUBim(OP, IM, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X_B (0x80 ,OP ,MD,MB,MI,MS ,_su8(IM)))

#define _ALUWrr(OP, RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (((OP) << 3) + 1,_b11,_r2(RS),_r2(RD) ))
#define _ALUWmr(OP, MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _O_r_X (((OP) << 3) + 3 ,_r2(RD) ,MD,MB,MI,MS ))
#define _ALUWrm(OP, RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (((OP) << 3) + 1 ,_r2(RS) ,MD,MB,MI,MS ))
#define _ALUWir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_AX) ? \
				(_d16(), _REXLrr(0, RD), _O_W (((OP) << 3) + 5 ,_su16(IM))) : \
				(_d16(), _REXLrr(0, RD), _Os_Mrm_sW (0x81 ,_b11,OP ,_r2(RD) ,_su16(IM))) )
#define _ALUWim(OP, IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _Os_r_X_sW (0x81 ,OP ,MD,MB,MI,MS ,_su16(IM)))

#define _ALULrr(OP, RS, RD) (_REXLrr(RS, RD), _O_Mrm (((OP) << 3) + 1,_b11,_r4(RS),_r4(RD) ))
#define _ALULmr(OP, MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _O_r_X (((OP) << 3) + 3 ,_r4(RD) ,MD,MB,MI,MS ))
#define _ALULrm(OP, RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (((OP) << 3) + 1 ,_r4(RS) ,MD,MB,MI,MS ))
#define _ALULir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_EAX) ? \
				(_REXLrr(0, RD), _O_L (((OP) << 3) + 5 ,IM )) : \
				(_REXLrr(0, RD), _Os_Mrm_sL (0x81 ,_b11,OP ,_r4(RD) ,IM )) )
#define _ALULim(OP, IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _Os_r_X_sL (0x81 ,OP ,MD,MB,MI,MS ,IM ))

#define _ALUQrr(OP, RS, RD) (_REXQrr(RS, RD), _O_Mrm (((OP) << 3) + 1,_b11,_r8(RS),_r8(RD) ))
#define _ALUQmr(OP, MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _O_r_X (((OP) << 3) + 3 ,_r8(RD) ,MD,MB,MI,MS ))
#define _ALUQrm(OP, RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (((OP) << 3) + 1 ,_r8(RS) ,MD,MB,MI,MS ))
#define _ALUQir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_RAX) ? \
				(_REXQrr(0, RD), _O_L (((OP) << 3) + 5 ,IM )) : \
				(_REXQrr(0, RD), _Os_Mrm_sL (0x81 ,_b11,OP ,_r8(RD) ,IM )) )
#define _ALUQim(OP, IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _Os_r_X_sL (0x81 ,OP ,MD,MB,MI,MS ,IM ))
577
/* ADC: add with carry, all operand sizes and addressing forms. */
#define ADCBrr(RS, RD) _ALUBrr(X86_ADC, RS, RD)
#define ADCBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCBir(IM, RD) _ALUBir(X86_ADC, IM, RD)
#define ADCBim(IM, MD, MB, MI, MS) _ALUBim(X86_ADC, IM, MD, MB, MI, MS)

#define ADCWrr(RS, RD) _ALUWrr(X86_ADC, RS, RD)
#define ADCWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCWir(IM, RD) _ALUWir(X86_ADC, IM, RD)
#define ADCWim(IM, MD, MB, MI, MS) _ALUWim(X86_ADC, IM, MD, MB, MI, MS)

#define ADCLrr(RS, RD) _ALULrr(X86_ADC, RS, RD)
#define ADCLmr(MD, MB, MI, MS, RD) _ALULmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCLrm(RS, MD, MB, MI, MS) _ALULrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCLir(IM, RD) _ALULir(X86_ADC, IM, RD)
#define ADCLim(IM, MD, MB, MI, MS) _ALULim(X86_ADC, IM, MD, MB, MI, MS)

#define ADCQrr(RS, RD) _ALUQrr(X86_ADC, RS, RD)
#define ADCQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCQir(IM, RD) _ALUQir(X86_ADC, IM, RD)
#define ADCQim(IM, MD, MB, MI, MS) _ALUQim(X86_ADC, IM, MD, MB, MI, MS)

/* ADD: integer addition. */
#define ADDBrr(RS, RD) _ALUBrr(X86_ADD, RS, RD)
#define ADDBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDBir(IM, RD) _ALUBir(X86_ADD, IM, RD)
#define ADDBim(IM, MD, MB, MI, MS) _ALUBim(X86_ADD, IM, MD, MB, MI, MS)

#define ADDWrr(RS, RD) _ALUWrr(X86_ADD, RS, RD)
#define ADDWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDWir(IM, RD) _ALUWir(X86_ADD, IM, RD)
#define ADDWim(IM, MD, MB, MI, MS) _ALUWim(X86_ADD, IM, MD, MB, MI, MS)

#define ADDLrr(RS, RD) _ALULrr(X86_ADD, RS, RD)
#define ADDLmr(MD, MB, MI, MS, RD) _ALULmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDLrm(RS, MD, MB, MI, MS) _ALULrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDLir(IM, RD) _ALULir(X86_ADD, IM, RD)
#define ADDLim(IM, MD, MB, MI, MS) _ALULim(X86_ADD, IM, MD, MB, MI, MS)

#define ADDQrr(RS, RD) _ALUQrr(X86_ADD, RS, RD)
#define ADDQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDQir(IM, RD) _ALUQir(X86_ADD, IM, RD)
#define ADDQim(IM, MD, MB, MI, MS) _ALUQim(X86_ADD, IM, MD, MB, MI, MS)

/* AND: bitwise and. */
#define ANDBrr(RS, RD) _ALUBrr(X86_AND, RS, RD)
#define ANDBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDBir(IM, RD) _ALUBir(X86_AND, IM, RD)
#define ANDBim(IM, MD, MB, MI, MS) _ALUBim(X86_AND, IM, MD, MB, MI, MS)

#define ANDWrr(RS, RD) _ALUWrr(X86_AND, RS, RD)
#define ANDWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDWir(IM, RD) _ALUWir(X86_AND, IM, RD)
#define ANDWim(IM, MD, MB, MI, MS) _ALUWim(X86_AND, IM, MD, MB, MI, MS)

#define ANDLrr(RS, RD) _ALULrr(X86_AND, RS, RD)
#define ANDLmr(MD, MB, MI, MS, RD) _ALULmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDLrm(RS, MD, MB, MI, MS) _ALULrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDLir(IM, RD) _ALULir(X86_AND, IM, RD)
#define ANDLim(IM, MD, MB, MI, MS) _ALULim(X86_AND, IM, MD, MB, MI, MS)

#define ANDQrr(RS, RD) _ALUQrr(X86_AND, RS, RD)
#define ANDQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDQir(IM, RD) _ALUQir(X86_AND, IM, RD)
#define ANDQim(IM, MD, MB, MI, MS) _ALUQim(X86_AND, IM, MD, MB, MI, MS)

/* CMP: compare (subtract, set flags, discard result). */
#define CMPBrr(RS, RD) _ALUBrr(X86_CMP, RS, RD)
#define CMPBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPBir(IM, RD) _ALUBir(X86_CMP, IM, RD)
#define CMPBim(IM, MD, MB, MI, MS) _ALUBim(X86_CMP, IM, MD, MB, MI, MS)

#define CMPWrr(RS, RD) _ALUWrr(X86_CMP, RS, RD)
#define CMPWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPWir(IM, RD) _ALUWir(X86_CMP, IM, RD)
#define CMPWim(IM, MD, MB, MI, MS) _ALUWim(X86_CMP, IM, MD, MB, MI, MS)

#define CMPLrr(RS, RD) _ALULrr(X86_CMP, RS, RD)
#define CMPLmr(MD, MB, MI, MS, RD) _ALULmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPLrm(RS, MD, MB, MI, MS) _ALULrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPLir(IM, RD) _ALULir(X86_CMP, IM, RD)
#define CMPLim(IM, MD, MB, MI, MS) _ALULim(X86_CMP, IM, MD, MB, MI, MS)

#define CMPQrr(RS, RD) _ALUQrr(X86_CMP, RS, RD)
#define CMPQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPQir(IM, RD) _ALUQir(X86_CMP, IM, RD)
#define CMPQim(IM, MD, MB, MI, MS) _ALUQim(X86_CMP, IM, MD, MB, MI, MS)
673
/* OR: bitwise or. */
#define ORBrr(RS, RD) _ALUBrr(X86_OR, RS, RD)
#define ORBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_OR, MD, MB, MI, MS, RD)
#define ORBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_OR, RS, MD, MB, MI, MS)
#define ORBir(IM, RD) _ALUBir(X86_OR, IM, RD)
#define ORBim(IM, MD, MB, MI, MS) _ALUBim(X86_OR, IM, MD, MB, MI, MS)

#define ORWrr(RS, RD) _ALUWrr(X86_OR, RS, RD)
#define ORWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_OR, MD, MB, MI, MS, RD)
#define ORWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_OR, RS, MD, MB, MI, MS)
#define ORWir(IM, RD) _ALUWir(X86_OR, IM, RD)
#define ORWim(IM, MD, MB, MI, MS) _ALUWim(X86_OR, IM, MD, MB, MI, MS)

#define ORLrr(RS, RD) _ALULrr(X86_OR, RS, RD)
#define ORLmr(MD, MB, MI, MS, RD) _ALULmr(X86_OR, MD, MB, MI, MS, RD)
#define ORLrm(RS, MD, MB, MI, MS) _ALULrm(X86_OR, RS, MD, MB, MI, MS)
#define ORLir(IM, RD) _ALULir(X86_OR, IM, RD)
#define ORLim(IM, MD, MB, MI, MS) _ALULim(X86_OR, IM, MD, MB, MI, MS)

#define ORQrr(RS, RD) _ALUQrr(X86_OR, RS, RD)
#define ORQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_OR, MD, MB, MI, MS, RD)
#define ORQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_OR, RS, MD, MB, MI, MS)
#define ORQir(IM, RD) _ALUQir(X86_OR, IM, RD)
#define ORQim(IM, MD, MB, MI, MS) _ALUQim(X86_OR, IM, MD, MB, MI, MS)

/* SBB: subtract with borrow. */
#define SBBBrr(RS, RD) _ALUBrr(X86_SBB, RS, RD)
#define SBBBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_SBB, MD, MB, MI, MS, RD)
#define SBBBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_SBB, RS, MD, MB, MI, MS)
#define SBBBir(IM, RD) _ALUBir(X86_SBB, IM, RD)
#define SBBBim(IM, MD, MB, MI, MS) _ALUBim(X86_SBB, IM, MD, MB, MI, MS)

#define SBBWrr(RS, RD) _ALUWrr(X86_SBB, RS, RD)
#define SBBWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_SBB, MD, MB, MI, MS, RD)
#define SBBWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_SBB, RS, MD, MB, MI, MS)
#define SBBWir(IM, RD) _ALUWir(X86_SBB, IM, RD)
#define SBBWim(IM, MD, MB, MI, MS) _ALUWim(X86_SBB, IM, MD, MB, MI, MS)

#define SBBLrr(RS, RD) _ALULrr(X86_SBB, RS, RD)
#define SBBLmr(MD, MB, MI, MS, RD) _ALULmr(X86_SBB, MD, MB, MI, MS, RD)
#define SBBLrm(RS, MD, MB, MI, MS) _ALULrm(X86_SBB, RS, MD, MB, MI, MS)
#define SBBLir(IM, RD) _ALULir(X86_SBB, IM, RD)
#define SBBLim(IM, MD, MB, MI, MS) _ALULim(X86_SBB, IM, MD, MB, MI, MS)

#define SBBQrr(RS, RD) _ALUQrr(X86_SBB, RS, RD)
#define SBBQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_SBB, MD, MB, MI, MS, RD)
#define SBBQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_SBB, RS, MD, MB, MI, MS)
#define SBBQir(IM, RD) _ALUQir(X86_SBB, IM, RD)
#define SBBQim(IM, MD, MB, MI, MS) _ALUQim(X86_SBB, IM, MD, MB, MI, MS)

/* SUB: integer subtraction. */
#define SUBBrr(RS, RD) _ALUBrr(X86_SUB, RS, RD)
#define SUBBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_SUB, MD, MB, MI, MS, RD)
#define SUBBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_SUB, RS, MD, MB, MI, MS)
#define SUBBir(IM, RD) _ALUBir(X86_SUB, IM, RD)
#define SUBBim(IM, MD, MB, MI, MS) _ALUBim(X86_SUB, IM, MD, MB, MI, MS)

#define SUBWrr(RS, RD) _ALUWrr(X86_SUB, RS, RD)
#define SUBWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_SUB, MD, MB, MI, MS, RD)
#define SUBWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_SUB, RS, MD, MB, MI, MS)
#define SUBWir(IM, RD) _ALUWir(X86_SUB, IM, RD)
#define SUBWim(IM, MD, MB, MI, MS) _ALUWim(X86_SUB, IM, MD, MB, MI, MS)

#define SUBLrr(RS, RD) _ALULrr(X86_SUB, RS, RD)
#define SUBLmr(MD, MB, MI, MS, RD) _ALULmr(X86_SUB, MD, MB, MI, MS, RD)
#define SUBLrm(RS, MD, MB, MI, MS) _ALULrm(X86_SUB, RS, MD, MB, MI, MS)
#define SUBLir(IM, RD) _ALULir(X86_SUB, IM, RD)
#define SUBLim(IM, MD, MB, MI, MS) _ALULim(X86_SUB, IM, MD, MB, MI, MS)

#define SUBQrr(RS, RD) _ALUQrr(X86_SUB, RS, RD)
#define SUBQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_SUB, MD, MB, MI, MS, RD)
#define SUBQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_SUB, RS, MD, MB, MI, MS)
#define SUBQir(IM, RD) _ALUQir(X86_SUB, IM, RD)
#define SUBQim(IM, MD, MB, MI, MS) _ALUQim(X86_SUB, IM, MD, MB, MI, MS)

/* XOR: bitwise exclusive or.  (The XORQ group continues past this chunk.) */
#define XORBrr(RS, RD) _ALUBrr(X86_XOR, RS, RD)
#define XORBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_XOR, MD, MB, MI, MS, RD)
#define XORBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_XOR, RS, MD, MB, MI, MS)
#define XORBir(IM, RD) _ALUBir(X86_XOR, IM, RD)
#define XORBim(IM, MD, MB, MI, MS) _ALUBim(X86_XOR, IM, MD, MB, MI, MS)

#define XORWrr(RS, RD) _ALUWrr(X86_XOR, RS, RD)
#define XORWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_XOR, MD, MB, MI, MS, RD)
#define XORWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_XOR, RS, MD, MB, MI, MS)
#define XORWir(IM, RD) _ALUWir(X86_XOR, IM, RD)
#define XORWim(IM, MD, MB, MI, MS) _ALUWim(X86_XOR, IM, MD, MB, MI, MS)

#define XORLrr(RS, RD) _ALULrr(X86_XOR, RS, RD)
#define XORLmr(MD, MB, MI, MS, RD) _ALULmr(X86_XOR, MD, MB, MI, MS, RD)
#define XORLrm(RS, MD, MB, MI, MS) _ALULrm(X86_XOR, RS, MD, MB, MI, MS)
#define XORLir(IM, RD) _ALULir(X86_XOR, IM, RD)
#define XORLim(IM, MD, MB, MI, MS) _ALULim(X86_XOR, IM, MD, MB, MI, MS)

#define XORQrr(RS, RD) _ALUQrr(X86_XOR, RS, RD)
#define XORQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_XOR, MD, MB, MI, MS, RD)
#define XORQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_XOR, RS, MD, MB, MI, MS)
767 #define XORQir(IM, RD) _ALUQir(X86_XOR, IM, RD)
768 #define XORQim(IM, MD, MB, MI, MS) _ALUQim(X86_XOR, IM, MD, MB, MI, MS)
769
770
771 /* --- Shift/Rotate instructions ------------------------------------------- */
772
/* ModRM /r extension values selecting the operation within the shift/rotate
 * opcode group (C0/C1/D0-D3).  Note the gap at 6: /6 is SAL, which is just
 * an alias of SHL (/4), so no separate constant is needed. */
773 enum {
774   X86_ROL = 0,
775   X86_ROR = 1,
776   X86_RCL = 2,
777   X86_RCR = 3,
778   X86_SHL = 4,
779   X86_SHR = 5,
780   X86_SAR = 7,
781 };
782
783 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
784
/* Generic shift/rotate emitters, one family per operand size (B/W/L/Q).
 * OP is one of the X86_ROL..X86_SAR /r values above.
 *
 * Immediate forms: when X86_OPTIMIZE_ROTSHI is set and the count is the
 * constant 1, the shorter shift-by-1 encoding (0xd0/0xd1) is emitted
 * instead of the shift-by-imm8 encoding (0xc0/0xc1 + _u8 immediate).
 *
 * Register-count forms: the hardware only supports CL as the shift count,
 * so any other source register is rejected at emit time via
 * x86_emit_failure.  0xd2/0xd3 are the shift-by-CL opcodes.
 *
 * The W (16-bit) family prefixes 0x66 via _d16(); the Q family emits a
 * REX.W prefix via _REXQ*; _REXB*/_REXL* supply REX bits as needed for
 * the extended registers (definitions are outside this view). */
785 #define _ROTSHIBir(OP,IM,RD)		(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
786 					(_REXBrr(0, RD),		_O_Mrm		(0xd0		,_b11,OP,_r1(RD)				)) : \
787 					(_REXBrr(0, RD),		_O_Mrm_B	(0xc0		,_b11,OP,_r1(RD)			,_u8(IM))) )
788 #define _ROTSHIBim(OP,IM,MD,MB,MI,MS)	(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
789 					(_REXBrm(0, MB, MI),		_O_r_X		(0xd0		,OP		,MD,MB,MI,MS		)) : \
790 					(_REXBrm(0, MB, MI),		_O_r_X_B	(0xc0		,OP		,MD,MB,MI,MS	,_u8(IM))) )
791 #define _ROTSHIBrr(OP,RS,RD)		(((RS) == X86_CL) ? \
792 					(_REXBrr(RS, RD),		_O_Mrm		(0xd2		,_b11,OP,_r1(RD)				)) : \
793 					x86_emit_failure("source register must be CL"					) )
794 #define _ROTSHIBrm(OP,RS,MD,MB,MI,MS)	(((RS) == X86_CL) ? \
795 					(_REXBrm(RS, MB, MI),		_O_r_X		(0xd2		,OP		,MD,MB,MI,MS		)) : \
796 					x86_emit_failure("source register must be CL"					) )
797
798 #define _ROTSHIWir(OP,IM,RD)		(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
799 					(_d16(), _REXLrr(0, RD),	_O_Mrm		(0xd1		,_b11,OP,_r2(RD)				)) : \
800 					(_d16(), _REXLrr(0, RD),	_O_Mrm_B	(0xc1		,_b11,OP,_r2(RD)			,_u8(IM))) )
801 #define _ROTSHIWim(OP,IM,MD,MB,MI,MS)	(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
802 					(_d16(), _REXLrm(0, MB, MI),	_O_r_X		(0xd1		,OP		,MD,MB,MI,MS		)) : \
803 					(_d16(), _REXLrm(0, MB, MI),	_O_r_X_B	(0xc1		,OP		,MD,MB,MI,MS	,_u8(IM))) )
804 #define _ROTSHIWrr(OP,RS,RD)		(((RS) == X86_CL) ? \
805 					(_d16(), _REXLrr(RS, RD),	_O_Mrm		(0xd3		,_b11,OP,_r2(RD)				)) : \
806 					x86_emit_failure("source register must be CL"					) )
807 #define _ROTSHIWrm(OP,RS,MD,MB,MI,MS)	(((RS) == X86_CL) ? \
808 					(_d16(), _REXLrm(RS, MB, MI),	_O_r_X		(0xd3		,OP		,MD,MB,MI,MS		)) : \
809 					x86_emit_failure("source register must be CL"					) )
810
811 #define _ROTSHILir(OP,IM,RD)		(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
812 					(_REXLrr(0, RD),		_O_Mrm		(0xd1		,_b11,OP,_r4(RD)				)) : \
813 					(_REXLrr(0, RD),		_O_Mrm_B	(0xc1		,_b11,OP,_r4(RD)			,_u8(IM))) )
814 #define _ROTSHILim(OP,IM,MD,MB,MI,MS)	(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
815 					(_REXLrm(0, MB, MI),		_O_r_X		(0xd1		,OP		,MD,MB,MI,MS		)) : \
816 					(_REXLrm(0, MB, MI),		_O_r_X_B	(0xc1		,OP		,MD,MB,MI,MS	,_u8(IM))) )
817 #define _ROTSHILrr(OP,RS,RD)		(((RS) == X86_CL) ? \
818 					(_REXLrr(RS, RD),		_O_Mrm		(0xd3		,_b11,OP,_r4(RD)				)) : \
819 					x86_emit_failure("source register must be CL"					) )
820 #define _ROTSHILrm(OP,RS,MD,MB,MI,MS)	(((RS) == X86_CL) ? \
821 					(_REXLrm(RS, MB, MI),		_O_r_X		(0xd3		,OP		,MD,MB,MI,MS		)) : \
822 					x86_emit_failure("source register must be CL"					) )
823
824 #define _ROTSHIQir(OP,IM,RD)		(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
825 					(_REXQrr(0, RD),		_O_Mrm		(0xd1		,_b11,OP,_r8(RD)				)) : \
826 					(_REXQrr(0, RD),		_O_Mrm_B	(0xc1		,_b11,OP,_r8(RD)			,_u8(IM))) )
827 #define _ROTSHIQim(OP,IM,MD,MB,MI,MS)	(X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
828 					(_REXQrm(0, MB, MI),		_O_r_X		(0xd1		,OP		,MD,MB,MI,MS		)) : \
829 					(_REXQrm(0, MB, MI),		_O_r_X_B	(0xc1		,OP		,MD,MB,MI,MS	,_u8(IM))) )
830 #define _ROTSHIQrr(OP,RS,RD)		(((RS) == X86_CL) ? \
831 					(_REXQrr(RS, RD),		_O_Mrm		(0xd3		,_b11,OP,_r8(RD)				)) : \
832 					x86_emit_failure("source register must be CL"					) )
833 #define _ROTSHIQrm(OP,RS,MD,MB,MI,MS)	(((RS) == X86_CL) ? \
834 					(_REXQrm(RS, MB, MI),		_O_r_X		(0xd3		,OP		,MD,MB,MI,MS		)) : \
835 					x86_emit_failure("source register must be CL"					) )
836
/* Shift/rotate mnemonics: <op><size><operands>, expanding to the _ROTSHI*
 * emitters above.  ir/im take an immediate count (count of 1 may use the
 * short encoding); rr/rm take a register count, which must be CL. */
837 #define ROLBir(IM, RD)			_ROTSHIBir(X86_ROL, IM, RD)
838 #define ROLBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_ROL, IM, MD, MB, MI, MS)
839 #define ROLBrr(RS, RD)			_ROTSHIBrr(X86_ROL, RS, RD)
840 #define ROLBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_ROL, RS, MD, MB, MI, MS)
841
842 #define ROLWir(IM, RD)			_ROTSHIWir(X86_ROL, IM, RD)
843 #define ROLWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_ROL, IM, MD, MB, MI, MS)
844 #define ROLWrr(RS, RD)			_ROTSHIWrr(X86_ROL, RS, RD)
845 #define ROLWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_ROL, RS, MD, MB, MI, MS)
846
847 #define ROLLir(IM, RD)			_ROTSHILir(X86_ROL, IM, RD)
848 #define ROLLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_ROL, IM, MD, MB, MI, MS)
849 #define ROLLrr(RS, RD)			_ROTSHILrr(X86_ROL, RS, RD)
850 #define ROLLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_ROL, RS, MD, MB, MI, MS)
851
852 #define ROLQir(IM, RD)			_ROTSHIQir(X86_ROL, IM, RD)
853 #define ROLQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_ROL, IM, MD, MB, MI, MS)
854 #define ROLQrr(RS, RD)			_ROTSHIQrr(X86_ROL, RS, RD)
855 #define ROLQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_ROL, RS, MD, MB, MI, MS)
856
857 #define RORBir(IM, RD)			_ROTSHIBir(X86_ROR, IM, RD)
858 #define RORBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_ROR, IM, MD, MB, MI, MS)
859 #define RORBrr(RS, RD)			_ROTSHIBrr(X86_ROR, RS, RD)
860 #define RORBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_ROR, RS, MD, MB, MI, MS)
861
862 #define RORWir(IM, RD)			_ROTSHIWir(X86_ROR, IM, RD)
863 #define RORWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_ROR, IM, MD, MB, MI, MS)
864 #define RORWrr(RS, RD)			_ROTSHIWrr(X86_ROR, RS, RD)
865 #define RORWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_ROR, RS, MD, MB, MI, MS)
866
867 #define RORLir(IM, RD)			_ROTSHILir(X86_ROR, IM, RD)
868 #define RORLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_ROR, IM, MD, MB, MI, MS)
869 #define RORLrr(RS, RD)			_ROTSHILrr(X86_ROR, RS, RD)
870 #define RORLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_ROR, RS, MD, MB, MI, MS)
871
872 #define RORQir(IM, RD)			_ROTSHIQir(X86_ROR, IM, RD)
873 #define RORQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_ROR, IM, MD, MB, MI, MS)
874 #define RORQrr(RS, RD)			_ROTSHIQrr(X86_ROR, RS, RD)
875 #define RORQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_ROR, RS, MD, MB, MI, MS)
876
/* RCL/RCR rotate through the carry flag. */
877 #define RCLBir(IM, RD)			_ROTSHIBir(X86_RCL, IM, RD)
878 #define RCLBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_RCL, IM, MD, MB, MI, MS)
879 #define RCLBrr(RS, RD)			_ROTSHIBrr(X86_RCL, RS, RD)
880 #define RCLBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_RCL, RS, MD, MB, MI, MS)
881
882 #define RCLWir(IM, RD)			_ROTSHIWir(X86_RCL, IM, RD)
883 #define RCLWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_RCL, IM, MD, MB, MI, MS)
884 #define RCLWrr(RS, RD)			_ROTSHIWrr(X86_RCL, RS, RD)
885 #define RCLWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_RCL, RS, MD, MB, MI, MS)
886
887 #define RCLLir(IM, RD)			_ROTSHILir(X86_RCL, IM, RD)
888 #define RCLLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_RCL, IM, MD, MB, MI, MS)
889 #define RCLLrr(RS, RD)			_ROTSHILrr(X86_RCL, RS, RD)
890 #define RCLLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_RCL, RS, MD, MB, MI, MS)
891
892 #define RCLQir(IM, RD)			_ROTSHIQir(X86_RCL, IM, RD)
893 #define RCLQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_RCL, IM, MD, MB, MI, MS)
894 #define RCLQrr(RS, RD)			_ROTSHIQrr(X86_RCL, RS, RD)
895 #define RCLQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_RCL, RS, MD, MB, MI, MS)
896
897 #define RCRBir(IM, RD)			_ROTSHIBir(X86_RCR, IM, RD)
898 #define RCRBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_RCR, IM, MD, MB, MI, MS)
899 #define RCRBrr(RS, RD)			_ROTSHIBrr(X86_RCR, RS, RD)
900 #define RCRBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_RCR, RS, MD, MB, MI, MS)
901
902 #define RCRWir(IM, RD)			_ROTSHIWir(X86_RCR, IM, RD)
903 #define RCRWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_RCR, IM, MD, MB, MI, MS)
904 #define RCRWrr(RS, RD)			_ROTSHIWrr(X86_RCR, RS, RD)
905 #define RCRWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_RCR, RS, MD, MB, MI, MS)
906
907 #define RCRLir(IM, RD)			_ROTSHILir(X86_RCR, IM, RD)
908 #define RCRLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_RCR, IM, MD, MB, MI, MS)
909 #define RCRLrr(RS, RD)			_ROTSHILrr(X86_RCR, RS, RD)
910 #define RCRLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_RCR, RS, MD, MB, MI, MS)
911
912 #define RCRQir(IM, RD)			_ROTSHIQir(X86_RCR, IM, RD)
913 #define RCRQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_RCR, IM, MD, MB, MI, MS)
914 #define RCRQrr(RS, RD)			_ROTSHIQrr(X86_RCR, RS, RD)
915 #define RCRQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_RCR, RS, MD, MB, MI, MS)
916
917 #define SHLBir(IM, RD)			_ROTSHIBir(X86_SHL, IM, RD)
918 #define SHLBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_SHL, IM, MD, MB, MI, MS)
919 #define SHLBrr(RS, RD)			_ROTSHIBrr(X86_SHL, RS, RD)
920 #define SHLBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_SHL, RS, MD, MB, MI, MS)
921
922 #define SHLWir(IM, RD)			_ROTSHIWir(X86_SHL, IM, RD)
923 #define SHLWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_SHL, IM, MD, MB, MI, MS)
924 #define SHLWrr(RS, RD)			_ROTSHIWrr(X86_SHL, RS, RD)
925 #define SHLWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_SHL, RS, MD, MB, MI, MS)
926
927 #define SHLLir(IM, RD)			_ROTSHILir(X86_SHL, IM, RD)
928 #define SHLLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_SHL, IM, MD, MB, MI, MS)
929 #define SHLLrr(RS, RD)			_ROTSHILrr(X86_SHL, RS, RD)
930 #define SHLLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_SHL, RS, MD, MB, MI, MS)
931
932 #define SHLQir(IM, RD)			_ROTSHIQir(X86_SHL, IM, RD)
933 #define SHLQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_SHL, IM, MD, MB, MI, MS)
934 #define SHLQrr(RS, RD)			_ROTSHIQrr(X86_SHL, RS, RD)
935 #define SHLQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_SHL, RS, MD, MB, MI, MS)
936
937 #define SHRBir(IM, RD)			_ROTSHIBir(X86_SHR, IM, RD)
938 #define SHRBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_SHR, IM, MD, MB, MI, MS)
939 #define SHRBrr(RS, RD)			_ROTSHIBrr(X86_SHR, RS, RD)
940 #define SHRBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_SHR, RS, MD, MB, MI, MS)
941
942 #define SHRWir(IM, RD)			_ROTSHIWir(X86_SHR, IM, RD)
943 #define SHRWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_SHR, IM, MD, MB, MI, MS)
944 #define SHRWrr(RS, RD)			_ROTSHIWrr(X86_SHR, RS, RD)
945 #define SHRWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_SHR, RS, MD, MB, MI, MS)
946
947 #define SHRLir(IM, RD)			_ROTSHILir(X86_SHR, IM, RD)
948 #define SHRLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_SHR, IM, MD, MB, MI, MS)
949 #define SHRLrr(RS, RD)			_ROTSHILrr(X86_SHR, RS, RD)
950 #define SHRLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_SHR, RS, MD, MB, MI, MS)
951
952 #define SHRQir(IM, RD)			_ROTSHIQir(X86_SHR, IM, RD)
953 #define SHRQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_SHR, IM, MD, MB, MI, MS)
954 #define SHRQrr(RS, RD)			_ROTSHIQrr(X86_SHR, RS, RD)
955 #define SHRQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_SHR, RS, MD, MB, MI, MS)
956
/* SAL is architecturally identical to SHL; provide it as a plain alias. */
957 #define SALBir				SHLBir
958 #define SALBim				SHLBim
959 #define SALBrr				SHLBrr
960 #define SALBrm				SHLBrm
961
962 #define SALWir				SHLWir
963 #define SALWim				SHLWim
964 #define SALWrr				SHLWrr
965 #define SALWrm				SHLWrm
966
967 #define SALLir				SHLLir
968 #define SALLim				SHLLim
969 #define SALLrr				SHLLrr
970 #define SALLrm				SHLLrm
971
972 #define SALQir				SHLQir
973 #define SALQim				SHLQim
974 #define SALQrr				SHLQrr
975 #define SALQrm				SHLQrm
976
/* SAR — arithmetic (sign-preserving) right shift. */
977 #define SARBir(IM, RD)			_ROTSHIBir(X86_SAR, IM, RD)
978 #define SARBim(IM, MD, MB, MI, MS)	_ROTSHIBim(X86_SAR, IM, MD, MB, MI, MS)
979 #define SARBrr(RS, RD)			_ROTSHIBrr(X86_SAR, RS, RD)
980 #define SARBrm(RS, MD, MB, MI, MS)	_ROTSHIBrm(X86_SAR, RS, MD, MB, MI, MS)
981
982 #define SARWir(IM, RD)			_ROTSHIWir(X86_SAR, IM, RD)
983 #define SARWim(IM, MD, MB, MI, MS)	_ROTSHIWim(X86_SAR, IM, MD, MB, MI, MS)
984 #define SARWrr(RS, RD)			_ROTSHIWrr(X86_SAR, RS, RD)
985 #define SARWrm(RS, MD, MB, MI, MS)	_ROTSHIWrm(X86_SAR, RS, MD, MB, MI, MS)
986
987 #define SARLir(IM, RD)			_ROTSHILir(X86_SAR, IM, RD)
988 #define SARLim(IM, MD, MB, MI, MS)	_ROTSHILim(X86_SAR, IM, MD, MB, MI, MS)
989 #define SARLrr(RS, RD)			_ROTSHILrr(X86_SAR, RS, RD)
990 #define SARLrm(RS, MD, MB, MI, MS)	_ROTSHILrm(X86_SAR, RS, MD, MB, MI, MS)
991
992 #define SARQir(IM, RD)			_ROTSHIQir(X86_SAR, IM, RD)
993 #define SARQim(IM, MD, MB, MI, MS)	_ROTSHIQim(X86_SAR, IM, MD, MB, MI, MS)
994 #define SARQrr(RS, RD)			_ROTSHIQrr(X86_SAR, RS, RD)
995 #define SARQrm(RS, MD, MB, MI, MS)	_ROTSHIQrm(X86_SAR, RS, MD, MB, MI, MS)
996
997
998 /* --- Bit test instructions ----------------------------------------------- */
999
/* ModRM /r extension values for the 0F BA immediate bit-test group;
 * for the register forms the same value is folded into the opcode
 * (0F A3 + op<<3), see _BT*rr below. */
1000 enum {
1001   X86_BT  = 4,
1002   X86_BTS = 5,
1003   X86_BTR = 6,
1004   X86_BTC = 7,
1005 };
1006
1007 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
1008
/* Generic bit-test emitters (W/L/Q operand sizes).
 * Immediate forms use opcode 0F BA with OP as the ModRM /r extension and
 * an 8-bit bit index.  Register forms derive the opcode directly:
 * 0x0f83|(OP<<3) yields 0F A3 (BT), 0F AB (BTS), 0F B3 (BTR), 0F BB (BTC).
 * The W family adds the 0x66 operand-size prefix; the Q family a REX.W. */
1009 #define _BTWir(OP, IM, RD)		(_d16(), _REXLrr(0, RD),	_OO_Mrm_B	(0x0fba		,_b11,OP   ,_r2(RD)		,_u8(IM)))
1010 #define _BTWim(OP, IM, MD, MB, MI, MS)	(_d16(), _REXLrm(0, MB, MI),	_OO_r_X_B	(0x0fba		,OP		,MD,MB,MI,MS	,_u8(IM)))
1011 #define _BTWrr(OP, RS, RD)		(_d16(), _REXLrr(RS, RD),	_OO_Mrm		(0x0f83|((OP)<<3),_b11,_r2(RS),_r2(RD)			))
1012 #define _BTWrm(OP, RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI),	_OO_r_X		(0x0f83|((OP)<<3)     ,_r2(RS)	,MD,MB,MI,MS		))
1013
1014 #define _BTLir(OP, IM, RD)		(_REXLrr(0, RD),		_OO_Mrm_B	(0x0fba		,_b11,OP   ,_r4(RD)		,_u8(IM)))
1015 #define _BTLim(OP, IM, MD, MB, MI, MS)	(_REXLrm(0, MB, MI),		_OO_r_X_B	(0x0fba		,OP		,MD,MB,MI,MS	,_u8(IM)))
1016 #define _BTLrr(OP, RS, RD)		(_REXLrr(RS, RD),		_OO_Mrm		(0x0f83|((OP)<<3),_b11,_r4(RS),_r4(RD)			))
1017 #define _BTLrm(OP, RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI),		_OO_r_X		(0x0f83|((OP)<<3)     ,_r4(RS)	,MD,MB,MI,MS		))
1018
1019 #define _BTQir(OP, IM, RD)		(_REXQrr(0, RD),		_OO_Mrm_B	(0x0fba		,_b11,OP   ,_r8(RD)		,_u8(IM)))
1020 #define _BTQim(OP, IM, MD, MB, MI, MS)	(_REXQrm(0, MB, MI),		_OO_r_X_B	(0x0fba		,OP		,MD,MB,MI,MS	,_u8(IM)))
1021 #define _BTQrr(OP, RS, RD)		(_REXQrr(RS, RD),		_OO_Mrm		(0x0f83|((OP)<<3),_b11,_r8(RS),_r8(RD)			))
1022 #define _BTQrm(OP, RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI),		_OO_r_X		(0x0f83|((OP)<<3)     ,_r8(RS)	,MD,MB,MI,MS		))
1023
/* Bit-test mnemonics: BT (test only), BTC (test+complement),
 * BTR (test+reset), BTS (test+set); the tested bit goes to CF. */
1024 #define BTWir(IM, RD)			_BTWir(X86_BT, IM, RD)
1025 #define BTWim(IM, MD, MB, MI, MS)	_BTWim(X86_BT, IM, MD, MB, MI, MS)
1026 #define BTWrr(RS, RD)			_BTWrr(X86_BT, RS, RD)
1027 #define BTWrm(RS, MD, MB, MI, MS)	_BTWrm(X86_BT, RS, MD, MB, MI, MS)
1028
1029 #define BTLir(IM, RD)			_BTLir(X86_BT, IM, RD)
1030 #define BTLim(IM, MD, MB, MI, MS)	_BTLim(X86_BT, IM, MD, MB, MI, MS)
1031 #define BTLrr(RS, RD)			_BTLrr(X86_BT, RS, RD)
1032 #define BTLrm(RS, MD, MB, MI, MS)	_BTLrm(X86_BT, RS, MD, MB, MI, MS)
1033
1034 #define BTQir(IM, RD)			_BTQir(X86_BT, IM, RD)
1035 #define BTQim(IM, MD, MB, MI, MS)	_BTQim(X86_BT, IM, MD, MB, MI, MS)
1036 #define BTQrr(RS, RD)			_BTQrr(X86_BT, RS, RD)
1037 #define BTQrm(RS, MD, MB, MI, MS)	_BTQrm(X86_BT, RS, MD, MB, MI, MS)
1038
1039 #define BTCWir(IM, RD)			_BTWir(X86_BTC, IM, RD)
1040 #define BTCWim(IM, MD, MB, MI, MS)	_BTWim(X86_BTC, IM, MD, MB, MI, MS)
1041 #define BTCWrr(RS, RD)			_BTWrr(X86_BTC, RS, RD)
1042 #define BTCWrm(RS, MD, MB, MI, MS)	_BTWrm(X86_BTC, RS, MD, MB, MI, MS)
1043
1044 #define BTCLir(IM, RD)			_BTLir(X86_BTC, IM, RD)
1045 #define BTCLim(IM, MD, MB, MI, MS)	_BTLim(X86_BTC, IM, MD, MB, MI, MS)
1046 #define BTCLrr(RS, RD)			_BTLrr(X86_BTC, RS, RD)
1047 #define BTCLrm(RS, MD, MB, MI, MS)	_BTLrm(X86_BTC, RS, MD, MB, MI, MS)
1048
1049 #define BTCQir(IM, RD)			_BTQir(X86_BTC, IM, RD)
1050 #define BTCQim(IM, MD, MB, MI, MS)	_BTQim(X86_BTC, IM, MD, MB, MI, MS)
1051 #define BTCQrr(RS, RD)			_BTQrr(X86_BTC, RS, RD)
1052 #define BTCQrm(RS, MD, MB, MI, MS)	_BTQrm(X86_BTC, RS, MD, MB, MI, MS)
1053
1054 #define BTRWir(IM, RD)			_BTWir(X86_BTR, IM, RD)
1055 #define BTRWim(IM, MD, MB, MI, MS)	_BTWim(X86_BTR, IM, MD, MB, MI, MS)
1056 #define BTRWrr(RS, RD)			_BTWrr(X86_BTR, RS, RD)
1057 #define BTRWrm(RS, MD, MB, MI, MS)	_BTWrm(X86_BTR, RS, MD, MB, MI, MS)
1058
1059 #define BTRLir(IM, RD)			_BTLir(X86_BTR, IM, RD)
1060 #define BTRLim(IM, MD, MB, MI, MS)	_BTLim(X86_BTR, IM, MD, MB, MI, MS)
1061 #define BTRLrr(RS, RD)			_BTLrr(X86_BTR, RS, RD)
1062 #define BTRLrm(RS, MD, MB, MI, MS)	_BTLrm(X86_BTR, RS, MD, MB, MI, MS)
1063
1064 #define BTRQir(IM, RD)			_BTQir(X86_BTR, IM, RD)
1065 #define BTRQim(IM, MD, MB, MI, MS)	_BTQim(X86_BTR, IM, MD, MB, MI, MS)
1066 #define BTRQrr(RS, RD)			_BTQrr(X86_BTR, RS, RD)
1067 #define BTRQrm(RS, MD, MB, MI, MS)	_BTQrm(X86_BTR, RS, MD, MB, MI, MS)
1068
1069 #define BTSWir(IM, RD)			_BTWir(X86_BTS, IM, RD)
1070 #define BTSWim(IM, MD, MB, MI, MS)	_BTWim(X86_BTS, IM, MD, MB, MI, MS)
1071 #define BTSWrr(RS, RD)			_BTWrr(X86_BTS, RS, RD)
1072 #define BTSWrm(RS, MD, MB, MI, MS)	_BTWrm(X86_BTS, RS, MD, MB, MI, MS)
1073
1074 #define BTSLir(IM, RD)			_BTLir(X86_BTS, IM, RD)
1075 #define BTSLim(IM, MD, MB, MI, MS)	_BTLim(X86_BTS, IM, MD, MB, MI, MS)
1076 #define BTSLrr(RS, RD)			_BTLrr(X86_BTS, RS, RD)
1077 #define BTSLrm(RS, MD, MB, MI, MS)	_BTLrm(X86_BTS, RS, MD, MB, MI, MS)
1078
1079 #define BTSQir(IM, RD)			_BTQir(X86_BTS, IM, RD)
1080 #define BTSQim(IM, MD, MB, MI, MS)	_BTQim(X86_BTS, IM, MD, MB, MI, MS)
1081 #define BTSQrr(RS, RD)			_BTQrr(X86_BTS, RS, RD)
1082 #define BTSQrm(RS, MD, MB, MI, MS)	_BTQrm(X86_BTS, RS, MD, MB, MI, MS)
1083
1084
1085 /* --- Move instructions --------------------------------------------------- */
1086
1087 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
1088
/* MOV in all operand sizes.  88/89 = store reg, 8a/8b = load reg,
 * b0/b8+reg = move immediate to register, c6/c7 = move immediate to
 * register/memory. */
1089 #define MOVBrr(RS, RD)			(_REXBrr(RS, RD),		_O_Mrm		(0x88		,_b11,_r1(RS),_r1(RD)				))
1090 #define MOVBmr(MD, MB, MI, MS, RD)	(_REXBmr(MB, MI, RD),		_O_r_X		(0x8a		,_r1(RD)		,MD,MB,MI,MS		))
1091 #define MOVBrm(RS, MD, MB, MI, MS)	(_REXBrm(RS, MB, MI),		_O_r_X		(0x88		,_r1(RS)		,MD,MB,MI,MS		))
1092 #define MOVBir(IM, R)			(_REXBrr(0, R),			_Or_B		(0xb0,_r1(R)				,_su8(IM)))
1093 #define MOVBim(IM, MD, MB, MI, MS)	(_REXBrm(0, MB, MI),		_O_X_B		(0xc6					,MD,MB,MI,MS	,_su8(IM)))
1094
1095 #define MOVWrr(RS, RD)			(_d16(), _REXLrr(RS, RD),	_O_Mrm		(0x89		,_b11,_r2(RS),_r2(RD)				))
1096 #define MOVWmr(MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD),	_O_r_X		(0x8b		,_r2(RD)		,MD,MB,MI,MS		))
1097 #define MOVWrm(RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI),	_O_r_X		(0x89		,_r2(RS)		,MD,MB,MI,MS		))
1098 #define MOVWir(IM, R)			(_d16(), _REXLrr(0, R),		_Or_W		(0xb8,_r2(R)				,_su16(IM)))
1099 #define MOVWim(IM, MD, MB, MI, MS)	(_d16(), _REXLrm(0, MB, MI),	_O_X_W		(0xc7					,MD,MB,MI,MS	,_su16(IM)))
1100
1101 #define MOVLrr(RS, RD)			(_REXLrr(RS, RD),		_O_Mrm		(0x89		,_b11,_r4(RS),_r4(RD)				))
1102 #define MOVLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),		_O_r_X		(0x8b		,_r4(RD)		,MD,MB,MI,MS		))
1103 #define MOVLrm(RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI),		_O_r_X		(0x89		,_r4(RS)		,MD,MB,MI,MS		))
1104 #define MOVLir(IM, R)			(_REXLrr(0, R),			_Or_L		(0xb8,_r4(R)				,IM	))
1105 #define MOVLim(IM, MD, MB, MI, MS)	(_REXLrm(0, MB, MI),		_O_X_L		(0xc7					,MD,MB,MI,MS	,IM	))
1106
/* MOVQir emits the full 64-bit immediate form (REX.W + b8+reg, imm64).
 * MOVQim only carries a 32-bit immediate (_O_X_L): the c7 encoding
 * sign-extends imm32 to 64 bits, so values outside that range cannot be
 * stored this way. */
1107 #define MOVQrr(RS, RD)			(_REXQrr(RS, RD),		_O_Mrm		(0x89		,_b11,_r8(RS),_r8(RD)				))
1108 #define MOVQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),		_O_r_X		(0x8b		,_r8(RD)		,MD,MB,MI,MS		))
1109 #define MOVQrm(RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI),		_O_r_X		(0x89		,_r8(RS)		,MD,MB,MI,MS		))
1110 #define MOVQir(IM, R)			(_REXQrr(0, R),			_Or_Q		(0xb8,_r8(R)				,IM	))
1111 #define MOVQim(IM, MD, MB, MI, MS)	(_REXQrm(0, MB, MI),		_O_X_L		(0xc7					,MD,MB,MI,MS	,IM	))
1112
1113
1114 /* --- Unary and Multiply/Divide instructions ------------------------------ */
1115
/* ModRM /r extension values for the F6/F7 opcode group.
 * (/0 and /1 are TEST, which is handled elsewhere.) */
1116 enum {
1117   X86_NOT  = 2,
1118   X86_NEG  = 3,
1119   X86_MUL  = 4,
1120   X86_IMUL = 5,
1121   X86_DIV  = 6,
1122   X86_IDIV = 7,
1123 };
1124
1125 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
1126
/* Generic emitters for the F6 (byte) / F7 (word/long/quad) unary group;
 * OP is one of the X86_NOT..X86_IDIV extensions above.  These are the
 * one-operand forms that implicitly use AL/AX/EAX/RAX (and DX/EDX/RDX)
 * for the multiply/divide operations. */
1127 #define _UNARYBr(OP, RS)		(_REXBrr(0, RS),		_O_Mrm		(0xf6		,_b11,OP    ,_r1(RS)				))
1128 #define _UNARYBm(OP, MD, MB, MI, MS)	(_REXBrm(0, MB, MI),		_O_r_X		(0xf6		,OP		,MD,MB,MI,MS		))
1129 #define _UNARYWr(OP, RS)		(_d16(), _REXLrr(0, RS),	_O_Mrm		(0xf7		,_b11,OP    ,_r2(RS)				))
1130 #define _UNARYWm(OP, MD, MB, MI, MS)	(_d16(), _REXLmr(MB, MI, 0),	_O_r_X		(0xf7		,OP		,MD,MB,MI,MS		))
1131 #define _UNARYLr(OP, RS)		(_REXLrr(0, RS),		_O_Mrm		(0xf7		,_b11,OP    ,_r4(RS)				))
1132 #define _UNARYLm(OP, MD, MB, MI, MS)	(_REXLmr(MB, MI, 0),		_O_r_X		(0xf7		,OP		,MD,MB,MI,MS		))
1133 #define _UNARYQr(OP, RS)		(_REXQrr(0, RS),		_O_Mrm		(0xf7		,_b11,OP    ,_r8(RS)				))
1134 #define _UNARYQm(OP, MD, MB, MI, MS)	(_REXQmr(MB, MI, 0),		_O_r_X		(0xf7		,OP		,MD,MB,MI,MS		))
1135
/* Mnemonics for the unary group.  NOT/NEG operate in place on the given
 * operand; MUL/IMUL/DIV/IDIV are the implicit-accumulator forms. */
1136 #define NOTBr(RS)			_UNARYBr(X86_NOT, RS)
1137 #define NOTBm(MD, MB, MI, MS)		_UNARYBm(X86_NOT, MD, MB, MI, MS)
1138 #define NOTWr(RS)			_UNARYWr(X86_NOT, RS)
1139 #define NOTWm(MD, MB, MI, MS)		_UNARYWm(X86_NOT, MD, MB, MI, MS)
1140 #define NOTLr(RS)			_UNARYLr(X86_NOT, RS)
1141 #define NOTLm(MD, MB, MI, MS)		_UNARYLm(X86_NOT, MD, MB, MI, MS)
1142 #define NOTQr(RS)			_UNARYQr(X86_NOT, RS)
1143 #define NOTQm(MD, MB, MI, MS)		_UNARYQm(X86_NOT, MD, MB, MI, MS)
1144
1145 #define NEGBr(RS)			_UNARYBr(X86_NEG, RS)
1146 #define NEGBm(MD, MB, MI, MS)		_UNARYBm(X86_NEG, MD, MB, MI, MS)
1147 #define NEGWr(RS)			_UNARYWr(X86_NEG, RS)
1148 #define NEGWm(MD, MB, MI, MS)		_UNARYWm(X86_NEG, MD, MB, MI, MS)
1149 #define NEGLr(RS)			_UNARYLr(X86_NEG, RS)
1150 #define NEGLm(MD, MB, MI, MS)		_UNARYLm(X86_NEG, MD, MB, MI, MS)
1151 #define NEGQr(RS)			_UNARYQr(X86_NEG, RS)
1152 #define NEGQm(MD, MB, MI, MS)		_UNARYQm(X86_NEG, MD, MB, MI, MS)
1153
1154 #define MULBr(RS)			_UNARYBr(X86_MUL, RS)
1155 #define MULBm(MD, MB, MI, MS)		_UNARYBm(X86_MUL, MD, MB, MI, MS)
1156 #define MULWr(RS)			_UNARYWr(X86_MUL, RS)
1157 #define MULWm(MD, MB, MI, MS)		_UNARYWm(X86_MUL, MD, MB, MI, MS)
1158 #define MULLr(RS)			_UNARYLr(X86_MUL, RS)
1159 #define MULLm(MD, MB, MI, MS)		_UNARYLm(X86_MUL, MD, MB, MI, MS)
1160 #define MULQr(RS)			_UNARYQr(X86_MUL, RS)
1161 #define MULQm(MD, MB, MI, MS)		_UNARYQm(X86_MUL, MD, MB, MI, MS)
1162
1163 #define IMULBr(RS)			_UNARYBr(X86_IMUL, RS)
1164 #define IMULBm(MD, MB, MI, MS)		_UNARYBm(X86_IMUL, MD, MB, MI, MS)
1165 #define IMULWr(RS)			_UNARYWr(X86_IMUL, RS)
1166 #define IMULWm(MD, MB, MI, MS)		_UNARYWm(X86_IMUL, MD, MB, MI, MS)
1167 #define IMULLr(RS)			_UNARYLr(X86_IMUL, RS)
1168 #define IMULLm(MD, MB, MI, MS)		_UNARYLm(X86_IMUL, MD, MB, MI, MS)
1169 #define IMULQr(RS)			_UNARYQr(X86_IMUL, RS)
1170 #define IMULQm(MD, MB, MI, MS)		_UNARYQm(X86_IMUL, MD, MB, MI, MS)
1171
1172 #define DIVBr(RS)			_UNARYBr(X86_DIV, RS)
1173 #define DIVBm(MD, MB, MI, MS)		_UNARYBm(X86_DIV, MD, MB, MI, MS)
1174 #define DIVWr(RS)			_UNARYWr(X86_DIV, RS)
1175 #define DIVWm(MD, MB, MI, MS)		_UNARYWm(X86_DIV, MD, MB, MI, MS)
1176 #define DIVLr(RS)			_UNARYLr(X86_DIV, RS)
1177 #define DIVLm(MD, MB, MI, MS)		_UNARYLm(X86_DIV, MD, MB, MI, MS)
1178 #define DIVQr(RS)			_UNARYQr(X86_DIV, RS)
1179 #define DIVQm(MD, MB, MI, MS)		_UNARYQm(X86_DIV, MD, MB, MI, MS)
1180
1181 #define IDIVBr(RS)			_UNARYBr(X86_IDIV, RS)
1182 #define IDIVBm(MD, MB, MI, MS)		_UNARYBm(X86_IDIV, MD, MB, MI, MS)
1183 #define IDIVWr(RS)			_UNARYWr(X86_IDIV, RS)
1184 #define IDIVWm(MD, MB, MI, MS)		_UNARYWm(X86_IDIV, MD, MB, MI, MS)
1185 #define IDIVLr(RS)			_UNARYLr(X86_IDIV, RS)
1186 #define IDIVLm(MD, MB, MI, MS)		_UNARYLm(X86_IDIV, MD, MB, MI, MS)
1187 #define IDIVQr(RS)			_UNARYQr(X86_IDIV, RS)
1188 #define IDIVQm(MD, MB, MI, MS)		_UNARYQm(X86_IDIV, MD, MB, MI, MS)
1189
1190 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
1191
/* Two-operand IMUL (0F AF: RD *= RS/mem) and three-operand immediate IMUL
 * (69 /r: RD = src * imm).  The *ir forms reuse RD as both source and
 * destination.  The _Os_* emitters may pick the sign-extended imm8
 * encoding (6B) when the immediate fits (helpers defined elsewhere —
 * presumably; confirm against the emitter definitions). */
1192 #define IMULWrr(RS, RD)			(_d16(), _REXLrr(RD, RS),	_OO_Mrm		(0x0faf		,_b11,_r2(RD),_r2(RS)				))
1193 #define IMULWmr(MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD),	_OO_r_X		(0x0faf		,_r2(RD)		,MD,MB,MI,MS		))
1194
1195 #define IMULWirr(IM,RS,RD)		(_d16(), _REXLrr(RS, RD),	_Os_Mrm_sW	(0x69		,_b11,_r2(RS),_r2(RD)		,_su16(IM)	))
1196 #define IMULWimr(IM,MD,MB,MI,MS,RD)	(_d16(), _REXLmr(MB, MI, RD),	_Os_r_X_sW	(0x69		,_r2(RD)		,MD,MB,MI,MS	,_su16(IM)	))
1197
1198 #define IMULLir(IM, RD)			(_REXLrr(0, RD),		_Os_Mrm_sL	(0x69		,_b11,_r4(RD),_r4(RD)		,IM	))
1199 #define IMULLrr(RS, RD)			(_REXLrr(RD, RS),		_OO_Mrm		(0x0faf		,_b11,_r4(RD),_r4(RS)				))
1200 #define IMULLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),		_OO_r_X		(0x0faf		,_r4(RD)		,MD,MB,MI,MS		))
1201
1202 #define IMULQir(IM, RD)			(_REXQrr(0, RD),		_Os_Mrm_sL	(0x69		,_b11,_r8(RD),_r8(RD)		,IM	))
1203 #define IMULQrr(RS, RD)			(_REXQrr(RD, RS),		_OO_Mrm		(0x0faf		,_b11,_r8(RD),_r8(RS)				))
1204 #define IMULQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),		_OO_r_X		(0x0faf		,_r8(RD)		,MD,MB,MI,MS		))
1205
1206 #define IMULLirr(IM,RS,RD)		(_REXLrr(RS, RD),		_Os_Mrm_sL	(0x69		,_b11,_r4(RS),_r4(RD)		,IM	))
1207 #define IMULLimr(IM,MD,MB,MI,MS,RD)	(_REXLmr(MB, MI, RD),		_Os_r_X_sL	(0x69		,_r4(RD)		,MD,MB,MI,MS	,IM	))
1208
1209 #define IMULQirr(IM,RS,RD)		(_REXQrr(RS, RD),		_Os_Mrm_sL	(0x69		,_b11,_r8(RS),_r8(RD)		,IM	))
1210 #define IMULQimr(IM,MD,MB,MI,MS,RD)	(_REXQmr(MB, MI, RD),		_Os_r_X_sL	(0x69		,_r8(RD)		,MD,MB,MI,MS	,IM	))
1211
1212
1213 /* --- Control Flow related instructions ----------------------------------- */
1214
/* x86 condition codes (the low nibble of the Jcc/SETcc/CMOVcc opcodes).
 * Several mnemonic aliases intentionally share the same value,
 * e.g. B/C/NAE, E/Z, L/NGE. */
1215 enum {
1216   X86_CC_O   = 0x0,
1217   X86_CC_NO  = 0x1,
1218   X86_CC_NAE = 0x2,
1219   X86_CC_B   = 0x2,
1220   X86_CC_C   = 0x2,
1221   X86_CC_AE  = 0x3,
1222   X86_CC_NB  = 0x3,
1223   X86_CC_NC  = 0x3,
1224   X86_CC_E   = 0x4,
1225   X86_CC_Z   = 0x4,
1226   X86_CC_NE  = 0x5,
1227   X86_CC_NZ  = 0x5,
1228   X86_CC_BE  = 0x6,
1229   X86_CC_NA  = 0x6,
1230   X86_CC_A   = 0x7,
1231   X86_CC_NBE = 0x7,
1232   X86_CC_S   = 0x8,
1233   X86_CC_NS  = 0x9,
1234   X86_CC_P   = 0xa,
1235   X86_CC_PE  = 0xa,
1236   X86_CC_NP  = 0xb,
1237   X86_CC_PO  = 0xb,
1238   X86_CC_L   = 0xc,
1239   X86_CC_NGE = 0xc,
1240   X86_CC_GE  = 0xd,
1241   X86_CC_NL  = 0xd,
1242   X86_CC_LE  = 0xe,
1243   X86_CC_NG  = 0xe,
1244   X86_CC_G   = 0xf,
1245   X86_CC_NLE = 0xf,
1246 };
1247
1248 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
1249
/* CALL/JMP.  Relative forms (CALLm/JMPm/JMPSm) take the displacement as
 * an immediate; indirect-through-register forms (CALLsr/JMPsr) pick the
 * 32- or 64-bit encoding based on X86_TARGET_64BIT, since FF /2 and
 * FF /4 default to 64-bit operands in long mode. */
1250 // FIXME: no prefix is available to encode a 32-bit operand size in 64-bit mode
1251 #define CALLm(M)							_O_D32		(0xe8					,(int)(M)		)
1252 #define _CALLLsr(R)			(_REXLrr(0, R),			_O_Mrm		(0xff		,_b11,_b010,_r4(R)				))
1253 #define _CALLQsr(R)			(_REXQrr(0, R),			_O_Mrm		(0xff		,_b11,_b010,_r8(R)				))
1254 #define CALLsr(R)			( X86_TARGET_64BIT ? _CALLQsr(R) : _CALLLsr(R))
1255 #define CALLsm(D,B,I,S)			(_REXLrm(0, B, I),		_O_r_X		(0xff		,_b010		,(int)(D),B,I,S		))
1256
1257 // FIXME: no prefix is available to encode a 32-bit operand size in 64-bit mode
1258 #define JMPSm(M)							_O_D8		(0xeb					,(int)(M)		)
1259 #define JMPm(M)								_O_D32		(0xe9					,(int)(M)		)
1260 #define _JMPLsr(R)			(_REXLrr(0, R),			_O_Mrm		(0xff		,_b11,_b100,_r4(R)				))
1261 #define _JMPQsr(R)			(_REXQrr(0, R),			_O_Mrm		(0xff		,_b11,_b100,_r8(R)				))
1262 #define JMPsr(R)			( X86_TARGET_64BIT ? _JMPQsr(R) : _JMPLsr(R))
1263 #define JMPsm(D,B,I,S)			(_REXLrm(0, B, I),		_O_r_X		(0xff		,_b100		,(int)(D),B,I,S		))
1264
1265 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
/* Short (rel8) conditional jumps, opcode 70+cc.  JCCSii takes the raw
 * 8-bit displacement; JCCSim goes through the 8-bit displacement
 * formatter.  The per-condition macros below just pass the cc nibble. */
1266 #define JCCSii(CC, D)							_O_B		(0x70|(CC)				,(_sc)(int)(D)		)
1267 #define JCCSim(CC, D)							_O_D8		(0x70|(CC)				,(int)(D)		)
1268 #define JOSm(D)				JCCSim(0x0, D)
1269 #define JNOSm(D)			JCCSim(0x1, D)
1270 #define JBSm(D)				JCCSim(0x2, D)
1271 #define JNAESm(D)			JCCSim(0x2, D)
1272 #define JNBSm(D)			JCCSim(0x3, D)
1273 #define JAESm(D)			JCCSim(0x3, D)
1274 #define JESm(D)				JCCSim(0x4, D)
1275 #define JZSm(D)				JCCSim(0x4, D)
1276 #define JNESm(D)			JCCSim(0x5, D)
1277 #define JNZSm(D)			JCCSim(0x5, D)
1278 #define JBESm(D)			JCCSim(0x6, D)
1279 #define JNASm(D)			JCCSim(0x6, D)
1280 #define JNBESm(D)			JCCSim(0x7, D)
1281 #define JASm(D)				JCCSim(0x7, D)
1282 #define JSSm(D)				JCCSim(0x8, D)
1283 #define JNSSm(D)			JCCSim(0x9, D)
1284 #define JPSm(D)				JCCSim(0xa, D)
1285 #define JPESm(D)			JCCSim(0xa, D)
1286 #define JNPSm(D)			JCCSim(0xb, D)
1287 #define JPOSm(D)			JCCSim(0xb, D)
1288 #define JLSm(D)				JCCSim(0xc, D)
1289 #define JNGESm(D)			JCCSim(0xc, D)
1290 #define JNLSm(D)			JCCSim(0xd, D)
1291 #define JGESm(D)			JCCSim(0xd, D)
1292 #define JLESm(D)			JCCSim(0xe, D)
1293 #define JNGSm(D)			JCCSim(0xe, D)
1294 #define JNLESm(D)			JCCSim(0xf, D)
1295 #define JGSm(D)				JCCSim(0xf, D)
1296
1297 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
/* Near (rel32) conditional jumps, two-byte opcode 0F 80+cc. */
1298 #define JCCii(CC, D)							_OO_L		(0x0f80|(CC)				,(int)(D)		)
1299 #define JCCim(CC, D)							_OO_D32		(0x0f80|(CC)				,(int)(D)		)
1300 #define JOm(D)				JCCim(0x0, D)
1301 #define JNOm(D)				JCCim(0x1, D)
1302 #define JBm(D)				JCCim(0x2, D)
1303 #define JNAEm(D)			JCCim(0x2, D)
1304 #define JNBm(D)				JCCim(0x3, D)
1305 #define JAEm(D)				JCCim(0x3, D)
1306 #define JEm(D)				JCCim(0x4, D)
1307 #define JZm(D)				JCCim(0x4, D)
1308 #define JNEm(D)				JCCim(0x5, D)
1309 #define JNZm(D)				JCCim(0x5, D)
1310 #define JBEm(D)			JCCim(0x6, D)
1311 #define JNAm(D)				JCCim(0x6, D)
1312 #define JNBEm(D)			JCCim(0x7, D)
1313 #define JAm(D)				JCCim(0x7, D)
1314 #define JSm(D)				JCCim(0x8, D)
1315 #define JNSm(D)				JCCim(0x9, D)
1316 #define JPm(D)				JCCim(0xa, D)
1317 #define JPEm(D)				JCCim(0xa, D)
1318 #define JNPm(D)				JCCim(0xb, D)
1319 #define JPOm(D)				JCCim(0xb, D)
1320 #define JLm(D)				JCCim(0xc, D)
1321 #define JNGEm(D)			JCCim(0xc, D)
1322 #define JNLm(D)				JCCim(0xd, D)
1323 #define JGEm(D)				JCCim(0xd, D)
1324 #define JLEm(D)				JCCim(0xe, D)
1325 #define JNGm(D)				JCCim(0xe, D)
1326 #define JNLEm(D)			JCCim(0xf, D)
1327 #define JGm(D)				JCCim(0xf, D)
1328
1329 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
/* SETcc into an 8-bit register (opcode 0F 90+cc, /0). */
1330 #define SETCCir(CC, RD)			(_REXBrr(0, RD),		_OO_Mrm		(0x0f90|(CC)	,_b11,_b000,_r1(RD)				))
1331 #define SETOr(RD)			SETCCir(0x0,RD)
1332 #define SETNOr(RD)			SETCCir(0x1,RD)
1333 #define SETBr(RD)			SETCCir(0x2,RD)
1334 #define SETNAEr(RD)			SETCCir(0x2,RD)
1335 #define SETNBr(RD)			SETCCir(0x3,RD)
1336 #define SETAEr(RD)			SETCCir(0x3,RD)
1337 #define SETEr(RD)			SETCCir(0x4,RD)
1338 #define SETZr(RD)			SETCCir(0x4,RD)
1339 #define SETNEr(RD)			SETCCir(0x5,RD)
1340 #define SETNZr(RD)			SETCCir(0x5,RD)
1341 #define SETBEr(RD)			SETCCir(0x6,RD)
1342 #define SETNAr(RD)			SETCCir(0x6,RD)
1343 #define SETNBEr(RD)			SETCCir(0x7,RD)
1344 #define SETAr(RD)			SETCCir(0x7,RD)
1345 #define SETSr(RD)			SETCCir(0x8,RD)
1346 #define SETNSr(RD)			SETCCir(0x9,RD)
1347 #define SETPr(RD)			SETCCir(0xa,RD)
1348 #define SETPEr(RD)			SETCCir(0xa,RD)
1349 #define SETNPr(RD)			SETCCir(0xb,RD)
1350 #define SETPOr(RD)			SETCCir(0xb,RD)
1351 #define SETLr(RD)			SETCCir(0xc,RD)
1352 #define SETNGEr(RD)			SETCCir(0xc,RD)
1353 #define SETNLr(RD)			SETCCir(0xd,RD)
1354 #define SETGEr(RD)			SETCCir(0xd,RD)
1355 #define SETLEr(RD)			SETCCir(0xe,RD)
1356 #define SETNGr(RD)			SETCCir(0xe,RD)
1357 #define SETNLEr(RD)			SETCCir(0xf,RD)
1358 #define SETGr(RD)			SETCCir(0xf,RD)
1359
1360 /*									_format		 Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */
/* SETcc into an 8-bit memory operand (0F 90+cc, /0). */
1361 #define SETCCim(CC,MD,MB,MI,MS)		(_REXBrm(0, MB, MI),		_OO_r_X		(0x0f90|(CC)	,_b000		,MD,MB,MI,MS		))
1362 #define SETOm(D, B, I, S)		SETCCim(0x0, D, B, I, S)
1363 #define SETNOm(D, B, I, S)		SETCCim(0x1, D, B, I, S)
1364 #define SETBm(D, B, I, S)		SETCCim(0x2, D, B, I, S)
1365 #define SETNAEm(D, B, I, S)		SETCCim(0x2, D, B, I, S)
1366 #define SETNBm(D, B, I, S)		SETCCim(0x3, D, B, I, S)
1367 #define SETAEm(D, B, I, S)		SETCCim(0x3, D, B, I, S)
1368 #define SETEm(D, B, I, S)		SETCCim(0x4, D, B, I, S)
1369 #define SETZm(D, B, I, S)		SETCCim(0x4, D, B, I, S)
1370 #define SETNEm(D, B, I, S)		SETCCim(0x5, D, B, I, S)
1371 #define SETNZm(D, B, I, S)		SETCCim(0x5, D, B, I, S)
1372 #define SETBEm(D, B, I, S)		SETCCim(0x6, D, B, I, S)
1373 #define SETNAm(D, B, I, S)		SETCCim(0x6, D, B, I, S)
1374 #define SETNBEm(D, B, I, S)		SETCCim(0x7, D, B, I, S)
1375 #define SETAm(D, B, I, S)		SETCCim(0x7, D, B, I, S)
1376 #define SETSm(D, B, I, S)		SETCCim(0x8, D, B, I, S)
1377 #define SETNSm(D, B, I, S)		SETCCim(0x9, D, B, I, S)
1378 #define SETPm(D, B, I, S)		SETCCim(0xa, D, B, I, S)
1379 #define SETPEm(D, B, I, S)		SETCCim(0xa, D, B, I, S)
1380 #define SETNPm(D, B, I, S)		SETCCim(0xb, D, B, I, S)
1381 #define SETPOm(D, B, I, S)		SETCCim(0xb, D, B, I, S)
1382 #define SETLm(D, B, I, S)		SETCCim(0xc, D, B, I, S)
1383 #define SETNGEm(D, B, I, S)		SETCCim(0xc, D, B, I, S)
1384 #define SETNLm(D, B, I, S)		SETCCim(0xd, D, B, I, S)
1385 #define SETGEm(D, B, I, S)		SETCCim(0xd, D, B, I, S)
1386 #define SETLEm(D, B, I, S)		SETCCim(0xe, D, B, I, S)
1387 #define SETNGm(D, B, I, S)		SETCCim(0xe, D, B, I, S)
1388 #define SETNLEm(D, B, I, S)		SETCCim(0xf, D, B, I, S)
1389 #define SETGm(D, B, I, S)		SETCCim(0xf, D, B, I, S)
1390
1391 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1392 #define CMOVWrr(CC,RS,RD) (_d16(), _REXLrr(RD, RS), _OO_Mrm (0x0f40|(CC) ,_b11,_r2(RD),_r2(RS) ))
1393 #define CMOVWmr(CC,MD,MB,MI,MS,RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0f40|(CC) ,_r2(RD) ,MD,MB,MI,MS ))
1394 #define CMOVLrr(CC,RS,RD) (_REXLrr(RD, RS), _OO_Mrm (0x0f40|(CC) ,_b11,_r4(RD),_r4(RS) ))
1395 #define CMOVLmr(CC,MD,MB,MI,MS,RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0f40|(CC) ,_r4(RD) ,MD,MB,MI,MS ))
1396 #define CMOVQrr(CC,RS,RD) (_REXQrr(RD, RS), _OO_Mrm (0x0f40|(CC) ,_b11,_r8(RD),_r8(RS) ))
1397 #define CMOVQmr(CC,MD,MB,MI,MS,RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0f40|(CC) ,_r8(RD) ,MD,MB,MI,MS ))
1398
1399
/* --- Push/Pop instructions ----------------------------------------------- */

/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* POP: 16-/32-bit register and memory forms are 32-bit-target only
 * (_m32only); the 64-bit forms are 64-bit-target only (_m64only).
 * Register form is the short 0x58+reg encoding; memory form is 8F /0.
 */
#define POPWr(RD)			_m32only((_d16(),	_Or		(0x58,_r2(RD)							)))
#define POPWm(MD, MB, MI, MS)		_m32only((_d16(),	_O_r_X		(0x8f		,_b000		,MD,MB,MI,MS		)))

#define POPLr(RD)			_m32only(		_Or		(0x58,_r4(RD)							))
#define POPLm(MD, MB, MI, MS)		_m32only(		_O_r_X		(0x8f		,_b000		,MD,MB,MI,MS		))

#define POPQr(RD)			_m64only((_REXQr(RD),	_Or		(0x58,_r8(RD)							)))
#define POPQm(MD, MB, MI, MS)		_m64only((_REXQm(MB, MI), _O_r_X	(0x8f		,_b000		,MD,MB,MI,MS		)))

/* PUSH r16: short 0x50+reg encoding with operand-size prefix. */
#define PUSHWr(RS)			_m32only((_d16(),	_Or		(0x50,_r2(RS)							)))
/* PUSH r/m16 (opcode FF /6, with 16-bit operand-size prefix); 32-bit
 * targets only.  Fixed: a stray comma after the opcode (`0xff, ,_b110`)
 * injected an empty macro argument, giving _O_r_X the wrong argument
 * count and breaking expansion (cf. the correct PUSHQm form below).
 */
#define PUSHWm(MD, MB, MI, MS)		_m32only((_d16(),	_O_r_X		(0xff		,_b110		,MD,MB,MI,MS		)))
/* PUSH immediate uses the sign-extendable 0x68 encoding (_Os_*). */
#define PUSHWi(IM)			_m32only((_d16(),	_Os_sW		(0x68							,IM	)))

#define PUSHLr(RS)			_m32only(		_Or		(0x50,_r4(RS)							))
#define PUSHLm(MD, MB, MI, MS)		_m32only(		_O_r_X		(0xff		,_b110		,MD,MB,MI,MS		))
#define PUSHLi(IM)			_m32only(		_Os_sL		(0x68							,IM	))

#define PUSHQr(RS)			_m64only((_REXQr(RS),	_Or		(0x50,_r8(RS)							)))
#define PUSHQm(MD, MB, MI, MS)		_m64only((_REXQm(MB, MI), _O_r_X	(0xff		,_b110		,MD,MB,MI,MS		)))
#define PUSHQi(IM)			_m64only(		_Os_sL		(0x68							,IM	))

/* POPA/PUSHA (0x61/0x60): the A forms add the 16-bit prefix, the AD
 * forms are the plain 32-bit encodings.  POPF/PUSHF move EFLAGS.
 */
#define POPA()				(_d16(),		_O		(0x61								))
#define POPAD()				_O			(0x61								)

#define PUSHA()				(_d16(),		_O		(0x60								))
#define PUSHAD()			_O			(0x60								)

#define POPF()				_O			(0x9d								)
#define PUSHF()				_O			(0x9c								)
1433
1434
/* --- Test instructions --------------------------------------------------- */

/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* TEST in B/W/L/Q sizes.  The *ir forms special-case the accumulator
 * (AL/AX/EAX/RAX) when X86_OPTIMIZE_ALU is set, using the short
 * A8/A9 encodings instead of the generic F6/F7 /0 ModRM form.
 */
#define TESTBrr(RS, RD)			(_REXBrr(RS, RD),	_O_Mrm		(0x84		,_b11,_r1(RS),_r1(RD)				))
#define TESTBrm(RS, MD, MB, MI, MS)	(_REXBrm(RS, MB, MI),	_O_r_X		(0x84		,_r1(RS)	,MD,MB,MI,MS		))
#define TESTBir(IM, RD)			(X86_OPTIMIZE_ALU && ((RD) == X86_AL) ? \
					(_REXBrr(0, RD),	_O_B		(0xa8							,_u8(IM))) : \
					(_REXBrr(0, RD),	_O_Mrm_B	(0xf6		,_b11,_b000  ,_r1(RD)			,_u8(IM))) )
#define TESTBim(IM, MD, MB, MI, MS)	(_REXBrm(0, MB, MI),	_O_r_X_B	(0xf6		,_b000		,MD,MB,MI,MS	,_u8(IM)))

#define TESTWrr(RS, RD)			(_d16(), _REXLrr(RS, RD), _O_Mrm	(0x85		,_b11,_r2(RS),_r2(RD)				))
#define TESTWrm(RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI), _O_r_X	(0x85		,_r2(RS)	,MD,MB,MI,MS		))
#define TESTWir(IM, RD)			(X86_OPTIMIZE_ALU && ((RD) == X86_AX) ? \
					(_d16(), _REXLrr(0, RD), _O_W		(0xa9							,_u16(IM))) : \
					(_d16(), _REXLrr(0, RD), _O_Mrm_W	(0xf7		,_b11,_b000  ,_r2(RD)			,_u16(IM))) )
#define TESTWim(IM, MD, MB, MI, MS)	(_d16(), _REXLrm(0, MB, MI), _O_r_X_W	(0xf7		,_b000		,MD,MB,MI,MS	,_u16(IM)))

#define TESTLrr(RS, RD)			(_REXLrr(RS, RD),	_O_Mrm		(0x85		,_b11,_r4(RS),_r4(RD)				))
#define TESTLrm(RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI),	_O_r_X		(0x85		,_r4(RS)	,MD,MB,MI,MS		))
#define TESTLir(IM, RD)			(X86_OPTIMIZE_ALU && ((RD) == X86_EAX) ? \
					(_REXLrr(0, RD),	_O_L		(0xa9							,IM	)) : \
					(_REXLrr(0, RD),	_O_Mrm_L	(0xf7		,_b11,_b000  ,_r4(RD)			,IM	)) )
#define TESTLim(IM, MD, MB, MI, MS)	(_REXLrm(0, MB, MI),	_O_r_X_L	(0xf7		,_b000		,MD,MB,MI,MS	,IM	))

/* Note: the 64-bit immediate forms still emit a 32-bit immediate
 * (sign-extended by the CPU), matching the x86-64 TEST encoding. */
#define TESTQrr(RS, RD)			(_REXQrr(RS, RD),	_O_Mrm		(0x85		,_b11,_r8(RS),_r8(RD)				))
#define TESTQrm(RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI),	_O_r_X		(0x85		,_r8(RS)	,MD,MB,MI,MS		))
#define TESTQir(IM, RD)			(X86_OPTIMIZE_ALU && ((RD) == X86_RAX) ? \
					(_REXQrr(0, RD),	_O_L		(0xa9							,IM	)) : \
					(_REXQrr(0, RD),	_O_Mrm_L	(0xf7		,_b11,_b000  ,_r8(RD)			,IM	)) )
#define TESTQim(IM, MD, MB, MI, MS)	(_REXQrm(0, MB, MI),	_O_r_X_L	(0xf7		,_b000		,MD,MB,MI,MS	,IM	))
1466
1467
/* --- Exchange instructions ----------------------------------------------- */

/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* CMPXCHG (0F B0/B1): compare-and-swap with the accumulator. */
#define CMPXCHGBrr(RS, RD)		(_REXBrr(RS, RD),	_OO_Mrm		(0x0fb0		,_b11,_r1(RS),_r1(RD)				))
#define CMPXCHGBrm(RS, MD, MB, MI, MS)	(_REXBrm(RS, MB, MI),	_OO_r_X		(0x0fb0		,_r1(RS)	,MD,MB,MI,MS		))

#define CMPXCHGWrr(RS, RD)		(_d16(), _REXLrr(RS, RD), _OO_Mrm	(0x0fb1		,_b11,_r2(RS),_r2(RD)				))
#define CMPXCHGWrm(RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI), _OO_r_X	(0x0fb1		,_r2(RS)	,MD,MB,MI,MS		))

#define CMPXCHGLrr(RS, RD)		(_REXLrr(RS, RD),	_OO_Mrm		(0x0fb1		,_b11,_r4(RS),_r4(RD)				))
#define CMPXCHGLrm(RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI),	_OO_r_X		(0x0fb1		,_r4(RS)	,MD,MB,MI,MS		))

#define CMPXCHGQrr(RS, RD)		(_REXQrr(RS, RD),	_OO_Mrm		(0x0fb1		,_b11,_r8(RS),_r8(RD)				))
#define CMPXCHGQrm(RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI),	_OO_r_X		(0x0fb1		,_r8(RS)	,MD,MB,MI,MS		))

/* XADD (0F C0/C1): exchange and add. */
#define XADDBrr(RS, RD)			(_REXBrr(RS, RD),	_OO_Mrm		(0x0fc0		,_b11,_r1(RS),_r1(RD)				))
#define XADDBrm(RS, MD, MB, MI, MS)	(_REXBrm(RS, MB, MI),	_OO_r_X		(0x0fc0		,_r1(RS)	,MD,MB,MI,MS		))

#define XADDWrr(RS, RD)			(_d16(), _REXLrr(RS, RD), _OO_Mrm	(0x0fc1		,_b11,_r2(RS),_r2(RD)				))
#define XADDWrm(RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI), _OO_r_X	(0x0fc1		,_r2(RS)	,MD,MB,MI,MS		))

#define XADDLrr(RS, RD)			(_REXLrr(RS, RD),	_OO_Mrm		(0x0fc1		,_b11,_r4(RS),_r4(RD)				))
#define XADDLrm(RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI),	_OO_r_X		(0x0fc1		,_r4(RS)	,MD,MB,MI,MS		))

#define XADDQrr(RS, RD)			(_REXQrr(RS, RD),	_OO_Mrm		(0x0fc1		,_b11,_r8(RS),_r8(RD)				))
#define XADDQrm(RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI),	_OO_r_X		(0x0fc1		,_r8(RS)	,MD,MB,MI,MS		))

/* XCHG (86/87): plain exchange.  NOTE: the rm forms carry an implicit
 * LOCK on real hardware. */
#define XCHGBrr(RS, RD)			(_REXBrr(RS, RD),	_O_Mrm		(0x86		,_b11,_r1(RS),_r1(RD)				))
#define XCHGBrm(RS, MD, MB, MI, MS)	(_REXBrm(RS, MB, MI),	_O_r_X		(0x86		,_r1(RS)	,MD,MB,MI,MS		))

#define XCHGWrr(RS, RD)			(_d16(), _REXLrr(RS, RD), _O_Mrm	(0x87		,_b11,_r2(RS),_r2(RD)				))
#define XCHGWrm(RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI), _O_r_X	(0x87		,_r2(RS)	,MD,MB,MI,MS		))

#define XCHGLrr(RS, RD)			(_REXLrr(RS, RD),	_O_Mrm		(0x87		,_b11,_r4(RS),_r4(RD)				))
#define XCHGLrm(RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI),	_O_r_X		(0x87		,_r4(RS)	,MD,MB,MI,MS		))

#define XCHGQrr(RS, RD)			(_REXQrr(RS, RD),	_O_Mrm		(0x87		,_b11,_r8(RS),_r8(RD)				))
#define XCHGQrm(RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI),	_O_r_X		(0x87		,_r8(RS)	,MD,MB,MI,MS		))
1507
1508
/* --- Increment/Decrement instructions ------------------------------------ */

/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* DEC/INC.  On 32-bit targets the short one-byte 0x48+reg / 0x40+reg
 * encodings are used; on 64-bit targets those bytes are REX prefixes,
 * so the generic FF /1 (DEC) and FF /0 (INC) ModRM forms are emitted
 * instead — hence the X86_TARGET_64BIT selection below.
 */
#define DECBm(MD, MB, MI, MS)		(_REXBrm(0, MB, MI),	_O_r_X		(0xfe		,_b001		,MD,MB,MI,MS		))
#define DECBr(RD)			(_REXBrr(0, RD),	_O_Mrm		(0xfe		,_b11,_b001  ,_r1(RD)				))

#define DECWm(MD, MB, MI, MS)		(_d16(), _REXLrm(0, MB, MI), _O_r_X	(0xff		,_b001		,MD,MB,MI,MS		))
#define DECWr(RD)			(! X86_TARGET_64BIT ? (_d16(),	_Or	(0x48,_r2(RD)							)) : \
					(_d16(), _REXLrr(0, RD), _O_Mrm		(0xff		,_b11,_b001  ,_r2(RD)				)))

#define DECLm(MD, MB, MI, MS)		(_REXLrm(0, MB, MI),	_O_r_X		(0xff		,_b001		,MD,MB,MI,MS		))
#define DECLr(RD)			(! X86_TARGET_64BIT ?		_Or	(0x48,_r4(RD)							) : \
					(_REXLrr(0, RD),	_O_Mrm		(0xff		,_b11,_b001  ,_r4(RD)				)))

#define DECQm(MD, MB, MI, MS)		(_REXQrm(0, MB, MI),	_O_r_X		(0xff		,_b001		,MD,MB,MI,MS		))
#define DECQr(RD)			(_REXQrr(0, RD),	_O_Mrm		(0xff		,_b11,_b001  ,_r8(RD)				))

#define INCBm(MD, MB, MI, MS)		(_REXBrm(0, MB, MI),	_O_r_X		(0xfe		,_b000		,MD,MB,MI,MS		))
#define INCBr(RD)			(_REXBrr(0, RD),	_O_Mrm		(0xfe		,_b11,_b000  ,_r1(RD)				))

#define INCWm(MD, MB, MI, MS)		(_d16(), _REXLrm(0, MB, MI), _O_r_X	(0xff		,_b000		,MD,MB,MI,MS		))
#define INCWr(RD)			(! X86_TARGET_64BIT ? (_d16(),	_Or	(0x40,_r2(RD)							)) : \
					(_d16(), _REXLrr(0, RD), _O_Mrm		(0xff		,_b11,_b000  ,_r2(RD)				)) )

#define INCLm(MD, MB, MI, MS)		(_REXLrm(0, MB, MI),	_O_r_X		(0xff		,_b000		,MD,MB,MI,MS		))
#define INCLr(RD)			(! X86_TARGET_64BIT ?		_Or	(0x40,_r4(RD)							) : \
					(_REXLrr(0, RD),	_O_Mrm		(0xff		,_b11,_b000  ,_r4(RD)				)))

#define INCQm(MD, MB, MI, MS)		(_REXQrm(0, MB, MI),	_O_r_X		(0xff		,_b000		,MD,MB,MI,MS		))
#define INCQr(RD)			(_REXQrr(0, RD),	_O_Mrm		(0xff		,_b11,_b000  ,_r8(RD)				))
1540
1541
/* --- Misc instructions --------------------------------------------------- */

/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* Bit scan forward/reverse (0F BC / 0F BD) in W/L/Q sizes. */
#define BSFWrr(RS, RD)			(_d16(), _REXLrr(RD, RS), _OO_Mrm	(0x0fbc		,_b11,_r2(RD),_r2(RS)				))
#define BSFWmr(MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD), _OO_r_X	(0x0fbc		,_r2(RD)	,MD,MB,MI,MS		))
#define BSRWrr(RS, RD)			(_d16(), _REXLrr(RD, RS), _OO_Mrm	(0x0fbd		,_b11,_r2(RD),_r2(RS)				))
#define BSRWmr(MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD), _OO_r_X	(0x0fbd		,_r2(RD)	,MD,MB,MI,MS		))

#define BSFLrr(RS, RD)			(_REXLrr(RD, RS),	_OO_Mrm		(0x0fbc		,_b11,_r4(RD),_r4(RS)				))
#define BSFLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0fbc		,_r4(RD)	,MD,MB,MI,MS		))
#define BSRLrr(RS, RD)			(_REXLrr(RD, RS),	_OO_Mrm		(0x0fbd		,_b11,_r4(RD),_r4(RS)				))
#define BSRLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0fbd		,_r4(RD)	,MD,MB,MI,MS		))

#define BSFQrr(RS, RD)			(_REXQrr(RD, RS),	_OO_Mrm		(0x0fbc		,_b11,_r8(RD),_r8(RS)				))
#define BSFQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),	_OO_r_X		(0x0fbc		,_r8(RD)	,MD,MB,MI,MS		))
#define BSRQrr(RS, RD)			(_REXQrr(RD, RS),	_OO_Mrm		(0x0fbd		,_b11,_r8(RD),_r8(RS)				))
#define BSRQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),	_OO_r_X		(0x0fbd		,_r8(RD)	,MD,MB,MI,MS		))

/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* MOVSX/MOVZX (0F BE/BF sign-extend, 0F B6/B7 zero-extend): widen a
 * byte or word source into a W/L/Q destination.  MOVSLQ is the
 * AMD64-only MOVSXD (opcode 63). */
#define MOVSBWrr(RS, RD)		(_d16(), _REXBLrr(RD, RS), _OO_Mrm	(0x0fbe		,_b11,_r2(RD),_r1(RS)				))
#define MOVSBWmr(MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD), _OO_r_X	(0x0fbe		,_r2(RD)	,MD,MB,MI,MS		))
#define MOVZBWrr(RS, RD)		(_d16(), _REXBLrr(RD, RS), _OO_Mrm	(0x0fb6		,_b11,_r2(RD),_r1(RS)				))
#define MOVZBWmr(MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD), _OO_r_X	(0x0fb6		,_r2(RD)	,MD,MB,MI,MS		))

#define MOVSBLrr(RS, RD)		(_REXBLrr(RD, RS),	_OO_Mrm		(0x0fbe		,_b11,_r4(RD),_r1(RS)				))
#define MOVSBLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0fbe		,_r4(RD)	,MD,MB,MI,MS		))
#define MOVZBLrr(RS, RD)		(_REXBLrr(RD, RS),	_OO_Mrm		(0x0fb6		,_b11,_r4(RD),_r1(RS)				))
#define MOVZBLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0fb6		,_r4(RD)	,MD,MB,MI,MS		))

#define MOVSBQrr(RS, RD)		(_REXQrr(RD, RS),	_OO_Mrm		(0x0fbe		,_b11,_r8(RD),_r1(RS)				))
#define MOVSBQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),	_OO_r_X		(0x0fbe		,_r8(RD)	,MD,MB,MI,MS		))
#define MOVZBQrr(RS, RD)		(_REXQrr(RD, RS),	_OO_Mrm		(0x0fb6		,_b11,_r8(RD),_r1(RS)				))
#define MOVZBQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),	_OO_r_X		(0x0fb6		,_r8(RD)	,MD,MB,MI,MS		))

#define MOVSWLrr(RS, RD)		(_REXLrr(RD, RS),	_OO_Mrm		(0x0fbf		,_b11,_r4(RD),_r2(RS)				))
#define MOVSWLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0fbf		,_r4(RD)	,MD,MB,MI,MS		))
#define MOVZWLrr(RS, RD)		(_REXLrr(RD, RS),	_OO_Mrm		(0x0fb7		,_b11,_r4(RD),_r2(RS)				))
#define MOVZWLmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0fb7		,_r4(RD)	,MD,MB,MI,MS		))

#define MOVSWQrr(RS, RD)		_m64only((_REXQrr(RD, RS), _OO_Mrm	(0x0fbf		,_b11,_r8(RD),_r2(RS)				)))
#define MOVSWQmr(MD, MB, MI, MS, RD)	_m64only((_REXQmr(MB, MI, RD), _OO_r_X	(0x0fbf		,_r8(RD)	,MD,MB,MI,MS		)))
#define MOVZWQrr(RS, RD)		_m64only((_REXQrr(RD, RS), _OO_Mrm	(0x0fb7		,_b11,_r8(RD),_r2(RS)				)))
#define MOVZWQmr(MD, MB, MI, MS, RD)	_m64only((_REXQmr(MB, MI, RD), _OO_r_X	(0x0fb7		,_r8(RD)	,MD,MB,MI,MS		)))

#define MOVSLQrr(RS, RD)		_m64only((_REXQrr(RD, RS), _O_Mrm	(0x63		,_b11,_r8(RD),_r4(RS)				)))
#define MOVSLQmr(MD, MB, MI, MS, RD)	_m64only((_REXQmr(MB, MI, RD), _O_r_X	(0x63		,_r8(RD)	,MD,MB,MI,MS		)))
1590
1591 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1592
/* LEA: load the effective address of the memory operand into RD
 * (opcode 8D; no flags affected).  Fixed: the 64-bit form previously
 * passed the destination through _r4(RD); under REX.W the destination
 * is a 64-bit register and must use _r8(RD), like every other Q-form
 * macro in this file (cf. MOVSLQmr, BSFQmr).
 */
#define LEALmr(MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD),	_O_r_X		(0x8d		,_r4(RD)	,MD,MB,MI,MS		))
#define LEAQmr(MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD),	_O_r_X		(0x8d		,_r8(RD)	,MD,MB,MI,MS		))
1595
/* BSWAP (0F C8+reg): reverse byte order of a 32-/64-bit register. */
#define BSWAPLr(R)			(_REXLrr(0, R),		_OOr		(0x0fc8,_r4(R)							))
#define BSWAPQr(R)			(_REXQrr(0, R),		_OOr		(0x0fc8,_r8(R)							))

/* Carry-flag ops: clear (F8), set (F9), complement (F5). */
#define CLC()				_O			(0xf8								)
#define STC()				_O			(0xf9								)
#define CMC()				_O			(0xf5								)

/* Direction-flag ops: clear (FC), set (FD). */
#define CLD()				_O			(0xfc								)
#define STD()				_O			(0xfd								)
1605
/* Accumulator sign-extension (opcode 98 widens within the A register,
 * 99 widens into the D register), with AT&T-style primary names and
 * Intel-style aliases.  Fixed: CLTQ()/CQTO() passed TWO arguments to
 * the one-argument _m64only() macro — the comma expression must be
 * wrapped in its own parentheses, as done by every other _m64only use
 * in this file (cf. MOVSWQrr above).
 */
#define CBTW()				(_d16(),		_O		(0x98								))
#define CWTL()				_O			(0x98								)
#define CLTQ()				_m64only((_REXQrr(0, 0), _O		(0x98								)))

#define CBW				CBTW
#define CWDE				CWTL
#define CDQE				CLTQ

#define CWTD()				(_d16(),		_O		(0x99								))
#define CLTD()				_O			(0x99								)
#define CQTO()				_m64only((_REXQrr(0, 0), _O		(0x99								)))

#define CWD				CWTD
#define CDQ				CLTD
#define CQO				CQTO

/* LAHF/SAHF (9F/9E): move flags to/from AH. */
#define LAHF()				_O			(0x9f								)
#define SAHF()				_O			(0x9e								)
1624
/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* CPUID is 0F A2.  Fixed: RDTSC was encoded as 0xff31, but the RDTSC
 * opcode is 0F 31 — the previous value would have emitted the bytes
 * FF 31, i.e. a mis-encoded instruction.
 */
#define CPUID()				_OO			(0x0fa2								)
#define RDTSC()				_OO			(0x0f31								)

/* ENTER imm16,imm8 / LEAVE / RET [imm16] / NOP. */
#define ENTERii(W, B)			_O_W_B			(0xc8						  ,_su16(W),_su8(B))

#define LEAVE()				_O			(0xc9								)
#define RET()				_O			(0xc3								)
#define RETi(IM)			_O_W			(0xc2							,_su16(IM))

#define NOP()				_O			(0x90								)
1637
1638
/* --- Media 64-bit instructions ------------------------------------------- */

/* EMMS (0F 77): empty MMX state so the x87 stack is usable again. */
#define EMMS()				_OO			(0x0f77								)
1642
1643
/* --- Media 128-bit instructions ------------------------------------------ */

/* SSE compare predicates for CMPPS/CMPPD/CMPSS/CMPSD (imm8 field).
 * Note that GT/GE/NGT/NGE intentionally share encodings with
 * LT/LE/NLT/NLE: the hardware only provides the "less" predicates,
 * so the "greater" variants are meant to be used with swapped
 * operands by the caller.
 */
enum {
  X86_SSE_CC_EQ = 0,
  X86_SSE_CC_LT = 1,
  X86_SSE_CC_GT = 1,
  X86_SSE_CC_LE = 2,
  X86_SSE_CC_GE = 2,
  X86_SSE_CC_U  = 3,
  X86_SSE_CC_NE = 4,
  X86_SSE_CC_NLT = 5,
  X86_SSE_CC_NGT = 5,
  X86_SSE_CC_NLE = 6,
  X86_SSE_CC_NGE = 6,
  X86_SSE_CC_O  = 7
};
1660
/* Second opcode byte (after the mandatory 0F) for SSE/SSE2 scalar,
 * packed and integer (MMX/XMM) operations, as consumed by the
 * _SSE*()/__SSE*() emitters below.
 * Fixed: removed the trailing comma after the last enumerator
 * (X86_SSE_PXOR) — a C89/C++ pedantic-warning issue, and inconsistent
 * with the X86_SSE_CC_* enum above.
 */
enum {
  X86_SSE_CVTIS  = 0x2a,	/* cvtsi2ss/sd, cvtpi2ps/pd: int -> float */
  X86_SSE_CVTSI  = 0x2d,	/* cvtss/sd2si, cvtps/pd2pi: float -> int */
  X86_SSE_UCOMI  = 0x2e,
  X86_SSE_COMI   = 0x2f,
  X86_SSE_CMP    = 0xc2,
  X86_SSE_SQRT   = 0x51,
  X86_SSE_RSQRT  = 0x52,
  X86_SSE_RCP    = 0x53,
  X86_SSE_AND    = 0x54,
  X86_SSE_ANDN   = 0x55,
  X86_SSE_OR     = 0x56,
  X86_SSE_XOR    = 0x57,
  X86_SSE_ADD    = 0x58,
  X86_SSE_MUL    = 0x59,
  X86_SSE_CVTSD  = 0x5a,	/* cvtss2sd / cvtsd2ss / cvtps2pd / cvtpd2ps */
  X86_SSE_CVTDT  = 0x5b,
  X86_SSE_SUB    = 0x5c,
  X86_SSE_MIN    = 0x5d,
  X86_SSE_DIV    = 0x5e,
  X86_SSE_MAX    = 0x5f,
  X86_SSE_MOVMSK = 0x50,
  X86_SSE_PACKSSDW = 0x6b,
  X86_SSE_PACKSSWB = 0x63,
  X86_SSE_PACKUSWB = 0x67,
  X86_SSE_PADDB    = 0xfc,
  X86_SSE_PADDD    = 0xfe,
  X86_SSE_PADDQ    = 0xd4,
  X86_SSE_PADDSB   = 0xec,
  X86_SSE_PADDSW   = 0xed,
  X86_SSE_PADDUSB  = 0xdc,
  X86_SSE_PADDUSW  = 0xdd,
  X86_SSE_PADDW    = 0xfd,
  X86_SSE_PAND     = 0xdb,
  X86_SSE_PANDN    = 0xdf,
  X86_SSE_PAVGB    = 0xe0,
  X86_SSE_PAVGW    = 0xe3,
  X86_SSE_PCMPEQB  = 0x74,
  X86_SSE_PCMPEQD  = 0x76,
  X86_SSE_PCMPEQW  = 0x75,
  X86_SSE_PCMPGTB  = 0x64,
  X86_SSE_PCMPGTD  = 0x66,
  X86_SSE_PCMPGTW  = 0x65,
  X86_SSE_PMADDWD  = 0xf5,
  X86_SSE_PMAXSW   = 0xee,
  X86_SSE_PMAXUB   = 0xde,
  X86_SSE_PMINSW   = 0xea,
  X86_SSE_PMINUB   = 0xda,
  X86_SSE_PMOVMSKB = 0xd7,
  X86_SSE_PMULHUW  = 0xe4,
  X86_SSE_PMULHW   = 0xe5,
  X86_SSE_PMULLW   = 0xd5,
  X86_SSE_PMULUDQ  = 0xf4,
  X86_SSE_POR      = 0xeb,
  X86_SSE_PSADBW   = 0xf6,
  X86_SSE_PSLLD    = 0xf2,
  X86_SSE_PSLLQ    = 0xf3,
  X86_SSE_PSLLW    = 0xf1,
  X86_SSE_PSRAD    = 0xe2,
  X86_SSE_PSRAW    = 0xe1,
  X86_SSE_PSRLD    = 0xd2,
  X86_SSE_PSRLQ    = 0xd3,
  X86_SSE_PSRLW    = 0xd1,
  X86_SSE_PSUBB    = 0xf8,
  X86_SSE_PSUBD    = 0xfa,
  X86_SSE_PSUBQ    = 0xfb,
  X86_SSE_PSUBSB   = 0xe8,
  X86_SSE_PSUBSW   = 0xe9,
  X86_SSE_PSUBUSB  = 0xd8,
  X86_SSE_PSUBUSW  = 0xd9,
  X86_SSE_PSUBW    = 0xf9,
  X86_SSE_PUNPCKHBW  = 0x68,
  X86_SSE_PUNPCKHDQ  = 0x6a,
  X86_SSE_PUNPCKHQDQ = 0x6d,
  X86_SSE_PUNPCKHWD  = 0x69,
  X86_SSE_PUNPCKLBW  = 0x60,
  X86_SSE_PUNPCKLDQ  = 0x62,
  X86_SSE_PUNPCKLQDQ = 0x6c,
  X86_SSE_PUNPCKLWD  = 0x61,
  X86_SSE_PXOR       = 0xef
};
1742
/*									_format		Opcd		,Mod ,r	    ,m		,mem=dsp+sib	,imm... */

/* Base SSE emitters.  Naming: __SSEL* = 0F xx with 32-bit REX
 * handling, __SSEQ* = same with REX.W; suffixes ir/im/rr/mr/rm/irr/imr
 * select register/memory/immediate operand combinations.  RSA/RDA are
 * register-accessor macros (_rX for XMM, _rM for MMX, _r4/_r8 for
 * GPRs) supplied by the wrappers so one emitter serves all register
 * files.
 */
#define __SSELir(OP,MO,IM,RD)		(_REXLrr(0, RD),	_OO_Mrm_B	(0x0f00|(OP)	,_b11,MO     ,_rX(RD)		,_u8(IM)))
#define __SSELim(OP,MO,IM,MD,MB,MI,MS)	(_REXLrm(0, MB, MI),	_OO_r_X_B	(0x0f00|(OP)	,MO		,MD,MB,MI,MS	,_u8(IM)))
#define __SSELrr(OP,RS,RSA,RD,RDA)	(_REXLrr(RD, RS),	_OO_Mrm		(0x0f00|(OP)	,_b11,RDA(RD),RSA(RS)				))
#define __SSELmr(OP,MD,MB,MI,MS,RD,RDA)	(_REXLmr(MB, MI, RD),	_OO_r_X		(0x0f00|(OP)	,RDA(RD)	,MD,MB,MI,MS		))
#define __SSELrm(OP,RS,RSA,MD,MB,MI,MS)	(_REXLrm(RS, MB, MI),	_OO_r_X		(0x0f00|(OP)	,RSA(RS)	,MD,MB,MI,MS		))
#define __SSELirr(OP,IM,RS,RD)		(_REXLrr(RD, RS),	_OO_Mrm_B	(0x0f00|(OP)	,_b11,_rX(RD),_rX(RS)		,_u8(IM)))
#define __SSELimr(OP,IM,MD,MB,MI,MS,RD)	(_REXLmr(MB, MI, RD),	_OO_r_X_B	(0x0f00|(OP)	,_rX(RD)	,MD,MB,MI,MS	,_u8(IM)))

#define __SSEQrr(OP,RS,RSA,RD,RDA)	(_REXQrr(RD, RS),	_OO_Mrm		(0x0f00|(OP)	,_b11,RDA(RD),RSA(RS)				))
#define __SSEQmr(OP,MD,MB,MI,MS,RD,RDA)	(_REXQmr(MB, MI, RD),	_OO_r_X		(0x0f00|(OP)	,RDA(RD)	,MD,MB,MI,MS		))
#define __SSEQrm(OP,RS,RSA,MD,MB,MI,MS)	(_REXQrm(RS, MB, MI),	_OO_r_X		(0x0f00|(OP)	,RSA(RS)	,MD,MB,MI,MS		))

/* _SSE* variants emit a mandatory prefix byte PX (0x66/0xf2/0xf3)
 * before the base encoding. */
#define _SSELrr(PX,OP,RS,RSA,RD,RDA)	(_B(PX), __SSELrr(OP, RS, RSA, RD, RDA))
#define _SSELmr(PX,OP,MD,MB,MI,MS,RD,RDA) (_B(PX), __SSELmr(OP, MD, MB, MI, MS, RD, RDA))
#define _SSELrm(PX,OP,RS,RSA,MD,MB,MI,MS) (_B(PX), __SSELrm(OP, RS, RSA, MD, MB, MI, MS))
#define _SSELir(PX,OP,MO,IM,RD)		(_B(PX), __SSELir(OP, MO, IM, RD))
#define _SSELim(PX,OP,MO,IM,MD,MB,MI,MS) (_B(PX), __SSELim(OP, MO, IM, MD, MB, MI, MS))
#define _SSELirr(PX,OP,IM,RS,RD)	(_B(PX), __SSELirr(OP, IM, RS, RD))
#define _SSELimr(PX,OP,IM,MD,MB,MI,MS,RD) (_B(PX), __SSELimr(OP, IM, MD, MB, MI, MS, RD))

#define _SSEQrr(PX,OP,RS,RSA,RD,RDA)	(_B(PX), __SSEQrr(OP, RS, RSA, RD, RDA))
#define _SSEQmr(PX,OP,MD,MB,MI,MS,RD,RDA) (_B(PX), __SSEQmr(OP, MD, MB, MI, MS, RD, RDA))
#define _SSEQrm(PX,OP,RS,RSA,MD,MB,MI,MS) (_B(PX), __SSEQrm(OP, RS, RSA, MD, MB, MI, MS))
1768
/* Per-datatype SSE families: PS = packed single (no prefix),
 * PD = packed double (0x66), SS = scalar single (0xf3),
 * SD = scalar double (0xf2).  All operate on XMM registers (_rX).
 */
#define _SSEPSrr(OP,RS,RD)		__SSELrr(      OP, RS,_rX, RD,_rX)
#define _SSEPSmr(OP,MD,MB,MI,MS,RD)	__SSELmr(      OP, MD, MB, MI, MS, RD,_rX)
#define _SSEPSrm(OP,RS,MD,MB,MI,MS)	__SSELrm(      OP, RS,_rX, MD, MB, MI, MS)
#define _SSEPSirr(OP,IM,RS,RD)		__SSELirr(     OP, IM, RS, RD)
#define _SSEPSimr(OP,IM,MD,MB,MI,MS,RD)	__SSELimr(     OP, IM, MD, MB, MI, MS, RD)

#define _SSEPDrr(OP,RS,RD)		 _SSELrr(0x66, OP, RS,_rX, RD,_rX)
#define _SSEPDmr(OP,MD,MB,MI,MS,RD)	 _SSELmr(0x66, OP, MD, MB, MI, MS, RD,_rX)
#define _SSEPDrm(OP,RS,MD,MB,MI,MS)	 _SSELrm(0x66, OP, RS,_rX, MD, MB, MI, MS)
#define _SSEPDirr(OP,IM,RS,RD)		 _SSELirr(0x66, OP, IM, RS, RD)
#define _SSEPDimr(OP,IM,MD,MB,MI,MS,RD)	 _SSELimr(0x66, OP, IM, MD, MB, MI, MS, RD)

#define _SSESSrr(OP,RS,RD)		 _SSELrr(0xf3, OP, RS,_rX, RD,_rX)
#define _SSESSmr(OP,MD,MB,MI,MS,RD)	 _SSELmr(0xf3, OP, MD, MB, MI, MS, RD,_rX)
#define _SSESSrm(OP,RS,MD,MB,MI,MS)	 _SSELrm(0xf3, OP, RS,_rX, MD, MB, MI, MS)
#define _SSESSirr(OP,IM,RS,RD)		 _SSELirr(0xf3, OP, IM, RS, RD)
#define _SSESSimr(OP,IM,MD,MB,MI,MS,RD)	 _SSELimr(0xf3, OP, IM, MD, MB, MI, MS, RD)

#define _SSESDrr(OP,RS,RD)		 _SSELrr(0xf2, OP, RS,_rX, RD,_rX)
#define _SSESDmr(OP,MD,MB,MI,MS,RD)	 _SSELmr(0xf2, OP, MD, MB, MI, MS, RD,_rX)
#define _SSESDrm(OP,RS,MD,MB,MI,MS)	 _SSELrm(0xf2, OP, RS,_rX, MD, MB, MI, MS)
/* Scalar-double immediate forms (0xf2 prefix).  Fixed: OP and IM were
 * passed to _SSELirr/_SSELimr in swapped order (`IM, OP` instead of
 * `OP, IM`), so e.g. CMPSDrr emitted the compare predicate as the
 * opcode byte.  Compare the correct _SSESSirr/_SSESSimr forms above.
 */
#define _SSESDirr(OP,IM,RS,RD)		 _SSELirr(0xf2, OP, IM, RS, RD)
#define _SSESDimr(OP,IM,MD,MB,MI,MS,RD)	 _SSELimr(0xf2, OP, IM, MD, MB, MI, MS, RD)
1792
1793 #define ADDPSrr(RS, RD) _SSEPSrr(X86_SSE_ADD, RS, RD)
1794 #define ADDPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_ADD, MD, MB, MI, MS, RD)
1795 #define ADDPDrr(RS, RD) _SSEPDrr(X86_SSE_ADD, RS, RD)
1796 #define ADDPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_ADD, MD, MB, MI, MS, RD)
1797
1798 #define ADDSSrr(RS, RD) _SSESSrr(X86_SSE_ADD, RS, RD)
1799 #define ADDSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_ADD, MD, MB, MI, MS, RD)
1800 #define ADDSDrr(RS, RD) _SSESDrr(X86_SSE_ADD, RS, RD)
1801 #define ADDSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_ADD, MD, MB, MI, MS, RD)
1802
1803 #define ANDNPSrr(RS, RD) _SSEPSrr(X86_SSE_ANDN, RS, RD)
1804 #define ANDNPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_ANDN, MD, MB, MI, MS, RD)
1805 #define ANDNPDrr(RS, RD) _SSEPDrr(X86_SSE_ANDN, RS, RD)
1806 #define ANDNPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_ANDN, MD, MB, MI, MS, RD)
1807
1808 #define ANDPSrr(RS, RD) _SSEPSrr(X86_SSE_AND, RS, RD)
1809 #define ANDPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_AND, MD, MB, MI, MS, RD)
1810 #define ANDPDrr(RS, RD) _SSEPDrr(X86_SSE_AND, RS, RD)
1811 #define ANDPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_AND, MD, MB, MI, MS, RD)
1812
1813 #define CMPPSrr(IM, RS, RD) _SSEPSirr(X86_SSE_CMP, IM, RS, RD)
1814 #define CMPPSmr(IM, MD, MB, MI, MS, RD) _SSEPSimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)
1815 #define CMPPDrr(IM, RS, RD) _SSEPDirr(X86_SSE_CMP, IM, RS, RD)
1816 #define CMPPDmr(IM, MD, MB, MI, MS, RD) _SSEPDimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)
1817
1818 #define CMPSSrr(IM, RS, RD) _SSESSirr(X86_SSE_CMP, IM, RS, RD)
1819 #define CMPSSmr(IM, MD, MB, MI, MS, RD) _SSESSimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)
1820 #define CMPSDrr(IM, RS, RD) _SSESDirr(X86_SSE_CMP, IM, RS, RD)
1821 #define CMPSDmr(IM, MD, MB, MI, MS, RD) _SSESDimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)
1822
1823 #define DIVPSrr(RS, RD) _SSEPSrr(X86_SSE_DIV, RS, RD)
1824 #define DIVPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_DIV, MD, MB, MI, MS, RD)
1825 #define DIVPDrr(RS, RD) _SSEPDrr(X86_SSE_DIV, RS, RD)
1826 #define DIVPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_DIV, MD, MB, MI, MS, RD)
1827
1828 #define DIVSSrr(RS, RD) _SSESSrr(X86_SSE_DIV, RS, RD)
1829 #define DIVSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_DIV, MD, MB, MI, MS, RD)
1830 #define DIVSDrr(RS, RD) _SSESDrr(X86_SSE_DIV, RS, RD)
1831 #define DIVSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_DIV, MD, MB, MI, MS, RD)
1832
1833 #define MAXPSrr(RS, RD) _SSEPSrr(X86_SSE_MAX, RS, RD)
1834 #define MAXPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_MAX, MD, MB, MI, MS, RD)
1835 #define MAXPDrr(RS, RD) _SSEPDrr(X86_SSE_MAX, RS, RD)
1836 #define MAXPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_MAX, MD, MB, MI, MS, RD)
1837
1838 #define MAXSSrr(RS, RD) _SSESSrr(X86_SSE_MAX, RS, RD)
1839 #define MAXSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_MAX, MD, MB, MI, MS, RD)
1840 #define MAXSDrr(RS, RD) _SSESDrr(X86_SSE_MAX, RS, RD)
1841 #define MAXSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_MAX, MD, MB, MI, MS, RD)
1842
1843 #define MINPSrr(RS, RD) _SSEPSrr(X86_SSE_MIN, RS, RD)
1844 #define MINPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_MIN, MD, MB, MI, MS, RD)
1845 #define MINPDrr(RS, RD) _SSEPDrr(X86_SSE_MIN, RS, RD)
1846 #define MINPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_MIN, MD, MB, MI, MS, RD)
1847
1848 #define MINSSrr(RS, RD) _SSESSrr(X86_SSE_MIN, RS, RD)
1849 #define MINSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_MIN, MD, MB, MI, MS, RD)
1850 #define MINSDrr(RS, RD) _SSESDrr(X86_SSE_MIN, RS, RD)
1851 #define MINSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_MIN, MD, MB, MI, MS, RD)
1852
1853 #define MULPSrr(RS, RD) _SSEPSrr(X86_SSE_MUL, RS, RD)
1854 #define MULPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_MUL, MD, MB, MI, MS, RD)
1855 #define MULPDrr(RS, RD) _SSEPDrr(X86_SSE_MUL, RS, RD)
1856 #define MULPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_MUL, MD, MB, MI, MS, RD)
1857
1858 #define MULSSrr(RS, RD) _SSESSrr(X86_SSE_MUL, RS, RD)
1859 #define MULSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_MUL, MD, MB, MI, MS, RD)
1860 #define MULSDrr(RS, RD) _SSESDrr(X86_SSE_MUL, RS, RD)
1861 #define MULSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_MUL, MD, MB, MI, MS, RD)
1862
1863 #define ORPSrr(RS, RD) _SSEPSrr(X86_SSE_OR, RS, RD)
1864 #define ORPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_OR, MD, MB, MI, MS, RD)
1865 #define ORPDrr(RS, RD) _SSEPDrr(X86_SSE_OR, RS, RD)
1866 #define ORPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_OR, MD, MB, MI, MS, RD)
1867
1868 #define RCPPSrr(RS, RD) _SSEPSrr(X86_SSE_RCP, RS, RD)
1869 #define RCPPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_RCP, MD, MB, MI, MS, RD)
1870 #define RCPSSrr(RS, RD) _SSESSrr(X86_SSE_RCP, RS, RD)
1871 #define RCPSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_RCP, MD, MB, MI, MS, RD)
1872
1873 #define RSQRTPSrr(RS, RD) _SSEPSrr(X86_SSE_RSQRT, RS, RD)
1874 #define RSQRTPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_RSQRT, MD, MB, MI, MS, RD)
1875 #define RSQRTSSrr(RS, RD) _SSESSrr(X86_SSE_RSQRT, RS, RD)
1876 #define RSQRTSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_RSQRT, MD, MB, MI, MS, RD)
1877
1878 #define SQRTPSrr(RS, RD) _SSEPSrr(X86_SSE_SQRT, RS, RD)
1879 #define SQRTPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)
1880 #define SQRTPDrr(RS, RD) _SSEPDrr(X86_SSE_SQRT, RS, RD)
1881 #define SQRTPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)
1882
1883 #define SQRTSSrr(RS, RD) _SSESSrr(X86_SSE_SQRT, RS, RD)
1884 #define SQRTSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)
1885 #define SQRTSDrr(RS, RD) _SSESDrr(X86_SSE_SQRT, RS, RD)
1886 #define SQRTSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)
1887
1888 #define SUBPSrr(RS, RD) _SSEPSrr(X86_SSE_SUB, RS, RD)
1889 #define SUBPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_SUB, MD, MB, MI, MS, RD)
1890 #define SUBPDrr(RS, RD) _SSEPDrr(X86_SSE_SUB, RS, RD)
1891 #define SUBPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_SUB, MD, MB, MI, MS, RD)
1892
1893 #define SUBSSrr(RS, RD) _SSESSrr(X86_SSE_SUB, RS, RD)
1894 #define SUBSSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_SUB, MD, MB, MI, MS, RD)
1895 #define SUBSDrr(RS, RD) _SSESDrr(X86_SSE_SUB, RS, RD)
1896 #define SUBSDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_SUB, MD, MB, MI, MS, RD)
1897
1898 #define XORPSrr(RS, RD) _SSEPSrr(X86_SSE_XOR, RS, RD)
1899 #define XORPSmr(MD, MB, MI, MS, RD) _SSEPSmr(X86_SSE_XOR, MD, MB, MI, MS, RD)
1900 #define XORPDrr(RS, RD) _SSEPDrr(X86_SSE_XOR, RS, RD)
1901 #define XORPDmr(MD, MB, MI, MS, RD) _SSEPDmr(X86_SSE_XOR, MD, MB, MI, MS, RD)
1902
1903 #define COMISSrr(RS, RD) _SSESSrr(X86_SSE_COMI, RS, RD)
1904 #define COMISSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_COMI, MD, MB, MI, MS, RD)
1905 #define COMISDrr(RS, RD) _SSESDrr(X86_SSE_COMI, RS, RD)
1906 #define COMISDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_COMI, MD, MB, MI, MS, RD)
1907
1908 #define UCOMISSrr(RS, RD) _SSESSrr(X86_SSE_UCOMI, RS, RD)
1909 #define UCOMISSmr(MD, MB, MI, MS, RD) _SSESSmr(X86_SSE_UCOMI, MD, MB, MI, MS, RD)
1910 #define UCOMISDrr(RS, RD) _SSESDrr(X86_SSE_UCOMI, RS, RD)
1911 #define UCOMISDmr(MD, MB, MI, MS, RD) _SSESDmr(X86_SSE_UCOMI, MD, MB, MI, MS, RD)
1912
1913 #define MOVAPSrr(RS, RD) _SSEPSrr(0x28, RS, RD)
1914 #define MOVAPSmr(MD, MB, MI, MS, RD) _SSEPSmr(0x28, MD, MB, MI, MS, RD)
1915 #define MOVAPSrm(RS, MD, MB, MI, MS) _SSEPSrm(0x29, RS, MD, MB, MI, MS)
1916
1917 #define MOVAPDrr(RS, RD) _SSEPDrr(0x28, RS, RD)
1918 #define MOVAPDmr(MD, MB, MI, MS, RD) _SSEPDmr(0x28, MD, MB, MI, MS, RD)
1919 #define MOVAPDrm(RS, MD, MB, MI, MS) _SSEPDrm(0x29, RS, MD, MB, MI, MS)
1920
1921 #define CVTPS2PIrr(RS, RD) __SSELrr( X86_SSE_CVTSI, RS,_rX, RD,_rM)
1922 #define CVTPS2PImr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTSI, MD, MB, MI, MS, RD,_rM)
1923 #define CVTPD2PIrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTSI, RS,_rX, RD,_rM)
1924 #define CVTPD2PImr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTSI, MD, MB, MI, MS, RD,_rM)
1925
1926 #define CVTPI2PSrr(RS, RD) __SSELrr( X86_SSE_CVTIS, RS,_rM, RD,_rX)
1927 #define CVTPI2PSmr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTIS, MD, MB, MI, MS, RD,_rX)
1928 #define CVTPI2PDrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTIS, RS,_rM, RD,_rX)
1929 #define CVTPI2PDmr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTIS, MD, MB, MI, MS, RD,_rX)
1930
1931 #define CVTPS2PDrr(RS, RD) __SSELrr( X86_SSE_CVTSD, RS,_rX, RD,_rX)
1932 #define CVTPS2PDmr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTSD, MD, MB, MI, MS, RD,_rX)
1933 #define CVTPD2PSrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTSD, RS,_rX, RD,_rX)
1934 #define CVTPD2PSmr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTSD, MD, MB, MI, MS, RD,_rX)
1935
1936 #define CVTSS2SDrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTSD, RS,_rX, RD,_rX)
1937 #define CVTSS2SDmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTSD, MD, MB, MI, MS, RD,_rX)
1938 #define CVTSD2SSrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTSD, RS,_rX, RD,_rX)
1939 #define CVTSD2SSmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTSD, MD, MB, MI, MS, RD,_rX)
1940
1941 #define CVTSS2SILrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTSI, RS,_rX, RD,_r4)
1942 #define CVTSS2SILmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTSI, MD, MB, MI, MS, RD,_r4)
1943 #define CVTSD2SILrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTSI, RS,_rX, RD,_r4)
1944 #define CVTSD2SILmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTSI, MD, MB, MI, MS, RD,_r4)
1945
1946 #define CVTSI2SSLrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTIS, RS,_r4, RD,_rX)
1947 #define CVTSI2SSLmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTIS, MD, MB, MI, MS, RD,_rX)
1948 #define CVTSI2SDLrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTIS, RS,_r4, RD,_rX)
1949 #define CVTSI2SDLmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTIS, MD, MB, MI, MS, RD,_rX)
1950
1951 #define CVTSS2SIQrr(RS, RD) _SSEQrr(0xf3, X86_SSE_CVTSI, RS,_rX, RD,_r8)
1952 #define CVTSS2SIQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf3, X86_SSE_CVTSI, MD, MB, MI, MS, RD,_r8)
1953 #define CVTSD2SIQrr(RS, RD) _SSEQrr(0xf2, X86_SSE_CVTSI, RS,_rX, RD,_r8)
1954 #define CVTSD2SIQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf2, X86_SSE_CVTSI, MD, MB, MI, MS, RD,_r8)
1955
1956 #define CVTSI2SSQrr(RS, RD) _SSEQrr(0xf3, X86_SSE_CVTIS, RS,_r8, RD,_rX)
1957 #define CVTSI2SSQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf3, X86_SSE_CVTIS, MD, MB, MI, MS, RD,_rX)
1958 #define CVTSI2SDQrr(RS, RD) _SSEQrr(0xf2, X86_SSE_CVTIS, RS,_r8, RD,_rX)
1959 #define CVTSI2SDQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf2, X86_SSE_CVTIS, MD, MB, MI, MS, RD,_rX)
1960
/* MOVD/MOVQ between general registers (or memory) and SIMD registers.
 * "...X" forms target an XMM register (_rX, 0x66 prefix); "...M" forms
 * target an MMX register (_rM, no prefix -- hence the __SSE* helpers,
 * which take no prefix argument).  Opcode 0x6e moves INTO the SIMD
 * register, 0x7e moves OUT of it.  "L" = 32-bit GPR/memory operand,
 * "Q" = 64-bit operand (REX.W via the _SSEQ*/__SSEQ* helpers). */
1961 #define MOVDLXrr(RS, RD)		_SSELrr(0x66, 0x6e, RS,_r4, RD,_rX)
1962 #define MOVDLXmr(MD, MB, MI, MS, RD)	_SSELmr(0x66, 0x6e, MD, MB, MI, MS, RD,_rX)
1963 #define MOVDQXrr(RS, RD)		_SSEQrr(0x66, 0x6e, RS,_r8, RD,_rX)
1964 #define MOVDQXmr(MD, MB, MI, MS, RD)	_SSEQmr(0x66, 0x6e, MD, MB, MI, MS, RD,_rX)
1965
1966 #define MOVDXLrr(RS, RD)		_SSELrr(0x66, 0x7e, RS,_rX, RD,_r4)
1967 #define MOVDXLrm(RS, MD, MB, MI, MS)	_SSELrm(0x66, 0x7e, RS,_rX, MD, MB, MI, MS)
1968 #define MOVDXQrr(RS, RD)		_SSEQrr(0x66, 0x7e, RS,_rX, RD,_r8)
1969 #define MOVDXQrm(RS, MD, MB, MI, MS)	_SSEQrm(0x66, 0x7e, RS,_rX, MD, MB, MI, MS)
1970
1971 #define MOVDLMrr(RS, RD)		__SSELrr(      0x6e, RS,_r4, RD,_rM)
1972 #define MOVDLMmr(MD, MB, MI, MS, RD)	__SSELmr(      0x6e, MD, MB, MI, MS, RD,_rM)
1973 #define MOVDQMrr(RS, RD)		__SSEQrr(      0x6e, RS,_r8, RD,_rM)
1974 #define MOVDQMmr(MD, MB, MI, MS, RD)	__SSEQmr(      0x6e, MD, MB, MI, MS, RD,_rM)
1975
1976 #define MOVDMLrr(RS, RD)		__SSELrr(      0x7e, RS,_rM, RD,_r4)
1977 #define MOVDMLrm(RS, MD, MB, MI, MS)	__SSELrm(      0x7e, RS,_rM, MD, MB, MI, MS)
1978 #define MOVDMQrr(RS, RD)		__SSEQrr(      0x7e, RS,_rM, RD,_r8)
1979 #define MOVDMQrm(RS, MD, MB, MI, MS)	__SSEQrm(      0x7e, RS,_rM, MD, MB, MI, MS)
1980
/* MOVDQ2Q: move the low 64 bits of an XMM register into an MMX register.
 * MOVMSKPS/MOVMSKPD: extract the packed-FP sign-bit mask into a 32-bit GPR.
 * MOVHLPS/MOVLHPS: move the high/low 64-bit half between XMM registers. */
1981 #define MOVDQ2Qrr(RS, RD)		_SSELrr(0xf2, 0xd6, RS,_rX, RD,_rM)
1982 #define MOVMSKPSrr(RS, RD)		__SSELrr(      0x50, RS,_rX, RD,_r4)
1983 #define MOVMSKPDrr(RS, RD)		_SSELrr(0x66, 0x50, RS,_rX, RD,_r4)
1984
1985 #define MOVHLPSrr(RS, RD)		__SSELrr(      0x12, RS,_rX, RD,_rX)
1986 #define MOVLHPSrr(RS, RD)		__SSELrr(      0x16, RS,_rX, RD,_rX)
1987
/* 128-bit XMM moves: MOVDQA requires 16-byte-aligned memory, MOVDQU does
 * not.  Note the opcode direction: "mr" forms load from memory (0x6f),
 * "rm" forms store to memory (0x7f).  MOVH/MOVL PS/PD transfer the
 * high/low 64-bit half of an XMM register to or from memory
 * (load opcodes 0x16/0x12, store opcodes 0x17/0x13). */
1988 #define MOVDQArr(RS, RD)		_SSELrr(0x66, 0x6f, RS,_rX, RD,_rX)
1989 #define MOVDQAmr(MD, MB, MI, MS, RD)	_SSELmr(0x66, 0x6f, MD, MB, MI, MS, RD,_rX)
1990 #define MOVDQArm(RS, MD, MB, MI, MS)	_SSELrm(0x66, 0x7f, RS,_rX, MD, MB, MI, MS)
1991
1992 #define MOVDQUrr(RS, RD)		_SSELrr(0xf3, 0x6f, RS,_rX, RD,_rX)
1993 #define MOVDQUmr(MD, MB, MI, MS, RD)	_SSELmr(0xf3, 0x6f, MD, MB, MI, MS, RD,_rX)
1994 #define MOVDQUrm(RS, MD, MB, MI, MS)	_SSELrm(0xf3, 0x7f, RS,_rX, MD, MB, MI, MS)
1995
1996 #define MOVHPDmr(MD, MB, MI, MS, RD)	_SSELmr(0x66, 0x16, MD, MB, MI, MS, RD,_rX)
1997 #define MOVHPDrm(RS, MD, MB, MI, MS)	_SSELrm(0x66, 0x17, RS,_rX, MD, MB, MI, MS)
1998 #define MOVHPSmr(MD, MB, MI, MS, RD)	__SSELmr(      0x16, MD, MB, MI, MS, RD,_rX)
1999 #define MOVHPSrm(RS, MD, MB, MI, MS)	__SSELrm(      0x17, RS,_rX, MD, MB, MI, MS)
2000
2001 #define MOVLPDmr(MD, MB, MI, MS, RD)	_SSELmr(0x66, 0x12, MD, MB, MI, MS, RD,_rX)
2002 #define MOVLPDrm(RS, MD, MB, MI, MS)	_SSELrm(0x66, 0x13, RS,_rX, MD, MB, MI, MS)
2003 #define MOVLPSmr(MD, MB, MI, MS, RD)	__SSELmr(      0x12, MD, MB, MI, MS, RD,_rX)
2004 #define MOVLPSrm(RS, MD, MB, MI, MS)	__SSELrm(      0x13, RS,_rX, MD, MB, MI, MS)
2005
2006
2007 /* --- Floating-Point instructions ----------------------------------------- */
2008
2009 #define _ESCmi(D,B,I,S,OP) (_REXLrm(0,B,I), _O_r_X(0xd8|(OP & 7), (OP >> 3), D,B,I,S))
2010
/* x87 load/store.  Register forms address ST(i) (e.g. FLD = D9 C0+i);
 * memory forms use the octal _ESCmi encoding.  Size suffixes on memory
 * forms: S = m32 float, L = m64 double, T = m80 extended.  FSTP pops the
 * register stack after storing; plain FST does not. */
2011 #define FLDr(R)			_OOr(0xd9c0,_rN(R))
2012 #define FLDLm(D,B,I,S)		_ESCmi(D,B,I,S,005)
2013 #define FLDSm(D,B,I,S)		_ESCmi(D,B,I,S,001)
2014 #define FLDTm(D,B,I,S)		_ESCmi(D,B,I,S,053)
2015
2016 #define FSTr(R)			_OOr(0xddd0,_rN(R))
2017 #define FSTSm(D,B,I,S)		_ESCmi(D,B,I,S,021)
2018 #define FSTLm(D,B,I,S)		_ESCmi(D,B,I,S,025)
2019
2020 #define FSTPr(R)		_OOr(0xddd8,_rN(R))
2021 #define FSTPSm(D,B,I,S)		_ESCmi(D,B,I,S,031)
2022 #define FSTPLm(D,B,I,S)		_ESCmi(D,B,I,S,035)
2023 #define FSTPTm(D,B,I,S)		_ESCmi(D,B,I,S,073)
2024
/* x87 arithmetic.  Suffix convention:
 *   "r0"  : ST(0) <- ST(0) op ST(i)   (0xd8 escape)
 *   "0r"  : ST(i) <- ST(i) op ST(0)   (0xdc escape)
 *   "P0r" : as "0r", then pop ST(0)   (0xde escape)
 * Memory forms: S = m32 float, L = m64 double.  FSUBR/FDIVR are the
 * reversed-operand (subtract-from / divide-into) variants. */
2025 #define FADDr0(R)		_OOr(0xd8c0,_rN(R))
2026 #define FADD0r(R)		_OOr(0xdcc0,_rN(R))
2027 #define FADDP0r(R)		_OOr(0xdec0,_rN(R))
2028 #define FADDSm(D,B,I,S)		_ESCmi(D,B,I,S,000)
2029 #define FADDLm(D,B,I,S)		_ESCmi(D,B,I,S,004)
2030
2031 #define FSUBSm(D,B,I,S)		_ESCmi(D,B,I,S,040)
2032 #define FSUBLm(D,B,I,S)		_ESCmi(D,B,I,S,044)
2033 #define FSUBr0(R)		_OOr(0xd8e0,_rN(R))
2034 #define FSUB0r(R)		_OOr(0xdce8,_rN(R))
2035 #define FSUBP0r(R)		_OOr(0xdee8,_rN(R))
2036
2037 #define FSUBRr0(R)		_OOr(0xd8e8,_rN(R))
2038 #define FSUBR0r(R)		_OOr(0xdce0,_rN(R))
2039 #define FSUBRP0r(R)		_OOr(0xdee0,_rN(R))
2040 #define FSUBRSm(D,B,I,S)	_ESCmi(D,B,I,S,050)
2041 #define FSUBRLm(D,B,I,S)	_ESCmi(D,B,I,S,054)
2042
2043 #define FMULr0(R)		_OOr(0xd8c8,_rN(R))
2044 #define FMUL0r(R)		_OOr(0xdcc8,_rN(R))
2045 #define FMULP0r(R)		_OOr(0xdec8,_rN(R))
2046 #define FMULSm(D,B,I,S)		_ESCmi(D,B,I,S,010)
2047 #define FMULLm(D,B,I,S)		_ESCmi(D,B,I,S,014)
2048
2049 #define FDIVr0(R)		_OOr(0xd8f0,_rN(R))
2050 #define FDIV0r(R)		_OOr(0xdcf8,_rN(R))
2051 #define FDIVP0r(R)		_OOr(0xdef8,_rN(R))
2052 #define FDIVSm(D,B,I,S)		_ESCmi(D,B,I,S,060)
2053 #define FDIVLm(D,B,I,S)		_ESCmi(D,B,I,S,064)
2054
2055 #define FDIVRr0(R)		_OOr(0xd8f8,_rN(R))
2056 #define FDIVR0r(R)		_OOr(0xdcf0,_rN(R))
2057 #define FDIVRP0r(R)		_OOr(0xdef0,_rN(R))
2058 #define FDIVRSm(D,B,I,S)	_ESCmi(D,B,I,S,070)
2059 #define FDIVRLm(D,B,I,S)	_ESCmi(D,B,I,S,074)
2060
/* x87 conditional moves and compares.
 * FCMOVcc move ST(i) into ST(0) when the EFLAGS condition holds (P6+).
 * FCOMI/FCOMIP compare ST(0) with ST(i) and set EFLAGS directly; the
 * "P" forms pop once after comparing.  FCOM/FCOMP set the x87 status
 * word instead of EFLAGS.  FUCOM* are the unordered variants, which do
 * not raise #IA on quiet-NaN operands. */
2061 #define FCMOVBr0(R)		_OOr(0xdac0,_rN(R))
2062 #define FCMOVBEr0(R)		_OOr(0xdad0,_rN(R))
2063 #define FCMOVEr0(R)		_OOr(0xdac8,_rN(R))
2064 #define FCMOVNBr0(R)		_OOr(0xdbc0,_rN(R))
2065 #define FCMOVNBEr0(R)		_OOr(0xdbd0,_rN(R))
2066 #define FCMOVNEr0(R)		_OOr(0xdbc8,_rN(R))
2067 #define FCMOVNUr0(R)		_OOr(0xdbd8,_rN(R))
2068 #define FCMOVUr0(R)		_OOr(0xdad8,_rN(R))
2069 #define FCOMIr0(R)		_OOr(0xdbf0,_rN(R))
2070 #define FCOMIPr0(R)		_OOr(0xdff0,_rN(R))
2071
2072 #define FCOMr(R)		_OOr(0xd8d0,_rN(R))
2073 #define FCOMSm(D,B,I,S)		_ESCmi(D,B,I,S,020)
2074 #define FCOMLm(D,B,I,S)		_ESCmi(D,B,I,S,024)
2075
2076 #define FCOMPr(R)		_OOr(0xd8d8,_rN(R))
2077 #define FCOMPSm(D,B,I,S)	_ESCmi(D,B,I,S,030)
2078 #define FCOMPLm(D,B,I,S)	_ESCmi(D,B,I,S,034)
2079
2080 #define FUCOMIr0(R)		_OOr(0xdbe8,_rN(R))
2081 #define FUCOMIPr0(R)		_OOr(0xdfe8,_rN(R))
2082 #define FUCOMPr(R)		_OOr(0xdde8,_rN(R))
2083 #define FUCOMr(R)		_OOr(0xdde0,_rN(R))
2084
/* x87 integer-operand instructions.  The "L" forms take an m32 signed
 * integer; FILDQm/FISTPQm take an m64 integer (0xdf escape, /5 and /7).
 * FISTP pops after storing; FIST does not. */
2085 #define FIADDLm(D,B,I,S)	_ESCmi(D,B,I,S,002)
2086 #define FICOMLm(D,B,I,S)	_ESCmi(D,B,I,S,022)
2087 #define FICOMPLm(D,B,I,S)	_ESCmi(D,B,I,S,032)
2088 #define FIDIVLm(D,B,I,S)	_ESCmi(D,B,I,S,062)
2089 #define FIDIVRLm(D,B,I,S)	_ESCmi(D,B,I,S,072)
2090 #define FILDLm(D,B,I,S)		_ESCmi(D,B,I,S,003)
2091 #define FILDQm(D,B,I,S)		_ESCmi(D,B,I,S,057)
2092 #define FIMULLm(D,B,I,S)	_ESCmi(D,B,I,S,012)
2093 #define FISTLm(D,B,I,S)		_ESCmi(D,B,I,S,023)
2094 #define FISTPLm(D,B,I,S)	_ESCmi(D,B,I,S,033)
2095 #define FISTPQm(D,B,I,S)	_ESCmi(D,B,I,S,077)
2096 #define FISUBLm(D,B,I,S)	_ESCmi(D,B,I,S,042)
2097 #define FISUBRLm(D,B,I,S)	_ESCmi(D,B,I,S,052)
2098
/* FFREE: mark ST(i) as empty.  FXCH: exchange ST(0) and ST(i). */
2099 #define FREEr(R)		_OOr(0xddc0,_rN(R))
2100 #define FXCHr(R)		_OOr(0xd9c8,_rN(R))
2101
2102 #endif /* X86_RTASM_H */