root/cebix/BasiliskII/src/uae_cpu/compiler/codegen_x86.h
Revision: 1.2
Committed: 2003-01-31T23:48:10Z by gbeauche
Content type: text/plain
Branch: MAIN
Changes since 1.1: +119 -22 lines
Log Message:
Add some FPU instructions. Minor clean-ups.

File Contents

1 /******************** -*- mode: C; tab-width: 8 -*- ********************
2 *
3 * Run-time assembler for i386 and x86-64
4 *
5 ***********************************************************************/
6
7
8 /***********************************************************************
9 *
10 * This file is derived from GNU lightning.
11 *
12 * Copyright 1999, 2000, 2001, 2002, 2003 Ian Piumarta
13 *
14 * Adaptations and enhancements for x86-64 support, Copyright 2003
15 * Gwenole Beauchesne
16 *
17 * Basilisk II (C) 1997-2003 Christian Bauer
18 *
19 * This program is free software; you can redistribute it and/or modify
20 * it under the terms of the GNU General Public License as published by
21 * the Free Software Foundation; either version 2 of the License, or
22 * (at your option) any later version.
23 *
24 * This program is distributed in the hope that it will be useful,
25 * but WITHOUT ANY WARRANTY; without even the implied warranty of
26 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27 * GNU General Public License for more details.
28 *
29 * You should have received a copy of the GNU General Public License
30 * along with this program; if not, write to the Free Software
31 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 *
33 ***********************************************************************/
34
35 #ifndef X86_RTASM_H
36 #define X86_RTASM_H
37
38 /* NOTES
39 *
40 * o Best viewed on a 1024x768 screen with fixed-6x10 font ;-)
41 *
42 * TODO
43 *
44 * o Fix FIXMEs
45 * o Conditional moves
46 * o i387 FPU instructions
47 * o SSE instructions
48 * o Add notes about RIP addressing mode for x86-64
49 * o Optimize for cases where register numbers are not integral constants
50 */
51
52 /* --- Configuration ------------------------------------------------------- */
53
54 /* Define to select a "flat" register set, i.e. a different regno for
55 each size variant. */
56 #ifndef X86_FLAT_REGISTERS
57 #define X86_FLAT_REGISTERS 1
58 #endif
59
60 /* Define to generate x86-64 code. */
61 #ifndef X86_TARGET_64BIT
62 #define X86_TARGET_64BIT 0
63 #endif
64
65 /* Define to optimize ALU instructions. */
66 #ifndef X86_OPTIMIZE_ALU
67 #define X86_OPTIMIZE_ALU 1
68 #endif
69
70 /* Define to optimize rotate/shift instructions. */
71 #ifndef X86_OPTIMIZE_ROTSHI
72 #define X86_OPTIMIZE_ROTSHI 1
73 #endif
74
75
76 /* --- Macros -------------------------------------------------------------- */
77
78 /* Functions used to emit code.
79 *
80 * x86_emit_byte(B)
81 * x86_emit_word(W)
82 * x86_emit_long(L)
83 */
84
85 /* Get pointer to current code
86 *
87 * x86_get_target()
88 */
89
90 /* Abort assembler, fatal failure.
91 *
92 * x86_emit_failure(MSG)
93 */
94
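/* Illustrative sketch only (not part of the original header): the including
 * translation unit is expected to provide these hooks.  A minimal definition,
 * assuming a global output pointer named x86_code_ptr, could look like:
 *
 *   static unsigned char *x86_code_ptr;
 *   #define x86_get_target()      (x86_code_ptr)
 *   #define x86_emit_byte(B)      (*x86_code_ptr++ = (unsigned char)(B))
 *   #define x86_emit_word(W)      (x86_emit_byte((W) & 0xff), x86_emit_byte(((W) >> 8) & 0xff))
 *   #define x86_emit_long(L)      (x86_emit_word((L) & 0xffff), x86_emit_word(((L) >> 16) & 0xffff))
 *   #define x86_emit_failure(MSG) (abort(), 0)   // must yield an int so it can sit in ?: expressions
 */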
95
96 /* --- Register set -------------------------------------------------------- */
97
98 enum {
99 #if X86_FLAT_REGISTERS
100 X86_NOREG = 0,
101 X86_Reg8L_Base = 0x10,
102 X86_Reg8H_Base = 0x110,
103 X86_Reg16_Base = 0x20,
104 X86_Reg32_Base = 0x40,
105 X86_Reg64_Base = 0x80,
106 #else
107 X86_NOREG = -1,
108 X86_Reg8L_Base = 0,
109 X86_Reg8H_Base = 16,
110 X86_Reg16_Base = 0,
111 X86_Reg32_Base = 0,
112 X86_Reg64_Base = 0,
113 #endif
114 };
115
116 enum {
117 X86_AL = X86_Reg8L_Base,
118 X86_CL, X86_DL, X86_BL,
119 X86_AH, X86_CH, X86_DH, X86_BH,
120 X86_R8B, X86_R9B, X86_R10B, X86_R11B,
121 X86_R12B, X86_R13B, X86_R14B, X86_R15B,
122 X86_SPL = X86_Reg8H_Base + 4,
123 X86_BPL, X86_SIL, X86_DIL
124 };
125
126 enum {
127 X86_AX = X86_Reg16_Base,
128 X86_CX, X86_DX, X86_BX,
129 X86_SP, X86_BP, X86_SI, X86_DI,
130 X86_R8W, X86_R9W, X86_R10W, X86_R11W,
131 X86_R12W, X86_R13W, X86_R14W, X86_R15W
132 };
133
134 enum {
135 X86_EAX = X86_Reg32_Base,
136 X86_ECX, X86_EDX, X86_EBX,
137 X86_ESP, X86_EBP, X86_ESI, X86_EDI,
138 X86_R8D, X86_R9D, X86_R10D, X86_R11D,
139 X86_R12D, X86_R13D, X86_R14D, X86_R15D
140 };
141
142 enum {
143 X86_RAX = X86_Reg64_Base,
144 X86_RCX, X86_RDX, X86_RBX,
145 X86_RSP, X86_RBP, X86_RSI, X86_RDI,
146 X86_R8, X86_R9, X86_R10, X86_R11,
147 X86_R12, X86_R13, X86_R14, X86_R15
148 };
149
150 /* Register control and access
151 *
152 * _rS(R) Size of register (only valid if X86_FLAT_REGISTERS)
153 *
154 * _rR(R) Full register number
155 * _rN(R) Short register number for encoding
156 *
157 * _rAP(R) Is it the accumulator register?
158 * _rXP(R) Is it an extended register introduced with x86-64?
159 * _r0P(R) Is it the nil register?
160 *
161 * _r1(R) 8-bit register ID
162 * _r2(R) 16-bit register ID
163 * _r4(R) 32-bit register ID
164 * _r8(R) 64-bit register ID
165 */
166
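/* Example (flat registers): X86_EAX is 0x40, so _rS(X86_EAX) == 4 (operand
 * size in bytes) and _rN(X86_EAX) == 0; X86_R10D is 0x4a, so _rR == 10,
 * _rN == 2 and _rXP is true (a REX prefix is required); X86_RAX is 0x80,
 * so _rS == 8. */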
167 #if X86_FLAT_REGISTERS
168
169 #define _rS(R) (((R)>>4)&0xf)
170 #define _rR(R) ((R)&0xf)
171 #define _rN(R) ((R)&0x7)
172 #define _rAP(R) (_rR(R) == _rR(X86_RAX))
173 #define _rXP(R) (_rR(R)>7)
174 #define _r0P(R) ((R)==0)
175 #define _rSP(R) (_rN(R)==_rN(X86_RSP))
176 #define _rBP(R) (_rN(R)==_rN(X86_RBP))
177
178 #ifndef _ASM_SAFETY
179 #define _r1(R) _rN(R)
180 #define _r2(R) _rN(R)
181 #define _r4(R) _rN(R)
182 #define _r8(R) _rN(R)
183 #define _rm4(R) _rN(R)
184 #else
185 #define _r1(R) ((_rS(R)==1) ? _rN(R) : x86_emit_failure( "8-bit register required"))
186 #define _r2(R) ((_rS(R)==2) ? _rN(R) : x86_emit_failure("16-bit register required"))
187 #define _r4(R) ((_rS(R)==4) ? _rN(R) : x86_emit_failure("32-bit register required"))
188 #define _r8(R) ((_rS(R)==8) ? _rN(R) : x86_emit_failure("64-bit register required"))
189 #define _rm4(R) (X86_TARGET_64BIT ? \
190 ((_rS(R)==8) ? _rN(R) : x86_emit_failure("not a valid 64-bit base/index expression")) : \
191 ((_rS(R)==4) ? _rN(R) : x86_emit_failure("not a valid 32-bit base/index expression")) )
192 #endif
193
194 #else
195
196 #define _rN(R) ((R)&0x7)
197 #define _rR(R) ((R)&0xf)
198 #define _rXP(X) (_rR(X)>7)
199 #define _r0P(R) ((R)==-1)
200 #define _rSP(R) (_rN(R)==_rN(X86_RSP))
201 #define _rBP(R) (_rN(R)==_rN(X86_RBP))
202 #define _r1(R) _rN(R)
203 #define _r2(R) _rN(R)
204 #define _r4(R) _rN(R)
205 #define _r8(R) _rN(R)
206
207 #endif
208
209
210 /* ========================================================================= */
211 /* --- UTILITY ------------------------------------------------------------- */
212 /* ========================================================================= */
213
214 typedef char _sc;
215 typedef unsigned char _uc;
216 typedef unsigned short _us;
217 typedef int _sl;
218 typedef unsigned int _ul;
219
220 #define _UC(X) ((_uc )(X))
221 #define _US(X) ((_us )(X))
222 #define _SL(X) ((_sl )(X))
223 #define _UL(X) ((_ul )(X))
224
225 # define _PUC(X) ((_uc *)(X))
226 # define _PUS(X) ((_us *)(X))
227 # define _PSL(X) ((_sl *)(X))
228 # define _PUL(X) ((_ul *)(X))
229
230 #define _B(B) x86_emit_byte((B))
231 #define _W(W) x86_emit_word((W))
232 #define _L(L) x86_emit_long((L))
233
234 #define _MASK(N) ((unsigned)((1<<(N)))-1)
235 #define _siP(N,I) (!((((unsigned)(I))^(((unsigned)(I))<<1))&~_MASK(N)))
236 #define _uiP(N,I) (!(((unsigned)(I))&~_MASK(N)))
237 #define _suiP(N,I) (_siP(N,I) | _uiP(N,I))
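/* _MASK(N) is the low-N-bit mask; _siP(N,I) and _uiP(N,I) test whether I
   fits an N-bit signed resp. unsigned field (e.g. _siP(8,-128) and
   _siP(8,127) hold, _siP(8,128) does not), and _suiP accepts either. */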
238
239 #ifndef _ASM_SAFETY
240 #define _ck_s(W,I) (_UL(I) & _MASK(W))
241 #define _ck_u(W,I) (_UL(I) & _MASK(W))
242 #define _ck_su(W,I) (_UL(I) & _MASK(W))
243 #define _ck_d(W,I) (_UL(I) & _MASK(W))
244 #else
245 #define _ck_s(W,I) (_siP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure( "signed integer `"#I"' too large for "#W"-bit field"))
246 #define _ck_u(W,I) (_uiP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure("unsigned integer `"#I"' too large for "#W"-bit field"))
247 #define _ck_su(W,I) (_suiP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure( "integer `"#I"' too large for "#W"-bit field"))
248 #define _ck_d(W,I) (_siP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure( "displacement `"#I"' too large for "#W"-bit field"))
249 #endif
250
251 #define _s0P(I) ((I)==0)
252 #define _s8P(I) _siP(8,I)
253 #define _s16P(I) _siP(16,I)
254 #define _u8P(I) _uiP(8,I)
255 #define _u16P(I) _uiP(16,I)
256
257 #define _su8(I) _ck_su(8,I)
258 #define _su16(I) _ck_su(16,I)
259
260 #define _s1(I) _ck_s( 1,I)
261 #define _s2(I) _ck_s( 2,I)
262 #define _s3(I) _ck_s( 3,I)
263 #define _s4(I) _ck_s( 4,I)
264 #define _s5(I) _ck_s( 5,I)
265 #define _s6(I) _ck_s( 6,I)
266 #define _s7(I) _ck_s( 7,I)
267 #define _s8(I) _ck_s( 8,I)
268 #define _s9(I) _ck_s( 9,I)
269 #define _s10(I) _ck_s(10,I)
270 #define _s11(I) _ck_s(11,I)
271 #define _s12(I) _ck_s(12,I)
272 #define _s13(I) _ck_s(13,I)
273 #define _s14(I) _ck_s(14,I)
274 #define _s15(I) _ck_s(15,I)
275 #define _s16(I) _ck_s(16,I)
276 #define _s17(I) _ck_s(17,I)
277 #define _s18(I) _ck_s(18,I)
278 #define _s19(I) _ck_s(19,I)
279 #define _s20(I) _ck_s(20,I)
280 #define _s21(I) _ck_s(21,I)
281 #define _s22(I) _ck_s(22,I)
282 #define _s23(I) _ck_s(23,I)
283 #define _s24(I) _ck_s(24,I)
284 #define _s25(I) _ck_s(25,I)
285 #define _s26(I) _ck_s(26,I)
286 #define _s27(I) _ck_s(27,I)
287 #define _s28(I) _ck_s(28,I)
288 #define _s29(I) _ck_s(29,I)
289 #define _s30(I) _ck_s(30,I)
290 #define _s31(I) _ck_s(31,I)
291 #define _u1(I) _ck_u( 1,I)
292 #define _u2(I) _ck_u( 2,I)
293 #define _u3(I) _ck_u( 3,I)
294 #define _u4(I) _ck_u( 4,I)
295 #define _u5(I) _ck_u( 5,I)
296 #define _u6(I) _ck_u( 6,I)
297 #define _u7(I) _ck_u( 7,I)
298 #define _u8(I) _ck_u( 8,I)
299 #define _u9(I) _ck_u( 9,I)
300 #define _u10(I) _ck_u(10,I)
301 #define _u11(I) _ck_u(11,I)
302 #define _u12(I) _ck_u(12,I)
303 #define _u13(I) _ck_u(13,I)
304 #define _u14(I) _ck_u(14,I)
305 #define _u15(I) _ck_u(15,I)
306 #define _u16(I) _ck_u(16,I)
307 #define _u17(I) _ck_u(17,I)
308 #define _u18(I) _ck_u(18,I)
309 #define _u19(I) _ck_u(19,I)
310 #define _u20(I) _ck_u(20,I)
311 #define _u21(I) _ck_u(21,I)
312 #define _u22(I) _ck_u(22,I)
313 #define _u23(I) _ck_u(23,I)
314 #define _u24(I) _ck_u(24,I)
315 #define _u25(I) _ck_u(25,I)
316 #define _u26(I) _ck_u(26,I)
317 #define _u27(I) _ck_u(27,I)
318 #define _u28(I) _ck_u(28,I)
319 #define _u29(I) _ck_u(29,I)
320 #define _u30(I) _ck_u(30,I)
321 #define _u31(I) _ck_u(31,I)
322
323 /* ========================================================================= */
324 /* --- ASSEMBLER ----------------------------------------------------------- */
325 /* ========================================================================= */
326
327 #define _b00 0
328 #define _b01 1
329 #define _b10 2
330 #define _b11 3
331
332 #define _b000 0
333 #define _b001 1
334 #define _b010 2
335 #define _b011 3
336 #define _b100 4
337 #define _b101 5
338 #define _b110 6
339 #define _b111 7
340
341 #define _OFF4(D) (_UL(D) - _UL(x86_get_target()))
342 #define _CKD8(D) _ck_d(8, ((_uc) _OFF4(D)) )
343
344 #define _D8(D) (_B(0), ((*(_PUC(x86_get_target())-1))= _CKD8(D)))
345 #define _D32(D) (_L(0), ((*(_PUL(x86_get_target())-1))= _OFF4(D)))
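/* _D8/_D32 emit a placeholder byte/long and then patch it with the
   displacement taken relative to the updated output position, i.e. relative
   to the end of the displacement field, which is what the rel8/rel32 forms
   of Jcc/JMP/CALL expect. */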
346
347 #ifndef _ASM_SAFETY
348 # define _M(M) (M)
349 # define _r(R) (R)
350 # define _m(M) (M)
351 # define _s(S) (S)
352 # define _i(I) (I)
353 # define _b(B) (B)
354 #else
355 # define _M(M) (((M)>3) ? x86_emit_failure("internal error: mod = " #M) : (M))
356 # define _r(R) (((R)>7) ? x86_emit_failure("internal error: reg = " #R) : (R))
357 # define _m(M) (((M)>7) ? x86_emit_failure("internal error: r/m = " #M) : (M))
358 # define _s(S) (((S)>3) ? x86_emit_failure("internal error: memory scale = " #S) : (S))
359 # define _i(I) (((I)>7) ? x86_emit_failure("internal error: memory index = " #I) : (I))
360 # define _b(B) (((B)>7) ? x86_emit_failure("internal error: memory base = " #B) : (B))
361 #endif
362
363 #define _Mrm(Md,R,M) _B((_M(Md)<<6)|(_r(R)<<3)|_m(M))
364 #define _SIB(Sc,I, B) _B((_s(Sc)<<6)|(_i(I)<<3)|_b(B))
365
366 #define _SCL(S) ((((S)==1) ? _b00 : \
367 (((S)==2) ? _b01 : \
368 (((S)==4) ? _b10 : \
369 (((S)==8) ? _b11 : x86_emit_failure("illegal scale: " #S))))))
370
371
372 /* --- Memory subformats - urgh! ------------------------------------------- */
373
374 #define _r_D( R, D ) (_Mrm(_b00,_rN(R),_b101 ) ,_L((long)(D)))
375 #define _r_0B( R, B ) (_Mrm(_b00,_rN(R),_rm4(B)) )
376 #define _r_0BIS(R, B,I,S) (_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(S),_rm4(I),_rm4(B)) )
377 #define _r_1B( R, D,B ) (_Mrm(_b01,_rN(R),_rm4(B)) ,_B((long)(D)))
378 #define _r_1BIS(R, D,B,I,S) (_Mrm(_b01,_rN(R),_b100 ),_SIB(_SCL(S),_rm4(I),_rm4(B)),_B((long)(D)))
379 #define _r_4B( R, D,B ) (_Mrm(_b10,_rN(R),_rm4(B)) ,_L((long)(D)))
380 #define _r_4IS( R, D,I,S) (_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(S),_rm4(I),_b101 ),_L((long)(D)))
381 #define _r_4BIS(R, D,B,I,S) (_Mrm(_b10,_rN(R),_b100 ),_SIB(_SCL(S),_rm4(I),_rm4(B)),_L((long)(D)))
382
383 #define _r_DB( R, D,B ) ((_s0P(D) && (!_rBP(B)) ? _r_0B (R, B ) : (_s8P(D) ? _r_1B( R,D,B ) : _r_4B( R,D,B ))))
384 #define _r_DBIS(R, D,B,I,S) ((_s0P(D) ? _r_0BIS(R, B,I,S) : (_s8P(D) ? _r_1BIS(R,D,B,I,S) : _r_4BIS(R,D,B,I,S))))
385
386 // FIXME:
387 #define _r_X( R, D,B,I,S) (_r0P(I) ? (_r0P(B) ? _r_D (R,D ) : \
388 (_rSP(B) ? _r_DBIS(R,D,X86_ESP,X86_ESP,1) : \
389 _r_DB (R,D, B ))) : \
390 (_r0P(B) ? _r_4IS (R,D, I,S) : \
391 ((_rR(I)!=_rR(X86_RSP)) ? _r_DBIS(R,D, B, I,S) : \
392 x86_emit_failure("illegal index register: %esp"))))
393
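/* Example (illustrative): MOVLmr(8, X86_EBP, X86_ESI, 4, X86_EAX) goes
   through _r_X's base+index case and emits 8B 44 B5 08, i.e.
   mov eax, [ebp+esi*4+8]: ModRM 0x44 selects [SIB]+disp8 and SIB 0xB5
   encodes scale=4, index=ESI, base=EBP. */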
394
395 /* --- Instruction formats ------------------------------------------------- */
396
397 #define _m32only(X) (! X86_TARGET_64BIT ? X : x86_emit_failure("invalid instruction in 64-bit mode"))
398 #define _m64only(X) ( X86_TARGET_64BIT ? X : x86_emit_failure("invalid instruction in 32-bit mode"))
399 #define _m64(X) ( X86_TARGET_64BIT ? X : ((void)0) )
400
401 /* _format Opcd ModR/M dN(rB,rI,Sc) imm... */
402
403 #define _d16() ( _B(0x66 ) )
404 #define _O( OP ) ( _B( OP ) )
405 #define _Or( OP,R ) ( _B( (OP)|_r(R)) )
406 #define _OO( OP ) ( _B((OP)>>8), _B( (OP) ) )
407 #define _OOr( OP,R ) ( _B((OP)>>8), _B( (OP)|_r(R)) )
408 #define _Os( OP,B ) ( _s8P(B) ? _B(((OP)|_b10)) : _B(OP) )
409 #define _sW( W ) ( _s8P(W) ? _B(W):_W(W) )
410 #define _sL( L ) ( _s8P(L) ? _B(L):_L(L) )
411 #define _O_B( OP ,B ) ( _O ( OP ) ,_B(B) )
412 #define _O_W( OP ,W ) ( _O ( OP ) ,_W(W) )
413 #define _O_L( OP ,L ) ( _O ( OP ) ,_L(L) )
414 #define _O_D8( OP ,D ) ( _O ( OP ) ,_D8(D) )
415 #define _O_D32( OP ,D ) ( _O ( OP ) ,_D32(D) )
416 #define _OO_D32( OP ,D ) ( _OO ( OP ) ,_D32(D) )
417 #define _Os_sW( OP ,W ) ( _Os ( OP,W) ,_sW(W) )
418 #define _Os_sL( OP ,L ) ( _Os ( OP,L) ,_sL(L) )
419 #define _O_W_B( OP ,W,B) ( _O ( OP ) ,_W(W),_B(B))
420 #define _Or_B( OP,R ,B ) ( _Or ( OP,R) ,_B(B) )
421 #define _Or_W( OP,R ,W ) ( _Or ( OP,R) ,_W(W) )
422 #define _Or_L( OP,R ,L ) ( _Or ( OP,R) ,_L(L) )
423 #define _O_Mrm( OP ,MO,R,M ) ( _O ( OP ),_Mrm(MO,R,M ) )
424 #define _OO_Mrm( OP ,MO,R,M ) ( _OO ( OP ),_Mrm(MO,R,M ) )
425 #define _O_Mrm_B( OP ,MO,R,M ,B ) ( _O ( OP ),_Mrm(MO,R,M ) ,_B(B) )
426 #define _O_Mrm_W( OP ,MO,R,M ,W ) ( _O ( OP ),_Mrm(MO,R,M ) ,_W(W) )
427 #define _O_Mrm_L( OP ,MO,R,M ,L ) ( _O ( OP ),_Mrm(MO,R,M ) ,_L(L) )
428 #define _OO_Mrm_B( OP ,MO,R,M ,B ) ( _OO ( OP ),_Mrm(MO,R,M ) ,_B(B) )
429 #define _Os_Mrm_sW(OP ,MO,R,M ,W ) ( _Os ( OP,W),_Mrm(MO,R,M ),_sW(W) )
430 #define _Os_Mrm_sL(OP ,MO,R,M ,L ) ( _Os ( OP,L),_Mrm(MO,R,M ),_sL(L) )
431 #define _O_r_X( OP ,R ,MD,MB,MI,MS ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS) )
432 #define _OO_r_X( OP ,R ,MD,MB,MI,MS ) ( _OO ( OP ),_r_X( R ,MD,MB,MI,MS) )
433 #define _O_r_X_B( OP ,R ,MD,MB,MI,MS,B ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS) ,_B(B) )
434 #define _O_r_X_W( OP ,R ,MD,MB,MI,MS,W ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS) ,_W(W) )
435 #define _O_r_X_L( OP ,R ,MD,MB,MI,MS,L ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS) ,_L(L) )
436 #define _OO_r_X_B( OP ,R ,MD,MB,MI,MS,B ) ( _OO ( OP ),_r_X( R ,MD,MB,MI,MS) ,_B(B) )
437 #define _Os_r_X_sW(OP ,R ,MD,MB,MI,MS,W ) ( _Os ( OP,W),_r_X( R ,MD,MB,MI,MS),_sW(W) )
438 #define _Os_r_X_sL(OP ,R ,MD,MB,MI,MS,L ) ( _Os ( OP,L),_r_X( R ,MD,MB,MI,MS),_sL(L) )
439 #define _O_X_B( OP ,MD,MB,MI,MS,B ) ( _O_r_X_B( OP ,0 ,MD,MB,MI,MS ,B) )
440 #define _O_X_W( OP ,MD,MB,MI,MS,W ) ( _O_r_X_W( OP ,0 ,MD,MB,MI,MS ,W) )
441 #define _O_X_L( OP ,MD,MB,MI,MS,L ) ( _O_r_X_L( OP ,0 ,MD,MB,MI,MS ,L) )
442
443
444 /* --- REX prefixes -------------------------------------------------------- */
445
446 #define _VOID() ((void)0)
447 #define _BIT(X) (!!(X))
448 #define _d64(W,R,X,B) (_B(0x40|(W)<<3|(R)<<2|(X)<<1|(B)))
449
450 #define __REXwrxb(L,W,R,X,B) ((W|R|X|B) || (L) ? _d64(W,R,X,B) : _VOID())
451 #define __REXwrx_(L,W,R,X,MR) (__REXwrxb(L,W,R,X,_BIT(_rXP(MR))))
452 #define __REXw_x_(L,W,R,X,MR) (__REXwrx_(L,W,_BIT(_rXP(R)),X,MR))
453
454 // FIXME: can't mix new (SPL,BPL,SIL,DIL) with (AH,BH,CH,DH)
455 #define _REXBrr(RR,MR) _m64(__REXw_x_(((RR)|(MR))>=X86_SPL,0,RR,0,MR))
456 #define _REXBmr(MB,MI,RD) _m64(__REXw_x_(((RD)|(MB))>=X86_SPL,0,RD,_BIT(_rXP(MI)),MB))
457 #define _REXBrm(RS,MB,MI) _REXBmr(MB,MI,RS)
458
459 #define _REXLrr(RR,MR) _m64(__REXw_x_(0,0,RR,0,MR))
460 #define _REXLmr(MB,MI,RD) _m64(__REXw_x_(0,0,RD,_BIT(_rXP(MI)),MB))
461 #define _REXLrm(RS,MB,MI) _REXLmr(MB,MI,RS)
462
463 #define _REXQrr(RR,MR) _m64only(__REXw_x_(0,1,RR,0,MR))
464 #define _REXQmr(MB,MI,RD) _m64only(__REXw_x_(0,1,RD,_BIT(_rXP(MI)),MB))
465 #define _REXQrm(RS,MB,MI) _REXQmr(MB,MI,RS)
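/* Example (illustrative, x86-64 target): MOVQrr(X86_R8, X86_RAX) yields
   REX 0x4C (W=1, R=1 extending the reg-field operand R8) followed by
   89 C0, i.e. mov rax, r8. */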
466
467
468 /* ========================================================================= */
469 /* --- Fully-qualified intrinsic instructions ------------------------------ */
470 /* ========================================================================= */
471
472 /* OPCODE + i = immediate operand
473 * + r = register operand
474 * + m = memory operand (disp,base,index,scale)
475 * + sr/sm = a star preceding a register or memory
476 * + 0 = top of stack register (for FPU instructions)
477 */
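/* Example (illustrative): ADDLrr(X86_EAX, X86_EBX) is the 32-bit
   register-to-register form (EBX += EAX, bytes 01 C3), ADDLir(4, X86_EAX)
   the immediate-to-register form (bytes 05 04 00 00 00), and
   ADDLmr(D,B,I,S, R) loads the memory operand and adds it into R. */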
478
479 /* --- ALU instructions ---------------------------------------------------- */
480
481 enum {
482 X86_ADD = 0,
483 X86_OR = 1,
484 X86_ADC = 2,
485 X86_SBB = 3,
486 X86_AND = 4,
487 X86_SUB = 5,
488 X86_XOR = 6,
489 X86_CMP = 7,
490 };
491
492 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
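/* The X86_* ALU code selects the opcode byte: base = code << 3, +0/+1 for
   the byte resp. word/long r/m <- reg forms, +2/+3 for reg <- r/m, +4/+5
   for the short AL/eAX immediate forms; the generic immediate forms use
   0x80/0x81 with the code in the ModRM reg field. */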
493
494 #define _ALUBrr(OP,RS, RD) (_REXBrr(RS, RD), _O_Mrm (((OP) << 3) ,_b11,_r1(RS),_r1(RD) ))
495 #define _ALUBmr(OP, MD, MB, MI, MS, RD) (_REXBmr(MB, MI, RD), _O_r_X (((OP) << 3) + 2,_r1(RD) ,MD,MB,MI,MS ))
496 #define _ALUBrm(OP, RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (((OP) << 3) ,_r1(RS) ,MD,MB,MI,MS ))
497 #define _ALUBir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_AL) ? \
498 (_REXBrr(0, RD), _O_B (((OP) << 3) + 4 ,_su8(IM))) : \
499 (_REXBrr(0, RD), _O_Mrm_B (0x80 ,_b11,OP ,_r1(RD) ,_su8(IM))) )
500 #define _ALUBim(OP, IM, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X_B (0x80 ,OP ,MD,MB,MI,MS ,_su8(IM)))
501
502 #define _ALUWrr(OP, RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (((OP) << 3) + 1,_b11,_r2(RS),_r2(RD) ))
503 #define _ALUWmr(OP, MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _O_r_X (((OP) << 3) + 3 ,_r2(RD) ,MD,MB,MI,MS ))
504 #define _ALUWrm(OP, RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (((OP) << 3) + 1 ,_r2(RS) ,MD,MB,MI,MS ))
505 #define _ALUWir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_AX) ? \
506 (_d16(), _REXLrr(0, RD), _O_W (((OP) << 3) + 5 ,_su16(IM))) : \
507 (_d16(), _REXLrr(0, RD), _Os_Mrm_sW (0x81 ,_b11,OP ,_r2(RD) ,_su16(IM))) )
508 #define _ALUWim(OP, IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _Os_r_X_sW (0x81 ,OP ,MD,MB,MI,MS ,_su16(IM)))
509
510 #define _ALULrr(OP, RS, RD) (_REXLrr(RS, RD), _O_Mrm (((OP) << 3) + 1,_b11,_r4(RS),_r4(RD) ))
511 #define _ALULmr(OP, MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _O_r_X (((OP) << 3) + 3 ,_r4(RD) ,MD,MB,MI,MS ))
512 #define _ALULrm(OP, RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (((OP) << 3) + 1 ,_r4(RS) ,MD,MB,MI,MS ))
513 #define _ALULir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_EAX) ? \
514 (_REXLrr(0, RD), _O_L (((OP) << 3) + 5 ,IM )) : \
515 (_REXLrr(0, RD), _Os_Mrm_sL (0x81 ,_b11,OP ,_r4(RD) ,IM )) )
516 #define _ALULim(OP, IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _Os_r_X_sL (0x81 ,OP ,MD,MB,MI,MS ,IM ))
517
518 #define _ALUQrr(OP, RS, RD) (_REXQrr(RS, RD), _O_Mrm (((OP) << 3) + 1,_b11,_r8(RS),_r8(RD) ))
519 #define _ALUQmr(OP, MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _O_r_X (((OP) << 3) + 3 ,_r8(RD) ,MD,MB,MI,MS ))
520 #define _ALUQrm(OP, RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (((OP) << 3) + 1 ,_r8(RS) ,MD,MB,MI,MS ))
521 #define _ALUQir(OP, IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_RAX) ? \
522 (_REXQrr(0, RD), _O_L (((OP) << 3) + 5 ,IM )) : \
523 (_REXQrr(0, RD), _Os_Mrm_sL (0x81 ,_b11,OP ,_r8(RD) ,IM )) )
524 #define _ALUQim(OP, IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _Os_r_X_sL (0x81 ,OP ,MD,MB,MI,MS ,IM ))
525
526 #define ADCBrr(RS, RD) _ALUBrr(X86_ADC, RS, RD)
527 #define ADCBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_ADC, MD, MB, MI, MS, RD)
528 #define ADCBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_ADC, RS, MD, MB, MI, MS)
529 #define ADCBir(IM, RD) _ALUBir(X86_ADC, IM, RD)
530 #define ADCBim(IM, MD, MB, MI, MS) _ALUBim(X86_ADC, IM, MD, MB, MI, MS)
531
532 #define ADCWrr(RS, RD) _ALUWrr(X86_ADC, RS, RD)
533 #define ADCWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_ADC, MD, MB, MI, MS, RD)
534 #define ADCWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_ADC, RS, MD, MB, MI, MS)
535 #define ADCWir(IM, RD) _ALUWir(X86_ADC, IM, RD)
536 #define ADCWim(IM, MD, MB, MI, MS) _ALUWim(X86_ADC, IM, MD, MB, MI, MS)
537
538 #define ADCLrr(RS, RD) _ALULrr(X86_ADC, RS, RD)
539 #define ADCLmr(MD, MB, MI, MS, RD) _ALULmr(X86_ADC, MD, MB, MI, MS, RD)
540 #define ADCLrm(RS, MD, MB, MI, MS) _ALULrm(X86_ADC, RS, MD, MB, MI, MS)
541 #define ADCLir(IM, RD) _ALULir(X86_ADC, IM, RD)
542 #define ADCLim(IM, MD, MB, MI, MS) _ALULim(X86_ADC, IM, MD, MB, MI, MS)
543
544 #define ADCQrr(RS, RD) _ALUQrr(X86_ADC, RS, RD)
545 #define ADCQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_ADC, MD, MB, MI, MS, RD)
546 #define ADCQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_ADC, RS, MD, MB, MI, MS)
547 #define ADCQir(IM, RD) _ALUQir(X86_ADC, IM, RD)
548 #define ADCQim(IM, MD, MB, MI, MS) _ALUQim(X86_ADC, IM, MD, MB, MI, MS)
549
550 #define ADDBrr(RS, RD) _ALUBrr(X86_ADD, RS, RD)
551 #define ADDBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_ADD, MD, MB, MI, MS, RD)
552 #define ADDBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_ADD, RS, MD, MB, MI, MS)
553 #define ADDBir(IM, RD) _ALUBir(X86_ADD, IM, RD)
554 #define ADDBim(IM, MD, MB, MI, MS) _ALUBim(X86_ADD, IM, MD, MB, MI, MS)
555
556 #define ADDWrr(RS, RD) _ALUWrr(X86_ADD, RS, RD)
557 #define ADDWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_ADD, MD, MB, MI, MS, RD)
558 #define ADDWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_ADD, RS, MD, MB, MI, MS)
559 #define ADDWir(IM, RD) _ALUWir(X86_ADD, IM, RD)
560 #define ADDWim(IM, MD, MB, MI, MS) _ALUWim(X86_ADD, IM, MD, MB, MI, MS)
561
562 #define ADDLrr(RS, RD) _ALULrr(X86_ADD, RS, RD)
563 #define ADDLmr(MD, MB, MI, MS, RD) _ALULmr(X86_ADD, MD, MB, MI, MS, RD)
564 #define ADDLrm(RS, MD, MB, MI, MS) _ALULrm(X86_ADD, RS, MD, MB, MI, MS)
565 #define ADDLir(IM, RD) _ALULir(X86_ADD, IM, RD)
566 #define ADDLim(IM, MD, MB, MI, MS) _ALULim(X86_ADD, IM, MD, MB, MI, MS)
567
568 #define ADDQrr(RS, RD) _ALUQrr(X86_ADD, RS, RD)
569 #define ADDQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_ADD, MD, MB, MI, MS, RD)
570 #define ADDQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_ADD, RS, MD, MB, MI, MS)
571 #define ADDQir(IM, RD) _ALUQir(X86_ADD, IM, RD)
572 #define ADDQim(IM, MD, MB, MI, MS) _ALUQim(X86_ADD, IM, MD, MB, MI, MS)
573
574 #define ANDBrr(RS, RD) _ALUBrr(X86_AND, RS, RD)
575 #define ANDBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_AND, MD, MB, MI, MS, RD)
576 #define ANDBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_AND, RS, MD, MB, MI, MS)
577 #define ANDBir(IM, RD) _ALUBir(X86_AND, IM, RD)
578 #define ANDBim(IM, MD, MB, MI, MS) _ALUBim(X86_AND, IM, MD, MB, MI, MS)
579
580 #define ANDWrr(RS, RD) _ALUWrr(X86_AND, RS, RD)
581 #define ANDWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_AND, MD, MB, MI, MS, RD)
582 #define ANDWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_AND, RS, MD, MB, MI, MS)
583 #define ANDWir(IM, RD) _ALUWir(X86_AND, IM, RD)
584 #define ANDWim(IM, MD, MB, MI, MS) _ALUWim(X86_AND, IM, MD, MB, MI, MS)
585
586 #define ANDLrr(RS, RD) _ALULrr(X86_AND, RS, RD)
587 #define ANDLmr(MD, MB, MI, MS, RD) _ALULmr(X86_AND, MD, MB, MI, MS, RD)
588 #define ANDLrm(RS, MD, MB, MI, MS) _ALULrm(X86_AND, RS, MD, MB, MI, MS)
589 #define ANDLir(IM, RD) _ALULir(X86_AND, IM, RD)
590 #define ANDLim(IM, MD, MB, MI, MS) _ALULim(X86_AND, IM, MD, MB, MI, MS)
591
592 #define ANDQrr(RS, RD) _ALUQrr(X86_AND, RS, RD)
593 #define ANDQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_AND, MD, MB, MI, MS, RD)
594 #define ANDQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_AND, RS, MD, MB, MI, MS)
595 #define ANDQir(IM, RD) _ALUQir(X86_AND, IM, RD)
596 #define ANDQim(IM, MD, MB, MI, MS) _ALUQim(X86_AND, IM, MD, MB, MI, MS)
597
598 #define CMPBrr(RS, RD) _ALUBrr(X86_CMP, RS, RD)
599 #define CMPBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_CMP, MD, MB, MI, MS, RD)
600 #define CMPBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_CMP, RS, MD, MB, MI, MS)
601 #define CMPBir(IM, RD) _ALUBir(X86_CMP, IM, RD)
602 #define CMPBim(IM, MD, MB, MI, MS) _ALUBim(X86_CMP, IM, MD, MB, MI, MS)
603
604 #define CMPWrr(RS, RD) _ALUWrr(X86_CMP, RS, RD)
605 #define CMPWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_CMP, MD, MB, MI, MS, RD)
606 #define CMPWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_CMP, RS, MD, MB, MI, MS)
607 #define CMPWir(IM, RD) _ALUWir(X86_CMP, IM, RD)
608 #define CMPWim(IM, MD, MB, MI, MS) _ALUWim(X86_CMP, IM, MD, MB, MI, MS)
609
610 #define CMPLrr(RS, RD) _ALULrr(X86_CMP, RS, RD)
611 #define CMPLmr(MD, MB, MI, MS, RD) _ALULmr(X86_CMP, MD, MB, MI, MS, RD)
612 #define CMPLrm(RS, MD, MB, MI, MS) _ALULrm(X86_CMP, RS, MD, MB, MI, MS)
613 #define CMPLir(IM, RD) _ALULir(X86_CMP, IM, RD)
614 #define CMPLim(IM, MD, MB, MI, MS) _ALULim(X86_CMP, IM, MD, MB, MI, MS)
615
616 #define CMPQrr(RS, RD) _ALUQrr(X86_CMP, RS, RD)
617 #define CMPQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_CMP, MD, MB, MI, MS, RD)
618 #define CMPQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_CMP, RS, MD, MB, MI, MS)
619 #define CMPQir(IM, RD) _ALUQir(X86_CMP, IM, RD)
620 #define CMPQim(IM, MD, MB, MI, MS) _ALUQim(X86_CMP, IM, MD, MB, MI, MS)
621
622 #define ORBrr(RS, RD) _ALUBrr(X86_OR, RS, RD)
623 #define ORBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_OR, MD, MB, MI, MS, RD)
624 #define ORBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_OR, RS, MD, MB, MI, MS)
625 #define ORBir(IM, RD) _ALUBir(X86_OR, IM, RD)
626 #define ORBim(IM, MD, MB, MI, MS) _ALUBim(X86_OR, IM, MD, MB, MI, MS)
627
628 #define ORWrr(RS, RD) _ALUWrr(X86_OR, RS, RD)
629 #define ORWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_OR, MD, MB, MI, MS, RD)
630 #define ORWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_OR, RS, MD, MB, MI, MS)
631 #define ORWir(IM, RD) _ALUWir(X86_OR, IM, RD)
632 #define ORWim(IM, MD, MB, MI, MS) _ALUWim(X86_OR, IM, MD, MB, MI, MS)
633
634 #define ORLrr(RS, RD) _ALULrr(X86_OR, RS, RD)
635 #define ORLmr(MD, MB, MI, MS, RD) _ALULmr(X86_OR, MD, MB, MI, MS, RD)
636 #define ORLrm(RS, MD, MB, MI, MS) _ALULrm(X86_OR, RS, MD, MB, MI, MS)
637 #define ORLir(IM, RD) _ALULir(X86_OR, IM, RD)
638 #define ORLim(IM, MD, MB, MI, MS) _ALULim(X86_OR, IM, MD, MB, MI, MS)
639
640 #define ORQrr(RS, RD) _ALUQrr(X86_OR, RS, RD)
641 #define ORQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_OR, MD, MB, MI, MS, RD)
642 #define ORQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_OR, RS, MD, MB, MI, MS)
643 #define ORQir(IM, RD) _ALUQir(X86_OR, IM, RD)
644 #define ORQim(IM, MD, MB, MI, MS) _ALUQim(X86_OR, IM, MD, MB, MI, MS)
645
646 #define SBBBrr(RS, RD) _ALUBrr(X86_SBB, RS, RD)
647 #define SBBBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_SBB, MD, MB, MI, MS, RD)
648 #define SBBBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_SBB, RS, MD, MB, MI, MS)
649 #define SBBBir(IM, RD) _ALUBir(X86_SBB, IM, RD)
650 #define SBBBim(IM, MD, MB, MI, MS) _ALUBim(X86_SBB, IM, MD, MB, MI, MS)
651
652 #define SBBWrr(RS, RD) _ALUWrr(X86_SBB, RS, RD)
653 #define SBBWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_SBB, MD, MB, MI, MS, RD)
654 #define SBBWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_SBB, RS, MD, MB, MI, MS)
655 #define SBBWir(IM, RD) _ALUWir(X86_SBB, IM, RD)
656 #define SBBWim(IM, MD, MB, MI, MS) _ALUWim(X86_SBB, IM, MD, MB, MI, MS)
657
658 #define SBBLrr(RS, RD) _ALULrr(X86_SBB, RS, RD)
659 #define SBBLmr(MD, MB, MI, MS, RD) _ALULmr(X86_SBB, MD, MB, MI, MS, RD)
660 #define SBBLrm(RS, MD, MB, MI, MS) _ALULrm(X86_SBB, RS, MD, MB, MI, MS)
661 #define SBBLir(IM, RD) _ALULir(X86_SBB, IM, RD)
662 #define SBBLim(IM, MD, MB, MI, MS) _ALULim(X86_SBB, IM, MD, MB, MI, MS)
663
664 #define SBBQrr(RS, RD) _ALUQrr(X86_SBB, RS, RD)
665 #define SBBQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_SBB, MD, MB, MI, MS, RD)
666 #define SBBQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_SBB, RS, MD, MB, MI, MS)
667 #define SBBQir(IM, RD) _ALUQir(X86_SBB, IM, RD)
668 #define SBBQim(IM, MD, MB, MI, MS) _ALUQim(X86_SBB, IM, MD, MB, MI, MS)
669
670 #define SUBBrr(RS, RD) _ALUBrr(X86_SUB, RS, RD)
671 #define SUBBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_SUB, MD, MB, MI, MS, RD)
672 #define SUBBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_SUB, RS, MD, MB, MI, MS)
673 #define SUBBir(IM, RD) _ALUBir(X86_SUB, IM, RD)
674 #define SUBBim(IM, MD, MB, MI, MS) _ALUBim(X86_SUB, IM, MD, MB, MI, MS)
675
676 #define SUBWrr(RS, RD) _ALUWrr(X86_SUB, RS, RD)
677 #define SUBWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_SUB, MD, MB, MI, MS, RD)
678 #define SUBWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_SUB, RS, MD, MB, MI, MS)
679 #define SUBWir(IM, RD) _ALUWir(X86_SUB, IM, RD)
680 #define SUBWim(IM, MD, MB, MI, MS) _ALUWim(X86_SUB, IM, MD, MB, MI, MS)
681
682 #define SUBLrr(RS, RD) _ALULrr(X86_SUB, RS, RD)
683 #define SUBLmr(MD, MB, MI, MS, RD) _ALULmr(X86_SUB, MD, MB, MI, MS, RD)
684 #define SUBLrm(RS, MD, MB, MI, MS) _ALULrm(X86_SUB, RS, MD, MB, MI, MS)
685 #define SUBLir(IM, RD) _ALULir(X86_SUB, IM, RD)
686 #define SUBLim(IM, MD, MB, MI, MS) _ALULim(X86_SUB, IM, MD, MB, MI, MS)
687
688 #define SUBQrr(RS, RD) _ALUQrr(X86_SUB, RS, RD)
689 #define SUBQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_SUB, MD, MB, MI, MS, RD)
690 #define SUBQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_SUB, RS, MD, MB, MI, MS)
691 #define SUBQir(IM, RD) _ALUQir(X86_SUB, IM, RD)
692 #define SUBQim(IM, MD, MB, MI, MS) _ALUQim(X86_SUB, IM, MD, MB, MI, MS)
693
694 #define XORBrr(RS, RD) _ALUBrr(X86_XOR, RS, RD)
695 #define XORBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_XOR, MD, MB, MI, MS, RD)
696 #define XORBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_XOR, RS, MD, MB, MI, MS)
697 #define XORBir(IM, RD) _ALUBir(X86_XOR, IM, RD)
698 #define XORBim(IM, MD, MB, MI, MS) _ALUBim(X86_XOR, IM, MD, MB, MI, MS)
699
700 #define XORWrr(RS, RD) _ALUWrr(X86_XOR, RS, RD)
701 #define XORWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_XOR, MD, MB, MI, MS, RD)
702 #define XORWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_XOR, RS, MD, MB, MI, MS)
703 #define XORWir(IM, RD) _ALUWir(X86_XOR, IM, RD)
704 #define XORWim(IM, MD, MB, MI, MS) _ALUWim(X86_XOR, IM, MD, MB, MI, MS)
705
706 #define XORLrr(RS, RD) _ALULrr(X86_XOR, RS, RD)
707 #define XORLmr(MD, MB, MI, MS, RD) _ALULmr(X86_XOR, MD, MB, MI, MS, RD)
708 #define XORLrm(RS, MD, MB, MI, MS) _ALULrm(X86_XOR, RS, MD, MB, MI, MS)
709 #define XORLir(IM, RD) _ALULir(X86_XOR, IM, RD)
710 #define XORLim(IM, MD, MB, MI, MS) _ALULim(X86_XOR, IM, MD, MB, MI, MS)
711
712 #define XORQrr(RS, RD) _ALUQrr(X86_XOR, RS, RD)
713 #define XORQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_XOR, MD, MB, MI, MS, RD)
714 #define XORQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_XOR, RS, MD, MB, MI, MS)
715 #define XORQir(IM, RD) _ALUQir(X86_XOR, IM, RD)
716 #define XORQim(IM, MD, MB, MI, MS) _ALUQim(X86_XOR, IM, MD, MB, MI, MS)
717
718
719 /* --- Shift/Rotate instructions ------------------------------------------- */
720
721 enum {
722 X86_ROL = 0,
723 X86_ROR = 1,
724 X86_RCL = 2,
725 X86_RCR = 3,
726 X86_SHL = 4,
727 X86_SHR = 5,
728 X86_SAR = 7,
729 };
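/* Shift/rotate forms use opcode groups 0xC0/0xC1 (imm8), 0xD0/0xD1 (by 1)
   and 0xD2/0xD3 (by CL), with the X86_* code in the ModRM reg field;
   e.g. SHLLir(3, X86_EAX) emits C1 E0 03 (shl eax, 3). */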
730
731 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
732
733 #define _ROTSHIBir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
734 (_REXBrr(0, RD), _O_Mrm (0xd0 ,_b11,OP,_r1(RD) )) : \
735 (_REXBrr(0, RD), _O_Mrm_B (0xc0 ,_b11,OP,_r1(RD) ,_u8(IM))) )
736 #define _ROTSHIBim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
737 (_REXBrm(0, MB, MI), _O_r_X (0xd0 ,OP ,MD,MB,MI,MS )) : \
738 (_REXBrm(0, MB, MI), _O_r_X_B (0xc0 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
739 #define _ROTSHIBrr(OP,RS,RD) (((RS) == X86_CL) ? \
740 (_REXBrr(RS, RD), _O_Mrm (0xd2 ,_b11,OP,_r1(RD) )) : \
741 x86_emit_failure("source register must be CL" ) )
742 #define _ROTSHIBrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
743 (_REXBrm(RS, MB, MI), _O_r_X (0xd2 ,OP ,MD,MB,MI,MS )) : \
744 x86_emit_failure("source register must be CL" ) )
745
746 #define _ROTSHIWir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
747 (_d16(), _REXLrr(0, RD), _O_Mrm (0xd1 ,_b11,OP,_r2(RD) )) : \
748 (_d16(), _REXLrr(0, RD), _O_Mrm_B (0xc1 ,_b11,OP,_r2(RD) ,_u8(IM))) )
749 #define _ROTSHIWim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
750 (_d16(), _REXLrm(0, MB, MI), _O_r_X (0xd1 ,OP ,MD,MB,MI,MS )) : \
751 (_d16(), _REXLrm(0, MB, MI), _O_r_X_B (0xc1 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
752 #define _ROTSHIWrr(OP,RS,RD) (((RS) == X86_CL) ? \
753 (_d16(), _REXLrr(RS, RD), _O_Mrm (0xd3 ,_b11,OP,_r2(RD) )) : \
754 x86_emit_failure("source register must be CL" ) )
755 #define _ROTSHIWrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
756 (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0xd3 ,OP ,MD,MB,MI,MS )) : \
757 x86_emit_failure("source register must be CL" ) )
758
759 #define _ROTSHILir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
760 (_REXLrr(0, RD), _O_Mrm (0xd1 ,_b11,OP,_r4(RD) )) : \
761 (_REXLrr(0, RD), _O_Mrm_B (0xc1 ,_b11,OP,_r4(RD) ,_u8(IM))) )
762 #define _ROTSHILim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
763 (_REXLrm(0, MB, MI), _O_r_X (0xd1 ,OP ,MD,MB,MI,MS )) : \
764 (_REXLrm(0, MB, MI), _O_r_X_B (0xc1 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
765 #define _ROTSHILrr(OP,RS,RD) (((RS) == X86_CL) ? \
766 (_REXLrr(RS, RD), _O_Mrm (0xd3 ,_b11,OP,_r4(RD) )) : \
767 x86_emit_failure("source register must be CL" ) )
768 #define _ROTSHILrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
769 (_REXLrm(RS, MB, MI), _O_r_X (0xd3 ,OP ,MD,MB,MI,MS )) : \
770 x86_emit_failure("source register must be CL" ) )
771
772 #define _ROTSHIQir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
773 (_REXQrr(0, RD), _O_Mrm (0xd1 ,_b11,OP,_r8(RD) )) : \
774 (_REXQrr(0, RD), _O_Mrm_B (0xc1 ,_b11,OP,_r8(RD) ,_u8(IM))) )
775 #define _ROTSHIQim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
776 (_REXQrm(0, MB, MI), _O_r_X (0xd1 ,OP ,MD,MB,MI,MS )) : \
777 (_REXQrm(0, MB, MI), _O_r_X_B (0xc1 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
778 #define _ROTSHIQrr(OP,RS,RD) (((RS) == X86_CL) ? \
779 (_REXQrr(RS, RD), _O_Mrm (0xd3 ,_b11,OP,_r8(RD) )) : \
780 x86_emit_failure("source register must be CL" ) )
781 #define _ROTSHIQrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
782 (_REXQrm(RS, MB, MI), _O_r_X (0xd3 ,OP ,MD,MB,MI,MS )) : \
783 x86_emit_failure("source register must be CL" ) )
784
785 #define ROLBir(IM, RD) _ROTSHIBir(X86_ROL, IM, RD)
786 #define ROLBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_ROL, IM, MD, MB, MI, MS)
787 #define ROLBrr(RS, RD) _ROTSHIBrr(X86_ROL, RS, RD)
788 #define ROLBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_ROL, RS, MD, MB, MI, MS)
789
790 #define ROLWir(IM, RD) _ROTSHIWir(X86_ROL, IM, RD)
791 #define ROLWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_ROL, IM, MD, MB, MI, MS)
792 #define ROLWrr(RS, RD) _ROTSHIWrr(X86_ROL, RS, RD)
793 #define ROLWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_ROL, RS, MD, MB, MI, MS)
794
795 #define ROLLir(IM, RD) _ROTSHILir(X86_ROL, IM, RD)
796 #define ROLLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_ROL, IM, MD, MB, MI, MS)
797 #define ROLLrr(RS, RD) _ROTSHILrr(X86_ROL, RS, RD)
798 #define ROLLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_ROL, RS, MD, MB, MI, MS)
799
800 #define ROLQir(IM, RD) _ROTSHIQir(X86_ROL, IM, RD)
801 #define ROLQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_ROL, IM, MD, MB, MI, MS)
802 #define ROLQrr(RS, RD) _ROTSHIQrr(X86_ROL, RS, RD)
803 #define ROLQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_ROL, RS, MD, MB, MI, MS)
804
805 #define RORBir(IM, RD) _ROTSHIBir(X86_ROR, IM, RD)
806 #define RORBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_ROR, IM, MD, MB, MI, MS)
807 #define RORBrr(RS, RD) _ROTSHIBrr(X86_ROR, RS, RD)
808 #define RORBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_ROR, RS, MD, MB, MI, MS)
809
810 #define RORWir(IM, RD) _ROTSHIWir(X86_ROR, IM, RD)
811 #define RORWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_ROR, IM, MD, MB, MI, MS)
812 #define RORWrr(RS, RD) _ROTSHIWrr(X86_ROR, RS, RD)
813 #define RORWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_ROR, RS, MD, MB, MI, MS)
814
815 #define RORLir(IM, RD) _ROTSHILir(X86_ROR, IM, RD)
816 #define RORLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_ROR, IM, MD, MB, MI, MS)
817 #define RORLrr(RS, RD) _ROTSHILrr(X86_ROR, RS, RD)
818 #define RORLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_ROR, RS, MD, MB, MI, MS)
819
820 #define RORQir(IM, RD) _ROTSHIQir(X86_ROR, IM, RD)
821 #define RORQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_ROR, IM, MD, MB, MI, MS)
822 #define RORQrr(RS, RD) _ROTSHIQrr(X86_ROR, RS, RD)
823 #define RORQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_ROR, RS, MD, MB, MI, MS)
824
825 #define RCLBir(IM, RD) _ROTSHIBir(X86_RCL, IM, RD)
826 #define RCLBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_RCL, IM, MD, MB, MI, MS)
827 #define RCLBrr(RS, RD) _ROTSHIBrr(X86_RCL, RS, RD)
828 #define RCLBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_RCL, RS, MD, MB, MI, MS)
829
830 #define RCLWir(IM, RD) _ROTSHIWir(X86_RCL, IM, RD)
831 #define RCLWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_RCL, IM, MD, MB, MI, MS)
832 #define RCLWrr(RS, RD) _ROTSHIWrr(X86_RCL, RS, RD)
833 #define RCLWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_RCL, RS, MD, MB, MI, MS)
834
835 #define RCLLir(IM, RD) _ROTSHILir(X86_RCL, IM, RD)
836 #define RCLLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_RCL, IM, MD, MB, MI, MS)
837 #define RCLLrr(RS, RD) _ROTSHILrr(X86_RCL, RS, RD)
838 #define RCLLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_RCL, RS, MD, MB, MI, MS)
839
840 #define RCLQir(IM, RD) _ROTSHIQir(X86_RCL, IM, RD)
841 #define RCLQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_RCL, IM, MD, MB, MI, MS)
842 #define RCLQrr(RS, RD) _ROTSHIQrr(X86_RCL, RS, RD)
843 #define RCLQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_RCL, RS, MD, MB, MI, MS)
844
845 #define RCRBir(IM, RD) _ROTSHIBir(X86_RCR, IM, RD)
846 #define RCRBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_RCR, IM, MD, MB, MI, MS)
847 #define RCRBrr(RS, RD) _ROTSHIBrr(X86_RCR, RS, RD)
848 #define RCRBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_RCR, RS, MD, MB, MI, MS)
849
850 #define RCRWir(IM, RD) _ROTSHIWir(X86_RCR, IM, RD)
851 #define RCRWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_RCR, IM, MD, MB, MI, MS)
852 #define RCRWrr(RS, RD) _ROTSHIWrr(X86_RCR, RS, RD)
853 #define RCRWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_RCR, RS, MD, MB, MI, MS)
854
855 #define RCRLir(IM, RD) _ROTSHILir(X86_RCR, IM, RD)
856 #define RCRLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_RCR, IM, MD, MB, MI, MS)
857 #define RCRLrr(RS, RD) _ROTSHILrr(X86_RCR, RS, RD)
858 #define RCRLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_RCR, RS, MD, MB, MI, MS)
859
860 #define RCRQir(IM, RD) _ROTSHIQir(X86_RCR, IM, RD)
861 #define RCRQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_RCR, IM, MD, MB, MI, MS)
862 #define RCRQrr(RS, RD) _ROTSHIQrr(X86_RCR, RS, RD)
863 #define RCRQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_RCR, RS, MD, MB, MI, MS)
864
865 #define SHLBir(IM, RD) _ROTSHIBir(X86_SHL, IM, RD)
866 #define SHLBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_SHL, IM, MD, MB, MI, MS)
867 #define SHLBrr(RS, RD) _ROTSHIBrr(X86_SHL, RS, RD)
868 #define SHLBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_SHL, RS, MD, MB, MI, MS)
869
870 #define SHLWir(IM, RD) _ROTSHIWir(X86_SHL, IM, RD)
871 #define SHLWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_SHL, IM, MD, MB, MI, MS)
872 #define SHLWrr(RS, RD) _ROTSHIWrr(X86_SHL, RS, RD)
873 #define SHLWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_SHL, RS, MD, MB, MI, MS)
874
875 #define SHLLir(IM, RD) _ROTSHILir(X86_SHL, IM, RD)
876 #define SHLLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_SHL, IM, MD, MB, MI, MS)
877 #define SHLLrr(RS, RD) _ROTSHILrr(X86_SHL, RS, RD)
878 #define SHLLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_SHL, RS, MD, MB, MI, MS)
879
880 #define SHLQir(IM, RD) _ROTSHIQir(X86_SHL, IM, RD)
881 #define SHLQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_SHL, IM, MD, MB, MI, MS)
882 #define SHLQrr(RS, RD) _ROTSHIQrr(X86_SHL, RS, RD)
883 #define SHLQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_SHL, RS, MD, MB, MI, MS)
884
885 #define SHRBir(IM, RD) _ROTSHIBir(X86_SHR, IM, RD)
886 #define SHRBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_SHR, IM, MD, MB, MI, MS)
887 #define SHRBrr(RS, RD) _ROTSHIBrr(X86_SHR, RS, RD)
888 #define SHRBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_SHR, RS, MD, MB, MI, MS)
889
890 #define SHRWir(IM, RD) _ROTSHIWir(X86_SHR, IM, RD)
891 #define SHRWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_SHR, IM, MD, MB, MI, MS)
892 #define SHRWrr(RS, RD) _ROTSHIWrr(X86_SHR, RS, RD)
893 #define SHRWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_SHR, RS, MD, MB, MI, MS)
894
895 #define SHRLir(IM, RD) _ROTSHILir(X86_SHR, IM, RD)
896 #define SHRLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_SHR, IM, MD, MB, MI, MS)
897 #define SHRLrr(RS, RD) _ROTSHILrr(X86_SHR, RS, RD)
898 #define SHRLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_SHR, RS, MD, MB, MI, MS)
899
900 #define SHRQir(IM, RD) _ROTSHIQir(X86_SHR, IM, RD)
901 #define SHRQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_SHR, IM, MD, MB, MI, MS)
902 #define SHRQrr(RS, RD) _ROTSHIQrr(X86_SHR, RS, RD)
903 #define SHRQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_SHR, RS, MD, MB, MI, MS)
904
905 #define SALBir SHLBir
906 #define SALBim SHLBim
907 #define SALBrr SHLBrr
908 #define SALBrm SHLBrm
909
910 #define SALWir SHLWir
911 #define SALWim SHLWim
912 #define SALWrr SHLWrr
913 #define SALWrm SHLWrm
914
915 #define SALLir SHLLir
916 #define SALLim SHLLim
917 #define SALLrr SHLLrr
918 #define SALLrm SHLLrm
919
920 #define SALQir SHLQir
921 #define SALQim SHLQim
922 #define SALQrr SHLQrr
923 #define SALQrm SHLQrm
924
925 #define SARBir(IM, RD) _ROTSHIBir(X86_SAR, IM, RD)
926 #define SARBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_SAR, IM, MD, MB, MI, MS)
927 #define SARBrr(RS, RD) _ROTSHIBrr(X86_SAR, RS, RD)
928 #define SARBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_SAR, RS, MD, MB, MI, MS)
929
930 #define SARWir(IM, RD) _ROTSHIWir(X86_SAR, IM, RD)
931 #define SARWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_SAR, IM, MD, MB, MI, MS)
932 #define SARWrr(RS, RD) _ROTSHIWrr(X86_SAR, RS, RD)
933 #define SARWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_SAR, RS, MD, MB, MI, MS)
934
935 #define SARLir(IM, RD) _ROTSHILir(X86_SAR, IM, RD)
936 #define SARLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_SAR, IM, MD, MB, MI, MS)
937 #define SARLrr(RS, RD) _ROTSHILrr(X86_SAR, RS, RD)
938 #define SARLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_SAR, RS, MD, MB, MI, MS)
939
940 #define SARQir(IM, RD) _ROTSHIQir(X86_SAR, IM, RD)
941 #define SARQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_SAR, IM, MD, MB, MI, MS)
942 #define SARQrr(RS, RD) _ROTSHIQrr(X86_SAR, RS, RD)
943 #define SARQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_SAR, RS, MD, MB, MI, MS)
944
945
946 /* --- Bit test instructions ----------------------------------------------- */
947
948 enum {
949 X86_BT = 4,
950 X86_BTS = 5,
951 X86_BTR = 6,
952 X86_BTC = 7,
953 };
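/* Immediate forms use 0F BA with the X86_* code in the ModRM reg field;
   register forms use 0x0f83|(code<<3), giving 0F A3/AB/B3/BB for
   BT/BTS/BTR/BTC; e.g. BTLir(5, X86_ECX) emits 0F BA E1 05 (bt ecx, 5). */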
954
955 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
956
957 #define _BTWir(OP, IM, RD) (_d16(), _REXLrr(0, RD), _OO_Mrm_B (0x0fba ,_b11,OP ,_r2(RD) ,_u8(IM)))
958 #define _BTWim(OP, IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _OO_r_X_B (0x0fba ,OP ,MD,MB,MI,MS ,_u8(IM)))
959 #define _BTWrr(OP, RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0f83|((OP)<<3),_b11,_r2(RS),_r2(RD) ))
960 #define _BTWrm(OP, RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _OO_r_X (0x0f83|((OP)<<3) ,_r2(RS) ,MD,MB,MI,MS ))
961
962 #define _BTLir(OP, IM, RD) (_REXLrr(0, RD), _OO_Mrm_B (0x0fba ,_b11,OP ,_r4(RD) ,_u8(IM)))
963 #define _BTLim(OP, IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _OO_r_X_B (0x0fba ,OP ,MD,MB,MI,MS ,_u8(IM)))
964 #define _BTLrr(OP, RS, RD) (_REXLrr(RS, RD), _OO_Mrm (0x0f83|((OP)<<3),_b11,_r4(RS),_r4(RD) ))
965 #define _BTLrm(OP, RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0f83|((OP)<<3) ,_r4(RS) ,MD,MB,MI,MS ))
966
967 #define _BTQir(OP, IM, RD) (_REXQrr(0, RD), _OO_Mrm_B (0x0fba ,_b11,OP ,_r8(RD) ,_u8(IM)))
968 #define _BTQim(OP, IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _OO_r_X_B (0x0fba ,OP ,MD,MB,MI,MS ,_u8(IM)))
969 #define _BTQrr(OP, RS, RD) (_REXQrr(RS, RD), _OO_Mrm (0x0f83|((OP)<<3),_b11,_r8(RS),_r8(RD) ))
970 #define _BTQrm(OP, RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0f83|((OP)<<3) ,_r8(RS) ,MD,MB,MI,MS ))
971
972 #define BTWir(IM, RD) _BTWir(X86_BT, IM, RD)
973 #define BTWim(IM, MD, MB, MI, MS) _BTWim(X86_BT, IM, MD, MB, MI, MS)
974 #define BTWrr(RS, RD) _BTWrr(X86_BT, RS, RD)
975 #define BTWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BT, RS, MD, MB, MI, MS)
976
977 #define BTLir(IM, RD) _BTLir(X86_BT, IM, RD)
978 #define BTLim(IM, MD, MB, MI, MS) _BTLim(X86_BT, IM, MD, MB, MI, MS)
979 #define BTLrr(RS, RD) _BTLrr(X86_BT, RS, RD)
980 #define BTLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BT, RS, MD, MB, MI, MS)
981
982 #define BTQir(IM, RD) _BTQir(X86_BT, IM, RD)
983 #define BTQim(IM, MD, MB, MI, MS) _BTQim(X86_BT, IM, MD, MB, MI, MS)
984 #define BTQrr(RS, RD) _BTQrr(X86_BT, RS, RD)
985 #define BTQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BT, RS, MD, MB, MI, MS)
986
987 #define BTCWir(IM, RD) _BTWir(X86_BTC, IM, RD)
988 #define BTCWim(IM, MD, MB, MI, MS) _BTWim(X86_BTC, IM, MD, MB, MI, MS)
989 #define BTCWrr(RS, RD) _BTWrr(X86_BTC, RS, RD)
990 #define BTCWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BTC, RS, MD, MB, MI, MS)
991
992 #define BTCLir(IM, RD) _BTLir(X86_BTC, IM, RD)
993 #define BTCLim(IM, MD, MB, MI, MS) _BTLim(X86_BTC, IM, MD, MB, MI, MS)
994 #define BTCLrr(RS, RD) _BTLrr(X86_BTC, RS, RD)
995 #define BTCLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BTC, RS, MD, MB, MI, MS)
996
997 #define BTCQir(IM, RD) _BTQir(X86_BTC, IM, RD)
998 #define BTCQim(IM, MD, MB, MI, MS) _BTQim(X86_BTC, IM, MD, MB, MI, MS)
999 #define BTCQrr(RS, RD) _BTQrr(X86_BTC, RS, RD)
1000 #define BTCQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BTC, RS, MD, MB, MI, MS)
1001
1002 #define BTRWir(IM, RD) _BTWir(X86_BTR, IM, RD)
1003 #define BTRWim(IM, MD, MB, MI, MS) _BTWim(X86_BTR, IM, MD, MB, MI, MS)
1004 #define BTRWrr(RS, RD) _BTWrr(X86_BTR, RS, RD)
1005 #define BTRWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BTR, RS, MD, MB, MI, MS)
1006
1007 #define BTRLir(IM, RD) _BTLir(X86_BTR, IM, RD)
1008 #define BTRLim(IM, MD, MB, MI, MS) _BTLim(X86_BTR, IM, MD, MB, MI, MS)
1009 #define BTRLrr(RS, RD) _BTLrr(X86_BTR, RS, RD)
1010 #define BTRLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BTR, RS, MD, MB, MI, MS)
1011
1012 #define BTRQir(IM, RD) _BTQir(X86_BTR, IM, RD)
1013 #define BTRQim(IM, MD, MB, MI, MS) _BTQim(X86_BTR, IM, MD, MB, MI, MS)
1014 #define BTRQrr(RS, RD) _BTQrr(X86_BTR, RS, RD)
1015 #define BTRQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BTR, RS, MD, MB, MI, MS)
1016
1017 #define BTSWir(IM, RD) _BTWir(X86_BTS, IM, RD)
1018 #define BTSWim(IM, MD, MB, MI, MS) _BTWim(X86_BTS, IM, MD, MB, MI, MS)
1019 #define BTSWrr(RS, RD) _BTWrr(X86_BTS, RS, RD)
1020 #define BTSWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BTS, RS, MD, MB, MI, MS)
1021
1022 #define BTSLir(IM, RD) _BTLir(X86_BTS, IM, RD)
1023 #define BTSLim(IM, MD, MB, MI, MS) _BTLim(X86_BTS, IM, MD, MB, MI, MS)
1024 #define BTSLrr(RS, RD) _BTLrr(X86_BTS, RS, RD)
1025 #define BTSLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BTS, RS, MD, MB, MI, MS)
1026
1027 #define BTSQir(IM, RD) _BTQir(X86_BTS, IM, RD)
1028 #define BTSQim(IM, MD, MB, MI, MS) _BTQim(X86_BTS, IM, MD, MB, MI, MS)
1029 #define BTSQrr(RS, RD) _BTQrr(X86_BTS, RS, RD)
1030 #define BTSQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BTS, RS, MD, MB, MI, MS)
1031
1032
1033 /* --- Move instructions --------------------------------------------------- */
1034
1035 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1036
1037 #define MOVBrr(RS, RD) (_REXBrr(RS, RD), _O_Mrm (0x88 ,_b11,_r1(RS),_r1(RD) ))
1038 #define MOVBmr(MD, MB, MI, MS, RD) (_REXBmr(MB, MI, RD), _O_r_X (0x8a ,_r1(RD) ,MD,MB,MI,MS ))
1039 #define MOVBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (0x88 ,_r1(RS) ,MD,MB,MI,MS ))
1040 #define MOVBir(IM, R) (_REXBrr(0, R), _Or_B (0xb0,_r1(R) ,_su8(IM)))
1041 #define MOVBim(IM, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_X_B (0xc6 ,MD,MB,MI,MS ,_su8(IM)))
1042
1043 #define MOVWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (0x89 ,_b11,_r2(RS),_r2(RD) ))
1044 #define MOVWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _O_r_X (0x8b ,_r2(RD) ,MD,MB,MI,MS ))
1045 #define MOVWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0x89 ,_r2(RS) ,MD,MB,MI,MS ))
1046 #define MOVWir(IM, R) (_d16(), _REXLrr(0, R), _Or_W (0xb8,_r2(R) ,_su16(IM)))
1047 #define MOVWim(IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_X_W (0xc7 ,MD,MB,MI,MS ,_su16(IM)))
1048
1049 #define MOVLrr(RS, RD) (_REXLrr(RS, RD), _O_Mrm (0x89 ,_b11,_r4(RS),_r4(RD) ))
1050 #define MOVLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _O_r_X (0x8b ,_r4(RD) ,MD,MB,MI,MS ))
1051 #define MOVLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (0x89 ,_r4(RS) ,MD,MB,MI,MS ))
1052 #define MOVLir(IM, R) (_REXLrr(0, R), _Or_L (0xb8,_r4(R) ,IM ))
1053 #define MOVLim(IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_X_L (0xc7 ,MD,MB,MI,MS ,IM ))
1054
1055 #define MOVQrr(RS, RD) (_REXQrr(RS, RD), _O_Mrm (0x89 ,_b11,_r8(RS),_r8(RD) ))
1056 #define MOVQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _O_r_X (0x8b ,_r8(RD) ,MD,MB,MI,MS ))
1057 #define MOVQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (0x89 ,_r8(RS) ,MD,MB,MI,MS ))
1058 #define MOVQir(IM, R) (_REXQrr(0, R), _Or_L (0xb8,_r8(R) ,IM ))
1059 #define MOVQim(IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_X_L (0xc7 ,MD,MB,MI,MS ,IM ))
1060
1061
1062 /* --- Unary and Multiply/Divide instructions ------------------------------ */
1063
1064 enum {
1065 X86_NOT = 2,
1066 X86_NEG = 3,
1067 X86_MUL = 4,
1068 X86_IMUL = 5,
1069 X86_DIV = 6,
1070 X86_IDIV = 7,
1071 };
1072
1073 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1074
1075 #define _UNARYBr(OP, RS) (_REXBrr(0, RS), _O_Mrm (0xf6 ,_b11,OP ,_r1(RS) ))
1076 #define _UNARYBm(OP, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X (0xf6 ,OP ,MD,MB,MI,MS ))
1077 #define _UNARYWr(OP, RS) (_d16(), _REXLrr(0, RS), _O_Mrm (0xf7 ,_b11,OP ,_r2(RS) ))
1078 #define _UNARYWm(OP, MD, MB, MI, MS) (_d16(), _REXLmr(MB, MI, 0), _O_r_X (0xf7 ,OP ,MD,MB,MI,MS ))
1079 #define _UNARYLr(OP, RS) (_REXLrr(0, RS), _O_Mrm (0xf7 ,_b11,OP ,_r4(RS) ))
1080 #define _UNARYLm(OP, MD, MB, MI, MS) (_REXLmr(MB, MI, 0), _O_r_X (0xf7 ,OP ,MD,MB,MI,MS ))
1081 #define _UNARYQr(OP, RS) (_REXQrr(0, RS), _O_Mrm (0xf7 ,_b11,OP ,_r8(RS) ))
1082 #define _UNARYQm(OP, MD, MB, MI, MS) (_REXQmr(MB, MI, 0), _O_r_X (0xf7 ,OP ,MD,MB,MI,MS ))
1083
1084 #define NOTBr(RS) _UNARYBr(X86_NOT, RS)
1085 #define NOTBm(MD, MB, MI, MS) _UNARYBm(X86_NOT, MD, MB, MI, MS)
1086 #define NOTWr(RS) _UNARYWr(X86_NOT, RS)
1087 #define NOTWm(MD, MB, MI, MS) _UNARYWm(X86_NOT, MD, MB, MI, MS)
1088 #define NOTLr(RS) _UNARYLr(X86_NOT, RS)
1089 #define NOTLm(MD, MB, MI, MS) _UNARYLm(X86_NOT, MD, MB, MI, MS)
1090 #define NOTQr(RS) _UNARYQr(X86_NOT, RS)
1091 #define NOTQm(MD, MB, MI, MS) _UNARYQm(X86_NOT, MD, MB, MI, MS)
1092
1093 #define NEGBr(RS) _UNARYBr(X86_NEG, RS)
1094 #define NEGBm(MD, MB, MI, MS) _UNARYBm(X86_NEG, MD, MB, MI, MS)
1095 #define NEGWr(RS) _UNARYWr(X86_NEG, RS)
1096 #define NEGWm(MD, MB, MI, MS) _UNARYWm(X86_NEG, MD, MB, MI, MS)
1097 #define NEGLr(RS) _UNARYLr(X86_NEG, RS)
1098 #define NEGLm(MD, MB, MI, MS) _UNARYLm(X86_NEG, MD, MB, MI, MS)
1099 #define NEGQr(RS) _UNARYQr(X86_NEG, RS)
1100 #define NEGQm(MD, MB, MI, MS) _UNARYQm(X86_NEG, MD, MB, MI, MS)
1101
1102 #define MULBr(RS) _UNARYBr(X86_MUL, RS)
1103 #define MULBm(MD, MB, MI, MS) _UNARYBm(X86_MUL, MD, MB, MI, MS)
1104 #define MULWr(RS) _UNARYWr(X86_MUL, RS)
1105 #define MULWm(MD, MB, MI, MS) _UNARYWm(X86_MUL, MD, MB, MI, MS)
1106 #define MULLr(RS) _UNARYLr(X86_MUL, RS)
1107 #define MULLm(MD, MB, MI, MS) _UNARYLm(X86_MUL, MD, MB, MI, MS)
1108 #define MULQr(RS) _UNARYQr(X86_MUL, RS)
1109 #define MULQm(MD, MB, MI, MS) _UNARYQm(X86_MUL, MD, MB, MI, MS)
1110
1111 #define IMULBr(RS) _UNARYBr(X86_IMUL, RS)
1112 #define IMULBm(MD, MB, MI, MS) _UNARYBm(X86_IMUL, MD, MB, MI, MS)
1113 #define IMULWr(RS) _UNARYWr(X86_IMUL, RS)
1114 #define IMULWm(MD, MB, MI, MS) _UNARYWm(X86_IMUL, MD, MB, MI, MS)
1115 #define IMULLr(RS) _UNARYLr(X86_IMUL, RS)
1116 #define IMULLm(MD, MB, MI, MS) _UNARYLm(X86_IMUL, MD, MB, MI, MS)
1117 #define IMULQr(RS) _UNARYQr(X86_IMUL, RS)
1118 #define IMULQm(MD, MB, MI, MS) _UNARYQm(X86_IMUL, MD, MB, MI, MS)
1119
1120 #define DIVBr(RS) _UNARYBr(X86_DIV, RS)
1121 #define DIVBm(MD, MB, MI, MS) _UNARYBm(X86_DIV, MD, MB, MI, MS)
1122 #define DIVWr(RS) _UNARYWr(X86_DIV, RS)
1123 #define DIVWm(MD, MB, MI, MS) _UNARYWm(X86_DIV, MD, MB, MI, MS)
1124 #define DIVLr(RS) _UNARYLr(X86_DIV, RS)
1125 #define DIVLm(MD, MB, MI, MS) _UNARYLm(X86_DIV, MD, MB, MI, MS)
1126 #define DIVQr(RS) _UNARYQr(X86_DIV, RS)
1127 #define DIVQm(MD, MB, MI, MS) _UNARYQm(X86_DIV, MD, MB, MI, MS)
1128
1129 #define IDIVBr(RS) _UNARYBr(X86_IDIV, RS)
1130 #define IDIVBm(MD, MB, MI, MS) _UNARYBm(X86_IDIV, MD, MB, MI, MS)
1131 #define IDIVWr(RS) _UNARYWr(X86_IDIV, RS)
1132 #define IDIVWm(MD, MB, MI, MS) _UNARYWm(X86_IDIV, MD, MB, MI, MS)
1133 #define IDIVLr(RS) _UNARYLr(X86_IDIV, RS)
1134 #define IDIVLm(MD, MB, MI, MS) _UNARYLm(X86_IDIV, MD, MB, MI, MS)
1135 #define IDIVQr(RS) _UNARYQr(X86_IDIV, RS)
1136 #define IDIVQm(MD, MB, MI, MS) _UNARYQm(X86_IDIV, MD, MB, MI, MS)
1137
1138 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1139
1140 #define IMULWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0faf ,_b11,_r2(RS),_r2(RD) ))
1141 #define IMULWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0faf ,_r2(RD) ,MD,MB,MI,MS ))
1142
1143 #define IMULWirr(IM,RS,RD) (_d16(), _REXLrr(RS, RD), _Os_Mrm_sW (0x69 ,_b11,_r2(RS),_r2(RD) ,_su16(IM) ))
1144 #define IMULWimr(IM,MD,MB,MI,MS,RD) (_d16(), _REXLmr(MB, MI, RD), _Os_r_X_sW (0x69 ,_r2(RD) ,MD,MB,MI,MS ,_su16(IM) ))
1145
1146 #define IMULLir(IM, RD) (_REXLrr(0, RD), _Os_Mrm_sL (0x69 ,_b11,_r4(RD),_r4(RD) ,IM ))
1147 #define IMULLrr(RS, RD) (_REXLrr(RD, RS), _OO_Mrm (0x0faf ,_b11,_r4(RD),_r4(RS) ))
1148 #define IMULLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0faf ,_r4(RD) ,MD,MB,MI,MS ))
1149
1150 #define IMULQir(IM, RD) (_REXQrr(0, RD), _Os_Mrm_sL (0x69 ,_b11,_r8(RD),_r8(RD) ,IM ))
1151 #define IMULQrr(RS, RD) (_REXQrr(RD, RS), _OO_Mrm (0x0faf ,_b11,_r8(RD),_r8(RS) ))
1152 #define IMULQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0faf ,_r8(RD) ,MD,MB,MI,MS ))
1153
1154 #define IMULLirr(IM,RS,RD) (_REXLrr(RS, RD), _Os_Mrm_sL (0x69 ,_b11,_r4(RS),_r4(RD) ,IM ))
1155 #define IMULLimr(IM,MD,MB,MI,MS,RD) (_REXLmr(MB, MI, RD), _Os_r_X_sL (0x69 ,_r4(RD) ,MD,MB,MI,MS ,IM ))
1156
1157 #define IMULQirr(IM,RS,RD) (_REXQrr(RS, RD), _Os_Mrm_sL (0x69 ,_b11,_r8(RS),_r8(RD) ,IM ))
1158 #define IMULQimr(IM,MD,MB,MI,MS,RD) (_REXQmr(MB, MI, RD), _Os_r_X_sL (0x69 ,_r8(RD) ,MD,MB,MI,MS ,IM ))
1159
1160
1161 /* --- Control Flow related instructions ----------------------------------- */
1162
1163 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1164
1165 // FIXME: no prefix is available to encode a 32-bit operand size in 64-bit mode
1166 #define CALLm(M) _O_D32 (0xe8 ,(int)(M) )
1167 #define CALLsr(R) (_REXLrr(0, R), _O_Mrm (0xff ,_b11,_b010,_r4(R) ))
1168 #define CALLQsr(R) (_REXQrr(0, R), _O_Mrm (0xff ,_b11,_b010,_r8(R) ))
1169 #define CALLsm(D,B,I,S) (_REXLrm(0, B, I), _O_r_X (0xff ,_b010 ,(int)(D),B,I,S ))
1170
1171 // FIXME: no prefix is available to encode a 32-bit operand size in 64-bit mode
1172 #define JMPSm(M) _O_D8 (0xeb ,(int)(M) )
1173 #define JMPm(M) _O_D32 (0xe9 ,(int)(M) )
1174 #define JMPsr(R) (_REXLrr(0, R), _O_Mrm (0xff ,_b11,_b100,_r4(R) ))
1175 #define JMPQsr(R) (_REXQrr(0, R), _O_Mrm (0xff ,_b11,_b100,_r8(R) ))
1176 #define JMPsm(D,B,I,S) (_REXLrm(0, B, I), _O_r_X (0xff ,_b100 ,(int)(D),B,I,S ))
1177
1178 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1179 #define JCCSim(CC, D) _O_D8 (0x70|(CC) ,(int)(D) )
1180 #define JOSm(D) JCCSim(0x0, D)
1181 #define JNOSm(D) JCCSim(0x1, D)
1182 #define JBSm(D) JCCSim(0x2, D)
1183 #define JNAESm(D) JCCSim(0x2, D)
1184 #define JNBSm(D) JCCSim(0x3, D)
1185 #define JAESm(D) JCCSim(0x3, D)
1186 #define JESm(D) JCCSim(0x4, D)
1187 #define JZSm(D) JCCSim(0x4, D)
1188 #define JNESm(D) JCCSim(0x5, D)
1189 #define JNZSm(D) JCCSim(0x5, D)
1190 #define JBESm(D) JCCSim(0x6, D)
1191 #define JNASm(D) JCCSim(0x6, D)
1192 #define JNBESm(D) JCCSim(0x7, D)
1193 #define JASm(D) JCCSim(0x7, D)
1194 #define JSSm(D) JCCSim(0x8, D)
1195 #define JNSSm(D) JCCSim(0x9, D)
1196 #define JPSm(D) JCCSim(0xa, D)
1197 #define JPESm(D) JCCSim(0xa, D)
1198 #define JNPSm(D) JCCSim(0xb, D)
1199 #define JPOSm(D) JCCSim(0xb, D)
1200 #define JLSm(D) JCCSim(0xc, D)
1201 #define JNGESm(D) JCCSim(0xc, D)
1202 #define JNLSm(D) JCCSim(0xd, D)
1203 #define JGESm(D) JCCSim(0xd, D)
1204 #define JLESm(D) JCCSim(0xe, D)
1205 #define JNGSm(D) JCCSim(0xe, D)
1206 #define JNLESm(D) JCCSim(0xf, D)
1207 #define JGSm(D) JCCSim(0xf, D)
1208
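/* Usage sketch (illustrative comment, not compiled): a backward short branch.
 * Assuming, as in GNU lightning, that the displacement argument is the
 * absolute target address, a simple count-down loop is:
 *
 *	unsigned char *loop = x86_get_target();
 *	DECLr(X86_ECX);				// decrement the counter
 *	JNZSm(loop);				// rel8 jump back while ECX != 0
 *
 * The short forms only reach targets within -128..+127 bytes; use the JCCim
 * forms below otherwise.
 */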
1209 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1210 #define JCCim(CC, D) _OO_D32 (0x0f80|(CC) ,(int)(D) )
1211 #define JOm(D) JCCim(0x0, D)
1212 #define JNOm(D) JCCim(0x1, D)
1213 #define JBm(D) JCCim(0x2, D)
1214 #define JNAEm(D) JCCim(0x2, D)
1215 #define JNBm(D) JCCim(0x3, D)
1216 #define JAEm(D) JCCim(0x3, D)
1217 #define JEm(D) JCCim(0x4, D)
1218 #define JZm(D) JCCim(0x4, D)
1219 #define JNEm(D) JCCim(0x5, D)
1220 #define JNZm(D) JCCim(0x5, D)
1221 #define JBEm(D) JCCim(0x6, D)
1222 #define JNAm(D) JCCim(0x6, D)
1223 #define JNBEm(D) JCCim(0x7, D)
1224 #define JAm(D) JCCim(0x7, D)
1225 #define JSm(D) JCCim(0x8, D)
1226 #define JNSm(D) JCCim(0x9, D)
1227 #define JPm(D) JCCim(0xa, D)
1228 #define JPEm(D) JCCim(0xa, D)
1229 #define JNPm(D) JCCim(0xb, D)
1230 #define JPOm(D) JCCim(0xb, D)
1231 #define JLm(D) JCCim(0xc, D)
1232 #define JNGEm(D) JCCim(0xc, D)
1233 #define JNLm(D) JCCim(0xd, D)
1234 #define JGEm(D) JCCim(0xd, D)
1235 #define JLEm(D) JCCim(0xe, D)
1236 #define JNGm(D) JCCim(0xe, D)
1237 #define JNLEm(D) JCCim(0xf, D)
1238 #define JGm(D) JCCim(0xf, D)
1239
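/* The JCCim forms emit the two-byte 0x0f 0x8x opcode with a rel32
 * displacement, so they can reach any target in the generated code.  Usage
 * mirrors the short forms above, e.g. (illustrative only, slow_path being a
 * hypothetical code address):
 *
 *	JNEm(slow_path);				// near jump taken when ZF is clear
 */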
1240 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1241 #define SETCCir(CC, RD) (_REXBrr(0, RD), _OO_Mrm (0x0f90|(CC) ,_b11,_b000,_r1(RD) ))
1242 #define SETOr(RD) SETCCir(0x0,RD)
1243 #define SETNOr(RD) SETCCir(0x1,RD)
1244 #define SETBr(RD) SETCCir(0x2,RD)
1245 #define SETNAEr(RD) SETCCir(0x2,RD)
1246 #define SETNBr(RD) SETCCir(0x3,RD)
1247 #define SETAEr(RD) SETCCir(0x3,RD)
1248 #define SETEr(RD) SETCCir(0x4,RD)
1249 #define SETZr(RD) SETCCir(0x4,RD)
1250 #define SETNEr(RD) SETCCir(0x5,RD)
1251 #define SETNZr(RD) SETCCir(0x5,RD)
1252 #define SETBEr(RD) SETCCir(0x6,RD)
1253 #define SETNAr(RD) SETCCir(0x6,RD)
1254 #define SETNBEr(RD) SETCCir(0x7,RD)
1255 #define SETAr(RD) SETCCir(0x7,RD)
1256 #define SETSr(RD) SETCCir(0x8,RD)
1257 #define SETNSr(RD) SETCCir(0x9,RD)
1258 #define SETPr(RD) SETCCir(0xa,RD)
1259 #define SETPEr(RD) SETCCir(0xa,RD)
1260 #define SETNPr(RD) SETCCir(0xb,RD)
1261 #define SETPOr(RD) SETCCir(0xb,RD)
1262 #define SETLr(RD) SETCCir(0xc,RD)
1263 #define SETNGEr(RD) SETCCir(0xc,RD)
1264 #define SETNLr(RD) SETCCir(0xd,RD)
1265 #define SETGEr(RD) SETCCir(0xd,RD)
1266 #define SETLEr(RD) SETCCir(0xe,RD)
1267 #define SETNGr(RD) SETCCir(0xe,RD)
1268 #define SETNLEr(RD) SETCCir(0xf,RD)
1269 #define SETGr(RD) SETCCir(0xf,RD)
1270
1271 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1272 #define SETCCim(CC,MD,MB,MI,MS) (_REXBrm(0, MB, MI), _OO_r_X (0x0f90|(CC) ,_b000 ,MD,MB,MI,MS ))
1273 #define SETOm(D, B, I, S) SETCCim(0x0, D, B, I, S)
1274 #define SETNOm(D, B, I, S) SETCCim(0x1, D, B, I, S)
1275 #define SETBm(D, B, I, S) SETCCim(0x2, D, B, I, S)
1276 #define SETNAEm(D, B, I, S) SETCCim(0x2, D, B, I, S)
1277 #define SETNBm(D, B, I, S) SETCCim(0x3, D, B, I, S)
1278 #define SETAEm(D, B, I, S) SETCCim(0x3, D, B, I, S)
1279 #define SETEm(D, B, I, S) SETCCim(0x4, D, B, I, S)
1280 #define SETZm(D, B, I, S) SETCCim(0x4, D, B, I, S)
1281 #define SETNEm(D, B, I, S) SETCCim(0x5, D, B, I, S)
1282 #define SETNZm(D, B, I, S) SETCCim(0x5, D, B, I, S)
1283 #define SETBEm(D, B, I, S) SETCCim(0x6, D, B, I, S)
1284 #define SETNAm(D, B, I, S) SETCCim(0x6, D, B, I, S)
1285 #define SETNBEm(D, B, I, S) SETCCim(0x7, D, B, I, S)
1286 #define SETAm(D, B, I, S) SETCCim(0x7, D, B, I, S)
1287 #define SETSm(D, B, I, S) SETCCim(0x8, D, B, I, S)
1288 #define SETNSm(D, B, I, S) SETCCim(0x9, D, B, I, S)
1289 #define SETPm(D, B, I, S) SETCCim(0xa, D, B, I, S)
1290 #define SETPEm(D, B, I, S) SETCCim(0xa, D, B, I, S)
1291 #define SETNPm(D, B, I, S) SETCCim(0xb, D, B, I, S)
1292 #define SETPOm(D, B, I, S) SETCCim(0xb, D, B, I, S)
1293 #define SETLm(D, B, I, S) SETCCim(0xc, D, B, I, S)
1294 #define SETNGEm(D, B, I, S) SETCCim(0xc, D, B, I, S)
1295 #define SETNLm(D, B, I, S) SETCCim(0xd, D, B, I, S)
1296 #define SETGEm(D, B, I, S) SETCCim(0xd, D, B, I, S)
1297 #define SETLEm(D, B, I, S) SETCCim(0xe, D, B, I, S)
1298 #define SETNGm(D, B, I, S) SETCCim(0xe, D, B, I, S)
1299 #define SETNLEm(D, B, I, S) SETCCim(0xf, D, B, I, S)
1300 #define SETGm(D, B, I, S) SETCCim(0xf, D, B, I, S)
1301
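/* Usage sketch (illustrative comment, not compiled): materialize a condition
 * flag as a 0/1 byte, assuming X86_AL is the 8-bit register constant defined
 * earlier in this file.
 *
 *	TESTLrr(X86_EAX, X86_EAX);		// set ZF from EAX
 *	SETNZr(X86_AL);				// AL := (EAX != 0)
 */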
1302
1303 /* --- Push/Pop instructions ----------------------------------------------- */
1304
1305 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1306
1307 #define POPWr(RD) _m32only((_d16(), _Or (0x58,_r2(RD) )))
1308 #define POPWm(MD, MB, MI, MS) _m32only((_d16(), _O_r_X (0x8f ,_b000 ,MD,MB,MI,MS )))
1309
1310 #define POPLr(RD) _m32only( _Or (0x58,_r4(RD) ))
1311 #define POPLm(MD, MB, MI, MS) _m32only( _O_r_X (0x8f ,_b000 ,MD,MB,MI,MS ))
1312
1313 #define POPQr(RD) _m64only( _Or (0x58,_r8(RD) ))
1314 #define POPQm(MD, MB, MI, MS) _m64only( _O_r_X (0x8f ,_b000 ,MD,MB,MI,MS ))
1315
1316 #define PUSHWr(RS) _m32only((_d16(), _Or (0x50,_r2(RS) )))
1317 #define PUSHWm(MD, MB, MI, MS)	_m32only((_d16(), _O_r_X	(0xff	,_b110		,MD,MB,MI,MS		)))
1318 #define PUSHWi(IM) _m32only((_d16(), _Os_sW (0x68 ,IM )))
1319
1320 #define PUSHLr(RS) _m32only( _Or (0x50,_r4(RS) ))
1321 #define PUSHLm(MD, MB, MI, MS) _m32only( _O_r_X (0xff ,_b110 ,MD,MB,MI,MS ))
1322 #define PUSHLi(IM) _m32only( _Os_sL (0x68 ,IM ))
1323
1324 #define PUSHQr(RS) _m64only( _Or (0x50,_r8(RS) ))
1325 #define PUSHQm(MD, MB, MI, MS) _m64only( _O_r_X (0xff ,_b110 ,MD,MB,MI,MS ))
1326 #define PUSHQi(IM) _m64only( _Os_sL (0x68 ,IM ))
1327
1328 #define POPA() (_d16(), _O (0x61 ))
1329 #define POPAD() _O (0x61 )
1330
1331 #define PUSHA() (_d16(), _O (0x60 ))
1332 #define PUSHAD() _O (0x60 )
1333
1334 #define POPF() (_d16(), _O (0x9d ))
1335 #define POPFD() _O (0x9d )
1336
1337 #define PUSHF()				(_d16(), _O			(0x9c		))
1338 #define PUSHFD()			_O			(0x9c		)
1339
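/* Usage sketch (illustrative comment, not compiled): pushes and pops use the
 * natural stack width, hence the _m32only/_m64only guards on the L and Q
 * forms.  Saving a scratch register around a helper call on a 32-bit target
 * (helper_function being a hypothetical code address):
 *
 *	PUSHLr(X86_EAX);
 *	CALLm(helper_function);
 *	POPLr(X86_EAX);
 */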
1340
1341 /* --- Test instructions --------------------------------------------------- */
1342
1343 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1344
1345 #define TESTBrr(RS, RD) (_REXBrr(RS, RD), _O_Mrm (0x84 ,_b11,_r1(RS),_r1(RD) ))
1346 #define TESTBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (0x84 ,_r1(RS) ,MD,MB,MI,MS ))
1347 #define TESTBir(IM, RD) (_REXBrr(0, RD), _O_Mrm_B (0xf6 ,_b11,_b000 ,_r1(RD) ,_u8(IM)))
1348 #define TESTBim(IM, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X_B (0xf6 ,_b000 ,MD,MB,MI,MS ,_u8(IM)))
1349
1350 #define TESTWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (0x85 ,_b11,_r2(RS),_r2(RD) ))
1351 #define TESTWrm(RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI), _O_r_X	(0x85		     ,_r2(RS)		,MD,MB,MI,MS		))
1352 #define TESTWir(IM, RD) (_d16(), _REXLrr(0, RD), _O_Mrm_W (0xf7 ,_b11,_b000 ,_r2(RD) ,_u16(IM)))
1353 #define TESTWim(IM, MD, MB, MI, MS)	(_d16(), _REXLrm(0, MB, MI), _O_r_X_W	(0xf7		     ,_b000		,MD,MB,MI,MS		,_u16(IM)))
1354
1355 #define TESTLrr(RS, RD) (_REXLrr(RS, RD), _O_Mrm (0x85 ,_b11,_r4(RS),_r4(RD) ))
1356 #define TESTLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (0x85 ,_r4(RS) ,MD,MB,MI,MS ))
1357 #define TESTLir(IM, RD) (_REXLrr(0, RD), _O_Mrm_L (0xf7 ,_b11,_b000 ,_r4(RD) ,IM ))
1358 #define TESTLim(IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X_L (0xf7 ,_b000 ,MD,MB,MI,MS ,IM ))
1359
1360 #define TESTQrr(RS, RD) (_REXQrr(RS, RD), _O_Mrm (0x85 ,_b11,_r8(RS),_r8(RD) ))
1361 #define TESTQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (0x85 ,_r8(RS) ,MD,MB,MI,MS ))
1362 #define TESTQir(IM, RD) (_REXQrr(0, RD), _O_Mrm_L (0xf7 ,_b11,_b000 ,_r8(RD) ,IM ))
1363 #define TESTQim(IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_r_X_L (0xf7 ,_b000 ,MD,MB,MI,MS ,IM ))
1364
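/* Usage sketch (illustrative comment, not compiled): TEST is a
 * non-destructive AND that only sets the flags, so it pairs naturally with
 * the Jcc and SETcc macros above (negative_case being a hypothetical code
 * address):
 *
 *	TESTLir(0x80000000, X86_EAX);	// probe the sign bit of EAX
 *	JNZm(negative_case);
 */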
1365
1366 /* --- Exchange instructions ----------------------------------------------- */
1367
1368 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1369
1370 #define CMPXCHGBrr(RS, RD) (_REXBrr(RS, RD), _OO_Mrm (0x0fb0 ,_b11,_r1(RS),_r1(RD) ))
1371 #define CMPXCHGBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _OO_r_X (0x0fb0 ,_r1(RS) ,MD,MB,MI,MS ))
1372
1373 #define CMPXCHGWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0fb1 ,_b11,_r2(RS),_r2(RD) ))
1374 #define CMPXCHGWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _OO_r_X (0x0fb1 ,_r2(RS) ,MD,MB,MI,MS ))
1375
1376 #define CMPXCHGLrr(RS, RD) (_REXLrr(RS, RD), _OO_Mrm (0x0fb1 ,_b11,_r4(RS),_r4(RD) ))
1377 #define CMPXCHGLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0fb1 ,_r4(RS) ,MD,MB,MI,MS ))
1378
1379 #define CMPXCHGQrr(RS, RD) (_REXQrr(RS, RD), _OO_Mrm (0x0fb1 ,_b11,_r8(RS),_r8(RD) ))
1380 #define CMPXCHGQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0fb1 ,_r8(RS) ,MD,MB,MI,MS ))
1381
1382 #define XADDBrr(RS, RD) (_REXBrr(RS, RD), _OO_Mrm (0x0fc0 ,_b11,_r1(RS),_r1(RD) ))
1383 #define XADDBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _OO_r_X (0x0fc0 ,_r1(RS) ,MD,MB,MI,MS ))
1384
1385 #define XADDWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0fc1 ,_b11,_r2(RS),_r2(RD) ))
1386 #define XADDWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _OO_r_X (0x0fc1 ,_r2(RS) ,MD,MB,MI,MS ))
1387
1388 #define XADDLrr(RS, RD) (_REXLrr(RS, RD), _OO_Mrm (0x0fc1 ,_b11,_r4(RS),_r4(RD) ))
1389 #define XADDLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0fc1 ,_r4(RS) ,MD,MB,MI,MS ))
1390
1391 #define XADDQrr(RS, RD) (_REXQrr(RS, RD), _OO_Mrm (0x0fc1 ,_b11,_r8(RS),_r8(RD) ))
1392 #define XADDQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0fc1 ,_r8(RS) ,MD,MB,MI,MS ))
1393
1394 #define XCHGBrr(RS, RD) (_REXBrr(RS, RD), _O_Mrm (0x86 ,_b11,_r1(RS),_r1(RD) ))
1395 #define XCHGBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (0x86 ,_r1(RS) ,MD,MB,MI,MS ))
1396
1397 #define XCHGWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (0x87 ,_b11,_r2(RS),_r2(RD) ))
1398 #define XCHGWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0x87 ,_r2(RS) ,MD,MB,MI,MS ))
1399
1400 #define XCHGLrr(RS, RD) (_REXLrr(RS, RD), _O_Mrm (0x87 ,_b11,_r4(RS),_r4(RD) ))
1401 #define XCHGLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (0x87 ,_r4(RS) ,MD,MB,MI,MS ))
1402
1403 #define XCHGQrr(RS, RD) (_REXQrr(RS, RD), _O_Mrm (0x87 ,_b11,_r8(RS),_r8(RD) ))
1404 #define XCHGQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (0x87 ,_r8(RS) ,MD,MB,MI,MS ))
1405
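/* Usage sketch (illustrative comment, not compiled): CMPXCHG compares its
 * destination operand with EAX (RAX for the Q forms); on a match the source
 * is stored and ZF is set, otherwise the destination is loaded into EAX and
 * ZF is cleared.  With the expected value in EAX and the new value in ECX:
 *
 *	CMPXCHGLrr(X86_ECX, X86_EDX);	// if (EDX == EAX) EDX := ECX; else EAX := EDX
 *	JNEm(retry_path);				// hypothetical retry label
 *
 * Note that the memory forms above do not emit a LOCK prefix by themselves.
 */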
1406
1407 /* --- Increment/Decrement instructions ------------------------------------ */
1408
1409 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1410
1411 #define DECBm(MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X (0xfe ,_b001 ,MD,MB,MI,MS ))
1412 #define DECBr(RD) (_REXBrr(0, RD), _O_Mrm (0xfe ,_b11,_b001 ,_r1(RD) ))
1413
1414 #define DECWm(MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_r_X (0xff ,_b001 ,MD,MB,MI,MS ))
1415 #define DECWr(RD) (! X86_TARGET_64BIT ? (_d16(), _Or (0x48,_r2(RD) )) : \
1416 (_d16(), _REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b001 ,_r2(RD) )))
1417
1418 #define DECLm(MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X (0xff ,_b001 ,MD,MB,MI,MS ))
1419 #define DECLr(RD) (! X86_TARGET_64BIT ? _Or (0x48,_r4(RD) ) : \
1420 (_REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b001 ,_r4(RD) )))
1421
1422 #define DECQm(MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_r_X (0xff ,_b001 ,MD,MB,MI,MS ))
1423 #define DECQr(RD) (_REXQrr(0, RD), _O_Mrm (0xff ,_b11,_b001 ,_r8(RD) ))
1424
1425 #define INCBm(MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X (0xfe ,_b000 ,MD,MB,MI,MS ))
1426 #define INCBr(RD) (_REXBrr(0, RD), _O_Mrm (0xfe ,_b11,_b000 ,_r1(RD) ))
1427
1428 #define INCWm(MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_r_X (0xff ,_b000 ,MD,MB,MI,MS ))
1429 #define INCWr(RD) (! X86_TARGET_64BIT ? (_d16(), _Or (0x40,_r2(RD) )) : \
1430 (_d16(), _REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b000 ,_r2(RD) )) )
1431
1432 #define INCLm(MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X (0xff ,_b000 ,MD,MB,MI,MS ))
1433 #define INCLr(RD) (! X86_TARGET_64BIT ? _Or (0x40,_r4(RD) ) : \
1434 (_REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b000 ,_r4(RD) )))
1435
1436 #define INCQm(MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_r_X (0xff ,_b000 ,MD,MB,MI,MS ))
1437 #define INCQr(RD) (_REXQrr(0, RD), _O_Mrm (0xff ,_b11,_b000 ,_r8(RD) ))
1438
1439
1440 /* --- Misc/Garbage instructions ------------------------------------------- */
1441
1442 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1443
1444 #define LEALmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _O_r_X (0x8d ,_r4(RD) ,MD,MB,MI,MS ))
1445
1446 #define BSWAPLr(R) (_REXLrr(0, R), _OOr (0x0fc8,_r4(R) ))
1447 #define BSWAPQr(R) (_REXQrr(0, R), _OOr (0x0fc8,_r8(R) ))
1448
1449 #define CLC() _O (0xf8 )
1450 #define STC() _O (0xf9 )
1451
1452 #define CMC() _O (0xf5 )
1453 #define CLD() _O (0xfc )
1454 #define STD() _O (0xfd )
1455
1456 #define CBTW() (_d16(), _O (0x98 ))
1457 #define CWTL() _O (0x98 )
1458 #define CLTQ()				_m64only((_REXQrr(0, 0), _O		(0x98				)))
1459
1460 #define CBW CBTW
1461 #define CWDE CWTL
1462 #define CDQE CLTQ
1463
1464 #define CWTD() (_d16(), _O (0x99 ))
1465 #define CLTD() _O (0x99 )
1466 #define CQTO()				_m64only((_REXQrr(0, 0), _O		(0x99				)))
1467
1468 #define CWD CWTD
1469 #define CDQ CLTD
1470 #define CQO CQTO
1471
1472 #define LAHF() _m32only( _O (0x9f ))
1473 #define SAHF() _m32only( _O (0x9e ))
1474
1475 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1476
1477 #define RDTSC()				_OO			(0x0f31			)
1478
1479 #define ENTERii(W, B) _O_W_B (0xc8 ,_su16(W),_su8(B))
1480
1481 #define LEAVE() _O (0xc9 )
1482 #define RET() _O (0xc3 )
1483 #define RETi(IM) _O_W (0xc2 ,_su16(IM))
1484
1485 #define NOP() _O (0x90 )
1486
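/* Usage sketch (illustrative comment, not compiled): CLTD sign-extends EAX
 * into EDX:EAX, the usual setup before a 32-bit signed divide, while
 * ENTER/LEAVE/RET frame a generated function:
 *
 *	ENTERii(16, 0);				// 16 bytes of locals, nesting level 0
 *	...
 *	LEAVE();
 *	RET();
 */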
1487
1488 /* --- Floating-Point instructions ----------------------------------------- */
1489
1490 #define _ESCmi(D,B,I,S,OP) (_REXLrm(0,B,I), _O_r_X(0xd8|(OP & 7), (OP >> 3), D,B,I,S))
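/* The OP argument is written in octal: the low three bits select the escape
   opcode byte (0xd8 | (OP & 7)) and the remaining bits the ModRM /digit
   (OP >> 3); e.g. FLDLm's 005 encodes 0xdd /0 and FSTPTm's 073 encodes
   0xdb /7. */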
1491
1492 #define FLDr(R) _OOr(0xd9c0,_rN(R))
1493 #define FLDLm(D,B,I,S) _ESCmi(D,B,I,S,005)
1494 #define FLDSm(D,B,I,S) _ESCmi(D,B,I,S,001)
1495 #define FLDTm(D,B,I,S) _ESCmi(D,B,I,S,053)
1496
1497 #define FSTr(R) _OOr(0xddd0,_rN(R))
1498 #define FSTSm(D,B,I,S) _ESCmi(D,B,I,S,021)
1499 #define FSTLm(D,B,I,S) _ESCmi(D,B,I,S,025)
1500
1501 #define FSTPr(R) _OOr(0xddd8,_rN(R))
1502 #define FSTPSm(D,B,I,S) _ESCmi(D,B,I,S,031)
1503 #define FSTPLm(D,B,I,S) _ESCmi(D,B,I,S,035)
1504 #define FSTPTm(D,B,I,S) _ESCmi(D,B,I,S,073)
1505
1506 #define FADDr0(R) _OOr(0xd8c0,_rN(R))
1507 #define FADD0r(R) _OOr(0xdcc0,_rN(R))
1508 #define FADDP0r(R) _OOr(0xdec0,_rN(R))
1509 #define FADDSm(D,B,I,S) _ESCmi(D,B,I,S,000)
1510 #define FADDLm(D,B,I,S) _ESCmi(D,B,I,S,004)
1511
1512 #define FSUBSm(D,B,I,S) _ESCmi(D,B,I,S,040)
1513 #define FSUBLm(D,B,I,S) _ESCmi(D,B,I,S,044)
1514 #define FSUBr0(R) _OOr(0xd8e0,_rN(R))
1515 #define FSUB0r(R) _OOr(0xdce8,_rN(R))
1516 #define FSUBP0r(R) _OOr(0xdee8,_rN(R))
1517
1518 #define FSUBRr0(R) _OOr(0xd8e8,_rN(R))
1519 #define FSUBR0r(R) _OOr(0xdce0,_rN(R))
1520 #define FSUBRP0r(R) _OOr(0xdee0,_rN(R))
1521 #define FSUBRSm(D,B,I,S) _ESCmi(D,B,I,S,050)
1522 #define FSUBRLm(D,B,I,S) _ESCmi(D,B,I,S,054)
1523
1524 #define FMULr0(R) _OOr(0xd8c8,_rN(R))
1525 #define FMUL0r(R) _OOr(0xdcc8,_rN(R))
1526 #define FMULP0r(R) _OOr(0xdec8,_rN(R))
1527 #define FMULSm(D,B,I,S) _ESCmi(D,B,I,S,010)
1528 #define FMULLm(D,B,I,S) _ESCmi(D,B,I,S,014)
1529
1530 #define FDIVr0(R) _OOr(0xd8f0,_rN(R))
1531 #define FDIV0r(R) _OOr(0xdcf8,_rN(R))
1532 #define FDIVP0r(R) _OOr(0xdef8,_rN(R))
1533 #define FDIVSm(D,B,I,S) _ESCmi(D,B,I,S,060)
1534 #define FDIVLm(D,B,I,S) _ESCmi(D,B,I,S,064)
1535
1536 #define FDIVRr0(R) _OOr(0xd8f8,_rN(R))
1537 #define FDIVR0r(R) _OOr(0xdcf0,_rN(R))
1538 #define FDIVRP0r(R) _OOr(0xdef0,_rN(R))
1539 #define FDIVRSm(D,B,I,S) _ESCmi(D,B,I,S,070)
1540 #define FDIVRLm(D,B,I,S) _ESCmi(D,B,I,S,074)
1541
1542 #define FCMOVBr0(R) _OOr(0xdac0,_rN(R))
1543 #define FCMOVBEr0(R) _OOr(0xdad0,_rN(R))
1544 #define FCMOVEr0(R) _OOr(0xdac8,_rN(R))
1545 #define FCMOVNBr0(R) _OOr(0xdbc0,_rN(R))
1546 #define FCMOVNBEr0(R) _OOr(0xdbd0,_rN(R))
1547 #define FCMOVNEr0(R) _OOr(0xdbc8,_rN(R))
1548 #define FCMOVNUr0(R) _OOr(0xdbd8,_rN(R))
1549 #define FCMOVUr0(R) _OOr(0xdad8,_rN(R))
1550 #define FCOMIr0(R) _OOr(0xdbf0,_rN(R))
1551 #define FCOMIPr0(R) _OOr(0xdff0,_rN(R))
1552
1553 #define FCOMr(R) _OOr(0xd8d0,_rN(R))
1554 #define FCOMSm(D,B,I,S) _ESCmi(D,B,I,S,020)
1555 #define FCOMLm(D,B,I,S) _ESCmi(D,B,I,S,024)
1556
1557 #define FCOMPr(R) _OOr(0xd8d8,_rN(R))
1558 #define FCOMPSm(D,B,I,S) _ESCmi(D,B,I,S,030)
1559 #define FCOMPLm(D,B,I,S) _ESCmi(D,B,I,S,034)
1560
1561 #define FUCOMIr0(R) _OOr(0xdbe8,_rN(R))
1562 #define FUCOMIPr0(R) _OOr(0xdfe8,_rN(R))
1563 #define FUCOMPr(R) _OOr(0xdde8,_rN(R))
1564 #define FUCOMr(R) _OOr(0xdde0,_rN(R))
1565
1566 #define FIADDLm(D,B,I,S) _ESCmi(D,B,I,S,002)
1567 #define FICOMLm(D,B,I,S) _ESCmi(D,B,I,S,022)
1568 #define FICOMPLm(D,B,I,S) _ESCmi(D,B,I,S,032)
1569 #define FIDIVLm(D,B,I,S) _ESCmi(D,B,I,S,062)
1570 #define FIDIVRLm(D,B,I,S) _ESCmi(D,B,I,S,072)
1571 #define FILDLm(D,B,I,S) _ESCmi(D,B,I,S,003)
1572 #define FILDQm(D,B,I,S) _ESCmi(D,B,I,S,057)
1573 #define FIMULLm(D,B,I,S) _ESCmi(D,B,I,S,012)
1574 #define FISTLm(D,B,I,S) _ESCmi(D,B,I,S,023)
1575 #define FISTPLm(D,B,I,S) _ESCmi(D,B,I,S,033)
1576 #define FISTPQm(D,B,I,S) _ESCmi(D,B,I,S,077)
1577 #define FISUBLm(D,B,I,S) _ESCmi(D,B,I,S,042)
1578 #define FISUBRLm(D,B,I,S) _ESCmi(D,B,I,S,052)
1579
1580 #define FREEr(R) _OOr(0xddc0,_rN(R))
1581 #define FXCHr(R) _OOr(0xd9c8,_rN(R))
1582
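/* Usage sketch (illustrative comment, not compiled): add two doubles from
 * memory and store the result through the stack top.  Displacements are byte
 * offsets from the base register, X86_NOREG marks an absent index, and the
 * scale argument is assumed to be ignored when there is no index register
 * (register constants X86_ESI/X86_EDI as assumed above).
 *
 *	FLDLm(0, X86_ESI, X86_NOREG, 1);	// st(0) := *(double *)(esi + 0)
 *	FADDLm(8, X86_ESI, X86_NOREG, 1);	// st(0) += *(double *)(esi + 8)
 *	FSTPLm(0, X86_EDI, X86_NOREG, 1);	// *(double *)(edi + 0) := st(0); pop
 */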
1583 #endif /* X86_RTASM_H */