ViewVC Help
View File | Revision Log | Show Annotations | Revision Graph | Root Listing
root/cebix/BasiliskII/src/uae_cpu/compiler/codegen_x86.h
Revision: 1.31
Committed: 2008-02-16T22:14:41Z (16 years, 7 months ago) by gbeauche
Content type: text/plain
Branch: MAIN
CVS Tags: HEAD
Changes since 1.30: +4 -4 lines
Log Message:
Use D suffix for 64-bit real, even though L is the actual GNU assembler suffix.

File Contents

# User Rev Content
1 gbeauche 1.2 /******************** -*- mode: C; tab-width: 8 -*- ********************
2 gbeauche 1.1 *
3 gbeauche 1.11 * Run-time assembler for IA-32 and AMD64
4 gbeauche 1.1 *
5     ***********************************************************************/
6    
7    
8     /***********************************************************************
9     *
10 gbeauche 1.11 * This file is derived from CCG.
11 gbeauche 1.1 *
12     * Copyright 1999, 2000, 2001, 2002, 2003 Ian Piumarta
13     *
14 gbeauche 1.28 * Adaptations and enhancements for AMD64 support, Copyright 2003-2008
15 gbeauche 1.1 * Gwenole Beauchesne
16     *
17 gbeauche 1.24 * Basilisk II (C) 1997-2008 Christian Bauer
18 gbeauche 1.1 *
19     * This program is free software; you can redistribute it and/or modify
20     * it under the terms of the GNU General Public License as published by
21     * the Free Software Foundation; either version 2 of the License, or
22     * (at your option) any later version.
23     *
24     * This program is distributed in the hope that it will be useful,
25     * but WITHOUT ANY WARRANTY; without even the implied warranty of
26     * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27     * GNU General Public License for more details.
28     *
29     * You should have received a copy of the GNU General Public License
30     * along with this program; if not, write to the Free Software
31     * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32     *
33     ***********************************************************************/
34    
35     #ifndef X86_RTASM_H
36     #define X86_RTASM_H
37    
38     /* NOTES
39     *
40     * o Best viewed on a 1024x768 screen with fixed-6x10 font ;-)
41     *
42     * TODO
43     *
44     * o Fix FIXMEs
45     * o SSE instructions
46     * o Optimize for cases where register numbers are not integral constants
47     */
48    
49     /* --- Configuration ------------------------------------------------------- */
50    
51     /* Define to settle a "flat" register set, i.e. different regno for
52     each size variant. */
53     #ifndef X86_FLAT_REGISTERS
54     #define X86_FLAT_REGISTERS 1
55     #endif
56    
57     /* Define to generate x86-64 code. */
58     #ifndef X86_TARGET_64BIT
59     #define X86_TARGET_64BIT 0
60     #endif
61    
62     /* Define to optimize ALU instructions. */
63     #ifndef X86_OPTIMIZE_ALU
64     #define X86_OPTIMIZE_ALU 1
65     #endif
66    
67     /* Define to optimize rotate/shift instructions. */
68     #ifndef X86_OPTIMIZE_ROTSHI
69     #define X86_OPTIMIZE_ROTSHI 1
70     #endif
71    
72 gbeauche 1.15 /* Define to optimize absolute addresses for RIP relative addressing. */
73     #ifndef X86_RIP_RELATIVE_ADDR
74 gbeauche 1.16 #define X86_RIP_RELATIVE_ADDR 1
75 gbeauche 1.15 #endif
76    
77 gbeauche 1.1
/* --- Macros -------------------------------------------------------------- */

/* Functions used to emit code.
 *
 *	x86_emit_byte(B)
 *	x86_emit_word(W)
 *	x86_emit_long(L)
 */

/* Get pointer to current code
 *
 *	x86_get_target()
 */

/* Abort assembler, fatal failure.
 *
 *	x86_emit_failure(MSG)
 */

/* Expression form of x86_emit_failure(): aborts, then yields 0 so the
   macro can appear inside conditional (?:) expressions. */
#define x86_emit_failure0(MSG)	(x86_emit_failure(MSG),0)
99 gbeauche 1.1
/* --- Register set -------------------------------------------------------- */

/* Register identifiers.  With X86_FLAT_REGISTERS, each size class gets a
   distinct numeric range so the class can be recovered from the id
   (see _rC() below); otherwise ids are the raw encoding numbers. */
enum {
  X86_RIP         = -2,
#if X86_FLAT_REGISTERS
  X86_NOREG       = 0,
  X86_Reg8L_Base  = 0x10,
  X86_Reg8H_Base  = 0x20,
  X86_Reg16_Base  = 0x30,
  X86_Reg32_Base  = 0x40,
  X86_Reg64_Base  = 0x50,
  X86_RegMMX_Base = 0x60,
  X86_RegXMM_Base = 0x70,
  X86_RegFPU_Base = 0x80
#else
  X86_NOREG       = -1,
  X86_Reg8L_Base  = 0,
  X86_Reg8H_Base  = 16,
  X86_Reg16_Base  = 0,
  X86_Reg32_Base  = 0,
  X86_Reg64_Base  = 0,
  X86_RegMMX_Base = 0,
  X86_RegXMM_Base = 0,
  X86_RegFPU_Base = 0
#endif
};

/* 8-bit registers (low bytes, x86-64 extensions, then legacy high bytes). */
enum {
  X86_AL = X86_Reg8L_Base,
  X86_CL, X86_DL, X86_BL,
  X86_SPL, X86_BPL, X86_SIL, X86_DIL,
  X86_R8B, X86_R9B, X86_R10B, X86_R11B,
  X86_R12B, X86_R13B, X86_R14B, X86_R15B,
  X86_AH = X86_Reg8H_Base + 4,
  X86_CH, X86_DH, X86_BH
};

/* 16-bit registers. */
enum {
  X86_AX = X86_Reg16_Base,
  X86_CX, X86_DX, X86_BX,
  X86_SP, X86_BP, X86_SI, X86_DI,
  X86_R8W, X86_R9W, X86_R10W, X86_R11W,
  X86_R12W, X86_R13W, X86_R14W, X86_R15W
};

/* 32-bit registers. */
enum {
  X86_EAX = X86_Reg32_Base,
  X86_ECX, X86_EDX, X86_EBX,
  X86_ESP, X86_EBP, X86_ESI, X86_EDI,
  X86_R8D, X86_R9D, X86_R10D, X86_R11D,
  X86_R12D, X86_R13D, X86_R14D, X86_R15D
};

/* 64-bit registers. */
enum {
  X86_RAX = X86_Reg64_Base,
  X86_RCX, X86_RDX, X86_RBX,
  X86_RSP, X86_RBP, X86_RSI, X86_RDI,
  X86_R8, X86_R9, X86_R10, X86_R11,
  X86_R12, X86_R13, X86_R14, X86_R15
};

/* MMX registers. */
enum {
  X86_MM0 = X86_RegMMX_Base,
  X86_MM1, X86_MM2, X86_MM3,
  X86_MM4, X86_MM5, X86_MM6, X86_MM7,
};

/* SSE registers. */
enum {
  X86_XMM0 = X86_RegXMM_Base,
  X86_XMM1, X86_XMM2, X86_XMM3,
  X86_XMM4, X86_XMM5, X86_XMM6, X86_XMM7,
  X86_XMM8, X86_XMM9, X86_XMM10, X86_XMM11,
  X86_XMM12, X86_XMM13, X86_XMM14, X86_XMM15
};

/* x87 FPU stack registers. */
enum {
  X86_ST0 = X86_RegFPU_Base,
  X86_ST1, X86_ST2, X86_ST3,
  X86_ST4, X86_ST5, X86_ST6, X86_ST7
};
180    
/* Register control and access
 *
 *	_r0P(R)	Null register?
 *	_rIP(R)	RIP register?
 *	_rXP(R)	Extended register?
 *
 *	_rC(R)	Class of register (only valid if X86_FLAT_REGISTERS)
 *	_rR(R)	Full register number
 *	_rN(R)	Short register number for encoding
 *
 *	_r1(R)	8-bit register ID
 *	_r2(R)	16-bit register ID
 *	_r4(R)	32-bit register ID
 *	_r8(R)	64-bit register ID
 *	_rM(R)	MMX register ID
 *	_rX(R)	XMM register ID
 *	_rF(R)	FPU register ID
 *	_rA(R)	Address register ID used for EA calculation
 */

#define _rST0P(R)	((int)(R) == (int)X86_ST0)
#define _r0P(R)		((int)(R) == (int)X86_NOREG)
#define _rIP(R)		(X86_TARGET_64BIT ? ((int)(R) == (int)X86_RIP) : 0)

#if X86_FLAT_REGISTERS
/* Flat ids: high nibble = class, low nibble = register number. */
#define _rC(R)		((R) & 0xf0)
#define _rR(R)		((R) & 0x0f)
#define _rN(R)		((R) & 0x07)
#define _rXP(R)		((R) > 0 && _rR(R) > 7)
#else
#define _rN(R)		((R) & 0x07)
#define _rR(R)		(int(R))
#define _rXP(R)		(_rR(R) > 7 && _rR(R) < 16)
#endif

/* With _ASM_SAFETY and flat registers, the size accessors verify that the
   id belongs to the expected class; otherwise they are plain encodings. */
#if !defined(_ASM_SAFETY) || ! X86_FLAT_REGISTERS
#define _r1(R)		_rN(R)
#define _r2(R)		_rN(R)
#define _r4(R)		_rN(R)
#define _r8(R)		_rN(R)
#define _rA(R)		_rN(R)
#define _rM(R)		_rN(R)
#define _rX(R)		_rN(R)
#define _rF(R)		_rN(R)
#else
#define _r1(R)		( ((_rC(R) & (X86_Reg8L_Base | X86_Reg8H_Base)) != 0) ? _rN(R) : x86_emit_failure0( "8-bit register required"))
#define _r2(R)		( (_rC(R) == X86_Reg16_Base) ? _rN(R) : x86_emit_failure0("16-bit register required"))
#define _r4(R)		( (_rC(R) == X86_Reg32_Base) ? _rN(R) : x86_emit_failure0("32-bit register required"))
#define _r8(R)		( (_rC(R) == X86_Reg64_Base) ? _rN(R) : x86_emit_failure0("64-bit register required"))
#define _rA(R)		( X86_TARGET_64BIT ? \
			( (_rC(R) == X86_Reg64_Base) ? _rN(R) : x86_emit_failure0("not a valid 64-bit base/index expression")) : \
			( (_rC(R) == X86_Reg32_Base) ? _rN(R) : x86_emit_failure0("not a valid 32-bit base/index expression")) )
#define _rM(R)		( (_rC(R) == X86_RegMMX_Base) ? _rN(R) : x86_emit_failure0("MMX register required"))
#define _rX(R)		( (_rC(R) == X86_RegXMM_Base) ? _rN(R) : x86_emit_failure0("SSE register required"))
#define _rF(R)		( (_rC(R) == X86_RegFPU_Base) ? _rN(R) : x86_emit_failure0("FPU register required"))
#endif

#define _rSP()		(X86_TARGET_64BIT ? (int)X86_RSP : (int)X86_ESP)
/* True for the new 8-bit low registers that require a REX prefix. */
#define _r1e8lP(R)	(int(R) >= X86_SPL && int(R) <= X86_DIL)
#define _rbpP(R)	(_rR(R) == _rR(X86_RBP))
#define _rspP(R)	(_rR(R) == _rR(X86_RSP))
#define _rbp13P(R)	(_rN(R) == _rN(X86_RBP))
#define _rsp12P(R)	(_rN(R) == _rN(X86_RSP))
244 gbeauche 1.1
245    
/* ========================================================================= */
/* --- UTILITY ------------------------------------------------------------- */
/* ========================================================================= */

typedef signed char	_sc;
typedef unsigned char	_uc;
typedef signed short	_ss;
typedef unsigned short	_us;
typedef signed int	_sl;
typedef unsigned int	_ul;

/* Value casts (via unsigned long so pointers can be narrowed too). */
#define _UC(X)	((_uc )(unsigned long)(X))
#define _US(X)	((_us )(unsigned long)(X))
#define _SL(X)	((_sl )(unsigned long)(X))
#define _UL(X)	((_ul )(unsigned long)(X))

/* Pointer casts. */
#define _PUC(X)	((_uc *)(X))
#define _PUS(X)	((_us *)(X))
#define _PSL(X)	((_sl *)(X))
#define _PUL(X)	((_ul *)(X))

/* Emit one byte / word / long / quad of code. */
#define _B(B)	x86_emit_byte((B))
#define _W(W)	x86_emit_word((W))
#define _L(L)	x86_emit_long((L))
#define _Q(Q)	x86_emit_quad((Q))

/* Does I fit in an N-bit signed (_siP), unsigned (_uiP), or either
   (_suiP) field? */
#define _MASK(N)	((unsigned)((1<<(N)))-1)
#define _siP(N,I)	(!((((unsigned)(I))^(((unsigned)(I))<<1))&~_MASK(N)))
#define _uiP(N,I)	(!(((unsigned)(I))&~_MASK(N)))
#define _suiP(N,I)	(_siP(N,I) | _uiP(N,I))

/* Truncate I to a W-bit field; with _ASM_SAFETY, abort if it does not fit. */
#ifndef _ASM_SAFETY
#define _ck_s(W,I)	(_UL(I) & _MASK(W))
#define _ck_u(W,I)	(_UL(I) & _MASK(W))
#define _ck_su(W,I)	(_UL(I) & _MASK(W))
#define _ck_d(W,I)	(_UL(I) & _MASK(W))
#else
#define _ck_s(W,I)	(_siP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0( "signed integer `"#I"' too large for "#W"-bit field"))
#define _ck_u(W,I)	(_uiP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0("unsigned integer `"#I"' too large for "#W"-bit field"))
#define _ck_su(W,I)	(_suiP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0( "integer `"#I"' too large for "#W"-bit field"))
#define _ck_d(W,I)	(_siP(W,I) ? (_UL(I) & _MASK(W)) : x86_emit_failure0( "displacement `"#I"' too large for "#W"-bit field"))
#endif
288    
/* Common width predicates and field extractors built on _siP/_uiP/_ck_*. */
#define _s0P(I)		((I)==0)
#define _s8P(I)		_siP(8,I)
#define _s16P(I)	_siP(16,I)
#define _u8P(I)		_uiP(8,I)
#define _u16P(I)	_uiP(16,I)

#define _su8(I)		_ck_su(8,I)
#define _su16(I)	_ck_su(16,I)

#define _s1(I)		_ck_s( 1,I)
#define _s2(I)		_ck_s( 2,I)
#define _s3(I)		_ck_s( 3,I)
#define _s4(I)		_ck_s( 4,I)
#define _s5(I)		_ck_s( 5,I)
#define _s6(I)		_ck_s( 6,I)
#define _s7(I)		_ck_s( 7,I)
#define _s8(I)		_ck_s( 8,I)
#define _s9(I)		_ck_s( 9,I)
#define _s10(I)		_ck_s(10,I)
#define _s11(I)		_ck_s(11,I)
#define _s12(I)		_ck_s(12,I)
#define _s13(I)		_ck_s(13,I)
#define _s14(I)		_ck_s(14,I)
#define _s15(I)		_ck_s(15,I)
#define _s16(I)		_ck_s(16,I)
#define _s17(I)		_ck_s(17,I)
#define _s18(I)		_ck_s(18,I)
#define _s19(I)		_ck_s(19,I)
#define _s20(I)		_ck_s(20,I)
#define _s21(I)		_ck_s(21,I)
#define _s22(I)		_ck_s(22,I)
#define _s23(I)		_ck_s(23,I)
#define _s24(I)		_ck_s(24,I)
#define _s25(I)		_ck_s(25,I)
#define _s26(I)		_ck_s(26,I)
#define _s27(I)		_ck_s(27,I)
#define _s28(I)		_ck_s(28,I)
#define _s29(I)		_ck_s(29,I)
#define _s30(I)		_ck_s(30,I)
#define _s31(I)		_ck_s(31,I)
#define _u1(I)		_ck_u( 1,I)
#define _u2(I)		_ck_u( 2,I)
#define _u3(I)		_ck_u( 3,I)
#define _u4(I)		_ck_u( 4,I)
#define _u5(I)		_ck_u( 5,I)
#define _u6(I)		_ck_u( 6,I)
#define _u7(I)		_ck_u( 7,I)
#define _u8(I)		_ck_u( 8,I)
#define _u9(I)		_ck_u( 9,I)
#define _u10(I)		_ck_u(10,I)
#define _u11(I)		_ck_u(11,I)
#define _u12(I)		_ck_u(12,I)
#define _u13(I)		_ck_u(13,I)
#define _u14(I)		_ck_u(14,I)
#define _u15(I)		_ck_u(15,I)
#define _u16(I)		_ck_u(16,I)
#define _u17(I)		_ck_u(17,I)
#define _u18(I)		_ck_u(18,I)
#define _u19(I)		_ck_u(19,I)
#define _u20(I)		_ck_u(20,I)
#define _u21(I)		_ck_u(21,I)
#define _u22(I)		_ck_u(22,I)
#define _u23(I)		_ck_u(23,I)
#define _u24(I)		_ck_u(24,I)
#define _u25(I)		_ck_u(25,I)
#define _u26(I)		_ck_u(26,I)
#define _u27(I)		_ck_u(27,I)
#define _u28(I)		_ck_u(28,I)
#define _u29(I)		_ck_u(29,I)
#define _u30(I)		_ck_u(30,I)
#define _u31(I)		_ck_u(31,I)
360    
/* ========================================================================= */
/* --- ASSEMBLER ----------------------------------------------------------- */
/* ========================================================================= */

/* Binary literals for 2-bit and 3-bit encoding fields. */
#define _b00		0
#define _b01		1
#define _b10		2
#define _b11		3

#define _b000		0
#define _b001		1
#define _b010		2
#define _b011		3
#define _b100		4
#define _b101		5
#define _b110		6
#define _b111		7

/* Displacement from the current emit position, and 8-bit checked form. */
#define _OFF4(D)	(_UL(D) - _UL(x86_get_target()))
#define _CKD8(D)	_ck_d(8, ((_uc) _OFF4(D)) )

/* Emit an 8-bit / 32-bit PC-relative displacement (patched in place). */
#define _D8(D)		(_B(0), ((*(_PUC(x86_get_target())-1))= _CKD8(D)))
#define _D32(D)		(_L(0), ((*(_PUL(x86_get_target())-1))= _OFF4(D)))

/* Field sanity checks for ModR/M and SIB components. */
#ifndef _ASM_SAFETY
# define _M(M)		(M)
# define _r(R)		(R)
# define _m(M)		(M)
# define _s(S)		(S)
# define _i(I)		(I)
# define _b(B)		(B)
#else
# define _M(M)		(((M)>3) ? x86_emit_failure0("internal error: mod = " #M) : (M))
# define _r(R)		(((R)>7) ? x86_emit_failure0("internal error: reg = " #R) : (R))
# define _m(M)		(((M)>7) ? x86_emit_failure0("internal error: r/m = " #M) : (M))
# define _s(S)		(((S)>3) ? x86_emit_failure0("internal error: memory scale = " #S) : (S))
# define _i(I)		(((I)>7) ? x86_emit_failure0("internal error: memory index = " #I) : (I))
# define _b(B)		(((B)>7) ? x86_emit_failure0("internal error: memory base = " #B) : (B))
#endif

/* Emit a ModR/M byte and a SIB byte. */
#define _Mrm(Md,R,M)	_B((_M(Md)<<6)|(_r(R)<<3)|_m(M))
#define _SIB(Sc,I, B)	_B((_s(Sc)<<6)|(_i(I)<<3)|_b(B))

/* Map a byte scale factor (1/2/4/8) to the 2-bit SIB scale field. */
#define _SCL(S)		((((S)==1) ? _b00 : \
			 (((S)==2) ? _b01 : \
			 (((S)==4) ? _b10 : \
			 (((S)==8) ? _b11 : x86_emit_failure0("illegal scale: " #S))))))


/* --- Memory subformats - urgh! ------------------------------------------- */

/* _r_D() is RIP addressing mode if X86_TARGET_64BIT, use _r_DSIB() instead */
#define _r_D(	R, D      )	(_Mrm(_b00,_rN(R),_b101 ),_L((_sl)(D)))
#define _r_DSIB(R, D      )	(_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(1),_b100 ,_b101 ),_L((_sl)(D)))
#define _r_0B(	R,   B    )	(_Mrm(_b00,_rN(R),_rA(B)))
#define _r_0BIS(R,   B,I,S)	(_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_rA(B)))
#define _r_1B(	R, D,B    )	(_Mrm(_b01,_rN(R),_rA(B)),_B((_sc)(D)))
#define _r_1BIS(R, D,B,I,S)	(_Mrm(_b01,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_rA(B)),_B((_sc)(D)))
#define _r_4B(	R, D,B    )	(_Mrm(_b10,_rN(R),_rA(B)),_L((_sl)(D)))
#define _r_4IS(	R, D,I,S  )	(_Mrm(_b00,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_b101 ),_L((_sl)(D)))
#define _r_4BIS(R, D,B,I,S)	(_Mrm(_b10,_rN(R),_b100 ),_SIB(_SCL(S),_rA(I),_rA(B)),_L((_sl)(D)))

/* Pick the shortest displacement form (none / 8-bit / 32-bit).  rBP/r13
   as base always needs an explicit displacement. */
#define _r_DB(	R, D,B    )	((_s0P(D) && (!_rbp13P(B)) ? _r_0B  (R,  B    ) : (_s8P(D) ? _r_1B  (R,D,B    ) : _r_4B  (R,D,B    ))))
#define _r_DBIS(R, D,B,I,S)	((_s0P(D) && (!_rbp13P(B)) ? _r_0BIS(R,  B,I,S) : (_s8P(D) ? _r_1BIS(R,D,B,I,S) : _r_4BIS(R,D,B,I,S))))

/* Use RIP-addressing in 64-bit mode, if possible */
#define _x86_RIP_addressing_possible(D,O)	(X86_RIP_RELATIVE_ADDR && \
						((uintptr)x86_get_target() + 4 + (O) - (D) <= 0xffffffff))

/* General EA dispatcher: (D)isplacement, (B)ase, (I)ndex, (S)cale, and
   (O)ffset of the displacement field from the instruction end. */
#define _r_X(   R, D,B,I,S,O)	(_r0P(I) ? (_r0P(B) ? (!X86_TARGET_64BIT ? _r_D(R,D) : \
					                (_x86_RIP_addressing_possible(D, O) ? \
					                 _r_D(R, (D) - ((uintptr)x86_get_target() + 4 + (O))) : \
					                 _r_DSIB(R,D))) : \
				           (_rIP(B)    ? _r_D   (R,D                ) : \
				           (_rsp12P(B) ? _r_DBIS(R,D,_rSP(),_rSP(),1) : \
					                 _r_DB  (R,D,     B       )))) : \
				 (_r0P(B)	    ? _r_4IS (R,D,          I,S) : \
				 (!_rspP(I)	    ? _r_DBIS(R,D,     B,   I,S) : \
						      x86_emit_failure("illegal index register: %esp"))))
440 gbeauche 1.1
441    
442     /* --- Instruction formats ------------------------------------------------- */
443    
444     #define _m32only(X) (! X86_TARGET_64BIT ? X : x86_emit_failure("invalid instruction in 64-bit mode"))
445     #define _m64only(X) ( X86_TARGET_64BIT ? X : x86_emit_failure("invalid instruction in 32-bit mode"))
446     #define _m64(X) ( X86_TARGET_64BIT ? X : ((void)0) )
447    
448     /* _format Opcd ModR/M dN(rB,rI,Sc) imm... */
449    
450     #define _d16() ( _B(0x66 ) )
451     #define _O( OP ) ( _B( OP ) )
452     #define _Or( OP,R ) ( _B( (OP)|_r(R)) )
453 gbeauche 1.23 #define _OO( OP ) ( _B((OP)>>8), _B(( (OP) )&0xff) )
454     #define _OOr( OP,R ) ( _B((OP)>>8), _B(( (OP)|_r(R))&0xff) )
455 gbeauche 1.1 #define _Os( OP,B ) ( _s8P(B) ? _B(((OP)|_b10)) : _B(OP) )
456     #define _sW( W ) ( _s8P(W) ? _B(W):_W(W) )
457     #define _sL( L ) ( _s8P(L) ? _B(L):_L(L) )
458 gbeauche 1.15 #define _sWO( W ) ( _s8P(W) ? 1 : 2 )
459     #define _sLO( L ) ( _s8P(L) ? 1 : 4 )
460 gbeauche 1.1 #define _O_B( OP ,B ) ( _O ( OP ) ,_B(B) )
461     #define _O_W( OP ,W ) ( _O ( OP ) ,_W(W) )
462     #define _O_L( OP ,L ) ( _O ( OP ) ,_L(L) )
463 gbeauche 1.20 #define _OO_L( OP ,L ) ( _OO ( OP ) ,_L(L) )
464 gbeauche 1.1 #define _O_D8( OP ,D ) ( _O ( OP ) ,_D8(D) )
465     #define _O_D32( OP ,D ) ( _O ( OP ) ,_D32(D) )
466     #define _OO_D32( OP ,D ) ( _OO ( OP ) ,_D32(D) )
467     #define _Os_sW( OP ,W ) ( _Os ( OP,W) ,_sW(W) )
468     #define _Os_sL( OP ,L ) ( _Os ( OP,L) ,_sL(L) )
469     #define _O_W_B( OP ,W,B) ( _O ( OP ) ,_W(W),_B(B))
470     #define _Or_B( OP,R ,B ) ( _Or ( OP,R) ,_B(B) )
471     #define _Or_W( OP,R ,W ) ( _Or ( OP,R) ,_W(W) )
472     #define _Or_L( OP,R ,L ) ( _Or ( OP,R) ,_L(L) )
473 gbeauche 1.15 #define _Or_Q( OP,R ,Q ) ( _Or ( OP,R) ,_Q(Q) )
474 gbeauche 1.1 #define _O_Mrm( OP ,MO,R,M ) ( _O ( OP ),_Mrm(MO,R,M ) )
475     #define _OO_Mrm( OP ,MO,R,M ) ( _OO ( OP ),_Mrm(MO,R,M ) )
476     #define _O_Mrm_B( OP ,MO,R,M ,B ) ( _O ( OP ),_Mrm(MO,R,M ) ,_B(B) )
477     #define _O_Mrm_W( OP ,MO,R,M ,W ) ( _O ( OP ),_Mrm(MO,R,M ) ,_W(W) )
478     #define _O_Mrm_L( OP ,MO,R,M ,L ) ( _O ( OP ),_Mrm(MO,R,M ) ,_L(L) )
479     #define _OO_Mrm_B( OP ,MO,R,M ,B ) ( _OO ( OP ),_Mrm(MO,R,M ) ,_B(B) )
480     #define _Os_Mrm_sW(OP ,MO,R,M ,W ) ( _Os ( OP,W),_Mrm(MO,R,M ),_sW(W) )
481     #define _Os_Mrm_sL(OP ,MO,R,M ,L ) ( _Os ( OP,L),_Mrm(MO,R,M ),_sL(L) )
482 gbeauche 1.15 #define _O_r_X( OP ,R ,MD,MB,MI,MS ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,0) )
483     #define _OO_r_X( OP ,R ,MD,MB,MI,MS ) ( _OO ( OP ),_r_X( R ,MD,MB,MI,MS,0) )
484     #define _O_r_X_B( OP ,R ,MD,MB,MI,MS,B ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,1) ,_B(B) )
485     #define _O_r_X_W( OP ,R ,MD,MB,MI,MS,W ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,2) ,_W(W) )
486     #define _O_r_X_L( OP ,R ,MD,MB,MI,MS,L ) ( _O ( OP ),_r_X( R ,MD,MB,MI,MS,4) ,_L(L) )
487     #define _OO_r_X_B( OP ,R ,MD,MB,MI,MS,B ) ( _OO ( OP ),_r_X( R ,MD,MB,MI,MS,1) ,_B(B) )
488     #define _Os_r_X_sW(OP ,R ,MD,MB,MI,MS,W ) ( _Os ( OP,W),_r_X( R ,MD,MB,MI,MS,_sWO(W)),_sW(W))
489     #define _Os_r_X_sL(OP ,R ,MD,MB,MI,MS,L ) ( _Os ( OP,L),_r_X( R ,MD,MB,MI,MS,_sLO(L)),_sL(L))
490 gbeauche 1.1 #define _O_X_B( OP ,MD,MB,MI,MS,B ) ( _O_r_X_B( OP ,0 ,MD,MB,MI,MS ,B) )
491     #define _O_X_W( OP ,MD,MB,MI,MS,W ) ( _O_r_X_W( OP ,0 ,MD,MB,MI,MS ,W) )
492     #define _O_X_L( OP ,MD,MB,MI,MS,L ) ( _O_r_X_L( OP ,0 ,MD,MB,MI,MS ,L) )
493    
494    
/* --- REX prefixes -------------------------------------------------------- */

#define _VOID()			((void)0)
#define _BIT(X)			(!!(X))
/* Emit a REX byte: 0100WRXB. */
#define _d64(W,R,X,B)		(_B(0x40|(W)<<3|(R)<<2|(X)<<1|(B)))

/* Emit REX only when some bit is set, or when (L) forces it (needed for
   the SPL/BPL/SIL/DIL byte registers). */
#define __REXwrxb(L,W,R,X,B)	((W|R|X|B) || (L) ? _d64(W,R,X,B) : _VOID())
#define __REXwrx_(L,W,R,X,MR)	(__REXwrxb(L,W,R,X,_BIT(_rIP(MR)?0:_rXP(MR))))
#define __REXw_x_(L,W,R,X,MR)	(__REXwrx_(L,W,_BIT(_rXP(R)),X,MR))
#define __REX_reg(RR)		(__REXwrxb(0,0,0,00,_BIT(_rXP(RR))))
#define __REX_mem(MB,MI)	(__REXwrxb(0,0,0,_BIT(_rXP(MI)),_BIT(_rXP(MB))))

// FIXME: can't mix new (SPL,BPL,SIL,DIL) with (AH,BH,CH,DH)
#define _REXBrr(RR,MR)		_m64(__REXw_x_(_r1e8lP(RR)||_r1e8lP(MR),0,RR,0,MR))
#define _REXBmr(MB,MI,RD)	_m64(__REXw_x_(_r1e8lP(RD)||_r1e8lP(MB),0,RD,_BIT(_rXP(MI)),MB))
#define _REXBrm(RS,MB,MI)	_REXBmr(MB,MI,RS)

#define _REXBLrr(RR,MR)		_m64(__REXw_x_(_r1e8lP(MR),0,RR,0,MR))
#define _REXLrr(RR,MR)		_m64(__REXw_x_(0,0,RR,0,MR))
#define _REXLmr(MB,MI,RD)	_m64(__REXw_x_(0,0,RD,_BIT(_rXP(MI)),MB))
#define _REXLrm(RS,MB,MI)	_REXLmr(MB,MI,RS)
#define _REXLr(RR)		_m64(__REX_reg(RR))
#define _REXLm(MB,MI)		_m64(__REX_mem(MB,MI))

#define _REXQrr(RR,MR)		_m64only(__REXw_x_(0,1,RR,0,MR))
#define _REXQmr(MB,MI,RD)	_m64only(__REXw_x_(0,1,RD,_BIT(_rXP(MI)),MB))
#define _REXQrm(RS,MB,MI)	_REXQmr(MB,MI,RS)
#define _REXQr(RR)		_m64only(__REX_reg(RR))
#define _REXQm(MB,MI)		_m64only(__REX_mem(MB,MI))
524 gbeauche 1.1
525    
/* ========================================================================= */
/* --- Fully-qualified intrinsic instructions ------------------------------ */
/* ========================================================================= */

/* OPCODE + i	= immediate operand
 *	  + r	= register operand
 *	  + m	= memory operand (disp,base,index,scale)
 *	  + sr/sm = a star preceding a register or memory
 *	  + 0	= top of stack register (for FPU instructions)
 *
 * NOTE in x86-64 mode: a memory operand with only a valid
 * displacement value will lead to the expected absolute mode. If
 * RIP addressing is necessary, X86_RIP shall be used as the base
 * register argument.
 */

/* --- ALU instructions ---------------------------------------------------- */

/* ALU operation selectors: bits 5-3 of the primary opcode byte. */
enum {
  X86_ADD = 0,
  X86_OR  = 1,
  X86_ADC = 2,
  X86_SBB = 3,
  X86_AND = 4,
  X86_SUB = 5,
  X86_XOR = 6,
  X86_CMP = 7,
};
554 gbeauche 1.1
/* _format	Opcd	,Mod ,r	,m	,mem=dsp+sib	,imm... */

/* Generic ALU encoders for byte (B), word (W), long (L) and quad (Q)
   operand sizes.  With X86_OPTIMIZE_ALU, immediates targeting the
   accumulator use the short AL/AX/EAX/RAX opcode forms. */
#define _ALUBrr(OP,RS, RD)		(_REXBrr(RS, RD), _O_Mrm	(((OP) << 3)	,_b11,_r1(RS),_r1(RD)			))
#define _ALUBmr(OP, MD, MB, MI, MS, RD)	(_REXBmr(MB, MI, RD), _O_r_X	(((OP) << 3) + 2,_r1(RD)	,MD,MB,MI,MS	))
#define _ALUBrm(OP, RS, MD, MB, MI, MS)	(_REXBrm(RS, MB, MI), _O_r_X	(((OP) << 3)	,_r1(RS)	,MD,MB,MI,MS	))
#define _ALUBir(OP, IM, RD)		(X86_OPTIMIZE_ALU && ((RD) == X86_AL) ? \
					(_REXBrr(0, RD), _O_B		(((OP) << 3) + 4			,_su8(IM))) : \
					(_REXBrr(0, RD), _O_Mrm_B	(0x80		,_b11,OP     ,_r1(RD)	,_su8(IM))) )
#define _ALUBim(OP, IM, MD, MB, MI, MS)	(_REXBrm(0, MB, MI), _O_r_X_B	(0x80		,OP	,MD,MB,MI,MS	,_su8(IM)))

#define _ALUWrr(OP, RS, RD)		(_d16(), _REXLrr(RS, RD), _O_Mrm	(((OP) << 3) + 1,_b11,_r2(RS),_r2(RD)			))
#define _ALUWmr(OP, MD, MB, MI, MS, RD)	(_d16(), _REXLmr(MB, MI, RD), _O_r_X	(((OP) << 3) + 3,_r2(RD)	,MD,MB,MI,MS	))
#define _ALUWrm(OP, RS, MD, MB, MI, MS)	(_d16(), _REXLrm(RS, MB, MI), _O_r_X	(((OP) << 3) + 1,_r2(RS)	,MD,MB,MI,MS	))
#define _ALUWir(OP, IM, RD)		(X86_OPTIMIZE_ALU && ((RD) == X86_AX) ? \
					(_d16(), _REXLrr(0, RD), _O_W		(((OP) << 3) + 5			,_su16(IM))) : \
					(_d16(), _REXLrr(0, RD), _Os_Mrm_sW	(0x81		,_b11,OP     ,_r2(RD)	,_su16(IM))) )
#define _ALUWim(OP, IM, MD, MB, MI, MS)	(_d16(), _REXLrm(0, MB, MI), _Os_r_X_sW	(0x81		,OP	,MD,MB,MI,MS	,_su16(IM)))

#define _ALULrr(OP, RS, RD)		(_REXLrr(RS, RD), _O_Mrm	(((OP) << 3) + 1,_b11,_r4(RS),_r4(RD)			))
#define _ALULmr(OP, MD, MB, MI, MS, RD)	(_REXLmr(MB, MI, RD), _O_r_X	(((OP) << 3) + 3,_r4(RD)	,MD,MB,MI,MS	))
#define _ALULrm(OP, RS, MD, MB, MI, MS)	(_REXLrm(RS, MB, MI), _O_r_X	(((OP) << 3) + 1,_r4(RS)	,MD,MB,MI,MS	))
#define _ALULir(OP, IM, RD)		(X86_OPTIMIZE_ALU && ((RD) == X86_EAX) ? \
					(_REXLrr(0, RD), _O_L		(((OP) << 3) + 5			,IM	)) : \
					(_REXLrr(0, RD), _Os_Mrm_sL	(0x81		,_b11,OP     ,_r4(RD)	,IM	)) )
#define _ALULim(OP, IM, MD, MB, MI, MS)	(_REXLrm(0, MB, MI), _Os_r_X_sL	(0x81		,OP	,MD,MB,MI,MS	,IM	))

#define _ALUQrr(OP, RS, RD)		(_REXQrr(RS, RD), _O_Mrm	(((OP) << 3) + 1,_b11,_r8(RS),_r8(RD)			))
#define _ALUQmr(OP, MD, MB, MI, MS, RD)	(_REXQmr(MB, MI, RD), _O_r_X	(((OP) << 3) + 3,_r8(RD)	,MD,MB,MI,MS	))
#define _ALUQrm(OP, RS, MD, MB, MI, MS)	(_REXQrm(RS, MB, MI), _O_r_X	(((OP) << 3) + 1,_r8(RS)	,MD,MB,MI,MS	))
#define _ALUQir(OP, IM, RD)		(X86_OPTIMIZE_ALU && ((RD) == X86_RAX) ? \
					(_REXQrr(0, RD), _O_L		(((OP) << 3) + 5			,IM	)) : \
					(_REXQrr(0, RD), _Os_Mrm_sL	(0x81		,_b11,OP     ,_r8(RD)	,IM	)) )
#define _ALUQim(OP, IM, MD, MB, MI, MS)	(_REXQrm(0, MB, MI), _Os_r_X_sL	(0x81		,OP	,MD,MB,MI,MS	,IM	))
588    
/* Per-mnemonic wrappers over the generic _ALU* encoders. */

#define ADCBrr(RS, RD)			_ALUBrr(X86_ADC, RS, RD)
#define ADCBmr(MD, MB, MI, MS, RD)	_ALUBmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCBrm(RS, MD, MB, MI, MS)	_ALUBrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCBir(IM, RD)			_ALUBir(X86_ADC, IM, RD)
#define ADCBim(IM, MD, MB, MI, MS)	_ALUBim(X86_ADC, IM, MD, MB, MI, MS)

#define ADCWrr(RS, RD)			_ALUWrr(X86_ADC, RS, RD)
#define ADCWmr(MD, MB, MI, MS, RD)	_ALUWmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCWrm(RS, MD, MB, MI, MS)	_ALUWrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCWir(IM, RD)			_ALUWir(X86_ADC, IM, RD)
#define ADCWim(IM, MD, MB, MI, MS)	_ALUWim(X86_ADC, IM, MD, MB, MI, MS)

#define ADCLrr(RS, RD)			_ALULrr(X86_ADC, RS, RD)
#define ADCLmr(MD, MB, MI, MS, RD)	_ALULmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCLrm(RS, MD, MB, MI, MS)	_ALULrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCLir(IM, RD)			_ALULir(X86_ADC, IM, RD)
#define ADCLim(IM, MD, MB, MI, MS)	_ALULim(X86_ADC, IM, MD, MB, MI, MS)

#define ADCQrr(RS, RD)			_ALUQrr(X86_ADC, RS, RD)
#define ADCQmr(MD, MB, MI, MS, RD)	_ALUQmr(X86_ADC, MD, MB, MI, MS, RD)
#define ADCQrm(RS, MD, MB, MI, MS)	_ALUQrm(X86_ADC, RS, MD, MB, MI, MS)
#define ADCQir(IM, RD)			_ALUQir(X86_ADC, IM, RD)
#define ADCQim(IM, MD, MB, MI, MS)	_ALUQim(X86_ADC, IM, MD, MB, MI, MS)

#define ADDBrr(RS, RD)			_ALUBrr(X86_ADD, RS, RD)
#define ADDBmr(MD, MB, MI, MS, RD)	_ALUBmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDBrm(RS, MD, MB, MI, MS)	_ALUBrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDBir(IM, RD)			_ALUBir(X86_ADD, IM, RD)
#define ADDBim(IM, MD, MB, MI, MS)	_ALUBim(X86_ADD, IM, MD, MB, MI, MS)

#define ADDWrr(RS, RD)			_ALUWrr(X86_ADD, RS, RD)
#define ADDWmr(MD, MB, MI, MS, RD)	_ALUWmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDWrm(RS, MD, MB, MI, MS)	_ALUWrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDWir(IM, RD)			_ALUWir(X86_ADD, IM, RD)
#define ADDWim(IM, MD, MB, MI, MS)	_ALUWim(X86_ADD, IM, MD, MB, MI, MS)

#define ADDLrr(RS, RD)			_ALULrr(X86_ADD, RS, RD)
#define ADDLmr(MD, MB, MI, MS, RD)	_ALULmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDLrm(RS, MD, MB, MI, MS)	_ALULrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDLir(IM, RD)			_ALULir(X86_ADD, IM, RD)
#define ADDLim(IM, MD, MB, MI, MS)	_ALULim(X86_ADD, IM, MD, MB, MI, MS)

#define ADDQrr(RS, RD)			_ALUQrr(X86_ADD, RS, RD)
#define ADDQmr(MD, MB, MI, MS, RD)	_ALUQmr(X86_ADD, MD, MB, MI, MS, RD)
#define ADDQrm(RS, MD, MB, MI, MS)	_ALUQrm(X86_ADD, RS, MD, MB, MI, MS)
#define ADDQir(IM, RD)			_ALUQir(X86_ADD, IM, RD)
#define ADDQim(IM, MD, MB, MI, MS)	_ALUQim(X86_ADD, IM, MD, MB, MI, MS)

#define ANDBrr(RS, RD)			_ALUBrr(X86_AND, RS, RD)
#define ANDBmr(MD, MB, MI, MS, RD)	_ALUBmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDBrm(RS, MD, MB, MI, MS)	_ALUBrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDBir(IM, RD)			_ALUBir(X86_AND, IM, RD)
#define ANDBim(IM, MD, MB, MI, MS)	_ALUBim(X86_AND, IM, MD, MB, MI, MS)

#define ANDWrr(RS, RD)			_ALUWrr(X86_AND, RS, RD)
#define ANDWmr(MD, MB, MI, MS, RD)	_ALUWmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDWrm(RS, MD, MB, MI, MS)	_ALUWrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDWir(IM, RD)			_ALUWir(X86_AND, IM, RD)
#define ANDWim(IM, MD, MB, MI, MS)	_ALUWim(X86_AND, IM, MD, MB, MI, MS)

#define ANDLrr(RS, RD)			_ALULrr(X86_AND, RS, RD)
#define ANDLmr(MD, MB, MI, MS, RD)	_ALULmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDLrm(RS, MD, MB, MI, MS)	_ALULrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDLir(IM, RD)			_ALULir(X86_AND, IM, RD)
#define ANDLim(IM, MD, MB, MI, MS)	_ALULim(X86_AND, IM, MD, MB, MI, MS)

#define ANDQrr(RS, RD)			_ALUQrr(X86_AND, RS, RD)
#define ANDQmr(MD, MB, MI, MS, RD)	_ALUQmr(X86_AND, MD, MB, MI, MS, RD)
#define ANDQrm(RS, MD, MB, MI, MS)	_ALUQrm(X86_AND, RS, MD, MB, MI, MS)
#define ANDQir(IM, RD)			_ALUQir(X86_AND, IM, RD)
#define ANDQim(IM, MD, MB, MI, MS)	_ALUQim(X86_AND, IM, MD, MB, MI, MS)

#define CMPBrr(RS, RD)			_ALUBrr(X86_CMP, RS, RD)
#define CMPBmr(MD, MB, MI, MS, RD)	_ALUBmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPBrm(RS, MD, MB, MI, MS)	_ALUBrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPBir(IM, RD)			_ALUBir(X86_CMP, IM, RD)
#define CMPBim(IM, MD, MB, MI, MS)	_ALUBim(X86_CMP, IM, MD, MB, MI, MS)

#define CMPWrr(RS, RD)			_ALUWrr(X86_CMP, RS, RD)
#define CMPWmr(MD, MB, MI, MS, RD)	_ALUWmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPWrm(RS, MD, MB, MI, MS)	_ALUWrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPWir(IM, RD)			_ALUWir(X86_CMP, IM, RD)
#define CMPWim(IM, MD, MB, MI, MS)	_ALUWim(X86_CMP, IM, MD, MB, MI, MS)

#define CMPLrr(RS, RD)			_ALULrr(X86_CMP, RS, RD)
#define CMPLmr(MD, MB, MI, MS, RD)	_ALULmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPLrm(RS, MD, MB, MI, MS)	_ALULrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPLir(IM, RD)			_ALULir(X86_CMP, IM, RD)
#define CMPLim(IM, MD, MB, MI, MS)	_ALULim(X86_CMP, IM, MD, MB, MI, MS)

#define CMPQrr(RS, RD)			_ALUQrr(X86_CMP, RS, RD)
#define CMPQmr(MD, MB, MI, MS, RD)	_ALUQmr(X86_CMP, MD, MB, MI, MS, RD)
#define CMPQrm(RS, MD, MB, MI, MS)	_ALUQrm(X86_CMP, RS, MD, MB, MI, MS)
#define CMPQir(IM, RD)			_ALUQir(X86_CMP, IM, RD)
#define CMPQim(IM, MD, MB, MI, MS)	_ALUQim(X86_CMP, IM, MD, MB, MI, MS)

#define ORBrr(RS, RD)			_ALUBrr(X86_OR, RS, RD)
#define ORBmr(MD, MB, MI, MS, RD)	_ALUBmr(X86_OR, MD, MB, MI, MS, RD)
#define ORBrm(RS, MD, MB, MI, MS)	_ALUBrm(X86_OR, RS, MD, MB, MI, MS)
#define ORBir(IM, RD)			_ALUBir(X86_OR, IM, RD)
#define ORBim(IM, MD, MB, MI, MS)	_ALUBim(X86_OR, IM, MD, MB, MI, MS)

#define ORWrr(RS, RD)			_ALUWrr(X86_OR, RS, RD)
#define ORWmr(MD, MB, MI, MS, RD)	_ALUWmr(X86_OR, MD, MB, MI, MS, RD)
#define ORWrm(RS, MD, MB, MI, MS)	_ALUWrm(X86_OR, RS, MD, MB, MI, MS)
#define ORWir(IM, RD)			_ALUWir(X86_OR, IM, RD)
#define ORWim(IM, MD, MB, MI, MS)	_ALUWim(X86_OR, IM, MD, MB, MI, MS)
696    
697     #define ORLrr(RS, RD) _ALULrr(X86_OR, RS, RD)
698     #define ORLmr(MD, MB, MI, MS, RD) _ALULmr(X86_OR, MD, MB, MI, MS, RD)
699     #define ORLrm(RS, MD, MB, MI, MS) _ALULrm(X86_OR, RS, MD, MB, MI, MS)
700     #define ORLir(IM, RD) _ALULir(X86_OR, IM, RD)
701     #define ORLim(IM, MD, MB, MI, MS) _ALULim(X86_OR, IM, MD, MB, MI, MS)
702    
703     #define ORQrr(RS, RD) _ALUQrr(X86_OR, RS, RD)
704     #define ORQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_OR, MD, MB, MI, MS, RD)
705     #define ORQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_OR, RS, MD, MB, MI, MS)
706     #define ORQir(IM, RD) _ALUQir(X86_OR, IM, RD)
707     #define ORQim(IM, MD, MB, MI, MS) _ALUQim(X86_OR, IM, MD, MB, MI, MS)
708    
709     #define SBBBrr(RS, RD) _ALUBrr(X86_SBB, RS, RD)
710     #define SBBBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_SBB, MD, MB, MI, MS, RD)
711     #define SBBBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_SBB, RS, MD, MB, MI, MS)
712     #define SBBBir(IM, RD) _ALUBir(X86_SBB, IM, RD)
713     #define SBBBim(IM, MD, MB, MI, MS) _ALUBim(X86_SBB, IM, MD, MB, MI, MS)
714    
715     #define SBBWrr(RS, RD) _ALUWrr(X86_SBB, RS, RD)
716     #define SBBWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_SBB, MD, MB, MI, MS, RD)
717     #define SBBWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_SBB, RS, MD, MB, MI, MS)
718     #define SBBWir(IM, RD) _ALUWir(X86_SBB, IM, RD)
719     #define SBBWim(IM, MD, MB, MI, MS) _ALUWim(X86_SBB, IM, MD, MB, MI, MS)
720    
721     #define SBBLrr(RS, RD) _ALULrr(X86_SBB, RS, RD)
722     #define SBBLmr(MD, MB, MI, MS, RD) _ALULmr(X86_SBB, MD, MB, MI, MS, RD)
723     #define SBBLrm(RS, MD, MB, MI, MS) _ALULrm(X86_SBB, RS, MD, MB, MI, MS)
724     #define SBBLir(IM, RD) _ALULir(X86_SBB, IM, RD)
725     #define SBBLim(IM, MD, MB, MI, MS) _ALULim(X86_SBB, IM, MD, MB, MI, MS)
726    
727     #define SBBQrr(RS, RD) _ALUQrr(X86_SBB, RS, RD)
728     #define SBBQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_SBB, MD, MB, MI, MS, RD)
729     #define SBBQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_SBB, RS, MD, MB, MI, MS)
730     #define SBBQir(IM, RD) _ALUQir(X86_SBB, IM, RD)
731     #define SBBQim(IM, MD, MB, MI, MS) _ALUQim(X86_SBB, IM, MD, MB, MI, MS)
732    
733     #define SUBBrr(RS, RD) _ALUBrr(X86_SUB, RS, RD)
734     #define SUBBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_SUB, MD, MB, MI, MS, RD)
735     #define SUBBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_SUB, RS, MD, MB, MI, MS)
736     #define SUBBir(IM, RD) _ALUBir(X86_SUB, IM, RD)
737     #define SUBBim(IM, MD, MB, MI, MS) _ALUBim(X86_SUB, IM, MD, MB, MI, MS)
738    
739     #define SUBWrr(RS, RD) _ALUWrr(X86_SUB, RS, RD)
740     #define SUBWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_SUB, MD, MB, MI, MS, RD)
741     #define SUBWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_SUB, RS, MD, MB, MI, MS)
742     #define SUBWir(IM, RD) _ALUWir(X86_SUB, IM, RD)
743     #define SUBWim(IM, MD, MB, MI, MS) _ALUWim(X86_SUB, IM, MD, MB, MI, MS)
744    
745     #define SUBLrr(RS, RD) _ALULrr(X86_SUB, RS, RD)
746     #define SUBLmr(MD, MB, MI, MS, RD) _ALULmr(X86_SUB, MD, MB, MI, MS, RD)
747     #define SUBLrm(RS, MD, MB, MI, MS) _ALULrm(X86_SUB, RS, MD, MB, MI, MS)
748     #define SUBLir(IM, RD) _ALULir(X86_SUB, IM, RD)
749     #define SUBLim(IM, MD, MB, MI, MS) _ALULim(X86_SUB, IM, MD, MB, MI, MS)
750    
751     #define SUBQrr(RS, RD) _ALUQrr(X86_SUB, RS, RD)
752     #define SUBQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_SUB, MD, MB, MI, MS, RD)
753     #define SUBQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_SUB, RS, MD, MB, MI, MS)
754     #define SUBQir(IM, RD) _ALUQir(X86_SUB, IM, RD)
755     #define SUBQim(IM, MD, MB, MI, MS) _ALUQim(X86_SUB, IM, MD, MB, MI, MS)
756    
757     #define XORBrr(RS, RD) _ALUBrr(X86_XOR, RS, RD)
758     #define XORBmr(MD, MB, MI, MS, RD) _ALUBmr(X86_XOR, MD, MB, MI, MS, RD)
759     #define XORBrm(RS, MD, MB, MI, MS) _ALUBrm(X86_XOR, RS, MD, MB, MI, MS)
760     #define XORBir(IM, RD) _ALUBir(X86_XOR, IM, RD)
761     #define XORBim(IM, MD, MB, MI, MS) _ALUBim(X86_XOR, IM, MD, MB, MI, MS)
762    
763     #define XORWrr(RS, RD) _ALUWrr(X86_XOR, RS, RD)
764     #define XORWmr(MD, MB, MI, MS, RD) _ALUWmr(X86_XOR, MD, MB, MI, MS, RD)
765     #define XORWrm(RS, MD, MB, MI, MS) _ALUWrm(X86_XOR, RS, MD, MB, MI, MS)
766     #define XORWir(IM, RD) _ALUWir(X86_XOR, IM, RD)
767     #define XORWim(IM, MD, MB, MI, MS) _ALUWim(X86_XOR, IM, MD, MB, MI, MS)
768    
769     #define XORLrr(RS, RD) _ALULrr(X86_XOR, RS, RD)
770     #define XORLmr(MD, MB, MI, MS, RD) _ALULmr(X86_XOR, MD, MB, MI, MS, RD)
771     #define XORLrm(RS, MD, MB, MI, MS) _ALULrm(X86_XOR, RS, MD, MB, MI, MS)
772     #define XORLir(IM, RD) _ALULir(X86_XOR, IM, RD)
773     #define XORLim(IM, MD, MB, MI, MS) _ALULim(X86_XOR, IM, MD, MB, MI, MS)
774    
775     #define XORQrr(RS, RD) _ALUQrr(X86_XOR, RS, RD)
776     #define XORQmr(MD, MB, MI, MS, RD) _ALUQmr(X86_XOR, MD, MB, MI, MS, RD)
777     #define XORQrm(RS, MD, MB, MI, MS) _ALUQrm(X86_XOR, RS, MD, MB, MI, MS)
778     #define XORQir(IM, RD) _ALUQir(X86_XOR, IM, RD)
779     #define XORQim(IM, MD, MB, MI, MS) _ALUQim(X86_XOR, IM, MD, MB, MI, MS)
780    
781    
782     /* --- Shift/Rotate instructions ------------------------------------------- */
783    
/* ModRM reg-field values (/digit) selecting the operation within the
 * x86 shift/rotate opcode group (0xC0/0xC1 with imm8, 0xD0-0xD3). */
784 gbeauche 1.2 enum {
785 gbeauche 1.1 X86_ROL = 0,
786     X86_ROR = 1,
787     X86_RCL = 2,
788     X86_RCR = 3,
789     X86_SHL = 4,
790     X86_SHR = 5,
	/* Value 6 is intentionally skipped: /6 has no distinct operation
	 * (it aliases SHL in the shift-group encoding). */
791     X86_SAR = 7,
792 gbeauche 1.2 };
793 gbeauche 1.1
794     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */

/* Generic shift/rotate emitters, one per operand size (B/W/L/Q).
 * - *ir/*im forms: when X86_OPTIMIZE_ROTSHI is enabled and the count is the
 *   compile-time constant 1, the shorter shift-by-one opcodes (0xd0/0xd1)
 *   are emitted instead of the imm8 forms (0xc0/0xc1).
 * - *rr/*rm forms (variable count, opcodes 0xd2/0xd3): the ISA only allows
 *   the count in CL, so any other source register is a hard emit failure. */
795    
796     #define _ROTSHIBir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
797     (_REXBrr(0, RD), _O_Mrm (0xd0 ,_b11,OP,_r1(RD) )) : \
798     (_REXBrr(0, RD), _O_Mrm_B (0xc0 ,_b11,OP,_r1(RD) ,_u8(IM))) )
799     #define _ROTSHIBim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
800     (_REXBrm(0, MB, MI), _O_r_X (0xd0 ,OP ,MD,MB,MI,MS )) : \
801     (_REXBrm(0, MB, MI), _O_r_X_B (0xc0 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
802     #define _ROTSHIBrr(OP,RS,RD) (((RS) == X86_CL) ? \
803     (_REXBrr(RS, RD), _O_Mrm (0xd2 ,_b11,OP,_r1(RD) )) : \
804     x86_emit_failure("source register must be CL" ) )
805     #define _ROTSHIBrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
806     (_REXBrm(RS, MB, MI), _O_r_X (0xd2 ,OP ,MD,MB,MI,MS )) : \
807     x86_emit_failure("source register must be CL" ) )
808    
809     #define _ROTSHIWir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
810     (_d16(), _REXLrr(0, RD), _O_Mrm (0xd1 ,_b11,OP,_r2(RD) )) : \
811     (_d16(), _REXLrr(0, RD), _O_Mrm_B (0xc1 ,_b11,OP,_r2(RD) ,_u8(IM))) )
812     #define _ROTSHIWim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
813     (_d16(), _REXLrm(0, MB, MI), _O_r_X (0xd1 ,OP ,MD,MB,MI,MS )) : \
814     (_d16(), _REXLrm(0, MB, MI), _O_r_X_B (0xc1 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
815     #define _ROTSHIWrr(OP,RS,RD) (((RS) == X86_CL) ? \
816     (_d16(), _REXLrr(RS, RD), _O_Mrm (0xd3 ,_b11,OP,_r2(RD) )) : \
817     x86_emit_failure("source register must be CL" ) )
818     #define _ROTSHIWrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
819     (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0xd3 ,OP ,MD,MB,MI,MS )) : \
820     x86_emit_failure("source register must be CL" ) )
821    
822     #define _ROTSHILir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
823     (_REXLrr(0, RD), _O_Mrm (0xd1 ,_b11,OP,_r4(RD) )) : \
824     (_REXLrr(0, RD), _O_Mrm_B (0xc1 ,_b11,OP,_r4(RD) ,_u8(IM))) )
825     #define _ROTSHILim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
826     (_REXLrm(0, MB, MI), _O_r_X (0xd1 ,OP ,MD,MB,MI,MS )) : \
827     (_REXLrm(0, MB, MI), _O_r_X_B (0xc1 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
828     #define _ROTSHILrr(OP,RS,RD) (((RS) == X86_CL) ? \
829     (_REXLrr(RS, RD), _O_Mrm (0xd3 ,_b11,OP,_r4(RD) )) : \
830     x86_emit_failure("source register must be CL" ) )
831     #define _ROTSHILrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
832     (_REXLrm(RS, MB, MI), _O_r_X (0xd3 ,OP ,MD,MB,MI,MS )) : \
833     x86_emit_failure("source register must be CL" ) )
834    
835     #define _ROTSHIQir(OP,IM,RD) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
836     (_REXQrr(0, RD), _O_Mrm (0xd1 ,_b11,OP,_r8(RD) )) : \
837     (_REXQrr(0, RD), _O_Mrm_B (0xc1 ,_b11,OP,_r8(RD) ,_u8(IM))) )
838     #define _ROTSHIQim(OP,IM,MD,MB,MI,MS) (X86_OPTIMIZE_ROTSHI && ((IM) == 1) ? \
839     (_REXQrm(0, MB, MI), _O_r_X (0xd1 ,OP ,MD,MB,MI,MS )) : \
840     (_REXQrm(0, MB, MI), _O_r_X_B (0xc1 ,OP ,MD,MB,MI,MS ,_u8(IM))) )
841     #define _ROTSHIQrr(OP,RS,RD) (((RS) == X86_CL) ? \
842     (_REXQrr(RS, RD), _O_Mrm (0xd3 ,_b11,OP,_r8(RD) )) : \
843     x86_emit_failure("source register must be CL" ) )
844     #define _ROTSHIQrm(OP,RS,MD,MB,MI,MS) (((RS) == X86_CL) ? \
845     (_REXQrm(RS, MB, MI), _O_r_X (0xd3 ,OP ,MD,MB,MI,MS )) : \
846     x86_emit_failure("source register must be CL" ) )
847    
/* Rotate/shift mnemonics per operation, operand size (B/W/L/Q) and form
 * (ir/im = immediate count, rr/rm = variable count in CL).
 * SAL* are straight aliases of SHL* (same operation, same encoding). */
848     #define ROLBir(IM, RD) _ROTSHIBir(X86_ROL, IM, RD)
849     #define ROLBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_ROL, IM, MD, MB, MI, MS)
850     #define ROLBrr(RS, RD) _ROTSHIBrr(X86_ROL, RS, RD)
851     #define ROLBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_ROL, RS, MD, MB, MI, MS)
852    
853     #define ROLWir(IM, RD) _ROTSHIWir(X86_ROL, IM, RD)
854     #define ROLWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_ROL, IM, MD, MB, MI, MS)
855     #define ROLWrr(RS, RD) _ROTSHIWrr(X86_ROL, RS, RD)
856     #define ROLWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_ROL, RS, MD, MB, MI, MS)
857    
858     #define ROLLir(IM, RD) _ROTSHILir(X86_ROL, IM, RD)
859     #define ROLLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_ROL, IM, MD, MB, MI, MS)
860     #define ROLLrr(RS, RD) _ROTSHILrr(X86_ROL, RS, RD)
861     #define ROLLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_ROL, RS, MD, MB, MI, MS)
862    
863     #define ROLQir(IM, RD) _ROTSHIQir(X86_ROL, IM, RD)
864     #define ROLQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_ROL, IM, MD, MB, MI, MS)
865     #define ROLQrr(RS, RD) _ROTSHIQrr(X86_ROL, RS, RD)
866     #define ROLQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_ROL, RS, MD, MB, MI, MS)
867    
868     #define RORBir(IM, RD) _ROTSHIBir(X86_ROR, IM, RD)
869     #define RORBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_ROR, IM, MD, MB, MI, MS)
870     #define RORBrr(RS, RD) _ROTSHIBrr(X86_ROR, RS, RD)
871     #define RORBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_ROR, RS, MD, MB, MI, MS)
872    
873     #define RORWir(IM, RD) _ROTSHIWir(X86_ROR, IM, RD)
874     #define RORWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_ROR, IM, MD, MB, MI, MS)
875     #define RORWrr(RS, RD) _ROTSHIWrr(X86_ROR, RS, RD)
876     #define RORWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_ROR, RS, MD, MB, MI, MS)
877    
878     #define RORLir(IM, RD) _ROTSHILir(X86_ROR, IM, RD)
879     #define RORLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_ROR, IM, MD, MB, MI, MS)
880     #define RORLrr(RS, RD) _ROTSHILrr(X86_ROR, RS, RD)
881     #define RORLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_ROR, RS, MD, MB, MI, MS)
882    
883     #define RORQir(IM, RD) _ROTSHIQir(X86_ROR, IM, RD)
884     #define RORQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_ROR, IM, MD, MB, MI, MS)
885     #define RORQrr(RS, RD) _ROTSHIQrr(X86_ROR, RS, RD)
886     #define RORQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_ROR, RS, MD, MB, MI, MS)
887    
888     #define RCLBir(IM, RD) _ROTSHIBir(X86_RCL, IM, RD)
889     #define RCLBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_RCL, IM, MD, MB, MI, MS)
890     #define RCLBrr(RS, RD) _ROTSHIBrr(X86_RCL, RS, RD)
891     #define RCLBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_RCL, RS, MD, MB, MI, MS)
892    
893     #define RCLWir(IM, RD) _ROTSHIWir(X86_RCL, IM, RD)
894     #define RCLWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_RCL, IM, MD, MB, MI, MS)
895     #define RCLWrr(RS, RD) _ROTSHIWrr(X86_RCL, RS, RD)
896     #define RCLWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_RCL, RS, MD, MB, MI, MS)
897    
898     #define RCLLir(IM, RD) _ROTSHILir(X86_RCL, IM, RD)
899     #define RCLLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_RCL, IM, MD, MB, MI, MS)
900     #define RCLLrr(RS, RD) _ROTSHILrr(X86_RCL, RS, RD)
901     #define RCLLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_RCL, RS, MD, MB, MI, MS)
902    
903     #define RCLQir(IM, RD) _ROTSHIQir(X86_RCL, IM, RD)
904     #define RCLQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_RCL, IM, MD, MB, MI, MS)
905     #define RCLQrr(RS, RD) _ROTSHIQrr(X86_RCL, RS, RD)
906     #define RCLQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_RCL, RS, MD, MB, MI, MS)
907    
908     #define RCRBir(IM, RD) _ROTSHIBir(X86_RCR, IM, RD)
909     #define RCRBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_RCR, IM, MD, MB, MI, MS)
910     #define RCRBrr(RS, RD) _ROTSHIBrr(X86_RCR, RS, RD)
911     #define RCRBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_RCR, RS, MD, MB, MI, MS)
912    
913     #define RCRWir(IM, RD) _ROTSHIWir(X86_RCR, IM, RD)
914     #define RCRWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_RCR, IM, MD, MB, MI, MS)
915     #define RCRWrr(RS, RD) _ROTSHIWrr(X86_RCR, RS, RD)
916     #define RCRWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_RCR, RS, MD, MB, MI, MS)
917    
918     #define RCRLir(IM, RD) _ROTSHILir(X86_RCR, IM, RD)
919     #define RCRLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_RCR, IM, MD, MB, MI, MS)
920     #define RCRLrr(RS, RD) _ROTSHILrr(X86_RCR, RS, RD)
921     #define RCRLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_RCR, RS, MD, MB, MI, MS)
922    
923     #define RCRQir(IM, RD) _ROTSHIQir(X86_RCR, IM, RD)
924     #define RCRQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_RCR, IM, MD, MB, MI, MS)
925     #define RCRQrr(RS, RD) _ROTSHIQrr(X86_RCR, RS, RD)
926     #define RCRQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_RCR, RS, MD, MB, MI, MS)
927    
928     #define SHLBir(IM, RD) _ROTSHIBir(X86_SHL, IM, RD)
929     #define SHLBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_SHL, IM, MD, MB, MI, MS)
930     #define SHLBrr(RS, RD) _ROTSHIBrr(X86_SHL, RS, RD)
931     #define SHLBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_SHL, RS, MD, MB, MI, MS)
932    
933     #define SHLWir(IM, RD) _ROTSHIWir(X86_SHL, IM, RD)
934     #define SHLWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_SHL, IM, MD, MB, MI, MS)
935     #define SHLWrr(RS, RD) _ROTSHIWrr(X86_SHL, RS, RD)
936     #define SHLWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_SHL, RS, MD, MB, MI, MS)
937    
938     #define SHLLir(IM, RD) _ROTSHILir(X86_SHL, IM, RD)
939     #define SHLLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_SHL, IM, MD, MB, MI, MS)
940     #define SHLLrr(RS, RD) _ROTSHILrr(X86_SHL, RS, RD)
941     #define SHLLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_SHL, RS, MD, MB, MI, MS)
942    
943     #define SHLQir(IM, RD) _ROTSHIQir(X86_SHL, IM, RD)
944     #define SHLQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_SHL, IM, MD, MB, MI, MS)
945     #define SHLQrr(RS, RD) _ROTSHIQrr(X86_SHL, RS, RD)
946     #define SHLQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_SHL, RS, MD, MB, MI, MS)
947    
948     #define SHRBir(IM, RD) _ROTSHIBir(X86_SHR, IM, RD)
949     #define SHRBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_SHR, IM, MD, MB, MI, MS)
950     #define SHRBrr(RS, RD) _ROTSHIBrr(X86_SHR, RS, RD)
951     #define SHRBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_SHR, RS, MD, MB, MI, MS)
952    
953     #define SHRWir(IM, RD) _ROTSHIWir(X86_SHR, IM, RD)
954     #define SHRWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_SHR, IM, MD, MB, MI, MS)
955     #define SHRWrr(RS, RD) _ROTSHIWrr(X86_SHR, RS, RD)
956     #define SHRWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_SHR, RS, MD, MB, MI, MS)
957    
958     #define SHRLir(IM, RD) _ROTSHILir(X86_SHR, IM, RD)
959     #define SHRLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_SHR, IM, MD, MB, MI, MS)
960     #define SHRLrr(RS, RD) _ROTSHILrr(X86_SHR, RS, RD)
961     #define SHRLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_SHR, RS, MD, MB, MI, MS)
962    
963     #define SHRQir(IM, RD) _ROTSHIQir(X86_SHR, IM, RD)
964     #define SHRQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_SHR, IM, MD, MB, MI, MS)
965     #define SHRQrr(RS, RD) _ROTSHIQrr(X86_SHR, RS, RD)
966     #define SHRQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_SHR, RS, MD, MB, MI, MS)
967    
968     #define SALBir SHLBir
969     #define SALBim SHLBim
970     #define SALBrr SHLBrr
971     #define SALBrm SHLBrm
972    
973     #define SALWir SHLWir
974     #define SALWim SHLWim
975     #define SALWrr SHLWrr
976     #define SALWrm SHLWrm
977    
978     #define SALLir SHLLir
979     #define SALLim SHLLim
980     #define SALLrr SHLLrr
981     #define SALLrm SHLLrm
982    
983     #define SALQir SHLQir
984     #define SALQim SHLQim
985     #define SALQrr SHLQrr
986     #define SALQrm SHLQrm
987    
988     #define SARBir(IM, RD) _ROTSHIBir(X86_SAR, IM, RD)
989     #define SARBim(IM, MD, MB, MI, MS) _ROTSHIBim(X86_SAR, IM, MD, MB, MI, MS)
990     #define SARBrr(RS, RD) _ROTSHIBrr(X86_SAR, RS, RD)
991     #define SARBrm(RS, MD, MB, MI, MS) _ROTSHIBrm(X86_SAR, RS, MD, MB, MI, MS)
992    
993     #define SARWir(IM, RD) _ROTSHIWir(X86_SAR, IM, RD)
994     #define SARWim(IM, MD, MB, MI, MS) _ROTSHIWim(X86_SAR, IM, MD, MB, MI, MS)
995     #define SARWrr(RS, RD) _ROTSHIWrr(X86_SAR, RS, RD)
996     #define SARWrm(RS, MD, MB, MI, MS) _ROTSHIWrm(X86_SAR, RS, MD, MB, MI, MS)
997    
998     #define SARLir(IM, RD) _ROTSHILir(X86_SAR, IM, RD)
999     #define SARLim(IM, MD, MB, MI, MS) _ROTSHILim(X86_SAR, IM, MD, MB, MI, MS)
1000     #define SARLrr(RS, RD) _ROTSHILrr(X86_SAR, RS, RD)
1001     #define SARLrm(RS, MD, MB, MI, MS) _ROTSHILrm(X86_SAR, RS, MD, MB, MI, MS)
1002    
1003     #define SARQir(IM, RD) _ROTSHIQir(X86_SAR, IM, RD)
1004     #define SARQim(IM, MD, MB, MI, MS) _ROTSHIQim(X86_SAR, IM, MD, MB, MI, MS)
1005     #define SARQrr(RS, RD) _ROTSHIQrr(X86_SAR, RS, RD)
1006     #define SARQrm(RS, MD, MB, MI, MS) _ROTSHIQrm(X86_SAR, RS, MD, MB, MI, MS)
1007    
1008    
1009     /* --- Bit test instructions ----------------------------------------------- */
1010    
/* ModRM reg-field values (/digit) for the 0x0f 0xba bit-test-with-imm8
 * opcode group. */
1011 gbeauche 1.2 enum {
1012 gbeauche 1.1 X86_BT = 4,
1013     X86_BTS = 5,
1014     X86_BTR = 6,
1015     X86_BTC = 7,
1016 gbeauche 1.2 };
1017 gbeauche 1.1
1018     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */

/* Generic bit-test emitters per operand size (W/L/Q).
 * - ir/im forms: 0x0f 0xba /OP with an imm8 bit index.
 * - rr/rm forms: the two-byte opcode is derived as 0x0f83|((OP)<<3),
 *   giving 0x0fa3 (BT), 0x0fab (BTS), 0x0fb3 (BTR), 0x0fbb (BTC). */
1019    
1020     #define _BTWir(OP, IM, RD) (_d16(), _REXLrr(0, RD), _OO_Mrm_B (0x0fba ,_b11,OP ,_r2(RD) ,_u8(IM)))
1021     #define _BTWim(OP, IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _OO_r_X_B (0x0fba ,OP ,MD,MB,MI,MS ,_u8(IM)))
1022     #define _BTWrr(OP, RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0f83|((OP)<<3),_b11,_r2(RS),_r2(RD) ))
1023     #define _BTWrm(OP, RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _OO_r_X (0x0f83|((OP)<<3) ,_r2(RS) ,MD,MB,MI,MS ))
1024    
1025     #define _BTLir(OP, IM, RD) (_REXLrr(0, RD), _OO_Mrm_B (0x0fba ,_b11,OP ,_r4(RD) ,_u8(IM)))
1026     #define _BTLim(OP, IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _OO_r_X_B (0x0fba ,OP ,MD,MB,MI,MS ,_u8(IM)))
1027     #define _BTLrr(OP, RS, RD) (_REXLrr(RS, RD), _OO_Mrm (0x0f83|((OP)<<3),_b11,_r4(RS),_r4(RD) ))
1028     #define _BTLrm(OP, RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0f83|((OP)<<3) ,_r4(RS) ,MD,MB,MI,MS ))
1029    
1030     #define _BTQir(OP, IM, RD) (_REXQrr(0, RD), _OO_Mrm_B (0x0fba ,_b11,OP ,_r8(RD) ,_u8(IM)))
1031     #define _BTQim(OP, IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _OO_r_X_B (0x0fba ,OP ,MD,MB,MI,MS ,_u8(IM)))
1032     #define _BTQrr(OP, RS, RD) (_REXQrr(RS, RD), _OO_Mrm (0x0f83|((OP)<<3),_b11,_r8(RS),_r8(RD) ))
1033     #define _BTQrm(OP, RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0f83|((OP)<<3) ,_r8(RS) ,MD,MB,MI,MS ))
1034    
/* Bit test mnemonics: ir/im forms take the bit index as an imm8,
 * rr/rm forms take it from a source register. */
1035     #define BTWir(IM, RD) _BTWir(X86_BT, IM, RD)
1036 gbeauche 1.20 #define BTWim(IM, MD, MB, MI, MS) _BTWim(X86_BT, IM, MD, MB, MI, MS)
1037 gbeauche 1.1 #define BTWrr(RS, RD) _BTWrr(X86_BT, RS, RD)
1038     #define BTWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BT, RS, MD, MB, MI, MS)
1039    
1040     #define BTLir(IM, RD) _BTLir(X86_BT, IM, RD)
1041     #define BTLim(IM, MD, MB, MI, MS) _BTLim(X86_BT, IM, MD, MB, MI, MS)
1042     #define BTLrr(RS, RD) _BTLrr(X86_BT, RS, RD)
1043     #define BTLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BT, RS, MD, MB, MI, MS)
1044    
1045     #define BTQir(IM, RD) _BTQir(X86_BT, IM, RD)
1046     #define BTQim(IM, MD, MB, MI, MS) _BTQim(X86_BT, IM, MD, MB, MI, MS)
1047     #define BTQrr(RS, RD) _BTQrr(X86_BT, RS, RD)
1048     #define BTQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BT, RS, MD, MB, MI, MS)
1049    
1050     #define BTCWir(IM, RD) _BTWir(X86_BTC, IM, RD)
1051 gbeauche 1.20 #define BTCWim(IM, MD, MB, MI, MS) _BTWim(X86_BTC, IM, MD, MB, MI, MS)
1052 gbeauche 1.1 #define BTCWrr(RS, RD) _BTWrr(X86_BTC, RS, RD)
1053     #define BTCWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BTC, RS, MD, MB, MI, MS)
1054    
1055     #define BTCLir(IM, RD) _BTLir(X86_BTC, IM, RD)
1056     #define BTCLim(IM, MD, MB, MI, MS) _BTLim(X86_BTC, IM, MD, MB, MI, MS)
1057     #define BTCLrr(RS, RD) _BTLrr(X86_BTC, RS, RD)
1058     #define BTCLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BTC, RS, MD, MB, MI, MS)
1059    
1060     #define BTCQir(IM, RD) _BTQir(X86_BTC, IM, RD)
1061     #define BTCQim(IM, MD, MB, MI, MS) _BTQim(X86_BTC, IM, MD, MB, MI, MS)
1062     #define BTCQrr(RS, RD) _BTQrr(X86_BTC, RS, RD)
1063     #define BTCQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BTC, RS, MD, MB, MI, MS)
1064    
1065     #define BTRWir(IM, RD) _BTWir(X86_BTR, IM, RD)
1066 gbeauche 1.20 #define BTRWim(IM, MD, MB, MI, MS) _BTWim(X86_BTR, IM, MD, MB, MI, MS)
1067 gbeauche 1.1 #define BTRWrr(RS, RD) _BTWrr(X86_BTR, RS, RD)
1068     #define BTRWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BTR, RS, MD, MB, MI, MS)
1069    
1070     #define BTRLir(IM, RD) _BTLir(X86_BTR, IM, RD)
1071     #define BTRLim(IM, MD, MB, MI, MS) _BTLim(X86_BTR, IM, MD, MB, MI, MS)
1072     #define BTRLrr(RS, RD) _BTLrr(X86_BTR, RS, RD)
1073     #define BTRLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BTR, RS, MD, MB, MI, MS)
1074    
1075     #define BTRQir(IM, RD) _BTQir(X86_BTR, IM, RD)
1076     #define BTRQim(IM, MD, MB, MI, MS) _BTQim(X86_BTR, IM, MD, MB, MI, MS)
1077     #define BTRQrr(RS, RD) _BTQrr(X86_BTR, RS, RD)
1078     #define BTRQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BTR, RS, MD, MB, MI, MS)
1079    
1080     #define BTSWir(IM, RD) _BTWir(X86_BTS, IM, RD)
1081 gbeauche 1.20 #define BTSWim(IM, MD, MB, MI, MS) _BTWim(X86_BTS, IM, MD, MB, MI, MS)
1082 gbeauche 1.1 #define BTSWrr(RS, RD) _BTWrr(X86_BTS, RS, RD)
1083     #define BTSWrm(RS, MD, MB, MI, MS) _BTWrm(X86_BTS, RS, MD, MB, MI, MS)
1084    
1085     #define BTSLir(IM, RD) _BTLir(X86_BTS, IM, RD)
1086     #define BTSLim(IM, MD, MB, MI, MS) _BTLim(X86_BTS, IM, MD, MB, MI, MS)
1087     #define BTSLrr(RS, RD) _BTLrr(X86_BTS, RS, RD)
1088     #define BTSLrm(RS, MD, MB, MI, MS) _BTLrm(X86_BTS, RS, MD, MB, MI, MS)
1089    
1090     #define BTSQir(IM, RD) _BTQir(X86_BTS, IM, RD)
1091     #define BTSQim(IM, MD, MB, MI, MS) _BTQim(X86_BTS, IM, MD, MB, MI, MS)
1092     #define BTSQrr(RS, RD) _BTQrr(X86_BTS, RS, RD)
1093     #define BTSQrm(RS, MD, MB, MI, MS) _BTQrm(X86_BTS, RS, MD, MB, MI, MS)
1094    
1095    
1096     /* --- Move instructions --------------------------------------------------- */
1097    
1098     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */

/* MOV per operand size. Notes:
 * - MOVW* prepend the 0x66 operand-size prefix via _d16().
 * - MOVQir emits 0xb8+r with a full 64-bit immediate (_Or_Q), whereas
 *   MOVQim uses 0xc7 whose immediate is only 32 bits (_O_X_L); the CPU
 *   sign-extends it to 64 bits. */
1099    
1100 gbeauche 1.9 #define MOVBrr(RS, RD) (_REXBrr(RS, RD), _O_Mrm (0x88 ,_b11,_r1(RS),_r1(RD) ))
1101 gbeauche 1.1 #define MOVBmr(MD, MB, MI, MS, RD) (_REXBmr(MB, MI, RD), _O_r_X (0x8a ,_r1(RD) ,MD,MB,MI,MS ))
1102     #define MOVBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (0x88 ,_r1(RS) ,MD,MB,MI,MS ))
1103     #define MOVBir(IM, R) (_REXBrr(0, R), _Or_B (0xb0,_r1(R) ,_su8(IM)))
1104     #define MOVBim(IM, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_X_B (0xc6 ,MD,MB,MI,MS ,_su8(IM)))
1105    
1106     #define MOVWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (0x89 ,_b11,_r2(RS),_r2(RD) ))
1107 gbeauche 1.11 #define MOVWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _O_r_X (0x8b ,_r2(RD) ,MD,MB,MI,MS ))
1108     #define MOVWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0x89 ,_r2(RS) ,MD,MB,MI,MS ))
1109 gbeauche 1.1 #define MOVWir(IM, R) (_d16(), _REXLrr(0, R), _Or_W (0xb8,_r2(R) ,_su16(IM)))
1110 gbeauche 1.11 #define MOVWim(IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_X_W (0xc7 ,MD,MB,MI,MS ,_su16(IM)))
1111 gbeauche 1.1
1112     #define MOVLrr(RS, RD) (_REXLrr(RS, RD), _O_Mrm (0x89 ,_b11,_r4(RS),_r4(RD) ))
1113     #define MOVLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _O_r_X (0x8b ,_r4(RD) ,MD,MB,MI,MS ))
1114     #define MOVLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (0x89 ,_r4(RS) ,MD,MB,MI,MS ))
1115     #define MOVLir(IM, R) (_REXLrr(0, R), _Or_L (0xb8,_r4(R) ,IM ))
1116     #define MOVLim(IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_X_L (0xc7 ,MD,MB,MI,MS ,IM ))
1117    
1118     #define MOVQrr(RS, RD) (_REXQrr(RS, RD), _O_Mrm (0x89 ,_b11,_r8(RS),_r8(RD) ))
1119     #define MOVQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _O_r_X (0x8b ,_r8(RD) ,MD,MB,MI,MS ))
1120     #define MOVQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (0x89 ,_r8(RS) ,MD,MB,MI,MS ))
1121 gbeauche 1.15 #define MOVQir(IM, R) (_REXQrr(0, R), _Or_Q (0xb8,_r8(R) ,IM ))
1122 gbeauche 1.1 #define MOVQim(IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_X_L (0xc7 ,MD,MB,MI,MS ,IM ))
1123    
1124    
1125     /* --- Unary and Multiply/Divide instructions ------------------------------ */
1126    
/* ModRM reg-field values (/digit) for the group-3 opcodes (0xf6 byte,
 * 0xf7 word/long/quad). /0 and /1 (TEST) are handled elsewhere. */
1127 gbeauche 1.2 enum {
1128 gbeauche 1.1 X86_NOT = 2,
1129     X86_NEG = 3,
1130     X86_MUL = 4,
1131     X86_IMUL = 5,
1132     X86_DIV = 6,
1133     X86_IDIV = 7,
1134 gbeauche 1.2 };
1135 gbeauche 1.1
1136     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */

/* Generic one-operand group-3 emitters per size; the single explicit
 * operand is a register (*r) or a memory location (*m). */
1137    
1138     #define _UNARYBr(OP, RS) (_REXBrr(0, RS), _O_Mrm (0xf6 ,_b11,OP ,_r1(RS) ))
1139     #define _UNARYBm(OP, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X (0xf6 ,OP ,MD,MB,MI,MS ))
1140     #define _UNARYWr(OP, RS) (_d16(), _REXLrr(0, RS), _O_Mrm (0xf7 ,_b11,OP ,_r2(RS) ))
1141     #define _UNARYWm(OP, MD, MB, MI, MS) (_d16(), _REXLmr(MB, MI, 0), _O_r_X (0xf7 ,OP ,MD,MB,MI,MS ))
1142     #define _UNARYLr(OP, RS) (_REXLrr(0, RS), _O_Mrm (0xf7 ,_b11,OP ,_r4(RS) ))
1143     #define _UNARYLm(OP, MD, MB, MI, MS) (_REXLmr(MB, MI, 0), _O_r_X (0xf7 ,OP ,MD,MB,MI,MS ))
1144     #define _UNARYQr(OP, RS) (_REXQrr(0, RS), _O_Mrm (0xf7 ,_b11,OP ,_r8(RS) ))
1145     #define _UNARYQm(OP, MD, MB, MI, MS) (_REXQmr(MB, MI, 0), _O_r_X (0xf7 ,OP ,MD,MB,MI,MS ))
1146    
/* NOT/NEG/MUL/IMUL/DIV/IDIV mnemonics, one explicit operand per size.
 * (Implicit accumulator operands of MUL/DIV follow the x86 ISA.) */
1147     #define NOTBr(RS) _UNARYBr(X86_NOT, RS)
1148     #define NOTBm(MD, MB, MI, MS) _UNARYBm(X86_NOT, MD, MB, MI, MS)
1149     #define NOTWr(RS) _UNARYWr(X86_NOT, RS)
1150     #define NOTWm(MD, MB, MI, MS) _UNARYWm(X86_NOT, MD, MB, MI, MS)
1151     #define NOTLr(RS) _UNARYLr(X86_NOT, RS)
1152     #define NOTLm(MD, MB, MI, MS) _UNARYLm(X86_NOT, MD, MB, MI, MS)
1153     #define NOTQr(RS) _UNARYQr(X86_NOT, RS)
1154     #define NOTQm(MD, MB, MI, MS) _UNARYQm(X86_NOT, MD, MB, MI, MS)
1155    
1156     #define NEGBr(RS) _UNARYBr(X86_NEG, RS)
1157     #define NEGBm(MD, MB, MI, MS) _UNARYBm(X86_NEG, MD, MB, MI, MS)
1158     #define NEGWr(RS) _UNARYWr(X86_NEG, RS)
1159     #define NEGWm(MD, MB, MI, MS) _UNARYWm(X86_NEG, MD, MB, MI, MS)
1160     #define NEGLr(RS) _UNARYLr(X86_NEG, RS)
1161     #define NEGLm(MD, MB, MI, MS) _UNARYLm(X86_NEG, MD, MB, MI, MS)
1162     #define NEGQr(RS) _UNARYQr(X86_NEG, RS)
1163     #define NEGQm(MD, MB, MI, MS) _UNARYQm(X86_NEG, MD, MB, MI, MS)
1164    
1165     #define MULBr(RS) _UNARYBr(X86_MUL, RS)
1166     #define MULBm(MD, MB, MI, MS) _UNARYBm(X86_MUL, MD, MB, MI, MS)
1167     #define MULWr(RS) _UNARYWr(X86_MUL, RS)
1168     #define MULWm(MD, MB, MI, MS) _UNARYWm(X86_MUL, MD, MB, MI, MS)
1169     #define MULLr(RS) _UNARYLr(X86_MUL, RS)
1170     #define MULLm(MD, MB, MI, MS) _UNARYLm(X86_MUL, MD, MB, MI, MS)
1171     #define MULQr(RS) _UNARYQr(X86_MUL, RS)
1172     #define MULQm(MD, MB, MI, MS) _UNARYQm(X86_MUL, MD, MB, MI, MS)
1173    
1174     #define IMULBr(RS) _UNARYBr(X86_IMUL, RS)
1175     #define IMULBm(MD, MB, MI, MS) _UNARYBm(X86_IMUL, MD, MB, MI, MS)
1176     #define IMULWr(RS) _UNARYWr(X86_IMUL, RS)
1177     #define IMULWm(MD, MB, MI, MS) _UNARYWm(X86_IMUL, MD, MB, MI, MS)
1178     #define IMULLr(RS) _UNARYLr(X86_IMUL, RS)
1179     #define IMULLm(MD, MB, MI, MS) _UNARYLm(X86_IMUL, MD, MB, MI, MS)
1180     #define IMULQr(RS) _UNARYQr(X86_IMUL, RS)
1181     #define IMULQm(MD, MB, MI, MS) _UNARYQm(X86_IMUL, MD, MB, MI, MS)
1182    
1183     #define DIVBr(RS) _UNARYBr(X86_DIV, RS)
1184     #define DIVBm(MD, MB, MI, MS) _UNARYBm(X86_DIV, MD, MB, MI, MS)
1185     #define DIVWr(RS) _UNARYWr(X86_DIV, RS)
1186     #define DIVWm(MD, MB, MI, MS) _UNARYWm(X86_DIV, MD, MB, MI, MS)
1187     #define DIVLr(RS) _UNARYLr(X86_DIV, RS)
1188     #define DIVLm(MD, MB, MI, MS) _UNARYLm(X86_DIV, MD, MB, MI, MS)
1189     #define DIVQr(RS) _UNARYQr(X86_DIV, RS)
1190     #define DIVQm(MD, MB, MI, MS) _UNARYQm(X86_DIV, MD, MB, MI, MS)
1191    
1192     #define IDIVBr(RS) _UNARYBr(X86_IDIV, RS)
1193     #define IDIVBm(MD, MB, MI, MS) _UNARYBm(X86_IDIV, MD, MB, MI, MS)
1194     #define IDIVWr(RS) _UNARYWr(X86_IDIV, RS)
1195     #define IDIVWm(MD, MB, MI, MS) _UNARYWm(X86_IDIV, MD, MB, MI, MS)
1196     #define IDIVLr(RS) _UNARYLr(X86_IDIV, RS)
1197     #define IDIVLm(MD, MB, MI, MS) _UNARYLm(X86_IDIV, MD, MB, MI, MS)
1198     #define IDIVQr(RS) _UNARYQr(X86_IDIV, RS)
1199     #define IDIVQm(MD, MB, MI, MS) _UNARYQm(X86_IDIV, MD, MB, MI, MS)
1200    
1201     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */

/* Two- and three-operand IMUL forms.
 * - Two-operand (0x0f 0xaf): dest goes in the ModRM reg field and source
 *   in r/m, so the REX/ModRM argument order is deliberately reversed
 *   (RD first) relative to the other rr macros.
 * - Three-operand (0x69): RD = RS * imm, with the immediate emitted
 *   sign-extended where possible via the _Os*/_s* helpers. */
1202    
1203 gbeauche 1.15 #define IMULWrr(RS, RD) (_d16(), _REXLrr(RD, RS), _OO_Mrm (0x0faf ,_b11,_r2(RD),_r2(RS) ))
1204 gbeauche 1.1 #define IMULWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0faf ,_r2(RD) ,MD,MB,MI,MS ))
1205    
1206     #define IMULWirr(IM,RS,RD) (_d16(), _REXLrr(RS, RD), _Os_Mrm_sW (0x69 ,_b11,_r2(RS),_r2(RD) ,_su16(IM) ))
1207     #define IMULWimr(IM,MD,MB,MI,MS,RD) (_d16(), _REXLmr(MB, MI, RD), _Os_r_X_sW (0x69 ,_r2(RD) ,MD,MB,MI,MS ,_su16(IM) ))
1208    
1209     #define IMULLir(IM, RD) (_REXLrr(0, RD), _Os_Mrm_sL (0x69 ,_b11,_r4(RD),_r4(RD) ,IM ))
1210 gbeauche 1.15 #define IMULLrr(RS, RD) (_REXLrr(RD, RS), _OO_Mrm (0x0faf ,_b11,_r4(RD),_r4(RS) ))
1211 gbeauche 1.1 #define IMULLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0faf ,_r4(RD) ,MD,MB,MI,MS ))
1212    
1213     #define IMULQir(IM, RD) (_REXQrr(0, RD), _Os_Mrm_sL (0x69 ,_b11,_r8(RD),_r8(RD) ,IM ))
1214 gbeauche 1.15 #define IMULQrr(RS, RD) (_REXQrr(RD, RS), _OO_Mrm (0x0faf ,_b11,_r8(RD),_r8(RS) ))
1215 gbeauche 1.1 #define IMULQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0faf ,_r8(RD) ,MD,MB,MI,MS ))
1216    
1217     #define IMULLirr(IM,RS,RD) (_REXLrr(RS, RD), _Os_Mrm_sL (0x69 ,_b11,_r4(RS),_r4(RD) ,IM ))
1218     #define IMULLimr(IM,MD,MB,MI,MS,RD) (_REXLmr(MB, MI, RD), _Os_r_X_sL (0x69 ,_r4(RD) ,MD,MB,MI,MS ,IM ))
1219    
1220     #define IMULQirr(IM,RS,RD) (_REXQrr(RS, RD), _Os_Mrm_sL (0x69 ,_b11,_r8(RS),_r8(RD) ,IM ))
1221     #define IMULQimr(IM,MD,MB,MI,MS,RD) (_REXQmr(MB, MI, RD), _Os_r_X_sL (0x69 ,_r8(RD) ,MD,MB,MI,MS ,IM ))
1222    
1223    
1224     /* --- Control Flow related instructions ----------------------------------- */
1225    
1226 gbeauche 1.5 enum {
1227     X86_CC_O = 0x0,
1228     X86_CC_NO = 0x1,
1229     X86_CC_NAE = 0x2,
1230     X86_CC_B = 0x2,
1231     X86_CC_C = 0x2,
1232     X86_CC_AE = 0x3,
1233     X86_CC_NB = 0x3,
1234     X86_CC_NC = 0x3,
1235     X86_CC_E = 0x4,
1236     X86_CC_Z = 0x4,
1237     X86_CC_NE = 0x5,
1238     X86_CC_NZ = 0x5,
1239     X86_CC_BE = 0x6,
1240     X86_CC_NA = 0x6,
1241     X86_CC_A = 0x7,
1242     X86_CC_NBE = 0x7,
1243     X86_CC_S = 0x8,
1244     X86_CC_NS = 0x9,
1245     X86_CC_P = 0xa,
1246     X86_CC_PE = 0xa,
1247     X86_CC_NP = 0xb,
1248     X86_CC_PO = 0xb,
1249     X86_CC_L = 0xc,
1250     X86_CC_NGE = 0xc,
1251     X86_CC_GE = 0xd,
1252     X86_CC_NL = 0xd,
1253     X86_CC_LE = 0xe,
1254     X86_CC_NG = 0xe,
1255     X86_CC_G = 0xf,
1256     X86_CC_NLE = 0xf,
1257     };
1258    
1259 gbeauche 1.1 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1260    
1261     // FIXME: no prefix is available to encode a 32-bit operand size in 64-bit mode
1262     #define CALLm(M) _O_D32 (0xe8 ,(int)(M) )
1263 gbeauche 1.13 #define _CALLLsr(R) (_REXLrr(0, R), _O_Mrm (0xff ,_b11,_b010,_r4(R) ))
1264 gbeauche 1.27 #define _CALLQsr(R) (_REXLrr(0, R), _O_Mrm (0xff ,_b11,_b010,_r8(R) ))
1265 gbeauche 1.13 #define CALLsr(R) ( X86_TARGET_64BIT ? _CALLQsr(R) : _CALLLsr(R))
1266 gbeauche 1.1 #define CALLsm(D,B,I,S) (_REXLrm(0, B, I), _O_r_X (0xff ,_b010 ,(int)(D),B,I,S ))
1267    
1268     // FIXME: no prefix is available to encode a 32-bit operand size in 64-bit mode
1269 gbeauche 1.13 #define JMPSm(M) _O_D8 (0xeb ,(int)(M) )
1270     #define JMPm(M) _O_D32 (0xe9 ,(int)(M) )
1271     #define _JMPLsr(R) (_REXLrr(0, R), _O_Mrm (0xff ,_b11,_b100,_r4(R) ))
1272 gbeauche 1.27 #define _JMPQsr(R) (_REXLrr(0, R), _O_Mrm (0xff ,_b11,_b100,_r8(R) ))
1273 gbeauche 1.13 #define JMPsr(R) ( X86_TARGET_64BIT ? _JMPQsr(R) : _JMPLsr(R))
1274 gbeauche 1.1 #define JMPsm(D,B,I,S) (_REXLrm(0, B, I), _O_r_X (0xff ,_b100 ,(int)(D),B,I,S ))
1275    
1276     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1277 gbeauche 1.10 #define JCCSii(CC, D) _O_B (0x70|(CC) ,(_sc)(int)(D) )
1278 gbeauche 1.1 #define JCCSim(CC, D) _O_D8 (0x70|(CC) ,(int)(D) )
1279 gbeauche 1.27 #define JOSm(D) JCCSim(X86_CC_O, D)
1280     #define JNOSm(D) JCCSim(X86_CC_NO, D)
1281     #define JBSm(D) JCCSim(X86_CC_B, D)
1282     #define JNAESm(D) JCCSim(X86_CC_NAE, D)
1283     #define JNBSm(D) JCCSim(X86_CC_NB, D)
1284     #define JAESm(D) JCCSim(X86_CC_AE, D)
1285     #define JESm(D) JCCSim(X86_CC_E, D)
1286     #define JZSm(D) JCCSim(X86_CC_Z, D)
1287     #define JNESm(D) JCCSim(X86_CC_NE, D)
1288     #define JNZSm(D) JCCSim(X86_CC_NZ, D)
1289     #define JBESm(D) JCCSim(X86_CC_BE, D)
1290     #define JNASm(D) JCCSim(X86_CC_NA, D)
1291     #define JNBESm(D) JCCSim(X86_CC_NBE, D)
1292     #define JASm(D) JCCSim(X86_CC_A, D)
1293     #define JSSm(D) JCCSim(X86_CC_S, D)
1294     #define JNSSm(D) JCCSim(X86_CC_NS, D)
1295     #define JPSm(D) JCCSim(X86_CC_P, D)
1296     #define JPESm(D) JCCSim(X86_CC_PE, D)
1297     #define JNPSm(D) JCCSim(X86_CC_NP, D)
1298     #define JPOSm(D) JCCSim(X86_CC_PO, D)
1299     #define JLSm(D) JCCSim(X86_CC_L, D)
1300     #define JNGESm(D) JCCSim(X86_CC_NGE, D)
1301     #define JNLSm(D) JCCSim(X86_CC_NL, D)
1302     #define JGESm(D) JCCSim(X86_CC_GE, D)
1303     #define JLESm(D) JCCSim(X86_CC_LE, D)
1304     #define JNGSm(D) JCCSim(X86_CC_NG, D)
1305     #define JNLESm(D) JCCSim(X86_CC_NLE, D)
1306     #define JGSm(D) JCCSim(X86_CC_G, D)
1307 gbeauche 1.1
1308     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1309 gbeauche 1.10 #define JCCii(CC, D) _OO_L (0x0f80|(CC) ,(int)(D) )
1310 gbeauche 1.1 #define JCCim(CC, D) _OO_D32 (0x0f80|(CC) ,(int)(D) )
1311 gbeauche 1.27 #define JOm(D) JCCim(X86_CC_O, D)
1312     #define JNOm(D) JCCim(X86_CC_NO, D)
1313     #define JBm(D) JCCim(X86_CC_B, D)
1314     #define JNAEm(D) JCCim(X86_CC_NAE, D)
1315     #define JNBm(D) JCCim(X86_CC_NB, D)
1316     #define JAEm(D) JCCim(X86_CC_AE, D)
1317     #define JEm(D) JCCim(X86_CC_E, D)
1318     #define JZm(D) JCCim(X86_CC_Z, D)
1319     #define JNEm(D) JCCim(X86_CC_NE, D)
1320     #define JNZm(D) JCCim(X86_CC_NZ, D)
1321     #define JBEm(D) JCCim(X86_CC_BE, D)
1322     #define JNAm(D) JCCim(X86_CC_NA, D)
1323     #define JNBEm(D) JCCim(X86_CC_NBE, D)
1324     #define JAm(D) JCCim(X86_CC_A, D)
1325     #define JSm(D) JCCim(X86_CC_S, D)
1326     #define JNSm(D) JCCim(X86_CC_NS, D)
1327     #define JPm(D) JCCim(X86_CC_P, D)
1328     #define JPEm(D) JCCim(X86_CC_PE, D)
1329     #define JNPm(D) JCCim(X86_CC_NP, D)
1330     #define JPOm(D) JCCim(X86_CC_PO, D)
1331     #define JLm(D) JCCim(X86_CC_L, D)
1332     #define JNGEm(D) JCCim(X86_CC_NGE, D)
1333     #define JNLm(D) JCCim(X86_CC_NL, D)
1334     #define JGEm(D) JCCim(X86_CC_GE, D)
1335     #define JLEm(D) JCCim(X86_CC_LE, D)
1336     #define JNGm(D) JCCim(X86_CC_NG, D)
1337     #define JNLEm(D) JCCim(X86_CC_NLE, D)
1338     #define JGm(D) JCCim(X86_CC_G, D)
1339 gbeauche 1.1
1340     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1341     #define SETCCir(CC, RD) (_REXBrr(0, RD), _OO_Mrm (0x0f90|(CC) ,_b11,_b000,_r1(RD) ))
1342 gbeauche 1.27 #define SETOr(RD) SETCCir(X86_CC_O, RD)
1343     #define SETNOr(RD) SETCCir(X86_CC_NO, RD)
1344     #define SETBr(RD) SETCCir(X86_CC_B, RD)
1345     #define SETNAEr(RD) SETCCir(X86_CC_NAE, RD)
1346     #define SETNBr(RD) SETCCir(X86_CC_NB, RD)
1347     #define SETAEr(RD) SETCCir(X86_CC_AE, RD)
1348     #define SETEr(RD) SETCCir(X86_CC_E, RD)
1349     #define SETZr(RD) SETCCir(X86_CC_Z, RD)
1350     #define SETNEr(RD) SETCCir(X86_CC_NE, RD)
1351     #define SETNZr(RD) SETCCir(X86_CC_NZ, RD)
1352     #define SETBEr(RD) SETCCir(X86_CC_BE, RD)
1353     #define SETNAr(RD) SETCCir(X86_CC_NA, RD)
1354     #define SETNBEr(RD) SETCCir(X86_CC_NBE, RD)
1355     #define SETAr(RD) SETCCir(X86_CC_A, RD)
1356     #define SETSr(RD) SETCCir(X86_CC_S, RD)
1357     #define SETNSr(RD) SETCCir(X86_CC_NS, RD)
1358     #define SETPr(RD) SETCCir(X86_CC_P, RD)
1359     #define SETPEr(RD) SETCCir(X86_CC_PE, RD)
1360     #define SETNPr(RD) SETCCir(X86_CC_NP, RD)
1361     #define SETPOr(RD) SETCCir(X86_CC_PO, RD)
1362     #define SETLr(RD) SETCCir(X86_CC_L, RD)
1363     #define SETNGEr(RD) SETCCir(X86_CC_NGE, RD)
1364     #define SETNLr(RD) SETCCir(X86_CC_NL, RD)
1365     #define SETGEr(RD) SETCCir(X86_CC_GE, RD)
1366     #define SETLEr(RD) SETCCir(X86_CC_LE, RD)
1367     #define SETNGr(RD) SETCCir(X86_CC_NG, RD)
1368     #define SETNLEr(RD) SETCCir(X86_CC_NLE, RD)
1369     #define SETGr(RD) SETCCir(X86_CC_G, RD)
1370 gbeauche 1.1
1371     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1372     #define SETCCim(CC,MD,MB,MI,MS) (_REXBrm(0, MB, MI), _OO_r_X (0x0f90|(CC) ,_b000 ,MD,MB,MI,MS ))
1373 gbeauche 1.27 #define SETOm(D, B, I, S) SETCCim(X86_CC_O, D, B, I, S)
1374     #define SETNOm(D, B, I, S) SETCCim(X86_CC_NO, D, B, I, S)
1375     #define SETBm(D, B, I, S) SETCCim(X86_CC_B, D, B, I, S)
1376     #define SETNAEm(D, B, I, S) SETCCim(X86_CC_NAE, D, B, I, S)
1377     #define SETNBm(D, B, I, S) SETCCim(X86_CC_NB, D, B, I, S)
1378     #define SETAEm(D, B, I, S) SETCCim(X86_CC_AE, D, B, I, S)
1379     #define SETEm(D, B, I, S) SETCCim(X86_CC_E, D, B, I, S)
1380     #define SETZm(D, B, I, S) SETCCim(X86_CC_Z, D, B, I, S)
1381     #define SETNEm(D, B, I, S) SETCCim(X86_CC_NE, D, B, I, S)
1382     #define SETNZm(D, B, I, S) SETCCim(X86_CC_NZ, D, B, I, S)
1383     #define SETBEm(D, B, I, S) SETCCim(X86_CC_BE, D, B, I, S)
1384     #define SETNAm(D, B, I, S) SETCCim(X86_CC_NA, D, B, I, S)
1385     #define SETNBEm(D, B, I, S) SETCCim(X86_CC_NBE, D, B, I, S)
1386     #define SETAm(D, B, I, S) SETCCim(X86_CC_A, D, B, I, S)
1387     #define SETSm(D, B, I, S) SETCCim(X86_CC_S, D, B, I, S)
1388     #define SETNSm(D, B, I, S) SETCCim(X86_CC_NS, D, B, I, S)
1389     #define SETPm(D, B, I, S) SETCCim(X86_CC_P, D, B, I, S)
1390     #define SETPEm(D, B, I, S) SETCCim(X86_CC_PE, D, B, I, S)
1391     #define SETNPm(D, B, I, S) SETCCim(X86_CC_NP, D, B, I, S)
1392     #define SETPOm(D, B, I, S) SETCCim(X86_CC_PO, D, B, I, S)
1393     #define SETLm(D, B, I, S) SETCCim(X86_CC_L, D, B, I, S)
1394     #define SETNGEm(D, B, I, S) SETCCim(X86_CC_NGE, D, B, I, S)
1395     #define SETNLm(D, B, I, S) SETCCim(X86_CC_NL, D, B, I, S)
1396     #define SETGEm(D, B, I, S) SETCCim(X86_CC_GE, D, B, I, S)
1397     #define SETLEm(D, B, I, S) SETCCim(X86_CC_LE, D, B, I, S)
1398     #define SETNGm(D, B, I, S) SETCCim(X86_CC_NG, D, B, I, S)
1399     #define SETNLEm(D, B, I, S) SETCCim(X86_CC_NLE, D, B, I, S)
1400     #define SETGm(D, B, I, S) SETCCim(X86_CC_G, D, B, I, S)
1401 gbeauche 1.1
1402 gbeauche 1.5 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1403     #define CMOVWrr(CC,RS,RD) (_d16(), _REXLrr(RD, RS), _OO_Mrm (0x0f40|(CC) ,_b11,_r2(RD),_r2(RS) ))
1404     #define CMOVWmr(CC,MD,MB,MI,MS,RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0f40|(CC) ,_r2(RD) ,MD,MB,MI,MS ))
1405     #define CMOVLrr(CC,RS,RD) (_REXLrr(RD, RS), _OO_Mrm (0x0f40|(CC) ,_b11,_r4(RD),_r4(RS) ))
1406     #define CMOVLmr(CC,MD,MB,MI,MS,RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0f40|(CC) ,_r4(RD) ,MD,MB,MI,MS ))
1407     #define CMOVQrr(CC,RS,RD) (_REXQrr(RD, RS), _OO_Mrm (0x0f40|(CC) ,_b11,_r8(RD),_r8(RS) ))
1408     #define CMOVQmr(CC,MD,MB,MI,MS,RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0f40|(CC) ,_r8(RD) ,MD,MB,MI,MS ))
1409    
1410 gbeauche 1.1
1411     /* --- Push/Pop instructions ----------------------------------------------- */
1412    
1413     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1414    
1415     #define POPWr(RD) _m32only((_d16(), _Or (0x58,_r2(RD) )))
1416     #define POPWm(MD, MB, MI, MS) _m32only((_d16(), _O_r_X (0x8f ,_b000 ,MD,MB,MI,MS )))
1417    
1418     #define POPLr(RD) _m32only( _Or (0x58,_r4(RD) ))
1419     #define POPLm(MD, MB, MI, MS) _m32only( _O_r_X (0x8f ,_b000 ,MD,MB,MI,MS ))
1420    
1421 gbeauche 1.13 #define POPQr(RD) _m64only((_REXQr(RD), _Or (0x58,_r8(RD) )))
1422     #define POPQm(MD, MB, MI, MS) _m64only((_REXQm(MB, MI), _O_r_X (0x8f ,_b000 ,MD,MB,MI,MS )))
1423 gbeauche 1.1
1424     #define PUSHWr(RS) _m32only((_d16(), _Or (0x50,_r2(RS) )))
/* FIX: removed the stray comma after the opcode ("0xff, ," -> "0xff ,"), which
 * passed an empty argument to _O_r_X and shifted every following argument.
 * Encodes PUSH r/m16 (FF /6) with an operand-size prefix; 32-bit mode only. */
1425     #define PUSHWm(MD, MB, MI, MS) _m32only((_d16(), _O_r_X (0xff ,_b110 ,MD,MB,MI,MS )))
1426     #define PUSHWi(IM) _m32only((_d16(), _Os_sW (0x68 ,IM )))
1427    
1428     #define PUSHLr(RS) _m32only( _Or (0x50,_r4(RS) ))
1429     #define PUSHLm(MD, MB, MI, MS) _m32only( _O_r_X (0xff ,_b110 ,MD,MB,MI,MS ))
1430     #define PUSHLi(IM) _m32only( _Os_sL (0x68 ,IM ))
1431    
1432 gbeauche 1.13 #define PUSHQr(RS) _m64only((_REXQr(RS), _Or (0x50,_r8(RS) )))
1433     #define PUSHQm(MD, MB, MI, MS) _m64only((_REXQm(MB, MI), _O_r_X (0xff ,_b110 ,MD,MB,MI,MS )))
1434 gbeauche 1.1 #define PUSHQi(IM) _m64only( _Os_sL (0x68 ,IM ))
1435    
1436     #define POPA() (_d16(), _O (0x61 ))
1437     #define POPAD() _O (0x61 )
1438    
1439     #define PUSHA() (_d16(), _O (0x60 ))
1440     #define PUSHAD() _O (0x60 )
1441    
1442 gbeauche 1.11 #define POPF() _O (0x9d )
1443 gbeauche 1.1 #define PUSHF() _O (0x9c )
1444    
1445    
1446     /* --- Test instructions --------------------------------------------------- */
1447    
1448     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1449    
1450     #define TESTBrr(RS, RD) (_REXBrr(RS, RD), _O_Mrm (0x84 ,_b11,_r1(RS),_r1(RD) ))
1451     #define TESTBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (0x84 ,_r1(RS) ,MD,MB,MI,MS ))
1452 gbeauche 1.10 #define TESTBir(IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_AL) ? \
1453     (_REXBrr(0, RD), _O_B (0xa8 ,_u8(IM))) : \
1454     (_REXBrr(0, RD), _O_Mrm_B (0xf6 ,_b11,_b000 ,_r1(RD) ,_u8(IM))) )
1455 gbeauche 1.1 #define TESTBim(IM, MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X_B (0xf6 ,_b000 ,MD,MB,MI,MS ,_u8(IM)))
1456    
1457     #define TESTWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (0x85 ,_b11,_r2(RS),_r2(RD) ))
1458 gbeauche 1.11 #define TESTWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0x85 ,_r2(RS) ,MD,MB,MI,MS ))
1459 gbeauche 1.10 #define TESTWir(IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_AX) ? \
1460     (_d16(), _REXLrr(0, RD), _O_W (0xa9 ,_u16(IM))) : \
1461     (_d16(), _REXLrr(0, RD), _O_Mrm_W (0xf7 ,_b11,_b000 ,_r2(RD) ,_u16(IM))) )
1462 gbeauche 1.11 #define TESTWim(IM, MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_r_X_W (0xf7 ,_b000 ,MD,MB,MI,MS ,_u16(IM)))
1463 gbeauche 1.1
1464     #define TESTLrr(RS, RD) (_REXLrr(RS, RD), _O_Mrm (0x85 ,_b11,_r4(RS),_r4(RD) ))
1465     #define TESTLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (0x85 ,_r4(RS) ,MD,MB,MI,MS ))
1466 gbeauche 1.10 #define TESTLir(IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_EAX) ? \
1467     (_REXLrr(0, RD), _O_L (0xa9 ,IM )) : \
1468     (_REXLrr(0, RD), _O_Mrm_L (0xf7 ,_b11,_b000 ,_r4(RD) ,IM )) )
1469 gbeauche 1.1 #define TESTLim(IM, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X_L (0xf7 ,_b000 ,MD,MB,MI,MS ,IM ))
1470    
1471     #define TESTQrr(RS, RD) (_REXQrr(RS, RD), _O_Mrm (0x85 ,_b11,_r8(RS),_r8(RD) ))
1472     #define TESTQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (0x85 ,_r8(RS) ,MD,MB,MI,MS ))
1473 gbeauche 1.10 #define TESTQir(IM, RD) (X86_OPTIMIZE_ALU && ((RD) == X86_RAX) ? \
1474     (_REXQrr(0, RD), _O_L (0xa9 ,IM )) : \
1475     (_REXQrr(0, RD), _O_Mrm_L (0xf7 ,_b11,_b000 ,_r8(RD) ,IM )) )
1476 gbeauche 1.1 #define TESTQim(IM, MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_r_X_L (0xf7 ,_b000 ,MD,MB,MI,MS ,IM ))
1477    
1478    
1479     /* --- Exchange instructions ----------------------------------------------- */
1480    
1481     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1482    
1483     #define CMPXCHGBrr(RS, RD) (_REXBrr(RS, RD), _OO_Mrm (0x0fb0 ,_b11,_r1(RS),_r1(RD) ))
1484     #define CMPXCHGBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _OO_r_X (0x0fb0 ,_r1(RS) ,MD,MB,MI,MS ))
1485    
1486     #define CMPXCHGWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0fb1 ,_b11,_r2(RS),_r2(RD) ))
1487     #define CMPXCHGWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _OO_r_X (0x0fb1 ,_r2(RS) ,MD,MB,MI,MS ))
1488    
1489     #define CMPXCHGLrr(RS, RD) (_REXLrr(RS, RD), _OO_Mrm (0x0fb1 ,_b11,_r4(RS),_r4(RD) ))
1490     #define CMPXCHGLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0fb1 ,_r4(RS) ,MD,MB,MI,MS ))
1491    
1492     #define CMPXCHGQrr(RS, RD) (_REXQrr(RS, RD), _OO_Mrm (0x0fb1 ,_b11,_r8(RS),_r8(RD) ))
1493     #define CMPXCHGQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0fb1 ,_r8(RS) ,MD,MB,MI,MS ))
1494    
1495     #define XADDBrr(RS, RD) (_REXBrr(RS, RD), _OO_Mrm (0x0fc0 ,_b11,_r1(RS),_r1(RD) ))
1496     #define XADDBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _OO_r_X (0x0fc0 ,_r1(RS) ,MD,MB,MI,MS ))
1497    
1498     #define XADDWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _OO_Mrm (0x0fc1 ,_b11,_r2(RS),_r2(RD) ))
1499     #define XADDWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _OO_r_X (0x0fc1 ,_r2(RS) ,MD,MB,MI,MS ))
1500    
1501     #define XADDLrr(RS, RD) (_REXLrr(RS, RD), _OO_Mrm (0x0fc1 ,_b11,_r4(RS),_r4(RD) ))
1502     #define XADDLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0fc1 ,_r4(RS) ,MD,MB,MI,MS ))
1503    
1504     #define XADDQrr(RS, RD) (_REXQrr(RS, RD), _OO_Mrm (0x0fc1 ,_b11,_r8(RS),_r8(RD) ))
1505     #define XADDQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0fc1 ,_r8(RS) ,MD,MB,MI,MS ))
1506    
1507     #define XCHGBrr(RS, RD) (_REXBrr(RS, RD), _O_Mrm (0x86 ,_b11,_r1(RS),_r1(RD) ))
1508     #define XCHGBrm(RS, MD, MB, MI, MS) (_REXBrm(RS, MB, MI), _O_r_X (0x86 ,_r1(RS) ,MD,MB,MI,MS ))
1509    
1510     #define XCHGWrr(RS, RD) (_d16(), _REXLrr(RS, RD), _O_Mrm (0x87 ,_b11,_r2(RS),_r2(RD) ))
1511     #define XCHGWrm(RS, MD, MB, MI, MS) (_d16(), _REXLrm(RS, MB, MI), _O_r_X (0x87 ,_r2(RS) ,MD,MB,MI,MS ))
1512    
1513     #define XCHGLrr(RS, RD) (_REXLrr(RS, RD), _O_Mrm (0x87 ,_b11,_r4(RS),_r4(RD) ))
1514     #define XCHGLrm(RS, MD, MB, MI, MS) (_REXLrm(RS, MB, MI), _O_r_X (0x87 ,_r4(RS) ,MD,MB,MI,MS ))
1515    
1516     #define XCHGQrr(RS, RD) (_REXQrr(RS, RD), _O_Mrm (0x87 ,_b11,_r8(RS),_r8(RD) ))
1517     #define XCHGQrm(RS, MD, MB, MI, MS) (_REXQrm(RS, MB, MI), _O_r_X (0x87 ,_r8(RS) ,MD,MB,MI,MS ))
1518    
1519    
1520     /* --- Increment/Decrement instructions ------------------------------------ */
1521    
1522     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1523    
1524     #define DECBm(MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X (0xfe ,_b001 ,MD,MB,MI,MS ))
1525     #define DECBr(RD) (_REXBrr(0, RD), _O_Mrm (0xfe ,_b11,_b001 ,_r1(RD) ))
1526    
1527     #define DECWm(MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_r_X (0xff ,_b001 ,MD,MB,MI,MS ))
1528     #define DECWr(RD) (! X86_TARGET_64BIT ? (_d16(), _Or (0x48,_r2(RD) )) : \
1529     (_d16(), _REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b001 ,_r2(RD) )))
1530    
1531     #define DECLm(MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X (0xff ,_b001 ,MD,MB,MI,MS ))
1532     #define DECLr(RD) (! X86_TARGET_64BIT ? _Or (0x48,_r4(RD) ) : \
1533     (_REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b001 ,_r4(RD) )))
1534    
1535     #define DECQm(MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_r_X (0xff ,_b001 ,MD,MB,MI,MS ))
1536     #define DECQr(RD) (_REXQrr(0, RD), _O_Mrm (0xff ,_b11,_b001 ,_r8(RD) ))
1537    
1538     #define INCBm(MD, MB, MI, MS) (_REXBrm(0, MB, MI), _O_r_X (0xfe ,_b000 ,MD,MB,MI,MS ))
1539     #define INCBr(RD) (_REXBrr(0, RD), _O_Mrm (0xfe ,_b11,_b000 ,_r1(RD) ))
1540    
1541     #define INCWm(MD, MB, MI, MS) (_d16(), _REXLrm(0, MB, MI), _O_r_X (0xff ,_b000 ,MD,MB,MI,MS ))
1542     #define INCWr(RD) (! X86_TARGET_64BIT ? (_d16(), _Or (0x40,_r2(RD) )) : \
1543     (_d16(), _REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b000 ,_r2(RD) )) )
1544    
1545     #define INCLm(MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X (0xff ,_b000 ,MD,MB,MI,MS ))
1546     #define INCLr(RD) (! X86_TARGET_64BIT ? _Or (0x40,_r4(RD) ) : \
1547     (_REXLrr(0, RD), _O_Mrm (0xff ,_b11,_b000 ,_r4(RD) )))
1548    
1549     #define INCQm(MD, MB, MI, MS) (_REXQrm(0, MB, MI), _O_r_X (0xff ,_b000 ,MD,MB,MI,MS ))
1550     #define INCQr(RD) (_REXQrr(0, RD), _O_Mrm (0xff ,_b11,_b000 ,_r8(RD) ))
1551    
1552    
1553 gbeauche 1.5 /* --- Misc instructions --------------------------------------------------- */
1554    
1555     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1556    
1557     #define BSFWrr(RS, RD) (_d16(), _REXLrr(RD, RS), _OO_Mrm (0x0fbc ,_b11,_r2(RD),_r2(RS) ))
1558     #define BSFWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0fbc ,_r2(RD) ,MD,MB,MI,MS ))
1559     #define BSRWrr(RS, RD) (_d16(), _REXLrr(RD, RS), _OO_Mrm (0x0fbd ,_b11,_r2(RD),_r2(RS) ))
1560     #define BSRWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0fbd ,_r2(RD) ,MD,MB,MI,MS ))
1561    
1562     #define BSFLrr(RS, RD) (_REXLrr(RD, RS), _OO_Mrm (0x0fbc ,_b11,_r4(RD),_r4(RS) ))
1563     #define BSFLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0fbc ,_r4(RD) ,MD,MB,MI,MS ))
1564     #define BSRLrr(RS, RD) (_REXLrr(RD, RS), _OO_Mrm (0x0fbd ,_b11,_r4(RD),_r4(RS) ))
1565     #define BSRLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0fbd ,_r4(RD) ,MD,MB,MI,MS ))
1566    
1567     #define BSFQrr(RS, RD) (_REXQrr(RD, RS), _OO_Mrm (0x0fbc ,_b11,_r8(RD),_r8(RS) ))
1568     #define BSFQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0fbc ,_r8(RD) ,MD,MB,MI,MS ))
1569     #define BSRQrr(RS, RD) (_REXQrr(RD, RS), _OO_Mrm (0x0fbd ,_b11,_r8(RD),_r8(RS) ))
1570     #define BSRQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0fbd ,_r8(RD) ,MD,MB,MI,MS ))
1571 gbeauche 1.1
1572     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1573    
1574 gbeauche 1.15 #define MOVSBWrr(RS, RD) (_d16(), _REXBLrr(RD, RS), _OO_Mrm (0x0fbe ,_b11,_r2(RD),_r1(RS) ))
1575 gbeauche 1.7 #define MOVSBWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0fbe ,_r2(RD) ,MD,MB,MI,MS ))
1576 gbeauche 1.15 #define MOVZBWrr(RS, RD) (_d16(), _REXBLrr(RD, RS), _OO_Mrm (0x0fb6 ,_b11,_r2(RD),_r1(RS) ))
1577 gbeauche 1.7 #define MOVZBWmr(MD, MB, MI, MS, RD) (_d16(), _REXLmr(MB, MI, RD), _OO_r_X (0x0fb6 ,_r2(RD) ,MD,MB,MI,MS ))
1578    
1579 gbeauche 1.14 #define MOVSBLrr(RS, RD) (_REXBLrr(RD, RS), _OO_Mrm (0x0fbe ,_b11,_r4(RD),_r1(RS) ))
1580 gbeauche 1.7 #define MOVSBLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0fbe ,_r4(RD) ,MD,MB,MI,MS ))
1581 gbeauche 1.14 #define MOVZBLrr(RS, RD) (_REXBLrr(RD, RS), _OO_Mrm (0x0fb6 ,_b11,_r4(RD),_r1(RS) ))
1582 gbeauche 1.7 #define MOVZBLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0fb6 ,_r4(RD) ,MD,MB,MI,MS ))
1583    
1584     #define MOVSBQrr(RS, RD) (_REXQrr(RD, RS), _OO_Mrm (0x0fbe ,_b11,_r8(RD),_r1(RS) ))
1585     #define MOVSBQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0fbe ,_r8(RD) ,MD,MB,MI,MS ))
1586     #define MOVZBQrr(RS, RD) (_REXQrr(RD, RS), _OO_Mrm (0x0fb6 ,_b11,_r8(RD),_r1(RS) ))
1587     #define MOVZBQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _OO_r_X (0x0fb6 ,_r8(RD) ,MD,MB,MI,MS ))
1588    
1589     #define MOVSWLrr(RS, RD) (_REXLrr(RD, RS), _OO_Mrm (0x0fbf ,_b11,_r4(RD),_r2(RS) ))
1590     #define MOVSWLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0fbf ,_r4(RD) ,MD,MB,MI,MS ))
1591     #define MOVZWLrr(RS, RD) (_REXLrr(RD, RS), _OO_Mrm (0x0fb7 ,_b11,_r4(RD),_r2(RS) ))
1592     #define MOVZWLmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _OO_r_X (0x0fb7 ,_r4(RD) ,MD,MB,MI,MS ))
1593    
1594 gbeauche 1.20 #define MOVSWQrr(RS, RD) _m64only((_REXQrr(RD, RS), _OO_Mrm (0x0fbf ,_b11,_r8(RD),_r2(RS) )))
1595     #define MOVSWQmr(MD, MB, MI, MS, RD) _m64only((_REXQmr(MB, MI, RD), _OO_r_X (0x0fbf ,_r8(RD) ,MD,MB,MI,MS )))
1596     #define MOVZWQrr(RS, RD) _m64only((_REXQrr(RD, RS), _OO_Mrm (0x0fb7 ,_b11,_r8(RD),_r2(RS) )))
1597     #define MOVZWQmr(MD, MB, MI, MS, RD) _m64only((_REXQmr(MB, MI, RD), _OO_r_X (0x0fb7 ,_r8(RD) ,MD,MB,MI,MS )))
1598 gbeauche 1.7
1599     #define MOVSLQrr(RS, RD) _m64only((_REXQrr(RD, RS), _O_Mrm (0x63 ,_b11,_r8(RD),_r4(RS) )))
1600     #define MOVSLQmr(MD, MB, MI, MS, RD) _m64only((_REXQmr(MB, MI, RD), _O_r_X (0x63 ,_r8(RD) ,MD,MB,MI,MS )))
1601    
1602     /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1603    
1604 gbeauche 1.1 #define LEALmr(MD, MB, MI, MS, RD) (_REXLmr(MB, MI, RD), _O_r_X (0x8d ,_r4(RD) ,MD,MB,MI,MS ))
/* FIX: use _r8(RD) for the 64-bit LEA form, consistent with the _REXQmr prefix
 * on this line and with every other Q-suffixed macro in this file (the 32-bit
 * LEALmr above correctly uses _r4). */
1605 gbeauche 1.22 #define LEAQmr(MD, MB, MI, MS, RD) (_REXQmr(MB, MI, RD), _O_r_X (0x8d ,_r8(RD) ,MD,MB,MI,MS ))
1606 gbeauche 1.1
1607     #define BSWAPLr(R) (_REXLrr(0, R), _OOr (0x0fc8,_r4(R) ))
1608     #define BSWAPQr(R) (_REXQrr(0, R), _OOr (0x0fc8,_r8(R) ))
1609    
1610     #define CLC() _O (0xf8 )
1611     #define STC() _O (0xf9 )
1612 gbeauche 1.20 #define CMC() _O (0xf5 )
1613 gbeauche 1.1
1614     #define CLD() _O (0xfc )
1615     #define STD() _O (0xfd )
1616    
1617     #define CBTW() (_d16(), _O (0x98 ))
1618     #define CWTL() _O (0x98 )
/* FIX: wrap the comma expression in parentheses so _m64only() receives a single
 * macro argument (same pattern as POPQr/PUSHQr above). CLTQ/CDQE = REX.W 98,
 * sign-extends EAX into RAX; 64-bit mode only. */
1619     #define CLTQ() _m64only((_REXQrr(0, 0), _O (0x98 )))
1620    
1621     #define CBW CBTW
1622     #define CWDE CWTL
1623     #define CDQE CLTQ
1624    
1625     #define CWTD() (_d16(), _O (0x99 ))
1626     #define CLTD() _O (0x99 )
/* FIX: wrap the comma expression in parentheses so _m64only() receives a single
 * macro argument (same pattern as POPQr/PUSHQr above). CQTO/CQO = REX.W 99,
 * sign-extends RAX into RDX:RAX; 64-bit mode only. */
1627     #define CQTO() _m64only((_REXQrr(0, 0), _O (0x99 )))
1628    
1629     #define CWD CWTD
1630     #define CDQ CLTD
1631     #define CQO CQTO
1632    
1633 gbeauche 1.19 #define LAHF() _O (0x9f )
1634     #define SAHF() _O (0x9e )
1635 gbeauche 1.1
1636 gbeauche 1.2 /* _format Opcd ,Mod ,r ,m ,mem=dsp+sib ,imm... */
1637    
1638 gbeauche 1.6 #define CPUID() _OO (0x0fa2 )
/* FIX: RDTSC encodes as 0F 31 (two-byte 0F-escape opcode, like CPUID = 0F A2
 * directly above); the previous value 0xff31 emitted a wrong first opcode byte. */
1639 gbeauche 1.1 #define RDTSC() _OO (0x0f31 )
1640    
1641     #define ENTERii(W, B) _O_W_B (0xc8 ,_su16(W),_su8(B))
1642    
1643     #define LEAVE() _O (0xc9 )
1644     #define RET() _O (0xc3 )
1645     #define RETi(IM) _O_W (0xc2 ,_su16(IM))
1646    
1647     #define NOP() _O (0x90 )
1648 gbeauche 1.3
1649    
1650 gbeauche 1.22 /* --- Media 64-bit instructions ------------------------------------------- */
1651    
1652 gbeauche 1.29 enum {
1653     X86_MMX_PABSB = 0x1c, // 2P
1654     X86_MMX_PABSW = 0x1d, // 2P
1655     X86_MMX_PABSD = 0x1e, // 2P
1656     X86_MMX_PACKSSWB = 0x63,
1657     X86_MMX_PACKSSDW = 0x6b,
1658     X86_MMX_PACKUSWB = 0x67,
1659     X86_MMX_PADDB = 0xfc,
1660     X86_MMX_PADDW = 0xfd,
1661     X86_MMX_PADDD = 0xfe,
1662     X86_MMX_PADDQ = 0xd4,
1663     X86_MMX_PADDSB = 0xec,
1664     X86_MMX_PADDSW = 0xed,
1665     X86_MMX_PADDUSB = 0xdc,
1666     X86_MMX_PADDUSW = 0xdd,
1667     X86_MMX_PAND = 0xdb,
1668     X86_MMX_PANDN = 0xdf,
1669     X86_MMX_PAVGB = 0xe0,
1670     X86_MMX_PAVGW = 0xe3,
1671     X86_MMX_PCMPEQB = 0x74,
1672     X86_MMX_PCMPEQW = 0x75,
1673     X86_MMX_PCMPEQD = 0x76,
1674     X86_MMX_PCMPGTB = 0x64,
1675     X86_MMX_PCMPGTW = 0x65,
1676     X86_MMX_PCMPGTD = 0x66,
1677     X86_MMX_PEXTRW = 0xc5, // 64, /r ib
1678     X86_MMX_PHADDW = 0x01, // 2P
1679     X86_MMX_PHADDD = 0x02, // 2P
1680     X86_MMX_PHADDSW = 0x03, // 2P
1681     X86_MMX_PHSUBW = 0x05, // 2P
1682     X86_MMX_PHSUBD = 0x06, // 2P
1683     X86_MMX_PHSUBSW = 0x07, // 2P
1684     X86_MMX_PINSRW = 0xc4, // 64, /r ib
1685     X86_MMX_PMADDUBSW = 0x04, // 2P
1686     X86_MMX_PMADDWD = 0xf5,
1687     X86_MMX_PMAXSW = 0xee,
1688     X86_MMX_PMAXUB = 0xde,
1689     X86_MMX_PMINSW = 0xea,
1690     X86_MMX_PMINUB = 0xda,
1691     X86_MMX_PMOVMSKB = 0xd7, // 64
1692     X86_MMX_PMULHRSW = 0x0b, // 2P
1693     X86_MMX_PMULHUW = 0xe4,
1694     X86_MMX_PMULHW = 0xe5,
1695     X86_MMX_PMULLW = 0xd5,
1696     X86_MMX_PMULUDQ = 0xf4,
1697     X86_MMX_POR = 0xeb,
1698     X86_MMX_PSADBW = 0xf6,
1699     X86_MMX_PSHUFB = 0x00, // 2P
1700     X86_MMX_PSHUFW = 0x70, // /r ib
1701     X86_MMX_PSIGNB = 0x08, // 2P
1702     X86_MMX_PSIGNW = 0x09, // 2P
1703     X86_MMX_PSIGND = 0x0a, // 2P
1704     X86_MMX_PSLLW = 0xf1,
1705     X86_MMX_PSLLWi = 0x71, // /6 ib
1706     X86_MMX_PSLLD = 0xf2,
1707     X86_MMX_PSLLDi = 0x72, // /6 ib
1708     X86_MMX_PSLLQ = 0xf3,
1709     X86_MMX_PSLLQi = 0x73, // /6 ib
1710     X86_MMX_PSRAW = 0xe1,
1711     X86_MMX_PSRAWi = 0x71, // /4 ib
1712     X86_MMX_PSRAD = 0xe2,
1713     X86_MMX_PSRADi = 0x72, // /4 ib
1714     X86_MMX_PSRLW = 0xd1,
1715     X86_MMX_PSRLWi = 0x71, // /2 ib
1716     X86_MMX_PSRLD = 0xd2,
1717     X86_MMX_PSRLDi = 0x72, // /2 ib
1718     X86_MMX_PSRLQ = 0xd3,
1719     X86_MMX_PSRLQi = 0x73, // /2 ib
1720     X86_MMX_PSUBB = 0xf8,
1721     X86_MMX_PSUBW = 0xf9,
1722     X86_MMX_PSUBD = 0xfa,
1723     X86_MMX_PSUBQ = 0xfb,
1724     X86_MMX_PSUBSB = 0xe8,
1725     X86_MMX_PSUBSW = 0xe9,
1726     X86_MMX_PSUBUSB = 0xd8,
1727     X86_MMX_PSUBUSW = 0xd9,
1728     X86_MMX_PUNPCKHBW = 0x68,
1729     X86_MMX_PUNPCKHWD = 0x69,
1730     X86_MMX_PUNPCKHDQ = 0x6a,
1731     X86_MMX_PUNPCKLBW = 0x60,
1732     X86_MMX_PUNPCKLWD = 0x61,
1733     X86_MMX_PUNPCKLDQ = 0x62,
1734     X86_MMX_PXOR = 0xef,
1735     };
1736    
1737     #define __MMXLrr(OP,RS,RSA,RD,RDA) (_REXLrr(RD, RS), _OO_Mrm (0x0f00|(OP) ,_b11,RDA(RD),RSA(RS) ))
1738     #define __MMXLmr(OP,MD,MB,MI,MS,RD,RDA) (_REXLmr(MB, MI, RD), _OO_r_X (0x0f00|(OP) ,RDA(RD) ,MD,MB,MI,MS ))
1739     #define __MMXLrm(OP,RS,RSA,MD,MB,MI,MS) (_REXLrm(RS, MB, MI), _OO_r_X (0x0f00|(OP) ,RSA(RS) ,MD,MB,MI,MS ))
1740     #define __MMXLirr(OP,IM,RS,RSA,RD,RDA) (_REXLrr(RD, RS), _OO_Mrm_B (0x0f00|(OP) ,_b11,RDA(RD),RSA(RS) ,_u8(IM)))
/* FIX: the REX helper referenced RS, which is not a parameter of this macro --
 * the destination register parameter is RD (matches the RDA(RD) use that follows). */
1741     #define __MMXLimr(OP,IM,MD,MB,MI,MS,RD,RDA) (_REXLmr(MB, MI, RD), _OO_r_X_B (0x0f00|(OP) ,RDA(RD) ,MD,MB,MI,MS ,_u8(IM)))
1742     #define __MMXQrr(OP,RS,RSA,RD,RDA) (_REXQrr(RD, RS), _OO_Mrm (0x0f00|(OP) ,_b11,RDA(RD),RSA(RS) ))
1743     #define __MMXQmr(OP,MD,MB,MI,MS,RD,RDA) (_REXQmr(MB, MI, RD), _OO_r_X (0x0f00|(OP) ,RDA(RD) ,MD,MB,MI,MS ))
1744     #define __MMXQrm(OP,RS,RSA,MD,MB,MI,MS) (_REXQrm(RS, MB, MI), _OO_r_X (0x0f00|(OP) ,RSA(RS) ,MD,MB,MI,MS ))
1745     #define __MMXQirr(OP,IM,RS,RSA,RD,RDA) (_REXQrr(RD, RS), _OO_Mrm_B (0x0f00|(OP) ,_b11,RDA(RD),RSA(RS) ,_u8(IM)))
/* FIX: the REX helper referenced RS, which is not a parameter of this macro --
 * the destination register parameter is RD (matches the RDA(RD) use that follows). */
1746     #define __MMXQimr(OP,IM,MD,MB,MI,MS,RD,RDA) (_REXQmr(MB, MI, RD), _OO_r_X_B (0x0f00|(OP) ,RDA(RD) ,MD,MB,MI,MS ,_u8(IM)))
1747     #define __MMX1Lrr(PX,OP,RS,RSA,RD,RDA) (_REXLrr(RD, RS), _B(0x0f),_OO_Mrm(((PX)<<8)|(OP) ,_b11,RDA(RD),RSA(RS) ))
1748     #define __MMX1Lmr(PX,OP,MD,MB,MI,MS,RD,RDA) (_REXLmr(MB, MI, RD), _B(0x0f),_OO_r_X(((PX)<<8)|(OP) ,RDA(RD) ,MD,MB,MI,MS ))
1749     #define __MMX1Lrm(PX,OP,RS,RSA,MD,MB,MI,MS) (_REXLrm(RS, MB, MI), _B(0x0f),_OO_r_X(((PX)<<8)|(OP) ,RSA(RS) ,MD,MB,MI,MS ))
1750    
1751     #define _MMXLrr(OP,RS,RD) __MMXLrr(OP,RS,_rM,RD,_rM)
1752     #define _MMXLmr(OP,MD,MB,MI,MS,RD) __MMXLmr(OP,MD,MB,MI,MS,RD,_rM)
1753     #define _MMXLrm(OP,RS,MD,MB,MI,MS) __MMXLrm(OP,RS,_rM,MD,MB,MI,MS)
1754     #define _MMXQrr(OP,RS,RD) __MMXQrr(OP,RS,_rM,RD,_rM)
1755     #define _MMXQmr(OP,MD,MB,MI,MS,RD) __MMXQmr(OP,MD,MB,MI,MS,RD,_rM)
1756     #define _MMXQrm(OP,RS,MD,MB,MI,MS) __MMXQrm(OP,RS,_rM,MD,MB,MI,MS)
1757     #define _2P_MMXLrr(OP,RS,RD) __MMX1Lrr(0x38, OP,RS,_rM,RD,_rM)
1758     #define _2P_MMXLmr(OP,MD,MB,MI,MS,RD) __MMX1Lmr(0x38, OP,MD,MB,MI,MS,RD,_rM)
1759     #define _2P_MMXLrm(OP,RS,MD,MB,MI,MS) __MMX1Lrm(0x38, OP,RS,_rM,MD,MB,MI,MS)
1760    
1761     #define MMX_MOVDMDrr(RS, RD) __MMXLrr(0x6e, RS,_r4, RD,_rM)
1762     #define MMX_MOVQMDrr(RS, RD) __MMXQrr(0x6e, RS,_r8, RD,_rM)
1763     #define MMX_MOVDMSrr(RS, RD) __MMXLrr(0x7e, RD,_r4, RS,_rM)
/* MMX register/memory moves.
 * MMX_MOVQMSrr moves 64 bits from an MMX register into a 64-bit GPR
 * (opcode 0x7e with REX.W, i.e. the AMD64 MOVQ r64,mm form).
 */
#define MMX_MOVQMSrr(RS, RD)		__MMXQrr(0x7e, RD,_r8, RS,_rM)

#define MMX_MOVDmr(MD, MB, MI, MS, RD)	_MMXLmr(0x6e, MD, MB, MI, MS, RD)
#define MMX_MOVDrm(RS, MD, MB, MI, MS)	_MMXLrm(0x7e, RS, MD, MB, MI, MS)
#define MMX_MOVQrr(RS, RD)		_MMXLrr(0x6f, RS, RD)
#define MMX_MOVQmr(MD, MB, MI, MS, RD)	_MMXLmr(0x6f, MD, MB, MI, MS, RD)
#define MMX_MOVQrm(RS, MD, MB, MI, MS)	_MMXLrm(0x7f, RS, MD, MB, MI, MS)

/* Original MMX instruction set.
 * Naming: *rr = reg,reg; *mr = mem,reg (load); *ir = imm,reg.
 * The X86_MMX_* opcode constants are declared earlier in this file.
 */
#define MMX_PACKSSWBrr(RS, RD)		_MMXLrr(X86_MMX_PACKSSWB,RS,RD)
#define MMX_PACKSSWBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PACKSSWB, MD, MB, MI, MS, RD)
#define MMX_PACKSSDWrr(RS, RD)		_MMXLrr(X86_MMX_PACKSSDW,RS,RD)
#define MMX_PACKSSDWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PACKSSDW, MD, MB, MI, MS, RD)
#define MMX_PACKUSWBrr(RS, RD)		_MMXLrr(X86_MMX_PACKUSWB,RS,RD)
#define MMX_PACKUSWBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PACKUSWB, MD, MB, MI, MS, RD)
#define MMX_PADDBrr(RS, RD)		_MMXLrr(X86_MMX_PADDB,RS,RD)
#define MMX_PADDBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDB, MD, MB, MI, MS, RD)
#define MMX_PADDWrr(RS, RD)		_MMXLrr(X86_MMX_PADDW,RS,RD)
#define MMX_PADDWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDW, MD, MB, MI, MS, RD)
#define MMX_PADDDrr(RS, RD)		_MMXLrr(X86_MMX_PADDD,RS,RD)
#define MMX_PADDDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDD, MD, MB, MI, MS, RD)
#define MMX_PADDQrr(RS, RD)		_MMXLrr(X86_MMX_PADDQ,RS,RD)
#define MMX_PADDQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDQ, MD, MB, MI, MS, RD)
#define MMX_PADDSBrr(RS, RD)		_MMXLrr(X86_MMX_PADDSB,RS,RD)
#define MMX_PADDSBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDSB, MD, MB, MI, MS, RD)
#define MMX_PADDSWrr(RS, RD)		_MMXLrr(X86_MMX_PADDSW,RS,RD)
#define MMX_PADDSWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDSW, MD, MB, MI, MS, RD)
#define MMX_PADDUSBrr(RS, RD)		_MMXLrr(X86_MMX_PADDUSB,RS,RD)
#define MMX_PADDUSBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDUSB, MD, MB, MI, MS, RD)
#define MMX_PADDUSWrr(RS, RD)		_MMXLrr(X86_MMX_PADDUSW,RS,RD)
#define MMX_PADDUSWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PADDUSW, MD, MB, MI, MS, RD)
#define MMX_PANDrr(RS, RD)		_MMXLrr(X86_MMX_PAND,RS,RD)
#define MMX_PANDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PAND, MD, MB, MI, MS, RD)
#define MMX_PANDNrr(RS, RD)		_MMXLrr(X86_MMX_PANDN,RS,RD)
#define MMX_PANDNmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PANDN, MD, MB, MI, MS, RD)
#define MMX_PAVGBrr(RS, RD)		_MMXLrr(X86_MMX_PAVGB,RS,RD)
#define MMX_PAVGBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PAVGB, MD, MB, MI, MS, RD)
#define MMX_PAVGWrr(RS, RD)		_MMXLrr(X86_MMX_PAVGW,RS,RD)
#define MMX_PAVGWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PAVGW, MD, MB, MI, MS, RD)
#define MMX_PCMPEQBrr(RS, RD)		_MMXLrr(X86_MMX_PCMPEQB,RS,RD)
#define MMX_PCMPEQBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PCMPEQB, MD, MB, MI, MS, RD)
#define MMX_PCMPEQWrr(RS, RD)		_MMXLrr(X86_MMX_PCMPEQW,RS,RD)
#define MMX_PCMPEQWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PCMPEQW, MD, MB, MI, MS, RD)
#define MMX_PCMPEQDrr(RS, RD)		_MMXLrr(X86_MMX_PCMPEQD,RS,RD)
#define MMX_PCMPEQDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PCMPEQD, MD, MB, MI, MS, RD)
#define MMX_PCMPGTBrr(RS, RD)		_MMXLrr(X86_MMX_PCMPGTB,RS,RD)
#define MMX_PCMPGTBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PCMPGTB, MD, MB, MI, MS, RD)
#define MMX_PCMPGTWrr(RS, RD)		_MMXLrr(X86_MMX_PCMPGTW,RS,RD)
#define MMX_PCMPGTWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PCMPGTW, MD, MB, MI, MS, RD)
#define MMX_PCMPGTDrr(RS, RD)		_MMXLrr(X86_MMX_PCMPGTD,RS,RD)
#define MMX_PCMPGTDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PCMPGTD, MD, MB, MI, MS, RD)
#define MMX_PMADDWDrr(RS, RD)		_MMXLrr(X86_MMX_PMADDWD,RS,RD)
#define MMX_PMADDWDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMADDWD, MD, MB, MI, MS, RD)
#define MMX_PMAXSWrr(RS, RD)		_MMXLrr(X86_MMX_PMAXSW,RS,RD)
#define MMX_PMAXSWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMAXSW, MD, MB, MI, MS, RD)
#define MMX_PMAXUBrr(RS, RD)		_MMXLrr(X86_MMX_PMAXUB,RS,RD)
#define MMX_PMAXUBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMAXUB, MD, MB, MI, MS, RD)
#define MMX_PMINSWrr(RS, RD)		_MMXLrr(X86_MMX_PMINSW,RS,RD)
#define MMX_PMINSWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMINSW, MD, MB, MI, MS, RD)
#define MMX_PMINUBrr(RS, RD)		_MMXLrr(X86_MMX_PMINUB,RS,RD)
#define MMX_PMINUBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMINUB, MD, MB, MI, MS, RD)
#define MMX_PMULHUWrr(RS, RD)		_MMXLrr(X86_MMX_PMULHUW,RS,RD)
#define MMX_PMULHUWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMULHUW, MD, MB, MI, MS, RD)
#define MMX_PMULHWrr(RS, RD)		_MMXLrr(X86_MMX_PMULHW,RS,RD)
#define MMX_PMULHWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMULHW, MD, MB, MI, MS, RD)
#define MMX_PMULLWrr(RS, RD)		_MMXLrr(X86_MMX_PMULLW,RS,RD)
#define MMX_PMULLWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMULLW, MD, MB, MI, MS, RD)
#define MMX_PMULUDQrr(RS, RD)		_MMXLrr(X86_MMX_PMULUDQ,RS,RD)
#define MMX_PMULUDQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PMULUDQ, MD, MB, MI, MS, RD)
#define MMX_PORrr(RS, RD)		_MMXLrr(X86_MMX_POR,RS,RD)
#define MMX_PORmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_POR, MD, MB, MI, MS, RD)
#define MMX_PSADBWrr(RS, RD)		_MMXLrr(X86_MMX_PSADBW,RS,RD)
#define MMX_PSADBWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSADBW, MD, MB, MI, MS, RD)
/* Shift-by-immediate forms encode the operation in the ModRM /r field:
 * /6 (_b110) = logical left, /4 (_b100) = arithmetic right,
 * /2 (_b010) = logical right.
 */
#define MMX_PSLLWir(IM, RD)		__MMXLirr(X86_MMX_PSLLWi, IM, RD,_rM, _b110,_rN)
#define MMX_PSLLWrr(RS, RD)		_MMXLrr(X86_MMX_PSLLW,RS,RD)
#define MMX_PSLLWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSLLW, MD, MB, MI, MS, RD)
#define MMX_PSLLDir(IM, RD)		__MMXLirr(X86_MMX_PSLLDi, IM, RD,_rM, _b110,_rN)
#define MMX_PSLLDrr(RS, RD)		_MMXLrr(X86_MMX_PSLLD,RS,RD)
#define MMX_PSLLDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSLLD, MD, MB, MI, MS, RD)
#define MMX_PSLLQir(IM, RD)		__MMXLirr(X86_MMX_PSLLQi, IM, RD,_rM, _b110,_rN)
#define MMX_PSLLQrr(RS, RD)		_MMXLrr(X86_MMX_PSLLQ,RS,RD)
#define MMX_PSLLQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSLLQ, MD, MB, MI, MS, RD)
#define MMX_PSRAWir(IM, RD)		__MMXLirr(X86_MMX_PSRAWi, IM, RD,_rM, _b100,_rN)
#define MMX_PSRAWrr(RS, RD)		_MMXLrr(X86_MMX_PSRAW,RS,RD)
#define MMX_PSRAWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSRAW, MD, MB, MI, MS, RD)
#define MMX_PSRADir(IM, RD)		__MMXLirr(X86_MMX_PSRADi, IM, RD,_rM, _b100,_rN)
#define MMX_PSRADrr(RS, RD)		_MMXLrr(X86_MMX_PSRAD,RS,RD)
#define MMX_PSRADmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSRAD, MD, MB, MI, MS, RD)
#define MMX_PSRLWir(IM, RD)		__MMXLirr(X86_MMX_PSRLWi, IM, RD,_rM, _b010,_rN)
#define MMX_PSRLWrr(RS, RD)		_MMXLrr(X86_MMX_PSRLW,RS,RD)
#define MMX_PSRLWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSRLW, MD, MB, MI, MS, RD)
#define MMX_PSRLDir(IM, RD)		__MMXLirr(X86_MMX_PSRLDi, IM, RD,_rM, _b010,_rN)
#define MMX_PSRLDrr(RS, RD)		_MMXLrr(X86_MMX_PSRLD,RS,RD)
#define MMX_PSRLDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSRLD, MD, MB, MI, MS, RD)
#define MMX_PSRLQir(IM, RD)		__MMXLirr(X86_MMX_PSRLQi, IM, RD,_rM, _b010,_rN)
#define MMX_PSRLQrr(RS, RD)		_MMXLrr(X86_MMX_PSRLQ,RS,RD)
#define MMX_PSRLQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSRLQ, MD, MB, MI, MS, RD)
#define MMX_PSUBBrr(RS, RD)		_MMXLrr(X86_MMX_PSUBB,RS,RD)
#define MMX_PSUBBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBB, MD, MB, MI, MS, RD)
#define MMX_PSUBWrr(RS, RD)		_MMXLrr(X86_MMX_PSUBW,RS,RD)
#define MMX_PSUBWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBW, MD, MB, MI, MS, RD)
#define MMX_PSUBDrr(RS, RD)		_MMXLrr(X86_MMX_PSUBD,RS,RD)
#define MMX_PSUBDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBD, MD, MB, MI, MS, RD)
#define MMX_PSUBQrr(RS, RD)		_MMXLrr(X86_MMX_PSUBQ,RS,RD)
#define MMX_PSUBQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBQ, MD, MB, MI, MS, RD)
#define MMX_PSUBSBrr(RS, RD)		_MMXLrr(X86_MMX_PSUBSB,RS,RD)
#define MMX_PSUBSBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBSB, MD, MB, MI, MS, RD)
#define MMX_PSUBSWrr(RS, RD)		_MMXLrr(X86_MMX_PSUBSW,RS,RD)
#define MMX_PSUBSWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBSW, MD, MB, MI, MS, RD)
#define MMX_PSUBUSBrr(RS, RD)		_MMXLrr(X86_MMX_PSUBUSB,RS,RD)
#define MMX_PSUBUSBmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBUSB, MD, MB, MI, MS, RD)
#define MMX_PSUBUSWrr(RS, RD)		_MMXLrr(X86_MMX_PSUBUSW,RS,RD)
#define MMX_PSUBUSWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PSUBUSW, MD, MB, MI, MS, RD)
#define MMX_PUNPCKHBWrr(RS, RD)		_MMXLrr(X86_MMX_PUNPCKHBW,RS,RD)
#define MMX_PUNPCKHBWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PUNPCKHBW, MD, MB, MI, MS, RD)
#define MMX_PUNPCKHWDrr(RS, RD)		_MMXLrr(X86_MMX_PUNPCKHWD,RS,RD)
#define MMX_PUNPCKHWDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PUNPCKHWD, MD, MB, MI, MS, RD)
#define MMX_PUNPCKHDQrr(RS, RD)		_MMXLrr(X86_MMX_PUNPCKHDQ,RS,RD)
#define MMX_PUNPCKHDQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PUNPCKHDQ, MD, MB, MI, MS, RD)
#define MMX_PUNPCKLBWrr(RS, RD)		_MMXLrr(X86_MMX_PUNPCKLBW,RS,RD)
#define MMX_PUNPCKLBWmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PUNPCKLBW, MD, MB, MI, MS, RD)
#define MMX_PUNPCKLWDrr(RS, RD)		_MMXLrr(X86_MMX_PUNPCKLWD,RS,RD)
#define MMX_PUNPCKLWDmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PUNPCKLWD, MD, MB, MI, MS, RD)
#define MMX_PUNPCKLDQrr(RS, RD)		_MMXLrr(X86_MMX_PUNPCKLDQ,RS,RD)
#define MMX_PUNPCKLDQmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PUNPCKLDQ, MD, MB, MI, MS, RD)
#define MMX_PXORrr(RS, RD)		_MMXLrr(X86_MMX_PXOR,RS,RD)
#define MMX_PXORmr(MD,MB,MI,MS,RD)	_MMXLmr(X86_MMX_PXOR, MD, MB, MI, MS, RD)

/* Shuffle / extract / insert.  L suffix = 32-bit GPR operand,
 * Q suffix = 64-bit GPR operand (REX.W).
 */
#define MMX_PSHUFWirr(IM, RS, RD)	__MMXLirr(X86_MMX_PSHUFW, IM, RS,_rM, RD,_rM)
#define MMX_PSHUFWimr(IM, MD, MB, MI, MS, RD)	__MMXLimr(X86_MMX_PSHUFW, IM, MD, MB, MI, MS, RD,_rM)
#define MMX_PEXTRWLirr(IM, RS, RD)	__MMXLirr(X86_MMX_PEXTRW, IM, RS,_rM, RD,_r4)
#define MMX_PEXTRWQirr(IM, RS, RD)	__MMXQirr(X86_MMX_PEXTRW, IM, RS,_rM, RD,_r8)
#define MMX_PINSRWLirr(IM, RS, RD)	__MMXLirr(X86_MMX_PINSRW, IM, RS,_r4, RD,_rM)
/* NOTE(review): the register-class arguments of the two *imr forms below
 * (_r4 resp. _r8) differ from the PSHUFWimr pattern (_rM); verify against
 * the __MMX[LQ]imr helper definitions earlier in this file.
 */
#define MMX_PINSRWLimr(IM, MD, MB, MI, MS, RD)	__MMXLimr(X86_MMX_PINSRW, IM, MD, MB, MI, MS, RD,_r4)
#define MMX_PINSRWQirr(IM, RS, RD)	__MMXQirr(X86_MMX_PINSRW, IM, RS,_r4, RD,_rM)
#define MMX_PINSRWQimr(IM, MD, MB, MI, MS, RD)	__MMXQimr(X86_MMX_PINSRW, IM, MD, MB, MI, MS, RD,_r8)

/* MMX forms of the instructions introduced by the SSSE3 ISA
 * (two-byte 0x0f 0x38 opcode map, hence the _2P_ helpers).
 */
#define MMX_PABSBrr(RS, RD)		_2P_MMXLrr(X86_MMX_PABSB,RS,RD)
#define MMX_PABSBmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PABSB, MD, MB, MI, MS, RD)
#define MMX_PABSWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PABSW,RS,RD)
#define MMX_PABSWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PABSW, MD, MB, MI, MS, RD)
#define MMX_PABSDrr(RS, RD)		_2P_MMXLrr(X86_MMX_PABSD,RS,RD)
#define MMX_PABSDmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PABSD, MD, MB, MI, MS, RD)
#define MMX_PHADDWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PHADDW,RS,RD)
#define MMX_PHADDWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PHADDW, MD, MB, MI, MS, RD)
#define MMX_PHADDDrr(RS, RD)		_2P_MMXLrr(X86_MMX_PHADDD,RS,RD)
#define MMX_PHADDDmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PHADDD, MD, MB, MI, MS, RD)
#define MMX_PHADDSWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PHADDSW,RS,RD)
#define MMX_PHADDSWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PHADDSW, MD, MB, MI, MS, RD)
#define MMX_PHSUBWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PHSUBW,RS,RD)
#define MMX_PHSUBWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PHSUBW, MD, MB, MI, MS, RD)
#define MMX_PHSUBDrr(RS, RD)		_2P_MMXLrr(X86_MMX_PHSUBD,RS,RD)
#define MMX_PHSUBDmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PHSUBD, MD, MB, MI, MS, RD)
#define MMX_PHSUBSWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PHSUBSW,RS,RD)
#define MMX_PHSUBSWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PHSUBSW, MD, MB, MI, MS, RD)
#define MMX_PMADDUBSWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PMADDUBSW,RS,RD)
#define MMX_PMADDUBSWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PMADDUBSW, MD, MB, MI, MS, RD)
#define MMX_PMULHRSWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PMULHRSW,RS,RD)
#define MMX_PMULHRSWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PMULHRSW, MD, MB, MI, MS, RD)
#define MMX_PSHUFBrr(RS, RD)		_2P_MMXLrr(X86_MMX_PSHUFB,RS,RD)
#define MMX_PSHUFBmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PSHUFB, MD, MB, MI, MS, RD)
#define MMX_PSIGNBrr(RS, RD)		_2P_MMXLrr(X86_MMX_PSIGNB,RS,RD)
#define MMX_PSIGNBmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PSIGNB, MD, MB, MI, MS, RD)
#define MMX_PSIGNWrr(RS, RD)		_2P_MMXLrr(X86_MMX_PSIGNW,RS,RD)
#define MMX_PSIGNWmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PSIGNW, MD, MB, MI, MS, RD)
#define MMX_PSIGNDrr(RS, RD)		_2P_MMXLrr(X86_MMX_PSIGND,RS,RD)
#define MMX_PSIGNDmr(MD,MB,MI,MS,RD)	_2P_MMXLmr(X86_MMX_PSIGND, MD, MB, MI, MS, RD)

/* EMMS: empty the MMX state so the x87 FPU can be used again. */
#define EMMS()				_OO(0x0f77)
1934    
1935    
/* --- Media 128-bit instructions ------------------------------------------ */

/* imm8 condition codes for the SSE CMPcc{P,S}{S,D} compare instructions.
 * The hardware only encodes the "less" family, so GT/GE (and NGT/NGE)
 * deliberately alias LT/LE (NLT/NLE): a "greater" compare is obtained by
 * using the aliased predicate with the two operands swapped.
 */
enum {
  X86_SSE_CC_EQ  = 0,	/* equal */
  X86_SSE_CC_LT  = 1,	/* less-than */
  X86_SSE_CC_GT  = 1,	/* alias of LT -- swap operands */
  X86_SSE_CC_LE  = 2,	/* less-or-equal */
  X86_SSE_CC_GE  = 2,	/* alias of LE -- swap operands */
  X86_SSE_CC_U   = 3,	/* unordered */
  X86_SSE_CC_NEQ = 4,	/* not-equal */
  X86_SSE_CC_NLT = 5,
  X86_SSE_CC_NGT = 5,	/* alias of NLT -- swap operands */
  X86_SSE_CC_NLE = 6,
  X86_SSE_CC_NGE = 6,	/* alias of NLE -- swap operands */
  X86_SSE_CC_O   = 7	/* ordered */
};
1952    
/* Opcode bytes (the byte following the mandatory 0x0f escape) for the
 * SSE/SSE2 instruction families.  Several conversion instructions share
 * the same opcode byte; the mandatory prefix (none, 0x66, 0xf2 or 0xf3)
 * emitted by the _SSE*{rr,mr,rm} wrappers below selects the actual
 * instruction.  Values are unchanged; only the trailing comma after the
 * last enumerator was removed (it is invalid in C90/C++98 and triggers
 * pedantic diagnostics).
 */
enum {
  X86_SSE_UCOMI      = 0x2e,
  X86_SSE_COMI       = 0x2f,
  X86_SSE_CMP        = 0xc2,
  X86_SSE_SQRT       = 0x51,
  X86_SSE_RSQRT      = 0x52,
  X86_SSE_RCP        = 0x53,
  X86_SSE_AND        = 0x54,
  X86_SSE_ANDN       = 0x55,
  X86_SSE_OR         = 0x56,
  X86_SSE_XOR        = 0x57,
  X86_SSE_ADD        = 0x58,
  X86_SSE_MUL        = 0x59,
  X86_SSE_SUB        = 0x5c,
  X86_SSE_MIN        = 0x5d,
  X86_SSE_DIV        = 0x5e,
  X86_SSE_MAX        = 0x5f,
  /* Conversions -- disambiguated by the mandatory prefix byte. */
  X86_SSE_CVTDQ2PD   = 0xe6,
  X86_SSE_CVTDQ2PS   = 0x5b,
  X86_SSE_CVTPD2DQ   = 0xe6,
  X86_SSE_CVTPD2PI   = 0x2d,
  X86_SSE_CVTPD2PS   = 0x5a,
  X86_SSE_CVTPI2PD   = 0x2a,
  X86_SSE_CVTPI2PS   = 0x2a,
  X86_SSE_CVTPS2DQ   = 0x5b,
  X86_SSE_CVTPS2PD   = 0x5a,
  X86_SSE_CVTPS2PI   = 0x2d,
  X86_SSE_CVTSD2SI   = 0x2d,
  X86_SSE_CVTSD2SS   = 0x5a,
  X86_SSE_CVTSI2SD   = 0x2a,
  X86_SSE_CVTSI2SS   = 0x2a,
  X86_SSE_CVTSS2SD   = 0x5a,
  X86_SSE_CVTSS2SI   = 0x2d,
  X86_SSE_CVTTPD2PI  = 0x2c,
  X86_SSE_CVTTPD2DQ  = 0xe6,
  X86_SSE_CVTTPS2DQ  = 0x5b,
  X86_SSE_CVTTPS2PI  = 0x2c,
  X86_SSE_CVTTSD2SI  = 0x2c,
  X86_SSE_CVTTSS2SI  = 0x2c,
  X86_SSE_MOVMSK     = 0x50,
  /* Packed-integer (SSE2) operations. */
  X86_SSE_PACKSSDW   = 0x6b,
  X86_SSE_PACKSSWB   = 0x63,
  X86_SSE_PACKUSWB   = 0x67,
  X86_SSE_PADDB      = 0xfc,
  X86_SSE_PADDD      = 0xfe,
  X86_SSE_PADDQ      = 0xd4,
  X86_SSE_PADDSB     = 0xec,
  X86_SSE_PADDSW     = 0xed,
  X86_SSE_PADDUSB    = 0xdc,
  X86_SSE_PADDUSW    = 0xdd,
  X86_SSE_PADDW      = 0xfd,
  X86_SSE_PAND       = 0xdb,
  X86_SSE_PANDN      = 0xdf,
  X86_SSE_PAVGB      = 0xe0,
  X86_SSE_PAVGW      = 0xe3,
  X86_SSE_PCMPEQB    = 0x74,
  X86_SSE_PCMPEQD    = 0x76,
  X86_SSE_PCMPEQW    = 0x75,
  X86_SSE_PCMPGTB    = 0x64,
  X86_SSE_PCMPGTD    = 0x66,
  X86_SSE_PCMPGTW    = 0x65,
  X86_SSE_PMADDWD    = 0xf5,
  X86_SSE_PMAXSW     = 0xee,
  X86_SSE_PMAXUB     = 0xde,
  X86_SSE_PMINSW     = 0xea,
  X86_SSE_PMINUB     = 0xda,
  X86_SSE_PMOVMSKB   = 0xd7,
  X86_SSE_PMULHUW    = 0xe4,
  X86_SSE_PMULHW     = 0xe5,
  X86_SSE_PMULLW     = 0xd5,
  X86_SSE_PMULUDQ    = 0xf4,
  X86_SSE_POR        = 0xeb,
  X86_SSE_PSADBW     = 0xf6,
  X86_SSE_PSLLD      = 0xf2,
  X86_SSE_PSLLQ      = 0xf3,
  X86_SSE_PSLLW      = 0xf1,
  X86_SSE_PSRAD      = 0xe2,
  X86_SSE_PSRAW      = 0xe1,
  X86_SSE_PSRLD      = 0xd2,
  X86_SSE_PSRLQ      = 0xd3,
  X86_SSE_PSRLW      = 0xd1,
  X86_SSE_PSUBB      = 0xf8,
  X86_SSE_PSUBD      = 0xfa,
  X86_SSE_PSUBQ      = 0xfb,
  X86_SSE_PSUBSB     = 0xe8,
  X86_SSE_PSUBSW     = 0xe9,
  X86_SSE_PSUBUSB    = 0xd8,
  X86_SSE_PSUBUSW    = 0xd9,
  X86_SSE_PSUBW      = 0xf9,
  X86_SSE_PUNPCKHBW  = 0x68,
  X86_SSE_PUNPCKHDQ  = 0x6a,
  X86_SSE_PUNPCKHQDQ = 0x6d,
  X86_SSE_PUNPCKHWD  = 0x69,
  X86_SSE_PUNPCKLBW  = 0x60,
  X86_SSE_PUNPCKLDQ  = 0x62,
  X86_SSE_PUNPCKLQDQ = 0x6c,
  X86_SSE_PUNPCKLWD  = 0x61,
  X86_SSE_PXOR       = 0xef,
  /* SSSE3: second byte of the 3-byte opcode 0x0f 0x38 <op>. */
  X86_SSSE3_PSHUFB   = 0x00
};
2053    
/*	_format		Opcd		,Mod	,r	,m	,mem=dsp+sib	,imm... */

/* SSSE3 encoders: 0x66 prefix, then 0x0f, then the two remaining opcode
 * bytes packed as ((OP1)<<8)|(OP2).
 * NOTE(review): _SSSE3Lrr passes _REXLrr(RD,RD) where the sibling macros
 * pass (RD,RS)/(MB,MI,RD) -- verify against the _REXLrr definition.
 */
#define _SSSE3Lrr(OP1,OP2,RS,RSA,RD,RDA)	(_B(0x66), _REXLrr(RD,RD), _B(0x0f), _OO_Mrm(((OP1)<<8)|(OP2), _b11, RDA(RD), RSA(RS)))
#define _SSSE3Lmr(OP1,OP2,MD,MB,MI,MS,RD,RDA)	(_B(0x66), _REXLmr(MB, MI, RD), _B(0x0f), _OO_r_X(((OP1)<<8)|(OP2), RDA(RD), MD, MB, MI, MS))
#define _SSSE3Lirr(OP1,OP2,IM,RS,RD)		(_B(0x66), _REXLrr(RD, RS), _B(0x0f), _OO_Mrm_B(((OP1)<<8)|(OP2), _b11, _rX(RD), _rX(RS), _u8(IM)))
#define _SSSE3Limr(OP1,OP2,IM,MD,MB,MI,MS,RD)	(_B(0x66), _REXLmr(MB, MI, RD), _B(0x0f), _OO_r_X_B(((OP1)<<8)|(OP2), _rX(RD), MD, MB, MI, MS, _u8(IM)))

/* Bare 0x0f-escaped encoders; L = 32-bit operand size, Q = REX.W 64-bit.
 * RSA/RDA are register-class adaptors (_rX = XMM, _rM = MMX, _r4/_r8 = GPR).
 */
#define __SSELir(OP,MO,IM,RD)			(_REXLrr(0, RD), _OO_Mrm_B(0x0f00|(OP), _b11, MO, _rX(RD), _u8(IM)))
#define __SSELim(OP,MO,IM,MD,MB,MI,MS)		(_REXLrm(0, MB, MI), _OO_r_X_B(0x0f00|(OP), MO, MD, MB, MI, MS, _u8(IM)))
#define __SSELrr(OP,RS,RSA,RD,RDA)		(_REXLrr(RD, RS), _OO_Mrm(0x0f00|(OP), _b11, RDA(RD), RSA(RS)))
#define __SSELmr(OP,MD,MB,MI,MS,RD,RDA)		(_REXLmr(MB, MI, RD), _OO_r_X(0x0f00|(OP), RDA(RD), MD, MB, MI, MS))
#define __SSELrm(OP,RS,RSA,MD,MB,MI,MS)		(_REXLrm(RS, MB, MI), _OO_r_X(0x0f00|(OP), RSA(RS), MD, MB, MI, MS))
#define __SSELirr(OP,IM,RS,RD)			(_REXLrr(RD, RS), _OO_Mrm_B(0x0f00|(OP), _b11, _rX(RD), _rX(RS), _u8(IM)))
#define __SSELimr(OP,IM,MD,MB,MI,MS,RD)		(_REXLmr(MB, MI, RD), _OO_r_X_B(0x0f00|(OP), _rX(RD), MD, MB, MI, MS, _u8(IM)))

#define __SSEQrr(OP,RS,RSA,RD,RDA)		(_REXQrr(RD, RS), _OO_Mrm(0x0f00|(OP), _b11, RDA(RD), RSA(RS)))
#define __SSEQmr(OP,MD,MB,MI,MS,RD,RDA)		(_REXQmr(MB, MI, RD), _OO_r_X(0x0f00|(OP), RDA(RD), MD, MB, MI, MS))
#define __SSEQrm(OP,RS,RSA,MD,MB,MI,MS)		(_REXQrm(RS, MB, MI), _OO_r_X(0x0f00|(OP), RSA(RS), MD, MB, MI, MS))

/* Same encoders with a mandatory prefix byte PX emitted first. */
#define _SSELrr(PX,OP,RS,RSA,RD,RDA)		(_B(PX), __SSELrr(OP, RS, RSA, RD, RDA))
#define _SSELmr(PX,OP,MD,MB,MI,MS,RD,RDA)	(_B(PX), __SSELmr(OP, MD, MB, MI, MS, RD, RDA))
#define _SSELrm(PX,OP,RS,RSA,MD,MB,MI,MS)	(_B(PX), __SSELrm(OP, RS, RSA, MD, MB, MI, MS))
#define _SSELir(PX,OP,MO,IM,RD)			(_B(PX), __SSELir(OP, MO, IM, RD))
#define _SSELim(PX,OP,MO,IM,MD,MB,MI,MS)	(_B(PX), __SSELim(OP, MO, IM, MD, MB, MI, MS))
#define _SSELirr(PX,OP,IM,RS,RD)		(_B(PX), __SSELirr(OP, IM, RS, RD))
#define _SSELimr(PX,OP,IM,MD,MB,MI,MS,RD)	(_B(PX), __SSELimr(OP, IM, MD, MB, MI, MS, RD))

#define _SSEQrr(PX,OP,RS,RSA,RD,RDA)		(_B(PX), __SSEQrr(OP, RS, RSA, RD, RDA))
#define _SSEQmr(PX,OP,MD,MB,MI,MS,RD,RDA)	(_B(PX), __SSEQmr(OP, MD, MB, MI, MS, RD, RDA))
#define _SSEQrm(PX,OP,RS,RSA,MD,MB,MI,MS)	(_B(PX), __SSEQrm(OP, RS, RSA, MD, MB, MI, MS))

/* Data-type flavours: PS = no prefix, PD = 0x66, SS = 0xf3, SD = 0xf2. */
#define _SSEPSrr(OP,RS,RD)			__SSELrr(OP, RS,_rX, RD,_rX)
#define _SSEPSmr(OP,MD,MB,MI,MS,RD)		__SSELmr(OP, MD, MB, MI, MS, RD,_rX)
#define _SSEPSrm(OP,RS,MD,MB,MI,MS)		__SSELrm(OP, RS,_rX, MD, MB, MI, MS)
#define _SSEPSirr(OP,IM,RS,RD)			__SSELirr(OP, IM, RS, RD)
#define _SSEPSimr(OP,IM,MD,MB,MI,MS,RD)		__SSELimr(OP, IM, MD, MB, MI, MS, RD)

#define _SSEPDrr(OP,RS,RD)			_SSELrr(0x66, OP, RS,_rX, RD,_rX)
#define _SSEPDmr(OP,MD,MB,MI,MS,RD)		_SSELmr(0x66, OP, MD, MB, MI, MS, RD,_rX)
#define _SSEPDrm(OP,RS,MD,MB,MI,MS)		_SSELrm(0x66, OP, RS,_rX, MD, MB, MI, MS)
#define _SSEPDirr(OP,IM,RS,RD)			_SSELirr(0x66, OP, IM, RS, RD)
#define _SSEPDimr(OP,IM,MD,MB,MI,MS,RD)		_SSELimr(0x66, OP, IM, MD, MB, MI, MS, RD)

#define _SSESSrr(OP,RS,RD)			_SSELrr(0xf3, OP, RS,_rX, RD,_rX)
#define _SSESSmr(OP,MD,MB,MI,MS,RD)		_SSELmr(0xf3, OP, MD, MB, MI, MS, RD,_rX)
#define _SSESSrm(OP,RS,MD,MB,MI,MS)		_SSELrm(0xf3, OP, RS,_rX, MD, MB, MI, MS)
#define _SSESSirr(OP,IM,RS,RD)			_SSELirr(0xf3, OP, IM, RS, RD)
#define _SSESSimr(OP,IM,MD,MB,MI,MS,RD)		_SSELimr(0xf3, OP, IM, MD, MB, MI, MS, RD)

#define _SSESDrr(OP,RS,RD)			_SSELrr(0xf2, OP, RS,_rX, RD,_rX)
#define _SSESDmr(OP,MD,MB,MI,MS,RD)		_SSELmr(0xf2, OP, MD, MB, MI, MS, RD,_rX)
#define _SSESDrm(OP,RS,MD,MB,MI,MS)		_SSELrm(0xf2, OP, RS,_rX, MD, MB, MI, MS)
#define _SSESDirr(OP,IM,RS,RD)			_SSELirr(0xf2, OP, IM, RS, RD)
#define _SSESDimr(OP,IM,MD,MB,MI,MS,RD)		_SSELimr(0xf2, OP, IM, MD, MB, MI, MS, RD)
2108 gbeauche 1.3
/* Packed/scalar SSE arithmetic, logic and compare instructions.
 * PS/PD = packed single/double, SS/SD = scalar single/double.
 */
#define ADDPSrr(RS, RD)			_SSEPSrr(X86_SSE_ADD, RS, RD)
#define ADDPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_ADD, MD, MB, MI, MS, RD)
#define ADDPDrr(RS, RD)			_SSEPDrr(X86_SSE_ADD, RS, RD)
#define ADDPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_ADD, MD, MB, MI, MS, RD)

#define ADDSSrr(RS, RD)			_SSESSrr(X86_SSE_ADD, RS, RD)
#define ADDSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_ADD, MD, MB, MI, MS, RD)
#define ADDSDrr(RS, RD)			_SSESDrr(X86_SSE_ADD, RS, RD)
#define ADDSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_ADD, MD, MB, MI, MS, RD)

#define ANDNPSrr(RS, RD)		_SSEPSrr(X86_SSE_ANDN, RS, RD)
#define ANDNPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_ANDN, MD, MB, MI, MS, RD)
#define ANDNPDrr(RS, RD)		_SSEPDrr(X86_SSE_ANDN, RS, RD)
#define ANDNPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_ANDN, MD, MB, MI, MS, RD)

#define ANDPSrr(RS, RD)			_SSEPSrr(X86_SSE_AND, RS, RD)
#define ANDPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_AND, MD, MB, MI, MS, RD)
#define ANDPDrr(RS, RD)			_SSEPDrr(X86_SSE_AND, RS, RD)
#define ANDPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_AND, MD, MB, MI, MS, RD)

/* IM is one of the X86_SSE_CC_* predicates declared above. */
#define CMPPSrr(IM, RS, RD)		_SSEPSirr(X86_SSE_CMP, IM, RS, RD)
#define CMPPSmr(IM, MD, MB, MI, MS, RD)	_SSEPSimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)
#define CMPPDrr(IM, RS, RD)		_SSEPDirr(X86_SSE_CMP, IM, RS, RD)
#define CMPPDmr(IM, MD, MB, MI, MS, RD)	_SSEPDimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)

#define CMPSSrr(IM, RS, RD)		_SSESSirr(X86_SSE_CMP, IM, RS, RD)
#define CMPSSmr(IM, MD, MB, MI, MS, RD)	_SSESSimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)
#define CMPSDrr(IM, RS, RD)		_SSESDirr(X86_SSE_CMP, IM, RS, RD)
#define CMPSDmr(IM, MD, MB, MI, MS, RD)	_SSESDimr(X86_SSE_CMP, IM, MD, MB, MI, MS, RD)

#define DIVPSrr(RS, RD)			_SSEPSrr(X86_SSE_DIV, RS, RD)
#define DIVPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_DIV, MD, MB, MI, MS, RD)
#define DIVPDrr(RS, RD)			_SSEPDrr(X86_SSE_DIV, RS, RD)
#define DIVPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_DIV, MD, MB, MI, MS, RD)

#define DIVSSrr(RS, RD)			_SSESSrr(X86_SSE_DIV, RS, RD)
#define DIVSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_DIV, MD, MB, MI, MS, RD)
#define DIVSDrr(RS, RD)			_SSESDrr(X86_SSE_DIV, RS, RD)
#define DIVSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_DIV, MD, MB, MI, MS, RD)

#define MAXPSrr(RS, RD)			_SSEPSrr(X86_SSE_MAX, RS, RD)
#define MAXPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_MAX, MD, MB, MI, MS, RD)
#define MAXPDrr(RS, RD)			_SSEPDrr(X86_SSE_MAX, RS, RD)
#define MAXPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_MAX, MD, MB, MI, MS, RD)

#define MAXSSrr(RS, RD)			_SSESSrr(X86_SSE_MAX, RS, RD)
#define MAXSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_MAX, MD, MB, MI, MS, RD)
#define MAXSDrr(RS, RD)			_SSESDrr(X86_SSE_MAX, RS, RD)
#define MAXSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_MAX, MD, MB, MI, MS, RD)

#define MINPSrr(RS, RD)			_SSEPSrr(X86_SSE_MIN, RS, RD)
#define MINPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_MIN, MD, MB, MI, MS, RD)
#define MINPDrr(RS, RD)			_SSEPDrr(X86_SSE_MIN, RS, RD)
#define MINPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_MIN, MD, MB, MI, MS, RD)

#define MINSSrr(RS, RD)			_SSESSrr(X86_SSE_MIN, RS, RD)
#define MINSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_MIN, MD, MB, MI, MS, RD)
#define MINSDrr(RS, RD)			_SSESDrr(X86_SSE_MIN, RS, RD)
#define MINSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_MIN, MD, MB, MI, MS, RD)

#define MULPSrr(RS, RD)			_SSEPSrr(X86_SSE_MUL, RS, RD)
#define MULPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_MUL, MD, MB, MI, MS, RD)
#define MULPDrr(RS, RD)			_SSEPDrr(X86_SSE_MUL, RS, RD)
#define MULPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_MUL, MD, MB, MI, MS, RD)

#define MULSSrr(RS, RD)			_SSESSrr(X86_SSE_MUL, RS, RD)
#define MULSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_MUL, MD, MB, MI, MS, RD)
#define MULSDrr(RS, RD)			_SSESDrr(X86_SSE_MUL, RS, RD)
#define MULSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_MUL, MD, MB, MI, MS, RD)

#define ORPSrr(RS, RD)			_SSEPSrr(X86_SSE_OR, RS, RD)
#define ORPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_OR, MD, MB, MI, MS, RD)
#define ORPDrr(RS, RD)			_SSEPDrr(X86_SSE_OR, RS, RD)
#define ORPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_OR, MD, MB, MI, MS, RD)

/* RCP/RSQRT exist only in single-precision forms. */
#define RCPPSrr(RS, RD)			_SSEPSrr(X86_SSE_RCP, RS, RD)
#define RCPPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_RCP, MD, MB, MI, MS, RD)
#define RCPSSrr(RS, RD)			_SSESSrr(X86_SSE_RCP, RS, RD)
#define RCPSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_RCP, MD, MB, MI, MS, RD)

#define RSQRTPSrr(RS, RD)		_SSEPSrr(X86_SSE_RSQRT, RS, RD)
#define RSQRTPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_RSQRT, MD, MB, MI, MS, RD)
#define RSQRTSSrr(RS, RD)		_SSESSrr(X86_SSE_RSQRT, RS, RD)
#define RSQRTSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_RSQRT, MD, MB, MI, MS, RD)

#define SQRTPSrr(RS, RD)		_SSEPSrr(X86_SSE_SQRT, RS, RD)
#define SQRTPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)
#define SQRTPDrr(RS, RD)		_SSEPDrr(X86_SSE_SQRT, RS, RD)
#define SQRTPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)

#define SQRTSSrr(RS, RD)		_SSESSrr(X86_SSE_SQRT, RS, RD)
#define SQRTSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)
#define SQRTSDrr(RS, RD)		_SSESDrr(X86_SSE_SQRT, RS, RD)
#define SQRTSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_SQRT, MD, MB, MI, MS, RD)

#define SUBPSrr(RS, RD)			_SSEPSrr(X86_SSE_SUB, RS, RD)
#define SUBPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_SUB, MD, MB, MI, MS, RD)
#define SUBPDrr(RS, RD)			_SSEPDrr(X86_SSE_SUB, RS, RD)
#define SUBPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_SUB, MD, MB, MI, MS, RD)

#define SUBSSrr(RS, RD)			_SSESSrr(X86_SSE_SUB, RS, RD)
#define SUBSSmr(MD, MB, MI, MS, RD)	_SSESSmr(X86_SSE_SUB, MD, MB, MI, MS, RD)
#define SUBSDrr(RS, RD)			_SSESDrr(X86_SSE_SUB, RS, RD)
#define SUBSDmr(MD, MB, MI, MS, RD)	_SSESDmr(X86_SSE_SUB, MD, MB, MI, MS, RD)

#define XORPSrr(RS, RD)			_SSEPSrr(X86_SSE_XOR, RS, RD)
#define XORPSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_XOR, MD, MB, MI, MS, RD)
#define XORPDrr(RS, RD)			_SSEPDrr(X86_SSE_XOR, RS, RD)
#define XORPDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_XOR, MD, MB, MI, MS, RD)

/* Ordered / unordered scalar compares setting EFLAGS. */
#define COMISSrr(RS, RD)		_SSEPSrr(X86_SSE_COMI, RS, RD)
#define COMISSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_COMI, MD, MB, MI, MS, RD)
#define COMISDrr(RS, RD)		_SSEPDrr(X86_SSE_COMI, RS, RD)
#define COMISDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_COMI, MD, MB, MI, MS, RD)

#define UCOMISSrr(RS, RD)		_SSEPSrr(X86_SSE_UCOMI, RS, RD)
#define UCOMISSmr(MD, MB, MI, MS, RD)	_SSEPSmr(X86_SSE_UCOMI, MD, MB, MI, MS, RD)
#define UCOMISDrr(RS, RD)		_SSEPDrr(X86_SSE_UCOMI, RS, RD)
#define UCOMISDmr(MD, MB, MI, MS, RD)	_SSEPDmr(X86_SSE_UCOMI, MD, MB, MI, MS, RD)
2228 gbeauche 1.3
2229     #define MOVAPSrr(RS, RD) _SSEPSrr(0x28, RS, RD)
2230     #define MOVAPSmr(MD, MB, MI, MS, RD) _SSEPSmr(0x28, MD, MB, MI, MS, RD)
2231     #define MOVAPSrm(RS, MD, MB, MI, MS) _SSEPSrm(0x29, RS, MD, MB, MI, MS)
2232    
2233     #define MOVAPDrr(RS, RD) _SSEPDrr(0x28, RS, RD)
2234     #define MOVAPDmr(MD, MB, MI, MS, RD) _SSEPDmr(0x28, MD, MB, MI, MS, RD)
2235     #define MOVAPDrm(RS, MD, MB, MI, MS) _SSEPDrm(0x29, RS, MD, MB, MI, MS)
2236    
2237 gbeauche 1.28 #define CVTDQ2PDrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTDQ2PD, RS,_rX, RD,_rX)
2238     #define CVTDQ2PDmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTDQ2PD, MD, MB, MI, MS, RD,_rX)
2239     #define CVTDQ2PSrr(RS, RD) __SSELrr( X86_SSE_CVTDQ2PS, RS,_rX, RD,_rX)
2240     #define CVTDQ2PSmr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTDQ2PS, MD, MB, MI, MS, RD,_rX)
2241     #define CVTPD2DQrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTPD2DQ, RS,_rX, RD,_rX)
2242     #define CVTPD2DQmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTPD2DQ, MD, MB, MI, MS, RD,_rX)
2243     #define CVTPD2PIrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTPD2PI, RS,_rX, RD,_rM)
2244     #define CVTPD2PImr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTPD2PI, MD, MB, MI, MS, RD,_rM)
2245     #define CVTPD2PSrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTPD2PS, RS,_rX, RD,_rX)
2246     #define CVTPD2PSmr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTPD2PS, MD, MB, MI, MS, RD,_rX)
2247     #define CVTPI2PDrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTPI2PD, RS,_rM, RD,_rX)
2248     #define CVTPI2PDmr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTPI2PD, MD, MB, MI, MS, RD,_rX)
2249     #define CVTPI2PSrr(RS, RD) __SSELrr( X86_SSE_CVTPI2PS, RS,_rM, RD,_rX)
2250     #define CVTPI2PSmr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTPI2PS, MD, MB, MI, MS, RD,_rX)
2251     #define CVTPS2DQrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTPS2DQ, RS,_rX, RD,_rX)
2252     #define CVTPS2DQmr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTPS2DQ, MD, MB, MI, MS, RD,_rX)
2253     #define CVTPS2PDrr(RS, RD) __SSELrr( X86_SSE_CVTPS2PD, RS,_rX, RD,_rX)
2254     #define CVTPS2PDmr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTPS2PD, MD, MB, MI, MS, RD,_rX)
2255     #define CVTPS2PIrr(RS, RD) __SSELrr( X86_SSE_CVTPS2PI, RS,_rX, RD,_rM)
2256     #define CVTPS2PImr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTPS2PI, MD, MB, MI, MS, RD,_rM)
2257     #define CVTSD2SILrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTSD2SI, RS,_rX, RD,_r4)
2258     #define CVTSD2SILmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTSD2SI, MD, MB, MI, MS, RD,_r4)
2259     #define CVTSD2SIQrr(RS, RD) _SSEQrr(0xf2, X86_SSE_CVTSD2SI, RS,_rX, RD,_r8)
2260     #define CVTSD2SIQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf2, X86_SSE_CVTSD2SI, MD, MB, MI, MS, RD,_r8)
2261     #define CVTSD2SSrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTSD2SS, RS,_rX, RD,_rX)
/*
 * SSE/SSE2 conversion instructions (CVTxx2yy) and their truncating
 * variants (CVTTxx2yy).
 *
 * Mnemonic suffixes: L = 32-bit integer operand (emitted via _SSELrr and
 * _SSELmr), Q = 64-bit integer operand with REX.W (via _SSEQrr/_SSEQmr);
 * rr = register,register form, mr = memory,register form.
 * The first macro argument of the single-underscore emitters is the
 * mandatory SSE prefix byte (0x66, 0xf2 or 0xf3); the double-underscore
 * __SSELrr/__SSELmr variants emit no prefix byte.
 * Operand-class tags: _r4/_r8 = 32/64-bit general-purpose register;
 * _rX and _rM mark the XMM resp. MMX operands (e.g. CVTTPD2PI, whose
 * destination is tagged _rM, writes an MMX register).
 * NOTE(review): tag semantics inferred from usage here -- the _SSE*
 * emitters and X86_SSE_* opcode constants are defined earlier in this
 * file; confirm against those definitions.
 */
2262     #define CVTSD2SSmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTSD2SS, MD, MB, MI, MS, RD,_rX)
2263     #define CVTSI2SDLrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTSI2SD, RS,_r4, RD,_rX)
2264     #define CVTSI2SDLmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTSI2SD, MD, MB, MI, MS, RD,_rX)
2265     #define CVTSI2SDQrr(RS, RD) _SSEQrr(0xf2, X86_SSE_CVTSI2SD, RS,_r8, RD,_rX)
2266     #define CVTSI2SDQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf2, X86_SSE_CVTSI2SD, MD, MB, MI, MS, RD,_rX)
2267     #define CVTSI2SSLrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTSI2SS, RS,_r4, RD,_rX)
2268     #define CVTSI2SSLmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTSI2SS, MD, MB, MI, MS, RD,_rX)
2269     #define CVTSI2SSQrr(RS, RD) _SSEQrr(0xf3, X86_SSE_CVTSI2SS, RS,_r8, RD,_rX)
2270     #define CVTSI2SSQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf3, X86_SSE_CVTSI2SS, MD, MB, MI, MS, RD,_rX)
2271     #define CVTSS2SDrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTSS2SD, RS,_rX, RD,_rX)
2272     #define CVTSS2SDmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTSS2SD, MD, MB, MI, MS, RD,_rX)
2273     #define CVTSS2SILrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTSS2SI, RS,_rX, RD,_r4)
2274     #define CVTSS2SILmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTSS2SI, MD, MB, MI, MS, RD,_r4)
2275     #define CVTSS2SIQrr(RS, RD) _SSEQrr(0xf3, X86_SSE_CVTSS2SI, RS,_rX, RD,_r8)
2276     #define CVTSS2SIQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf3, X86_SSE_CVTSS2SI, MD, MB, MI, MS, RD,_r8)
2277     #define CVTTPD2PIrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTTPD2PI, RS,_rX, RD,_rM)
2278     #define CVTTPD2PImr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTTPD2PI, MD, MB, MI, MS, RD,_rM)
2279     #define CVTTPD2DQrr(RS, RD) _SSELrr(0x66, X86_SSE_CVTTPD2DQ, RS,_rX, RD,_rX)
2280     #define CVTTPD2DQmr(MD, MB, MI, MS, RD) _SSELmr(0x66, X86_SSE_CVTTPD2DQ, MD, MB, MI, MS, RD,_rX)
2281     #define CVTTPS2DQrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTTPS2DQ, RS,_rX, RD,_rX)
2282     #define CVTTPS2DQmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTTPS2DQ, MD, MB, MI, MS, RD,_rX)
2283     #define CVTTPS2PIrr(RS, RD) __SSELrr( X86_SSE_CVTTPS2PI, RS,_rX, RD,_rM)
2284     #define CVTTPS2PImr(MD, MB, MI, MS, RD) __SSELmr( X86_SSE_CVTTPS2PI, MD, MB, MI, MS, RD,_rM)
2285     #define CVTTSD2SILrr(RS, RD) _SSELrr(0xf2, X86_SSE_CVTTSD2SI, RS,_rX, RD,_r4)
2286     #define CVTTSD2SILmr(MD, MB, MI, MS, RD) _SSELmr(0xf2, X86_SSE_CVTTSD2SI, MD, MB, MI, MS, RD,_r4)
2287     #define CVTTSD2SIQrr(RS, RD) _SSEQrr(0xf2, X86_SSE_CVTTSD2SI, RS,_rX, RD,_r8)
2288     #define CVTTSD2SIQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf2, X86_SSE_CVTTSD2SI, MD, MB, MI, MS, RD,_r8)
2289     #define CVTTSS2SILrr(RS, RD) _SSELrr(0xf3, X86_SSE_CVTTSS2SI, RS,_rX, RD,_r4)
2290     #define CVTTSS2SILmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, X86_SSE_CVTTSS2SI, MD, MB, MI, MS, RD,_r4)
2291     #define CVTTSS2SIQrr(RS, RD) _SSEQrr(0xf3, X86_SSE_CVTTSS2SI, RS,_rX, RD,_r8)
2292     #define CVTTSS2SIQmr(MD, MB, MI, MS, RD) _SSEQmr(0xf3, X86_SSE_CVTTSS2SI, MD, MB, MI, MS, RD,_r8)
2293 gbeauche 1.3
/*
 * SSE/MMX data-movement macros.
 * Per the Intel SDM, opcode 0x6e (MOVD/MOVQ) moves GPR/memory into an
 * XMM or MMX register and 0x7e moves in the reverse direction -- which is
 * why the rr expansions below for the 0x7e forms pass their register
 * arguments swapped relative to the 0x6e forms.
 */
/* GPR (32/64-bit) -> XMM register (prefix 0x66, opcode 0x6e). */
2294 gbeauche 1.26 #define MOVDXDrr(RS, RD) _SSELrr(0x66, 0x6e, RS,_r4, RD,_rX)
2295     #define MOVDXDmr(MD, MB, MI, MS, RD) _SSELmr(0x66, 0x6e, MD, MB, MI, MS, RD,_rX)
2296     #define MOVQXDrr(RS, RD) _SSEQrr(0x66, 0x6e, RS,_r8, RD,_rX)
2297     #define MOVQXDmr(MD, MB, MI, MS, RD) _SSEQmr(0x66, 0x6e, MD, MB, MI, MS, RD,_rX)
2298
/* XMM register -> GPR or memory (prefix 0x66, opcode 0x7e). */
2299     #define MOVDXSrr(RS, RD) _SSELrr(0x66, 0x7e, RD,_r4, RS,_rX)
2300     #define MOVDXSrm(RS, MD, MB, MI, MS) _SSELrm(0x66, 0x7e, RS,_rX, MD, MB, MI, MS)
2301     #define MOVQXSrr(RS, RD) _SSEQrr(0x66, 0x7e, RD,_r8, RS,_rX)
2302     #define MOVQXSrm(RS, MD, MB, MI, MS) _SSEQrm(0x66, 0x7e, RS,_rX, MD, MB, MI, MS)
2303 gbeauche 1.3
/* GPR or memory -> MMX register (no prefix, opcode 0x6e). */
2304     #define MOVDLMrr(RS, RD) __SSELrr( 0x6e, RS,_r4, RD,_rM)
2305     #define MOVDLMmr(MD, MB, MI, MS, RD) __SSELmr( 0x6e, MD, MB, MI, MS, RD,_rM)
2306     #define MOVDQMrr(RS, RD) __SSEQrr( 0x6e, RS,_r8, RD,_rM)
2307     #define MOVDQMmr(MD, MB, MI, MS, RD) __SSEQmr( 0x6e, MD, MB, MI, MS, RD,_rM)
2308
/* MMX register -> GPR or memory (no prefix, opcode 0x7e). */
2309     #define MOVDMLrr(RS, RD) __SSELrr( 0x7e, RS,_rM, RD,_r4)
2310     #define MOVDMLrm(RS, MD, MB, MI, MS) __SSELrm( 0x7e, RS,_rM, MD, MB, MI, MS)
2311     #define MOVDMQrr(RS, RD) __SSEQrr( 0x7e, RS,_rM, RD,_r8)
2312     #define MOVDMQrm(RS, MD, MB, MI, MS) __SSEQrm( 0x7e, RS,_rM, MD, MB, MI, MS)
2313
/* MOVDQ2Q: low quadword of XMM -> MMX (0xf2 0x0f 0xd6). */
2314     #define MOVDQ2Qrr(RS, RD) _SSELrr(0xf2, 0xd6, RS,_rX, RD,_rM)
/* MOVMSKPS/MOVMSKPD: extract packed sign bits into a 32-bit GPR. */
2315 gbeauche 1.22 #define MOVMSKPSrr(RS, RD) __SSELrr( 0x50, RS,_rX, RD,_r4)
2316     #define MOVMSKPDrr(RS, RD) _SSELrr(0x66, 0x50, RS,_rX, RD,_r4)
2317
/* Move high/low 64-bit halves between XMM registers. */
2318 gbeauche 1.3 #define MOVHLPSrr(RS, RD) __SSELrr( 0x12, RS,_rX, RD,_rX)
2319     #define MOVLHPSrr(RS, RD) __SSELrr( 0x16, RS,_rX, RD,_rX)
2320
/* 128-bit aligned moves (0x6f load, 0x7f store). */
2321     #define MOVDQArr(RS, RD) _SSELrr(0x66, 0x6f, RS,_rX, RD,_rX)
2322     #define MOVDQAmr(MD, MB, MI, MS, RD) _SSELmr(0x66, 0x6f, MD, MB, MI, MS, RD,_rX)
2323     #define MOVDQArm(RS, MD, MB, MI, MS) _SSELrm(0x66, 0x7f, RS,_rX, MD, MB, MI, MS)
2324
/* 128-bit unaligned moves (0xf3 prefix). */
2325     #define MOVDQUrr(RS, RD) _SSELrr(0xf3, 0x6f, RS,_rX, RD,_rX)
2326     #define MOVDQUmr(MD, MB, MI, MS, RD) _SSELmr(0xf3, 0x6f, MD, MB, MI, MS, RD,_rX)
2327     #define MOVDQUrm(RS, MD, MB, MI, MS) _SSELrm(0xf3, 0x7f, RS,_rX, MD, MB, MI, MS)
2328
/* High 64-bit half of XMM <-> memory (0x16 load, 0x17 store). */
2329     #define MOVHPDmr(MD, MB, MI, MS, RD) _SSELmr(0x66, 0x16, MD, MB, MI, MS, RD,_rX)
2330     #define MOVHPDrm(RS, MD, MB, MI, MS) _SSELrm(0x66, 0x17, RS,_rX, MD, MB, MI, MS)
2331     #define MOVHPSmr(MD, MB, MI, MS, RD) __SSELmr( 0x16, MD, MB, MI, MS, RD,_rX)
2332     #define MOVHPSrm(RS, MD, MB, MI, MS) __SSELrm( 0x17, RS,_rX, MD, MB, MI, MS)
2333
/* Low 64-bit half of XMM <-> memory (0x12 load, 0x13 store). */
2334     #define MOVLPDmr(MD, MB, MI, MS, RD) _SSELmr(0x66, 0x12, MD, MB, MI, MS, RD,_rX)
2335     #define MOVLPDrm(RS, MD, MB, MI, MS) _SSELrm(0x66, 0x13, RS,_rX, MD, MB, MI, MS)
2336     #define MOVLPSmr(MD, MB, MI, MS, RD) __SSELmr( 0x12, MD, MB, MI, MS, RD,_rX)
2337     #define MOVLPSrm(RS, MD, MB, MI, MS) __SSELrm( 0x13, RS,_rX, MD, MB, MI, MS)
2338 gbeauche 1.2
2339    
2340 gbeauche 1.30 /* --- Floating-Point instructions ----------------------------------------- */
2341 gbeauche 1.2
/*
 * x87 FPU opcode table.  Each enumerator packs the two bytes of the
 * instruction's register / no-operand encoding; for memory-operand forms
 * the low 3 bits are reused as the ModRM /digit and the high byte as the
 * opcode byte.  The _FPUm and _FPUr emitters below extract (OP)>>8,
 * (OP)&7 and (OP)&0xfff8 accordingly, and OR in 0x400 / 0x200 to reach
 * the sibling opcode groups (64-bit real, 16-bit integer, popping forms).
 *
 * Trailing comments list the operand forms each opcode supports:
 *   m32fp/m64fp   - 32/64-bit real memory operand
 *   m16int/m32int - 16/32-bit integer memory operand
 *   mem           - a single dedicated memory form
 *   sti           - st(i) register operand
 *   sti0 / st0i   - st(i),st0 resp. st0,st(i) register pair
 *   pst0i         - popping st(i),st0 form
 */
2342 gbeauche 1.30 enum {
2343     X86_F2XM1 = 0xd9f0,
2344     X86_FABS = 0xd9e1,
2345     X86_FADD = 0xd8c0, // m32fp, m64fp, sti0, st0i, pst0i
2346     X86_FIADD = 0xda00, // m32int, m16int
2347     X86_FBLD = 0xdf04, // mem
2348     X86_FBSTP = 0xdf06, // mem
2349     X86_FCHS = 0xd9e0,
2350     X86_FCMOVB = 0xdac0, // sti0
2351     X86_FCMOVE = 0xdac8, // sti0
2352     X86_FCMOVBE = 0xdad0, // sti0
2353     X86_FCMOVU = 0xdad8, // sti0
2354     X86_FCMOVNB = 0xdbc0, // sti0
2355     X86_FCMOVNE = 0xdbc8, // sti0
2356     X86_FCMOVNBE = 0xdbd0, // sti0
2357     X86_FCMOVNU = 0xdbd8, // sti0
2358     X86_FCOM = 0xd8d2, // m32fp, m64fp, sti
2359     X86_FCOMP = 0xd8db, // m32fp, m64fp, sti
2360     X86_FCOMPP = 0xded9,
2361     X86_FCOMI = 0xdbf0, // sti0
2362     X86_FCOMIP = 0xdff0, // sti0
2363     X86_FUCOMI = 0xdbe8, // sti0
2364     X86_FUCOMIP = 0xdfe8, // sti0
2365     X86_FCOS = 0xd9ff,
2366     X86_FDECSTP = 0xd9f6,
2367     X86_FDIV = 0xd8f6, // m32fp, m64fp, sti0, st0i, pst0i
2368     X86_FIDIV = 0xda06, // m32int, m16int
2369     X86_FDIVR = 0xd8ff, // m32fp, m64fp, sti0, st0i, pst0i
2370     X86_FIDIVR = 0xda07, // m32int, m16int
2371     X86_FFREE = 0xddc0, // sti
2372     X86_FICOM = 0xda02, // m32int, m16int
2373     X86_FICOMP = 0xda03, // m32int, m16int
2374     X86_FILD = 0xdb00, // m32int, m16int
2375     X86_FILDQ = 0xdf05, // mem
2376     X86_FINCSTP = 0xd9f7,
2377     X86_FIST = 0xdb02, // m32int, m16int
2378     X86_FISTP = 0xdb03, // m32int, m16int
2379     X86_FISTPQ = 0xdf07, // mem
2380     X86_FISTTP = 0xdb01, // m32int, m16int
2381     X86_FISTTPQ = 0xdd01, // mem
2382     X86_FLD = 0xd900, // m32fp, m64fp
2383     X86_FLDT = 0xdb05, // mem
2384     X86_FLD1 = 0xd9e8,
2385     X86_FLDL2T = 0xd9e9,
2386     X86_FLDL2E = 0xd9ea,
2387     X86_FLDPI = 0xd9eb,
2388     X86_FLDLG2 = 0xd9ec,
2389     X86_FLDLN2 = 0xd9ed,
2390     X86_FLDZ = 0xd9ee,
2391     X86_FMUL = 0xd8c9, // m32fp, m64fp, sti0, st0i, pst0i
2392     X86_FIMUL = 0xda01, // m32int, m16int
2393     X86_FNOP = 0xd9d0,
2394     X86_FPATAN = 0xd9f3,
2395     X86_FPREM = 0xd9f8,
2396     X86_FPREM1 = 0xd9f5,
2397     X86_FPTAN = 0xd9f2,
2398     X86_FRNDINT = 0xd9fc,
2399     X86_FSCALE = 0xd9fd,
2400     X86_FSIN = 0xd9fe,
2401     X86_FSINCOS = 0xd9fb,
2402     X86_FSQRT = 0xd9fa,
2403     X86_FSTS = 0xd902, // mem
2404 gbeauche 1.31 X86_FSTD = 0xdd02, // mem
2405 gbeauche 1.30 X86_FST = 0xddd0, // sti
2406     X86_FSTPS = 0xd903, // mem
2407 gbeauche 1.31 X86_FSTPD = 0xdd03, // mem
2408 gbeauche 1.30 X86_FSTPT = 0xdb07, // mem
2409     X86_FSTP = 0xddd8, // sti
2410     X86_FSUB = 0xd8e4, // m32fp, m64fp, sti0, st0i, pst0i
2411     X86_FISUB = 0xda04, // m32int, m16int
2412     X86_FSUBR = 0xd8ed, // m32fp, m64fp, sti0, st0i, pst0i
2413     X86_FISUBR = 0xda05, // m32int, m16int
2414     X86_FTST = 0xd9e4,
2415     X86_FUCOM = 0xdde0, // sti
2416     X86_FUCOMP = 0xdde8, // sti
2417     X86_FUCOMPP = 0xdae9,
2418     X86_FXAM = 0xd9e5,
2419     X86_FXCH = 0xd9c8, // sti
2420     X86_FXTRACT = 0xd9f4,
2421     X86_FYL2X = 0xd9f1,
2422     X86_FYL2XP1 = 0xd9f9,
2423     };
2424 gbeauche 1.2
/*
 * Low-level x87 emitters.  Each X86_F* opcode value packs two bytes:
 * the high byte is the first opcode byte; the low byte is the second
 * opcode byte for the register / no-operand form, and its low 3 bits
 * double as the ModRM /digit for the memory-operand forms.
 */
/* No-operand instruction: emit both packed opcode bytes verbatim. */
2425 gbeauche 1.30 #define _FPU(OP) _OO(OP)
/* Memory form: REX prefix (AMD64 base/index extension), then the first
   opcode byte with /digit = (OP)&7 and the effective address. */
2426     #define _FPUm(OP, MD, MB, MI, MS) (_REXLrm(0, MB, MI), _O_r_X((OP)>>8, (OP)&7, MD, MB, MI, MS))
/* S = 32-bit real memory operand (base encoding, e.g. 0xd9 group). */
2427     #define _FPUSm(OP, MD, MB, MI, MS) _FPUm(OP, MD, MB, MI, MS)
/* D = 64-bit real: |0x400 adds 4 to the first opcode byte (0xd9 -> 0xdd). */
2428     #define _FPUDm(OP, MD, MB, MI, MS) _FPUm((OP)|0x400, MD, MB, MI, MS)
/* L = 32-bit integer memory operand (base encoding, e.g. 0xda group). */
2429     #define _FPULm(OP, MD, MB, MI, MS) _FPUm(OP, MD, MB, MI, MS)
/* W = 16-bit integer: |0x400 again shifts the group (0xda -> 0xde). */
2430     #define _FPUWm(OP, MD, MB, MI, MS) _FPUm((OP)|0x400, MD, MB, MI, MS)
/* Register form: clear the /digit bits and add the st(i) register number
   into the second opcode byte. */
2431     #define _FPUr(OP, RR) _OOr((OP)&0xfff8, _rF(RR))
/* "0r": destination st(i), source st0 -- |0x400 selects the 0xdc/0xde
   opcode group (e.g. FADD 0xd8c0 -> 0xdcc0 = FADD st(i),st0). */
2432     #define _FPU0r(OP, RD) _FPUr((OP)|0x400, RD)
/* "r0": destination st0, source st(i) -- base encoding. */
2433     #define _FPUr0(OP, RS) _FPUr((OP) , RS)
/* Two-register form: exactly one operand must be st0; pick the direction
   accordingly, otherwise fail at emit time. */
2434     #define _FPUrr(OP, RS, RD) (_rST0P(RS) ? _FPU0r(OP, RD) : (_rST0P(RD) ? _FPUr0(OP, RS) : x86_emit_failure("FPU instruction without st0")))
/* Popping "0r" form: |0x200 on top of the 0r bias gives the pop variant
   (e.g. FADD 0xd8c0 -> 0xdec0 = FADDP st(i),st0). */
2435     #define _FPUP0r(OP, RD) _FPU0r((OP)|0x200, RD)
2436    
/*
 * x87 instruction macros, built on the _FPU* emitters above.
 * Suffix conventions:
 *   Sm  - 32-bit real memory operand     Dm - 64-bit real memory operand
 *   Wm  - 16-bit integer memory operand  Lm - 32-bit integer memory operand
 *   Qm  - 64-bit integer (and FLDT/FSTPT 80-bit real, FBLD/FBSTP BCD)
 *         forms use a dedicated opcode value with plain _FPUm
 *   r   - st(i) register operand
 *   0r  - st(i) destination, st0 source;  r0 - st0 destination, st(i) source
 *   P0r - popping variant of the 0r form (FADDP, FSUBP, ...)
 * NOTE(review): for the register forms of FSUB/FSUBR and FDIV/FDIVR the
 * 0r/P0r macros produce the encodings of the historical AT&T/gas mnemonic
 * convention, which swaps the R and non-R opcodes relative to Intel
 * syntax (e.g. FDIV0r emits DC F0+i) -- confirm this is what callers
 * expect before changing anything here.
 */
2437     #define F2XM1() _FPU(X86_F2XM1)
2438     #define FABS() _FPU(X86_FABS)
2439     #define FADDSm(MD, MB, MI, MS) _FPUSm(X86_FADD, MD, MB, MI, MS)
2440     #define FADDDm(MD, MB, MI, MS) _FPUDm(X86_FADD, MD, MB, MI, MS)
2441     #define FADDP0r(RD) _FPUP0r(X86_FADD, RD)
2442     #define FADDrr(RS, RD) _FPUrr(X86_FADD, RS, RD)
2443     #define FADD0r(RD) _FPU0r(X86_FADD, RD)
2444     #define FADDr0(RS) _FPUr0(X86_FADD, RS)
2445     #define FIADDWm(MD, MB, MI, MS) _FPUWm(X86_FIADD, MD, MB, MI, MS)
2446     #define FIADDLm(MD, MB, MI, MS) _FPULm(X86_FIADD, MD, MB, MI, MS)
2447     #define FBLDm(MD, MB, MI, MS) _FPUm(X86_FBLD, MD, MB, MI, MS)
2448     #define FBSTPm(MD, MB, MI, MS) _FPUm(X86_FBSTP, MD, MB, MI, MS)
2449     #define FCHS() _FPU(X86_FCHS)
2450     #define FCMOVBr0(RS) _FPUr0(X86_FCMOVB, RS)
2451     #define FCMOVEr0(RS) _FPUr0(X86_FCMOVE, RS)
2452     #define FCMOVBEr0(RS) _FPUr0(X86_FCMOVBE, RS)
2453     #define FCMOVUr0(RS) _FPUr0(X86_FCMOVU, RS)
2454     #define FCMOVNBr0(RS) _FPUr0(X86_FCMOVNB, RS)
2455     #define FCMOVNEr0(RS) _FPUr0(X86_FCMOVNE, RS)
2456     #define FCMOVNBEr0(RS) _FPUr0(X86_FCMOVNBE, RS)
2457     #define FCMOVNUr0(RS) _FPUr0(X86_FCMOVNU, RS)
2458     #define FCOMSm(MD, MB, MI, MS) _FPUSm(X86_FCOM, MD, MB, MI, MS)
2459     #define FCOMDm(MD, MB, MI, MS) _FPUDm(X86_FCOM, MD, MB, MI, MS)
2460     #define FCOMr(RD) _FPUr(X86_FCOM, RD)
2461     #define FCOMPSm(MD, MB, MI, MS) _FPUSm(X86_FCOMP, MD, MB, MI, MS)
2462     #define FCOMPDm(MD, MB, MI, MS) _FPUDm(X86_FCOMP, MD, MB, MI, MS)
2463     #define FCOMPr(RD) _FPUr(X86_FCOMP, RD)
2464     #define FCOMPP() _FPU(X86_FCOMPP)
2465     #define FCOMIr0(RS) _FPUr0(X86_FCOMI, RS)
2466     #define FCOMIPr0(RS) _FPUr0(X86_FCOMIP, RS)
2467     #define FUCOMIr0(RS) _FPUr0(X86_FUCOMI, RS)
2468     #define FUCOMIPr0(RS) _FPUr0(X86_FUCOMIP, RS)
2469     #define FCOS() _FPU(X86_FCOS)
2470     #define FDECSTP() _FPU(X86_FDECSTP)
2471     #define FDIVSm(MD, MB, MI, MS) _FPUSm(X86_FDIV, MD, MB, MI, MS)
2472     #define FDIVDm(MD, MB, MI, MS) _FPUDm(X86_FDIV, MD, MB, MI, MS)
2473     #define FDIVP0r(RD) _FPUP0r(X86_FDIV, RD)
2474     #define FDIVrr(RS, RD) _FPUrr(X86_FDIV, RS, RD)
2475     #define FDIV0r(RD) _FPU0r(X86_FDIV, RD)
2476     #define FDIVr0(RS) _FPUr0(X86_FDIV, RS)
2477     #define FIDIVWm(MD, MB, MI, MS) _FPUWm(X86_FIDIV, MD, MB, MI, MS)
2478     #define FIDIVLm(MD, MB, MI, MS) _FPULm(X86_FIDIV, MD, MB, MI, MS)
2479     #define FDIVRSm(MD, MB, MI, MS) _FPUSm(X86_FDIVR, MD, MB, MI, MS)
2480     #define FDIVRDm(MD, MB, MI, MS) _FPUDm(X86_FDIVR, MD, MB, MI, MS)
2481     #define FDIVRP0r(RD) _FPUP0r(X86_FDIVR, RD)
2482     #define FDIVRrr(RS, RD) _FPUrr(X86_FDIVR, RS, RD)
2483     #define FDIVR0r(RD) _FPU0r(X86_FDIVR, RD)
2484     #define FDIVRr0(RS) _FPUr0(X86_FDIVR, RS)
2485     #define FIDIVRWm(MD, MB, MI, MS) _FPUWm(X86_FIDIVR, MD, MB, MI, MS)
2486     #define FIDIVRLm(MD, MB, MI, MS) _FPULm(X86_FIDIVR, MD, MB, MI, MS)
2487     #define FFREEr(RD) _FPUr(X86_FFREE, RD)
2488     #define FICOMWm(MD, MB, MI, MS) _FPUWm(X86_FICOM, MD, MB, MI, MS)
2489     #define FICOMLm(MD, MB, MI, MS) _FPULm(X86_FICOM, MD, MB, MI, MS)
2490     #define FICOMPWm(MD, MB, MI, MS) _FPUWm(X86_FICOMP, MD, MB, MI, MS)
2491     #define FICOMPLm(MD, MB, MI, MS) _FPULm(X86_FICOMP, MD, MB, MI, MS)
2492     #define FILDWm(MD, MB, MI, MS) _FPUWm(X86_FILD, MD, MB, MI, MS)
2493     #define FILDLm(MD, MB, MI, MS) _FPULm(X86_FILD, MD, MB, MI, MS)
2494     #define FILDQm(MD, MB, MI, MS) _FPUm(X86_FILDQ, MD, MB, MI, MS)
2495     #define FINCSTP() _FPU(X86_FINCSTP)
2496     #define FISTWm(MD, MB, MI, MS) _FPUWm(X86_FIST, MD, MB, MI, MS)
2497     #define FISTLm(MD, MB, MI, MS) _FPULm(X86_FIST, MD, MB, MI, MS)
2498     #define FISTPWm(MD, MB, MI, MS) _FPUWm(X86_FISTP, MD, MB, MI, MS)
2499     #define FISTPLm(MD, MB, MI, MS) _FPULm(X86_FISTP, MD, MB, MI, MS)
2500     #define FISTPQm(MD, MB, MI, MS) _FPUm(X86_FISTPQ, MD, MB, MI, MS)
2501     #define FISTTPWm(MD, MB, MI, MS) _FPUWm(X86_FISTTP, MD, MB, MI, MS)
2502     #define FISTTPLm(MD, MB, MI, MS) _FPULm(X86_FISTTP, MD, MB, MI, MS)
2503     #define FISTTPQm(MD, MB, MI, MS) _FPUm(X86_FISTTPQ, MD, MB, MI, MS)
2504     #define FLDSm(MD, MB, MI, MS) _FPUSm(X86_FLD, MD, MB, MI, MS)
2505     #define FLDDm(MD, MB, MI, MS) _FPUDm(X86_FLD, MD, MB, MI, MS)
2506     #define FLDTm(MD, MB, MI, MS) _FPUm(X86_FLDT, MD, MB, MI, MS)
2507     #define FLD1() _FPU(X86_FLD1)
2508     #define FLDL2T() _FPU(X86_FLDL2T)
2509     #define FLDL2E() _FPU(X86_FLDL2E)
2510     #define FLDPI() _FPU(X86_FLDPI)
2511     #define FLDLG2() _FPU(X86_FLDLG2)
2512     #define FLDLN2() _FPU(X86_FLDLN2)
2513     #define FLDZ() _FPU(X86_FLDZ)
2514     #define FMULSm(MD, MB, MI, MS) _FPUSm(X86_FMUL, MD, MB, MI, MS)
2515     #define FMULDm(MD, MB, MI, MS) _FPUDm(X86_FMUL, MD, MB, MI, MS)
2516     #define FMULP0r(RD) _FPUP0r(X86_FMUL, RD)
2517     #define FMULrr(RS, RD) _FPUrr(X86_FMUL, RS, RD)
2518     #define FMUL0r(RD) _FPU0r(X86_FMUL, RD)
2519     #define FMULr0(RS) _FPUr0(X86_FMUL, RS)
2520     #define FIMULWm(MD, MB, MI, MS) _FPUWm(X86_FIMUL, MD, MB, MI, MS)
2521     #define FIMULLm(MD, MB, MI, MS) _FPULm(X86_FIMUL, MD, MB, MI, MS)
2522     #define FNOP() _FPU(X86_FNOP)
2523     #define FPATAN() _FPU(X86_FPATAN)
2524     #define FPREM() _FPU(X86_FPREM)
2525     #define FPREM1() _FPU(X86_FPREM1)
2526     #define FPTAN() _FPU(X86_FPTAN)
2527     #define FRNDINT() _FPU(X86_FRNDINT)
2528     #define FSCALE() _FPU(X86_FSCALE)
2529     #define FSIN() _FPU(X86_FSIN)
2530     #define FSINCOS() _FPU(X86_FSINCOS)
2531     #define FSQRT() _FPU(X86_FSQRT)
/* FST/FSTP store forms use dedicated S/D opcode values (X86_FSTS/X86_FSTD
   etc.) with plain _FPUm rather than the _FPUSm/_FPUDm bias trick. */
2532     #define FSTSm(MD, MB, MI, MS) _FPUm(X86_FSTS, MD, MB, MI, MS)
2533 gbeauche 1.31 #define FSTDm(MD, MB, MI, MS) _FPUm(X86_FSTD, MD, MB, MI, MS)
2534 gbeauche 1.30 #define FSTr(RD) _FPUr(X86_FST, RD)
2535     #define FSTPSm(MD, MB, MI, MS) _FPUm(X86_FSTPS, MD, MB, MI, MS)
2536 gbeauche 1.31 #define FSTPDm(MD, MB, MI, MS) _FPUm(X86_FSTPD, MD, MB, MI, MS)
2537 gbeauche 1.30 #define FSTPTm(MD, MB, MI, MS) _FPUm(X86_FSTPT, MD, MB, MI, MS)
2538     #define FSTPr(RD) _FPUr(X86_FSTP, RD)
2539     #define FSUBSm(MD, MB, MI, MS) _FPUSm(X86_FSUB, MD, MB, MI, MS)
2540     #define FSUBDm(MD, MB, MI, MS) _FPUDm(X86_FSUB, MD, MB, MI, MS)
2541     #define FSUBP0r(RD) _FPUP0r(X86_FSUB, RD)
2542     #define FSUBrr(RS, RD) _FPUrr(X86_FSUB, RS, RD)
2543     #define FSUB0r(RD) _FPU0r(X86_FSUB, RD)
2544     #define FSUBr0(RS) _FPUr0(X86_FSUB, RS)
2545     #define FISUBWm(MD, MB, MI, MS) _FPUWm(X86_FISUB, MD, MB, MI, MS)
2546     #define FISUBLm(MD, MB, MI, MS) _FPULm(X86_FISUB, MD, MB, MI, MS)
2547     #define FSUBRSm(MD, MB, MI, MS) _FPUSm(X86_FSUBR, MD, MB, MI, MS)
2548     #define FSUBRDm(MD, MB, MI, MS) _FPUDm(X86_FSUBR, MD, MB, MI, MS)
2549     #define FSUBRP0r(RD) _FPUP0r(X86_FSUBR, RD)
2550     #define FSUBRrr(RS, RD) _FPUrr(X86_FSUBR, RS, RD)
2551     #define FSUBR0r(RD) _FPU0r(X86_FSUBR, RD)
2552     #define FSUBRr0(RS) _FPUr0(X86_FSUBR, RS)
2553     #define FISUBRWm(MD, MB, MI, MS) _FPUWm(X86_FISUBR, MD, MB, MI, MS)
2554     #define FISUBRLm(MD, MB, MI, MS) _FPULm(X86_FISUBR, MD, MB, MI, MS)
2555     #define FTST() _FPU(X86_FTST)
2556     #define FUCOMr(RD) _FPUr(X86_FUCOM, RD)
2557     #define FUCOMPr(RD) _FPUr(X86_FUCOMP, RD)
2558     #define FUCOMPP() _FPU(X86_FUCOMPP)
2559     #define FXAM() _FPU(X86_FXAM)
2560     #define FXCHr(RD) _FPUr(X86_FXCH, RD)
2561     #define FXTRACT() _FPU(X86_FXTRACT)
2562     #define FYL2X() _FPU(X86_FYL2X)
2563     #define FYL2XP1() _FPU(X86_FYL2XP1)
2564 gbeauche 1.1
2565     #endif /* X86_RTASM_H */