200 |
|
} |
201 |
|
LENDFUNC(NONE,READ,1,raw_pop_l_r,(R4 r)) |
202 |
|
|
203 |
+ |
/*
 * raw_pop_l_m: pop a longword off the native stack into memory at
 * absolute address d.
 * On x86-64 the 64-bit form (POPQm) is required: 32-bit stack
 * operations do not exist in long mode, so POPLm would not encode.
 */
LOWFUNC(NONE,READ,1,raw_pop_l_m,(MEMW d))
{
#if defined(__x86_64__)
	POPQm(d, X86_NOREG, X86_NOREG, 1);
#else
	POPLm(d, X86_NOREG, X86_NOREG, 1);
#endif
}
LENDFUNC(NONE,READ,1,raw_pop_l_m,(MEMW d))
212 |
+ |
|
213 |
|
LOWFUNC(WRITE,NONE,2,raw_bt_l_ri,(R4 r, IMM i)) |
214 |
|
{ |
215 |
|
BTLir(i, r); |
935 |
|
} |
936 |
|
LENDFUNC(WRITE,NONE,2,raw_test_b_rr,(R1 d, R1 s)) |
937 |
|
|
938 |
+ |
/*
 * raw_xor_l_ri: 32-bit XOR of immediate i into register d.
 * Declared WRITE because XOR clobbers the host flags; used by the
 * raw_flags_set_zero helpers below to invert a single flag bit.
 */
LOWFUNC(WRITE,NONE,2,raw_xor_l_ri,(RW4 d, IMM i))
{
	XORLir(i, d);
}
LENDFUNC(WRITE,NONE,2,raw_xor_l_ri,(RW4 d, IMM i))
943 |
+ |
|
944 |
|
LOWFUNC(WRITE,NONE,2,raw_and_l_ri,(RW4 d, IMM i)) |
945 |
|
{ |
946 |
|
ANDLir(i, d); |
1224 |
|
} |
1225 |
|
LENDFUNC(NONE,READ,1,raw_pop_l_r,(R4 r)) |
1226 |
|
|
1227 |
+ |
/*
 * raw_pop_l_m: pop a longword off the native stack into memory at
 * absolute address d (direct byte-emitter variant, 32-bit only).
 */
LOWFUNC(NONE,READ,1,raw_pop_l_m,(MEMW d))
{
	emit_byte(0x8f);  /* POP r/m32 (group opcode, reg field must be /0) */
	emit_byte(0x05);  /* ModRM: mod=00, reg=000 (/0), rm=101 -> disp32 absolute */
	emit_long(d);     /* the 32-bit absolute target address */
}
LENDFUNC(NONE,READ,1,raw_pop_l_m,(MEMW d))
1234 |
+ |
|
1235 |
|
LOWFUNC(WRITE,NONE,2,raw_bt_l_ri,(R4 r, IMM i)) |
1236 |
|
{ |
1237 |
|
emit_byte(0x0f); |
2524 |
|
} |
2525 |
|
LENDFUNC(WRITE,NONE,2,raw_test_b_rr,(R1 d, R1 s)) |
2526 |
|
|
2527 |
+ |
/*
 * raw_xor_l_ri: 32-bit XOR of immediate i into register d
 * (direct byte-emitter variant). Clobbers host flags (WRITE).
 */
LOWFUNC(WRITE,NONE,2,raw_xor_l_ri,(RW4 d, IMM i))
{
	emit_byte(0x81);    /* group-1 ALU op with imm32 */
	emit_byte(0xf0+d);  /* ModRM: mod=11 (register), reg=110 (/6 = XOR), rm=d */
	emit_long(i);       /* 32-bit immediate operand */
}
LENDFUNC(WRITE,NONE,2,raw_xor_l_ri,(RW4 d, IMM i))
2534 |
+ |
|
2535 |
|
LOWFUNC(WRITE,NONE,2,raw_and_l_ri,(RW4 d, IMM i)) |
2536 |
|
{ |
2537 |
|
if (optimize_imm8 && isbyte(i)) { |
3246 |
|
raw_sahf(0); |
3247 |
|
} |
3248 |
|
|
3249 |
+ |
#define FLAG_NREG3 0 /* Set to -1 if any register will do */
/*
 * raw_flags_set_zero: force the host ZF from bit 14 of the value in s,
 * inverted (bit 14 of the register corresponds to bit 6 -- ZF -- of AH
 * after LAHF). Emits code that:
 *   tmp = s; pull flags into AH of s; clear ZF in the flags copy;
 *   isolate bit 14 of tmp; invert it; OR it back in; SAHF to restore.
 * NOTE(review): LAHF/SAHF only operate on AH, so s must be EAX here --
 * consistent with FLAG_NREG3 being pinned to register 0; confirm at the
 * register-allocation site. tmp is clobbered.
 */
static __inline__ void raw_flags_set_zero(int s, int tmp)
{
	raw_mov_l_rr(tmp,s);          /* save the source value */
	raw_lahf(s); /* flags into ah */
	raw_and_l_ri(s,0xffffbfff);   /* clear ZF (bit 14 = AH bit 6) */
	raw_and_l_ri(tmp,0x00004000); /* isolate bit 14 of the saved value */
	raw_xor_l_ri(tmp,0x00004000); /* invert it */
	raw_or_l(s,tmp);              /* merge inverted bit into flags copy */
	raw_sahf(s);                  /* write AH back to the host flags */
}
3260 |
+ |
|
3261 |
|
#else |
3262 |
|
|
3263 |
|
#define FLAG_NREG1 -1 /* Set to -1 if any register will do */ |
3284 |
|
raw_popfl(); |
3285 |
|
} |
3286 |
|
|
3287 |
+ |
#define FLAG_NREG3 -1 /* Set to -1 if any register will do */
/*
 * raw_flags_set_zero: force the host ZF from bit 6 of the value in s,
 * inverted (bit 6 is ZF's position in EFLAGS). PUSHF/POPF variant used
 * when LAHF/SAHF are unavailable, so any register works (FLAG_NREG3 -1).
 * Emits code that:
 *   tmp = s; pull EFLAGS into s; clear ZF; isolate bit 6 of tmp;
 *   invert it; OR it back; push s and POPF to restore the flags.
 * NOTE(review): clobbers tmp and uses the stack for the flags transfer.
 */
static __inline__ void raw_flags_set_zero(int s, int tmp)
{
	raw_mov_l_rr(tmp,s);          /* save the source value */
	raw_pushfl();                 /* EFLAGS onto the stack */
	raw_pop_l_r(s);               /* ... and into s */
	raw_and_l_ri(s,0xffffffbf);   /* clear ZF (bit 6) in the flags copy */
	raw_and_l_ri(tmp,0x00000040); /* isolate bit 6 of the saved value */
	raw_xor_l_ri(tmp,0x00000040); /* invert it */
	raw_or_l(s,tmp);              /* merge inverted bit into flags copy */
	raw_push_l_r(s);              /* push modified flags */
	raw_popfl();                  /* ... and load them into EFLAGS */
}
3300 |
|
#endif |
3301 |
|
|
3302 |
|
/* Apparently, there are enough instructions between flag store and |
3322 |
|
raw_mov_l_rm(target,(uintptr)live.state[r].mem); |
3323 |
|
} |
3324 |
|
|
3268 |
– |
#define NATIVE_FLAG_Z 0x40 /* ZF position (bit 6) in the x86 EFLAGS */
/*
 * raw_flags_set_zero (CMOV-based variant): set the host ZF according to
 * whether r is zero, using TEST + CMOVcc instead of bit masking.
 * Emits code that saves EFLAGS into f, clears ZF, materializes
 * NATIVE_FLAG_Z into r iff r was zero (via cmov), ORs it in, and
 * restores EFLAGS. Clobbers r and t.
 * NOTE(review): rendered as deleted in the upstream diff this chunk was
 * extracted from -- superseded by the three-argument-free variants above.
 */
static __inline__ void raw_flags_set_zero(int f, int r, int t)
{
	// FIXME: this is really suboptimal
	raw_pushfl();                    /* EFLAGS onto the stack */
	raw_pop_l_r(f);                  /* ... and into f */
	raw_and_l_ri(f,~NATIVE_FLAG_Z);  /* clear ZF in the flags copy */
	raw_test_l_rr(r,r);              /* sets ZF iff r == 0 */
	raw_mov_l_ri(r,0);               /* mov does not disturb flags */
	raw_mov_l_ri(t,NATIVE_FLAG_Z);
	raw_cmov_l_rr(r,t,NATIVE_CC_EQ); /* r = ZF ? NATIVE_FLAG_Z : 0 */
	raw_or_l(f,r);                   /* merge into the flags copy */
	raw_push_l_r(f);                 /* push modified flags */
	raw_popfl();                     /* ... and load them into EFLAGS */
}

3283 |
– |
|
3325 |
|
static __inline__ void raw_inc_sp(int off) |
3326 |
|
{ |
3327 |
|
raw_add_l_ri(ESP_INDEX,off); |