/* Registers usable as byte-sized operands, terminated by -1.
   Presumably x86 register numbers 0-3 (EAX..EBX), the registers with
   addressable low-byte subregisters — confirm against the allocator. */
uae_s8 can_byte[]={0,1,2,3,-1};
/* Registers usable as word-sized operands, terminated by -1.
   Note register 4 (presumably ESP in standard encoding order) is
   deliberately excluded — TODO confirm why against the allocator. */
uae_s8 can_word[]={0,1,2,3,5,6,7,-1};
/* One entry per x86 integer register (presumably EAX..EDI in encoding
   order): nonzero if the register must be treated as saved across
   calls by the register allocator. */
#if USE_OPTIMIZED_CALLS
/* Make sure interpretive core does not use cpuopti */
uae_u8 call_saved[]={0,0,0,1,1,1,1,1};
#else
/* cpuopti mutates instruction handlers to assume registers are saved
   by the caller */
uae_u8 call_saved[]={0,0,0,0,1,0,0,0};
#endif
88 |
|
|
89 |
|
/* This *should* be the same as call_saved. But: |
90 |
|
- We might not really know which registers are saved, and which aren't, |
467 |
|
|
468 |
|
/* Emit a 32-bit conditional move d <- s under condition code cc.
   On CPUs without CMOV support, emit an equivalent sequence: a short
   conditional jump on the inverted condition that skips an
   unconditional MOV. */
LOWFUNC(READ,NONE,3,raw_cmov_l_rr,(RW4 d, R4 s, IMM cc))
{
	if (have_cmov)
		CMOVLrr(cc, s, d);
	else { /* replacement using branch and mov */
#if defined(__x86_64__)
		/* have_cmov should never be false on x86-64 */
		write_log("x86-64 implementations are bound to have CMOV!\n");
		abort();
#endif
		/* Jump over the MOV when cc is false; the displacement (2)
		   is presumably the encoded length of the MOVLrr below
		   (opcode + ModRM) — must match exactly. */
		JCCSii(cc^1, 2);
		MOVLrr(s, d);
	}
}
LENDFUNC(READ,NONE,3,raw_cmov_l_rr,(RW4 d, R4 s, IMM cc))
482 |
|
|
634 |
|
|
635 |
|
/* Emit a 32-bit conditional load d <- [base + index*factor] under
   condition code cond.  Without CMOV support, emit a short jump on
   the inverted condition over an unconditional indexed load. */
LOWFUNC(NONE,READ,5,raw_cmov_l_rm_indexed,(W4 d, IMM base, R4 index, IMM factor, IMM cond))
{
	if (have_cmov)
		CMOVLmr(cond, base, X86_NOREG, index, factor, d);
	else { /* replacement using branch and mov */
#if defined(__x86_64__)
		/* have_cmov should never be false on x86-64 */
		write_log("x86-64 implementations are bound to have CMOV!\n");
		abort();
#endif
		/* Displacement (7) is presumably the encoded length of the
		   MOVLmr below (opcode + ModRM + SIB + disp32) — must match
		   exactly. */
		JCCSii(cond^1, 7);
		MOVLmr(base, X86_NOREG, index, factor, d);
	}
}
LENDFUNC(NONE,READ,5,raw_cmov_l_rm_indexed,(W4 d, IMM base, R4 index, IMM factor, IMM cond))
649 |
|
|
650 |
|
/* Emit a 32-bit conditional load d <- [mem] (absolute address) under
   condition code cond.  Without CMOV support, emit a short jump on
   the inverted condition over an unconditional load. */
LOWFUNC(NONE,READ,3,raw_cmov_l_rm,(W4 d, IMM mem, IMM cond))
{
	if (have_cmov)
		CMOVLmr(cond, mem, X86_NOREG, X86_NOREG, 1, d);
	else { /* replacement using branch and mov */
#if defined(__x86_64__)
		/* have_cmov should never be false on x86-64 */
		write_log("x86-64 implementations are bound to have CMOV!\n");
		abort();
#endif
		/* Displacement (6) is presumably the encoded length of the
		   MOVLmr below (opcode + ModRM + disp32) — must match
		   exactly. */
		JCCSii(cond^1, 6);
		MOVLmr(mem, X86_NOREG, X86_NOREG, 1, d);
	}
}
LENDFUNC(NONE,READ,3,raw_cmov_l_rm,(W4 d, IMM mem, IMM cond))
664 |
|
|
1120 |
|
|
1121 |
|
/* Emit a PUSHF: push the native flags register onto the stack.
   Uses the width-agnostic PUSHF() macro rather than PUSHFD() —
   presumably so the same code assembles correctly on both 32-bit
   and 64-bit targets. */
LOWFUNC(READ,WRITE,0,raw_pushfl,(void))
{
	PUSHF();
}
LENDFUNC(READ,WRITE,0,raw_pushfl,(void))
1126 |
|
|
1127 |
|
/* Emit a POPF: pop the native flags register from the stack.
   Counterpart of raw_pushfl; uses the width-agnostic POPF() macro
   rather than POPFD() — presumably for 32/64-bit portability. */
LOWFUNC(WRITE,READ,0,raw_popfl,(void))
{
	POPF();
}
LENDFUNC(WRITE,READ,0,raw_popfl,(void))
1132 |
|
|
3479 |
|
X86_PROCESSOR_K6, |
3480 |
|
X86_PROCESSOR_ATHLON, |
3481 |
|
X86_PROCESSOR_PENTIUM4, |
3482 |
+ |
X86_PROCESSOR_K8, |
3483 |
|
X86_PROCESSOR_max |
3484 |
|
}; |
3485 |
|
|
3490 |
|
"PentiumPro", |
3491 |
|
"K6", |
3492 |
|
"Athlon", |
3493 |
< |
"Pentium4" |
3493 |
> |
"Pentium4", |
3494 |
> |
"K8" |
3495 |
|
}; |
3496 |
|
|
3497 |
|
static struct ptt { |
3508 |
|
{ 16, 15, 16, 7, 16 }, |
3509 |
|
{ 32, 7, 32, 7, 32 }, |
3510 |
|
{ 16, 7, 16, 7, 16 }, |
3511 |
< |
{ 0, 0, 0, 0, 0 } |
3511 |
> |
{ 0, 0, 0, 0, 0 }, |
3512 |
> |
{ 16, 7, 16, 7, 16 } |
3513 |
|
}; |
3514 |
|
|
3515 |
|
static void |
3573 |
|
struct cpuinfo_x86 *c = &cpuinfo; |
3574 |
|
|
3575 |
|
/* Defaults */ |
3576 |
+ |
c->x86_processor = X86_PROCESSOR_max; |
3577 |
|
c->x86_vendor = X86_VENDOR_UNKNOWN; |
3578 |
|
c->cpuid_level = -1; /* CPUID not detected */ |
3579 |
|
c->x86_model = c->x86_mask = 0; /* So far unknown... */ |
3609 |
|
c->x86 = 4; |
3610 |
|
} |
3611 |
|
|
3612 |
+ |
/* AMD-defined flags: level 0x80000001 */ |
3613 |
+ |
uae_u32 xlvl; |
3614 |
+ |
cpuid(0x80000000, &xlvl, NULL, NULL, NULL); |
3615 |
+ |
if ( (xlvl & 0xffff0000) == 0x80000000 ) { |
3616 |
+ |
if ( xlvl >= 0x80000001 ) { |
3617 |
+ |
uae_u32 features; |
3618 |
+ |
cpuid(0x80000001, NULL, NULL, NULL, &features); |
3619 |
+ |
if (features & (1 << 29)) { |
3620 |
+ |
/* Assume x86-64 if long mode is supported */ |
3621 |
+ |
c->x86_processor = X86_PROCESSOR_K8; |
3622 |
+ |
} |
3623 |
+ |
} |
3624 |
+ |
} |
3625 |
+ |
|
3626 |
|
/* Canonicalize processor ID */ |
3577 |
– |
c->x86_processor = X86_PROCESSOR_max; |
3627 |
|
switch (c->x86) { |
3628 |
|
case 3: |
3629 |
|
c->x86_processor = X86_PROCESSOR_I386; |
3645 |
|
break; |
3646 |
|
case 15: |
3647 |
|
if (c->x86_vendor == X86_VENDOR_INTEL) { |
3648 |
< |
/* Assume any BranID >= 8 and family == 15 yields a Pentium 4 */ |
3648 |
> |
/* Assume any BrandID >= 8 and family == 15 yields a Pentium 4 */ |
3649 |
|
if (c->x86_brand_id >= 8) |
3650 |
|
c->x86_processor = X86_PROCESSOR_PENTIUM4; |
3651 |
|
} |
3652 |
+ |
if (c->x86_vendor == X86_VENDOR_AMD) { |
3653 |
+ |
/* Assume an Athlon processor if family == 15 and it was not |
3654 |
+ |
detected as an x86-64 so far */ |
3655 |
+ |
if (c->x86_processor == X86_PROCESSOR_max) |
3656 |
+ |
c->x86_processor = X86_PROCESSOR_ATHLON; |
3657 |
+ |
} |
3658 |
|
break; |
3659 |
|
} |
3660 |
|
if (c->x86_processor == X86_PROCESSOR_max) { |
3662 |
|
fprintf(stderr, " Family : %d\n", c->x86); |
3663 |
|
fprintf(stderr, " Model : %d\n", c->x86_model); |
3664 |
|
fprintf(stderr, " Mask : %d\n", c->x86_mask); |
3665 |
+ |
fprintf(stderr, " Vendor : %s [%d]\n", c->x86_vendor_id, c->x86_vendor); |
3666 |
|
if (c->x86_brand_id) |
3667 |
|
fprintf(stderr, " BrandID : %02x\n", c->x86_brand_id); |
3668 |
|
abort(); |
3669 |
|
} |
3670 |
|
|
3671 |
|
/* Have CMOV support? */ |
3672 |
< |
have_cmov = (c->x86_hwcap & (1 << 15)) && true; |
3672 |
> |
have_cmov = c->x86_hwcap & (1 << 15); |
3673 |
|
|
3674 |
|
/* Can the host CPU suffer from partial register stalls? */ |
3675 |
|
have_rat_stall = (c->x86_vendor == X86_VENDOR_INTEL); |