 111     #endif
 112     }
 113
 114  <  static unsigned long REGPARAM2 op_illg_1 (uae_u32 opcode) REGPARAM;
 114  >  static void REGPARAM2 op_illg_1 (uae_u32 opcode) REGPARAM;
 115
 116  <  static unsigned long REGPARAM2 op_illg_1 (uae_u32 opcode)
 116  >  static void REGPARAM2 op_illg_1 (uae_u32 opcode)
 117     {
 118         op_illg (cft_map (opcode));
 119  -      return 4;
 119     }
 120
 121     static void build_cpufunctbl (void)
 122     {
 123         int i;
 124         unsigned long opcode;
 125  <      int cpu_level = (FPUType ? 3 : CPUType >= 2 ? 2 : CPUType == 1 ? 1 : 0);
 126  <      struct cputbl *tbl = (cpu_level == 3 ? op_smalltbl_0
 127  <                            : cpu_level == 2 ? op_smalltbl_1
 128  <                            : cpu_level == 1 ? op_smalltbl_2
 129  <                            : op_smalltbl_3);
 125  >      int cpu_level = 0;      // 68000 (default)
 126  >      if (CPUType == 4)
 127  >          cpu_level = 4;      // 68040 with FPU
 128  >      else {
 129  >          if (FPUType)
 130  >              cpu_level = 3;  // 68020 with FPU
 131  >          else if (CPUType >= 2)
 132  >              cpu_level = 2;  // 68020
 133  >          else if (CPUType == 1)
 134  >              cpu_level = 1;
 135  >      }
 136  >      struct cputbl *tbl = (
 137  >          cpu_level == 4 ? op_smalltbl_0
 138  >          : cpu_level == 3 ? op_smalltbl_1
 139  >          : cpu_level == 2 ? op_smalltbl_2
 140  >          : cpu_level == 1 ? op_smalltbl_3
 141  >          : op_smalltbl_4);
 142
 143         for (opcode = 0; opcode < 65536; opcode++)
 144             cpufunctbl[cft_map (opcode)] = op_illg_1;
 721         regs.spcflags |= SPCFLAG_INT;
 722     }
 723
 724  <  static int caar, cacr;
 724  >  static int caar, cacr, tc, itt0, itt1, dtt0, dtt1;
 725
 726     void m68k_move2c (int regno, uae_u32 *regp)
 727     {
 732         case 0: regs.sfc = *regp & 7; break;
 733         case 1: regs.dfc = *regp & 7; break;
 734         case 2: cacr = *regp & 0x3; break;      /* ignore C and CE */
 735  +      case 3: tc = *regp & 0xc000; break;
 736  +      case 4: itt0 = *regp & 0xffffe364; break;
 737  +      case 5: itt1 = *regp & 0xffffe364; break;
 738  +      case 6: dtt0 = *regp & 0xffffe364; break;
 739  +      case 7: dtt1 = *regp & 0xffffe364; break;
 740         case 0x800: regs.usp = *regp; break;
 741         case 0x801: regs.vbr = *regp; break;
 742         case 0x802: caar = *regp &0xfc; break;
 757         case 0: *regp = regs.sfc; break;
 758         case 1: *regp = regs.dfc; break;
 759         case 2: *regp = cacr; break;
 760  +      case 3: *regp = tc; break;
 761  +      case 4: *regp = itt0; break;
 762  +      case 5: *regp = itt1; break;
 763  +      case 6: *regp = dtt0; break;
 764  +      case 7: *regp = dtt1; break;
 765         case 0x800: *regp = regs.usp; break;
 766         case 0x801: *regp = regs.vbr; break;
 767         case 0x802: *regp = caar; break;
1050         regs.fpcr = regs.fpsr = regs.fpiar = 0;
1051     }
1052
1053  <  unsigned long REGPARAM2 op_illg (uae_u32 opcode)
1053  >  void REGPARAM2 op_illg (uae_u32 opcode)
1054     {
1055         uaecptr pc = m68k_getpc ();
1056
1064         if (opcode == M68K_EXEC_RETURN) {
1065             regs.spcflags |= SPCFLAG_BRK;
1066             quit_program = 1;
1067  <          return 4;
1067  >          return;
1068         }
1069
1070         // Call EMUL_OP opcode
1083             MakeFromSR();
1084             m68k_incpc(2);
1085             fill_prefetch_0 ();
1086  <          return 4;
1086  >          return;
1087         }
1088
1089         if ((opcode & 0xF000) == 0xA000) {
1090             Exception(0xA,0);
1091  <          return 4;
1091  >          return;
1092         }
1093
1094  +      // write_log ("Illegal instruction: %04x at %08lx\n", opcode, pc);
1095  +
1096         if ((opcode & 0xF000) == 0xF000) {
1097             Exception(0xB,0);
1098  <          return 4;
1098  >          return;
1099         }
1100
1101         write_log ("Illegal instruction: %04x at %08lx\n", opcode, pc);
1102  +
1103         Exception (4,0);
1080  -      return 4;
1104     }
1105
1106     void mmu_op(uae_u32 opcode, uae_u16 extra)
1195
1196     static void m68k_run_1 (void)
1197     {
1198  <      for (;;) {
1199  <          int cycles;
1200  <          uae_u32 opcode = GET_OPCODE;
1201  <  #if 0
1202  <          if (get_ilong (0) != do_get_mem_long (&regs.prefetch)) {
1203  <              debugging = 1;
1204  <              return;
1182  <          }
1183  <  #endif
1184  <          /* assert (!regs.stopped && !(regs.spcflags & SPCFLAG_STOP)); */
1185  <          /* regs_backup[backup_pointer = (backup_pointer + 1) % 16] = regs;*/
1186  <  #if COUNT_INSTRS == 2
1187  <          if (table68k[cft_map (opcode)].handler != -1)
1188  <              instrcount[table68k[cft_map (opcode)].handler]++;
1189  <  #elif COUNT_INSTRS == 1
1190  <          instrcount[opcode]++;
1191  <  #endif
1192  <  #if defined(X86_ASSEMBLYxxx)
1193  <          __asm__ __volatile__("\tcall *%%ebx"
1194  <                               : "=&a" (cycles) : "b" (cpufunctbl[opcode]), "0" (opcode)
1195  <                               : "%edx", "%ecx",
1196  <                               "%esi", "%edi", "%ebp", "memory", "cc");
1197  <  #else
1198  <          cycles = (*cpufunctbl[opcode])(opcode);
1199  <  #endif
1200  <          /*n_insns++;*/
1201  <          if (regs.spcflags) {
1202  <              if (do_specialties ())
1203  <                  return;
1198  >      for (;;) {
1199  >          uae_u32 opcode = GET_OPCODE;
1200  >          (*cpufunctbl[opcode])(opcode);
1201  >          if (regs.spcflags) {
1202  >              if (do_specialties())
1203  >                  return;
1204  >          }
1205         }
1205  -      }
1206     }
1207
1208  -  #ifdef X86_ASSEMBLYxxx
1209  -  static __inline__ void m68k_run1 (void)
1210  -  {
1211  -      /* Work around compiler bug: GCC doesn't push %ebp in m68k_run_1. */
1212  -      __asm__ __volatile__ ("pushl %%ebp\n\tcall *%0\n\tpopl %%ebp" : : "r" (m68k_run_1) : "%eax", "%edx", "%ecx", "memory", "cc");
1213  -  }
1214  -  #else
1208     #define m68k_run1 m68k_run_1
1216  -  #endif
1209
1210     int in_m68k_go = 0;
1211