ViewVC Help
View File | Revision Log | Show Annotations | Revision Graph | Root Listing
root/cebix/BasiliskII/src/uae_cpu/compiler/compemu_support.cpp
(Generate patch)

Comparing BasiliskII/src/uae_cpu/compiler/compemu_support.cpp (file contents):
Revision 1.4 by gbeauche, 2002-09-18T11:41:56Z vs.
Revision 1.6 by gbeauche, 2002-10-01T09:39:55Z

# Line 74 | Line 74 | compop_func *nfcompfunctbl[65536];
74   cpuop_func *nfcpufunctbl[65536];
75   uae_u8* comp_pc_p;
76  
77 + // From newcpu.cpp
78 + extern bool quit_program;
79 +
80   // gb-- Extra data for Basilisk II/JIT
81   #if JIT_DEBUG
82   static bool             JITDebug                        = false;        // Enable runtime disassemblers through mon?
# Line 88 | Line 91 | static bool            lazy_flush                      = true;         // Fl
91   static bool             avoid_fpu                       = true;         // Flag: compile FPU instructions ?
92   static bool             have_cmov                       = false;        // target has CMOV instructions ?
93   static bool             have_rat_stall          = true;         // target has partial register stalls ?
94 + static bool             tune_alignment          = false;        // Tune code alignments for running CPU ?
95 + static int              align_loops                     = 32;           // Align the start of loops
96 + static int              align_jumps                     = 32;           // Align the start of jumps
97   static int              zero_fd                         = -1;
98   static int              optcount[10]            = {
99          10,             // How often a block has to be executed before it is translated
# Line 104 | Line 110 | struct op_properties {
110   };
111   static op_properties prop[65536];
112  
107 // gb-- Control Flow Predicates
108
113   static inline int end_block(uae_u32 opcode)
114   {
115          return (prop[opcode].cflow & fl_end_block);
116   }
117  
114 static inline bool may_trap(uae_u32 opcode)
115 {
116        return (prop[opcode].cflow & fl_trap);
117 }
118
118   uae_u8* start_pc_p;
119   uae_u32 start_pc;
120   uae_u32 current_block_pc_p;
# Line 4562 | Line 4561 | void compiler_init(void)
4561          raw_init_cpu();
4562          write_log("<JIT compiler> : target processor has CMOV instructions : %s\n", have_cmov ? "yes" : "no");
4563          write_log("<JIT compiler> : target processor can suffer from partial register stalls : %s\n", have_rat_stall ? "yes" : "no");
4564 +        write_log("<JIT compiler> : alignment for loops, jumps are %d, %d\n", align_loops, align_jumps);
4565          
4566          // Translation cache flush mechanism
4567          lazy_flush = PrefsFindBool("jitlazyflush");
# Line 5407 | Line 5407 | static __inline__ void create_popalls(vo
5407       registers before jumping back to the various get-out routines.
5408       This generates the code for it.
5409    */
5410 <  popall_do_nothing=current_compile_p;
5410 >  align_target(align_jumps);
5411 >  popall_do_nothing=get_target();
5412    for (i=0;i<N_REGS;i++) {
5413        if (need_to_preserve[i])
5414            raw_pop_l_r(i);
5415    }
5416    raw_jmp((uae_u32)do_nothing);
5416  align_target(32);
5417    
5418 +  align_target(align_jumps);
5419    popall_execute_normal=get_target();
5420    for (i=0;i<N_REGS;i++) {
5421        if (need_to_preserve[i])
5422            raw_pop_l_r(i);
5423    }
5424    raw_jmp((uae_u32)execute_normal);
5424  align_target(32);
5425  
5426 +  align_target(align_jumps);
5427    popall_cache_miss=get_target();
5428    for (i=0;i<N_REGS;i++) {
5429        if (need_to_preserve[i])
5430            raw_pop_l_r(i);
5431    }
5432    raw_jmp((uae_u32)cache_miss);
5432  align_target(32);
5433  
5434 +  align_target(align_jumps);
5435    popall_recompile_block=get_target();
5436    for (i=0;i<N_REGS;i++) {
5437        if (need_to_preserve[i])
5438            raw_pop_l_r(i);
5439    }
5440    raw_jmp((uae_u32)recompile_block);
5441 <  align_target(32);
5442 <  
5441 >
5442 >  align_target(align_jumps);
5443    popall_exec_nostats=get_target();
5444    for (i=0;i<N_REGS;i++) {
5445        if (need_to_preserve[i])
5446            raw_pop_l_r(i);
5447    }
5448    raw_jmp((uae_u32)exec_nostats);
5449 <  align_target(32);
5450 <  
5449 >
5450 >  align_target(align_jumps);
5451    popall_check_checksum=get_target();
5452    for (i=0;i<N_REGS;i++) {
5453        if (need_to_preserve[i])
5454            raw_pop_l_r(i);
5455    }
5456    raw_jmp((uae_u32)check_checksum);
5457 <  align_target(32);
5458 <  
5457 >
5458 >  align_target(align_jumps);
5459    current_compile_p=get_target();
5460   #else
5461    popall_exec_nostats=(void *)exec_nostats;
# Line 5463 | Line 5464 | static __inline__ void create_popalls(vo
5464    popall_recompile_block=(void *)recompile_block;
5465    popall_do_nothing=(void *)do_nothing;
5466    popall_check_checksum=(void *)check_checksum;
5466  pushall_call_handler=get_target();  
5467   #endif
5468  
5469    /* And now, the code to do the matching pushes and then jump
# Line 5479 | Line 5479 | static __inline__ void create_popalls(vo
5479    raw_mov_l_rm(r,(uae_u32)&regs.pc_p);
5480    raw_and_l_ri(r,TAGMASK);
5481    raw_jmp_m_indexed((uae_u32)cache_tags,r,4);
5482 +
5483 + #ifdef X86_ASSEMBLY
5484 +  align_target(align_jumps);
5485 +  m68k_compile_execute = (void (*)(void))get_target();
5486 +  for (i=N_REGS;i--;) {
5487 +          if (need_to_preserve[i])
5488 +                  raw_push_l_r(i);
5489 +  }
5490 +  align_target(align_loops);
5491 +  uae_u32 dispatch_loop = (uae_u32)get_target();
5492 +  r=REG_PC_TMP;
5493 +  raw_mov_l_rm(r,(uae_u32)&regs.pc_p);
5494 +  raw_and_l_ri(r,TAGMASK);
5495 +  raw_call_m_indexed((uae_u32)cache_tags,r,4);
5496 +  raw_cmp_l_mi((uae_u32)&regs.spcflags,0);
5497 +  raw_jcc_b_oponly(NATIVE_CC_EQ);
5498 +  emit_byte(dispatch_loop-((uae_u32)get_target()+1));
5499 +  raw_call((uae_u32)m68k_do_specialties);
5500 +  raw_test_l_rr(REG_RESULT,REG_RESULT);
5501 +  raw_jcc_b_oponly(NATIVE_CC_EQ);
5502 +  emit_byte(dispatch_loop-((uae_u32)get_target()+1));
5503 +  raw_cmp_b_mi((uae_u32)&quit_program,0);
5504 +  raw_jcc_b_oponly(NATIVE_CC_EQ);
5505 +  emit_byte(dispatch_loop-((uae_u32)get_target()+1));
5506 +  for (i=0;i<N_REGS;i++) {
5507 +          if (need_to_preserve[i])
5508 +                  raw_pop_l_r(i);
5509 +  }
5510 +  raw_ret();
5511 + #endif
5512   }
5513  
5514   static __inline__ void reset_lists(void)
# Line 5496 | Line 5526 | static void prepare_block(blockinfo* bi)
5526      int i;
5527  
5528      set_target(current_compile_p);
5529 <    align_target(32);
5529 >    align_target(align_jumps);
5530      bi->direct_pen=(cpuop_func *)get_target();
5531      raw_mov_l_rm(0,(uae_u32)&(bi->pc_p));
5532      raw_mov_l_mr((uae_u32)&regs.pc_p,0);
5533      raw_jmp((uae_u32)popall_execute_normal);
5534  
5535 <    align_target(32);
5535 >    align_target(align_jumps);
5536      bi->direct_pcc=(cpuop_func *)get_target();
5537      raw_mov_l_rm(0,(uae_u32)&(bi->pc_p));
5538      raw_mov_l_mr((uae_u32)&regs.pc_p,0);
5539      raw_jmp((uae_u32)popall_check_checksum);
5510
5511    align_target(32);
5540      current_compile_p=get_target();
5541  
5542      bi->deplist=NULL;
# Line 5920 | Line 5948 | static void compile_block(cpu_history* p
5948  
5949          bi->needed_flags=liveflags[0];
5950  
5951 <        align_target(32);
5951 >        align_target(align_loops);
5952          was_comp=0;
5953  
5954          bi->direct_handler=(cpuop_func *)get_target();
# Line 6095 | Line 6123 | static void compile_block(cpu_history* p
6123                  raw_jmp((uae_u32)popall_do_nothing);
6124                  create_jmpdep(bi,0,tba,t1);
6125  
6126 <                align_target(16);
6126 >                align_target(align_jumps);
6127                  /* not-predicted outcome */
6128                  *branchadd=(uae_u32)get_target()-((uae_u32)branchadd+4);
6129                  live=tmp; /* Ouch again */
# Line 6201 | Line 6229 | static void compile_block(cpu_history* p
6229   #endif
6230          
6231          log_dump();
6232 <        align_target(32);
6232 >        align_target(align_jumps);
6233  
6234          /* This is the non-direct handler */
6235          bi->handler=
# Line 6217 | Line 6245 | static void compile_block(cpu_history* p
6245  
6246          raw_jmp((uae_u32)bi->direct_handler);
6247  
6220        align_target(32);
6248          current_compile_p=get_target();
6222
6249          raise_in_cl_list(bi);
6250          
6251          /* We will flush soon, anyway, so let's do it now */
# Line 6245 | Line 6271 | void exec_nostats(void)
6271   {
6272          for (;;)  {
6273                  uae_u32 opcode = GET_OPCODE;
6248 #ifdef X86_ASSEMBLY__disable
6249                __asm__ __volatile__("\tpushl %%ebp\n\tcall *%%ebx\n\tpopl %%ebp" /* FIXME */
6250                                                         : : "b" (cpufunctbl[opcode]), "a" (opcode)
6251                                                         : "%edx", "%ecx", "%esi", "%edi",  "%ebp", "memory", "cc");
6252 #else
6274                  (*cpufunctbl[opcode])(opcode);
6254 #endif
6275                  if (end_block(opcode) || SPCFLAGS_TEST(SPCFLAG_ALL)) {
6276                          return; /* We will deal with the spcflags in the caller */
6277                  }
# Line 6276 | Line 6296 | void execute_normal(void)
6296   #if FLIGHT_RECORDER
6297                          m68k_record_step(m68k_getpc());
6298   #endif
6279 #ifdef X86_ASSEMBLY__disable
6280                        __asm__ __volatile__("\tpushl %%ebp\n\tcall *%%ebx\n\tpopl %%ebp" /* FIXME */
6281                                                                 : : "b" (cpufunctbl[opcode]), "a" (opcode)
6282                                                                 : "%edx", "%ecx", "%esi", "%edi", "%ebp", "memory", "cc");
6283 #else
6299                          (*cpufunctbl[opcode])(opcode);
6285 #endif
6300                          if (end_block(opcode) || SPCFLAGS_TEST(SPCFLAG_ALL) || blocklen>=MAXRUN) {
6301                                  compile_block(pc_hist, blocklen);
6302                                  return; /* We will deal with the spcflags in the caller */
# Line 6295 | Line 6309 | void execute_normal(void)
6309  
6310   typedef void (*compiled_handler)(void);
6311  
6312 + #ifdef X86_ASSEMBLY
6313 + void (*m68k_compile_execute)(void) = NULL;
6314 + #else
6315   void m68k_do_compile_execute(void)
6316   {
6317          for (;;) {
6301 #ifdef X86_ASSEMBLY
6302                __asm__ __volatile__("\tpushl %%ebp\n\tcall *%%ebx\n\tpopl %%ebp" /* FIXME */
6303                                                         : : "b" (cache_tags[cacheline(regs.pc_p)].handler)
6304                                                         : "%edx", "%ecx", "%eax", "%esi", "%edi", "%ebp", "memory", "cc");
6305 #else
6318                  ((compiled_handler)(pushall_call_handler))();
6307 #endif
6319                  /* Whenever we return from that, we should check spcflags */
6320                  if (SPCFLAGS_TEST(SPCFLAG_ALL)) {
6321                          if (m68k_do_specialties ())
# Line 6312 | Line 6323 | void m68k_do_compile_execute(void)
6323                  }
6324          }
6325   }
6326 + #endif

Diff Legend

  Removed lines (present only in revision 1.4)
+ Added lines (present only in revision 1.6)
< Changed lines (old text, from revision 1.4)
> Changed lines (new text, from revision 1.6)