ViewVC Help
View File | Revision Log | Show Annotations | Revision Graph | Root Listing
root/cebix/BasiliskII/src/Windows/sysdeps.h
(Generate patch)

Comparing BasiliskII/src/Windows/sysdeps.h (file contents):
Revision 1.1 by gbeauche, 2005-03-17T00:24:25Z vs.
Revision 1.2 by gbeauche, 2006-03-28T06:58:30Z

# Line 152 | Line 152 | typedef int64 tm_time_t;
152   #define uae_u64 uint64
153   typedef uae_u32 uaecptr;
154  
/* CPU_CAN_ACCESS_UNALIGNED marks host CPUs known to tolerate unaligned
   loads/stores; the do_get/put_mem_* helpers further down key off it. */
155 /* Alignment restrictions */
156 #if defined(__i386__) || defined(__powerpc__) || defined(__m68k__) || defined(__x86_64__)
157 # define CPU_CAN_ACCESS_UNALIGNED
158 #endif
159
/* Microsecond-granularity timing; implemented in the platform layer
   (definitions not visible in this header). */
155   /* Timing functions */
156   extern void timer_init(void);
157   extern uint64 GetTicks_usec(void);
# Line 164 | Line 159 | extern void Delay_usec(uint32 usec);
159  
160   /* Spinlocks */
161   #ifdef __GNUC__
167
/* Atomic test-and-set for PowerPC, built on the lwarx/stwcx. (load-reserved /
   store-conditional) pair.  Loads *p under reservation; "xor. %0,%3,%0" XORs
   the loaded value with 0 (so ret is the old value) and sets cr0, letting
   "bne 1f" bail out early when the lock word was already non-zero.  Otherwise
   it store-conditionally writes 1 and retries from label 0 ("bne- 0b") if the
   reservation was lost.  Returns 0 on successful acquisition, non-zero if the
   lock was already held.  Clobbers cr0; the "memory" clobber stops the
   compiler from caching memory accesses across the lock operation. */
168 #if defined(__powerpc__) || defined(__ppc__)
169 #define HAVE_TEST_AND_SET 1
170 static inline int testandset(volatile int *p)
171 {
172        int ret;
173        __asm__ __volatile__("0:    lwarx       %0,0,%1\n"
174                                                 "      xor.    %0,%3,%0\n"
175                                                 "      bne             1f\n"
176                                                 "      stwcx.  %2,0,%1\n"
177                                                 "      bne-    0b\n"
178                                                 "1:    "
179                                                 : "=&r" (ret)
180                                                 : "r" (p), "r" (1), "r" (0)
181                                                 : "cr0", "memory");
182        return ret;
183 }
184 #endif
185
186 #if defined(__i386__) || defined(__x86_64__)
162   #define HAVE_TEST_AND_SET 1
163   static inline int testandset(volatile int *p)
164   {
# Line 195 | Line 170 | static inline int testandset(volatile in
170                                                   : "memory");
171          return ret;
172   }
198 #endif
199
/* Atomic test-and-set for Alpha, using the ldl_l/stl_c (load-locked /
   store-conditional) pair.  Each iteration sets the scratch register
   "one" to 1, load-locks the old value of *p into ret, then
   store-conditionally writes 1 back; stl_c leaves its success flag in
   "one", so "beq %2,1f" branches to the out-of-line retry stub (placed
   in .subsection 2 to keep the hot path straight) when the store lost
   the lock reservation.  Returns the previous value of *p: 0 means the
   lock was acquired.  NOTE(review): no "memory" clobber is declared
   here, unlike the PowerPC variant — the "=m"/"m" (*p) operands cover
   *p itself, but compiler reordering of other memory accesses is not
   obviously fenced; verify against the callers' expectations. */
200 #ifdef __alpha__
201 #define HAVE_TEST_AND_SET 1
202 static inline int testandset(volatile int *p)
203 {
204        int ret;
205        unsigned long one;
206
207        __asm__ __volatile__("0:        mov 1,%2\n"
208                                                 "      ldl_l %0,%1\n"
209                                                 "      stl_c %2,%1\n"
210                                                 "      beq %2,1f\n"
211                                                 ".subsection 2\n"
212                                                 "1:    br 0b\n"
213                                                 ".previous"
214                                                 : "=r" (ret), "=m" (*p), "=r" (one)
215                                                 : "m" (*p));
216        return ret;
217 }
218 #endif
219
173   #endif /* __GNUC__ */
174  
175   typedef volatile int spinlock_t;
# Line 254 | Line 207 | static inline int spin_trylock(spinlock_
207   }
208   #endif
209  
/* UAE memory accessors: read/write 68k-format (big-endian) longs and words
   in host memory.  On a big-endian host no byte swapping is needed; the
   only question is whether the CPU may dereference unaligned pointers. */
257 /* UAE CPU defines */
258 #ifdef WORDS_BIGENDIAN
259
260 #ifdef CPU_CAN_ACCESS_UNALIGNED
261
/* Direct loads/stores: host byte order already matches the 68k's. */
262 /* Big-endian CPUs which can do unaligned accesses */
263 static inline uae_u32 do_get_mem_long(uae_u32 *a) {return *a;}
264 static inline uae_u32 do_get_mem_word(uae_u16 *a) {return *a;}
265 static inline void do_put_mem_long(uae_u32 *a, uae_u32 v) {*a = v;}
266 static inline void do_put_mem_word(uae_u16 *a, uae_u32 v) {*a = v;}
267
268 #else /* CPU_CAN_ACCESS_UNALIGNED */
269
/* Byte-at-a-time assembly/disassembly avoids any unaligned wide access;
   b[0] is the most significant byte, matching big-endian layout. */
270 /* Big-endian CPUs which can not do unaligned accesses (this is not the most efficient way to do this...) */
271 static inline uae_u32 do_get_mem_long(uae_u32 *a) {uint8 *b = (uint8 *)a; return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3];}
272 static inline uae_u32 do_get_mem_word(uae_u16 *a) {uint8 *b = (uint8 *)a; return (b[0] << 8) | b[1];}
273 static inline void do_put_mem_long(uae_u32 *a, uae_u32 v) {uint8 *b = (uint8 *)a; b[0] = v >> 24; b[1] = v >> 16; b[2] = v >> 8; b[3] = v;}
274 static inline void do_put_mem_word(uae_u16 *a, uae_u32 v) {uint8 *b = (uint8 *)a; b[0] = v >> 8; b[1] = v;}
275
276 #endif /* CPU_CAN_ACCESS_UNALIGNED */
277
280 #if defined(__i386__) || defined(__x86_64__)
281
210   /* Intel x86 */
211   #define X86_PPRO_OPT
212   static inline uae_u32 do_get_mem_long(uae_u32 *a) {uint32 retval; __asm__ ("bswap %0" : "=r" (retval) : "0" (*a) : "cc"); return retval;}
# Line 305 | Line 233 | static inline uae_u32 do_byteswap_16_g(u
233   static inline uae_u32 do_byteswap_16_g(uae_u32 v) {__asm__ ("rolw $8,%0" : "=r" (v) : "0" (v) : "cc"); return v;}
234   #endif
235  
308 #elif defined(CPU_CAN_ACCESS_UNALIGNED)
309
310 /* Other little-endian CPUs which can do unaligned accesses */
311 static inline uae_u32 do_get_mem_long(uae_u32 *a) {uint32 x = *a; return (x >> 24) | (x >> 8) & 0xff00 | (x << 8) & 0xff0000 | (x << 24);}
312 static inline uae_u32 do_get_mem_word(uae_u16 *a) {uint16 x = *a; return (x >> 8) | (x << 8);}
313 static inline void do_put_mem_long(uae_u32 *a, uae_u32 v) {*a = (v >> 24) | (v >> 8) & 0xff00 | (v << 8) & 0xff0000 | (v << 24);}
314 static inline void do_put_mem_word(uae_u16 *a, uae_u32 v) {*a = (v >> 8) | (v << 8);}
315
316 #else /* CPU_CAN_ACCESS_UNALIGNED */
317
318 /* Other little-endian CPUs which can not do unaligned accesses (this needs optimization) */
319 static inline uae_u32 do_get_mem_long(uae_u32 *a) {uint8 *b = (uint8 *)a; return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3];}
320 static inline uae_u32 do_get_mem_word(uae_u16 *a) {uint8 *b = (uint8 *)a; return (b[0] << 8) | b[1];}
321 static inline void do_put_mem_long(uae_u32 *a, uae_u32 v) {uint8 *b = (uint8 *)a; b[0] = v >> 24; b[1] = v >> 16; b[2] = v >> 8; b[3] = v;}
322 static inline void do_put_mem_word(uae_u16 *a, uae_u32 v) {uint8 *b = (uint8 *)a; b[0] = v >> 8; b[1] = v;}
323
324 #endif /* CPU_CAN_ACCESS_UNALIGNED */
325
326 #endif /* WORDS_BIGENDIAN */
327
/* Portable 32-bit byte reversal, used only when no architecture-specific
   optimized version was defined above: each byte of v is masked out and
   moved to the mirror-image position. */
236   #ifndef HAVE_OPTIMIZED_BYTESWAP_32
237   static inline uae_u32 do_byteswap_32_g(uae_u32 v)
238          { return (((v >> 24) & 0xff) | ((v >> 8) & 0xff00) | ((v & 0xff) << 24) | ((v & 0xff00) << 8)); }

Diff Legend

− Removed lines
+ Added lines
< Changed lines
> Changed lines