ViewVC Help
View File | Revision Log | Show Annotations | Revision Graph | Root Listing
root/cebix/SheepShaver/src/Unix/sysdeps.h
(Generate patch)

Comparing SheepShaver/src/Unix/sysdeps.h (file contents):
Revision 1.19 by gbeauche, 2003-12-31T18:16:55Z vs.
Revision 1.23 by gbeauche, 2004-01-26T13:52:31Z

# Line 1 | Line 1
1   /*
2   *  sysdeps.h - System dependent definitions for Linux
3   *
4 < *  SheepShaver (C) 1997-2002 Christian Bauer and Marc Hellwig
4 > *  SheepShaver (C) 1997-2004 Christian Bauer and Marc Hellwig
5   *
6   *  This program is free software; you can redistribute it and/or modify
7   *  it under the terms of the GNU General Public License as published by
# Line 139 | Line 139 | typedef int64 intptr;
139   **/
140  
141   #if defined(__GNUC__)
142 < #if defined(__x86_64__)
142 > #if defined(__x86_64__) || defined(__i386__)
143   // Linux/AMD64 currently has no asm optimized bswap_32() in <byteswap.h>
144   #define opt_bswap_32 do_opt_bswap_32
145   static inline uint32 do_opt_bswap_32(uint32 x)
# Line 184 | Line 184 | static inline uint32 generic_bswap_32(ui
184                    ((x & 0x000000ff) << 24) );
185   }
186  
187 + #if defined(__i386__)
188 + #define opt_bswap_64 do_opt_bswap_64
189 + static inline uint64 do_opt_bswap_64(uint64 x)
190 + {
191 +  return (bswap_32(x >> 32) | (((uint64)bswap_32((uint32)x)) << 32));
192 + }
193 + #endif
194 +
195   #ifdef  opt_bswap_64
196   #undef  bswap_64
197   #define bswap_64 opt_bswap_64
# Line 217 | Line 225 | static inline uint64 tswap64(uint64 x) {
225   // spin locks
226   #ifdef __GNUC__
227  
228 < #ifdef __powerpc__
228 > #if defined(__powerpc__) || defined(__ppc__)
229   #define HAVE_TEST_AND_SET 1
230 < static inline int testandset(int *p)
230 > static inline int testandset(volatile int *p)
231   {
232          int ret;
233 <        __asm__ __volatile__("0:    lwarx %0,0,%1 ;"
234 <                                                 "      xor. %0,%3,%0;"
235 <                                                 "      bne 1f;"
236 <                                                 "      stwcx. %2,0,%1;"
237 <                                                 "      bne- 0b;"
233 >        __asm__ __volatile__("0:    lwarx       %0,0,%1\n"
234 >                                                 "      xor.    %0,%3,%0\n"
235 >                                                 "      bne             1f\n"
236 >                                                 "      stwcx.  %2,0,%1\n"
237 >                                                 "      bne-    0b\n"
238                                                   "1:    "
239                                                   : "=&r" (ret)
240                                                   : "r" (p), "r" (1), "r" (0)
# Line 237 | Line 245 | static inline int testandset(int *p)
245  
246   #ifdef __i386__
247   #define HAVE_TEST_AND_SET 1
248 < static inline int testandset(int *p)
248 > static inline int testandset(volatile int *p)
249   {
250 <        char ret;
250 >        int ret;
251          long int readval;
252 <        
253 <        __asm__ __volatile__("lock; cmpxchgl %3, %1; sete %0"
254 <                                                 : "=q" (ret), "=m" (*p), "=a" (readval)
255 <                                                 : "r" (1), "m" (*p), "a" (0)
252 >        /* Note: the "xchg" instruction does not need a "lock" prefix */
253 >        __asm__ __volatile__("xchgl %0, %1"
254 >                                                 : "=r" (ret), "=m" (*p), "=a" (readval)
255 >                                                 : "0" (1), "m" (*p)
256                                                   : "memory");
257          return ret;
258   }
# Line 252 | Line 260 | static inline int testandset(int *p)
260  
261   #ifdef __s390__
262   #define HAVE_TEST_AND_SET 1
263 < static inline int testandset(int *p)
263 > static inline int testandset(volatile int *p)
264   {
265          int ret;
266  
# Line 267 | Line 275 | static inline int testandset(int *p)
275  
276   #ifdef __alpha__
277   #define HAVE_TEST_AND_SET 1
278 < static inline int testandset(int *p)
278 > static inline int testandset(volatile int *p)
279   {
280          int ret;
281          unsigned long one;
# Line 287 | Line 295 | static inline int testandset(int *p)
295  
296   #ifdef __sparc__
297   #define HAVE_TEST_AND_SET 1
298 < static inline int testandset(int *p)
298 > static inline int testandset(volatile int *p)
299   {
300          int ret;
301  
# Line 302 | Line 310 | static inline int testandset(int *p)
310  
311   #ifdef __arm__
312   #define HAVE_TEST_AND_SET 1
313 < static inline int testandset(int *p)
313 > static inline int testandset(volatile int *p)
314   {
315          register unsigned int ret;
316          __asm__ __volatile__("swp %0, %1, [%2]"
# Line 317 | Line 325 | static inline int testandset(int *p)
325  
326   #if HAVE_TEST_AND_SET
327   #define HAVE_SPINLOCKS 1
328 < typedef int spinlock_t;
328 > typedef volatile int spinlock_t;
329  
330   static const spinlock_t SPIN_LOCK_UNLOCKED = 0;
331  

Diff Legend

- Removed lines
+ Added lines
< Changed lines (left side: older revision 1.19)
> Changed lines (right side: newer revision 1.23)