extern uint64 GetTicks_usec(void);
extern void Delay_usec(uint32 usec);

/* Spinlocks */
#ifdef __GNUC__

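/* Each variant below implements testandset(p): atomically set *p nonzero
   and return its previous value, so a result of 0 means the caller just
   acquired the lock. */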
#if defined(__powerpc__) || defined(__ppc__)
#define HAVE_TEST_AND_SET 1
static inline int testandset(volatile int *p)
{
    int ret;
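    /* lwarx/stwcx. reservation loop: load *p, bail out if it is already
       nonzero, otherwise try to store 1; retry if the reservation was lost */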
    __asm__ __volatile__("0: lwarx   %0,0,%1\n"
                         "   xor.    %0,%3,%0\n"
                         "   bne     1f\n"
                         "   stwcx.  %2,0,%1\n"
                         "   bne-    0b\n"
                         "1: "
                         : "=&r" (ret)
                         : "r" (p), "r" (1), "r" (0)
                         : "cr0", "memory");
    return ret;
}
#endif

/* FIXME: SheepShaver occasionally hangs with these locks */
#if 0 && (defined(__i386__) || defined(__x86_64__))
#define HAVE_TEST_AND_SET 1
static inline int testandset(volatile int *p)
{
    long int ret;
    /* Note: the "xchg" instruction does not need a "lock" prefix */
    __asm__ __volatile__("xchgl %k0, %1"
                         : "=r" (ret), "=m" (*p)
                         : "0" (1), "m" (*p)
                         : "memory");
    return ret;
}
#endif

#ifdef __s390__
#define HAVE_TEST_AND_SET 1
static inline int testandset(volatile int *p)
{
    int ret;

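    /* cs (compare-and-swap): if *p still holds the expected old value in
       ret, store 1; otherwise ret is refreshed and jl retries the swap */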
    __asm__ __volatile__("0: cs   %0,%1,0(%2)\n"
                         "   jl   0b"
                         : "=&d" (ret)
                         : "r" (1), "a" (p), "0" (*p)
                         : "cc", "memory");
    return ret;
}
#endif

#ifdef __alpha__
#define HAVE_TEST_AND_SET 1
static inline int testandset(volatile int *p)
{
    int ret;
    unsigned long one;

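    /* ldl_l/stl_c load-locked/store-conditional pair: load the old value of
       *p into ret and try to store 1; if the store-conditional fails, the
       out-of-line branch in .subsection 2 retries from the top */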
    __asm__ __volatile__("0: mov    1,%2\n"
                         "   ldl_l  %0,%1\n"
                         "   stl_c  %2,%1\n"
                         "   beq    %2,1f\n"
                         ".subsection 2\n"
                         "1: br     0b\n"
                         ".previous"
                         : "=r" (ret), "=m" (*p), "=r" (one)
                         : "m" (*p));
    return ret;
}
#endif

#ifdef __sparc__
#define HAVE_TEST_AND_SET 1
static inline int testandset(volatile int *p)
{
    int ret;

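    /* ldstub atomically reads the byte at *p and sets it to 0xff;
       a nonzero result means the lock was already held */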
    __asm__ __volatile__("ldstub [%1], %0"
                         : "=r" (ret)
                         : "r" (p)
                         : "memory");

    return (ret ? 1 : 0);
}
#endif

#ifdef __arm__
#define HAVE_TEST_AND_SET 1
static inline int testandset(volatile int *p)
{
    register unsigned int ret;
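    /* swp atomically exchanges 1 with *p; note that swp dates from the
       pre-ARMv6 era and is deprecated on later cores */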
    __asm__ __volatile__("swp %0, %1, [%2]"
                         : "=r"(ret)
                         : "0"(1), "r"(p));

    return ret;
}
#endif

#endif /* __GNUC__ */

typedef volatile int spinlock_t;

static const spinlock_t SPIN_LOCK_UNLOCKED = 0;

#if HAVE_TEST_AND_SET
#define HAVE_SPINLOCKS 1
static inline void spin_lock(spinlock_t *lock)
{
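    /* Busy-wait until testandset reports the lock was free (returned 0) */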
    while (testandset(lock));
}

static inline void spin_unlock(spinlock_t *lock)
{
    *lock = 0;
}

static inline int spin_trylock(spinlock_t *lock)
{
    return !testandset(lock);
}
#else
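/* No atomic test-and-set is available: fall back to no-op locks,
   presumably for configurations where nothing actually contends */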
static inline void spin_lock(spinlock_t *lock)
{
}

static inline void spin_unlock(spinlock_t *lock)
{
}

static inline int spin_trylock(spinlock_t *lock)
{
    return 1;
}
#endif
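
/* Usage sketch (my_lock is a hypothetical caller-defined lock):
 *
 *     static spinlock_t my_lock = SPIN_LOCK_UNLOCKED;
 *
 *     spin_lock(&my_lock);
 *     ... critical section ...
 *     spin_unlock(&my_lock);
 */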

/* X11 display fast locks */
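/* Prefer spinlocks when available, fall back to a pthread mutex,
   and otherwise compile the lock operations away entirely */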
#ifdef HAVE_SPINLOCKS
#define X11_LOCK_TYPE spinlock_t
#define X11_LOCK_INIT SPIN_LOCK_UNLOCKED
#define XDisplayLock() spin_lock(&x_display_lock)
#define XDisplayUnlock() spin_unlock(&x_display_lock)
#elif defined(HAVE_PTHREADS)
#define X11_LOCK_TYPE pthread_mutex_t
#define X11_LOCK_INIT PTHREAD_MUTEX_INITIALIZER
#define XDisplayLock() pthread_mutex_lock(&x_display_lock)
#define XDisplayUnlock() pthread_mutex_unlock(&x_display_lock)
#else
#define XDisplayLock()
#define XDisplayUnlock()
#endif
#ifdef X11_LOCK_TYPE
extern X11_LOCK_TYPE x_display_lock;
#endif

#ifdef HAVE_PTHREADS
/* Centralized pthread attribute setup */
void Set_pthread_attr(pthread_attr_t *attr, int priority);