236 |
|
#else |
237 |
|
/* Byte-swap the low 16 bits of v ("rolw $8" rotates the word register by
 * 8, exchanging its two bytes) and store the result at *a.
 * NOTE(review): presumably the opposite-endian store path — this sits in
 * the !WORDS_BIGENDIAN branch; confirm against the enclosing #if. */
static inline void do_put_mem_word(uae_u16 *a, uae_u32 v)
{
    __asm__ ("rolw $8,%0" : "=r" (v) : "0" (v) : "cc");
    *a = v;
}
238 |
|
#endif |
239 |
+ |
#define HAVE_OPTIMIZED_BYTESWAP_32 |
240 |
+ |
/* bswap doesn't affect condition codes */ |
241 |
+ |
static inline uae_u32 do_byteswap_32(uae_u32 v) {__asm__ ("bswap %0" : "=r" (v) : "0" (v)); return v;} |
242 |
+ |
#define HAVE_OPTIMIZED_BYTESWAP_16 |
243 |
+ |
#ifdef X86_PPRO_OPT |
244 |
+ |
static inline uae_u32 do_byteswap_16(uae_u32 v) {__asm__ ("bswapl %0" : "=&r" (v) : "0" (v << 16) : "cc"); return v;} |
245 |
+ |
#else |
246 |
+ |
static inline uae_u32 do_byteswap_16(uae_u32 v) {__asm__ ("rolw $8,%0" : "=r" (v) : "0" (v) : "cc"); return v;} |
247 |
+ |
#endif |
248 |
|
|
249 |
|
#elif defined(CPU_CAN_ACCESS_UNALIGNED) |
250 |
|
|
266 |
|
|
267 |
|
#endif /* WORDS_BIGENDIAN */ |
268 |
|
|
269 |
+ |
#ifndef HAVE_OPTIMIZED_BYTESWAP_32 |
270 |
+ |
static inline uae_u32 do_byteswap_32(uae_u32 v) |
271 |
+ |
{ return (((v >> 24) & 0xff) | ((v >> 8) & 0xff00) | ((v & 0xff) << 24) | ((v & 0xff00) << 8)); } |
272 |
+ |
#endif |
273 |
+ |
|
274 |
+ |
#ifndef HAVE_OPTIMIZED_BYTESWAP_16 |
275 |
+ |
static inline uae_u32 do_byteswap_16(uae_u32 v) |
276 |
+ |
{ return (((v >> 8) & 0xff) | ((v & 0xff) << 8)); } |
277 |
+ |
#endif |
278 |
+ |
|
279 |
|
#define do_get_mem_byte(a) ((uae_u32)*((uae_u8 *)(a))) |
280 |
|
#define do_put_mem_byte(a, v) (*(uae_u8 *)(a) = (v)) |
281 |
|
|