/*
 * Done by Dietmar Hahn
 *
 * IA-64 spinlock primitives: acquisition via ia64_cmpxchg_acq_32()
 * (acquire semantics), release via an st4.rel store.
 */

#ifndef _ARCH_SPINLOCK_H_
#define _ARCH_SPINLOCK_H_

/* Values held in spinlock_t.slock.  SPIN_LOCK_UNUSED must be 0, since
 * _raw_spin_unlock() releases the lock by storing r0 (hardwired zero). */
#define SPIN_LOCK_UNUSED	0	/* lock is free */
#define SPIN_LOCK_USED		1	/* lock is held */

static inline void
_raw_spin_lock(spinlock_t *lck)
{
	uint32_t ret;
	/* Spin until the compare-and-exchange observes SPIN_LOCK_UNUSED
	 * and swaps in SPIN_LOCK_USED; the .acq form supplies the
	 * acquire ordering for the critical section. */
	do {
		ret = ia64_cmpxchg_acq_32(&(lck->slock),
					  SPIN_LOCK_UNUSED, SPIN_LOCK_USED);
	} while (ret == SPIN_LOCK_USED);
}

static inline void
_raw_spin_unlock(spinlock_t *lck)
{
	/* Store r0 (always 0 == SPIN_LOCK_UNUSED) into slock.  The
	 * st4.rel release store makes all prior critical-section stores
	 * visible before the lock appears free. */
	asm volatile ("st4.rel.nta [%0] = r0\n\t"
		      :: "r"(&(lck->slock)) : "memory");
}

static inline uint32_t
_raw_spin_trylock(spinlock_t *lck)
{
	uint32_t ret;
	/* Single acquisition attempt: swap UNUSED -> USED.  The attempt
	 * succeeded iff the previous value was SPIN_LOCK_UNUSED, i.e.
	 * the lock was free and we now hold it. */
	ret = ia64_cmpxchg_acq_32(&(lck->slock),
				  SPIN_LOCK_UNUSED, SPIN_LOCK_USED);
	return (ret == SPIN_LOCK_UNUSED);
}

#endif /* _ARCH_SPINLOCK_H_ */
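
/*
 * Usage sketch, assuming spinlock_t is declared elsewhere in this port
 * with a uint32_t `slock` member that is 0 (SPIN_LOCK_UNUSED) when the
 * lock is free, and that ia64_cmpxchg_acq_32() comes from the
 * accompanying atomic header:
 *
 *	spinlock_t lock = { 0 };	// starts out SPIN_LOCK_UNUSED
 *
 *	_raw_spin_lock(&lock);		// spins until acquired
 *	// ... critical section ...
 *	_raw_spin_unlock(&lock);	// release store sets slock back to 0
 *
 *	if (_raw_spin_trylock(&lock)) {	// nonzero return: lock was taken
 *		// ... critical section ...
 *		_raw_spin_unlock(&lock);
 *	}
 */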