Diffstat (limited to 'arch/parisc/include')
-rw-r--r--   arch/parisc/include/asm/atomic.h           6
-rw-r--r--   arch/parisc/include/asm/spinlock.h         8
-rw-r--r--   arch/parisc/include/asm/spinlock_types.h   4
3 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index 8bc9e96699b..3a4ea778d4b 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -27,18 +27,18 @@
# define ATOMIC_HASH_SIZE 4
# define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) (a))/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))
-extern raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
+extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
/* Can't use raw_spin_lock_irq because of #include problems, so
* this is the substitute */
#define _atomic_spin_lock_irqsave(l,f) do { \
- raw_spinlock_t *s = ATOMIC_HASH(l); \
+ arch_spinlock_t *s = ATOMIC_HASH(l); \
local_irq_save(f); \
__raw_spin_lock(s); \
} while(0)
#define _atomic_spin_unlock_irqrestore(l,f) do { \
- raw_spinlock_t *s = ATOMIC_HASH(l); \
+ arch_spinlock_t *s = ATOMIC_HASH(l); \
__raw_spin_unlock(s); \
local_irq_restore(f); \
} while(0)
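The hunk above only shows the type change, so it helps to see how these hashed locks are meant to be used. A minimal sketch, assuming the atomic_t definition and the macros from this same header; it mirrors the style of the parisc atomic helpers but is illustrative rather than a verbatim copy of the in-tree code:

/* Sketch only (not part of this patch): how an atomic op on parisc
 * takes one of the hashed arch_spinlock_t locks declared above. */
static __inline__ void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	/* Hash the address of v onto one of the ATOMIC_HASH_SIZE locks,
	 * disable interrupts, then take that bucket's arch_spinlock_t. */
	_atomic_spin_lock_irqsave(v, flags);

	v->counter = i;

	_atomic_spin_unlock_irqrestore(v, flags);
}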
diff --git a/arch/parisc/include/asm/spinlock.h b/arch/parisc/include/asm/spinlock.h
index fae03e136fa..69e8dca2674 100644
--- a/arch/parisc/include/asm/spinlock.h
+++ b/arch/parisc/include/asm/spinlock.h
@@ -5,7 +5,7 @@
#include <asm/processor.h>
#include <asm/spinlock_types.h>
-static inline int __raw_spin_is_locked(raw_spinlock_t *x)
+static inline int __raw_spin_is_locked(arch_spinlock_t *x)
{
volatile unsigned int *a = __ldcw_align(x);
return *a == 0;
@@ -15,7 +15,7 @@ static inline int __raw_spin_is_locked(raw_spinlock_t *x)
#define __raw_spin_unlock_wait(x) \
do { cpu_relax(); } while (__raw_spin_is_locked(x))
-static inline void __raw_spin_lock_flags(raw_spinlock_t *x,
+static inline void __raw_spin_lock_flags(arch_spinlock_t *x,
unsigned long flags)
{
volatile unsigned int *a;
@@ -33,7 +33,7 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *x,
mb();
}
-static inline void __raw_spin_unlock(raw_spinlock_t *x)
+static inline void __raw_spin_unlock(arch_spinlock_t *x)
{
volatile unsigned int *a;
mb();
@@ -42,7 +42,7 @@ static inline void __raw_spin_unlock(raw_spinlock_t *x)
mb();
}
-static inline int __raw_spin_trylock(raw_spinlock_t *x)
+static inline int __raw_spin_trylock(arch_spinlock_t *x)
{
volatile unsigned int *a;
int ret;
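The bodies of the lock and trylock routines are elided by the hunk context above. Below is a simplified sketch of the acquire loop, assuming the __ldcw()/__ldcw_align() helpers this port defines elsewhere; ldcw atomically loads a word and clears it, so a returned 0 means the lock was already held. The function name is hypothetical, and the real __raw_spin_lock_flags() additionally re-enables interrupts while spinning when the saved flags allow it:

static inline void sketch_arch_spin_lock(arch_spinlock_t *x)
{
	volatile unsigned int *a = __ldcw_align(x);

	while (__ldcw(a) == 0)		/* try to grab the lock word */
		while (*a == 0)		/* spin read-only until it looks free */
			cpu_relax();
	mb();				/* order the critical section after the acquire */
}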
diff --git a/arch/parisc/include/asm/spinlock_types.h b/arch/parisc/include/asm/spinlock_types.h
index 3f72f47cf4b..735caafb81f 100644
--- a/arch/parisc/include/asm/spinlock_types.h
+++ b/arch/parisc/include/asm/spinlock_types.h
@@ -9,10 +9,10 @@ typedef struct {
volatile unsigned int lock[4];
# define __RAW_SPIN_LOCK_UNLOCKED { { 1, 1, 1, 1 } }
#endif
-} raw_spinlock_t;
+} arch_spinlock_t;
typedef struct {
- raw_spinlock_t lock;
+ arch_spinlock_t lock;
volatile int counter;
} raw_rwlock_t;
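The four-word lock array in the non-PA20 case exists because the ldcw instruction on older PA-RISC requires a 16-byte aligned operand, so the structure reserves 16 bytes and the aligned word is picked at runtime. A sketch of that selection, in the spirit of the in-tree __ldcw_align() helper rather than an exact copy:

#define __PA_LDCW_ALIGNMENT	16

/* Round the address of lock[0] up to the next 16-byte boundary and
 * use that word as the actual ldcw target. */
#define __ldcw_align(a) ({					\
	unsigned long __ret = (unsigned long) &(a)->lock[0];	\
	__ret = (__ret + __PA_LDCW_ALIGNMENT - 1)		\
		& ~(__PA_LDCW_ALIGNMENT - 1);			\
	(volatile unsigned int *) __ret;			\
})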