[PATCH 7/7] MIPS: atomic.h: Reformat to fit in 79 columns
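
Realign the line-continuation backslashes in the ATOMIC_OP, ATOMIC_OP_RETURN,
ATOMIC_OPS macro definitions and their 64-bit counterparts so that all lines,
including the embedded asm templates, fit within 79 columns, and drop the
stray trailing backslash left after the closing brace of ATOMIC_OP and
ATOMIC64_OP.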

Signed-off-by: Maciej W. Rozycki <macro@xxxxxxxxxxxxxxxx>
---
Hi,

 No comment.  Please apply.

  Maciej
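
 For reference, a rough sketch of what e.g. ATOMIC_OP(add, +=, addu)
expands to on the !kernel_uses_llsc fallback path, reconstructed from the
macro body in the diff below (illustrative only, not part of the change):

/*
 * Hypothetical expansion of ATOMIC_OP(add, +=, addu), fallback branch
 * only: with no LL/SC available, the update is made atomic by masking
 * interrupts around a plain read-modify-write of v->counter.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	v->counter += i;		/* c_op is "+=" here */
	raw_local_irq_restore(flags);
}

 The reformatting does not change any of the generated code; only the
placement of the continuation backslashes and the whitespace inside the
macro bodies is adjusted.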

linux-mips-atomic-format.diff
Index: linux-3.18-rc4-malta/arch/mips/include/asm/atomic.h
===================================================================
--- linux-3.18-rc4-malta.orig/arch/mips/include/asm/atomic.h	2014-11-15 05:56:06.000000000 +0000
+++ linux-3.18-rc4-malta/arch/mips/include/asm/atomic.h	2014-11-15 06:06:58.551778827 +0000
@@ -41,97 +41,97 @@
  */
 #define atomic_set(v, i)		((v)->counter = (i))
 
-#define ATOMIC_OP(op, c_op, asm_op)						\
-static __inline__ void atomic_##op(int i, atomic_t * v)				\
-{										\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
-		int temp;							\
-										\
-		__asm__ __volatile__(						\
-		"	.set	arch=r4000				\n"	\
-		"1:	ll	%0, %1		# atomic_" #op "	\n"	\
-		"	" #asm_op " %0, %2				\n"	\
-		"	sc	%0, %1					\n"	\
-		"	beqzl	%0, 1b					\n"	\
-		"	.set	mips0					\n"	\
-		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)		\
-		: "Ir" (i));							\
-	} else if (kernel_uses_llsc) {						\
-		int temp;							\
-										\
-		do {								\
-			__asm__ __volatile__(					\
-			"	.set	arch=r4000			\n"	\
-			"	ll	%0, %1		# atomic_" #op "\n"	\
-			"	" #asm_op " %0, %2			\n"	\
-			"	sc	%0, %1				\n"	\
-			"	.set	mips0				\n"	\
-			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	\
-			: "Ir" (i));						\
-		} while (unlikely(!temp));					\
-	} else {								\
-		unsigned long flags;						\
-										\
-		raw_local_irq_save(flags);					\
-		v->counter c_op i;						\
-		raw_local_irq_restore(flags);					\
-	}									\
-}										\
+#define ATOMIC_OP(op, c_op, asm_op)					      \
+static __inline__ void atomic_##op(int i, atomic_t * v)			      \
+{									      \
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+		int temp;						      \
+									      \
+		__asm__ __volatile__(					      \
+		"	.set	arch=r4000				\n"   \
+		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
+		"	" #asm_op " %0, %2				\n"   \
+		"	sc	%0, %1					\n"   \
+		"	beqzl	%0, 1b					\n"   \
+		"	.set	mips0					\n"   \
+		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	      \
+		: "Ir" (i));						      \
+	} else if (kernel_uses_llsc) {					      \
+		int temp;						      \
+									      \
+		do {							      \
+			__asm__ __volatile__(				      \
+			"	.set	arch=r4000			\n"   \
+			"	ll	%0, %1		# atomic_" #op "\n"   \
+			"	" #asm_op " %0, %2			\n"   \
+			"	sc	%0, %1				\n"   \
+			"	.set	mips0				\n"   \
+			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
+			: "Ir" (i));					      \
+		} while (unlikely(!temp));				      \
+	} else {							      \
+		unsigned long flags;					      \
+									      \
+		raw_local_irq_save(flags);				      \
+		v->counter c_op i;					      \
+		raw_local_irq_restore(flags);				      \
+	}								      \
+}
 
-#define ATOMIC_OP_RETURN(op, c_op, asm_op)					\
-static __inline__ int atomic_##op##_return(int i, atomic_t * v)			\
-{										\
-	int result;								\
-										\
-	smp_mb__before_llsc();							\
-										\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
-		int temp;							\
-										\
-		__asm__ __volatile__(						\
-		"	.set	arch=r4000				\n"	\
-		"1:	ll	%1, %2		# atomic_" #op "_return	\n"	\
-		"	" #asm_op " %0, %1, %3				\n"	\
-		"	sc	%0, %2					\n"	\
-		"	beqzl	%0, 1b					\n"	\
-		"	" #asm_op " %0, %1, %3				\n"	\
-		"	.set	mips0					\n"	\
-		: "=&r" (result), "=&r" (temp),					\
-		  "+" GCC_OFF12_ASM() (v->counter)				\
-		: "Ir" (i));							\
-	} else if (kernel_uses_llsc) {						\
-		int temp;							\
-										\
-		do {								\
-			__asm__ __volatile__(					\
-			"	.set	arch=r4000			\n"	\
-			"	ll	%1, %2	# atomic_" #op "_return	\n"	\
-			"	" #asm_op " %0, %1, %3			\n"	\
-			"	sc	%0, %2				\n"	\
-			"	.set	mips0				\n"	\
-			: "=&r" (result), "=&r" (temp),				\
-			  "+" GCC_OFF12_ASM() (v->counter)			\
-			: "Ir" (i));						\
-		} while (unlikely(!result));					\
-										\
-		result = temp; result c_op i;					\
-	} else {								\
-		unsigned long flags;						\
-										\
-		raw_local_irq_save(flags);					\
-		result = v->counter;						\
-		result c_op i;							\
-		v->counter = result;						\
-		raw_local_irq_restore(flags);					\
-	}									\
-										\
-	smp_llsc_mb();								\
-										\
-	return result;								\
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
+static __inline__ int atomic_##op##_return(int i, atomic_t * v)		      \
+{									      \
+	int result;							      \
+									      \
+	smp_mb__before_llsc();						      \
+									      \
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+		int temp;						      \
+									      \
+		__asm__ __volatile__(					      \
+		"	.set	arch=r4000				\n"   \
+		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
+		"	" #asm_op " %0, %1, %3				\n"   \
+		"	sc	%0, %2					\n"   \
+		"	beqzl	%0, 1b					\n"   \
+		"	" #asm_op " %0, %1, %3				\n"   \
+		"	.set	mips0					\n"   \
+		: "=&r" (result), "=&r" (temp),				      \
+		  "+" GCC_OFF12_ASM() (v->counter)			      \
+		: "Ir" (i));						      \
+	} else if (kernel_uses_llsc) {					      \
+		int temp;						      \
+									      \
+		do {							      \
+			__asm__ __volatile__(				      \
+			"	.set	arch=r4000			\n"   \
+			"	ll	%1, %2	# atomic_" #op "_return	\n"   \
+			"	" #asm_op " %0, %1, %3			\n"   \
+			"	sc	%0, %2				\n"   \
+			"	.set	mips0				\n"   \
+			: "=&r" (result), "=&r" (temp),			      \
+			  "+" GCC_OFF12_ASM() (v->counter)		      \
+			: "Ir" (i));					      \
+		} while (unlikely(!result));				      \
+									      \
+		result = temp; result c_op i;				      \
+	} else {							      \
+		unsigned long flags;					      \
+									      \
+		raw_local_irq_save(flags);				      \
+		result = v->counter;					      \
+		result c_op i;						      \
+		v->counter = result;					      \
+		raw_local_irq_restore(flags);				      \
+	}								      \
+									      \
+	smp_llsc_mb();							      \
+									      \
+	return result;							      \
 }
 
-#define ATOMIC_OPS(op, c_op, asm_op)						\
-	ATOMIC_OP(op, c_op, asm_op)						\
+#define ATOMIC_OPS(op, c_op, asm_op)					      \
+	ATOMIC_OP(op, c_op, asm_op)					      \
 	ATOMIC_OP_RETURN(op, c_op, asm_op)
 
 ATOMIC_OPS(add, +=, addu)
@@ -320,98 +320,98 @@ static __inline__ int __atomic_add_unles
  */
 #define atomic64_set(v, i)	((v)->counter = (i))
 
-#define ATOMIC64_OP(op, c_op, asm_op)						\
-static __inline__ void atomic64_##op(long i, atomic64_t * v)			\
-{										\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
-		long temp;							\
-										\
-		__asm__ __volatile__(						\
-		"	.set	arch=r4000				\n"	\
-		"1:	lld	%0, %1		# atomic64_" #op "	\n"	\
-		"	" #asm_op " %0, %2				\n"	\
-		"	scd	%0, %1					\n"	\
-		"	beqzl	%0, 1b					\n"	\
-		"	.set	mips0					\n"	\
-		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)		\
-		: "Ir" (i));							\
-	} else if (kernel_uses_llsc) {						\
-		long temp;							\
-										\
-		do {								\
-			__asm__ __volatile__(					\
-			"	.set	arch=r4000			\n"	\
-			"	lld	%0, %1		# atomic64_" #op "\n"	\
-			"	" #asm_op " %0, %2			\n"	\
-			"	scd	%0, %1				\n"	\
-			"	.set	mips0				\n"	\
-			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	\
-			: "Ir" (i));						\
-		} while (unlikely(!temp));					\
-	} else {								\
-		unsigned long flags;						\
-										\
-		raw_local_irq_save(flags);					\
-		v->counter c_op i;						\
-		raw_local_irq_restore(flags);					\
-	}									\
-}										\
+#define ATOMIC64_OP(op, c_op, asm_op)					      \
+static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
+{									      \
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+		long temp;						      \
+									      \
+		__asm__ __volatile__(					      \
+		"	.set	arch=r4000				\n"   \
+		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
+		"	" #asm_op " %0, %2				\n"   \
+		"	scd	%0, %1					\n"   \
+		"	beqzl	%0, 1b					\n"   \
+		"	.set	mips0					\n"   \
+		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	      \
+		: "Ir" (i));						      \
+	} else if (kernel_uses_llsc) {					      \
+		long temp;						      \
+									      \
+		do {							      \
+			__asm__ __volatile__(				      \
+			"	.set	arch=r4000			\n"   \
+			"	lld	%0, %1		# atomic64_" #op "\n" \
+			"	" #asm_op " %0, %2			\n"   \
+			"	scd	%0, %1				\n"   \
+			"	.set	mips0				\n"   \
+			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
+			: "Ir" (i));					      \
+		} while (unlikely(!temp));				      \
+	} else {							      \
+		unsigned long flags;					      \
+									      \
+		raw_local_irq_save(flags);				      \
+		v->counter c_op i;					      \
+		raw_local_irq_restore(flags);				      \
+	}								      \
+}
 
-#define ATOMIC64_OP_RETURN(op, c_op, asm_op)					\
-static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)		\
-{										\
-	long result;								\
-										\
-	smp_mb__before_llsc();							\
-										\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {				\
-		long temp;							\
-										\
-		__asm__ __volatile__(						\
-		"	.set	arch=r4000				\n"	\
-		"1:	lld	%1, %2		# atomic64_" #op "_return\n"	\
-		"	" #asm_op " %0, %1, %3				\n"	\
-		"	scd	%0, %2					\n"	\
-		"	beqzl	%0, 1b					\n"	\
-		"	" #asm_op " %0, %1, %3				\n"	\
-		"	.set	mips0					\n"	\
-		: "=&r" (result), "=&r" (temp),					\
-		  "+" GCC_OFF12_ASM() (v->counter)				\
-		: "Ir" (i));							\
-	} else if (kernel_uses_llsc) {						\
-		long temp;							\
-										\
-		do {								\
-			__asm__ __volatile__(					\
-			"	.set	arch=r4000			\n"	\
-			"	lld	%1, %2	# atomic64_" #op "_return\n"	\
-			"	" #asm_op " %0, %1, %3			\n"	\
-			"	scd	%0, %2				\n"	\
-			"	.set	mips0				\n"	\
-			: "=&r" (result), "=&r" (temp),				\
-			  "=" GCC_OFF12_ASM() (v->counter)			\
-			: "Ir" (i), GCC_OFF12_ASM() (v->counter)		\
-			: "memory");						\
-		} while (unlikely(!result));					\
-										\
-		result = temp; result c_op i;					\
-	} else {								\
-		unsigned long flags;						\
-										\
-		raw_local_irq_save(flags);					\
-		result = v->counter;						\
-		result c_op i;							\
-		v->counter = result;						\
-		raw_local_irq_restore(flags);					\
-	}									\
-										\
-	smp_llsc_mb();								\
-										\
-	return result;								\
+#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
+static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	      \
+{									      \
+	long result;							      \
+									      \
+	smp_mb__before_llsc();						      \
+									      \
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
+		long temp;						      \
+									      \
+		__asm__ __volatile__(					      \
+		"	.set	arch=r4000				\n"   \
+		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
+		"	" #asm_op " %0, %1, %3				\n"   \
+		"	scd	%0, %2					\n"   \
+		"	beqzl	%0, 1b					\n"   \
+		"	" #asm_op " %0, %1, %3				\n"   \
+		"	.set	mips0					\n"   \
+		: "=&r" (result), "=&r" (temp),				      \
+		  "+" GCC_OFF12_ASM() (v->counter)			      \
+		: "Ir" (i));						      \
+	} else if (kernel_uses_llsc) {					      \
+		long temp;						      \
+									      \
+		do {							      \
+			__asm__ __volatile__(				      \
+			"	.set	arch=r4000			\n"   \
+			"	lld	%1, %2	# atomic64_" #op "_return\n"  \
+			"	" #asm_op " %0, %1, %3			\n"   \
+			"	scd	%0, %2				\n"   \
+			"	.set	mips0				\n"   \
+			: "=&r" (result), "=&r" (temp),			      \
+			  "=" GCC_OFF12_ASM() (v->counter)		      \
+			: "Ir" (i), GCC_OFF12_ASM() (v->counter)	      \
+			: "memory");					      \
+		} while (unlikely(!result));				      \
+									      \
+		result = temp; result c_op i;				      \
+	} else {							      \
+		unsigned long flags;					      \
+									      \
+		raw_local_irq_save(flags);				      \
+		result = v->counter;					      \
+		result c_op i;						      \
+		v->counter = result;					      \
+		raw_local_irq_restore(flags);				      \
+	}								      \
+									      \
+	smp_llsc_mb();							      \
+									      \
+	return result;							      \
 }
 
-#define ATOMIC64_OPS(op, c_op, asm_op)						\
-	ATOMIC64_OP(op, c_op, asm_op)						\
+#define ATOMIC64_OPS(op, c_op, asm_op)					      \
+	ATOMIC64_OP(op, c_op, asm_op)					      \
 	ATOMIC64_OP_RETURN(op, c_op, asm_op)
 
 ATOMIC64_OPS(add, +=, daddu)
@@ -422,7 +422,8 @@ ATOMIC64_OPS(sub, -=, dsubu)
 #undef ATOMIC64_OP
 
 /*
- * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
+ * atomic64_sub_if_positive - conditionally subtract integer from atomic
+ *                            variable
  * @i: integer value to subtract
  * @v: pointer of type atomic64_t
  *




