- unsigned long result;
-
- __asm__ __volatile__(
- "lwsync\n"
- "1:\t" "ldarx %0,0,%1\n" /* load and reserve */
- "stdcx. %2,0,%1\n" /* else store conditional */
- "bne- 1b\n" /* retry if lost reservation */
- "isync\n"
- : "=&r"(result)
- : "r"(addr), "r"(val)
- : "memory", "cc");
-
- return result;
+ switch (len) {
+ case 4:
+ {
+ unsigned int old_val;
+
+ __asm__ __volatile__(
+ "lwsync\n"
+ "1:\t" "lwarx %0,0,%1\n" /* load and reserve */
+ "cmpd %0,%3\n" /* if load is not equal to */
+ "bne 2f\n" /* old, fail */
+ "stwcx. %2,0,%1\n" /* else store conditional */
+ "bne- 1b\n" /* retry if lost reservation */
+ "isync\n"
+ "2:\n"
+ : "=&r"(old_val),
+ : "r"(addr), "r"((unsigned int)_new),
+ "r"((unsigned int)old)
+ : "memory", "cc");
+
+ return old_val;
+ }
+#if (BITS_PER_LONG == 64)
+ case 8:
+ {
+ unsigned long old_val;
+
+ __asm__ __volatile__(
+ "lwsync\n"
+ "1:\t" "ldarx %0,0,%1\n" /* load and reserve */
+ "cmpd %0,%3\n" /* if load is not equal to */
+ "bne 2f\n" /* old, fail */
+ "stdcx. %2,0,%1\n" /* else store conditional */
+ "bne- 1b\n" /* retry if lost reservation */
+ "isync\n"
+ "2:\n"
+ : "=&r"(old_val),
+ : "r"(addr), "r"((unsigned long)_new),
+ "r"((unsigned long)old)
+ : "memory", "cc");
+
+ return old_val;
+ }
+#endif
+ }
+ /* Unsupported size: generate an illegal instruction. This cannot be
+ * caught with linker tricks when optimizations are disabled. */
+ __asm__ __volatile__(ILLEGAL_INSTR);
+ return 0;
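For context, here is a minimal usage sketch (not part of the patch) showing how a size-dispatching cmpxchg primitive like the one above is typically consumed. It assumes a public uatomic_cmpxchg(addr, old, new) wrapper and uatomic_read(addr), as exposed by liburcu's <urcu/uatomic.h>, which route to _uatomic_cmpxchg based on operand size; the counter_add() helper is hypothetical.

    #include <urcu/uatomic.h>

    static unsigned long counter;

    static void counter_add(unsigned long v)
    {
            unsigned long old, seen;

            /* Classic CAS retry loop: re-read and retry until the swap wins. */
            old = uatomic_read(&counter);
            for (;;) {
                    seen = uatomic_cmpxchg(&counter, old, old + v);
                    if (seen == old)
                            break;          /* compare matched, store succeeded */
                    old = seen;             /* lost the race, retry with observed value */
            }
    }

On success the wrapper returns the expected old value; on failure it returns the value actually observed, which the loop feeds back into the next attempt.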