567bb8fd47
There is nothing in these routines that inherently depends on R0 use. Given
that these routines are inlined, it is rather easy to blow up the compiler by
exhausting the spill class when performing a 64-bit swab.

This presently manifests itself as the following:

  CC      fs/ocfs2/suballoc.o
  fs/ocfs2/suballoc.c: In function 'ocfs2_reserve_suballoc_bits':
  fs/ocfs2/suballoc.c:638: error: unrecognizable insn:
  (insn 2793 1230 1231 103 arch/sh/include/asm/swab.h:33 (set (reg:HI 853)
          (subreg:HI (reg:SI 149 macl) 2)) -1 (expr_list:REG_DEAD (reg:SI 149 macl)
      (nil)))
  fs/ocfs2/suballoc.c:638: internal compiler error: in extract_insn, at recog.c:1991

This patch switches over to using an arbitrarily assigned register instead.
While the same issue does not exist in the SH-5 case, there is likewise no
harm in having an alternate register used for the byterev/shari pair.

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
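For illustration only (the pre-patch source is not shown on this page): the change described above amounts to relaxing the output constraint on the inline asm. On SH, GCC's "z" constraint pins an operand to R0, while "r" lets the register allocator pick any general-purpose register. A minimal sketch, assuming an SH target compiler (e.g. sh4-linux-gnu-gcc) and using illustrative function names:

/*
 * Hypothetical sketch of the constraint change: swab16_r0_pinned mirrors an
 * R0-bound form, swab16_any_reg mirrors what the file below actually does.
 */
#include <stdint.h>

static inline uint16_t swab16_r0_pinned(uint16_t x)
{
	uint16_t out;

	/* "=z" forces the result into R0, so every inlined copy competes for it. */
	__asm__("swap.b %1, %0" : "=z" (out) : "r" (x));
	return out;
}

static inline uint16_t swab16_any_reg(uint16_t x)
{
	uint16_t out;

	/* "=r" lets the allocator assign any general-purpose register. */
	__asm__("swap.b %1, %0" : "=r" (out) : "r" (x));
	return out;
}

With several such calls inlined into one function, the "=r" form gives the allocator room to spread the results across registers instead of serializing everything through R0.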
61 lines
1015 B
C
#ifndef __ASM_SH_SWAB_H
#define __ASM_SH_SWAB_H

/*
 * Copyright (C) 1999 Niibe Yutaka
 * Copyright (C) 2000, 2001 Paolo Alberelli
 */
#include <linux/compiler.h>
#include <linux/types.h>

#define __SWAB_64_THRU_32__

static inline __attribute_const__ __u32 __arch_swab32(__u32 x)
{
	__asm__(
#ifdef __SH5__
		"byterev %1, %0\n\t"
		"shari %0, 32, %0"
#else
		"swap.b %1, %0\n\t"
		"swap.w %0, %0\n\t"
		"swap.b %0, %0"
#endif
		: "=r" (x)
		: "r" (x));

	return x;
}
#define __arch_swab32 __arch_swab32

static inline __attribute_const__ __u16 __arch_swab16(__u16 x)
{
	__asm__(
#ifdef __SH5__
		"byterev %1, %0\n\t"
		"shari %0, 32, %0"
#else
		"swap.b %1, %0"
#endif
		: "=r" (x)
		: "r" (x));

	return x;
}
#define __arch_swab16 __arch_swab16

static inline __u64 __arch_swab64(__u64 val)
{
	union {
		struct { __u32 a,b; } s;
		__u64 u;
	} v, w;
	v.u = val;
	w.s.b = __arch_swab32(v.s.a);
	w.s.a = __arch_swab32(v.s.b);
	return w.u;
}
#define __arch_swab64 __arch_swab64

#endif /* __ASM_SH_SWAB_H */
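A side note on __SWAB_64_THRU_32__: __arch_swab64() is built from two inlined __arch_swab32() calls whose 32-bit halves are exchanged through the union, so an R0-only output constraint would have had both inlined copies contending for the same register, which is presumably the spill-class exhaustion the commit message describes. Below is a standalone, host-buildable sketch of the same union technique, with __builtin_bswap32 standing in for the SH-specific __arch_swab32 (function name is illustrative):

/* Portable sketch of the union-based 64-bit swab used by __arch_swab64(). */
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static uint64_t swab64_thru_32(uint64_t val)
{
	union {
		struct { uint32_t a, b; } s;
		uint64_t u;
	} v, w;

	v.u = val;
	/* Byte-swap each 32-bit half and exchange their positions. */
	w.s.b = __builtin_bswap32(v.s.a);
	w.s.a = __builtin_bswap32(v.s.b);
	return w.u;
}

int main(void)
{
	/* Full byte reversal: prints efcdab8967452301 on either endianness. */
	printf("%" PRIx64 "\n", swab64_thru_32(0x0123456789abcdefULL));
	return 0;
}

Because the half-swap plus half-exchange is symmetric, the result is a full byte reversal regardless of which half the 'a' member maps to on the host.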