#ifndef _ASM_GENERIC_UNALIGNED_H_
#define _ASM_GENERIC_UNALIGNED_H_

/*
 * For the benefit of those who are trying to port Linux to another
 * architecture, here are some C-language equivalents.
 *
 * This is based almost entirely upon Richard Henderson's
 * asm-alpha/unaligned.h implementation. Some comments were
 * taken from David Mosberger's asm-ia64/unaligned.h header.
 */

#include <linux/types.h>

/*
 * The main single-value unaligned transfer routines.
 */
#define get_unaligned(ptr) \
	((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))
#define put_unaligned(x,ptr) \
	__put_unaligned((unsigned long)(x), (ptr), sizeof(*(ptr)))
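
/*
 * Usage sketch; example_read_field()/example_write_field() are
 * hypothetical helpers, not part of this header.  The macros transfer a
 * value through a pointer that need not be naturally aligned, e.g. a
 * 32-bit field at an odd offset inside a byte buffer:
 *
 *	static inline __u32 example_read_field(const __u8 *buf)
 *	{
 *		return get_unaligned((const __u32 *)(buf + 1));
 *	}
 *
 *	static inline void example_write_field(__u8 *buf, __u32 v)
 *	{
 *		put_unaligned(v, (__u32 *)(buf + 1));
 *	}
 *
 * No byte swapping is implied; values are moved in CPU-native byte order.
 */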

/*
 * This function doesn't actually exist.  The idea is that when
 * someone uses the macros below with an unsupported size (datatype),
 * the linker will alert us to the problem via an unresolved reference
 * error.
 */
extern void bad_unaligned_access_length(void) __attribute__((noreturn));
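
/*
 * Hypothetical illustration of the trick: with a type whose size is not
 * 1, 2, 4 or 8, say
 *
 *	long double *p = ...;
 *	... get_unaligned(p) ...
 *
 * (on ABIs where long double is wider than 8 bytes), the switches below
 * pick their default: branch.  That call has no definition anywhere, so
 * the build stops with an unresolved reference at link time instead of
 * silently performing a wrong-sized access.
 */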
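
/*
 * Wrapper types whose single member is marked packed, i.e. byte-aligned.
 * Accessing x through a pointer cast to one of these structs tells the
 * compiler the datum may live at any address, so on strict-alignment
 * machines it emits a safe (e.g. byte-by-byte) access sequence instead
 * of a plain word load or store that could fault.
 */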
struct __una_u64 { __u64 x __attribute__((packed)); };
struct __una_u32 { __u32 x __attribute__((packed)); };
struct __una_u16 { __u16 x __attribute__((packed)); };

/*
 * Elemental unaligned loads
 */

static inline unsigned long __uldq(const __u64 *addr)
{
	const struct __una_u64 *ptr = (const struct __una_u64 *) addr;
	return ptr->x;
}

static inline unsigned long __uldl(const __u32 *addr)
{
	const struct __una_u32 *ptr = (const struct __una_u32 *) addr;
	return ptr->x;
}

static inline unsigned long __uldw(const __u16 *addr)
{
	const struct __una_u16 *ptr = (const struct __una_u16 *) addr;
	return ptr->x;
}

/*
 * Elemental unaligned stores
 */

static inline void __ustq(__u64 val, __u64 *addr)
{
	struct __una_u64 *ptr = (struct __una_u64 *) addr;
	ptr->x = val;
}

static inline void __ustl(__u32 val, __u32 *addr)
{
	struct __una_u32 *ptr = (struct __una_u32 *) addr;
	ptr->x = val;
}

static inline void __ustw(__u16 val, __u16 *addr)
{
	struct __una_u16 *ptr = (struct __una_u16 *) addr;
	ptr->x = val;
}
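
/*
 * Size dispatch behind get_unaligned()/put_unaligned().  size is always
 * the compile-time constant sizeof(*(ptr)), so the compiler can collapse
 * each switch below to a single elemental access.  Values travel as
 * unsigned long, so the 8-byte cases preserve all 64 bits only where
 * unsigned long itself is 64 bits wide.
 */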
static inline unsigned long __get_unaligned(const void *ptr, size_t size)
{
	unsigned long val;
	switch (size) {
	case 1:
		val = *(const __u8 *)ptr;
		break;
	case 2:
		val = __uldw((const __u16 *)ptr);
		break;
	case 4:
		val = __uldl((const __u32 *)ptr);
		break;
	case 8:
		val = __uldq((const __u64 *)ptr);
		break;
	default:
		bad_unaligned_access_length();
	};
	return val;
}

static inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
{
	switch (size) {
	case 1:
		*(__u8 *)ptr = val;
		break;
	case 2:
		__ustw(val, (__u16 *)ptr);
		break;
	case 4:
		__ustl(val, (__u32 *)ptr);
		break;
	case 8:
		__ustq(val, (__u64 *)ptr);
		break;
	default:
		bad_unaligned_access_length();
	};
}

#endif /* _ASM_GENERIC_UNALIGNED_H_ */