forked from luck/tmp_suning_uos_patched
6445671b00
Let's use the standard L1_CACHE_ALIGN macro instead. Signed-off-by: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Signed-off-by: Matt Turner <mattst88@gmail.com>
23 lines
468 B
C
23 lines
468 B
C
/*
 * include/asm-alpha/cache.h
 *
 * L1 data-cache line-size constants for Alpha CPUs.  Consumers use
 * L1_CACHE_BYTES / L1_CACHE_SHIFT (e.g. via the L1_CACHE_ALIGN macro)
 * to align hot data structures to cache-line boundaries.
 */
#ifndef __ARCH_ALPHA_CACHE_H
#define __ARCH_ALPHA_CACHE_H

/* Bytes per L1 (data) cache line. */
#if defined(CONFIG_ALPHA_GENERIC) || defined(CONFIG_ALPHA_EV6)
/* EV6 (21264) and the generic multi-platform kernel: 64-byte lines. */
# define L1_CACHE_BYTES     64
# define L1_CACHE_SHIFT     6
#else
/* Both EV4 and EV5 are write-through, read-allocate,
   direct-mapped, physical, with 32-byte lines.
*/
# define L1_CACHE_BYTES     32
# define L1_CACHE_SHIFT     5
#endif

/* SMP false-sharing avoidance uses the same granularity as the L1 line. */
#define SMP_CACHE_BYTES    L1_CACHE_BYTES

#endif /* __ARCH_ALPHA_CACHE_H */