author     Paul Mundt <lethal@linux-sh.org>    2007-03-01 15:56:31 +0900
committer  Paul Mundt <lethal@linux-sh.org>    2007-03-05 14:13:26 +0900
commit     87e29cacb7d09c81b09224bec395f970df958af4
tree       536a67c79bc54a6de1e911b31b6918efa2700c6d /include
parent     5c36e6578d81f79ede871d3e66a0d6beeffeb3dc
sh: Use L1_CACHE_BYTES for .data.cacheline_aligned.
Previously this was using a hardcoded 32; use L1_CACHE_BYTES for
cacheline alignment instead.
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
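The rounding the message refers to is the kind of arithmetic done by the L1_CACHE_ALIGN() helper visible in the diff below: sizes and section alignment are derived from L1_CACHE_BYTES rather than a literal 32. A minimal standalone sketch of that arithmetic, compilable in userspace (the 32-byte line size and the sample values are assumptions for illustration, not taken from this commit):

/*
 * Sketch of L1_CACHE_BYTES-based alignment.  The 32-byte line size is
 * an assumption for illustration (it matches the old hardcoded value
 * the commit removes); in the kernel it comes from <asm/cache.h>.
 */
#include <stdio.h>

#define L1_CACHE_BYTES	32
#define L1_CACHE_ALIGN(x) (((x) + (L1_CACHE_BYTES - 1)) & ~(L1_CACHE_BYTES - 1))

int main(void)
{
	/* 100 bytes rounds up to 128, i.e. four whole 32-byte cache lines. */
	printf("L1_CACHE_ALIGN(100) = %lu\n", (unsigned long)L1_CACHE_ALIGN(100));

	/* An exact multiple of the line size is left unchanged. */
	printf("L1_CACHE_ALIGN(64)  = %lu\n", (unsigned long)L1_CACHE_ALIGN(64));
	return 0;
}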
Diffstat (limited to 'include')
-rw-r--r--   include/asm-sh/cache.h   3
1 file changed, 2 insertions, 1 deletion
diff --git a/include/asm-sh/cache.h b/include/asm-sh/cache.h
index e3a180c..9a3cb6b 100644
--- a/include/asm-sh/cache.h
+++ b/include/asm-sh/cache.h
@@ -21,6 +21,7 @@
 
 #define L1_CACHE_ALIGN(x)	(((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
 
+#ifndef __ASSEMBLY__
 struct cache_info {
 	unsigned int ways;		/* Number of cache ways */
 	unsigned int sets;		/* Number of cache sets */
@@ -47,6 +48,6 @@ struct cache_info {
 
 	unsigned long flags;
 };
-
+#endif /* __ASSEMBLY__ */
 #endif /* __KERNEL__ */
 #endif /* __ASM_SH_CACHE_H */
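The header change itself wraps struct cache_info in #ifndef __ASSEMBLY__. kbuild defines __ASSEMBLY__ when preprocessing assembly files and linker scripts, so the guard lets such sources pull in the header purely for macros like L1_CACHE_BYTES without exposing C-only syntax. A minimal standalone sketch of the guard pattern (field list trimmed, illustrative only, not kernel code):

/*
 * Sketch of the __ASSEMBLY__ guard added above.  When the macro is
 * defined (as kbuild does for .S files and preprocessed linker
 * scripts), only the plain #defines survive preprocessing and the C
 * struct drops out.
 */
#include <stdio.h>

#define L1_CACHE_BYTES	32

#ifndef __ASSEMBLY__
struct cache_info {
	unsigned int ways;	/* Number of cache ways */
	unsigned int sets;	/* Number of cache sets */
};
#endif /* __ASSEMBLY__ */

int main(void)
{
	/* Compiled as C, the struct is visible and usable... */
	printf("sizeof(struct cache_info) = %zu\n", sizeof(struct cache_info));

	/* ...while a file built with -D__ASSEMBLY__ would only ever see
	 * L1_CACHE_BYTES, which is all an assembler or linker script needs. */
	printf("L1_CACHE_BYTES = %d\n", L1_CACHE_BYTES);
	return 0;
}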