Mirror of https://github.com/AsahiLinux/u-boot, synced 2024-12-05 02:51:00 +00:00
633b6ccedf
Currently many CPUs use the same flush_cache() function, which just calls flush_dcache_range(). So implement a weak flush_cache() for all CPUs to use. The original weak flush_cache() in arch/arm/lib/cache.c contains some code for ARM1136 and ARM926EJ-S, but arch/arm/cpu/arm1136/cpu.c and arch/arm/cpu/arm926ejs/cache.c already implement their own flush_cache() functions, so the ARM1136/ARM926EJ-S code in the weak flush_cache() of arch/arm/lib/cache.c is never used. This patch therefore removes that code and has flush_cache() only call flush_dcache_range().
Signed-off-by: Josh Wu <josh.wu@atmel.com>
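For reference, a minimal sketch of the generic weak flush_cache() the commit message describes, reconstructed from that description rather than copied from arch/arm/lib/cache.c, and assuming U-Boot's usual flush_cache(start, size) signature:

/*
 * Sketch of the generic weak flush_cache() described in the commit
 * message above: it simply forwards to flush_dcache_range().
 */
__weak void flush_cache(unsigned long start, unsigned long size)
{
	flush_dcache_range(start, start + size);
}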
85 lines
1.6 KiB
C
/*
 * (C) Copyright 2011
 * Ilya Yanok, EmCraft Systems
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
#include <linux/types.h>
#include <common.h>

#ifndef CONFIG_SYS_DCACHE_OFF

#ifndef CONFIG_SYS_CACHELINE_SIZE
#define CONFIG_SYS_CACHELINE_SIZE 32
#endif

/* Invalidate the entire data cache (CP15 c7, c6, 0). */
void invalidate_dcache_all(void)
{
	asm volatile("mcr p15, 0, %0, c7, c6, 0\n" : : "r"(0));
}

/*
 * Clean the entire data cache with the ARM926EJ-S "test and clean"
 * operation (CP15 c7, c14, 3), looping until the cache is clean, then
 * drain the write buffer (CP15 c7, c10, 4).
 */
void flush_dcache_all(void)
{
	asm volatile(
		"0:"
		"mrc p15, 0, r15, c7, c14, 3\n"
		"bne 0b\n"
		"mcr p15, 0, %0, c7, c10, 4\n"
		 : : "r"(0) : "memory"
	);
}

/* Return 1 if both start and stop are cache-line aligned, 0 otherwise. */
static int check_cache_range(unsigned long start, unsigned long stop)
{
	int ok = 1;

	if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (!ok)
		debug("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
			start, stop);

	return ok;
}

/* Invalidate a range of data cache lines by MVA (CP15 c7, c6, 1). */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	if (!check_cache_range(start, stop))
		return;

	while (start < stop) {
		asm volatile("mcr p15, 0, %0, c7, c6, 1\n" : : "r"(start));
		start += CONFIG_SYS_CACHELINE_SIZE;
	}
}

/*
 * Clean and invalidate a range of data cache lines by MVA
 * (CP15 c7, c14, 1), then drain the write buffer.
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	if (!check_cache_range(start, stop))
		return;

	while (start < stop) {
		asm volatile("mcr p15, 0, %0, c7, c14, 1\n" : : "r"(start));
		start += CONFIG_SYS_CACHELINE_SIZE;
	}

	asm volatile("mcr p15, 0, %0, c7, c10, 4\n" : : "r"(0));
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */

/*
 * Stub implementations for l2 cache operations
 */
__weak void l2_cache_disable(void) {}
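A minimal usage sketch of the range operations above, from a hypothetical caller (the buffer and function names are illustrative only, not part of this file; it assumes U-Boot's __aligned macro from linux/compiler.h): the CPU cleans a cache-line-aligned buffer before a device reads it via DMA, and invalidates it before reading data the device wrote.

/* Hypothetical driver code illustrating flush/invalidate around DMA. */
static u8 dma_buf[512] __aligned(CONFIG_SYS_CACHELINE_SIZE);

void example_dma_transfer(void)
{
	/* Write back dirty lines so the device sees current data. */
	flush_dcache_range((unsigned long)dma_buf,
			   (unsigned long)dma_buf + sizeof(dma_buf));

	/* ... start DMA to/from dma_buf and wait for completion ... */

	/* Discard stale lines before the CPU reads device-written data. */
	invalidate_dcache_range((unsigned long)dma_buf,
				(unsigned long)dma_buf + sizeof(dma_buf));
}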