2018-05-06 21:58:06 +00:00
|
|
|
// SPDX-License-Identifier: GPL-2.0+
|
2002-09-08 17:56:50 +00:00
|
|
|
/*
|
|
|
|
* (C) Copyright 2002
|
|
|
|
* Wolfgang Denk, DENX Software Engineering, wd@denx.de.
|
|
|
|
*/
|
|
|
|
|
|
|
|
/* for now: just dummy functions to satisfy the linker */
|
|
|
|
|
2005-01-09 23:16:25 +00:00
|
|
|
#include <common.h>
|
2014-12-10 05:25:22 +00:00
|
|
|
#include <malloc.h>
|
2005-01-09 23:16:25 +00:00
|
|
|
|
2015-07-27 03:40:17 +00:00
|
|
|
/*
|
|
|
|
* Flush range from all levels of d-cache/unified-cache.
|
|
|
|
* Affects the range [start, start + size - 1].
|
|
|
|
*/
|
2014-06-23 20:07:04 +00:00
|
|
|
__weak void flush_cache(unsigned long start, unsigned long size)
|
2002-09-08 17:56:50 +00:00
|
|
|
{
|
2015-07-27 03:40:17 +00:00
|
|
|
flush_dcache_range(start, start + size);
|
2002-09-08 17:56:50 +00:00
|
|
|
}
|
2011-06-16 23:30:50 +00:00
|
|
|
|
|
|
|
/*
|
|
|
|
* Default implementation:
|
|
|
|
* do a range flush for the entire range
|
|
|
|
*/
|
2014-06-23 20:07:04 +00:00
|
|
|
__weak void flush_dcache_all(void)
|
2011-06-16 23:30:50 +00:00
|
|
|
{
|
|
|
|
flush_cache(0, ~0);
|
|
|
|
}
|
2011-08-16 04:33:05 +00:00
|
|
|
|
|
|
|
/*
|
|
|
|
* Default implementation of enable_caches()
|
|
|
|
* Real implementation should be in platform code
|
|
|
|
*/
|
2014-06-23 20:07:04 +00:00
|
|
|
__weak void enable_caches(void)
|
2011-08-16 04:33:05 +00:00
|
|
|
{
|
|
|
|
puts("WARNING: Caches not enabled\n");
|
|
|
|
}
|
2014-12-10 05:25:22 +00:00
|
|
|
|
2015-07-27 03:40:16 +00:00
|
|
|
/*
 * invalidate_dcache_range() - invalidate d-cache lines for [start, stop).
 *
 * Empty weak stub so the linker is satisfied; platform code provides the
 * real implementation.
 */
__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}
|
|
|
|
/*
 * flush_dcache_range() - write back d-cache lines for [start, stop).
 *
 * Empty weak stub so the linker is satisfied; platform code provides the
 * real implementation.
 */
__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
}
|
|
|
|
|
2016-06-20 01:43:01 +00:00
|
|
|
int check_cache_range(unsigned long start, unsigned long stop)
|
|
|
|
{
|
|
|
|
int ok = 1;
|
|
|
|
|
|
|
|
if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
|
|
|
|
ok = 0;
|
|
|
|
|
|
|
|
if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
|
|
|
|
ok = 0;
|
|
|
|
|
|
|
|
if (!ok) {
|
2016-06-20 01:43:05 +00:00
|
|
|
warn_non_spl("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
|
|
|
|
start, stop);
|
2016-06-20 01:43:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return ok;
|
|
|
|
}
|
|
|
|
|
2014-12-10 05:25:22 +00:00
|
|
|
#ifdef CONFIG_SYS_NONCACHED_MEMORY
|
|
|
|
/*
 * Reserve one MMU section worth of address space below the malloc() area that
 * will be mapped uncached.
 */
/* Base address of the noncached region (set by noncached_init()) */
static unsigned long noncached_start;
/* First address past the noncached region */
static unsigned long noncached_end;
/* Bump-allocator cursor for noncached_alloc(); never rewinds (no free) */
static unsigned long noncached_next;
|
|
|
|
|
|
|
|
void noncached_init(void)
|
|
|
|
{
|
|
|
|
phys_addr_t start, end;
|
|
|
|
size_t size;
|
|
|
|
|
2019-08-27 17:54:31 +00:00
|
|
|
/* If this calculation changes, update board_f.c:reserve_noncached() */
|
2014-12-10 05:25:22 +00:00
|
|
|
end = ALIGN(mem_malloc_start, MMU_SECTION_SIZE) - MMU_SECTION_SIZE;
|
|
|
|
size = ALIGN(CONFIG_SYS_NONCACHED_MEMORY, MMU_SECTION_SIZE);
|
|
|
|
start = end - size;
|
|
|
|
|
|
|
|
debug("mapping memory %pa-%pa non-cached\n", &start, &end);
|
|
|
|
|
|
|
|
noncached_start = start;
|
|
|
|
noncached_end = end;
|
|
|
|
noncached_next = start;
|
|
|
|
|
2019-05-03 13:41:00 +00:00
|
|
|
#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
|
2014-12-10 05:25:22 +00:00
|
|
|
mmu_set_region_dcache_behaviour(noncached_start, size, DCACHE_OFF);
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * noncached_alloc() - bump-allocate from the uncached region.
 * @size: number of bytes to allocate
 * @align: required alignment of the returned address
 *
 * Return: physical address of the allocation, or 0 when the region is
 * exhausted. There is no corresponding free.
 */
phys_addr_t noncached_alloc(size_t size, size_t align)
{
	phys_addr_t addr = ALIGN(noncached_next, align);

	/* Reject when the aligned cursor ran past, or too little remains */
	if (addr >= noncached_end || noncached_end - addr < size)
		return 0;

	debug("allocated %zu bytes of uncached memory @%pa\n", size, &addr);
	noncached_next = addr + size;

	return addr;
}
|
|
|
|
#endif /* CONFIG_SYS_NONCACHED_MEMORY */
|
2015-10-23 16:06:40 +00:00
|
|
|
|
2017-03-18 13:01:44 +00:00
|
|
|
#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
|
2015-10-23 16:06:40 +00:00
|
|
|
/*
 * invalidate_l2_cache() - invalidate the L2 cache via a CP15 operation.
 *
 * Issues MCR p15, 1, <Rt>, c15, c11, 0 with a zero operand, then an isb()
 * so the effect is observed before subsequent instructions.
 *
 * NOTE(review): this CP15 encoding is implementation-defined, not part of
 * the generic ARM cache-maintenance set — confirm against the target
 * core's TRM that it performs a full L2 invalidate there.
 */
void invalidate_l2_cache(void)
{
	unsigned int val = 0;

	asm volatile("mcr p15, 1, %0, c15, c11, 0 @ invl l2 cache"
		: : "r" (val) : "cc");
	isb();
}
|
|
|
|
#endif
|