/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */

#include <config.h>
#include <asm/unified.h>

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif
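
/*
 * Illustrative usage sketch, not part of the original header: the
 * get_byte_N and put_byte_N operands let byte extraction code read the
 * same way on both endiannesses.  Register choices here are arbitrary:
 *
 *	mov	r3, r7, get_byte_0	@ byte 0 of r7 into bits 7:0 of r3
 *	strb	r3, [r0], #1
 *	mov	r3, r7, get_byte_1	@ byte 1 of r7 into bits 7:0 of r3
 *	strb	r3, [r0], #1
 *
 * lspull/lspush are typically paired to merge two misaligned words; the
 * shift amounts depend on the misalignment (8/24 shown for one byte):
 *
 *	mov	r3, r4, lspull #8
 *	orr	r3, r3, r5, lspush #24
 */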

/*
 * Data preload for architectures that support it
 */
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
	defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
	defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
	defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
	defined(__ARM_ARCH_7R__)
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
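
/*
 * Illustrative usage sketch, not part of the original header: copy loops
 * wrap their prefetches in PLD() so cores with 'pld' get a data preload
 * and everything else assembles to nothing, e.g.:
 *
 *	PLD(	pld	[r1, #0]	)
 */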

/*
 * We only support cores that implement at least Thumb-1, so we can
 * always return with 'bx lr'.
 */
.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
	.endm
.endr
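
/*
 * Illustrative usage sketch, not part of the original header: 'ret lr'
 * assembles to 'bx lr', and the conditional variants follow the usual
 * suffix scheme:
 *
 *	ret	lr		@ expands to 'bx lr'
 *	reteq	lr		@ expands to 'bxeq lr'
 *	ret	r3		@ expands to 'mov pc, r3'
 */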

/*
 * Cache aligned, used for optimized memcpy/memset.
 * In the kernel this is only enabled for Feroceon CPUs...
 * We disable it especially for Thumb builds, since those instruction
 * sequences are not written in a Thumb-ready way...
 */
#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
#define CALGN(code...)
#else
#define CALGN(code...) code
#endif
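
/*
 * Illustrative usage sketch, not part of the original header, loosely
 * modelled on the kernel's memcpy: the cache-line alignment prologue is
 * wrapped in CALGN() so it drops out of Thumb builds entirely, e.g.:
 *
 *	CALGN(	ands	ip, r0, #31		)
 *	CALGN(	rsb	r3, ip, #32		)
 */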