From patchwork Fri Nov 3 00:11:27 2017
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: [U-Boot] arm64: support running at addr other than linked to
X-Patchwork-Submitter: Stephen Warren <swarren@wwwdotorg.org>
X-Patchwork-Id: 833593
X-Patchwork-Delegate: trini@ti.com
Message-Id: <20171103001127.2055-1-swarren@wwwdotorg.org>
To: Tom Rini <trini@konsulko.com>
Cc: u-boot@lists.denx.de, Stephen Warren <swarren@nvidia.com>
Date: Thu, 2 Nov 2017 18:11:27 -0600
From: Stephen Warren <swarren@wwwdotorg.org>
List-Id: U-Boot discussion <u-boot.lists.denx.de>

From: Stephen Warren <swarren@nvidia.com>

This is required in the case where U-Boot is typically loaded and run at
a particular address, but for some reason the RAM at that location is
not available, e.g. due to memory fragmentation caused by loading other
boot binaries or firmware, splitting an SMP complex between various
different OSs without using e.g. the EL2 second-stage page tables to
hide the memory assignments, or due to known ECC failures.

Signed-off-by: Stephen Warren <swarren@nvidia.com>
---
 arch/arm/Kconfig              | 16 ++++++++++++++++
 arch/arm/cpu/armv8/start.S    | 26 ++++++++++++++++++++++++++
 arch/arm/include/asm/config.h |  4 ----
 arch/arm/lib/crt0_64.S        |  8 ++++++++
 arch/arm/lib/relocate_64.S    | 23 ++++++++++++++++++-----
 5 files changed, 68 insertions(+), 9 deletions(-)

diff --git a/arch/arm/Kconfig b/arch/arm/Kconfig
index 83b7aa51dc2c..294b456414bc 100644
--- a/arch/arm/Kconfig
+++ b/arch/arm/Kconfig
@@ -9,6 +9,22 @@ config ARM64
         select PHYS_64BIT
         select SYS_CACHE_SHIFT_6
 
+if ARM64
+config POSITION_INDEPENDENT
+        bool "Generate position-independent pre-relocation code"
+        help
+          U-Boot expects to be linked to a specific hard-coded address, and to
+          be loaded to and run from that address. This option lifts that
+          restriction, thus allowing the code to be loaded to and executed
+          from almost any address. This logic relies on the relocation
+          information that is embedded into the binary to support U-Boot
+          relocating itself to the top-of-RAM later during execution.
+endif
+
+config STATIC_RELA
+        bool
+        default y if ARM64 && !POSITION_INDEPENDENT
+
 config DMA_ADDR_T_64BIT
         bool
         default y if ARM64
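
The "relocation information that is embedded into the binary" mentioned in
the help text above is U-Boot's .rela.dyn table of ELF64 RELA records. For
reference only (standard ELF64 record layout; apply_one_rela() is an
illustrative helper, not code from this patch), a single R_AARCH64_RELATIVE
record is applied like this in C:

#include <stdint.h>

/* Standard ELF64 RELA record, as emitted into U-Boot's .rela.dyn section */
typedef struct {
        uint64_t r_offset;      /* link-time address of the word to patch */
        uint64_t r_info;        /* relocation type; 1027 = R_AARCH64_RELATIVE */
        int64_t  r_addend;      /* link-time value that belongs at r_offset */
} Elf64_Rela;

/*
 * Apply one relative record when the image runs 'delta' bytes away from the
 * address it was linked at (delta = run address - CONFIG_SYS_TEXT_BASE).
 */
static void apply_one_rela(const Elf64_Rela *r, uint64_t delta)
{
        *(uint64_t *)(r->r_offset + delta) = (uint64_t)r->r_addend + delta;
}

The pre-relocation fixup added to start.S below walks exactly this table
before any C code runs.
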
diff --git a/arch/arm/cpu/armv8/start.S b/arch/arm/cpu/armv8/start.S
index 5c500be51d1f..03e744e4a673 100644
--- a/arch/arm/cpu/armv8/start.S
+++ b/arch/arm/cpu/armv8/start.S
@@ -57,6 +57,32 @@ reset:
 .globl save_boot_params_ret
 save_boot_params_ret:
 
+#if CONFIG_POSITION_INDEPENDENT
+        /*
+         * Fix .rela.dyn relocations. This allows U-Boot to be loaded to and
+         * executed at a different address than it was linked at.
+         */
+pie_fixup:
+        adr     x0, _start              /* x0 <- Runtime value of _start */
+        ldr     x1, _TEXT_BASE          /* x1 <- Linked value of _start */
+        sub     x9, x0, x1              /* x9 <- Run-vs-link offset */
+        adr     x2, __rel_dyn_start     /* x2 <- Runtime &__rel_dyn_start */
+        adr     x3, __rel_dyn_end       /* x3 <- Runtime &__rel_dyn_end */
+pie_fix_loop:
+        ldp     x0, x1, [x2], #16       /* (x0, x1) <- (Link location, fixup) */
+        ldr     x4, [x2], #8            /* x4 <- addend */
+        cmp     w1, #1027               /* relative fixup? */
+        bne     pie_skip_reloc
+        /* relative fix: store addend plus offset at dest location */
+        add     x0, x0, x9
+        add     x4, x4, x9
+        str     x4, [x0]
+pie_skip_reloc:
+        cmp     x2, x3
+        b.lo    pie_fix_loop
+pie_fixup_done:
+#endif
+
 #ifdef CONFIG_SYS_RESET_SCTRL
         bl      reset_sctrl
 #endif
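
For reference, the pie_fixup loop above corresponds roughly to the following
C. This is a sketch only: the struct field names are descriptive rather than
taken from a header, and it assumes, as the assembly does, that any entry
that is not R_AARCH64_RELATIVE (type 1027) can simply be skipped.

#include <stdint.h>

#define R_AARCH64_RELATIVE      1027    /* the "#1027" compared against w1 */

struct rela {
        uint64_t offset;        /* linked address of the location to patch */
        uint64_t info;          /* relocation type in the low 32 bits */
        uint64_t addend;        /* linked value to store at that location */
};

/* run_minus_link is x9: runtime _start minus linked _start (_TEXT_BASE) */
static void pie_fixup(struct rela *start, struct rela *end,
                      uint64_t run_minus_link)
{
        for (struct rela *r = start; r < end; r++) {
                if ((uint32_t)r->info != R_AARCH64_RELATIVE)
                        continue;               /* pie_skip_reloc */
                /* relative fix: store addend plus offset at dest location */
                *(uint64_t *)(r->offset + run_minus_link) =
                                r->addend + run_minus_link;
        }
}

(The assembly tests the end pointer after each record rather than before;
for a non-empty .rela.dyn table the two are equivalent.)
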
diff --git a/arch/arm/include/asm/config.h b/arch/arm/include/asm/config.h
index 5674d37c04df..9f178293818e 100644
--- a/arch/arm/include/asm/config.h
+++ b/arch/arm/include/asm/config.h
@@ -10,10 +10,6 @@
 #define CONFIG_LMB
 #define CONFIG_SYS_BOOT_RAMDISK_HIGH
 
-#ifdef CONFIG_ARM64
-#define CONFIG_STATIC_RELA
-#endif
-
 #if defined(CONFIG_ARCH_LS1021A) || \
         defined(CONFIG_CPU_PXA27X) || \
         defined(CONFIG_CPU_MONAHANS) || \
diff --git a/arch/arm/lib/crt0_64.S b/arch/arm/lib/crt0_64.S
index 9c46c93ca4c5..da7c62cbe0aa 100644
--- a/arch/arm/lib/crt0_64.S
+++ b/arch/arm/lib/crt0_64.S
@@ -98,6 +98,14 @@ ENTRY(_main)
         ldr     x18, [x18, #GD_NEW_GD]  /* x18 <- gd->new_gd */
 
         adr     lr, relocation_return
+#if CONFIG_POSITION_INDEPENDENT
+        /* Add in link-vs-runtime offset */
+        adr     x0, _start              /* x0 <- Runtime value of _start */
+        ldr     x9, _TEXT_BASE          /* x9 <- Linked value of _start */
+        sub     x9, x9, x0              /* x9 <- Link-vs-run offset */
+        add     lr, lr, x9
+#endif
+        /* Add in link-vs-relocation offset */
         ldr     x9, [x18, #GD_RELOC_OFF]        /* x9 <- gd->reloc_off */
         add     lr, lr, x9      /* new return address after relocation */
         ldr     x0, [x18, #GD_RELOCADDR]        /* x0 <- gd->relocaddr */
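
The lr adjustment above composes two offsets: first the link-vs-run offset,
to turn the pc-relative 'adr lr, relocation_return' result back into the
linked address, and then gd->reloc_off, to land in the relocated copy. A
worked example in C with made-up addresses (none of these values come from
a real board):

#include <assert.h>
#include <stdint.h>

int main(void)
{
        uint64_t link_base  = 0x80080000;       /* stand-in for CONFIG_SYS_TEXT_BASE */
        uint64_t run_base   = 0x90000000;       /* where the image actually runs */
        uint64_t relocaddr  = 0xfff00000;       /* stand-in for gd->relocaddr */
        uint64_t reloc_off  = relocaddr - link_base;    /* gd->reloc_off */
        uint64_t ret_offset = 0x1234;   /* relocation_return within the image */

        uint64_t lr = run_base + ret_offset;    /* adr lr, relocation_return */
        lr += link_base - run_base;             /* new #if block above */
        lr += reloc_off;                        /* existing GD_RELOC_OFF add */

        assert(lr == relocaddr + ret_offset);
        return 0;
}
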
diff --git a/arch/arm/lib/relocate_64.S b/arch/arm/lib/relocate_64.S
index fdba004363af..04804524ed65 100644
--- a/arch/arm/lib/relocate_64.S
+++ b/arch/arm/lib/relocate_64.S
@@ -27,11 +27,24 @@ ENTRY(relocate_code)
         /*
          * Copy u-boot from flash to RAM
          */
-        ldr     x1, =__image_copy_start /* x1 <- SRC &__image_copy_start */
-        subs    x9, x0, x1              /* x9 <- relocation offset */
+        adr     x1, __image_copy_start  /* x1 <- Run &__image_copy_start */
+        subs    x9, x0, x1              /* x9 <- Run to copy offset */
         b.eq    relocate_done           /* skip relocation */
-        ldr     x2, =__image_copy_end   /* x2 <- SRC &__image_copy_end */
+        /*
+         * Don't ldr x1, __image_copy_start here, since if the code is already
+         * running at an address other than it was linked to, that instruction
+         * will load the relocated value of __image_copy_start. To
+         * correctly apply relocations, we need to know the linked value.
+         *
+         * Instead, use the linked value, which we know was
+         * CONFIG_SYS_TEXT_BASE; _TEXT_BASE stores it as a plain constant
+         * rather than a symbol reference, so it is never relocated.
+         */
+        ldr     x1, _TEXT_BASE          /* x1 <- Linked &__image_copy_start */
+        subs    x9, x0, x1              /* x9 <- Link to copy offset */
 
+        adr     x1, __image_copy_start  /* x1 <- Run &__image_copy_start */
+        adr     x2, __image_copy_end    /* x2 <- Run &__image_copy_end */
 copy_loop:
         ldp     x10, x11, [x1], #16     /* copy from source address [x1] */
         stp     x10, x11, [x0], #16     /* copy to target address [x0] */
@@ -42,8 +55,8 @@ copy_loop:
         /*
          * Fix .rela.dyn relocations
          */
-        ldr     x2, =__rel_dyn_start    /* x2 <- SRC &__rel_dyn_start */
-        ldr     x3, =__rel_dyn_end      /* x3 <- SRC &__rel_dyn_end */
+        adr     x2, __rel_dyn_start     /* x2 <- Run &__rel_dyn_start */
+        adr     x3, __rel_dyn_end       /* x3 <- Run &__rel_dyn_end */
 fixloop:
         ldp     x0, x1, [x2], #16       /* (x0,x1) <- (SRC location, fixup) */
         ldr     x4, [x2], #8            /* x4 <- addend */
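
The comment block added above is the heart of the change: once the image can
run away from its link address, a symbol's run-time address (what adr yields)
and its linked address (preserved in the non-relocated _TEXT_BASE literal)
are different things, and relocate_code needs both. A C sketch of the two
offsets it computes (struct and function names here are illustrative, not
U-Boot API):

#include <stdint.h>

struct reloc_inputs {
        uint64_t run_start;     /* adr x1, __image_copy_start */
        uint64_t link_start;    /* ldr x1, _TEXT_BASE (CONFIG_SYS_TEXT_BASE) */
        uint64_t relocaddr;     /* x0 on entry: gd->relocaddr, the copy target */
};

/* Used only to decide whether any copying/fixup is needed at all */
static uint64_t run_to_copy(const struct reloc_inputs *in)
{
        return in->relocaddr - in->run_start;   /* first subs x9, x0, x1 */
}

/* Added to every .rela.dyn target location and addend by fixloop */
static uint64_t link_to_copy(const struct reloc_inputs *in)
{
        return in->relocaddr - in->link_start;  /* second subs x9, x0, x1 */
}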