Skip to content

Commit

Permalink
Add aarch32 support to kernel loader
Browse files Browse the repository at this point in the history
Signed-off-by: Nick Spinale <nick@nickspinale.com>
  • Loading branch information
nspin committed Feb 2, 2024
1 parent 2fe64e5 commit 93f58dd
Show file tree
Hide file tree
Showing 34 changed files with 980 additions and 131 deletions.
152 changes: 152 additions & 0 deletions crates/sel4-kernel-loader/asm/aarch32/head.S
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
/*
* Copyright 2024, Colias Group, LLC
* Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
*
* SPDX-License-Identifier: GPL-2.0-only
*/

#include "macros.h"
#include "registers.h"
#include "mm.h"

.global _start;
.global secondary_entry;

.extern __primary_stack
.extern __primary_stack_size
.extern arch_main
.extern arch_secondary_main

.extern loader_level_0_table

.section ".text.startup"

_start:
    /* r2 holds the boot argument handed over by the previous stage
     * (commonly a DTB pointer on 32-bit ARM).
     * NOTE(review): r0 is clobbered by the BSS-clearing loop below before
     * anything reads it, so this save looks vestigial — confirm. */
    mov r0, r2

    cpsid ifa // Disable IRQs, FIQs and async aborts that might be pending from bootloader

#if CONFIG_MAX_NUM_NODES > 1
    /* Enable SMP coherency via the Auxiliary Control Register. */
    mrc ACTLR(r1)
    orr r1, r1, #(1 << 6) /* enable SMP bit */
#ifdef CONFIG_ARM_CORTEX_A9
    orr r1, r1, #1 /* enable FW (cache/TLB maintenance broadcast) bit */
#endif
    mcr ACTLR(r1)
#endif /* CONFIG_MAX_NUM_NODES > 1 */

    /* Zero the BSS: r0 = cursor, r1 = end, r3 = 0. */
    ldr r0, =__bss_start // [TODO] GNU LD has __bss_start__ and __bss_end__ which feel more robust
    ldr r1, =_end
    mov r3, #0

clearzi:
    cmp r0, r1
    beq clearzi_exit
    str r3, [r0]      // *cursor = 0
    add r0, r0, #4    // word stores; assumes BSS is 4-byte aligned and sized
    b clearzi

clearzi_exit:

    /* __primary_stack_top is a pointer-sized variable holding the stack
     * top address — hence the extra dereference. */
    ldr r9, =__primary_stack_top
    ldr r9, [r9]
    mov sp, r9
    bl leave_hyp       // drop to SVC mode (assumes entry in HYP — confirm)
    bl init_core_state // invalidate caches, enable the MMU
    b arch_main        // jump into the loader proper; not expected to return
    b hang             // safety net if arch_main ever returns


secondary_entry:
    /* Entry point for non-boot CPUs.
     * NOTE(review): sp must already be valid here (leave_hyp copies it to
     * sp_svc and init_core_state pushes to it) — presumably set up by
     * whoever releases the secondary cores; confirm against caller. */
    bl leave_hyp
    bl init_core_state
    b arch_secondary_main // per-core entry into the loader proper
    b hang                // safety net if arch_secondary_main ever returns


hang:
    /* Park the CPU forever: low-power wait-for-event in a loop. */
    wfe
    b hang


BEGIN_LOCAL_FUNC(init_core_state)
    /* Per-core cache/MMU bring-up: invalidate the I-cache, invalidate the
     * D-cache by set/way, then enable the MMU. Only lr is preserved; the
     * dcache macro clobbers r0-r5, r7, r9-r11. */
    stmfd sp!, {lr}

    mov r0, #0
    mcr IIALL(r0) // ICIALLU: invalidate entire I-cache
    dcache isw    // invalidate entire D-cache by set/way (no clean)

    bl arm_enable_mmu

    ldmfd sp!, {pc} // return
END_FUNC(init_core_state)


BEGIN_FUNC(leave_hyp)
.arch_extension virt
    /*
     * Drop from HYP to SVC mode via an exception return.
     *
     * Copies the current sp into the banked SVC stack pointer, sets the
     * saved PSR to SVC mode with A/I/F masked (CPSR_SUPERVISOR), points
     * elr_hyp at our own return address, and erets — so execution resumes
     * at the caller's lr, but in SVC mode. Must be entered in HYP mode.
     * Clobbers r9.
     *
     * (The original `mrs r9, cpsr` was dead code: its result was
     * immediately overwritten by the ldr below, so it has been removed.)
     */
    ldr r9, =CPSR_SUPERVISOR
    msr sp_svc, sp
    msr spsr_cxsf, r9
    msr elr_hyp, lr
    eret
END_FUNC(leave_hyp)


BEGIN_FUNC(arm_enable_mmu)
    /* Turn on the MMU and caches, using loader_level_0_table as the
     * translation table base. Clobbers r0/r1 (plus whatever the cache
     * helpers clobber); preserves only lr. */
    stmfd sp!, {lr}

    /* Clean the D-cache first, but only if it is currently enabled
     * (SCTLR.C, bit 2). */
    mrc SCTLR(r1)
    and r1, r1, #(1 << 2)
    cmp r1, #0
    beq 1f
    bl flush_dcache
1:
    /* Ensure I-cache, D-cache and MMU are disabled before reprogramming. */
    mrc SCTLR(r1)
    bic r1, r1, #(1 << 12) /* Disable I-cache */
    bic r1, r1, #(1 << 2)  /* Disable D-Cache */
    bic r1, r1, #(1 << 0)  /* Disable MMU */
    mcr SCTLR(r1)

    /* Invalidate caches so no stale lines survive the enable. */
    bl invalidate_dcache
    bl invalidate_icache

    /* Set up TTBR0, enable caching of page-table walks.
     * loader_level_0_table is a pointer-sized variable holding the table
     * address, hence the dereference. 0x19 sets TTBR0 walk attribute bits
     * — NOTE(review): verify the exact RGN/IRGN/S encoding against the
     * ARM ARM for the target cores. */
    ldr r0, =loader_level_0_table
    ldr r0, [r0]
    orr r1, r0, #0x19
    mcr TTBR0(r1)
    mcr TLBIALL(r1) /* invalidate entire unified TLB (operand value ignored) */

    /* Set domain 0 to "client" (0b01) in the DACR: accesses are checked
     * against the page-table permission bits. */
    mov r1, #1
    mcr DACR(r1)

    /* Setup misc MMU state. */
    mov r1, #0
    mcr CONTEXTIDR(r1) /* set ASID to 0 */
    mcr TTBCR(r1)      /* TTBCR = 0: short-descriptor format, TTBR0 for all of VA space */
    mcr BPIALL(r1)     /* flush branch target cache */
    isb

    /* Enable MMU, D-cache, and I-cache. */
    mrc SCTLR(r0)
    orr r0, r0, #(1 << 13) /* V bit: exception vectors at high address 0xFFFF0000 */
    orr r0, r0, #(1 << 12) /* Enable I-cache */
    orr r0, r0, #(1 << 2)  /* Enable D-cache */
    orr r0, r0, #(1 << 0)  /* Enable MMU */
    mcr SCTLR(r0)

    /* Briefly unmask async aborts so any abort still pending from the
     * bootloader is taken and drained now, rather than surfacing later. */
    cpsie a
    dsb
    isb
    cpsid a

    ldmfd sp!, {pc} /* return */
END_FUNC(arm_enable_mmu)
16 changes: 16 additions & 0 deletions crates/sel4-kernel-loader/asm/aarch32/macros.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
/*
* Copyright 2023, Colias Group, LLC
*
* SPDX-License-Identifier: BSD-2-Clause
*/

/* Open a function that is local to this object file: set the ELF symbol
 * type to %function and emit the label. Pair with END_FUNC. */
#define BEGIN_LOCAL_FUNC(_name) \
    .type _name, %function ; \
_name:

/* Open an exported function: same as BEGIN_LOCAL_FUNC, plus .global so the
 * symbol is visible to the linker. */
#define BEGIN_FUNC(_name) \
    .global _name ; \
    BEGIN_LOCAL_FUNC(_name)

/* Close a function opened with BEGIN_FUNC/BEGIN_LOCAL_FUNC, recording the
 * symbol size (current location minus the label) for tooling. */
#define END_FUNC(_name) \
    .size _name, .-_name
53 changes: 53 additions & 0 deletions crates/sel4-kernel-loader/asm/aarch32/mm.S
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
* Copyright 2024, Colias Group, LLC
* Copyright 2014, General Dynamics C4 Systems
*
* SPDX-License-Identifier: GPL-2.0-only
*
* See:
* https://developer.arm.com/documentation/ddi0406/c/System-Level-Architecture/Common-Memory-System-Architecture-Features/Caches-and-branch-predictors/Cache-and-branch-predictor-maintenance-operations?lang=en
*/

#include "macros.h"
#include "registers.h"
#include "mm.h"

.text

BEGIN_FUNC(invalidate_dcache)
    /* Invalidate the entire D-cache by set/way, without cleaning.
     * The dcache macro clobbers r4-r11 among others, so the callee-saved
     * registers are preserved around it. */
    stmfd sp!, {r4-r11,lr}
    dcache isw
    ldmfd sp!, {r4-r11,pc}
END_FUNC(invalidate_dcache)

BEGIN_FUNC(invalidate_icache)
    /* Invalidate the entire I-cache (ICIALLU).
     * The architecture ignores the source register's value for this
     * operation, but the original passed a live/uninitialized r1; write an
     * explicit zero for consistency with init_core_state, which zeroes the
     * register before the same operation. Clobbers r1. */
    mov r1, #0
    mcr IIALL(r1)
    bx lr
END_FUNC(invalidate_icache)

BEGIN_FUNC(flush_dcache)
    /* Clean AND invalidate the entire D-cache by set/way.
     * Callee-saved r4-r11 preserved around the register-hungry macro. */
    stmfd sp!, {r4-r11,lr}
    dcache cisw
    ldmfd sp!, {r4-r11,pc}
END_FUNC(flush_dcache)

BEGIN_FUNC(arm_disable_dcaches)
    /* Turn the D-cache off safely: clean it (if currently on), disable it
     * in SCTLR, then invalidate so no stale lines remain. Preserves lr
     * only. */
    stmfd sp!, {lr}

    /* Clean D-Cache if enabled (SCTLR.C, bit 2). */
    mrc SCTLR(r1)
    and r1, r1, #(1 << 2)
    cmp r1, #0
    beq 1f
    bl flush_dcache
1:
    /* Disable the D-cache. */
    mrc SCTLR(r1)
    bic r1, r1, #(1 << 2) /* Disable D-Cache */
    mcr SCTLR(r1)

    /* Invalidate the (now-disabled) D-cache. */
    bl invalidate_dcache

    ldmfd sp!, {pc} /* return */
END_FUNC(arm_disable_dcaches)
65 changes: 65 additions & 0 deletions crates/sel4-kernel-loader/asm/aarch32/mm.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
/*
* Copyright 2023, Colias Group, LLC
* Copyright 2014, General Dynamics C4 Systems
*
* SPDX-License-Identifier: GPL-2.0-only
*/

#include "registers.h"

.macro dcache op
    /*
     * Standard ARMv7 set/way cache-maintenance loop: for every data or
     * unified cache level reported by CLIDR (up to the Level of Coherence),
     * apply \op to every line.
     *   \op = isw  -> invalidate by set/way
     *   \op = cisw -> clean and invalidate by set/way
     * Clobbers r0-r5, r7, r9-r11 and flags.
     */
    dmb
    mrc CLIDR(r0)
    mov r3, r0, lsr #23       // move LoC into position (LoC*2 ends up in low bits)
    ands r3, r3, #7 << 1      // extract LoC*2 from CLIDR

    beq 5f                    // if LoC is 0, there is nothing to maintain
    mov r10, #0               // start at cache level 0 (r10 holds level*2)

1:
    add r2, r10, r10, lsr #1  // work out 3x the current cache level
    mov r1, r0, lsr r2        // extract this level's cache-type bits from CLIDR
    and r1, r1, #7            // mask off the bits for the current level only
    cmp r1, #2                // is there a data/unified cache at this level?
    blt 4f                    // skip if no cache, or just i-cache

    mcr CSSELR(r10)           // select this level in the Cache Size Selection Register
    isb                       // sync CSSELR write before reading CCSIDR

    mrc CCSIDR(r1)
    and r2, r1, #7            // extract log2(line length in words) field
    add r2, r2, #4            // +4 gives the set-index shift
    movw r4, #0x3ff
    ands r4, r4, r1, lsr #3   // r4 = maximum way number
    clz r5, r4                // bit position of the way-index field
    movw r7, #0x7fff
    ands r7, r7, r1, lsr #13  // r7 = maximum set (index) number

2:
    mov r9, r7                // working copy of the maximum set index

3:
    orr r11, r10, r4, lsl r5  // factor way and cache level into r11
    orr r11, r11, r9, lsl r2  // factor set index into r11
    .ifeqs "\op", "isw"
    mcr DISW(r11)             // invalidate this set/way
    .endif
    .ifeqs "\op", "cisw"
    mcr DCISW(r11)            // clean + invalidate this set/way
    .endif
    subs r9, r9, #1           // next set
    bge 3b
    subs r4, r4, #1           // next way
    bge 2b

4:
    add r10, r10, #2          // next cache level (level*2)
    cmp r3, r10
    bgt 1b

5:
    mov r10, #0               // switch back to cache level 0
    mcr CSSELR(r10)           // was a raw `mcr p15, 2, r10, c0, c0, 0`; use the CSSELR macro (identical encoding) for consistency
    dsb st
    isb
.endm
27 changes: 27 additions & 0 deletions crates/sel4-kernel-loader/asm/aarch32/psci.S
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
*
* SPDX-License-Identifier: GPL-2.0-only
*/

#include "macros.h"

/* Issue a PSCI call via \op (smc or hvc). Arguments and the return value
 * travel in r0-r3 per the SMC calling convention.
 * NOTE(review): SMCCC32 only permits r0-r3 to be clobbered by the callee,
 * so preserving r3-r11 here is conservative — confirm intent. The dsb
 * ensures outstanding memory accesses complete before entering firmware. */
.macro psci_func op
    stmfd sp!, {r3-r11, lr}
    dsb
    \op #0
    ldmfd sp!, {r3-r11, pc}
.endm

/* smc/hvc require the security and virtualization extensions to assemble. */
.arch_extension sec
.arch_extension virt

.text

/* PSCI via Secure Monitor Call (firmware behind SMC). */
BEGIN_FUNC(smc_psci_func)
    psci_func smc
END_FUNC(smc_psci_func)

/* PSCI via Hypervisor Call (PSCI implemented by a hypervisor). */
BEGIN_FUNC(hvc_psci_func)
    psci_func hvc
END_FUNC(hvc_psci_func)
65 changes: 65 additions & 0 deletions crates/sel4-kernel-loader/asm/aarch32/registers.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
/*
* Copyright 2024, Colias Group, LLC
* Copyright 2014, General Dynamics C4 Systems
*
* SPDX-License-Identifier: GPL-2.0-only
*/

/* CP15 system-register accessors. Each macro expands to the operand list
 * for mrc/mcr, e.g. `mrc SCTLR(r0)` reads SCTLR into r0. */
#define SCTLR(reg) p15, 0, reg, c1, c0, 0       /* System Control Register */
#define CLIDR(reg) p15, 1, reg, c0, c0, 1       /* Cache Level ID Register */
#define TTBR0(reg) p15, 0, reg, c2, c0, 0       /* Translation Table Base 0 */
#define TTBCR(reg) p15, 0, reg, c2, c0, 2       /* Translation Table Base Control */
#define DACR(reg) p15, 0, reg, c3, c0, 0        /* Domain Access Control */
#define IIALL(reg) p15, 0, reg, c7, c5, 0       /* ICIALLU: invalidate all I-cache */
#define BPIALL(reg) p15, 0, reg, c7, c5, 6      /* invalidate branch predictor */
#define DTLBIALL(reg) p15, 0, reg, c8, c6, 0    /* invalidate entire data TLB */
#define TLBIALL(reg) p15, 0, reg, c8, c7, 0     /* invalidate entire unified TLB */
#define DTLBIASID(reg) p15, 0, reg, c8, c6, 2   /* invalidate data TLB by ASID */
#define TLBIASID(reg) p15, 0, reg, c8, c7, 2    /* invalidate unified TLB by ASID */
#define CONTEXTIDR(reg) p15, 0, reg, c13, c0, 1 /* Context ID (holds ASID) */

/* Processor mode encodings (CPSR.M field) */
#define PMODE_USER 0x10
#define PMODE_FIQ 0x11
#define PMODE_IRQ 0x12
#define PMODE_SUPERVISOR 0x13
#define PMODE_ABORT 0x17
#define PMODE_UNDEFINED 0x1b
#define PMODE_SYSTEM 0x1f

/* Processor exception mask bits (CPSR A/I/F) */
#define PMASK_ASYNC_ABORT (1 << 8)
#define PMASK_IRQ (1 << 7)
#define PMASK_FIRQ (1 << 6)

/* CPSR value for SVC mode with all asynchronous exceptions masked;
 * written to spsr before the eret that leaves HYP mode. */
#define CPSR_SUPERVISOR ( PMASK_FIRQ \
| PMASK_IRQ \
| PMASK_ASYNC_ABORT \
| PMODE_SUPERVISOR )

/* Cache maintenance / identification */
#define CCSIDR(reg) p15, 1, reg, c0, c0, 0      /* Cache Size ID (per CSSELR level) */
#define CSSELR(reg) p15, 2, reg, c0, c0, 0      /* Cache Size Selection */
#define ACTLR(reg) p15, 0, reg, c1, c0, 1       /* Auxiliary Control */
#define DISW(reg) p15, 0, reg, c7, c6, 2        /* DCISW: invalidate D-cache line by set/way */
#define DCISW(reg) p15, 0, reg, c7, c14, 2      /* DCCISW: clean+invalidate D-cache line by set/way */

/* Hypervisor-mode (PL2) registers */
#define HVBAR(reg) p15, 4, reg, c12, c0, 0      /* Hyp Vector Base Address */
#define HCR(reg) p15, 4, reg, c1 , c1, 0        /* Hyp Configuration */
#define HSCTLR(reg) p15, 4, reg, c1 , c0, 0     /* Hyp System Control */
#define HACTLR(reg) p15, 4, reg, c1 , c0, 1     /* Hyp Auxiliary Control */
#define HDCR(reg) p15, 4, reg, c1 , c1, 1       /* Hyp Debug Configuration */
#define HCPTR(reg) p15, 4, reg, c1 , c1, 2      /* Hyp Coprocessor Trap */
#define HSTR(reg) p15, 4, reg, c1 , c1, 3       /* Hyp System Trap */
#define HACR(reg) p15, 4, reg, c1 , c1, 7       /* Hyp Auxiliary Configuration */
#define HTCR(reg) p15, 4, reg, c2 , c0, 2       /* Hyp Translation Control */
#define HADFSR(reg) p15, 4, reg, c5 , c1, 0     /* Hyp Auxiliary Data Fault Status */
#define HAIFSR(reg) p15, 4, reg, c5 , c1, 1     /* Hyp Auxiliary Instruction Fault Status */
#define HSR(reg) p15, 4, reg, c5 , c2, 0        /* Hyp Syndrome */
#define HDFAR(reg) p15, 4, reg, c6 , c0, 0      /* Hyp Data Fault Address */
#define HIFAR(reg) p15, 4, reg, c6 , c0, 2      /* Hyp Instruction Fault Address */
#define HPFAR(reg) p15, 4, reg, c6 , c0, 4      /* Hyp IPA Fault Address */
#define HMAIR0(reg) p15, 4, reg, c10, c2, 0     /* Hyp Memory Attribute Indirection 0 */
#define HMAIR1(reg) p15, 4, reg, c10, c2, 1     /* Hyp Memory Attribute Indirection 1 */
#define HAMAIR0(reg) p15, 4, reg, c10, c3, 0    /* Hyp Aux Memory Attribute Indirection 0 */
#define HAMAIR1(reg) p15, 4, reg, c10, c3, 1    /* Hyp Aux Memory Attribute Indirection 1 */
#define HTPIDR(reg) p15, 4, reg, c13, c0, 2     /* Hyp Software Thread ID */
#define HTTBR(rh,rl) p15, 4, rl, rh, c2         /* Hyp Translation Table Base (64-bit mcrr/mrrc form) */
Loading

0 comments on commit 93f58dd

Please sign in to comment.