@@ -198,8 +198,11 @@ InitEL3:
198198 orr w1 , w1 , #( 1 << 1 ) / * Set IRQ bit (IRQs routed to EL3) * /
199199 msr SCR_EL3 , x1
200200
201- / * Configure cpu auxiliary control register EL1 * /
202- ldr x0 , = 0x80CA000 / * L1 Data prefetch control - 5 , Enable device split throttle , 2 independent data prefetch streams * /
201+ / * Configure CPUACTLR_EL1 - read - modify - write to preserve BL31 workarounds:
202+ * 859971 , 1319367 , CVE - 2017 - 5715 , CVE - 2018 - 3639 , CVE - 2022 - 23960 * /
203+ mrs x0 , S3_1_C15_C2_0 / * Read current CPUACTLR_EL1 * /
204+ ldr x1 , = 0x80CA000 / * L1 Data prefetch control - 5 , Enable device split throttle , 2 independent data prefetch streams * /
205+ orr x0 , x0 , x1 / * Merge with existing value * /
203206#if defined(CONFIG_ARM_ERRATA_855873) && CONFIG_ARM_ERRATA_855873
204207 / * Set ENDCCASCI bit in CPUACTLR_EL1 register , to execute data
205208 * cache clean operations as data cache clean and invalidate
@@ -1231,4 +1234,101 @@ gicv2_init_secure:
123112341 :
12321235 ret
12331236
#if defined(BOOT_EL1) && defined(EL2_HYPERVISOR) && EL2_HYPERVISOR == 1
/*
 * el2_to_el1_boot - drop from EL2 to EL1 and jump to an application.
 *
 * Inputs:
 *   x0: entry_point - address to begin executing at in EL1
 *   x1: dts_addr    - device tree blob address (delivered to the app in x0)
 *
 * Never returns: ends in an ERET into EL1h with all DAIF interrupts masked,
 * MMU/caches off (SCTLR_EL1 holds only its RES1 bits) and x0 = dts_addr.
 * x19/x20 are used without saving; that is fine because control never
 * returns to the caller's context after the ERET.
 *
 * Based on the ARM Architecture Reference Manual and the U-Boot
 * armv8_switch_to_el1 implementation.
 */
.global el2_to_el1_boot
el2_to_el1_boot:
    /* Stash the arguments away from the x0/x1 scratch registers used below */
    mov x19, x0                  /* x19 = entry_point */
    mov x20, x1                  /* x20 = dts_addr */

    /* 1. Let EL1 access the physical counter/timer without trapping to EL2 */
    mrs x0, CNTHCTL_EL2
    orr x0, x0, #3               /* EL1PCEN (bit 1) | EL1PCTEN (bit 0) */
    msr CNTHCTL_EL2, x0
    msr CNTVOFF_EL2, xzr         /* Virtual counter offset = 0 */

    /* 2. Make the virtualized CPU IDs seen at EL1 match the real hardware */
    mrs x0, MIDR_EL1
    msr VPIDR_EL2, x0
    mrs x0, MPIDR_EL1
    msr VMPIDR_EL2, x0

    /* 3. Disable coprocessor/system-register traps to EL2 */
    mov x0, #0x33ff              /* CPTR_EL2: RES1 bits ([13:12],[9:0]) set, TFP=0 => no FP/SIMD trap */
    msr CPTR_EL2, x0
    msr HSTR_EL2, xzr            /* No CP15 system-register traps to EL2 */
    mov x0, #(3 << 20)           /* CPACR_EL1.FPEN=0b11: full FP/SIMD access at EL1/EL0 */
    msr CPACR_EL1, x0

    /* 4. SCTLR_EL1 safe defaults: only the RES1 bits, so the MMU, caches and
     * alignment checking are all OFF when EL1 starts executing. */
    /* RES1 bits: 29, 28, 23, 22, 20, 11 => 0x30d00800 */
    movz x0, #0x800
    movk x0, #0x30d0, lsl #16
    msr SCTLR_EL1, x0

    /* 5. Hand the current stack pointer and vector table over to EL1 */
    mov x0, sp
    bic x0, x0, #0xF             /* SP must be 16-byte aligned per the ARM ARM */
    msr SP_EL1, x0
    mrs x0, VBAR_EL2
    msr VBAR_EL1, x0
    dsb sy                       /* Ensure SP_EL1 and VBAR_EL1 writes complete */
    isb                          /* ...and are observed by later instructions */

    /* 6. HCR_EL2: EL1 runs AArch64, HVC disabled.  If the core implements
     * Pointer Authentication (any non-zero APA/API/GPA/GPI field in
     * ID_AA64ISAR1_EL1), additionally set HCR_EL2.API/APK so that PAuth
     * instructions and key-register accesses at EL1/EL0 are NOT trapped
     * to EL2.  (API/APK = 1 means "do not trap" in the ARM ARM.) */
    mrs x0, ID_AA64ISAR1_EL1     /* ISA feature register with the PAuth fields */
    mov x1, #(0xF << 28)         /* GPI  [31:28] */
    orr x1, x1, #(0xF << 24)     /* GPA  [27:24] */
    orr x1, x1, #(0xF << 8)      /* API  [11:8]  */
    orr x1, x1, #(0xF << 4)      /* APA  [7:4]   */
    tst x0, x1                   /* Z=1 when no PAuth scheme is implemented */
    mov x0, #(1 << 31)           /* HCR_EL2.RW: EL1 is AArch64 */
    orr x0, x0, #(1 << 29)       /* HCR_EL2.HCD: HVC instruction is UNDEFINED */
    mov x1, x0                   /* x1 = base value plus the PAuth bits... */
    orr x1, x1, #(1 << 41)       /* HCR_EL2.API=1: do NOT trap PAuth instructions */
    orr x1, x1, #(1 << 40)       /* HCR_EL2.APK=1: do NOT trap PAuth key registers */
    csel x0, x0, x1, eq          /* No PAuth (eq): base only; else base|API|APK */
    msr HCR_EL2, x0
    dsb sy                       /* Ensure HCR_EL2 write completes */
    isb                          /* ...before anything depends on the new config */

    /* 7. SPSR_EL2 for the exception return: EL1h, AArch64, DAIF all masked.
     * M[3:0]=0101 selects EL1h (uses SP_EL1) - NOT 0100, which is EL1t.
     * M[4]=0 selects AArch64 (1 would be AArch32).
     * DAIF=0xF masks all interrupt classes.
     * Value: 0x3C5 = (0xF << 6) | 0x5 */
    movz x0, #0x3C5              /* DAIF=0xF (bits 9:6), M[3:0]=0x5 (EL1h) */
    msr SPSR_EL2, x0
    dsb sy                       /* Ensure SPSR_EL2 write completes */
    isb                          /* ...before the ERET consumes it */

    /* 8. Program the return address, load the app's boot arguments, ERET.
     * Linux-style boot contract: x0 = DTB pointer, x1-x3 zeroed. */
    msr ELR_EL2, x19             /* Entry point consumed by the ERET */
    mov x0, x20                  /* DTB address in x0 (first argument) */
    mov x1, xzr                  /* Zero the remaining argument registers */
    mov x2, xzr
    mov x3, xzr
    dsb sy                       /* All system-register writes complete... */
    isb                          /* ...and visible before the exception return */
    eret                         /* PSTATE <- SPSR_EL2, PC <- ELR_EL2: enter EL1 */

    /* Not reachable: ERET never falls through */
    b .
#endif /* BOOT_EL1 && EL2_HYPERVISOR */
1333+
12341334.end
0 commit comments