diff --git a/Makefile b/Makefile
index 32918c385e571f284a9080ab2e15e2dc98d68b0d..18800cb838ca8fdf5a2a0d775047192f4312427d 100644
--- a/Makefile
+++ b/Makefile
@@ -141,6 +141,15 @@ else
     $(error Unknown BRANCH_PROTECTION value ${BRANCH_PROTECTION})
 endif
 
+# USE_SPINLOCK_CAS requires AArch64 build
+ifeq (${USE_SPINLOCK_CAS},1)
+ifneq (${ARCH},aarch64)
+    $(error USE_SPINLOCK_CAS requires AArch64)
+else
+    $(info USE_SPINLOCK_CAS is an experimental feature)
+endif
+endif
+
 ################################################################################
 # Toolchain
 ################################################################################
@@ -690,6 +699,7 @@ $(eval $(call assert_boolean,WARMBOOT_ENABLE_DCACHE_EARLY))
 $(eval $(call assert_boolean,BL2_AT_EL3))
 $(eval $(call assert_boolean,BL2_IN_XIP_MEM))
 $(eval $(call assert_boolean,BL2_INV_DCACHE))
+$(eval $(call assert_boolean,USE_SPINLOCK_CAS))
 
 $(eval $(call assert_numeric,ARM_ARCH_MAJOR))
 $(eval $(call assert_numeric,ARM_ARCH_MINOR))
@@ -755,6 +765,7 @@ $(eval $(call add_define,WARMBOOT_ENABLE_DCACHE_EARLY))
 $(eval $(call add_define,BL2_AT_EL3))
 $(eval $(call add_define,BL2_IN_XIP_MEM))
 $(eval $(call add_define,BL2_INV_DCACHE))
+$(eval $(call add_define,USE_SPINLOCK_CAS))
 
 ifeq (${SANITIZE_UB},trap)
     $(eval $(call add_define,MONITOR_TRAPS))
diff --git a/docs/design/firmware-design.rst b/docs/design/firmware-design.rst
index dc0820826202c9bc06f0ce46a9b73e8a874e4af1..2cbd9c9462d101a547edb71ca7b411573b381fc1 100644
--- a/docs/design/firmware-design.rst
+++ b/docs/design/firmware-design.rst
@@ -2540,8 +2540,11 @@ Armv8.1-A
 This Architecture Extension is targeted when ``ARM_ARCH_MAJOR`` >= 8, or when
 ``ARM_ARCH_MAJOR`` == 8 and ``ARM_ARCH_MINOR`` >= 1.
 
-- The Compare and Swap instruction is used to implement spinlocks. Otherwise,
-  the load-/store-exclusive instruction pair is used.
+- By default, a load-/store-exclusive instruction pair is used to implement
+  spinlocks. The ``USE_SPINLOCK_CAS`` build option when set to 1 selects the
+  spinlock implementation using the ARMv8.1-LSE Compare and Swap instruction.
+  Notice this instruction is only available in AArch64 execution state, so
+  the option is only available to AArch64 builds.
 
 Armv8.2-A
 ~~~~~~~~~
diff --git a/docs/getting_started/user-guide.rst b/docs/getting_started/user-guide.rst
index 44bfb7a33449d33a52ea3713c1e7a9abb79f3e54..ae276f25dcaf48886348df7e978e0286829fe121 100644
--- a/docs/getting_started/user-guide.rst
+++ b/docs/getting_started/user-guide.rst
@@ -820,6 +820,10 @@ Common build options
   reduces SRAM usage. Refer to `Library at ROM`_ for further details. Default
   is 0.
 
+- ``USE_SPINLOCK_CAS``: Setting this build flag to 1 selects the spinlock
+  implementation variant using the ARMv8.1-LSE compare-and-swap instruction.
+  Notice this option is experimental and only available to AArch64 builds.
+
 - ``V``: Verbose build. If assigned anything other than 0, the build commands
   are printed. Default is 0.
 
diff --git a/lib/locks/exclusive/aarch64/spinlock.S b/lib/locks/exclusive/aarch64/spinlock.S
index d0569f1cdc95ff515dcfb5a872f324ab4f83f4b2..e941b8a3458169d18e0fbd24f46467a070ece2b9 100644
--- a/lib/locks/exclusive/aarch64/spinlock.S
+++ b/lib/locks/exclusive/aarch64/spinlock.S
@@ -9,56 +9,38 @@
 	.globl	spin_lock
 	.globl	spin_unlock
 
-#if ARM_ARCH_AT_LEAST(8, 1)
+#if USE_SPINLOCK_CAS
+#if !ARM_ARCH_AT_LEAST(8, 1)
+#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
+#endif
 
 /*
  * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
  * Swap instruction.
  */
-# define USE_CAS	1
-
-/*
- * Lock contenders using CAS, upon failing to acquire the lock, wait with the
- * monitor in open state. Therefore, a normal store upon unlocking won't
- * generate an SEV. Use explicit SEV instruction with CAS unlock.
- */
-# define COND_SEV()	sev
-
-#else
-
-# define USE_CAS	0
-
-/*
- * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
- * with the monitor in exclusive state. A normal store upon unlocking will
- * implicitly generate an envent; so, no explicit SEV with unlock is required.
- */
-# define COND_SEV()
-
-#endif
-
-#if USE_CAS
 
 /*
  * Acquire lock using Compare and Swap instruction.
  *
- * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
- * 0.
+ * Compare for 0 with acquire semantics, and swap 1. If failed to acquire, use
+ * load exclusive semantics to monitor the address and enter WFE.
  *
  * void spin_lock(spinlock_t *lock);
  */
 func spin_lock
 	mov	w2, #1
-	sevl
-1:
+1:	mov	w1, wzr
+2:	casa	w1, w2, [x0]
+	cbz	w1, 3f
+	ldxr	w1, [x0]
+	cbz	w1, 2b
 	wfe
-	mov	w1, wzr
-	casa	w1, w2, [x0]
-	cbnz	w1, 1b
+	b	1b
+3:
 	ret
 endfunc spin_lock
 
-#else /* !USE_CAS */
+#else /* !USE_SPINLOCK_CAS */
 
 /*
  * Acquire lock using load-/store-exclusive instruction pair.
@@ -76,17 +58,18 @@ l2:	ldaxr	w1, [x0]
 	ret
 endfunc spin_lock
 
-#endif /* USE_CAS */
+#endif /* USE_SPINLOCK_CAS */
 
 /*
  * Release lock previously acquired by spin_lock.
  *
- * Unconditionally write 0, and conditionally generate an event.
+ * Use store-release to unconditionally clear the spinlock variable.
+ * Store operation generates an event to all cores waiting in WFE
+ * when address is monitored by the global monitor.
  *
  * void spin_unlock(spinlock_t *lock);
  */
 func spin_unlock
 	stlr	wzr, [x0]
-	COND_SEV()
 	ret
 endfunc spin_unlock
diff --git a/make_helpers/defaults.mk b/make_helpers/defaults.mk
index b6f76559cb7dcd30c199b70c2a51507e48506707..b7fb173b16c402c356d805915d1c078d20717e31 100644
--- a/make_helpers/defaults.mk
+++ b/make_helpers/defaults.mk
@@ -234,3 +234,8 @@ else
 endif
 
 SANITIZE_UB := off
+
+# For ARMv8.1 (AArch64) platforms, enabling this option selects the spinlock
+# implementation variant using the ARMv8.1-LSE compare-and-swap instruction.
+# Default: disabled
+USE_SPINLOCK_CAS := 0
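
Note (illustration only, not part of the patch): the sketch below approximates in C, using GCC atomic builtins, the locking protocol that the new CAS path in spinlock.S implements -- compare for 0 with acquire semantics and swap in 1, then clear the lock with a plain store-release. The function names spin_lock_cas_sketch/spin_unlock_sketch are made up for this example, and the struct is re-declared here (mirroring the shape of TF-A's spinlock_t) so the snippet is self-contained. The real assembly additionally parks waiters with LDXR + WFE so they sleep until the unlocking STLR generates an event; that cannot be expressed in portable C, so this sketch simply retries.

#include <stdint.h>

/* Same shape as TF-A's spinlock_t: 0 = unlocked, 1 = locked. */
typedef struct spinlock {
	volatile uint32_t lock;
} spinlock_t;

static void spin_lock_cas_sketch(spinlock_t *l)
{
	uint32_t expected;

	do {
		expected = 0U;
		/*
		 * Rough equivalent of "casa w1, w2, [x0]": if *lock == 0,
		 * atomically write 1 with acquire ordering; otherwise retry.
		 */
	} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1U,
					      0 /* strong CAS */,
					      __ATOMIC_ACQUIRE,
					      __ATOMIC_RELAXED));
}

static void spin_unlock_sketch(spinlock_t *l)
{
	/* Rough equivalent of "stlr wzr, [x0]": store-release clears the lock. */
	__atomic_store_n(&l->lock, 0U, __ATOMIC_RELEASE);
}

Callers bracket their critical section with these two calls, exactly as firmware code does with the real spin_lock()/spin_unlock(). To exercise the new option, a build along the lines of "make PLAT=fvp ARM_ARCH_MINOR=1 USE_SPINLOCK_CAS=1" would be expected to work, since the option requires an ARMv8.1 AArch64 build; the exact platform and toolchain arguments depend on the target and are only given here as an example.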