kern: fix unnecessarily strong load in aligned spinlock

Copy-paste error I didn't notice for two years, whoops
Michael Scire 2022-12-25 01:56:23 -07:00
parent 04c9004e05
commit e4b9930bf3

@@ -77,7 +77,7 @@ namespace ams::kern::arch::arm64 {
             __asm__ __volatile__(
                 "    prfm   pstl1keep, %[m_next_ticket]\n"
                 "1:\n"
-                "    ldaxrh %w[tmp0], %[m_next_ticket]\n"
+                "    ldxrh  %w[tmp0], %[m_next_ticket]\n"
                 "    add    %w[tmp1], %w[tmp0], #0x1\n"
                 "    stxrh  %w[got_lock], %w[tmp1], %[m_next_ticket]\n"
                 "    cbnz   %w[got_lock], 1b\n"