arm64: lib: __arch_clear_user(): fold fixups into body
author		Mark Rutland <[email protected]>
		Tue, 19 Oct 2021 16:02:07 +0000 (17:02 +0100)
committer	Will Deacon <[email protected]>
		Thu, 21 Oct 2021 09:45:21 +0000 (10:45 +0100)
Like other functions, __arch_clear_user() places its exception fixups in
the `.fixup` section without any clear association with
__arch_clear_user() itself. If we backtrace the fixup code, it will be
symbolized as an offset from the nearest prior symbol, which happens to
be `__entry_tramp_text_end`. Further, since the PC adjustment for the
fixup is akin to a direct branch rather than a function call,
__arch_clear_user() itself will be missing from the backtrace.
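
For illustration, a minimal sketch of the pre-existing pattern follows,
using a hypothetical my_func (the function name, the ldtr access, and the
-EFAULT return value are illustrative, not taken from this patch). The
fixup code is emitted into the shared `.fixup` section, outside the
symbol bounds of the function it belongs to:

	#include <linux/linkage.h>
	#include <asm/asm-uaccess.h>

	SYM_FUNC_START(my_func)			// hypothetical function
	USER(9f, ldtr	x1, [x0])		// faulting user access; fixup at label 9
		mov	x0, #0
		ret
	SYM_FUNC_END(my_func)

		.section .fixup,"ax"		// fixup lands in a distant section,
		.align	2			// with no symbol tying it to my_func
	9:	mov	x0, #-14		// e.g. return -EFAULT (illustrative)
		ret
		.previous

A fault in my_func transfers control to label 9, whose PC falls outside
my_func's symbol, so a backtrace at that point symbolizes it relative to
whatever symbol happens to precede the `.fixup` section.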

This is confusing and hinders debugging. In general this pattern will
also be problematic for CONFIG_LIVEPATCH, since fixups often return to
their associated function, but this isn't accurately captured in the
stacktrace.

To solve these issues for assembly functions, we must move fixups into
the body of the functions themselves, after the usual fast-path returns.
This patch does so for __arch_clear_user().
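
The resulting layout looks like the following sketch (again using the
hypothetical my_func from above): the fixup sits after the fast-path
return but before SYM_FUNC_END(), so it is symbolized as part of the
function itself:

	SYM_FUNC_START(my_func)			// hypothetical function
	USER(9f, ldtr	x1, [x0])		// faulting user access; fixup at label 9
		mov	x0, #0
		ret				// fast-path return

		// Exception fixups
	9:	mov	x0, #-14		// e.g. return -EFAULT (illustrative)
		ret
	SYM_FUNC_END(my_func)			// fixup now within my_func's bounds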

Inline assembly will be dealt with in subsequent patches.

Other than the improved backtracing, there should be no functional
change as a result of this patch.

Signed-off-by: Mark Rutland <[email protected]>
Acked-by: Robin Murphy <[email protected]>
Reviewed-by: Ard Biesheuvel <[email protected]>
Cc: Catalin Marinas <[email protected]>
Cc: James Morse <[email protected]>
Cc: Mark Brown <[email protected]>
Cc: Will Deacon <[email protected]>
Link: https://lore.kernel.org/r/[email protected]
Signed-off-by: Will Deacon <[email protected]>
arch/arm64/lib/clear_user.S

index a7efb2ad2a1c11d242da2d03421cf10dcdc3ab7e..2827fddc33f832d7349cb535440bebe40435f138 100644 (file)
@@ -45,13 +45,11 @@ USER(9f, sttrh      wzr, [x0])
 USER(7f, sttrb wzr, [x2, #-1])
 5:     mov     x0, #0
        ret
-SYM_FUNC_END(__arch_clear_user)
-EXPORT_SYMBOL(__arch_clear_user)
 
-       .section .fixup,"ax"
-       .align  2
+       // Exception fixups
 7:     sub     x0, x2, #5      // Adjust for faulting on the final byte...
 8:     add     x0, x0, #4      // ...or the second word of the 4-7 byte case
 9:     sub     x0, x2, x0
        ret
-       .previous
+SYM_FUNC_END(__arch_clear_user)
+EXPORT_SYMBOL(__arch_clear_user)