Git Repo - linux.git/commitdiff
Merge branch 'kcsan' of git://git.kernel.org/pub/scm/linux/kernel/git/paulmck/linux...
author: Ingo Molnar <[email protected]>
Fri, 9 Oct 2020 06:35:01 +0000 (08:35 +0200)
committer: Ingo Molnar <[email protected]>
Fri, 9 Oct 2020 06:56:02 +0000 (08:56 +0200)
Pull KCSAN updates for v5.10 from Paul E. McKenney:

 - Improve kernel messages.

 - Be more permissive with bitops races under KCSAN_ASSUME_PLAIN_WRITES_ATOMIC=y.

 - Optimize debugfs stat counters.

 - Introduce the instrument_*read_write() annotations, to provide a
   finer description of certain ops - using KCSAN's compound instrumentation.
   Use them for atomic RMW and bitops, where appropriate.
   Doing this might find new races.
   (Depends on the compiler having tsan-compound-read-before-write=1 support.)

 - Support atomic built-ins, which will help certain architectures, such as s390.

 - Misc enhancements and smaller fixes.

Signed-off-by: Ingo Molnar <[email protected]>
1  2 
tools/objtool/check.c

diff --combined tools/objtool/check.c
index 90a66891441ab19db2503f5a3e13066733df818a,5eee156f6f903772511cbe2dfe112c4f38854c8b..3bd156d3974755bc6ce2f098671e063efb218825
@@@ -528,6 -528,61 +528,61 @@@ static const char *uaccess_safe_builtin
        "__tsan_write4",
        "__tsan_write8",
        "__tsan_write16",
+       "__tsan_read_write1",
+       "__tsan_read_write2",
+       "__tsan_read_write4",
+       "__tsan_read_write8",
+       "__tsan_read_write16",
+       "__tsan_atomic8_load",
+       "__tsan_atomic16_load",
+       "__tsan_atomic32_load",
+       "__tsan_atomic64_load",
+       "__tsan_atomic8_store",
+       "__tsan_atomic16_store",
+       "__tsan_atomic32_store",
+       "__tsan_atomic64_store",
+       "__tsan_atomic8_exchange",
+       "__tsan_atomic16_exchange",
+       "__tsan_atomic32_exchange",
+       "__tsan_atomic64_exchange",
+       "__tsan_atomic8_fetch_add",
+       "__tsan_atomic16_fetch_add",
+       "__tsan_atomic32_fetch_add",
+       "__tsan_atomic64_fetch_add",
+       "__tsan_atomic8_fetch_sub",
+       "__tsan_atomic16_fetch_sub",
+       "__tsan_atomic32_fetch_sub",
+       "__tsan_atomic64_fetch_sub",
+       "__tsan_atomic8_fetch_and",
+       "__tsan_atomic16_fetch_and",
+       "__tsan_atomic32_fetch_and",
+       "__tsan_atomic64_fetch_and",
+       "__tsan_atomic8_fetch_or",
+       "__tsan_atomic16_fetch_or",
+       "__tsan_atomic32_fetch_or",
+       "__tsan_atomic64_fetch_or",
+       "__tsan_atomic8_fetch_xor",
+       "__tsan_atomic16_fetch_xor",
+       "__tsan_atomic32_fetch_xor",
+       "__tsan_atomic64_fetch_xor",
+       "__tsan_atomic8_fetch_nand",
+       "__tsan_atomic16_fetch_nand",
+       "__tsan_atomic32_fetch_nand",
+       "__tsan_atomic64_fetch_nand",
+       "__tsan_atomic8_compare_exchange_strong",
+       "__tsan_atomic16_compare_exchange_strong",
+       "__tsan_atomic32_compare_exchange_strong",
+       "__tsan_atomic64_compare_exchange_strong",
+       "__tsan_atomic8_compare_exchange_weak",
+       "__tsan_atomic16_compare_exchange_weak",
+       "__tsan_atomic32_compare_exchange_weak",
+       "__tsan_atomic64_compare_exchange_weak",
+       "__tsan_atomic8_compare_exchange_val",
+       "__tsan_atomic16_compare_exchange_val",
+       "__tsan_atomic32_compare_exchange_val",
+       "__tsan_atomic64_compare_exchange_val",
+       "__tsan_atomic_thread_fence",
+       "__tsan_atomic_signal_fence",
        /* KCOV */
        "write_comp_data",
        "check_kcov_mode",
@@@ -619,7 -674,7 +674,7 @@@ static int add_jump_destinations(struc
                if (!is_static_jump(insn))
                        continue;
  
 -              if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
 +              if (insn->offset == FAKE_JUMP_OFFSET)
                        continue;
  
                reloc = find_reloc_by_dest_range(file->elf, insn->sec,
This page took 0.086836 seconds and 4 git commands to generate.