Diffstat (limited to 'compiler-rt/lib')
 compiler-rt/lib/asan/tests/CMakeLists.txt | 14 ++++++++++++--
 compiler-rt/lib/builtins/cpu_model/x86.c  |  7 +++++++
 compiler-rt/lib/msan/msan.h               |  1 +
 compiler-rt/lib/msan/msan_allocator.cpp   | 44 ++++++++++++++++----
 compiler-rt/lib/msan/msan_report.cpp      |  4 ++++
 5 files changed, 60 insertions(+), 10 deletions(-)
diff --git a/compiler-rt/lib/asan/tests/CMakeLists.txt b/compiler-rt/lib/asan/tests/CMakeLists.txt
index 9cd9c97..6d88c96 100644
--- a/compiler-rt/lib/asan/tests/CMakeLists.txt
+++ b/compiler-rt/lib/asan/tests/CMakeLists.txt
@@ -170,11 +170,21 @@ function(add_asan_tests arch test_runtime)
set(CONFIG_NAME ${ARCH_UPPER_CASE}${OS_NAME}Config)
set(CONFIG_NAME_DYNAMIC ${ARCH_UPPER_CASE}${OS_NAME}DynamicConfig)
+ # On some platforms, with COMPILER_RT_TEST_STANDALONE_BUILD_LIBS=OFF, the
+ # unit tests can be run against the runtime that shipped with the host
+ # compiler. Setting COMPILER_RT_ASAN_UNIT_TESTS_USE_HOST_RUNTIME=ON removes
+ # the dependency on `asan`, allowing the tests to run without a newly built
+ # asan runtime.
+ set(ASAN_UNIT_TEST_DEPS asan)
+ if(COMPILER_RT_ASAN_UNIT_TESTS_USE_HOST_RUNTIME)
+ set(ASAN_UNIT_TEST_DEPS)
+ endif()
+
# Closure to keep the values.
function(generate_asan_tests test_objects test_suite testname)
generate_compiler_rt_tests(${test_objects} ${test_suite} ${testname} ${arch}
COMPILE_DEPS ${ASAN_UNITTEST_HEADERS} ${ASAN_IGNORELIST_FILE}
- DEPS asan
+ DEPS ${ASAN_UNIT_TEST_DEPS}
KIND ${TEST_KIND}
${ARGN}
)
@@ -215,7 +225,7 @@ function(add_asan_tests arch test_runtime)
add_compiler_rt_test(AsanDynamicUnitTests "${dynamic_test_name}" "${arch}"
SUBDIR "${CONFIG_NAME_DYNAMIC}"
OBJECTS ${ASAN_INST_TEST_OBJECTS}
- DEPS asan ${ASAN_INST_TEST_OBJECTS}
+ DEPS ${ASAN_UNIT_TEST_DEPS} ${ASAN_INST_TEST_OBJECTS}
LINK_FLAGS ${ASAN_DYNAMIC_UNITTEST_INSTRUMENTED_LINK_FLAGS} ${TARGET_LINK_FLAGS} ${DYNAMIC_LINK_FLAGS}
)
endif()
diff --git a/compiler-rt/lib/builtins/cpu_model/x86.c b/compiler-rt/lib/builtins/cpu_model/x86.c
index a40675c..d91e13c 100644
--- a/compiler-rt/lib/builtins/cpu_model/x86.c
+++ b/compiler-rt/lib/builtins/cpu_model/x86.c
@@ -520,6 +520,13 @@ static const char *getIntelProcessorTypeAndSubtype(unsigned Family,
*Subtype = INTEL_COREI7_PANTHERLAKE;
break;
+ // Wildcatlake:
+ case 0xd5:
+ CPU = "wildcatlake";
+ *Type = INTEL_COREI7;
+ *Subtype = INTEL_COREI7_PANTHERLAKE;
+ break;
+
// Icelake Xeon:
case 0x6a:
case 0x6c:
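
The new model number feeds the generic cpu_model detection that
__builtin_cpu_is() and function multi-versioning rely on. A minimal sketch of
how the string would be consumed, assuming a toolchain whose front end already
accepts "wildcatlake" as a CPU name (compilers reject unknown names at compile
time, so a matching front-end change is an assumption here):

    // cpu_is_wildcatlake.cpp -- illustrative sketch only
    #include <cstdio>

    int main() {
      __builtin_cpu_init();  // populates the cpu model data shown above
      if (__builtin_cpu_is("wildcatlake"))
        std::printf("Wildcat Lake detected\n");
      return 0;
    }
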
diff --git a/compiler-rt/lib/msan/msan.h b/compiler-rt/lib/msan/msan.h
index 7fb58be..edb2699 100644
--- a/compiler-rt/lib/msan/msan.h
+++ b/compiler-rt/lib/msan/msan.h
@@ -303,6 +303,7 @@ u32 ChainOrigin(u32 id, StackTrace *stack);
const int STACK_TRACE_TAG_POISON = StackTrace::TAG_CUSTOM + 1;
const int STACK_TRACE_TAG_FIELDS = STACK_TRACE_TAG_POISON + 1;
const int STACK_TRACE_TAG_VPTR = STACK_TRACE_TAG_FIELDS + 1;
+const int STACK_TRACE_TAG_ALLOC_PADDING = STACK_TRACE_TAG_VPTR + 1;
#define GET_MALLOC_STACK_TRACE \
UNINITIALIZED BufferedStackTrace stack; \
diff --git a/compiler-rt/lib/msan/msan_allocator.cpp b/compiler-rt/lib/msan/msan_allocator.cpp
index 64df863..80608aa 100644
--- a/compiler-rt/lib/msan/msan_allocator.cpp
+++ b/compiler-rt/lib/msan/msan_allocator.cpp
@@ -217,25 +217,52 @@ static void *MsanAllocate(BufferedStackTrace *stack, uptr size, uptr alignment,
}
auto *meta = reinterpret_cast<Metadata *>(allocator.GetMetaData(allocated));
meta->requested_size = size;
+ uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(allocated);
+ void *padding_start = reinterpret_cast<char *>(allocated) + size;
+ uptr padding_size = actually_allocated_size - size;
+
+ // Suppose a 7-byte request is rounded up to a 16-byte chunk:
+ // - With calloc(7,1), we can set the ideal tagging:
+ // bytes 0-6: initialized, origin not set (and irrelevant)
+ // byte 7: uninitialized, origin TAG_ALLOC_PADDING
+ // bytes 8-15: uninitialized, origin TAG_ALLOC_PADDING
+ // - If we have malloc(7) and __msan_get_track_origins() > 1, the 4-byte
+ // origin granularity only allows the slightly suboptimal tagging:
+ // bytes 0-6: uninitialized, origin TAG_ALLOC
+ // byte 7: uninitialized, origin TAG_ALLOC (suboptimal)
+ // bytes 8-15: uninitialized, origin TAG_ALLOC_PADDING
+ // - If we have malloc(7) and __msan_get_track_origins() == 1, we use a
+ // single origin to reduce overhead:
+ // bytes 0-6: uninitialized, origin TAG_ALLOC
+ // byte 7: uninitialized, origin TAG_ALLOC (suboptimal)
+ // bytes 8-15: uninitialized, origin TAG_ALLOC (suboptimal)
+ if (__msan_get_track_origins() && flags()->poison_in_malloc &&
+ (zero || (__msan_get_track_origins() > 1))) {
+ stack->tag = STACK_TRACE_TAG_ALLOC_PADDING;
+ Origin o2 = Origin::CreateHeapOrigin(stack);
+ __msan_set_origin(padding_start, padding_size, o2.raw_id());
+ }
+
if (zero) {
if (allocator.FromPrimary(allocated))
__msan_clear_and_unpoison(allocated, size);
else
__msan_unpoison(allocated, size); // Mem is already zeroed.
+
+ if (flags()->poison_in_malloc)
+ __msan_poison(padding_start, padding_size);
} else if (flags()->poison_in_malloc) {
- __msan_poison(allocated, size);
+ __msan_poison(allocated, actually_allocated_size);
+
if (__msan_get_track_origins()) {
stack->tag = StackTrace::TAG_ALLOC;
Origin o = Origin::CreateHeapOrigin(stack);
- __msan_set_origin(allocated, size, o.raw_id());
+ __msan_set_origin(
+ allocated,
+ __msan_get_track_origins() == 1 ? actually_allocated_size : size,
+ o.raw_id());
}
}
- uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(allocated);
- // For compatibility, the allocator converted 0-sized allocations into 1 byte
- if (size == 0 && actually_allocated_size > 0 && flags()->poison_in_malloc)
- __msan_poison(allocated, 1);
-
UnpoisonParam(2);
RunMallocHooks(allocated, size);
return allocated;
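
The tagging described in the comment above can be observed from user code
through the public MSan interface. A hedged sketch, assuming the default
poison_in_malloc=1 and that a 7-byte request lands in a 16-byte size class
(an allocator detail, not something this patch guarantees); build with
-fsanitize=memory:

    // padding_shadow.cpp -- illustrative sketch only
    #include <sanitizer/msan_interface.h>
    #include <cstdio>
    #include <cstdlib>

    int main() {
      char *z = static_cast<char *>(calloc(7, 1));
      // calloc: bytes 0-6 initialized, padding poisoned from byte 7 on,
      // so the first poisoned offset in the chunk should be 7.
      std::printf("calloc: %ld\n", (long)__msan_test_shadow(z, 16));

      char *m = static_cast<char *>(malloc(7));
      // malloc: the whole chunk, padding included, is poisoned (offset 0).
      std::printf("malloc: %ld\n", (long)__msan_test_shadow(m, 16));

      free(m);
      free(z);
      return 0;
    }
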
@@ -255,9 +282,10 @@ void __msan::MsanDeallocate(BufferedStackTrace *stack, void *p) {
if (flags()->poison_in_free && allocator.FromPrimary(p)) {
__msan_poison(p, size);
if (__msan_get_track_origins()) {
+ uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(p);
stack->tag = StackTrace::TAG_DEALLOC;
Origin o = Origin::CreateHeapOrigin(stack);
- __msan_set_origin(p, size, o.raw_id());
+ __msan_set_origin(p, actually_allocated_size, o.raw_id());
}
}
if (MsanThread *t = GetCurrentThread()) {
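
Writing the deallocation origin over the full chunk matters for
use-after-free reads of the former padding bytes. A hedged sketch using
__msan_get_origin() from the public interface; assumes -fsanitize=memory with
-fsanitize-memory-track-origins and the default poison_in_free=1:

    // dealloc_origin.cpp -- illustrative sketch only
    #include <sanitizer/msan_interface.h>
    #include <cstdio>
    #include <cstdlib>

    int main() {
      char *p = static_cast<char *>(malloc(7));
      free(p);
      // With this change the data bytes and the former padding byte should
      // report the same deallocation origin; previously p[7] could keep a
      // stale allocation-time origin.
      std::printf("origin(p[0])=%u origin(p[7])=%u\n",
                  __msan_get_origin(p), __msan_get_origin(p + 7));
      return 0;
    }
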
diff --git a/compiler-rt/lib/msan/msan_report.cpp b/compiler-rt/lib/msan/msan_report.cpp
index 99bf81f..cd0bf67 100644
--- a/compiler-rt/lib/msan/msan_report.cpp
+++ b/compiler-rt/lib/msan/msan_report.cpp
@@ -90,6 +90,10 @@ static void DescribeOrigin(u32 id) {
Printf(" %sVirtual table ptr was destroyed%s\n", d.Origin(),
d.Default());
break;
+ case STACK_TRACE_TAG_ALLOC_PADDING:
+ Printf(" %sUninitialized value is outside of heap allocation%s\n",
+ d.Origin(), d.Default());
+ break;
default:
Printf(" %sUninitialized value was created%s\n", d.Origin(),
d.Default());
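
The new description can be provoked deliberately. A minimal sketch, assuming
origin tracking is enabled (-fsanitize=memory -fsanitize-memory-track-origins),
since DescribeOrigin() only runs when origins are recorded:

    // padding_report.cpp -- illustrative sketch only
    #include <sanitizer/msan_interface.h>
    #include <cstdlib>

    int main() {
      char *p = static_cast<char *>(calloc(7, 1));
      // Byte 7 is past the 7 requested bytes but inside the allocator's
      // chunk; checking it should fail with "Uninitialized value is
      // outside of heap allocation" once this patch is applied.
      __msan_check_mem_is_initialized(p + 7, 1);
      free(p);
      return 0;
    }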