diff options
author | Kostya Serebryany <kcc@google.com> | 2017-08-22 01:28:32 +0000 |
---|---|---|
committer | Kostya Serebryany <kcc@google.com> | 2017-08-22 01:28:32 +0000 |
commit | 190938f453f63c379a3c07c0140a69d87ff812a8 (patch) | |
tree | 47371e59897d4420293492e8fc5d34cdb217e076 /lib/fuzzer | |
parent | 8713a9766f1d0a033de92dc57b8df67ae53ff425 (diff) |
[libFuzzer] apply changes lost during the migration to compiler-rt
git-svn-id: https://llvm.org/svn/llvm-project/compiler-rt/trunk@311420 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/fuzzer')
-rw-r--r-- | lib/fuzzer/CMakeLists.txt | 1 | ||||
-rw-r--r-- | lib/fuzzer/FuzzerClangCounters.cpp | 49 | ||||
-rw-r--r-- | lib/fuzzer/FuzzerDefs.h | 4 | ||||
-rw-r--r-- | lib/fuzzer/FuzzerLoop.cpp | 3 | ||||
-rw-r--r-- | lib/fuzzer/FuzzerTracePC.cpp | 33 | ||||
-rw-r--r-- | lib/fuzzer/FuzzerTracePC.h | 50 |
6 files changed, 108 insertions, 32 deletions
diff --git a/lib/fuzzer/CMakeLists.txt b/lib/fuzzer/CMakeLists.txt index 309a8b801..03cb7fb0a 100644 --- a/lib/fuzzer/CMakeLists.txt +++ b/lib/fuzzer/CMakeLists.txt @@ -1,4 +1,5 @@ set(LIBFUZZER_SOURCES + FuzzerClangCounters.cpp FuzzerCrossOver.cpp FuzzerDriver.cpp FuzzerExtFunctionsDlsym.cpp diff --git a/lib/fuzzer/FuzzerClangCounters.cpp b/lib/fuzzer/FuzzerClangCounters.cpp new file mode 100644 index 000000000..f69e922cf --- /dev/null +++ b/lib/fuzzer/FuzzerClangCounters.cpp @@ -0,0 +1,49 @@ +//===- FuzzerClangCounters.cpp - Clang coverage counters ------------------===// +// +// The LLVM Compiler Infrastructure +// +// This file is distributed under the University of Illinois Open Source +// License. See LICENSE.TXT for details. +// +//===----------------------------------------------------------------------===// +// Coverage counters from Clang's SourceBasedCodeCoverage. +//===----------------------------------------------------------------------===// + +// Support for SourceBasedCodeCoverage is experimental: +// * Works only for the main binary, not DSOs yet. +// * Works only on Linux. +// * Does not implement print_pcs/print_coverage yet. +// * Is not fully evaluated for performance and sensitivity. +// We expect large performance drop due to 64-bit counters, +// and *maybe* better sensitivity due to more fine-grained counters. +// Preliminary comparison on a single benchmark (RE2) shows +// a bit worse sensitivity though. + +#include "FuzzerDefs.h" + +#if LIBFUZZER_LINUX +__attribute__((weak)) extern uint64_t __start___llvm_prf_cnts; +__attribute__((weak)) extern uint64_t __stop___llvm_prf_cnts; +namespace fuzzer { +uint64_t *ClangCountersBegin() { return &__start___llvm_prf_cnts; } +uint64_t *ClangCountersEnd() { return &__stop___llvm_prf_cnts; } +} // namespace fuzzer +#else +// TODO: Implement on Mac (if the data shows it's worth it). 
+//__attribute__((visibility("hidden"))) +//extern uint64_t CountersStart __asm("section$start$__DATA$__llvm_prf_cnts"); +//__attribute__((visibility("hidden"))) +//extern uint64_t CountersEnd __asm("section$end$__DATA$__llvm_prf_cnts"); +namespace fuzzer { +uint64_t *ClangCountersBegin() { return nullptr; } +uint64_t *ClangCountersEnd() { return nullptr; } +} // namespace fuzzer +#endif + +namespace fuzzer { +ATTRIBUTE_NO_SANITIZE_ALL +void ClearClangCounters() { // hand-written memset, don't asan-ify. + for (auto P = ClangCountersBegin(); P < ClangCountersEnd(); P++) + *P = 0; +} +} diff --git a/lib/fuzzer/FuzzerDefs.h b/lib/fuzzer/FuzzerDefs.h index 27f571923..bbb44514a 100644 --- a/lib/fuzzer/FuzzerDefs.h +++ b/lib/fuzzer/FuzzerDefs.h @@ -123,6 +123,10 @@ uint8_t *ExtraCountersBegin(); uint8_t *ExtraCountersEnd(); void ClearExtraCounters(); +uint64_t *ClangCountersBegin(); +uint64_t *ClangCountersEnd(); +void ClearClangCounters(); + } // namespace fuzzer #endif // LLVM_FUZZER_DEFS_H diff --git a/lib/fuzzer/FuzzerLoop.cpp b/lib/fuzzer/FuzzerLoop.cpp index 2064783f3..234945932 100644 --- a/lib/fuzzer/FuzzerLoop.cpp +++ b/lib/fuzzer/FuzzerLoop.cpp @@ -388,11 +388,12 @@ void Fuzzer::ShuffleAndMinimize(UnitVector *InitialCorpus) { uint8_t dummy; ExecuteCallback(&dummy, 0); - for (const auto &U : *InitialCorpus) { + for (auto &U : *InitialCorpus) { RunOne(U.data(), U.size()); CheckExitOnSrcPosOrItem(); TryDetectingAMemoryLeak(U.data(), U.size(), /*DuringInitialCorpusExecution*/ true); + U.clear(); } PrintStats("INITED"); if (Corpus.empty()) { diff --git a/lib/fuzzer/FuzzerTracePC.cpp b/lib/fuzzer/FuzzerTracePC.cpp index d038374dc..ebd33d3ec 100644 --- a/lib/fuzzer/FuzzerTracePC.cpp +++ b/lib/fuzzer/FuzzerTracePC.cpp @@ -31,6 +31,9 @@ uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs]; ATTRIBUTE_INTERFACE uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs]; +// Used by -fsanitize-coverage=stack-depth to track stack depth +ATTRIBUTE_INTERFACE 
thread_local uintptr_t __sancov_lowest_stack; + namespace fuzzer { TracePC TPC; @@ -126,6 +129,8 @@ void TracePC::PrintModuleInfo() { _Exit(1); } } + if (size_t NumClangCounters = ClangCountersEnd() - ClangCountersBegin()) + Printf("INFO: %zd Clang Coverage Counters\n", NumClangCounters); } ATTRIBUTE_NO_SANITIZE_ALL @@ -137,13 +142,12 @@ void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) { } void TracePC::UpdateObservedPCs() { + auto Observe = [&](uintptr_t PC) { + bool Inserted = ObservedPCs.insert(PC).second; + if (Inserted && DoPrintNewPCs) + PrintPC("\tNEW_PC: %p %F %L\n", "\tNEW_PC: %p\n", PC + 1); + }; if (NumPCsInPCTables) { - auto Observe = [&](uintptr_t PC) { - bool Inserted = ObservedPCs.insert(PC).second; - if (Inserted && DoPrintNewPCs) - PrintPC("\tNEW_PC: %p %F %L\n", "\tNEW_PC: %p\n", PC + 1); - }; - if (NumInline8bitCounters == NumPCsInPCTables) { for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) { uint8_t *Beg = ModuleCounters[i].Start; @@ -167,6 +171,13 @@ void TracePC::UpdateObservedPCs() { } } } + if (size_t NumClangCounters = + ClangCountersEnd() - ClangCountersBegin()) { + auto P = ClangCountersBegin(); + for (size_t Idx = 0; Idx < NumClangCounters; Idx++) + if (P[Idx]) + Observe((uintptr_t)Idx); + } } inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) { @@ -332,6 +343,14 @@ void TracePC::ClearInlineCounters() { } } +void TracePC::RecordInitialStack() { + InitialStack = __sancov_lowest_stack; +} + +uintptr_t TracePC::GetMaxStackOffset() const { + return InitialStack - __sancov_lowest_stack; // Stack grows down +} + } // namespace fuzzer extern "C" { @@ -342,8 +361,6 @@ void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) { uint32_t Idx = *Guard; __sancov_trace_pc_pcs[Idx] = PC; __sancov_trace_pc_guard_8bit_counters[Idx]++; - // Uncomment the following line to get stack-depth profiling. 
- // fuzzer::TPC.RecordCurrentStack(); } // Best-effort support for -fsanitize-coverage=trace-pc, which is available diff --git a/lib/fuzzer/FuzzerTracePC.h b/lib/fuzzer/FuzzerTracePC.h index ea6794c75..56f1820f7 100644 --- a/lib/fuzzer/FuzzerTracePC.h +++ b/lib/fuzzer/FuzzerTracePC.h @@ -91,6 +91,7 @@ class TracePC { memset(Counters(), 0, GetNumPCs()); ClearExtraCounters(); ClearInlineCounters(); + ClearClangCounters(); } void ClearInlineCounters(); @@ -119,19 +120,8 @@ class TracePC { return PCs()[Idx]; } - void RecordCurrentStack() { - uintptr_t Stack = GetCurrentStack(); - if (Stack < LowestStack) - LowestStack = Stack; - } - void RecordInitialStack() { - InitialStack = GetCurrentStack(); - LowestStack = InitialStack; - } - uintptr_t GetCurrentStack() const { - return reinterpret_cast<uintptr_t>(__builtin_frame_address(0)); - } - uintptr_t GetMaxStackOffset() const { return InitialStack - LowestStack; } + void RecordInitialStack(); + uintptr_t GetMaxStackOffset() const; template<class CallBack> void ForEachObservedPC(CallBack CB) { @@ -166,7 +156,7 @@ private: std::set<uintptr_t> ObservedPCs; ValueBitMap ValueProfileMap; - uintptr_t InitialStack, LowestStack; // Assume stack grows down. + uintptr_t InitialStack; }; template <class Callback> @@ -196,14 +186,9 @@ void ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End, Handle8bitCounter(FirstFeature, P - Begin, V); } -template <class Callback> // bool Callback(size_t Feature) -ATTRIBUTE_NO_SANITIZE_ADDRESS -__attribute__((noinline)) -void TracePC::CollectFeatures(Callback HandleFeature) const { - uint8_t *Counters = this->Counters(); - size_t N = GetNumPCs(); - auto Handle8bitCounter = [&](size_t FirstFeature, - size_t Idx, uint8_t Counter) { +// Given a non-zero Counters returns a number in [0,7]. 
+template<class T> +unsigned CounterToFeature(T Counter) { assert(Counter); unsigned Bit = 0; /**/ if (Counter >= 128) Bit = 7; @@ -213,7 +198,18 @@ void TracePC::CollectFeatures(Callback HandleFeature) const { else if (Counter >= 4) Bit = 3; else if (Counter >= 3) Bit = 2; else if (Counter >= 2) Bit = 1; - HandleFeature(FirstFeature + Idx * 8 + Bit); + return Bit; +} + +template <class Callback> // bool Callback(size_t Feature) +ATTRIBUTE_NO_SANITIZE_ADDRESS +__attribute__((noinline)) +void TracePC::CollectFeatures(Callback HandleFeature) const { + uint8_t *Counters = this->Counters(); + size_t N = GetNumPCs(); + auto Handle8bitCounter = [&](size_t FirstFeature, + size_t Idx, uint8_t Counter) { + HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Counter)); }; size_t FirstFeature = 0; @@ -231,6 +227,14 @@ void TracePC::CollectFeatures(Callback HandleFeature) const { } } + if (size_t NumClangCounters = ClangCountersEnd() - ClangCountersBegin()) { + auto P = ClangCountersBegin(); + for (size_t Idx = 0; Idx < NumClangCounters; Idx++) + if (auto Cnt = P[Idx]) + HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Cnt)); + FirstFeature += NumClangCounters; + } + ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(), FirstFeature, Handle8bitCounter); FirstFeature += (ExtraCountersEnd() - ExtraCountersBegin()) * 8; |