mirror of
https://github.com/Gericom/teak-llvm.git
synced 2025-06-19 03:25:54 -04:00

Summary:
This change allows performing corpus merging in two steps. This is useful when
the user wants to address the following two points simultaneously:

1) Get trustworthy incremental stats for the coverage and corpus size changes
   when adding new corpus units.
2) Make sure the shorter units will be preferred when two or more units give
   the same unique signal (equivalent to the `REDUCE` logic).

This solution was brainstormed together with @kcc, hopefully it looks good to
the other people too.

The proposed use case scenario:

1) We have a `fuzz_target` binary and an `existing_corpus` directory.
2) We do fuzzing and write new units into the `new_corpus` directory.
3) We want to merge the new corpus into the existing corpus and satisfy the
   points mentioned above.
4) We create an empty directory `merged_corpus` and run the first merge step:

   `./fuzz_target -merge=1 -merge_control_file=MCF ./merged_corpus ./existing_corpus`

   This provides the initial stats for `existing_corpus`, e.g. from the output:

   `MERGE-OUTER: 3 new files with 11 new features added; 11 new coverage edges`

5) We recreate the `merged_corpus` directory and run the second merge step:

   `./fuzz_target -merge=1 -merge_control_file=MCF ./merged_corpus ./existing_corpus ./new_corpus`

   This provides the final stats for the merged corpus, e.g. from the output:

   `MERGE-OUTER: 6 new files with 14 new features added; 14 new coverage edges`

Alternative solutions to this approach are:

A) Store precise coverage information for every unit (not only unique signal).
B) Execute the same two steps without reusing the control file.

Either of these would be suboptimal, as it would impose an extra disk or CPU
load respectively, which is bad given the quadratic complexity in the worst
case.

Tested on Linux, Mac, Windows.

Reviewers: morehouse, metzman, hctim, kcc

Reviewed By: morehouse

Subscribers: JDevlieghere, delcypher, mgrang, #sanitizers, llvm-commits, kcc

Tags: #llvm, #sanitizers

Differential Revision: https://reviews.llvm.org/D66107

llvm-svn: 371620
403 lines
14 KiB
C++
//===- FuzzerMerge.cpp - merging corpora ----------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Merging corpora.
//===----------------------------------------------------------------------===//

#include "FuzzerCommand.h"
#include "FuzzerMerge.h"
#include "FuzzerIO.h"
#include "FuzzerInternal.h"
#include "FuzzerTracePC.h"
#include "FuzzerUtil.h"

#include <fstream>
#include <iterator>
#include <set>
#include <sstream>
#include <unordered_set>

namespace fuzzer {

bool Merger::Parse(const std::string &Str, bool ParseCoverage) {
  std::istringstream SS(Str);
  return Parse(SS, ParseCoverage);
}

void Merger::ParseOrExit(std::istream &IS, bool ParseCoverage) {
  if (!Parse(IS, ParseCoverage)) {
    Printf("MERGE: failed to parse the control file (unexpected error)\n");
    exit(1);
  }
}

// The control file example:
//
// 3 # The number of inputs
// 1 # The number of inputs in the first corpus, <= the previous number
// file0
// file1
// file2  # One file name per line.
// STARTED 0 123  # FileID, file size
// FT 0 1 4 6 8  # FileID COV1 COV2 ...
// COV 0 7 8 9  # FileID COV1 COV2 ...
// STARTED 1 456  # If FT is missing, the input crashed while processing.
// STARTED 2 567
// FT 2 8 9
// COV 2 11 12
bool Merger::Parse(std::istream &IS, bool ParseCoverage) {
  LastFailure.clear();
  std::string Line;

  // Parse NumFiles.
  if (!std::getline(IS, Line, '\n')) return false;
  std::istringstream L1(Line);
  size_t NumFiles = 0;
  L1 >> NumFiles;
  if (NumFiles == 0 || NumFiles > 10000000) return false;

  // Parse NumFilesInFirstCorpus.
  if (!std::getline(IS, Line, '\n')) return false;
  std::istringstream L2(Line);
  NumFilesInFirstCorpus = NumFiles + 1;
  L2 >> NumFilesInFirstCorpus;
  if (NumFilesInFirstCorpus > NumFiles) return false;

  // Parse file names.
  Files.resize(NumFiles);
  for (size_t i = 0; i < NumFiles; i++)
    if (!std::getline(IS, Files[i].Name, '\n'))
      return false;

  // Parse STARTED, FT, and COV lines.
  size_t ExpectedStartMarker = 0;
  const size_t kInvalidStartMarker = -1;
  size_t LastSeenStartMarker = kInvalidStartMarker;
  Vector<uint32_t> TmpFeatures;
  Set<uint32_t> PCs;
  while (std::getline(IS, Line, '\n')) {
    std::istringstream ISS1(Line);
    std::string Marker;
    size_t N;
    ISS1 >> Marker;
    ISS1 >> N;
    if (Marker == "STARTED") {
      // STARTED FILE_ID FILE_SIZE
      if (ExpectedStartMarker != N)
        return false;
      ISS1 >> Files[ExpectedStartMarker].Size;
      LastSeenStartMarker = ExpectedStartMarker;
      assert(ExpectedStartMarker < Files.size());
      ExpectedStartMarker++;
    } else if (Marker == "FT") {
      // FT FILE_ID COV1 COV2 COV3 ...
      size_t CurrentFileIdx = N;
      if (CurrentFileIdx != LastSeenStartMarker)
        return false;
      LastSeenStartMarker = kInvalidStartMarker;
      if (ParseCoverage) {
        TmpFeatures.clear();  // use a vector from outer scope to avoid resizes.
        while (ISS1 >> N)
          TmpFeatures.push_back(N);
        std::sort(TmpFeatures.begin(), TmpFeatures.end());
        Files[CurrentFileIdx].Features = TmpFeatures;
      }
    } else if (Marker == "COV") {
      size_t CurrentFileIdx = N;
      if (ParseCoverage)
        while (ISS1 >> N)
          if (PCs.insert(N).second)
            Files[CurrentFileIdx].Cov.push_back(N);
    } else {
      return false;
    }
  }
  if (LastSeenStartMarker != kInvalidStartMarker)
    LastFailure = Files[LastSeenStartMarker].Name;

  FirstNotProcessedFile = ExpectedStartMarker;
  return true;
}

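// Illustrative note (editor's sketch, not part of the original file): for the
// control file example shown above, Parse() ends with Files.size() == 3 and
// FirstNotProcessedFile == 3. LastFailure stays empty even though "file1" has
// a STARTED marker with no FT line: LastFailure is only set when the *last*
// started file is left unfinished, i.e. when the previous inner process
// crashed at the very end of the file list.
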
size_t Merger::ApproximateMemoryConsumption() const {
  size_t Res = 0;
  for (const auto &F: Files)
    Res += sizeof(F) + F.Features.size() * sizeof(F.Features[0]);
  return Res;
}

// Decides which files need to be merged (add those to NewFiles).
// Returns the number of new features added.
size_t Merger::Merge(const Set<uint32_t> &InitialFeatures,
                     Set<uint32_t> *NewFeatures,
                     const Set<uint32_t> &InitialCov, Set<uint32_t> *NewCov,
                     Vector<std::string> *NewFiles) {
  NewFiles->clear();
  assert(NumFilesInFirstCorpus <= Files.size());
  Set<uint32_t> AllFeatures = InitialFeatures;

  // What features are in the initial corpus?
  for (size_t i = 0; i < NumFilesInFirstCorpus; i++) {
    auto &Cur = Files[i].Features;
    AllFeatures.insert(Cur.begin(), Cur.end());
  }
  // Remove all features that we already know from all other inputs.
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
    auto &Cur = Files[i].Features;
    Vector<uint32_t> Tmp;
    std::set_difference(Cur.begin(), Cur.end(), AllFeatures.begin(),
                        AllFeatures.end(), std::inserter(Tmp, Tmp.begin()));
    Cur.swap(Tmp);
  }

  // Sort. Give preference to
  //   * smaller files
  //   * files with more features.
  std::sort(Files.begin() + NumFilesInFirstCorpus, Files.end(),
            [&](const MergeFileInfo &a, const MergeFileInfo &b) -> bool {
              if (a.Size != b.Size)
                return a.Size < b.Size;
              return a.Features.size() > b.Features.size();
            });

  // One greedy pass: add the file's features to AllFeatures.
  // If new features were added, add this file to NewFiles.
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
    auto &Cur = Files[i].Features;
    // Printf("%s -> sz %zd ft %zd\n", Files[i].Name.c_str(),
    //        Files[i].Size, Cur.size());
    bool FoundNewFeatures = false;
    for (auto Fe: Cur) {
      if (AllFeatures.insert(Fe).second) {
        FoundNewFeatures = true;
        NewFeatures->insert(Fe);
      }
    }
    if (FoundNewFeatures)
      NewFiles->push_back(Files[i].Name);
    for (auto Cov : Files[i].Cov)
      if (InitialCov.find(Cov) == InitialCov.end())
        NewCov->insert(Cov);
  }
  return NewFeatures->size();
}

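// Worked example (editor's sketch, not part of the original file): suppose the
// first corpus already covers feature 2, and the second corpus holds
//   A (size 10, features {1, 2}), B (size 5, features {2, 3}),
//   C (size 5, features {2}).
// The set_difference pass leaves A with {1}, B with {3}, C with {}. The sort
// yields B, C, A (smaller first, more features breaking ties), and the greedy
// pass keeps B (new feature 3) and A (new feature 1) while dropping C, so the
// shortest unit wins whenever several units carry the same signal.
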
Set<uint32_t> Merger::AllFeatures() const {
  Set<uint32_t> S;
  for (auto &File : Files)
    S.insert(File.Features.begin(), File.Features.end());
  return S;
}

// Inner process. May crash if the target crashes.
void Fuzzer::CrashResistantMergeInternalStep(const std::string &CFPath) {
  Printf("MERGE-INNER: using the control file '%s'\n", CFPath.c_str());
  Merger M;
  std::ifstream IF(CFPath);
  M.ParseOrExit(IF, false);
  IF.close();
  if (!M.LastFailure.empty())
    Printf("MERGE-INNER: '%s' caused a failure at the previous merge step\n",
           M.LastFailure.c_str());

  Printf("MERGE-INNER: %zd total files;"
         " %zd processed earlier; will process %zd files now\n",
         M.Files.size(), M.FirstNotProcessedFile,
         M.Files.size() - M.FirstNotProcessedFile);

  std::ofstream OF(CFPath, std::ofstream::out | std::ofstream::app);
  Set<size_t> AllFeatures;
  auto PrintStatsWrapper = [this, &AllFeatures](const char* Where) {
    this->PrintStats(Where, "\n", 0, AllFeatures.size());
  };
  Set<const TracePC::PCTableEntry *> AllPCs;
  for (size_t i = M.FirstNotProcessedFile; i < M.Files.size(); i++) {
    Fuzzer::MaybeExitGracefully();
    auto U = FileToVector(M.Files[i].Name);
    if (U.size() > MaxInputLen) {
      U.resize(MaxInputLen);
      U.shrink_to_fit();
    }

    // Write the pre-run marker.
    OF << "STARTED " << i << " " << U.size() << "\n";
    OF.flush();  // Flush is important since Command::Execute may crash.
    // Run.
    TPC.ResetMaps();
    ExecuteCallback(U.data(), U.size());
    // Collect coverage. We are iterating over the files in this order:
    //   * First, files in the initial corpus ordered by size, smallest first.
    //   * Then, all other files, smallest first.
    // So it makes no sense to record all features for all files, instead we
    // only record features that were not seen before.
    Set<size_t> UniqFeatures;
    TPC.CollectFeatures([&](size_t Feature) {
      if (AllFeatures.insert(Feature).second)
        UniqFeatures.insert(Feature);
    });
    TPC.UpdateObservedPCs();
    // Show stats.
    if (!(TotalNumberOfRuns & (TotalNumberOfRuns - 1)))
      PrintStatsWrapper("pulse ");
    if (TotalNumberOfRuns == M.NumFilesInFirstCorpus)
      PrintStatsWrapper("LOADED");
    // Write the post-run marker and the coverage.
    OF << "FT " << i;
    for (size_t F : UniqFeatures)
      OF << " " << F;
    OF << "\n";
    OF << "COV " << i;
    TPC.ForEachObservedPC([&](const TracePC::PCTableEntry *TE) {
      if (AllPCs.insert(TE).second)
        OF << " " << TPC.PCTableEntryIdx(TE);
    });
    OF << "\n";
    OF.flush();
  }
  PrintStatsWrapper("DONE ");
}

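// Illustrative trace (editor's sketch, not part of the original file): if the
// file with FileID 2 is 567 bytes and triggers previously unseen features
// 8 and 9 plus previously unseen PC-table entries 11 and 12, one iteration of
// the loop above appends exactly the lines from the format example:
//   STARTED 2 567
//   FT 2 8 9
//   COV 2 11 12
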
static size_t WriteNewControlFile(const std::string &CFPath,
                                  const Vector<SizedFile> &OldCorpus,
                                  const Vector<SizedFile> &NewCorpus,
                                  const Vector<MergeFileInfo> &KnownFiles) {
  std::unordered_set<std::string> FilesToSkip;
  for (auto &SF: KnownFiles)
    FilesToSkip.insert(SF.Name);

  Vector<std::string> FilesToUse;
  auto MaybeUseFile = [=, &FilesToUse](std::string Name) {
    if (FilesToSkip.find(Name) == FilesToSkip.end())
      FilesToUse.push_back(Name);
  };
  for (auto &SF: OldCorpus)
    MaybeUseFile(SF.File);
  auto FilesToUseFromOldCorpus = FilesToUse.size();
  for (auto &SF: NewCorpus)
    MaybeUseFile(SF.File);

  RemoveFile(CFPath);
  std::ofstream ControlFile(CFPath);
  ControlFile << FilesToUse.size() << "\n";
  ControlFile << FilesToUseFromOldCorpus << "\n";
  for (auto &FN: FilesToUse)
    ControlFile << FN << "\n";

  if (!ControlFile) {
    Printf("MERGE-OUTER: failed to write to the control file: %s\n",
           CFPath.c_str());
    exit(1);
  }

  return FilesToUse.size();
}

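// Example layout (editor's sketch with hypothetical paths, not part of the
// original file): with OldCorpus = {"old/a"}, NewCorpus = {"new/b", "new/c"}
// and no KnownFiles, the freshly written control file contains exactly:
//   3
//   1
//   old/a
//   new/b
//   new/c
// The STARTED/FT/COV lines are appended later by the inner processes.
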
// Outer process. Does not call the target code and thus should not fail.
void CrashResistantMerge(const Vector<std::string> &Args,
                         const Vector<SizedFile> &OldCorpus,
                         const Vector<SizedFile> &NewCorpus,
                         Vector<std::string> *NewFiles,
                         const Set<uint32_t> &InitialFeatures,
                         Set<uint32_t> *NewFeatures,
                         const Set<uint32_t> &InitialCov,
                         Set<uint32_t> *NewCov,
                         const std::string &CFPath,
                         bool V /*Verbose*/) {
  if (NewCorpus.empty() && OldCorpus.empty()) return;  // Nothing to merge.
  size_t NumAttempts = 0;
  Vector<MergeFileInfo> KnownFiles;
  if (FileSize(CFPath)) {
    VPrintf(V, "MERGE-OUTER: non-empty control file provided: '%s'\n",
            CFPath.c_str());
    Merger M;
    std::ifstream IF(CFPath);
    if (M.Parse(IF, /*ParseCoverage=*/true)) {
      VPrintf(V, "MERGE-OUTER: control file ok, %zd files total,"
              " first not processed file %zd\n",
              M.Files.size(), M.FirstNotProcessedFile);
      if (!M.LastFailure.empty())
        VPrintf(V, "MERGE-OUTER: '%s' will be skipped as unlucky "
                "(merge has stumbled on it the last time)\n",
                M.LastFailure.c_str());
      if (M.FirstNotProcessedFile >= M.Files.size()) {
        // Merge has already been completed with the given merge control file.
        if (M.Files.size() == OldCorpus.size() + NewCorpus.size()) {
          VPrintf(
              V,
              "MERGE-OUTER: nothing to do, merge has been completed before\n");
          exit(0);
        }

        // Number of input files likely changed, start merge from scratch, but
        // reuse coverage information from the given merge control file.
        VPrintf(
            V,
            "MERGE-OUTER: starting merge from scratch, but reusing coverage "
            "information from the given control file\n");
        KnownFiles = M.Files;
      } else {
        // There is a merge in progress, continue.
        NumAttempts = M.Files.size() - M.FirstNotProcessedFile;
      }
    } else {
      VPrintf(V, "MERGE-OUTER: bad control file, will overwrite it\n");
    }
  }

  if (!NumAttempts) {
    // The supplied control file is empty or bad, create a fresh one.
    VPrintf(V, "MERGE-OUTER: "
            "%zd files, %zd in the initial corpus, %zd processed earlier\n",
            OldCorpus.size() + NewCorpus.size(), OldCorpus.size(),
            KnownFiles.size());
    NumAttempts = WriteNewControlFile(CFPath, OldCorpus, NewCorpus, KnownFiles);
  }

  // Execute the inner process until it passes.
  // Every inner process should execute at least one input.
  Command BaseCmd(Args);
  BaseCmd.removeFlag("merge");
  BaseCmd.removeFlag("fork");
  BaseCmd.removeFlag("collect_data_flow");
  for (size_t Attempt = 1; Attempt <= NumAttempts; Attempt++) {
    Fuzzer::MaybeExitGracefully();
    VPrintf(V, "MERGE-OUTER: attempt %zd\n", Attempt);
    Command Cmd(BaseCmd);
    Cmd.addFlag("merge_control_file", CFPath);
    Cmd.addFlag("merge_inner", "1");
    if (!V) {
      Cmd.setOutputFile(getDevNull());
      Cmd.combineOutAndErr();
    }
    auto ExitCode = ExecuteCommand(Cmd);
    if (!ExitCode) {
      VPrintf(V, "MERGE-OUTER: successful in %zd attempt(s)\n", Attempt);
      break;
    }
  }
  // Read the control file and do the merge.
  Merger M;
  std::ifstream IF(CFPath);
  IF.seekg(0, IF.end);
  VPrintf(V, "MERGE-OUTER: the control file has %zd bytes\n",
          (size_t)IF.tellg());
  IF.seekg(0, IF.beg);
  M.ParseOrExit(IF, true);
  IF.close();
  VPrintf(V,
          "MERGE-OUTER: consumed %zdMb (%zdMb rss) to parse the control file\n",
          M.ApproximateMemoryConsumption() >> 20, GetPeakRSSMb());

  M.Files.insert(M.Files.end(), KnownFiles.begin(), KnownFiles.end());
  M.Merge(InitialFeatures, NewFeatures, InitialCov, NewCov, NewFiles);
  VPrintf(V, "MERGE-OUTER: %zd new files with %zd new features added; "
          "%zd new coverage edges\n",
          NewFiles->size(), NewFeatures->size(), NewCov->size());
}

} // namespace fuzzer