Diffstat (limited to 'utils/google-benchmark/test/reporter_output_test.cc')
 utils/google-benchmark/test/reporter_output_test.cc | 265
 1 file changed, 84 insertions(+), 181 deletions(-)
diff --git a/utils/google-benchmark/test/reporter_output_test.cc b/utils/google-benchmark/test/reporter_output_test.cc
index e580008e6..00f02f264 100644
--- a/utils/google-benchmark/test/reporter_output_test.cc
+++ b/utils/google-benchmark/test/reporter_output_test.cc
@@ -1,134 +1,19 @@
 #undef NDEBUG
 #include "benchmark/benchmark.h"
-#include "../src/check.h" // NOTE: check.h is for internal use only!
-#include "../src/re.h" // NOTE: re.h is for internal use only
-#include <cassert>
-#include <cstring>
-#include <iostream>
-#include <sstream>
-#include <vector>
+#include "output_test.h"
 #include <utility>
-
-namespace {
-
-// ========================================================================= //
-// -------------------------- Testing Case --------------------------------- //
-// ========================================================================= //
-
-enum MatchRules {
-  MR_Default, // Skip non-matching lines until a match is found.
-  MR_Next // Match must occur on the next line.
-};
-
-struct TestCase {
-  std::string regex;
-  int match_rule;
-
-  TestCase(std::string re, int rule = MR_Default) : regex(re), match_rule(rule) {}
-
-  void Check(std::stringstream& remaining_output) const {
-    benchmark::Regex r;
-    std::string err_str;
-    r.Init(regex, &err_str);
-    CHECK(err_str.empty()) << "Could not construct regex \"" << regex << "\""
-                           << " got Error: " << err_str;
-
-    std::string line;
-    while (remaining_output.eof() == false) {
-      CHECK(remaining_output.good());
-      std::getline(remaining_output, line);
-      if (r.Match(line)) return;
-      CHECK(match_rule != MR_Next) << "Expected line \"" << line
-                                   << "\" to match regex \"" << regex << "\"";
-    }
-
-    CHECK(remaining_output.eof() == false)
-        << "End of output reached before match for regex \"" << regex
-        << "\" was found";
-  }
-};
-
-std::vector<TestCase> ConsoleOutputTests;
-std::vector<TestCase> JSONOutputTests;
-std::vector<TestCase> CSVOutputTests;
-
-std::vector<TestCase> ConsoleErrorTests;
-std::vector<TestCase> JSONErrorTests;
-std::vector<TestCase> CSVErrorTests;
-
-// ========================================================================= //
-// -------------------------- Test Helpers --------------------------------- //
-// ========================================================================= //
-
-class TestReporter : public benchmark::BenchmarkReporter {
-public:
-  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
-      : reporters_(reps) {}
-
-  virtual bool ReportContext(const Context& context) {
-    bool last_ret = false;
-    bool first = true;
-    for (auto rep : reporters_) {
-      bool new_ret = rep->ReportContext(context);
-      CHECK(first || new_ret == last_ret)
-          << "Reports return different values for ReportContext";
-      first = false;
-      last_ret = new_ret;
-    }
-    return last_ret;
-  }
-
-  virtual void ReportRuns(const std::vector<Run>& report) {
-    for (auto rep : reporters_)
-      rep->ReportRuns(report);
-  }
-
-  virtual void Finalize() {
-    for (auto rep : reporters_)
-      rep->Finalize();
-  }
-
-private:
-  std::vector<benchmark::BenchmarkReporter*> reporters_;
-};
-
-
-#define CONCAT2(x, y) x##y
-#define CONCAT(x, y) CONCAT2(x, y)
-
-#define ADD_CASES(...) \
-    int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
-
-int AddCases(std::vector<TestCase>* out, std::initializer_list<TestCase> const& v) {
-  for (auto const& TC : v)
-    out->push_back(TC);
-  return 0;
-}
-
-template <class First>
-std::string join(First f) { return f; }
-
-template <class First, class ...Args>
-std::string join(First f, Args&&... args) {
-  return std::string(std::move(f)) + "[ ]+" + join(std::forward<Args>(args)...);
-}
-
-
-std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
-
-} // end namespace
 
 // ========================================================================= //
 // ---------------------- Testing Prologue Output -------------------------- //
 // ========================================================================= //
 
-ADD_CASES(&ConsoleOutputTests, {
-    {join("^Benchmark", "Time", "CPU", "Iterations$"), MR_Next},
+ADD_CASES(TC_ConsoleOut, {
+    {"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
     {"^[-]+$", MR_Next}
 });
-ADD_CASES(&CSVOutputTests, {
+ADD_CASES(TC_CSVOut, {
   {"name,iterations,real_time,cpu_time,time_unit,bytes_per_second,items_per_second,"
    "label,error_occurred,error_message"}
 });
@@ -142,19 +27,19 @@ void BM_basic(benchmark::State& state) {
 }
 BENCHMARK(BM_basic);
 
-ADD_CASES(&ConsoleOutputTests, {
-  {"^BM_basic[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"}
+ADD_CASES(TC_ConsoleOut, {
+  {"^BM_basic %console_report$"}
 });
-ADD_CASES(&JSONOutputTests, {
+ADD_CASES(TC_JSONOut, {
     {"\"name\": \"BM_basic\",$"},
-    {"\"iterations\": [0-9]+,$", MR_Next},
-    {"\"real_time\": [0-9]{1,5},$", MR_Next},
-    {"\"cpu_time\": [0-9]{1,5},$", MR_Next},
+    {"\"iterations\": %int,$", MR_Next},
+    {"\"real_time\": %int,$", MR_Next},
+    {"\"cpu_time\": %int,$", MR_Next},
    {"\"time_unit\": \"ns\"$", MR_Next},
    {"}", MR_Next}
 });
-ADD_CASES(&CSVOutputTests, {
-  {"^\"BM_basic\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"}
+ADD_CASES(TC_CSVOut, {
+  {"^\"BM_basic\",%csv_report$"}
 });
@@ -166,16 +51,16 @@ void BM_error(benchmark::State& state) {
   while(state.KeepRunning()) {}
 }
 BENCHMARK(BM_error);
 
-ADD_CASES(&ConsoleOutputTests, {
+ADD_CASES(TC_ConsoleOut, {
   {"^BM_error[ ]+ERROR OCCURRED: 'message'$"}
 });
-ADD_CASES(&JSONOutputTests, {
+ADD_CASES(TC_JSONOut, {
     {"\"name\": \"BM_error\",$"},
    {"\"error_occurred\": true,$", MR_Next},
    {"\"error_message\": \"message\",$", MR_Next}
 });
-ADD_CASES(&CSVOutputTests, {
+ADD_CASES(TC_CSVOut, {
   {"^\"BM_error\",,,,,,,,true,\"message\"$"}
 });
@@ -190,66 +75,84 @@ void BM_Complexity_O1(benchmark::State& state) {
   state.SetComplexityN(state.range(0));
 }
 BENCHMARK(BM_Complexity_O1)->Range(1, 1<<18)->Complexity(benchmark::o1);
+SET_SUBSTITUTIONS({
+  {"%bigOStr", "[ ]*[0-9]+\\.[0-9]+ \\([0-9]+\\)"},
+  {"%RMS", "[ ]*[0-9]+ %"}
+});
+ADD_CASES(TC_ConsoleOut, {
+  {"^BM_Complexity_O1_BigO %bigOStr %bigOStr[ ]*$"},
+  {"^BM_Complexity_O1_RMS %RMS %RMS[ ]*$"}
+});
+
+
+// ========================================================================= //
+// ----------------------- Testing Aggregate Output ------------------------ //
+// ========================================================================= //
 
-std::string bigOStr = "[0-9]+\\.[0-9]+ \\([0-9]+\\)";
-
+// Test that non-aggregate data is printed by default
+void BM_Repeat(benchmark::State& state) { while (state.KeepRunning()) {} }
+BENCHMARK(BM_Repeat)->Repetitions(3);
+ADD_CASES(TC_ConsoleOut, {
+    {"^BM_Repeat/repeats:3 %console_report$"},
+    {"^BM_Repeat/repeats:3 %console_report$"},
+    {"^BM_Repeat/repeats:3 %console_report$"},
+    {"^BM_Repeat/repeats:3_mean %console_report$"},
+    {"^BM_Repeat/repeats:3_stddev %console_report$"}
+});
+ADD_CASES(TC_JSONOut, {
+    {"\"name\": \"BM_Repeat/repeats:3\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
+    {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"}
+});
+ADD_CASES(TC_CSVOut, {
+    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
+    {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}
+});
 
-ADD_CASES(&ConsoleOutputTests, {
-    {join("^BM_Complexity_O1_BigO", bigOStr, bigOStr) + "[ ]*$"},
-    {join("^BM_Complexity_O1_RMS", "[0-9]+ %", "[0-9]+ %") + "[ ]*$"}
+// Test that a non-repeated test still prints non-aggregate results even when
+// only-aggregate reports have been requested
+void BM_RepeatOnce(benchmark::State& state) { while (state.KeepRunning()) {} }
+BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
+ADD_CASES(TC_ConsoleOut, {
+    {"^BM_RepeatOnce/repeats:1 %console_report$"}
+});
+ADD_CASES(TC_JSONOut, {
+    {"\"name\": \"BM_RepeatOnce/repeats:1\",$"}
+});
+ADD_CASES(TC_CSVOut, {
+    {"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}
 });
 
+// Test that non-aggregate data is not reported
+void BM_SummaryRepeat(benchmark::State& state) { while (state.KeepRunning()) {} }
+BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
+ADD_CASES(TC_ConsoleOut, {
+    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+    {"^BM_SummaryRepeat/repeats:3_mean %console_report$"},
+    {"^BM_SummaryRepeat/repeats:3_stddev %console_report$"}
+});
+ADD_CASES(TC_JSONOut, {
+    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+    {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
+    {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"}
+});
+ADD_CASES(TC_CSVOut, {
+    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+    {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
+    {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}
+});
+
 // ========================================================================= //
 // --------------------------- TEST CASES END ------------------------------ //
 // ========================================================================= //
 
 int main(int argc, char* argv[]) {
-  benchmark::Initialize(&argc, argv);
-  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
-  benchmark::JSONReporter JR;
-  benchmark::CSVReporter CSVR;
-  struct ReporterTest {
-    const char* name;
-    std::vector<TestCase>& output_cases;
-    std::vector<TestCase>& error_cases;
-    benchmark::BenchmarkReporter& reporter;
-    std::stringstream out_stream;
-    std::stringstream err_stream;
-
-    ReporterTest(const char* n,
-                 std::vector<TestCase>& out_tc,
-                 std::vector<TestCase>& err_tc,
-                 benchmark::BenchmarkReporter& br)
-        : name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
-      reporter.SetOutputStream(&out_stream);
-      reporter.SetErrorStream(&err_stream);
-    }
-  } TestCases[] = {
-      {"ConsoleReporter", ConsoleOutputTests, ConsoleErrorTests, CR},
-      {"JSONReporter", JSONOutputTests, JSONErrorTests, JR},
-      {"CSVReporter", CSVOutputTests, CSVErrorTests, CSVR}
-  };
-
-  // Create the test reporter and run the benchmarks.
-  std::cout << "Running benchmarks...\n";
-  TestReporter test_rep({&CR, &JR, &CSVR});
-  benchmark::RunSpecifiedBenchmarks(&test_rep);
-
-  for (auto& rep_test : TestCases) {
-    std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
-    std::string banner(msg.size() - 1, '-');
-    std::cout << banner << msg << banner << "\n";
-
-    std::cerr << rep_test.err_stream.str();
-    std::cout << rep_test.out_stream.str();
-
-    for (const auto& TC : rep_test.error_cases)
-      TC.Check(rep_test.err_stream);
-    for (const auto& TC : rep_test.output_cases)
-      TC.Check(rep_test.out_stream);
-
-    std::cout << "\n";
-  }
-  return 0;
+  RunOutputTests(argc, argv);
 }
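For readers unfamiliar with the harness this commit adopts, a migrated test file follows the pattern sketched below. This is a minimal illustration, assuming the output_test.h facilities visible in the diff (ADD_CASES, the TC_ConsoleOut/TC_JSONOut/TC_CSVOut stream IDs, MR_Next, the %-substitutions such as %console_report, %int, and %csv_report, and RunOutputTests); BM_example is a hypothetical benchmark invented for this sketch, not part of the commit.

#undef NDEBUG
#include "benchmark/benchmark.h"
#include "output_test.h"

// Hypothetical benchmark used only to illustrate the harness.
void BM_example(benchmark::State& state) {
  while (state.KeepRunning()) {}
}
BENCHMARK(BM_example);

// Expected-output regexes, registered per reporter at static-init time.
// %console_report, %int, and %csv_report are substitutions the harness
// expands before matching; MR_Next pins a case to the very next line.
ADD_CASES(TC_ConsoleOut, {
    {"^BM_example %console_report$"}
});
ADD_CASES(TC_JSONOut, {
    {"\"name\": \"BM_example\",$"},
    {"\"iterations\": %int,$", MR_Next}
});
ADD_CASES(TC_CSVOut, {
    {"^\"BM_example\",%csv_report$"}
});

int main(int argc, char* argv[]) {
  // Runs the benchmarks, captures console/JSON/CSV output, and checks every
  // registered case -- replacing the hand-rolled reporter loop removed from
  // the old main() above.
  RunOutputTests(argc, argv);
}

The payoff is visible in the diffstat: the per-file boilerplate (regex matching, the multiplexing TestReporter, the checking loop in main()) moves into the shared harness, and each test file shrinks to its cases plus a one-line main().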