///
/// \author John Farrier
///
/// \copyright Copyright 2015, 2016, 2017, 2018, 2019 John Farrier
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///

#include <celero/Benchmark.h>
#include <celero/JUnit.h>
#include <celero/PimplImpl.h>
#include <celero/Timer.h>
#include <celero/Utilities.h>

#include <algorithm>
#include <cassert>
#include <chrono>
#include <cstdint>
#include <fstream>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <vector>

using namespace celero;

///
/// \class celero::JUnit::Impl
///
class celero::JUnit::Impl
{
public:
	std::string fileName;

	/// Results for each benchmark, keyed by benchmark group name. Each result carries
	/// the test case size, measured baseline, objective baseline, and run time in seconds.
	std::map<std::string, std::vector<std::shared_ptr<celero::ExperimentResult>>> results;

	double totalTime{0.0};
};

JUnit& JUnit::Instance()
{
	static JUnit singleton;
	return singleton;
}

void JUnit::setFileName(const std::string& x)
{
	assert(x.empty() == false);
	this->pimpl->fileName = x;
}

void JUnit::add(std::shared_ptr<celero::ExperimentResult> x)
{
	this->pimpl->results[x->getExperiment()->getBenchmark()->getName()].push_back(x);

	// Rewrite the output file after every result so a partial report
	// survives even if the benchmark process terminates early.
	this->save();
}

void JUnit::save()
{
	std::ofstream ofs;
	ofs.open(this->pimpl->fileName);

	if(ofs.is_open() == true)
	{
		const auto os = &ofs;

		*os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" << std::endl;

		for(const auto& i : this->pimpl->results)
		{
			uint64_t testSuiteTime = 0;
			size_t testSuiteFailures = 0;
			size_t testSuiteErrors = 0;

			const auto& runs = i.second;

			// First pass: aggregate suite-level statistics. A run that failed outright is an
			// "error"; a run that missed its objective baseline is a "failure".
			for(const auto& j : runs)
			{
				if(j->getFailure())
				{
					testSuiteErrors++;
					continue;
				}
				else if((j->getExperiment()->getBaselineTarget() > 0.0) && (j->getBaselineMeasurement() > j->getExperiment()->getBaselineTarget()))
				{
					testSuiteFailures++;
				}

				testSuiteTime += j->getRunTime();
			}

			*os << "<testsuite errors=\"" << testSuiteErrors << "\" tests=\"" << runs.size() << "\" failures=\"" << testSuiteFailures
				<< "\" name=\"" << i.first << "\" time=\"" << celero::timer::ConvertSystemTime(testSuiteTime) << "\">" << std::endl;

			// Second pass: emit one <testcase> element per run.
			for(const auto& j : runs)
			{
				*os << "\t<testcase time=\"" << celero::timer::ConvertSystemTime(j->getFailure() ? 0 : j->getRunTime()) << "\" ";
				*os << "name=\"" << j->getExperiment()->getName() << "#" << j->getProblemSpaceValue() << "\"";

				// Compare measured to objective
				if(j->getFailure())
				{
					// Error
					*os << ">" << std::endl;
					*os << "\t\t<error type=\"exception\"/>" << std::endl;
					*os << "\t</testcase>" << std::endl;
				}
				else if((j->getExperiment()->getBaselineTarget() > 0.0) && (j->getBaselineMeasurement() > j->getExperiment()->getBaselineTarget()))
				{
					// Failure
					*os << ">" << std::endl;
					*os << "\t\t<failure type=\"performance\" message=\"Measurement of " << j->getBaselineMeasurement()
						<< " exceeds objective baseline of " << j->getExperiment()->getBaselineTarget() << "\" ";
					*os << "/>" << std::endl;
					*os << "\t</testcase>" << std::endl;
				}
				else
				{
					// Success
					*os << "/>" << std::endl;
				}
			}

			*os << "</testsuite>" << std::endl;
		}

		ofs.close();
	}
}
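
///
/// Usage sketch (illustrative, not part of this translation unit): the reporter
/// is a process-wide singleton, so a driver that wants JUnit XML output only
/// needs to name the destination file and feed results in as they complete.
/// The file name "celero-results.xml" and the variable "result" below are
/// assumptions made for the example.
///
/// \code
/// celero::JUnit::Instance().setFileName("celero-results.xml");
///
/// // As each std::shared_ptr<celero::ExperimentResult> completes:
/// celero::JUnit::Instance().add(result); // add() also calls save(),
///                                        // rewriting the XML file in full.
/// \endcode
///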