"Fossies" - the Fresh Open Source Software Archive  

Source code changes of the file "googletest/src/gtest.cc" between
googletest-release-1.10.0.tar.gz and googletest-release-1.11.0.tar.gz

About: GoogleTest is Google's (unit) testing and mocking framework for C++ tests.

gtest.cc  (googletest-release-1.10.0):gtest.cc  (googletest-release-1.11.0)
skipping to change at line 38 skipping to change at line 38
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// //
// The Google C++ Testing and Mocking Framework (Google Test) // The Google C++ Testing and Mocking Framework (Google Test)
#include "gtest/gtest.h" #include "gtest/gtest.h"
#include "gtest/internal/custom/gtest.h" #include "gtest/internal/custom/gtest.h"
#include "gtest/gtest-spi.h" #include "gtest/gtest-spi.h"
#include <ctype.h> #include <ctype.h>
#include <math.h>
#include <stdarg.h> #include <stdarg.h>
#include <stdio.h> #include <stdio.h>
#include <stdlib.h> #include <stdlib.h>
#include <time.h> #include <time.h>
#include <wchar.h> #include <wchar.h>
#include <wctype.h> #include <wctype.h>
#include <algorithm> #include <algorithm>
#include <chrono> // NOLINT
#include <cmath>
#include <cstdint>
#include <iomanip> #include <iomanip>
#include <limits> #include <limits>
#include <list> #include <list>
#include <map> #include <map>
#include <ostream> // NOLINT #include <ostream> // NOLINT
#include <sstream> #include <sstream>
#include <vector> #include <vector>
#if GTEST_OS_LINUX #if GTEST_OS_LINUX
# define GTEST_HAS_GETTIMEOFDAY_ 1
# include <fcntl.h> // NOLINT # include <fcntl.h> // NOLINT
# include <limits.h> // NOLINT # include <limits.h> // NOLINT
# include <sched.h> // NOLINT # include <sched.h> // NOLINT
// Declares vsnprintf(). This header is not available on Windows. // Declares vsnprintf(). This header is not available on Windows.
# include <strings.h> // NOLINT # include <strings.h> // NOLINT
# include <sys/mman.h> // NOLINT # include <sys/mman.h> // NOLINT
# include <sys/time.h> // NOLINT # include <sys/time.h> // NOLINT
# include <unistd.h> // NOLINT # include <unistd.h> // NOLINT
# include <string> # include <string>
#elif GTEST_OS_ZOS #elif GTEST_OS_ZOS
# define GTEST_HAS_GETTIMEOFDAY_ 1
# include <sys/time.h> // NOLINT # include <sys/time.h> // NOLINT
// On z/OS we additionally need strings.h for strcasecmp. // On z/OS we additionally need strings.h for strcasecmp.
# include <strings.h> // NOLINT # include <strings.h> // NOLINT
#elif GTEST_OS_WINDOWS_MOBILE // We are on Windows CE. #elif GTEST_OS_WINDOWS_MOBILE // We are on Windows CE.
# include <windows.h> // NOLINT # include <windows.h> // NOLINT
# undef min # undef min
#elif GTEST_OS_WINDOWS // We are on Windows proper. #elif GTEST_OS_WINDOWS // We are on Windows proper.
# include <windows.h> // NOLINT # include <windows.h> // NOLINT
# undef min # undef min
#ifdef _MSC_VER
# include <crtdbg.h> // NOLINT # include <crtdbg.h> // NOLINT
# include <debugapi.h> // NOLINT #endif
# include <io.h> // NOLINT # include <io.h> // NOLINT
# include <sys/timeb.h> // NOLINT # include <sys/timeb.h> // NOLINT
# include <sys/types.h> // NOLINT # include <sys/types.h> // NOLINT
# include <sys/stat.h> // NOLINT # include <sys/stat.h> // NOLINT
# if GTEST_OS_WINDOWS_MINGW # if GTEST_OS_WINDOWS_MINGW
// MinGW has gettimeofday() but not _ftime64().
# define GTEST_HAS_GETTIMEOFDAY_ 1
# include <sys/time.h> // NOLINT # include <sys/time.h> // NOLINT
# endif // GTEST_OS_WINDOWS_MINGW # endif // GTEST_OS_WINDOWS_MINGW
#else #else
// Assume other platforms have gettimeofday().
# define GTEST_HAS_GETTIMEOFDAY_ 1
// cpplint thinks that the header is already included, so we want to // cpplint thinks that the header is already included, so we want to
// silence it. // silence it.
# include <sys/time.h> // NOLINT # include <sys/time.h> // NOLINT
# include <unistd.h> // NOLINT # include <unistd.h> // NOLINT
#endif // GTEST_OS_LINUX #endif // GTEST_OS_LINUX
#if GTEST_HAS_EXCEPTIONS #if GTEST_HAS_EXCEPTIONS
# include <stdexcept> # include <stdexcept>
#endif #endif
skipping to change at line 212 skipping to change at line 208
// environment variable. // environment variable.
static const char* GetDefaultFilter() { static const char* GetDefaultFilter() {
const char* const testbridge_test_only = const char* const testbridge_test_only =
internal::posix::GetEnv("TESTBRIDGE_TEST_ONLY"); internal::posix::GetEnv("TESTBRIDGE_TEST_ONLY");
if (testbridge_test_only != nullptr) { if (testbridge_test_only != nullptr) {
return testbridge_test_only; return testbridge_test_only;
} }
return kUniversalFilter; return kUniversalFilter;
} }
// Bazel passes in the argument to '--test_runner_fail_fast' via the
// TESTBRIDGE_TEST_RUNNER_FAIL_FAST environment variable.
static bool GetDefaultFailFast() {
const char* const testbridge_test_runner_fail_fast =
internal::posix::GetEnv("TESTBRIDGE_TEST_RUNNER_FAIL_FAST");
if (testbridge_test_runner_fail_fast != nullptr) {
return strcmp(testbridge_test_runner_fail_fast, "1") == 0;
}
return false;
}
GTEST_DEFINE_bool_(
fail_fast, internal::BoolFromGTestEnv("fail_fast", GetDefaultFailFast()),
"True if and only if a test failure should stop further test execution.");
GTEST_DEFINE_bool_( GTEST_DEFINE_bool_(
also_run_disabled_tests, also_run_disabled_tests,
internal::BoolFromGTestEnv("also_run_disabled_tests", false), internal::BoolFromGTestEnv("also_run_disabled_tests", false),
"Run disabled tests too, in addition to the tests normally being run."); "Run disabled tests too, in addition to the tests normally being run.");
GTEST_DEFINE_bool_( GTEST_DEFINE_bool_(
break_on_failure, internal::BoolFromGTestEnv("break_on_failure", false), break_on_failure, internal::BoolFromGTestEnv("break_on_failure", false),
"True if and only if a failed assertion should be a debugger " "True if and only if a failed assertion should be a debugger "
"break-point."); "break-point.");
skipping to change at line 272 skipping to change at line 283
internal::OutputFlagAlsoCheckEnvVar().c_str()), internal::OutputFlagAlsoCheckEnvVar().c_str()),
"A format (defaults to \"xml\" but can be specified to be \"json\"), " "A format (defaults to \"xml\" but can be specified to be \"json\"), "
"optionally followed by a colon and an output file name or directory. " "optionally followed by a colon and an output file name or directory. "
"A directory is indicated by a trailing pathname separator. " "A directory is indicated by a trailing pathname separator. "
"Examples: \"xml:filename.xml\", \"xml::directoryname/\". " "Examples: \"xml:filename.xml\", \"xml::directoryname/\". "
"If a directory is specified, output files will be created " "If a directory is specified, output files will be created "
"within that directory, with file-names based on the test " "within that directory, with file-names based on the test "
"executable's name and, if necessary, made unique by adding " "executable's name and, if necessary, made unique by adding "
"digits."); "digits.");
GTEST_DEFINE_bool_(
brief, internal::BoolFromGTestEnv("brief", false),
"True if only test failures should be displayed in text output.");
GTEST_DEFINE_bool_(print_time, internal::BoolFromGTestEnv("print_time", true), GTEST_DEFINE_bool_(print_time, internal::BoolFromGTestEnv("print_time", true),
"True if and only if " GTEST_NAME_ "True if and only if " GTEST_NAME_
" should display elapsed time in text output."); " should display elapsed time in text output.");
GTEST_DEFINE_bool_(print_utf8, internal::BoolFromGTestEnv("print_utf8", true), GTEST_DEFINE_bool_(print_utf8, internal::BoolFromGTestEnv("print_utf8", true),
"True if and only if " GTEST_NAME_ "True if and only if " GTEST_NAME_
" prints UTF8 characters as text."); " prints UTF8 characters as text.");
GTEST_DEFINE_int32_( GTEST_DEFINE_int32_(
random_seed, random_seed,
skipping to change at line 333 skipping to change at line 348
flagfile, flagfile,
internal::StringFromGTestEnv("flagfile", ""), internal::StringFromGTestEnv("flagfile", ""),
"This flag specifies the flagfile to read command-line flags from."); "This flag specifies the flagfile to read command-line flags from.");
#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ #endif // GTEST_USE_OWN_FLAGFILE_FLAG_
namespace internal { namespace internal {
// Generates a random number from [0, range), using a Linear // Generates a random number from [0, range), using a Linear
// Congruential Generator (LCG). Crashes if 'range' is 0 or greater // Congruential Generator (LCG). Crashes if 'range' is 0 or greater
// than kMaxRange. // than kMaxRange.
UInt32 Random::Generate(UInt32 range) { uint32_t Random::Generate(uint32_t range) {
// These constants are the same as are used in glibc's rand(3). // These constants are the same as are used in glibc's rand(3).
// Use wider types than necessary to prevent unsigned overflow diagnostics. // Use wider types than necessary to prevent unsigned overflow diagnostics.
state_ = static_cast<UInt32>(1103515245ULL*state_ + 12345U) % kMaxRange; state_ = static_cast<uint32_t>(1103515245ULL*state_ + 12345U) % kMaxRange;
GTEST_CHECK_(range > 0) GTEST_CHECK_(range > 0)
<< "Cannot generate a number in the range [0, 0)."; << "Cannot generate a number in the range [0, 0).";
GTEST_CHECK_(range <= kMaxRange) GTEST_CHECK_(range <= kMaxRange)
<< "Generation of a number in [0, " << range << ") was requested, " << "Generation of a number in [0, " << range << ") was requested, "
<< "but this can only generate numbers in [0, " << kMaxRange << ")."; << "but this can only generate numbers in [0, " << kMaxRange << ").";
// Converting via modulus introduces a bit of downward bias, but // Converting via modulus introduces a bit of downward bias, but
// it's simple, and a linear congruential generator isn't too good // it's simple, and a linear congruential generator isn't too good
// to begin with. // to begin with.
skipping to change at line 406 skipping to change at line 421
void AssertHelper::operator=(const Message& message) const { void AssertHelper::operator=(const Message& message) const {
UnitTest::GetInstance()-> UnitTest::GetInstance()->
AddTestPartResult(data_->type, data_->file, data_->line, AddTestPartResult(data_->type, data_->file, data_->line,
AppendUserMessage(data_->message, message), AppendUserMessage(data_->message, message),
UnitTest::GetInstance()->impl() UnitTest::GetInstance()->impl()
->CurrentOsStackTraceExceptTop(1) ->CurrentOsStackTraceExceptTop(1)
// Skips the stack frame for this function itself. // Skips the stack frame for this function itself.
); // NOLINT ); // NOLINT
} }
namespace {
// When TEST_P is found without a matching INSTANTIATE_TEST_SUITE_P
// to create test cases for it, a synthetic test case is
// inserted to report either an error or a log message.
//
// This configuration bit will likely be removed at some point.
constexpr bool kErrorOnUninstantiatedParameterizedTest = true;
constexpr bool kErrorOnUninstantiatedTypeParameterizedTest = true;
// A test that fails at a given file/line location with a given message.
class FailureTest : public Test {
public:
explicit FailureTest(const CodeLocation& loc, std::string error_message,
bool as_error)
: loc_(loc),
error_message_(std::move(error_message)),
as_error_(as_error) {}
void TestBody() override {
if (as_error_) {
AssertHelper(TestPartResult::kNonFatalFailure, loc_.file.c_str(),
loc_.line, "") = Message() << error_message_;
} else {
std::cout << error_message_ << std::endl;
}
}
private:
const CodeLocation loc_;
const std::string error_message_;
const bool as_error_;
};
} // namespace
std::set<std::string>* GetIgnoredParameterizedTestSuites() {
return UnitTest::GetInstance()->impl()->ignored_parameterized_test_suites();
}
// Add a given test_suite to the list of those allowed to go uninstantiated.
MarkAsIgnored::MarkAsIgnored(const char* test_suite) {
GetIgnoredParameterizedTestSuites()->insert(test_suite);
}
// If this parameterized test suite has no instantiations (and that
// has not been marked as okay), emit a test case reporting that.
void InsertSyntheticTestCase(const std::string& name, CodeLocation location,
bool has_test_p) {
const auto& ignored = *GetIgnoredParameterizedTestSuites();
if (ignored.find(name) != ignored.end()) return;
const char kMissingInstantiation[] = //
" is defined via TEST_P, but never instantiated. None of the test cases "
"will run. Either no INSTANTIATE_TEST_SUITE_P is provided or the only "
"ones provided expand to nothing."
"\n\n"
"Ideally, TEST_P definitions should only ever be included as part of "
"binaries that intend to use them. (As opposed to, for example, being "
"placed in a library that may be linked in to get other utilities.)";
const char kMissingTestCase[] = //
" is instantiated via INSTANTIATE_TEST_SUITE_P, but no tests are "
"defined via TEST_P . No test cases will run."
"\n\n"
"Ideally, INSTANTIATE_TEST_SUITE_P should only ever be invoked from "
"code that always depend on code that provides TEST_P. Failing to do "
"so is often an indication of dead code, e.g. the last TEST_P was "
"removed but the rest got left behind.";
std::string message =
"Parameterized test suite " + name +
(has_test_p ? kMissingInstantiation : kMissingTestCase) +
"\n\n"
"To suppress this error for this test suite, insert the following line "
"(in a non-header) in the namespace it is defined in:"
"\n\n"
"GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(" + name + ");";
std::string full_name = "UninstantiatedParameterizedTestSuite<" + name + ">";
RegisterTest( //
"GoogleTestVerification", full_name.c_str(),
nullptr, // No type parameter.
nullptr, // No value parameter.
location.file.c_str(), location.line, [message, location] {
return new FailureTest(location, message,
kErrorOnUninstantiatedParameterizedTest);
});
}
void RegisterTypeParameterizedTestSuite(const char* test_suite_name,
CodeLocation code_location) {
GetUnitTestImpl()->type_parameterized_test_registry().RegisterTestSuite(
test_suite_name, code_location);
}
void RegisterTypeParameterizedTestSuiteInstantiation(const char* case_name) {
GetUnitTestImpl()
->type_parameterized_test_registry()
.RegisterInstantiation(case_name);
}
void TypeParameterizedTestSuiteRegistry::RegisterTestSuite(
const char* test_suite_name, CodeLocation code_location) {
suites_.emplace(std::string(test_suite_name),
TypeParameterizedTestSuiteInfo(code_location));
}
void TypeParameterizedTestSuiteRegistry::RegisterInstantiation(
const char* test_suite_name) {
auto it = suites_.find(std::string(test_suite_name));
if (it != suites_.end()) {
it->second.instantiated = true;
} else {
GTEST_LOG_(ERROR) << "Unknown type parameterized test suit '"
<< test_suite_name << "'";
}
}
void TypeParameterizedTestSuiteRegistry::CheckForInstantiations() {
const auto& ignored = *GetIgnoredParameterizedTestSuites();
for (const auto& testcase : suites_) {
if (testcase.second.instantiated) continue;
if (ignored.find(testcase.first) != ignored.end()) continue;
std::string message =
"Type parameterized test suite " + testcase.first +
" is defined via REGISTER_TYPED_TEST_SUITE_P, but never instantiated "
"via INSTANTIATE_TYPED_TEST_SUITE_P. None of the test cases will run."
"\n\n"
"Ideally, TYPED_TEST_P definitions should only ever be included as "
"part of binaries that intend to use them. (As opposed to, for "
"example, being placed in a library that may be linked in to get other "
"utilities.)"
"\n\n"
"To suppress this error for this test suite, insert the following line "
"(in a non-header) in the namespace it is defined in:"
"\n\n"
"GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(" +
testcase.first + ");";
std::string full_name =
"UninstantiatedTypeParameterizedTestSuite<" + testcase.first + ">";
RegisterTest( //
"GoogleTestVerification", full_name.c_str(),
nullptr, // No type parameter.
nullptr, // No value parameter.
testcase.second.code_location.file.c_str(),
testcase.second.code_location.line, [message, testcase] {
return new FailureTest(testcase.second.code_location, message,
kErrorOnUninstantiatedTypeParameterizedTest);
});
}
}
// A copy of all command line arguments. Set by InitGoogleTest(). // A copy of all command line arguments. Set by InitGoogleTest().
static ::std::vector<std::string> g_argvs; static ::std::vector<std::string> g_argvs;
::std::vector<std::string> GetArgvs() { ::std::vector<std::string> GetArgvs() {
#if defined(GTEST_CUSTOM_GET_ARGVS_) #if defined(GTEST_CUSTOM_GET_ARGVS_)
// GTEST_CUSTOM_GET_ARGVS_() may return a container of std::string or // GTEST_CUSTOM_GET_ARGVS_() may return a container of std::string or
// ::string. This code converts it to the appropriate type. // ::string. This code converts it to the appropriate type.
const auto& custom = GTEST_CUSTOM_GET_ARGVS_(); const auto& custom = GTEST_CUSTOM_GET_ARGVS_();
return ::std::vector<std::string>(custom.begin(), custom.end()); return ::std::vector<std::string>(custom.begin(), custom.end());
#else // defined(GTEST_CUSTOM_GET_ARGVS_) #else // defined(GTEST_CUSTOM_GET_ARGVS_)
skipping to change at line 478 skipping to change at line 648
if (!output_name.IsDirectory()) if (!output_name.IsDirectory())
return output_name.string(); return output_name.string();
internal::FilePath result(internal::FilePath::GenerateUniqueFileName( internal::FilePath result(internal::FilePath::GenerateUniqueFileName(
output_name, internal::GetCurrentExecutableName(), output_name, internal::GetCurrentExecutableName(),
GetOutputFormat().c_str())); GetOutputFormat().c_str()));
return result.string(); return result.string();
} }
// Returns true if and only if the wildcard pattern matches the string. // Returns true if and only if the wildcard pattern matches the string. Each
// The first ':' or '\0' character in pattern marks the end of it. // pattern consists of regular characters, single-character wildcards (?), and
// // multi-character wildcards (*).
// This recursive algorithm isn't very efficient, but is clear and //
// works well enough for matching test names, which are short. // This function implements a linear-time string globbing algorithm based on
bool UnitTestOptions::PatternMatchesString(const char *pattern, // https://research.swtch.com/glob.
const char *str) { static bool PatternMatchesString(const std::string& name_str,
switch (*pattern) { const char* pattern, const char* pattern_end) {
case '\0': const char* name = name_str.c_str();
case ':': // Either ':' or '\0' marks the end of the pattern. const char* const name_begin = name;
return *str == '\0'; const char* const name_end = name + name_str.size();
case '?': // Matches any single character.
return *str != '\0' && PatternMatchesString(pattern + 1, str + 1); const char* pattern_next = pattern;
case '*': // Matches any string (possibly empty) of characters. const char* name_next = name;
return (*str != '\0' && PatternMatchesString(pattern, str + 1)) ||
PatternMatchesString(pattern + 1, str); while (pattern < pattern_end || name < name_end) {
default: // Non-special character. Matches itself. if (pattern < pattern_end) {
return *pattern == *str && switch (*pattern) {
PatternMatchesString(pattern + 1, str + 1); default: // Match an ordinary character.
if (name < name_end && *name == *pattern) {
++pattern;
++name;
continue;
}
break;
case '?': // Match any single character.
if (name < name_end) {
++pattern;
++name;
continue;
}
break;
case '*':
// Match zero or more characters. Start by skipping over the wildcard
// and matching zero characters from name. If that fails, restart and
// match one more character than the last attempt.
pattern_next = pattern;
name_next = name + 1;
++pattern;
continue;
}
}
// Failed to match a character. Restart if possible.
if (name_begin < name_next && name_next <= name_end) {
pattern = pattern_next;
name = name_next;
continue;
}
return false;
} }
return true;
} }
bool UnitTestOptions::MatchesFilter( bool UnitTestOptions::MatchesFilter(const std::string& name_str,
const std::string& name, const char* filter) { const char* filter) {
const char *cur_pattern = filter; // The filter is a list of patterns separated by colons (:).
for (;;) { const char* pattern = filter;
if (PatternMatchesString(cur_pattern, name.c_str())) { while (true) {
// Find the bounds of this pattern.
const char* const next_sep = strchr(pattern, ':');
const char* const pattern_end =
next_sep != nullptr ? next_sep : pattern + strlen(pattern);
// Check if this pattern matches name_str.
if (PatternMatchesString(name_str, pattern, pattern_end)) {
return true; return true;
} }
// Finds the next pattern in the filter. // Give up on this pattern. However, if we found a pattern separator (:),
cur_pattern = strchr(cur_pattern, ':'); // advance to the next pattern (skipping over the separator) and restart.
if (next_sep == nullptr) {
// Returns if no more pattern can be found.
if (cur_pattern == nullptr) {
return false; return false;
} }
pattern = next_sep + 1;
// Skips the pattern separater (the ':' character).
cur_pattern++;
} }
return true;
} }
// Returns true if and only if the user-specified filter matches the test // Returns true if and only if the user-specified filter matches the test
// suite name and the test name. // suite name and the test name.
bool UnitTestOptions::FilterMatchesTest(const std::string& test_suite_name, bool UnitTestOptions::FilterMatchesTest(const std::string& test_suite_name,
const std::string& test_name) { const std::string& test_name) {
const std::string& full_name = test_suite_name + "." + test_name.c_str(); const std::string& full_name = test_suite_name + "." + test_name.c_str();
// Split --gtest_filter at '-', if there is one, to separate into // Split --gtest_filter at '-', if there is one, to separate into
// positive filter and negative filter portions // positive filter and negative filter portions
skipping to change at line 828 skipping to change at line 1033
// trace but Bar() and CurrentOsStackTraceExceptTop() won't. // trace but Bar() and CurrentOsStackTraceExceptTop() won't.
std::string UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) { std::string UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) {
return os_stack_trace_getter()->CurrentStackTrace( return os_stack_trace_getter()->CurrentStackTrace(
static_cast<int>(GTEST_FLAG(stack_trace_depth)), static_cast<int>(GTEST_FLAG(stack_trace_depth)),
skip_count + 1 skip_count + 1
// Skips the user-specified number of frames plus this function // Skips the user-specified number of frames plus this function
// itself. // itself.
); // NOLINT ); // NOLINT
} }
// Returns the current time in milliseconds. // A helper class for measuring elapsed times.
class Timer {
public:
Timer() : start_(std::chrono::steady_clock::now()) {}
// Return time elapsed in milliseconds since the timer was created.
TimeInMillis Elapsed() {
return std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::steady_clock::now() - start_)
.count();
}
private:
std::chrono::steady_clock::time_point start_;
};
// Returns a timestamp as milliseconds since the epoch. Note this time may jump
// around subject to adjustments by the system, to measure elapsed time use
// Timer instead.
TimeInMillis GetTimeInMillis() { TimeInMillis GetTimeInMillis() {
#if GTEST_OS_WINDOWS_MOBILE || defined(__BORLANDC__) return std::chrono::duration_cast<std::chrono::milliseconds>(
// Difference between 1970-01-01 and 1601-01-01 in milliseconds. std::chrono::system_clock::now() -
// http://analogous.blogspot.com/2005/04/epoch.html std::chrono::system_clock::from_time_t(0))
const TimeInMillis kJavaEpochToWinFileTimeDelta = .count();
static_cast<TimeInMillis>(116444736UL) * 100000UL;
const DWORD kTenthMicrosInMilliSecond = 10000;
SYSTEMTIME now_systime;
FILETIME now_filetime;
ULARGE_INTEGER now_int64;
GetSystemTime(&now_systime);
if (SystemTimeToFileTime(&now_systime, &now_filetime)) {
now_int64.LowPart = now_filetime.dwLowDateTime;
now_int64.HighPart = now_filetime.dwHighDateTime;
now_int64.QuadPart = (now_int64.QuadPart / kTenthMicrosInMilliSecond) -
kJavaEpochToWinFileTimeDelta;
return now_int64.QuadPart;
}
return 0;
#elif GTEST_OS_WINDOWS && !GTEST_HAS_GETTIMEOFDAY_
__timeb64 now;
// MSVC 8 deprecates _ftime64(), so we want to suppress warning 4996
// (deprecated function) there.
GTEST_DISABLE_MSC_DEPRECATED_PUSH_()
_ftime64(&now);
GTEST_DISABLE_MSC_DEPRECATED_POP_()
return static_cast<TimeInMillis>(now.time) * 1000 + now.millitm;
#elif GTEST_HAS_GETTIMEOFDAY_
struct timeval now;
gettimeofday(&now, nullptr);
return static_cast<TimeInMillis>(now.tv_sec) * 1000 + now.tv_usec / 1000;
#else
# error "Don't know how to get the current time on your system."
#endif
} }
// Utilities // Utilities
// class String. // class String.
#if GTEST_OS_WINDOWS_MOBILE #if GTEST_OS_WINDOWS_MOBILE
// Creates a UTF-16 wide string from the given ANSI string, allocating // Creates a UTF-16 wide string from the given ANSI string, allocating
// memory using new. The caller is responsible for deleting the return // memory using new. The caller is responsible for deleting the return
// value using delete[]. Returns the wide string, or NULL if the // value using delete[]. Returns the wide string, or NULL if the
skipping to change at line 1380 skipping to change at line 1571
// Helper function for implementing ASSERT_NEAR. // Helper function for implementing ASSERT_NEAR.
AssertionResult DoubleNearPredFormat(const char* expr1, AssertionResult DoubleNearPredFormat(const char* expr1,
const char* expr2, const char* expr2,
const char* abs_error_expr, const char* abs_error_expr,
double val1, double val1,
double val2, double val2,
double abs_error) { double abs_error) {
const double diff = fabs(val1 - val2); const double diff = fabs(val1 - val2);
if (diff <= abs_error) return AssertionSuccess(); if (diff <= abs_error) return AssertionSuccess();
// Find the value which is closest to zero.
const double min_abs = std::min(fabs(val1), fabs(val2));
// Find the distance to the next double from that value.
const double epsilon =
nextafter(min_abs, std::numeric_limits<double>::infinity()) - min_abs;
// Detect the case where abs_error is so small that EXPECT_NEAR is
// effectively the same as EXPECT_EQUAL, and give an informative error
// message so that the situation can be more easily understood without
// requiring exotic floating-point knowledge.
// Don't do an epsilon check if abs_error is zero because that implies
// that an equality check was actually intended.
if (!(std::isnan)(val1) && !(std::isnan)(val2) && abs_error > 0 &&
abs_error < epsilon) {
return AssertionFailure()
<< "The difference between " << expr1 << " and " << expr2 << " is "
<< diff << ", where\n"
<< expr1 << " evaluates to " << val1 << ",\n"
<< expr2 << " evaluates to " << val2 << ".\nThe abs_error parameter "
<< abs_error_expr << " evaluates to " << abs_error
<< " which is smaller than the minimum distance between doubles for "
"numbers of this magnitude which is "
<< epsilon
<< ", thus making this EXPECT_NEAR check equivalent to "
"EXPECT_EQUAL. Consider using EXPECT_DOUBLE_EQ instead.";
}
return AssertionFailure() return AssertionFailure()
<< "The difference between " << expr1 << " and " << expr2 << "The difference between " << expr1 << " and " << expr2
<< " is " << diff << ", which exceeds " << abs_error_expr << ", where\n" << " is " << diff << ", which exceeds " << abs_error_expr << ", where\n"
<< expr1 << " evaluates to " << val1 << ",\n" << expr1 << " evaluates to " << val1 << ",\n"
<< expr2 << " evaluates to " << val2 << ", and\n" << expr2 << " evaluates to " << val2 << ", and\n"
<< abs_error_expr << " evaluates to " << abs_error << "."; << abs_error_expr << " evaluates to " << abs_error << ".";
} }
// Helper template for implementing FloatLE() and DoubleLE(). // Helper template for implementing FloatLE() and DoubleLE().
template <typename RawType> template <typename RawType>
skipping to change at line 1441 skipping to change at line 1657
// Asserts that val1 is less than, or almost equal to, val2. Fails // Asserts that val1 is less than, or almost equal to, val2. Fails
// otherwise. In particular, it fails if either val1 or val2 is NaN. // otherwise. In particular, it fails if either val1 or val2 is NaN.
AssertionResult DoubleLE(const char* expr1, const char* expr2, AssertionResult DoubleLE(const char* expr1, const char* expr2,
double val1, double val2) { double val1, double val2) {
return internal::FloatingPointLE<double>(expr1, expr2, val1, val2); return internal::FloatingPointLE<double>(expr1, expr2, val1, val2);
} }
namespace internal { namespace internal {
// The helper function for {ASSERT|EXPECT}_EQ with int or enum
// arguments.
AssertionResult CmpHelperEQ(const char* lhs_expression,
const char* rhs_expression,
BiggestInt lhs,
BiggestInt rhs) {
if (lhs == rhs) {
return AssertionSuccess();
}
return EqFailure(lhs_expression,
rhs_expression,
FormatForComparisonFailureMessage(lhs, rhs),
FormatForComparisonFailureMessage(rhs, lhs),
false);
}
// A macro for implementing the helper functions needed to implement
// ASSERT_?? and EXPECT_?? with integer or enum arguments. It is here
// just to avoid copy-and-paste of similar code.
#define GTEST_IMPL_CMP_HELPER_(op_name, op)\
AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \
BiggestInt val1, BiggestInt val2) {\
if (val1 op val2) {\
return AssertionSuccess();\
} else {\
return AssertionFailure() \
<< "Expected: (" << expr1 << ") " #op " (" << expr2\
<< "), actual: " << FormatForComparisonFailureMessage(val1, val2)\
<< " vs " << FormatForComparisonFailureMessage(val2, val1);\
}\
}
// Implements the helper function for {ASSERT|EXPECT}_NE with int or
// enum arguments.
GTEST_IMPL_CMP_HELPER_(NE, !=)
// Implements the helper function for {ASSERT|EXPECT}_LE with int or
// enum arguments.
GTEST_IMPL_CMP_HELPER_(LE, <=)
// Implements the helper function for {ASSERT|EXPECT}_LT with int or
// enum arguments.
GTEST_IMPL_CMP_HELPER_(LT, < )
// Implements the helper function for {ASSERT|EXPECT}_GE with int or
// enum arguments.
GTEST_IMPL_CMP_HELPER_(GE, >=)
// Implements the helper function for {ASSERT|EXPECT}_GT with int or
// enum arguments.
GTEST_IMPL_CMP_HELPER_(GT, > )
#undef GTEST_IMPL_CMP_HELPER_
// The helper function for {ASSERT|EXPECT}_STREQ. // The helper function for {ASSERT|EXPECT}_STREQ.
AssertionResult CmpHelperSTREQ(const char* lhs_expression, AssertionResult CmpHelperSTREQ(const char* lhs_expression,
const char* rhs_expression, const char* rhs_expression,
const char* lhs, const char* lhs,
const char* rhs) { const char* rhs) {
if (String::CStringEquals(lhs, rhs)) { if (String::CStringEquals(lhs, rhs)) {
return AssertionSuccess(); return AssertionSuccess();
} }
return EqFailure(lhs_expression, return EqFailure(lhs_expression,
skipping to change at line 1737 skipping to change at line 1902
// A Unicode code-point can have up to 21 bits, and is encoded in UTF-8 // A Unicode code-point can have up to 21 bits, and is encoded in UTF-8
// like this: // like this:
// //
// Code-point length Encoding // Code-point length Encoding
// 0 - 7 bits 0xxxxxxx // 0 - 7 bits 0xxxxxxx
// 8 - 11 bits 110xxxxx 10xxxxxx // 8 - 11 bits 110xxxxx 10xxxxxx
// 12 - 16 bits 1110xxxx 10xxxxxx 10xxxxxx // 12 - 16 bits 1110xxxx 10xxxxxx 10xxxxxx
// 17 - 21 bits 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx // 17 - 21 bits 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
// Upper bounds of the code-point ranges representable by 1-4 byte UTF-8
// sequences (see the encoding table above).  These are pure compile-time
// constants, so declare them constexpr rather than const.
// The maximum code-point a one-byte UTF-8 sequence can represent.
constexpr UInt32 kMaxCodePoint1 = (static_cast<UInt32>(1) << 7) - 1;
// The maximum code-point a two-byte UTF-8 sequence can represent.
constexpr UInt32 kMaxCodePoint2 = (static_cast<UInt32>(1) << (5 + 6)) - 1;
// The maximum code-point a three-byte UTF-8 sequence can represent.
constexpr UInt32 kMaxCodePoint3 = (static_cast<UInt32>(1) << (4 + 2*6)) - 1;
// The maximum code-point a four-byte UTF-8 sequence can represent.
constexpr UInt32 kMaxCodePoint4 = (static_cast<UInt32>(1) << (3 + 3*6)) - 1;
// Chops off the n lowest bits from a bit pattern.  Returns those n low
// bits, and as a side effect shifts *bits right by n so the caller can
// peel a value apart field by field.
inline UInt32 ChopLowBits(UInt32* bits, int n) {
  // Mask with the n lowest bits set (n is at most 6 at the call sites).
  const UInt32 mask = (static_cast<UInt32>(1) << n) - 1;
  const UInt32 chopped = *bits & mask;
  *bits = *bits >> n;
  return chopped;
}
// Converts a Unicode code point to a narrow string in UTF-8 encoding. // Converts a Unicode code point to a narrow string in UTF-8 encoding.
// code_point parameter is of type UInt32 because wchar_t may not be // code_point parameter is of type uint32_t because wchar_t may not be
// wide enough to contain a code point. // wide enough to contain a code point.
// If the code_point is not a valid Unicode code point // If the code_point is not a valid Unicode code point
// (i.e. outside of Unicode range U+0 to U+10FFFF) it will be converted // (i.e. outside of Unicode range U+0 to U+10FFFF) it will be converted
// to "(Invalid Unicode 0xXXXXXXXX)". // to "(Invalid Unicode 0xXXXXXXXX)".
std::string CodePointToUtf8(UInt32 code_point) { std::string CodePointToUtf8(uint32_t code_point) {
if (code_point > kMaxCodePoint4) { if (code_point > kMaxCodePoint4) {
return "(Invalid Unicode 0x" + String::FormatHexUInt32(code_point) + ")"; return "(Invalid Unicode 0x" + String::FormatHexUInt32(code_point) + ")";
} }
char str[5]; // Big enough for the largest valid code point. char str[5]; // Big enough for the largest valid code point.
if (code_point <= kMaxCodePoint1) { if (code_point <= kMaxCodePoint1) {
str[1] = '\0'; str[1] = '\0';
str[0] = static_cast<char>(code_point); // 0xxxxxxx str[0] = static_cast<char>(code_point); // 0xxxxxxx
} else if (code_point <= kMaxCodePoint2) { } else if (code_point <= kMaxCodePoint2) {
str[2] = '\0'; str[2] = '\0';
skipping to change at line 1804 skipping to change at line 1969
// Determines if the arguments constitute a UTF-16 surrogate pair
// and thus should be combined into a single Unicode code point
// using CreateCodePointFromUtf16SurrogatePair.
inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) {
  // Surrogate pairs only exist when wchar_t is a 16-bit UTF-16 unit.
  if (sizeof(wchar_t) != 2) return false;
  const bool first_is_high_surrogate = (first & 0xFC00) == 0xD800;
  const bool second_is_low_surrogate = (second & 0xFC00) == 0xDC00;
  return first_is_high_surrogate && second_is_low_surrogate;
}
// Creates a Unicode code point from a UTF-16 surrogate pair.
// The caller is expected to have checked IsUtf16SurrogatePair first;
// if wchar_t is not 16 bits wide we fall back to the first unit.
inline UInt32 CreateCodePointFromUtf16SurrogatePair(wchar_t first,
                                                    wchar_t second) {
  const auto high_unit = static_cast<UInt32>(first);
  const auto low_unit = static_cast<UInt32>(second);
  if (sizeof(wchar_t) != 2) {
    // This function should not be called when the condition is
    // false, but we provide a sensible default in case it is.
    return high_unit;
  }
  // Each surrogate contributes its low 10 bits; the pair encodes
  // code points starting at U+10000.
  const UInt32 mask = (1 << 10) - 1;
  return (((high_unit & mask) << 10) | (low_unit & mask)) + 0x10000;
}
// Converts a wide string to a narrow string in UTF-8 encoding. // Converts a wide string to a narrow string in UTF-8 encoding.
// The wide string is assumed to have the following encoding: // The wide string is assumed to have the following encoding:
skipping to change at line 1836 skipping to change at line 2001
// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output // (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output
// as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF16 encoding // as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF16 encoding
// and contains invalid UTF-16 surrogate pairs, values in those pairs // and contains invalid UTF-16 surrogate pairs, values in those pairs
// will be encoded as individual Unicode characters from Basic Normal Plane. // will be encoded as individual Unicode characters from Basic Normal Plane.
std::string WideStringToUtf8(const wchar_t* str, int num_chars) { std::string WideStringToUtf8(const wchar_t* str, int num_chars) {
if (num_chars == -1) if (num_chars == -1)
num_chars = static_cast<int>(wcslen(str)); num_chars = static_cast<int>(wcslen(str));
::std::stringstream stream; ::std::stringstream stream;
for (int i = 0; i < num_chars; ++i) { for (int i = 0; i < num_chars; ++i) {
UInt32 unicode_code_point; uint32_t unicode_code_point;
if (str[i] == L'\0') { if (str[i] == L'\0') {
break; break;
} else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) { } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) {
unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i], unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i],
str[i + 1]); str[i + 1]);
i++; i++;
} else { } else {
unicode_code_point = static_cast<UInt32>(str[i]); unicode_code_point = static_cast<uint32_t>(str[i]);
} }
stream << CodePointToUtf8(unicode_code_point); stream << CodePointToUtf8(unicode_code_point);
} }
return StringStreamToString(&stream); return StringStreamToString(&stream);
} }
// Converts a wide C string to an std::string using the UTF-8 encoding. // Converts a wide C string to an std::string using the UTF-8 encoding.
// NULL will be converted to "(null)". // NULL will be converted to "(null)".
std::string String::ShowWideCString(const wchar_t * wide_c_str) { std::string String::ShowWideCString(const wchar_t * wide_c_str) {
skipping to change at line 1965 skipping to change at line 2130
const std::string& str, const std::string& suffix) { const std::string& str, const std::string& suffix) {
const size_t str_len = str.length(); const size_t str_len = str.length();
const size_t suffix_len = suffix.length(); const size_t suffix_len = suffix.length();
return (str_len >= suffix_len) && return (str_len >= suffix_len) &&
CaseInsensitiveCStringEquals(str.c_str() + str_len - suffix_len, CaseInsensitiveCStringEquals(str.c_str() + str_len - suffix_len,
suffix.c_str()); suffix.c_str());
} }
// Formats an int value as "%02d": two digits, zero-padded.
std::string String::FormatIntWidth2(int value) {
  std::ostringstream formatted;
  formatted << std::setfill('0') << std::setw(2) << value;
  return formatted.str();
}
// Formats an unsigned 32-bit value as "%X" (uppercase hex, no padding).
std::string String::FormatHexUInt32(UInt32 value) {
  std::ostringstream hex_stream;
  hex_stream << std::uppercase << std::hex << value;
  return hex_stream.str();
}
// Formats an int value as "%X".
// The cast makes negative values print as their unsigned 32-bit bit
// pattern (e.g. -1 -> "FFFFFFFF") instead of with a minus sign.
std::string String::FormatHexInt(int value) {
  return FormatHexUInt32(static_cast<UInt32>(value));
}
// Formats a byte as "%02X": two uppercase hex digits, zero-padded.
std::string String::FormatByte(unsigned char value) {
  std::ostringstream byte_stream;
  // Widen to unsigned int so the stream prints a number, not a character.
  byte_stream << std::uppercase << std::hex << std::setw(2)
              << std::setfill('0') << static_cast<unsigned int>(value);
  return byte_stream.str();
}
skipping to change at line 2018 skipping to change at line 2188
} }
// Appends the user-supplied message to the Google-Test-generated message. // Appends the user-supplied message to the Google-Test-generated message.
std::string AppendUserMessage(const std::string& gtest_msg, std::string AppendUserMessage(const std::string& gtest_msg,
const Message& user_msg) { const Message& user_msg) {
// Appends the user message if it's non-empty. // Appends the user message if it's non-empty.
const std::string user_msg_string = user_msg.GetString(); const std::string user_msg_string = user_msg.GetString();
if (user_msg_string.empty()) { if (user_msg_string.empty()) {
return gtest_msg; return gtest_msg;
} }
if (gtest_msg.empty()) {
return user_msg_string;
}
return gtest_msg + "\n" + user_msg_string; return gtest_msg + "\n" + user_msg_string;
} }
} // namespace internal } // namespace internal
// class TestResult // class TestResult
// Creates an empty TestResult.
// The death-test counter, start timestamp, and elapsed time all begin
// at zero; remaining members are default-constructed.
TestResult::TestResult()
    : death_test_count_(0), start_timestamp_(0), elapsed_time_(0) {}
skipping to change at line 2070 skipping to change at line 2242
} }
// Adds a test property to the list. If a property with the same key as the
// supplied property is already represented, the value of this test_property
// replaces the old value for that key.
//
// Properties whose key is reserved for the given xml_element are silently
// dropped by ValidateTestProperty (which reports the error itself).
void TestResult::RecordProperty(const std::string& xml_element,
                                const TestProperty& test_property) {
  if (!ValidateTestProperty(xml_element, test_property)) {
    return;
  }
  // NOTE(review): the member name is misspelled ("properites"); renaming it
  // requires touching the matching declaration in the header as well.
  internal::MutexLock lock(&test_properites_mutex_);
  // Linear scan for an existing property with the same key.
  const std::vector<TestProperty>::iterator property_with_matching_key =
      std::find_if(test_properties_.begin(), test_properties_.end(),
                   internal::TestPropertyKeyIs(test_property.key()));
  if (property_with_matching_key == test_properties_.end()) {
    test_properties_.push_back(test_property);
    return;
  }
  // Key already present: overwrite its value in place.
  property_with_matching_key->SetValue(test_property.value());
}
skipping to change at line 2097 skipping to change at line 2269
"name", "name",
"random_seed", "random_seed",
"tests", "tests",
"time", "time",
"timestamp" "timestamp"
}; };
// The list of reserved attributes used in the <testsuite> element of XML
// output.  "skipped" is reserved as well so a user-recorded property cannot
// collide with the skipped-test counter the XML printer emits.
static const char* const kReservedTestSuiteAttributes[] = {
    "disabled", "errors", "failures",  "name",
    "tests",    "time",   "timestamp", "skipped"};
// The list of reserved attributes used in the <testcase> element of XML
// output.  RecordProperty calls with these keys are rejected.
static const char* const kReservedTestCaseAttributes[] = {
    "classname",   "name", "status", "time", "type_param",
    "value_param", "file", "line"};

// Use a slightly different set for allowed output to ensure existing tests
// can still RecordProperty("result") or RecordProperty("timestamp").
static const char* const kReservedOutputTestCaseAttributes[] = {
    "classname",   "name", "status", "time",   "type_param",
    "value_param", "file", "line",   "result", "timestamp"};
// Copies a fixed-size array of C-string literals into a
// std::vector<std::string>.  kSize is deduced from the array argument;
// it is declared size_t (not int) because array extents are size_t,
// avoiding a signed/unsigned conversion in the deduction.
template <size_t kSize>
std::vector<std::string> ArrayAsVector(const char* const (&array)[kSize]) {
  return std::vector<std::string>(array, array + kSize);
}
static std::vector<std::string> GetReservedAttributesForElement( static std::vector<std::string> GetReservedAttributesForElement(
const std::string& xml_element) { const std::string& xml_element) {
if (xml_element == "testsuites") { if (xml_element == "testsuites") {
return ArrayAsVector(kReservedTestSuitesAttributes); return ArrayAsVector(kReservedTestSuitesAttributes);
} else if (xml_element == "testsuite") { } else if (xml_element == "testsuite") {
return ArrayAsVector(kReservedTestSuiteAttributes); return ArrayAsVector(kReservedTestSuiteAttributes);
skipping to change at line 2554 skipping to change at line 2727
internal::TestFactoryBase* factory) internal::TestFactoryBase* factory)
: test_suite_name_(a_test_suite_name), : test_suite_name_(a_test_suite_name),
name_(a_name), name_(a_name),
type_param_(a_type_param ? new std::string(a_type_param) : nullptr), type_param_(a_type_param ? new std::string(a_type_param) : nullptr),
value_param_(a_value_param ? new std::string(a_value_param) : nullptr), value_param_(a_value_param ? new std::string(a_value_param) : nullptr),
location_(a_code_location), location_(a_code_location),
fixture_class_id_(fixture_class_id), fixture_class_id_(fixture_class_id),
should_run_(false), should_run_(false),
is_disabled_(false), is_disabled_(false),
matches_filter_(false), matches_filter_(false),
is_in_another_shard_(false),
factory_(factory), factory_(factory),
result_() {} result_() {}
// Destructs a TestInfo object.
// TestInfo owns the factory passed at construction, so it is deleted here.
TestInfo::~TestInfo() { delete factory_; }
namespace internal { namespace internal {
// Creates a new TestInfo object and registers it with Google Test; // Creates a new TestInfo object and registers it with Google Test;
// returns the created object. // returns the created object.
// //
// Arguments: // Arguments:
// //
// test_suite_name: name of the test suite // test_suite_name: name of the test suite
// name: name of the test // name: name of the test
// type_param: the name of the test's type parameter, or NULL if // type_param: the name of the test's type parameter, or NULL if
// this is not a typed or a type-parameterized test. // this is not a typed or a type-parameterized test.
// value_param: text representation of the test's value parameter, // value_param: text representation of the test's value parameter,
// or NULL if this is not a value-parameterized test. // or NULL if this is not a value-parameterized test.
// code_location: code location where the test is defined // code_location: code location where the test is defined
// fixture_class_id: ID of the test fixture class // fixture_class_id: ID of the test fixture class
// set_up_tc: pointer to the function that sets up the test suite // set_up_tc: pointer to the function that sets up the test suite
// tear_down_tc: pointer to the function that tears down the test suite // tear_down_tc: pointer to the function that tears down the test suite
// factory: pointer to the factory that creates a test object. // factory: pointer to the factory that creates a test object.
skipping to change at line 2648 skipping to change at line 2822
} // namespace } // namespace
namespace internal { namespace internal {
// This method expands all parameterized tests registered with macros TEST_P // This method expands all parameterized tests registered with macros TEST_P
// and INSTANTIATE_TEST_SUITE_P into regular tests and registers those. // and INSTANTIATE_TEST_SUITE_P into regular tests and registers those.
// This will be done just once during the program runtime. // This will be done just once during the program runtime.
void UnitTestImpl::RegisterParameterizedTests() { void UnitTestImpl::RegisterParameterizedTests() {
if (!parameterized_tests_registered_) { if (!parameterized_tests_registered_) {
parameterized_test_registry_.RegisterTests(); parameterized_test_registry_.RegisterTests();
type_parameterized_test_registry_.CheckForInstantiations();
parameterized_tests_registered_ = true; parameterized_tests_registered_ = true;
} }
} }
} // namespace internal } // namespace internal
// Creates the test object, runs it, records its result, and then // Creates the test object, runs it, records its result, and then
// deletes it. // deletes it.
void TestInfo::Run() { void TestInfo::Run() {
if (!should_run_) return; if (!should_run_) return;
// Tells UnitTest where to store test result. // Tells UnitTest where to store test result.
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
impl->set_current_test_info(this); impl->set_current_test_info(this);
TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
// Notifies the unit test event listeners that a test is about to start. // Notifies the unit test event listeners that a test is about to start.
repeater->OnTestStart(*this); repeater->OnTestStart(*this);
const TimeInMillis start = internal::GetTimeInMillis(); result_.set_start_timestamp(internal::GetTimeInMillis());
internal::Timer timer;
impl->os_stack_trace_getter()->UponLeavingGTest(); impl->os_stack_trace_getter()->UponLeavingGTest();
// Creates the test object. // Creates the test object.
Test* const test = internal::HandleExceptionsInMethodIfSupported( Test* const test = internal::HandleExceptionsInMethodIfSupported(
factory_, &internal::TestFactoryBase::CreateTest, factory_, &internal::TestFactoryBase::CreateTest,
"the test fixture's constructor"); "the test fixture's constructor");
// Runs the test if the constructor didn't generate a fatal failure or invoke // Runs the test if the constructor didn't generate a fatal failure or invoke
// GTEST_SKIP(). // GTEST_SKIP().
skipping to change at line 2693 skipping to change at line 2869
test->Run(); test->Run();
} }
if (test != nullptr) { if (test != nullptr) {
// Deletes the test object. // Deletes the test object.
impl->os_stack_trace_getter()->UponLeavingGTest(); impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported( internal::HandleExceptionsInMethodIfSupported(
test, &Test::DeleteSelf_, "the test fixture's destructor"); test, &Test::DeleteSelf_, "the test fixture's destructor");
} }
result_.set_start_timestamp(start); result_.set_elapsed_time(timer.Elapsed());
result_.set_elapsed_time(internal::GetTimeInMillis() - start);
// Notifies the unit test event listener that a test has just finished. // Notifies the unit test event listener that a test has just finished.
repeater->OnTestEnd(*this); repeater->OnTestEnd(*this);
// Tells UnitTest to stop associating assertion results to this // Tells UnitTest to stop associating assertion results to this
// test. // test.
impl->set_current_test_info(nullptr); impl->set_current_test_info(nullptr);
} }
// Skip and records a skipped test result for this object.
void TestInfo::Skip() {
if (!should_run_) return;
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
impl->set_current_test_info(this);
TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
// Notifies the unit test event listeners that a test is about to start.
repeater->OnTestStart(*this);
const TestPartResult test_part_result =
TestPartResult(TestPartResult::kSkip, this->file(), this->line(), "");
impl->GetTestPartResultReporterForCurrentThread()->ReportTestPartResult(
test_part_result);
// Notifies the unit test event listener that a test has just finished.
repeater->OnTestEnd(*this);
impl->set_current_test_info(nullptr);
}
// class TestSuite

// Gets the number of successful tests in this test suite.
// Counts via the TestPassed predicate over the suite's TestInfo list.
int TestSuite::successful_test_count() const {
  return CountIf(test_info_list_, TestPassed);
}
// Gets the number of successful tests in this test suite. // Gets the number of successful tests in this test suite.
int TestSuite::skipped_test_count() const { int TestSuite::skipped_test_count() const {
return CountIf(test_info_list_, TestSkipped); return CountIf(test_info_list_, TestSkipped);
skipping to change at line 2750 skipping to change at line 2947
// Gets the number of all tests.
// The container size is narrowed to int to match the rest of the
// counting API; test counts never approach INT_MAX in practice.
int TestSuite::total_test_count() const {
  return static_cast<int>(test_info_list_.size());
}
// Creates a TestSuite with the given name. // Creates a TestSuite with the given name.
// //
// Arguments: // Arguments:
// //
// name: name of the test suite // a_name: name of the test suite
// a_type_param: the name of the test suite's type parameter, or NULL if // a_type_param: the name of the test suite's type parameter, or NULL if
// this is not a typed or a type-parameterized test suite. // this is not a typed or a type-parameterized test suite.
// set_up_tc: pointer to the function that sets up the test suite // set_up_tc: pointer to the function that sets up the test suite
// tear_down_tc: pointer to the function that tears down the test suite // tear_down_tc: pointer to the function that tears down the test suite
TestSuite::TestSuite(const char* a_name, const char* a_type_param, TestSuite::TestSuite(const char* a_name, const char* a_type_param,
internal::SetUpTestSuiteFunc set_up_tc, internal::SetUpTestSuiteFunc set_up_tc,
internal::TearDownTestSuiteFunc tear_down_tc) internal::TearDownTestSuiteFunc tear_down_tc)
: name_(a_name), : name_(a_name),
type_param_(a_type_param ? new std::string(a_type_param) : nullptr), type_param_(a_type_param ? new std::string(a_type_param) : nullptr),
set_up_tc_(set_up_tc), set_up_tc_(set_up_tc),
skipping to change at line 2805 skipping to change at line 3002
if (!should_run_) return; if (!should_run_) return;
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
impl->set_current_test_suite(this); impl->set_current_test_suite(this);
TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
// Call both legacy and the new API // Call both legacy and the new API
repeater->OnTestSuiteStart(*this); repeater->OnTestSuiteStart(*this);
// Legacy API is deprecated but still available // Legacy API is deprecated but still available
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
repeater->OnTestCaseStart(*this); repeater->OnTestCaseStart(*this);
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
impl->os_stack_trace_getter()->UponLeavingGTest(); impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported( internal::HandleExceptionsInMethodIfSupported(
this, &TestSuite::RunSetUpTestSuite, "SetUpTestSuite()"); this, &TestSuite::RunSetUpTestSuite, "SetUpTestSuite()");
start_timestamp_ = internal::GetTimeInMillis(); start_timestamp_ = internal::GetTimeInMillis();
internal::Timer timer;
for (int i = 0; i < total_test_count(); i++) { for (int i = 0; i < total_test_count(); i++) {
GetMutableTestInfo(i)->Run(); GetMutableTestInfo(i)->Run();
if (GTEST_FLAG(fail_fast) && GetMutableTestInfo(i)->result()->Failed()) {
for (int j = i + 1; j < total_test_count(); j++) {
GetMutableTestInfo(j)->Skip();
}
break;
}
} }
elapsed_time_ = internal::GetTimeInMillis() - start_timestamp_; elapsed_time_ = timer.Elapsed();
impl->os_stack_trace_getter()->UponLeavingGTest(); impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported( internal::HandleExceptionsInMethodIfSupported(
this, &TestSuite::RunTearDownTestSuite, "TearDownTestSuite()"); this, &TestSuite::RunTearDownTestSuite, "TearDownTestSuite()");
// Call both legacy and the new API // Call both legacy and the new API
repeater->OnTestSuiteEnd(*this); repeater->OnTestSuiteEnd(*this);
// Legacy API is deprecated but still available // Legacy API is deprecated but still available
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
repeater->OnTestCaseEnd(*this);
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
impl->set_current_test_suite(nullptr);
}
// Skips all tests under this TestSuite.
void TestSuite::Skip() {
if (!should_run_) return;
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
impl->set_current_test_suite(this);
TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
// Call both legacy and the new API
repeater->OnTestSuiteStart(*this);
// Legacy API is deprecated but still available
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
repeater->OnTestCaseStart(*this);
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
for (int i = 0; i < total_test_count(); i++) {
GetMutableTestInfo(i)->Skip();
}
// Call both legacy and the new API
repeater->OnTestSuiteEnd(*this);
// Legacy API is deprecated but still available
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
repeater->OnTestCaseEnd(*this); repeater->OnTestCaseEnd(*this);
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
impl->set_current_test_suite(nullptr); impl->set_current_test_suite(nullptr);
} }
// Clears the results of all tests in this test suite.
// Resets both the suite-level ad-hoc result and each individual
// test's recorded result, e.g. between repeated runs.
void TestSuite::ClearResult() {
  ad_hoc_test_result_.Clear();
  ForEach(test_info_list_, TestInfo::ClearTestResult);
}
skipping to change at line 2880 skipping to change at line 3114
return FormatCountableNoun(test_suite_count, "test suite", "test suites"); return FormatCountableNoun(test_suite_count, "test suite", "test suites");
} }
// Converts a TestPartResult::Type enum to human-friendly string
// representation. Both kNonFatalFailure and kFatalFailure are translated
// to "Failure", as the user usually doesn't care about the difference
// between the two when viewing the test result.
static const char * TestPartResultTypeToString(TestPartResult::Type type) {
  switch (type) {
    case TestPartResult::kSkip:
      // Bug fix: carry the trailing newline like the other result strings,
      // so the part message starts on its own line.
      return "Skipped\n";
    case TestPartResult::kSuccess:
      return "Success";

    case TestPartResult::kNonFatalFailure:
    case TestPartResult::kFatalFailure:
#ifdef _MSC_VER
      // MSVC-style prefix lets IDEs parse the output as an error line.
      return "error: ";
#else
      return "Failure\n";
#endif
    default:
      return "Unknown result type";
  }
}
namespace internal { namespace internal {
namespace {
enum class GTestColor { kDefault, kRed, kGreen, kYellow };
} // namespace
// Prints a TestPartResult to an std::string.
// Output shape: "<file>:<line> <result-type-string><message>", where the
// location comes from FormatFileLocation and the type string from
// TestPartResultTypeToString.
static std::string PrintTestPartResultToString(
    const TestPartResult& test_part_result) {
  return (Message()
          << internal::FormatFileLocation(test_part_result.file_name(),
                                          test_part_result.line_number())
          << " " << TestPartResultTypeToString(test_part_result.type())
          << test_part_result.message()).GetString();
}
skipping to change at line 2934 skipping to change at line 3171
#endif #endif
} }
// class PrettyUnitTestResultPrinter // class PrettyUnitTestResultPrinter
#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \ #if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \
!GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW
// Returns the character attribute for the given color.
// Maps a GTestColor onto Windows console FOREGROUND_* bits (yellow is
// red + green); presumably consumed by a console text-attribute call at
// the call site - confirm there.
static WORD GetColorAttribute(GTestColor color) {
  switch (color) {
    case COLOR_RED: return FOREGROUND_RED;
    case COLOR_GREEN: return FOREGROUND_GREEN;
    case COLOR_YELLOW: return FOREGROUND_RED | FOREGROUND_GREEN;
    // COLOR_DEFAULT and anything unknown: no foreground bits.
    default: return 0;
  }
}
static int GetBitOffset(WORD color_mask) { static int GetBitOffset(WORD color_mask) {
if (color_mask == 0) return 0; if (color_mask == 0) return 0;
int bitOffset = 0; int bitOffset = 0;
while ((color_mask & 1) == 0) { while ((color_mask & 1) == 0) {
color_mask >>= 1; color_mask >>= 1;
skipping to change at line 2974 skipping to change at line 3214
if (((new_color & background_mask) >> bg_bitOffset) == if (((new_color & background_mask) >> bg_bitOffset) ==
((new_color & foreground_mask) >> fg_bitOffset)) { ((new_color & foreground_mask) >> fg_bitOffset)) {
new_color ^= FOREGROUND_INTENSITY; // invert intensity new_color ^= FOREGROUND_INTENSITY; // invert intensity
} }
return new_color; return new_color;
} }
#else #else
// Returns the ANSI color code for the given color. COLOR_DEFAULT is
// an invalid input and yields nullptr.
static const char* GetAnsiColorCode(GTestColor color) {
  if (color == COLOR_RED) return "1";
  if (color == COLOR_GREEN) return "2";
  if (color == COLOR_YELLOW) return "3";
  return nullptr;
}
#endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE #endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
// Returns true if and only if Google Test should use colors in the output. // Returns true if and only if Google Test should use colors in the output.
bool ShouldUseColor(bool stdout_is_tty) { bool ShouldUseColor(bool stdout_is_tty) {
const char* const gtest_color = GTEST_FLAG(color).c_str(); const char* const gtest_color = GTEST_FLAG(color).c_str();
skipping to change at line 3029 skipping to change at line 3272
String::CStringEquals(gtest_color, "1"); String::CStringEquals(gtest_color, "1");
// We take "yes", "true", "t", and "1" as meaning "yes". If the // We take "yes", "true", "t", and "1" as meaning "yes". If the
// value is neither one of these nor "auto", we treat it as "no" to // value is neither one of these nor "auto", we treat it as "no" to
// be conservative. // be conservative.
} }
// Helpers for printing colored strings to stdout. Note that on Windows, we // Helpers for printing colored strings to stdout. Note that on Windows, we
// cannot simply emit special characters and have the terminal change colors. // cannot simply emit special characters and have the terminal change colors.
// This routine must actually emit the characters rather than return a string // This routine must actually emit the characters rather than return a string
// that would be colored when printed, as can be done on Linux. // that would be colored when printed, as can be done on Linux.
void ColoredPrintf(GTestColor color, const char* fmt, ...) {
GTEST_ATTRIBUTE_PRINTF_(2, 3)
static void ColoredPrintf(GTestColor color, const char *fmt, ...) {
va_list args; va_list args;
va_start(args, fmt); va_start(args, fmt);
#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS || GTEST_OS_IOS || \ #if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS || GTEST_OS_IOS || \
GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT || defined(ESP_PLATFORM) GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT || defined(ESP_PLATFORM)
const bool use_color = AlwaysFalse(); const bool use_color = AlwaysFalse();
#else #else
static const bool in_color_mode = static const bool in_color_mode =
ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0); ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);
const bool use_color = in_color_mode && (color != COLOR_DEFAULT); const bool use_color = in_color_mode && (color != GTestColor::kDefault);
#endif // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS #endif // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS
if (!use_color) { if (!use_color) {
vprintf(fmt, args); vprintf(fmt, args);
va_end(args); va_end(args);
return; return;
} }
#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \ #if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \
!GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW
skipping to change at line 3136 skipping to change at line 3381
void OnTestSuiteEnd(const TestSuite& test_suite) override; void OnTestSuiteEnd(const TestSuite& test_suite) override;
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override; void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override;
void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {} void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {}
void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override; void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;
void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {} void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {}
private: private:
static void PrintFailedTests(const UnitTest& unit_test); static void PrintFailedTests(const UnitTest& unit_test);
static void PrintFailedTestSuites(const UnitTest& unit_test);
static void PrintSkippedTests(const UnitTest& unit_test); static void PrintSkippedTests(const UnitTest& unit_test);
}; };
// Fired before each iteration of tests starts. // Fired before each iteration of tests starts.
void PrettyUnitTestResultPrinter::OnTestIterationStart( void PrettyUnitTestResultPrinter::OnTestIterationStart(
const UnitTest& unit_test, int iteration) { const UnitTest& unit_test, int iteration) {
if (GTEST_FLAG(repeat) != 1) if (GTEST_FLAG(repeat) != 1)
printf("\nRepeating all tests (iteration %d) . . .\n\n", iteration + 1); printf("\nRepeating all tests (iteration %d) . . .\n\n", iteration + 1);
const char* const filter = GTEST_FLAG(filter).c_str(); const char* const filter = GTEST_FLAG(filter).c_str();
// Prints the filter if it's not *. This reminds the user that some // Prints the filter if it's not *. This reminds the user that some
// tests may be skipped. // tests may be skipped.
if (!String::CStringEquals(filter, kUniversalFilter)) { if (!String::CStringEquals(filter, kUniversalFilter)) {
ColoredPrintf(COLOR_YELLOW, ColoredPrintf(GTestColor::kYellow, "Note: %s filter = %s\n", GTEST_NAME_,
"Note: %s filter = %s\n", GTEST_NAME_, filter); filter);
} }
if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) { if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) {
const Int32 shard_index = Int32FromEnvOrDie(kTestShardIndex, -1); const int32_t shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);
ColoredPrintf(COLOR_YELLOW, ColoredPrintf(GTestColor::kYellow, "Note: This is test shard %d of %s.\n",
"Note: This is test shard %d of %s.\n",
static_cast<int>(shard_index) + 1, static_cast<int>(shard_index) + 1,
internal::posix::GetEnv(kTestTotalShards)); internal::posix::GetEnv(kTestTotalShards));
} }
if (GTEST_FLAG(shuffle)) { if (GTEST_FLAG(shuffle)) {
ColoredPrintf(COLOR_YELLOW, ColoredPrintf(GTestColor::kYellow,
"Note: Randomizing tests' orders with a seed of %d .\n", "Note: Randomizing tests' orders with a seed of %d .\n",
unit_test.random_seed()); unit_test.random_seed());
} }
ColoredPrintf(COLOR_GREEN, "[==========] "); ColoredPrintf(GTestColor::kGreen, "[==========] ");
printf("Running %s from %s.\n", printf("Running %s from %s.\n",
FormatTestCount(unit_test.test_to_run_count()).c_str(), FormatTestCount(unit_test.test_to_run_count()).c_str(),
FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str()); FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
fflush(stdout); fflush(stdout);
} }
void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart( void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart(
const UnitTest& /*unit_test*/) { const UnitTest& /*unit_test*/) {
ColoredPrintf(COLOR_GREEN, "[----------] "); ColoredPrintf(GTestColor::kGreen, "[----------] ");
printf("Global test environment set-up.\n"); printf("Global test environment set-up.\n");
fflush(stdout); fflush(stdout);
} }
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) { void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {
const std::string counts = const std::string counts =
FormatCountableNoun(test_case.test_to_run_count(), "test", "tests"); FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
ColoredPrintf(COLOR_GREEN, "[----------] "); ColoredPrintf(GTestColor::kGreen, "[----------] ");
printf("%s from %s", counts.c_str(), test_case.name()); printf("%s from %s", counts.c_str(), test_case.name());
if (test_case.type_param() == nullptr) { if (test_case.type_param() == nullptr) {
printf("\n"); printf("\n");
} else { } else {
printf(", where %s = %s\n", kTypeParamLabel, test_case.type_param()); printf(", where %s = %s\n", kTypeParamLabel, test_case.type_param());
} }
fflush(stdout); fflush(stdout);
} }
#else #else
void PrettyUnitTestResultPrinter::OnTestSuiteStart( void PrettyUnitTestResultPrinter::OnTestSuiteStart(
const TestSuite& test_suite) { const TestSuite& test_suite) {
const std::string counts = const std::string counts =
FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests"); FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests");
ColoredPrintf(COLOR_GREEN, "[----------] "); ColoredPrintf(GTestColor::kGreen, "[----------] ");
printf("%s from %s", counts.c_str(), test_suite.name()); printf("%s from %s", counts.c_str(), test_suite.name());
if (test_suite.type_param() == nullptr) { if (test_suite.type_param() == nullptr) {
printf("\n"); printf("\n");
} else { } else {
printf(", where %s = %s\n", kTypeParamLabel, test_suite.type_param()); printf(", where %s = %s\n", kTypeParamLabel, test_suite.type_param());
} }
fflush(stdout); fflush(stdout);
} }
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) { void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
ColoredPrintf(COLOR_GREEN, "[ RUN ] "); ColoredPrintf(GTestColor::kGreen, "[ RUN ] ");
PrintTestName(test_info.test_suite_name(), test_info.name()); PrintTestName(test_info.test_suite_name(), test_info.name());
printf("\n"); printf("\n");
fflush(stdout); fflush(stdout);
} }
// Called after an assertion failure. // Called after an assertion failure.
void PrettyUnitTestResultPrinter::OnTestPartResult( void PrettyUnitTestResultPrinter::OnTestPartResult(
const TestPartResult& result) { const TestPartResult& result) {
switch (result.type()) { switch (result.type()) {
// If the test part succeeded, or was skipped, // If the test part succeeded, we don't need to do anything.
// we don't need to do anything.
case TestPartResult::kSkip:
case TestPartResult::kSuccess: case TestPartResult::kSuccess:
return; return;
default: default:
// Print failure message from the assertion // Print failure message from the assertion
// (e.g. expected this and got that). // (e.g. expected this and got that).
PrintTestPartResult(result); PrintTestPartResult(result);
fflush(stdout); fflush(stdout);
} }
} }
void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) { void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
if (test_info.result()->Passed()) { if (test_info.result()->Passed()) {
ColoredPrintf(COLOR_GREEN, "[ OK ] "); ColoredPrintf(GTestColor::kGreen, "[ OK ] ");
} else if (test_info.result()->Skipped()) { } else if (test_info.result()->Skipped()) {
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] "); ColoredPrintf(GTestColor::kGreen, "[ SKIPPED ] ");
} else { } else {
ColoredPrintf(COLOR_RED, "[ FAILED ] "); ColoredPrintf(GTestColor::kRed, "[ FAILED ] ");
} }
PrintTestName(test_info.test_suite_name(), test_info.name()); PrintTestName(test_info.test_suite_name(), test_info.name());
if (test_info.result()->Failed()) if (test_info.result()->Failed())
PrintFullTestCommentIfPresent(test_info); PrintFullTestCommentIfPresent(test_info);
if (GTEST_FLAG(print_time)) { if (GTEST_FLAG(print_time)) {
printf(" (%s ms)\n", internal::StreamableToString( printf(" (%s ms)\n", internal::StreamableToString(
test_info.result()->elapsed_time()).c_str()); test_info.result()->elapsed_time()).c_str());
} else { } else {
printf("\n"); printf("\n");
} }
fflush(stdout); fflush(stdout);
} }
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) { void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) {
if (!GTEST_FLAG(print_time)) return; if (!GTEST_FLAG(print_time)) return;
const std::string counts = const std::string counts =
FormatCountableNoun(test_case.test_to_run_count(), "test", "tests"); FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
ColoredPrintf(COLOR_GREEN, "[----------] "); ColoredPrintf(GTestColor::kGreen, "[----------] ");
printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_case.name(), printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_case.name(),
internal::StreamableToString(test_case.elapsed_time()).c_str()); internal::StreamableToString(test_case.elapsed_time()).c_str());
fflush(stdout); fflush(stdout);
} }
#else #else
void PrettyUnitTestResultPrinter::OnTestSuiteEnd(const TestSuite& test_suite) { void PrettyUnitTestResultPrinter::OnTestSuiteEnd(const TestSuite& test_suite) {
if (!GTEST_FLAG(print_time)) return; if (!GTEST_FLAG(print_time)) return;
const std::string counts = const std::string counts =
FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests"); FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests");
ColoredPrintf(COLOR_GREEN, "[----------] "); ColoredPrintf(GTestColor::kGreen, "[----------] ");
printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_suite.name(), printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_suite.name(),
internal::StreamableToString(test_suite.elapsed_time()).c_str()); internal::StreamableToString(test_suite.elapsed_time()).c_str());
fflush(stdout); fflush(stdout);
} }
#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart( void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart(
const UnitTest& /*unit_test*/) { const UnitTest& /*unit_test*/) {
ColoredPrintf(COLOR_GREEN, "[----------] "); ColoredPrintf(GTestColor::kGreen, "[----------] ");
printf("Global test environment tear-down\n"); printf("Global test environment tear-down\n");
fflush(stdout); fflush(stdout);
} }
// Internal helper for printing the list of failed tests. // Internal helper for printing the list of failed tests.
void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) { void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
const int failed_test_count = unit_test.failed_test_count(); const int failed_test_count = unit_test.failed_test_count();
if (failed_test_count == 0) { ColoredPrintf(GTestColor::kRed, "[ FAILED ] ");
return; printf("%s, listed below:\n", FormatTestCount(failed_test_count).c_str());
}
for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
const TestSuite& test_suite = *unit_test.GetTestSuite(i); const TestSuite& test_suite = *unit_test.GetTestSuite(i);
if (!test_suite.should_run() || (test_suite.failed_test_count() == 0)) { if (!test_suite.should_run() || (test_suite.failed_test_count() == 0)) {
continue; continue;
} }
for (int j = 0; j < test_suite.total_test_count(); ++j) { for (int j = 0; j < test_suite.total_test_count(); ++j) {
const TestInfo& test_info = *test_suite.GetTestInfo(j); const TestInfo& test_info = *test_suite.GetTestInfo(j);
if (!test_info.should_run() || !test_info.result()->Failed()) { if (!test_info.should_run() || !test_info.result()->Failed()) {
continue; continue;
} }
ColoredPrintf(COLOR_RED, "[ FAILED ] "); ColoredPrintf(GTestColor::kRed, "[ FAILED ] ");
printf("%s.%s", test_suite.name(), test_info.name()); printf("%s.%s", test_suite.name(), test_info.name());
PrintFullTestCommentIfPresent(test_info); PrintFullTestCommentIfPresent(test_info);
printf("\n"); printf("\n");
} }
} }
printf("\n%2d FAILED %s\n", failed_test_count,
failed_test_count == 1 ? "TEST" : "TESTS");
}
// Internal helper for printing the list of test suite failures not covered by
// PrintFailedTests, i.e. failures recorded in a suite's ad-hoc result
// (SetUpTestSuite/TearDownTestSuite).
void PrettyUnitTestResultPrinter::PrintFailedTestSuites(
    const UnitTest& unit_test) {
  int suite_failure_count = 0;
  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
    const TestSuite& test_suite = *unit_test.GetTestSuite(i);
    if (!test_suite.should_run()) {
      continue;
    }
    if (test_suite.ad_hoc_test_result().Failed()) {
      ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
      printf("%s: SetUpTestSuite or TearDownTestSuite\n", test_suite.name());
      ++suite_failure_count;
    }
  }
  // Only print the summary when there is something to summarize.
  if (suite_failure_count > 0) {
    printf("\n%2d FAILED TEST %s\n", suite_failure_count,
           suite_failure_count == 1 ? "SUITE" : "SUITES");
  }
}
// Internal helper for printing the list of skipped tests. // Internal helper for printing the list of skipped tests.
void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) { void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
const int skipped_test_count = unit_test.skipped_test_count(); const int skipped_test_count = unit_test.skipped_test_count();
if (skipped_test_count == 0) { if (skipped_test_count == 0) {
return; return;
} }
for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
const TestSuite& test_suite = *unit_test.GetTestSuite(i); const TestSuite& test_suite = *unit_test.GetTestSuite(i);
if (!test_suite.should_run() || (test_suite.skipped_test_count() == 0)) { if (!test_suite.should_run() || (test_suite.skipped_test_count() == 0)) {
continue; continue;
} }
for (int j = 0; j < test_suite.total_test_count(); ++j) { for (int j = 0; j < test_suite.total_test_count(); ++j) {
const TestInfo& test_info = *test_suite.GetTestInfo(j); const TestInfo& test_info = *test_suite.GetTestInfo(j);
if (!test_info.should_run() || !test_info.result()->Skipped()) { if (!test_info.should_run() || !test_info.result()->Skipped()) {
continue; continue;
} }
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] "); ColoredPrintf(GTestColor::kGreen, "[ SKIPPED ] ");
printf("%s.%s", test_suite.name(), test_info.name()); printf("%s.%s", test_suite.name(), test_info.name());
printf("\n"); printf("\n");
} }
} }
} }
void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test, void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
int /*iteration*/) { int /*iteration*/) {
ColoredPrintf(COLOR_GREEN, "[==========] "); ColoredPrintf(GTestColor::kGreen, "[==========] ");
printf("%s from %s ran.", printf("%s from %s ran.",
FormatTestCount(unit_test.test_to_run_count()).c_str(), FormatTestCount(unit_test.test_to_run_count()).c_str(),
FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str()); FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
if (GTEST_FLAG(print_time)) { if (GTEST_FLAG(print_time)) {
printf(" (%s ms total)", printf(" (%s ms total)",
internal::StreamableToString(unit_test.elapsed_time()).c_str()); internal::StreamableToString(unit_test.elapsed_time()).c_str());
} }
printf("\n"); printf("\n");
ColoredPrintf(COLOR_GREEN, "[ PASSED ] "); ColoredPrintf(GTestColor::kGreen, "[ PASSED ] ");
printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str()); printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
const int skipped_test_count = unit_test.skipped_test_count(); const int skipped_test_count = unit_test.skipped_test_count();
if (skipped_test_count > 0) { if (skipped_test_count > 0) {
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] "); ColoredPrintf(GTestColor::kGreen, "[ SKIPPED ] ");
printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str()); printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
PrintSkippedTests(unit_test); PrintSkippedTests(unit_test);
} }
int num_failures = unit_test.failed_test_count();
if (!unit_test.Passed()) { if (!unit_test.Passed()) {
const int failed_test_count = unit_test.failed_test_count();
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
printf("%s, listed below:\n", FormatTestCount(failed_test_count).c_str());
PrintFailedTests(unit_test); PrintFailedTests(unit_test);
printf("\n%2d FAILED %s\n", num_failures, PrintFailedTestSuites(unit_test);
num_failures == 1 ? "TEST" : "TESTS");
} }
int num_disabled = unit_test.reportable_disabled_test_count(); int num_disabled = unit_test.reportable_disabled_test_count();
if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) { if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) {
if (!num_failures) { if (unit_test.Passed()) {
printf("\n"); // Add a spacer if no FAILURE banner is displayed. printf("\n"); // Add a spacer if no FAILURE banner is displayed.
} }
ColoredPrintf(COLOR_YELLOW, ColoredPrintf(GTestColor::kYellow, " YOU HAVE %d DISABLED %s\n\n",
" YOU HAVE %d DISABLED %s\n\n", num_disabled, num_disabled == 1 ? "TEST" : "TESTS");
num_disabled,
num_disabled == 1 ? "TEST" : "TESTS");
} }
// Ensure that Google Test output is printed before, e.g., heapchecker output. // Ensure that Google Test output is printed before, e.g., heapchecker output.
fflush(stdout); fflush(stdout);
} }
// End PrettyUnitTestResultPrinter // End PrettyUnitTestResultPrinter
// This class implements the TestEventListener interface.
//
// Class BriefUnitTestResultPrinter is copyable.  Unlike the pretty printer,
// it stays silent except for failures and the end-of-iteration summary.
class BriefUnitTestResultPrinter : public TestEventListener {
 public:
  BriefUnitTestResultPrinter() {}
  static void PrintTestName(const char* test_suite, const char* test) {
    printf("%s.%s", test_suite, test);
  }

  // The following methods override what's in the TestEventListener class.
  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {}
  void OnTestIterationStart(const UnitTest& /*unit_test*/,
                            int /*iteration*/) override {}
  void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override {}
  void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {}
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
  void OnTestCaseStart(const TestCase& /*test_case*/) override {}
#else
  void OnTestSuiteStart(const TestSuite& /*test_suite*/) override {}
#endif  // OnTestCaseStart

  void OnTestStart(const TestInfo& /*test_info*/) override {}

  void OnTestPartResult(const TestPartResult& result) override;
  void OnTestEnd(const TestInfo& test_info) override;
#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
  void OnTestCaseEnd(const TestCase& /*test_case*/) override {}
#else
  void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override {}
#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_

  void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {}
  void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {}

  void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;
  void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {}
};
// Called after an assertion failure.
void BriefUnitTestResultPrinter::OnTestPartResult(
    const TestPartResult& result) {
  switch (result.type()) {
    // If the test part succeeded, we don't need to do anything.
    case TestPartResult::kSuccess:
      return;
    default:
      // Print failure message from the assertion
      // (e.g. expected this and got that).
      PrintTestPartResult(result);
      fflush(stdout);
  }
}
// Fired after a test finishes.  In brief mode, only failures are reported.
void BriefUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
  if (test_info.result()->Failed()) {
    ColoredPrintf(GTestColor::kRed, "[  FAILED  ] ");
    PrintTestName(test_info.test_suite_name(), test_info.name());
    PrintFullTestCommentIfPresent(test_info);

    if (GTEST_FLAG(print_time)) {
      printf(" (%s ms)\n",
             internal::StreamableToString(test_info.result()->elapsed_time())
                 .c_str());
    } else {
      printf("\n");
    }
    fflush(stdout);
  }
}
// Fired after each iteration; prints the condensed summary (totals, passed,
// skipped count, and a note about disabled tests).
void BriefUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                    int /*iteration*/) {
  ColoredPrintf(GTestColor::kGreen, "[==========] ");
  printf("%s from %s ran.",
         FormatTestCount(unit_test.test_to_run_count()).c_str(),
         FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
  if (GTEST_FLAG(print_time)) {
    printf(" (%s ms total)",
           internal::StreamableToString(unit_test.elapsed_time()).c_str());
  }
  printf("\n");
  ColoredPrintf(GTestColor::kGreen, "[  PASSED  ] ");
  printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());

  const int skipped_test_count = unit_test.skipped_test_count();
  if (skipped_test_count > 0) {
    ColoredPrintf(GTestColor::kGreen, "[  SKIPPED ] ");
    printf("%s.\n", FormatTestCount(skipped_test_count).c_str());
  }

  int num_disabled = unit_test.reportable_disabled_test_count();
  if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) {
    if (unit_test.Passed()) {
      printf("\n");  // Add a spacer if no FAILURE banner is displayed.
    }
    ColoredPrintf(GTestColor::kYellow, "  YOU HAVE %d DISABLED %s\n\n",
                  num_disabled, num_disabled == 1 ? "TEST" : "TESTS");
  }
  // Ensure that Google Test output is printed before, e.g., heapchecker output.
  fflush(stdout);
}
// End BriefUnitTestResultPrinter
// class TestEventRepeater // class TestEventRepeater
// //
// This class forwards events to other event listeners. // This class forwards events to other event listeners.
class TestEventRepeater : public TestEventListener { class TestEventRepeater : public TestEventListener {
public: public:
TestEventRepeater() : forwarding_enabled_(true) {} TestEventRepeater() : forwarding_enabled_(true) {}
~TestEventRepeater() override; ~TestEventRepeater() override;
void Append(TestEventListener *listener); void Append(TestEventListener *listener);
TestEventListener* Release(TestEventListener* listener); TestEventListener* Release(TestEventListener* listener);
skipping to change at line 3566 skipping to change at line 3929
// Verifies that the given attribute belongs to the given element and // Verifies that the given attribute belongs to the given element and
// streams the attribute as XML. // streams the attribute as XML.
static void OutputXmlAttribute(std::ostream* stream, static void OutputXmlAttribute(std::ostream* stream,
const std::string& element_name, const std::string& element_name,
const std::string& name, const std::string& name,
const std::string& value); const std::string& value);
// Streams an XML CDATA section, escaping invalid CDATA sequences as needed. // Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
static void OutputXmlCDataSection(::std::ostream* stream, const char* data); static void OutputXmlCDataSection(::std::ostream* stream, const char* data);
// Streams a test suite XML stanza containing the given test result.
//
// Requires: result.Failed()
static void OutputXmlTestSuiteForTestResult(::std::ostream* stream,
const TestResult& result);
// Streams an XML representation of a TestResult object.
static void OutputXmlTestResult(::std::ostream* stream,
const TestResult& result);
// Streams an XML representation of a TestInfo object. // Streams an XML representation of a TestInfo object.
static void OutputXmlTestInfo(::std::ostream* stream, static void OutputXmlTestInfo(::std::ostream* stream,
const char* test_suite_name, const char* test_suite_name,
const TestInfo& test_info); const TestInfo& test_info);
// Prints an XML representation of a TestSuite object // Prints an XML representation of a TestSuite object
static void PrintXmlTestSuite(::std::ostream* stream, static void PrintXmlTestSuite(::std::ostream* stream,
const TestSuite& test_suite); const TestSuite& test_suite);
// Prints an XML summary of unit_test to output stream out. // Prints an XML summary of unit_test to output stream out.
skipping to change at line 3724 skipping to change at line 4097
// Converts `seconds` (epoch time) to local calendar time in *out using
// whichever thread-safe localtime variant the platform provides.
// Returns false on conversion failure.
static bool PortableLocaltime(time_t seconds, struct tm* out) {
#if defined(_MSC_VER)
  return localtime_s(out, &seconds) == 0;
#elif defined(__MINGW32__) || defined(__MINGW64__)
  // MINGW <time.h> provides neither localtime_r nor localtime_s, but uses
  // Windows' localtime(), which has a thread-local tm buffer.
  struct tm* tm_ptr = localtime(&seconds);  // NOLINT
  if (tm_ptr == nullptr) return false;
  *out = *tm_ptr;
  return true;
#elif defined(__STDC_LIB_EXT1__)
  // Uses localtime_s when available as localtime_r is only available from
  // C23 standard.
  return localtime_s(&seconds, out) != nullptr;
#else
  return localtime_r(&seconds, out) != nullptr;
#endif
}
// Converts the given epoch time in milliseconds to a date string in the ISO
// 8601 format, without the timezone information.  Returns "" if the local
// time conversion fails.
std::string FormatEpochTimeInMillisAsIso8601(TimeInMillis ms) {
  struct tm time_struct;
  if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))
    return "";
  // YYYY-MM-DDThh:mm:ss.sss
  return StreamableToString(time_struct.tm_year + 1900) + "-" +
         String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" +
         String::FormatIntWidth2(time_struct.tm_mday) + "T" +
         String::FormatIntWidth2(time_struct.tm_hour) + ":" +
         String::FormatIntWidth2(time_struct.tm_min) + ":" +
         String::FormatIntWidth2(time_struct.tm_sec) + "." +
         String::FormatIntWidthN(static_cast<int>(ms % 1000), 3);
}
// Streams an XML CDATA section, escaping invalid CDATA sequences as needed. // Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream, void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream,
const char* data) { const char* data) {
const char* segment = data; const char* segment = data;
*stream << "<![CDATA["; *stream << "<![CDATA[";
for (;;) { for (;;) {
const char* const next_segment = strstr(segment, "]]>"); const char* const next_segment = strstr(segment, "]]>");
if (next_segment != nullptr) { if (next_segment != nullptr) {
skipping to change at line 3780 skipping to change at line 4158
GetReservedOutputAttributesForElement(element_name); GetReservedOutputAttributesForElement(element_name);
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
allowed_names.end()) allowed_names.end())
<< "Attribute " << name << " is not allowed for element <" << element_name << "Attribute " << name << " is not allowed for element <" << element_name
<< ">."; << ">.";
*stream << " " << name << "=\"" << EscapeXmlAttribute(value) << "\""; *stream << " " << name << "=\"" << EscapeXmlAttribute(value) << "\"";
} }
// Streams a test suite XML stanza containing the given test result.
//
// Requires: result.Failed()
void XmlUnitTestResultPrinter::OutputXmlTestSuiteForTestResult(
    ::std::ostream* stream, const TestResult& result) {
  // Output the boilerplate for a minimal test suite with one test.
  *stream << "  <testsuite";
  OutputXmlAttribute(stream, "testsuite", "name", "NonTestSuiteFailure");
  OutputXmlAttribute(stream, "testsuite", "tests", "1");
  OutputXmlAttribute(stream, "testsuite", "failures", "1");
  OutputXmlAttribute(stream, "testsuite", "disabled", "0");
  OutputXmlAttribute(stream, "testsuite", "skipped", "0");
  OutputXmlAttribute(stream, "testsuite", "errors", "0");
  OutputXmlAttribute(stream, "testsuite", "time",
                     FormatTimeInMillisAsSeconds(result.elapsed_time()));
  OutputXmlAttribute(
      stream, "testsuite", "timestamp",
      FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));
  *stream << ">";

  // Output the boilerplate for a minimal test case with a single test.
  *stream << "    <testcase";
  OutputXmlAttribute(stream, "testcase", "name", "");
  OutputXmlAttribute(stream, "testcase", "status", "run");
  OutputXmlAttribute(stream, "testcase", "result", "completed");
  OutputXmlAttribute(stream, "testcase", "classname", "");
  OutputXmlAttribute(stream, "testcase", "time",
                     FormatTimeInMillisAsSeconds(result.elapsed_time()));
  OutputXmlAttribute(
      stream, "testcase", "timestamp",
      FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));

  // Output the actual test result.
  OutputXmlTestResult(stream, result);

  // Complete the test suite.
  *stream << "  </testsuite>\n";
}
// Prints an XML representation of a TestInfo object. // Prints an XML representation of a TestInfo object.
void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream, void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream,
const char* test_suite_name, const char* test_suite_name,
const TestInfo& test_info) { const TestInfo& test_info) {
const TestResult& result = *test_info.result(); const TestResult& result = *test_info.result();
const std::string kTestsuite = "testcase"; const std::string kTestsuite = "testcase";
if (test_info.is_in_another_shard()) { if (test_info.is_in_another_shard()) {
return; return;
} }
skipping to change at line 3823 skipping to change at line 4238
test_info.should_run() test_info.should_run()
? (result.Skipped() ? "skipped" : "completed") ? (result.Skipped() ? "skipped" : "completed")
: "suppressed"); : "suppressed");
OutputXmlAttribute(stream, kTestsuite, "time", OutputXmlAttribute(stream, kTestsuite, "time",
FormatTimeInMillisAsSeconds(result.elapsed_time())); FormatTimeInMillisAsSeconds(result.elapsed_time()));
OutputXmlAttribute( OutputXmlAttribute(
stream, kTestsuite, "timestamp", stream, kTestsuite, "timestamp",
FormatEpochTimeInMillisAsIso8601(result.start_timestamp())); FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));
OutputXmlAttribute(stream, kTestsuite, "classname", test_suite_name); OutputXmlAttribute(stream, kTestsuite, "classname", test_suite_name);
OutputXmlTestResult(stream, result);
}
void XmlUnitTestResultPrinter::OutputXmlTestResult(::std::ostream* stream,
const TestResult& result) {
int failures = 0; int failures = 0;
int skips = 0;
for (int i = 0; i < result.total_part_count(); ++i) { for (int i = 0; i < result.total_part_count(); ++i) {
const TestPartResult& part = result.GetTestPartResult(i); const TestPartResult& part = result.GetTestPartResult(i);
if (part.failed()) { if (part.failed()) {
if (++failures == 1) { if (++failures == 1 && skips == 0) {
*stream << ">\n"; *stream << ">\n";
} }
const std::string location = const std::string location =
internal::FormatCompilerIndependentFileLocation(part.file_name(), internal::FormatCompilerIndependentFileLocation(part.file_name(),
part.line_number()); part.line_number());
const std::string summary = location + "\n" + part.summary(); const std::string summary = location + "\n" + part.summary();
*stream << " <failure message=\"" *stream << " <failure message=\""
<< EscapeXmlAttribute(summary.c_str()) << EscapeXmlAttribute(summary)
<< "\" type=\"\">"; << "\" type=\"\">";
const std::string detail = location + "\n" + part.message(); const std::string detail = location + "\n" + part.message();
OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str()); OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str());
*stream << "</failure>\n"; *stream << "</failure>\n";
} else if (part.skipped()) {
if (++skips == 1 && failures == 0) {
*stream << ">\n";
}
const std::string location =
internal::FormatCompilerIndependentFileLocation(part.file_name(),
part.line_number());
const std::string summary = location + "\n" + part.summary();
*stream << " <skipped message=\""
<< EscapeXmlAttribute(summary.c_str()) << "\">";
const std::string detail = location + "\n" + part.message();
OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str());
*stream << "</skipped>\n";
} }
} }
if (failures == 0 && result.test_property_count() == 0) { if (failures == 0 && skips == 0 && result.test_property_count() == 0) {
*stream << " />\n"; *stream << " />\n";
} else { } else {
if (failures == 0) { if (failures == 0 && skips == 0) {
*stream << ">\n"; *stream << ">\n";
} }
OutputXmlTestProperties(stream, result); OutputXmlTestProperties(stream, result);
*stream << " </testcase>\n"; *stream << " </testcase>\n";
} }
} }
// Prints an XML representation of a TestSuite object // Prints an XML representation of a TestSuite object
void XmlUnitTestResultPrinter::PrintXmlTestSuite(std::ostream* stream, void XmlUnitTestResultPrinter::PrintXmlTestSuite(std::ostream* stream,
const TestSuite& test_suite) { const TestSuite& test_suite) {
skipping to change at line 3868 skipping to change at line 4302
*stream << " <" << kTestsuite; *stream << " <" << kTestsuite;
OutputXmlAttribute(stream, kTestsuite, "name", test_suite.name()); OutputXmlAttribute(stream, kTestsuite, "name", test_suite.name());
OutputXmlAttribute(stream, kTestsuite, "tests", OutputXmlAttribute(stream, kTestsuite, "tests",
StreamableToString(test_suite.reportable_test_count())); StreamableToString(test_suite.reportable_test_count()));
if (!GTEST_FLAG(list_tests)) { if (!GTEST_FLAG(list_tests)) {
OutputXmlAttribute(stream, kTestsuite, "failures", OutputXmlAttribute(stream, kTestsuite, "failures",
StreamableToString(test_suite.failed_test_count())); StreamableToString(test_suite.failed_test_count()));
OutputXmlAttribute( OutputXmlAttribute(
stream, kTestsuite, "disabled", stream, kTestsuite, "disabled",
StreamableToString(test_suite.reportable_disabled_test_count())); StreamableToString(test_suite.reportable_disabled_test_count()));
OutputXmlAttribute(stream, kTestsuite, "skipped",
StreamableToString(test_suite.skipped_test_count()));
OutputXmlAttribute(stream, kTestsuite, "errors", "0"); OutputXmlAttribute(stream, kTestsuite, "errors", "0");
OutputXmlAttribute(stream, kTestsuite, "time", OutputXmlAttribute(stream, kTestsuite, "time",
FormatTimeInMillisAsSeconds(test_suite.elapsed_time())); FormatTimeInMillisAsSeconds(test_suite.elapsed_time()));
OutputXmlAttribute( OutputXmlAttribute(
stream, kTestsuite, "timestamp", stream, kTestsuite, "timestamp",
FormatEpochTimeInMillisAsIso8601(test_suite.start_timestamp())); FormatEpochTimeInMillisAsIso8601(test_suite.start_timestamp()));
*stream << TestPropertiesAsXmlAttributes(test_suite.ad_hoc_test_result()); *stream << TestPropertiesAsXmlAttributes(test_suite.ad_hoc_test_result());
} }
*stream << ">\n"; *stream << ">\n";
for (int i = 0; i < test_suite.total_test_count(); ++i) { for (int i = 0; i < test_suite.total_test_count(); ++i) {
if (test_suite.GetTestInfo(i)->is_reportable()) if (test_suite.GetTestInfo(i)->is_reportable())
skipping to change at line 3919 skipping to change at line 4357
} }
*stream << TestPropertiesAsXmlAttributes(unit_test.ad_hoc_test_result()); *stream << TestPropertiesAsXmlAttributes(unit_test.ad_hoc_test_result());
OutputXmlAttribute(stream, kTestsuites, "name", "AllTests"); OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
*stream << ">\n"; *stream << ">\n";
for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
if (unit_test.GetTestSuite(i)->reportable_test_count() > 0) if (unit_test.GetTestSuite(i)->reportable_test_count() > 0)
PrintXmlTestSuite(stream, *unit_test.GetTestSuite(i)); PrintXmlTestSuite(stream, *unit_test.GetTestSuite(i));
} }
// If there was a test failure outside of one of the test suites (like in a
// test environment) include that in the output.
if (unit_test.ad_hoc_test_result().Failed()) {
OutputXmlTestSuiteForTestResult(stream, unit_test.ad_hoc_test_result());
}
*stream << "</" << kTestsuites << ">\n"; *stream << "</" << kTestsuites << ">\n";
} }
void XmlUnitTestResultPrinter::PrintXmlTestsList( void XmlUnitTestResultPrinter::PrintXmlTestsList(
std::ostream* stream, const std::vector<TestSuite*>& test_suites) { std::ostream* stream, const std::vector<TestSuite*>& test_suites) {
const std::string kTestsuites = "testsuites"; const std::string kTestsuites = "testsuites";
*stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"; *stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
*stream << "<" << kTestsuites; *stream << "<" << kTestsuites;
skipping to change at line 4009 skipping to change at line 4454
const std::string& value, const std::string& value,
const std::string& indent, const std::string& indent,
bool comma = true); bool comma = true);
static void OutputJsonKey(std::ostream* stream, static void OutputJsonKey(std::ostream* stream,
const std::string& element_name, const std::string& element_name,
const std::string& name, const std::string& name,
int value, int value,
const std::string& indent, const std::string& indent,
bool comma = true); bool comma = true);
// Streams a test suite JSON stanza containing the given test result.
//
// Requires: result.Failed()
static void OutputJsonTestSuiteForTestResult(::std::ostream* stream,
const TestResult& result);
// Streams a JSON representation of a TestResult object.
static void OutputJsonTestResult(::std::ostream* stream,
const TestResult& result);
// Streams a JSON representation of a TestInfo object. // Streams a JSON representation of a TestInfo object.
static void OutputJsonTestInfo(::std::ostream* stream, static void OutputJsonTestInfo(::std::ostream* stream,
const char* test_suite_name, const char* test_suite_name,
const TestInfo& test_info); const TestInfo& test_info);
// Prints a JSON representation of a TestSuite object // Prints a JSON representation of a TestSuite object
static void PrintJsonTestSuite(::std::ostream* stream, static void PrintJsonTestSuite(::std::ostream* stream,
const TestSuite& test_suite); const TestSuite& test_suite);
// Prints a JSON summary of unit_test to output stream out. // Prints a JSON summary of unit_test to output stream out.
skipping to change at line 4159 skipping to change at line 4614
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
allowed_names.end()) allowed_names.end())
<< "Key \"" << name << "\" is not allowed for value \"" << element_name << "Key \"" << name << "\" is not allowed for value \"" << element_name
<< "\"."; << "\".";
*stream << indent << "\"" << name << "\": " << StreamableToString(value); *stream << indent << "\"" << name << "\": " << StreamableToString(value);
if (comma) if (comma)
*stream << ",\n"; *stream << ",\n";
} }
// Streams a test suite JSON stanza containing the given test result.
void JsonUnitTestResultPrinter::OutputJsonTestSuiteForTestResult(
::std::ostream* stream, const TestResult& result) {
// Output the boilerplate for a new test suite.
*stream << Indent(4) << "{\n";
OutputJsonKey(stream, "testsuite", "name", "NonTestSuiteFailure", Indent(6));
OutputJsonKey(stream, "testsuite", "tests", 1, Indent(6));
if (!GTEST_FLAG(list_tests)) {
OutputJsonKey(stream, "testsuite", "failures", 1, Indent(6));
OutputJsonKey(stream, "testsuite", "disabled", 0, Indent(6));
OutputJsonKey(stream, "testsuite", "skipped", 0, Indent(6));
OutputJsonKey(stream, "testsuite", "errors", 0, Indent(6));
OutputJsonKey(stream, "testsuite", "time",
FormatTimeInMillisAsDuration(result.elapsed_time()),
Indent(6));
OutputJsonKey(stream, "testsuite", "timestamp",
FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
Indent(6));
}
*stream << Indent(6) << "\"testsuite\": [\n";
// Output the boilerplate for a new test case.
*stream << Indent(8) << "{\n";
OutputJsonKey(stream, "testcase", "name", "", Indent(10));
OutputJsonKey(stream, "testcase", "status", "RUN", Indent(10));
OutputJsonKey(stream, "testcase", "result", "COMPLETED", Indent(10));
OutputJsonKey(stream, "testcase", "timestamp",
FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
Indent(10));
OutputJsonKey(stream, "testcase", "time",
FormatTimeInMillisAsDuration(result.elapsed_time()),
Indent(10));
OutputJsonKey(stream, "testcase", "classname", "", Indent(10), false);
*stream << TestPropertiesAsJson(result, Indent(10));
// Output the actual test result.
OutputJsonTestResult(stream, result);
// Finish the test suite.
*stream << "\n" << Indent(6) << "]\n" << Indent(4) << "}";
}
// Prints a JSON representation of a TestInfo object. // Prints a JSON representation of a TestInfo object.
void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream, void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream,
const char* test_suite_name, const char* test_suite_name,
const TestInfo& test_info) { const TestInfo& test_info) {
const TestResult& result = *test_info.result(); const TestResult& result = *test_info.result();
const std::string kTestsuite = "testcase"; const std::string kTestsuite = "testcase";
const std::string kIndent = Indent(10); const std::string kIndent = Indent(10);
*stream << Indent(8) << "{\n"; *stream << Indent(8) << "{\n";
OutputJsonKey(stream, kTestsuite, "name", test_info.name(), kIndent); OutputJsonKey(stream, kTestsuite, "name", test_info.name(), kIndent);
skipping to change at line 4201 skipping to change at line 4698
kIndent); kIndent);
OutputJsonKey(stream, kTestsuite, "timestamp", OutputJsonKey(stream, kTestsuite, "timestamp",
FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()), FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
kIndent); kIndent);
OutputJsonKey(stream, kTestsuite, "time", OutputJsonKey(stream, kTestsuite, "time",
FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent); FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent);
OutputJsonKey(stream, kTestsuite, "classname", test_suite_name, kIndent, OutputJsonKey(stream, kTestsuite, "classname", test_suite_name, kIndent,
false); false);
*stream << TestPropertiesAsJson(result, kIndent); *stream << TestPropertiesAsJson(result, kIndent);
OutputJsonTestResult(stream, result);
}
void JsonUnitTestResultPrinter::OutputJsonTestResult(::std::ostream* stream,
const TestResult& result) {
const std::string kIndent = Indent(10);
int failures = 0; int failures = 0;
for (int i = 0; i < result.total_part_count(); ++i) { for (int i = 0; i < result.total_part_count(); ++i) {
const TestPartResult& part = result.GetTestPartResult(i); const TestPartResult& part = result.GetTestPartResult(i);
if (part.failed()) { if (part.failed()) {
*stream << ",\n"; *stream << ",\n";
if (++failures == 1) { if (++failures == 1) {
*stream << kIndent << "\"" << "failures" << "\": [\n"; *stream << kIndent << "\"" << "failures" << "\": [\n";
} }
const std::string location = const std::string location =
internal::FormatCompilerIndependentFileLocation(part.file_name(), internal::FormatCompilerIndependentFileLocation(part.file_name(),
skipping to change at line 4311 skipping to change at line 4815
if (unit_test.GetTestSuite(i)->reportable_test_count() > 0) { if (unit_test.GetTestSuite(i)->reportable_test_count() > 0) {
if (comma) { if (comma) {
*stream << ",\n"; *stream << ",\n";
} else { } else {
comma = true; comma = true;
} }
PrintJsonTestSuite(stream, *unit_test.GetTestSuite(i)); PrintJsonTestSuite(stream, *unit_test.GetTestSuite(i));
} }
} }
// If there was a test failure outside of one of the test suites (like in a
// test environment) include that in the output.
if (unit_test.ad_hoc_test_result().Failed()) {
OutputJsonTestSuiteForTestResult(stream, unit_test.ad_hoc_test_result());
}
*stream << "\n" << kIndent << "]\n" << "}\n"; *stream << "\n" << kIndent << "]\n" << "}\n";
} }
void JsonUnitTestResultPrinter::PrintJsonTestList( void JsonUnitTestResultPrinter::PrintJsonTestList(
std::ostream* stream, const std::vector<TestSuite*>& test_suites) { std::ostream* stream, const std::vector<TestSuite*>& test_suites) {
const std::string kTestsuites = "testsuites"; const std::string kTestsuites = "testsuites";
const std::string kIndent = Indent(2); const std::string kIndent = Indent(2);
*stream << "{\n"; *stream << "{\n";
int total_tests = 0; int total_tests = 0;
for (auto test_suite : test_suites) { for (auto test_suite : test_suites) {
skipping to change at line 4510 skipping to change at line 5020
// create the file with a single "0" character in it. I/O // create the file with a single "0" character in it. I/O
// errors are ignored as there's nothing better we can do and we // errors are ignored as there's nothing better we can do and we
// don't want to fail the test because of this. // don't want to fail the test because of this.
FILE* pfile = posix::FOpen(premature_exit_filepath, "w"); FILE* pfile = posix::FOpen(premature_exit_filepath, "w");
fwrite("0", 1, 1, pfile); fwrite("0", 1, 1, pfile);
fclose(pfile); fclose(pfile);
} }
} }
~ScopedPrematureExitFile() { ~ScopedPrematureExitFile() {
#if !defined GTEST_OS_ESP8266
if (!premature_exit_filepath_.empty()) { if (!premature_exit_filepath_.empty()) {
int retval = remove(premature_exit_filepath_.c_str()); int retval = remove(premature_exit_filepath_.c_str());
if (retval) { if (retval) {
GTEST_LOG_(ERROR) << "Failed to remove premature exit filepath \"" GTEST_LOG_(ERROR) << "Failed to remove premature exit filepath \""
<< premature_exit_filepath_ << "\" with error " << premature_exit_filepath_ << "\" with error "
<< retval; << retval;
} }
} }
#endif
} }
private: private:
const std::string premature_exit_filepath_; const std::string premature_exit_filepath_;
GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedPrematureExitFile); GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedPrematureExitFile);
}; };
} // namespace internal } // namespace internal
skipping to change at line 4910 skipping to change at line 5422
# if defined(_MSC_VER) && !GTEST_OS_WINDOWS_MOBILE # if defined(_MSC_VER) && !GTEST_OS_WINDOWS_MOBILE
// In the debug version, Visual Studio pops up a separate dialog // In the debug version, Visual Studio pops up a separate dialog
// offering a choice to debug the aborted program. We need to suppress // offering a choice to debug the aborted program. We need to suppress
// this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement
// executed. Google Test will notify the user of any unexpected // executed. Google Test will notify the user of any unexpected
// failure via stderr. // failure via stderr.
if (!GTEST_FLAG(break_on_failure)) if (!GTEST_FLAG(break_on_failure))
_set_abort_behavior( _set_abort_behavior(
0x0, // Clear the following flags: 0x0, // Clear the following flags:
_WRITE_ABORT_MSG | _CALL_REPORTFAULT); // pop-up window, core dump. _WRITE_ABORT_MSG | _CALL_REPORTFAULT); // pop-up window, core dump.
# endif
// In debug mode, the Windows CRT can crash with an assertion over invalid // In debug mode, the Windows CRT can crash with an assertion over invalid
// input (e.g. passing an invalid file descriptor). The default handling // input (e.g. passing an invalid file descriptor). The default handling
// for these assertions is to pop up a dialog and wait for user input. // for these assertions is to pop up a dialog and wait for user input.
// Instead ask the CRT to dump such assertions to stderr non-interactively. // Instead ask the CRT to dump such assertions to stderr non-interactively.
if (!IsDebuggerPresent()) { if (!IsDebuggerPresent()) {
(void)_CrtSetReportMode(_CRT_ASSERT, (void)_CrtSetReportMode(_CRT_ASSERT,
_CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG); _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);
(void)_CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR); (void)_CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);
} }
# endif
} }
#endif // GTEST_OS_WINDOWS #endif // GTEST_OS_WINDOWS
return internal::HandleExceptionsInMethodIfSupported( return internal::HandleExceptionsInMethodIfSupported(
impl(), impl(),
&internal::UnitTestImpl::RunAllTests, &internal::UnitTestImpl::RunAllTests,
"auxiliary test code (environments or event listeners)") ? 0 : 1; "auxiliary test code (environments or event listeners)") ? 0 : 1;
} }
// Returns the working directory when the first TEST() or TEST_F() was // Returns the working directory when the first TEST() or TEST_F() was
skipping to change at line 5131 skipping to change at line 5643
// Registers parameterized tests. This makes parameterized tests // Registers parameterized tests. This makes parameterized tests
// available to the UnitTest reflection API without running // available to the UnitTest reflection API without running
// RUN_ALL_TESTS. // RUN_ALL_TESTS.
RegisterParameterizedTests(); RegisterParameterizedTests();
// Configures listeners for XML output. This makes it possible for users // Configures listeners for XML output. This makes it possible for users
// to shut down the default XML output before invoking RUN_ALL_TESTS. // to shut down the default XML output before invoking RUN_ALL_TESTS.
ConfigureXmlOutput(); ConfigureXmlOutput();
if (GTEST_FLAG(brief)) {
listeners()->SetDefaultResultPrinter(new BriefUnitTestResultPrinter);
}
#if GTEST_CAN_STREAM_RESULTS_ #if GTEST_CAN_STREAM_RESULTS_
// Configures listeners for streaming test results to the specified server. // Configures listeners for streaming test results to the specified server.
ConfigureStreamingOutput(); ConfigureStreamingOutput();
#endif // GTEST_CAN_STREAM_RESULTS_ #endif // GTEST_CAN_STREAM_RESULTS_
#if GTEST_HAS_ABSL #if GTEST_HAS_ABSL
if (GTEST_FLAG(install_failure_signal_handler)) { if (GTEST_FLAG(install_failure_signal_handler)) {
absl::FailureSignalHandlerOptions options; absl::FailureSignalHandlerOptions options;
absl::InstallFailureSignalHandler(options); absl::InstallFailureSignalHandler(options);
} }
skipping to change at line 5176 skipping to change at line 5692
}; };
// Finds and returns a TestSuite with the given name. If one doesn't // Finds and returns a TestSuite with the given name. If one doesn't
// exist, creates one and returns it. It's the CALLER'S // exist, creates one and returns it. It's the CALLER'S
// RESPONSIBILITY to ensure that this function is only called WHEN THE // RESPONSIBILITY to ensure that this function is only called WHEN THE
// TESTS ARE NOT SHUFFLED. // TESTS ARE NOT SHUFFLED.
// //
// Arguments: // Arguments:
// //
// test_suite_name: name of the test suite // test_suite_name: name of the test suite
// type_param: the name of the test suite's type parameter, or NULL if // type_param: the name of the test suite's type parameter, or NULL if
// this is not a typed or a type-parameterized test suite. // this is not a typed or a type-parameterized test suite.
// set_up_tc: pointer to the function that sets up the test suite // set_up_tc: pointer to the function that sets up the test suite
// tear_down_tc: pointer to the function that tears down the test suite // tear_down_tc: pointer to the function that tears down the test suite
TestSuite* UnitTestImpl::GetTestSuite( TestSuite* UnitTestImpl::GetTestSuite(
const char* test_suite_name, const char* type_param, const char* test_suite_name, const char* type_param,
internal::SetUpTestSuiteFunc set_up_tc, internal::SetUpTestSuiteFunc set_up_tc,
internal::TearDownTestSuiteFunc tear_down_tc) { internal::TearDownTestSuiteFunc tear_down_tc) {
// Can we find a TestSuite with the given name? // Can we find a TestSuite with the given name?
const auto test_suite = const auto test_suite =
std::find_if(test_suites_.rbegin(), test_suites_.rend(), std::find_if(test_suites_.rbegin(), test_suites_.rend(),
TestSuiteNameIs(test_suite_name)); TestSuiteNameIs(test_suite_name));
if (test_suite != test_suites_.rend()) return *test_suite; if (test_suite != test_suites_.rend()) return *test_suite;
skipping to change at line 5297 skipping to change at line 5813
// How many times to repeat the tests? We don't want to repeat them // How many times to repeat the tests? We don't want to repeat them
// when we are inside the subprocess of a death test. // when we are inside the subprocess of a death test.
const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG(repeat); const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG(repeat);
// Repeats forever if the repeat count is negative. // Repeats forever if the repeat count is negative.
const bool gtest_repeat_forever = repeat < 0; const bool gtest_repeat_forever = repeat < 0;
for (int i = 0; gtest_repeat_forever || i != repeat; i++) { for (int i = 0; gtest_repeat_forever || i != repeat; i++) {
// We want to preserve failures generated by ad-hoc test // We want to preserve failures generated by ad-hoc test
// assertions executed before RUN_ALL_TESTS(). // assertions executed before RUN_ALL_TESTS().
ClearNonAdHocTestResult(); ClearNonAdHocTestResult();
const TimeInMillis start = GetTimeInMillis(); Timer timer;
// Shuffles test suites and tests if requested. // Shuffles test suites and tests if requested.
if (has_tests_to_run && GTEST_FLAG(shuffle)) { if (has_tests_to_run && GTEST_FLAG(shuffle)) {
random()->Reseed(static_cast<UInt32>(random_seed_)); random()->Reseed(static_cast<uint32_t>(random_seed_));
// This should be done before calling OnTestIterationStart(), // This should be done before calling OnTestIterationStart(),
// such that a test event listener can see the actual test order // such that a test event listener can see the actual test order
// in the event. // in the event.
ShuffleTests(); ShuffleTests();
} }
// Tells the unit test event listeners that the tests are about to start. // Tells the unit test event listeners that the tests are about to start.
repeater->OnTestIterationStart(*parent_, i); repeater->OnTestIterationStart(*parent_, i);
// Runs each test suite if there is at least one test to run. // Runs each test suite if there is at least one test to run.
skipping to change at line 5338 skipping to change at line 5854
if (test_part_result.type() == TestPartResult::kSkip) { if (test_part_result.type() == TestPartResult::kSkip) {
const std::string& result = test_part_result.message(); const std::string& result = test_part_result.message();
printf("%s\n", result.c_str()); printf("%s\n", result.c_str());
} }
} }
fflush(stdout); fflush(stdout);
} else if (!Test::HasFatalFailure()) { } else if (!Test::HasFatalFailure()) {
for (int test_index = 0; test_index < total_test_suite_count(); for (int test_index = 0; test_index < total_test_suite_count();
test_index++) { test_index++) {
GetMutableSuiteCase(test_index)->Run(); GetMutableSuiteCase(test_index)->Run();
if (GTEST_FLAG(fail_fast) &&
GetMutableSuiteCase(test_index)->Failed()) {
for (int j = test_index + 1; j < total_test_suite_count(); j++) {
GetMutableSuiteCase(j)->Skip();
}
break;
}
}
} else if (Test::HasFatalFailure()) {
// If there was a fatal failure during the global setup then we know we
// aren't going to run any tests. Explicitly mark all of the tests as
// skipped to make this obvious in the output.
for (int test_index = 0; test_index < total_test_suite_count();
test_index++) {
GetMutableSuiteCase(test_index)->Skip();
} }
} }
// Tears down all environments in reverse order afterwards. // Tears down all environments in reverse order afterwards.
repeater->OnEnvironmentsTearDownStart(*parent_); repeater->OnEnvironmentsTearDownStart(*parent_);
std::for_each(environments_.rbegin(), environments_.rend(), std::for_each(environments_.rbegin(), environments_.rend(),
TearDownEnvironment); TearDownEnvironment);
repeater->OnEnvironmentsTearDownEnd(*parent_); repeater->OnEnvironmentsTearDownEnd(*parent_);
} }
elapsed_time_ = GetTimeInMillis() - start; elapsed_time_ = timer.Elapsed();
// Tells the unit test event listener that the tests have just finished. // Tells the unit test event listener that the tests have just finished.
repeater->OnTestIterationEnd(*parent_, i); repeater->OnTestIterationEnd(*parent_, i);
// Gets the result and clears it. // Gets the result and clears it.
if (!Passed()) { if (!Passed()) {
failed = true; failed = true;
} }
// Restores the original test order after the iteration. This // Restores the original test order after the iteration. This
skipping to change at line 5376 skipping to change at line 5907
if (GTEST_FLAG(shuffle)) { if (GTEST_FLAG(shuffle)) {
// Picks a new random seed for each iteration. // Picks a new random seed for each iteration.
random_seed_ = GetNextRandomSeed(random_seed_); random_seed_ = GetNextRandomSeed(random_seed_);
} }
} }
repeater->OnTestProgramEnd(*parent_); repeater->OnTestProgramEnd(*parent_);
if (!gtest_is_initialized_before_run_all_tests) { if (!gtest_is_initialized_before_run_all_tests) {
ColoredPrintf( ColoredPrintf(
COLOR_RED, GTestColor::kRed,
"\nIMPORTANT NOTICE - DO NOT IGNORE:\n" "\nIMPORTANT NOTICE - DO NOT IGNORE:\n"
"This test program did NOT call " GTEST_INIT_GOOGLE_TEST_NAME_ "This test program did NOT call " GTEST_INIT_GOOGLE_TEST_NAME_
"() before calling RUN_ALL_TESTS(). This is INVALID. Soon " GTEST_NAME_ "() before calling RUN_ALL_TESTS(). This is INVALID. Soon " GTEST_NAME_
" will start to enforce the valid usage. " " will start to enforce the valid usage. "
"Please fix it ASAP, or IT WILL START TO FAIL.\n"); // NOLINT "Please fix it ASAP, or IT WILL START TO FAIL.\n"); // NOLINT
#if GTEST_FOR_GOOGLE_ #if GTEST_FOR_GOOGLE_
ColoredPrintf(COLOR_RED, ColoredPrintf(GTestColor::kRed,
"For more details, see http://wiki/Main/ValidGUnitMain.\n"); "For more details, see http://wiki/Main/ValidGUnitMain.\n");
#endif // GTEST_FOR_GOOGLE_ #endif // GTEST_FOR_GOOGLE_
} }
return !failed; return !failed;
} }
// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file // Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file
// if the variable is present. If a file already exists at this location, this // if the variable is present. If a file already exists at this location, this
// function will write over it. If the variable is present, but the file cannot // function will write over it. If the variable is present, but the file cannot
// be created, prints an error and exits. // be created, prints an error and exits.
void WriteToShardStatusFileIfNeeded() { void WriteToShardStatusFileIfNeeded() {
const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile); const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile);
if (test_shard_file != nullptr) { if (test_shard_file != nullptr) {
FILE* const file = posix::FOpen(test_shard_file, "w"); FILE* const file = posix::FOpen(test_shard_file, "w");
if (file == nullptr) { if (file == nullptr) {
ColoredPrintf(COLOR_RED, ColoredPrintf(GTestColor::kRed,
"Could not write to the test shard status file \"%s\" " "Could not write to the test shard status file \"%s\" "
"specified by the %s environment variable.\n", "specified by the %s environment variable.\n",
test_shard_file, kTestShardStatusFile); test_shard_file, kTestShardStatusFile);
fflush(stdout); fflush(stdout);
exit(EXIT_FAILURE); exit(EXIT_FAILURE);
} }
fclose(file); fclose(file);
} }
} }
skipping to change at line 5424 skipping to change at line 5955
// an error and exits. If in_subprocess_for_death_test, sharding is // an error and exits. If in_subprocess_for_death_test, sharding is
// disabled because it must only be applied to the original test // disabled because it must only be applied to the original test
// process. Otherwise, we could filter out death tests we intended to execute. // process. Otherwise, we could filter out death tests we intended to execute.
bool ShouldShard(const char* total_shards_env, bool ShouldShard(const char* total_shards_env,
const char* shard_index_env, const char* shard_index_env,
bool in_subprocess_for_death_test) { bool in_subprocess_for_death_test) {
if (in_subprocess_for_death_test) { if (in_subprocess_for_death_test) {
return false; return false;
} }
const Int32 total_shards = Int32FromEnvOrDie(total_shards_env, -1); const int32_t total_shards = Int32FromEnvOrDie(total_shards_env, -1);
const Int32 shard_index = Int32FromEnvOrDie(shard_index_env, -1); const int32_t shard_index = Int32FromEnvOrDie(shard_index_env, -1);
if (total_shards == -1 && shard_index == -1) { if (total_shards == -1 && shard_index == -1) {
return false; return false;
} else if (total_shards == -1 && shard_index != -1) { } else if (total_shards == -1 && shard_index != -1) {
const Message msg = Message() const Message msg = Message()
<< "Invalid environment variables: you have " << "Invalid environment variables: you have "
<< kTestShardIndex << " = " << shard_index << kTestShardIndex << " = " << shard_index
<< ", but have left " << kTestTotalShards << " unset.\n"; << ", but have left " << kTestTotalShards << " unset.\n";
ColoredPrintf(COLOR_RED, "%s", msg.GetString().c_str()); ColoredPrintf(GTestColor::kRed, "%s", msg.GetString().c_str());
fflush(stdout); fflush(stdout);
exit(EXIT_FAILURE); exit(EXIT_FAILURE);
} else if (total_shards != -1 && shard_index == -1) { } else if (total_shards != -1 && shard_index == -1) {
const Message msg = Message() const Message msg = Message()
<< "Invalid environment variables: you have " << "Invalid environment variables: you have "
<< kTestTotalShards << " = " << total_shards << kTestTotalShards << " = " << total_shards
<< ", but have left " << kTestShardIndex << " unset.\n"; << ", but have left " << kTestShardIndex << " unset.\n";
ColoredPrintf(COLOR_RED, "%s", msg.GetString().c_str()); ColoredPrintf(GTestColor::kRed, "%s", msg.GetString().c_str());
fflush(stdout); fflush(stdout);
exit(EXIT_FAILURE); exit(EXIT_FAILURE);
} else if (shard_index < 0 || shard_index >= total_shards) { } else if (shard_index < 0 || shard_index >= total_shards) {
const Message msg = Message() const Message msg = Message()
<< "Invalid environment variables: we require 0 <= " << "Invalid environment variables: we require 0 <= "
<< kTestShardIndex << " < " << kTestTotalShards << kTestShardIndex << " < " << kTestTotalShards
<< ", but you have " << kTestShardIndex << "=" << shard_index << ", but you have " << kTestShardIndex << "=" << shard_index
<< ", " << kTestTotalShards << "=" << total_shards << ".\n"; << ", " << kTestTotalShards << "=" << total_shards << ".\n";
ColoredPrintf(COLOR_RED, "%s", msg.GetString().c_str()); ColoredPrintf(GTestColor::kRed, "%s", msg.GetString().c_str());
fflush(stdout); fflush(stdout);
exit(EXIT_FAILURE); exit(EXIT_FAILURE);
} }
return total_shards > 1; return total_shards > 1;
} }
// Parses the environment variable var as an Int32. If it is unset, // Parses the environment variable var as an Int32. If it is unset,
// returns default_val. If it is not an Int32, prints an error // returns default_val. If it is not an Int32, prints an error
// and aborts. // and aborts.
Int32 Int32FromEnvOrDie(const char* var, Int32 default_val) { int32_t Int32FromEnvOrDie(const char* var, int32_t default_val) {
const char* str_val = posix::GetEnv(var); const char* str_val = posix::GetEnv(var);
if (str_val == nullptr) { if (str_val == nullptr) {
return default_val; return default_val;
} }
Int32 result; int32_t result;
if (!ParseInt32(Message() << "The value of environment variable " << var, if (!ParseInt32(Message() << "The value of environment variable " << var,
str_val, &result)) { str_val, &result)) {
exit(EXIT_FAILURE); exit(EXIT_FAILURE);
} }
return result; return result;
} }
// Given the total number of shards, the shard index, and the test id, // Given the total number of shards, the shard index, and the test id,
// returns true if and only if the test should be run on this shard. The test id // returns true if and only if the test should be run on this shard. The test id
// is some arbitrary but unique non-negative integer assigned to each test // is some arbitrary but unique non-negative integer assigned to each test
skipping to change at line 5492 skipping to change at line 6023
} }
// Compares the name of each test with the user-specified filter to // Compares the name of each test with the user-specified filter to
// decide whether the test should be run, then records the result in // decide whether the test should be run, then records the result in
// each TestSuite and TestInfo object. // each TestSuite and TestInfo object.
// If shard_tests == true, further filters tests based on sharding // If shard_tests == true, further filters tests based on sharding
// variables in the environment - see // variables in the environment - see
// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md // https://github.com/google/googletest/blob/master/googletest/docs/advanced.md
// . Returns the number of tests that should run. // . Returns the number of tests that should run.
int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) { int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) {
const Int32 total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ? const int32_t total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ?
Int32FromEnvOrDie(kTestTotalShards, -1) : -1; Int32FromEnvOrDie(kTestTotalShards, -1) : -1;
const Int32 shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ? const int32_t shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ?
Int32FromEnvOrDie(kTestShardIndex, -1) : -1; Int32FromEnvOrDie(kTestShardIndex, -1) : -1;
// num_runnable_tests are the number of tests that will // num_runnable_tests are the number of tests that will
// run across all shards (i.e., match filter and are not disabled). // run across all shards (i.e., match filter and are not disabled).
// num_selected_tests are the number of tests to be run on // num_selected_tests are the number of tests to be run on
// this shard. // this shard.
int num_runnable_tests = 0; int num_runnable_tests = 0;
int num_selected_tests = 0; int num_selected_tests = 0;
for (auto* test_suite : test_suites_) { for (auto* test_suite : test_suites_) {
const std::string& test_suite_name = test_suite->name(); const std::string& test_suite_name = test_suite->name();
skipping to change at line 5783 skipping to change at line 6314
const char* const value_str = ParseFlagValue(str, flag, true); const char* const value_str = ParseFlagValue(str, flag, true);
// Aborts if the parsing failed. // Aborts if the parsing failed.
if (value_str == nullptr) return false; if (value_str == nullptr) return false;
// Converts the string value to a bool. // Converts the string value to a bool.
*value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F'); *value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F');
return true; return true;
} }
// Parses a string for an Int32 flag, in the form of // Parses a string for an int32_t flag, in the form of "--flag=value".
// "--flag=value".
// //
// On success, stores the value of the flag in *value, and returns // On success, stores the value of the flag in *value, and returns
// true. On failure, returns false without changing *value. // true. On failure, returns false without changing *value.
bool ParseInt32Flag(const char* str, const char* flag, Int32* value) { bool ParseInt32Flag(const char* str, const char* flag, int32_t* value) {
// Gets the value of the flag as a string. // Gets the value of the flag as a string.
const char* const value_str = ParseFlagValue(str, flag, false); const char* const value_str = ParseFlagValue(str, flag, false);
// Aborts if the parsing failed. // Aborts if the parsing failed.
if (value_str == nullptr) return false; if (value_str == nullptr) return false;
// Sets *value to the value of the flag. // Sets *value to the value of the flag.
return ParseInt32(Message() << "The value of flag --" << flag, return ParseInt32(Message() << "The value of flag --" << flag,
value_str, value); value_str, value);
} }
// Parses a string for a string flag, in the form of // Parses a string for a string flag, in the form of "--flag=value".
// "--flag=value".
// //
// On success, stores the value of the flag in *value, and returns // On success, stores the value of the flag in *value, and returns
// true. On failure, returns false without changing *value. // true. On failure, returns false without changing *value.
template <typename String> template <typename String>
static bool ParseStringFlag(const char* str, const char* flag, String* value) { static bool ParseStringFlag(const char* str, const char* flag, String* value) {
// Gets the value of the flag as a string. // Gets the value of the flag as a string.
const char* const value_str = ParseFlagValue(str, flag, false); const char* const value_str = ParseFlagValue(str, flag, false);
// Aborts if the parsing failed. // Aborts if the parsing failed.
if (value_str == nullptr) return false; if (value_str == nullptr) return false;
skipping to change at line 5843 skipping to change at line 6372
// Prints a string containing code-encoded text. The following escape // Prints a string containing code-encoded text. The following escape
// sequences can be used in the string to control the text color: // sequences can be used in the string to control the text color:
// //
// @@ prints a single '@' character. // @@ prints a single '@' character.
// @R changes the color to red. // @R changes the color to red.
// @G changes the color to green. // @G changes the color to green.
// @Y changes the color to yellow. // @Y changes the color to yellow.
// @D changes to the default terminal text color. // @D changes to the default terminal text color.
// //
static void PrintColorEncoded(const char* str) { static void PrintColorEncoded(const char* str) {
GTestColor color = COLOR_DEFAULT; // The current color. GTestColor color = GTestColor::kDefault; // The current color.
// Conceptually, we split the string into segments divided by escape // Conceptually, we split the string into segments divided by escape
// sequences. Then we print one segment at a time. At the end of // sequences. Then we print one segment at a time. At the end of
// each iteration, the str pointer advances to the beginning of the // each iteration, the str pointer advances to the beginning of the
// next segment. // next segment.
for (;;) { for (;;) {
const char* p = strchr(str, '@'); const char* p = strchr(str, '@');
if (p == nullptr) { if (p == nullptr) {
ColoredPrintf(color, "%s", str); ColoredPrintf(color, "%s", str);
return; return;
} }
ColoredPrintf(color, "%s", std::string(str, p).c_str()); ColoredPrintf(color, "%s", std::string(str, p).c_str());
const char ch = p[1]; const char ch = p[1];
str = p + 2; str = p + 2;
if (ch == '@') { if (ch == '@') {
ColoredPrintf(color, "@"); ColoredPrintf(color, "@");
} else if (ch == 'D') { } else if (ch == 'D') {
color = COLOR_DEFAULT; color = GTestColor::kDefault;
} else if (ch == 'R') { } else if (ch == 'R') {
color = COLOR_RED; color = GTestColor::kRed;
} else if (ch == 'G') { } else if (ch == 'G') {
color = COLOR_GREEN; color = GTestColor::kGreen;
} else if (ch == 'Y') { } else if (ch == 'Y') {
color = COLOR_YELLOW; color = GTestColor::kYellow;
} else { } else {
--str; --str;
} }
} }
} }
static const char kColorEncodedHelpMessage[] = static const char kColorEncodedHelpMessage[] =
"This program contains tests written using " GTEST_NAME_ ". You can use the\n" "This program contains tests written using " GTEST_NAME_
"following command line flags to control its behavior:\n" ". You can use the\n"
"\n" "following command line flags to control its behavior:\n"
"Test Selection:\n" "\n"
" @G--" GTEST_FLAG_PREFIX_ "list_tests@D\n" "Test Selection:\n"
" List the names of all tests instead of running them. The name of\n" " @G--" GTEST_FLAG_PREFIX_
" TEST(Foo, Bar) is \"Foo.Bar\".\n" "list_tests@D\n"
" @G--" GTEST_FLAG_PREFIX_ "filter=@YPOSTIVE_PATTERNS" " List the names of all tests instead of running them. The name of\n"
" TEST(Foo, Bar) is \"Foo.Bar\".\n"
" @G--" GTEST_FLAG_PREFIX_
"filter=@YPOSITIVE_PATTERNS"
"[@G-@YNEGATIVE_PATTERNS]@D\n" "[@G-@YNEGATIVE_PATTERNS]@D\n"
" Run only the tests whose name matches one of the positive patterns but\n" " Run only the tests whose name matches one of the positive patterns "
" none of the negative patterns. '?' matches any single character; '*'\n" "but\n"
" matches any substring; ':' separates two patterns.\n" " none of the negative patterns. '?' matches any single character; "
" @G--" GTEST_FLAG_PREFIX_ "also_run_disabled_tests@D\n" "'*'\n"
" Run all disabled tests too.\n" " matches any substring; ':' separates two patterns.\n"
"\n" " @G--" GTEST_FLAG_PREFIX_
"Test Execution:\n" "also_run_disabled_tests@D\n"
" @G--" GTEST_FLAG_PREFIX_ "repeat=@Y[COUNT]@D\n" " Run all disabled tests too.\n"
" Run the tests repeatedly; use a negative count to repeat forever.\n" "\n"
" @G--" GTEST_FLAG_PREFIX_ "shuffle@D\n" "Test Execution:\n"
" Randomize tests' orders on every iteration.\n" " @G--" GTEST_FLAG_PREFIX_
" @G--" GTEST_FLAG_PREFIX_ "random_seed=@Y[NUMBER]@D\n" "repeat=@Y[COUNT]@D\n"
" Random number seed to use for shuffling test orders (between 1 and\n" " Run the tests repeatedly; use a negative count to repeat forever.\n"
" 99999, or 0 to use a seed based on the current time).\n" " @G--" GTEST_FLAG_PREFIX_
"\n" "shuffle@D\n"
"Test Output:\n" " Randomize tests' orders on every iteration.\n"
" @G--" GTEST_FLAG_PREFIX_ "color=@Y(@Gyes@Y|@Gno@Y|@Gauto@Y)@D\n" " @G--" GTEST_FLAG_PREFIX_
" Enable/disable colored output. The default is @Gauto@D.\n" "random_seed=@Y[NUMBER]@D\n"
" -@G-" GTEST_FLAG_PREFIX_ "print_time=0@D\n" " Random number seed to use for shuffling test orders (between 1 and\n"
" Don't print the elapsed time of each test.\n" " 99999, or 0 to use a seed based on the current time).\n"
" @G--" GTEST_FLAG_PREFIX_ "output=@Y(@Gjson@Y|@Gxml@Y)[@G:@YDIRECTORY_PATH@G" "\n"
GTEST_PATH_SEP_ "@Y|@G:@YFILE_PATH]@D\n" "Test Output:\n"
" Generate a JSON or XML report in the given directory or with the given\n" " @G--" GTEST_FLAG_PREFIX_
" file name. @YFILE_PATH@D defaults to @Gtest_detail.xml@D.\n" "color=@Y(@Gyes@Y|@Gno@Y|@Gauto@Y)@D\n"
" Enable/disable colored output. The default is @Gauto@D.\n"
" @G--" GTEST_FLAG_PREFIX_
"brief=1@D\n"
" Only print test failures.\n"
" @G--" GTEST_FLAG_PREFIX_
"print_time=0@D\n"
" Don't print the elapsed time of each test.\n"
" @G--" GTEST_FLAG_PREFIX_
"output=@Y(@Gjson@Y|@Gxml@Y)[@G:@YDIRECTORY_PATH@G" GTEST_PATH_SEP_
"@Y|@G:@YFILE_PATH]@D\n"
" Generate a JSON or XML report in the given directory or with the "
"given\n"
" file name. @YFILE_PATH@D defaults to @Gtest_detail.xml@D.\n"
# if GTEST_CAN_STREAM_RESULTS_ # if GTEST_CAN_STREAM_RESULTS_
" @G--" GTEST_FLAG_PREFIX_ "stream_result_to=@YHOST@G:@YPORT@D\n" " @G--" GTEST_FLAG_PREFIX_
" Stream test results to the given server.\n" "stream_result_to=@YHOST@G:@YPORT@D\n"
" Stream test results to the given server.\n"
# endif // GTEST_CAN_STREAM_RESULTS_ # endif // GTEST_CAN_STREAM_RESULTS_
"\n" "\n"
"Assertion Behavior:\n" "Assertion Behavior:\n"
# if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS # if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
" @G--" GTEST_FLAG_PREFIX_ "death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\n" " @G--" GTEST_FLAG_PREFIX_
" Set the default death test style.\n" "death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\n"
" Set the default death test style.\n"
# endif // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS # endif // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
" @G--" GTEST_FLAG_PREFIX_ "break_on_failure@D\n" " @G--" GTEST_FLAG_PREFIX_
" Turn assertion failures into debugger break-points.\n" "break_on_failure@D\n"
" @G--" GTEST_FLAG_PREFIX_ "throw_on_failure@D\n" " Turn assertion failures into debugger break-points.\n"
" Turn assertion failures into C++ exceptions for use by an external\n" " @G--" GTEST_FLAG_PREFIX_
" test framework.\n" "throw_on_failure@D\n"
" @G--" GTEST_FLAG_PREFIX_ "catch_exceptions=0@D\n" " Turn assertion failures into C++ exceptions for use by an external\n"
" Do not report exceptions as test failures. Instead, allow them\n" " test framework.\n"
" to crash the program or throw a pop-up (on Windows).\n" " @G--" GTEST_FLAG_PREFIX_
"\n" "catch_exceptions=0@D\n"
"Except for @G--" GTEST_FLAG_PREFIX_ "list_tests@D, you can alternatively set " " Do not report exceptions as test failures. Instead, allow them\n"
" to crash the program or throw a pop-up (on Windows).\n"
"\n"
"Except for @G--" GTEST_FLAG_PREFIX_
"list_tests@D, you can alternatively set "
"the corresponding\n" "the corresponding\n"
"environment variable of a flag (all letters in upper-case). For example, to\n" "environment variable of a flag (all letters in upper-case). For example, "
"disable colored text output, you can either specify @G--" GTEST_FLAG_PREFIX_ "to\n"
"disable colored text output, you can either specify "
"@G--" GTEST_FLAG_PREFIX_
"color=no@D or set\n" "color=no@D or set\n"
"the @G" GTEST_FLAG_PREFIX_UPPER_ "COLOR@D environment variable to @Gno@D.\n" "the @G" GTEST_FLAG_PREFIX_UPPER_
"\n" "COLOR@D environment variable to @Gno@D.\n"
"For more information, please read the " GTEST_NAME_ " documentation at\n" "\n"
"@G" GTEST_PROJECT_URL_ "@D. If you find a bug in " GTEST_NAME_ "\n" "For more information, please read the " GTEST_NAME_
"(not one in your own code or tests), please report it to\n" " documentation at\n"
"@G<" GTEST_DEV_EMAIL_ ">@D.\n"; "@G" GTEST_PROJECT_URL_ "@D. If you find a bug in " GTEST_NAME_
"\n"
"(not one in your own code or tests), please report it to\n"
"@G<" GTEST_DEV_EMAIL_ ">@D.\n";
static bool ParseGoogleTestFlag(const char* const arg) { static bool ParseGoogleTestFlag(const char* const arg) {
return ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag, return ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag,
&GTEST_FLAG(also_run_disabled_tests)) || &GTEST_FLAG(also_run_disabled_tests)) ||
ParseBoolFlag(arg, kBreakOnFailureFlag, ParseBoolFlag(arg, kBreakOnFailureFlag,
&GTEST_FLAG(break_on_failure)) || &GTEST_FLAG(break_on_failure)) ||
ParseBoolFlag(arg, kCatchExceptionsFlag, ParseBoolFlag(arg, kCatchExceptionsFlag,
&GTEST_FLAG(catch_exceptions)) || &GTEST_FLAG(catch_exceptions)) ||
ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) || ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||
ParseStringFlag(arg, kDeathTestStyleFlag, ParseStringFlag(arg, kDeathTestStyleFlag,
&GTEST_FLAG(death_test_style)) || &GTEST_FLAG(death_test_style)) ||
ParseBoolFlag(arg, kDeathTestUseFork, ParseBoolFlag(arg, kDeathTestUseFork,
&GTEST_FLAG(death_test_use_fork)) || &GTEST_FLAG(death_test_use_fork)) ||
ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) || ParseBoolFlag(arg, kFailFast, &GTEST_FLAG(fail_fast)) ||
ParseStringFlag(arg, kInternalRunDeathTestFlag, ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||
&GTEST_FLAG(internal_run_death_test)) || ParseStringFlag(arg, kInternalRunDeathTestFlag,
ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) || &GTEST_FLAG(internal_run_death_test)) ||
ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) || ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) || ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
ParseBoolFlag(arg, kPrintUTF8Flag, &GTEST_FLAG(print_utf8)) || ParseBoolFlag(arg, kBriefFlag, &GTEST_FLAG(brief)) ||
ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) || ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) || ParseBoolFlag(arg, kPrintUTF8Flag, &GTEST_FLAG(print_utf8)) ||
ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) || ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||
ParseInt32Flag(arg, kStackTraceDepthFlag, ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||
&GTEST_FLAG(stack_trace_depth)) || ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
ParseStringFlag(arg, kStreamResultToFlag, ParseInt32Flag(arg, kStackTraceDepthFlag,
&GTEST_FLAG(stream_result_to)) || &GTEST_FLAG(stack_trace_depth)) ||
ParseBoolFlag(arg, kThrowOnFailureFlag, ParseStringFlag(arg, kStreamResultToFlag,
&GTEST_FLAG(throw_on_failure)); &GTEST_FLAG(stream_result_to)) ||
ParseBoolFlag(arg, kThrowOnFailureFlag, &GTEST_FLAG(throw_on_failure));
} }
#if GTEST_USE_OWN_FLAGFILE_FLAG_ #if GTEST_USE_OWN_FLAGFILE_FLAG_
static void LoadFlagsFromFile(const std::string& path) { static void LoadFlagsFromFile(const std::string& path) {
FILE* flagfile = posix::FOpen(path.c_str(), "r"); FILE* flagfile = posix::FOpen(path.c_str(), "r");
if (!flagfile) { if (!flagfile) {
GTEST_LOG_(FATAL) << "Unable to open file \"" << GTEST_FLAG(flagfile) GTEST_LOG_(FATAL) << "Unable to open file \"" << GTEST_FLAG(flagfile)
<< "\""; << "\"";
} }
std::string contents(ReadEntireFile(flagfile)); std::string contents(ReadEntireFile(flagfile));
skipping to change at line 6138 skipping to change at line 6695
#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(&argc, argv); GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(&argc, argv);
#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) #else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
internal::InitGoogleTestImpl(&argc, argv); internal::InitGoogleTestImpl(&argc, argv);
#endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
} }
// Returns the directory for temporary files, ending with a path separator.
//
// Precedence: an embedder-supplied GTEST_CUSTOM_TEMPDIR_FUNCTION_, then a
// per-platform choice. Fixes the old version's unconditional fall-through
// after the custom-function branch (the custom #if was closed with #endif
// and followed by a second, always-compiled #if chain) by using a single
// #elif chain, and replaces the hard-coded Android "/sdcard/" (frequently
// not writable) with TEST_TMPDIR / "/data/local/tmp/". Linux now also
// honors the TEST_TMPDIR environment variable.
std::string TempDir() {
#if defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_)
  return GTEST_CUSTOM_TEMPDIR_FUNCTION_();
#elif GTEST_OS_WINDOWS_MOBILE
  return "\\temp\\";
#elif GTEST_OS_WINDOWS
  const char* temp_dir = internal::posix::GetEnv("TEMP");
  if (temp_dir == nullptr || temp_dir[0] == '\0') {
    // TEMP unset or empty: fall back to a fixed location.
    return "\\temp\\";
  } else if (temp_dir[strlen(temp_dir) - 1] == '\\') {
    return temp_dir;
  } else {
    // Ensure the result ends with a path separator.
    return std::string(temp_dir) + "\\";
  }
#elif GTEST_OS_LINUX_ANDROID
  const char* temp_dir = internal::posix::GetEnv("TEST_TMPDIR");
  if (temp_dir == nullptr || temp_dir[0] == '\0') {
    return "/data/local/tmp/";
  } else {
    return temp_dir;
  }
#elif GTEST_OS_LINUX
  const char* temp_dir = internal::posix::GetEnv("TEST_TMPDIR");
  if (temp_dir == nullptr || temp_dir[0] == '\0') {
    return "/tmp/";
  } else {
    return temp_dir;
  }
#else
  return "/tmp/";
#endif  // GTEST_OS_WINDOWS_MOBILE
}
// Class ScopedTrace // Class ScopedTrace
// Pushes the given source file location and message onto a per-thread // Pushes the given source file location and message onto a per-thread
// trace stack maintained by Google Test. // trace stack maintained by Google Test.
void ScopedTrace::PushTrace(const char* file, int line, std::string message) { void ScopedTrace::PushTrace(const char* file, int line, std::string message) {
 End of changes. 154 change blocks. 
337 lines changed or deleted 905 lines changed or added

Home  |  About  |  Features  |  All  |  Newest  |  Dox  |  Diffs  |  RSS Feeds  |  Screenshots  |  Comments  |  Imprint  |  Privacy  |  HTTP(S)