Diffstat (limited to 'googletest')
-rw-r--r--  googletest/docs/advanced.md                       |   9
-rw-r--r--  googletest/include/gtest/gtest.h                  |  10
-rw-r--r--  googletest/src/gtest-internal-inl.h               |   4
-rw-r--r--  googletest/src/gtest.cc                           | 130
-rw-r--r--  googletest/test/BUILD.bazel                       |  16
-rwxr-xr-x  googletest/test/googletest-env-var-test.py        |   2
-rw-r--r--  googletest/test/googletest-env-var-test_.cc       |   5
-rwxr-xr-x  googletest/test/googletest-failfast-unittest.py   | 410
-rw-r--r--  googletest/test/googletest-failfast-unittest_.cc  | 167
-rw-r--r--  googletest/test/gtest_unittest.cc                 |  85
10 files changed, 784 insertions(+), 54 deletions(-)
diff --git a/googletest/docs/advanced.md b/googletest/docs/advanced.md
index 5677643..1295c9d 100644
--- a/googletest/docs/advanced.md
+++ b/googletest/docs/advanced.md
@@ -2116,6 +2116,15 @@ For example:
everything in test suite `FooTest` except `FooTest.Bar` and everything in
test suite `BarTest` except `BarTest.Foo`.
+#### Stop test execution upon first failure
+
+By default, a googletest program runs all tests the user has defined. In some
+cases (e.g. iterative test development and execution) it may be desirable to
+stop test execution upon the first failure (trading completeness for improved
+latency). If the `GTEST_FAIL_FAST` environment variable or the
+`--gtest_fail_fast` flag is set, the test runner will stop execution as soon
+as the first test failure is found.
+
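For illustration only (an editorial sketch, not part of this patch): the patch confirms the `--gtest_fail_fast` flag, the `GTEST_FAIL_FAST` environment variable, and the `GTEST_FLAG(fail_fast)` accessor; the test and binary names below are hypothetical.

```cpp
// Minimal sketch of enabling fail-fast behavior. Equivalent invocations:
//   $ ./fail_fast_demo --gtest_fail_fast      (command-line flag)
//   $ GTEST_FAIL_FAST=1 ./fail_fast_demo      (environment variable)
#include "gtest/gtest.h"

TEST(FailFastDemo, Fails) { FAIL() << "First failure stops the run."; }
TEST(FailFastDemo, NeverRuns) {}  // Reported as skipped once fail_fast triggers.

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  // Programmatic equivalent of passing --gtest_fail_fast on the command line.
  ::testing::GTEST_FLAG(fail_fast) = true;
  return RUN_ALL_TESTS();
}
```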
#### Temporarily Disabling Tests
If you have a broken test that you cannot fix right away, you can add the
diff --git a/googletest/include/gtest/gtest.h b/googletest/include/gtest/gtest.h
index 0139fcc..deacb27 100644
--- a/googletest/include/gtest/gtest.h
+++ b/googletest/include/gtest/gtest.h
@@ -101,6 +101,10 @@ GTEST_DECLARE_bool_(catch_exceptions);
// to let Google Test decide.
GTEST_DECLARE_string_(color);
+// This flag controls whether the test runner should continue execution past
+// the first failure.
+GTEST_DECLARE_bool_(fail_fast);
+
// This flag sets up the filter to select by name using a glob pattern
// the tests to run. If the filter is not given all tests are executed.
GTEST_DECLARE_string_(filter);
@@ -795,6 +799,9 @@ class GTEST_API_ TestInfo {
// deletes it.
void Run();
+ // Skips and records a skipped test result for this object.
+ void Skip();
+
static void ClearTestResult(TestInfo* test_info) {
test_info->result_.Clear();
}
@@ -943,6 +950,9 @@ class GTEST_API_ TestSuite {
// Runs every test in this TestSuite.
void Run();
+ // Skips the execution of tests under this TestSuite
+ void Skip();
+
// Runs SetUpTestSuite() for this TestSuite. This wrapper is needed
// for catching exceptions thrown from SetUpTestSuite().
void RunSetUpTestSuite() {
diff --git a/googletest/src/gtest-internal-inl.h b/googletest/src/gtest-internal-inl.h
index e42ff47..75ec352 100644
--- a/googletest/src/gtest-internal-inl.h
+++ b/googletest/src/gtest-internal-inl.h
@@ -84,6 +84,7 @@ const char kAlsoRunDisabledTestsFlag[] = "also_run_disabled_tests";
const char kBreakOnFailureFlag[] = "break_on_failure";
const char kCatchExceptionsFlag[] = "catch_exceptions";
const char kColorFlag[] = "color";
+const char kFailFast[] = "fail_fast";
const char kFilterFlag[] = "filter";
const char kListTestsFlag[] = "list_tests";
const char kOutputFlag[] = "output";
@@ -164,6 +165,7 @@ class GTestFlagSaver {
color_ = GTEST_FLAG(color);
death_test_style_ = GTEST_FLAG(death_test_style);
death_test_use_fork_ = GTEST_FLAG(death_test_use_fork);
+ fail_fast_ = GTEST_FLAG(fail_fast);
filter_ = GTEST_FLAG(filter);
internal_run_death_test_ = GTEST_FLAG(internal_run_death_test);
list_tests_ = GTEST_FLAG(list_tests);
@@ -187,6 +189,7 @@ class GTestFlagSaver {
GTEST_FLAG(death_test_style) = death_test_style_;
GTEST_FLAG(death_test_use_fork) = death_test_use_fork_;
GTEST_FLAG(filter) = filter_;
+ GTEST_FLAG(fail_fast) = fail_fast_;
GTEST_FLAG(internal_run_death_test) = internal_run_death_test_;
GTEST_FLAG(list_tests) = list_tests_;
GTEST_FLAG(output) = output_;
@@ -208,6 +211,7 @@ class GTestFlagSaver {
std::string color_;
std::string death_test_style_;
bool death_test_use_fork_;
+ bool fail_fast_;
std::string filter_;
std::string internal_run_death_test_;
bool list_tests_;
diff --git a/googletest/src/gtest.cc b/googletest/src/gtest.cc
index 2eb2ecb..4c8b42f 100644
--- a/googletest/src/gtest.cc
+++ b/googletest/src/gtest.cc
@@ -213,6 +213,21 @@ static const char* GetDefaultFilter() {
return kUniversalFilter;
}
+// Bazel passes in the argument to '--test_runner_fail_fast' via the
+// TESTBRIDGE_TEST_RUNNER_FAIL_FAST environment variable.
+static bool GetDefaultFailFast() {
+ const char* const testbridge_test_runner_fail_fast =
+ internal::posix::GetEnv("TESTBRIDGE_TEST_RUNNER_FAIL_FAST");
+ if (testbridge_test_runner_fail_fast != nullptr) {
+ return strcmp(testbridge_test_runner_fail_fast, "1") == 0;
+ }
+ return false;
+}
+
+GTEST_DEFINE_bool_(
+ fail_fast, internal::BoolFromGTestEnv("fail_fast", GetDefaultFailFast()),
+ "True if and only if a test failure should stop further test execution.");
+
GTEST_DEFINE_bool_(
also_run_disabled_tests,
internal::BoolFromGTestEnv("also_run_disabled_tests", false),
@@ -2863,6 +2878,28 @@ void TestInfo::Run() {
impl->set_current_test_info(nullptr);
}
+// Skips and records a skipped test result for this object.
+void TestInfo::Skip() {
+ if (!should_run_) return;
+
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ impl->set_current_test_info(this);
+
+ TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+
+ // Notifies the unit test event listeners that a test is about to start.
+ repeater->OnTestStart(*this);
+
+ const TestPartResult test_part_result =
+ TestPartResult(TestPartResult::kSkip, this->file(), this->line(), "");
+ impl->GetTestPartResultReporterForCurrentThread()->ReportTestPartResult(
+ test_part_result);
+
+ // Notifies the unit test event listener that a test has just finished.
+ repeater->OnTestEnd(*this);
+ impl->set_current_test_info(nullptr);
+}
+
// class TestSuite
// Gets the number of successful tests in this test suite.
@@ -2975,6 +3012,12 @@ void TestSuite::Run() {
start_timestamp_ = internal::GetTimeInMillis();
for (int i = 0; i < total_test_count(); i++) {
GetMutableTestInfo(i)->Run();
+ if (GTEST_FLAG(fail_fast) && GetMutableTestInfo(i)->result()->Failed()) {
+ for (int j = i + 1; j < total_test_count(); j++) {
+ GetMutableTestInfo(j)->Skip();
+ }
+ break;
+ }
}
elapsed_time_ = internal::GetTimeInMillis() - start_timestamp_;
@@ -2992,6 +3035,36 @@ void TestSuite::Run() {
impl->set_current_test_suite(nullptr);
}
+// Skips all tests under this TestSuite.
+void TestSuite::Skip() {
+ if (!should_run_) return;
+
+ internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
+ impl->set_current_test_suite(this);
+
+ TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+
+ // Call both legacy and the new API
+ repeater->OnTestSuiteStart(*this);
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI
+ repeater->OnTestCaseStart(*this);
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI
+
+ for (int i = 0; i < total_test_count(); i++) {
+ GetMutableTestInfo(i)->Skip();
+ }
+
+ // Call both legacy and the new API
+ repeater->OnTestSuiteEnd(*this);
+ // Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI
+ repeater->OnTestCaseEnd(*this);
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI
+
+ impl->set_current_test_suite(nullptr);
+}
+
// Clears the results of all tests in this test suite.
void TestSuite::ClearResult() {
ad_hoc_test_result_.Clear();
@@ -5523,6 +5596,13 @@ bool UnitTestImpl::RunAllTests() {
for (int test_index = 0; test_index < total_test_suite_count();
test_index++) {
GetMutableSuiteCase(test_index)->Run();
+ if (GTEST_FLAG(fail_fast) &&
+ GetMutableSuiteCase(test_index)->Failed()) {
+ for (int j = test_index + 1; j < total_test_suite_count(); j++) {
+ GetMutableSuiteCase(j)->Skip();
+ }
+ break;
+ }
}
}
@@ -6127,31 +6207,31 @@ static const char kColorEncodedHelpMessage[] =
static bool ParseGoogleTestFlag(const char* const arg) {
return ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag,
&GTEST_FLAG(also_run_disabled_tests)) ||
- ParseBoolFlag(arg, kBreakOnFailureFlag,
- &GTEST_FLAG(break_on_failure)) ||
- ParseBoolFlag(arg, kCatchExceptionsFlag,
- &GTEST_FLAG(catch_exceptions)) ||
- ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||
- ParseStringFlag(arg, kDeathTestStyleFlag,
- &GTEST_FLAG(death_test_style)) ||
- ParseBoolFlag(arg, kDeathTestUseFork,
- &GTEST_FLAG(death_test_use_fork)) ||
- ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||
- ParseStringFlag(arg, kInternalRunDeathTestFlag,
- &GTEST_FLAG(internal_run_death_test)) ||
- ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
- ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
- ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
- ParseBoolFlag(arg, kPrintUTF8Flag, &GTEST_FLAG(print_utf8)) ||
- ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||
- ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||
- ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
- ParseInt32Flag(arg, kStackTraceDepthFlag,
- &GTEST_FLAG(stack_trace_depth)) ||
- ParseStringFlag(arg, kStreamResultToFlag,
- &GTEST_FLAG(stream_result_to)) ||
- ParseBoolFlag(arg, kThrowOnFailureFlag,
- &GTEST_FLAG(throw_on_failure));
+ ParseBoolFlag(arg, kBreakOnFailureFlag,
+ &GTEST_FLAG(break_on_failure)) ||
+ ParseBoolFlag(arg, kCatchExceptionsFlag,
+ &GTEST_FLAG(catch_exceptions)) ||
+ ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||
+ ParseStringFlag(arg, kDeathTestStyleFlag,
+ &GTEST_FLAG(death_test_style)) ||
+ ParseBoolFlag(arg, kDeathTestUseFork,
+ &GTEST_FLAG(death_test_use_fork)) ||
+ ParseBoolFlag(arg, kFailFast, &GTEST_FLAG(fail_fast)) ||
+ ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||
+ ParseStringFlag(arg, kInternalRunDeathTestFlag,
+ &GTEST_FLAG(internal_run_death_test)) ||
+ ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
+ ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
+ ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
+ ParseBoolFlag(arg, kPrintUTF8Flag, &GTEST_FLAG(print_utf8)) ||
+ ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||
+ ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||
+ ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
+ ParseInt32Flag(arg, kStackTraceDepthFlag,
+ &GTEST_FLAG(stack_trace_depth)) ||
+ ParseStringFlag(arg, kStreamResultToFlag,
+ &GTEST_FLAG(stream_result_to)) ||
+ ParseBoolFlag(arg, kThrowOnFailureFlag, &GTEST_FLAG(throw_on_failure));
}
#if GTEST_USE_OWN_FLAGFILE_FLAG_
diff --git a/googletest/test/BUILD.bazel b/googletest/test/BUILD.bazel
index dbbb5c0..e24f5f2 100644
--- a/googletest/test/BUILD.bazel
+++ b/googletest/test/BUILD.bazel
@@ -58,6 +58,7 @@ cc_test(
"googletest-catch-exceptions-test_.cc",
"googletest-color-test_.cc",
"googletest-env-var-test_.cc",
+ "googletest-failfast-unittest_.cc",
"googletest-filter-unittest_.cc",
"googletest-break-on-failure-unittest_.cc",
"googletest-listener-test.cc",
@@ -223,6 +224,21 @@ py_test(
)
cc_binary(
+ name = "googletest-failfast-unittest_",
+ testonly = 1,
+ srcs = ["googletest-failfast-unittest_.cc"],
+ deps = ["//:gtest"],
+)
+
+py_test(
+ name = "googletest-failfast-unittest",
+ size = "medium",
+ srcs = ["googletest-failfast-unittest.py"],
+ data = [":googletest-failfast-unittest_"],
+ deps = [":gtest_test_utils"],
+)
+
+cc_binary(
name = "googletest-filter-unittest_",
testonly = 1,
srcs = ["googletest-filter-unittest_.cc"],
diff --git a/googletest/test/googletest-env-var-test.py b/googletest/test/googletest-env-var-test.py
index 2f0e406..ce4d2a0 100755
--- a/googletest/test/googletest-env-var-test.py
+++ b/googletest/test/googletest-env-var-test.py
@@ -85,6 +85,8 @@ class GTestEnvVarTest(gtest_test_utils.TestCase):
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
+ SetEnvVar('TESTBRIDGE_TEST_RUNNER_FAIL_FAST', None) # For 'fail_fast' test
+ TestFlag('fail_fast', '1', '0')
TestFlag('filter', 'FooTest.Bar', '*')
SetEnvVar('XML_OUTPUT_FILE', None) # For 'output' test
TestFlag('output', 'xml:tmp/foo.xml', '')
diff --git a/googletest/test/googletest-env-var-test_.cc b/googletest/test/googletest-env-var-test_.cc
index fd2aa82..66d1871 100644
--- a/googletest/test/googletest-env-var-test_.cc
+++ b/googletest/test/googletest-env-var-test_.cc
@@ -72,6 +72,11 @@ void PrintFlag(const char* flag) {
return;
}
+ if (strcmp(flag, "fail_fast") == 0) {
+ cout << GTEST_FLAG(fail_fast);
+ return;
+ }
+
if (strcmp(flag, "filter") == 0) {
cout << GTEST_FLAG(filter);
return;
diff --git a/googletest/test/googletest-failfast-unittest.py b/googletest/test/googletest-failfast-unittest.py
new file mode 100755
index 0000000..3aeb2df
--- /dev/null
+++ b/googletest/test/googletest-failfast-unittest.py
@@ -0,0 +1,410 @@
+#!/usr/bin/env python
+#
+# Copyright 2020 Google Inc. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit test for Google Test fail_fast.
+
+A user can specify whether a Google Test program should continue test
+execution after a test failure via the GTEST_FAIL_FAST environment variable or
+the --gtest_fail_fast flag. The default value of the flag can also be changed
+via the Bazel fail-fast environment variable TESTBRIDGE_TEST_RUNNER_FAIL_FAST.
+
+This script tests such functionality by invoking googletest-failfast-unittest_
+(a program written with Google Test) with different environments and command
+line flags.
+"""
+
+import os
+import gtest_test_utils
+
+# Constants.
+
+# Bazel testbridge environment variable for fail fast
+BAZEL_FAIL_FAST_ENV_VAR = 'TESTBRIDGE_TEST_RUNNER_FAIL_FAST'
+
+# The environment variable for specifying fail fast.
+FAIL_FAST_ENV_VAR = 'GTEST_FAIL_FAST'
+
+# The command line flag for specifying fail fast.
+FAIL_FAST_FLAG = 'gtest_fail_fast'
+
+# The command line flag to run disabled tests.
+RUN_DISABLED_FLAG = 'gtest_also_run_disabled_tests'
+
+# The command line flag for specifying a filter.
+FILTER_FLAG = 'gtest_filter'
+
+# Command to run the googletest-failfast-unittest_ program.
+COMMAND = gtest_test_utils.GetTestExecutablePath(
+ 'googletest-failfast-unittest_')
+
+# The command line flag to tell Google Test to output the list of tests it
+# will run.
+LIST_TESTS_FLAG = '--gtest_list_tests'
+
+# Indicates whether Google Test supports death tests.
+SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
+ [COMMAND, LIST_TESTS_FLAG]).output
+
+# Utilities.
+
+environ = os.environ.copy()
+
+
+def SetEnvVar(env_var, value):
+ """Sets the env variable to 'value'; unsets it when 'value' is None."""
+
+ if value is not None:
+ environ[env_var] = value
+ elif env_var in environ:
+ del environ[env_var]
+
+
+def RunAndReturnOutput(test_suite=None, fail_fast=None, run_disabled=False):
+ """Runs the test program and returns its output."""
+
+ args = []
+ xml_path = os.path.join(gtest_test_utils.GetTempDir(),
+ '.GTestFailFastUnitTest.xml')
+ args += ['--gtest_output=xml:' + xml_path]
+ if fail_fast is not None:
+ if isinstance(fail_fast, str):
+ args += ['--%s=%s' % (FAIL_FAST_FLAG, fail_fast)]
+ elif fail_fast:
+ args += ['--%s' % FAIL_FAST_FLAG]
+ else:
+ args += ['--no%s' % FAIL_FAST_FLAG]
+ if test_suite:
+ args += ['--%s=%s.*' % (FILTER_FLAG, test_suite)]
+ if run_disabled:
+ args += ['--%s' % RUN_DISABLED_FLAG]
+ txt_out = gtest_test_utils.Subprocess([COMMAND] + args, env=environ).output
+ with open(xml_path) as xml_file:
+ return txt_out, xml_file.read()
+
+
+# The unit test.
+class GTestFailFastUnitTest(gtest_test_utils.TestCase):
+ """Tests the env variable or the command line flag for fail_fast."""
+
+ def testDefaultBehavior(self):
+ """Tests the behavior of not specifying the fail_fast."""
+
+ txt, _ = RunAndReturnOutput()
+ self.assertIn('22 FAILED TEST', txt)
+
+ def testGoogletestFlag(self):
+ txt, _ = RunAndReturnOutput(test_suite='HasSimpleTest', fail_fast=True)
+ self.assertIn('1 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 3 tests', txt)
+
+ txt, _ = RunAndReturnOutput(test_suite='HasSimpleTest', fail_fast=False)
+ self.assertIn('4 FAILED TEST', txt)
+ self.assertNotIn('[ SKIPPED ]', txt)
+
+ def testGoogletestEnvVar(self):
+ """Tests the behavior of specifying fail_fast via Googletest env var."""
+
+ try:
+ SetEnvVar(FAIL_FAST_ENV_VAR, '1')
+ txt, _ = RunAndReturnOutput('HasSimpleTest')
+ self.assertIn('1 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 3 tests', txt)
+
+ SetEnvVar(FAIL_FAST_ENV_VAR, '0')
+ txt, _ = RunAndReturnOutput('HasSimpleTest')
+ self.assertIn('4 FAILED TEST', txt)
+ self.assertNotIn('[ SKIPPED ]', txt)
+ finally:
+ SetEnvVar(FAIL_FAST_ENV_VAR, None)
+
+ def testBazelEnvVar(self):
+ """Tests the behavior of specifying fail_fast via Bazel testbridge."""
+
+ try:
+ SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, '1')
+ txt, _ = RunAndReturnOutput('HasSimpleTest')
+ self.assertIn('1 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 3 tests', txt)
+
+ SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, '0')
+ txt, _ = RunAndReturnOutput('HasSimpleTest')
+ self.assertIn('4 FAILED TEST', txt)
+ self.assertNotIn('[ SKIPPED ]', txt)
+ finally:
+ SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, None)
+
+ def testFlagOverridesEnvVar(self):
+ """Tests precedence of flag over env var."""
+
+ try:
+ SetEnvVar(FAIL_FAST_ENV_VAR, '0')
+ txt, _ = RunAndReturnOutput('HasSimpleTest', True)
+ self.assertIn('1 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 3 tests', txt)
+ finally:
+ SetEnvVar(FAIL_FAST_ENV_VAR, None)
+
+ def testGoogletestEnvVarOverridesBazelEnvVar(self):
+ """Tests that the Googletest native env var over Bazel testbridge."""
+
+ try:
+ SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, '0')
+ SetEnvVar(FAIL_FAST_ENV_VAR, '1')
+ txt, _ = RunAndReturnOutput('HasSimpleTest')
+ self.assertIn('1 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 3 tests', txt)
+ finally:
+ SetEnvVar(FAIL_FAST_ENV_VAR, None)
+ SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, None)
+
+ def testEventListener(self):
+ txt, _ = RunAndReturnOutput(test_suite='HasSkipTest', fail_fast=True)
+ self.assertIn('1 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 3 tests', txt)
+ for expected_count, callback in [(1, 'OnTestSuiteStart'),
+ (5, 'OnTestStart'),
+ (5, 'OnTestEnd'),
+ (5, 'OnTestPartResult'),
+ (1, 'OnTestSuiteEnd')]:
+ self.assertEqual(
+ expected_count, txt.count(callback),
+ 'Expected %d calls to callback %s match count on output: %s ' %
+ (expected_count, callback, txt))
+
+ txt, _ = RunAndReturnOutput(test_suite='HasSkipTest', fail_fast=False)
+ self.assertIn('3 FAILED TEST', txt)
+ self.assertIn('[ SKIPPED ] 1 test', txt)
+ for expected_count, callback in [(1, 'OnTestSuiteStart'),
+ (5, 'OnTestStart'),
+ (5, 'OnTestEnd'),
+ (5, 'OnTestPartResult'),
+ (1, 'OnTestSuiteEnd')]:
+ self.assertEqual(
+ expected_count, txt.count(callback),
+ 'Expected %d calls to callback %s match count on output: %s ' %
+ (expected_count, callback, txt))
+
+ def assertXmlResultCount(self, result, count, xml):
+ self.assertEqual(
+ count, xml.count('result="%s"' % result),
+ 'Expected \'result="%s"\' match count of %s: %s ' %
+ (result, count, xml))
+
+ def assertXmlStatusCount(self, status, count, xml):
+ self.assertEqual(
+ count, xml.count('status="%s"' % status),
+ 'Expected \'status="%s"\' match count of %s: %s ' %
+ (status, count, xml))
+
+ def assertFailFastXmlAndTxtOutput(self,
+ fail_fast,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled=False):
+ """Assert XML and text output of a test execution."""
+
+ txt, xml = RunAndReturnOutput(test_suite, fail_fast, run_disabled)
+ if failure_count > 0:
+ self.assertIn('%s FAILED TEST' % failure_count, txt)
+ if suppressed_count > 0:
+ self.assertIn('%s DISABLED TEST' % suppressed_count, txt)
+ if skipped_count > 0:
+ self.assertIn('[ SKIPPED ] %s tests' % skipped_count, txt)
+ self.assertXmlStatusCount('run',
+ passed_count + failure_count + skipped_count, xml)
+ self.assertXmlStatusCount('notrun', suppressed_count, xml)
+ self.assertXmlResultCount('completed', passed_count + failure_count, xml)
+ self.assertXmlResultCount('skipped', skipped_count, xml)
+ self.assertXmlResultCount('suppressed', suppressed_count, xml)
+
+ def assertFailFastBehavior(self,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled=False):
+ """Assert --fail_fast via flag."""
+
+ for fail_fast in ('true', '1', 't', True):
+ self.assertFailFastXmlAndTxtOutput(fail_fast, test_suite, passed_count,
+ failure_count, skipped_count,
+ suppressed_count, run_disabled)
+
+ def assertNotFailFastBehavior(self,
+ test_suite,
+ passed_count,
+ failure_count,
+ skipped_count,
+ suppressed_count,
+ run_disabled=False):
+ """Assert --nofail_fast via flag."""
+
+ for fail_fast in ('false', '0', 'f', False):
+ self.assertFailFastXmlAndTxtOutput(fail_fast, test_suite, passed_count,
+ failure_count, skipped_count,
+ suppressed_count, run_disabled)
+
+ def testFlag_HasFixtureTest(self):
+ """Tests the behavior of fail_fast and TEST_F."""
+ self.assertFailFastBehavior(
+ test_suite='HasFixtureTest',
+ passed_count=1,
+ failure_count=1,
+ skipped_count=3,
+ suppressed_count=0)
+ self.assertNotFailFastBehavior(
+ test_suite='HasFixtureTest',
+ passed_count=1,
+ failure_count=4,
+ skipped_count=0,
+ suppressed_count=0)
+
+ def testFlag_HasSimpleTest(self):
+ """Tests the behavior of fail_fast and TEST."""
+ self.assertFailFastBehavior(
+ test_suite='HasSimpleTest',
+ passed_count=1,
+ failure_count=1,
+ skipped_count=3,
+ suppressed_count=0)
+ self.assertNotFailFastBehavior(
+ test_suite='HasSimpleTest',
+ passed_count=1,
+ failure_count=4,
+ skipped_count=0,
+ suppressed_count=0)
+
+ def testFlag_HasParametersTest(self):
+ """Tests the behavior of fail_fast and TEST_P."""
+ self.assertFailFastBehavior(
+ test_suite='HasParametersSuite/HasParametersTest',
+ passed_count=0,
+ failure_count=1,
+ skipped_count=3,
+ suppressed_count=0)
+ self.assertNotFailFastBehavior(
+ test_suite='HasParametersSuite/HasParametersTest',
+ passed_count=0,
+ failure_count=4,
+ skipped_count=0,
+ suppressed_count=0)
+
+ def testFlag_HasDisabledTest(self):
+ """Tests the behavior of fail_fast and Disabled test cases."""
+ self.assertFailFastBehavior(
+ test_suite='HasDisabledTest',
+ passed_count=1,
+ failure_count=1,
+ skipped_count=2,
+ suppressed_count=1,
+ run_disabled=False)
+ self.assertNotFailFastBehavior(
+ test_suite='HasDisabledTest',
+ passed_count=1,
+ failure_count=3,
+ skipped_count=0,
+ suppressed_count=1,
+ run_disabled=False)
+
+ def testFlag_HasDisabledRunDisabledTest(self):
+ """Tests the behavior of fail_fast and Disabled test cases enabled."""
+ self.assertFailFastBehavior(
+ test_suite='HasDisabledTest',
+ passed_count=1,
+ failure_count=1,
+ skipped_count=3,
+ suppressed_count=0,
+ run_disabled=True)
+ self.assertNotFailFastBehavior(
+ test_suite='HasDisabledTest',
+ passed_count=1,
+ failure_count=4,
+ skipped_count=0,
+ suppressed_count=0,
+ run_disabled=True)
+
+ def testFlag_HasDisabledSuiteTest(self):
+ """Tests the behavior of fail_fast and Disabled test suites."""
+ self.assertFailFastBehavior(
+ test_suite='DISABLED_HasDisabledSuite',
+ passed_count=0,
+ failure_count=0,
+ skipped_count=0,
+ suppressed_count=5,
+ run_disabled=False)
+ self.assertNotFailFastBehavior(
+ test_suite='DISABLED_HasDisabledSuite',
+ passed_count=0,
+ failure_count=0,
+ skipped_count=0,
+ suppressed_count=5,
+ run_disabled=False)
+
+ def testFlag_HasDisabledSuiteRunDisabledTest(self):
+ """Tests the behavior of fail_fast and Disabled test suites enabled."""
+ self.assertFailFastBehavior(
+ test_suite='DISABLED_HasDisabledSuite',
+ passed_count=1,
+ failure_count=1,
+ skipped_count=3,
+ suppressed_count=0,
+ run_disabled=True)
+ self.assertNotFailFastBehavior(
+ test_suite='DISABLED_HasDisabledSuite',
+ passed_count=1,
+ failure_count=4,
+ skipped_count=0,
+ suppressed_count=0,
+ run_disabled=True)
+
+ if SUPPORTS_DEATH_TESTS:
+
+ def testFlag_HasDeathTest(self):
+ """Tests the behavior of fail_fast and death tests."""
+ self.assertFailFastBehavior(
+ test_suite='HasDeathTest',
+ passed_count=1,
+ failure_count=1,
+ skipped_count=3,
+ suppressed_count=0)
+ self.assertNotFailFastBehavior(
+ test_suite='HasDeathTest',
+ passed_count=1,
+ failure_count=4,
+ skipped_count=0,
+ suppressed_count=0)
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/googletest/test/googletest-failfast-unittest_.cc b/googletest/test/googletest-failfast-unittest_.cc
new file mode 100644
index 0000000..0b2c951
--- /dev/null
+++ b/googletest/test/googletest-failfast-unittest_.cc
@@ -0,0 +1,167 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Unit test for Google Test fail_fast.
+//
+// A user can specify whether a Google Test program should stop test execution
+// upon the first failure via either the GTEST_FAIL_FAST environment variable
+// or the --gtest_fail_fast flag. This is used for testing such functionality.
+//
+// The program will be invoked from a Python unit test. Don't run it
+// directly.
+
+#include "gtest/gtest.h"
+
+namespace {
+
+// Test HasFixtureTest.
+
+class HasFixtureTest : public testing::Test {};
+
+TEST_F(HasFixtureTest, Test0) {}
+
+TEST_F(HasFixtureTest, Test1) { FAIL() << "Expected failure."; }
+
+TEST_F(HasFixtureTest, Test2) { FAIL() << "Expected failure."; }
+
+TEST_F(HasFixtureTest, Test3) { FAIL() << "Expected failure."; }
+
+TEST_F(HasFixtureTest, Test4) { FAIL() << "Expected failure."; }
+
+// Test HasSimpleTest.
+
+TEST(HasSimpleTest, Test0) {}
+
+TEST(HasSimpleTest, Test1) { FAIL() << "Expected failure."; }
+
+TEST(HasSimpleTest, Test2) { FAIL() << "Expected failure."; }
+
+TEST(HasSimpleTest, Test3) { FAIL() << "Expected failure."; }
+
+TEST(HasSimpleTest, Test4) { FAIL() << "Expected failure."; }
+
+// Test HasDisabledTest.
+
+TEST(HasDisabledTest, Test0) {}
+
+TEST(HasDisabledTest, DISABLED_Test1) { FAIL() << "Expected failure."; }
+
+TEST(HasDisabledTest, Test2) { FAIL() << "Expected failure."; }
+
+TEST(HasDisabledTest, Test3) { FAIL() << "Expected failure."; }
+
+TEST(HasDisabledTest, Test4) { FAIL() << "Expected failure."; }
+
+// Test HasDeathTest
+
+TEST(HasDeathTest, Test0) { EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*"); }
+
+TEST(HasDeathTest, Test1) {
+ EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*");
+}
+
+TEST(HasDeathTest, Test2) {
+ EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*");
+}
+
+TEST(HasDeathTest, Test3) {
+ EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*");
+}
+
+TEST(HasDeathTest, Test4) {
+ EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*");
+}
+
+// Test DISABLED_HasDisabledSuite
+
+TEST(DISABLED_HasDisabledSuite, Test0) {}
+
+TEST(DISABLED_HasDisabledSuite, Test1) { FAIL() << "Expected failure."; }
+
+TEST(DISABLED_HasDisabledSuite, Test2) { FAIL() << "Expected failure."; }
+
+TEST(DISABLED_HasDisabledSuite, Test3) { FAIL() << "Expected failure."; }
+
+TEST(DISABLED_HasDisabledSuite, Test4) { FAIL() << "Expected failure."; }
+
+// Test HasParametersTest
+
+class HasParametersTest : public testing::TestWithParam<int> {};
+
+TEST_P(HasParametersTest, Test1) { FAIL() << "Expected failure."; }
+
+TEST_P(HasParametersTest, Test2) { FAIL() << "Expected failure."; }
+
+INSTANTIATE_TEST_SUITE_P(HasParametersSuite, HasParametersTest,
+ testing::Values(1, 2));
+
+class MyTestListener : public ::testing::EmptyTestEventListener {
+ void OnTestSuiteStart(const ::testing::TestSuite& test_suite) override {
+ printf("We are in OnTestSuiteStart of %s.\n", test_suite.name());
+ }
+
+ void OnTestStart(const ::testing::TestInfo& test_info) override {
+ printf("We are in OnTestStart of %s.%s.\n", test_info.test_suite_name(),
+ test_info.name());
+ }
+
+ void OnTestPartResult(
+ const ::testing::TestPartResult& test_part_result) override {
+ printf("We are in OnTestPartResult %s:%d.\n", test_part_result.file_name(),
+ test_part_result.line_number());
+ }
+
+ void OnTestEnd(const ::testing::TestInfo& test_info) override {
+ printf("We are in OnTestEnd of %s.%s.\n", test_info.test_suite_name(),
+ test_info.name());
+ }
+
+ void OnTestSuiteEnd(const ::testing::TestSuite& test_suite) override {
+ printf("We are in OnTestSuiteEnd of %s.\n", test_suite.name());
+ }
+};
+
+TEST(HasSkipTest, Test0) { SUCCEED() << "Expected success."; }
+
+TEST(HasSkipTest, Test1) { GTEST_SKIP() << "Expected skip."; }
+
+TEST(HasSkipTest, Test2) { FAIL() << "Expected failure."; }
+
+TEST(HasSkipTest, Test3) { FAIL() << "Expected failure."; }
+
+TEST(HasSkipTest, Test4) { FAIL() << "Expected failure."; }
+
+} // namespace
+
+int main(int argc, char **argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new MyTestListener());
+ return RUN_ALL_TESTS();
+}
diff --git a/googletest/test/gtest_unittest.cc b/googletest/test/gtest_unittest.cc
index 06c5e67..f3b7216 100644
--- a/googletest/test/gtest_unittest.cc
+++ b/googletest/test/gtest_unittest.cc
@@ -37,21 +37,22 @@
// code once "gtest.h" has been #included.
// Do not move it after other gtest #includes.
TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
- bool dummy = testing::GTEST_FLAG(also_run_disabled_tests)
- || testing::GTEST_FLAG(break_on_failure)
- || testing::GTEST_FLAG(catch_exceptions)
- || testing::GTEST_FLAG(color) != "unknown"
- || testing::GTEST_FLAG(filter) != "unknown"
- || testing::GTEST_FLAG(list_tests)
- || testing::GTEST_FLAG(output) != "unknown"
- || testing::GTEST_FLAG(print_time)
- || testing::GTEST_FLAG(random_seed)
- || testing::GTEST_FLAG(repeat) > 0
- || testing::GTEST_FLAG(show_internal_stack_frames)
- || testing::GTEST_FLAG(shuffle)
- || testing::GTEST_FLAG(stack_trace_depth) > 0
- || testing::GTEST_FLAG(stream_result_to) != "unknown"
- || testing::GTEST_FLAG(throw_on_failure);
+ bool dummy = testing::GTEST_FLAG(also_run_disabled_tests) ||
+ testing::GTEST_FLAG(break_on_failure) ||
+ testing::GTEST_FLAG(catch_exceptions) ||
+ testing::GTEST_FLAG(color) != "unknown" ||
+ testing::GTEST_FLAG(fail_fast) ||
+ testing::GTEST_FLAG(filter) != "unknown" ||
+ testing::GTEST_FLAG(list_tests) ||
+ testing::GTEST_FLAG(output) != "unknown" ||
+ testing::GTEST_FLAG(print_time) ||
+ testing::GTEST_FLAG(random_seed) ||
+ testing::GTEST_FLAG(repeat) > 0 ||
+ testing::GTEST_FLAG(show_internal_stack_frames) ||
+ testing::GTEST_FLAG(shuffle) ||
+ testing::GTEST_FLAG(stack_trace_depth) > 0 ||
+ testing::GTEST_FLAG(stream_result_to) != "unknown" ||
+ testing::GTEST_FLAG(throw_on_failure);
EXPECT_TRUE(dummy || !dummy); // Suppresses warning that dummy is unused.
}
@@ -202,6 +203,7 @@ using testing::GTEST_FLAG(break_on_failure);
using testing::GTEST_FLAG(catch_exceptions);
using testing::GTEST_FLAG(color);
using testing::GTEST_FLAG(death_test_use_fork);
+using testing::GTEST_FLAG(fail_fast);
using testing::GTEST_FLAG(filter);
using testing::GTEST_FLAG(list_tests);
using testing::GTEST_FLAG(output);
@@ -1598,6 +1600,7 @@ class GTestFlagSaverTest : public Test {
GTEST_FLAG(catch_exceptions) = false;
GTEST_FLAG(death_test_use_fork) = false;
GTEST_FLAG(color) = "auto";
+ GTEST_FLAG(fail_fast) = false;
GTEST_FLAG(filter) = "";
GTEST_FLAG(list_tests) = false;
GTEST_FLAG(output) = "";
@@ -1625,6 +1628,7 @@ class GTestFlagSaverTest : public Test {
EXPECT_FALSE(GTEST_FLAG(catch_exceptions));
EXPECT_STREQ("auto", GTEST_FLAG(color).c_str());
EXPECT_FALSE(GTEST_FLAG(death_test_use_fork));
+ EXPECT_FALSE(GTEST_FLAG(fail_fast));
EXPECT_STREQ("", GTEST_FLAG(filter).c_str());
EXPECT_FALSE(GTEST_FLAG(list_tests));
EXPECT_STREQ("", GTEST_FLAG(output).c_str());
@@ -1641,6 +1645,7 @@ class GTestFlagSaverTest : public Test {
GTEST_FLAG(catch_exceptions) = true;
GTEST_FLAG(color) = "no";
GTEST_FLAG(death_test_use_fork) = true;
+ GTEST_FLAG(fail_fast) = true;
GTEST_FLAG(filter) = "abc";
GTEST_FLAG(list_tests) = true;
GTEST_FLAG(output) = "xml:foo.xml";
@@ -5495,20 +5500,22 @@ TEST_F(SetUpTestSuiteTest, TestSetupTestSuite2) {
// The Flags struct stores a copy of all Google Test flags.
struct Flags {
// Constructs a Flags struct where each flag has its default value.
- Flags() : also_run_disabled_tests(false),
- break_on_failure(false),
- catch_exceptions(false),
- death_test_use_fork(false),
- filter(""),
- list_tests(false),
- output(""),
- print_time(true),
- random_seed(0),
- repeat(1),
- shuffle(false),
- stack_trace_depth(kMaxStackTraceDepth),
- stream_result_to(""),
- throw_on_failure(false) {}
+ Flags()
+ : also_run_disabled_tests(false),
+ break_on_failure(false),
+ catch_exceptions(false),
+ death_test_use_fork(false),
+ fail_fast(false),
+ filter(""),
+ list_tests(false),
+ output(""),
+ print_time(true),
+ random_seed(0),
+ repeat(1),
+ shuffle(false),
+ stack_trace_depth(kMaxStackTraceDepth),
+ stream_result_to(""),
+ throw_on_failure(false) {}
// Factory methods.
@@ -5544,6 +5551,14 @@ struct Flags {
return flags;
}
+ // Creates a Flags struct where the gtest_fail_fast flag has
+ // the given value.
+ static Flags FailFast(bool fail_fast) {
+ Flags flags;
+ flags.fail_fast = fail_fast;
+ return flags;
+ }
+
// Creates a Flags struct where the gtest_filter flag has the given
// value.
static Flags Filter(const char* filter) {
@@ -5629,6 +5644,7 @@ struct Flags {
bool break_on_failure;
bool catch_exceptions;
bool death_test_use_fork;
+ bool fail_fast;
const char* filter;
bool list_tests;
const char* output;
@@ -5650,6 +5666,7 @@ class ParseFlagsTest : public Test {
GTEST_FLAG(break_on_failure) = false;
GTEST_FLAG(catch_exceptions) = false;
GTEST_FLAG(death_test_use_fork) = false;
+ GTEST_FLAG(fail_fast) = false;
GTEST_FLAG(filter) = "";
GTEST_FLAG(list_tests) = false;
GTEST_FLAG(output) = "";
@@ -5680,6 +5697,7 @@ class ParseFlagsTest : public Test {
EXPECT_EQ(expected.break_on_failure, GTEST_FLAG(break_on_failure));
EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG(catch_exceptions));
EXPECT_EQ(expected.death_test_use_fork, GTEST_FLAG(death_test_use_fork));
+ EXPECT_EQ(expected.fail_fast, GTEST_FLAG(fail_fast));
EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str());
EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests));
EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str());
@@ -5766,6 +5784,15 @@ TEST_F(ParseFlagsTest, NoFlag) {
GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);
}
+// Tests parsing --gtest_fail_fast.
+TEST_F(ParseFlagsTest, FailFast) {
+ const char* argv[] = {"foo.exe", "--gtest_fail_fast", nullptr};
+
+ const char* argv2[] = {"foo.exe", nullptr};
+
+ GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::FailFast(true), false);
+}
+
// Tests parsing a bad --gtest_filter flag.
TEST_F(ParseFlagsTest, FilterBad) {
const char* argv[] = {"foo.exe", "--gtest_filter", nullptr};