path: root/googletest/test
author     Chris Johnson <chrisjohnsonmail@gmail.com>   2019-08-27 23:00:04 (GMT)
committer  GitHub <noreply@github.com>                  2019-08-27 23:00:04 (GMT)
commit     85f059f03d56ce82cf8f68cf7505b695a0c730c2 (patch)
tree       e715b911578e831d7a3fdf46bbf2bdbece4fafc3 /googletest/test
parent     130e5aa86a7a71501cf8fa7cd6f507928f01bd79 (diff)
parent     fdd6a1dc8c74bf37211c14a1b2e4b64755bb3380 (diff)
download   googletest-85f059f03d56ce82cf8f68cf7505b695a0c730c2.zip
           googletest-85f059f03d56ce82cf8f68cf7505b695a0c730c2.tar.gz
           googletest-85f059f03d56ce82cf8f68cf7505b695a0c730c2.tar.bz2
Merge pull request #3 from google/master
Update master
Diffstat (limited to 'googletest/test')
-rw-r--r--  googletest/test/BUILD.bazel | 27
-rwxr-xr-x  googletest/test/googletest-catch-exceptions-test.py | 147
-rw-r--r--  googletest/test/googletest-catch-exceptions-test_.cc | 61
-rw-r--r--  googletest/test/googletest-death-test-test.cc | 86
-rw-r--r--  googletest/test/googletest-death-test_ex_test.cc | 2
-rw-r--r--  googletest/test/googletest-filepath-test.cc | 2
-rw-r--r--  googletest/test/googletest-filter-unittest_.cc | 4
-rw-r--r--  googletest/test/googletest-json-outfiles-test.py | 91
-rw-r--r--  googletest/test/googletest-json-output-unittest.py | 860
-rw-r--r--  googletest/test/googletest-list-tests-unittest_.cc | 10
-rw-r--r--  googletest/test/googletest-listener-test.cc | 391
-rw-r--r--  googletest/test/googletest-options-test.cc | 1
-rw-r--r--  googletest/test/googletest-output-test-golden-lin.txt | 181
-rwxr-xr-x  googletest/test/googletest-output-test.py | 1
-rw-r--r--  googletest/test/googletest-output-test_.cc | 130
-rw-r--r--  googletest/test/googletest-param-test-invalid-name1-test_.cc | 8
-rw-r--r--  googletest/test/googletest-param-test-invalid-name2-test_.cc | 8
-rw-r--r--  googletest/test/googletest-param-test-test.cc | 151
-rw-r--r--  googletest/test/googletest-param-test-test.h | 2
-rw-r--r--  googletest/test/googletest-param-test2-test.cc | 14
-rw-r--r--  googletest/test/googletest-port-test.cc | 17
-rw-r--r--  googletest/test/googletest-printers-test.cc | 175
-rw-r--r--  googletest/test/googletest-test-part-test.cc | 2
-rw-r--r--  googletest/test/googletest-test2_test.cc | 14
-rwxr-xr-x  googletest/test/googletest-throw-on-failure-test.py | 5
-rw-r--r--  googletest/test/gtest-typed-test2_test.cc | 4
-rw-r--r--  googletest/test/gtest-typed-test_test.cc | 135
-rw-r--r--  googletest/test/gtest-typed-test_test.h | 6
-rw-r--r--  googletest/test/gtest-unittest-api_test.cc | 232
-rw-r--r--  googletest/test/gtest_assert_by_exception_test.cc | 1
-rw-r--r--  googletest/test/gtest_environment_test.cc | 2
-rw-r--r--  googletest/test/gtest_pred_impl_unittest.cc | 22
-rw-r--r--  googletest/test/gtest_premature_exit_test.cc | 4
-rw-r--r--  googletest/test/gtest_repeat_test.cc | 7
-rwxr-xr-x  googletest/test/gtest_skip_environment_check_output_test.py | 54
-rw-r--r--  googletest/test/gtest_skip_in_environment_setup_test.cc | 49
-rwxr-xr-x  googletest/test/gtest_test_utils.py | 6
-rw-r--r--  googletest/test/gtest_unittest.cc | 596
-rwxr-xr-x  googletest/test/gtest_xml_outfiles_test.py | 13
-rwxr-xr-x  googletest/test/gtest_xml_output_unittest.py | 96
-rw-r--r--  googletest/test/gtest_xml_output_unittest_.cc | 26
-rwxr-xr-x  googletest/test/gtest_xml_test_utils.py | 2
42 files changed, 2149 insertions, 1496 deletions
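Note: most of the churn in this merge is googletest's test-case-to-test-suite rename: SetUpTestCase()/TearDownTestCase() become SetUpTestSuite()/TearDownTestSuite(), and the *_TEST_CASE_P macros become *_TEST_SUITE_P. As a reference point only (this sketch is not part of the patch), a fixture written against the new spelling looks roughly like this:

#include "gtest/gtest.h"

class MyFixture : public ::testing::Test {
 public:
  // Runs once before the first test in the suite (formerly SetUpTestCase()).
  static void SetUpTestSuite() { shared_value_ = new int(42); }
  // Runs once after the last test in the suite (formerly TearDownTestCase()).
  static void TearDownTestSuite() {
    delete shared_value_;
    shared_value_ = nullptr;
  }

 protected:
  static int* shared_value_;  // shared across all tests in the suite
};

int* MyFixture::shared_value_ = nullptr;

TEST_F(MyFixture, SeesSharedValue) { EXPECT_EQ(42, *shared_value_); }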
diff --git a/googletest/test/BUILD.bazel b/googletest/test/BUILD.bazel
index 6683206..156d5d4 100644
--- a/googletest/test/BUILD.bazel
+++ b/googletest/test/BUILD.bazel
@@ -32,6 +32,9 @@
#
# Bazel BUILD for The Google C++ Testing Framework (Google Test)
+load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_test")
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
licenses(["notice"])
#on windows exclude gtest-tuple.h
@@ -278,6 +281,30 @@ cc_binary(
deps = ["//:gtest"],
)
+cc_test(
+ name = "gtest_skip_test",
+ size = "small",
+ srcs = ["gtest_skip_test.cc"],
+ deps = ["//:gtest_main"],
+)
+
+cc_test(
+ name = "gtest_skip_in_environment_setup_test",
+ size = "small",
+ srcs = ["gtest_skip_in_environment_setup_test.cc"],
+ deps = ["//:gtest_main"],
+)
+
+py_test(
+ name = "gtest_skip_environment_check_output_test",
+ size = "small",
+ srcs = ["gtest_skip_environment_check_output_test.py"],
+ data = [
+ ":gtest_skip_in_environment_setup_test",
+ ],
+ deps = [":gtest_test_utils"],
+)
+
py_test(
name = "googletest-list-tests-unittest",
size = "small",
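Note: the three new Bazel targets above wire up the GTEST_SKIP()-in-environment coverage. The new gtest_skip_in_environment_setup_test.cc itself is not shown in this diff; as a rough, assumed sketch of the shape of such a test, a global environment can skip everything from its SetUp():

#include "gtest/gtest.h"

// Assumed shape only: a global environment whose SetUp() calls GTEST_SKIP().
class SetupEnvironment : public ::testing::Environment {
 public:
  void SetUp() override { GTEST_SKIP() << "skipping from environment setup"; }
};

TEST(SkipCheck, NeverRuns) { FAIL() << "should have been skipped"; }

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  ::testing::AddGlobalTestEnvironment(new SetupEnvironment);
  return RUN_ALL_TESTS();
}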
diff --git a/googletest/test/googletest-catch-exceptions-test.py b/googletest/test/googletest-catch-exceptions-test.py
index 5d49c10..94a5b33 100755
--- a/googletest/test/googletest-catch-exceptions-test.py
+++ b/googletest/test/googletest-catch-exceptions-test.py
@@ -89,9 +89,9 @@ if SUPPORTS_SEH_EXCEPTIONS:
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s destructor'
in test_output)
- self.assert_('SEH exception with code 0x2a thrown in SetUpTestCase()'
+ self.assert_('SEH exception with code 0x2a thrown in SetUpTestSuite()'
in test_output)
- self.assert_('SEH exception with code 0x2a thrown in TearDownTestCase()'
+ self.assert_('SEH exception with code 0x2a thrown in TearDownTestSuite()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUp()'
in test_output)
@@ -117,10 +117,11 @@ class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
"""
def testCatchesCxxExceptionsInFixtureConstructor(self):
- self.assert_('C++ exception with description '
- '"Standard C++ exception" thrown '
- 'in the test fixture\'s constructor'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description '
+ '"Standard C++ exception" thrown '
+ 'in the test fixture\'s constructor' in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInConstructorTest" (no quotes) '
@@ -130,88 +131,90 @@ class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
EX_BINARY_OUTPUT):
def testCatchesCxxExceptionsInFixtureDestructor(self):
- self.assert_('C++ exception with description '
- '"Standard C++ exception" thrown '
- 'in the test fixture\'s destructor'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description '
+ '"Standard C++ exception" thrown '
+ 'in the test fixture\'s destructor' in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInDestructorTest::TearDownTestSuite() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUpTestCase(self):
- self.assert_('C++ exception with description "Standard C++ exception"'
- ' thrown in SetUpTestCase()'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTestCaseTest test body '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description "Standard C++ exception"'
+ ' thrown in SetUpTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInConstructorTest::TearDownTestSuite() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTestSuiteTest constructor '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTestSuiteTest destructor '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTestSuiteTest::SetUp() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTestSuiteTest::TearDown() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTestSuiteTest test body '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTearDownTestCase(self):
- self.assert_('C++ exception with description "Standard C++ exception"'
- ' thrown in TearDownTestCase()'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description "Standard C++ exception"'
+ ' thrown in TearDownTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUp(self):
- self.assert_('C++ exception with description "Standard C++ exception"'
- ' thrown in SetUp()'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTest destructor '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInSetUpTest::TearDown() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description "Standard C++ exception"'
+ ' thrown in SetUp()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTest::TearDownTestSuite() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTest destructor '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInSetUpTest::TearDown() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInSetUpTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
def testCatchesCxxExceptionsInTearDown(self):
- self.assert_('C++ exception with description "Standard C++ exception"'
- ' thrown in TearDown()'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInTearDownTest destructor '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description "Standard C++ exception"'
+ ' thrown in TearDown()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInTearDownTest::TearDownTestSuite() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInTearDownTest destructor '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTestBody(self):
- self.assert_('C++ exception with description "Standard C++ exception"'
- ' thrown in the test body'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInTestBodyTest destructor '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
- self.assert_('CxxExceptionInTestBodyTest::TearDown() '
- 'called as expected.'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'C++ exception with description "Standard C++ exception"'
+ ' thrown in the test body' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInTestBodyTest::TearDownTestSuite() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInTestBodyTest destructor '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'CxxExceptionInTestBodyTest::TearDown() '
+ 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)
def testCatchesNonStdCxxExceptions(self):
- self.assert_('Unknown C++ exception thrown in the test body'
- in EX_BINARY_OUTPUT)
+ self.assertTrue(
+ 'Unknown C++ exception thrown in the test body' in EX_BINARY_OUTPUT,
+ EX_BINARY_OUTPUT)
def testUnhandledCxxExceptionsAbortTheProgram(self):
# Filters out SEH exception tests on Windows. Unhandled SEH exceptions
diff --git a/googletest/test/googletest-catch-exceptions-test_.cc b/googletest/test/googletest-catch-exceptions-test_.cc
index 8270f64..8c127d4 100644
--- a/googletest/test/googletest-catch-exceptions-test_.cc
+++ b/googletest/test/googletest-catch-exceptions-test_.cc
@@ -64,19 +64,20 @@ class SehExceptionInDestructorTest : public Test {
TEST_F(SehExceptionInDestructorTest, ThrowsExceptionInDestructor) {}
-class SehExceptionInSetUpTestCaseTest : public Test {
+class SehExceptionInSetUpTestSuiteTest : public Test {
public:
- static void SetUpTestCase() { RaiseException(42, 0, 0, NULL); }
+ static void SetUpTestSuite() { RaiseException(42, 0, 0, NULL); }
};
-TEST_F(SehExceptionInSetUpTestCaseTest, ThrowsExceptionInSetUpTestCase) {}
+TEST_F(SehExceptionInSetUpTestSuiteTest, ThrowsExceptionInSetUpTestSuite) {}
-class SehExceptionInTearDownTestCaseTest : public Test {
+class SehExceptionInTearDownTestSuiteTest : public Test {
public:
- static void TearDownTestCase() { RaiseException(42, 0, 0, NULL); }
+ static void TearDownTestSuite() { RaiseException(42, 0, 0, NULL); }
};
-TEST_F(SehExceptionInTearDownTestCaseTest, ThrowsExceptionInTearDownTestCase) {}
+TEST_F(SehExceptionInTearDownTestSuiteTest,
+ ThrowsExceptionInTearDownTestSuite) {}
class SehExceptionInSetUpTest : public Test {
protected:
@@ -109,9 +110,9 @@ class CxxExceptionInConstructorTest : public Test {
throw std::runtime_error("Standard C++ exception"));
}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
printf("%s",
- "CxxExceptionInConstructorTest::TearDownTestCase() "
+ "CxxExceptionInConstructorTest::TearDownTestSuite() "
"called as expected.\n");
}
@@ -137,65 +138,65 @@ TEST_F(CxxExceptionInConstructorTest, ThrowsExceptionInConstructor) {
<< "called unexpectedly.";
}
-
-class CxxExceptionInSetUpTestCaseTest : public Test {
+class CxxExceptionInSetUpTestSuiteTest : public Test {
public:
- CxxExceptionInSetUpTestCaseTest() {
+ CxxExceptionInSetUpTestSuiteTest() {
printf("%s",
- "CxxExceptionInSetUpTestCaseTest constructor "
+ "CxxExceptionInSetUpTestSuiteTest constructor "
"called as expected.\n");
}
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
throw std::runtime_error("Standard C++ exception");
}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
printf("%s",
- "CxxExceptionInSetUpTestCaseTest::TearDownTestCase() "
+ "CxxExceptionInSetUpTestSuiteTest::TearDownTestSuite() "
"called as expected.\n");
}
protected:
- ~CxxExceptionInSetUpTestCaseTest() override {
+ ~CxxExceptionInSetUpTestSuiteTest() override {
printf("%s",
- "CxxExceptionInSetUpTestCaseTest destructor "
+ "CxxExceptionInSetUpTestSuiteTest destructor "
"called as expected.\n");
}
void SetUp() override {
printf("%s",
- "CxxExceptionInSetUpTestCaseTest::SetUp() "
+ "CxxExceptionInSetUpTestSuiteTest::SetUp() "
"called as expected.\n");
}
void TearDown() override {
printf("%s",
- "CxxExceptionInSetUpTestCaseTest::TearDown() "
+ "CxxExceptionInSetUpTestSuiteTest::TearDown() "
"called as expected.\n");
}
};
-TEST_F(CxxExceptionInSetUpTestCaseTest, ThrowsExceptionInSetUpTestCase) {
+TEST_F(CxxExceptionInSetUpTestSuiteTest, ThrowsExceptionInSetUpTestSuite) {
printf("%s",
- "CxxExceptionInSetUpTestCaseTest test body "
+ "CxxExceptionInSetUpTestSuiteTest test body "
"called as expected.\n");
}
-class CxxExceptionInTearDownTestCaseTest : public Test {
+class CxxExceptionInTearDownTestSuiteTest : public Test {
public:
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
throw std::runtime_error("Standard C++ exception");
}
};
-TEST_F(CxxExceptionInTearDownTestCaseTest, ThrowsExceptionInTearDownTestCase) {}
+TEST_F(CxxExceptionInTearDownTestSuiteTest,
+ ThrowsExceptionInTearDownTestSuite) {}
class CxxExceptionInSetUpTest : public Test {
public:
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
printf("%s",
- "CxxExceptionInSetUpTest::TearDownTestCase() "
+ "CxxExceptionInSetUpTest::TearDownTestSuite() "
"called as expected.\n");
}
@@ -222,9 +223,9 @@ TEST_F(CxxExceptionInSetUpTest, ThrowsExceptionInSetUp) {
class CxxExceptionInTearDownTest : public Test {
public:
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
printf("%s",
- "CxxExceptionInTearDownTest::TearDownTestCase() "
+ "CxxExceptionInTearDownTest::TearDownTestSuite() "
"called as expected.\n");
}
@@ -244,9 +245,9 @@ TEST_F(CxxExceptionInTearDownTest, ThrowsExceptionInTearDown) {}
class CxxExceptionInTestBodyTest : public Test {
public:
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
printf("%s",
- "CxxExceptionInTestBodyTest::TearDownTestCase() "
+ "CxxExceptionInTestBodyTest::TearDownTestSuite() "
"called as expected.\n");
}
diff --git a/googletest/test/googletest-death-test-test.cc b/googletest/test/googletest-death-test-test.cc
index a1a8f18..814d771 100644
--- a/googletest/test/googletest-death-test-test.cc
+++ b/googletest/test/googletest-death-test-test.cc
@@ -41,7 +41,9 @@ using testing::internal::AlwaysTrue;
#if GTEST_HAS_DEATH_TEST
# if GTEST_OS_WINDOWS
+# include <fcntl.h> // For O_BINARY
# include <direct.h> // For chdir().
+# include <io.h>
# else
# include <unistd.h>
# include <sys/wait.h> // For waitpid.
@@ -139,7 +141,7 @@ class TestForDeathTest : public testing::Test {
DieInside("MemberFunction");
}
- // True iff MemberFunction() should die.
+ // True if MemberFunction() should die.
bool should_die_;
const FilePath original_dir_;
};
@@ -156,7 +158,7 @@ class MayDie {
}
private:
- // True iff MemberFunction() should die.
+ // True if MemberFunction() should die.
bool should_die_;
};
@@ -202,6 +204,26 @@ int DieInDebugElse12(int* sideeffect) {
return 12;
}
+# if GTEST_OS_WINDOWS
+
+// Death in dbg due to Windows CRT assertion failure, not opt.
+int DieInCRTDebugElse12(int* sideeffect) {
+ if (sideeffect) *sideeffect = 12;
+
+ // Create an invalid fd by closing a valid one
+ int fdpipe[2];
+ EXPECT_EQ(_pipe(fdpipe, 256, O_BINARY), 0);
+ EXPECT_EQ(_close(fdpipe[0]), 0);
+ EXPECT_EQ(_close(fdpipe[1]), 0);
+
+ // _dup() should crash in debug mode
+ EXPECT_EQ(_dup(fdpipe[0]), -1);
+
+ return 12;
+}
+
+#endif // GTEST_OS_WINDOWS
+
# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA
// Tests the ExitedWithCode predicate.
@@ -493,17 +515,6 @@ TEST_F(TestForDeathTest, AcceptsAnythingConvertibleToRE) {
const testing::internal::RE regex(regex_c_str);
EXPECT_DEATH(GlobalFunction(), regex);
-# if GTEST_HAS_GLOBAL_STRING
-
- const ::string regex_str(regex_c_str);
- EXPECT_DEATH(GlobalFunction(), regex_str);
-
- // This one is tricky; a temporary pointer into another temporary. Reference
- // lifetime extension of the pointer is not sufficient.
- EXPECT_DEATH(GlobalFunction(), ::string(regex_c_str).c_str());
-
-# endif // GTEST_HAS_GLOBAL_STRING
-
# if !GTEST_USES_PCRE
const ::std::string regex_std_str(regex_c_str);
@@ -562,7 +573,7 @@ TEST_F(TestForDeathTest, ErrorMessageMismatch) {
}, "died but not with expected error");
}
-// On exit, *aborted will be true iff the EXPECT_DEATH() statement
+// On exit, *aborted will be true if the EXPECT_DEATH() statement
// aborted the function.
void ExpectDeathTestHelper(bool* aborted) {
*aborted = true;
@@ -643,6 +654,40 @@ TEST_F(TestForDeathTest, TestExpectDebugDeath) {
# endif
}
+# if GTEST_OS_WINDOWS
+
+// Tests that EXPECT_DEBUG_DEATH works as expected when in debug mode
+// the Windows CRT crashes the process with an assertion failure.
+// 1. Asserts on death.
+// 2. Has no side effect (doesn't pop up a window or wait for user input).
+//
+// And in opt mode, it:
+// 1. Has side effects but does not assert.
+TEST_F(TestForDeathTest, CRTDebugDeath) {
+ int sideeffect = 0;
+
+ // Put the regex in a local variable to make sure we don't get an "unused"
+ // warning in opt mode.
+ const char* regex = "dup.* : Assertion failed";
+
+ EXPECT_DEBUG_DEATH(DieInCRTDebugElse12(&sideeffect), regex)
+ << "Must accept a streamed message";
+
+# ifdef NDEBUG
+
+ // Checks that the assignment occurs in opt mode (sideeffect).
+ EXPECT_EQ(12, sideeffect);
+
+# else
+
+ // Checks that the assignment does not occur in dbg mode (no sideeffect).
+ EXPECT_EQ(0, sideeffect);
+
+# endif
+}
+
+# endif // GTEST_OS_WINDOWS
+
// Tests that ASSERT_DEBUG_DEATH works as expected, that is, you can stream a
// message to it, and in debug mode it:
// 1. Asserts on death.
@@ -895,10 +940,12 @@ class MockDeathTestFactory : public DeathTestFactory {
int AssumeRoleCalls() const { return assume_role_calls_; }
int WaitCalls() const { return wait_calls_; }
size_t PassedCalls() const { return passed_args_.size(); }
- bool PassedArgument(int n) const { return passed_args_[n]; }
+ bool PassedArgument(int n) const {
+ return passed_args_[static_cast<size_t>(n)];
+ }
size_t AbortCalls() const { return abort_args_.size(); }
DeathTest::AbortReason AbortArgument(int n) const {
- return abort_args_[n];
+ return abort_args_[static_cast<size_t>(n)];
}
bool TestDeleted() const { return test_deleted_; }
@@ -1017,12 +1064,12 @@ class MacroLogicDeathTest : public testing::Test {
static testing::internal::ReplaceDeathTestFactory* replacer_;
static MockDeathTestFactory* factory_;
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
factory_ = new MockDeathTestFactory;
replacer_ = new testing::internal::ReplaceDeathTestFactory(factory_);
}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
delete replacer_;
replacer_ = nullptr;
delete factory_;
@@ -1281,9 +1328,6 @@ TEST(ParseNaturalNumberTest, WorksForShorterIntegers) {
# if GTEST_OS_WINDOWS
TEST(EnvironmentTest, HandleFitsIntoSizeT) {
- // FIXME: Remove this test after this condition is verified
- // in a static assertion in gtest-death-test.cc in the function
- // GetStatusFileDescriptor.
ASSERT_TRUE(sizeof(HANDLE) <= sizeof(size_t));
}
# endif // GTEST_OS_WINDOWS
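Note: the new CRTDebugDeath test follows the same dbg/opt split as the existing DieInDebugElse12 coverage. Outside this patch, the general EXPECT_DEBUG_DEATH pattern looks roughly like the sketch below (hypothetical helper and regex; requires a platform with death-test support):

#include <cassert>
#include "gtest/gtest.h"

// Hypothetical helper: dies on assert in debug builds, returns 7 in opt builds.
int CrashInDebugElse7(int* side_effect) {
  if (side_effect) *side_effect = 7;
  assert(false && "debug-only crash");  // compiled out when NDEBUG is defined
  return 7;
}

TEST(DebugDeathSketch, DiesOnlyInDebugBuilds) {
  int side_effect = 0;
  // The regex is matched against the assert diagnostic, which includes the
  // stringified expression on common C libraries.
  EXPECT_DEBUG_DEATH(CrashInDebugElse7(&side_effect), "debug-only crash");
#ifdef NDEBUG
  EXPECT_EQ(7, side_effect);  // opt: the statement ran in-process
#else
  EXPECT_EQ(0, side_effect);  // dbg: the child process died before returning
#endif
}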
diff --git a/googletest/test/googletest-death-test_ex_test.cc b/googletest/test/googletest-death-test_ex_test.cc
index cf0d970..7ea5b94 100644
--- a/googletest/test/googletest-death-test_ex_test.cc
+++ b/googletest/test/googletest-death-test_ex_test.cc
@@ -68,7 +68,7 @@ TEST(CxxExceptionDeathTest, PrintsMessageForStdExceptions) {
"exceptional message");
// Verifies that the location is mentioned in the failure text.
EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""),
- "googletest-death-test_ex_test.cc");
+ __FILE__);
}
# endif // GTEST_HAS_EXCEPTIONS
diff --git a/googletest/test/googletest-filepath-test.cc b/googletest/test/googletest-filepath-test.cc
index 674799a..aafad36 100644
--- a/googletest/test/googletest-filepath-test.cc
+++ b/googletest/test/googletest-filepath-test.cc
@@ -50,8 +50,6 @@ namespace internal {
namespace {
#if GTEST_OS_WINDOWS_MOBILE
-// FIXME: Move these to the POSIX adapter section in
-// gtest-port.h.
// Windows CE doesn't have the remove C function.
int remove(const char* path) {
diff --git a/googletest/test/googletest-filter-unittest_.cc b/googletest/test/googletest-filter-unittest_.cc
index d335b60..d30ec9c 100644
--- a/googletest/test/googletest-filter-unittest_.cc
+++ b/googletest/test/googletest-filter-unittest_.cc
@@ -125,8 +125,8 @@ TEST_P(ParamTest, TestX) {
TEST_P(ParamTest, TestY) {
}
-INSTANTIATE_TEST_CASE_P(SeqP, ParamTest, testing::Values(1, 2));
-INSTANTIATE_TEST_CASE_P(SeqQ, ParamTest, testing::Values(5, 6));
+INSTANTIATE_TEST_SUITE_P(SeqP, ParamTest, testing::Values(1, 2));
+INSTANTIATE_TEST_SUITE_P(SeqQ, ParamTest, testing::Values(5, 6));
} // namespace
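Note: this file only swaps INSTANTIATE_TEST_CASE_P for INSTANTIATE_TEST_SUITE_P. For context, a complete value-parameterized test under the new macro name (illustrative sketch, not taken from the patch):

#include "gtest/gtest.h"

class EvenTest : public ::testing::TestWithParam<int> {};

TEST_P(EvenTest, IsEven) { EXPECT_EQ(0, GetParam() % 2); }

// Old spelling: INSTANTIATE_TEST_CASE_P(Seq, EvenTest, ...)
INSTANTIATE_TEST_SUITE_P(Seq, EvenTest, ::testing::Values(2, 4, 6));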
diff --git a/googletest/test/googletest-json-outfiles-test.py b/googletest/test/googletest-json-outfiles-test.py
index c99be48..8ef47b8 100644
--- a/googletest/test/googletest-json-outfiles-test.py
+++ b/googletest/test/googletest-json-outfiles-test.py
@@ -40,24 +40,41 @@ GTEST_OUTPUT_1_TEST = 'gtest_xml_outfile1_test_'
GTEST_OUTPUT_2_TEST = 'gtest_xml_outfile2_test_'
EXPECTED_1 = {
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
- u'time': u'*',
- u'timestamp': u'*',
- u'name': u'AllTests',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'name':
+ u'AllTests',
u'testsuites': [{
- u'name': u'PropertyOne',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
- u'time': u'*',
+ u'name':
+ u'PropertyOne',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
u'testsuite': [{
u'name': u'TestSomeProperties',
u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
+ u'timestamp': u'*',
u'classname': u'PropertyOne',
u'SetUpProp': u'1',
u'TestSomeProperty': u'1',
@@ -67,23 +84,40 @@ EXPECTED_1 = {
}
EXPECTED_2 = {
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
- u'time': u'*',
- u'timestamp': u'*',
- u'name': u'AllTests',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'name':
+ u'AllTests',
u'testsuites': [{
- u'name': u'PropertyTwo',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
- u'time': u'*',
+ u'name':
+ u'PropertyTwo',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
u'testsuite': [{
u'name': u'TestSomeProperties',
u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'timestamp': u'*',
u'time': u'*',
u'classname': u'PropertyTwo',
u'SetUpProp': u'2',
@@ -136,11 +170,6 @@ class GTestJsonOutFilesTest(gtest_test_utils.TestCase):
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
- # FIXME: libtool causes the built test binary to be
- # named lt-gtest_xml_outfiles_test_ instead of
- # gtest_xml_outfiles_test_. To account for this possibility, we
- # allow both names in the following code. We should remove this
- # when libtool replacement tool is ready.
output_file_name1 = test_name + '.json'
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
diff --git a/googletest/test/googletest-json-output-unittest.py b/googletest/test/googletest-json-output-unittest.py
index b09b590..15861f7 100644
--- a/googletest/test/googletest-json-output-unittest.py
+++ b/googletest/test/googletest-json-output-unittest.py
@@ -57,386 +57,530 @@ else:
STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY = {
- u'tests': 24,
- u'failures': 4,
- u'disabled': 2,
- u'errors': 0,
- u'timestamp': u'*',
- u'time': u'*',
- u'ad_hoc_property': u'42',
- u'name': u'AllTests',
- u'testsuites': [
- {
- u'name': u'SuccessfulTest',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'tests':
+ 24,
+ u'failures':
+ 4,
+ u'disabled':
+ 2,
+ u'errors':
+ 0,
+ u'timestamp':
+ u'*',
+ u'time':
+ u'*',
+ u'ad_hoc_property':
+ u'42',
+ u'name':
+ u'AllTests',
+ u'testsuites': [{
+ u'name':
+ u'SuccessfulTest',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'Succeeds',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'Succeeds',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'SuccessfulTest'
- }
- ]
- },
- {
- u'name': u'FailedTest',
- u'tests': 1,
- u'failures': 1,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'SuccessfulTest'
+ }]
+ }, {
+ u'name':
+ u'FailedTest',
+ u'tests':
+ 1,
+ u'failures':
+ 1,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name':
+ u'Fails',
+ u'status':
+ u'RUN',
+ u'result':
+ u'COMPLETED',
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'classname':
+ u'FailedTest',
+ u'failures': [{
+ u'failure': u'gtest_xml_output_unittest_.cc:*\n'
+ u'Expected equality of these values:\n'
+ u' 1\n 2' + STACK_TRACE_TEMPLATE,
+ u'type': u''
+ }]
+ }]
+ }, {
+ u'name':
+ u'DisabledTest',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 1,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'DISABLED_test_not_run',
+ u'status': u'NOTRUN',
+ u'result': u'SUPPRESSED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'Fails',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'FailedTest',
- u'failures': [
- {
- u'failure':
- u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 1\n 2' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }
- ]
- }
- ]
- },
- {
- u'name': u'DisabledTest',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 1,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'DisabledTest'
+ }]
+ }, {
+ u'name':
+ u'SkippedTest',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'Skipped',
+ u'status': u'RUN',
+ u'result': u'SKIPPED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'DISABLED_test_not_run',
- u'status': u'NOTRUN',
- u'time': u'*',
- u'classname': u'DisabledTest'
- }
- ]
- },
- {
- u'name': u'SkippedTest',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'SkippedTest'
+ }]
+ }, {
+ u'name':
+ u'MixedResultTest',
+ u'tests':
+ 3,
+ u'failures':
+ 1,
+ u'disabled':
+ 1,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'Succeeds',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'Skipped',
- u'status': u'SKIPPED',
- u'time': u'*',
- u'classname': u'SkippedTest'
- }
- ]
- },
- {
- u'name': u'MixedResultTest',
- u'tests': 3,
- u'failures': 1,
- u'disabled': 1,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'MixedResultTest'
+ }, {
+ u'name':
+ u'Fails',
+ u'status':
+ u'RUN',
+ u'result':
+ u'COMPLETED',
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'classname':
+ u'MixedResultTest',
+ u'failures': [{
+ u'failure': u'gtest_xml_output_unittest_.cc:*\n'
+ u'Expected equality of these values:\n'
+ u' 1\n 2' + STACK_TRACE_TEMPLATE,
+ u'type': u''
+ }, {
+ u'failure': u'gtest_xml_output_unittest_.cc:*\n'
+ u'Expected equality of these values:\n'
+ u' 2\n 3' + STACK_TRACE_TEMPLATE,
+ u'type': u''
+ }]
+ }, {
+ u'name': u'DISABLED_test',
+ u'status': u'NOTRUN',
+ u'result': u'SUPPRESSED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'Succeeds',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'MixedResultTest'
- },
- {
- u'name': u'Fails',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'MixedResultTest',
- u'failures': [
- {
- u'failure':
- u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 1\n 2' + STACK_TRACE_TEMPLATE,
- u'type': u''
- },
- {
- u'failure':
- u'gtest_xml_output_unittest_.cc:*\n'
- u'Expected equality of these values:\n'
- u' 2\n 3' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }
- ]
- },
- {
- u'name': u'DISABLED_test',
- u'status': u'NOTRUN',
- u'time': u'*',
- u'classname': u'MixedResultTest'
- }
- ]
- },
- {
- u'name': u'XmlQuotingTest',
- u'tests': 1,
- u'failures': 1,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'MixedResultTest'
+ }]
+ }, {
+ u'name':
+ u'XmlQuotingTest',
+ u'tests':
+ 1,
+ u'failures':
+ 1,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name':
+ u'OutputsCData',
+ u'status':
+ u'RUN',
+ u'result':
+ u'COMPLETED',
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'classname':
+ u'XmlQuotingTest',
+ u'failures': [{
+ u'failure': u'gtest_xml_output_unittest_.cc:*\n'
+ u'Failed\nXML output: <?xml encoding="utf-8">'
+ u'<top><![CDATA[cdata text]]></top>' +
+ STACK_TRACE_TEMPLATE,
+ u'type': u''
+ }]
+ }]
+ }, {
+ u'name':
+ u'InvalidCharactersTest',
+ u'tests':
+ 1,
+ u'failures':
+ 1,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name':
+ u'InvalidCharactersInMessage',
+ u'status':
+ u'RUN',
+ u'result':
+ u'COMPLETED',
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'classname':
+ u'InvalidCharactersTest',
+ u'failures': [{
+ u'failure': u'gtest_xml_output_unittest_.cc:*\n'
+ u'Failed\nInvalid characters in brackets'
+ u' [\x01\x02]' + STACK_TRACE_TEMPLATE,
+ u'type': u''
+ }]
+ }]
+ }, {
+ u'name':
+ u'PropertyRecordingTest',
+ u'tests':
+ 4,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'SetUpTestSuite':
+ u'yes',
+ u'TearDownTestSuite':
+ u'aye',
+ u'testsuite': [{
+ u'name': u'OneProperty',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'OutputsCData',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'XmlQuotingTest',
- u'failures': [
- {
- u'failure':
- u'gtest_xml_output_unittest_.cc:*\n'
- u'Failed\nXML output: <?xml encoding="utf-8">'
- u'<top><![CDATA[cdata text]]></top>' +
- STACK_TRACE_TEMPLATE,
- u'type': u''
- }
- ]
- }
- ]
- },
- {
- u'name': u'InvalidCharactersTest',
- u'tests': 1,
- u'failures': 1,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'PropertyRecordingTest',
+ u'key_1': u'1'
+ }, {
+ u'name': u'IntValuedProperty',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'InvalidCharactersInMessage',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'InvalidCharactersTest',
- u'failures': [
- {
- u'failure':
- u'gtest_xml_output_unittest_.cc:*\n'
- u'Failed\nInvalid characters in brackets'
- u' [\x01\x02]' + STACK_TRACE_TEMPLATE,
- u'type': u''
- }
- ]
- }
- ]
- },
- {
- u'name': u'PropertyRecordingTest',
- u'tests': 4,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'PropertyRecordingTest',
+ u'key_int': u'1'
+ }, {
+ u'name': u'ThreeProperties',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'SetUpTestCase': u'yes',
- u'TearDownTestCase': u'aye',
- u'testsuite': [
- {
- u'name': u'OneProperty',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_1': u'1'
- },
- {
- u'name': u'IntValuedProperty',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_int': u'1'
- },
- {
- u'name': u'ThreeProperties',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_1': u'1',
- u'key_2': u'2',
- u'key_3': u'3'
- },
- {
- u'name': u'TwoValuesForOneKeyUsesLastValue',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'PropertyRecordingTest',
- u'key_1': u'2'
- }
- ]
- },
- {
- u'name': u'NoFixtureTest',
- u'tests': 3,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'PropertyRecordingTest',
+ u'key_1': u'1',
+ u'key_2': u'2',
+ u'key_3': u'3'
+ }, {
+ u'name': u'TwoValuesForOneKeyUsesLastValue',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'RecordProperty',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'NoFixtureTest',
- u'key': u'1'
- },
- {
- u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'NoFixtureTest',
- u'key_for_utility_int': u'1'
- },
- {
- u'name':
- u'ExternalUtilityThatCallsRecordStringValuedProperty',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'NoFixtureTest',
- u'key_for_utility_string': u'1'
- }
- ]
- },
- {
- u'name': u'TypedTest/0',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'PropertyRecordingTest',
+ u'key_1': u'2'
+ }]
+ }, {
+ u'name':
+ u'NoFixtureTest',
+ u'tests':
+ 3,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'RecordProperty',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'int',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'TypedTest/0'
- }
- ]
- },
- {
- u'name': u'TypedTest/1',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'NoFixtureTest',
+ u'key': u'1'
+ }, {
+ u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'long',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'TypedTest/1'
- }
- ]
- },
- {
- u'name': u'Single/TypeParameterizedTestCase/0',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'NoFixtureTest',
+ u'key_for_utility_int': u'1'
+ }, {
+ u'name': u'ExternalUtilityThatCallsRecordStringValuedProperty',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'time': u'*',
+ u'timestamp': u'*',
+ u'classname': u'NoFixtureTest',
+ u'key_for_utility_string': u'1'
+ }]
+ }, {
+ u'name':
+ u'TypedTest/0',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'HasTypeParamAttribute',
+ u'type_param': u'int',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'time': u'*',
+ u'timestamp': u'*',
+ u'classname': u'TypedTest/0'
+ }]
+ }, {
+ u'name':
+ u'TypedTest/1',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'HasTypeParamAttribute',
+ u'type_param': u'long',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'time': u'*',
+ u'timestamp': u'*',
+ u'classname': u'TypedTest/1'
+ }]
+ }, {
+ u'name':
+ u'Single/TypeParameterizedTestSuite/0',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'HasTypeParamAttribute',
+ u'type_param': u'int',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'time': u'*',
+ u'timestamp': u'*',
+ u'classname': u'Single/TypeParameterizedTestSuite/0'
+ }]
+ }, {
+ u'name':
+ u'Single/TypeParameterizedTestSuite/1',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'HasTypeParamAttribute',
+ u'type_param': u'long',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'time': u'*',
+ u'timestamp': u'*',
+ u'classname': u'Single/TypeParameterizedTestSuite/1'
+ }]
+ }, {
+ u'name':
+ u'Single/ValueParamTest',
+ u'tests':
+ 4,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'testsuite': [{
+ u'name': u'HasValueParamAttribute/0',
+ u'value_param': u'33',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
+ u'time': u'*',
+ u'timestamp': u'*',
+ u'classname': u'Single/ValueParamTest'
+ }, {
+ u'name': u'HasValueParamAttribute/1',
+ u'value_param': u'42',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'int',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'Single/TypeParameterizedTestCase/0'
- }
- ]
- },
- {
- u'name': u'Single/TypeParameterizedTestCase/1',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'Single/ValueParamTest'
+ }, {
+ u'name': u'AnotherTestThatHasValueParamAttribute/0',
+ u'value_param': u'33',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'HasTypeParamAttribute',
- u'type_param': u'long',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'Single/TypeParameterizedTestCase/1'
- }
- ]
- },
- {
- u'name': u'Single/ValueParamTest',
- u'tests': 4,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
+ u'timestamp': u'*',
+ u'classname': u'Single/ValueParamTest'
+ }, {
+ u'name': u'AnotherTestThatHasValueParamAttribute/1',
+ u'value_param': u'42',
+ u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
- u'testsuite': [
- {
- u'name': u'HasValueParamAttribute/0',
- u'value_param': u'33',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'Single/ValueParamTest'
- },
- {
- u'name': u'HasValueParamAttribute/1',
- u'value_param': u'42',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'Single/ValueParamTest'
- },
- {
- u'name': u'AnotherTestThatHasValueParamAttribute/0',
- u'value_param': u'33',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'Single/ValueParamTest'
- },
- {
- u'name': u'AnotherTestThatHasValueParamAttribute/1',
- u'value_param': u'42',
- u'status': u'RUN',
- u'time': u'*',
- u'classname': u'Single/ValueParamTest'
- }
- ]
- }
- ]
+ u'timestamp': u'*',
+ u'classname': u'Single/ValueParamTest'
+ }]
+ }]
}
EXPECTED_FILTERED = {
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
- u'time': u'*',
- u'timestamp': u'*',
- u'name': u'AllTests',
- u'ad_hoc_property': u'42',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
+ u'name':
+ u'AllTests',
+ u'ad_hoc_property':
+ u'42',
u'testsuites': [{
- u'name': u'SuccessfulTest',
- u'tests': 1,
- u'failures': 0,
- u'disabled': 0,
- u'errors': 0,
- u'time': u'*',
+ u'name':
+ u'SuccessfulTest',
+ u'tests':
+ 1,
+ u'failures':
+ 0,
+ u'disabled':
+ 0,
+ u'errors':
+ 0,
+ u'time':
+ u'*',
+ u'timestamp':
+ u'*',
u'testsuite': [{
u'name': u'Succeeds',
u'status': u'RUN',
+ u'result': u'COMPLETED',
u'time': u'*',
+ u'timestamp': u'*',
u'classname': u'SuccessfulTest',
}]
}],
diff --git a/googletest/test/googletest-list-tests-unittest_.cc b/googletest/test/googletest-list-tests-unittest_.cc
index f473c7d..493c6f0 100644
--- a/googletest/test/googletest-list-tests-unittest_.cc
+++ b/googletest/test/googletest-list-tests-unittest_.cc
@@ -99,7 +99,7 @@ TEST_P(ValueParamTest, TestA) {
TEST_P(ValueParamTest, TestB) {
}
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
MyInstantiation, ValueParamTest,
testing::Values(MyType("one line"),
MyType("two\nlines"),
@@ -123,7 +123,7 @@ class MyArray {
typedef testing::Types<VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName, // NOLINT
int*, MyArray<bool, 42> > MyTypes;
-TYPED_TEST_CASE(TypedTest, MyTypes);
+TYPED_TEST_SUITE(TypedTest, MyTypes);
TYPED_TEST(TypedTest, TestA) {
}
@@ -137,7 +137,7 @@ template <typename T>
class TypeParamTest : public testing::Test {
};
-TYPED_TEST_CASE_P(TypeParamTest);
+TYPED_TEST_SUITE_P(TypeParamTest);
TYPED_TEST_P(TypeParamTest, TestA) {
}
@@ -145,9 +145,9 @@ TYPED_TEST_P(TypeParamTest, TestA) {
TYPED_TEST_P(TypeParamTest, TestB) {
}
-REGISTER_TYPED_TEST_CASE_P(TypeParamTest, TestA, TestB);
+REGISTER_TYPED_TEST_SUITE_P(TypeParamTest, TestA, TestB);
-INSTANTIATE_TYPED_TEST_CASE_P(My, TypeParamTest, MyTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, TypeParamTest, MyTypes);
int main(int argc, char **argv) {
::testing::InitGoogleTest(&argc, argv);
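Note: the typed-test macros get the same rename, TYPED_TEST_CASE becoming TYPED_TEST_SUITE along with the _P registration and instantiation variants. A minimal typed test under the new names, for reference only:

#include "gtest/gtest.h"

template <typename T>
class SizeTest : public ::testing::Test {};

using MyTypes = ::testing::Types<int, long>;
TYPED_TEST_SUITE(SizeTest, MyTypes);  // formerly TYPED_TEST_CASE

TYPED_TEST(SizeTest, IsAtLeastOneByte) {
  EXPECT_GE(sizeof(TypeParam), 1u);
}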
diff --git a/googletest/test/googletest-listener-test.cc b/googletest/test/googletest-listener-test.cc
index 1f5f5c5..10457af 100644
--- a/googletest/test/googletest-listener-test.cc
+++ b/googletest/test/googletest-listener-test.cc
@@ -35,12 +35,13 @@
#include <vector>
#include "gtest/gtest.h"
+#include "gtest/internal/custom/gtest.h"
using ::testing::AddGlobalTestEnvironment;
using ::testing::Environment;
using ::testing::InitGoogleTest;
using ::testing::Test;
-using ::testing::TestCase;
+using ::testing::TestSuite;
using ::testing::TestEventListener;
using ::testing::TestInfo;
using ::testing::TestPartResult;
@@ -76,10 +77,11 @@ class EventRecordingListener : public TestEventListener {
void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {
g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpEnd"));
}
-
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnTestCaseStart(const TestCase& /*test_case*/) override {
g_events->push_back(GetFullMethodName("OnTestCaseStart"));
}
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnTestStart(const TestInfo& /*test_info*/) override {
g_events->push_back(GetFullMethodName("OnTestStart"));
@@ -93,9 +95,11 @@ class EventRecordingListener : public TestEventListener {
g_events->push_back(GetFullMethodName("OnTestEnd"));
}
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnTestCaseEnd(const TestCase& /*test_case*/) override {
g_events->push_back(GetFullMethodName("OnTestCaseEnd"));
}
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {
g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownStart"));
@@ -125,6 +129,78 @@ class EventRecordingListener : public TestEventListener {
std::string name_;
};
+// This listener is using OnTestSuiteStart, OnTestSuiteEnd API
+class EventRecordingListener2 : public TestEventListener {
+ public:
+ explicit EventRecordingListener2(const char* name) : name_(name) {}
+
+ protected:
+ void OnTestProgramStart(const UnitTest& /*unit_test*/) override {
+ g_events->push_back(GetFullMethodName("OnTestProgramStart"));
+ }
+
+ void OnTestIterationStart(const UnitTest& /*unit_test*/,
+ int iteration) override {
+ Message message;
+ message << GetFullMethodName("OnTestIterationStart") << "(" << iteration
+ << ")";
+ g_events->push_back(message.GetString());
+ }
+
+ void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpStart"));
+ }
+
+ void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpEnd"));
+ }
+
+ void OnTestSuiteStart(const TestSuite& /*test_suite*/) override {
+ g_events->push_back(GetFullMethodName("OnTestSuiteStart"));
+ }
+
+ void OnTestStart(const TestInfo& /*test_info*/) override {
+ g_events->push_back(GetFullMethodName("OnTestStart"));
+ }
+
+ void OnTestPartResult(const TestPartResult& /*test_part_result*/) override {
+ g_events->push_back(GetFullMethodName("OnTestPartResult"));
+ }
+
+ void OnTestEnd(const TestInfo& /*test_info*/) override {
+ g_events->push_back(GetFullMethodName("OnTestEnd"));
+ }
+
+ void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override {
+ g_events->push_back(GetFullMethodName("OnTestSuiteEnd"));
+ }
+
+ void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownStart"));
+ }
+
+ void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {
+ g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownEnd"));
+ }
+
+ void OnTestIterationEnd(const UnitTest& /*unit_test*/,
+ int iteration) override {
+ Message message;
+ message << GetFullMethodName("OnTestIterationEnd") << "(" << iteration
+ << ")";
+ g_events->push_back(message.GetString());
+ }
+
+ void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {
+ g_events->push_back(GetFullMethodName("OnTestProgramEnd"));
+ }
+
+ private:
+ std::string GetFullMethodName(const char* name) { return name_ + "." + name; }
+
+ std::string name_;
+};
+
class EnvironmentInvocationCatcher : public Environment {
protected:
void SetUp() override { g_events->push_back("Environment::SetUp"); }
@@ -134,12 +210,12 @@ class EnvironmentInvocationCatcher : public Environment {
class ListenerTest : public Test {
protected:
- static void SetUpTestCase() {
- g_events->push_back("ListenerTest::SetUpTestCase");
+ static void SetUpTestSuite() {
+ g_events->push_back("ListenerTest::SetUpTestSuite");
}
- static void TearDownTestCase() {
- g_events->push_back("ListenerTest::TearDownTestCase");
+ static void TearDownTestSuite() {
+ g_events->push_back("ListenerTest::TearDownTestSuite");
}
void SetUp() override { g_events->push_back("ListenerTest::SetUp"); }
@@ -165,6 +241,7 @@ TEST_F(ListenerTest, DoesBar) {
using ::testing::internal::EnvironmentInvocationCatcher;
using ::testing::internal::EventRecordingListener;
+using ::testing::internal::EventRecordingListener2;
void VerifyResults(const std::vector<std::string>& data,
const char* const* expected_data,
@@ -199,6 +276,8 @@ int main(int argc, char **argv) {
new EventRecordingListener("1st"));
UnitTest::GetInstance()->listeners().Append(
new EventRecordingListener("2nd"));
+ UnitTest::GetInstance()->listeners().Append(
+ new EventRecordingListener2("3rd"));
AddGlobalTestEnvironment(new EnvironmentInvocationCatcher);
@@ -208,88 +287,224 @@ int main(int argc, char **argv) {
::testing::GTEST_FLAG(repeat) = 2;
int ret_val = RUN_ALL_TESTS();
- const char* const expected_events[] = {
- "1st.OnTestProgramStart",
- "2nd.OnTestProgramStart",
- "1st.OnTestIterationStart(0)",
- "2nd.OnTestIterationStart(0)",
- "1st.OnEnvironmentsSetUpStart",
- "2nd.OnEnvironmentsSetUpStart",
- "Environment::SetUp",
- "2nd.OnEnvironmentsSetUpEnd",
- "1st.OnEnvironmentsSetUpEnd",
- "1st.OnTestCaseStart",
- "2nd.OnTestCaseStart",
- "ListenerTest::SetUpTestCase",
- "1st.OnTestStart",
- "2nd.OnTestStart",
- "ListenerTest::SetUp",
- "ListenerTest::* Test Body",
- "1st.OnTestPartResult",
- "2nd.OnTestPartResult",
- "ListenerTest::TearDown",
- "2nd.OnTestEnd",
- "1st.OnTestEnd",
- "1st.OnTestStart",
- "2nd.OnTestStart",
- "ListenerTest::SetUp",
- "ListenerTest::* Test Body",
- "1st.OnTestPartResult",
- "2nd.OnTestPartResult",
- "ListenerTest::TearDown",
- "2nd.OnTestEnd",
- "1st.OnTestEnd",
- "ListenerTest::TearDownTestCase",
- "2nd.OnTestCaseEnd",
- "1st.OnTestCaseEnd",
- "1st.OnEnvironmentsTearDownStart",
- "2nd.OnEnvironmentsTearDownStart",
- "Environment::TearDown",
- "2nd.OnEnvironmentsTearDownEnd",
- "1st.OnEnvironmentsTearDownEnd",
- "2nd.OnTestIterationEnd(0)",
- "1st.OnTestIterationEnd(0)",
- "1st.OnTestIterationStart(1)",
- "2nd.OnTestIterationStart(1)",
- "1st.OnEnvironmentsSetUpStart",
- "2nd.OnEnvironmentsSetUpStart",
- "Environment::SetUp",
- "2nd.OnEnvironmentsSetUpEnd",
- "1st.OnEnvironmentsSetUpEnd",
- "1st.OnTestCaseStart",
- "2nd.OnTestCaseStart",
- "ListenerTest::SetUpTestCase",
- "1st.OnTestStart",
- "2nd.OnTestStart",
- "ListenerTest::SetUp",
- "ListenerTest::* Test Body",
- "1st.OnTestPartResult",
- "2nd.OnTestPartResult",
- "ListenerTest::TearDown",
- "2nd.OnTestEnd",
- "1st.OnTestEnd",
- "1st.OnTestStart",
- "2nd.OnTestStart",
- "ListenerTest::SetUp",
- "ListenerTest::* Test Body",
- "1st.OnTestPartResult",
- "2nd.OnTestPartResult",
- "ListenerTest::TearDown",
- "2nd.OnTestEnd",
- "1st.OnTestEnd",
- "ListenerTest::TearDownTestCase",
- "2nd.OnTestCaseEnd",
- "1st.OnTestCaseEnd",
- "1st.OnEnvironmentsTearDownStart",
- "2nd.OnEnvironmentsTearDownStart",
- "Environment::TearDown",
- "2nd.OnEnvironmentsTearDownEnd",
- "1st.OnEnvironmentsTearDownEnd",
- "2nd.OnTestIterationEnd(1)",
- "1st.OnTestIterationEnd(1)",
- "2nd.OnTestProgramEnd",
- "1st.OnTestProgramEnd"
- };
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+
+ // The deprecated OnTestSuiteStart/OnTestCaseStart events are included
+ const char* const expected_events[] = {"1st.OnTestProgramStart",
+ "2nd.OnTestProgramStart",
+ "3rd.OnTestProgramStart",
+ "1st.OnTestIterationStart(0)",
+ "2nd.OnTestIterationStart(0)",
+ "3rd.OnTestIterationStart(0)",
+ "1st.OnEnvironmentsSetUpStart",
+ "2nd.OnEnvironmentsSetUpStart",
+ "3rd.OnEnvironmentsSetUpStart",
+ "Environment::SetUp",
+ "3rd.OnEnvironmentsSetUpEnd",
+ "2nd.OnEnvironmentsSetUpEnd",
+ "1st.OnEnvironmentsSetUpEnd",
+ "3rd.OnTestSuiteStart",
+ "1st.OnTestCaseStart",
+ "2nd.OnTestCaseStart",
+ "ListenerTest::SetUpTestSuite",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "ListenerTest::TearDownTestSuite",
+ "3rd.OnTestSuiteEnd",
+ "2nd.OnTestCaseEnd",
+ "1st.OnTestCaseEnd",
+ "1st.OnEnvironmentsTearDownStart",
+ "2nd.OnEnvironmentsTearDownStart",
+ "3rd.OnEnvironmentsTearDownStart",
+ "Environment::TearDown",
+ "3rd.OnEnvironmentsTearDownEnd",
+ "2nd.OnEnvironmentsTearDownEnd",
+ "1st.OnEnvironmentsTearDownEnd",
+ "3rd.OnTestIterationEnd(0)",
+ "2nd.OnTestIterationEnd(0)",
+ "1st.OnTestIterationEnd(0)",
+ "1st.OnTestIterationStart(1)",
+ "2nd.OnTestIterationStart(1)",
+ "3rd.OnTestIterationStart(1)",
+ "1st.OnEnvironmentsSetUpStart",
+ "2nd.OnEnvironmentsSetUpStart",
+ "3rd.OnEnvironmentsSetUpStart",
+ "Environment::SetUp",
+ "3rd.OnEnvironmentsSetUpEnd",
+ "2nd.OnEnvironmentsSetUpEnd",
+ "1st.OnEnvironmentsSetUpEnd",
+ "3rd.OnTestSuiteStart",
+ "1st.OnTestCaseStart",
+ "2nd.OnTestCaseStart",
+ "ListenerTest::SetUpTestSuite",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "ListenerTest::TearDownTestSuite",
+ "3rd.OnTestSuiteEnd",
+ "2nd.OnTestCaseEnd",
+ "1st.OnTestCaseEnd",
+ "1st.OnEnvironmentsTearDownStart",
+ "2nd.OnEnvironmentsTearDownStart",
+ "3rd.OnEnvironmentsTearDownStart",
+ "Environment::TearDown",
+ "3rd.OnEnvironmentsTearDownEnd",
+ "2nd.OnEnvironmentsTearDownEnd",
+ "1st.OnEnvironmentsTearDownEnd",
+ "3rd.OnTestIterationEnd(1)",
+ "2nd.OnTestIterationEnd(1)",
+ "1st.OnTestIterationEnd(1)",
+ "3rd.OnTestProgramEnd",
+ "2nd.OnTestProgramEnd",
+ "1st.OnTestProgramEnd"};
+#else
+ const char* const expected_events[] = {"1st.OnTestProgramStart",
+ "2nd.OnTestProgramStart",
+ "3rd.OnTestProgramStart",
+ "1st.OnTestIterationStart(0)",
+ "2nd.OnTestIterationStart(0)",
+ "3rd.OnTestIterationStart(0)",
+ "1st.OnEnvironmentsSetUpStart",
+ "2nd.OnEnvironmentsSetUpStart",
+ "3rd.OnEnvironmentsSetUpStart",
+ "Environment::SetUp",
+ "3rd.OnEnvironmentsSetUpEnd",
+ "2nd.OnEnvironmentsSetUpEnd",
+ "1st.OnEnvironmentsSetUpEnd",
+ "3rd.OnTestSuiteStart",
+ "ListenerTest::SetUpTestSuite",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "ListenerTest::TearDownTestSuite",
+ "3rd.OnTestSuiteEnd",
+ "1st.OnEnvironmentsTearDownStart",
+ "2nd.OnEnvironmentsTearDownStart",
+ "3rd.OnEnvironmentsTearDownStart",
+ "Environment::TearDown",
+ "3rd.OnEnvironmentsTearDownEnd",
+ "2nd.OnEnvironmentsTearDownEnd",
+ "1st.OnEnvironmentsTearDownEnd",
+ "3rd.OnTestIterationEnd(0)",
+ "2nd.OnTestIterationEnd(0)",
+ "1st.OnTestIterationEnd(0)",
+ "1st.OnTestIterationStart(1)",
+ "2nd.OnTestIterationStart(1)",
+ "3rd.OnTestIterationStart(1)",
+ "1st.OnEnvironmentsSetUpStart",
+ "2nd.OnEnvironmentsSetUpStart",
+ "3rd.OnEnvironmentsSetUpStart",
+ "Environment::SetUp",
+ "3rd.OnEnvironmentsSetUpEnd",
+ "2nd.OnEnvironmentsSetUpEnd",
+ "1st.OnEnvironmentsSetUpEnd",
+ "3rd.OnTestSuiteStart",
+ "ListenerTest::SetUpTestSuite",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "1st.OnTestStart",
+ "2nd.OnTestStart",
+ "3rd.OnTestStart",
+ "ListenerTest::SetUp",
+ "ListenerTest::* Test Body",
+ "1st.OnTestPartResult",
+ "2nd.OnTestPartResult",
+ "3rd.OnTestPartResult",
+ "ListenerTest::TearDown",
+ "3rd.OnTestEnd",
+ "2nd.OnTestEnd",
+ "1st.OnTestEnd",
+ "ListenerTest::TearDownTestSuite",
+ "3rd.OnTestSuiteEnd",
+ "1st.OnEnvironmentsTearDownStart",
+ "2nd.OnEnvironmentsTearDownStart",
+ "3rd.OnEnvironmentsTearDownStart",
+ "Environment::TearDown",
+ "3rd.OnEnvironmentsTearDownEnd",
+ "2nd.OnEnvironmentsTearDownEnd",
+ "1st.OnEnvironmentsTearDownEnd",
+ "3rd.OnTestIterationEnd(1)",
+ "2nd.OnTestIterationEnd(1)",
+ "1st.OnTestIterationEnd(1)",
+ "3rd.OnTestProgramEnd",
+ "2nd.OnTestProgramEnd",
+ "1st.OnTestProgramEnd"};
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+
VerifyResults(events,
expected_events,
sizeof(expected_events)/sizeof(expected_events[0]));
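
The expected_events arrays above encode the listener ordering contract: listeners appended to the UnitTest listener list receive the *Start callbacks in append order ("1st", "2nd", "3rd") and the matching *End callbacks in reverse order. A minimal, self-contained sketch of appending such a listener (the LoggingListener name is illustrative, not part of this patch):

    #include <cstdio>
    #include "gtest/gtest.h"

    // Prints a line when each test starts and ends; illustrative only.
    class LoggingListener : public testing::EmptyTestEventListener {
      void OnTestStart(const testing::TestInfo& info) override {
        std::printf("Starting %s.%s\n", info.test_suite_name(), info.name());
      }
      void OnTestEnd(const testing::TestInfo& info) override {
        std::printf("Finished %s.%s\n", info.test_suite_name(), info.name());
      }
    };

    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);
      // Listeners appended here (the list takes ownership) get *Start events
      // in append order and *End events in reverse order, which is the
      // ordering the expected_events arrays verify.
      testing::UnitTest::GetInstance()->listeners().Append(new LoggingListener);
      return RUN_ALL_TESTS();
    }
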
diff --git a/googletest/test/googletest-options-test.cc b/googletest/test/googletest-options-test.cc
index 08aa9d8..f07b316 100644
--- a/googletest/test/googletest-options-test.cc
+++ b/googletest/test/googletest-options-test.cc
@@ -111,7 +111,6 @@ TEST(OutputFileHelpersTest, GetCurrentExecutableName) {
#elif GTEST_OS_FUCHSIA
const bool success = exe_str == "app";
#else
- // FIXME: remove the hard-coded "lt-" prefix when libtool replacement is ready
const bool success =
exe_str == "googletest-options-test" ||
exe_str == "gtest_all_test" ||
diff --git a/googletest/test/googletest-output-test-golden-lin.txt b/googletest/test/googletest-output-test-golden-lin.txt
index 86da845..038de92 100644
--- a/googletest/test/googletest-output-test-golden-lin.txt
+++ b/googletest/test/googletest-output-test-golden-lin.txt
@@ -12,7 +12,7 @@ Expected equality of these values:
3
Stack trace: (omitted)
-[==========] Running 76 tests from 34 test cases.
+[==========] Running 85 tests from 40 test suites.
[----------] Global test environment set-up.
FooEnvironment::SetUp() called.
BarEnvironment::SetUp() called.
@@ -380,66 +380,74 @@ Stack trace: (omitted)
[ RUN ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
foo.cc:42: Failure
Failed
-Expected failure in foo.cc
+Expected nonfatal failure in foo.cc
Stack trace: (omitted)
[ FAILED ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
-[----------] 4 tests from MixedUpTestCaseTest
-[ RUN ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
-[ OK ] MixedUpTestCaseTest.FirstTestFromNamespaceFoo
-[ RUN ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
-[ OK ] MixedUpTestCaseTest.SecondTestFromNamespaceFoo
-[ RUN ] MixedUpTestCaseTest.ThisShouldFail
+[----------] 1 test from GtestFailAtTest
+[ RUN ] GtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber
+foo.cc:42: Failure
+Failed
+Expected fatal failure in foo.cc
+Stack trace: (omitted)
+
+[ FAILED ] GtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber
+[----------] 4 tests from MixedUpTestSuiteTest
+[ RUN ] MixedUpTestSuiteTest.FirstTestFromNamespaceFoo
+[ OK ] MixedUpTestSuiteTest.FirstTestFromNamespaceFoo
+[ RUN ] MixedUpTestSuiteTest.SecondTestFromNamespaceFoo
+[ OK ] MixedUpTestSuiteTest.SecondTestFromNamespaceFoo
+[ RUN ] MixedUpTestSuiteTest.ThisShouldFail
gtest.cc:#: Failure
Failed
-All tests in the same test case must use the same test fixture
-class. However, in test case MixedUpTestCaseTest,
+All tests in the same test suite must use the same test fixture
+class. However, in test suite MixedUpTestSuiteTest,
you defined test FirstTestFromNamespaceFoo and test ThisShouldFail
using two different test fixture classes. This can happen if
the two classes are from different namespaces or translation
units and have the same name. You should probably rename one
-of the classes to put the tests into different test cases.
+of the classes to put the tests into different test suites.
Stack trace: (omitted)
-[ FAILED ] MixedUpTestCaseTest.ThisShouldFail
-[ RUN ] MixedUpTestCaseTest.ThisShouldFailToo
+[ FAILED ] MixedUpTestSuiteTest.ThisShouldFail
+[ RUN ] MixedUpTestSuiteTest.ThisShouldFailToo
gtest.cc:#: Failure
Failed
-All tests in the same test case must use the same test fixture
-class. However, in test case MixedUpTestCaseTest,
+All tests in the same test suite must use the same test fixture
+class. However, in test suite MixedUpTestSuiteTest,
you defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo
using two different test fixture classes. This can happen if
the two classes are from different namespaces or translation
units and have the same name. You should probably rename one
-of the classes to put the tests into different test cases.
+of the classes to put the tests into different test suites.
Stack trace: (omitted)
-[ FAILED ] MixedUpTestCaseTest.ThisShouldFailToo
-[----------] 2 tests from MixedUpTestCaseWithSameTestNameTest
-[ RUN ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-[ OK ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
-[ RUN ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ FAILED ] MixedUpTestSuiteTest.ThisShouldFailToo
+[----------] 2 tests from MixedUpTestSuiteWithSameTestNameTest
+[ RUN ] MixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ OK ] MixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ RUN ] MixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
gtest.cc:#: Failure
Failed
-All tests in the same test case must use the same test fixture
-class. However, in test case MixedUpTestCaseWithSameTestNameTest,
+All tests in the same test suite must use the same test fixture
+class. However, in test suite MixedUpTestSuiteWithSameTestNameTest,
you defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail
using two different test fixture classes. This can happen if
the two classes are from different namespaces or translation
units and have the same name. You should probably rename one
-of the classes to put the tests into different test cases.
+of the classes to put the tests into different test suites.
Stack trace: (omitted)
-[ FAILED ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ FAILED ] MixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
[----------] 2 tests from TEST_F_before_TEST_in_same_test_case
[ RUN ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
[ OK ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F
[ RUN ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
gtest.cc:#: Failure
Failed
-All tests in the same test case must use the same test fixture
-class, so mixing TEST_F and TEST in the same test case is
-illegal. In test case TEST_F_before_TEST_in_same_test_case,
+All tests in the same test suite must use the same test fixture
+class, so mixing TEST_F and TEST in the same test suite is
+illegal. In test suite TEST_F_before_TEST_in_same_test_case,
test DefinedUsingTEST_F is defined using TEST_F but
test DefinedUsingTESTAndShouldFail is defined using TEST. You probably
want to change the TEST to TEST_F or move it to another test
@@ -453,9 +461,9 @@ Stack trace: (omitted)
[ RUN ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
gtest.cc:#: Failure
Failed
-All tests in the same test case must use the same test fixture
-class, so mixing TEST_F and TEST in the same test case is
-illegal. In test case TEST_before_TEST_F_in_same_test_case,
+All tests in the same test suite must use the same test fixture
+class, so mixing TEST_F and TEST in the same test suite is
+illegal. In test suite TEST_before_TEST_F_in_same_test_case,
test DefinedUsingTEST_FAndShouldFail is defined using TEST_F but
test DefinedUsingTEST is defined using TEST. You probably
want to change the TEST to TEST_F or move it to another test
@@ -870,6 +878,84 @@ Expected non-fatal failure.
Stack trace: (omitted)
[ FAILED ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
+[----------] 2 tests from DynamicFixture
+DynamicFixture::SetUpTestSuite
+[ RUN ] DynamicFixture.DynamicTestPass
+DynamicFixture()
+DynamicFixture::SetUp
+DynamicFixture::TearDown
+~DynamicFixture()
+[ OK ] DynamicFixture.DynamicTestPass
+[ RUN ] DynamicFixture.DynamicTestFail
+DynamicFixture()
+DynamicFixture::SetUp
+googletest-output-test_.cc:#: Failure
+Value of: Pass
+ Actual: false
+Expected: true
+Stack trace: (omitted)
+
+DynamicFixture::TearDown
+~DynamicFixture()
+[ FAILED ] DynamicFixture.DynamicTestFail
+DynamicFixture::TearDownTestSuite
+[----------] 1 test from DynamicFixtureAnotherName
+DynamicFixture::SetUpTestSuite
+[ RUN ] DynamicFixtureAnotherName.DynamicTestPass
+DynamicFixture()
+DynamicFixture::SetUp
+DynamicFixture::TearDown
+~DynamicFixture()
+[ OK ] DynamicFixtureAnotherName.DynamicTestPass
+DynamicFixture::TearDownTestSuite
+[----------] 2 tests from BadDynamicFixture1
+DynamicFixture::SetUpTestSuite
+[ RUN ] BadDynamicFixture1.FixtureBase
+DynamicFixture()
+DynamicFixture::SetUp
+DynamicFixture::TearDown
+~DynamicFixture()
+[ OK ] BadDynamicFixture1.FixtureBase
+[ RUN ] BadDynamicFixture1.TestBase
+DynamicFixture()
+gtest.cc:#: Failure
+Failed
+All tests in the same test suite must use the same test fixture
+class, so mixing TEST_F and TEST in the same test suite is
+illegal. In test suite BadDynamicFixture1,
+test FixtureBase is defined using TEST_F but
+test TestBase is defined using TEST. You probably
+want to change the TEST to TEST_F or move it to another test
+case.
+Stack trace: (omitted)
+
+~DynamicFixture()
+[ FAILED ] BadDynamicFixture1.TestBase
+DynamicFixture::TearDownTestSuite
+[----------] 2 tests from BadDynamicFixture2
+DynamicFixture::SetUpTestSuite
+[ RUN ] BadDynamicFixture2.FixtureBase
+DynamicFixture()
+DynamicFixture::SetUp
+DynamicFixture::TearDown
+~DynamicFixture()
+[ OK ] BadDynamicFixture2.FixtureBase
+[ RUN ] BadDynamicFixture2.Derived
+DynamicFixture()
+gtest.cc:#: Failure
+Failed
+All tests in the same test suite must use the same test fixture
+class. However, in test suite BadDynamicFixture2,
+you defined test FixtureBase and test Derived
+using two different test fixture classes. This can happen if
+the two classes are from different namespaces or translation
+units and have the same name. You should probably rename one
+of the classes to put the tests into different test suites.
+Stack trace: (omitted)
+
+~DynamicFixture()
+[ FAILED ] BadDynamicFixture2.Derived
+DynamicFixture::TearDownTestSuite
[----------] 1 test from PrintingFailingParams/FailingParamTest
[ RUN ] PrintingFailingParams/FailingParamTest.Fails/0
googletest-output-test_.cc:#: Failure
@@ -880,6 +966,9 @@ Expected equality of these values:
Stack trace: (omitted)
[ FAILED ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
+[----------] 1 test from EmptyBasenameParamInst
+[ RUN ] EmptyBasenameParamInst.Passes/0
+[ OK ] EmptyBasenameParamInst.Passes/0
[----------] 2 tests from PrintingStrings/ParamTest
[ RUN ] PrintingStrings/ParamTest.Success/a
[ OK ] PrintingStrings/ParamTest.Success/a
@@ -906,9 +995,9 @@ Failed
Expected fatal failure.
Stack trace: (omitted)
-[==========] 76 tests from 34 test cases ran.
-[ PASSED ] 26 tests.
-[ FAILED ] 50 tests, listed below:
+[==========] 85 tests from 40 test suites ran.
+[ PASSED ] 31 tests.
+[ FAILED ] 54 tests, listed below:
[ FAILED ] NonfatalFailureTest.EscapesStringOperands
[ FAILED ] NonfatalFailureTest.DiffForLongStrings
[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
@@ -928,9 +1017,10 @@ Stack trace: (omitted)
[ FAILED ] NonFatalFailureInSetUpTest.FailureInSetUp
[ FAILED ] FatalFailureInSetUpTest.FailureInSetUp
[ FAILED ] AddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber
-[ FAILED ] MixedUpTestCaseTest.ThisShouldFail
-[ FAILED ] MixedUpTestCaseTest.ThisShouldFailToo
-[ FAILED ] MixedUpTestCaseWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
+[ FAILED ] GtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber
+[ FAILED ] MixedUpTestSuiteTest.ThisShouldFail
+[ FAILED ] MixedUpTestSuiteTest.ThisShouldFailToo
+[ FAILED ] MixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail
[ FAILED ] TEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail
[ FAILED ] TEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail
[ FAILED ] ExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure
@@ -957,14 +1047,17 @@ Stack trace: (omitted)
[ FAILED ] ExpectFailureWithThreadsTest.ExpectFatalFailure
[ FAILED ] ExpectFailureWithThreadsTest.ExpectNonFatalFailure
[ FAILED ] ScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread
+[ FAILED ] DynamicFixture.DynamicTestFail
+[ FAILED ] BadDynamicFixture1.TestBase
+[ FAILED ] BadDynamicFixture2.Derived
[ FAILED ] PrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2
[ FAILED ] PrintingStrings/ParamTest.Failure/a, where GetParam() = "a"
-50 FAILED TESTS
+54 FAILED TESTS
 YOU HAVE 1 DISABLED TEST
Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
-[==========] Running 4 tests from 2 test cases.
+[==========] Running 4 tests from 2 test suites.
[----------] Global test environment set-up.
[----------] 3 tests from FatalFailureTest
[ RUN ] FatalFailureTest.FatalFailureInSubroutine
@@ -1017,7 +1110,7 @@ Stack trace: (omitted)
[----------] 1 test from LoggingTest (? ms total)
[----------] Global test environment tear-down
-[==========] 4 tests from 2 test cases ran. (? ms total)
+[==========] 4 tests from 2 test suites ran. (? ms total)
[ PASSED ] 0 tests.
[ FAILED ] 4 tests, listed below:
[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
@@ -1027,21 +1120,21 @@ Stack trace: (omitted)
4 FAILED TESTS
Note: Google Test filter = *DISABLED_*
-[==========] Running 1 test from 1 test case.
+[==========] Running 1 test from 1 test suite.
[----------] Global test environment set-up.
[----------] 1 test from DisabledTestsWarningTest
[ RUN ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
[ OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning
[----------] Global test environment tear-down
-[==========] 1 test from 1 test case ran.
+[==========] 1 test from 1 test suite ran.
[ PASSED ] 1 test.
Note: Google Test filter = PassingTest.*
Note: This is test shard 2 of 2.
-[==========] Running 1 test from 1 test case.
+[==========] Running 1 test from 1 test suite.
[----------] Global test environment set-up.
[----------] 1 test from PassingTest
[ RUN ] PassingTest.PassingTest2
[ OK ] PassingTest.PassingTest2
[----------] Global test environment tear-down
-[==========] 1 test from 1 test case ran.
+[==========] 1 test from 1 test suite ran.
[ PASSED ] 1 test.
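
The EmptyBasenameParamInst lines in the golden output above have no "Prefix/" in front of the suite name because the corresponding instantiation (added further below in googletest-output-test_.cc) uses an empty first argument. A small sketch of that pattern, with illustrative names:

    #include "gtest/gtest.h"

    class EmptyPrefixTest : public testing::TestWithParam<int> {};

    TEST_P(EmptyPrefixTest, Passes) { EXPECT_EQ(1, GetParam()); }

    // An empty instantiation name drops the usual "Prefix/" from generated
    // test names, so this runs as EmptyPrefixTest.Passes/0.
    INSTANTIATE_TEST_SUITE_P(, EmptyPrefixTest, testing::Values(1));
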
diff --git a/googletest/test/googletest-output-test.py b/googletest/test/googletest-output-test.py
index 1a9ee6e..c727f17 100755
--- a/googletest/test/googletest-output-test.py
+++ b/googletest/test/googletest-output-test.py
@@ -55,7 +55,6 @@ NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
IS_WINDOWS = os.name == 'nt'
-# FIXME: remove the _lin suffix.
GOLDEN_NAME = 'googletest-output-test-golden-lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('googletest-output-test_')
diff --git a/googletest/test/googletest-output-test_.cc b/googletest/test/googletest-output-test_.cc
index 67de2d1..4f716d8 100644
--- a/googletest/test/googletest-output-test_.cc
+++ b/googletest/test/googletest-output-test_.cc
@@ -92,9 +92,17 @@ TEST_P(FailingParamTest, Fails) {
// This generates a test which will fail. Google Test is expected to print
// its parameter when it outputs the list of all failed tests.
-INSTANTIATE_TEST_CASE_P(PrintingFailingParams,
- FailingParamTest,
- testing::Values(2));
+INSTANTIATE_TEST_SUITE_P(PrintingFailingParams,
+ FailingParamTest,
+ testing::Values(2));
+
+// Tests that an empty value for the test suite basename yields just
+// the test name without any prior /
+class EmptyBasenameParamInst : public testing::TestWithParam<int> {};
+
+TEST_P(EmptyBasenameParamInst, Passes) { EXPECT_EQ(1, GetParam()); }
+
+INSTANTIATE_TEST_SUITE_P(, EmptyBasenameParamInst, testing::Values(1));
static const char kGoldenString[] = "\"Line\0 1\"\nLine 2";
@@ -461,7 +469,11 @@ TEST_F(FatalFailureInSetUpTest, FailureInSetUp) {
}
TEST(AddFailureAtTest, MessageContainsSpecifiedFileAndLineNumber) {
- ADD_FAILURE_AT("foo.cc", 42) << "Expected failure in foo.cc";
+ ADD_FAILURE_AT("foo.cc", 42) << "Expected nonfatal failure in foo.cc";
+}
+
+TEST(GtestFailAtTest, MessageContainsSpecifiedFileAndLineNumber) {
+ GTEST_FAIL_AT("foo.cc", 42) << "Expected fatal failure in foo.cc";
}
#if GTEST_IS_THREADSAFE
@@ -521,48 +533,48 @@ class DeathTestAndMultiThreadsTest : public testing::Test {
#endif // GTEST_IS_THREADSAFE
-// The MixedUpTestCaseTest test case verifies that Google Test will fail a
+// The MixedUpTestSuiteTest test case verifies that Google Test will fail a
// test if it uses a different fixture class than what other tests in
// the same test case use. It deliberately contains two fixture
// classes with the same name but defined in different namespaces.
-// The MixedUpTestCaseWithSameTestNameTest test case verifies that
+// The MixedUpTestSuiteWithSameTestNameTest test case verifies that
// when the user defines two tests with the same test case name AND
// same test name (but in different namespaces), the second test will
// fail.
namespace foo {
-class MixedUpTestCaseTest : public testing::Test {
+class MixedUpTestSuiteTest : public testing::Test {
};
-TEST_F(MixedUpTestCaseTest, FirstTestFromNamespaceFoo) {}
-TEST_F(MixedUpTestCaseTest, SecondTestFromNamespaceFoo) {}
+TEST_F(MixedUpTestSuiteTest, FirstTestFromNamespaceFoo) {}
+TEST_F(MixedUpTestSuiteTest, SecondTestFromNamespaceFoo) {}
-class MixedUpTestCaseWithSameTestNameTest : public testing::Test {
+class MixedUpTestSuiteWithSameTestNameTest : public testing::Test {
};
-TEST_F(MixedUpTestCaseWithSameTestNameTest,
+TEST_F(MixedUpTestSuiteWithSameTestNameTest,
TheSecondTestWithThisNameShouldFail) {}
} // namespace foo
namespace bar {
-class MixedUpTestCaseTest : public testing::Test {
+class MixedUpTestSuiteTest : public testing::Test {
};
// The following two tests are expected to fail. We rely on the
// golden file to check that Google Test generates the right error message.
-TEST_F(MixedUpTestCaseTest, ThisShouldFail) {}
-TEST_F(MixedUpTestCaseTest, ThisShouldFailToo) {}
+TEST_F(MixedUpTestSuiteTest, ThisShouldFail) {}
+TEST_F(MixedUpTestSuiteTest, ThisShouldFailToo) {}
-class MixedUpTestCaseWithSameTestNameTest : public testing::Test {
+class MixedUpTestSuiteWithSameTestNameTest : public testing::Test {
};
// Expected to fail. We rely on the golden file to check that Google Test
// generates the right error message.
-TEST_F(MixedUpTestCaseWithSameTestNameTest,
+TEST_F(MixedUpTestSuiteWithSameTestNameTest,
TheSecondTestWithThisNameShouldFail) {}
} // namespace bar
@@ -773,10 +785,10 @@ TEST_P(ParamTest, Failure) {
EXPECT_EQ("b", GetParam()) << "Expected failure";
}
-INSTANTIATE_TEST_CASE_P(PrintingStrings,
- ParamTest,
- testing::Values(std::string("a")),
- ParamNameFunc);
+INSTANTIATE_TEST_SUITE_P(PrintingStrings,
+ ParamTest,
+ testing::Values(std::string("a")),
+ ParamNameFunc);
// This #ifdef block tests the output of typed tests.
#if GTEST_HAS_TYPED_TEST
@@ -785,7 +797,7 @@ template <typename T>
class TypedTest : public testing::Test {
};
-TYPED_TEST_CASE(TypedTest, testing::Types<int>);
+TYPED_TEST_SUITE(TypedTest, testing::Types<int>);
TYPED_TEST(TypedTest, Success) {
EXPECT_EQ(0, TypeParam());
@@ -804,14 +816,14 @@ class TypedTestNames {
public:
template <typename T>
static std::string GetName(int i) {
- if (testing::internal::IsSame<T, char>::value)
+ if (std::is_same<T, char>::value)
return std::string("char") + ::testing::PrintToString(i);
- if (testing::internal::IsSame<T, int>::value)
+ if (std::is_same<T, int>::value)
return std::string("int") + ::testing::PrintToString(i);
}
};
-TYPED_TEST_CASE(TypedTestWithNames, TypesForTestWithNames, TypedTestNames);
+TYPED_TEST_SUITE(TypedTestWithNames, TypesForTestWithNames, TypedTestNames);
TYPED_TEST(TypedTestWithNames, Success) {}
@@ -826,7 +838,7 @@ template <typename T>
class TypedTestP : public testing::Test {
};
-TYPED_TEST_CASE_P(TypedTestP);
+TYPED_TEST_SUITE_P(TypedTestP);
TYPED_TEST_P(TypedTestP, Success) {
EXPECT_EQ(0U, TypeParam());
@@ -836,25 +848,25 @@ TYPED_TEST_P(TypedTestP, Failure) {
EXPECT_EQ(1U, TypeParam()) << "Expected failure";
}
-REGISTER_TYPED_TEST_CASE_P(TypedTestP, Success, Failure);
+REGISTER_TYPED_TEST_SUITE_P(TypedTestP, Success, Failure);
typedef testing::Types<unsigned char, unsigned int> UnsignedTypes;
-INSTANTIATE_TYPED_TEST_CASE_P(Unsigned, TypedTestP, UnsignedTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(Unsigned, TypedTestP, UnsignedTypes);
class TypedTestPNames {
public:
template <typename T>
static std::string GetName(int i) {
- if (testing::internal::IsSame<T, unsigned char>::value) {
+ if (std::is_same<T, unsigned char>::value) {
return std::string("unsignedChar") + ::testing::PrintToString(i);
}
- if (testing::internal::IsSame<T, unsigned int>::value) {
+ if (std::is_same<T, unsigned int>::value) {
return std::string("unsignedInt") + ::testing::PrintToString(i);
}
}
};
-INSTANTIATE_TYPED_TEST_CASE_P(UnsignedCustomName, TypedTestP, UnsignedTypes,
+INSTANTIATE_TYPED_TEST_SUITE_P(UnsignedCustomName, TypedTestP, UnsignedTypes,
TypedTestPNames);
#endif // GTEST_HAS_TYPED_TEST_P
@@ -877,7 +889,7 @@ class ATypedDeathTest : public testing::Test {
};
typedef testing::Types<int, double> NumericTypes;
-TYPED_TEST_CASE(ATypedDeathTest, NumericTypes);
+TYPED_TEST_SUITE(ATypedDeathTest, NumericTypes);
TYPED_TEST(ATypedDeathTest, ShouldRunFirst) {
}
@@ -894,14 +906,14 @@ template <typename T>
class ATypeParamDeathTest : public testing::Test {
};
-TYPED_TEST_CASE_P(ATypeParamDeathTest);
+TYPED_TEST_SUITE_P(ATypeParamDeathTest);
TYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) {
}
-REGISTER_TYPED_TEST_CASE_P(ATypeParamDeathTest, ShouldRunFirst);
+REGISTER_TYPED_TEST_SUITE_P(ATypeParamDeathTest, ShouldRunFirst);
-INSTANTIATE_TYPED_TEST_CASE_P(My, ATypeParamDeathTest, NumericTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, ATypeParamDeathTest, NumericTypes);
# endif // GTEST_HAS_TYPED_TEST_P
@@ -1024,6 +1036,56 @@ TEST_F(ExpectFailureTest, ExpectNonFatalFailureOnAllThreads) {
"Some other non-fatal failure.");
}
+class DynamicFixture : public testing::Test {
+ protected:
+ DynamicFixture() { printf("DynamicFixture()\n"); }
+ ~DynamicFixture() override { printf("~DynamicFixture()\n"); }
+ void SetUp() override { printf("DynamicFixture::SetUp\n"); }
+ void TearDown() override { printf("DynamicFixture::TearDown\n"); }
+
+ static void SetUpTestSuite() { printf("DynamicFixture::SetUpTestSuite\n"); }
+ static void TearDownTestSuite() {
+ printf("DynamicFixture::TearDownTestSuite\n");
+ }
+};
+
+template <bool Pass>
+class DynamicTest : public DynamicFixture {
+ public:
+ void TestBody() override { EXPECT_TRUE(Pass); }
+};
+
+auto dynamic_test = (
+ // Register two tests with the same fixture correctly.
+ testing::RegisterTest(
+ "DynamicFixture", "DynamicTestPass", nullptr, nullptr, __FILE__,
+ __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }),
+ testing::RegisterTest(
+ "DynamicFixture", "DynamicTestFail", nullptr, nullptr, __FILE__,
+ __LINE__, []() -> DynamicFixture* { return new DynamicTest<false>; }),
+
+ // Register the same fixture with another name. That's fine.
+ testing::RegisterTest(
+ "DynamicFixtureAnotherName", "DynamicTestPass", nullptr, nullptr,
+ __FILE__, __LINE__,
+ []() -> DynamicFixture* { return new DynamicTest<true>; }),
+
+ // Register two tests with the same fixture incorrectly.
+ testing::RegisterTest(
+ "BadDynamicFixture1", "FixtureBase", nullptr, nullptr, __FILE__,
+ __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }),
+ testing::RegisterTest(
+ "BadDynamicFixture1", "TestBase", nullptr, nullptr, __FILE__, __LINE__,
+ []() -> testing::Test* { return new DynamicTest<true>; }),
+
+ // Register two tests with the same fixture incorrectly by omitting the
+ // return type.
+ testing::RegisterTest(
+ "BadDynamicFixture2", "FixtureBase", nullptr, nullptr, __FILE__,
+ __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }),
+ testing::RegisterTest("BadDynamicFixture2", "Derived", nullptr, nullptr,
+ __FILE__, __LINE__,
+ []() { return new DynamicTest<true>; }));
// Two test environments for testing testing::AddGlobalTestEnvironment().
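
The dynamic_test block above exercises testing::RegisterTest, which registers tests at run time instead of through the TEST/TEST_F macros; the factory's declared return type is what ties a registered test to its fixture, which is why the BadDynamicFixture cases (returning testing::Test* or relying on deduction) are expected to trip the mixed-fixture check. A minimal sketch of the intended usage, with illustrative names:

    #include "gtest/gtest.h"

    class MyFixture : public testing::Test {};

    class MyDynamicTest : public MyFixture {
     public:
      void TestBody() override { EXPECT_TRUE(true); }
    };

    int main(int argc, char** argv) {
      testing::InitGoogleTest(&argc, argv);
      // Returning MyFixture* from the factory associates the registered test
      // with that fixture; registration must happen before RUN_ALL_TESTS().
      testing::RegisterTest(
          "MyFixture", "RegisteredAtRuntime", nullptr, nullptr, __FILE__,
          __LINE__, []() -> MyFixture* { return new MyDynamicTest; });
      return RUN_ALL_TESTS();
    }
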
diff --git a/googletest/test/googletest-param-test-invalid-name1-test_.cc b/googletest/test/googletest-param-test-invalid-name1-test_.cc
index 5a95155..955d699 100644
--- a/googletest/test/googletest-param-test-invalid-name1-test_.cc
+++ b/googletest/test/googletest-param-test-invalid-name1-test_.cc
@@ -36,10 +36,10 @@ class DummyTest : public ::testing::TestWithParam<const char *> {};
TEST_P(DummyTest, Dummy) {
}
-INSTANTIATE_TEST_CASE_P(InvalidTestName,
- DummyTest,
- ::testing::Values("InvalidWithQuotes"),
- ::testing::PrintToStringParamName());
+INSTANTIATE_TEST_SUITE_P(InvalidTestName,
+ DummyTest,
+ ::testing::Values("InvalidWithQuotes"),
+ ::testing::PrintToStringParamName());
} // namespace
diff --git a/googletest/test/googletest-param-test-invalid-name2-test_.cc b/googletest/test/googletest-param-test-invalid-name2-test_.cc
index ef09349..76371df 100644
--- a/googletest/test/googletest-param-test-invalid-name2-test_.cc
+++ b/googletest/test/googletest-param-test-invalid-name2-test_.cc
@@ -41,10 +41,10 @@ std::string StringParamTestSuffix(
TEST_P(DummyTest, Dummy) {
}
-INSTANTIATE_TEST_CASE_P(DuplicateTestNames,
- DummyTest,
- ::testing::Values("a", "b", "a", "c"),
- StringParamTestSuffix);
+INSTANTIATE_TEST_SUITE_P(DuplicateTestNames,
+ DummyTest,
+ ::testing::Values("a", "b", "a", "c"),
+ StringParamTestSuffix);
} // namespace
int main(int argc, char *argv[]) {
diff --git a/googletest/test/googletest-param-test-test.cc b/googletest/test/googletest-param-test-test.cc
index fc33378..6c187df 100644
--- a/googletest/test/googletest-param-test-test.cc
+++ b/googletest/test/googletest-param-test-test.cc
@@ -542,12 +542,12 @@ TEST(ParamGeneratorTest, AssignmentWorks) {
// This test verifies that the tests are expanded and run as specified:
// one test per element from the sequence produced by the generator
-// specified in INSTANTIATE_TEST_CASE_P. It also verifies that the test's
+// specified in INSTANTIATE_TEST_SUITE_P. It also verifies that the test's
// fixture constructor, SetUp(), and TearDown() have run and have been
// supplied with the correct parameters.
// The use of environment object allows detection of the case where no test
-// case functionality is run at all. In this case TestCaseTearDown will not
+// case functionality is run at all. In this case TearDownTestSuite will not
// be able to detect missing tests, naturally.
template <int kExpectedCalls>
class TestGenerationEnvironment : public ::testing::Environment {
@@ -628,7 +628,7 @@ class TestGenerationTest : public TestWithParam<int> {
EXPECT_EQ(current_parameter_, GetParam());
}
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
bool all_tests_in_test_case_selected = true;
for (int i = 0; i < PARAMETER_COUNT; ++i) {
@@ -649,7 +649,7 @@ class TestGenerationTest : public TestWithParam<int> {
collected_parameters_.clear();
}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
vector<int> expected_values(test_generation_params,
test_generation_params + PARAMETER_COUNT);
// Test execution order is not guaranteed by Google Test,
@@ -675,17 +675,17 @@ TEST_P(TestGenerationTest, TestsExpandedAndRun) {
EXPECT_EQ(current_parameter_, GetParam());
collected_parameters_.push_back(GetParam());
}
-INSTANTIATE_TEST_CASE_P(TestExpansionModule, TestGenerationTest,
- ValuesIn(test_generation_params));
+INSTANTIATE_TEST_SUITE_P(TestExpansionModule, TestGenerationTest,
+ ValuesIn(test_generation_params));
// This test verifies that the element sequence (third parameter of
-// INSTANTIATE_TEST_CASE_P) is evaluated in InitGoogleTest() and neither at
-// the call site of INSTANTIATE_TEST_CASE_P nor in RUN_ALL_TESTS(). For
+// INSTANTIATE_TEST_SUITE_P) is evaluated in InitGoogleTest() and neither at
+// the call site of INSTANTIATE_TEST_SUITE_P nor in RUN_ALL_TESTS(). For
// that, we declare param_value_ to be a static member of
// GeneratorEvaluationTest and initialize it to 0. We set it to 1 in
// main(), just before invocation of InitGoogleTest(). After calling
// InitGoogleTest(), we set the value to 2. If the sequence is evaluated
-// before or after InitGoogleTest, INSTANTIATE_TEST_CASE_P will create a
+// before or after InitGoogleTest, INSTANTIATE_TEST_SUITE_P will create a
// test with parameter other than 1, and the test body will fail the
// assertion.
class GeneratorEvaluationTest : public TestWithParam<int> {
@@ -701,9 +701,8 @@ int GeneratorEvaluationTest::param_value_ = 0;
TEST_P(GeneratorEvaluationTest, GeneratorsEvaluatedInMain) {
EXPECT_EQ(1, GetParam());
}
-INSTANTIATE_TEST_CASE_P(GenEvalModule,
- GeneratorEvaluationTest,
- Values(GeneratorEvaluationTest::param_value()));
+INSTANTIATE_TEST_SUITE_P(GenEvalModule, GeneratorEvaluationTest,
+ Values(GeneratorEvaluationTest::param_value()));
// Tests that generators defined in a different translation unit are
// functional. Generator extern_gen is defined in gtest-param-test_test2.cc.
@@ -714,9 +713,8 @@ TEST_P(ExternalGeneratorTest, ExternalGenerator) {
// which we verify here.
EXPECT_EQ(GetParam(), 33);
}
-INSTANTIATE_TEST_CASE_P(ExternalGeneratorModule,
- ExternalGeneratorTest,
- extern_gen);
+INSTANTIATE_TEST_SUITE_P(ExternalGeneratorModule, ExternalGeneratorTest,
+ extern_gen);
// Tests that a parameterized test case can be defined in one translation
// unit and instantiated in another. This test will be instantiated in
@@ -731,20 +729,19 @@ TEST_P(ExternalInstantiationTest, IsMultipleOf33) {
class MultipleInstantiationTest : public TestWithParam<int> {};
TEST_P(MultipleInstantiationTest, AllowsMultipleInstances) {
}
-INSTANTIATE_TEST_CASE_P(Sequence1, MultipleInstantiationTest, Values(1, 2));
-INSTANTIATE_TEST_CASE_P(Sequence2, MultipleInstantiationTest, Range(3, 5));
+INSTANTIATE_TEST_SUITE_P(Sequence1, MultipleInstantiationTest, Values(1, 2));
+INSTANTIATE_TEST_SUITE_P(Sequence2, MultipleInstantiationTest, Range(3, 5));
// Tests that a parameterized test case can be instantiated
// in multiple translation units. This test will be instantiated
// here and in gtest-param-test_test2.cc.
// InstantiationInMultipleTranslationUnitsTest fixture class
// is defined in gtest-param-test_test.h.
-TEST_P(InstantiationInMultipleTranslaionUnitsTest, IsMultipleOf42) {
+TEST_P(InstantiationInMultipleTranslationUnitsTest, IsMultipleOf42) {
EXPECT_EQ(0, GetParam() % 42);
}
-INSTANTIATE_TEST_CASE_P(Sequence1,
- InstantiationInMultipleTranslaionUnitsTest,
- Values(42, 42*2));
+INSTANTIATE_TEST_SUITE_P(Sequence1, InstantiationInMultipleTranslationUnitsTest,
+ Values(42, 42 * 2));
// Tests that each iteration of parameterized test runs in a separate test
// object.
@@ -752,7 +749,7 @@ class SeparateInstanceTest : public TestWithParam<int> {
public:
SeparateInstanceTest() : count_(0) {}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
EXPECT_GE(global_count_, 2)
<< "If some (but not all) SeparateInstanceTest tests have been "
<< "filtered out this test will fail. Make sure that all "
@@ -770,20 +767,20 @@ TEST_P(SeparateInstanceTest, TestsRunInSeparateInstances) {
EXPECT_EQ(0, count_++);
global_count_++;
}
-INSTANTIATE_TEST_CASE_P(FourElemSequence, SeparateInstanceTest, Range(1, 4));
+INSTANTIATE_TEST_SUITE_P(FourElemSequence, SeparateInstanceTest, Range(1, 4));
// Tests that all instantiations of a test have named appropriately. Test
-// defined with TEST_P(TestCaseName, TestName) and instantiated with
-// INSTANTIATE_TEST_CASE_P(SequenceName, TestCaseName, generator) must be named
-// SequenceName/TestCaseName.TestName/i, where i is the 0-based index of the
-// sequence element used to instantiate the test.
+// defined with TEST_P(TestSuiteName, TestName) and instantiated with
+// INSTANTIATE_TEST_SUITE_P(SequenceName, TestSuiteName, generator) must be
+// named SequenceName/TestSuiteName.TestName/i, where i is the 0-based index of
+// the sequence element used to instantiate the test.
class NamingTest : public TestWithParam<int> {};
TEST_P(NamingTest, TestsReportCorrectNamesAndParameters) {
const ::testing::TestInfo* const test_info =
::testing::UnitTest::GetInstance()->current_test_info();
- EXPECT_STREQ("ZeroToFiveSequence/NamingTest", test_info->test_case_name());
+ EXPECT_STREQ("ZeroToFiveSequence/NamingTest", test_info->test_suite_name());
Message index_stream;
index_stream << "TestsReportCorrectNamesAndParameters/" << GetParam();
@@ -792,7 +789,7 @@ TEST_P(NamingTest, TestsReportCorrectNamesAndParameters) {
EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());
}
-INSTANTIATE_TEST_CASE_P(ZeroToFiveSequence, NamingTest, Range(0, 5));
+INSTANTIATE_TEST_SUITE_P(ZeroToFiveSequence, NamingTest, Range(0, 5));
// Tests that macros in test names are expanded correctly.
class MacroNamingTest : public TestWithParam<int> {};
@@ -804,11 +801,11 @@ TEST_P(PREFIX_WITH_MACRO(NamingTest), PREFIX_WITH_FOO(SomeTestName)) {
const ::testing::TestInfo* const test_info =
::testing::UnitTest::GetInstance()->current_test_info();
- EXPECT_STREQ("FortyTwo/MacroNamingTest", test_info->test_case_name());
+ EXPECT_STREQ("FortyTwo/MacroNamingTest", test_info->test_suite_name());
EXPECT_STREQ("FooSomeTestName", test_info->name());
}
-INSTANTIATE_TEST_CASE_P(FortyTwo, MacroNamingTest, Values(42));
+INSTANTIATE_TEST_SUITE_P(FortyTwo, MacroNamingTest, Values(42));
// Tests the same thing for non-parametrized tests.
class MacroNamingTestNonParametrized : public ::testing::Test {};
@@ -818,7 +815,7 @@ TEST_F(PREFIX_WITH_MACRO(NamingTestNonParametrized),
const ::testing::TestInfo* const test_info =
::testing::UnitTest::GetInstance()->current_test_info();
- EXPECT_STREQ("MacroNamingTestNonParametrized", test_info->test_case_name());
+ EXPECT_STREQ("MacroNamingTestNonParametrized", test_info->test_suite_name());
EXPECT_STREQ("FooSomeTestName", test_info->name());
}
@@ -835,17 +832,14 @@ struct CustomParamNameFunctor {
}
};
-INSTANTIATE_TEST_CASE_P(CustomParamNameFunctor,
- CustomFunctorNamingTest,
- Values(std::string("FunctorName")),
- CustomParamNameFunctor());
+INSTANTIATE_TEST_SUITE_P(CustomParamNameFunctor, CustomFunctorNamingTest,
+ Values(std::string("FunctorName")),
+ CustomParamNameFunctor());
-INSTANTIATE_TEST_CASE_P(AllAllowedCharacters,
- CustomFunctorNamingTest,
- Values("abcdefghijklmnopqrstuvwxyz",
- "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
- "01234567890_"),
- CustomParamNameFunctor());
+INSTANTIATE_TEST_SUITE_P(AllAllowedCharacters, CustomFunctorNamingTest,
+ Values("abcdefghijklmnopqrstuvwxyz",
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ", "01234567890_"),
+ CustomParamNameFunctor());
inline std::string CustomParamNameFunction(
const ::testing::TestParamInfo<std::string>& inf) {
@@ -855,38 +849,40 @@ inline std::string CustomParamNameFunction(
class CustomFunctionNamingTest : public TestWithParam<std::string> {};
TEST_P(CustomFunctionNamingTest, CustomTestNames) {}
-INSTANTIATE_TEST_CASE_P(CustomParamNameFunction,
- CustomFunctionNamingTest,
- Values(std::string("FunctionName")),
- CustomParamNameFunction);
+INSTANTIATE_TEST_SUITE_P(CustomParamNameFunction, CustomFunctionNamingTest,
+ Values(std::string("FunctionName")),
+ CustomParamNameFunction);
+
+INSTANTIATE_TEST_SUITE_P(CustomParamNameFunctionP, CustomFunctionNamingTest,
+ Values(std::string("FunctionNameP")),
+ &CustomParamNameFunction);
// Test custom naming with a lambda
class CustomLambdaNamingTest : public TestWithParam<std::string> {};
TEST_P(CustomLambdaNamingTest, CustomTestNames) {}
-INSTANTIATE_TEST_CASE_P(CustomParamNameLambda, CustomLambdaNamingTest,
- Values(std::string("LambdaName")),
- [](const ::testing::TestParamInfo<std::string>& inf) {
- return inf.param;
- });
+INSTANTIATE_TEST_SUITE_P(CustomParamNameLambda, CustomLambdaNamingTest,
+ Values(std::string("LambdaName")),
+ [](const ::testing::TestParamInfo<std::string>& inf) {
+ return inf.param;
+ });
TEST(CustomNamingTest, CheckNameRegistry) {
::testing::UnitTest* unit_test = ::testing::UnitTest::GetInstance();
std::set<std::string> test_names;
- for (int case_num = 0;
- case_num < unit_test->total_test_case_count();
- ++case_num) {
- const ::testing::TestCase* test_case = unit_test->GetTestCase(case_num);
- for (int test_num = 0;
- test_num < test_case->total_test_count();
+ for (int suite_num = 0; suite_num < unit_test->total_test_suite_count();
+ ++suite_num) {
+ const ::testing::TestSuite* test_suite = unit_test->GetTestSuite(suite_num);
+ for (int test_num = 0; test_num < test_suite->total_test_count();
++test_num) {
- const ::testing::TestInfo* test_info = test_case->GetTestInfo(test_num);
+ const ::testing::TestInfo* test_info = test_suite->GetTestInfo(test_num);
test_names.insert(std::string(test_info->name()));
}
}
EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctorName"));
EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctionName"));
+ EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctionNameP"));
EXPECT_EQ(1u, test_names.count("CustomTestNames/LambdaName"));
}
@@ -902,10 +898,8 @@ TEST_P(CustomIntegerNamingTest, TestsReportCorrectNames) {
EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
}
-INSTANTIATE_TEST_CASE_P(PrintToString,
- CustomIntegerNamingTest,
- Range(0, 5),
- ::testing::PrintToStringParamName());
+INSTANTIATE_TEST_SUITE_P(PrintToString, CustomIntegerNamingTest, Range(0, 5),
+ ::testing::PrintToStringParamName());
// Test a custom struct with PrintToString.
@@ -929,10 +923,9 @@ TEST_P(CustomStructNamingTest, TestsReportCorrectNames) {
EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
}
-INSTANTIATE_TEST_CASE_P(PrintToString,
- CustomStructNamingTest,
- Values(CustomStruct(0), CustomStruct(1)),
- ::testing::PrintToStringParamName());
+INSTANTIATE_TEST_SUITE_P(PrintToString, CustomStructNamingTest,
+ Values(CustomStruct(0), CustomStruct(1)),
+ ::testing::PrintToStringParamName());
// Test that using a stateful parameter naming function works as expected.
@@ -961,10 +954,8 @@ TEST_P(StatefulNamingTest, TestsReportCorrectNames) {
EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());
}
-INSTANTIATE_TEST_CASE_P(StatefulNamingFunctor,
- StatefulNamingTest,
- Range(0, 5),
- StatefulNamingFunctor());
+INSTANTIATE_TEST_SUITE_P(StatefulNamingFunctor, StatefulNamingTest, Range(0, 5),
+ StatefulNamingFunctor());
// Class that cannot be streamed into an ostream. It needs to be copyable
// (and, in case of MSVC, also assignable) in order to be a test parameter
@@ -973,6 +964,8 @@ INSTANTIATE_TEST_CASE_P(StatefulNamingFunctor,
class Unstreamable {
public:
explicit Unstreamable(int value) : value_(value) {}
+ // -Wunused-private-field: dummy accessor for `value_`.
+ const int& dummy_value() const { return value_; }
private:
int value_;
@@ -987,9 +980,8 @@ TEST_P(CommentTest, TestsCorrectlyReportUnstreamableParams) {
EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());
}
-INSTANTIATE_TEST_CASE_P(InstantiationWithComments,
- CommentTest,
- Values(Unstreamable(1)));
+INSTANTIATE_TEST_SUITE_P(InstantiationWithComments, CommentTest,
+ Values(Unstreamable(1)));
// Verify that we can create a hierarchy of test fixtures, where the base
// class fixture is not parameterized and the derived class is. In this case
@@ -1029,7 +1021,8 @@ TEST_F(ParameterizedDeathTest, GetParamDiesFromTestF) {
".* value-parameterized test .*");
}
-INSTANTIATE_TEST_CASE_P(RangeZeroToFive, ParameterizedDerivedTest, Range(0, 5));
+INSTANTIATE_TEST_SUITE_P(RangeZeroToFive, ParameterizedDerivedTest,
+ Range(0, 5));
// Tests param generator working with Enums
enum MyEnums {
@@ -1041,19 +1034,19 @@ enum MyEnums {
class MyEnumTest : public testing::TestWithParam<MyEnums> {};
TEST_P(MyEnumTest, ChecksParamMoreThanZero) { EXPECT_GE(10, GetParam()); }
-INSTANTIATE_TEST_CASE_P(MyEnumTests, MyEnumTest,
- ::testing::Values(ENUM1, ENUM2, 0));
+INSTANTIATE_TEST_SUITE_P(MyEnumTests, MyEnumTest,
+ ::testing::Values(ENUM1, ENUM2, 0));
int main(int argc, char **argv) {
- // Used in TestGenerationTest test case.
+ // Used in TestGenerationTest test suite.
AddGlobalTestEnvironment(TestGenerationTest::Environment::Instance());
- // Used in GeneratorEvaluationTest test case. Tests that the updated value
+ // Used in GeneratorEvaluationTest test suite. Tests that the updated value
// will be picked up for instantiating tests in GeneratorEvaluationTest.
GeneratorEvaluationTest::set_param_value(1);
::testing::InitGoogleTest(&argc, argv);
- // Used in GeneratorEvaluationTest test case. Tests that value updated
+ // Used in GeneratorEvaluationTest test suite. Tests that value updated
// here will NOT be used for instantiating tests in
// GeneratorEvaluationTest.
GeneratorEvaluationTest::set_param_value(2);
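
Most of this file's changes are the mechanical rename from the test case API to the test suite API: INSTANTIATE_TEST_CASE_P becomes INSTANTIATE_TEST_SUITE_P and SetUpTestCase()/TearDownTestCase() become SetUpTestSuite()/TearDownTestSuite() (the typed-test files below make the matching TYPED_TEST_CASE to TYPED_TEST_SUITE change). A compact sketch of the new spellings, using illustrative fixture names:

    #include "gtest/gtest.h"

    // Value-parameterized suite using the renamed macro and per-suite hooks.
    class MyParamTest : public testing::TestWithParam<int> {
     public:
      static void SetUpTestSuite() {}     // was SetUpTestCase()
      static void TearDownTestSuite() {}  // was TearDownTestCase()
    };
    TEST_P(MyParamTest, IsPositive) { EXPECT_GT(GetParam(), 0); }
    INSTANTIATE_TEST_SUITE_P(Seq, MyParamTest, testing::Values(1, 2));

    // Typed suite: TYPED_TEST_CASE is now TYPED_TEST_SUITE.
    template <typename T>
    class MyTypedTest : public testing::Test {};
    using MyTypes = testing::Types<int, long>;
    TYPED_TEST_SUITE(MyTypedTest, MyTypes);
    TYPED_TEST(MyTypedTest, DefaultIsZero) {
      EXPECT_EQ(static_cast<TypeParam>(0), TypeParam());
    }
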
diff --git a/googletest/test/googletest-param-test-test.h b/googletest/test/googletest-param-test-test.h
index 632a61f..6480570 100644
--- a/googletest/test/googletest-param-test-test.h
+++ b/googletest/test/googletest-param-test-test.h
@@ -44,7 +44,7 @@ class ExternalInstantiationTest : public ::testing::TestWithParam<int> {
// Test fixture for testing instantiation of a test in multiple
// translation units.
-class InstantiationInMultipleTranslaionUnitsTest
+class InstantiationInMultipleTranslationUnitsTest
: public ::testing::TestWithParam<int> {
};
diff --git a/googletest/test/googletest-param-test2-test.cc b/googletest/test/googletest-param-test2-test.cc
index 25bb945..2a29fb1 100644
--- a/googletest/test/googletest-param-test2-test.cc
+++ b/googletest/test/googletest-param-test2-test.cc
@@ -46,16 +46,16 @@ ParamGenerator<int> extern_gen = Values(33);
// and instantiated in another. The test is defined in
// googletest-param-test-test.cc and ExternalInstantiationTest fixture class is
// defined in gtest-param-test_test.h.
-INSTANTIATE_TEST_CASE_P(MultiplesOf33,
- ExternalInstantiationTest,
- Values(33, 66));
+INSTANTIATE_TEST_SUITE_P(MultiplesOf33,
+ ExternalInstantiationTest,
+ Values(33, 66));
// Tests that a parameterized test case can be instantiated
// in multiple translation units. Another instantiation is defined
// in googletest-param-test-test.cc and
-// InstantiationInMultipleTranslaionUnitsTest fixture is defined in
+// InstantiationInMultipleTranslationUnitsTest fixture is defined in
// gtest-param-test_test.h
-INSTANTIATE_TEST_CASE_P(Sequence2,
- InstantiationInMultipleTranslaionUnitsTest,
- Values(42*3, 42*4, 42*5));
+INSTANTIATE_TEST_SUITE_P(Sequence2,
+ InstantiationInMultipleTranslationUnitsTest,
+ Values(42*3, 42*4, 42*5));
diff --git a/googletest/test/googletest-port-test.cc b/googletest/test/googletest-port-test.cc
index e6a227b..42035cc 100644
--- a/googletest/test/googletest-port-test.cc
+++ b/googletest/test/googletest-port-test.cc
@@ -286,7 +286,9 @@ TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFileAndLine) {
EXPECT_EQ("unknown file", FormatCompilerIndependentFileLocation(nullptr, -1));
}
-#if GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA
+#if GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA || \
+ GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \
+ GTEST_OS_NETBSD || GTEST_OS_OPENBSD
void* ThreadFunc(void* data) {
internal::Mutex* mutex = static_cast<internal::Mutex*>(data);
mutex->Lock();
@@ -386,14 +388,9 @@ class RETest : public ::testing::Test {};
// Defines StringTypes as the list of all string types that class RE
// supports.
-typedef testing::Types<
- ::std::string,
-# if GTEST_HAS_GLOBAL_STRING
- ::string,
-# endif // GTEST_HAS_GLOBAL_STRING
- const char*> StringTypes;
+typedef testing::Types< ::std::string, const char*> StringTypes;
-TYPED_TEST_CASE(RETest, StringTypes);
+TYPED_TEST_SUITE(RETest, StringTypes);
// Tests RE's implicit constructors.
TYPED_TEST(RETest, ImplicitConstructorWorks) {
@@ -1051,7 +1048,7 @@ class AtomicCounterWithMutex {
pthread_mutex_init(&memory_barrier_mutex, nullptr));
GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&memory_barrier_mutex));
- SleepMilliseconds(random_.Generate(30));
+ SleepMilliseconds(static_cast<int>(random_.Generate(30)));
GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&memory_barrier_mutex));
GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&memory_barrier_mutex));
@@ -1059,7 +1056,7 @@ class AtomicCounterWithMutex {
// On Windows, performing an interlocked access puts up a memory barrier.
volatile LONG dummy = 0;
::InterlockedIncrement(&dummy);
- SleepMilliseconds(random_.Generate(30));
+ SleepMilliseconds(static_cast<int>(random_.Generate(30)));
::InterlockedIncrement(&dummy);
#else
# error "Memory barrier not implemented on this platform."
diff --git a/googletest/test/googletest-printers-test.cc b/googletest/test/googletest-printers-test.cc
index ed66fa2..4bdc9ad 100644
--- a/googletest/test/googletest-printers-test.cc
+++ b/googletest/test/googletest-printers-test.cc
@@ -37,29 +37,20 @@
#include <string.h>
#include <algorithm>
#include <deque>
+#include <forward_list>
#include <list>
#include <map>
#include <set>
#include <sstream>
#include <string>
+#include <unordered_map>
+#include <unordered_set>
#include <utility>
#include <vector>
#include "gtest/gtest-printers.h"
#include "gtest/gtest.h"
-#if GTEST_HAS_UNORDERED_MAP_
-# include <unordered_map> // NOLINT
-#endif // GTEST_HAS_UNORDERED_MAP_
-
-#if GTEST_HAS_UNORDERED_SET_
-# include <unordered_set> // NOLINT
-#endif // GTEST_HAS_UNORDERED_SET_
-
-#if GTEST_HAS_STD_FORWARD_LIST_
-# include <forward_list> // NOLINT
-#endif // GTEST_HAS_STD_FORWARD_LIST_
-
// Some user-defined types for testing the universal value printer.
// An anonymous enum type.
@@ -192,8 +183,14 @@ class PathLike {
public:
struct iterator {
typedef PathLike value_type;
+
+ iterator& operator++();
+ PathLike& operator*();
};
+ using value_type = char;
+ using const_iterator = iterator;
+
PathLike() {}
iterator begin() const { return iterator(); }
@@ -659,16 +656,6 @@ TEST(PrintArrayTest, BigArray) {
// Tests printing ::string and ::std::string.
-#if GTEST_HAS_GLOBAL_STRING
-// ::string.
-TEST(PrintStringTest, StringInGlobalNamespace) {
- const char s[] = "'\"?\\\a\b\f\n\0\r\t\v\x7F\xFF a";
- const ::string str(s, sizeof(s));
- EXPECT_EQ("\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v\\x7F\\xFF a\\0\"",
- Print(str));
-}
-#endif // GTEST_HAS_GLOBAL_STRING
-
// ::std::string.
TEST(PrintStringTest, StringInStdNamespace) {
const char s[] = "'\"?\\\a\b\f\n\0\r\t\v\x7F\xFF a";
@@ -691,19 +678,7 @@ TEST(PrintStringTest, StringAmbiguousHex) {
EXPECT_EQ("\"!\\x5-!\"", Print(::std::string("!\x5-!")));
}
-// Tests printing ::wstring and ::std::wstring.
-
-#if GTEST_HAS_GLOBAL_WSTRING
-// ::wstring.
-TEST(PrintWideStringTest, StringInGlobalNamespace) {
- const wchar_t s[] = L"'\"?\\\a\b\f\n\0\r\t\v\xD3\x576\x8D3\xC74D a";
- const ::wstring str(s, sizeof(s)/sizeof(wchar_t));
- EXPECT_EQ("L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v"
- "\\xD3\\x576\\x8D3\\xC74D a\\0\"",
- Print(str));
-}
-#endif // GTEST_HAS_GLOBAL_WSTRING
-
+// Tests printing ::std::wstring.
#if GTEST_HAS_STD_WSTRING
// ::std::wstring.
TEST(PrintWideStringTest, StringInStdNamespace) {
@@ -814,7 +789,6 @@ TEST(PrintStlContainerTest, NonEmptyDeque) {
EXPECT_EQ("{ 1, 3 }", Print(non_empty));
}
-#if GTEST_HAS_UNORDERED_MAP_
TEST(PrintStlContainerTest, OneElementHashMap) {
::std::unordered_map<int, char> map1;
@@ -834,9 +808,7 @@ TEST(PrintStlContainerTest, HashMultiMap) {
<< " where Print(map1) returns \"" << result << "\".";
}
-#endif // GTEST_HAS_UNORDERED_MAP_
-#if GTEST_HAS_UNORDERED_SET_
TEST(PrintStlContainerTest, HashSet) {
::std::unordered_set<int> set1;
@@ -873,7 +845,6 @@ TEST(PrintStlContainerTest, HashMultiSet) {
EXPECT_TRUE(std::equal(a, a + kSize, numbers.begin()));
}
-#endif // GTEST_HAS_UNORDERED_SET_
TEST(PrintStlContainerTest, List) {
const std::string a[] = {"hello", "world"};
@@ -915,14 +886,12 @@ TEST(PrintStlContainerTest, MultiSet) {
EXPECT_EQ("{ 1, 1, 1, 2, 5 }", Print(set1));
}
-#if GTEST_HAS_STD_FORWARD_LIST_
TEST(PrintStlContainerTest, SinglyLinkedList) {
int a[] = { 9, 2, 8 };
const std::forward_list<int> ints(a, a + 3);
EXPECT_EQ("{ 9, 2, 8 }", Print(ints));
}
-#endif // GTEST_HAS_STD_FORWARD_LIST_
TEST(PrintStlContainerTest, Pair) {
pair<const bool, int> p(true, 5);
@@ -1032,16 +1001,20 @@ TEST(PrintNullptrT, Basic) {
TEST(PrintReferenceWrapper, Printable) {
int x = 5;
- EXPECT_EQ("5", Print(std::ref(x)));
- EXPECT_EQ("5", Print(std::cref(x)));
+ EXPECT_EQ("@" + PrintPointer(&x) + " 5", Print(std::ref(x)));
+ EXPECT_EQ("@" + PrintPointer(&x) + " 5", Print(std::cref(x)));
}
TEST(PrintReferenceWrapper, Unprintable) {
::foo::UnprintableInFoo up;
- EXPECT_EQ("16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
- Print(std::ref(up)));
- EXPECT_EQ("16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
- Print(std::cref(up)));
+ EXPECT_EQ(
+ "@" + PrintPointer(&up) +
+ " 16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
+ Print(std::ref(up)));
+ EXPECT_EQ(
+ "@" + PrintPointer(&up) +
+ " 16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>",
+ Print(std::cref(up)));
}
// Tests printing user-defined unprintable types.
@@ -1248,21 +1221,6 @@ TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsPointer) {
// Tests formatting a char pointer when it's compared to a string object.
// In this case we want to print the char pointer as a C string.
-#if GTEST_HAS_GLOBAL_STRING
-// char pointer vs ::string
-TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsString) {
- const char* s = "hello \"world";
- EXPECT_STREQ("\"hello \\\"world\"", // The string content should be escaped.
- FormatForComparisonFailureMessage(s, ::string()).c_str());
-
- // char*
- char str[] = "hi\1";
- char* p = str;
- EXPECT_STREQ("\"hi\\x1\"", // The string content should be escaped.
- FormatForComparisonFailureMessage(p, ::string()).c_str());
-}
-#endif
-
// char pointer vs std::string
TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsStdString) {
const char* s = "hello \"world";
@@ -1276,21 +1234,6 @@ TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsStdString) {
FormatForComparisonFailureMessage(p, ::std::string()).c_str());
}
-#if GTEST_HAS_GLOBAL_WSTRING
-// wchar_t pointer vs ::wstring
-TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsWString) {
- const wchar_t* s = L"hi \"world";
- EXPECT_STREQ("L\"hi \\\"world\"", // The string content should be escaped.
- FormatForComparisonFailureMessage(s, ::wstring()).c_str());
-
- // wchar_t*
- wchar_t str[] = L"hi\1";
- wchar_t* p = str;
- EXPECT_STREQ("L\"hi\\x1\"", // The string content should be escaped.
- FormatForComparisonFailureMessage(p, ::wstring()).c_str());
-}
-#endif
-
#if GTEST_HAS_STD_WSTRING
// wchar_t pointer vs std::wstring
TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsStdWString) {
@@ -1343,16 +1286,6 @@ TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsWCharArray) {
// Tests formatting a char array when it's compared with a string object.
// In this case we want to print the array as a C string.
-#if GTEST_HAS_GLOBAL_STRING
-// char array vs string
-TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsString) {
- const char str[] = "hi \"w\0rld\"";
- EXPECT_STREQ("\"hi \\\"w\"", // The content should be escaped.
- // Embedded NUL terminates the string.
- FormatForComparisonFailureMessage(str, ::string()).c_str());
-}
-#endif
-
// char array vs std::string
TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsStdString) {
const char str[] = "hi \"world\"";
@@ -1360,15 +1293,6 @@ TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsStdString) {
FormatForComparisonFailureMessage(str, ::std::string()).c_str());
}
-#if GTEST_HAS_GLOBAL_WSTRING
-// wchar_t array vs wstring
-TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsWString) {
- const wchar_t str[] = L"hi \"world\"";
- EXPECT_STREQ("L\"hi \\\"world\\\"\"", // The content should be escaped.
- FormatForComparisonFailureMessage(str, ::wstring()).c_str());
-}
-#endif
-
#if GTEST_HAS_STD_WSTRING
// wchar_t array vs std::wstring
TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsStdWString) {
@@ -1632,6 +1556,65 @@ TEST(PrintOneofTest, Basic) {
PrintToString(Type(NonPrintable{})));
}
#endif // GTEST_HAS_ABSL
+namespace {
+class string_ref;
+
+/**
+ * This is a synthetic pointer to a fixed size string.
+ */
+class string_ptr {
+ public:
+ string_ptr(const char* data, size_t size) : data_(data), size_(size) {}
+
+ string_ptr& operator++() noexcept {
+ data_ += size_;
+ return *this;
+ }
+
+ string_ref operator*() const noexcept;
+
+ private:
+ const char* data_;
+ size_t size_;
+};
+
+/**
+ * This is a synthetic reference of a fixed size string.
+ */
+class string_ref {
+ public:
+ string_ref(const char* data, size_t size) : data_(data), size_(size) {}
+
+ string_ptr operator&() const noexcept { return {data_, size_}; } // NOLINT
+
+ bool operator==(const char* s) const noexcept {
+ if (size_ > 0 && data_[size_ - 1] != 0) {
+ return std::string(data_, size_) == std::string(s);
+ } else {
+ return std::string(data_) == std::string(s);
+ }
+ }
+
+ private:
+ const char* data_;
+ size_t size_;
+};
+
+string_ref string_ptr::operator*() const noexcept { return {data_, size_}; }
+
+TEST(string_ref, compare) {
+ const char* s = "alex\0davidjohn\0";
+ string_ptr ptr(s, 5);
+ EXPECT_EQ(*ptr, "alex");
+ EXPECT_TRUE(*ptr == "alex");
+ ++ptr;
+ EXPECT_EQ(*ptr, "david");
+ EXPECT_TRUE(*ptr == "david");
+ ++ptr;
+ EXPECT_EQ(*ptr, "john");
+}
+
+} // namespace
} // namespace gtest_printers_test
} // namespace testing
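
Besides dropping the GTEST_HAS_GLOBAL_STRING/WSTRING paths, this file updates the expectations for printing std::reference_wrapper: the wrapper now prints as "@<address> <value>" rather than just the wrapped value. A rough illustration of the new behavior, assuming only that format:

    #include <functional>
    #include <string>
    #include "gtest/gtest.h"

    TEST(ReferenceWrapperPrinting, ShowsAddressAndValue) {
      int x = 5;
      const std::string printed = testing::PrintToString(std::ref(x));
      // Expected shape after this patch: "@0x... 5".
      EXPECT_EQ('@', printed.front());
      EXPECT_NE(std::string::npos, printed.find(" 5"));
    }
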
diff --git a/googletest/test/googletest-test-part-test.cc b/googletest/test/googletest-test-part-test.cc
index 8a689be..44cf7ca 100644
--- a/googletest/test/googletest-test-part-test.cc
+++ b/googletest/test/googletest-test-part-test.cc
@@ -227,6 +227,4 @@ TEST_F(TestPartResultArrayDeathTest, DiesWhenIndexIsOutOfBound) {
EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(1), "");
}
-// FIXME: Add a test for the class HasNewFatalFailureHelper.
-
} // namespace
diff --git a/googletest/test/googletest-test2_test.cc b/googletest/test/googletest-test2_test.cc
index c2f98dc..2e425da 100644
--- a/googletest/test/googletest-test2_test.cc
+++ b/googletest/test/googletest-test2_test.cc
@@ -46,16 +46,16 @@ ParamGenerator<int> extern_gen_2 = Values(33);
// and instantiated in another. The test is defined in
// googletest-param-test-test.cc and ExternalInstantiationTest fixture class is
// defined in gtest-param-test_test.h.
-INSTANTIATE_TEST_CASE_P(MultiplesOf33,
- ExternalInstantiationTest,
- Values(33, 66));
+INSTANTIATE_TEST_SUITE_P(MultiplesOf33,
+ ExternalInstantiationTest,
+ Values(33, 66));
// Tests that a parameterized test case can be instantiated
// in multiple translation units. Another instantiation is defined
// in googletest-param-test-test.cc and
-// InstantiationInMultipleTranslaionUnitsTest fixture is defined in
+// InstantiationInMultipleTranslationUnitsTest fixture is defined in
// gtest-param-test_test.h
-INSTANTIATE_TEST_CASE_P(Sequence2,
- InstantiationInMultipleTranslaionUnitsTest,
- Values(42*3, 42*4, 42*5));
+INSTANTIATE_TEST_SUITE_P(Sequence2,
+ InstantiationInMultipleTranslationUnitsTest,
+ Values(42*3, 42*4, 42*5));
diff --git a/googletest/test/googletest-throw-on-failure-test.py b/googletest/test/googletest-throw-on-failure-test.py
index 204e43e..a38cd33 100755
--- a/googletest/test/googletest-throw-on-failure-test.py
+++ b/googletest/test/googletest-throw-on-failure-test.py
@@ -73,8 +73,7 @@ def Run(command):
return p.exited and p.exit_code == 0
-# The tests. FIXME: refactor the class to share common
-# logic with code in googletest-break-on-failure-unittest.py.
+# The tests.
class ThrowOnFailureTest(gtest_test_utils.TestCase):
"""Tests the throw-on-failure mode."""
@@ -87,7 +86,7 @@ class ThrowOnFailureTest(gtest_test_utils.TestCase):
variable; None if the variable should be unset.
flag_value: value of the --gtest_break_on_failure flag;
None if the flag should not be present.
- should_fail: True iff the program is expected to fail.
+ should_fail: True if the program is expected to fail.
"""
SetEnvVar(THROW_ON_FAILURE, env_var_value)
diff --git a/googletest/test/gtest-typed-test2_test.cc b/googletest/test/gtest-typed-test2_test.cc
index ed96421..7000160 100644
--- a/googletest/test/gtest-typed-test2_test.cc
+++ b/googletest/test/gtest-typed-test2_test.cc
@@ -38,7 +38,7 @@
// Tests that the same type-parameterized test case can be
// instantiated in different translation units linked together.
// (ContainerTest is also instantiated in gtest-typed-test_test.cc.)
-INSTANTIATE_TYPED_TEST_CASE_P(Vector, ContainerTest,
- testing::Types<std::vector<int> >);
+INSTANTIATE_TYPED_TEST_SUITE_P(Vector, ContainerTest,
+ testing::Types<std::vector<int> >);
#endif // GTEST_HAS_TYPED_TEST_P
diff --git a/googletest/test/gtest-typed-test_test.cc b/googletest/test/gtest-typed-test_test.cc
index de6cc53..5411832 100644
--- a/googletest/test/gtest-typed-test_test.cc
+++ b/googletest/test/gtest-typed-test_test.cc
@@ -31,6 +31,7 @@
#include "test/gtest-typed-test_test.h"
#include <set>
+#include <type_traits>
#include <vector>
#include "gtest/gtest.h"
@@ -41,19 +42,19 @@ GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */)
using testing::Test;
-// Used for testing that SetUpTestCase()/TearDownTestCase(), fixture
+// Used for testing that SetUpTestSuite()/TearDownTestSuite(), fixture
// ctor/dtor, and SetUp()/TearDown() work correctly in typed tests and
// type-parameterized test.
template <typename T>
class CommonTest : public Test {
- // For some technical reason, SetUpTestCase() and TearDownTestCase()
+ // For some technical reason, SetUpTestSuite() and TearDownTestSuite()
// must be public.
public:
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
shared_ = new T(5);
}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
delete shared_;
shared_ = nullptr;
}
@@ -92,11 +93,11 @@ T* CommonTest<T>::shared_ = nullptr;
using testing::Types;
-// Tests that SetUpTestCase()/TearDownTestCase(), fixture ctor/dtor,
+// Tests that SetUpTestSuite()/TearDownTestSuite(), fixture ctor/dtor,
// and SetUp()/TearDown() work correctly in typed tests
typedef Types<char, int> TwoTypes;
-TYPED_TEST_CASE(CommonTest, TwoTypes);
+TYPED_TEST_SUITE(CommonTest, TwoTypes);
TYPED_TEST(CommonTest, ValuesAreCorrect) {
// Static members of the fixture class template can be visited via
@@ -128,25 +129,25 @@ TYPED_TEST(CommonTest, ValuesAreStillCorrect) {
EXPECT_EQ(static_cast<TypeParam>(2), this->value_);
}
-// Tests that multiple TYPED_TEST_CASE's can be defined in the same
+// Tests that multiple TYPED_TEST_SUITE's can be defined in the same
// translation unit.
template <typename T>
class TypedTest1 : public Test {
};
-// Verifies that the second argument of TYPED_TEST_CASE can be a
+// Verifies that the second argument of TYPED_TEST_SUITE can be a
// single type.
-TYPED_TEST_CASE(TypedTest1, int);
+TYPED_TEST_SUITE(TypedTest1, int);
TYPED_TEST(TypedTest1, A) {}
template <typename T>
class TypedTest2 : public Test {
};
-// Verifies that the second argument of TYPED_TEST_CASE can be a
+// Verifies that the second argument of TYPED_TEST_SUITE can be a
// Types<...> type list.
-TYPED_TEST_CASE(TypedTest2, Types<int>);
+TYPED_TEST_SUITE(TypedTest2, Types<int>);
// This also verifies that tests from different typed test cases can
// share the same name.
@@ -161,7 +162,7 @@ class NumericTest : public Test {
};
typedef Types<int, long> NumericTypes;
-TYPED_TEST_CASE(NumericTest, NumericTypes);
+TYPED_TEST_SUITE(NumericTest, NumericTypes);
TYPED_TEST(NumericTest, DefaultIsZero) {
EXPECT_EQ(0, TypeParam());
@@ -177,25 +178,25 @@ class TypedTestNames {
public:
template <typename T>
static std::string GetName(int i) {
- if (testing::internal::IsSame<T, char>::value) {
+ if (std::is_same<T, char>::value) {
return std::string("char") + ::testing::PrintToString(i);
}
- if (testing::internal::IsSame<T, int>::value) {
+ if (std::is_same<T, int>::value) {
return std::string("int") + ::testing::PrintToString(i);
}
}
};
-TYPED_TEST_CASE(TypedTestWithNames, TwoTypes, TypedTestNames);
+TYPED_TEST_SUITE(TypedTestWithNames, TwoTypes, TypedTestNames);
-TYPED_TEST(TypedTestWithNames, TestCaseName) {
- if (testing::internal::IsSame<TypeParam, char>::value) {
+TYPED_TEST(TypedTestWithNames, TestSuiteName) {
+ if (std::is_same<TypeParam, char>::value) {
EXPECT_STREQ(::testing::UnitTest::GetInstance()
->current_test_info()
->test_case_name(),
"TypedTestWithNames/char0");
}
- if (testing::internal::IsSame<TypeParam, int>::value) {
+ if (std::is_same<TypeParam, int>::value) {
EXPECT_STREQ(::testing::UnitTest::GetInstance()
->current_test_info()
->test_case_name(),
@@ -209,11 +210,11 @@ TYPED_TEST(TypedTestWithNames, TestCaseName) {
#if GTEST_HAS_TYPED_TEST_P
using testing::Types;
-using testing::internal::TypedTestCasePState;
+using testing::internal::TypedTestSuitePState;
-// Tests TypedTestCasePState.
+// Tests TypedTestSuitePState.
-class TypedTestCasePStateTest : public Test {
+class TypedTestSuitePStateTest : public Test {
protected:
void SetUp() override {
state_.AddTestName("foo.cc", 0, "FooTest", "A");
@@ -221,10 +222,10 @@ class TypedTestCasePStateTest : public Test {
state_.AddTestName("foo.cc", 0, "FooTest", "C");
}
- TypedTestCasePState state_;
+ TypedTestSuitePState state_;
};
-TEST_F(TypedTestCasePStateTest, SucceedsForMatchingList) {
+TEST_F(TypedTestSuitePStateTest, SucceedsForMatchingList) {
const char* tests = "A, B, C";
EXPECT_EQ(tests,
state_.VerifyRegisteredTestNames("foo.cc", 1, tests));
@@ -232,27 +233,27 @@ TEST_F(TypedTestCasePStateTest, SucceedsForMatchingList) {
// Makes sure that the order of the tests and spaces around the names
// don't matter.
-TEST_F(TypedTestCasePStateTest, IgnoresOrderAndSpaces) {
+TEST_F(TypedTestSuitePStateTest, IgnoresOrderAndSpaces) {
const char* tests = "A,C, B";
EXPECT_EQ(tests,
state_.VerifyRegisteredTestNames("foo.cc", 1, tests));
}
-typedef TypedTestCasePStateTest TypedTestCasePStateDeathTest;
+using TypedTestSuitePStateDeathTest = TypedTestSuitePStateTest;
-TEST_F(TypedTestCasePStateDeathTest, DetectsDuplicates) {
+TEST_F(TypedTestSuitePStateDeathTest, DetectsDuplicates) {
EXPECT_DEATH_IF_SUPPORTED(
state_.VerifyRegisteredTestNames("foo.cc", 1, "A, B, A, C"),
"foo\\.cc.1.?: Test A is listed more than once\\.");
}
-TEST_F(TypedTestCasePStateDeathTest, DetectsExtraTest) {
+TEST_F(TypedTestSuitePStateDeathTest, DetectsExtraTest) {
EXPECT_DEATH_IF_SUPPORTED(
state_.VerifyRegisteredTestNames("foo.cc", 1, "A, B, C, D"),
- "foo\\.cc.1.?: No test named D can be found in this test case\\.");
+ "foo\\.cc.1.?: No test named D can be found in this test suite\\.");
}
-TEST_F(TypedTestCasePStateDeathTest, DetectsMissedTest) {
+TEST_F(TypedTestSuitePStateDeathTest, DetectsMissedTest) {
EXPECT_DEATH_IF_SUPPORTED(
state_.VerifyRegisteredTestNames("foo.cc", 1, "A, C"),
"foo\\.cc.1.?: You forgot to list test B\\.");
@@ -260,22 +261,22 @@ TEST_F(TypedTestCasePStateDeathTest, DetectsMissedTest) {
// Tests that defining a test for a parameterized test case generates
// a run-time error if the test case has been registered.
-TEST_F(TypedTestCasePStateDeathTest, DetectsTestAfterRegistration) {
+TEST_F(TypedTestSuitePStateDeathTest, DetectsTestAfterRegistration) {
state_.VerifyRegisteredTestNames("foo.cc", 1, "A, B, C");
EXPECT_DEATH_IF_SUPPORTED(
state_.AddTestName("foo.cc", 2, "FooTest", "D"),
- "foo\\.cc.2.?: Test D must be defined before REGISTER_TYPED_TEST_CASE_P"
+ "foo\\.cc.2.?: Test D must be defined before REGISTER_TYPED_TEST_SUITE_P"
"\\(FooTest, \\.\\.\\.\\)\\.");
}
-// Tests that SetUpTestCase()/TearDownTestCase(), fixture ctor/dtor,
+// Tests that SetUpTestSuite()/TearDownTestSuite(), fixture ctor/dtor,
// and SetUp()/TearDown() work correctly in type-parameterized tests.
template <typename T>
class DerivedTest : public CommonTest<T> {
};
-TYPED_TEST_CASE_P(DerivedTest);
+TYPED_TEST_SUITE_P(DerivedTest);
TYPED_TEST_P(DerivedTest, ValuesAreCorrect) {
// Static members of the fixture class template can be visited via
@@ -297,27 +298,27 @@ TYPED_TEST_P(DerivedTest, ValuesAreStillCorrect) {
EXPECT_EQ(2, this->value_);
}
-REGISTER_TYPED_TEST_CASE_P(DerivedTest,
+REGISTER_TYPED_TEST_SUITE_P(DerivedTest,
ValuesAreCorrect, ValuesAreStillCorrect);
typedef Types<short, long> MyTwoTypes;
-INSTANTIATE_TYPED_TEST_CASE_P(My, DerivedTest, MyTwoTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, DerivedTest, MyTwoTypes);
// Tests that custom names work with type parametrized tests. We reuse the
// TwoTypes from above here.
template <typename T>
class TypeParametrizedTestWithNames : public Test {};
-TYPED_TEST_CASE_P(TypeParametrizedTestWithNames);
+TYPED_TEST_SUITE_P(TypeParametrizedTestWithNames);
-TYPED_TEST_P(TypeParametrizedTestWithNames, TestCaseName) {
- if (testing::internal::IsSame<TypeParam, char>::value) {
+TYPED_TEST_P(TypeParametrizedTestWithNames, TestSuiteName) {
+ if (std::is_same<TypeParam, char>::value) {
EXPECT_STREQ(::testing::UnitTest::GetInstance()
->current_test_info()
->test_case_name(),
"CustomName/TypeParametrizedTestWithNames/parChar0");
}
- if (testing::internal::IsSame<TypeParam, int>::value) {
+ if (std::is_same<TypeParam, int>::value) {
EXPECT_STREQ(::testing::UnitTest::GetInstance()
->current_test_info()
->test_case_name(),
@@ -325,77 +326,77 @@ TYPED_TEST_P(TypeParametrizedTestWithNames, TestCaseName) {
}
}
-REGISTER_TYPED_TEST_CASE_P(TypeParametrizedTestWithNames, TestCaseName);
+REGISTER_TYPED_TEST_SUITE_P(TypeParametrizedTestWithNames, TestSuiteName);
class TypeParametrizedTestNames {
public:
template <typename T>
static std::string GetName(int i) {
- if (testing::internal::IsSame<T, char>::value) {
+ if (std::is_same<T, char>::value) {
return std::string("parChar") + ::testing::PrintToString(i);
}
- if (testing::internal::IsSame<T, int>::value) {
+ if (std::is_same<T, int>::value) {
return std::string("parInt") + ::testing::PrintToString(i);
}
}
};
-INSTANTIATE_TYPED_TEST_CASE_P(CustomName, TypeParametrizedTestWithNames,
+INSTANTIATE_TYPED_TEST_SUITE_P(CustomName, TypeParametrizedTestWithNames,
TwoTypes, TypeParametrizedTestNames);
-// Tests that multiple TYPED_TEST_CASE_P's can be defined in the same
+// Tests that multiple TYPED_TEST_SUITE_P's can be defined in the same
// translation unit.
template <typename T>
class TypedTestP1 : public Test {
};
-TYPED_TEST_CASE_P(TypedTestP1);
+TYPED_TEST_SUITE_P(TypedTestP1);
-// For testing that the code between TYPED_TEST_CASE_P() and
+// For testing that the code between TYPED_TEST_SUITE_P() and
// TYPED_TEST_P() is not enclosed in a namespace.
-typedef int IntAfterTypedTestCaseP;
+using IntAfterTypedTestSuiteP = int;
TYPED_TEST_P(TypedTestP1, A) {}
TYPED_TEST_P(TypedTestP1, B) {}
// For testing that the code between TYPED_TEST_P() and
-// REGISTER_TYPED_TEST_CASE_P() is not enclosed in a namespace.
-typedef int IntBeforeRegisterTypedTestCaseP;
+// REGISTER_TYPED_TEST_SUITE_P() is not enclosed in a namespace.
+using IntBeforeRegisterTypedTestSuiteP = int;
-REGISTER_TYPED_TEST_CASE_P(TypedTestP1, A, B);
+REGISTER_TYPED_TEST_SUITE_P(TypedTestP1, A, B);
template <typename T>
class TypedTestP2 : public Test {
};
-TYPED_TEST_CASE_P(TypedTestP2);
+TYPED_TEST_SUITE_P(TypedTestP2);
// This also verifies that tests from different type-parameterized
// test cases can share the same name.
TYPED_TEST_P(TypedTestP2, A) {}
-REGISTER_TYPED_TEST_CASE_P(TypedTestP2, A);
+REGISTER_TYPED_TEST_SUITE_P(TypedTestP2, A);
-// Verifies that the code between TYPED_TEST_CASE_P() and
-// REGISTER_TYPED_TEST_CASE_P() is not enclosed in a namespace.
-IntAfterTypedTestCaseP after = 0;
-IntBeforeRegisterTypedTestCaseP before = 0;
+// Verifies that the code between TYPED_TEST_SUITE_P() and
+// REGISTER_TYPED_TEST_SUITE_P() is not enclosed in a namespace.
+IntAfterTypedTestSuiteP after = 0;
+IntBeforeRegisterTypedTestSuiteP before = 0;
-// Verifies that the last argument of INSTANTIATE_TYPED_TEST_CASE_P()
+// Verifies that the last argument of INSTANTIATE_TYPED_TEST_SUITE_P()
// can be either a single type or a Types<...> type list.
-INSTANTIATE_TYPED_TEST_CASE_P(Int, TypedTestP1, int);
-INSTANTIATE_TYPED_TEST_CASE_P(Int, TypedTestP2, Types<int>);
+INSTANTIATE_TYPED_TEST_SUITE_P(Int, TypedTestP1, int);
+INSTANTIATE_TYPED_TEST_SUITE_P(Int, TypedTestP2, Types<int>);
// Tests that the same type-parameterized test case can be
// instantiated more than once in the same translation unit.
-INSTANTIATE_TYPED_TEST_CASE_P(Double, TypedTestP2, Types<double>);
+INSTANTIATE_TYPED_TEST_SUITE_P(Double, TypedTestP2, Types<double>);
// Tests that the same type-parameterized test case can be
// instantiated in different translation units linked together.
// (ContainerTest is also instantiated in gtest-typed-test_test.cc.)
typedef Types<std::vector<double>, std::set<char> > MyContainers;
-INSTANTIATE_TYPED_TEST_CASE_P(My, ContainerTest, MyContainers);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, ContainerTest, MyContainers);
// Tests that a type-parameterized test case can be defined and
// instantiated in a namespace.
@@ -406,7 +407,7 @@ template <typename T>
class NumericTest : public Test {
};
-TYPED_TEST_CASE_P(NumericTest);
+TYPED_TEST_SUITE_P(NumericTest);
TYPED_TEST_P(NumericTest, DefaultIsZero) {
EXPECT_EQ(0, TypeParam());
@@ -416,29 +417,29 @@ TYPED_TEST_P(NumericTest, ZeroIsLessThanOne) {
EXPECT_LT(TypeParam(0), TypeParam(1));
}
-REGISTER_TYPED_TEST_CASE_P(NumericTest,
+REGISTER_TYPED_TEST_SUITE_P(NumericTest,
DefaultIsZero, ZeroIsLessThanOne);
typedef Types<int, double> NumericTypes;
-INSTANTIATE_TYPED_TEST_CASE_P(My, NumericTest, NumericTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, NumericTest, NumericTypes);
static const char* GetTestName() {
return testing::UnitTest::GetInstance()->current_test_info()->name();
}
// Test the stripping of space from test names
template <typename T> class TrimmedTest : public Test { };
-TYPED_TEST_CASE_P(TrimmedTest);
+TYPED_TEST_SUITE_P(TrimmedTest);
TYPED_TEST_P(TrimmedTest, Test1) { EXPECT_STREQ("Test1", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test2) { EXPECT_STREQ("Test2", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test3) { EXPECT_STREQ("Test3", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test4) { EXPECT_STREQ("Test4", GetTestName()); }
TYPED_TEST_P(TrimmedTest, Test5) { EXPECT_STREQ("Test5", GetTestName()); }
-REGISTER_TYPED_TEST_CASE_P(
+REGISTER_TYPED_TEST_SUITE_P(
TrimmedTest,
Test1, Test2,Test3 , Test4 ,Test5 ); // NOLINT
template <typename T1, typename T2> struct MyPair {};
// Be sure to try a type with a comma in its name just in case it matters.
typedef Types<int, double, MyPair<int, int> > TrimTypes;
-INSTANTIATE_TYPED_TEST_CASE_P(My, TrimmedTest, TrimTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, TrimmedTest, TrimTypes);
} // namespace library2
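
Most of this file swaps the TYPED_TEST_CASE* macros for their *_SUITE* counterparts and replaces the internal IsSame trait with std::is_same. A compact sketch of the full registration sequence under the new names (WidgetTest and its type list are illustrative only):

    #include <type_traits>
    #include "gtest/gtest.h"

    template <typename T>
    class WidgetTest : public ::testing::Test {};

    // Declare the suite, define its tests, register them, then instantiate.
    TYPED_TEST_SUITE_P(WidgetTest);

    TYPED_TEST_P(WidgetTest, DefaultConstructs) {
      TypeParam value{};
      // std::is_same stands in for the removed testing::internal::IsSame.
      EXPECT_TRUE((std::is_same<decltype(value), TypeParam>::value));
    }

    REGISTER_TYPED_TEST_SUITE_P(WidgetTest, DefaultConstructs);

    using WidgetTypes = ::testing::Types<int, double>;
    INSTANTIATE_TYPED_TEST_SUITE_P(My, WidgetTest, WidgetTypes);
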
diff --git a/googletest/test/gtest-typed-test_test.h b/googletest/test/gtest-typed-test_test.h
index 2cce67c..23137b7 100644
--- a/googletest/test/gtest-typed-test_test.h
+++ b/googletest/test/gtest-typed-test_test.h
@@ -46,7 +46,7 @@ template <typename T>
class ContainerTest : public Test {
};
-TYPED_TEST_CASE_P(ContainerTest);
+TYPED_TEST_SUITE_P(ContainerTest);
TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) {
TypeParam container;
@@ -57,8 +57,8 @@ TYPED_TEST_P(ContainerTest, InitialSizeIsZero) {
EXPECT_EQ(0U, container.size());
}
-REGISTER_TYPED_TEST_CASE_P(ContainerTest,
- CanBeDefaultConstructed, InitialSizeIsZero);
+REGISTER_TYPED_TEST_SUITE_P(ContainerTest,
+ CanBeDefaultConstructed, InitialSizeIsZero);
#endif // GTEST_HAS_TYPED_TEST_P
diff --git a/googletest/test/gtest-unittest-api_test.cc b/googletest/test/gtest-unittest-api_test.cc
index 2bcbedf..480a41f 100644
--- a/googletest/test/gtest-unittest-api_test.cc
+++ b/googletest/test/gtest-unittest-api_test.cc
@@ -51,59 +51,59 @@ struct LessByName {
class UnitTestHelper {
public:
- // Returns the array of pointers to all test cases sorted by the test case
+ // Returns the array of pointers to all test suites sorted by the test suite
// name. The caller is responsible for deleting the array.
- static TestCase const** GetSortedTestCases() {
+ static TestSuite const** GetSortedTestSuites() {
UnitTest& unit_test = *UnitTest::GetInstance();
- TestCase const** const test_cases =
- new const TestCase*[unit_test.total_test_case_count()];
+ auto const** const test_suites =
+ new const TestSuite*[unit_test.total_test_suite_count()];
- for (int i = 0; i < unit_test.total_test_case_count(); ++i)
- test_cases[i] = unit_test.GetTestCase(i);
+ for (int i = 0; i < unit_test.total_test_suite_count(); ++i)
+ test_suites[i] = unit_test.GetTestSuite(i);
- std::sort(test_cases,
- test_cases + unit_test.total_test_case_count(),
- LessByName<TestCase>());
- return test_cases;
+ std::sort(test_suites,
+ test_suites + unit_test.total_test_suite_count(),
+ LessByName<TestSuite>());
+ return test_suites;
}
- // Returns the test case by its name. The caller doesn't own the returned
+ // Returns the test suite by its name. The caller doesn't own the returned
// pointer.
- static const TestCase* FindTestCase(const char* name) {
+ static const TestSuite* FindTestSuite(const char* name) {
UnitTest& unit_test = *UnitTest::GetInstance();
- for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
- const TestCase* test_case = unit_test.GetTestCase(i);
- if (0 == strcmp(test_case->name(), name))
- return test_case;
+ for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
+ const TestSuite* test_suite = unit_test.GetTestSuite(i);
+ if (0 == strcmp(test_suite->name(), name))
+ return test_suite;
}
return nullptr;
}
- // Returns the array of pointers to all tests in a particular test case
+ // Returns the array of pointers to all tests in a particular test suite
// sorted by the test name. The caller is responsible for deleting the
// array.
- static TestInfo const** GetSortedTests(const TestCase* test_case) {
+ static TestInfo const** GetSortedTests(const TestSuite* test_suite) {
TestInfo const** const tests =
- new const TestInfo*[test_case->total_test_count()];
+ new const TestInfo*[test_suite->total_test_count()];
- for (int i = 0; i < test_case->total_test_count(); ++i)
- tests[i] = test_case->GetTestInfo(i);
+ for (int i = 0; i < test_suite->total_test_count(); ++i)
+ tests[i] = test_suite->GetTestInfo(i);
- std::sort(tests, tests + test_case->total_test_count(),
+ std::sort(tests, tests + test_suite->total_test_count(),
LessByName<TestInfo>());
return tests;
}
};
#if GTEST_HAS_TYPED_TEST
-template <typename T> class TestCaseWithCommentTest : public Test {};
-TYPED_TEST_CASE(TestCaseWithCommentTest, Types<int>);
-TYPED_TEST(TestCaseWithCommentTest, Dummy) {}
+template <typename T> class TestSuiteWithCommentTest : public Test {};
+TYPED_TEST_SUITE(TestSuiteWithCommentTest, Types<int>);
+TYPED_TEST(TestSuiteWithCommentTest, Dummy) {}
-const int kTypedTestCases = 1;
+const int kTypedTestSuites = 1;
const int kTypedTests = 1;
#else
-const int kTypedTestCases = 0;
+const int kTypedTestSuites = 0;
const int kTypedTests = 0;
#endif // GTEST_HAS_TYPED_TEST
@@ -113,21 +113,21 @@ const int kTypedTests = 0;
TEST(ApiTest, UnitTestImmutableAccessorsWork) {
UnitTest* unit_test = UnitTest::GetInstance();
- ASSERT_EQ(2 + kTypedTestCases, unit_test->total_test_case_count());
- EXPECT_EQ(1 + kTypedTestCases, unit_test->test_case_to_run_count());
+ ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count());
+ EXPECT_EQ(1 + kTypedTestSuites, unit_test->test_suite_to_run_count());
EXPECT_EQ(2, unit_test->disabled_test_count());
EXPECT_EQ(5 + kTypedTests, unit_test->total_test_count());
EXPECT_EQ(3 + kTypedTests, unit_test->test_to_run_count());
- const TestCase** const test_cases = UnitTestHelper::GetSortedTestCases();
+ const TestSuite** const test_suites = UnitTestHelper::GetSortedTestSuites();
- EXPECT_STREQ("ApiTest", test_cases[0]->name());
- EXPECT_STREQ("DISABLED_Test", test_cases[1]->name());
+ EXPECT_STREQ("ApiTest", test_suites[0]->name());
+ EXPECT_STREQ("DISABLED_Test", test_suites[1]->name());
#if GTEST_HAS_TYPED_TEST
- EXPECT_STREQ("TestCaseWithCommentTest/0", test_cases[2]->name());
+ EXPECT_STREQ("TestSuiteWithCommentTest/0", test_suites[2]->name());
#endif // GTEST_HAS_TYPED_TEST
- delete[] test_cases;
+ delete[] test_suites;
// The following lines initiate actions to verify certain methods in
// FinalSuccessChecker::TearDown.
@@ -143,39 +143,39 @@ AssertionResult IsNull(const char* str) {
return AssertionSuccess();
}
-TEST(ApiTest, TestCaseImmutableAccessorsWork) {
- const TestCase* test_case = UnitTestHelper::FindTestCase("ApiTest");
- ASSERT_TRUE(test_case != nullptr);
+TEST(ApiTest, TestSuiteImmutableAccessorsWork) {
+ const TestSuite* test_suite = UnitTestHelper::FindTestSuite("ApiTest");
+ ASSERT_TRUE(test_suite != nullptr);
- EXPECT_STREQ("ApiTest", test_case->name());
- EXPECT_TRUE(IsNull(test_case->type_param()));
- EXPECT_TRUE(test_case->should_run());
- EXPECT_EQ(1, test_case->disabled_test_count());
- EXPECT_EQ(3, test_case->test_to_run_count());
- ASSERT_EQ(4, test_case->total_test_count());
+ EXPECT_STREQ("ApiTest", test_suite->name());
+ EXPECT_TRUE(IsNull(test_suite->type_param()));
+ EXPECT_TRUE(test_suite->should_run());
+ EXPECT_EQ(1, test_suite->disabled_test_count());
+ EXPECT_EQ(3, test_suite->test_to_run_count());
+ ASSERT_EQ(4, test_suite->total_test_count());
- const TestInfo** tests = UnitTestHelper::GetSortedTests(test_case);
+ const TestInfo** tests = UnitTestHelper::GetSortedTests(test_suite);
EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name());
- EXPECT_STREQ("ApiTest", tests[0]->test_case_name());
+ EXPECT_STREQ("ApiTest", tests[0]->test_suite_name());
EXPECT_TRUE(IsNull(tests[0]->value_param()));
EXPECT_TRUE(IsNull(tests[0]->type_param()));
EXPECT_FALSE(tests[0]->should_run());
- EXPECT_STREQ("TestCaseDisabledAccessorsWork", tests[1]->name());
- EXPECT_STREQ("ApiTest", tests[1]->test_case_name());
+ EXPECT_STREQ("TestSuiteDisabledAccessorsWork", tests[1]->name());
+ EXPECT_STREQ("ApiTest", tests[1]->test_suite_name());
EXPECT_TRUE(IsNull(tests[1]->value_param()));
EXPECT_TRUE(IsNull(tests[1]->type_param()));
EXPECT_TRUE(tests[1]->should_run());
- EXPECT_STREQ("TestCaseImmutableAccessorsWork", tests[2]->name());
- EXPECT_STREQ("ApiTest", tests[2]->test_case_name());
+ EXPECT_STREQ("TestSuiteImmutableAccessorsWork", tests[2]->name());
+ EXPECT_STREQ("ApiTest", tests[2]->test_suite_name());
EXPECT_TRUE(IsNull(tests[2]->value_param()));
EXPECT_TRUE(IsNull(tests[2]->type_param()));
EXPECT_TRUE(tests[2]->should_run());
EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name());
- EXPECT_STREQ("ApiTest", tests[3]->test_case_name());
+ EXPECT_STREQ("ApiTest", tests[3]->test_suite_name());
EXPECT_TRUE(IsNull(tests[3]->value_param()));
EXPECT_TRUE(IsNull(tests[3]->type_param()));
EXPECT_TRUE(tests[3]->should_run());
@@ -184,20 +184,20 @@ TEST(ApiTest, TestCaseImmutableAccessorsWork) {
tests = nullptr;
#if GTEST_HAS_TYPED_TEST
- test_case = UnitTestHelper::FindTestCase("TestCaseWithCommentTest/0");
- ASSERT_TRUE(test_case != nullptr);
+ test_suite = UnitTestHelper::FindTestSuite("TestSuiteWithCommentTest/0");
+ ASSERT_TRUE(test_suite != nullptr);
- EXPECT_STREQ("TestCaseWithCommentTest/0", test_case->name());
- EXPECT_STREQ(GetTypeName<int>().c_str(), test_case->type_param());
- EXPECT_TRUE(test_case->should_run());
- EXPECT_EQ(0, test_case->disabled_test_count());
- EXPECT_EQ(1, test_case->test_to_run_count());
- ASSERT_EQ(1, test_case->total_test_count());
+ EXPECT_STREQ("TestSuiteWithCommentTest/0", test_suite->name());
+ EXPECT_STREQ(GetTypeName<int>().c_str(), test_suite->type_param());
+ EXPECT_TRUE(test_suite->should_run());
+ EXPECT_EQ(0, test_suite->disabled_test_count());
+ EXPECT_EQ(1, test_suite->test_to_run_count());
+ ASSERT_EQ(1, test_suite->total_test_count());
- tests = UnitTestHelper::GetSortedTests(test_case);
+ tests = UnitTestHelper::GetSortedTests(test_suite);
EXPECT_STREQ("Dummy", tests[0]->name());
- EXPECT_STREQ("TestCaseWithCommentTest/0", tests[0]->test_case_name());
+ EXPECT_STREQ("TestSuiteWithCommentTest/0", tests[0]->test_suite_name());
EXPECT_TRUE(IsNull(tests[0]->value_param()));
EXPECT_STREQ(GetTypeName<int>().c_str(), tests[0]->type_param());
EXPECT_TRUE(tests[0]->should_run());
@@ -206,27 +206,27 @@ TEST(ApiTest, TestCaseImmutableAccessorsWork) {
#endif // GTEST_HAS_TYPED_TEST
}
-TEST(ApiTest, TestCaseDisabledAccessorsWork) {
- const TestCase* test_case = UnitTestHelper::FindTestCase("DISABLED_Test");
- ASSERT_TRUE(test_case != nullptr);
+TEST(ApiTest, TestSuiteDisabledAccessorsWork) {
+ const TestSuite* test_suite = UnitTestHelper::FindTestSuite("DISABLED_Test");
+ ASSERT_TRUE(test_suite != nullptr);
- EXPECT_STREQ("DISABLED_Test", test_case->name());
- EXPECT_TRUE(IsNull(test_case->type_param()));
- EXPECT_FALSE(test_case->should_run());
- EXPECT_EQ(1, test_case->disabled_test_count());
- EXPECT_EQ(0, test_case->test_to_run_count());
- ASSERT_EQ(1, test_case->total_test_count());
+ EXPECT_STREQ("DISABLED_Test", test_suite->name());
+ EXPECT_TRUE(IsNull(test_suite->type_param()));
+ EXPECT_FALSE(test_suite->should_run());
+ EXPECT_EQ(1, test_suite->disabled_test_count());
+ EXPECT_EQ(0, test_suite->test_to_run_count());
+ ASSERT_EQ(1, test_suite->total_test_count());
- const TestInfo* const test_info = test_case->GetTestInfo(0);
+ const TestInfo* const test_info = test_suite->GetTestInfo(0);
EXPECT_STREQ("Dummy2", test_info->name());
- EXPECT_STREQ("DISABLED_Test", test_info->test_case_name());
+ EXPECT_STREQ("DISABLED_Test", test_info->test_suite_name());
EXPECT_TRUE(IsNull(test_info->value_param()));
EXPECT_TRUE(IsNull(test_info->type_param()));
EXPECT_FALSE(test_info->should_run());
}
// These two tests are here to provide support for testing
-// test_case_to_run_count, disabled_test_count, and test_to_run_count.
+// test_suite_to_run_count, disabled_test_count, and test_to_run_count.
TEST(ApiTest, DISABLED_Dummy1) {}
TEST(DISABLED_Test, Dummy2) {}
@@ -235,62 +235,62 @@ class FinalSuccessChecker : public Environment {
void TearDown() override {
UnitTest* unit_test = UnitTest::GetInstance();
- EXPECT_EQ(1 + kTypedTestCases, unit_test->successful_test_case_count());
+ EXPECT_EQ(1 + kTypedTestSuites, unit_test->successful_test_suite_count());
EXPECT_EQ(3 + kTypedTests, unit_test->successful_test_count());
- EXPECT_EQ(0, unit_test->failed_test_case_count());
+ EXPECT_EQ(0, unit_test->failed_test_suite_count());
EXPECT_EQ(0, unit_test->failed_test_count());
EXPECT_TRUE(unit_test->Passed());
EXPECT_FALSE(unit_test->Failed());
- ASSERT_EQ(2 + kTypedTestCases, unit_test->total_test_case_count());
-
- const TestCase** const test_cases = UnitTestHelper::GetSortedTestCases();
-
- EXPECT_STREQ("ApiTest", test_cases[0]->name());
- EXPECT_TRUE(IsNull(test_cases[0]->type_param()));
- EXPECT_TRUE(test_cases[0]->should_run());
- EXPECT_EQ(1, test_cases[0]->disabled_test_count());
- ASSERT_EQ(4, test_cases[0]->total_test_count());
- EXPECT_EQ(3, test_cases[0]->successful_test_count());
- EXPECT_EQ(0, test_cases[0]->failed_test_count());
- EXPECT_TRUE(test_cases[0]->Passed());
- EXPECT_FALSE(test_cases[0]->Failed());
-
- EXPECT_STREQ("DISABLED_Test", test_cases[1]->name());
- EXPECT_TRUE(IsNull(test_cases[1]->type_param()));
- EXPECT_FALSE(test_cases[1]->should_run());
- EXPECT_EQ(1, test_cases[1]->disabled_test_count());
- ASSERT_EQ(1, test_cases[1]->total_test_count());
- EXPECT_EQ(0, test_cases[1]->successful_test_count());
- EXPECT_EQ(0, test_cases[1]->failed_test_count());
+ ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count());
+
+ const TestSuite** const test_suites = UnitTestHelper::GetSortedTestSuites();
+
+ EXPECT_STREQ("ApiTest", test_suites[0]->name());
+ EXPECT_TRUE(IsNull(test_suites[0]->type_param()));
+ EXPECT_TRUE(test_suites[0]->should_run());
+ EXPECT_EQ(1, test_suites[0]->disabled_test_count());
+ ASSERT_EQ(4, test_suites[0]->total_test_count());
+ EXPECT_EQ(3, test_suites[0]->successful_test_count());
+ EXPECT_EQ(0, test_suites[0]->failed_test_count());
+ EXPECT_TRUE(test_suites[0]->Passed());
+ EXPECT_FALSE(test_suites[0]->Failed());
+
+ EXPECT_STREQ("DISABLED_Test", test_suites[1]->name());
+ EXPECT_TRUE(IsNull(test_suites[1]->type_param()));
+ EXPECT_FALSE(test_suites[1]->should_run());
+ EXPECT_EQ(1, test_suites[1]->disabled_test_count());
+ ASSERT_EQ(1, test_suites[1]->total_test_count());
+ EXPECT_EQ(0, test_suites[1]->successful_test_count());
+ EXPECT_EQ(0, test_suites[1]->failed_test_count());
#if GTEST_HAS_TYPED_TEST
- EXPECT_STREQ("TestCaseWithCommentTest/0", test_cases[2]->name());
- EXPECT_STREQ(GetTypeName<int>().c_str(), test_cases[2]->type_param());
- EXPECT_TRUE(test_cases[2]->should_run());
- EXPECT_EQ(0, test_cases[2]->disabled_test_count());
- ASSERT_EQ(1, test_cases[2]->total_test_count());
- EXPECT_EQ(1, test_cases[2]->successful_test_count());
- EXPECT_EQ(0, test_cases[2]->failed_test_count());
- EXPECT_TRUE(test_cases[2]->Passed());
- EXPECT_FALSE(test_cases[2]->Failed());
+ EXPECT_STREQ("TestSuiteWithCommentTest/0", test_suites[2]->name());
+ EXPECT_STREQ(GetTypeName<int>().c_str(), test_suites[2]->type_param());
+ EXPECT_TRUE(test_suites[2]->should_run());
+ EXPECT_EQ(0, test_suites[2]->disabled_test_count());
+ ASSERT_EQ(1, test_suites[2]->total_test_count());
+ EXPECT_EQ(1, test_suites[2]->successful_test_count());
+ EXPECT_EQ(0, test_suites[2]->failed_test_count());
+ EXPECT_TRUE(test_suites[2]->Passed());
+ EXPECT_FALSE(test_suites[2]->Failed());
#endif // GTEST_HAS_TYPED_TEST
- const TestCase* test_case = UnitTestHelper::FindTestCase("ApiTest");
- const TestInfo** tests = UnitTestHelper::GetSortedTests(test_case);
+ const TestSuite* test_suite = UnitTestHelper::FindTestSuite("ApiTest");
+ const TestInfo** tests = UnitTestHelper::GetSortedTests(test_suite);
EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name());
- EXPECT_STREQ("ApiTest", tests[0]->test_case_name());
+ EXPECT_STREQ("ApiTest", tests[0]->test_suite_name());
EXPECT_FALSE(tests[0]->should_run());
- EXPECT_STREQ("TestCaseDisabledAccessorsWork", tests[1]->name());
- EXPECT_STREQ("ApiTest", tests[1]->test_case_name());
+ EXPECT_STREQ("TestSuiteDisabledAccessorsWork", tests[1]->name());
+ EXPECT_STREQ("ApiTest", tests[1]->test_suite_name());
EXPECT_TRUE(IsNull(tests[1]->value_param()));
EXPECT_TRUE(IsNull(tests[1]->type_param()));
EXPECT_TRUE(tests[1]->should_run());
EXPECT_TRUE(tests[1]->result()->Passed());
EXPECT_EQ(0, tests[1]->result()->test_property_count());
- EXPECT_STREQ("TestCaseImmutableAccessorsWork", tests[2]->name());
- EXPECT_STREQ("ApiTest", tests[2]->test_case_name());
+ EXPECT_STREQ("TestSuiteImmutableAccessorsWork", tests[2]->name());
+ EXPECT_STREQ("ApiTest", tests[2]->test_suite_name());
EXPECT_TRUE(IsNull(tests[2]->value_param()));
EXPECT_TRUE(IsNull(tests[2]->type_param()));
EXPECT_TRUE(tests[2]->should_run());
@@ -298,7 +298,7 @@ class FinalSuccessChecker : public Environment {
EXPECT_EQ(0, tests[2]->result()->test_property_count());
EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name());
- EXPECT_STREQ("ApiTest", tests[3]->test_case_name());
+ EXPECT_STREQ("ApiTest", tests[3]->test_suite_name());
EXPECT_TRUE(IsNull(tests[3]->value_param()));
EXPECT_TRUE(IsNull(tests[3]->type_param()));
EXPECT_TRUE(tests[3]->should_run());
@@ -311,11 +311,11 @@ class FinalSuccessChecker : public Environment {
delete[] tests;
#if GTEST_HAS_TYPED_TEST
- test_case = UnitTestHelper::FindTestCase("TestCaseWithCommentTest/0");
- tests = UnitTestHelper::GetSortedTests(test_case);
+ test_suite = UnitTestHelper::FindTestSuite("TestSuiteWithCommentTest/0");
+ tests = UnitTestHelper::GetSortedTests(test_suite);
EXPECT_STREQ("Dummy", tests[0]->name());
- EXPECT_STREQ("TestCaseWithCommentTest/0", tests[0]->test_case_name());
+ EXPECT_STREQ("TestSuiteWithCommentTest/0", tests[0]->test_suite_name());
EXPECT_TRUE(IsNull(tests[0]->value_param()));
EXPECT_STREQ(GetTypeName<int>().c_str(), tests[0]->type_param());
EXPECT_TRUE(tests[0]->should_run());
@@ -324,7 +324,7 @@ class FinalSuccessChecker : public Environment {
delete[] tests;
#endif // GTEST_HAS_TYPED_TEST
- delete[] test_cases;
+ delete[] test_suites;
}
};
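
The accessors exercised above (total_test_suite_count(), GetTestSuite(), TestSuite::name(), and so on) are the reflection API under its new TestSuite naming. A rough sketch of walking the registry with those accessors, as a free helper that merely prints what it finds:

    #include <iostream>
    #include "gtest/gtest.h"

    // Prints every registered suite and how many tests it contains.
    void DumpRegisteredSuites() {
      const ::testing::UnitTest& unit_test = *::testing::UnitTest::GetInstance();
      for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
        const ::testing::TestSuite* suite = unit_test.GetTestSuite(i);
        std::cout << suite->name() << ": " << suite->total_test_count()
                  << " test(s)\n";
      }
    }
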
diff --git a/googletest/test/gtest_assert_by_exception_test.cc b/googletest/test/gtest_assert_by_exception_test.cc
index 7dfd48c..ada4cb3 100644
--- a/googletest/test/gtest_assert_by_exception_test.cc
+++ b/googletest/test/gtest_assert_by_exception_test.cc
@@ -96,7 +96,6 @@ TEST(Test, Test) {
int kTestForContinuingTest = 0;
TEST(Test, Test2) {
- // FIXME: how to force Test2 to be after Test?
kTestForContinuingTest = 1;
}
diff --git a/googletest/test/gtest_environment_test.cc b/googletest/test/gtest_environment_test.cc
index fea542a..58908e6 100644
--- a/googletest/test/gtest_environment_test.cc
+++ b/googletest/test/gtest_environment_test.cc
@@ -116,7 +116,7 @@ void Check(bool condition, const char* msg) {
}
}
-// Runs the tests. Return true iff successful.
+// Runs the tests. Return true if successful.
//
// The 'failure' parameter specifies the type of failure that should
// be generated by the global set-up.
diff --git a/googletest/test/gtest_pred_impl_unittest.cc b/googletest/test/gtest_pred_impl_unittest.cc
index 2019a30..4d77896 100644
--- a/googletest/test/gtest_pred_impl_unittest.cc
+++ b/googletest/test/gtest_pred_impl_unittest.cc
@@ -27,7 +27,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// This file is AUTOMATICALLY GENERATED on 01/02/2018 by command
+// This file is AUTOMATICALLY GENERATED on 01/02/2019 by command
// 'gen_gtest_pred_impl.py 5'. DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
@@ -144,10 +144,10 @@ class Predicate1Test : public testing::Test {
}
}
- // true iff the test function is expected to run to finish.
+ // true if the test function is expected to run to finish.
static bool expected_to_finish_;
- // true iff the test function did run to finish.
+ // true if the test function did run to finish.
static bool finished_;
static int n1_;
@@ -539,10 +539,10 @@ class Predicate2Test : public testing::Test {
}
}
- // true iff the test function is expected to run to finish.
+ // true if the test function is expected to run to finish.
static bool expected_to_finish_;
- // true iff the test function did run to finish.
+ // true if the test function did run to finish.
static bool finished_;
static int n1_;
@@ -976,10 +976,10 @@ class Predicate3Test : public testing::Test {
}
}
- // true iff the test function is expected to run to finish.
+ // true if the test function is expected to run to finish.
static bool expected_to_finish_;
- // true iff the test function did run to finish.
+ // true if the test function did run to finish.
static bool finished_;
static int n1_;
@@ -1455,10 +1455,10 @@ class Predicate4Test : public testing::Test {
}
}
- // true iff the test function is expected to run to finish.
+ // true if the test function is expected to run to finish.
static bool expected_to_finish_;
- // true iff the test function did run to finish.
+ // true if the test function did run to finish.
static bool finished_;
static int n1_;
@@ -1976,10 +1976,10 @@ class Predicate5Test : public testing::Test {
}
}
- // true iff the test function is expected to run to finish.
+ // true if the test function is expected to run to finish.
static bool expected_to_finish_;
- // true iff the test function did run to finish.
+ // true if the test function did run to finish.
static bool finished_;
static int n1_;
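
Beyond the iff/if wording fixes, the generated assertions in this file are the EXPECT_PRED*/ASSERT_PRED* family. A small illustration of how they read in user code (IsEven is defined here only for the example; any callable, including a lambda, may serve as the predicate):

    #include "gtest/gtest.h"

    bool IsEven(int n) { return n % 2 == 0; }

    TEST(PredExample, UsesPredicateAssertions) {
      // On failure the macro reports the predicate name and its arguments.
      EXPECT_PRED1(IsEven, 4);
      const auto less_than = [](int a, int b) { return a < b; };
      EXPECT_PRED2(less_than, 1, 2);
    }
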
diff --git a/googletest/test/gtest_premature_exit_test.cc b/googletest/test/gtest_premature_exit_test.cc
index 0920a97..777a8bf 100644
--- a/googletest/test/gtest_premature_exit_test.cc
+++ b/googletest/test/gtest_premature_exit_test.cc
@@ -45,7 +45,7 @@ namespace {
class PrematureExitTest : public Test {
public:
- // Returns true iff the given file exists.
+ // Returns true if the given file exists.
static bool FileExists(const char* filepath) {
StatStruct stat;
return Stat(filepath, &stat) == 0;
@@ -61,7 +61,7 @@ class PrematureExitTest : public Test {
}
}
- // Returns true iff the premature-exit file exists.
+ // Returns true if the premature-exit file exists.
bool PrematureExitFileExists() const {
return FileExists(premature_exit_file_path_);
}
diff --git a/googletest/test/gtest_repeat_test.cc b/googletest/test/gtest_repeat_test.cc
index 2ab82ca..7da4a15 100644
--- a/googletest/test/gtest_repeat_test.cc
+++ b/googletest/test/gtest_repeat_test.cc
@@ -117,13 +117,12 @@ const int kNumberOfParamTests = 10;
class MyParamTest : public testing::TestWithParam<int> {};
TEST_P(MyParamTest, ShouldPass) {
- // FIXME: Make parameter value checking robust WRT order of tests.
GTEST_CHECK_INT_EQ_(g_param_test_count % kNumberOfParamTests, GetParam());
g_param_test_count++;
}
-INSTANTIATE_TEST_CASE_P(MyParamSequence,
- MyParamTest,
- testing::Range(0, kNumberOfParamTests));
+INSTANTIATE_TEST_SUITE_P(MyParamSequence,
+ MyParamTest,
+ testing::Range(0, kNumberOfParamTests));
// Resets the count for each test.
void ResetCounts() {
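
The instantiation above also shows testing::Range as the parameter generator for the renamed macro. The same pattern in isolation, with illustrative names:

    #include "gtest/gtest.h"

    class RepeatParamTest : public ::testing::TestWithParam<int> {};

    TEST_P(RepeatParamTest, ParamIsInHalfOpenRange) {
      EXPECT_GE(GetParam(), 0);
      EXPECT_LT(GetParam(), 10);
    }

    // Range(0, 10) generates the parameters 0..9; the end value is excluded.
    INSTANTIATE_TEST_SUITE_P(ZeroToNine, RepeatParamTest,
                             ::testing::Range(0, 10));
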
diff --git a/googletest/test/gtest_skip_environment_check_output_test.py b/googletest/test/gtest_skip_environment_check_output_test.py
new file mode 100755
index 0000000..6e79155
--- /dev/null
+++ b/googletest/test/gtest_skip_environment_check_output_test.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 Google LLC. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests Google Test's gtest skip in environment setup behavior.
+
+This script invokes gtest_skip_in_environment_setup_test_ and verifies its
+output.
+"""
+
+import gtest_test_utils
+
+# Path to the gtest_skip_in_environment_setup_test binary
+EXE_PATH = gtest_test_utils.GetTestExecutablePath(
+ 'gtest_skip_in_environment_setup_test')
+
+OUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output
+
+
+# Test.
+class SkipEntireEnvironmentTest(gtest_test_utils.TestCase):
+
+ def testSkipEntireEnvironmentTest(self):
+ self.assertIn('Skipping the entire environment', OUTPUT)
+ self.assertNotIn('FAILED', OUTPUT)
+
+
+if __name__ == '__main__':
+ gtest_test_utils.Main()
diff --git a/googletest/test/gtest_skip_in_environment_setup_test.cc b/googletest/test/gtest_skip_in_environment_setup_test.cc
new file mode 100644
index 0000000..9372310
--- /dev/null
+++ b/googletest/test/gtest_skip_in_environment_setup_test.cc
@@ -0,0 +1,49 @@
+// Copyright 2019, Google LLC.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google LLC. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// This test verifies that skipping in the environment results in the
+// testcases being skipped.
+
+#include <iostream>
+#include "gtest/gtest.h"
+
+class SetupEnvironment : public testing::Environment {
+ public:
+ void SetUp() override { GTEST_SKIP() << "Skipping the entire environment"; }
+};
+
+TEST(Test, AlwaysFails) { EXPECT_EQ(true, false); }
+
+int main(int argc, char **argv) {
+ testing::InitGoogleTest(&argc, argv);
+
+ testing::AddGlobalTestEnvironment(new SetupEnvironment());
+
+ return RUN_ALL_TESTS();
+}
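
GTEST_SKIP() is not limited to global environments; it can also short-circuit a fixture. A hedged sketch, assuming a hypothetical availability check (DatabaseAvailable() is invented for the example):

    #include "gtest/gtest.h"

    class DatabaseTest : public ::testing::Test {
     protected:
      void SetUp() override {
        // Skipping in SetUp marks every test of this fixture as skipped,
        // not failed, mirroring what the new environment test verifies.
        if (!DatabaseAvailable()) GTEST_SKIP() << "no database configured";
      }
      static bool DatabaseAvailable() { return false; }  // illustrative stub
    };

    TEST_F(DatabaseTest, RunsOnlyWhenAvailable) { SUCCEED(); }
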
diff --git a/googletest/test/gtest_test_utils.py b/googletest/test/gtest_test_utils.py
index 245dcb1..abd56ec 100755
--- a/googletest/test/gtest_test_utils.py
+++ b/googletest/test/gtest_test_utils.py
@@ -215,10 +215,10 @@ class Subprocess:
Returns:
An object that represents outcome of the executed process. It has the
following attributes:
- terminated_by_signal True iff the child process has been terminated
+ terminated_by_signal True if the child process has been terminated
by a signal.
signal Signal that terminated the child process.
- exited True iff the child process exited normally.
+ exited True if the child process exited normally.
exit_code The code with which the child process exited.
output Child process's stdout and stderr output
combined in a string.
@@ -307,8 +307,6 @@ def Main():
_ParseAndStripGTestFlags(sys.argv)
# The tested binaries should not be writing XML output files unless the
# script explicitly instructs them to.
- # FIXME: Move this into Subprocess when we implement
- # passing environment into it as a parameter.
if GTEST_OUTPUT_VAR_NAME in os.environ:
del os.environ[GTEST_OUTPUT_VAR_NAME]
diff --git a/googletest/test/gtest_unittest.cc b/googletest/test/gtest_unittest.cc
index 9ddb37d..5020d73 100644
--- a/googletest/test/gtest_unittest.cc
+++ b/googletest/test/gtest_unittest.cc
@@ -61,9 +61,10 @@ TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {
#include <time.h>
#include <map>
-#include <vector>
#include <ostream>
+#include <type_traits>
#include <unordered_set>
+#include <vector>
#include "gtest/gtest-spi.h"
#include "src/gtest-internal-inl.h"
@@ -226,7 +227,6 @@ using testing::TestProperty;
using testing::TestResult;
using testing::TimeInMillis;
using testing::UnitTest;
-using testing::internal::AddReference;
using testing::internal::AlwaysFalse;
using testing::internal::AlwaysTrue;
using testing::internal::AppendUserMessage;
@@ -250,7 +250,6 @@ using testing::internal::GetTestTypeId;
using testing::internal::GetTimeInMillis;
using testing::internal::GetTypeId;
using testing::internal::GetUnitTestImpl;
-using testing::internal::ImplicitlyConvertible;
using testing::internal::Int32;
using testing::internal::Int32FromEnvOrDie;
using testing::internal::IsAProtocolMessage;
@@ -263,7 +262,6 @@ using testing::internal::OsStackTraceGetterInterface;
using testing::internal::ParseInt32Flag;
using testing::internal::RelationToSourceCopy;
using testing::internal::RelationToSourceReference;
-using testing::internal::RemoveConst;
using testing::internal::RemoveReference;
using testing::internal::ShouldRunTestOnShard;
using testing::internal::ShouldShard;
@@ -511,37 +509,88 @@ TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsEpochStart) {
EXPECT_EQ("1970-01-01T00:00:00", FormatEpochTimeInMillisAsIso8601(0));
}
-#if GTEST_CAN_COMPARE_NULL
-
# ifdef __BORLANDC__
// Silences warnings: "Condition is always true", "Unreachable code"
# pragma option push -w-ccc -w-rch
# endif
-// Tests that GTEST_IS_NULL_LITERAL_(x) is true when x is a null
-// pointer literal.
-TEST(NullLiteralTest, IsTrueForNullLiterals) {
- EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(nullptr));
- EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(nullptr));
- EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(nullptr));
- EXPECT_TRUE(GTEST_IS_NULL_LITERAL_(nullptr));
+// Tests that the LHS of EXPECT_EQ or ASSERT_EQ can be used as a null literal
+// when the RHS is a pointer type.
+TEST(NullLiteralTest, LHSAllowsNullLiterals) {
+ EXPECT_EQ(0, static_cast<void*>(nullptr)); // NOLINT
+ ASSERT_EQ(0, static_cast<void*>(nullptr)); // NOLINT
+ EXPECT_EQ(NULL, static_cast<void*>(nullptr)); // NOLINT
+ ASSERT_EQ(NULL, static_cast<void*>(nullptr)); // NOLINT
+ EXPECT_EQ(nullptr, static_cast<void*>(nullptr));
+ ASSERT_EQ(nullptr, static_cast<void*>(nullptr));
+
+ const int* const p = nullptr;
+ EXPECT_EQ(0, p); // NOLINT
+ ASSERT_EQ(0, p); // NOLINT
+ EXPECT_EQ(NULL, p); // NOLINT
+ ASSERT_EQ(NULL, p); // NOLINT
+ EXPECT_EQ(nullptr, p);
+ ASSERT_EQ(nullptr, p);
}
-// Tests that GTEST_IS_NULL_LITERAL_(x) is false when x is not a null
-// pointer literal.
-TEST(NullLiteralTest, IsFalseForNonNullLiterals) {
- EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(1));
- EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(0.0));
- EXPECT_FALSE(GTEST_IS_NULL_LITERAL_('a'));
- EXPECT_FALSE(GTEST_IS_NULL_LITERAL_(static_cast<void*>(nullptr)));
+struct ConvertToAll {
+ template <typename T>
+ operator T() const { // NOLINT
+ return T();
+ }
+};
+
+struct ConvertToPointer {
+ template <class T>
+ operator T*() const { // NOLINT
+ return nullptr;
+ }
+};
+
+struct ConvertToAllButNoPointers {
+ template <typename T,
+ typename std::enable_if<!std::is_pointer<T>::value, int>::type = 0>
+ operator T() const { // NOLINT
+ return T();
+ }
+};
+
+struct MyType {};
+inline bool operator==(MyType const&, MyType const&) { return true; }
+
+TEST(NullLiteralTest, ImplicitConversion) {
+ EXPECT_EQ(ConvertToPointer{}, static_cast<void*>(nullptr));
+#if !defined(__GNUC__) || defined(__clang__)
+ // Disabled due to GCC bug gcc.gnu.org/PR89580
+ EXPECT_EQ(ConvertToAll{}, static_cast<void*>(nullptr));
+#endif
+ EXPECT_EQ(ConvertToAll{}, MyType{});
+ EXPECT_EQ(ConvertToAllButNoPointers{}, MyType{});
}
+#ifdef __clang__
+#pragma clang diagnostic push
+#if __has_warning("-Wzero-as-null-pointer-constant")
+#pragma clang diagnostic error "-Wzero-as-null-pointer-constant"
+#endif
+#endif
+
+TEST(NullLiteralTest, NoConversionNoWarning) {
+ // Test that gtest's detection and handling of null pointer constants
+ // doesn't trigger a warning when '0' isn't actually used as null.
+ EXPECT_EQ(0, 0);
+ ASSERT_EQ(0, 0);
+}
+
+#ifdef __clang__
+#pragma clang diagnostic pop
+#endif
+
# ifdef __BORLANDC__
// Restores warnings after previous "#pragma option push" suppressed them.
# pragma option pop
# endif
-#endif // GTEST_CAN_COMPARE_NULL
//
// Tests CodePointToUtf8().
@@ -586,7 +635,7 @@ TEST(CodePointToUtf8Test, CanEncode12To16Bits) {
#if !GTEST_WIDE_STRING_USES_UTF16_
// Tests in this group require a wchar_t to hold > 16 bits, and thus
-// are skipped on Windows, Cygwin, and Symbian, where a wchar_t is
+// are skipped on Windows and Cygwin, where a wchar_t is
// 16-bit wide. This code may not compile on those systems.
// Tests that Unicode code-points that have 17 to 21 bits are encoded
@@ -849,23 +898,23 @@ TEST(ContainerUtilityDeathTest, ShuffleRange) {
class VectorShuffleTest : public Test {
protected:
- static const int kVectorSize = 20;
+ static const size_t kVectorSize = 20;
VectorShuffleTest() : random_(1) {
- for (int i = 0; i < kVectorSize; i++) {
+ for (int i = 0; i < static_cast<int>(kVectorSize); i++) {
vector_.push_back(i);
}
}
static bool VectorIsCorrupt(const TestingVector& vector) {
- if (kVectorSize != static_cast<int>(vector.size())) {
+ if (kVectorSize != vector.size()) {
return true;
}
bool found_in_vector[kVectorSize] = { false };
for (size_t i = 0; i < vector.size(); i++) {
const int e = vector[i];
- if (e < 0 || e >= kVectorSize || found_in_vector[e]) {
+ if (e < 0 || e >= static_cast<int>(kVectorSize) || found_in_vector[e]) {
return true;
}
found_in_vector[e] = true;
@@ -882,7 +931,7 @@ class VectorShuffleTest : public Test {
static bool RangeIsShuffled(const TestingVector& vector, int begin, int end) {
for (int i = begin; i < end; i++) {
- if (i != vector[i]) {
+ if (i != vector[static_cast<size_t>(i)]) {
return true;
}
}
@@ -906,7 +955,7 @@ class VectorShuffleTest : public Test {
TestingVector vector_;
}; // class VectorShuffleTest
-const int VectorShuffleTest::kVectorSize;
+const size_t VectorShuffleTest::kVectorSize;
TEST_F(VectorShuffleTest, HandlesEmptyRange) {
// Tests an empty range at the beginning...
@@ -958,7 +1007,7 @@ TEST_F(VectorShuffleTest, ShufflesEntireVector) {
// Tests the first and last elements in particular to ensure that
// there are no off-by-one problems in our shuffle algorithm.
EXPECT_NE(0, vector_[0]);
- EXPECT_NE(kVectorSize - 1, vector_[kVectorSize - 1]);
+ EXPECT_NE(static_cast<int>(kVectorSize - 1), vector_[kVectorSize - 1]);
}
TEST_F(VectorShuffleTest, ShufflesStartOfVector) {
@@ -968,7 +1017,8 @@ TEST_F(VectorShuffleTest, ShufflesStartOfVector) {
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
EXPECT_PRED3(RangeIsShuffled, vector_, 0, kRangeSize);
- EXPECT_PRED3(RangeIsUnshuffled, vector_, kRangeSize, kVectorSize);
+ EXPECT_PRED3(RangeIsUnshuffled, vector_, kRangeSize,
+ static_cast<int>(kVectorSize));
}
TEST_F(VectorShuffleTest, ShufflesEndOfVector) {
@@ -977,23 +1027,25 @@ TEST_F(VectorShuffleTest, ShufflesEndOfVector) {
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
- EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, kVectorSize);
+ EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize,
+ static_cast<int>(kVectorSize));
}
TEST_F(VectorShuffleTest, ShufflesMiddleOfVector) {
- int kRangeSize = kVectorSize/3;
+ const int kRangeSize = static_cast<int>(kVectorSize) / 3;
ShuffleRange(&random_, kRangeSize, 2*kRangeSize, &vector_);
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);
EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, 2*kRangeSize);
- EXPECT_PRED3(RangeIsUnshuffled, vector_, 2*kRangeSize, kVectorSize);
+ EXPECT_PRED3(RangeIsUnshuffled, vector_, 2 * kRangeSize,
+ static_cast<int>(kVectorSize));
}
TEST_F(VectorShuffleTest, ShufflesRepeatably) {
TestingVector vector2;
- for (int i = 0; i < kVectorSize; i++) {
- vector2.push_back(i);
+ for (size_t i = 0; i < kVectorSize; i++) {
+ vector2.push_back(static_cast<int>(i));
}
random_.Reseed(1234);
@@ -1004,7 +1056,7 @@ TEST_F(VectorShuffleTest, ShufflesRepeatably) {
ASSERT_PRED1(VectorIsNotCorrupt, vector_);
ASSERT_PRED1(VectorIsNotCorrupt, vector2);
- for (int i = 0; i < kVectorSize; i++) {
+ for (size_t i = 0; i < kVectorSize; i++) {
EXPECT_EQ(vector_[i], vector2[i]) << " where i is " << i;
}
}
@@ -1193,12 +1245,6 @@ TEST_F(ExpectFatalFailureTest, CatchesFatalFaliure) {
EXPECT_FATAL_FAILURE(AddFatalFailure(), "Expected fatal failure.");
}
-#if GTEST_HAS_GLOBAL_STRING
-TEST_F(ExpectFatalFailureTest, AcceptsStringObject) {
- EXPECT_FATAL_FAILURE(AddFatalFailure(), ::string("Expected fatal failure."));
-}
-#endif
-
TEST_F(ExpectFatalFailureTest, AcceptsStdStringObject) {
EXPECT_FATAL_FAILURE(AddFatalFailure(),
::std::string("Expected fatal failure."));
@@ -1281,13 +1327,6 @@ TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailure) {
"Expected non-fatal failure.");
}
-#if GTEST_HAS_GLOBAL_STRING
-TEST_F(ExpectNonfatalFailureTest, AcceptsStringObject) {
- EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
- ::string("Expected non-fatal failure."));
-}
-#endif
-
TEST_F(ExpectNonfatalFailureTest, AcceptsStdStringObject) {
EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),
::std::string("Expected non-fatal failure."));
@@ -1554,7 +1593,7 @@ class GTestFlagSaverTest : public Test {
// Saves the Google Test flags such that we can restore them later, and
// then sets them to their default values. This will be called
// before the first test in this test case is run.
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
saver_ = new GTestFlagSaver;
GTEST_FLAG(also_run_disabled_tests) = false;
@@ -1576,7 +1615,7 @@ class GTestFlagSaverTest : public Test {
// Restores the Google Test flags that the tests have modified. This will
// be called after the last test in this test case is run.
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
delete saver_;
saver_ = nullptr;
}
@@ -1940,7 +1979,7 @@ TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) {
// Test class, there are no separate tests for the following classes
// (except for some trivial cases):
//
-// TestCase, UnitTest, UnitTestResultPrinter.
+// TestSuite, UnitTest, UnitTestResultPrinter.
//
// Similarly, there are no separate tests for the following macros:
//
@@ -1974,15 +2013,16 @@ void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
key);
}
-void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
const char* key) {
- const TestCase* test_case = UnitTest::GetInstance()->current_test_case();
- ASSERT_TRUE(test_case != nullptr);
+ const testing::TestSuite* test_suite =
+ UnitTest::GetInstance()->current_test_suite();
+ ASSERT_TRUE(test_suite != nullptr);
ExpectNonFatalFailureRecordingPropertyWithReservedKey(
- test_case->ad_hoc_test_result(), key);
+ test_suite->ad_hoc_test_result(), key);
}
-void ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+void ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
const char* key) {
ExpectNonFatalFailureRecordingPropertyWithReservedKey(
UnitTest::GetInstance()->ad_hoc_test_result(), key);
@@ -1994,29 +2034,32 @@ void ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
class UnitTestRecordPropertyTest :
public testing::internal::UnitTestRecordPropertyTestHelper {
public:
- static void SetUpTestCase() {
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+ static void SetUpTestSuite() {
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
"disabled");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
"errors");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
"failures");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
"name");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
"tests");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(
"time");
Test::RecordProperty("test_case_key_1", "1");
- const TestCase* test_case = UnitTest::GetInstance()->current_test_case();
- ASSERT_TRUE(test_case != nullptr);
- ASSERT_EQ(1, test_case->ad_hoc_test_result().test_property_count());
+ const testing::TestSuite* test_suite =
+ UnitTest::GetInstance()->current_test_suite();
+
+ ASSERT_TRUE(test_suite != nullptr);
+
+ ASSERT_EQ(1, test_suite->ad_hoc_test_result().test_property_count());
EXPECT_STREQ("test_case_key_1",
- test_case->ad_hoc_test_result().GetTestProperty(0).key());
+ test_suite->ad_hoc_test_result().GetTestProperty(0).key());
EXPECT_STREQ("1",
- test_case->ad_hoc_test_result().GetTestProperty(0).value());
+ test_suite->ad_hoc_test_result().GetTestProperty(0).value());
}
};
@@ -2069,7 +2112,7 @@ TEST_F(UnitTestRecordPropertyTest, OverridesValuesForDuplicateKeys) {
}
TEST_F(UnitTestRecordPropertyTest,
- AddFailureInsideTestsWhenUsingTestCaseReservedKeys) {
+ AddFailureInsideTestsWhenUsingTestSuiteReservedKeys) {
ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
"name");
ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(
@@ -2095,21 +2138,21 @@ TEST_F(UnitTestRecordPropertyTest,
class UnitTestRecordPropertyTestEnvironment : public Environment {
public:
void TearDown() override {
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"tests");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"failures");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"disabled");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"errors");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"name");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"timestamp");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"time");
- ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestCase(
+ ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(
"random_seed");
}
};
@@ -2126,12 +2169,12 @@ static Environment* record_property_env GTEST_ATTRIBUTE_UNUSED_ =
// First, some predicates and predicate-formatters needed by the tests.
-// Returns true iff the argument is an even number.
+// Returns true if the argument is an even number.
bool IsEven(int n) {
return (n % 2) == 0;
}
-// A functor that returns true iff the argument is an even number.
+// A functor that returns true if the argument is an even number.
struct IsEvenFunctor {
bool operator()(int n) { return IsEven(n); }
};
@@ -2175,12 +2218,12 @@ struct AssertIsEvenFunctor {
}
};
-// Returns true iff the sum of the arguments is an even number.
+// Returns true if the sum of the arguments is an even number.
bool SumIsEven2(int n1, int n2) {
return IsEven(n1 + n2);
}
-// A functor that returns true iff the sum of the arguments is an even
+// A functor that returns true if the sum of the arguments is an even
// number.
struct SumIsEven3Functor {
bool operator()(int n1, int n2, int n3) {
@@ -2360,6 +2403,16 @@ TEST(PredTest, SingleEvaluationOnFailure) {
EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once.";
}
+// Test predicate assertions for sets
+TEST(PredTest, ExpectPredEvalFailure) {
+ std::set<int> set_a = {2, 1, 3, 4, 5};
+ std::set<int> set_b = {0, 4, 8};
+ const auto compare_sets = [] (std::set<int>, std::set<int>) { return false; };
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_PRED2(compare_sets, set_a, set_b),
+ "compare_sets(set_a, set_b) evaluates to false, where\nset_a evaluates "
+ "to { 1, 2, 3, 4, 5 }\nset_b evaluates to { 0, 4, 8 }");
+}
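The new test above leans on googletest printing each predicate argument when EXPECT_PRED2 fails (note that std::set is printed in sorted order). A minimal sketch of ordinary EXPECT_PRED2 usage, with an illustrative predicate name:

#include <set>
#include "gtest/gtest.h"

// Returns true if the two sets have at least one element in common.
static bool SharesAnElement(const std::set<int>& a, const std::set<int>& b) {
  for (int x : a) {
    if (b.count(x) != 0) return true;
  }
  return false;
}

TEST(PredSketch, PassesWhenPredicateHolds) {
  const std::set<int> lhs = {1, 2, 3};
  const std::set<int> rhs = {3, 4};
  // On failure, the message names both arguments and prints their values,
  // e.g. "lhs evaluates to { 1, 2, 3 }".
  EXPECT_PRED2(SharesAnElement, lhs, rhs);
}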
// Some helper functions for testing using overloaded/template
// functions with ASSERT_PREDn and EXPECT_PREDn.
@@ -2822,8 +2875,6 @@ TEST_F(FloatTest, LargeDiff) {
TEST_F(FloatTest, Infinity) {
EXPECT_FLOAT_EQ(values_.infinity, values_.close_to_infinity);
EXPECT_FLOAT_EQ(-values_.infinity, -values_.close_to_infinity);
-#if !GTEST_OS_SYMBIAN
- // Nokia's STLport crashes if we try to output infinity or NaN.
EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, -values_.infinity),
"-values_.infinity");
@@ -2831,14 +2882,10 @@ TEST_F(FloatTest, Infinity) {
// are only 1 DLP apart.
EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, values_.nan1),
"values_.nan1");
-#endif // !GTEST_OS_SYMBIAN
}
// Tests that comparing with NAN always returns false.
TEST_F(FloatTest, NaN) {
-#if !GTEST_OS_SYMBIAN
-// Nokia's STLport crashes if we try to output infinity or NaN.
-
// In C++Builder, names within local classes (such as used by
// EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
// scoping class. Use a static local alias as a workaround.
@@ -2856,7 +2903,6 @@ TEST_F(FloatTest, NaN) {
EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity),
"v.infinity");
-#endif // !GTEST_OS_SYMBIAN
}
// Tests that *_FLOAT_EQ are reflexive.
@@ -2918,10 +2964,6 @@ TEST_F(FloatTest, FloatLEFails) {
EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f);
}, "(values_.further_from_one) <= (1.0f)");
-#if !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
- // Nokia's STLport crashes if we try to output infinity or NaN.
- // C++Builder gives bad results for ordered comparisons involving NaNs
- // due to compiler bugs.
EXPECT_NONFATAL_FAILURE({ // NOLINT
EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity);
}, "(values_.nan1) <= (values_.infinity)");
@@ -2931,7 +2973,6 @@ TEST_F(FloatTest, FloatLEFails) {
EXPECT_FATAL_FAILURE({ // NOLINT
ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1);
}, "(values_.nan1) <= (values_.nan1)");
-#endif // !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
}
// Instantiates FloatingPointTest for testing *_DOUBLE_EQ.
@@ -2995,8 +3036,6 @@ TEST_F(DoubleTest, LargeDiff) {
TEST_F(DoubleTest, Infinity) {
EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity);
EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity);
-#if !GTEST_OS_SYMBIAN
- // Nokia's STLport crashes if we try to output infinity or NaN.
EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity),
"-values_.infinity");
@@ -3004,18 +3043,10 @@ TEST_F(DoubleTest, Infinity) {
// are only 1 DLP apart.
EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, values_.nan1),
"values_.nan1");
-#endif // !GTEST_OS_SYMBIAN
}
// Tests that comparing with NAN always returns false.
TEST_F(DoubleTest, NaN) {
-#if !GTEST_OS_SYMBIAN
- // In C++Builder, names within local classes (such as used by
- // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the
- // scoping class. Use a static local alias as a workaround.
- // We use the assignment syntax since some compilers, like Sun Studio,
- // don't allow initializing references using construction syntax
- // (parentheses).
static const DoubleTest::TestValues& v = this->values_;
// Nokia's STLport crashes if we try to output infinity or NaN.
@@ -3025,17 +3056,13 @@ TEST_F(DoubleTest, NaN) {
EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), "v.nan1");
EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity),
"v.infinity");
-#endif // !GTEST_OS_SYMBIAN
}
// Tests that *_DOUBLE_EQ are reflexive.
TEST_F(DoubleTest, Reflexive) {
EXPECT_DOUBLE_EQ(0.0, 0.0);
EXPECT_DOUBLE_EQ(1.0, 1.0);
-#if !GTEST_OS_SYMBIAN
- // Nokia's STLport crashes if we try to output infinity or NaN.
ASSERT_DOUBLE_EQ(values_.infinity, values_.infinity);
-#endif // !GTEST_OS_SYMBIAN
}
// Tests that *_DOUBLE_EQ are commutative.
@@ -3090,10 +3117,6 @@ TEST_F(DoubleTest, DoubleLEFails) {
EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0);
}, "(values_.further_from_one) <= (1.0)");
-#if !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
- // Nokia's STLport crashes if we try to output infinity or NaN.
- // C++Builder gives bad results for ordered comparisons involving NaNs
- // due to compiler bugs.
EXPECT_NONFATAL_FAILURE({ // NOLINT
EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity);
}, "(values_.nan1) <= (values_.infinity)");
@@ -3103,7 +3126,6 @@ TEST_F(DoubleTest, DoubleLEFails) {
EXPECT_FATAL_FAILURE({ // NOLINT
ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1);
}, "(values_.nan1) <= (values_.nan1)");
-#endif // !GTEST_OS_SYMBIAN && !defined(__BORLANDC__)
}
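With the Symbian and C++Builder guards gone, the *_FLOAT_EQ/*_DOUBLE_EQ and FloatLE/DoubleLE checks above run unconditionally. A small sketch of the same ULP-aware comparisons in ordinary test code (the values are arbitrary):

#include "gtest/gtest.h"

TEST(FloatingPointSketch, UlpAwareComparisons) {
  const double x = 1.0;
  const double y = 1.0 + 1e-16;  // indistinguishable from 1.0 in double

  EXPECT_DOUBLE_EQ(x, y);                        // equal within 4 ULPs
  EXPECT_PRED_FORMAT2(testing::DoubleLE, x, y);  // x <= y, or almost equal
  EXPECT_NEAR(x, y, 1e-9);                       // explicit absolute bound
}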
@@ -3124,28 +3146,28 @@ TEST(DisabledTest, NotDISABLED_TestShouldRun) {
// A test case whose name starts with DISABLED_.
// Should not run.
-TEST(DISABLED_TestCase, TestShouldNotRun) {
+TEST(DISABLED_TestSuite, TestShouldNotRun) {
FAIL() << "Unexpected failure: Test in disabled test case should not be run.";
}
// A test case and test whose names start with DISABLED_.
// Should not run.
-TEST(DISABLED_TestCase, DISABLED_TestShouldNotRun) {
+TEST(DISABLED_TestSuite, DISABLED_TestShouldNotRun) {
FAIL() << "Unexpected failure: Test in disabled test case should not be run.";
}
-// Check that when all tests in a test case are disabled, SetUpTestCase() and
-// TearDownTestCase() are not called.
+// Check that when all tests in a test case are disabled, SetUpTestSuite() and
+// TearDownTestSuite() are not called.
class DisabledTestsTest : public Test {
protected:
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
FAIL() << "Unexpected failure: All tests disabled in test case. "
- "SetUpTestCase() should not be called.";
+ "SetUpTestSuite() should not be called.";
}
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
FAIL() << "Unexpected failure: All tests disabled in test case. "
- "TearDownTestCase() should not be called.";
+ "TearDownTestSuite() should not be called.";
}
};
@@ -3166,7 +3188,7 @@ class TypedTest : public Test {
};
typedef testing::Types<int, double> NumericTypes;
-TYPED_TEST_CASE(TypedTest, NumericTypes);
+TYPED_TEST_SUITE(TypedTest, NumericTypes);
TYPED_TEST(TypedTest, DISABLED_ShouldNotRun) {
FAIL() << "Unexpected failure: Disabled typed test should not run.";
@@ -3176,7 +3198,7 @@ template <typename T>
class DISABLED_TypedTest : public Test {
};
-TYPED_TEST_CASE(DISABLED_TypedTest, NumericTypes);
+TYPED_TEST_SUITE(DISABLED_TypedTest, NumericTypes);
TYPED_TEST(DISABLED_TypedTest, ShouldNotRun) {
FAIL() << "Unexpected failure: Disabled typed test should not run.";
@@ -3192,31 +3214,31 @@ template <typename T>
class TypedTestP : public Test {
};
-TYPED_TEST_CASE_P(TypedTestP);
+TYPED_TEST_SUITE_P(TypedTestP);
TYPED_TEST_P(TypedTestP, DISABLED_ShouldNotRun) {
FAIL() << "Unexpected failure: "
<< "Disabled type-parameterized test should not run.";
}
-REGISTER_TYPED_TEST_CASE_P(TypedTestP, DISABLED_ShouldNotRun);
+REGISTER_TYPED_TEST_SUITE_P(TypedTestP, DISABLED_ShouldNotRun);
-INSTANTIATE_TYPED_TEST_CASE_P(My, TypedTestP, NumericTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, TypedTestP, NumericTypes);
template <typename T>
class DISABLED_TypedTestP : public Test {
};
-TYPED_TEST_CASE_P(DISABLED_TypedTestP);
+TYPED_TEST_SUITE_P(DISABLED_TypedTestP);
TYPED_TEST_P(DISABLED_TypedTestP, ShouldNotRun) {
FAIL() << "Unexpected failure: "
<< "Disabled type-parameterized test should not run.";
}
-REGISTER_TYPED_TEST_CASE_P(DISABLED_TypedTestP, ShouldNotRun);
+REGISTER_TYPED_TEST_SUITE_P(DISABLED_TypedTestP, ShouldNotRun);
-INSTANTIATE_TYPED_TEST_CASE_P(My, DISABLED_TypedTestP, NumericTypes);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, DISABLED_TypedTestP, NumericTypes);
#endif // GTEST_HAS_TYPED_TEST_P
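For reference, the renamed type-parameterized macros used above compose as follows; a minimal sketch with illustrative suite and test names (the *_TEST_CASE_P spellings remain available as deprecated aliases):

#include "gtest/gtest.h"

template <typename T>
class ArithmeticSketch : public testing::Test {};

TYPED_TEST_SUITE_P(ArithmeticSketch);

TYPED_TEST_P(ArithmeticSketch, ZeroIsNeutral) {
  const TypeParam zero(0);
  const TypeParam one(1);
  EXPECT_EQ(one, one + zero);
}

REGISTER_TYPED_TEST_SUITE_P(ArithmeticSketch, ZeroIsNeutral);

using SketchTypes = testing::Types<int, long, double>;
INSTANTIATE_TYPED_TEST_SUITE_P(My, ArithmeticSketch, SketchTypes);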
@@ -3479,7 +3501,7 @@ std::string EditsToString(const std::vector<EditType>& edits) {
std::vector<size_t> CharsToIndices(const std::string& str) {
std::vector<size_t> out;
for (size_t i = 0; i < str.size(); ++i) {
- out.push_back(str[i]);
+ out.push_back(static_cast<size_t>(str[i]));
}
return out;
}
@@ -3492,7 +3514,7 @@ std::vector<std::string> CharsToLines(const std::string& str) {
return out;
}
-TEST(EditDistance, TestCases) {
+TEST(EditDistance, TestSuites) {
struct Case {
int line;
const char* left;
@@ -3711,7 +3733,6 @@ TEST(AssertionTest, ASSERT_EQ) {
}
// Tests ASSERT_EQ(NULL, pointer).
-#if GTEST_CAN_COMPARE_NULL
TEST(AssertionTest, ASSERT_EQ_NULL) {
// A success.
const char* p = nullptr;
@@ -3725,7 +3746,6 @@ TEST(AssertionTest, ASSERT_EQ_NULL) {
static int n = 0;
EXPECT_FATAL_FAILURE(ASSERT_EQ(nullptr, &n), " &n\n Which is:");
}
-#endif // GTEST_CAN_COMPARE_NULL
// Tests ASSERT_EQ(0, non_pointer). Since the literal 0 can be
// treated as a null pointer by the compiler, we need to make sure
@@ -3916,11 +3936,8 @@ TEST(AssertionTest, NamedEnum) {
EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), "Which is: 1");
}
-// The version of gcc used in XCode 2.2 has a bug and doesn't allow
-// anonymous enums in assertions. Therefore the following test is not
-// done on Mac.
-// Sun Studio and HP aCC also reject this code.
-#if !GTEST_OS_MAC && !defined(__SUNPRO_CC) && !defined(__HP_aCC)
+// Sun Studio and HP aCC also reject this code.
+#if !defined(__SUNPRO_CC) && !defined(__HP_aCC)
// Tests using assertions with anonymous enums.
enum {
@@ -4439,7 +4456,6 @@ TEST(ExpectTest, EXPECT_EQ_Double) {
"5.1");
}
-#if GTEST_CAN_COMPARE_NULL
// Tests EXPECT_EQ(NULL, pointer).
TEST(ExpectTest, EXPECT_EQ_NULL) {
// A success.
@@ -4454,7 +4470,6 @@ TEST(ExpectTest, EXPECT_EQ_NULL) {
int n = 0;
EXPECT_NONFATAL_FAILURE(EXPECT_EQ(nullptr, &n), " &n\n Which is:");
}
-#endif // GTEST_CAN_COMPARE_NULL
// Tests EXPECT_EQ(0, non_pointer). Since the literal 0 can be
// treated as a null pointer by the compiler, we need to make sure
@@ -4695,6 +4710,19 @@ TEST(MacroTest, FAIL) {
"Intentional failure.");
}
+// Tests GTEST_FAIL_AT.
+TEST(MacroTest, GTEST_FAIL_AT) {
+ // Verifies that GTEST_FAIL_AT does generate a fatal failure and
+ // the failure message contains the user-streamed part.
+ EXPECT_FATAL_FAILURE(GTEST_FAIL_AT("foo.cc", 42) << "Wrong!", "Wrong!");
+
+ // Verifies that the user-streamed part is optional.
+ EXPECT_FATAL_FAILURE(GTEST_FAIL_AT("foo.cc", 42), "Failed");
+
+  // See the ADD_FAILURE_AT test above to see how we test that the failure message
+ // contains the right filename and line number -- the same applies here.
+}
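GTEST_FAIL_AT is useful mainly in helpers that want to attribute a failure to their caller's source location rather than to the helper itself. A hedged sketch (the helper name and check are illustrative):

#include "gtest/gtest.h"

// Reports a fatal failure at the caller-supplied location, not here.
void ExpectPositive(int value, const char* file, int line) {
  if (value <= 0) {
    GTEST_FAIL_AT(file, line) << "expected a positive value, got " << value;
  }
}

TEST(FailAtSketch, ReportsCallerLocation) {
  ExpectPositive(42, __FILE__, __LINE__);  // a failure, if any, points here
}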
+
// Tests SUCCEED
TEST(MacroTest, SUCCEED) {
SUCCEED();
@@ -4829,72 +4857,6 @@ TEST(EqAssertionTest, StdWideString) {
#endif // GTEST_HAS_STD_WSTRING
-#if GTEST_HAS_GLOBAL_STRING
-// Tests using ::string values in {EXPECT|ASSERT}_EQ.
-TEST(EqAssertionTest, GlobalString) {
- // Compares a const char* to a ::string that has identical content.
- EXPECT_EQ("Test", ::string("Test"));
-
- // Compares two identical ::strings.
- const ::string str1("A * in the middle");
- const ::string str2(str1);
- ASSERT_EQ(str1, str2);
-
- // Compares a ::string to a const char* that has different content.
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::string("Test"), "test"),
- "test");
-
- // Compares two ::strings that have different contents, one of which
- // having a NUL character in the middle.
- ::string str3(str1);
- str3.at(2) = '\0';
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(str1, str3),
- "str3");
-
- // Compares a ::string to a char* that has different content.
- EXPECT_FATAL_FAILURE({ // NOLINT
- ASSERT_EQ(::string("bar"), const_cast<char*>("foo"));
- }, "");
-}
-
-#endif // GTEST_HAS_GLOBAL_STRING
-
-#if GTEST_HAS_GLOBAL_WSTRING
-
-// Tests using ::wstring values in {EXPECT|ASSERT}_EQ.
-TEST(EqAssertionTest, GlobalWideString) {
- // Compares two identical ::wstrings.
- static const ::wstring wstr1(L"A * in the middle");
- static const ::wstring wstr2(wstr1);
- EXPECT_EQ(wstr1, wstr2);
-
- // Compares a const wchar_t* to a ::wstring that has identical content.
- const wchar_t kTestX8119[] = { 'T', 'e', 's', 't', 0x8119, '\0' };
- ASSERT_EQ(kTestX8119, ::wstring(kTestX8119));
-
- // Compares a const wchar_t* to a ::wstring that has different
- // content.
- const wchar_t kTestX8120[] = { 'T', 'e', 's', 't', 0x8120, '\0' };
- EXPECT_NONFATAL_FAILURE({ // NOLINT
- EXPECT_EQ(kTestX8120, ::wstring(kTestX8119));
- }, "Test\\x8119");
-
- // Compares a wchar_t* to a ::wstring that has different content.
- wchar_t* const p1 = const_cast<wchar_t*>(L"foo");
- EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, ::wstring(L"bar")),
- "bar");
-
- // Compares two ::wstrings that have different contents, one of which
- // having a NUL character in the middle.
- static ::wstring wstr3;
- wstr3 = wstr1;
- wstr3.at(2) = L'\0';
- EXPECT_FATAL_FAILURE(ASSERT_EQ(wstr1, wstr3),
- "wstr3");
-}
-
-#endif // GTEST_HAS_GLOBAL_WSTRING
-
// Tests using char pointers in {EXPECT|ASSERT}_EQ.
TEST(EqAssertionTest, CharPointer) {
char* const p0 = nullptr;
@@ -5322,11 +5284,11 @@ namespace testing {
class TestInfoTest : public Test {
protected:
static const TestInfo* GetTestInfo(const char* test_name) {
- const TestCase* const test_case =
- GetUnitTestImpl()->GetTestCase("TestInfoTest", "", nullptr, nullptr);
+ const TestSuite* const test_suite =
+ GetUnitTestImpl()->GetTestSuite("TestInfoTest", "", nullptr, nullptr);
- for (int i = 0; i < test_case->total_test_count(); ++i) {
- const TestInfo* const test_info = test_case->GetTestInfo(i);
+ for (int i = 0; i < test_suite->total_test_count(); ++i) {
+ const TestInfo* const test_info = test_suite->GetTestInfo(i);
if (strcmp(test_name, test_info->name()) == 0)
return test_info;
}
@@ -5383,13 +5345,13 @@ TEST_P(CodeLocationForTESTP, Verify) {
VERIFY_CODE_LOCATION;
}
-INSTANTIATE_TEST_CASE_P(, CodeLocationForTESTP, Values(0));
+INSTANTIATE_TEST_SUITE_P(, CodeLocationForTESTP, Values(0));
template <typename T>
class CodeLocationForTYPEDTEST : public Test {
};
-TYPED_TEST_CASE(CodeLocationForTYPEDTEST, int);
+TYPED_TEST_SUITE(CodeLocationForTYPEDTEST, int);
TYPED_TEST(CodeLocationForTYPEDTEST, Verify) {
VERIFY_CODE_LOCATION;
@@ -5399,20 +5361,21 @@ template <typename T>
class CodeLocationForTYPEDTESTP : public Test {
};
-TYPED_TEST_CASE_P(CodeLocationForTYPEDTESTP);
+TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP);
TYPED_TEST_P(CodeLocationForTYPEDTESTP, Verify) {
VERIFY_CODE_LOCATION;
}
-REGISTER_TYPED_TEST_CASE_P(CodeLocationForTYPEDTESTP, Verify);
+REGISTER_TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP, Verify);
-INSTANTIATE_TYPED_TEST_CASE_P(My, CodeLocationForTYPEDTESTP, int);
+INSTANTIATE_TYPED_TEST_SUITE_P(My, CodeLocationForTYPEDTESTP, int);
#undef VERIFY_CODE_LOCATION
// Tests setting up and tearing down a test case.
-
+// The legacy TestCase API is deprecated but still available.
+#ifndef REMOVE_LEGACY_TEST_CASEAPI
class SetUpTestCaseTest : public Test {
protected:
// This will be called once before the first test in this test case
@@ -5471,7 +5434,69 @@ TEST_F(SetUpTestCaseTest, Test1) { EXPECT_STRNE(nullptr, shared_resource_); }
TEST_F(SetUpTestCaseTest, Test2) {
EXPECT_STREQ("123", shared_resource_);
}
+#endif // REMOVE_LEGACY_TEST_CASEAPI
+
+// Tests SetUpTestSuite/TearDownTestSuite.
+class SetUpTestSuiteTest : public Test {
+ protected:
+  // This will be called once before the first test in this test suite
+  // is run.
+ static void SetUpTestSuite() {
+ printf("Setting up the test suite . . .\n");
+
+ // Initializes some shared resource. In this simple example, we
+ // just create a C string. More complex stuff can be done if
+ // desired.
+ shared_resource_ = "123";
+
+    // Increments the number of test suites that have been set up.
+ counter_++;
+
+ // SetUpTestSuite() should be called only once.
+ EXPECT_EQ(1, counter_);
+ }
+
+  // This will be called once after the last test in this test suite is
+  // run.
+ static void TearDownTestSuite() {
+ printf("Tearing down the test suite . . .\n");
+
+ // Decrements the number of test suites that have been set up.
+ counter_--;
+
+ // TearDownTestSuite() should be called only once.
+ EXPECT_EQ(0, counter_);
+
+ // Cleans up the shared resource.
+ shared_resource_ = nullptr;
+ }
+
+  // This will be called before each test in this test suite.
+ void SetUp() override {
+ // SetUpTestSuite() should be called only once, so counter_ should
+ // always be 1.
+ EXPECT_EQ(1, counter_);
+ }
+
+ // Number of test suites that have been set up.
+ static int counter_;
+
+  // Some resource to be shared by all tests in this test suite.
+ static const char* shared_resource_;
+};
+
+int SetUpTestSuiteTest::counter_ = 0;
+const char* SetUpTestSuiteTest::shared_resource_ = nullptr;
+
+// A test that uses the shared resource.
+TEST_F(SetUpTestSuiteTest, TestSetupTestSuite1) {
+ EXPECT_STRNE(nullptr, shared_resource_);
+}
+// Another test that uses the shared resource.
+TEST_F(SetUpTestSuiteTest, TestSetupTestSuite2) {
+ EXPECT_STREQ("123", shared_resource_);
+}
// The ParseFlagsTest test case tests ParseGoogleTestFlagsOnly.
@@ -5647,11 +5672,11 @@ class ParseFlagsTest : public Test {
// Asserts that two narrow or wide string arrays are equal.
template <typename CharType>
- static void AssertStringArrayEq(size_t size1, CharType** array1,
- size_t size2, CharType** array2) {
+ static void AssertStringArrayEq(int size1, CharType** array1, int size2,
+ CharType** array2) {
ASSERT_EQ(size1, size2) << " Array sizes different.";
- for (size_t i = 0; i != size1; i++) {
+ for (int i = 0; i != size1; i++) {
ASSERT_STREQ(array1[i], array2[i]) << " where i == " << i;
}
}
@@ -6229,7 +6254,7 @@ class CurrentTestInfoTest : public Test {
protected:
// Tests that current_test_info() returns NULL before the first test in
// the test case is run.
- static void SetUpTestCase() {
+ static void SetUpTestSuite() {
// There should be no tests running at this point.
const TestInfo* test_info =
UnitTest::GetInstance()->current_test_info();
@@ -6239,7 +6264,7 @@ class CurrentTestInfoTest : public Test {
// Tests that current_test_info() returns NULL after the last test in
// the test case has run.
- static void TearDownTestCase() {
+ static void TearDownTestSuite() {
const TestInfo* test_info =
UnitTest::GetInstance()->current_test_info();
EXPECT_TRUE(test_info == nullptr)
@@ -6249,14 +6274,14 @@ class CurrentTestInfoTest : public Test {
// Tests that current_test_info() returns TestInfo for currently running
// test by checking the expected test name against the actual one.
-TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestCase) {
+TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestSuite) {
const TestInfo* test_info =
UnitTest::GetInstance()->current_test_info();
ASSERT_TRUE(nullptr != test_info)
<< "There is a test running so we should have a valid TestInfo.";
EXPECT_STREQ("CurrentTestInfoTest", test_info->test_case_name())
<< "Expected the name of the currently running test case.";
- EXPECT_STREQ("WorksForFirstTestInATestCase", test_info->name())
+ EXPECT_STREQ("WorksForFirstTestInATestSuite", test_info->name())
<< "Expected the name of the currently running test.";
}
@@ -6264,14 +6289,14 @@ TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestCase) {
// test by checking the expected test name against the actual one. We
// use this test to see that the TestInfo object actually changed from
// the previous invocation.
-TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestCase) {
+TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestSuite) {
const TestInfo* test_info =
UnitTest::GetInstance()->current_test_info();
ASSERT_TRUE(nullptr != test_info)
<< "There is a test running so we should have a valid TestInfo.";
EXPECT_STREQ("CurrentTestInfoTest", test_info->test_case_name())
<< "Expected the name of the currently running test case.";
- EXPECT_STREQ("WorksForSecondTestInATestCase", test_info->name())
+ EXPECT_STREQ("WorksForSecondTestInATestSuite", test_info->name())
<< "Expected the name of the currently running test.";
}
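current_test_info() is handy outside this self-test too, for example to name per-test artifacts. A minimal sketch (the ".log" suffix is illustrative; the legacy test_case_name() accessor still exists alongside test_suite_name()):

#include <string>
#include "gtest/gtest.h"

TEST(CurrentTestInfoSketch, BuildsArtifactName) {
  const testing::TestInfo* info =
      testing::UnitTest::GetInstance()->current_test_info();
  ASSERT_NE(info, nullptr);

  // e.g. "CurrentTestInfoSketch.BuildsArtifactName.log"
  const std::string artifact =
      std::string(info->test_suite_name()) + "." + info->name() + ".log";
  EXPECT_FALSE(artifact.empty());
}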
@@ -7054,14 +7079,13 @@ GTEST_TEST(AlternativeNameTest, Works) { // GTEST_TEST is the same as TEST.
// Tests for internal utilities necessary for implementation of the universal
// printing.
-// FIXME: Find a better home for them.
class ConversionHelperBase {};
class ConversionHelperDerived : public ConversionHelperBase {};
// Tests that IsAProtocolMessage<T>::value is a compile-time constant.
TEST(IsAProtocolMessageTest, ValueIsCompileTimeConstant) {
- GTEST_COMPILE_ASSERT_(IsAProtocolMessage<ProtocolMessage>::value,
+ GTEST_COMPILE_ASSERT_(IsAProtocolMessage<::proto2::Message>::value,
const_true);
GTEST_COMPILE_ASSERT_(!IsAProtocolMessage<int>::value, const_false);
}
@@ -7070,11 +7094,10 @@ TEST(IsAProtocolMessageTest, ValueIsCompileTimeConstant) {
// proto2::Message or a sub-class of it.
TEST(IsAProtocolMessageTest, ValueIsTrueWhenTypeIsAProtocolMessage) {
EXPECT_TRUE(IsAProtocolMessage< ::proto2::Message>::value);
- EXPECT_TRUE(IsAProtocolMessage<ProtocolMessage>::value);
}
// Tests that IsAProtocolMessage<T>::value is false when T is neither
-// ProtocolMessage nor a sub-class of it.
+// ::proto2::Message nor a sub-class of it.
TEST(IsAProtocolMessageTest, ValueIsFalseWhenTypeIsNotAProtocolMessage) {
EXPECT_FALSE(IsAProtocolMessage<int>::value);
EXPECT_FALSE(IsAProtocolMessage<const ConversionHelperBase>::value);
@@ -7111,33 +7134,6 @@ TEST(RemoveReferenceTest, MacroVersion) {
TestGTestRemoveReference<const char, const char&>();
}
-
-// Tests that RemoveConst does not affect non-const types.
-TEST(RemoveConstTest, DoesNotAffectNonConstType) {
- CompileAssertTypesEqual<int, RemoveConst<int>::type>();
- CompileAssertTypesEqual<char&, RemoveConst<char&>::type>();
-}
-
-// Tests that RemoveConst removes const from const types.
-TEST(RemoveConstTest, RemovesConst) {
- CompileAssertTypesEqual<int, RemoveConst<const int>::type>();
- CompileAssertTypesEqual<char[2], RemoveConst<const char[2]>::type>();
- CompileAssertTypesEqual<char[2][3], RemoveConst<const char[2][3]>::type>();
-}
-
-// Tests GTEST_REMOVE_CONST_.
-
-template <typename T1, typename T2>
-void TestGTestRemoveConst() {
- CompileAssertTypesEqual<T1, GTEST_REMOVE_CONST_(T2)>();
-}
-
-TEST(RemoveConstTest, MacroVersion) {
- TestGTestRemoveConst<int, int>();
- TestGTestRemoveConst<double&, double&>();
- TestGTestRemoveConst<char, const char>();
-}
-
// Tests GTEST_REMOVE_REFERENCE_AND_CONST_.
template <typename T1, typename T2>
@@ -7153,30 +7149,6 @@ TEST(RemoveReferenceToConstTest, Works) {
TestGTestRemoveReferenceAndConst<const char*, const char*>();
}
-// Tests that AddReference does not affect reference types.
-TEST(AddReferenceTest, DoesNotAffectReferenceType) {
- CompileAssertTypesEqual<int&, AddReference<int&>::type>();
- CompileAssertTypesEqual<const char&, AddReference<const char&>::type>();
-}
-
-// Tests that AddReference adds reference to non-reference types.
-TEST(AddReferenceTest, AddsReference) {
- CompileAssertTypesEqual<int&, AddReference<int>::type>();
- CompileAssertTypesEqual<const char&, AddReference<const char>::type>();
-}
-
-// Tests GTEST_ADD_REFERENCE_.
-
-template <typename T1, typename T2>
-void TestGTestAddReference() {
- CompileAssertTypesEqual<T1, GTEST_ADD_REFERENCE_(T2)>();
-}
-
-TEST(AddReferenceTest, MacroVersion) {
- TestGTestAddReference<int&, int>();
- TestGTestAddReference<const char&, const char&>();
-}
-
// Tests GTEST_REFERENCE_TO_CONST_.
template <typename T1, typename T2>
@@ -7191,35 +7163,6 @@ TEST(GTestReferenceToConstTest, Works) {
TestGTestReferenceToConst<const std::string&, const std::string&>();
}
-// Tests that ImplicitlyConvertible<T1, T2>::value is a compile-time constant.
-TEST(ImplicitlyConvertibleTest, ValueIsCompileTimeConstant) {
- GTEST_COMPILE_ASSERT_((ImplicitlyConvertible<int, int>::value), const_true);
- GTEST_COMPILE_ASSERT_((!ImplicitlyConvertible<void*, int*>::value),
- const_false);
-}
-
-// Tests that ImplicitlyConvertible<T1, T2>::value is true when T1 can
-// be implicitly converted to T2.
-TEST(ImplicitlyConvertibleTest, ValueIsTrueWhenConvertible) {
- EXPECT_TRUE((ImplicitlyConvertible<int, double>::value));
- EXPECT_TRUE((ImplicitlyConvertible<double, int>::value));
- EXPECT_TRUE((ImplicitlyConvertible<int*, void*>::value));
- EXPECT_TRUE((ImplicitlyConvertible<int*, const int*>::value));
- EXPECT_TRUE((ImplicitlyConvertible<ConversionHelperDerived&,
- const ConversionHelperBase&>::value));
- EXPECT_TRUE((ImplicitlyConvertible<const ConversionHelperBase,
- ConversionHelperBase>::value));
-}
-
-// Tests that ImplicitlyConvertible<T1, T2>::value is false when T1
-// cannot be implicitly converted to T2.
-TEST(ImplicitlyConvertibleTest, ValueIsFalseWhenNotConvertible) {
- EXPECT_FALSE((ImplicitlyConvertible<double, int*>::value));
- EXPECT_FALSE((ImplicitlyConvertible<void*, int*>::value));
- EXPECT_FALSE((ImplicitlyConvertible<const int*, int*>::value));
- EXPECT_FALSE((ImplicitlyConvertible<ConversionHelperBase&,
- ConversionHelperDerived&>::value));
-}
// Tests IsContainerTest.
@@ -7530,14 +7473,14 @@ TEST(SkipPrefixTest, DoesNotSkipWhenPrefixDoesNotMatch) {
class AdHocTestResultTest : public testing::Test {
protected:
- static void SetUpTestCase() {
- FAIL() << "A failure happened inside SetUpTestCase().";
+ static void SetUpTestSuite() {
+ FAIL() << "A failure happened inside SetUpTestSuite().";
}
};
-TEST_F(AdHocTestResultTest, AdHocTestResultForTestCaseShowsFailure) {
+TEST_F(AdHocTestResultTest, AdHocTestResultForTestSuiteShowsFailure) {
const testing::TestResult& test_result = testing::UnitTest::GetInstance()
- ->current_test_case()
+ ->current_test_suite()
->ad_hoc_test_result();
EXPECT_TRUE(test_result.Failed());
}
@@ -7547,3 +7490,30 @@ TEST_F(AdHocTestResultTest, AdHocTestResultTestForUnitTestDoesNotShowFailure) {
testing::UnitTest::GetInstance()->ad_hoc_test_result();
EXPECT_FALSE(test_result.Failed());
}
+
+class DynamicUnitTestFixture : public testing::Test {};
+
+class DynamicTest : public DynamicUnitTestFixture {
+ void TestBody() override { EXPECT_TRUE(true); }
+};
+
+auto* dynamic_test = testing::RegisterTest(
+ "DynamicUnitTestFixture", "DynamicTest", "TYPE", "VALUE", __FILE__,
+ __LINE__, []() -> DynamicUnitTestFixture* { return new DynamicTest; });
+
+TEST(RegisterTest, WasRegistered) {
+ auto* unittest = testing::UnitTest::GetInstance();
+ for (int i = 0; i < unittest->total_test_suite_count(); ++i) {
+ auto* tests = unittest->GetTestSuite(i);
+ if (tests->name() != std::string("DynamicUnitTestFixture")) continue;
+ for (int j = 0; j < tests->total_test_count(); ++j) {
+ if (tests->GetTestInfo(j)->name() != std::string("DynamicTest")) continue;
+ // Found it.
+ EXPECT_STREQ(tests->GetTestInfo(j)->value_param(), "VALUE");
+ EXPECT_STREQ(tests->GetTestInfo(j)->type_param(), "TYPE");
+ return;
+ }
+ }
+
+ FAIL() << "Didn't find the test!";
+}
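RegisterTest, exercised above, is the supported way to create tests whose names or count are only known at run time. A hedged sketch of registering one test per input value from main() (the fixture, suite name, and data are illustrative):

#include <string>
#include <utility>
#include <vector>
#include "gtest/gtest.h"

class InputCheckFixture : public testing::Test {
 public:
  explicit InputCheckFixture(std::string value) : value_(std::move(value)) {}
  void TestBody() override { EXPECT_FALSE(value_.empty()); }

 private:
  std::string value_;
};

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  const std::vector<std::string> inputs = {"alpha", "beta"};  // illustrative
  for (size_t i = 0; i < inputs.size(); ++i) {
    const std::string value = inputs[i];
    testing::RegisterTest(
        "InputCheck", ("Input" + std::to_string(i)).c_str(),
        /*type_param=*/nullptr, /*value_param=*/value.c_str(), __FILE__,
        __LINE__,
        // The factory returns a pointer to the fixture; googletest owns it.
        [value]() -> InputCheckFixture* {
          return new InputCheckFixture(value);
        });
  }
  return RUN_ALL_TESTS();
}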
diff --git a/googletest/test/gtest_xml_outfiles_test.py b/googletest/test/gtest_xml_outfiles_test.py
index 2c031ff..e093f6f 100755
--- a/googletest/test/gtest_xml_outfiles_test.py
+++ b/googletest/test/gtest_xml_outfiles_test.py
@@ -42,8 +42,8 @@ GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
- <testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne">
+ <testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="TestSomeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyOne">
<properties>
<property name="SetUpProp" value="1"/>
<property name="TestSomeProperty" value="1"/>
@@ -56,8 +56,8 @@ EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
- <testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo">
+ <testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="TestSomeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyTwo">
<properties>
<property name="SetUpProp" value="2"/>
<property name="TestSomeProperty" value="2"/>
@@ -111,11 +111,6 @@ class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
- # FIXME: libtool causes the built test binary to be
- # named lt-gtest_xml_outfiles_test_ instead of
- # gtest_xml_outfiles_test_. To account for this possibility, we
- # allow both names in the following code. We should remove this
- # when libtool replacement tool is ready.
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
diff --git a/googletest/test/gtest_xml_output_unittest.py b/googletest/test/gtest_xml_output_unittest.py
index ab733d1..63b1af0 100755
--- a/googletest/test/gtest_xml_output_unittest.py
+++ b/googletest/test/gtest_xml_output_unittest.py
@@ -66,20 +66,20 @@ else:
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="24" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
- <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
+ <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
</testsuite>
- <testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
- <testcase name="Fails" status="run" time="*" classname="FailedTest">
+ <testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="Fails" status="run" result="completed" time="*" timestamp="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Expected equality of these values:
1
2%(stack)s]]></failure>
</testcase>
</testsuite>
- <testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
- <testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
- <testcase name="Fails" status="run" time="*" classname="MixedResultTest">
+ <testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*" timestamp="*">
+ <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest"/>
+ <testcase name="Fails" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A; 1&#x0A; 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Expected equality of these values:
1
@@ -89,112 +89,114 @@ Expected equality of these values:
2
3%(stack)s]]></failure>
</testcase>
- <testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
+ <testcase name="DISABLED_test" status="notrun" result="suppressed" time="*" timestamp="*" classname="MixedResultTest"/>
</testsuite>
- <testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
- <testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
+ <testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="OutputsCData" status="run" result="completed" time="*" timestamp="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;XML output: &lt;?xml encoding=&quot;utf-8&quot;&gt;&lt;top&gt;&lt;![CDATA[cdata text]]&gt;&lt;/top&gt;" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]&gt;<![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
- <testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
- <testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
+ <testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="InvalidCharactersInMessage" status="run" result="completed" time="*" timestamp="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
- <testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
- <testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
+ <testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*" timestamp="*">
+ <testcase name="DISABLED_test_not_run" status="notrun" result="suppressed" time="*" timestamp="*" classname="DisabledTest"/>
</testsuite>
- <testsuite name="SkippedTest" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="Skipped" status="skipped" time="*" classname="SkippedTest"/>
+ <testsuite name="SkippedTest" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="Skipped" status="run" result="skipped" time="*" timestamp="*" classname="SkippedTest"/>
</testsuite>
- <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
- <testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest">
+ <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye">
+ <testcase name="OneProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="1"/>
</properties>
</testcase>
- <testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest">
+ <testcase name="IntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_int" value="1"/>
</properties>
</testcase>
- <testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest">
+ <testcase name="ThreeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="1"/>
<property name="key_2" value="2"/>
<property name="key_3" value="3"/>
</properties>
</testcase>
- <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest">
+ <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="2"/>
</properties>
</testcase>
</testsuite>
- <testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
- <testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest">
+ <testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="RecordProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
<properties>
<property name="key" value="1"/>
</properties>
</testcase>
- <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest">
+ <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
<properties>
<property name="key_for_utility_int" value="1"/>
</properties>
</testcase>
- <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest">
+ <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
<properties>
<property name="key_for_utility_string" value="1"/>
</properties>
</testcase>
</testsuite>
- <testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
- <testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
- <testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
- <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
- <testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
+ <testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="HasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="HasValueParamAttribute/1" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+ <testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
</testsuite>
- <testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
+ <testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/0" />
</testsuite>
- <testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
+ <testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/1" />
</testsuite>
- <testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
+ <testsuite name="Single/TypeParameterizedTestSuite/0" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" />
</testsuite>
- <testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
+ <testsuite name="Single/TypeParameterizedTestSuite/1" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/1" />
</testsuite>
-</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
+</testsuites>""" % {
+ 'stack': STACK_TRACE_TEMPLATE
+}
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
- errors="0" time="*">
- <testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
+ errors="0" time="*" timestamp="*">
+ <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""
EXPECTED_SHARDED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="3" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
- <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
+ <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
</testsuite>
- <testsuite name="PropertyRecordingTest" tests="1" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
- <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest">
+ <testsuite name="PropertyRecordingTest" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye">
+ <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
<properties>
<property name="key_1" value="2"/>
</properties>
</testcase>
</testsuite>
- <testsuite name="Single/ValueParamTest" tests="1" failures="0" disabled="0" errors="0" time="*">
- <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
+ <testsuite name="Single/ValueParamTest" tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*">
+ <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
</testsuite>
</testsuites>"""
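The three result values asserted above map directly to how a test finishes. A sketch (test names are illustrative) of code that would produce result="completed", result="skipped", and result="suppressed" when run with --gtest_output=xml:report.xml:

#include "gtest/gtest.h"

TEST(ResultAttributeSketch, Completed) {  // result="completed"
  EXPECT_TRUE(true);
}

TEST(ResultAttributeSketch, Skipped) {  // status="run" result="skipped"
  GTEST_SKIP() << "missing external resource";
  FAIL() << "not reached";
}

TEST(ResultAttributeSketch, DISABLED_Suppressed) {  // status="notrun" result="suppressed"
  FAIL() << "runs only with --gtest_also_run_disabled_tests";
}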
diff --git a/googletest/test/gtest_xml_output_unittest_.cc b/googletest/test/gtest_xml_output_unittest_.cc
index 39d9b4e..c95fd66 100644
--- a/googletest/test/gtest_xml_output_unittest_.cc
+++ b/googletest/test/gtest_xml_output_unittest_.cc
@@ -101,8 +101,10 @@ TEST(InvalidCharactersTest, InvalidCharactersInMessage) {
class PropertyRecordingTest : public Test {
public:
- static void SetUpTestCase() { RecordProperty("SetUpTestCase", "yes"); }
- static void TearDownTestCase() { RecordProperty("TearDownTestCase", "aye"); }
+ static void SetUpTestSuite() { RecordProperty("SetUpTestSuite", "yes"); }
+ static void TearDownTestSuite() {
+ RecordProperty("TearDownTestSuite", "aye");
+ }
};
TEST_F(PropertyRecordingTest, OneProperty) {
@@ -150,28 +152,28 @@ TEST(NoFixtureTest, ExternalUtilityThatCallsRecordStringValuedProperty) {
class ValueParamTest : public TestWithParam<int> {};
TEST_P(ValueParamTest, HasValueParamAttribute) {}
TEST_P(ValueParamTest, AnotherTestThatHasValueParamAttribute) {}
-INSTANTIATE_TEST_CASE_P(Single, ValueParamTest, Values(33, 42));
+INSTANTIATE_TEST_SUITE_P(Single, ValueParamTest, Values(33, 42));
#if GTEST_HAS_TYPED_TEST
// Verifies that the type parameter name is output in the 'type_param'
// XML attribute for typed tests.
template <typename T> class TypedTest : public Test {};
typedef testing::Types<int, long> TypedTestTypes;
-TYPED_TEST_CASE(TypedTest, TypedTestTypes);
+TYPED_TEST_SUITE(TypedTest, TypedTestTypes);
TYPED_TEST(TypedTest, HasTypeParamAttribute) {}
#endif
#if GTEST_HAS_TYPED_TEST_P
// Verifies that the type parameter name is output in the 'type_param'
// XML attribute for type-parameterized tests.
-template <typename T> class TypeParameterizedTestCase : public Test {};
-TYPED_TEST_CASE_P(TypeParameterizedTestCase);
-TYPED_TEST_P(TypeParameterizedTestCase, HasTypeParamAttribute) {}
-REGISTER_TYPED_TEST_CASE_P(TypeParameterizedTestCase, HasTypeParamAttribute);
-typedef testing::Types<int, long> TypeParameterizedTestCaseTypes;
-INSTANTIATE_TYPED_TEST_CASE_P(Single,
- TypeParameterizedTestCase,
- TypeParameterizedTestCaseTypes);
+template <typename T>
+class TypeParameterizedTestSuite : public Test {};
+TYPED_TEST_SUITE_P(TypeParameterizedTestSuite);
+TYPED_TEST_P(TypeParameterizedTestSuite, HasTypeParamAttribute) {}
+REGISTER_TYPED_TEST_SUITE_P(TypeParameterizedTestSuite, HasTypeParamAttribute);
+typedef testing::Types<int, long> TypeParameterizedTestSuiteTypes; // NOLINT
+INSTANTIATE_TYPED_TEST_SUITE_P(Single, TypeParameterizedTestSuite,
+ TypeParameterizedTestSuiteTypes);
#endif
int main(int argc, char** argv) {
diff --git a/googletest/test/gtest_xml_test_utils.py b/googletest/test/gtest_xml_test_utils.py
index afcf55e..9914a49 100755
--- a/googletest/test/gtest_xml_test_utils.py
+++ b/googletest/test/gtest_xml_test_utils.py
@@ -169,7 +169,7 @@ class GTestXMLTestCase(gtest_test_utils.TestCase):
* The stack traces are removed.
"""
- if element.tagName == 'testsuites':
+ if element.tagName in ('testsuites', 'testsuite', 'testcase'):
timestamp = element.getAttributeNode('timestamp')
timestamp.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d$',
'*', timestamp.value)