-rwxr-xr-x  run_tests.py                            |  19
-rw-r--r--  scons/SConscript                        |  90
-rw-r--r--  src/gtest-internal-inl.h                |   5
-rw-r--r--  test/gtest-death-test_test.cc           |   6
-rw-r--r--  test/gtest-port_test.cc                 |   8
-rw-r--r--  test/gtest-typed-test_test.cc           |   4
-rw-r--r--  test/gtest-typed-test_test.h            |   2
-rw-r--r--  test/gtest_output_test_.cc              |   4
-rw-r--r--  test/gtest_output_test_golden_lin.txt   |   6
-rw-r--r--  test/gtest_output_test_golden_win.txt   |   6
-rwxr-xr-x  test/run_tests_test.py                  | 147
11 files changed, 151 insertions, 146 deletions
diff --git a/run_tests.py b/run_tests.py
index 727ecca..67014f3 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -98,16 +98,16 @@ KNOWN BUILD DIRECTORIES
defines them as follows (the default build directory is the first one
listed in each group):
On Windows:
- <gtest root>/scons/build/win-dbg8/gtest/scons/
- <gtest root>/scons/build/win-opt8/gtest/scons/
- <gtest root>/scons/build/win-dbg/gtest/scons/
- <gtest root>/scons/build/win-opt/gtest/scons/
+ <gtest root>/scons/build/win-dbg8/scons/
+ <gtest root>/scons/build/win-opt8/scons/
+ <gtest root>/scons/build/win-dbg/scons/
+ <gtest root>/scons/build/win-opt/scons/
On Mac:
- <gtest root>/scons/build/mac-dbg/gtest/scons/
- <gtest root>/scons/build/mac-opt/gtest/scons/
+ <gtest root>/scons/build/mac-dbg/scons/
+ <gtest root>/scons/build/mac-opt/scons/
On other platforms:
- <gtest root>/scons/build/dbg/gtest/scons/
- <gtest root>/scons/build/opt/gtest/scons/
+ <gtest root>/scons/build/dbg/scons/
+ <gtest root>/scons/build/opt/scons/
AUTHOR
Written by Zhanyong Wan (wan@google.com)
@@ -177,8 +177,7 @@ class TestRunner(object):
"""Returns the build directory for a given configuration."""
return self.os.path.normpath(
- self.os.path.join(self.script_dir,
- 'scons/build/%s/gtest/scons' % config))
+ self.os.path.join(self.script_dir, 'scons/build', config, 'scons'))
def Run(self, args):
"""Runs the executable with given args (args[0] is the executable name).
diff --git a/scons/SConscript b/scons/SConscript
index 21c3e6d..8fbd5f5 100644
--- a/scons/SConscript
+++ b/scons/SConscript
@@ -109,13 +109,25 @@ def NewEnvironment(env, type):
return new_env;
+def Remove(env, attribute, value):
+ """Removes the given attribute value from the environment."""
+
+ attribute_values = env[attribute]
+ if value in attribute_values:
+ attribute_values.remove(value)
+
+
Import('env')
env = NewEnvironment(env, '')
-# Note: The relative paths in SConscript files are relative to the location of
-# the SConscript file itself. To make a path relative to the location of the
-# main SConstruct file, prepend the path with the # sign.
-
+# Note: The relative paths in SConscript files are relative to the location
+# of the SConscript file itself. To make a path relative to the location of
+# the main SConstruct file, prepend the path with the # sign.
+#
+# But if a project uses variant builds without source duplication, the above
+# rule gets muddied a bit. In that case the paths must be counted from the
+# location of the copy of the SConscript file in scons/build/<config>/scons.
+#
# Include paths to gtest headers are relative to either the gtest
# directory or the 'include' subdirectory of it, and this SConscript
# file is one directory deeper than the gtest directory.
@@ -124,32 +136,33 @@ env.Prepend(CPPPATH = ['..', '../include'])
env_use_own_tuple = NewEnvironment(env, 'use_own_tuple')
env_use_own_tuple.Append(CPPDEFINES = 'GTEST_USE_OWN_TR1_TUPLE=1')
-env_with_exceptions = NewEnvironment(env, 'ex')
+# Needed to allow gtest_unittest.cc, which triggers a gcc warning when
+# testing EXPECT_EQ(NULL, ptr), to compile.
+env_warning_ok = NewEnvironment(env, 'warning_ok')
+if env_warning_ok['PLATFORM'] == 'win32':
+ Remove(env_warning_ok, 'CCFLAGS', '-WX')
+else:
+ Remove(env_warning_ok, 'CCFLAGS', '-Werror')
+
+env_with_exceptions = NewEnvironment(env_warning_ok, 'ex')
if env_with_exceptions['PLATFORM'] == 'win32':
env_with_exceptions.Append(CCFLAGS=['/EHsc'])
env_with_exceptions.Append(CPPDEFINES='_HAS_EXCEPTIONS=1')
- cppdefines = env_with_exceptions['CPPDEFINES']
# Undoes the _TYPEINFO_ hack, which is unnecessary and only creates
# trouble when exceptions are enabled.
- if '_TYPEINFO_' in cppdefines:
- cppdefines.remove('_TYPEINFO_')
- if '_HAS_EXCEPTIONS=0' in cppdefines:
- cppdefines.remove('_HAS_EXCEPTIONS=0')
+ Remove(env_with_exceptions, 'CPPDEFINES', '_TYPEINFO_')
+ Remove(env_with_exceptions, 'CPPDEFINES', '_HAS_EXCEPTIONS=0')
else:
env_with_exceptions.Append(CCFLAGS='-fexceptions')
- ccflags = env_with_exceptions['CCFLAGS']
- if '-fno-exceptions' in ccflags:
- ccflags.remove('-fno-exceptions')
+ Remove(env_with_exceptions, 'CCFLAGS', '-fno-exceptions')
# We need to disable some optimization flags for some tests on
# Windows; otherwise the redirection of stdout does not work
# (apparently because of a compiler bug).
env_less_optimized = NewEnvironment(env, 'less_optimized')
if env_less_optimized['PLATFORM'] == 'win32':
- linker_flags = env_less_optimized['LINKFLAGS']
for flag in ['/O1', '/Os', '/Og', '/Oy']:
- if flag in linker_flags:
- linker_flags.remove(flag)
+ Remove(env_less_optimized, 'LINKFLAGS', flag)
# Assuming POSIX-like environment with GCC.
# TODO(vladl@google.com): sniff presence of pthread_atfork instead of
@@ -159,7 +172,7 @@ if env_with_threads['PLATFORM'] != 'win32':
env_with_threads.Append(CCFLAGS=['-pthread'])
env_with_threads.Append(LINKFLAGS=['-pthread'])
-env_without_rtti = NewEnvironment(env, 'no_rtti')
+env_without_rtti = NewEnvironment(env_warning_ok, 'no_rtti')
if env_without_rtti['PLATFORM'] == 'win32':
env_without_rtti.Append(CCFLAGS=['/GR-'])
else:
@@ -169,12 +182,19 @@ else:
############################################################
# Helpers for creating build targets.
+# Caches object file targets built by GtestObject to allow passing the
+# same source file with the same environment twice into the function as a
+# convenience.
+_all_objects = {}
+
def GtestObject(build_env, source):
"""Returns a target to build an object file from the given .cc source file."""
- return build_env.Object(
- target=os.path.basename(source).rstrip('.cc') + build_env['OBJ_SUFFIX'],
- source=source)
+ object_name = os.path.basename(source).rstrip('.cc') + build_env['OBJ_SUFFIX']
+ if object_name not in _all_objects:
+ _all_objects[object_name] = build_env.Object(target=object_name,
+ source=source)
+ return _all_objects[object_name]
def GtestStaticLibraries(build_env):
@@ -206,17 +226,16 @@ def GtestBinary(build_env, target, gtest_libs, sources):
gtest_libs: The gtest library or the list of libraries to link.
sources: A list of source files in the target.
"""
- if build_env['OBJ_SUFFIX']:
- srcs = [] # The object targets corresponding to sources.
- for src in sources:
- if type(src) is str:
- srcs.append(GtestObject(build_env, src))
- else:
- srcs.append(src)
- else:
- srcs = sources
-
- if type(gtest_libs) != type(list()):
+ srcs = [] # The object targets corresponding to sources.
+ for src in sources:
+ if type(src) is str:
+ srcs.append(GtestObject(build_env, src))
+ else:
+ srcs.append(src)
+
+ if not gtest_libs:
+ gtest_libs = []
+ elif type(gtest_libs) != type(list()):
gtest_libs = [gtest_libs]
binary = build_env.Program(target=target, source=srcs, LIBS=gtest_libs)
if 'EXE_OUTPUT' in build_env.Dictionary():
@@ -301,11 +320,11 @@ GtestTest(env, 'gtest_xml_outfile1_test_', gtest_main)
GtestTest(env, 'gtest_xml_outfile2_test_', gtest_main)
GtestTest(env, 'gtest_xml_output_unittest_', gtest_main)
GtestTest(env, 'gtest-unittest-api_test', gtest)
-GtestTest(env, 'gtest_unittest', gtest_main)
############################################################
# Tests targets using custom environments.
+GtestTest(env_warning_ok, 'gtest_unittest', gtest_main)
GtestTest(env_with_exceptions, 'gtest_output_test_', gtest_ex)
GtestTest(env_with_exceptions, 'gtest_throw_on_failure_ex_test', gtest_ex)
GtestTest(env_with_threads, 'gtest-death-test_test', gtest_main)
@@ -332,14 +351,15 @@ GtestBinary(env_without_rtti, 'gtest_no_rtti_test', gtest_main_no_rtti,
# my_environment = Environment(variables = vars, ...)
# Then, in the command line use GTEST_BUILD_SAMPLES=true to enable them.
if env.get('GTEST_BUILD_SAMPLES', False):
- sample1_obj = env.Object('../samples/sample1.cc')
- GtestSample(env, 'sample1_unittest', additional_sources=[sample1_obj])
+ GtestSample(env, 'sample1_unittest',
+ additional_sources=['../samples/sample1.cc'])
GtestSample(env, 'sample2_unittest',
additional_sources=['../samples/sample2.cc'])
GtestSample(env, 'sample3_unittest')
GtestSample(env, 'sample4_unittest',
additional_sources=['../samples/sample4.cc'])
- GtestSample(env, 'sample5_unittest', additional_sources=[sample1_obj])
+ GtestSample(env, 'sample5_unittest',
+ additional_sources=['../samples/sample1.cc'])
GtestSample(env, 'sample6_unittest')
GtestSample(env, 'sample7_unittest')
GtestSample(env, 'sample8_unittest')
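
[Editorial note: a standalone sketch of the two helpers this SConscript adds,
written as ordinary Python with a plain dict standing in for the SCons
Environment; GetObject and build_fn are illustrative names, not part of the
SConscript.]

    def Remove(env, attribute, value):
        # Drops value from env[attribute] if present; a no-op otherwise.
        attribute_values = env[attribute]
        if value in attribute_values:
            attribute_values.remove(value)

    _all_objects = {}  # object name -> target, so a source is only built once

    def GetObject(build_fn, object_name):
        if object_name not in _all_objects:
            _all_objects[object_name] = build_fn(object_name)
        return _all_objects[object_name]

    env = {'CCFLAGS': ['-Werror', '-O2']}
    Remove(env, 'CCFLAGS', '-Werror')                    # env['CCFLAGS'] == ['-O2']
    GetObject(lambda name: name + '.obj', 'gtest-all')   # built once
    GetObject(lambda name: name + '.obj', 'gtest-all')   # served from the cache
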
diff --git a/src/gtest-internal-inl.h b/src/gtest-internal-inl.h
index 5bb981d..189852e 100644
--- a/src/gtest-internal-inl.h
+++ b/src/gtest-internal-inl.h
@@ -93,7 +93,7 @@ const char kShuffleFlag[] = "shuffle";
const char kThrowOnFailureFlag[] = "throw_on_failure";
// A valid random seed must be in [1, kMaxRandomSeed].
-const unsigned int kMaxRandomSeed = 99999;
+const int kMaxRandomSeed = 99999;
// Returns the current time in milliseconds.
TimeInMillis GetTimeInMillis();
@@ -108,7 +108,8 @@ inline int GetRandomSeedFromFlag(Int32 random_seed_flag) {
// Normalizes the actual seed to range [1, kMaxRandomSeed] such that
// it's easy to type.
const int normalized_seed =
- static_cast<int>((raw_seed - 1U) % kMaxRandomSeed) + 1;
+ static_cast<int>((raw_seed - 1U) %
+ static_cast<unsigned int>(kMaxRandomSeed)) + 1;
return normalized_seed;
}
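
[Editorial note: the normalization above maps any 32-bit seed into
[1, kMaxRandomSeed]; a quick sketch of the same arithmetic, with Python
integers emulating the unsigned wrap-around that the C++ code gets from
computing (raw_seed - 1U).]

    K_MAX_RANDOM_SEED = 99999

    def normalize_seed(raw_seed):
        # (raw_seed - 1) is unsigned in the C++ code, so 0 wraps to 0xFFFFFFFF
        # before the modulo; emulate that wrap explicitly here.
        return ((raw_seed - 1) % 2**32) % K_MAX_RANDOM_SEED + 1

    print(normalize_seed(1))       # 1
    print(normalize_seed(99999))   # 99999
    print(normalize_seed(100000))  # 1
    print(normalize_seed(0))       # 10246 (0 wraps to 4294967295 first)
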
diff --git a/test/gtest-death-test_test.cc b/test/gtest-death-test_test.cc
index e9317e1..d5f1598 100644
--- a/test/gtest-death-test_test.cc
+++ b/test/gtest-death-test_test.cc
@@ -1057,16 +1057,16 @@ TEST(ParseNaturalNumberTest, AcceptsValidNumbers) {
result = 0;
ASSERT_TRUE(ParseNaturalNumber(String("123"), &result));
- EXPECT_EQ(123, result);
+ EXPECT_EQ(123U, result);
// Check 0 as an edge case.
result = 1;
ASSERT_TRUE(ParseNaturalNumber(String("0"), &result));
- EXPECT_EQ(0, result);
+ EXPECT_EQ(0U, result);
result = 1;
ASSERT_TRUE(ParseNaturalNumber(String("00000"), &result));
- EXPECT_EQ(0, result);
+ EXPECT_EQ(0U, result);
}
TEST(ParseNaturalNumberTest, AcceptsTypeLimits) {
diff --git a/test/gtest-port_test.cc b/test/gtest-port_test.cc
index 37880a7..49af8b9 100644
--- a/test/gtest-port_test.cc
+++ b/test/gtest-port_test.cc
@@ -91,7 +91,7 @@ void* ThreadFunc(void* data) {
}
TEST(GetThreadCountTest, ReturnsCorrectValue) {
- EXPECT_EQ(1, GetThreadCount());
+ EXPECT_EQ(1U, GetThreadCount());
pthread_mutex_t mutex;
pthread_attr_t attr;
pthread_t thread_id;
@@ -106,7 +106,7 @@ TEST(GetThreadCountTest, ReturnsCorrectValue) {
const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);
ASSERT_EQ(0, pthread_attr_destroy(&attr));
ASSERT_EQ(0, status);
- EXPECT_EQ(2, GetThreadCount());
+ EXPECT_EQ(2U, GetThreadCount());
pthread_mutex_unlock(&mutex);
void* dummy;
@@ -124,12 +124,12 @@ TEST(GetThreadCountTest, ReturnsCorrectValue) {
time.tv_nsec = 100L * 1000 * 1000; // .1 seconds.
nanosleep(&time, NULL);
}
- EXPECT_EQ(1, GetThreadCount());
+ EXPECT_EQ(1U, GetThreadCount());
pthread_mutex_destroy(&mutex);
}
#else
TEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) {
- EXPECT_EQ(0, GetThreadCount());
+ EXPECT_EQ(0U, GetThreadCount());
}
#endif // GTEST_OS_MAC
diff --git a/test/gtest-typed-test_test.cc b/test/gtest-typed-test_test.cc
index eb921a0..8e86ac8 100644
--- a/test/gtest-typed-test_test.cc
+++ b/test/gtest-typed-test_test.cc
@@ -100,10 +100,10 @@ TYPED_TEST(CommonTest, ValuesAreCorrect) {
// Typedefs in the fixture class template can be visited via the
// "typename TestFixture::" prefix.
typename TestFixture::List empty;
- EXPECT_EQ(0, empty.size());
+ EXPECT_EQ(0U, empty.size());
typename TestFixture::IntSet empty2;
- EXPECT_EQ(0, empty2.size());
+ EXPECT_EQ(0U, empty2.size());
// Non-static members of the fixture class must be visited via
// 'this', as required by C++ for class templates.
diff --git a/test/gtest-typed-test_test.h b/test/gtest-typed-test_test.h
index ecbe5b3..40dfeac 100644
--- a/test/gtest-typed-test_test.h
+++ b/test/gtest-typed-test_test.h
@@ -55,7 +55,7 @@ TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) {
TYPED_TEST_P(ContainerTest, InitialSizeIsZero) {
TypeParam container;
- EXPECT_EQ(0, container.size());
+ EXPECT_EQ(0U, container.size());
}
REGISTER_TYPED_TEST_CASE_P(ContainerTest,
diff --git a/test/gtest_output_test_.cc b/test/gtest_output_test_.cc
index 693df3f..6d75602 100644
--- a/test/gtest_output_test_.cc
+++ b/test/gtest_output_test_.cc
@@ -743,11 +743,11 @@ class TypedTestP : public testing::Test {
TYPED_TEST_CASE_P(TypedTestP);
TYPED_TEST_P(TypedTestP, Success) {
- EXPECT_EQ(0, TypeParam());
+ EXPECT_EQ(0U, TypeParam());
}
TYPED_TEST_P(TypedTestP, Failure) {
- EXPECT_EQ(1, TypeParam()) << "Expected failure";
+ EXPECT_EQ(1U, TypeParam()) << "Expected failure";
}
REGISTER_TYPED_TEST_CASE_P(TypedTestP, Success, Failure);
diff --git a/test/gtest_output_test_golden_lin.txt b/test/gtest_output_test_golden_lin.txt
index 46a90fb..51bae52 100644
--- a/test/gtest_output_test_golden_lin.txt
+++ b/test/gtest_output_test_golden_lin.txt
@@ -390,7 +390,8 @@ Expected failure
gtest_output_test_.cc:#: Failure
Value of: TypeParam()
Actual: \0
-Expected: 1
+Expected: 1U
+Which is: 1
Expected failure
[ FAILED ] Unsigned/TypedTestP/0.Failure
[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
@@ -400,7 +401,8 @@ Expected failure
gtest_output_test_.cc:#: Failure
Value of: TypeParam()
Actual: 0
-Expected: 1
+Expected: 1U
+Which is: 1
Expected failure
[ FAILED ] Unsigned/TypedTestP/1.Failure
[----------] 4 tests from ExpectFailureTest
diff --git a/test/gtest_output_test_golden_win.txt b/test/gtest_output_test_golden_win.txt
index 92fe7f4..313c3aa 100644
--- a/test/gtest_output_test_golden_win.txt
+++ b/test/gtest_output_test_golden_win.txt
@@ -376,7 +376,8 @@ Expected failure
[ RUN ] Unsigned/TypedTestP/0.Failure
gtest_output_test_.cc:#: error: Value of: TypeParam()
Actual: \0
-Expected: 1
+Expected: 1U
+Which is: 1
Expected failure
[ FAILED ] Unsigned/TypedTestP/0.Failure
[----------] 2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int
@@ -385,7 +386,8 @@ Expected failure
[ RUN ] Unsigned/TypedTestP/1.Failure
gtest_output_test_.cc:#: error: Value of: TypeParam()
Actual: 0
-Expected: 1
+Expected: 1U
+Which is: 1
Expected failure
[ FAILED ] Unsigned/TypedTestP/1.Failure
[----------] 4 tests from ExpectFailureTest
diff --git a/test/run_tests_test.py b/test/run_tests_test.py
index 2582262..79524a6 100755
--- a/test/run_tests_test.py
+++ b/test/run_tests_test.py
@@ -41,6 +41,12 @@ import unittest
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), os.pardir))
import run_tests
+
+GTEST_DBG_DIR = 'scons/build/dbg/scons'
+GTEST_OPT_DIR = 'scons/build/opt/scons'
+GTEST_OTHER_DIR = 'scons/build/other/scons'
+
+
def AddExeExtension(path):
"""Appends .exe to the path on Windows or Cygwin."""
@@ -182,10 +188,9 @@ class GetTestsToRunTest(unittest.TestCase):
def setUp(self):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
- known_paths=[
- AddExeExtension('scons/build/dbg/gtest/scons/gtest_unittest'),
- AddExeExtension('scons/build/opt/gtest/scons/gtest_unittest'),
- 'test/gtest_color_test.py']))
+ known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
+ AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
+ 'test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
@@ -202,19 +207,17 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# An explicitly specified directory.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+ [GTEST_DBG_DIR, 'gtest_unittest'],
'',
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# A particular configuration.
self.AssertResultsEqual(
@@ -224,8 +227,7 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/other/gtest/scons',
- 'scons/build/other/gtest/scons/gtest_unittest')]))
+ [(GTEST_OTHER_DIR, GTEST_OTHER_DIR + '/gtest_unittest')]))
# All available configurations
self.AssertResultsEqual(
@@ -235,10 +237,8 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest'),
- ('scons/build/opt/gtest/scons',
- 'scons/build/opt/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
# All built configurations (unbuilt don't cause failure).
self.AssertResultsEqual(
@@ -248,47 +248,40 @@ class GetTestsToRunTest(unittest.TestCase):
True,
available_configurations=self.fake_configurations + ['unbuilt']),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest'),
- ('scons/build/opt/gtest/scons',
- 'scons/build/opt/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
# A combination of an explicit directory and a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+ [GTEST_DBG_DIR, 'gtest_unittest'],
'opt',
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest'),
- ('scons/build/opt/gtest/scons',
- 'scons/build/opt/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
# Same test specified in an explicit directory and via a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+ [GTEST_DBG_DIR, 'gtest_unittest'],
'dbg',
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# All built configurations + explicit directory + explicit configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_unittest'],
+ [GTEST_DBG_DIR, 'gtest_unittest'],
'opt',
True,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest'),
- ('scons/build/opt/gtest/scons',
- 'scons/build/opt/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
+ (GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
def testPythonTestsOnly(self):
"""Exercises GetTestsToRun with parameters designating Python tests only."""
@@ -300,17 +293,17 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
# An explicitly specified directory.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'],
+ [GTEST_DBG_DIR, 'test/gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
# A particular configuration.
@@ -320,7 +313,7 @@ class GetTestsToRunTest(unittest.TestCase):
'other',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/other/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_OTHER_DIR, 'test/gtest_color_test.py')],
[]))
# All available configurations
@@ -330,8 +323,8 @@ class GetTestsToRunTest(unittest.TestCase):
'all',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
- ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
# All built configurations (unbuilt don't cause failure).
@@ -341,40 +334,40 @@ class GetTestsToRunTest(unittest.TestCase):
'',
True,
available_configurations=self.fake_configurations + ['unbuilt']),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
- ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
# A combination of an explicit directory and a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
+ [GTEST_DBG_DIR, 'gtest_color_test.py'],
'opt',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
- ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
# Same test specified in an explicit directory and via a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
+ [GTEST_DBG_DIR, 'gtest_color_test.py'],
'dbg',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
# All built configurations + explicit directory + explicit configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
- ['scons/build/dbg/gtest/scons', 'gtest_color_test.py'],
+ [GTEST_DBG_DIR, 'gtest_color_test.py'],
'opt',
True,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py'),
- ('scons/build/opt/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
+ (GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
def testCombinationOfBinaryAndPythonTests(self):
@@ -389,9 +382,8 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]))
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# Specifying both binary and Python tests.
self.AssertResultsEqual(
@@ -400,9 +392,8 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]))
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# Specifying binary tests suppresses Python tests.
self.AssertResultsEqual(
@@ -412,8 +403,7 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]))
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# Specifying Python tests suppresses binary tests.
self.AssertResultsEqual(
@@ -422,7 +412,7 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
- ([('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+ ([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
def testIgnoresNonTestFiles(self):
@@ -430,9 +420,8 @@ class GetTestsToRunTest(unittest.TestCase):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
- known_paths=[
- AddExeExtension('scons/build/dbg/gtest/scons/gtest_nontest'),
- 'test/']))
+ known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_nontest'),
+ 'test/']))
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
injected_script_dir='.')
@@ -453,8 +442,8 @@ class GetTestsToRunTest(unittest.TestCase):
current_dir=os.path.abspath('/a/b/c'),
known_paths=[
'/a/b/c/',
- AddExeExtension('/d/scons/build/dbg/gtest/scons/gtest_unittest'),
- AddExeExtension('/d/scons/build/opt/gtest/scons/gtest_unittest'),
+ AddExeExtension('/d/' + GTEST_DBG_DIR + '/gtest_unittest'),
+ AddExeExtension('/d/' + GTEST_OPT_DIR + '/gtest_unittest'),
'/d/test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
@@ -468,8 +457,7 @@ class GetTestsToRunTest(unittest.TestCase):
False,
available_configurations=self.fake_configurations),
([],
- [('/d/scons/build/dbg/gtest/scons',
- '/d/scons/build/dbg/gtest/scons/gtest_unittest')]))
+ [('/d/' + GTEST_DBG_DIR, '/d/' + GTEST_DBG_DIR + '/gtest_unittest')]))
# A Python test.
self.AssertResultsEqual(
@@ -478,8 +466,7 @@ class GetTestsToRunTest(unittest.TestCase):
'',
False,
available_configurations=self.fake_configurations),
- ([('/d/scons/build/dbg/gtest/scons', '/d/test/gtest_color_test.py')],
- []))
+ ([('/d/' + GTEST_DBG_DIR, '/d/test/gtest_color_test.py')], []))
def testNonTestBinary(self):
@@ -508,7 +495,7 @@ class GetTestsToRunTest(unittest.TestCase):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
- known_paths=['scons/build/dbg/gtest/scons/gtest_test', 'test/']))
+ known_paths=['/d/' + GTEST_DBG_DIR + '/gtest_test', 'test/']))
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
injected_subprocess=None,
injected_script_dir='.')
@@ -540,8 +527,8 @@ class RunTestsTest(unittest.TestCase):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests.__file__)),
known_paths=[
- AddExeExtension('scons/build/dbg/gtest/scons/gtest_unittest'),
- AddExeExtension('scons/build/opt/gtest/scons/gtest_unittest'),
+ AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
+ AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
'test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests.TestRunner(injected_os=self.fake_os,
@@ -554,7 +541,7 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
- [('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+ [(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]),
0)
self.assertEqual(self.num_spawn_calls, 1)
@@ -566,8 +553,7 @@ class RunTestsTest(unittest.TestCase):
self.assertEqual(
self.test_runner.RunTests(
[],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]),
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 1)
@@ -577,7 +563,7 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = self.SpawnFailure
self.assertEqual(
self.test_runner.RunTests(
- [('scons/build/dbg/gtest/scons', 'test/gtest_color_test.py')],
+ [(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]),
1)
self.assertEqual(self.num_spawn_calls, 1)
@@ -589,8 +575,7 @@ class RunTestsTest(unittest.TestCase):
self.assertEqual(
self.test_runner.RunTests(
[],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]),
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
1)
self.assertEqual(self.num_spawn_calls, 1)
@@ -600,10 +585,8 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]),
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 2)
@@ -621,10 +604,8 @@ class RunTestsTest(unittest.TestCase):
self.fake_os.spawn_impl = SpawnImpl
self.assertEqual(
self.test_runner.RunTests(
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')],
- [('scons/build/dbg/gtest/scons',
- 'scons/build/dbg/gtest/scons/gtest_unittest')]),
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
+ [(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 2)