Diffstat (limited to 'googletest/test')
-rw-r--r--  googletest/test/BUILD.bazel                          |  68
-rwxr-xr-x  googletest/test/googletest-filter-unittest.py        |  19
-rw-r--r--  googletest/test/googletest-json-output-unittest.py   | 111
-rw-r--r--  googletest/test/googletest-param-test-test.cc        |  70
-rw-r--r--  googletest/test/googletest-printers-test.cc          |  32
-rw-r--r--  googletest/test/googletest-setuptestsuite-test_.cc   |   4
-rw-r--r--  googletest/test/gtest_unittest.cc                    |  56
-rwxr-xr-x  googletest/test/gtest_xml_output_unittest.py         |  93
-rw-r--r--  googletest/test/gtest_xml_output_unittest_.cc        |  22
9 files changed, 408 insertions, 67 deletions
diff --git a/googletest/test/BUILD.bazel b/googletest/test/BUILD.bazel
index 1890b6ff9ed9..c561ef8b91c3 100644
--- a/googletest/test/BUILD.bazel
+++ b/googletest/test/BUILD.bazel
@@ -45,36 +45,38 @@ cc_test(
             "gtest-*.cc",
             "googletest-*.cc",
             "*.h",
-            "googletest/include/gtest/**/*.h",
         ],
         exclude = [
-            "gtest-unittest-api_test.cc",
-            "googletest/src/gtest-all.cc",
-            "gtest_all_test.cc",
-            "gtest-death-test_ex_test.cc",
-            "gtest-listener_test.cc",
-            "gtest-unittest-api_test.cc",
-            "googletest-param-test-test.cc",
-            "googletest-param-test2-test.cc",
+            # go/keep-sorted start
+            "googletest-break-on-failure-unittest_.cc",
            "googletest-catch-exceptions-test_.cc",
             "googletest-color-test_.cc",
+            "googletest-death-test_ex_test.cc",
             "googletest-env-var-test_.cc",
+            "googletest-fail-if-no-test-linked-test-with-disabled-test_.cc",
+            "googletest-fail-if-no-test-linked-test-with-enabled-test_.cc",
             "googletest-failfast-unittest_.cc",
             "googletest-filter-unittest_.cc",
             "googletest-global-environment-unittest_.cc",
-            "googletest-break-on-failure-unittest_.cc",
+            "googletest-list-tests-unittest_.cc",
             "googletest-listener-test.cc",
             "googletest-message-test.cc",
             "googletest-output-test_.cc",
-            "googletest-list-tests-unittest_.cc",
-            "googletest-shuffle-test_.cc",
-            "googletest-setuptestsuite-test_.cc",
-            "googletest-uninitialized-test_.cc",
-            "googletest-death-test_ex_test.cc",
-            "googletest-param-test-test",
-            "googletest-throw-on-failure-test_.cc",
             "googletest-param-test-invalid-name1-test_.cc",
             "googletest-param-test-invalid-name2-test_.cc",
+            "googletest-param-test-test",
+            "googletest-param-test-test.cc",
+            "googletest-param-test2-test.cc",
+            "googletest-setuptestsuite-test_.cc",
+            "googletest-shuffle-test_.cc",
+            "googletest-throw-on-failure-test_.cc",
+            "googletest-uninitialized-test_.cc",
+            "googletest/src/gtest-all.cc",
+            "gtest-death-test_ex_test.cc",
+            "gtest-listener_test.cc",
+            "gtest-unittest-api_test.cc",
+            "gtest_all_test.cc",
+            # go/keep-sorted end
         ],
     ) + select({
         "//:windows": [],
@@ -324,6 +326,26 @@ cc_binary(
     deps = ["//:gtest"],
 )
 
+cc_binary(
+    name = "googletest-fail-if-no-test-linked-test-without-test_",
+    testonly = 1,
+    deps = ["//:gtest_main"],
+)
+
+cc_binary(
+    name = "googletest-fail-if-no-test-linked-test-with-disabled-test_",
+    testonly = 1,
+    srcs = ["googletest-fail-if-no-test-linked-test-with-disabled-test_.cc"],
+    deps = ["//:gtest_main"],
+)
+
+cc_binary(
+    name = "googletest-fail-if-no-test-linked-test-with-enabled-test_",
+    testonly = 1,
+    srcs = ["googletest-fail-if-no-test-linked-test-with-enabled-test_.cc"],
+    deps = ["//:gtest_main"],
+)
+
 cc_test(
     name = "gtest_skip_test",
     size = "small",
@@ -364,6 +386,18 @@ py_test(
     deps = [":gtest_test_utils"],
 )
 
+py_test(
+    name = "googletest-fail-if-no-test-linked-test",
+    size = "small",
+    srcs = ["googletest-fail-if-no-test-linked-test.py"],
+    data = [
+        ":googletest-fail-if-no-test-linked-test-with-disabled-test_",
+        ":googletest-fail-if-no-test-linked-test-with-enabled-test_",
+        ":googletest-fail-if-no-test-linked-test-without-test_",
+    ],
+    deps = [":gtest_test_utils"],
+)
+
 cc_binary(
     name = "googletest-shuffle-test_",
     srcs = ["googletest-shuffle-test_.cc"],
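Note: the two new googletest-fail-if-no-test-linked-test-with-*-test_.cc sources referenced by these targets are not shown in this diff. A plausible minimal sketch of the with-disabled-test binary is a file containing only a disabled test, while the without-test_ binary lists no srcs at all, so it links gtest_main with no test registered:

    // Hypothetical sketch; the actual file contents are not part of this diff.
    #include "gtest/gtest.h"

    TEST(FailIfNoTestLinkedTest, DISABLED_Test) {}
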
diff --git a/googletest/test/googletest-filter-unittest.py b/googletest/test/googletest-filter-unittest.py
index f1f3c7a513be..a44882a6da67 100755
--- a/googletest/test/googletest-filter-unittest.py
+++ b/googletest/test/googletest-filter-unittest.py
@@ -97,6 +97,9 @@ TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
 SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
 SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
 
+# The environment variable for the test warnings output file.
+TEST_WARNINGS_OUTPUT_FILE = 'TEST_WARNINGS_OUTPUT_FILE'
+
 # The command line flag for specifying the test filters.
 FILTER_FLAG = 'gtest_filter'
 
@@ -419,6 +422,22 @@ class GTestFilterUnitTest(gtest_test_utils.TestCase):
     self.RunAndVerify('BadFilter', [])
     self.RunAndVerifyAllowingDisabled('BadFilter', [])
 
+  def testBadFilterWithWarningFile(self):
+    """Tests the warning file when a filter that matches nothing."""
+
+    warning_file = os.path.join(
+        gtest_test_utils.GetTempDir(), 'testBadFilterWithWarningFile'
+    )
+    extra_env = {TEST_WARNINGS_OUTPUT_FILE: warning_file}
+    args = ['--%s=%s' % (FILTER_FLAG, 'BadFilter')]
+    InvokeWithModifiedEnv(extra_env, RunAndReturnOutput, args)
+    with open(warning_file, 'r') as f:
+      warning_file_contents = f.read()
+    self.assertEqual(
+        warning_file_contents,
+        'filter "BadFilter" did not match any test; no tests were run\n',
+    )
+
   def testFullName(self):
     """Tests filtering by full name."""
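Note: the behavior this test drives can be reproduced by hand with any gtest binary. A hypothetical repro (the binary and paths are illustrative; the warning text is taken verbatim from the assertion above):

    // demo_test.cc - minimal binary for reproducing the warning file by hand.
    #include "gtest/gtest.h"

    TEST(DemoTest, Exists) {}  // nothing here matches the filter "BadFilter"

    // Run:  TEST_WARNINGS_OUTPUT_FILE=/tmp/warnings ./demo_test --gtest_filter=BadFilter
    // Afterwards /tmp/warnings should contain:
    //   filter "BadFilter" did not match any test; no tests were run
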
diff --git a/googletest/test/googletest-json-output-unittest.py b/googletest/test/googletest-json-output-unittest.py
index d3338e3d2f90..c75051c800fb 100644
--- a/googletest/test/googletest-json-output-unittest.py
+++ b/googletest/test/googletest-json-output-unittest.py
@@ -57,7 +57,7 @@ else:
   STACK_TRACE_TEMPLATE = '\n'
 
 EXPECTED_NON_EMPTY = {
-    'tests': 26,
+    'tests': 28,
     'failures': 5,
     'disabled': 2,
     'errors': 0,
@@ -323,12 +323,14 @@ EXPECTED_NON_EMPTY = {
         'time': '*',
         'timestamp': '*',
         'SetUpTestSuite': 'yes',
+        'SetUpTestSuite (with whitespace)': 'yes and yes',
         'TearDownTestSuite': 'aye',
+        'TearDownTestSuite (with whitespace)': 'aye and aye',
         'testsuite': [
             {
                 'name': 'OneProperty',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 121,
+                'line': 125,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -339,7 +341,7 @@ EXPECTED_NON_EMPTY = {
             {
                 'name': 'IntValuedProperty',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 125,
+                'line': 129,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -350,7 +352,7 @@ EXPECTED_NON_EMPTY = {
             {
                 'name': 'ThreeProperties',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 129,
+                'line': 133,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -363,7 +365,7 @@ EXPECTED_NON_EMPTY = {
             {
                 'name': 'TwoValuesForOneKeyUsesLastValue',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 135,
+                'line': 139,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -385,7 +387,7 @@ EXPECTED_NON_EMPTY = {
             {
                 'name': 'RecordProperty',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 140,
+                'line': 144,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -396,7 +398,7 @@ EXPECTED_NON_EMPTY = {
             {
                 'name': 'ExternalUtilityThatCallsRecordIntValuedProperty',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 153,
+                'line': 157,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -409,7 +411,7 @@ EXPECTED_NON_EMPTY = {
                 'name': (
                     'ExternalUtilityThatCallsRecordStringValuedProperty'
                 ),
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 157,
+                'line': 161,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -420,6 +422,83 @@ EXPECTED_NON_EMPTY = {
         ],
     },
     {
+        'name': 'SetupFailTest',
+        'tests': 1,
+        'failures': 0,
+        'disabled': 0,
+        'errors': 0,
+        'time': '*',
+        'timestamp': '*',
+        'testsuite': [
+            {
+                'name': 'NoopPassingTest',
+                'file': 'gtest_xml_output_unittest_.cc',
+                'line': 172,
+                'status': 'RUN',
+                'result': 'SKIPPED',
+                'timestamp': '*',
+                'time': '*',
+                'classname': 'SetupFailTest',
+                'skipped': [
+                    {'message': 'gtest_xml_output_unittest_.cc:*\n'}
+                ],
+            },
+            {
+                'name': '',
+                'status': 'RUN',
+                'result': 'COMPLETED',
+                'timestamp': '*',
+                'time': '*',
+                'classname': '',
+                'failures': [{
+                    'failure': (
+                        'gtest_xml_output_unittest_.cc:*\nExpected equality'
+                        ' of these values:\n  1\n  2'
+                        + STACK_TRACE_TEMPLATE
+                    ),
+                    'type': '',
+                }],
+            },
+        ],
+    },
+    {
+        'name': 'TearDownFailTest',
+        'tests': 1,
+        'failures': 0,
+        'disabled': 0,
+        'errors': 0,
+        'timestamp': '*',
+        'time': '*',
+        'testsuite': [
+            {
+                'name': 'NoopPassingTest',
+                'file': 'gtest_xml_output_unittest_.cc',
+                'line': 179,
+                'status': 'RUN',
+                'result': 'COMPLETED',
+                'timestamp': '*',
+                'time': '*',
+                'classname': 'TearDownFailTest',
+            },
+            {
+                'name': '',
+                'status': 'RUN',
+                'result': 'COMPLETED',
+                'timestamp': '*',
+                'time': '*',
+                'classname': '',
+                'failures': [{
+                    'failure': (
+                        'gtest_xml_output_unittest_.cc:*\nExpected equality'
+                        ' of these values:\n  1\n  2'
+                        + STACK_TRACE_TEMPLATE
+                    ),
+                    'type': '',
+                }],
+            },
+        ],
+    },
+    {
         'name': 'TypedTest/0',
         'tests': 1,
         'failures': 0,
@@ -431,7 +510,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'HasTypeParamAttribute',
                 'type_param': 'int',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 173,
+                'line': 193,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -451,7 +530,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'HasTypeParamAttribute',
                 'type_param': 'long',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 173,
+                'line': 193,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -471,7 +550,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'HasTypeParamAttribute',
                 'type_param': 'int',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 180,
+                'line': 200,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -491,7 +570,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'HasTypeParamAttribute',
                 'type_param': 'long',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 180,
+                'line': 200,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -512,7 +591,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'HasValueParamAttribute/0',
                 'value_param': '33',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 164,
+                'line': 184,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -523,7 +602,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'HasValueParamAttribute/1',
                 'value_param': '42',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 164,
+                'line': 184,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -534,7 +613,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'AnotherTestThatHasValueParamAttribute/0',
                 'value_param': '33',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 165,
+                'line': 185,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
@@ -545,7 +624,7 @@ EXPECTED_NON_EMPTY = {
                 'name': 'AnotherTestThatHasValueParamAttribute/1',
                 'value_param': '42',
                 'file': 'gtest_xml_output_unittest_.cc',
-                'line': 165,
+                'line': 185,
                 'status': 'RUN',
                 'result': 'COMPLETED',
                 'time': '*',
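Note: the new suite-level 'SetUpTestSuite (with whitespace)' keys expected here are produced by RecordProperty calls inside a fixture's static setup (the matching fixture change to gtest_xml_output_unittest_.cc appears at the end of this diff). In sketch form:

    #include "gtest/gtest.h"

    class PropertyDemoTest : public testing::Test {
     public:
      static void SetUpTestSuite() {
        // Recorded outside any individual test, so it attaches to the suite
        // object in the JSON/XML report rather than to a single test case.
        RecordProperty("SetUpTestSuite (with whitespace)", "yes and yes");
      }
    };

    TEST_F(PropertyDemoTest, Noop) {}
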
diff --git a/googletest/test/googletest-param-test-test.cc b/googletest/test/googletest-param-test-test.cc
index c9c5e78e49f4..bc1306019783 100644
--- a/googletest/test/googletest-param-test-test.cc
+++ b/googletest/test/googletest-param-test-test.cc
@@ -35,12 +35,17 @@
 #include "test/googletest-param-test-test.h"
 
 #include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#include <functional>
 #include <iostream>
 #include <list>
 #include <set>
 #include <sstream>
 #include <string>
+#include <string_view>
 #include <tuple>
+#include <type_traits>
 #include <vector>
 
 #include "gtest/gtest.h"
@@ -583,6 +588,71 @@ TEST(ConvertTest, NonDefaultConstructAssign) {
   EXPECT_TRUE(it == gen.end());
 }
 
+TEST(ConvertTest, WithConverterLambdaAndDeducedType) {
+  const ParamGenerator<ConstructFromT<int8_t>> gen =
+      ConvertGenerator(Values("0", std::string("1")), [](const std::string& s) {
+        size_t pos;
+        int64_t value = std::stoll(s, &pos);
+        EXPECT_EQ(pos, s.size());
+        return value;
+      });
+
+  ConstructFromT<int8_t> expected_values[] = {ConstructFromT<int8_t>(0),
+                                              ConstructFromT<int8_t>(1)};
+  VerifyGenerator(gen, expected_values);
+}
+
+TEST(ConvertTest, WithConverterLambdaAndExplicitType) {
+  auto convert_generator = ConvertGenerator<std::string>(
+      Values("0", std::string("1")), [](std::string_view s) {
+        size_t pos;
+        int64_t value = std::stoll(std::string(s), &pos);
+        EXPECT_EQ(pos, s.size());
+        return value;
+      });
+  constexpr bool is_correct_type = std::is_same_v<
+      decltype(convert_generator),
+      testing::internal::ParamConverterGenerator<
+          std::string, std::function<int64_t(std::string_view)>>>;
+  EXPECT_TRUE(is_correct_type);
+  const ParamGenerator<ConstructFromT<int8_t>> gen = convert_generator;
+
+  ConstructFromT<int8_t> expected_values[] = {ConstructFromT<int8_t>(0),
+                                              ConstructFromT<int8_t>(1)};
+  VerifyGenerator(gen, expected_values);
+}
+
+TEST(ConvertTest, WithConverterFunctionPointer) {
+  int64_t (*func_ptr)(const std::string&) = [](const std::string& s) {
+    size_t pos;
+    int64_t value = std::stoll(s, &pos);
+    EXPECT_EQ(pos, s.size());
+    return value;
+  };
+  const ParamGenerator<ConstructFromT<int8_t>> gen =
+      ConvertGenerator(Values("0", std::string("1")), func_ptr);
+
+  ConstructFromT<int8_t> expected_values[] = {ConstructFromT<int8_t>(0),
+                                              ConstructFromT<int8_t>(1)};
+  VerifyGenerator(gen, expected_values);
+}
+
+TEST(ConvertTest, WithConverterFunctionReference) {
+  int64_t (*func_ptr)(const std::string&) = [](const std::string& s) {
+    size_t pos;
+    int64_t value = std::stoll(s, &pos);
+    EXPECT_EQ(pos, s.size());
+    return value;
+  };
+  int64_t (&func_ref)(const std::string&) = *func_ptr;
+  const ParamGenerator<ConstructFromT<int8_t>> gen =
+      ConvertGenerator(Values("0", std::string("1")), func_ref);
+
+  ConstructFromT<int8_t> expected_values[] = {ConstructFromT<int8_t>(0),
+                                              ConstructFromT<int8_t>(1)};
+  VerifyGenerator(gen, expected_values);
+}
+
 // Tests that an generator produces correct sequence after being
 // assigned from another generator.
 TEST(ParamGeneratorTest, AssignmentWorks) {
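Note: the converter overloads exercised above are easiest to see in an INSTANTIATE_TEST_SUITE_P. A sketch, assuming a googletest build that includes the ConvertGenerator overloads tested in this diff (suite and parameter names are illustrative):

    #include <string>
    #include "gtest/gtest.h"

    class SizeTest : public testing::TestWithParam<int> {};

    TEST_P(SizeTest, IsPositive) { EXPECT_GT(GetParam(), 0); }

    // Parameters are generated as strings and converted to the fixture's
    // int parameter type by the lambda.
    INSTANTIATE_TEST_SUITE_P(
        FromStrings, SizeTest,
        testing::ConvertGenerator(
            testing::Values(std::string("1"), std::string("8")),
            [](const std::string& s) { return std::stoi(s); }));
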
diff --git a/googletest/test/googletest-printers-test.cc b/googletest/test/googletest-printers-test.cc
index d5061bef26d7..52b2c497d740 100644
--- a/googletest/test/googletest-printers-test.cc
+++ b/googletest/test/googletest-printers-test.cc
@@ -64,6 +64,10 @@
 #include <span>  // NOLINT
 #endif  // GTEST_INTERNAL_HAS_STD_SPAN
 
+#if GTEST_INTERNAL_HAS_COMPARE_LIB
+#include <compare>  // NOLINT
+#endif  // GTEST_INTERNAL_HAS_COMPARE_LIB
+
 // Some user-defined types for testing the universal value printer.
 
 // An anonymous enum type.
@@ -117,6 +121,9 @@ class UnprintableTemplateInGlobal {
 // A user-defined streamable type in the global namespace.
 class StreamableInGlobal {
  public:
+  StreamableInGlobal() = default;
+  StreamableInGlobal(const StreamableInGlobal&) = default;
+  StreamableInGlobal& operator=(const StreamableInGlobal&) = default;
   virtual ~StreamableInGlobal() = default;
 };
 
@@ -568,6 +575,8 @@ TEST(PrintU8StringTest, Null) {
 }
 
 // Tests that u8 strings are escaped properly.
+// TODO(b/396121064) - Fix this test under MSVC
+#ifndef _MSC_VER
 TEST(PrintU8StringTest, EscapesProperly) {
   const char8_t* p = u8"'\"?\\\a\b\f\n\r\t\v\x7F\xFF hello 世界";
   EXPECT_EQ(PrintPointer(p) +
@@ -575,7 +584,8 @@ TEST(PrintU8StringTest, EscapesProperly) {
                 "hello \\xE4\\xB8\\x96\\xE7\\x95\\x8C\"",
             Print(p));
 }
-#endif
+#endif  // _MSC_VER
+#endif  // __cpp_lib_char8_t
 
 // const char16_t*.
 TEST(PrintU16StringTest, Const) {
@@ -1970,6 +1980,26 @@ TEST(PrintOneofTest, Basic) {
                 PrintToString(Type(NonPrintable{})));
 }
 #endif  // GTEST_INTERNAL_HAS_VARIANT
+
+#if GTEST_INTERNAL_HAS_COMPARE_LIB
+TEST(PrintOrderingTest, Basic) {
+  EXPECT_EQ("(less)", PrintToString(std::strong_ordering::less));
+  EXPECT_EQ("(greater)", PrintToString(std::strong_ordering::greater));
+  // equal == equivalent for strong_ordering.
+  EXPECT_EQ("(equal)", PrintToString(std::strong_ordering::equivalent));
+  EXPECT_EQ("(equal)", PrintToString(std::strong_ordering::equal));
+
+  EXPECT_EQ("(less)", PrintToString(std::weak_ordering::less));
+  EXPECT_EQ("(greater)", PrintToString(std::weak_ordering::greater));
+  EXPECT_EQ("(equivalent)", PrintToString(std::weak_ordering::equivalent));
+
+  EXPECT_EQ("(less)", PrintToString(std::partial_ordering::less));
+  EXPECT_EQ("(greater)", PrintToString(std::partial_ordering::greater));
+  EXPECT_EQ("(equivalent)", PrintToString(std::partial_ordering::equivalent));
+  EXPECT_EQ("(unordered)", PrintToString(std::partial_ordering::unordered));
+}
+#endif
+
 namespace {
 
 class string_ref;
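Note: with the printer support above, comparison-category values render as "(less)", "(equal)", and so on in assertion output instead of an opaque fallback representation. A short usage sketch (requires C++20's <compare>):

    #include <compare>
    #include "gtest/gtest.h"

    TEST(SpaceshipDemo, ComparesInts) {
      // On failure, the ordering operand now prints as "(less)".
      EXPECT_EQ(1 <=> 2, std::strong_ordering::less);
    }
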
diff --git a/googletest/test/googletest-setuptestsuite-test_.cc b/googletest/test/googletest-setuptestsuite-test_.cc
index d20899f56866..f4c43ccb2fac 100644
--- a/googletest/test/googletest-setuptestsuite-test_.cc
+++ b/googletest/test/googletest-setuptestsuite-test_.cc
@@ -31,14 +31,14 @@
 
 class SetupFailTest : public ::testing::Test {
  protected:
-  static void SetUpTestSuite() { ASSERT_EQ("", "SET_UP_FAIL"); }
+  static void SetUpTestSuite() { ASSERT_STREQ("", "SET_UP_FAIL"); }
 };
 
 TEST_F(SetupFailTest, NoopPassingTest) {}
 
 class TearDownFailTest : public ::testing::Test {
  protected:
-  static void TearDownTestSuite() { ASSERT_EQ("", "TEAR_DOWN_FAIL"); }
+  static void TearDownTestSuite() { ASSERT_STREQ("", "TEAR_DOWN_FAIL"); }
 };
 
 TEST_F(TearDownFailTest, NoopPassingTest) {}
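Note: the ASSERT_EQ to ASSERT_STREQ switch matters because ASSERT_EQ on two C string literals compares const char* pointers, not characters; ASSERT_STREQ compares the contents and prints both strings on failure. For example:

    #include <string>
    #include "gtest/gtest.h"

    TEST(StringAssertDemo, PointerVsContent) {
      const char* a = "hello";
      std::string b = "hello";
      // a == b.c_str() (what ASSERT_EQ would check) is not guaranteed even
      // though the contents match; ASSERT_STREQ compares the characters.
      ASSERT_STREQ(a, b.c_str());
    }
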
diff --git a/googletest/test/gtest_unittest.cc b/googletest/test/gtest_unittest.cc
index 5ded865074df..559d34c013d0 100644
--- a/googletest/test/gtest_unittest.cc
+++ b/googletest/test/gtest_unittest.cc
@@ -2163,7 +2163,7 @@ class UnitTestRecordPropertyTestEnvironment : public Environment {
 };
 
 // This will test property recording outside of any test or test case.
-GTEST_INTERNAL_ATTRIBUTE_MAYBE_UNUSED static Environment* record_property_env =
+[[maybe_unused]] static Environment* record_property_env =
     AddGlobalTestEnvironment(new UnitTestRecordPropertyTestEnvironment);
 
 // This group of tests is for predicate assertions (ASSERT_PRED*, etc)
@@ -2870,6 +2870,8 @@ TEST_F(FloatTest, LargeDiff) {
 // This ensures that no overflow occurs when comparing numbers whose
 // absolute value is very large.
 TEST_F(FloatTest, Infinity) {
+  EXPECT_FLOAT_EQ(values_.infinity, values_.infinity);
+  EXPECT_FLOAT_EQ(-values_.infinity, -values_.infinity);
   EXPECT_FLOAT_EQ(values_.infinity, values_.close_to_infinity);
   EXPECT_FLOAT_EQ(-values_.infinity, -values_.close_to_infinity);
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, -values_.infinity),
@@ -2894,6 +2896,11 @@ TEST_F(FloatTest, NaN) {
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1), "v.nan1");
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2), "v.nan2");
   EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f, v.nan1, 1.0f), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f, v.nan1, v.infinity), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(v.infinity, v.nan1, 1.0f), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(v.infinity, v.nan1, v.infinity),
+                          "v.nan1");
 
   EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity), "v.infinity");
 }
@@ -2917,11 +2924,28 @@ TEST_F(FloatTest, Commutative) {
 
 // Tests EXPECT_NEAR.
 TEST_F(FloatTest, EXPECT_NEAR) {
+  static const FloatTest::TestValues& v = this->values_;
+
   EXPECT_NEAR(-1.0f, -1.1f, 0.2f);
   EXPECT_NEAR(2.0f, 3.0f, 1.0f);
+  EXPECT_NEAR(v.infinity, v.infinity, 0.0f);
+  EXPECT_NEAR(-v.infinity, -v.infinity, 0.0f);
+  EXPECT_NEAR(0.0f, 1.0f, v.infinity);
+  EXPECT_NEAR(v.infinity, -v.infinity, v.infinity);
+  EXPECT_NEAR(-v.infinity, v.infinity, v.infinity);
   EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f, 1.5f, 0.25f),  // NOLINT
                           "The difference between 1.0f and 1.5f is 0.5, "
                           "which exceeds 0.25f");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(v.infinity, -v.infinity, 0.0f),  // NOLINT
+                          "The difference between v.infinity and -v.infinity "
+                          "is inf, which exceeds 0.0f");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(-v.infinity, v.infinity, 0.0f),  // NOLINT
+                          "The difference between -v.infinity and v.infinity "
+                          "is inf, which exceeds 0.0f");
+  EXPECT_NONFATAL_FAILURE(
+      EXPECT_NEAR(v.infinity, v.close_to_infinity, v.further_from_infinity),
+      "The difference between v.infinity and v.close_to_infinity is inf, which "
+      "exceeds v.further_from_infinity");
 }
 
 // Tests ASSERT_NEAR.
@@ -3028,6 +3052,8 @@ TEST_F(DoubleTest, LargeDiff) {
 // This ensures that no overflow occurs when comparing numbers whose
 // absolute value is very large.
 TEST_F(DoubleTest, Infinity) {
+  EXPECT_DOUBLE_EQ(values_.infinity, values_.infinity);
+  EXPECT_DOUBLE_EQ(-values_.infinity, -values_.infinity);
   EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity);
   EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity);
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity),
@@ -3047,6 +3073,12 @@ TEST_F(DoubleTest, NaN) {
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1), "v.nan1");
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan2), "v.nan2");
   EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0, v.nan1, 1.0), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0, v.nan1, v.infinity), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(v.infinity, v.nan1, 1.0), "v.nan1");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(v.infinity, v.nan1, v.infinity),
+                          "v.nan1");
+
 EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity), "v.infinity");
 }
 
@@ -3069,11 +3101,28 @@ TEST_F(DoubleTest, Commutative) {
 
 // Tests EXPECT_NEAR.
 TEST_F(DoubleTest, EXPECT_NEAR) {
+  static const DoubleTest::TestValues& v = this->values_;
+
   EXPECT_NEAR(-1.0, -1.1, 0.2);
   EXPECT_NEAR(2.0, 3.0, 1.0);
+  EXPECT_NEAR(v.infinity, v.infinity, 0.0);
+  EXPECT_NEAR(-v.infinity, -v.infinity, 0.0);
+  EXPECT_NEAR(0.0, 1.0, v.infinity);
+  EXPECT_NEAR(v.infinity, -v.infinity, v.infinity);
+  EXPECT_NEAR(-v.infinity, v.infinity, v.infinity);
   EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0, 1.5, 0.25),  // NOLINT
                           "The difference between 1.0 and 1.5 is 0.5, "
                           "which exceeds 0.25");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(v.infinity, -v.infinity, 0.0),
+                          "The difference between v.infinity and -v.infinity "
+                          "is inf, which exceeds 0.0");
+  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(-v.infinity, v.infinity, 0.0),
+                          "The difference between -v.infinity and v.infinity "
+                          "is inf, which exceeds 0.0");
+  EXPECT_NONFATAL_FAILURE(
+      EXPECT_NEAR(v.infinity, v.close_to_infinity, v.further_from_infinity),
+      "The difference between v.infinity and v.close_to_infinity is inf, which "
+      "exceeds v.further_from_infinity");
   // At this magnitude adjacent doubles are 512.0 apart, so this triggers a
   // slightly different failure reporting path.
   EXPECT_NONFATAL_FAILURE(
@@ -6705,9 +6754,8 @@ TEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) {
 
 // Verifies that StaticAssertTypeEq works in a namespace scope.
 
-GTEST_INTERNAL_ATTRIBUTE_MAYBE_UNUSED static bool dummy1 =
-    StaticAssertTypeEq<bool, bool>();
-GTEST_INTERNAL_ATTRIBUTE_MAYBE_UNUSED static bool dummy2 =
+[[maybe_unused]] static bool dummy1 = StaticAssertTypeEq<bool, bool>();
+[[maybe_unused]] static bool dummy2 =
     StaticAssertTypeEq<const int, const int>();
 
 // Verifies that StaticAssertTypeEq works in a class.
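Note: the infinity cases above pin down a subtlety. EXPECT_NEAR(a, b, tol) conceptually checks fabs(a - b) <= tol, but when a and b are the same infinity the subtraction yields NaN, so equal infinities need an explicit equality short-circuit. A sketch of the semantics these expectations describe (not googletest's actual implementation):

    #include <cmath>

    // Returns true when v1 and v2 are within abs_error of each other,
    // treating equal infinities as near regardless of the tolerance.
    bool Near(double v1, double v2, double abs_error) {
      if (v1 == v2) return true;  // covers +inf/+inf and -inf/-inf
      return std::fabs(v1 - v2) <= abs_error;  // NaN operands always fail
    }
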
diff --git a/googletest/test/gtest_xml_output_unittest.py b/googletest/test/gtest_xml_output_unittest.py
index c3fea2c0a3a1..87a7683a90c6 100755
--- a/googletest/test/gtest_xml_output_unittest.py
+++ b/googletest/test/gtest_xml_output_unittest.py
@@ -29,14 +29,14 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-"""Unit test for the gtest_xml_output module"""
+"""Unit test for the gtest_xml_output module."""
 
 import datetime
 import errno
 import os
 import re
 import sys
-from xml.dom import minidom, Node
+from xml.dom import minidom
 
 from googletest.test import gtest_test_utils
 from googletest.test import gtest_xml_test_utils
@@ -67,7 +67,10 @@ else:
   sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)
 
 EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
-<testsuites tests="26" failures="5" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
+<testsuites tests="28" failures="5" disabled="2" errors="0" time="*" timestamp="*" name="AllTests">
+  <properties>
+    <property name="ad_hoc_property" value="42"/>
+  </properties>
   <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
     <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="53" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
   </testsuite>
@@ -132,64 +135,91 @@ It is good practice to tell why you skip a test.
     </testcase>
   </testsuite>
-  <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye">
-    <testcase name="OneProperty" file="gtest_xml_output_unittest_.cc" line="121" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+  <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
+    <properties>
+      <property name="SetUpTestSuite" value="yes"/>
+      <property name="SetUpTestSuite (with whitespace)" value="yes and yes"/>
+      <property name="TearDownTestSuite" value="aye"/>
+      <property name="TearDownTestSuite (with whitespace)" value="aye and aye"/>
+    </properties>
+    <testcase name="OneProperty" file="gtest_xml_output_unittest_.cc" line="125" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
       <properties>
         <property name="key_1" value="1"/>
       </properties>
     </testcase>
-    <testcase name="IntValuedProperty" file="gtest_xml_output_unittest_.cc" line="125" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+    <testcase name="IntValuedProperty" file="gtest_xml_output_unittest_.cc" line="129" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
       <properties>
         <property name="key_int" value="1"/>
       </properties>
     </testcase>
-    <testcase name="ThreeProperties" file="gtest_xml_output_unittest_.cc" line="129" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+    <testcase name="ThreeProperties" file="gtest_xml_output_unittest_.cc" line="133" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
       <properties>
         <property name="key_1" value="1"/>
         <property name="key_2" value="2"/>
        <property name="key_3" value="3"/>
       </properties>
     </testcase>
-    <testcase name="TwoValuesForOneKeyUsesLastValue" file="gtest_xml_output_unittest_.cc" line="135" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+    <testcase name="TwoValuesForOneKeyUsesLastValue" file="gtest_xml_output_unittest_.cc" line="139" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
       <properties>
         <property name="key_1" value="2"/>
       </properties>
     </testcase>
   </testsuite>
   <testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="RecordProperty" file="gtest_xml_output_unittest_.cc" line="140" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
+    <testcase name="RecordProperty" file="gtest_xml_output_unittest_.cc" line="144" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
       <properties>
         <property name="key" value="1"/>
       </properties>
     </testcase>
-    <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" file="gtest_xml_output_unittest_.cc" line="153" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
+    <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" file="gtest_xml_output_unittest_.cc" line="157" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
       <properties>
         <property name="key_for_utility_int" value="1"/>
       </properties>
     </testcase>
-    <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" file="gtest_xml_output_unittest_.cc" line="157" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
+    <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" file="gtest_xml_output_unittest_.cc" line="161" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest">
       <properties>
         <property name="key_for_utility_string" value="1"/>
       </properties>
     </testcase>
   </testsuite>
+  <testsuite name="SetupFailTest" tests="1" failures="0" disabled="0" skipped="1" errors="0" time="*" timestamp="*">
+    <testcase name="NoopPassingTest" file="gtest_xml_output_unittest_.cc" line="172" status="run" result="skipped" time="*" timestamp="*" classname="SetupFailTest">
+      <skipped message="gtest_xml_output_unittest_.cc:*&#x0A;"><![CDATA[gtest_xml_output_unittest_.cc:*
+]]></skipped>
+    </testcase>
+    <testcase name="" status="run" result="completed" classname="" time="*" timestamp="*">
+      <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A;  1&#x0A;  2%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Expected equality of these values:
+  1
+  2%(stack)s]]></failure>
+    </testcase>
+  </testsuite>
+  <testsuite name="TearDownFailTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
+    <testcase name="NoopPassingTest" file="gtest_xml_output_unittest_.cc" line="179" status="run" result="completed" time="*" timestamp="*" classname="TearDownFailTest"/>
+    <testcase name="" status="run" result="completed" classname="" time="*" timestamp="*">
+      <failure message="gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A;  1&#x0A;  2%(stack_entity)s" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
+Expected equality of these values:
+  1
+  2%(stack)s]]></failure>
+    </testcase>
+  </testsuite>
   <testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="HasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="164" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
-    <testcase name="HasValueParamAttribute/1" file="gtest_xml_output_unittest_.cc" line="164" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
-    <testcase name="AnotherTestThatHasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="165" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
-    <testcase name="AnotherTestThatHasValueParamAttribute/1" file="gtest_xml_output_unittest_.cc" line="165" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+    <testcase name="HasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="184" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+    <testcase name="HasValueParamAttribute/1" file="gtest_xml_output_unittest_.cc" line="184" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+    <testcase name="AnotherTestThatHasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="185" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+    <testcase name="AnotherTestThatHasValueParamAttribute/1" file="gtest_xml_output_unittest_.cc" line="185" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
   </testsuite>
   <testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="173" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/0" />
+    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="193" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/0" />
   </testsuite>
   <testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="173" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/1" />
+    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="193" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/1" />
   </testsuite>
   <testsuite name="Single/TypeParameterizedTestSuite/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="180" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" />
+    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="200" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" />
   </testsuite>
   <testsuite name="Single/TypeParameterizedTestSuite/1" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="180" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/1" />
+    <testcase name="HasTypeParamAttribute" file="gtest_xml_output_unittest_.cc" line="200" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/1" />
   </testsuite>
 </testsuites>""" % {
     'stack': STACK_TRACE_TEMPLATE,
     'stack_entity': STACK_TRACE_ENTITY_TEMPLATE,
@@ -197,8 +227,10 @@ It is good practice to tell why you skip a test.
 }
 
 EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
-<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
-    timestamp="*" name="AllTests" ad_hoc_property="42">
+<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
+  <properties>
+    <property name="ad_hoc_property" value="42"/>
+  </properties>
   <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
     <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="53" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
   </testsuite>
@@ -206,19 +238,28 @@ EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
 </testsuites>"""
 
 EXPECTED_SHARDED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
-<testsuites tests="3" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
+<testsuites tests="3" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
+  <properties>
+    <property name="ad_hoc_property" value="42"/>
+  </properties>
   <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
     <testcase name="Succeeds" file="gtest_xml_output_unittest_.cc" line="53" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/>
   </testsuite>
-  <testsuite name="PropertyRecordingTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye">
-    <testcase name="IntValuedProperty" file="gtest_xml_output_unittest_.cc" line="125" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
+  <testsuite name="PropertyRecordingTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
+    <properties>
+      <property name="SetUpTestSuite" value="yes"/>
+      <property name="SetUpTestSuite (with whitespace)" value="yes and yes"/>
+      <property name="TearDownTestSuite" value="aye"/>
+      <property name="TearDownTestSuite (with whitespace)" value="aye and aye"/>
+    </properties>
+    <testcase name="IntValuedProperty" file="gtest_xml_output_unittest_.cc" line="129" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest">
       <properties>
         <property name="key_int" value="1"/>
       </properties>
     </testcase>
   </testsuite>
-  <testsuite name="Single/ValueParamTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
-    <testcase name="HasValueParamAttribute/0" file="gtest_xml_output_unittest_.cc" line="164" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" />
+  <testsuite name="Single/TypeParameterizedTestSuite/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*">
+    <testcase name="HasTypeParamAttribute" type_param="*" file="gtest_xml_output_unittest_.cc" line="200" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" />
   </testsuite>
 </testsuites>"""
time="*" timestamp="*"> + <testcase name="HasTypeParamAttribute" type_param="*" file="gtest_xml_output_unittest_.cc" line="200" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" /> </testsuite> </testsuites>""" diff --git a/googletest/test/gtest_xml_output_unittest_.cc b/googletest/test/gtest_xml_output_unittest_.cc index 0ab33022e06a..fe196b630b08 100644 --- a/googletest/test/gtest_xml_output_unittest_.cc +++ b/googletest/test/gtest_xml_output_unittest_.cc @@ -112,8 +112,12 @@ TEST(InvalidCharactersTest, InvalidCharactersInMessage) { class PropertyRecordingTest : public Test { public: - static void SetUpTestSuite() { RecordProperty("SetUpTestSuite", "yes"); } + static void SetUpTestSuite() { + RecordProperty("SetUpTestSuite (with whitespace)", "yes and yes"); + RecordProperty("SetUpTestSuite", "yes"); + } static void TearDownTestSuite() { + RecordProperty("TearDownTestSuite (with whitespace)", "aye and aye"); RecordProperty("TearDownTestSuite", "aye"); } }; @@ -158,6 +162,22 @@ TEST(NoFixtureTest, ExternalUtilityThatCallsRecordStringValuedProperty) { ExternalUtilityThatCallsRecordProperty("key_for_utility_string", "1"); } +// Ensures that SetUpTestSuite and TearDownTestSuite failures are reported in +// the XML output. +class SetupFailTest : public ::testing::Test { + protected: + static void SetUpTestSuite() { ASSERT_EQ(1, 2); } +}; + +TEST_F(SetupFailTest, NoopPassingTest) {} + +class TearDownFailTest : public ::testing::Test { + protected: + static void TearDownTestSuite() { ASSERT_EQ(1, 2); } +}; + +TEST_F(TearDownFailTest, NoopPassingTest) {} + // Verifies that the test parameter value is output in the 'value_param' // XML attribute for value-parameterized tests. class ValueParamTest : public TestWithParam<int> {}; |