Index: head/contrib/googletest/googletest/CMakeLists.txt
===================================================================
--- head/contrib/googletest/googletest/CMakeLists.txt	(revision 345769)
+++ head/contrib/googletest/googletest/CMakeLists.txt	(revision 345770)
@@ -1,332 +1,333 @@
########################################################################
# CMake build script for Google Test.
#
# To run the tests for Google Test itself on Linux, use 'make test' or
# ctest.  You can select which tests to run using 'ctest -R regex'.
# For more options, run 'ctest --help'.

# When other libraries are using a shared version of runtime libraries,
# Google Test also has to use one.
option(
  gtest_force_shared_crt
  "Use shared (DLL) run-time lib even when Google Test is built as static lib."
  OFF)

option(gtest_build_tests "Build all of gtest's own tests." OFF)

option(gtest_build_samples "Build gtest's sample programs." OFF)

option(gtest_disable_pthreads "Disable uses of pthreads in gtest." OFF)

option(
  gtest_hide_internal_symbols
  "Build gtest with internal symbols hidden in shared libraries."
  OFF)

# Defines pre_project_set_up_hermetic_build() and set_up_hermetic_build().
include(cmake/hermetic_build.cmake OPTIONAL)

if (COMMAND pre_project_set_up_hermetic_build)
  pre_project_set_up_hermetic_build()
endif()

########################################################################
#
# Project-wide settings

# Name of the project.
#
# CMake files in this project can refer to the root source directory
# as ${gtest_SOURCE_DIR} and to the root binary directory as
# ${gtest_BINARY_DIR}.
# Language "C" is required for find_package(Threads).
if (CMAKE_VERSION VERSION_LESS 3.0)
  project(gtest CXX C)
else()
  cmake_policy(SET CMP0048 NEW)
  project(gtest VERSION ${GOOGLETEST_VERSION} LANGUAGES CXX C)
endif()
cmake_minimum_required(VERSION 2.6.4)

if (POLICY CMP0063) # Visibility
  cmake_policy(SET CMP0063 NEW)
endif (POLICY CMP0063)

if (COMMAND set_up_hermetic_build)
  set_up_hermetic_build()
endif()

# These commands only run if this is the main project
if(CMAKE_PROJECT_NAME STREQUAL "gtest" OR CMAKE_PROJECT_NAME STREQUAL "googletest-distribution")
  # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to
  # make it prominent in the GUI.
  option(BUILD_SHARED_LIBS "Build shared libraries (DLLs)." OFF)
else()
  mark_as_advanced(
    gtest_force_shared_crt
    gtest_build_tests
    gtest_build_samples
    gtest_disable_pthreads
    gtest_hide_internal_symbols)
endif()

if (gtest_hide_internal_symbols)
  set(CMAKE_CXX_VISIBILITY_PRESET hidden)
  set(CMAKE_VISIBILITY_INLINES_HIDDEN 1)
endif()

# Define helper functions and macros used by Google Test.
include(cmake/internal_utils.cmake)

config_compiler_and_linker()  # Defined in internal_utils.cmake.

# Create the CMake package file descriptors.
if (INSTALL_GTEST)
  include(CMakePackageConfigHelpers)
  set(cmake_package_name GTest)
  set(targets_export_name ${cmake_package_name}Targets CACHE INTERNAL "")
  set(generated_dir "${CMAKE_CURRENT_BINARY_DIR}/generated" CACHE INTERNAL "")
  set(cmake_files_install_dir "${CMAKE_INSTALL_LIBDIR}/cmake/${cmake_package_name}")
  set(version_file "${generated_dir}/${cmake_package_name}ConfigVersion.cmake")
  write_basic_package_version_file(${version_file} COMPATIBILITY AnyNewerVersion)
  install(EXPORT ${targets_export_name}
    NAMESPACE ${cmake_package_name}::
    DESTINATION ${cmake_files_install_dir})
  set(config_file "${generated_dir}/${cmake_package_name}Config.cmake")
  configure_package_config_file("${gtest_SOURCE_DIR}/cmake/Config.cmake.in"
    "${config_file}" INSTALL_DESTINATION ${cmake_files_install_dir})
  install(FILES ${version_file} ${config_file}
    DESTINATION ${cmake_files_install_dir})
endif()

# Where Google Test's .h files can be found.
set(gtest_build_include_dirs
  "${gtest_SOURCE_DIR}/include"
  "${gtest_SOURCE_DIR}")
include_directories(${gtest_build_include_dirs})

# Summary of tuple support for Microsoft Visual Studio:
# Compiler    version(MS)  version(cmake)  Support
# ----------  -----------  --------------  -----------------------------
# <= VS 2010  <= 10        <= 1600         Use Google Tests's own tuple.
# VS 2012     11           1700            std::tr1::tuple + _VARIADIC_MAX=10
# VS 2013     12           1800            std::tr1::tuple
# VS 2015     14           1900            std::tuple
# VS 2017     15           >= 1910         std::tuple
if (MSVC AND MSVC_VERSION EQUAL 1700)
  add_definitions(/D _VARIADIC_MAX=10)
endif()

########################################################################
#
# Defines the gtest & gtest_main libraries.  User tests should link
# with one of them.

# Google Test libraries.  We build them using more strict warnings than what
# are used for other targets, to ensure that gtest can be compiled by a user
# aggressive about warnings.
cxx_library(gtest "${cxx_strict}" src/gtest-all.cc)
cxx_library(gtest_main "${cxx_strict}" src/gtest_main.cc)

# If the CMake version supports it, attach header directory information
# to the targets for when we are part of a parent build (ie being pulled
# in via add_subdirectory() rather than being a standalone build).
if (DEFINED CMAKE_VERSION AND NOT "${CMAKE_VERSION}" VERSION_LESS "2.8.11")
  target_include_directories(gtest SYSTEM INTERFACE
    "$<BUILD_INTERFACE:${gtest_build_include_dirs}>"
    "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>")
  target_include_directories(gtest_main SYSTEM INTERFACE
    "$<BUILD_INTERFACE:${gtest_build_include_dirs}>"
    "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>")
endif()
target_link_libraries(gtest_main PUBLIC gtest)

########################################################################
#
# Install rules
install_project(gtest gtest_main)

########################################################################
#
# Samples on how to link user tests with gtest or gtest_main.
#
# They are not built by default.  To build them, set the
# gtest_build_samples option to ON.  You can do it by running ccmake
# or specifying the -Dgtest_build_samples=ON flag when running cmake.
if (gtest_build_samples)
  cxx_executable(sample1_unittest samples gtest_main samples/sample1.cc)
  cxx_executable(sample2_unittest samples gtest_main samples/sample2.cc)
  cxx_executable(sample3_unittest samples gtest_main)
  cxx_executable(sample4_unittest samples gtest_main samples/sample4.cc)
  cxx_executable(sample5_unittest samples gtest_main samples/sample1.cc)
  cxx_executable(sample6_unittest samples gtest_main)
  cxx_executable(sample7_unittest samples gtest_main)
  cxx_executable(sample8_unittest samples gtest_main)
  cxx_executable(sample9_unittest samples gtest)
  cxx_executable(sample10_unittest samples gtest)
endif()

########################################################################
#
# Google Test's own tests.
#
# You can skip this section if you aren't interested in testing
# Google Test itself.
#
# The tests are not built by default.  To build them, set the
# gtest_build_tests option to ON.  You can do it by running ccmake
# or specifying the -Dgtest_build_tests=ON flag when running cmake.
if (gtest_build_tests)
  # This must be set in the root directory for the tests to be run by
  # 'make test' or ctest.
  enable_testing()

  ############################################################
  # C++ tests built with standard compiler flags.

  cxx_test(googletest-death-test-test gtest_main)
  cxx_test(gtest_environment_test gtest)
  cxx_test(googletest-filepath-test gtest_main)
  cxx_test(googletest-linked-ptr-test gtest_main)
  cxx_test(googletest-listener-test gtest_main)
  cxx_test(gtest_main_unittest gtest_main)
  cxx_test(googletest-message-test gtest_main)
  cxx_test(gtest_no_test_unittest gtest)
  cxx_test(googletest-options-test gtest_main)
  cxx_test(googletest-param-test-test gtest
    test/googletest-param-test2-test.cc)
  cxx_test(googletest-port-test gtest_main)
  cxx_test(gtest_pred_impl_unittest gtest_main)
  cxx_test(gtest_premature_exit_test gtest
    test/gtest_premature_exit_test.cc)
  cxx_test(googletest-printers-test gtest_main)
  cxx_test(gtest_prod_test gtest_main
    test/production.cc)
  cxx_test(gtest_repeat_test gtest)
  cxx_test(gtest_sole_header_test gtest_main)
  cxx_test(gtest_stress_test gtest)
  cxx_test(googletest-test-part-test gtest_main)
  cxx_test(gtest_throw_on_failure_ex_test gtest)
  cxx_test(gtest-typed-test_test gtest_main
    test/gtest-typed-test2_test.cc)
  cxx_test(gtest_unittest gtest_main)
  cxx_test(gtest-unittest-api_test gtest)
+  cxx_test(gtest_skip_in_environment_setup_test gtest_main)
  cxx_test(gtest_skip_test gtest_main)

  ############################################################
  # C++ tests built with non-standard compiler flags.

  # MSVC 7.1 does not support STL with exceptions disabled.
  if (NOT MSVC OR MSVC_VERSION GREATER 1310)
    cxx_library(gtest_no_exception "${cxx_no_exception}"
      src/gtest-all.cc)
    cxx_library(gtest_main_no_exception "${cxx_no_exception}"
      src/gtest-all.cc src/gtest_main.cc)
  endif()
  cxx_library(gtest_main_no_rtti "${cxx_no_rtti}"
    src/gtest-all.cc src/gtest_main.cc)

  cxx_test_with_flags(gtest-death-test_ex_nocatch_test
    "${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=0"
    gtest test/googletest-death-test_ex_test.cc)
  cxx_test_with_flags(gtest-death-test_ex_catch_test
    "${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=1"
    gtest test/googletest-death-test_ex_test.cc)

  cxx_test_with_flags(gtest_no_rtti_unittest "${cxx_no_rtti}"
    gtest_main_no_rtti test/gtest_unittest.cc)

  cxx_shared_library(gtest_dll "${cxx_default}"
    src/gtest-all.cc src/gtest_main.cc)

  cxx_executable_with_flags(gtest_dll_test_ "${cxx_default}"
    gtest_dll test/gtest_all_test.cc)
  set_target_properties(gtest_dll_test_
    PROPERTIES
    COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")

  if (NOT MSVC OR MSVC_VERSION LESS 1600)  # 1600 is Visual Studio 2010.
    # Visual Studio 2010, 2012, and 2013 define symbols in std::tr1 that
    # conflict with our own definitions. Therefore using our own tuple does not
    # work on those compilers.
    cxx_library(gtest_main_use_own_tuple "${cxx_use_own_tuple}"
      src/gtest-all.cc src/gtest_main.cc)

    cxx_test_with_flags(googletest-tuple-test "${cxx_use_own_tuple}"
      gtest_main_use_own_tuple test/googletest-tuple-test.cc)

    cxx_test_with_flags(gtest_use_own_tuple_test "${cxx_use_own_tuple}"
      gtest_main_use_own_tuple
      test/googletest-param-test-test.cc test/googletest-param-test2-test.cc)
  endif()

  ############################################################
  # Python tests.

  cxx_executable(googletest-break-on-failure-unittest_ test gtest)
  py_test(googletest-break-on-failure-unittest)

  # Visual Studio .NET 2003 does not support STL with exceptions disabled.
  if (NOT MSVC OR MSVC_VERSION GREATER 1310)  # 1310 is Visual Studio .NET 2003
    cxx_executable_with_flags(
      googletest-catch-exceptions-no-ex-test_
      "${cxx_no_exception}"
      gtest_main_no_exception
      test/googletest-catch-exceptions-test_.cc)
  endif()

  cxx_executable_with_flags(
    googletest-catch-exceptions-ex-test_
    "${cxx_exception}"
    gtest_main
    test/googletest-catch-exceptions-test_.cc)
  py_test(googletest-catch-exceptions-test)

  cxx_executable(googletest-color-test_ test gtest)
  py_test(googletest-color-test)

  cxx_executable(googletest-env-var-test_ test gtest)
  py_test(googletest-env-var-test)

  cxx_executable(googletest-filter-unittest_ test gtest)
  py_test(googletest-filter-unittest)

  cxx_executable(gtest_help_test_ test gtest_main)
  py_test(gtest_help_test)

  cxx_executable(googletest-list-tests-unittest_ test gtest)
  py_test(googletest-list-tests-unittest)

  cxx_executable(googletest-output-test_ test gtest)
  py_test(googletest-output-test --no_stacktrace_support)

  cxx_executable(googletest-shuffle-test_ test gtest)
  py_test(googletest-shuffle-test)

  # MSVC 7.1 does not support STL with exceptions disabled.
  if (NOT MSVC OR MSVC_VERSION GREATER 1310)
    cxx_executable(googletest-throw-on-failure-test_ test gtest_no_exception)
    set_target_properties(googletest-throw-on-failure-test_
      PROPERTIES
      COMPILE_FLAGS "${cxx_no_exception}")
    py_test(googletest-throw-on-failure-test)
  endif()

  cxx_executable(googletest-uninitialized-test_ test gtest)
  py_test(googletest-uninitialized-test)

  cxx_executable(gtest_xml_outfile1_test_ test gtest_main)
  cxx_executable(gtest_xml_outfile2_test_ test gtest_main)
  py_test(gtest_xml_outfiles_test)
  py_test(googletest-json-outfiles-test)

  cxx_executable(gtest_xml_output_unittest_ test gtest)
  py_test(gtest_xml_output_unittest --no_stacktrace_support)
  py_test(googletest-json-output-unittest --no_stacktrace_support)
endif()
Index: head/contrib/googletest/googletest/Makefile.am
===================================================================
--- head/contrib/googletest/googletest/Makefile.am	(revision 345769)
+++ head/contrib/googletest/googletest/Makefile.am	(revision 345770)
@@ -1,339 +1,345 @@
# Automake file

ACLOCAL_AMFLAGS = -I m4

# Nonstandard package files for distribution
EXTRA_DIST = \
  CHANGES \
  CONTRIBUTORS \
  LICENSE \
  include/gtest/gtest-param-test.h.pump \
  include/gtest/internal/gtest-param-util-generated.h.pump \
  include/gtest/internal/gtest-tuple.h.pump \
  include/gtest/internal/gtest-type-util.h.pump \
  make/Makefile \
  scripts/fuse_gtest_files.py \
  scripts/gen_gtest_pred_impl.py \
  scripts/pump.py \
  scripts/test/Makefile

# gtest source files that we don't compile directly.  They are
# #included by gtest-all.cc.
GTEST_SRC = \
  src/gtest-death-test.cc \
  src/gtest-filepath.cc \
  src/gtest-internal-inl.h \
  src/gtest-port.cc \
  src/gtest-printers.cc \
  src/gtest-test-part.cc \
  src/gtest-typed-test.cc \
  src/gtest.cc

EXTRA_DIST += $(GTEST_SRC)

# Sample files that we don't compile.
EXTRA_DIST += \
  samples/prime_tables.h \
  samples/sample1_unittest.cc \
  samples/sample2_unittest.cc \
  samples/sample3_unittest.cc \
  samples/sample4_unittest.cc \
  samples/sample5_unittest.cc \
  samples/sample6_unittest.cc \
  samples/sample7_unittest.cc \
  samples/sample8_unittest.cc \
  samples/sample9_unittest.cc

# C++ test files that we don't compile directly.
EXTRA_DIST += \
  test/gtest-death-test_ex_test.cc \
  test/gtest-death-test_test.cc \
  test/gtest-filepath_test.cc \
  test/gtest-linked_ptr_test.cc \
  test/gtest-listener_test.cc \
  test/gtest-message_test.cc \
  test/gtest-options_test.cc \
  test/googletest-param-test2-test.cc \
  test/googletest-param-test-test.cc \
  test/gtest-param-test_test.h \
  test/gtest-port_test.cc \
  test/gtest_premature_exit_test.cc \
  test/gtest-printers_test.cc \
  test/gtest-test-part_test.cc \
  test/googletest-tuple-test.cc \
  test/gtest-typed-test2_test.cc \
  test/gtest-typed-test_test.cc \
  test/gtest-typed-test_test.h \
  test/gtest-unittest-api_test.cc \
  test/googletest-break-on-failure-unittest_.cc \
  test/googletest-catch-exceptions-test_.cc \
  test/googletest-color-test_.cc \
  test/googletest-env-var-test_.cc \
  test/gtest_environment_test.cc \
  test/googletest-filter-unittest_.cc \
  test/gtest_help_test_.cc \
  test/googletest-list-tests-unittest_.cc \
  test/gtest_main_unittest.cc \
  test/gtest_no_test_unittest.cc \
  test/googletest-output-test_.cc \
  test/gtest_pred_impl_unittest.cc \
  test/gtest_prod_test.cc \
  test/gtest_repeat_test.cc \
  test/googletest-shuffle-test_.cc \
  test/gtest_sole_header_test.cc \
  test/gtest_stress_test.cc \
  test/gtest_throw_on_failure_ex_test.cc \
  test/googletest-throw-on-failure-test_.cc \
  test/googletest-uninitialized-test_.cc \
  test/gtest_unittest.cc \
  test/gtest_xml_outfile1_test_.cc \
  test/gtest_xml_outfile2_test_.cc \
  test/gtest_xml_output_unittest_.cc \
  test/production.cc \
  test/production.h

# Python tests that we don't run.
EXTRA_DIST += \
  test/googletest-break-on-failure-unittest.py \
  test/googletest-catch-exceptions-test.py \
  test/googletest-color-test.py \
  test/googletest-env-var-test.py \
  test/googletest-filter-unittest.py \
  test/gtest_help_test.py \
  test/googletest-list-tests-unittest.py \
  test/googletest-output-test.py \
  test/googletest-output-test_golden_lin.txt \
  test/googletest-shuffle-test.py \
  test/gtest_test_utils.py \
  test/googletest-throw-on-failure-test.py \
  test/googletest-uninitialized-test.py \
  test/gtest_xml_outfiles_test.py \
  test/gtest_xml_output_unittest.py \
  test/gtest_xml_test_utils.py

# CMake script
EXTRA_DIST += \
  CMakeLists.txt \
  cmake/internal_utils.cmake

# MSVC project files
EXTRA_DIST += \
  msvc/2010/gtest-md.sln \
  msvc/2010/gtest-md.vcxproj \
  msvc/2010/gtest.sln \
  msvc/2010/gtest.vcxproj \
  msvc/2010/gtest_main-md.vcxproj \
  msvc/2010/gtest_main.vcxproj \
  msvc/2010/gtest_prod_test-md.vcxproj \
  msvc/2010/gtest_prod_test.vcxproj \
  msvc/2010/gtest_unittest-md.vcxproj \
  msvc/2010/gtest_unittest.vcxproj

# xcode project files
EXTRA_DIST += \
  xcode/Config/DebugProject.xcconfig \
  xcode/Config/FrameworkTarget.xcconfig \
  xcode/Config/General.xcconfig \
  xcode/Config/ReleaseProject.xcconfig \
  xcode/Config/StaticLibraryTarget.xcconfig \
  xcode/Config/TestTarget.xcconfig \
  xcode/Resources/Info.plist \
  xcode/Scripts/runtests.sh \
  xcode/Scripts/versiongenerate.py \
  xcode/gtest.xcodeproj/project.pbxproj

# xcode sample files
EXTRA_DIST += \
  xcode/Samples/FrameworkSample/Info.plist \
  xcode/Samples/FrameworkSample/WidgetFramework.xcodeproj/project.pbxproj \
  xcode/Samples/FrameworkSample/runtests.sh \
  xcode/Samples/FrameworkSample/widget.cc \
  xcode/Samples/FrameworkSample/widget.h \
  xcode/Samples/FrameworkSample/widget_test.cc

# C++Builder project files
EXTRA_DIST += \
  codegear/gtest.cbproj \
  codegear/gtest.groupproj \
  codegear/gtest_all.cc \
  codegear/gtest_link.cc \
  codegear/gtest_main.cbproj \
  codegear/gtest_unittest.cbproj

# Distribute and install M4 macro
m4datadir = $(datadir)/aclocal
m4data_DATA = m4/gtest.m4
EXTRA_DIST += $(m4data_DATA)

# We define the global AM_CPPFLAGS as everything we compile includes from these
# directories.
AM_CPPFLAGS = -I$(srcdir) -I$(srcdir)/include

# Modifies compiler and linker flags for pthreads compatibility.
if HAVE_PTHREADS
  AM_CXXFLAGS = @PTHREAD_CFLAGS@ -DGTEST_HAS_PTHREAD=1
  AM_LIBS = @PTHREAD_LIBS@
else
  AM_CXXFLAGS = -DGTEST_HAS_PTHREAD=0
endif

# Build rules for libraries.
lib_LTLIBRARIES = lib/libgtest.la lib/libgtest_main.la

lib_libgtest_la_SOURCES = src/gtest-all.cc

pkginclude_HEADERS = \
  include/gtest/gtest-death-test.h \
  include/gtest/gtest-message.h \
  include/gtest/gtest-param-test.h \
  include/gtest/gtest-printers.h \
  include/gtest/gtest-spi.h \
  include/gtest/gtest-test-part.h \
  include/gtest/gtest-typed-test.h \
  include/gtest/gtest.h \
  include/gtest/gtest_pred_impl.h \
  include/gtest/gtest_prod.h

pkginclude_internaldir = $(pkgincludedir)/internal
pkginclude_internal_HEADERS = \
  include/gtest/internal/gtest-death-test-internal.h \
  include/gtest/internal/gtest-filepath.h \
  include/gtest/internal/gtest-internal.h \
  include/gtest/internal/gtest-linked_ptr.h \
  include/gtest/internal/gtest-param-util-generated.h \
  include/gtest/internal/gtest-param-util.h \
  include/gtest/internal/gtest-port.h \
  include/gtest/internal/gtest-port-arch.h \
  include/gtest/internal/gtest-string.h \
  include/gtest/internal/gtest-tuple.h \
  include/gtest/internal/gtest-type-util.h \
  include/gtest/internal/custom/gtest.h \
  include/gtest/internal/custom/gtest-port.h \
  include/gtest/internal/custom/gtest-printers.h

lib_libgtest_main_la_SOURCES = src/gtest_main.cc
lib_libgtest_main_la_LIBADD = lib/libgtest.la

# Build rules for samples and tests.  Automake's naming for some of
# these variables isn't terribly obvious, so this is a brief
# reference:
#
# TESTS -- Programs run automatically by "make check"
# check_PROGRAMS -- Programs built by "make check" but not necessarily run
TESTS=
TESTS_ENVIRONMENT = GTEST_SOURCE_DIR="$(srcdir)/test" \
                    GTEST_BUILD_DIR="$(top_builddir)/test"
check_PROGRAMS=

# A simple sample on using gtest.
TESTS += samples/sample1_unittest \
         samples/sample2_unittest \
         samples/sample3_unittest \
         samples/sample4_unittest \
         samples/sample5_unittest \
         samples/sample6_unittest \
         samples/sample7_unittest \
         samples/sample8_unittest \
         samples/sample9_unittest \
         samples/sample10_unittest
check_PROGRAMS += samples/sample1_unittest \
                  samples/sample2_unittest \
                  samples/sample3_unittest \
                  samples/sample4_unittest \
                  samples/sample5_unittest \
                  samples/sample6_unittest \
                  samples/sample7_unittest \
                  samples/sample8_unittest \
                  samples/sample9_unittest \
                  samples/sample10_unittest

samples_sample1_unittest_SOURCES = samples/sample1_unittest.cc samples/sample1.cc
samples_sample1_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample2_unittest_SOURCES = samples/sample2_unittest.cc samples/sample2.cc
samples_sample2_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample3_unittest_SOURCES = samples/sample3_unittest.cc
samples_sample3_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample4_unittest_SOURCES = samples/sample4_unittest.cc samples/sample4.cc
samples_sample4_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample5_unittest_SOURCES = samples/sample5_unittest.cc samples/sample1.cc
samples_sample5_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample6_unittest_SOURCES = samples/sample6_unittest.cc
samples_sample6_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample7_unittest_SOURCES = samples/sample7_unittest.cc
samples_sample7_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la
samples_sample8_unittest_SOURCES = samples/sample8_unittest.cc
samples_sample8_unittest_LDADD = lib/libgtest_main.la \
                                 lib/libgtest.la

# Also verify that libgtest works by itself.
samples_sample9_unittest_SOURCES = samples/sample9_unittest.cc
samples_sample9_unittest_LDADD = lib/libgtest.la
samples_sample10_unittest_SOURCES = samples/sample10_unittest.cc
samples_sample10_unittest_LDADD = lib/libgtest.la

# This tests most constructs of gtest and verifies that libgtest_main
# and libgtest work.
TESTS += test/gtest_all_test
check_PROGRAMS += test/gtest_all_test
test_gtest_all_test_SOURCES = test/gtest_all_test.cc
test_gtest_all_test_LDADD = lib/libgtest_main.la \
                            lib/libgtest.la

+TESTS += test/gtest_skip_in_environment_setup_test
+check_PROGRAMS += test/gtest_skip_in_environment_setup_test
+test_gtest_skip_in_environment_setup_test_SOURCES = test/gtest_skip_in_environment_setup_test.cc
+test_gtest_skip_in_environment_setup_test_LDADD= lib/libgtest_main.la \
+                                                 lib/libgtest.la
+
# Tests that fused gtest files compile and work.
FUSED_GTEST_SRC = \
  fused-src/gtest/gtest-all.cc \
  fused-src/gtest/gtest.h \
  fused-src/gtest/gtest_main.cc

if HAVE_PYTHON
TESTS += test/fused_gtest_test
check_PROGRAMS += test/fused_gtest_test
test_fused_gtest_test_SOURCES = $(FUSED_GTEST_SRC) \
                                samples/sample1.cc samples/sample1_unittest.cc
test_fused_gtest_test_CPPFLAGS = -I"$(srcdir)/fused-src"

# Build rules for putting fused Google Test files into the distribution
# package.  The user can also create those files by manually running
# scripts/fuse_gtest_files.py.
$(test_fused_gtest_test_SOURCES): fused-gtest

fused-gtest: $(pkginclude_HEADERS) $(pkginclude_internal_HEADERS) \
             $(GTEST_SRC) src/gtest-all.cc src/gtest_main.cc \
             scripts/fuse_gtest_files.py
	mkdir -p "$(srcdir)/fused-src"
	chmod -R u+w "$(srcdir)/fused-src"
	rm -f "$(srcdir)/fused-src/gtest/gtest-all.cc"
	rm -f "$(srcdir)/fused-src/gtest/gtest.h"
	"$(srcdir)/scripts/fuse_gtest_files.py" "$(srcdir)/fused-src"
	cp -f "$(srcdir)/src/gtest_main.cc" "$(srcdir)/fused-src/gtest/"

maintainer-clean-local:
	rm -rf "$(srcdir)/fused-src"
endif

# Death tests may produce core dumps in the build directory.  In case
# this happens, clean them to keep distcleancheck happy.
CLEANFILES = core

# Disables 'make install' as installing a compiled version of Google
# Test can lead to undefined behavior due to violation of the
# One-Definition Rule.
install-exec-local:
	echo "'make install' is dangerous and not supported.  Instead, see README for how to integrate Google Test into your build system."
	false

install-data-local:
	echo "'make install' is dangerous and not supported.  Instead, see README for how to integrate Google Test into your build system."
	false
Index: head/contrib/googletest/googletest/docs/advanced.md
===================================================================
--- head/contrib/googletest/googletest/docs/advanced.md	(revision 345769)
+++ head/contrib/googletest/googletest/docs/advanced.md	(revision 345770)
@@ -1,2520 +1,2522 @@
# Advanced googletest Topics

## Introduction

Now that you have read the [googletest Primer](primer.md) and learned how to
write tests using googletest, it's time to learn some new tricks. This document
will show you more assertions as well as how to construct complex failure
messages, propagate fatal failures, reuse and speed up your test fixtures, and
use various flags with your tests.

## More Assertions

This section covers some less frequently used, but still significant,
assertions.

### Explicit Success and Failure

These three assertions do not actually test a value or expression. Instead,
they generate a success or failure directly. Like the macros that actually
perform a test, you may stream a custom failure message into them.

```c++
SUCCEED();
```

Generates a success. This does **NOT** make the overall test succeed. A test is
considered successful only if none of its assertions fail during its execution.

NOTE: `SUCCEED()` is purely documentary and currently doesn't generate any
user-visible output. However, we may add `SUCCEED()` messages to googletest's
output in the future.

```c++
FAIL();
ADD_FAILURE();
ADD_FAILURE_AT("file_path", line_number);
```

`FAIL()` generates a fatal failure, while `ADD_FAILURE()` and `ADD_FAILURE_AT()`
generate a nonfatal failure. These are useful when control flow, rather than a
Boolean expression, determines the test's success or failure. For example, you
might want to write something like:

```c++
switch(expression) {
  case 1:
    ... some checks ...
  case 2:
    ... some other checks ...
  default:
    FAIL() << "We shouldn't get here.";
}
```

NOTE: you can only use `FAIL()` in functions that return `void`. See the
[Assertion Placement section](#assertion-placement) for more information.

**Availability**: Linux, Windows, Mac.
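As an illustration, here is a minimal sketch of pointing a failure at the
caller's location with `ADD_FAILURE_AT()`; the `ExpectPositive()` helper and
its call sites are hypothetical, not part of googletest:

```c++
#include "gtest/gtest.h"

// Hypothetical helper: on failure, blame the caller's file/line rather
// than this function, so the report points at the offending call site.
void ExpectPositive(int value, const char* file, int line) {
  if (value <= 0) {
    ADD_FAILURE_AT(file, line) << "Expected a positive value, got " << value;
  }
}

TEST(HelperTest, ReportsAtCallSite) {
  ExpectPositive(42, __FILE__, __LINE__);  // passes
  ExpectPositive(-1, __FILE__, __LINE__);  // fails, attributed to this line
}
```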
### Exception Assertions

These are for verifying that a piece of code throws (or does not throw) an
exception of the given type:

Fatal assertion                             | Nonfatal assertion                          | Verifies
------------------------------------------- | ------------------------------------------- | --------
`ASSERT_THROW(statement, exception_type);`  | `EXPECT_THROW(statement, exception_type);`  | `statement` throws an exception of the given type
`ASSERT_ANY_THROW(statement);`              | `EXPECT_ANY_THROW(statement);`              | `statement` throws an exception of any type
`ASSERT_NO_THROW(statement);`               | `EXPECT_NO_THROW(statement);`               | `statement` doesn't throw any exception

Examples:

```c++
ASSERT_THROW(Foo(5), bar_exception);

EXPECT_NO_THROW({
  int n = 5;
  Bar(&n);
});
```

**Availability**: Linux, Windows, Mac; requires exceptions to be enabled in the
build environment (note that `google3` **disables** exceptions).

### Predicate Assertions for Better Error Messages

Even though googletest has a rich set of assertions, they can never be
complete, as it's impossible (and not a good idea) to anticipate all scenarios
a user might run into. Therefore, sometimes a user has to use `EXPECT_TRUE()`
to check a complex expression, for lack of a better macro. This has the problem
of not showing you the values of the parts of the expression, making it hard to
understand what went wrong. As a workaround, some users choose to construct the
failure message by themselves, streaming it into `EXPECT_TRUE()`. However, this
is awkward especially when the expression has side-effects or is expensive to
evaluate.

googletest gives you three different options to solve this problem:

#### Using an Existing Boolean Function

If you already have a function or functor that returns `bool` (or a type that
can be implicitly converted to `bool`), you can use it in a *predicate
assertion* to get the function arguments printed for free:

| Fatal assertion                    | Nonfatal assertion                 | Verifies                    |
| ---------------------------------- | ---------------------------------- | --------------------------- |
| `ASSERT_PRED1(pred1, val1);`       | `EXPECT_PRED1(pred1, val1);`       | `pred1(val1)` is true       |
| `ASSERT_PRED2(pred2, val1, val2);` | `EXPECT_PRED2(pred2, val1, val2);` | `pred2(val1, val2)` is true |
| `...`                              | `...`                              | ...                         |

In the above, `predn` is an `n`-ary predicate function or functor, where
`val1`, `val2`, ..., and `valn` are its arguments. The assertion succeeds if
the predicate returns `true` when applied to the given arguments, and fails
otherwise. When the assertion fails, it prints the value of each argument. In
either case, the arguments are evaluated exactly once.

Here's an example. Given

```c++
// Returns true if m and n have no common divisors except 1.
bool MutuallyPrime(int m, int n) { ... }

const int a = 3;
const int b = 4;
const int c = 10;
```

the assertion

```c++
EXPECT_PRED2(MutuallyPrime, a, b);
```

will succeed, while the assertion

```c++
EXPECT_PRED2(MutuallyPrime, b, c);
```

will fail with the message

```none
MutuallyPrime(b, c) is false, where
b is 4
c is 10
```

> NOTE:
>
> 1.  If you see a compiler error "no matching function to call" when using
>     `ASSERT_PRED*` or `EXPECT_PRED*`, please see
>     [this](faq.md#OverloadedPredicate) for how to resolve it.
> 1.  Currently we only provide predicate assertions of arity <= 5. If you need
>     a higher-arity assertion, let
>     [us](https://github.com/google/googletest/issues) know.

**Availability**: Linux, Windows, Mac.
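To make the example above self-contained, here is one possible implementation
of the elided `MutuallyPrime()` body (an assumption, based on Euclid's
algorithm) together with the failing assertion:

```c++
#include "gtest/gtest.h"

// A sketch of the predicate whose body is elided above (an assumption):
// m and n are mutually prime iff gcd(|m|, |n|) == 1.
bool MutuallyPrime(int m, int n) {
  while (n != 0) {  // Euclid's algorithm
    const int r = m % n;
    m = n;
    n = r;
  }
  return m == 1 || m == -1;
}

TEST(PredicateTest, PrintsArgumentsOnFailure) {
  const int b = 4;
  const int c = 10;
  // Fails, printing the values of b and c as shown above.
  EXPECT_PRED2(MutuallyPrime, b, c);
}
```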
#### Using a Function That Returns an AssertionResult

While `EXPECT_PRED*()` and friends are handy for a quick job, the syntax is not
satisfactory: you have to use different macros for different arities, and it
feels more like Lisp than C++. The `::testing::AssertionResult` class solves
this problem.

An `AssertionResult` object represents the result of an assertion (whether it's
a success or a failure, and an associated message). You can create an
`AssertionResult` using one of these factory functions:

```c++
namespace testing {

// Returns an AssertionResult object to indicate that an assertion has
// succeeded.
AssertionResult AssertionSuccess();

// Returns an AssertionResult object to indicate that an assertion has
// failed.
AssertionResult AssertionFailure();

}
```

You can then use the `<<` operator to stream messages to the `AssertionResult`
object.

To provide more readable messages in Boolean assertions (e.g. `EXPECT_TRUE()`),
write a predicate function that returns `AssertionResult` instead of `bool`.
For example, if you define `IsEven()` as:

```c++
::testing::AssertionResult IsEven(int n) {
  if ((n % 2) == 0)
    return ::testing::AssertionSuccess();
  else
    return ::testing::AssertionFailure() << n << " is odd";
}
```

instead of:

```c++
bool IsEven(int n) {
  return (n % 2) == 0;
}
```

the failed assertion `EXPECT_TRUE(IsEven(Fib(4)))` will print:

```none
Value of: IsEven(Fib(4))
  Actual: false (3 is odd)
Expected: true
```

instead of a more opaque

```none
Value of: IsEven(Fib(4))
  Actual: false
Expected: true
```

If you want informative messages in `EXPECT_FALSE` and `ASSERT_FALSE` as well
(one third of Boolean assertions in the Google code base are negative ones),
and are fine with making the predicate slower in the success case, you can
supply a success message:

```c++
::testing::AssertionResult IsEven(int n) {
  if ((n % 2) == 0)
    return ::testing::AssertionSuccess() << n << " is even";
  else
    return ::testing::AssertionFailure() << n << " is odd";
}
```

Then the statement `EXPECT_FALSE(IsEven(Fib(6)))` will print

```none
Value of: IsEven(Fib(6))
  Actual: true (8 is even)
Expected: false
```

**Availability**: Linux, Windows, Mac.

#### Using a Predicate-Formatter

If you find the default message generated by `(ASSERT|EXPECT)_PRED*` and
`(ASSERT|EXPECT)_(TRUE|FALSE)` unsatisfactory, or some arguments to your
predicate do not support streaming to `ostream`, you can instead use the
following *predicate-formatter assertions* to *fully* customize how the message
is formatted:

Fatal assertion                                  | Nonfatal assertion                               | Verifies
------------------------------------------------ | ------------------------------------------------ | --------
`ASSERT_PRED_FORMAT1(pred_format1, val1);`       | `EXPECT_PRED_FORMAT1(pred_format1, val1);`       | `pred_format1(val1)` is successful
`ASSERT_PRED_FORMAT2(pred_format2, val1, val2);` | `EXPECT_PRED_FORMAT2(pred_format2, val1, val2);` | `pred_format2(val1, val2)` is successful
`...`                                            | `...`                                            | ...

The difference between this and the previous group of macros is that instead of
a predicate, `(ASSERT|EXPECT)_PRED_FORMAT*` take a *predicate-formatter*
(`pred_formatn`), which is a function or functor with the signature:

```c++
::testing::AssertionResult PredicateFormattern(const char* expr1,
                                               const char* expr2,
                                               ...
                                               const char* exprn,
                                               T1 val1,
                                               T2 val2,
                                               ...
                                               Tn valn);
```

where `val1`, `val2`, ..., and `valn` are the values of the predicate
arguments, and `expr1`, `expr2`, ..., and `exprn` are the corresponding
expressions as they appear in the source code.
The types `T1`, `T2`, ..., and `Tn` can be either value types or reference
types. For example, if an argument has type `Foo`, you can declare it as either
`Foo` or `const Foo&`, whichever is appropriate.

As an example, let's improve the failure message in `MutuallyPrime()`, which
was used with `EXPECT_PRED2()`:

```c++
// Returns the smallest prime common divisor of m and n,
// or 1 when m and n are mutually prime.
int SmallestPrimeCommonDivisor(int m, int n) { ... }

// A predicate-formatter for asserting that two integers are mutually prime.
::testing::AssertionResult AssertMutuallyPrime(const char* m_expr,
                                               const char* n_expr,
                                               int m,
                                               int n) {
  if (MutuallyPrime(m, n)) return ::testing::AssertionSuccess();

  return ::testing::AssertionFailure() << m_expr << " and " << n_expr
      << " (" << m << " and " << n << ") are not mutually prime, "
      << "as they have a common divisor " << SmallestPrimeCommonDivisor(m, n);
}
```

With this predicate-formatter, we can use

```c++
EXPECT_PRED_FORMAT2(AssertMutuallyPrime, b, c);
```

to generate the message

```none
b and c (4 and 10) are not mutually prime, as they have a common divisor 2.
```

As you may have realized, many of the built-in assertions we introduced earlier
are special cases of `(EXPECT|ASSERT)_PRED_FORMAT*`. In fact, most of them are
indeed defined using `(EXPECT|ASSERT)_PRED_FORMAT*`.

**Availability**: Linux, Windows, Mac.

### Floating-Point Comparison

Comparing floating-point numbers is tricky. Due to round-off errors, it is very
unlikely that two floating-points will match exactly. Therefore, `ASSERT_EQ`'s
naive comparison usually doesn't work. And since floating-points can have a
wide value range, no single fixed error bound works. It's better to compare by
a fixed relative error bound, except for values close to 0 due to the loss of
precision there.

In general, for floating-point comparison to make sense, the user needs to
carefully choose the error bound. If they don't want or care to, comparing in
terms of Units in the Last Place (ULPs) is a good default, and googletest
provides assertions to do this. Full details about ULPs are quite long; if you
want to learn more, see
[here](https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/).

#### Floating-Point Macros

| Fatal assertion                 | Nonfatal assertion              | Verifies                                 |
| ------------------------------- | ------------------------------- | ---------------------------------------- |
| `ASSERT_FLOAT_EQ(val1, val2);`  | `EXPECT_FLOAT_EQ(val1, val2);`  | the two `float` values are almost equal  |
| `ASSERT_DOUBLE_EQ(val1, val2);` | `EXPECT_DOUBLE_EQ(val1, val2);` | the two `double` values are almost equal |

By "almost equal" we mean the values are within 4 ULP's from each other.

NOTE: `CHECK_DOUBLE_EQ()` in `base/logging.h` uses a fixed absolute error
bound, so its result may differ from that of the googletest macros. That macro
is unsafe and has been deprecated. Please don't use it any more.

The following assertions allow you to choose the acceptable error bound:

| Fatal assertion                       | Nonfatal assertion                    | Verifies                                                                          |
| ------------------------------------- | ------------------------------------- | --------------------------------------------------------------------------------- |
| `ASSERT_NEAR(val1, val2, abs_error);` | `EXPECT_NEAR(val1, val2, abs_error);` | the difference between `val1` and `val2` doesn't exceed the given absolute error |

**Availability**: Linux, Windows, Mac.

#### Floating-Point Predicate-Format Functions

Some floating-point operations are useful, but not that often used.
In order to avoid an explosion of new macros, we provide them as
predicate-format functions that can be used in predicate assertion macros
(e.g. `EXPECT_PRED_FORMAT2`, etc).

```c++
EXPECT_PRED_FORMAT2(::testing::FloatLE, val1, val2);
EXPECT_PRED_FORMAT2(::testing::DoubleLE, val1, val2);
```

Verifies that `val1` is less than, or almost equal to, `val2`. You can replace
`EXPECT_PRED_FORMAT2` in the above table with `ASSERT_PRED_FORMAT2`.

**Availability**: Linux, Windows, Mac.

### Asserting Using gMock Matchers

Google-developed C++ mocking framework [gMock](../../googlemock) comes with a
library of matchers for validating arguments passed to mock objects. A gMock
*matcher* is basically a predicate that knows how to describe itself. It can be
used in these assertion macros:

| Fatal assertion                | Nonfatal assertion             | Verifies              |
| ------------------------------ | ------------------------------ | --------------------- |
| `ASSERT_THAT(value, matcher);` | `EXPECT_THAT(value, matcher);` | value matches matcher |

For example, `StartsWith(prefix)` is a matcher that matches a string starting
with `prefix`, and you can write:

```c++
using ::testing::StartsWith;
...
    // Verifies that Foo() returns a string starting with "Hello".
    EXPECT_THAT(Foo(), StartsWith("Hello"));
```

Read this
[recipe](../../googlemock/docs/CookBook.md#using-matchers-in-google-test-assertions)
in the gMock Cookbook for more details.

gMock has a rich set of matchers. You can do many things googletest cannot do
alone with them. For a list of matchers gMock provides, read
[this](../../googlemock/docs/CookBook.md#using-matchers). Especially useful
among them are some
[protocol buffer matchers](https://github.com/google/nucleus/blob/master/nucleus/testing/protocol-buffer-matchers.h).
It's easy to write your
[own matchers](../../googlemock/docs/CookBook.md#writing-new-matchers-quickly)
too.

For example, you can use gMock's
[EqualsProto](https://github.com/google/nucleus/blob/master/nucleus/testing/protocol-buffer-matchers.h)
to compare protos in your tests:

```c++
#include "testing/base/public/gmock.h"
using ::testing::EqualsProto;
...
    EXPECT_THAT(actual_proto, EqualsProto("foo: 123 bar: 'xyz'"));
    EXPECT_THAT(*actual_proto_ptr, EqualsProto(expected_proto));
```

gMock is bundled with googletest, so you don't need to add any build dependency
in order to take advantage of this. Just include
`"testing/base/public/gmock.h"` and you're ready to go.

**Availability**: Linux, Windows, and Mac.

### More String Assertions

(Please read the [previous](#AssertThat) section first if you haven't.)

You can use the gMock
[string matchers](../../googlemock/docs/CheatSheet.md#string-matchers) with
`EXPECT_THAT()` or `ASSERT_THAT()` to do more string comparison tricks
(sub-string, prefix, suffix, regular expression, etc). For example,

```c++
using ::testing::HasSubstr;
using ::testing::MatchesRegex;
...
  ASSERT_THAT(foo_string, HasSubstr("needle"));
  EXPECT_THAT(bar_string, MatchesRegex("\\w*\\d+"));
```

**Availability**: Linux, Windows, Mac.

If the string contains a well-formed HTML or XML document, you can check
whether its DOM tree matches an
[XPath expression](http://www.w3.org/TR/xpath/#contents):

```c++
// Currently still in //template/prototemplate/testing:xpath_matcher
#include "template/prototemplate/testing/xpath_matcher.h"
using prototemplate::testing::MatchesXPath;
EXPECT_THAT(html_string, MatchesXPath("//a[text()='click here']"));
```

**Availability**: Linux.

### Windows HRESULT assertions

These assertions test for `HRESULT` success or failure.
Fatal assertion                          | Nonfatal assertion                       | Verifies
----------------------------------------- | ----------------------------------------- | --------
`ASSERT_HRESULT_SUCCEEDED(expression)`    | `EXPECT_HRESULT_SUCCEEDED(expression)`    | `expression` is a success `HRESULT`
`ASSERT_HRESULT_FAILED(expression)`       | `EXPECT_HRESULT_FAILED(expression)`       | `expression` is a failure `HRESULT`

The generated output contains the human-readable error message associated with
the `HRESULT` code returned by `expression`.

You might use them like this:

```c++
CComPtr<IShellDispatch2> shell;
ASSERT_HRESULT_SUCCEEDED(shell.CoCreateInstance(L"Shell.Application"));
CComVariant empty;
ASSERT_HRESULT_SUCCEEDED(shell->ShellExecute(CComBSTR(url), empty, empty, empty, empty));
```

**Availability**: Windows.

### Type Assertions

You can call the function

```c++
::testing::StaticAssertTypeEq<T1, T2>();
```

to assert that types `T1` and `T2` are the same. The function does nothing if
the assertion is satisfied. If the types are different, the function call will
fail to compile, and the compiler error message will likely (depending on the
compiler) show you the actual values of `T1` and `T2`. This is mainly useful
inside template code.

**Caveat**: When used inside a member function of a class template or a
function template, `StaticAssertTypeEq<T1, T2>()` is effective only if the
function is instantiated. For example, given:

```c++
template <typename T> class Foo {
 public:
  void Bar() { ::testing::StaticAssertTypeEq<int, T>(); }
};
```

the code:

```c++
void Test1() { Foo<bool> foo; }
```

will not generate a compiler error, as `Foo<bool>::Bar()` is never actually
instantiated. Instead, you need:

```c++
void Test2() { Foo<bool> foo; foo.Bar(); }
```

to cause a compiler error.

**Availability**: Linux, Windows, Mac.

### Assertion Placement

You can use assertions in any C++ function. In particular, it doesn't have to
be a method of the test fixture class. The one constraint is that assertions
that generate a fatal failure (`FAIL*` and `ASSERT_*`) can only be used in
void-returning functions. This is a consequence of Google's not using
exceptions. By placing it in a non-void function you'll get a confusing compile
error like `"error: void value not ignored as it ought to be"` or `"cannot
initialize return object of type 'bool' with an rvalue of type 'void'"` or
`"error: no viable conversion from 'void' to 'string'"`.

If you need to use fatal assertions in a function that returns non-void, one
option is to make the function return the value in an out parameter instead.
For example, you can rewrite `T2 Foo(T1 x)` to `void Foo(T1 x, T2* result)`.
You need to make sure that `*result` contains some sensible value even when the
function returns prematurely. As the function now returns `void`, you can use
any assertion inside of it.

If changing the function's type is not an option, you should just use
assertions that generate non-fatal failures, such as `ADD_FAILURE*` and
`EXPECT_*`.

NOTE: Constructors and destructors are not considered void-returning functions,
according to the C++ language specification, and so you may not use fatal
assertions in them. You'll get a compilation error if you try. A simple
workaround is to transfer the entire body of the constructor or destructor to a
private void-returning method. However, you should be aware that a fatal
assertion failure in a constructor does not terminate the current test, as your
intuition might suggest; it merely returns from the constructor early, possibly
leaving your object in a partially-constructed state.
Likewise, a fatal assertion failure in a destructor may leave your object in a
partially-destructed state. Use assertions carefully in these situations!

## Teaching googletest How to Print Your Values

When a test assertion such as `EXPECT_EQ` fails, googletest prints the argument
values to help you debug. It does this using a user-extensible value printer.

This printer knows how to print built-in C++ types, native arrays, STL
containers, and any type that supports the `<<` operator. For other types, it
prints the raw bytes in the value and hopes that you the user can figure it
out.

As mentioned earlier, the printer is *extensible*. That means you can teach it
to do a better job at printing your particular type than to dump the bytes. To
do that, define `<<` for your type:

```c++
// Streams are allowed only for logging.  Don't include this for
// any other purpose.
#include <ostream>

namespace foo {

class Bar {  // We want googletest to be able to print instances of this.
  ...
  // Create a free inline friend function.
  friend std::ostream& operator<<(std::ostream& os, const Bar& bar) {
    return os << bar.DebugString();  // whatever needed to print bar to os
  }
};

// If you can't declare the function in the class it's important that the
// << operator is defined in the SAME namespace that defines Bar.  C++'s look-up
// rules rely on that.
std::ostream& operator<<(std::ostream& os, const Bar& bar) {
  return os << bar.DebugString();  // whatever needed to print bar to os
}

}  // namespace foo
```

Sometimes, this might not be an option: your team may consider it bad style to
have a `<<` operator for `Bar`, or `Bar` may already have a `<<` operator that
doesn't do what you want (and you cannot change it). If so, you can instead
define a `PrintTo()` function like this:

```c++
// Streams are allowed only for logging.  Don't include this for
// any other purpose.
#include <ostream>

namespace foo {

class Bar {
  ...
  friend void PrintTo(const Bar& bar, std::ostream* os) {
    *os << bar.DebugString();  // whatever needed to print bar to os
  }
};

// If you can't declare the function in the class it's important that PrintTo()
// is defined in the SAME namespace that defines Bar.  C++'s look-up rules rely
// on that.
void PrintTo(const Bar& bar, std::ostream* os) {
  *os << bar.DebugString();  // whatever needed to print bar to os
}

}  // namespace foo
```

If you have defined both `<<` and `PrintTo()`, the latter will be used when
googletest is concerned. This allows you to customize how the value appears in
googletest's output without affecting code that relies on the behavior of its
`<<` operator.

If you want to print a value `x` using googletest's value printer yourself,
just call `::testing::PrintToString(x)`, which returns an `std::string`:

```c++
vector<pair<Bar, int> > bar_ints = GetBarIntVector();

EXPECT_TRUE(IsCorrectBarIntVector(bar_ints))
    << "bar_ints = " << ::testing::PrintToString(bar_ints);
```

## Death Tests

In many applications, there are assertions that can cause application failure
if a condition is not met. These sanity checks, which ensure that the program
is in a known good state, are there to fail at the earliest possible time after
some program state is corrupted. If the assertion checks the wrong condition,
then the program may proceed in an erroneous state, which could lead to memory
corruption, security holes, or worse. Hence it is vitally important to test
that such assertion statements work as expected.

Since these precondition checks cause the processes to die, we call such tests
_death tests_.
More generally, any test that checks that a program terminates (except by
throwing an exception) in an expected fashion is also a death test.

Note that if a piece of code throws an exception, we don't consider it "death"
for the purpose of death tests, as the caller of the code could catch the
exception and avoid the crash. If you want to verify exceptions thrown by your
code, see [Exception Assertions](#exception-assertions).

If you want to test `EXPECT_*()/ASSERT_*()` failures in your test code, see
Catching Failures

### How to Write a Death Test

googletest has the following macros to support death tests:

Fatal assertion                                  | Nonfatal assertion                               | Verifies
-------------------------------------------------- | -------------------------------------------------- | --------
`ASSERT_DEATH(statement, regex);`                  | `EXPECT_DEATH(statement, regex);`                  | `statement` crashes with the given error
`ASSERT_DEATH_IF_SUPPORTED(statement, regex);`     | `EXPECT_DEATH_IF_SUPPORTED(statement, regex);`     | if death tests are supported, verifies that `statement` crashes with the given error; otherwise verifies nothing
`ASSERT_EXIT(statement, predicate, regex);`        | `EXPECT_EXIT(statement, predicate, regex);`        | `statement` exits with the given error, and its exit code matches `predicate`

where `statement` is a statement that is expected to cause the process to die,
`predicate` is a function or function object that evaluates an integer exit
status, and `regex` is a (Perl) regular expression that the stderr output of
`statement` is expected to match. Note that `statement` can be *any valid
statement* (including *compound statement*) and doesn't have to be an
expression.

As usual, the `ASSERT` variants abort the current test function, while the
`EXPECT` variants do not.

> NOTE: We use the word "crash" here to mean that the process terminates with
> a *non-zero* exit status code. There are two possibilities: either the
> process has called `exit()` or `_exit()` with a non-zero value, or it may be
> killed by a signal.
>
> This means that if `*statement*` terminates the process with a 0 exit code,
> it is *not* considered a crash by `EXPECT_DEATH`. Use `EXPECT_EXIT` instead
> if this is the case, or if you want to restrict the exit code more precisely.

A predicate here must accept an `int` and return a `bool`. The death test
succeeds only if the predicate returns `true`. googletest defines a few
predicates that handle the most common cases:

```c++
::testing::ExitedWithCode(exit_code)
```

This expression is `true` if the program exited normally with the given exit
code.

```c++
::testing::KilledBySignal(signal_number)  // Not available on Windows.
```

This expression is `true` if the program was killed by the given signal.

The `*_DEATH` macros are convenient wrappers for `*_EXIT` that use a predicate
that verifies the process' exit code is non-zero.

Note that a death test only cares about three things:

1.  does `statement` abort or exit the process?
2.  (in the case of `ASSERT_EXIT` and `EXPECT_EXIT`) does the exit status
    satisfy `predicate`? Or (in the case of `ASSERT_DEATH` and `EXPECT_DEATH`)
    is the exit status non-zero? And
3.  does the stderr output match `regex`?

In particular, if `statement` generates an `ASSERT_*` or `EXPECT_*` failure, it
will **not** cause the death test to fail, as googletest assertions don't abort
the process.

To write a death test, simply use one of the above macros inside your test
function. For example,

```c++
TEST(MyDeathTest, Foo) {
  // This death test uses a compound statement.
  ASSERT_DEATH({
    int n = 5;
    Foo(&n);
  }, "Error on line .* of Foo()");
}

TEST(MyDeathTest, NormalExit) {
  EXPECT_EXIT(NormalExit(), ::testing::ExitedWithCode(0), "Success");
}

TEST(MyDeathTest, KillMyself) {
  EXPECT_EXIT(KillMyself(), ::testing::KilledBySignal(SIGKILL),
              "Sending myself unblockable signal");
}
```

verifies that:

*   calling `Foo(5)` causes the process to die with the given error message,
*   calling `NormalExit()` causes the process to print `"Success"` to stderr
    and exit with exit code 0, and
*   calling `KillMyself()` kills the process with signal `SIGKILL`.

The test function body may contain other assertions and statements as well, if
necessary.

### Death Test Naming

IMPORTANT: We strongly recommend you to follow the convention of naming your
**test case** (not test) `*DeathTest` when it contains a death test, as
demonstrated in the above example. The
[Death Tests And Threads](#death-tests-and-threads) section below explains why.

If a test fixture class is shared by normal tests and death tests, you can use
`using` or `typedef` to introduce an alias for the fixture class and avoid
duplicating its code:

```c++
class FooTest : public ::testing::Test { ... };

using FooDeathTest = FooTest;

TEST_F(FooTest, DoesThis) {
  // normal test
}

TEST_F(FooDeathTest, DoesThat) {
  // death test
}
```

**Availability**: Linux, Windows (requires MSVC 8.0 or above), Cygwin, and Mac

### Regular Expression Syntax

On POSIX systems (e.g. Linux, Cygwin, and Mac), googletest uses the
[POSIX extended regular expression](http://www.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap09.html#tag_09_04)
syntax. To learn about this syntax, you may want to read this
[Wikipedia entry](http://en.wikipedia.org/wiki/Regular_expression#POSIX_Extended_Regular_Expressions).

On Windows, googletest uses its own simple regular expression implementation.
It lacks many features. For example, we don't support union (`"x|y"`), grouping
(`"(xy)"`), brackets (`"[xy]"`), and repetition count (`"x{5,7}"`), among
others. Below is what we do support (`A` denotes a literal character, period
(`.`), or a single `\\` escape sequence; `x` and `y` denote regular
expressions.):

Expression | Meaning
---------- | --------------------------------------------------------------
`c`        | matches any literal character `c`
`\\d`      | matches any decimal digit
`\\D`      | matches any character that's not a decimal digit
`\\f`      | matches `\f`
`\\n`      | matches `\n`
`\\r`      | matches `\r`
`\\s`      | matches any ASCII whitespace, including `\n`
`\\S`      | matches any character that's not a whitespace
`\\t`      | matches `\t`
`\\v`      | matches `\v`
`\\w`      | matches any letter, `_`, or decimal digit
`\\W`      | matches any character that `\\w` doesn't match
`\\c`      | matches any literal character `c`, which must be a punctuation
`.`        | matches any single character except `\n`
`A?`       | matches 0 or 1 occurrences of `A`
`A*`       | matches 0 or many occurrences of `A`
`A+`       | matches 1 or many occurrences of `A`
`^`        | matches the beginning of a string (not that of each line)
`$`        | matches the end of a string (not that of each line)
`xy`       | matches `x` followed by `y`

To help you determine which capability is available on your system, googletest
defines macros to govern which regular expression it is using. The macros are:
`GTEST_USES_PCRE=1`, or `GTEST_USES_SIMPLE_RE=1` or `GTEST_USES_POSIX_RE=1`. If
you want your death tests to work in all cases, you can either `#if` on these
macros or use the more limited syntax only.
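For instance, here is a sketch of `#if`-ing on these macros (the pattern
strings and `HandleBadInput()` are hypothetical) so the same death test works
with both regex implementations:

```c++
#include "gtest/gtest.h"

// The simple engine (e.g. on Windows) supports neither alternation nor
// grouping, so fall back to a plain substring match there.
#if GTEST_USES_POSIX_RE || GTEST_USES_PCRE
static const char kDeathPattern[] = "(bad|invalid) input";
#else  // GTEST_USES_SIMPLE_RE
static const char kDeathPattern[] = "input";
#endif

TEST(MyDeathTest, RejectsBadInput) {
  // HandleBadInput() is a hypothetical function expected to abort.
  EXPECT_DEATH(HandleBadInput(nullptr), kDeathPattern);
}
```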
### How It Works

Under the hood, `ASSERT_EXIT()` spawns a new process and executes the death
test statement in that process. The details of how precisely that happens
depend on the platform and the variable
`::testing::GTEST_FLAG(death_test_style)` (which is initialized from the
command-line flag `--gtest_death_test_style`).

*   On POSIX systems, `fork()` (or `clone()` on Linux) is used to spawn the
    child, after which:
    *   If the variable's value is `"fast"`, the death test statement is
        immediately executed.
    *   If the variable's value is `"threadsafe"`, the child process
        re-executes the unit test binary just as it was originally invoked,
        but with some extra flags to cause just the single death test under
        consideration to be run.
*   On Windows, the child is spawned using the `CreateProcess()` API, and
    re-executes the binary to cause just the single death test under
    consideration to be run - much like the `threadsafe` mode on POSIX.

Other values for the variable are illegal and will cause the death test to
fail. Currently, the flag's default value is "fast". However, we reserve the
right to change it in the future. Therefore, your tests should not depend on
this.

In either case, the parent process waits for the child process to complete,
and checks that

1.  the child's exit status satisfies the predicate, and
2.  the child's stderr matches the regular expression.

If the death test statement runs to completion without dying, the child
process will nonetheless terminate, and the assertion fails.

### Death Tests And Threads

The reason for the two death test styles has to do with thread safety. Due to
well-known problems with forking in the presence of threads, death tests
should be run in a single-threaded context. Sometimes, however, it isn't
feasible to arrange that kind of environment. For example,
statically-initialized modules may start threads before main is ever reached.
Once threads have been created, it may be difficult or impossible to clean
them up.

googletest has three features intended to raise awareness of threading issues.

1.  A warning is emitted if multiple threads are running when a death test is
    encountered.
2.  Test cases with a name ending in "DeathTest" are run before all other
    tests.
3.  It uses `clone()` instead of `fork()` to spawn the child process on Linux
    (`clone()` is not available on Cygwin and Mac), as `fork()` is more likely
    to cause the child to hang when the parent process has multiple threads.

It's perfectly fine to create threads inside a death test statement; they are
executed in a separate process and cannot affect the parent.

### Death Test Styles

The "threadsafe" death test style was introduced in order to help mitigate the
risks of testing in a possibly multithreaded environment. It trades increased
test execution time (potentially dramatically so) for improved thread safety.

The automated testing framework does not set the style flag. You can choose a
particular style of death tests by setting the flag programmatically:

```c++
testing::FLAGS_gtest_death_test_style = "threadsafe";
```

You can do this in `main()` to set the style for all death tests in the
binary, or in individual tests. Recall that flags are saved before running
each test and restored afterwards, so you need not do that yourself.
For example:

```c++
int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  ::testing::FLAGS_gtest_death_test_style = "fast";
  return RUN_ALL_TESTS();
}

TEST(MyDeathTest, TestOne) {
  ::testing::FLAGS_gtest_death_test_style = "threadsafe";
  // This test is run in the "threadsafe" style:
  ASSERT_DEATH(ThisShouldDie(), "");
}

TEST(MyDeathTest, TestTwo) {
  // This test is run in the "fast" style:
  ASSERT_DEATH(ThisShouldDie(), "");
}
```

### Caveats

The `statement` argument of `ASSERT_EXIT()` can be any valid C++ statement. If it leaves the current function via a `return` statement or by throwing an exception, the death test is considered to have failed. Some googletest macros may return from the current function (e.g. `ASSERT_TRUE()`), so be sure to avoid them in `statement`.

Since `statement` runs in the child process, any in-memory side effect (e.g. modifying a variable, releasing memory, etc) it causes will *not* be observable in the parent process. In particular, if you release memory in a death test, your program will fail the heap check, as the parent process will never see the memory reclaimed. To solve this problem, you can

1. try not to free memory in a death test;
2. free the memory again in the parent process; or
3. do not use the heap checker in your program.

Due to an implementation detail, you cannot place multiple death test assertions on the same line; otherwise, compilation will fail with an unobvious error message.

Despite the improved thread safety afforded by the "threadsafe" style of death test, thread problems such as deadlock are still possible in the presence of handlers registered with `pthread_atfork(3)`.

## Using Assertions in Sub-routines

### Adding Traces to Assertions

If a test sub-routine is called from several places, when an assertion inside it fails, it can be hard to tell which invocation of the sub-routine the failure came from. You can alleviate this problem using extra logging or custom failure messages, but that usually clutters up your tests. A better solution is to use the `SCOPED_TRACE` macro or the `ScopedTrace` utility:

```c++
SCOPED_TRACE(message);
ScopedTrace trace("file_path", line_number, message);
```

where `message` can be anything streamable to `std::ostream`. The `SCOPED_TRACE` macro will cause the current file name, line number, and the given message to be added to every failure message. `ScopedTrace` accepts an explicit file name and line number as arguments, which is useful for writing test helpers. The effect will be undone when control leaves the current lexical scope.

For example,

```c++
10: void Sub1(int n) {
11:   EXPECT_EQ(1, Bar(n));
12:   EXPECT_EQ(2, Bar(n + 1));
13: }
14:
15: TEST(FooTest, Bar) {
16:   {
17:     SCOPED_TRACE("A");  // This trace point will be included in
18:                         // every failure in this scope.
19:     Sub1(1);
20:   }
21:   // Now it won't.
22:   Sub1(9);
23: }
```

could result in messages like these:

```none
path/to/foo_test.cc:11: Failure
Value of: Bar(n)
Expected: 1
  Actual: 2
Trace:
path/to/foo_test.cc:17: A

path/to/foo_test.cc:12: Failure
Value of: Bar(n + 1)
Expected: 2
  Actual: 3
```

Without the trace, it would've been difficult to know which invocation of `Sub1()` the two failures came from, respectively. (You could add an extra message to each assertion in `Sub1()` to indicate the value of `n`, but that's tedious.)

Some tips on using `SCOPED_TRACE`:

1. With a suitable message, it's often enough to use `SCOPED_TRACE` at the beginning of a sub-routine, instead of at each call site.
2. When calling sub-routines inside a loop, make the loop iterator part of the message in `SCOPED_TRACE` so that you can tell which iteration the failure came from.
3. Sometimes the line number of the trace point is enough for identifying the particular invocation of a sub-routine. In this case, you don't have to choose a unique message for `SCOPED_TRACE`. You can simply use `""`.
4. You can use `SCOPED_TRACE` in an inner scope when there is one in the outer scope. In this case, all active trace points will be included in the failure messages, in the reverse order in which they are encountered.
5. The trace dump is clickable in Emacs - hit `return` on a line number and you'll be taken to that line in the source file!

**Availability**: Linux, Windows, Mac.

### Propagating Fatal Failures

A common pitfall when using `ASSERT_*` and `FAIL*` is not understanding that when they fail they only abort the _current function_, not the entire test. For example, the following test will segfault:

```c++
void Subroutine() {
  // Generates a fatal failure and aborts the current function.
  ASSERT_EQ(1, 2);

  // The following won't be executed.
  ...
}

TEST(FooTest, Bar) {
  Subroutine();  // The intended behavior is for the fatal failure
                 // in Subroutine() to abort the entire test.

  // The actual behavior: the function goes on after Subroutine() returns.
  int* p = NULL;
  *p = 3;  // Segfault!
}
```

To alleviate this, googletest provides three different solutions. You could use either exceptions, the `(ASSERT|EXPECT)_NO_FATAL_FAILURE` assertions, or the `HasFatalFailure()` function. They are described in the following three subsections.

#### Asserting on Subroutines with an Exception

The following code can turn ASSERT-failure into an exception:

```c++
class ThrowListener : public testing::EmptyTestEventListener {
  void OnTestPartResult(const testing::TestPartResult& result) override {
    if (result.type() == testing::TestPartResult::kFatalFailure) {
      throw testing::AssertionException(result);
    }
  }
};

int main(int argc, char** argv) {
  ...
  testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener);
  return RUN_ALL_TESTS();
}
```

This listener should be added after other listeners if you have any; otherwise they won't see failed `OnTestPartResult` events.

#### Asserting on Subroutines

As shown above, if your test calls a subroutine that has an `ASSERT_*` failure in it, the test will continue after the subroutine returns. This may not be what you want.

Often people want fatal failures to propagate like exceptions. For that googletest offers the following macros:

Fatal assertion | Nonfatal assertion | Verifies
------------------------------------- | ------------------------------------- | --------
`ASSERT_NO_FATAL_FAILURE(statement);` | `EXPECT_NO_FATAL_FAILURE(statement);` | `statement` doesn't generate any new fatal failures in the current thread.

Only failures in the thread that executes the assertion are checked to determine the result of this type of assertion. If `statement` creates new threads, failures in these threads are ignored.

Examples:

```c++
ASSERT_NO_FATAL_FAILURE(Foo());

int i;
EXPECT_NO_FATAL_FAILURE({
  i = Bar();
});
```

**Availability**: Linux, Windows, Mac. Assertions from multiple threads are currently not supported on Windows.

#### Checking for Failures in the Current Test

`HasFatalFailure()` in the `::testing::Test` class returns `true` if an assertion in the current test has suffered a fatal failure. This allows functions to catch fatal failures in a sub-routine and return early.

```c++
class Test {
 public:
  ...
  static bool HasFatalFailure();
};
```

The typical usage, which basically simulates the behavior of a thrown exception, is:

```c++
TEST(FooTest, Bar) {
  Subroutine();
  // Aborts if Subroutine() had a fatal failure.
  if (HasFatalFailure()) return;

  // The following won't be executed.
  ...
}
```

If `HasFatalFailure()` is used outside of `TEST()`, `TEST_F()`, or a test fixture, you must add the `::testing::Test::` prefix, as in:

```c++
if (::testing::Test::HasFatalFailure()) return;
```

Similarly, `HasNonfatalFailure()` returns `true` if the current test has at least one non-fatal failure, and `HasFailure()` returns `true` if the current test has at least one failure of either kind.

**Availability**: Linux, Windows, Mac.

## Logging Additional Information

In your test code, you can call `RecordProperty("key", value)` to log additional information, where `value` can be either a string or an `int`. The *last* value recorded for a key will be emitted to the [XML output](#generating-an-xml-report) if you specify one. For example, the test

```c++
TEST_F(WidgetUsageTest, MinAndMaxWidgets) {
  RecordProperty("MaximumWidgets", ComputeMaxUsage());
  RecordProperty("MinimumWidgets", ComputeMinUsage());
}
```

will output XML like this:

```xml
...
...
```

> NOTE:
>
> * `RecordProperty()` is a static member of the `Test` class. Therefore it
>   needs to be prefixed with `::testing::Test::` if used outside of the
>   `TEST` body and the test fixture class.
> * `key` must be a valid XML attribute name, and cannot conflict with the
>   ones already used by googletest (`name`, `status`, `time`, `classname`,
>   `type_param`, and `value_param`).
> * Calling `RecordProperty()` outside of the lifespan of a test is allowed.
>   If it's called outside of a test but between a test case's
>   `SetUpTestCase()` and `TearDownTestCase()` methods, it will be attributed
>   to the XML element for the test case. If it's called outside of all test
>   cases (e.g. in a test environment), it will be attributed to the top-level
>   XML element.

**Availability**: Linux, Windows, Mac.

## Sharing Resources Between Tests in the Same Test Case

googletest creates a new test fixture object for each test in order to make tests independent and easier to debug. However, sometimes tests use resources that are expensive to set up, making the one-copy-per-test model prohibitively expensive.

If the tests don't change the resource, there's no harm in their sharing a single resource copy. So, in addition to per-test set-up/tear-down, googletest also supports per-test-case set-up/tear-down. To use it:

1. In your test fixture class (say `FooTest`), declare as `static` some member variables to hold the shared resources.
1. Outside your test fixture class (typically just below it), define those member variables, optionally giving them initial values.
1. In the same test fixture class, define a `static void SetUpTestCase()` function (remember not to spell it as **`SetupTestCase`** with a small `u`!) to set up the shared resources and a `static void TearDownTestCase()` function to tear them down.

That's it! googletest automatically calls `SetUpTestCase()` before running the *first test* in the `FooTest` test case (i.e. before creating the first `FooTest` object), and calls `TearDownTestCase()` after running the *last test* in it (i.e. after deleting the last `FooTest` object). In between, the tests can use the shared resources.

Remember that the test order is undefined, so your code can't depend on a test preceding or following another.
Also, the tests must either not modify the state of any shared resource, or, if they do modify the state, they must restore the state to its original value before passing control to the next test.

Here's an example of per-test-case set-up and tear-down:

```c++
class FooTest : public ::testing::Test {
 protected:
  // Per-test-case set-up.
  // Called before the first test in this test case.
  // Can be omitted if not needed.
  static void SetUpTestCase() {
    shared_resource_ = new ...;
  }

  // Per-test-case tear-down.
  // Called after the last test in this test case.
  // Can be omitted if not needed.
  static void TearDownTestCase() {
    delete shared_resource_;
    shared_resource_ = NULL;
  }

  // You can define per-test set-up logic as usual.
  virtual void SetUp() { ... }

  // You can define per-test tear-down logic as usual.
  virtual void TearDown() { ... }

  // Some expensive resource shared by all tests.
  static T* shared_resource_;
};

T* FooTest::shared_resource_ = NULL;

TEST_F(FooTest, Test1) {
  ... you can refer to shared_resource_ here ...
}

TEST_F(FooTest, Test2) {
  ... you can refer to shared_resource_ here ...
}
```

NOTE: Though the above code declares `SetUpTestCase()` protected, it may sometimes be necessary to declare it public, such as when using it with `TEST_P`.

**Availability**: Linux, Windows, Mac.

## Global Set-Up and Tear-Down

Just as you can do set-up and tear-down at the test level and the test case level, you can also do it at the test program level. Here's how.

First, you subclass the `::testing::Environment` class to define a test environment, which knows how to set up and tear down:

```c++
class Environment {
 public:
  virtual ~Environment() {}

  // Override this to define how to set up the environment.
  virtual void SetUp() {}

  // Override this to define how to tear down the environment.
  virtual void TearDown() {}
};
```

Then, you register an instance of your environment class with googletest by calling the `::testing::AddGlobalTestEnvironment()` function:

```c++
Environment* AddGlobalTestEnvironment(Environment* env);
```

Now, when `RUN_ALL_TESTS()` is called, it first calls the `SetUp()` method of
-the environment object, then runs the tests if there was no fatal failures, and
-finally calls `TearDown()` of the environment object.
+each environment object, then runs the tests if none of the environments
+reported fatal failures and `GTEST_SKIP()` was not called. `RUN_ALL_TESTS()`
+always calls `TearDown()` with each environment object, regardless of whether
+or not the tests were run.

It's OK to register multiple environment objects. In this case, their `SetUp()` will be called in the order they are registered, and their `TearDown()` will be called in the reverse order.

Note that googletest takes ownership of the registered environment objects. Therefore **do not delete them** by yourself.

You should call `AddGlobalTestEnvironment()` before `RUN_ALL_TESTS()` is called, probably in `main()`. If you use `gtest_main`, you need to call this before `main()` starts for it to take effect.
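For concreteness, here is a minimal sketch of a concrete environment subclass, using the `FooEnvironment` name that the registration snippet below reuses; the resource it manages is purely illustrative:

```c++
class FooEnvironment : public ::testing::Environment {
 public:
  virtual ~FooEnvironment() {}

  // Acquire some expensive, program-wide resource (illustrative only,
  // e.g. bring up a connection to a local test server).
  virtual void SetUp() { /* acquire the shared resource */ }

  // Release whatever SetUp() acquired.
  virtual void TearDown() { /* release the shared resource */ }
};
```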
One way to do this is to define a global variable like this:

```c++
::testing::Environment* const foo_env =
    ::testing::AddGlobalTestEnvironment(new FooEnvironment);
```

However, we strongly recommend that you write your own `main()` and call `AddGlobalTestEnvironment()` there, as relying on initialization of global variables makes the code harder to read and may cause problems when you register multiple environments from different translation units and the environments have dependencies among them (remember that the compiler doesn't guarantee the order in which global variables from different translation units are initialized).

## Value-Parameterized Tests

*Value-parameterized tests* allow you to test your code with different parameters without writing multiple copies of the same test. This is useful in a number of situations, for example:

* You have a piece of code whose behavior is affected by one or more command-line flags. You want to make sure your code performs correctly for various values of those flags.
* You want to test different implementations of an OO interface.
* You want to test your code over various inputs (a.k.a. data-driven testing). This feature is easy to abuse, so please exercise your good sense when doing it!

### How to Write Value-Parameterized Tests

To write value-parameterized tests, first you should define a fixture class. It must be derived from both `::testing::Test` and `::testing::WithParamInterface<T>` (the latter is a pure interface), where `T` is the type of your parameter values. For convenience, you can just derive the fixture class from `::testing::TestWithParam<T>`, which itself is derived from both `::testing::Test` and `::testing::WithParamInterface<T>`. `T` can be any copyable type. If it's a raw pointer, you are responsible for managing the lifespan of the pointed values.

NOTE: If your test fixture defines `SetUpTestCase()` or `TearDownTestCase()` they must be declared **public** rather than **protected** in order to use `TEST_P`.

```c++
class FooTest : public ::testing::TestWithParam<const char*> {
  // You can implement all the usual fixture class members here.
  // To access the test parameter, call GetParam() from class
  // TestWithParam<T>.
};

// Or, when you want to add parameters to a pre-existing fixture class:
class BaseTest : public ::testing::Test {
  ...
};
class BarTest : public BaseTest,
                public ::testing::WithParamInterface<int> {
  ...
};
```

Then, use the `TEST_P` macro to define as many test patterns using this fixture as you want. The `_P` suffix is for "parameterized" or "pattern", whichever you prefer to think of.

```c++
TEST_P(FooTest, DoesBlah) {
  // Inside a test, access the test parameter with the GetParam() method
  // of the TestWithParam<T> class:
  EXPECT_TRUE(foo.Blah(GetParam()));
  ...
}

TEST_P(FooTest, HasBlahBlah) {
  ...
}
```

Finally, you can use `INSTANTIATE_TEST_CASE_P` to instantiate the test case with any set of parameters you want. googletest defines a number of functions for generating test parameters. They return what we call (surprise!) *parameter generators*. Here is a summary of them, which are all in the `testing` namespace:

| Parameter Generator          | Behavior                                    |
| ---------------------------- | ------------------------------------------- |
| `Range(begin, end [, step])` | Yields values `{begin, begin+step, begin+step+step, ...}`. The values do not include `end`. `step` defaults to 1. |
| `Values(v1, v2, ..., vN)`    | Yields values `{v1, v2, ..., vN}`. |
| `ValuesIn(container)` and `ValuesIn(begin,end)` | Yields values from a C-style array, an STL-style container, or an iterator range `[begin, end)`. |
| `Bool()`                     | Yields sequence `{false, true}`. |
| `Combine(g1, g2, ..., gN)`   | Yields all combinations (Cartesian product) as `std::tuple`s of the values generated by the `N` generators. |

For more details, see the comments at the definitions of these functions.

The following statement will instantiate tests from the `FooTest` test case each with parameter values `"meeny"`, `"miny"`, and `"moe"`.

```c++
INSTANTIATE_TEST_CASE_P(InstantiationName,
                        FooTest,
                        ::testing::Values("meeny", "miny", "moe"));
```

NOTE: The code above must be placed at global or namespace scope, not at function scope.

NOTE: Don't forget this step! If you do forget it, your tests will silently pass, but none of their cases will ever run!

To distinguish different instances of the pattern (yes, you can instantiate it more than once), the first argument to `INSTANTIATE_TEST_CASE_P` is a prefix that will be added to the actual test case name. Remember to pick unique prefixes for different instantiations. The tests from the instantiation above will have these names:

* `InstantiationName/FooTest.DoesBlah/0` for `"meeny"`
* `InstantiationName/FooTest.DoesBlah/1` for `"miny"`
* `InstantiationName/FooTest.DoesBlah/2` for `"moe"`
* `InstantiationName/FooTest.HasBlahBlah/0` for `"meeny"`
* `InstantiationName/FooTest.HasBlahBlah/1` for `"miny"`
* `InstantiationName/FooTest.HasBlahBlah/2` for `"moe"`

You can use these names in [`--gtest_filter`](#running-a-subset-of-the-tests).

This statement will instantiate all tests from `FooTest` again, each with parameter values `"cat"` and `"dog"`:

```c++
const char* pets[] = {"cat", "dog"};
INSTANTIATE_TEST_CASE_P(AnotherInstantiationName, FooTest,
                        ::testing::ValuesIn(pets));
```

The tests from the instantiation above will have these names:

* `AnotherInstantiationName/FooTest.DoesBlah/0` for `"cat"`
* `AnotherInstantiationName/FooTest.DoesBlah/1` for `"dog"`
* `AnotherInstantiationName/FooTest.HasBlahBlah/0` for `"cat"`
* `AnotherInstantiationName/FooTest.HasBlahBlah/1` for `"dog"`

Please note that `INSTANTIATE_TEST_CASE_P` will instantiate *all* tests in the given test case, whether their definitions come before or *after* the `INSTANTIATE_TEST_CASE_P` statement.

You can see sample7_unittest.cc and sample8_unittest.cc for more examples.

**Availability**: Linux, Windows (requires MSVC 8.0 or above), Mac

### Creating Value-Parameterized Abstract Tests

In the above, we define and instantiate `FooTest` in the *same* source file. Sometimes you may want to define value-parameterized tests in a library and let other people instantiate them later. This pattern is known as *abstract tests*. As an example of its application, when you are designing an interface you can write a standard suite of abstract tests (perhaps using a factory function as the test parameter) that all implementations of the interface are expected to pass. When someone implements the interface, they can instantiate your suite to get all the interface-conformance tests for free.

To define abstract tests, you should organize your code like this (a sketch of the resulting layout follows the list):

1. Put the definition of the parameterized test fixture class (e.g. `FooTest`) in a header file, say `foo_param_test.h`. Think of this as *declaring* your abstract tests.
1. Put the `TEST_P` definitions in `foo_param_test.cc`, which includes `foo_param_test.h`. Think of this as *implementing* your abstract tests.
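As a rough sketch of that layout, under the assumption that `Foo` is the interface under test and that `Bar`, `CreateBar()`, and `IsValid()` are hypothetical names:

```c++
// foo_param_test.h -- *declares* the abstract tests.
// A factory function is a common choice of test parameter for
// interface-conformance suites.
typedef Foo* (*FooFactory)();
class FooTest : public ::testing::TestWithParam<FooFactory> {};

// foo_param_test.cc -- *implements* the abstract tests.
TEST_P(FooTest, MeetsContract) {
  Foo* foo = (*GetParam())();   // Build an implementation via the factory.
  EXPECT_TRUE(foo->IsValid());  // IsValid() is a hypothetical requirement.
  delete foo;
}

// bar_test.cc -- a client *instantiates* the suite for its implementation.
Foo* CreateBar() { return new Bar; }  // Bar: hypothetical implementation.
INSTANTIATE_TEST_CASE_P(Bar, FooTest, ::testing::Values(&CreateBar));
```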
Once they are defined, you can instantiate them by including `foo_param_test.h`, invoking `INSTANTIATE_TEST_CASE_P()`, and depending on the library target that contains `foo_param_test.cc`. You can instantiate the same abstract test case multiple times, possibly in different source files.

### Specifying Names for Value-Parameterized Test Parameters

The optional last argument to `INSTANTIATE_TEST_CASE_P()` allows the user to specify a function or functor that generates custom test name suffixes based on the test parameters. The function should accept one argument of type `testing::TestParamInfo<class ParamType>`, and return `std::string`.

`testing::PrintToStringParamName` is a builtin test suffix generator that returns the value of `testing::PrintToString(GetParam())`. It does not work for `std::string` or C strings.

NOTE: test names must be non-empty, unique, and may only contain ASCII alphanumeric characters. In particular, they [should not contain underscores](https://g3doc.corp.google.com/third_party/googletest/googletest/g3doc/faq.md#no-underscores).

```c++
class MyTestCase : public testing::TestWithParam<int> {};

TEST_P(MyTestCase, MyTest) {
  std::cout << "Example Test Param: " << GetParam() << std::endl;
}

INSTANTIATE_TEST_CASE_P(MyGroup, MyTestCase, testing::Range(0, 10),
                        testing::PrintToStringParamName());
```

## Typed Tests

Suppose you have multiple implementations of the same interface and want to make sure that all of them satisfy some common requirements. Or, you may have defined several types that are supposed to conform to the same "concept" and you want to verify it. In both cases, you want the same test logic repeated for different types.

While you can write one `TEST` or `TEST_F` for each type you want to test (and you may even factor the test logic into a function template that you invoke from the `TEST`), it's tedious and doesn't scale: if you want `m` tests over `n` types, you'll end up writing `m*n` `TEST`s.

*Typed tests* allow you to repeat the same test logic over a list of types. You only need to write the test logic once, although you must know the type list when writing typed tests. Here's how you do it:

First, define a fixture class template. It should be parameterized by a type. Remember to derive it from `::testing::Test`:

```c++
template <typename T>
class FooTest : public ::testing::Test {
 public:
  ...
  typedef std::list<T> List;
  static T shared_;
  T value_;
};
```

Next, associate a list of types with the test case, which will be repeated for each type in the list:

```c++
using MyTypes = ::testing::Types<char, int, unsigned int>;
TYPED_TEST_CASE(FooTest, MyTypes);
```

The type alias (`using` or `typedef`) is necessary for the `TYPED_TEST_CASE` macro to parse correctly. Otherwise the compiler will think that each comma in the type list introduces a new macro argument.

Then, use `TYPED_TEST()` instead of `TEST_F()` to define a typed test for this test case. You can repeat this as many times as you want:

```c++
TYPED_TEST(FooTest, DoesBlah) {
  // Inside a test, refer to the special name TypeParam to get the type
  // parameter.  Since we are inside a derived class template, C++ requires
  // us to visit the members of FooTest via 'this'.
  TypeParam n = this->value_;

  // To visit static members of the fixture, add the 'TestFixture::'
  // prefix.
  n += TestFixture::shared_;

  // To refer to typedefs in the fixture, add the 'typename TestFixture::'
  // prefix.  The 'typename' is required to satisfy the compiler.
  typename TestFixture::List values;
  values.push_back(n);
  ...
}

TYPED_TEST(FooTest, HasPropertyA) {
  ...
}
```

You can see sample6_unittest.cc for a complete example.

**Availability**: Linux, Windows (requires MSVC 8.0 or above), Mac

## Type-Parameterized Tests

*Type-parameterized tests* are like typed tests, except that they don't require you to know the list of types ahead of time. Instead, you can define the test logic first and instantiate it with different type lists later. You can even instantiate it more than once in the same program.

If you are designing an interface or concept, you can define a suite of type-parameterized tests to verify properties that any valid implementation of the interface/concept should have. Then, the author of each implementation can just instantiate the test suite with their type to verify that it conforms to the requirements, without having to write similar tests repeatedly. Here's an example:

First, define a fixture class template, as we did with typed tests:

```c++
template <typename T>
class FooTest : public ::testing::Test {
  ...
};
```

Next, declare that you will define a type-parameterized test case:

```c++
TYPED_TEST_CASE_P(FooTest);
```

Then, use `TYPED_TEST_P()` to define a type-parameterized test. You can repeat this as many times as you want:

```c++
TYPED_TEST_P(FooTest, DoesBlah) {
  // Inside a test, refer to TypeParam to get the type parameter.
  TypeParam n = 0;
  ...
}

TYPED_TEST_P(FooTest, HasPropertyA) { ... }
```

Now the tricky part: you need to register all test patterns using the `REGISTER_TYPED_TEST_CASE_P` macro before you can instantiate them. The first argument of the macro is the test case name; the rest are the names of the tests in this test case:

```c++
REGISTER_TYPED_TEST_CASE_P(FooTest,
                           DoesBlah, HasPropertyA);
```

Finally, you are free to instantiate the pattern with the types you want. If you put the above code in a header file, you can `#include` it in multiple C++ source files and instantiate it multiple times.

```c++
typedef ::testing::Types<char, int, unsigned int> MyTypes;
INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, MyTypes);
```

To distinguish different instances of the pattern, the first argument to the `INSTANTIATE_TYPED_TEST_CASE_P` macro is a prefix that will be added to the actual test case name. Remember to pick unique prefixes for different instances.

In the special case where the type list contains only one type, you can write that type directly without `::testing::Types<...>`, like this:

```c++
INSTANTIATE_TYPED_TEST_CASE_P(My, FooTest, int);
```

You can see `sample6_unittest.cc` for a complete example.

**Availability**: Linux, Windows (requires MSVC 8.0 or above), Mac

## Testing Private Code

If you change your software's internal implementation, your tests should not break as long as the change is not observable by users. Therefore, **per the black-box testing principle, most of the time you should test your code through its public interfaces.**

**If you still find yourself needing to test internal implementation code, consider if there's a better design.** The desire to test internal implementation is often a sign that the class is doing too much. Consider extracting an implementation class, and testing it. Then use that implementation class in the original class.

If you absolutely have to test non-public interface code though, you can. There are two cases to consider:

* Static functions (*not* the same as static member functions!) or unnamed namespaces, and
* Private or protected class members

To test them, we use the following special techniques:

* Both static functions and definitions/declarations in an unnamed namespace are only visible within the same translation unit.
  To test them, you can `#include` the entire `.cc` file being tested in your `*_test.cc` file. (including `.cc` files is not a good way to reuse code - you should not do this in production code!)

  However, a better approach is to move the private code into the `foo::internal` namespace, where `foo` is the namespace your project normally uses, and put the private declarations in a `*-internal.h` file. Your production `.cc` files and your tests are allowed to include this internal header, but your clients are not. This way, you can fully test your internal implementation without leaking it to your clients.

* Private class members are only accessible from within the class or by friends. To access a class' private members, you can declare your test fixture as a friend to the class and define accessors in your fixture. Tests using the fixture can then access the private members of your production class via the accessors in the fixture. Note that even though your fixture is a friend to your production class, your tests are not automatically friends to it, as they are technically defined in sub-classes of the fixture.

  Another way to test private members is to refactor them into an implementation class, which is then declared in a `*-internal.h` file. Your clients aren't allowed to include this header but your tests can. This is called the [Pimpl](https://www.gamedev.net/articles/programming/general-and-gameplay-programming/the-c-pimpl-r1794/) (Private Implementation) idiom.

  Or, you can declare an individual test as a friend of your class by adding this line in the class body:

```c++
FRIEND_TEST(TestCaseName, TestName);
```

For example,

```c++
// foo.h
#include "gtest/gtest_prod.h"

class Foo {
  ...
 private:
  FRIEND_TEST(FooTest, BarReturnsZeroOnNull);

  int Bar(void* x);
};

// foo_test.cc
...
TEST(FooTest, BarReturnsZeroOnNull) {
  Foo foo;
  EXPECT_EQ(0, foo.Bar(NULL));  // Uses Foo's private member Bar().
}
```

Pay special attention when your class is defined in a namespace, as you should define your test fixtures and tests in the same namespace if you want them to be friends of your class. For example, if the code to be tested looks like:

```c++
namespace my_namespace {

class Foo {
  friend class FooTest;
  FRIEND_TEST(FooTest, Bar);
  FRIEND_TEST(FooTest, Baz);
  ... definition of the class Foo ...
};

}  // namespace my_namespace
```

Your test code should be something like:

```c++
namespace my_namespace {

class FooTest : public ::testing::Test {
 protected:
  ...
};

TEST_F(FooTest, Bar) { ... }
TEST_F(FooTest, Baz) { ... }

}  // namespace my_namespace
```

## "Catching" Failures

If you are building a testing utility on top of googletest, you'll want to test your utility. What framework would you use to test it? googletest, of course.

The challenge is to verify that your testing utility reports failures correctly. In frameworks that report a failure by throwing an exception, you could catch the exception and assert on it. But googletest doesn't use exceptions, so how do we test that a piece of code generates an expected failure?

`"gtest/gtest-spi.h"` contains some constructs to do this. After #including this header, you can use

```c++
EXPECT_FATAL_FAILURE(statement, substring);
```

to assert that `statement` generates a fatal (e.g. `ASSERT_*`) failure in the current thread whose message contains the given `substring`, or use

```c++
EXPECT_NONFATAL_FAILURE(statement, substring);
```

if you are expecting a non-fatal (e.g. `EXPECT_*`) failure.
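For instance, here is a minimal sketch of testing a custom assertion helper with these macros; the `VerifyPositive()` helper and its message are hypothetical:

```c++
#include "gtest/gtest-spi.h"

// A hypothetical test utility that flags a non-fatal failure for
// non-positive values.
void VerifyPositive(int n) {
  EXPECT_GT(n, 0) << "value must be positive";
}

// Meta-test: verifies that the utility reports the expected failure.
TEST(VerifyPositiveTest, FlagsNonPositiveValues) {
  EXPECT_NONFATAL_FAILURE(VerifyPositive(-5), "must be positive");
}
```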
Only failures in the current thread are checked to determine the result of this type of expectation. If `statement` creates new threads, failures in these threads are also ignored. If you want to catch failures in other threads as well, use one of the following macros instead:

```c++
EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substring);
EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substring);
```

NOTE: Assertions from multiple threads are currently not supported on Windows.

For technical reasons, there are some caveats:

1. You cannot stream a failure message to either macro.
1. `statement` in `EXPECT_FATAL_FAILURE{_ON_ALL_THREADS}()` cannot reference local non-static variables or non-static members of `this` object.
1. `statement` in `EXPECT_FATAL_FAILURE{_ON_ALL_THREADS}()` cannot return a value.

## Getting the Current Test's Name

Sometimes a function may need to know the name of the currently running test. For example, you may be using the `SetUp()` method of your test fixture to set the golden file name based on which test is running. The `::testing::TestInfo` class has this information:

```c++
namespace testing {

class TestInfo {
 public:
  // Returns the test case name and the test name, respectively.
  //
  // Do NOT delete or free the return value - it's managed by the
  // TestInfo class.
  const char* test_case_name() const;
  const char* name() const;
};

}
```

To obtain a `TestInfo` object for the currently running test, call `current_test_info()` on the `UnitTest` singleton object:

```c++
// Gets information about the currently running test.
// Do NOT delete the returned object - it's managed by the UnitTest class.
const ::testing::TestInfo* const test_info =
    ::testing::UnitTest::GetInstance()->current_test_info();

printf("We are in test %s of test case %s.\n",
       test_info->name(),
       test_info->test_case_name());
```

`current_test_info()` returns a null pointer if no test is running. In particular, you cannot find the test case name in `SetUpTestCase()`, `TearDownTestCase()` (where you know the test case name implicitly), or functions called from them.

**Availability**: Linux, Windows, Mac.

## Extending googletest by Handling Test Events

googletest provides an **event listener API** to let you receive notifications about the progress of a test program and test failures. The events you can listen to include the start and end of the test program, a test case, or a test method, among others. You may use this API to augment or replace the standard console output, replace the XML output, or provide a completely different form of output, such as a GUI or a database. You can also use test events as checkpoints to implement a resource leak checker, for example.

**Availability**: Linux, Windows, Mac.

### Defining Event Listeners

To define an event listener, you subclass either `testing::TestEventListener` or `testing::EmptyTestEventListener`. The former is an (abstract) interface, where *each pure virtual method can be overridden to handle a test event* (for example, when a test starts, the `OnTestStart()` method will be called). The latter provides an empty implementation of all methods in the interface, such that a subclass only needs to override the methods it cares about.

When an event is fired, its context is passed to the handler function as an argument.
The following argument types are used:

* `UnitTest` reflects the state of the entire test program,
* `TestCase` has information about a test case, which can contain one or more tests,
* `TestInfo` contains the state of a test, and
* `TestPartResult` represents the result of a test assertion.

An event handler function can examine the argument it receives to find out interesting information about the event and the test program's state. Here's an example:

```c++
class MinimalistPrinter : public ::testing::EmptyTestEventListener {
  // Called before a test starts.
  virtual void OnTestStart(const ::testing::TestInfo& test_info) {
    printf("*** Test %s.%s starting.\n",
           test_info.test_case_name(), test_info.name());
  }

  // Called after a failed assertion or a SUCCEED() invocation.
  virtual void OnTestPartResult(
      const ::testing::TestPartResult& test_part_result) {
    printf("%s in %s:%d\n%s\n",
           test_part_result.failed() ? "*** Failure" : "Success",
           test_part_result.file_name(),
           test_part_result.line_number(),
           test_part_result.summary());
  }

  // Called after a test ends.
  virtual void OnTestEnd(const ::testing::TestInfo& test_info) {
    printf("*** Test %s.%s ending.\n",
           test_info.test_case_name(), test_info.name());
  }
};
```

### Using Event Listeners

To use the event listener you have defined, add an instance of it to the googletest event listener list (represented by the class `TestEventListeners` - note the "s" at the end of the name) in your `main()` function, before calling `RUN_ALL_TESTS()`:

```c++
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  // Gets hold of the event listener list.
  ::testing::TestEventListeners& listeners =
      ::testing::UnitTest::GetInstance()->listeners();
  // Adds a listener to the end.  googletest takes ownership.
  listeners.Append(new MinimalistPrinter);
  return RUN_ALL_TESTS();
}
```

There's only one problem: the default test result printer is still in effect, so its output will mingle with the output from your minimalist printer. To suppress the default printer, just release it from the event listener list and delete it. You can do so by adding one line:

```c++
  ...
  delete listeners.Release(listeners.default_result_printer());
  listeners.Append(new MinimalistPrinter);
  return RUN_ALL_TESTS();
```

Now, sit back and enjoy a completely different output from your tests. For more details, see sample9_unittest.cc.

You may append more than one listener to the list. When an `On*Start()` or `OnTestPartResult()` event is fired, the listeners will receive it in the order they appear in the list (since new listeners are added to the end of the list, the default text printer and the default XML generator will receive the event first). An `On*End()` event will be received by the listeners in the *reverse* order. This allows output by listeners added later to be framed by output from listeners added earlier.

### Generating Failures in Listeners

You may use failure-raising macros (`EXPECT_*()`, `ASSERT_*()`, `FAIL()`, etc) when processing an event. There are some restrictions:

1. You cannot generate any failure in `OnTestPartResult()` (otherwise it will cause `OnTestPartResult()` to be called recursively).
1. A listener that handles `OnTestPartResult()` is not allowed to generate any failure.

When you add listeners to the listener list, you should put listeners that handle `OnTestPartResult()` *before* listeners that can generate failures. This ensures that failures generated by the latter are attributed to the right test by the former.
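A minimal sketch of that ordering; both listener classes here are hypothetical:

```c++
// Handles OnTestPartResult(); must NOT generate failures itself.
class ResultRecorder : public ::testing::EmptyTestEventListener {
  void OnTestPartResult(const ::testing::TestPartResult& result) override {
    // Record the result somewhere (database, log file, ...).
  }
};

// May raise failures from its event handlers, e.g. in OnTestEnd().
class ResourceLeakChecker : public ::testing::EmptyTestEventListener {
  void OnTestEnd(const ::testing::TestInfo& info) override {
    EXPECT_TRUE(true) << "a real leak check would go here";
  }
};

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  ::testing::TestEventListeners& listeners =
      ::testing::UnitTest::GetInstance()->listeners();
  // The OnTestPartResult() handler is appended first...
  listeners.Append(new ResultRecorder);
  // ...so failures raised by this later listener are attributed correctly.
  listeners.Append(new ResourceLeakChecker);
  return RUN_ALL_TESTS();
}
```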
For a sample of a failure-raising listener, see sample10_unittest.cc.

## Running Test Programs: Advanced Options

googletest test programs are ordinary executables. Once built, you can run them directly and affect their behavior via the following environment variables and/or command line flags. For the flags to work, your programs must call `::testing::InitGoogleTest()` before calling `RUN_ALL_TESTS()`.

To see a list of supported flags and their usage, please run your test program with the `--help` flag. You can also use `-h`, `-?`, or `/?` for short.

If an option is specified both by an environment variable and by a flag, the latter takes precedence.

### Selecting Tests

#### Listing Test Names

Sometimes it is necessary to list the available tests in a program before running them so that a filter may be applied if needed. Including the flag `--gtest_list_tests` overrides all other flags and lists tests in the following format:

```none
TestCase1.
  TestName1
  TestName2
TestCase2.
  TestName
```

None of the tests listed are actually run if the flag is provided. There is no corresponding environment variable for this flag.

**Availability**: Linux, Windows, Mac.

#### Running a Subset of the Tests

By default, a googletest program runs all tests the user has defined. Sometimes, you want to run only a subset of the tests (e.g. for debugging or quickly verifying a change). If you set the `GTEST_FILTER` environment variable or the `--gtest_filter` flag to a filter string, googletest will only run the tests whose full names (in the form of `TestCaseName.TestName`) match the filter.

The format of a filter is a '`:`'-separated list of wildcard patterns (called the *positive patterns*) optionally followed by a '`-`' and another '`:`'-separated pattern list (called the *negative patterns*). A test matches the filter if and only if it matches any of the positive patterns but does not match any of the negative patterns.

A pattern may contain `'*'` (matches any string) or `'?'` (matches any single character). For convenience, the filter `'*-NegativePatterns'` can also be written as `'-NegativePatterns'`.

For example:

* `./foo_test` Has no flag, and thus runs all its tests.
* `./foo_test --gtest_filter=*` Also runs everything, due to the single match-everything `*` value.
* `./foo_test --gtest_filter=FooTest.*` Runs everything in test case `FooTest`.
* `./foo_test --gtest_filter=*Null*:*Constructor*` Runs any test whose full name contains either `"Null"` or `"Constructor"`.
* `./foo_test --gtest_filter=-*DeathTest.*` Runs all non-death tests.
* `./foo_test --gtest_filter=FooTest.*-FooTest.Bar` Runs everything in test case `FooTest` except `FooTest.Bar`.
* `./foo_test --gtest_filter=FooTest.*:BarTest.*-FooTest.Bar:BarTest.Foo` Runs everything in test case `FooTest` except `FooTest.Bar` and everything in test case `BarTest` except `BarTest.Foo`.

#### Temporarily Disabling Tests

If you have a broken test that you cannot fix right away, you can add the `DISABLED_` prefix to its name. This will exclude it from execution. This is better than commenting out the code or using `#if 0`, as disabled tests are still compiled (and thus won't rot).

If you need to disable all tests in a test case, you can either add `DISABLED_` to the front of the name of each test, or alternatively add it to the front of the test case name.

For example, the following tests won't be run by googletest, even though they will still be compiled:

```c++
// Tests that Foo does Abc.
TEST(FooTest, DISABLED_DoesAbc) {
  ...
}

class DISABLED_BarTest : public ::testing::Test { ... };

// Tests that Bar does Xyz.
TEST_F(DISABLED_BarTest, DoesXyz) { ... }
```

NOTE: This feature should only be used for temporary pain-relief. You still have to fix the disabled tests at a later date. As a reminder, googletest will print a banner warning you if a test program contains any disabled tests.

TIP: You can easily count the number of disabled tests you have using `gsearch` and/or `grep`. This number can be used as a metric for improving your test quality.

**Availability**: Linux, Windows, Mac.

#### Temporarily Enabling Disabled Tests

To include disabled tests in test execution, just invoke the test program with the `--gtest_also_run_disabled_tests` flag or set the `GTEST_ALSO_RUN_DISABLED_TESTS` environment variable to a value other than `0`. You can combine this with the `--gtest_filter` flag to further select which disabled tests to run.

**Availability**: Linux, Windows, Mac.

### Repeating the Tests

Once in a while you'll run into a test whose result is hit-or-miss. Perhaps it will fail only 1% of the time, making it rather hard to reproduce the bug under a debugger. This can be a major source of frustration.

The `--gtest_repeat` flag allows you to repeat all (or selected) test methods in a program many times. Hopefully, a flaky test will eventually fail and give you a chance to debug. Here's how to use it:

```none
$ foo_test --gtest_repeat=1000
Repeat foo_test 1000 times and don't stop at failures.

$ foo_test --gtest_repeat=-1
A negative count means repeating forever.

$ foo_test --gtest_repeat=1000 --gtest_break_on_failure
Repeat foo_test 1000 times, stopping at the first failure.  This
is especially useful when running under a debugger: when the test
fails, it will drop into the debugger and you can then inspect
variables and stacks.

$ foo_test --gtest_repeat=1000 --gtest_filter=FooBar.*
Repeat the tests whose name matches the filter 1000 times.
```

If your test program contains [global set-up/tear-down](#global-set-up-and-tear-down) code, it will be repeated in each iteration as well, as the flakiness may be in it.

You can also specify the repeat count by setting the `GTEST_REPEAT` environment variable.

**Availability**: Linux, Windows, Mac.

### Shuffling the Tests

You can specify the `--gtest_shuffle` flag (or set the `GTEST_SHUFFLE` environment variable to `1`) to run the tests in a program in a random order. This helps to reveal bad dependencies between tests.

By default, googletest uses a random seed calculated from the current time. Therefore you'll get a different order every time. The console output includes the random seed value, such that you can reproduce an order-related test failure later. To specify the random seed explicitly, use the `--gtest_random_seed=SEED` flag (or set the `GTEST_RANDOM_SEED` environment variable), where `SEED` is an integer in the range [0, 99999]. The seed value 0 is special: it tells googletest to do the default behavior of calculating the seed from the current time.

If you combine this with `--gtest_repeat=N`, googletest will pick a different random seed and re-shuffle the tests in each iteration.

**Availability**: Linux, Windows, Mac.

### Controlling Test Output

#### Colored Terminal Output

googletest can use colors in its terminal output to make it easier to spot the important information:
```none
...
[----------] 1 test from FooTest
[ RUN      ] FooTest.DoesAbc
[       OK ] FooTest.DoesAbc
[----------] 2 tests from BarTest
[ RUN      ] BarTest.HasXyzProperty
[       OK ] BarTest.HasXyzProperty
[ RUN      ] BarTest.ReturnsTrueOnSuccess
... some error messages ...
[  FAILED  ] BarTest.ReturnsTrueOnSuccess
...
[==========] 30 tests from 14 test cases ran.
[  PASSED  ] 28 tests.
[  FAILED  ] 2 tests, listed below:
[  FAILED  ] BarTest.ReturnsTrueOnSuccess
[  FAILED  ] AnotherTest.DoesXyz
 2 FAILED TESTS
```

You can set the `GTEST_COLOR` environment variable or the `--gtest_color` command line flag to `yes`, `no`, or `auto` (the default) to enable colors, disable colors, or let googletest decide. When the value is `auto`, googletest will use colors if and only if the output goes to a terminal and (on non-Windows platforms) the `TERM` environment variable is set to `xterm` or `xterm-color`.

**Availability**: Linux, Windows, Mac.

#### Suppressing the Elapsed Time

By default, googletest prints the time it takes to run each test. To disable that, run the test program with the `--gtest_print_time=0` command line flag, or set the `GTEST_PRINT_TIME` environment variable to `0`.

**Availability**: Linux, Windows, Mac.

#### Suppressing UTF-8 Text Output

In case of assertion failures, googletest prints expected and actual values of type `string` both as hex-encoded strings as well as in readable UTF-8 text if they contain valid non-ASCII UTF-8 characters. If you want to suppress the UTF-8 text because, for example, you don't have a UTF-8 compatible output medium, run the test program with `--gtest_print_utf8=0` or set the `GTEST_PRINT_UTF8` environment variable to `0`.

**Availability**: Linux, Windows, Mac.

#### Generating an XML Report

googletest can emit a detailed XML report to a file in addition to its normal textual output. The report contains the duration of each test, and thus can help you identify slow tests. The report is also used by the http://unittest dashboard to show per-test-method error messages.

To generate the XML report, set the `GTEST_OUTPUT` environment variable or the `--gtest_output` flag to the string `"xml:path_to_output_file"`, which will create the file at the given location. You can also just use the string `"xml"`, in which case the output can be found in the `test_detail.xml` file in the current directory.

If you specify a directory (for example, `"xml:output/directory/"` on Linux or `"xml:output\directory\"` on Windows), googletest will create the XML file in that directory, named after the test executable (e.g. `foo_test.xml` for test program `foo_test` or `foo_test.exe`). If the file already exists (perhaps left over from a previous run), googletest will pick a different name (e.g. `foo_test_1.xml`) to avoid overwriting it.

The report is based on the `junitreport` Ant task. Since that format was originally intended for Java, a little interpretation is required to make it apply to googletest tests, as shown here:

```xml
<testsuites name="AllTests" ...>
  <testsuite name="test_case_name" ...>
    <testcase name="test_name" ...>
      <failure message="..."/>
    </testcase>
  </testsuite>
</testsuites>
```

* The root `<testsuites>` element corresponds to the entire test program.
* `<testsuite>` elements correspond to googletest test cases.
* `<testcase>` elements correspond to googletest test functions.

For instance, the following program

```c++
TEST(MathTest, Addition) { ... }
TEST(MathTest, Subtraction) { ... }
TEST(LogicTest, NonContradiction) { ... }
```

could generate this report (the element contents mirror the data in the JSON example below):

```xml
<testsuites tests="3" failures="1" errors="0" time="0.035" timestamp="2011-10-31T18:52:42" name="AllTests">
  <testsuite name="MathTest" tests="2" failures="1" errors="0" time="0.015">
    <testcase name="Addition" status="run" time="0.007" classname="">
      <failure message="Value of: add(1, 1)&#x0A;  Actual: 3&#x0A;Expected: 2" type=""/>
      <failure message="Value of: add(1, -1)&#x0A;  Actual: 1&#x0A;Expected: 0" type=""/>
    </testcase>
    <testcase name="Subtraction" status="run" time="0.005" classname="" />
  </testsuite>
  <testsuite name="LogicTest" tests="1" failures="0" errors="0" time="0.005">
    <testcase name="NonContradiction" status="run" time="0.005" classname="" />
  </testsuite>
</testsuites>
```

Things to note:

* The `tests` attribute of a `<testsuites>` or `<testsuite>` element tells how many test functions the googletest program or test case contains, while the `failures` attribute tells how many of them failed.
* The `time` attribute expresses the duration of the test, test case, or entire test program in seconds.
* The `timestamp` attribute records the local date and time of the test execution.
* Each `<failure>` element corresponds to a single failed googletest assertion.

**Availability**: Linux, Windows, Mac.

#### Generating a JSON Report

googletest can also emit a JSON report as an alternative format to XML.
To generate the JSON report, set the `GTEST_OUTPUT` environment variable or the `--gtest_output` flag to the string `"json:path_to_output_file"`, which will create the file at the given location. You can also just use the string `"json"`, in which case the output can be found in the `test_detail.json` file in the current directory.

The report format conforms to the following JSON Schema:

```json
{
  "$schema": "http://json-schema.org/schema#",
  "type": "object",
  "definitions": {
    "TestCase": {
      "type": "object",
      "properties": {
        "name": { "type": "string" },
        "tests": { "type": "integer" },
        "failures": { "type": "integer" },
        "disabled": { "type": "integer" },
        "time": { "type": "string" },
        "testsuite": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/TestInfo"
          }
        }
      }
    },
    "TestInfo": {
      "type": "object",
      "properties": {
        "name": { "type": "string" },
        "status": {
          "type": "string",
          "enum": ["RUN", "NOTRUN"]
        },
        "time": { "type": "string" },
        "classname": { "type": "string" },
        "failures": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/Failure"
          }
        }
      }
    },
    "Failure": {
      "type": "object",
      "properties": {
        "failures": { "type": "string" },
        "type": { "type": "string" }
      }
    }
  },
  "properties": {
    "tests": { "type": "integer" },
    "failures": { "type": "integer" },
    "disabled": { "type": "integer" },
    "errors": { "type": "integer" },
    "timestamp": {
      "type": "string",
      "format": "date-time"
    },
    "time": { "type": "string" },
    "name": { "type": "string" },
    "testsuites": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/TestCase"
      }
    }
  }
}
```

The report uses the format that conforms to the following Proto3 using the [JSON encoding](https://developers.google.com/protocol-buffers/docs/proto3#json):

```proto
syntax = "proto3";

package googletest;

import "google/protobuf/timestamp.proto";
import "google/protobuf/duration.proto";

message UnitTest {
  int32 tests = 1;
  int32 failures = 2;
  int32 disabled = 3;
  int32 errors = 4;
  google.protobuf.Timestamp timestamp = 5;
  google.protobuf.Duration time = 6;
  string name = 7;
  repeated TestCase testsuites = 8;
}

message TestCase {
  string name = 1;
  int32 tests = 2;
  int32 failures = 3;
  int32 disabled = 4;
  int32 errors = 5;
  google.protobuf.Duration time = 6;
  repeated TestInfo testsuite = 7;
}

message TestInfo {
  string name = 1;
  enum Status {
    RUN = 0;
    NOTRUN = 1;
  }
  Status status = 2;
  google.protobuf.Duration time = 3;
  string classname = 4;
  message Failure {
    string failures = 1;
    string type = 2;
  }
  repeated Failure failures = 5;
}
```

For instance, the following program

```c++
TEST(MathTest, Addition) { ... }
TEST(MathTest, Subtraction) { ... }
TEST(LogicTest, NonContradiction) { ... }
```

could generate this report:

```json
{
  "tests": 3,
  "failures": 1,
  "errors": 0,
  "time": "0.035s",
  "timestamp": "2011-10-31T18:52:42Z",
  "name": "AllTests",
  "testsuites": [
    {
      "name": "MathTest",
      "tests": 2,
      "failures": 1,
      "errors": 0,
      "time": "0.015s",
      "testsuite": [
        {
          "name": "Addition",
          "status": "RUN",
          "time": "0.007s",
          "classname": "",
          "failures": [
            {
              "message": "Value of: add(1, 1)\x0A  Actual: 3\x0AExpected: 2",
              "type": ""
            },
            {
              "message": "Value of: add(1, -1)\x0A  Actual: 1\x0AExpected: 0",
              "type": ""
            }
          ]
        },
        {
          "name": "Subtraction",
          "status": "RUN",
          "time": "0.005s",
          "classname": ""
        }
      ]
    },
    {
      "name": "LogicTest",
      "tests": 1,
      "failures": 0,
      "errors": 0,
      "time": "0.005s",
      "testsuite": [
        {
          "name": "NonContradiction",
          "status": "RUN",
          "time": "0.005s",
          "classname": ""
        }
      ]
    }
  ]
}
```

IMPORTANT: The exact format of the JSON document is subject to change.

**Availability**: Linux, Windows, Mac.
### Controlling How Failures Are Reported

#### Turning Assertion Failures into Break-Points

When running test programs under a debugger, it's very convenient if the debugger can catch an assertion failure and automatically drop into interactive mode. googletest's *break-on-failure* mode supports this behavior.

To enable it, set the `GTEST_BREAK_ON_FAILURE` environment variable to a value other than `0`. Alternatively, you can use the `--gtest_break_on_failure` command line flag.

**Availability**: Linux, Windows, Mac.

#### Disabling Catching Test-Thrown Exceptions

googletest can be used either with or without exceptions enabled. If a test throws a C++ exception or (on Windows) a structured exception (SEH), by default googletest catches it, reports it as a test failure, and continues with the next test method. This maximizes the coverage of a test run. Also, on Windows an uncaught exception will cause a pop-up window, so catching the exceptions allows you to run the tests automatically.

When debugging the test failures, however, you may instead want the exceptions to be handled by the debugger, such that you can examine the call stack when an exception is thrown. To achieve that, set the `GTEST_CATCH_EXCEPTIONS` environment variable to `0`, or use the `--gtest_catch_exceptions=0` flag when running the tests.

**Availability**: Linux, Windows, Mac.

Index: head/contrib/googletest/googletest/src/gtest.cc
===================================================================
--- head/contrib/googletest/googletest/src/gtest.cc	(revision 345769)
+++ head/contrib/googletest/googletest/src/gtest.cc	(revision 345770)
@@ -1,6081 +1,6095 @@
// Copyright 2005, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// // The Google C++ Testing and Mocking Framework (Google Test) #include "gtest/gtest.h" #include "gtest/internal/custom/gtest.h" #include "gtest/gtest-spi.h" #include #include #include #include #include #include #include #include #include #include #include #include #include #include // NOLINT #include #include #if GTEST_OS_LINUX // FIXME: Use autoconf to detect availability of // gettimeofday(). # define GTEST_HAS_GETTIMEOFDAY_ 1 # include // NOLINT # include // NOLINT # include // NOLINT // Declares vsnprintf(). This header is not available on Windows. # include // NOLINT # include // NOLINT # include // NOLINT # include // NOLINT # include #elif GTEST_OS_SYMBIAN # define GTEST_HAS_GETTIMEOFDAY_ 1 # include // NOLINT #elif GTEST_OS_ZOS # define GTEST_HAS_GETTIMEOFDAY_ 1 # include // NOLINT // On z/OS we additionally need strings.h for strcasecmp. # include // NOLINT #elif GTEST_OS_WINDOWS_MOBILE // We are on Windows CE. # include // NOLINT # undef min #elif GTEST_OS_WINDOWS // We are on Windows proper. # include // NOLINT # include // NOLINT # include // NOLINT # include // NOLINT # if GTEST_OS_WINDOWS_MINGW // MinGW has gettimeofday() but not _ftime64(). // FIXME: Use autoconf to detect availability of // gettimeofday(). // FIXME: There are other ways to get the time on // Windows, like GetTickCount() or GetSystemTimeAsFileTime(). MinGW // supports these. consider using them instead. # define GTEST_HAS_GETTIMEOFDAY_ 1 # include // NOLINT # endif // GTEST_OS_WINDOWS_MINGW // cpplint thinks that the header is already included, so we want to // silence it. # include // NOLINT # undef min #else // Assume other platforms have gettimeofday(). // FIXME: Use autoconf to detect availability of // gettimeofday(). # define GTEST_HAS_GETTIMEOFDAY_ 1 // cpplint thinks that the header is already included, so we want to // silence it. # include // NOLINT # include // NOLINT #endif // GTEST_OS_LINUX #if GTEST_HAS_EXCEPTIONS # include #endif #if GTEST_CAN_STREAM_RESULTS_ # include // NOLINT # include // NOLINT # include // NOLINT # include // NOLINT #endif #include "src/gtest-internal-inl.h" #if GTEST_OS_WINDOWS # define vsnprintf _vsnprintf #endif // GTEST_OS_WINDOWS #if GTEST_OS_MAC #ifndef GTEST_OS_IOS #include #endif #endif #if GTEST_HAS_ABSL #include "absl/debugging/failure_signal_handler.h" #include "absl/debugging/stacktrace.h" #include "absl/debugging/symbolize.h" #include "absl/strings/str_cat.h" #endif // GTEST_HAS_ABSL namespace testing { using internal::CountIf; using internal::ForEach; using internal::GetElementOr; using internal::Shuffle; // Constants. // A test whose test case name or test name matches this filter is // disabled and not run. static const char kDisableTestFilter[] = "DISABLED_*:*/DISABLED_*"; // A test case whose name matches this filter is considered a death // test case and will be run before test cases whose name doesn't // match this filter. static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*"; // A test filter that matches everything. static const char kUniversalFilter[] = "*"; // The default output format. static const char kDefaultOutputFormat[] = "xml"; // The default output file. static const char kDefaultOutputFile[] = "test_detail"; // The environment variable name for the test shard index. static const char kTestShardIndex[] = "GTEST_SHARD_INDEX"; // The environment variable name for the total number of test shards. static const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS"; // The environment variable name for the test shard status file. 
static const char kTestShardStatusFile[] = "GTEST_SHARD_STATUS_FILE"; namespace internal { // The text used in failure messages to indicate the start of the // stack trace. const char kStackTraceMarker[] = "\nStack trace:\n"; // g_help_flag is true iff the --help flag or an equivalent form is // specified on the command line. bool g_help_flag = false; // Utility function to open a file for writing. static FILE* OpenFileForWriting(const std::string& output_file) { FILE* fileout = NULL; FilePath output_file_path(output_file); FilePath output_dir(output_file_path.RemoveFileName()); if (output_dir.CreateDirectoriesRecursively()) { fileout = posix::FOpen(output_file.c_str(), "w"); } if (fileout == NULL) { GTEST_LOG_(FATAL) << "Unable to open file \"" << output_file << "\""; } return fileout; } } // namespace internal // Bazel passes in the argument to '--test_filter' via the TESTBRIDGE_TEST_ONLY // environment variable. static const char* GetDefaultFilter() { const char* const testbridge_test_only = internal::posix::GetEnv("TESTBRIDGE_TEST_ONLY"); if (testbridge_test_only != NULL) { return testbridge_test_only; } return kUniversalFilter; } GTEST_DEFINE_bool_( also_run_disabled_tests, internal::BoolFromGTestEnv("also_run_disabled_tests", false), "Run disabled tests too, in addition to the tests normally being run."); GTEST_DEFINE_bool_( break_on_failure, internal::BoolFromGTestEnv("break_on_failure", false), "True iff a failed assertion should be a debugger break-point."); GTEST_DEFINE_bool_( catch_exceptions, internal::BoolFromGTestEnv("catch_exceptions", true), "True iff " GTEST_NAME_ " should catch exceptions and treat them as test failures."); GTEST_DEFINE_string_( color, internal::StringFromGTestEnv("color", "auto"), "Whether to use colors in the output. Valid values: yes, no, " "and auto. 'auto' means to use colors if the output is " "being sent to a terminal and the TERM environment variable " "is set to a terminal type that supports colors."); GTEST_DEFINE_string_( filter, internal::StringFromGTestEnv("filter", GetDefaultFilter()), "A colon-separated list of glob (not regex) patterns " "for filtering the tests to run, optionally followed by a " "'-' and a : separated list of negative patterns (tests to " "exclude). A test is run if it matches one of the positive " "patterns and does not match any of the negative patterns."); GTEST_DEFINE_bool_( install_failure_signal_handler, internal::BoolFromGTestEnv("install_failure_signal_handler", false), "If true and supported on the current platform, " GTEST_NAME_ " should " "install a signal handler that dumps debugging information when fatal " "signals are raised."); GTEST_DEFINE_bool_(list_tests, false, "List all tests without running them."); // The net priority order after flag processing is thus: // --gtest_output command line flag // GTEST_OUTPUT environment variable // XML_OUTPUT_FILE environment variable // '' GTEST_DEFINE_string_( output, internal::StringFromGTestEnv("output", internal::OutputFlagAlsoCheckEnvVar().c_str()), "A format (defaults to \"xml\" but can be specified to be \"json\"), " "optionally followed by a colon and an output file name or directory. " "A directory is indicated by a trailing pathname separator. " "Examples: \"xml:filename.xml\", \"xml::directoryname/\". 
" "If a directory is specified, output files will be created " "within that directory, with file-names based on the test " "executable's name and, if necessary, made unique by adding " "digits."); GTEST_DEFINE_bool_( print_time, internal::BoolFromGTestEnv("print_time", true), "True iff " GTEST_NAME_ " should display elapsed time in text output."); GTEST_DEFINE_bool_( print_utf8, internal::BoolFromGTestEnv("print_utf8", true), "True iff " GTEST_NAME_ " prints UTF8 characters as text."); GTEST_DEFINE_int32_( random_seed, internal::Int32FromGTestEnv("random_seed", 0), "Random number seed to use when shuffling test orders. Must be in range " "[1, 99999], or 0 to use a seed based on the current time."); GTEST_DEFINE_int32_( repeat, internal::Int32FromGTestEnv("repeat", 1), "How many times to repeat each test. Specify a negative number " "for repeating forever. Useful for shaking out flaky tests."); GTEST_DEFINE_bool_( show_internal_stack_frames, false, "True iff " GTEST_NAME_ " should include internal stack frames when " "printing test failure stack traces."); GTEST_DEFINE_bool_( shuffle, internal::BoolFromGTestEnv("shuffle", false), "True iff " GTEST_NAME_ " should randomize tests' order on every run."); GTEST_DEFINE_int32_( stack_trace_depth, internal::Int32FromGTestEnv("stack_trace_depth", kMaxStackTraceDepth), "The maximum number of stack frames to print when an " "assertion fails. The valid range is 0 through 100, inclusive."); GTEST_DEFINE_string_( stream_result_to, internal::StringFromGTestEnv("stream_result_to", ""), "This flag specifies the host name and the port number on which to stream " "test results. Example: \"localhost:555\". The flag is effective only on " "Linux."); GTEST_DEFINE_bool_( throw_on_failure, internal::BoolFromGTestEnv("throw_on_failure", false), "When this flag is specified, a failed assertion will throw an exception " "if exceptions are enabled or exit the program with a non-zero code " "otherwise. For use with an external test framework."); #if GTEST_USE_OWN_FLAGFILE_FLAG_ GTEST_DEFINE_string_( flagfile, internal::StringFromGTestEnv("flagfile", ""), "This flag specifies the flagfile to read command-line flags from."); #endif // GTEST_USE_OWN_FLAGFILE_FLAG_ namespace internal { // Generates a random number from [0, range), using a Linear // Congruential Generator (LCG). Crashes if 'range' is 0 or greater // than kMaxRange. UInt32 Random::Generate(UInt32 range) { // These constants are the same as are used in glibc's rand(3). // Use wider types than necessary to prevent unsigned overflow diagnostics. state_ = static_cast(1103515245ULL*state_ + 12345U) % kMaxRange; GTEST_CHECK_(range > 0) << "Cannot generate a number in the range [0, 0)."; GTEST_CHECK_(range <= kMaxRange) << "Generation of a number in [0, " << range << ") was requested, " << "but this can only generate numbers in [0, " << kMaxRange << ")."; // Converting via modulus introduces a bit of downward bias, but // it's simple, and a linear congruential generator isn't too good // to begin with. return state_ % range; } // GTestIsInitialized() returns true iff the user has initialized // Google Test. Useful for catching the user mistake of not initializing // Google Test before calling RUN_ALL_TESTS(). static bool GTestIsInitialized() { return GetArgvs().size() > 0; } // Iterates over a vector of TestCases, keeping a running sum of the // results of calling a given int-returning method on each. // Returns the sum. 
static int SumOverTestCaseList(const std::vector& case_list, int (TestCase::*method)() const) { int sum = 0; for (size_t i = 0; i < case_list.size(); i++) { sum += (case_list[i]->*method)(); } return sum; } // Returns true iff the test case passed. static bool TestCasePassed(const TestCase* test_case) { return test_case->should_run() && test_case->Passed(); } // Returns true iff the test case failed. static bool TestCaseFailed(const TestCase* test_case) { return test_case->should_run() && test_case->Failed(); } // Returns true iff test_case contains at least one test that should // run. static bool ShouldRunTestCase(const TestCase* test_case) { return test_case->should_run(); } // AssertHelper constructor. AssertHelper::AssertHelper(TestPartResult::Type type, const char* file, int line, const char* message) : data_(new AssertHelperData(type, file, line, message)) { } AssertHelper::~AssertHelper() { delete data_; } // Message assignment, for assertion streaming support. void AssertHelper::operator=(const Message& message) const { UnitTest::GetInstance()-> AddTestPartResult(data_->type, data_->file, data_->line, AppendUserMessage(data_->message, message), UnitTest::GetInstance()->impl() ->CurrentOsStackTraceExceptTop(1) // Skips the stack frame for this function itself. ); // NOLINT } // Mutex for linked pointers. GTEST_API_ GTEST_DEFINE_STATIC_MUTEX_(g_linked_ptr_mutex); // A copy of all command line arguments. Set by InitGoogleTest(). static ::std::vector g_argvs; ::std::vector GetArgvs() { #if defined(GTEST_CUSTOM_GET_ARGVS_) // GTEST_CUSTOM_GET_ARGVS_() may return a container of std::string or // ::string. This code converts it to the appropriate type. const auto& custom = GTEST_CUSTOM_GET_ARGVS_(); return ::std::vector(custom.begin(), custom.end()); #else // defined(GTEST_CUSTOM_GET_ARGVS_) return g_argvs; #endif // defined(GTEST_CUSTOM_GET_ARGVS_) } // Returns the current application's name, removing directory path if that // is present. FilePath GetCurrentExecutableName() { FilePath result; #if GTEST_OS_WINDOWS result.Set(FilePath(GetArgvs()[0]).RemoveExtension("exe")); #else result.Set(FilePath(GetArgvs()[0])); #endif // GTEST_OS_WINDOWS return result.RemoveDirectoryName(); } // Functions for processing the gtest_output flag. // Returns the output format, or "" for normal printed output. std::string UnitTestOptions::GetOutputFormat() { const char* const gtest_output_flag = GTEST_FLAG(output).c_str(); const char* const colon = strchr(gtest_output_flag, ':'); return (colon == NULL) ? std::string(gtest_output_flag) : std::string(gtest_output_flag, colon - gtest_output_flag); } // Returns the name of the requested output file, or the default if none // was explicitly specified. std::string UnitTestOptions::GetAbsolutePathToOutputFile() { const char* const gtest_output_flag = GTEST_FLAG(output).c_str(); std::string format = GetOutputFormat(); if (format.empty()) format = std::string(kDefaultOutputFormat); const char* const colon = strchr(gtest_output_flag, ':'); if (colon == NULL) return internal::FilePath::MakeFileName( internal::FilePath( UnitTest::GetInstance()->original_working_dir()), internal::FilePath(kDefaultOutputFile), 0, format.c_str()).string(); internal::FilePath output_name(colon + 1); if (!output_name.IsAbsolutePath()) // FIXME: on Windows \some\path is not an absolute // path (as its meaning depends on the current drive), yet the // following logic for turning it into an absolute path is wrong. // Fix it. 
output_name = internal::FilePath::ConcatPaths( internal::FilePath(UnitTest::GetInstance()->original_working_dir()), internal::FilePath(colon + 1)); if (!output_name.IsDirectory()) return output_name.string(); internal::FilePath result(internal::FilePath::GenerateUniqueFileName( output_name, internal::GetCurrentExecutableName(), GetOutputFormat().c_str())); return result.string(); } // Returns true iff the wildcard pattern matches the string. The // first ':' or '\0' character in pattern marks the end of it. // // This recursive algorithm isn't very efficient, but is clear and // works well enough for matching test names, which are short. bool UnitTestOptions::PatternMatchesString(const char *pattern, const char *str) { switch (*pattern) { case '\0': case ':': // Either ':' or '\0' marks the end of the pattern. return *str == '\0'; case '?': // Matches any single character. return *str != '\0' && PatternMatchesString(pattern + 1, str + 1); case '*': // Matches any string (possibly empty) of characters. return (*str != '\0' && PatternMatchesString(pattern, str + 1)) || PatternMatchesString(pattern + 1, str); default: // Non-special character. Matches itself. return *pattern == *str && PatternMatchesString(pattern + 1, str + 1); } } bool UnitTestOptions::MatchesFilter( const std::string& name, const char* filter) { const char *cur_pattern = filter; for (;;) { if (PatternMatchesString(cur_pattern, name.c_str())) { return true; } // Finds the next pattern in the filter. cur_pattern = strchr(cur_pattern, ':'); // Returns if no more pattern can be found. if (cur_pattern == NULL) { return false; } // Skips the pattern separator (the ':' character). cur_pattern++; } } // Returns true iff the user-specified filter matches the test case // name and the test name. bool UnitTestOptions::FilterMatchesTest(const std::string &test_case_name, const std::string &test_name) { const std::string& full_name = test_case_name + "." + test_name.c_str(); // Split --gtest_filter at '-', if there is one, to separate into // positive filter and negative filter portions const char* const p = GTEST_FLAG(filter).c_str(); const char* const dash = strchr(p, '-'); std::string positive; std::string negative; if (dash == NULL) { positive = GTEST_FLAG(filter).c_str(); // Whole string is a positive filter negative = ""; } else { positive = std::string(p, dash); // Everything up to the dash negative = std::string(dash + 1); // Everything after the dash if (positive.empty()) { // Treat '-test1' as the same as '*-test1' positive = kUniversalFilter; } } // A filter is a colon-separated list of patterns. It matches a // test if any pattern in it matches the test. return (MatchesFilter(full_name, positive.c_str()) && !MatchesFilter(full_name, negative.c_str())); } #if GTEST_HAS_SEH // Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the // given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise. // This function is useful as an __except condition. int UnitTestOptions::GTestShouldProcessSEH(DWORD exception_code) { // Google Test should handle a SEH exception if: // 1. the user wants it to, AND // 2. this is not a breakpoint exception, AND // 3. this is not a C++ exception (VC++ implements them via SEH, // apparently). // // SEH exception code for C++ exceptions. // (see http://support.microsoft.com/kb/185294 for more information). 
const DWORD kCxxExceptionCode = 0xe06d7363; bool should_handle = true; if (!GTEST_FLAG(catch_exceptions)) should_handle = false; else if (exception_code == EXCEPTION_BREAKPOINT) should_handle = false; else if (exception_code == kCxxExceptionCode) should_handle = false; return should_handle ? EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH; } #endif // GTEST_HAS_SEH } // namespace internal // The c'tor sets this object as the test part result reporter used by // Google Test. The 'result' parameter specifies where to report the // results. Intercepts only failures from the current thread. ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter( TestPartResultArray* result) : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD), result_(result) { Init(); } // The c'tor sets this object as the test part result reporter used by // Google Test. The 'result' parameter specifies where to report the // results. ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter( InterceptMode intercept_mode, TestPartResultArray* result) : intercept_mode_(intercept_mode), result_(result) { Init(); } void ScopedFakeTestPartResultReporter::Init() { internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); if (intercept_mode_ == INTERCEPT_ALL_THREADS) { old_reporter_ = impl->GetGlobalTestPartResultReporter(); impl->SetGlobalTestPartResultReporter(this); } else { old_reporter_ = impl->GetTestPartResultReporterForCurrentThread(); impl->SetTestPartResultReporterForCurrentThread(this); } } // The d'tor restores the test part result reporter used by Google Test // before. ScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter() { internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); if (intercept_mode_ == INTERCEPT_ALL_THREADS) { impl->SetGlobalTestPartResultReporter(old_reporter_); } else { impl->SetTestPartResultReporterForCurrentThread(old_reporter_); } } // Increments the test part result count and remembers the result. // This method is from the TestPartResultReporterInterface interface. void ScopedFakeTestPartResultReporter::ReportTestPartResult( const TestPartResult& result) { result_->Append(result); } namespace internal { // Returns the type ID of ::testing::Test. We should always call this // instead of GetTypeId< ::testing::Test>() to get the type ID of // testing::Test. This is to work around a suspected linker bug when // using Google Test as a framework on Mac OS X. The bug causes // GetTypeId< ::testing::Test>() to return different values depending // on whether the call is from the Google Test framework itself or // from user test code. GetTestTypeId() is guaranteed to always // return the same value, as it always calls GetTypeId<>() from the // gtest.cc, which is within the Google Test framework. TypeId GetTestTypeId() { return GetTypeId(); } // The value of GetTestTypeId() as seen from within the Google Test // library. This is solely for testing GetTestTypeId(). extern const TypeId kTestTypeIdInGoogleTest = GetTestTypeId(); // This predicate-formatter checks that 'results' contains a test part // failure of the given type and that the failure message contains the // given substring. static AssertionResult HasOneFailure(const char* /* results_expr */, const char* /* type_expr */, const char* /* substr_expr */, const TestPartResultArray& results, TestPartResult::Type type, const std::string& substr) { const std::string expected(type == TestPartResult::kFatalFailure ? 
"1 fatal failure" : "1 non-fatal failure"); Message msg; if (results.size() != 1) { msg << "Expected: " << expected << "\n" << " Actual: " << results.size() << " failures"; for (int i = 0; i < results.size(); i++) { msg << "\n" << results.GetTestPartResult(i); } return AssertionFailure() << msg; } const TestPartResult& r = results.GetTestPartResult(0); if (r.type() != type) { return AssertionFailure() << "Expected: " << expected << "\n" << " Actual:\n" << r; } if (strstr(r.message(), substr.c_str()) == NULL) { return AssertionFailure() << "Expected: " << expected << " containing \"" << substr << "\"\n" << " Actual:\n" << r; } return AssertionSuccess(); } // The constructor of SingleFailureChecker remembers where to look up // test part results, what type of failure we expect, and what // substring the failure message should contain. SingleFailureChecker::SingleFailureChecker(const TestPartResultArray* results, TestPartResult::Type type, const std::string& substr) : results_(results), type_(type), substr_(substr) {} // The destructor of SingleFailureChecker verifies that the given // TestPartResultArray contains exactly one failure that has the given // type and contains the given substring. If that's not the case, a // non-fatal failure will be generated. SingleFailureChecker::~SingleFailureChecker() { EXPECT_PRED_FORMAT3(HasOneFailure, *results_, type_, substr_); } DefaultGlobalTestPartResultReporter::DefaultGlobalTestPartResultReporter( UnitTestImpl* unit_test) : unit_test_(unit_test) {} void DefaultGlobalTestPartResultReporter::ReportTestPartResult( const TestPartResult& result) { unit_test_->current_test_result()->AddTestPartResult(result); unit_test_->listeners()->repeater()->OnTestPartResult(result); } DefaultPerThreadTestPartResultReporter::DefaultPerThreadTestPartResultReporter( UnitTestImpl* unit_test) : unit_test_(unit_test) {} void DefaultPerThreadTestPartResultReporter::ReportTestPartResult( const TestPartResult& result) { unit_test_->GetGlobalTestPartResultReporter()->ReportTestPartResult(result); } // Returns the global test part result reporter. TestPartResultReporterInterface* UnitTestImpl::GetGlobalTestPartResultReporter() { internal::MutexLock lock(&global_test_part_result_reporter_mutex_); return global_test_part_result_repoter_; } // Sets the global test part result reporter. void UnitTestImpl::SetGlobalTestPartResultReporter( TestPartResultReporterInterface* reporter) { internal::MutexLock lock(&global_test_part_result_reporter_mutex_); global_test_part_result_repoter_ = reporter; } // Returns the test part result reporter for the current thread. TestPartResultReporterInterface* UnitTestImpl::GetTestPartResultReporterForCurrentThread() { return per_thread_test_part_result_reporter_.get(); } // Sets the test part result reporter for the current thread. void UnitTestImpl::SetTestPartResultReporterForCurrentThread( TestPartResultReporterInterface* reporter) { per_thread_test_part_result_reporter_.set(reporter); } // Gets the number of successful test cases. int UnitTestImpl::successful_test_case_count() const { return CountIf(test_cases_, TestCasePassed); } // Gets the number of failed test cases. int UnitTestImpl::failed_test_case_count() const { return CountIf(test_cases_, TestCaseFailed); } // Gets the number of all test cases. int UnitTestImpl::total_test_case_count() const { return static_cast(test_cases_.size()); } // Gets the number of all test cases that contain at least one test // that should run. 
int UnitTestImpl::test_case_to_run_count() const { return CountIf(test_cases_, ShouldRunTestCase); } // Gets the number of successful tests. int UnitTestImpl::successful_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count); } // Gets the number of skipped tests. int UnitTestImpl::skipped_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::skipped_test_count); } // Gets the number of failed tests. int UnitTestImpl::failed_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count); } // Gets the number of disabled tests that will be reported in the XML report. int UnitTestImpl::reportable_disabled_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::reportable_disabled_test_count); } // Gets the number of disabled tests. int UnitTestImpl::disabled_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::disabled_test_count); } // Gets the number of tests to be printed in the XML report. int UnitTestImpl::reportable_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::reportable_test_count); } // Gets the number of all tests. int UnitTestImpl::total_test_count() const { return SumOverTestCaseList(test_cases_, &TestCase::total_test_count); } // Gets the number of tests that should run. int UnitTestImpl::test_to_run_count() const { return SumOverTestCaseList(test_cases_, &TestCase::test_to_run_count); } // Returns the current OS stack trace as an std::string. // // The maximum number of stack frames to be included is specified by // the gtest_stack_trace_depth flag. The skip_count parameter // specifies the number of top frames to be skipped, which doesn't // count against the number of frames to be included. // // For example, if Foo() calls Bar(), which in turn calls // CurrentOsStackTraceExceptTop(1), Foo() will be included in the // trace but Bar() and CurrentOsStackTraceExceptTop() won't. std::string UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) { return os_stack_trace_getter()->CurrentStackTrace( static_cast(GTEST_FLAG(stack_trace_depth)), skip_count + 1 // Skips the user-specified number of frames plus this function // itself. ); // NOLINT } // Returns the current time in milliseconds. TimeInMillis GetTimeInMillis() { #if GTEST_OS_WINDOWS_MOBILE || defined(__BORLANDC__) // Difference between 1970-01-01 and 1601-01-01 in milliseconds. // http://analogous.blogspot.com/2005/04/epoch.html const TimeInMillis kJavaEpochToWinFileTimeDelta = static_cast(116444736UL) * 100000UL; const DWORD kTenthMicrosInMilliSecond = 10000; SYSTEMTIME now_systime; FILETIME now_filetime; ULARGE_INTEGER now_int64; // FIXME: Shouldn't this just use // GetSystemTimeAsFileTime()? GetSystemTime(&now_systime); if (SystemTimeToFileTime(&now_systime, &now_filetime)) { now_int64.LowPart = now_filetime.dwLowDateTime; now_int64.HighPart = now_filetime.dwHighDateTime; now_int64.QuadPart = (now_int64.QuadPart / kTenthMicrosInMilliSecond) - kJavaEpochToWinFileTimeDelta; return now_int64.QuadPart; } return 0; #elif GTEST_OS_WINDOWS && !GTEST_HAS_GETTIMEOFDAY_ __timeb64 now; // MSVC 8 deprecates _ftime64(), so we want to suppress warning 4996 // (deprecated function) there. // FIXME: Use GetTickCount()? 
Or use // SystemTimeToFileTime() GTEST_DISABLE_MSC_DEPRECATED_PUSH_() _ftime64(&now); GTEST_DISABLE_MSC_DEPRECATED_POP_() return static_cast(now.time) * 1000 + now.millitm; #elif GTEST_HAS_GETTIMEOFDAY_ struct timeval now; gettimeofday(&now, NULL); return static_cast(now.tv_sec) * 1000 + now.tv_usec / 1000; #else # error "Don't know how to get the current time on your system." #endif } // Utilities // class String. #if GTEST_OS_WINDOWS_MOBILE // Creates a UTF-16 wide string from the given ANSI string, allocating // memory using new. The caller is responsible for deleting the return // value using delete[]. Returns the wide string, or NULL if the // input is NULL. LPCWSTR String::AnsiToUtf16(const char* ansi) { if (!ansi) return NULL; const int length = strlen(ansi); const int unicode_length = MultiByteToWideChar(CP_ACP, 0, ansi, length, NULL, 0); WCHAR* unicode = new WCHAR[unicode_length + 1]; MultiByteToWideChar(CP_ACP, 0, ansi, length, unicode, unicode_length); unicode[unicode_length] = 0; return unicode; } // Creates an ANSI string from the given wide string, allocating // memory using new. The caller is responsible for deleting the return // value using delete[]. Returns the ANSI string, or NULL if the // input is NULL. const char* String::Utf16ToAnsi(LPCWSTR utf16_str) { if (!utf16_str) return NULL; const int ansi_length = WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, NULL, 0, NULL, NULL); char* ansi = new char[ansi_length + 1]; WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, ansi, ansi_length, NULL, NULL); ansi[ansi_length] = 0; return ansi; } #endif // GTEST_OS_WINDOWS_MOBILE // Compares two C strings. Returns true iff they have the same content. // // Unlike strcmp(), this function can handle NULL argument(s). A NULL // C string is considered different to any non-NULL C string, // including the empty string. bool String::CStringEquals(const char * lhs, const char * rhs) { if ( lhs == NULL ) return rhs == NULL; if ( rhs == NULL ) return false; return strcmp(lhs, rhs) == 0; } #if GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING // Converts an array of wide chars to a narrow string using the UTF-8 // encoding, and streams the result to the given Message object. static void StreamWideCharsToMessage(const wchar_t* wstr, size_t length, Message* msg) { for (size_t i = 0; i != length; ) { // NOLINT if (wstr[i] != L'\0') { *msg << WideStringToUtf8(wstr + i, static_cast(length - i)); while (i != length && wstr[i] != L'\0') i++; } else { *msg << '\0'; i++; } } } #endif // GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING void SplitString(const ::std::string& str, char delimiter, ::std::vector< ::std::string>* dest) { ::std::vector< ::std::string> parsed; ::std::string::size_type pos = 0; while (::testing::internal::AlwaysTrue()) { const ::std::string::size_type colon = str.find(delimiter, pos); if (colon == ::std::string::npos) { parsed.push_back(str.substr(pos)); break; } else { parsed.push_back(str.substr(pos, colon - pos)); pos = colon + 1; } } dest->swap(parsed); } } // namespace internal // Constructs an empty Message. // We allocate the stringstream separately because otherwise each use of // ASSERT/EXPECT in a procedure adds over 200 bytes to the procedure's // stack frame leading to huge stack frames in some cases; gcc does not reuse // the stack space. Message::Message() : ss_(new ::std::stringstream) { // By default, we want there to be enough precision when printing // a double to a Message. 
*ss_ << std::setprecision(std::numeric_limits::digits10 + 2); } // These two overloads allow streaming a wide C string to a Message // using the UTF-8 encoding. Message& Message::operator <<(const wchar_t* wide_c_str) { return *this << internal::String::ShowWideCString(wide_c_str); } Message& Message::operator <<(wchar_t* wide_c_str) { return *this << internal::String::ShowWideCString(wide_c_str); } #if GTEST_HAS_STD_WSTRING // Converts the given wide string to a narrow string using the UTF-8 // encoding, and streams the result to this Message object. Message& Message::operator <<(const ::std::wstring& wstr) { internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this); return *this; } #endif // GTEST_HAS_STD_WSTRING #if GTEST_HAS_GLOBAL_WSTRING // Converts the given wide string to a narrow string using the UTF-8 // encoding, and streams the result to this Message object. Message& Message::operator <<(const ::wstring& wstr) { internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this); return *this; } #endif // GTEST_HAS_GLOBAL_WSTRING // Gets the text streamed to this object so far as an std::string. // Each '\0' character in the buffer is replaced with "\\0". std::string Message::GetString() const { return internal::StringStreamToString(ss_.get()); } // AssertionResult constructors. // Used in EXPECT_TRUE/FALSE(assertion_result). AssertionResult::AssertionResult(const AssertionResult& other) : success_(other.success_), message_(other.message_.get() != NULL ? new ::std::string(*other.message_) : static_cast< ::std::string*>(NULL)) { } // Swaps two AssertionResults. void AssertionResult::swap(AssertionResult& other) { using std::swap; swap(success_, other.success_); swap(message_, other.message_); } // Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE. AssertionResult AssertionResult::operator!() const { AssertionResult negation(!success_); if (message_.get() != NULL) negation << *message_; return negation; } // Makes a successful assertion result. AssertionResult AssertionSuccess() { return AssertionResult(true); } // Makes a failed assertion result. AssertionResult AssertionFailure() { return AssertionResult(false); } // Makes a failed assertion result with the given failure message. // Deprecated; use AssertionFailure() << message. AssertionResult AssertionFailure(const Message& message) { return AssertionFailure() << message; } namespace internal { namespace edit_distance { std::vector CalculateOptimalEdits(const std::vector& left, const std::vector& right) { std::vector > costs( left.size() + 1, std::vector(right.size() + 1)); std::vector > best_move( left.size() + 1, std::vector(right.size() + 1)); // Populate for empty right. for (size_t l_i = 0; l_i < costs.size(); ++l_i) { costs[l_i][0] = static_cast(l_i); best_move[l_i][0] = kRemove; } // Populate for empty left. for (size_t r_i = 1; r_i < costs[0].size(); ++r_i) { costs[0][r_i] = static_cast(r_i); best_move[0][r_i] = kAdd; } for (size_t l_i = 0; l_i < left.size(); ++l_i) { for (size_t r_i = 0; r_i < right.size(); ++r_i) { if (left[l_i] == right[r_i]) { // Found a match. Consume it. 
costs[l_i + 1][r_i + 1] = costs[l_i][r_i]; best_move[l_i + 1][r_i + 1] = kMatch; continue; } const double add = costs[l_i + 1][r_i]; const double remove = costs[l_i][r_i + 1]; const double replace = costs[l_i][r_i]; if (add < remove && add < replace) { costs[l_i + 1][r_i + 1] = add + 1; best_move[l_i + 1][r_i + 1] = kAdd; } else if (remove < add && remove < replace) { costs[l_i + 1][r_i + 1] = remove + 1; best_move[l_i + 1][r_i + 1] = kRemove; } else { // We make replace a little more expensive than add/remove to lower // their priority. costs[l_i + 1][r_i + 1] = replace + 1.00001; best_move[l_i + 1][r_i + 1] = kReplace; } } } // Reconstruct the best path. We do it in reverse order. std::vector<EditType> best_path; for (size_t l_i = left.size(), r_i = right.size(); l_i > 0 || r_i > 0;) { EditType move = best_move[l_i][r_i]; best_path.push_back(move); l_i -= move != kAdd; r_i -= move != kRemove; } std::reverse(best_path.begin(), best_path.end()); return best_path; } namespace { // Helper class to convert string into ids with deduplication. class InternalStrings { public: size_t GetId(const std::string& str) { IdMap::iterator it = ids_.find(str); if (it != ids_.end()) return it->second; size_t id = ids_.size(); return ids_[str] = id; } private: typedef std::map<std::string, size_t> IdMap; IdMap ids_; }; } // namespace std::vector<EditType> CalculateOptimalEdits( const std::vector<std::string>& left, const std::vector<std::string>& right) { std::vector<size_t> left_ids, right_ids; { InternalStrings intern_table; for (size_t i = 0; i < left.size(); ++i) { left_ids.push_back(intern_table.GetId(left[i])); } for (size_t i = 0; i < right.size(); ++i) { right_ids.push_back(intern_table.GetId(right[i])); } } return CalculateOptimalEdits(left_ids, right_ids); } namespace { // Helper class that holds the state for one hunk and prints it out to the // stream. // It reorders adds/removes when possible to group all removes before all // adds. It also adds the hunk header before printing into the stream. class Hunk { public: Hunk(size_t left_start, size_t right_start) : left_start_(left_start), right_start_(right_start), adds_(), removes_(), common_() {} void PushLine(char edit, const char* line) { switch (edit) { case ' ': ++common_; FlushEdits(); hunk_.push_back(std::make_pair(' ', line)); break; case '-': ++removes_; hunk_removes_.push_back(std::make_pair('-', line)); break; case '+': ++adds_; hunk_adds_.push_back(std::make_pair('+', line)); break; } } void PrintTo(std::ostream* os) { PrintHeader(os); FlushEdits(); for (std::list<std::pair<char, const char*> >::const_iterator it = hunk_.begin(); it != hunk_.end(); ++it) { *os << it->first << it->second << "\n"; } } bool has_edits() const { return adds_ || removes_; } private: void FlushEdits() { hunk_.splice(hunk_.end(), hunk_removes_); hunk_.splice(hunk_.end(), hunk_adds_); } // Print a unified diff header for one hunk. // The format is // "@@ -<left_start>,<left_length> +<right_start>,<right_length> @@" // where the left/right parts are omitted if unnecessary. void PrintHeader(std::ostream* ss) const { *ss << "@@ "; if (removes_) { *ss << "-" << left_start_ << "," << (removes_ + common_); } if (removes_ && adds_) { *ss << " "; } if (adds_) { *ss << "+" << right_start_ << "," << (adds_ + common_); } *ss << " @@\n"; } size_t left_start_, right_start_; size_t adds_, removes_, common_; std::list<std::pair<char, const char*> > hunk_, hunk_adds_, hunk_removes_; }; } // namespace // Create a list of diff hunks in Unified diff format. // Each hunk has a header generated by PrintHeader above plus a body with // lines prefixed with ' ' for no change, '-' for deletion and '+' for // addition. 
// 'context' represents the desired unchanged prefix/suffix around the diff. // If two hunks are close enough that their contexts overlap, then they are // joined into one hunk. std::string CreateUnifiedDiff(const std::vector& left, const std::vector& right, size_t context) { const std::vector edits = CalculateOptimalEdits(left, right); size_t l_i = 0, r_i = 0, edit_i = 0; std::stringstream ss; while (edit_i < edits.size()) { // Find first edit. while (edit_i < edits.size() && edits[edit_i] == kMatch) { ++l_i; ++r_i; ++edit_i; } // Find the first line to include in the hunk. const size_t prefix_context = std::min(l_i, context); Hunk hunk(l_i - prefix_context + 1, r_i - prefix_context + 1); for (size_t i = prefix_context; i > 0; --i) { hunk.PushLine(' ', left[l_i - i].c_str()); } // Iterate the edits until we found enough suffix for the hunk or the input // is over. size_t n_suffix = 0; for (; edit_i < edits.size(); ++edit_i) { if (n_suffix >= context) { // Continue only if the next hunk is very close. std::vector::const_iterator it = edits.begin() + edit_i; while (it != edits.end() && *it == kMatch) ++it; if (it == edits.end() || (it - edits.begin()) - edit_i >= context) { // There is no next edit or it is too far away. break; } } EditType edit = edits[edit_i]; // Reset count when a non match is found. n_suffix = edit == kMatch ? n_suffix + 1 : 0; if (edit == kMatch || edit == kRemove || edit == kReplace) { hunk.PushLine(edit == kMatch ? ' ' : '-', left[l_i].c_str()); } if (edit == kAdd || edit == kReplace) { hunk.PushLine('+', right[r_i].c_str()); } // Advance indices, depending on edit type. l_i += edit != kAdd; r_i += edit != kRemove; } if (!hunk.has_edits()) { // We are done. We don't want this hunk. break; } hunk.PrintTo(&ss); } return ss.str(); } } // namespace edit_distance namespace { // The string representation of the values received in EqFailure() are already // escaped. Split them on escaped '\n' boundaries. Leave all other escaped // characters the same. std::vector SplitEscapedString(const std::string& str) { std::vector lines; size_t start = 0, end = str.size(); if (end > 2 && str[0] == '"' && str[end - 1] == '"') { ++start; --end; } bool escaped = false; for (size_t i = start; i + 1 < end; ++i) { if (escaped) { escaped = false; if (str[i] == 'n') { lines.push_back(str.substr(start, i - start - 1)); start = i + 1; } } else { escaped = str[i] == '\\'; } } lines.push_back(str.substr(start, end - start)); return lines; } } // namespace // Constructs and returns the message for an equality assertion // (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure. // // The first four parameters are the expressions used in the assertion // and their values, as strings. For example, for ASSERT_EQ(foo, bar) // where foo is 5 and bar is 6, we have: // // lhs_expression: "foo" // rhs_expression: "bar" // lhs_value: "5" // rhs_value: "6" // // The ignoring_case parameter is true iff the assertion is a // *_STRCASEEQ*. When it's true, the string "Ignoring case" will // be inserted into the message. 
AssertionResult EqFailure(const char* lhs_expression, const char* rhs_expression, const std::string& lhs_value, const std::string& rhs_value, bool ignoring_case) { Message msg; msg << "Expected equality of these values:"; msg << "\n " << lhs_expression; if (lhs_value != lhs_expression) { msg << "\n Which is: " << lhs_value; } msg << "\n " << rhs_expression; if (rhs_value != rhs_expression) { msg << "\n Which is: " << rhs_value; } if (ignoring_case) { msg << "\nIgnoring case"; } if (!lhs_value.empty() && !rhs_value.empty()) { const std::vector lhs_lines = SplitEscapedString(lhs_value); const std::vector rhs_lines = SplitEscapedString(rhs_value); if (lhs_lines.size() > 1 || rhs_lines.size() > 1) { msg << "\nWith diff:\n" << edit_distance::CreateUnifiedDiff(lhs_lines, rhs_lines); } } return AssertionFailure() << msg; } // Constructs a failure message for Boolean assertions such as EXPECT_TRUE. std::string GetBoolAssertionFailureMessage( const AssertionResult& assertion_result, const char* expression_text, const char* actual_predicate_value, const char* expected_predicate_value) { const char* actual_message = assertion_result.message(); Message msg; msg << "Value of: " << expression_text << "\n Actual: " << actual_predicate_value; if (actual_message[0] != '\0') msg << " (" << actual_message << ")"; msg << "\nExpected: " << expected_predicate_value; return msg.GetString(); } // Helper function for implementing ASSERT_NEAR. AssertionResult DoubleNearPredFormat(const char* expr1, const char* expr2, const char* abs_error_expr, double val1, double val2, double abs_error) { const double diff = fabs(val1 - val2); if (diff <= abs_error) return AssertionSuccess(); // FIXME: do not print the value of an expression if it's // already a literal. return AssertionFailure() << "The difference between " << expr1 << " and " << expr2 << " is " << diff << ", which exceeds " << abs_error_expr << ", where\n" << expr1 << " evaluates to " << val1 << ",\n" << expr2 << " evaluates to " << val2 << ", and\n" << abs_error_expr << " evaluates to " << abs_error << "."; } // Helper template for implementing FloatLE() and DoubleLE(). template AssertionResult FloatingPointLE(const char* expr1, const char* expr2, RawType val1, RawType val2) { // Returns success if val1 is less than val2, if (val1 < val2) { return AssertionSuccess(); } // or if val1 is almost equal to val2. const FloatingPoint lhs(val1), rhs(val2); if (lhs.AlmostEquals(rhs)) { return AssertionSuccess(); } // Note that the above two checks will both fail if either val1 or // val2 is NaN, as the IEEE floating-point standard requires that // any predicate involving a NaN must return false. ::std::stringstream val1_ss; val1_ss << std::setprecision(std::numeric_limits::digits10 + 2) << val1; ::std::stringstream val2_ss; val2_ss << std::setprecision(std::numeric_limits::digits10 + 2) << val2; return AssertionFailure() << "Expected: (" << expr1 << ") <= (" << expr2 << ")\n" << " Actual: " << StringStreamToString(&val1_ss) << " vs " << StringStreamToString(&val2_ss); } } // namespace internal // Asserts that val1 is less than, or almost equal to, val2. Fails // otherwise. In particular, it fails if either val1 or val2 is NaN. AssertionResult FloatLE(const char* expr1, const char* expr2, float val1, float val2) { return internal::FloatingPointLE(expr1, expr2, val1, val2); } // Asserts that val1 is less than, or almost equal to, val2. Fails // otherwise. In particular, it fails if either val1 or val2 is NaN. 
AssertionResult DoubleLE(const char* expr1, const char* expr2, double val1, double val2) { return internal::FloatingPointLE(expr1, expr2, val1, val2); } namespace internal { // The helper function for {ASSERT|EXPECT}_EQ with int or enum // arguments. AssertionResult CmpHelperEQ(const char* lhs_expression, const char* rhs_expression, BiggestInt lhs, BiggestInt rhs) { if (lhs == rhs) { return AssertionSuccess(); } return EqFailure(lhs_expression, rhs_expression, FormatForComparisonFailureMessage(lhs, rhs), FormatForComparisonFailureMessage(rhs, lhs), false); } // A macro for implementing the helper functions needed to implement // ASSERT_?? and EXPECT_?? with integer or enum arguments. It is here // just to avoid copy-and-paste of similar code. #define GTEST_IMPL_CMP_HELPER_(op_name, op)\ AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \ BiggestInt val1, BiggestInt val2) {\ if (val1 op val2) {\ return AssertionSuccess();\ } else {\ return AssertionFailure() \ << "Expected: (" << expr1 << ") " #op " (" << expr2\ << "), actual: " << FormatForComparisonFailureMessage(val1, val2)\ << " vs " << FormatForComparisonFailureMessage(val2, val1);\ }\ } // Implements the helper function for {ASSERT|EXPECT}_NE with int or // enum arguments. GTEST_IMPL_CMP_HELPER_(NE, !=) // Implements the helper function for {ASSERT|EXPECT}_LE with int or // enum arguments. GTEST_IMPL_CMP_HELPER_(LE, <=) // Implements the helper function for {ASSERT|EXPECT}_LT with int or // enum arguments. GTEST_IMPL_CMP_HELPER_(LT, < ) // Implements the helper function for {ASSERT|EXPECT}_GE with int or // enum arguments. GTEST_IMPL_CMP_HELPER_(GE, >=) // Implements the helper function for {ASSERT|EXPECT}_GT with int or // enum arguments. GTEST_IMPL_CMP_HELPER_(GT, > ) #undef GTEST_IMPL_CMP_HELPER_ // The helper function for {ASSERT|EXPECT}_STREQ. AssertionResult CmpHelperSTREQ(const char* lhs_expression, const char* rhs_expression, const char* lhs, const char* rhs) { if (String::CStringEquals(lhs, rhs)) { return AssertionSuccess(); } return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs), PrintToString(rhs), false); } // The helper function for {ASSERT|EXPECT}_STRCASEEQ. AssertionResult CmpHelperSTRCASEEQ(const char* lhs_expression, const char* rhs_expression, const char* lhs, const char* rhs) { if (String::CaseInsensitiveCStringEquals(lhs, rhs)) { return AssertionSuccess(); } return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs), PrintToString(rhs), true); } // The helper function for {ASSERT|EXPECT}_STRNE. AssertionResult CmpHelperSTRNE(const char* s1_expression, const char* s2_expression, const char* s1, const char* s2) { if (!String::CStringEquals(s1, s2)) { return AssertionSuccess(); } else { return AssertionFailure() << "Expected: (" << s1_expression << ") != (" << s2_expression << "), actual: \"" << s1 << "\" vs \"" << s2 << "\""; } } // The helper function for {ASSERT|EXPECT}_STRCASENE. AssertionResult CmpHelperSTRCASENE(const char* s1_expression, const char* s2_expression, const char* s1, const char* s2) { if (!String::CaseInsensitiveCStringEquals(s1, s2)) { return AssertionSuccess(); } else { return AssertionFailure() << "Expected: (" << s1_expression << ") != (" << s2_expression << ") (ignoring case), actual: \"" << s1 << "\" vs \"" << s2 << "\""; } } } // namespace internal namespace { // Helper functions for implementing IsSubString() and IsNotSubstring(). // This group of overloaded functions return true iff needle is a // substring of haystack. 
NULL is considered a substring of itself // only. bool IsSubstringPred(const char* needle, const char* haystack) { if (needle == NULL || haystack == NULL) return needle == haystack; return strstr(haystack, needle) != NULL; } bool IsSubstringPred(const wchar_t* needle, const wchar_t* haystack) { if (needle == NULL || haystack == NULL) return needle == haystack; return wcsstr(haystack, needle) != NULL; } // StringType here can be either ::std::string or ::std::wstring. template bool IsSubstringPred(const StringType& needle, const StringType& haystack) { return haystack.find(needle) != StringType::npos; } // This function implements either IsSubstring() or IsNotSubstring(), // depending on the value of the expected_to_be_substring parameter. // StringType here can be const char*, const wchar_t*, ::std::string, // or ::std::wstring. template AssertionResult IsSubstringImpl( bool expected_to_be_substring, const char* needle_expr, const char* haystack_expr, const StringType& needle, const StringType& haystack) { if (IsSubstringPred(needle, haystack) == expected_to_be_substring) return AssertionSuccess(); const bool is_wide_string = sizeof(needle[0]) > 1; const char* const begin_string_quote = is_wide_string ? "L\"" : "\""; return AssertionFailure() << "Value of: " << needle_expr << "\n" << " Actual: " << begin_string_quote << needle << "\"\n" << "Expected: " << (expected_to_be_substring ? "" : "not ") << "a substring of " << haystack_expr << "\n" << "Which is: " << begin_string_quote << haystack << "\""; } } // namespace // IsSubstring() and IsNotSubstring() check whether needle is a // substring of haystack (NULL is considered a substring of itself // only), and return an appropriate error message when they fail. AssertionResult IsSubstring( const char* needle_expr, const char* haystack_expr, const char* needle, const char* haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } AssertionResult IsSubstring( const char* needle_expr, const char* haystack_expr, const wchar_t* needle, const wchar_t* haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } AssertionResult IsNotSubstring( const char* needle_expr, const char* haystack_expr, const char* needle, const char* haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } AssertionResult IsNotSubstring( const char* needle_expr, const char* haystack_expr, const wchar_t* needle, const wchar_t* haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } AssertionResult IsSubstring( const char* needle_expr, const char* haystack_expr, const ::std::string& needle, const ::std::string& haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } AssertionResult IsNotSubstring( const char* needle_expr, const char* haystack_expr, const ::std::string& needle, const ::std::string& haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } #if GTEST_HAS_STD_WSTRING AssertionResult IsSubstring( const char* needle_expr, const char* haystack_expr, const ::std::wstring& needle, const ::std::wstring& haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } AssertionResult IsNotSubstring( const char* needle_expr, const char* haystack_expr, const ::std::wstring& needle, const ::std::wstring& haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } #endif // GTEST_HAS_STD_WSTRING namespace internal { #if 
GTEST_OS_WINDOWS namespace { // Helper function for IsHRESULT{SuccessFailure} predicates AssertionResult HRESULTFailureHelper(const char* expr, const char* expected, long hr) { // NOLINT # if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_TV_TITLE // Windows CE doesn't support FormatMessage. const char error_text[] = ""; # else // Looks up the human-readable system message for the HRESULT code // and since we're not passing any params to FormatMessage, we don't // want inserts expanded. const DWORD kFlags = FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS; const DWORD kBufSize = 4096; // Gets the system's human readable message string for this HRESULT. char error_text[kBufSize] = { '\0' }; DWORD message_length = ::FormatMessageA(kFlags, 0, // no source, we're asking system hr, // the error 0, // no line width restrictions error_text, // output buffer kBufSize, // buf size NULL); // no arguments for inserts // Trims trailing white space (FormatMessage leaves a trailing CR-LF) for (; message_length && IsSpace(error_text[message_length - 1]); --message_length) { error_text[message_length - 1] = '\0'; } # endif // GTEST_OS_WINDOWS_MOBILE const std::string error_hex("0x" + String::FormatHexInt(hr)); return ::testing::AssertionFailure() << "Expected: " << expr << " " << expected << ".\n" << " Actual: " << error_hex << " " << error_text << "\n"; } } // namespace AssertionResult IsHRESULTSuccess(const char* expr, long hr) { // NOLINT if (SUCCEEDED(hr)) { return AssertionSuccess(); } return HRESULTFailureHelper(expr, "succeeds", hr); } AssertionResult IsHRESULTFailure(const char* expr, long hr) { // NOLINT if (FAILED(hr)) { return AssertionSuccess(); } return HRESULTFailureHelper(expr, "fails", hr); } #endif // GTEST_OS_WINDOWS // Utility functions for encoding Unicode text (wide strings) in // UTF-8. // A Unicode code-point can have up to 21 bits, and is encoded in UTF-8 // like this: // // Code-point length Encoding // 0 - 7 bits 0xxxxxxx // 8 - 11 bits 110xxxxx 10xxxxxx // 12 - 16 bits 1110xxxx 10xxxxxx 10xxxxxx // 17 - 21 bits 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx // The maximum code-point a one-byte UTF-8 sequence can represent. const UInt32 kMaxCodePoint1 = (static_cast<UInt32>(1) << 7) - 1; // The maximum code-point a two-byte UTF-8 sequence can represent. const UInt32 kMaxCodePoint2 = (static_cast<UInt32>(1) << (5 + 6)) - 1; // The maximum code-point a three-byte UTF-8 sequence can represent. const UInt32 kMaxCodePoint3 = (static_cast<UInt32>(1) << (4 + 2*6)) - 1; // The maximum code-point a four-byte UTF-8 sequence can represent. const UInt32 kMaxCodePoint4 = (static_cast<UInt32>(1) << (3 + 3*6)) - 1; // Chops off the n lowest bits from a bit pattern. Returns the n // lowest bits. As a side effect, the original bit pattern will be // shifted to the right by n bits. inline UInt32 ChopLowBits(UInt32* bits, int n) { const UInt32 low_bits = *bits & ((static_cast<UInt32>(1) << n) - 1); *bits >>= n; return low_bits; } // Converts a Unicode code point to a narrow string in UTF-8 encoding. // code_point parameter is of type UInt32 because wchar_t may not be // wide enough to contain a code point. // If the code_point is not a valid Unicode code point // (i.e. outside of Unicode range U+0 to U+10FFFF) it will be converted // to "(Invalid Unicode 0xXXXXXXXX)". std::string CodePointToUtf8(UInt32 code_point) { if (code_point > kMaxCodePoint4) { return "(Invalid Unicode 0x" + String::FormatHexInt(code_point) + ")"; } char str[5]; // Big enough for the largest valid code point. 
if (code_point <= kMaxCodePoint1) { str[1] = '\0'; str[0] = static_cast<char>(code_point); // 0xxxxxxx } else if (code_point <= kMaxCodePoint2) { str[2] = '\0'; str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx str[0] = static_cast<char>(0xC0 | code_point); // 110xxxxx } else if (code_point <= kMaxCodePoint3) { str[3] = '\0'; str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx str[0] = static_cast<char>(0xE0 | code_point); // 1110xxxx } else { // code_point <= kMaxCodePoint4 str[4] = '\0'; str[3] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6)); // 10xxxxxx str[0] = static_cast<char>(0xF0 | code_point); // 11110xxx } return str; } // The following two functions only make sense if the system // uses UTF-16 for wide string encoding. All supported systems // with 16 bit wchar_t (Windows, Cygwin, Symbian OS) do use UTF-16. // Determines if the arguments constitute UTF-16 surrogate pair // and thus should be combined into a single Unicode code point // using CreateCodePointFromUtf16SurrogatePair. inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) { return sizeof(wchar_t) == 2 && (first & 0xFC00) == 0xD800 && (second & 0xFC00) == 0xDC00; } // Creates a Unicode code point from UTF16 surrogate pair. inline UInt32 CreateCodePointFromUtf16SurrogatePair(wchar_t first, wchar_t second) { const UInt32 mask = (1 << 10) - 1; return (sizeof(wchar_t) == 2) ? (((first & mask) << 10) | (second & mask)) + 0x10000 : // This function should not be called when the condition is // false, but we provide a sensible default in case it is. static_cast<UInt32>(first); } // Converts a wide string to a narrow string in UTF-8 encoding. // The wide string is assumed to have the following encoding: // UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS) // UTF-32 if sizeof(wchar_t) == 4 (on Linux) // Parameter str points to a null-terminated wide string. // Parameter num_chars may additionally limit the number // of wchar_t characters processed. -1 is used when the entire string // should be processed. // If the string contains code points that are not valid Unicode code points // (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output // as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF16 encoding // and contains invalid UTF-16 surrogate pairs, values in those pairs // will be encoded as individual Unicode characters from the Basic Multilingual Plane. std::string WideStringToUtf8(const wchar_t* str, int num_chars) { if (num_chars == -1) num_chars = static_cast<int>(wcslen(str)); ::std::stringstream stream; for (int i = 0; i < num_chars; ++i) { UInt32 unicode_code_point; if (str[i] == L'\0') { break; } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) { unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i], str[i + 1]); i++; } else { unicode_code_point = static_cast<UInt32>(str[i]); } stream << CodePointToUtf8(unicode_code_point); } return StringStreamToString(&stream); } // Converts a wide C string to an std::string using the UTF-8 encoding. // NULL will be converted to "(null)". std::string String::ShowWideCString(const wchar_t * wide_c_str) { if (wide_c_str == NULL) return "(null)"; return internal::WideStringToUtf8(wide_c_str, -1); } // Compares two wide C strings. Returns true iff they have the same // content. 
// // Unlike wcscmp(), this function can handle NULL argument(s). A NULL // C string is considered different to any non-NULL C string, // including the empty string. bool String::WideCStringEquals(const wchar_t * lhs, const wchar_t * rhs) { if (lhs == NULL) return rhs == NULL; if (rhs == NULL) return false; return wcscmp(lhs, rhs) == 0; } // Helper function for *_STREQ on wide strings. AssertionResult CmpHelperSTREQ(const char* lhs_expression, const char* rhs_expression, const wchar_t* lhs, const wchar_t* rhs) { if (String::WideCStringEquals(lhs, rhs)) { return AssertionSuccess(); } return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs), PrintToString(rhs), false); } // Helper function for *_STRNE on wide strings. AssertionResult CmpHelperSTRNE(const char* s1_expression, const char* s2_expression, const wchar_t* s1, const wchar_t* s2) { if (!String::WideCStringEquals(s1, s2)) { return AssertionSuccess(); } return AssertionFailure() << "Expected: (" << s1_expression << ") != (" << s2_expression << "), actual: " << PrintToString(s1) << " vs " << PrintToString(s2); } // Compares two C strings, ignoring case. Returns true iff they have // the same content. // // Unlike strcasecmp(), this function can handle NULL argument(s). A // NULL C string is considered different to any non-NULL C string, // including the empty string. bool String::CaseInsensitiveCStringEquals(const char * lhs, const char * rhs) { if (lhs == NULL) return rhs == NULL; if (rhs == NULL) return false; return posix::StrCaseCmp(lhs, rhs) == 0; } // Compares two wide C strings, ignoring case. Returns true iff they // have the same content. // // Unlike wcscasecmp(), this function can handle NULL argument(s). // A NULL C string is considered different to any non-NULL wide C string, // including the empty string. // NB: The implementations on different platforms slightly differ. // On windows, this method uses _wcsicmp which compares according to LC_CTYPE // environment variable. On GNU platform this method uses wcscasecmp // which compares according to LC_CTYPE category of the current locale. // On MacOS X, it uses towlower, which also uses LC_CTYPE category of the // current locale. bool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs, const wchar_t* rhs) { if (lhs == NULL) return rhs == NULL; if (rhs == NULL) return false; #if GTEST_OS_WINDOWS return _wcsicmp(lhs, rhs) == 0; #elif GTEST_OS_LINUX && !GTEST_OS_LINUX_ANDROID return wcscasecmp(lhs, rhs) == 0; #else // Android, Mac OS X and Cygwin don't define wcscasecmp. // Other unknown OSes may not define it either. wint_t left, right; do { left = towlower(*lhs++); right = towlower(*rhs++); } while (left && left == right); return left == right; #endif // OS selector } // Returns true iff str ends with the given suffix, ignoring case. // Any string is considered to end with an empty suffix. bool String::EndsWithCaseInsensitive( const std::string& str, const std::string& suffix) { const size_t str_len = str.length(); const size_t suffix_len = suffix.length(); return (str_len >= suffix_len) && CaseInsensitiveCStringEquals(str.c_str() + str_len - suffix_len, suffix.c_str()); } // Formats an int value as "%02d". std::string String::FormatIntWidth2(int value) { std::stringstream ss; ss << std::setfill('0') << std::setw(2) << value; return ss.str(); } // Formats an int value as "%X". std::string String::FormatHexInt(int value) { std::stringstream ss; ss << std::hex << std::uppercase << value; return ss.str(); } // Formats a byte as "%02X". 
// Formats a byte as "%02X".
std::string String::FormatByte(unsigned char value) {
  std::stringstream ss;
  ss << std::setfill('0') << std::setw(2) << std::hex << std::uppercase
     << static_cast<unsigned int>(value);
  return ss.str();
}

// Converts the buffer in a stringstream to an std::string, converting NUL
// bytes to "\\0" along the way.
std::string StringStreamToString(::std::stringstream* ss) {
  const ::std::string& str = ss->str();
  const char* const start = str.c_str();
  const char* const end = start + str.length();

  std::string result;
  result.reserve(2 * (end - start));
  for (const char* ch = start; ch != end; ++ch) {
    if (*ch == '\0') {
      result += "\\0";  // Replaces NUL with "\\0";
    } else {
      result += *ch;
    }
  }

  return result;
}

// Appends the user-supplied message to the Google-Test-generated message.
std::string AppendUserMessage(const std::string& gtest_msg,
                              const Message& user_msg) {
  // Appends the user message if it's non-empty.
  const std::string user_msg_string = user_msg.GetString();
  if (user_msg_string.empty()) {
    return gtest_msg;
  }

  return gtest_msg + "\n" + user_msg_string;
}

}  // namespace internal

// class TestResult

// Creates an empty TestResult.
TestResult::TestResult()
    : death_test_count_(0),
      elapsed_time_(0) {
}

// D'tor.
TestResult::~TestResult() {
}

// Returns the i-th test part result among all the results. i can
// range from 0 to total_part_count() - 1. If i is not in that range,
// aborts the program.
const TestPartResult& TestResult::GetTestPartResult(int i) const {
  if (i < 0 || i >= total_part_count())
    internal::posix::Abort();
  return test_part_results_.at(i);
}

// Returns the i-th test property. i can range from 0 to
// test_property_count() - 1. If i is not in that range, aborts the
// program.
const TestProperty& TestResult::GetTestProperty(int i) const {
  if (i < 0 || i >= test_property_count())
    internal::posix::Abort();
  return test_properties_.at(i);
}

// Clears the test part results.
void TestResult::ClearTestPartResults() {
  test_part_results_.clear();
}

// Adds a test part result to the list.
void TestResult::AddTestPartResult(const TestPartResult& test_part_result) {
  test_part_results_.push_back(test_part_result);
}

// Adds a test property to the list. If a property with the same key as the
// supplied property is already represented, the value of this test_property
// replaces the old value for that key.
void TestResult::RecordProperty(const std::string& xml_element,
                                const TestProperty& test_property) {
  if (!ValidateTestProperty(xml_element, test_property)) {
    return;
  }
  internal::MutexLock lock(&test_properites_mutex_);
  const std::vector<TestProperty>::iterator property_with_matching_key =
      std::find_if(test_properties_.begin(), test_properties_.end(),
                   internal::TestPropertyKeyIs(test_property.key()));
  if (property_with_matching_key == test_properties_.end()) {
    test_properties_.push_back(test_property);
    return;
  }
  property_with_matching_key->SetValue(test_property.value());
}

// The list of reserved attributes used in the <testsuites> element of XML
// output.
static const char* const kReservedTestSuitesAttributes[] = {
  "disabled",
  "errors",
  "failures",
  "name",
  "random_seed",
  "tests",
  "time",
  "timestamp"
};

// The list of reserved attributes used in the <testsuite> element of XML
// output.
static const char* const kReservedTestSuiteAttributes[] = {
  "disabled",
  "errors",
  "failures",
  "name",
  "tests",
  "time"
};

// The list of reserved attributes used in the <testcase> element of XML
// output.
static const char* const kReservedTestCaseAttributes[] = {
    "classname",  "name", "status", "time",
    "type_param", "value_param", "file", "line"};

template <int kSize>
std::vector<std::string> ArrayAsVector(const char* const (&array)[kSize]) {
  return std::vector<std::string>(array, array + kSize);
}

static std::vector<std::string> GetReservedAttributesForElement(
    const std::string& xml_element) {
  if (xml_element == "testsuites") {
    return ArrayAsVector(kReservedTestSuitesAttributes);
  } else if (xml_element == "testsuite") {
    return ArrayAsVector(kReservedTestSuiteAttributes);
  } else if (xml_element == "testcase") {
    return ArrayAsVector(kReservedTestCaseAttributes);
  } else {
    GTEST_CHECK_(false) << "Unrecognized xml_element provided: " << xml_element;
  }
  // This code is unreachable but some compilers may not realize that.
  return std::vector<std::string>();
}

static std::string FormatWordList(const std::vector<std::string>& words) {
  Message word_list;
  for (size_t i = 0; i < words.size(); ++i) {
    if (i > 0 && words.size() > 2) {
      word_list << ", ";
    }
    if (i == words.size() - 1) {
      word_list << "and ";
    }
    word_list << "'" << words[i] << "'";
  }
  return word_list.GetString();
}

static bool ValidateTestPropertyName(
    const std::string& property_name,
    const std::vector<std::string>& reserved_names) {
  if (std::find(reserved_names.begin(), reserved_names.end(), property_name) !=
          reserved_names.end()) {
    ADD_FAILURE() << "Reserved key used in RecordProperty(): " << property_name
                  << " (" << FormatWordList(reserved_names)
                  << " are reserved by " << GTEST_NAME_ << ")";
    return false;
  }
  return true;
}

// Adds a failure if the key is a reserved attribute of the element named
// xml_element.  Returns true if the property is valid.
bool TestResult::ValidateTestProperty(const std::string& xml_element,
                                      const TestProperty& test_property) {
  return ValidateTestPropertyName(test_property.key(),
                                  GetReservedAttributesForElement(xml_element));
}

// Clears the object.
void TestResult::Clear() {
  test_part_results_.clear();
  test_properties_.clear();
  death_test_count_ = 0;
  elapsed_time_ = 0;
}

// Returns true iff the test part was skipped.
static bool TestPartSkipped(const TestPartResult& result) {
  return result.skipped();
}

// Returns true iff the test was skipped.
bool TestResult::Skipped() const {
  return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
}

// Returns true iff the test failed.
bool TestResult::Failed() const {
  for (int i = 0; i < total_part_count(); ++i) {
    if (GetTestPartResult(i).failed())
      return true;
  }
  return false;
}

// Returns true iff the test part fatally failed.
static bool TestPartFatallyFailed(const TestPartResult& result) {
  return result.fatally_failed();
}

// Returns true iff the test fatally failed.
bool TestResult::HasFatalFailure() const {
  return CountIf(test_part_results_, TestPartFatallyFailed) > 0;
}

// Returns true iff the test part non-fatally failed.
static bool TestPartNonfatallyFailed(const TestPartResult& result) {
  return result.nonfatally_failed();
}

// Returns true iff the test has a non-fatal failure.
bool TestResult::HasNonfatalFailure() const {
  return CountIf(test_part_results_, TestPartNonfatallyFailed) > 0;
}

// Gets the number of all test parts.  This is the sum of the number
// of successful test parts and the number of failed test parts.
int TestResult::total_part_count() const {
  return static_cast<int>(test_part_results_.size());
}

// Returns the number of the test properties.
int TestResult::test_property_count() const {
  return static_cast<int>(test_properties_.size());
}
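// Illustrative example (not part of the library): inside a test,
//
//   TEST(WidgetTest, Counts) {
//     RecordProperty("MaximumWidgets", 12);  // OK, lands in the XML report
//     RecordProperty("classname", "x");      // rejected: reserved attribute
//   }
//
// The second call fails ValidateTestProperty() above, because "classname"
// is reserved for the <testcase> element; the resulting failure message
// lists the reserved keys via FormatWordList(), roughly "'classname',
// 'name', ... and 'line' are reserved by Google Test".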
// class Test

// Creates a Test object.

// The c'tor saves the states of all flags.
Test::Test()
    : gtest_flag_saver_(new GTEST_FLAG_SAVER_) {
}

// The d'tor restores the states of all flags.  The actual work is
// done by the d'tor of the gtest_flag_saver_ field, and thus not
// visible here.
Test::~Test() {
}

// Sets up the test fixture.
//
// A sub-class may override this.
void Test::SetUp() {
}

// Tears down the test fixture.
//
// A sub-class may override this.
void Test::TearDown() {
}

// Allows user-supplied key value pairs to be recorded for later output.
void Test::RecordProperty(const std::string& key, const std::string& value) {
  UnitTest::GetInstance()->RecordProperty(key, value);
}

// Allows user-supplied key value pairs to be recorded for later output.
void Test::RecordProperty(const std::string& key, int value) {
  Message value_message;
  value_message << value;
  RecordProperty(key, value_message.GetString().c_str());
}

namespace internal {

void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
                                    const std::string& message) {
  // This function is a friend of UnitTest and as such has access to
  // AddTestPartResult.
  UnitTest::GetInstance()->AddTestPartResult(
      result_type,
      NULL,  // No info about the source file where the exception occurred.
      -1,    // We have no info on which line caused the exception.
      message,
      "");   // No stack trace, either.
}

}  // namespace internal

// Google Test requires all tests in the same test case to use the same test
// fixture class.  This function checks if the current test has the
// same fixture class as the first test in the current test case.  If
// yes, it returns true; otherwise it generates a Google Test failure and
// returns false.
bool Test::HasSameFixtureClass() {
  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
  const TestCase* const test_case = impl->current_test_case();

  // Info about the first test in the current test case.
  const TestInfo* const first_test_info = test_case->test_info_list()[0];
  const internal::TypeId first_fixture_id = first_test_info->fixture_class_id_;
  const char* const first_test_name = first_test_info->name();

  // Info about the current test.
  const TestInfo* const this_test_info = impl->current_test_info();
  const internal::TypeId this_fixture_id = this_test_info->fixture_class_id_;
  const char* const this_test_name = this_test_info->name();

  if (this_fixture_id != first_fixture_id) {
    // Is the first test defined using TEST?
    const bool first_is_TEST = first_fixture_id == internal::GetTestTypeId();
    // Is this test defined using TEST?
    const bool this_is_TEST = this_fixture_id == internal::GetTestTypeId();

    if (first_is_TEST || this_is_TEST) {
      // Both TEST and TEST_F appear in the same test case, which is incorrect.
      // Tell the user how to fix this.

      // Gets the name of the TEST and the name of the TEST_F.  Note
      // that first_is_TEST and this_is_TEST cannot both be true, as
      // the fixture IDs are different for the two tests.
      const char* const TEST_name =
          first_is_TEST ? first_test_name : this_test_name;
      const char* const TEST_F_name =
          first_is_TEST ? this_test_name : first_test_name;

      ADD_FAILURE()
          << "All tests in the same test case must use the same test fixture\n"
          << "class, so mixing TEST_F and TEST in the same test case is\n"
          << "illegal.  In test case " << this_test_info->test_case_name()
          << ",\n"
          << "test " << TEST_F_name << " is defined using TEST_F but\n"
          << "test " << TEST_name << " is defined using TEST.  You probably\n"
          << "want to change the TEST to TEST_F or move it to another test\n"
          << "case.";
    } else {
      // Two fixture classes with the same name appear in two different
      // namespaces, which is not allowed. Tell the user how to fix this.
      ADD_FAILURE()
          << "All tests in the same test case must use the same test fixture\n"
          << "class.  However, in test case "
          << this_test_info->test_case_name() << ",\n"
          << "you defined test " << first_test_name << " and test "
          << this_test_name << "\n"
          << "using two different test fixture classes.  This can happen if\n"
          << "the two classes are from different namespaces or translation\n"
          << "units and have the same name.  You should probably rename one\n"
          << "of the classes to put the tests into different test cases.";
    }
    return false;
  }

  return true;
}

#if GTEST_HAS_SEH

// Adds an "exception thrown" fatal failure to the current test.  This
// function returns its result via an output parameter pointer because VC++
// prohibits creation of objects with destructors on stack in functions
// using __try (see error C2712).
static std::string* FormatSehExceptionMessage(DWORD exception_code,
                                              const char* location) {
  Message message;
  message << "SEH exception with code 0x" << std::setbase(16) <<
      exception_code << std::setbase(10) << " thrown in " << location << ".";

  return new std::string(message.GetString());
}

#endif  // GTEST_HAS_SEH

namespace internal {

#if GTEST_HAS_EXCEPTIONS

// Adds an "exception thrown" fatal failure to the current test.
static std::string FormatCxxExceptionMessage(const char* description,
                                             const char* location) {
  Message message;
  if (description != NULL) {
    message << "C++ exception with description \"" << description << "\"";
  } else {
    message << "Unknown C++ exception";
  }
  message << " thrown in " << location << ".";

  return message.GetString();
}

static std::string PrintTestPartResultToString(
    const TestPartResult& test_part_result);

GoogleTestFailureException::GoogleTestFailureException(
    const TestPartResult& failure)
    : ::std::runtime_error(PrintTestPartResultToString(failure).c_str()) {}

#endif  // GTEST_HAS_EXCEPTIONS

// We put these helper functions in the internal namespace as IBM's xlC
// compiler rejects the code if they were declared static.

// Runs the given method and handles SEH exceptions it throws, when
// SEH is supported; returns the 0-value for type Result in case of an
// SEH exception.  (Microsoft compilers cannot handle SEH and C++
// exceptions in the same function.  Therefore, we provide a separate
// wrapper function for handling SEH exceptions.)
template <class T, typename Result>
Result HandleSehExceptionsInMethodIfSupported(
    T* object, Result (T::*method)(), const char* location) {
#if GTEST_HAS_SEH
  __try {
    return (object->*method)();
  } __except (internal::UnitTestOptions::GTestShouldProcessSEH(  // NOLINT
      GetExceptionCode())) {
    // We create the exception message on the heap because VC++ prohibits
    // creation of objects with destructors on stack in functions using __try
    // (see error C2712).
    std::string* exception_message = FormatSehExceptionMessage(
        GetExceptionCode(), location);
    internal::ReportFailureInUnknownLocation(TestPartResult::kFatalFailure,
                                             *exception_message);
    delete exception_message;
    return static_cast<Result>(0);
  }
#else
  (void)location;
  return (object->*method)();
#endif  // GTEST_HAS_SEH
}

// Runs the given method and catches and reports C++ and/or SEH-style
// exceptions, if they are supported; returns the 0-value for type
// Result in case of an SEH exception.
template <class T, typename Result>
Result HandleExceptionsInMethodIfSupported(
    T* object, Result (T::*method)(), const char* location) {
  // NOTE: The user code can affect the way in which Google Test handles
  // exceptions by setting GTEST_FLAG(catch_exceptions), but only before
  // RUN_ALL_TESTS() starts. It is technically possible to check the flag
  // after the exception is caught and either report or re-throw the
  // exception based on the flag's value:
  //
  // try {
  //   // Perform the test method.
  // } catch (...) {
  //   if (GTEST_FLAG(catch_exceptions))
  //     // Report the exception as failure.
  //   else
  //     throw;  // Re-throws the original exception.
  // }
  //
  // However, the purpose of this flag is to allow the program to drop into
  // the debugger when the exception is thrown. On most platforms, once the
  // control enters the catch block, the exception origin information is
  // lost and the debugger will stop the program at the point of the
  // re-throw in this function -- instead of at the point of the original
  // throw statement in the code under test.  For this reason, we perform
  // the check early, sacrificing the ability to affect Google Test's
  // exception handling in the method where the exception is thrown.
  if (internal::GetUnitTestImpl()->catch_exceptions()) {
#if GTEST_HAS_EXCEPTIONS
    try {
      return HandleSehExceptionsInMethodIfSupported(object, method, location);
    } catch (const AssertionException&) {  // NOLINT
      // This failure was reported already.
    } catch (const internal::GoogleTestFailureException&) {  // NOLINT
      // This exception type can only be thrown by a failed Google
      // Test assertion with the intention of letting another testing
      // framework catch it.  Therefore we just re-throw it.
      throw;
    } catch (const std::exception& e) {  // NOLINT
      internal::ReportFailureInUnknownLocation(
          TestPartResult::kFatalFailure,
          FormatCxxExceptionMessage(e.what(), location));
    } catch (...) {  // NOLINT
      internal::ReportFailureInUnknownLocation(
          TestPartResult::kFatalFailure,
          FormatCxxExceptionMessage(NULL, location));
    }
    return static_cast<Result>(0);
#else
    return HandleSehExceptionsInMethodIfSupported(object, method, location);
#endif  // GTEST_HAS_EXCEPTIONS
  } else {
    return (object->*method)();
  }
}

}  // namespace internal

// Runs the test and updates the test result.
void Test::Run() {
  if (!HasSameFixtureClass()) return;

  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
  impl->os_stack_trace_getter()->UponLeavingGTest();
  internal::HandleExceptionsInMethodIfSupported(this, &Test::SetUp, "SetUp()");
  // We will run the test only if SetUp() was successful and didn't call
  // GTEST_SKIP().
  if (!HasFatalFailure() && !IsSkipped()) {
    impl->os_stack_trace_getter()->UponLeavingGTest();
    internal::HandleExceptionsInMethodIfSupported(
        this, &Test::TestBody, "the test body");
  }

  // However, we want to clean up as much as possible.  Hence we will
  // always call TearDown(), even if SetUp() or the test body has
  // failed.
  impl->os_stack_trace_getter()->UponLeavingGTest();
  internal::HandleExceptionsInMethodIfSupported(
      this, &Test::TearDown, "TearDown()");
}

// Returns true iff the current test has a fatal failure.
bool Test::HasFatalFailure() {
  return internal::GetUnitTestImpl()->current_test_result()->HasFatalFailure();
}

// Returns true iff the current test has a non-fatal failure.
bool Test::HasNonfatalFailure() {
  return internal::GetUnitTestImpl()->current_test_result()->
      HasNonfatalFailure();
}
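// Illustrative call sequence (not part of the library): for a passing test,
// Test::Run() above invokes, in order,
//
//   SetUp() -> TestBody() -> TearDown()
//
// and if SetUp() records a fatal failure or calls GTEST_SKIP(), TestBody()
// is skipped while TearDown() still runs.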
// Returns true iff the current test was skipped.
bool Test::IsSkipped() {
  return internal::GetUnitTestImpl()->current_test_result()->Skipped();
}

// class TestInfo

// Constructs a TestInfo object. It assumes ownership of the test factory
// object.
TestInfo::TestInfo(const std::string& a_test_case_name,
                   const std::string& a_name,
                   const char* a_type_param,
                   const char* a_value_param,
                   internal::CodeLocation a_code_location,
                   internal::TypeId fixture_class_id,
                   internal::TestFactoryBase* factory)
    : test_case_name_(a_test_case_name),
      name_(a_name),
      type_param_(a_type_param ? new std::string(a_type_param) : NULL),
      value_param_(a_value_param ? new std::string(a_value_param) : NULL),
      location_(a_code_location),
      fixture_class_id_(fixture_class_id),
      should_run_(false),
      is_disabled_(false),
      matches_filter_(false),
      factory_(factory),
      result_() {}

// Destructs a TestInfo object.
TestInfo::~TestInfo() { delete factory_; }

namespace internal {

// Creates a new TestInfo object and registers it with Google Test;
// returns the created object.
//
// Arguments:
//
//   test_case_name:   name of the test case
//   name:             name of the test
//   type_param:       the name of the test's type parameter, or NULL if
//                     this is not a typed or a type-parameterized test.
//   value_param:      text representation of the test's value parameter,
//                     or NULL if this is not a value-parameterized test.
//   code_location:    code location where the test is defined
//   fixture_class_id: ID of the test fixture class
//   set_up_tc:        pointer to the function that sets up the test case
//   tear_down_tc:     pointer to the function that tears down the test case
//   factory:          pointer to the factory that creates a test object.
//                     The newly created TestInfo instance will assume
//                     ownership of the factory object.
TestInfo* MakeAndRegisterTestInfo(
    const char* test_case_name,
    const char* name,
    const char* type_param,
    const char* value_param,
    CodeLocation code_location,
    TypeId fixture_class_id,
    SetUpTestCaseFunc set_up_tc,
    TearDownTestCaseFunc tear_down_tc,
    TestFactoryBase* factory) {
  TestInfo* const test_info =
      new TestInfo(test_case_name, name, type_param, value_param,
                   code_location, fixture_class_id, factory);
  GetUnitTestImpl()->AddTestInfo(set_up_tc, tear_down_tc, test_info);
  return test_info;
}

void ReportInvalidTestCaseType(const char* test_case_name,
                               CodeLocation code_location) {
  Message errors;
  errors
      << "Attempted redefinition of test case " << test_case_name << ".\n"
      << "All tests in the same test case must use the same test fixture\n"
      << "class.  However, in test case " << test_case_name << ", you tried\n"
      << "to define a test using a fixture class different from the one\n"
      << "used earlier. This can happen if the two fixture classes are\n"
      << "from different namespaces and have the same name. You should\n"
      << "probably rename one of the classes to put the tests into different\n"
      << "test cases.";

  GTEST_LOG_(ERROR) << FormatFileLocation(code_location.file.c_str(),
                                          code_location.line)
                    << " " << errors.GetString();
}
}  // namespace internal

namespace {

// A predicate that checks the test name of a TestInfo against a known
// value.
//
// This is used for implementation of the TestCase class only.  We put
// it in the anonymous namespace to prevent polluting the outer
// namespace.
//
// TestNameIs is copyable.
class TestNameIs {
 public:
  // Constructor.
  //
  // TestNameIs has NO default constructor.
  explicit TestNameIs(const char* name)
      : name_(name) {}

  // Returns true iff the test name of test_info matches name_.
  bool operator()(const TestInfo * test_info) const {
    return test_info && test_info->name() == name_;
  }

 private:
  std::string name_;
};

}  // namespace

namespace internal {

// This method expands all parameterized tests registered with macros TEST_P
// and INSTANTIATE_TEST_CASE_P into regular tests and registers those.
// This will be done just once during the program runtime.
void UnitTestImpl::RegisterParameterizedTests() {
  if (!parameterized_tests_registered_) {
    parameterized_test_registry_.RegisterTests();
    parameterized_tests_registered_ = true;
  }
}

}  // namespace internal

// Creates the test object, runs it, records its result, and then
// deletes it.
void TestInfo::Run() {
  if (!should_run_) return;

  // Tells UnitTest where to store the test result.
  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
  impl->set_current_test_info(this);

  TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();

  // Notifies the unit test event listeners that a test is about to start.
  repeater->OnTestStart(*this);

  const TimeInMillis start = internal::GetTimeInMillis();

  impl->os_stack_trace_getter()->UponLeavingGTest();

  // Creates the test object.
  Test* const test = internal::HandleExceptionsInMethodIfSupported(
      factory_, &internal::TestFactoryBase::CreateTest,
      "the test fixture's constructor");

  // Runs the test if the constructor didn't generate a fatal failure or invoke
  // GTEST_SKIP().
  // Note that the object will not be null
  if (!Test::HasFatalFailure() && !Test::IsSkipped()) {
    // This doesn't throw as all user code that can throw is wrapped into
    // exception handling code.
    test->Run();
  }

  // Deletes the test object.
  impl->os_stack_trace_getter()->UponLeavingGTest();
  internal::HandleExceptionsInMethodIfSupported(
      test, &Test::DeleteSelf_, "the test fixture's destructor");

  result_.set_elapsed_time(internal::GetTimeInMillis() - start);

  // Notifies the unit test event listener that a test has just finished.
  repeater->OnTestEnd(*this);

  // Tells UnitTest to stop associating assertion results to this
  // test.
  impl->set_current_test_info(NULL);
}

// class TestCase

// Gets the number of successful tests in this test case.
int TestCase::successful_test_count() const {
  return CountIf(test_info_list_, TestPassed);
}

// Gets the number of skipped tests in this test case.
int TestCase::skipped_test_count() const {
  return CountIf(test_info_list_, TestSkipped);
}

// Gets the number of failed tests in this test case.
int TestCase::failed_test_count() const {
  return CountIf(test_info_list_, TestFailed);
}

// Gets the number of disabled tests that will be reported in the XML report.
int TestCase::reportable_disabled_test_count() const {
  return CountIf(test_info_list_, TestReportableDisabled);
}

// Gets the number of disabled tests in this test case.
int TestCase::disabled_test_count() const {
  return CountIf(test_info_list_, TestDisabled);
}

// Gets the number of tests to be printed in the XML report.
int TestCase::reportable_test_count() const {
  return CountIf(test_info_list_, TestReportable);
}

// Gets the number of tests in this test case that should run.
int TestCase::test_to_run_count() const {
  return CountIf(test_info_list_, ShouldRunTest);
}

// Gets the number of all tests.
int TestCase::total_test_count() const {
  return static_cast<int>(test_info_list_.size());
}

// Creates a TestCase with the given name.
//
// Arguments:
//
//   name:         name of the test case
//   a_type_param: the name of the test case's type parameter, or NULL if
//                 this is not a typed or a type-parameterized test case.
//   set_up_tc:    pointer to the function that sets up the test case
//   tear_down_tc: pointer to the function that tears down the test case
TestCase::TestCase(const char* a_name, const char* a_type_param,
                   Test::SetUpTestCaseFunc set_up_tc,
                   Test::TearDownTestCaseFunc tear_down_tc)
    : name_(a_name),
      type_param_(a_type_param ? new std::string(a_type_param) : NULL),
      set_up_tc_(set_up_tc),
      tear_down_tc_(tear_down_tc),
      should_run_(false),
      elapsed_time_(0) {
}

// Destructor of TestCase.
TestCase::~TestCase() {
  // Deletes every Test in the collection.
  ForEach(test_info_list_, internal::Delete<TestInfo>);
}

// Returns the i-th test among all the tests. i can range from 0 to
// total_test_count() - 1. If i is not in that range, returns NULL.
const TestInfo* TestCase::GetTestInfo(int i) const {
  const int index = GetElementOr(test_indices_, i, -1);
  return index < 0 ? NULL : test_info_list_[index];
}

// Returns the i-th test among all the tests. i can range from 0 to
// total_test_count() - 1. If i is not in that range, returns NULL.
TestInfo* TestCase::GetMutableTestInfo(int i) {
  const int index = GetElementOr(test_indices_, i, -1);
  return index < 0 ? NULL : test_info_list_[index];
}

// Adds a test to this test case.  Will delete the test upon
// destruction of the TestCase object.
void TestCase::AddTestInfo(TestInfo * test_info) {
  test_info_list_.push_back(test_info);
  test_indices_.push_back(static_cast<int>(test_indices_.size()));
}

// Runs every test in this TestCase.
void TestCase::Run() {
  if (!should_run_) return;

  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
  impl->set_current_test_case(this);

  TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();

  repeater->OnTestCaseStart(*this);
  impl->os_stack_trace_getter()->UponLeavingGTest();
  internal::HandleExceptionsInMethodIfSupported(
      this, &TestCase::RunSetUpTestCase, "SetUpTestCase()");

  const internal::TimeInMillis start = internal::GetTimeInMillis();
  for (int i = 0; i < total_test_count(); i++) {
    GetMutableTestInfo(i)->Run();
  }
  elapsed_time_ = internal::GetTimeInMillis() - start;

  impl->os_stack_trace_getter()->UponLeavingGTest();
  internal::HandleExceptionsInMethodIfSupported(
      this, &TestCase::RunTearDownTestCase, "TearDownTestCase()");

  repeater->OnTestCaseEnd(*this);
  impl->set_current_test_case(NULL);
}

// Clears the results of all tests in this test case.
void TestCase::ClearResult() {
  ad_hoc_test_result_.Clear();
  ForEach(test_info_list_, TestInfo::ClearTestResult);
}

// Shuffles the tests in this test case.
void TestCase::ShuffleTests(internal::Random* random) {
  Shuffle(random, &test_indices_);
}

// Restores the test order to before the first shuffle.
void TestCase::UnshuffleTests() {
  for (size_t i = 0; i < test_indices_.size(); i++) {
    test_indices_[i] = static_cast<int>(i);
  }
}

// Formats a countable noun.  Depending on its quantity, either the
// singular form or the plural form is used. e.g.
//
// FormatCountableNoun(1, "formula", "formulae") returns "1 formula".
// FormatCountableNoun(5, "book", "books") returns "5 books".
static std::string FormatCountableNoun(int count,
                                       const char * singular_form,
                                       const char * plural_form) {
  return internal::StreamableToString(count) + " " +
      (count == 1 ? singular_form : plural_form);
}

// Formats the count of tests.
static std::string FormatTestCount(int test_count) {
  return FormatCountableNoun(test_count, "test", "tests");
}
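// Illustrative examples (not part of the library):
//
//   FormatCountableNoun(1, "test", "tests")  returns "1 test"
//   FormatTestCount(5)                       returns "5 tests"
//
// These feed banners such as "Running 5 tests from 2 test cases." printed
// by the pretty printer below.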
// Formats the count of test cases.
static std::string FormatTestCaseCount(int test_case_count) {
  return FormatCountableNoun(test_case_count, "test case", "test cases");
}

// Converts a TestPartResult::Type enum to a human-friendly string
// representation.  Both kNonFatalFailure and kFatalFailure are translated
// to "Failure", as the user usually doesn't care about the difference
// between the two when viewing the test result.
static const char * TestPartResultTypeToString(TestPartResult::Type type) {
  switch (type) {
    case TestPartResult::kSkip:
      return "Skipped";
    case TestPartResult::kSuccess:
      return "Success";

    case TestPartResult::kNonFatalFailure:
    case TestPartResult::kFatalFailure:
#ifdef _MSC_VER
      return "error: ";
#else
      return "Failure\n";
#endif
    default:
      return "Unknown result type";
  }
}

namespace internal {

// Prints a TestPartResult to an std::string.
static std::string PrintTestPartResultToString(
    const TestPartResult& test_part_result) {
  return (Message()
          << internal::FormatFileLocation(test_part_result.file_name(),
                                          test_part_result.line_number())
          << " " << TestPartResultTypeToString(test_part_result.type())
          << test_part_result.message()).GetString();
}

// Prints a TestPartResult.
static void PrintTestPartResult(const TestPartResult& test_part_result) {
  const std::string& result =
      PrintTestPartResultToString(test_part_result);
  printf("%s\n", result.c_str());
  fflush(stdout);
  // If the test program runs in Visual Studio or a debugger, the
  // following statements add the test part result message to the Output
  // window such that the user can double-click on it to jump to the
  // corresponding source code location; otherwise they do nothing.
#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
  // We don't call OutputDebugString*() on Windows Mobile, as printing
  // to stdout is done by OutputDebugString() there already - we don't
  // want the same message printed twice.
  ::OutputDebugStringA(result.c_str());
  ::OutputDebugStringA("\n");
#endif
}

// class PrettyUnitTestResultPrinter

enum GTestColor {
  COLOR_DEFAULT,
  COLOR_RED,
  COLOR_GREEN,
  COLOR_YELLOW
};

#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \
    !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW

// Returns the character attribute for the given color.
static WORD GetColorAttribute(GTestColor color) {
  switch (color) {
    case COLOR_RED:    return FOREGROUND_RED;
    case COLOR_GREEN:  return FOREGROUND_GREEN;
    case COLOR_YELLOW: return FOREGROUND_RED | FOREGROUND_GREEN;
    default:           return 0;
  }
}

static int GetBitOffset(WORD color_mask) {
  if (color_mask == 0) return 0;

  int bitOffset = 0;
  while ((color_mask & 1) == 0) {
    color_mask >>= 1;
    ++bitOffset;
  }
  return bitOffset;
}

static WORD GetNewColor(GTestColor color, WORD old_color_attrs) {
  // Let's reuse the BG (background) attributes of the console.
  static const WORD background_mask = BACKGROUND_BLUE | BACKGROUND_GREEN |
                                      BACKGROUND_RED | BACKGROUND_INTENSITY;
  static const WORD foreground_mask = FOREGROUND_BLUE | FOREGROUND_GREEN |
                                      FOREGROUND_RED | FOREGROUND_INTENSITY;
  const WORD existing_bg = old_color_attrs & background_mask;

  WORD new_color =
      GetColorAttribute(color) | existing_bg | FOREGROUND_INTENSITY;
  static const int bg_bitOffset = GetBitOffset(background_mask);
  static const int fg_bitOffset = GetBitOffset(foreground_mask);

  if (((new_color & background_mask) >> bg_bitOffset) ==
      ((new_color & foreground_mask) >> fg_bitOffset)) {
    new_color ^= FOREGROUND_INTENSITY;  // invert intensity
  }
  return new_color;
}
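// Illustrative example (not part of the library): with the standard Windows
// console attributes, background_mask above is 0xF0 and foreground_mask is
// 0x0F, so GetBitOffset(0xF0) returns 4 and GetBitOffset(0x0F) returns 0.
// GetNewColor() then flips FOREGROUND_INTENSITY whenever the chosen
// foreground nibble would equal the background nibble, keeping the text
// readable.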
#else

// Returns the ANSI color code for the given color. COLOR_DEFAULT is
// an invalid input.
static const char* GetAnsiColorCode(GTestColor color) {
  switch (color) {
    case COLOR_RED:    return "1";
    case COLOR_GREEN:  return "2";
    case COLOR_YELLOW: return "3";
    default:           return NULL;
  }
}

#endif  // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE

// Returns true iff Google Test should use colors in the output.
bool ShouldUseColor(bool stdout_is_tty) {
  const char* const gtest_color = GTEST_FLAG(color).c_str();

  if (String::CaseInsensitiveCStringEquals(gtest_color, "auto")) {
#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW
    // On Windows the TERM variable is usually not set, but the
    // console there does support colors.
    return stdout_is_tty;
#else
    // On non-Windows platforms, we rely on the TERM variable.
    const char* const term = posix::GetEnv("TERM");
    const bool term_supports_color =
        String::CStringEquals(term, "xterm") ||
        String::CStringEquals(term, "xterm-color") ||
        String::CStringEquals(term, "xterm-256color") ||
        String::CStringEquals(term, "screen") ||
        String::CStringEquals(term, "screen-256color") ||
        String::CStringEquals(term, "tmux") ||
        String::CStringEquals(term, "tmux-256color") ||
        String::CStringEquals(term, "rxvt-unicode") ||
        String::CStringEquals(term, "rxvt-unicode-256color") ||
        String::CStringEquals(term, "linux") ||
        String::CStringEquals(term, "cygwin");
    return stdout_is_tty && term_supports_color;
#endif  // GTEST_OS_WINDOWS
  }

  return String::CaseInsensitiveCStringEquals(gtest_color, "yes") ||
      String::CaseInsensitiveCStringEquals(gtest_color, "true") ||
      String::CaseInsensitiveCStringEquals(gtest_color, "t") ||
      String::CStringEquals(gtest_color, "1");
  // We take "yes", "true", "t", and "1" as meaning "yes".  If the
  // value is neither one of these nor "auto", we treat it as "no" to
  // be conservative.
}
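// Illustrative behavior of ShouldUseColor() (not part of the library):
// with --gtest_color=auto, TERM=xterm-256color and stdout on a TTY it
// returns true; with TERM=dumb (absent from the list above) it returns
// false; --gtest_color=yes forces colors even when stdout is redirected
// to a file.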
// Helpers for printing colored strings to stdout. Note that on Windows, we
// cannot simply emit special characters and have the terminal change colors.
// This routine must actually emit the characters rather than return a string
// that would be colored when printed, as can be done on Linux.
static void ColoredPrintf(GTestColor color, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);

#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS || \
    GTEST_OS_IOS || GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT
  const bool use_color = AlwaysFalse();
#else
  static const bool in_color_mode =
      ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);
  const bool use_color = in_color_mode && (color != COLOR_DEFAULT);
#endif  // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS
  // The '!= 0' comparison is necessary to satisfy MSVC 7.1.

  if (!use_color) {
    vprintf(fmt, args);
    va_end(args);
    return;
  }

#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \
    !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW
  const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);

  // Gets the current text color.
  CONSOLE_SCREEN_BUFFER_INFO buffer_info;
  GetConsoleScreenBufferInfo(stdout_handle, &buffer_info);
  const WORD old_color_attrs = buffer_info.wAttributes;
  const WORD new_color = GetNewColor(color, old_color_attrs);

  // We need to flush the stream buffers into the console before each
  // SetConsoleTextAttribute call lest it affect the text that is already
  // printed but has not yet reached the console.
  fflush(stdout);
  SetConsoleTextAttribute(stdout_handle, new_color);

  vprintf(fmt, args);

  fflush(stdout);
  // Restores the text color.
  SetConsoleTextAttribute(stdout_handle, old_color_attrs);
#else
  printf("\033[0;3%sm", GetAnsiColorCode(color));
  vprintf(fmt, args);
  printf("\033[m");  // Resets the terminal to default.
#endif  // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
  va_end(args);
}

// Text printed in Google Test's text output and --gtest_list_tests
// output to label the type parameter and value parameter for a test.
static const char kTypeParamLabel[] = "TypeParam";
static const char kValueParamLabel[] = "GetParam()";

static void PrintFullTestCommentIfPresent(const TestInfo& test_info) {
  const char* const type_param = test_info.type_param();
  const char* const value_param = test_info.value_param();

  if (type_param != NULL || value_param != NULL) {
    printf(", where ");
    if (type_param != NULL) {
      printf("%s = %s", kTypeParamLabel, type_param);
      if (value_param != NULL)
        printf(" and ");
    }
    if (value_param != NULL) {
      printf("%s = %s", kValueParamLabel, value_param);
    }
  }
}

// This class implements the TestEventListener interface.
//
// Class PrettyUnitTestResultPrinter is copyable.
class PrettyUnitTestResultPrinter : public TestEventListener {
 public:
  PrettyUnitTestResultPrinter() {}
  static void PrintTestName(const char * test_case, const char * test) {
    printf("%s.%s", test_case, test);
  }

  // The following methods override what's in the TestEventListener class.
  virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) {}
  virtual void OnTestIterationStart(const UnitTest& unit_test, int iteration);
  virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test);
  virtual void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) {}
  virtual void OnTestCaseStart(const TestCase& test_case);
  virtual void OnTestStart(const TestInfo& test_info);
  virtual void OnTestPartResult(const TestPartResult& result);
  virtual void OnTestEnd(const TestInfo& test_info);
  virtual void OnTestCaseEnd(const TestCase& test_case);
  virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test);
  virtual void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) {}
  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
  virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) {}

 private:
  static void PrintFailedTests(const UnitTest& unit_test);
  static void PrintSkippedTests(const UnitTest& unit_test);
};

// Fired before each iteration of tests starts.
void PrettyUnitTestResultPrinter::OnTestIterationStart(
    const UnitTest& unit_test, int iteration) {
  if (GTEST_FLAG(repeat) != 1)
    printf("\nRepeating all tests (iteration %d) . . .\n\n", iteration + 1);

  const char* const filter = GTEST_FLAG(filter).c_str();

  // Prints the filter if it's not *.  This reminds the user that some
  // tests may be skipped.
  if (!String::CStringEquals(filter, kUniversalFilter)) {
    ColoredPrintf(COLOR_YELLOW,
                  "Note: %s filter = %s\n", GTEST_NAME_, filter);
  }

  if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) {
    const Int32 shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);
    ColoredPrintf(COLOR_YELLOW,
                  "Note: This is test shard %d of %s.\n",
                  static_cast<int>(shard_index) + 1,
                  internal::posix::GetEnv(kTestTotalShards));
  }

  if (GTEST_FLAG(shuffle)) {
    ColoredPrintf(COLOR_YELLOW,
                  "Note: Randomizing tests' orders with a seed of %d .\n",
                  unit_test.random_seed());
  }

  ColoredPrintf(COLOR_GREEN,  "[==========] ");
  printf("Running %s from %s.\n",
         FormatTestCount(unit_test.test_to_run_count()).c_str(),
         FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
  fflush(stdout);
}

void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart(
    const UnitTest& /*unit_test*/) {
  ColoredPrintf(COLOR_GREEN,  "[----------] ");
  printf("Global test environment set-up.\n");
  fflush(stdout);
}

void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {
  const std::string counts =
      FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
  ColoredPrintf(COLOR_GREEN, "[----------] ");
  printf("%s from %s", counts.c_str(), test_case.name());
  if (test_case.type_param() == NULL) {
    printf("\n");
  } else {
    printf(", where %s = %s\n", kTypeParamLabel, test_case.type_param());
  }
  fflush(stdout);
}

void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
  ColoredPrintf(COLOR_GREEN,  "[ RUN      ] ");
  PrintTestName(test_info.test_case_name(), test_info.name());
  printf("\n");
  fflush(stdout);
}

// Called after an assertion failure.
void PrettyUnitTestResultPrinter::OnTestPartResult(
    const TestPartResult& result) {
  switch (result.type()) {
    // If the test part succeeded, or was skipped,
    // we don't need to do anything.
    case TestPartResult::kSkip:
    case TestPartResult::kSuccess:
      return;
    default:
      // Print failure message from the assertion
      // (e.g. expected this and got that).
      PrintTestPartResult(result);
      fflush(stdout);
  }
}

void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
  if (test_info.result()->Passed()) {
    ColoredPrintf(COLOR_GREEN, "[       OK ] ");
  } else if (test_info.result()->Skipped()) {
    ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
  } else {
    ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
  }
  PrintTestName(test_info.test_case_name(), test_info.name());
  if (test_info.result()->Failed())
    PrintFullTestCommentIfPresent(test_info);

  if (GTEST_FLAG(print_time)) {
    printf(" (%s ms)\n", internal::StreamableToString(
           test_info.result()->elapsed_time()).c_str());
  } else {
    printf("\n");
  }
  fflush(stdout);
}

void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) {
  if (!GTEST_FLAG(print_time)) return;

  const std::string counts =
      FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
  ColoredPrintf(COLOR_GREEN, "[----------] ");
  printf("%s from %s (%s ms total)\n\n",
         counts.c_str(), test_case.name(),
         internal::StreamableToString(test_case.elapsed_time()).c_str());
  fflush(stdout);
}

void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart(
    const UnitTest& /*unit_test*/) {
  ColoredPrintf(COLOR_GREEN,  "[----------] ");
  printf("Global test environment tear-down\n");
  fflush(stdout);
}
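// For reference, the printer above produces console output of this shape
// (illustrative only):
//
//   [==========] Running 2 tests from 1 test case.
//   [----------] Global test environment set-up.
//   [----------] 2 tests from FooTest
//   [ RUN      ] FooTest.Bar
//   [       OK ] FooTest.Bar (0 ms)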
// Internal helper for printing the list of failed tests.
void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
  const int failed_test_count = unit_test.failed_test_count();
  if (failed_test_count == 0) {
    return;
  }

  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
    const TestCase& test_case = *unit_test.GetTestCase(i);
    if (!test_case.should_run() || (test_case.failed_test_count() == 0)) {
      continue;
    }
    for (int j = 0; j < test_case.total_test_count(); ++j) {
      const TestInfo& test_info = *test_case.GetTestInfo(j);
      if (!test_info.should_run() || !test_info.result()->Failed()) {
        continue;
      }
      ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
      printf("%s.%s", test_case.name(), test_info.name());
      PrintFullTestCommentIfPresent(test_info);
      printf("\n");
    }
  }
}

// Internal helper for printing the list of skipped tests.
void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
  const int skipped_test_count = unit_test.skipped_test_count();
  if (skipped_test_count == 0) {
    return;
  }

  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
    const TestCase& test_case = *unit_test.GetTestCase(i);
    if (!test_case.should_run() || (test_case.skipped_test_count() == 0)) {
      continue;
    }
    for (int j = 0; j < test_case.total_test_count(); ++j) {
      const TestInfo& test_info = *test_case.GetTestInfo(j);
      if (!test_info.should_run() || !test_info.result()->Skipped()) {
        continue;
      }
      ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
      printf("%s.%s", test_case.name(), test_info.name());
      printf("\n");
    }
  }
}

void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                     int /*iteration*/) {
  ColoredPrintf(COLOR_GREEN,  "[==========] ");
  printf("%s from %s ran.",
         FormatTestCount(unit_test.test_to_run_count()).c_str(),
         FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
  if (GTEST_FLAG(print_time)) {
    printf(" (%s ms total)",
           internal::StreamableToString(unit_test.elapsed_time()).c_str());
  }
  printf("\n");
  ColoredPrintf(COLOR_GREEN,  "[  PASSED  ] ");
  printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());

  const int skipped_test_count = unit_test.skipped_test_count();
  if (skipped_test_count > 0) {
    ColoredPrintf(COLOR_GREEN, "[  SKIPPED ] ");
    printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
    PrintSkippedTests(unit_test);
  }

  int num_failures = unit_test.failed_test_count();
  if (!unit_test.Passed()) {
    const int failed_test_count = unit_test.failed_test_count();
    ColoredPrintf(COLOR_RED,  "[  FAILED  ] ");
    printf("%s, listed below:\n", FormatTestCount(failed_test_count).c_str());
    PrintFailedTests(unit_test);
    printf("\n%2d FAILED %s\n", num_failures,
           num_failures == 1 ? "TEST" : "TESTS");
  }

  int num_disabled = unit_test.reportable_disabled_test_count();
  if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) {
    if (!num_failures) {
      printf("\n");  // Add a spacer if no FAILURE banner is displayed.
    }
    ColoredPrintf(COLOR_YELLOW,
                  "  YOU HAVE %d DISABLED %s\n\n",
                  num_disabled,
                  num_disabled == 1 ? "TEST" : "TESTS");
  }
  // Ensure that Google Test output is printed before, e.g., heapchecker
  // output.
  fflush(stdout);
}

// End PrettyUnitTestResultPrinter

// class TestEventRepeater
//
// This class forwards events to other event listeners.
class TestEventRepeater : public TestEventListener {
 public:
  TestEventRepeater() : forwarding_enabled_(true) {}
  virtual ~TestEventRepeater();
  void Append(TestEventListener *listener);
  TestEventListener* Release(TestEventListener* listener);

  // Controls whether events will be forwarded to listeners_. Set to false
  // in death test child processes.
  bool forwarding_enabled() const { return forwarding_enabled_; }
  void set_forwarding_enabled(bool enable) { forwarding_enabled_ = enable; }

  virtual void OnTestProgramStart(const UnitTest& unit_test);
  virtual void OnTestIterationStart(const UnitTest& unit_test, int iteration);
  virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test);
  virtual void OnEnvironmentsSetUpEnd(const UnitTest& unit_test);
  virtual void OnTestCaseStart(const TestCase& test_case);
  virtual void OnTestStart(const TestInfo& test_info);
  virtual void OnTestPartResult(const TestPartResult& result);
  virtual void OnTestEnd(const TestInfo& test_info);
  virtual void OnTestCaseEnd(const TestCase& test_case);
  virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test);
  virtual void OnEnvironmentsTearDownEnd(const UnitTest& unit_test);
  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
  virtual void OnTestProgramEnd(const UnitTest& unit_test);

 private:
  // Controls whether events will be forwarded to listeners_. Set to false
  // in death test child processes.
  bool forwarding_enabled_;
  // The list of listeners that receive events.
  std::vector<TestEventListener*> listeners_;

  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventRepeater);
};

TestEventRepeater::~TestEventRepeater() {
  ForEach(listeners_, Delete<TestEventListener>);
}

void TestEventRepeater::Append(TestEventListener *listener) {
  listeners_.push_back(listener);
}

// FIXME: Factor the search functionality into Vector::Find.
TestEventListener* TestEventRepeater::Release(TestEventListener *listener) {
  for (size_t i = 0; i < listeners_.size(); ++i) {
    if (listeners_[i] == listener) {
      listeners_.erase(listeners_.begin() + i);
      return listener;
    }
  }

  return NULL;
}

// Since most methods are very similar, use macros to reduce boilerplate.
// This defines a member that forwards the call to all listeners.
#define GTEST_REPEATER_METHOD_(Name, Type) \
void TestEventRepeater::Name(const Type& parameter) { \
  if (forwarding_enabled_) { \
    for (size_t i = 0; i < listeners_.size(); i++) { \
      listeners_[i]->Name(parameter); \
    } \
  } \
}
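// Illustrative expansion (not part of the library): the invocation
// GTEST_REPEATER_METHOD_(OnTestStart, TestInfo) below generates
//
//   void TestEventRepeater::OnTestStart(const TestInfo& parameter) {
//     if (forwarding_enabled_) {
//       for (size_t i = 0; i < listeners_.size(); i++) {
//         listeners_[i]->OnTestStart(parameter);
//       }
//     }
//   }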
// This defines a member that forwards the call to all listeners in reverse
// order.
#define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \
void TestEventRepeater::Name(const Type& parameter) { \
  if (forwarding_enabled_) { \
    for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) { \
      listeners_[i]->Name(parameter); \
    } \
  } \
}

GTEST_REPEATER_METHOD_(OnTestProgramStart, UnitTest)
GTEST_REPEATER_METHOD_(OnEnvironmentsSetUpStart, UnitTest)
GTEST_REPEATER_METHOD_(OnTestCaseStart, TestCase)
GTEST_REPEATER_METHOD_(OnTestStart, TestInfo)
GTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult)
GTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest)
GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest)
GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsTearDownEnd, UnitTest)
GTEST_REVERSE_REPEATER_METHOD_(OnTestEnd, TestInfo)
GTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestCase)
GTEST_REVERSE_REPEATER_METHOD_(OnTestProgramEnd, UnitTest)

#undef GTEST_REPEATER_METHOD_
#undef GTEST_REVERSE_REPEATER_METHOD_

void TestEventRepeater::OnTestIterationStart(const UnitTest& unit_test,
                                             int iteration) {
  if (forwarding_enabled_) {
    for (size_t i = 0; i < listeners_.size(); i++) {
      listeners_[i]->OnTestIterationStart(unit_test, iteration);
    }
  }
}

void TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test,
                                           int iteration) {
  if (forwarding_enabled_) {
    for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) {
      listeners_[i]->OnTestIterationEnd(unit_test, iteration);
    }
  }
}

// End TestEventRepeater

// This class generates an XML output file.
class XmlUnitTestResultPrinter : public EmptyTestEventListener {
 public:
  explicit XmlUnitTestResultPrinter(const char* output_file);

  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
  void ListTestsMatchingFilter(const std::vector<TestCase*>& test_cases);

  // Prints an XML summary of all unit tests.
  static void PrintXmlTestsList(std::ostream* stream,
                                const std::vector<TestCase*>& test_cases);

 private:
  // Is c a whitespace character that is normalized to a space character
  // when it appears in an XML attribute value?
  static bool IsNormalizableWhitespace(char c) {
    return c == 0x9 || c == 0xA || c == 0xD;
  }

  // May c appear in a well-formed XML document?
  static bool IsValidXmlCharacter(char c) {
    return IsNormalizableWhitespace(c) || c >= 0x20;
  }

  // Returns an XML-escaped copy of the input string str.  If
  // is_attribute is true, the text is meant to appear as an attribute
  // value, and normalizable whitespace is preserved by replacing it
  // with character references.
  static std::string EscapeXml(const std::string& str, bool is_attribute);

  // Returns the given string with all characters invalid in XML removed.
  static std::string RemoveInvalidXmlCharacters(const std::string& str);

  // Convenience wrapper around EscapeXml when str is an attribute value.
  static std::string EscapeXmlAttribute(const std::string& str) {
    return EscapeXml(str, true);
  }

  // Convenience wrapper around EscapeXml when str is not an attribute value.
  static std::string EscapeXmlText(const char* str) {
    return EscapeXml(str, false);
  }

  // Verifies that the given attribute belongs to the given element and
  // streams the attribute as XML.
  static void OutputXmlAttribute(std::ostream* stream,
                                 const std::string& element_name,
                                 const std::string& name,
                                 const std::string& value);

  // Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
  static void OutputXmlCDataSection(::std::ostream* stream, const char* data);

  // Streams an XML representation of a TestInfo object.
  static void OutputXmlTestInfo(::std::ostream* stream,
                                const char* test_case_name,
                                const TestInfo& test_info);

  // Prints an XML representation of a TestCase object
  static void PrintXmlTestCase(::std::ostream* stream,
                               const TestCase& test_case);

  // Prints an XML summary of unit_test to output stream out.
  static void PrintXmlUnitTest(::std::ostream* stream,
                               const UnitTest& unit_test);

  // Produces a string representing the test properties in a result as space
  // delimited XML attributes based on the property key="value" pairs.
  // When the std::string is not empty, it includes a space at the beginning,
  // to delimit this attribute from prior attributes.
  static std::string TestPropertiesAsXmlAttributes(const TestResult& result);

  // Streams an XML representation of the test properties of a TestResult
  // object.
  static void OutputXmlTestProperties(std::ostream* stream,
                                      const TestResult& result);

  // The output file.
  const std::string output_file_;

  GTEST_DISALLOW_COPY_AND_ASSIGN_(XmlUnitTestResultPrinter);
};

// Creates a new XmlUnitTestResultPrinter.
XmlUnitTestResultPrinter::XmlUnitTestResultPrinter(const char* output_file)
    : output_file_(output_file) {
  if (output_file_.empty()) {
    GTEST_LOG_(FATAL) << "XML output file may not be null";
  }
}

// Called after the unit test ends.
void XmlUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                  int /*iteration*/) {
  FILE* xmlout = OpenFileForWriting(output_file_);
  std::stringstream stream;
  PrintXmlUnitTest(&stream, unit_test);
  fprintf(xmlout, "%s", StringStreamToString(&stream).c_str());
  fclose(xmlout);
}

void XmlUnitTestResultPrinter::ListTestsMatchingFilter(
    const std::vector<TestCase*>& test_cases) {
  FILE* xmlout = OpenFileForWriting(output_file_);
  std::stringstream stream;
  PrintXmlTestsList(&stream, test_cases);
  fprintf(xmlout, "%s", StringStreamToString(&stream).c_str());
  fclose(xmlout);
}

// Returns an XML-escaped copy of the input string str.  If is_attribute
// is true, the text is meant to appear as an attribute value, and
// normalizable whitespace is preserved by replacing it with character
// references.
//
// Invalid XML characters in str, if any, are stripped from the output.
// It is expected that most, if not all, of the text processed by this
// module will consist of ordinary English text.
// If this module is ever modified to produce version 1.1 XML output,
// most invalid characters can be retained using character references.
// FIXME: It might be nice to have a minimally invasive, human-readable
// escaping scheme for invalid characters, rather than dropping them.
std::string XmlUnitTestResultPrinter::EscapeXml(
    const std::string& str, bool is_attribute) {
  Message m;

  for (size_t i = 0; i < str.size(); ++i) {
    const char ch = str[i];
    switch (ch) {
      case '<':
        m << "&lt;";
        break;
      case '>':
        m << "&gt;";
        break;
      case '&':
        m << "&amp;";
        break;
      case '\'':
        if (is_attribute)
          m << "&apos;";
        else
          m << '\'';
        break;
      case '"':
        if (is_attribute)
          m << "&quot;";
        else
          m << '"';
        break;
      default:
        if (IsValidXmlCharacter(ch)) {
          if (is_attribute && IsNormalizableWhitespace(ch))
            m << "&#x" << String::FormatByte(static_cast<unsigned char>(ch))
              << ";";
          else
            m << ch;
        }
        break;
    }
  }

  return m.GetString();
}
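// Illustrative examples (not part of the library):
//
//   EscapeXml("3 < 5 & 'done'", true) yields "3 &lt; 5 &amp; &apos;done&apos;"
//   EscapeXml("a\tb", true)           yields "a&#x09;b"
//   EscapeXml("a\tb", false)          keeps the tab character as-is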
// Returns the given string with all characters invalid in XML removed.
// Currently invalid characters are dropped from the string. An
// alternative is to replace them with certain characters such as . or ?.
std::string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters(
    const std::string& str) {
  std::string output;
  output.reserve(str.size());
  for (std::string::const_iterator it = str.begin(); it != str.end(); ++it)
    if (IsValidXmlCharacter(*it))
      output.push_back(*it);

  return output;
}

// The following routines generate an XML representation of a UnitTest
// object.
// GOOGLETEST_CM0009 DO NOT DELETE
//
// This is how Google Test concepts map to the DTD:
//
// <testsuites name="AllTests">        <-- corresponds to a UnitTest object
//   <testsuite name="testcase-name">  <-- corresponds to a TestCase object
//     <testcase name="test-name">     <-- corresponds to a TestInfo object
//       <failure message="...">...</failure>
//       <failure message="...">...</failure>
//       <failure message="...">...</failure>
//                                     <-- individual assertion failures
//     </testcase>
//   </testsuite>
// </testsuites>

// Formats the given time in milliseconds as seconds.
std::string FormatTimeInMillisAsSeconds(TimeInMillis ms) {
  ::std::stringstream ss;
  ss << (static_cast<double>(ms) * 1e-3);
  return ss.str();
}

static bool PortableLocaltime(time_t seconds, struct tm* out) {
#if defined(_MSC_VER)
  return localtime_s(out, &seconds) == 0;
#elif defined(__MINGW32__) || defined(__MINGW64__)
  // MINGW <time.h> provides neither localtime_r nor localtime_s, but uses
  // Windows' localtime(), which has a thread-local tm buffer.
  struct tm* tm_ptr = localtime(&seconds);  // NOLINT
  if (tm_ptr == NULL)
    return false;
  *out = *tm_ptr;
  return true;
#else
  return localtime_r(&seconds, out) != NULL;
#endif
}

// Converts the given epoch time in milliseconds to a date string in the ISO
// 8601 format, without the timezone information.
std::string FormatEpochTimeInMillisAsIso8601(TimeInMillis ms) {
  struct tm time_struct;
  if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))
    return "";
  // YYYY-MM-DDThh:mm:ss
  return StreamableToString(time_struct.tm_year + 1900) + "-" +
      String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" +
      String::FormatIntWidth2(time_struct.tm_mday) + "T" +
      String::FormatIntWidth2(time_struct.tm_hour) + ":" +
      String::FormatIntWidth2(time_struct.tm_min) + ":" +
      String::FormatIntWidth2(time_struct.tm_sec);
}

// Streams an XML CDATA section, escaping invalid CDATA sequences as needed.
void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream,
                                                     const char* data) {
  const char* segment = data;
  *stream << "<![CDATA[";
  for (;;) {
    const char* const next_segment = strstr(segment, "]]>");
    if (next_segment != NULL) {
      stream->write(
          segment, static_cast<std::streamsize>(next_segment - segment));
      *stream << "]]>]]&gt;<![CDATA[";
      segment = next_segment + strlen("]]>");
    } else {
      *stream << segment;
      break;
    }
  }
  *stream << "]]>";
}

void XmlUnitTestResultPrinter::OutputXmlAttribute(
    std::ostream* stream,
    const std::string& element_name,
    const std::string& name,
    const std::string& value) {
  const std::vector<std::string>& allowed_names =
      GetReservedAttributesForElement(element_name);

  GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
                   allowed_names.end())
      << "Attribute " << name << " is not allowed for element <"
      << element_name << ">.";

  *stream << " " << name << "=\"" << EscapeXmlAttribute(value) << "\"";
}
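// Illustrative examples (not part of the library):
//
//   FormatTimeInMillisAsSeconds(2500) yields "2.5".
//   OutputXmlCDataSection splits an embedded "]]>" terminator so the CDATA
//   stays well-formed: the data a]]>b is written as
//   <![CDATA[a]]>]]&gt;<![CDATA[b]]>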
"run" : "notrun"); OutputXmlAttribute(stream, kTestcase, "time", FormatTimeInMillisAsSeconds(result.elapsed_time())); OutputXmlAttribute(stream, kTestcase, "classname", test_case_name); int failures = 0; for (int i = 0; i < result.total_part_count(); ++i) { const TestPartResult& part = result.GetTestPartResult(i); if (part.failed()) { if (++failures == 1) { *stream << ">\n"; } const std::string location = internal::FormatCompilerIndependentFileLocation(part.file_name(), part.line_number()); const std::string summary = location + "\n" + part.summary(); *stream << " "; const std::string detail = location + "\n" + part.message(); OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str()); *stream << "\n"; } } if (failures == 0 && result.test_property_count() == 0) { *stream << " />\n"; } else { if (failures == 0) { *stream << ">\n"; } OutputXmlTestProperties(stream, result); *stream << " \n"; } } // Prints an XML representation of a TestCase object void XmlUnitTestResultPrinter::PrintXmlTestCase(std::ostream* stream, const TestCase& test_case) { const std::string kTestsuite = "testsuite"; *stream << " <" << kTestsuite; OutputXmlAttribute(stream, kTestsuite, "name", test_case.name()); OutputXmlAttribute(stream, kTestsuite, "tests", StreamableToString(test_case.reportable_test_count())); if (!GTEST_FLAG(list_tests)) { OutputXmlAttribute(stream, kTestsuite, "failures", StreamableToString(test_case.failed_test_count())); OutputXmlAttribute( stream, kTestsuite, "disabled", StreamableToString(test_case.reportable_disabled_test_count())); OutputXmlAttribute(stream, kTestsuite, "errors", "0"); OutputXmlAttribute(stream, kTestsuite, "time", FormatTimeInMillisAsSeconds(test_case.elapsed_time())); *stream << TestPropertiesAsXmlAttributes(test_case.ad_hoc_test_result()); } *stream << ">\n"; for (int i = 0; i < test_case.total_test_count(); ++i) { if (test_case.GetTestInfo(i)->is_reportable()) OutputXmlTestInfo(stream, test_case.name(), *test_case.GetTestInfo(i)); } *stream << " \n"; } // Prints an XML summary of unit_test to output stream out. 
// Prints an XML summary of unit_test to output stream out.
void XmlUnitTestResultPrinter::PrintXmlUnitTest(std::ostream* stream,
                                                const UnitTest& unit_test) {
  const std::string kTestsuites = "testsuites";

  *stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
  *stream << "<" << kTestsuites;

  OutputXmlAttribute(stream, kTestsuites, "tests",
                     StreamableToString(unit_test.reportable_test_count()));
  OutputXmlAttribute(stream, kTestsuites, "failures",
                     StreamableToString(unit_test.failed_test_count()));
  OutputXmlAttribute(
      stream, kTestsuites, "disabled",
      StreamableToString(unit_test.reportable_disabled_test_count()));
  OutputXmlAttribute(stream, kTestsuites, "errors", "0");
  OutputXmlAttribute(
      stream, kTestsuites, "timestamp",
      FormatEpochTimeInMillisAsIso8601(unit_test.start_timestamp()));
  OutputXmlAttribute(stream, kTestsuites, "time",
                     FormatTimeInMillisAsSeconds(unit_test.elapsed_time()));

  if (GTEST_FLAG(shuffle)) {
    OutputXmlAttribute(stream, kTestsuites, "random_seed",
                       StreamableToString(unit_test.random_seed()));
  }
  *stream << TestPropertiesAsXmlAttributes(unit_test.ad_hoc_test_result());

  OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
  *stream << ">\n";

  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
    if (unit_test.GetTestCase(i)->reportable_test_count() > 0)
      PrintXmlTestCase(stream, *unit_test.GetTestCase(i));
  }
  *stream << "</" << kTestsuites << ">\n";
}

void XmlUnitTestResultPrinter::PrintXmlTestsList(
    std::ostream* stream, const std::vector<TestCase*>& test_cases) {
  const std::string kTestsuites = "testsuites";

  *stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
  *stream << "<" << kTestsuites;

  int total_tests = 0;
  for (size_t i = 0; i < test_cases.size(); ++i) {
    total_tests += test_cases[i]->total_test_count();
  }
  OutputXmlAttribute(stream, kTestsuites, "tests",
                     StreamableToString(total_tests));
  OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
  *stream << ">\n";

  for (size_t i = 0; i < test_cases.size(); ++i) {
    PrintXmlTestCase(stream, *test_cases[i]);
  }
  *stream << "</" << kTestsuites << ">\n";
}

// Produces a string representing the test properties in a result as space
// delimited XML attributes based on the property key="value" pairs.
std::string XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes(
    const TestResult& result) {
  Message attributes;
  for (int i = 0; i < result.test_property_count(); ++i) {
    const TestProperty& property = result.GetTestProperty(i);
    attributes << " " << property.key() << "="
        << "\"" << EscapeXmlAttribute(property.value()) << "\"";
  }
  return attributes.GetString();
}

void XmlUnitTestResultPrinter::OutputXmlTestProperties(
    std::ostream* stream, const TestResult& result) {
  const std::string kProperties = "properties";
  const std::string kProperty = "property";

  if (result.test_property_count() <= 0) {
    return;
  }

  *stream << "<" << kProperties << ">\n";
  for (int i = 0; i < result.test_property_count(); ++i) {
    const TestProperty& property = result.GetTestProperty(i);
    *stream << "<" << kProperty;
    *stream << " name=\"" << EscapeXmlAttribute(property.key()) << "\"";
    *stream << " value=\"" << EscapeXmlAttribute(property.value()) << "\"";
    *stream << "/>\n";
  }
  *stream << "</" << kProperties << ">\n";
}

// End XmlUnitTestResultPrinter

// This class generates a JSON output file.
class JsonUnitTestResultPrinter : public EmptyTestEventListener {
 public:
  explicit JsonUnitTestResultPrinter(const char* output_file);

  virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);

  // Prints a JSON summary of all unit tests.
  static void PrintJsonTestList(::std::ostream* stream,
                                const std::vector<TestCase*>& test_cases);

 private:
  // Returns a JSON-escaped copy of the input string str.
  static std::string EscapeJson(const std::string& str);

  // Verifies that the given attribute belongs to the given element and
  // streams the attribute as JSON.
  static void OutputJsonKey(std::ostream* stream,
                            const std::string& element_name,
                            const std::string& name,
                            const std::string& value,
                            const std::string& indent,
                            bool comma = true);
  static void OutputJsonKey(std::ostream* stream,
                            const std::string& element_name,
                            const std::string& name,
                            int value,
                            const std::string& indent,
                            bool comma = true);

  // Streams a JSON representation of a TestInfo object.
  static void OutputJsonTestInfo(::std::ostream* stream,
                                 const char* test_case_name,
                                 const TestInfo& test_info);

  // Prints a JSON representation of a TestCase object
  static void PrintJsonTestCase(::std::ostream* stream,
                                const TestCase& test_case);

  // Prints a JSON summary of unit_test to output stream out.
  static void PrintJsonUnitTest(::std::ostream* stream,
                                const UnitTest& unit_test);

  // Produces a string representing the test properties in a result as
  // a JSON dictionary.
  static std::string TestPropertiesAsJson(const TestResult& result,
                                          const std::string& indent);

  // The output file.
  const std::string output_file_;

  GTEST_DISALLOW_COPY_AND_ASSIGN_(JsonUnitTestResultPrinter);
};

// Creates a new JsonUnitTestResultPrinter.
JsonUnitTestResultPrinter::JsonUnitTestResultPrinter(const char* output_file)
    : output_file_(output_file) {
  if (output_file_.empty()) {
    GTEST_LOG_(FATAL) << "JSON output file may not be null";
  }
}

void JsonUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
                                                   int /*iteration*/) {
  FILE* jsonout = OpenFileForWriting(output_file_);
  std::stringstream stream;
  PrintJsonUnitTest(&stream, unit_test);
  fprintf(jsonout, "%s", StringStreamToString(&stream).c_str());
  fclose(jsonout);
}

// Returns a JSON-escaped copy of the input string str.
std::string JsonUnitTestResultPrinter::EscapeJson(const std::string& str) {
  Message m;

  for (size_t i = 0; i < str.size(); ++i) {
    const char ch = str[i];
    switch (ch) {
      case '\\':
      case '"':
      case '/':
        m << '\\' << ch;
        break;
      case '\b':
        m << "\\b";
        break;
      case '\t':
        m << "\\t";
        break;
      case '\n':
        m << "\\n";
        break;
      case '\f':
        m << "\\f";
        break;
      case '\r':
        m << "\\r";
        break;
      default:
        if (ch < ' ') {
          m << "\\u00" << String::FormatByte(static_cast<unsigned char>(ch));
        } else {
          m << ch;
        }
        break;
    }
  }

  return m.GetString();
}

// The following routines generate a JSON representation of a UnitTest
// object.

// Formats the given time in milliseconds as seconds.
static std::string FormatTimeInMillisAsDuration(TimeInMillis ms) {
  ::std::stringstream ss;
  ss << (static_cast<double>(ms) * 1e-3) << "s";
  return ss.str();
}
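// Illustration (editor's sketch, not part of googletest): the duration
// formatter above turns a millisecond count into a seconds string with an
// "s" suffix. A minimal standalone equivalent; MillisAsDuration is a
// hypothetical name, and the long long input stands in for TimeInMillis:
#if 0  // example only
#include <iostream>
#include <sstream>
#include <string>

std::string MillisAsDuration(long long ms) {
  std::ostringstream ss;
  ss << (static_cast<double>(ms) * 1e-3) << "s";  // 1500 -> "1.5s"
  return ss.str();
}

int main() {
  std::cout << MillisAsDuration(1500) << "\n";  // prints 1.5s
  std::cout << MillisAsDuration(42) << "\n";    // prints 0.042s
}
#endif  // example only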
// Converts the given epoch time in milliseconds to a date string in the
// RFC3339 format, without the timezone information.
static std::string FormatEpochTimeInMillisAsRFC3339(TimeInMillis ms) {
  struct tm time_struct;
  if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))
    return "";
  // YYYY-MM-DDThh:mm:ss
  return StreamableToString(time_struct.tm_year + 1900) + "-" +
      String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" +
      String::FormatIntWidth2(time_struct.tm_mday) + "T" +
      String::FormatIntWidth2(time_struct.tm_hour) + ":" +
      String::FormatIntWidth2(time_struct.tm_min) + ":" +
      String::FormatIntWidth2(time_struct.tm_sec) + "Z";
}

static inline std::string Indent(int width) {
  return std::string(width, ' ');
}

void JsonUnitTestResultPrinter::OutputJsonKey(
    std::ostream* stream,
    const std::string& element_name,
    const std::string& name,
    const std::string& value,
    const std::string& indent,
    bool comma) {
  const std::vector<std::string>& allowed_names =
      GetReservedAttributesForElement(element_name);

  GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
                   allowed_names.end())
      << "Key \"" << name << "\" is not allowed for value \"" << element_name
      << "\".";

  *stream << indent << "\"" << name << "\": \"" << EscapeJson(value) << "\"";
  if (comma)
    *stream << ",\n";
}

void JsonUnitTestResultPrinter::OutputJsonKey(
    std::ostream* stream,
    const std::string& element_name,
    const std::string& name,
    int value,
    const std::string& indent,
    bool comma) {
  const std::vector<std::string>& allowed_names =
      GetReservedAttributesForElement(element_name);

  GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
                   allowed_names.end())
      << "Key \"" << name << "\" is not allowed for value \"" << element_name
      << "\".";

  *stream << indent << "\"" << name << "\": " << StreamableToString(value);
  if (comma)
    *stream << ",\n";
}

// Prints a JSON representation of a TestInfo object.
void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream,
                                                   const char* test_case_name,
                                                   const TestInfo& test_info) {
  const TestResult& result = *test_info.result();
  const std::string kTestcase = "testcase";
  const std::string kIndent = Indent(10);

  *stream << Indent(8) << "{\n";
  OutputJsonKey(stream, kTestcase, "name", test_info.name(), kIndent);

  if (test_info.value_param() != NULL) {
    OutputJsonKey(stream, kTestcase, "value_param",
                  test_info.value_param(), kIndent);
  }
  if (test_info.type_param() != NULL) {
    OutputJsonKey(stream, kTestcase, "type_param", test_info.type_param(),
                  kIndent);
  }
  if (GTEST_FLAG(list_tests)) {
    OutputJsonKey(stream, kTestcase, "file", test_info.file(), kIndent);
    OutputJsonKey(stream, kTestcase, "line", test_info.line(), kIndent, false);
    *stream << "\n" << Indent(8) << "}";
    return;
  }

  OutputJsonKey(stream, kTestcase, "status",
                test_info.should_run() ? "RUN" : "NOTRUN", kIndent);
  OutputJsonKey(stream, kTestcase, "time",
                FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent);
  OutputJsonKey(stream, kTestcase, "classname", test_case_name, kIndent,
                false);
  *stream << TestPropertiesAsJson(result, kIndent);

  int failures = 0;
  for (int i = 0; i < result.total_part_count(); ++i) {
    const TestPartResult& part = result.GetTestPartResult(i);
    if (part.failed()) {
      *stream << ",\n";
      if (++failures == 1) {
        *stream << kIndent << "\"" << "failures" << "\": [\n";
      }
      const std::string location =
          internal::FormatCompilerIndependentFileLocation(part.file_name(),
                                                          part.line_number());
      const std::string message = EscapeJson(location + "\n" + part.message());
      *stream << kIndent << "  {\n"
              << kIndent << "    \"failure\": \"" << message << "\",\n"
              << kIndent << "    \"type\": \"\"\n"
              << kIndent << "  }";
    }
  }

  if (failures > 0)
    *stream << "\n" << kIndent << "]";
  *stream << "\n" << Indent(8) << "}";
}

// Prints a JSON representation of a TestCase object
void JsonUnitTestResultPrinter::PrintJsonTestCase(std::ostream* stream,
                                                  const TestCase& test_case) {
  const std::string kTestsuite = "testsuite";
  const std::string kIndent = Indent(6);

  *stream << Indent(4) << "{\n";
  OutputJsonKey(stream, kTestsuite, "name", test_case.name(), kIndent);
  OutputJsonKey(stream, kTestsuite, "tests", test_case.reportable_test_count(),
                kIndent);
  if (!GTEST_FLAG(list_tests)) {
    OutputJsonKey(stream, kTestsuite, "failures",
                  test_case.failed_test_count(), kIndent);
    OutputJsonKey(stream, kTestsuite, "disabled",
                  test_case.reportable_disabled_test_count(), kIndent);
    OutputJsonKey(stream, kTestsuite, "errors", 0, kIndent);
    OutputJsonKey(stream, kTestsuite, "time",
                  FormatTimeInMillisAsDuration(test_case.elapsed_time()),
                  kIndent, false);
    *stream << TestPropertiesAsJson(test_case.ad_hoc_test_result(), kIndent)
            << ",\n";
  }

  *stream << kIndent << "\"" << kTestsuite << "\": [\n";

  bool comma = false;
  for (int i = 0; i < test_case.total_test_count(); ++i) {
    if (test_case.GetTestInfo(i)->is_reportable()) {
      if (comma) {
        *stream << ",\n";
      } else {
        comma = true;
      }
      OutputJsonTestInfo(stream, test_case.name(), *test_case.GetTestInfo(i));
    }
  }
  *stream << "\n" << kIndent << "]\n" << Indent(4) << "}";
}
// Prints a JSON summary of unit_test to output stream out.
void JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream,
                                                  const UnitTest& unit_test) {
  const std::string kTestsuites = "testsuites";
  const std::string kIndent = Indent(2);
  *stream << "{\n";

  OutputJsonKey(stream, kTestsuites, "tests",
                unit_test.reportable_test_count(), kIndent);
  OutputJsonKey(stream, kTestsuites, "failures", unit_test.failed_test_count(),
                kIndent);
  OutputJsonKey(stream, kTestsuites, "disabled",
                unit_test.reportable_disabled_test_count(), kIndent);
  OutputJsonKey(stream, kTestsuites, "errors", 0, kIndent);
  if (GTEST_FLAG(shuffle)) {
    OutputJsonKey(stream, kTestsuites, "random_seed", unit_test.random_seed(),
                  kIndent);
  }
  OutputJsonKey(stream, kTestsuites, "timestamp",
                FormatEpochTimeInMillisAsRFC3339(unit_test.start_timestamp()),
                kIndent);
  OutputJsonKey(stream, kTestsuites, "time",
                FormatTimeInMillisAsDuration(unit_test.elapsed_time()),
                kIndent, false);

  *stream << TestPropertiesAsJson(unit_test.ad_hoc_test_result(), kIndent)
          << ",\n";
  OutputJsonKey(stream, kTestsuites, "name", "AllTests", kIndent);
  *stream << kIndent << "\"" << kTestsuites << "\": [\n";

  bool comma = false;
  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
    if (unit_test.GetTestCase(i)->reportable_test_count() > 0) {
      if (comma) {
        *stream << ",\n";
      } else {
        comma = true;
      }
      PrintJsonTestCase(stream, *unit_test.GetTestCase(i));
    }
  }

  *stream << "\n" << kIndent << "]\n" << "}\n";
}

void JsonUnitTestResultPrinter::PrintJsonTestList(
    std::ostream* stream, const std::vector<TestCase*>& test_cases) {
  const std::string kTestsuites = "testsuites";
  const std::string kIndent = Indent(2);
  *stream << "{\n";
  int total_tests = 0;
  for (size_t i = 0; i < test_cases.size(); ++i) {
    total_tests += test_cases[i]->total_test_count();
  }
  OutputJsonKey(stream, kTestsuites, "tests", total_tests, kIndent);

  OutputJsonKey(stream, kTestsuites, "name", "AllTests", kIndent);
  *stream << kIndent << "\"" << kTestsuites << "\": [\n";
  for (size_t i = 0; i < test_cases.size(); ++i) {
    if (i != 0) {
      *stream << ",\n";
    }
    PrintJsonTestCase(stream, *test_cases[i]);
  }
  *stream << "\n"
          << kIndent << "]\n"
          << "}\n";
}

// Produces a string representing the test properties in a result as
// a JSON dictionary.
std::string JsonUnitTestResultPrinter::TestPropertiesAsJson(
    const TestResult& result, const std::string& indent) {
  Message attributes;
  for (int i = 0; i < result.test_property_count(); ++i) {
    const TestProperty& property = result.GetTestProperty(i);
    attributes << ",\n" << indent << "\"" << property.key() << "\": "
               << "\"" << EscapeJson(property.value()) << "\"";
  }
  return attributes.GetString();
}

// End JsonUnitTestResultPrinter
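// Illustration (editor's sketch, not part of googletest): the streaming
// listener in the section below sends results over a socket and
// percent-encodes '=', '&', '%' and '\n' so they survive the wire format.
// A minimal standalone version of that encoding; HexByte and
// UrlEncodeSketch are hypothetical names:
#if 0  // example only
#include <cstdio>
#include <string>

// Stand-in for String::FormatByte: two uppercase hex digits.
std::string HexByte(unsigned char ch) {
  char buf[3];
  std::snprintf(buf, sizeof(buf), "%02X", ch);
  return buf;
}

std::string UrlEncodeSketch(const std::string& in) {
  std::string out;
  for (size_t i = 0; i < in.size(); ++i) {
    const char ch = in[i];
    if (ch == '%' || ch == '=' || ch == '&' || ch == '\n')
      out += "%" + HexByte(static_cast<unsigned char>(ch));
    else
      out += ch;
  }
  return out;
}

int main() {
  std::printf("%s\n", UrlEncodeSketch("a=b&c\n").c_str());  // a%3Db%26c%0A
}
#endif  // example only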
#if GTEST_CAN_STREAM_RESULTS_

// Checks if str contains '=', '&', '%' or '\n' characters. If yes,
// replaces them by "%xx" where xx is their hexadecimal value. For
// example, replaces "=" with "%3D". This algorithm is O(strlen(str))
// in both time and space -- important as the input str may contain an
// arbitrarily long test failure message and stack trace.
std::string StreamingListener::UrlEncode(const char* str) {
  std::string result;
  result.reserve(strlen(str) + 1);
  for (char ch = *str; ch != '\0'; ch = *++str) {
    switch (ch) {
      case '%':
      case '=':
      case '&':
      case '\n':
        result.append("%" +
                      String::FormatByte(static_cast<unsigned char>(ch)));
        break;
      default:
        result.push_back(ch);
        break;
    }
  }
  return result;
}

void StreamingListener::SocketWriter::MakeConnection() {
  GTEST_CHECK_(sockfd_ == -1)
      << "MakeConnection() can't be called when there is already a connection.";

  addrinfo hints;
  memset(&hints, 0, sizeof(hints));
  hints.ai_family = AF_UNSPEC;    // To allow both IPv4 and IPv6 addresses.
  hints.ai_socktype = SOCK_STREAM;
  addrinfo* servinfo = NULL;

  // Use the getaddrinfo() to get a linked list of IP addresses for
  // the given host name.
  const int error_num = getaddrinfo(
      host_name_.c_str(), port_num_.c_str(), &hints, &servinfo);
  if (error_num != 0) {
    GTEST_LOG_(WARNING) << "stream_result_to: getaddrinfo() failed: "
                        << gai_strerror(error_num);
  }

  // Loop through all the results and connect to the first we can.
  for (addrinfo* cur_addr = servinfo; sockfd_ == -1 && cur_addr != NULL;
       cur_addr = cur_addr->ai_next) {
    sockfd_ = socket(
        cur_addr->ai_family, cur_addr->ai_socktype, cur_addr->ai_protocol);
    if (sockfd_ != -1) {
      // Connect the client socket to the server socket.
      if (connect(sockfd_, cur_addr->ai_addr, cur_addr->ai_addrlen) == -1) {
        close(sockfd_);
        sockfd_ = -1;
      }
    }
  }

  freeaddrinfo(servinfo);  // all done with this structure

  if (sockfd_ == -1) {
    GTEST_LOG_(WARNING) << "stream_result_to: failed to connect to "
                        << host_name_ << ":" << port_num_;
  }
}

// End of class Streaming Listener
#endif  // GTEST_CAN_STREAM_RESULTS_

// class OsStackTraceGetter

const char* const OsStackTraceGetterInterface::kElidedFramesMarker =
    "... " GTEST_NAME_ " internal frames ...";

std::string OsStackTraceGetter::CurrentStackTrace(int max_depth, int skip_count)
    GTEST_LOCK_EXCLUDED_(mutex_) {
#if GTEST_HAS_ABSL
  std::string result;

  if (max_depth <= 0) {
    return result;
  }

  max_depth = std::min(max_depth, kMaxStackTraceDepth);

  std::vector<void*> raw_stack(max_depth);
  // Skips the frames requested by the caller, plus this function.
  const int raw_stack_size =
      absl::GetStackTrace(&raw_stack[0], max_depth, skip_count + 1);

  void* caller_frame = nullptr;
  {
    MutexLock lock(&mutex_);
    caller_frame = caller_frame_;
  }

  for (int i = 0; i < raw_stack_size; ++i) {
    if (raw_stack[i] == caller_frame &&
        !GTEST_FLAG(show_internal_stack_frames)) {
      // Add a marker to the trace and stop adding frames.
      absl::StrAppend(&result, kElidedFramesMarker, "\n");
      break;
    }

    char tmp[1024];
    const char* symbol = "(unknown)";
    if (absl::Symbolize(raw_stack[i], tmp, sizeof(tmp))) {
      symbol = tmp;
    }

    char line[1024];
    snprintf(line, sizeof(line), "  %p: %s\n", raw_stack[i], symbol);
    result += line;
  }

  return result;

#else  // !GTEST_HAS_ABSL
  static_cast<void>(max_depth);
  static_cast<void>(skip_count);
  return "";
#endif  // GTEST_HAS_ABSL
}

void OsStackTraceGetter::UponLeavingGTest() GTEST_LOCK_EXCLUDED_(mutex_) {
#if GTEST_HAS_ABSL
  void* caller_frame = nullptr;
  if (absl::GetStackTrace(&caller_frame, 1, 3) <= 0) {
    caller_frame = nullptr;
  }

  MutexLock lock(&mutex_);
  caller_frame_ = caller_frame;
#endif  // GTEST_HAS_ABSL
}

// A helper class that creates the premature-exit file in its
// constructor and deletes the file in its destructor.
class ScopedPrematureExitFile {
 public:
  explicit ScopedPrematureExitFile(const char* premature_exit_filepath)
      : premature_exit_filepath_(premature_exit_filepath ?
premature_exit_filepath : "") { // If a path to the premature-exit file is specified... if (!premature_exit_filepath_.empty()) { // create the file with a single "0" character in it. I/O // errors are ignored as there's nothing better we can do and we // don't want to fail the test because of this. FILE* pfile = posix::FOpen(premature_exit_filepath, "w"); fwrite("0", 1, 1, pfile); fclose(pfile); } } ~ScopedPrematureExitFile() { if (!premature_exit_filepath_.empty()) { int retval = remove(premature_exit_filepath_.c_str()); if (retval) { GTEST_LOG_(ERROR) << "Failed to remove premature exit filepath \"" << premature_exit_filepath_ << "\" with error " << retval; } } } private: const std::string premature_exit_filepath_; GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedPrematureExitFile); }; } // namespace internal // class TestEventListeners TestEventListeners::TestEventListeners() : repeater_(new internal::TestEventRepeater()), default_result_printer_(NULL), default_xml_generator_(NULL) { } TestEventListeners::~TestEventListeners() { delete repeater_; } // Returns the standard listener responsible for the default console // output. Can be removed from the listeners list to shut down default // console output. Note that removing this object from the listener list // with Release transfers its ownership to the user. void TestEventListeners::Append(TestEventListener* listener) { repeater_->Append(listener); } // Removes the given event listener from the list and returns it. It then // becomes the caller's responsibility to delete the listener. Returns // NULL if the listener is not found in the list. TestEventListener* TestEventListeners::Release(TestEventListener* listener) { if (listener == default_result_printer_) default_result_printer_ = NULL; else if (listener == default_xml_generator_) default_xml_generator_ = NULL; return repeater_->Release(listener); } // Returns repeater that broadcasts the TestEventListener events to all // subscribers. TestEventListener* TestEventListeners::repeater() { return repeater_; } // Sets the default_result_printer attribute to the provided listener. // The listener is also added to the listener list and previous // default_result_printer is removed from it and deleted. The listener can // also be NULL in which case it will not be added to the list. Does // nothing if the previous and the current listener objects are the same. void TestEventListeners::SetDefaultResultPrinter(TestEventListener* listener) { if (default_result_printer_ != listener) { // It is an error to pass this method a listener that is already in the // list. delete Release(default_result_printer_); default_result_printer_ = listener; if (listener != NULL) Append(listener); } } // Sets the default_xml_generator attribute to the provided listener. The // listener is also added to the listener list and previous // default_xml_generator is removed from it and deleted. The listener can // also be NULL in which case it will not be added to the list. Does // nothing if the previous and the current listener objects are the same. void TestEventListeners::SetDefaultXmlGenerator(TestEventListener* listener) { if (default_xml_generator_ != listener) { // It is an error to pass this method a listener that is already in the // list. delete Release(default_xml_generator_); default_xml_generator_ = listener; if (listener != NULL) Append(listener); } } // Controls whether events will be forwarded by the repeater to the // listeners in the list. 
bool TestEventListeners::EventForwardingEnabled() const { return repeater_->forwarding_enabled(); } void TestEventListeners::SuppressEventForwarding() { repeater_->set_forwarding_enabled(false); } // class UnitTest // Gets the singleton UnitTest object. The first time this method is // called, a UnitTest object is constructed and returned. Consecutive // calls will return the same object. // // We don't protect this under mutex_ as a user is not supposed to // call this before main() starts, from which point on the return // value will never change. UnitTest* UnitTest::GetInstance() { // When compiled with MSVC 7.1 in optimized mode, destroying the // UnitTest object upon exiting the program messes up the exit code, // causing successful tests to appear failed. We have to use a // different implementation in this case to bypass the compiler bug. // This implementation makes the compiler happy, at the cost of // leaking the UnitTest object. // CodeGear C++Builder insists on a public destructor for the // default implementation. Use this implementation to keep good OO // design with private destructor. #if (_MSC_VER == 1310 && !defined(_DEBUG)) || defined(__BORLANDC__) static UnitTest* const instance = new UnitTest; return instance; #else static UnitTest instance; return &instance; #endif // (_MSC_VER == 1310 && !defined(_DEBUG)) || defined(__BORLANDC__) } // Gets the number of successful test cases. int UnitTest::successful_test_case_count() const { return impl()->successful_test_case_count(); } // Gets the number of failed test cases. int UnitTest::failed_test_case_count() const { return impl()->failed_test_case_count(); } // Gets the number of all test cases. int UnitTest::total_test_case_count() const { return impl()->total_test_case_count(); } // Gets the number of all test cases that contain at least one test // that should run. int UnitTest::test_case_to_run_count() const { return impl()->test_case_to_run_count(); } // Gets the number of successful tests. int UnitTest::successful_test_count() const { return impl()->successful_test_count(); } // Gets the number of skipped tests. int UnitTest::skipped_test_count() const { return impl()->skipped_test_count(); } // Gets the number of failed tests. int UnitTest::failed_test_count() const { return impl()->failed_test_count(); } // Gets the number of disabled tests that will be reported in the XML report. int UnitTest::reportable_disabled_test_count() const { return impl()->reportable_disabled_test_count(); } // Gets the number of disabled tests. int UnitTest::disabled_test_count() const { return impl()->disabled_test_count(); } // Gets the number of tests to be printed in the XML report. int UnitTest::reportable_test_count() const { return impl()->reportable_test_count(); } // Gets the number of all tests. int UnitTest::total_test_count() const { return impl()->total_test_count(); } // Gets the number of tests that should run. int UnitTest::test_to_run_count() const { return impl()->test_to_run_count(); } // Gets the time of the test program start, in ms from the start of the // UNIX epoch. internal::TimeInMillis UnitTest::start_timestamp() const { return impl()->start_timestamp(); } // Gets the elapsed time, in milliseconds. internal::TimeInMillis UnitTest::elapsed_time() const { return impl()->elapsed_time(); } // Returns true iff the unit test passed (i.e. all test cases passed). bool UnitTest::Passed() const { return impl()->Passed(); } // Returns true iff the unit test failed (i.e. 
some test case failed // or something outside of all tests failed).
bool UnitTest::Failed() const { return impl()->Failed(); }

// Gets the i-th test case among all the test cases. i can range from 0 to
// total_test_case_count() - 1. If i is not in that range, returns NULL.
const TestCase* UnitTest::GetTestCase(int i) const {
  return impl()->GetTestCase(i);
}

// Returns the TestResult containing information on test failures and
// properties logged outside of individual test cases.
const TestResult& UnitTest::ad_hoc_test_result() const {
  return *impl()->ad_hoc_test_result();
}

// Gets the i-th test case among all the test cases. i can range from 0 to
// total_test_case_count() - 1. If i is not in that range, returns NULL.
TestCase* UnitTest::GetMutableTestCase(int i) {
  return impl()->GetMutableTestCase(i);
}

// Returns the list of event listeners that can be used to track events
// inside Google Test.
TestEventListeners& UnitTest::listeners() {
  return *impl()->listeners();
}

// Registers and returns a global test environment. When a test
// program is run, all global test environments will be set-up in the
// order they were registered. After all tests in the program have
// finished, all global test environments will be torn-down in the
// *reverse* order they were registered.
//
// The UnitTest object takes ownership of the given environment.
//
// We don't protect this under mutex_, as we only support calling it
// from the main thread.
Environment* UnitTest::AddEnvironment(Environment* env) {
  if (env == NULL) {
    return NULL;
  }

  impl_->environments().push_back(env);
  return env;
}

// Adds a TestPartResult to the current TestResult object. All Google Test
// assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc) eventually call
// this to report their results. The user code should use the
// assertion macros instead of calling this directly.
void UnitTest::AddTestPartResult(
    TestPartResult::Type result_type,
    const char* file_name,
    int line_number,
    const std::string& message,
    const std::string& os_stack_trace) GTEST_LOCK_EXCLUDED_(mutex_) {
  Message msg;
  msg << message;

  internal::MutexLock lock(&mutex_);
  if (impl_->gtest_trace_stack().size() > 0) {
    msg << "\n" << GTEST_NAME_ << " trace:";

    for (int i = static_cast<int>(impl_->gtest_trace_stack().size());
         i > 0; --i) {
      const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1];
      msg << "\n" << internal::FormatFileLocation(trace.file, trace.line)
          << " " << trace.message;
    }
  }

  if (os_stack_trace.c_str() != NULL && !os_stack_trace.empty()) {
    msg << internal::kStackTraceMarker << os_stack_trace;
  }

  const TestPartResult result =
    TestPartResult(result_type, file_name, line_number,
                   msg.GetString().c_str());
  impl_->GetTestPartResultReporterForCurrentThread()->
      ReportTestPartResult(result);

  if (result_type != TestPartResult::kSuccess &&
      result_type != TestPartResult::kSkip) {
    // gtest_break_on_failure takes precedence over
    // gtest_throw_on_failure. This allows a user to set the latter
    // in the code (perhaps in order to use Google Test assertions
    // with another testing framework) and specify the former on the
    // command line for debugging.
    if (GTEST_FLAG(break_on_failure)) {
#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT
      // Using DebugBreak on Windows allows gtest to still break into a debugger
      // when a failure happens and both the --gtest_break_on_failure and
      // the --gtest_catch_exceptions flags are specified.
      DebugBreak();
#elif (!defined(__native_client__)) &&            \
    ((defined(__clang__) || defined(__GNUC__)) && \
     (defined(__x86_64__) || defined(__i386__)))
      // with clang/gcc we can achieve the same effect on x86 by invoking int3
      asm("int3");
#else
      // Dereference NULL through a volatile pointer to prevent the compiler
      // from removing. We use this rather than abort() or __builtin_trap() for
      // portability: Symbian doesn't implement abort() well, and some debuggers
      // don't correctly trap abort().
      *static_cast<volatile int*>(NULL) = 1;
#endif  // GTEST_OS_WINDOWS
    } else if (GTEST_FLAG(throw_on_failure)) {
#if GTEST_HAS_EXCEPTIONS
      throw internal::GoogleTestFailureException(result);
#else
      // We cannot call abort() as it generates a pop-up in debug mode
      // that cannot be suppressed in VC 7.1 or below.
      exit(1);
#endif
    }
  }
}

// Adds a TestProperty to the current TestResult object when invoked from
// inside a test, to current TestCase's ad_hoc_test_result_ when invoked
// from SetUpTestCase or TearDownTestCase, or to the global property set
// when invoked elsewhere. If the result already contains a property with
// the same key, the value will be updated.
void UnitTest::RecordProperty(const std::string& key,
                              const std::string& value) {
  impl_->RecordProperty(TestProperty(key, value));
}

// Runs all tests in this UnitTest object and prints the result.
// Returns 0 if successful, or 1 otherwise.
//
// We don't protect this under mutex_, as we only support calling it
// from the main thread.
int UnitTest::Run() {
  const bool in_death_test_child_process =
      internal::GTEST_FLAG(internal_run_death_test).length() > 0;

  // Google Test implements this protocol for catching that a test
  // program exits before returning control to Google Test:
  //
  //   1. Upon start, Google Test creates a file whose absolute path
  //      is specified by the environment variable
  //      TEST_PREMATURE_EXIT_FILE.
  //   2. When Google Test has finished its work, it deletes the file.
  //
  // This allows a test runner to set TEST_PREMATURE_EXIT_FILE before
  // running a Google-Test-based test program and check the existence
  // of the file at the end of the test execution to see if it has
  // exited prematurely.

  // If we are in the child process of a death test, don't
  // create/delete the premature exit file, as doing so is unnecessary
  // and will confuse the parent process. Otherwise, create/delete
  // the file upon entering/leaving this function. If the program
  // somehow exits before this function has a chance to return, the
  // premature-exit file will be left undeleted, causing a test runner
  // that understands the premature-exit-file protocol to report the
  // test as having failed.
  const internal::ScopedPrematureExitFile premature_exit_file(
      in_death_test_child_process ?
      NULL : internal::posix::GetEnv("TEST_PREMATURE_EXIT_FILE"));

  // Captures the value of GTEST_FLAG(catch_exceptions). This value will be
  // used for the duration of the program.
  impl()->set_catch_exceptions(GTEST_FLAG(catch_exceptions));

#if GTEST_OS_WINDOWS
  // Either the user wants Google Test to catch exceptions thrown by the
  // tests or this is executing in the context of death test child
  // process. In either case the user does not want to see pop-up dialogs
  // about crashes - they are expected.
  if (impl()->catch_exceptions() || in_death_test_child_process) {
# if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT
    // SetErrorMode doesn't exist on CE.
SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOALIGNMENTFAULTEXCEPT | SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX); # endif // !GTEST_OS_WINDOWS_MOBILE # if (defined(_MSC_VER) || GTEST_OS_WINDOWS_MINGW) && !GTEST_OS_WINDOWS_MOBILE // Death test children can be terminated with _abort(). On Windows, // _abort() can show a dialog with a warning message. This forces the // abort message to go to stderr instead. _set_error_mode(_OUT_TO_STDERR); # endif # if _MSC_VER >= 1400 && !GTEST_OS_WINDOWS_MOBILE // In the debug version, Visual Studio pops up a separate dialog // offering a choice to debug the aborted program. We need to suppress // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement // executed. Google Test will notify the user of any unexpected // failure via stderr. // // VC++ doesn't define _set_abort_behavior() prior to the version 8.0. // Users of prior VC versions shall suffer the agony and pain of // clicking through the countless debug dialogs. // FIXME: find a way to suppress the abort dialog() in the // debug mode when compiled with VC 7.1 or lower. if (!GTEST_FLAG(break_on_failure)) _set_abort_behavior( 0x0, // Clear the following flags: _WRITE_ABORT_MSG | _CALL_REPORTFAULT); // pop-up window, core dump. # endif } #endif // GTEST_OS_WINDOWS return internal::HandleExceptionsInMethodIfSupported( impl(), &internal::UnitTestImpl::RunAllTests, "auxiliary test code (environments or event listeners)") ? 0 : 1; } // Returns the working directory when the first TEST() or TEST_F() was // executed. const char* UnitTest::original_working_dir() const { return impl_->original_working_dir_.c_str(); } // Returns the TestCase object for the test that's currently running, // or NULL if no test is running. const TestCase* UnitTest::current_test_case() const GTEST_LOCK_EXCLUDED_(mutex_) { internal::MutexLock lock(&mutex_); return impl_->current_test_case(); } // Returns the TestInfo object for the test that's currently running, // or NULL if no test is running. const TestInfo* UnitTest::current_test_info() const GTEST_LOCK_EXCLUDED_(mutex_) { internal::MutexLock lock(&mutex_); return impl_->current_test_info(); } // Returns the random seed used at the start of the current test run. int UnitTest::random_seed() const { return impl_->random_seed(); } // Returns ParameterizedTestCaseRegistry object used to keep track of // value-parameterized tests and instantiate and register them. internal::ParameterizedTestCaseRegistry& UnitTest::parameterized_test_registry() GTEST_LOCK_EXCLUDED_(mutex_) { return impl_->parameterized_test_registry(); } // Creates an empty UnitTest. UnitTest::UnitTest() { impl_ = new internal::UnitTestImpl(this); } // Destructor of UnitTest. UnitTest::~UnitTest() { delete impl_; } // Pushes a trace defined by SCOPED_TRACE() on to the per-thread // Google Test trace stack. void UnitTest::PushGTestTrace(const internal::TraceInfo& trace) GTEST_LOCK_EXCLUDED_(mutex_) { internal::MutexLock lock(&mutex_); impl_->gtest_trace_stack().push_back(trace); } // Pops a trace from the per-thread Google Test trace stack. 
void UnitTest::PopGTestTrace()
    GTEST_LOCK_EXCLUDED_(mutex_) {
  internal::MutexLock lock(&mutex_);
  impl_->gtest_trace_stack().pop_back();
}

namespace internal {

UnitTestImpl::UnitTestImpl(UnitTest* parent)
    : parent_(parent),
      GTEST_DISABLE_MSC_WARNINGS_PUSH_(4355 /* using this in initializer */)
      default_global_test_part_result_reporter_(this),
      default_per_thread_test_part_result_reporter_(this),
      GTEST_DISABLE_MSC_WARNINGS_POP_()
      global_test_part_result_repoter_(
          &default_global_test_part_result_reporter_),
      per_thread_test_part_result_reporter_(
          &default_per_thread_test_part_result_reporter_),
      parameterized_test_registry_(),
      parameterized_tests_registered_(false),
      last_death_test_case_(-1),
      current_test_case_(NULL),
      current_test_info_(NULL),
      ad_hoc_test_result_(),
      os_stack_trace_getter_(NULL),
      post_flag_parse_init_performed_(false),
      random_seed_(0),  // Will be overridden by the flag before first use.
      random_(0),  // Will be reseeded before first use.
      start_timestamp_(0),
      elapsed_time_(0),
#if GTEST_HAS_DEATH_TEST
      death_test_factory_(new DefaultDeathTestFactory),
#endif
      // Will be overridden by the flag before first use.
      catch_exceptions_(false) {
  listeners()->SetDefaultResultPrinter(new PrettyUnitTestResultPrinter);
}

UnitTestImpl::~UnitTestImpl() {
  // Deletes every TestCase.
  ForEach(test_cases_, internal::Delete<TestCase>);

  // Deletes every Environment.
  ForEach(environments_, internal::Delete<Environment>);

  delete os_stack_trace_getter_;
}

// Adds a TestProperty to the current TestResult object when invoked in a
// context of a test, to current test case's ad_hoc_test_result when invoke
// from SetUpTestCase/TearDownTestCase, or to the global property set
// otherwise. If the result already contains a property with the same key,
// the value will be updated.
void UnitTestImpl::RecordProperty(const TestProperty& test_property) {
  std::string xml_element;
  TestResult* test_result;  // TestResult appropriate for property recording.

  if (current_test_info_ != NULL) {
    xml_element = "testcase";
    test_result = &(current_test_info_->result_);
  } else if (current_test_case_ != NULL) {
    xml_element = "testsuite";
    test_result = &(current_test_case_->ad_hoc_test_result_);
  } else {
    xml_element = "testsuites";
    test_result = &ad_hoc_test_result_;
  }
  test_result->RecordProperty(xml_element, test_property);
}

#if GTEST_HAS_DEATH_TEST
// Disables event forwarding if the control is currently in a death test
// subprocess. Must not be called before InitGoogleTest.
void UnitTestImpl::SuppressTestEventsIfInSubprocess() {
  if (internal_run_death_test_flag_.get() != NULL)
    listeners()->SuppressEventForwarding();
}
#endif  // GTEST_HAS_DEATH_TEST

// Initializes event listeners performing XML output as specified by
// UnitTestOptions. Must not be called before InitGoogleTest.
void UnitTestImpl::ConfigureXmlOutput() {
  const std::string& output_format = UnitTestOptions::GetOutputFormat();
  if (output_format == "xml") {
    listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter(
        UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));
  } else if (output_format == "json") {
    listeners()->SetDefaultXmlGenerator(new JsonUnitTestResultPrinter(
        UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));
  } else if (output_format != "") {
    GTEST_LOG_(WARNING) << "WARNING: unrecognized output format \""
                        << output_format << "\" ignored.";
  }
}

#if GTEST_CAN_STREAM_RESULTS_
// Initializes event listeners for streaming test results in string form.
// Must not be called before InitGoogleTest.
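// Illustration (editor's sketch, not part of googletest): the flag value
// consumed by the function below has the form "host:port", e.g.
// --gtest_stream_result_to=localhost:9000. A minimal standalone version of
// the split it performs; the target value is hypothetical:
#if 0  // example only
#include <iostream>
#include <string>

int main() {
  const std::string target = "localhost:9000";  // hypothetical flag value
  const size_t pos = target.find(':');
  if (pos != std::string::npos) {
    const std::string host = target.substr(0, pos);   // "localhost"
    const std::string port = target.substr(pos + 1);  // "9000"
    std::cout << host << " / " << port << "\n";
  }
}
#endif  // example only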
void UnitTestImpl::ConfigureStreamingOutput() { const std::string& target = GTEST_FLAG(stream_result_to); if (!target.empty()) { const size_t pos = target.find(':'); if (pos != std::string::npos) { listeners()->Append(new StreamingListener(target.substr(0, pos), target.substr(pos+1))); } else { GTEST_LOG_(WARNING) << "unrecognized streaming target \"" << target << "\" ignored."; } } } #endif // GTEST_CAN_STREAM_RESULTS_ // Performs initialization dependent upon flag values obtained in // ParseGoogleTestFlagsOnly. Is called from InitGoogleTest after the call to // ParseGoogleTestFlagsOnly. In case a user neglects to call InitGoogleTest // this function is also called from RunAllTests. Since this function can be // called more than once, it has to be idempotent. void UnitTestImpl::PostFlagParsingInit() { // Ensures that this function does not execute more than once. if (!post_flag_parse_init_performed_) { post_flag_parse_init_performed_ = true; #if defined(GTEST_CUSTOM_TEST_EVENT_LISTENER_) // Register to send notifications about key process state changes. listeners()->Append(new GTEST_CUSTOM_TEST_EVENT_LISTENER_()); #endif // defined(GTEST_CUSTOM_TEST_EVENT_LISTENER_) #if GTEST_HAS_DEATH_TEST InitDeathTestSubprocessControlInfo(); SuppressTestEventsIfInSubprocess(); #endif // GTEST_HAS_DEATH_TEST // Registers parameterized tests. This makes parameterized tests // available to the UnitTest reflection API without running // RUN_ALL_TESTS. RegisterParameterizedTests(); // Configures listeners for XML output. This makes it possible for users // to shut down the default XML output before invoking RUN_ALL_TESTS. ConfigureXmlOutput(); #if GTEST_CAN_STREAM_RESULTS_ // Configures listeners for streaming test results to the specified server. ConfigureStreamingOutput(); #endif // GTEST_CAN_STREAM_RESULTS_ #if GTEST_HAS_ABSL if (GTEST_FLAG(install_failure_signal_handler)) { absl::FailureSignalHandlerOptions options; absl::InstallFailureSignalHandler(options); } #endif // GTEST_HAS_ABSL } } // A predicate that checks the name of a TestCase against a known // value. // // This is used for implementation of the UnitTest class only. We put // it in the anonymous namespace to prevent polluting the outer // namespace. // // TestCaseNameIs is copyable. class TestCaseNameIs { public: // Constructor. explicit TestCaseNameIs(const std::string& name) : name_(name) {} // Returns true iff the name of test_case matches name_. bool operator()(const TestCase* test_case) const { return test_case != NULL && strcmp(test_case->name(), name_.c_str()) == 0; } private: std::string name_; }; // Finds and returns a TestCase with the given name. If one doesn't // exist, creates one and returns it. It's the CALLER'S // RESPONSIBILITY to ensure that this function is only called WHEN THE // TESTS ARE NOT SHUFFLED. // // Arguments: // // test_case_name: name of the test case // type_param: the name of the test case's type parameter, or NULL if // this is not a typed or a type-parameterized test case. // set_up_tc: pointer to the function that sets up the test case // tear_down_tc: pointer to the function that tears down the test case TestCase* UnitTestImpl::GetTestCase(const char* test_case_name, const char* type_param, Test::SetUpTestCaseFunc set_up_tc, Test::TearDownTestCaseFunc tear_down_tc) { // Can we find a TestCase with the given name? 
  const std::vector<TestCase*>::const_reverse_iterator test_case =
      std::find_if(test_cases_.rbegin(), test_cases_.rend(),
                   TestCaseNameIs(test_case_name));

  if (test_case != test_cases_.rend())
    return *test_case;

  // No. Let's create one.
  TestCase* const new_test_case =
      new TestCase(test_case_name, type_param, set_up_tc, tear_down_tc);

  // Is this a death test case?
  if (internal::UnitTestOptions::MatchesFilter(test_case_name,
                                               kDeathTestCaseFilter)) {
    // Yes. Inserts the test case after the last death test case
    // defined so far. This only works when the test cases haven't
    // been shuffled. Otherwise we may end up running a death test
    // after a non-death test.
    ++last_death_test_case_;
    test_cases_.insert(test_cases_.begin() + last_death_test_case_,
                       new_test_case);
  } else {
    // No. Appends to the end of the list.
    test_cases_.push_back(new_test_case);
  }

  test_case_indices_.push_back(static_cast<int>(test_case_indices_.size()));
  return new_test_case;
}

// Helpers for setting up / tearing down the given environment. They
// are for use in the ForEach() function.
static void SetUpEnvironment(Environment* env) { env->SetUp(); }
static void TearDownEnvironment(Environment* env) { env->TearDown(); }

// Runs all tests in this UnitTest object, prints the result, and
// returns true if all tests are successful. If any exception is
// thrown during a test, the test is considered to be failed, but the
// rest of the tests will still be run.
//
// When parameterized tests are enabled, it expands and registers
// parameterized tests first in RegisterParameterizedTests().
// All other functions called from RunAllTests() may safely assume that
// parameterized tests are ready to be counted and run.
bool UnitTestImpl::RunAllTests() {
  // True iff Google Test is initialized before RUN_ALL_TESTS() is called.
  const bool gtest_is_initialized_before_run_all_tests = GTestIsInitialized();

  // Do not run any test if the --help flag was specified.
  if (g_help_flag)
    return true;

  // Repeats the call to the post-flag parsing initialization in case the
  // user didn't call InitGoogleTest.
  PostFlagParsingInit();

  // Even if sharding is not on, test runners may want to use the
  // GTEST_SHARD_STATUS_FILE to query whether the test supports the sharding
  // protocol.
  internal::WriteToShardStatusFileIfNeeded();

  // True iff we are in a subprocess for running a thread-safe-style
  // death test.
  bool in_subprocess_for_death_test = false;

#if GTEST_HAS_DEATH_TEST
  in_subprocess_for_death_test =
      (internal_run_death_test_flag_.get() != NULL);
# if defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_)
  if (in_subprocess_for_death_test) {
    GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_();
  }
# endif  // defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_)
#endif  // GTEST_HAS_DEATH_TEST

  const bool should_shard = ShouldShard(kTestTotalShards, kTestShardIndex,
                                        in_subprocess_for_death_test);

  // Compares the full test names with the filter to decide which
  // tests to run.
  const bool has_tests_to_run = FilterTests(should_shard
                                              ? HONOR_SHARDING_PROTOCOL
                                              : IGNORE_SHARDING_PROTOCOL) > 0;

  // Lists the tests and exits if the --gtest_list_tests flag was specified.
  if (GTEST_FLAG(list_tests)) {
    // This must be called *after* FilterTests() has been called.
    ListTestsMatchingFilter();
    return true;
  }

  random_seed_ = GTEST_FLAG(shuffle) ?
      GetRandomSeedFromFlag(GTEST_FLAG(random_seed)) : 0;

  // True iff at least one test has failed.
  bool failed = false;

  TestEventListener* repeater = listeners()->repeater();

  start_timestamp_ = GetTimeInMillis();
  repeater->OnTestProgramStart(*parent_);

  // How many times to repeat the tests? We don't want to repeat them
  // when we are inside the subprocess of a death test.
  const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG(repeat);
  // Repeats forever if the repeat count is negative.
  const bool forever = repeat < 0;
  for (int i = 0; forever || i != repeat; i++) {
    // We want to preserve failures generated by ad-hoc test
    // assertions executed before RUN_ALL_TESTS().
    ClearNonAdHocTestResult();

    const TimeInMillis start = GetTimeInMillis();

    // Shuffles test cases and tests if requested.
    if (has_tests_to_run && GTEST_FLAG(shuffle)) {
      random()->Reseed(random_seed_);
      // This should be done before calling OnTestIterationStart(),
      // such that a test event listener can see the actual test order
      // in the event.
      ShuffleTests();
    }

    // Tells the unit test event listeners that the tests are about to start.
    repeater->OnTestIterationStart(*parent_, i);

    // Runs each test case if there is at least one test to run.
    if (has_tests_to_run) {
      // Sets up all environments beforehand.
      repeater->OnEnvironmentsSetUpStart(*parent_);
      ForEach(environments_, SetUpEnvironment);
      repeater->OnEnvironmentsSetUpEnd(*parent_);

-      // Runs the tests only if there was no fatal failure during global
-      // set-up.
-      if (!Test::HasFatalFailure()) {
+      // Runs the tests only if there was no fatal failure or skip triggered
+      // during global set-up.
+      if (Test::IsSkipped()) {
+        // Emit diagnostics when global set-up calls skip, as it will not be
+        // emitted by default.
+        TestResult& test_result =
+            *internal::GetUnitTestImpl()->current_test_result();
+        for (int j = 0; j < test_result.total_part_count(); ++j) {
+          const TestPartResult& test_part_result =
+              test_result.GetTestPartResult(j);
+          if (test_part_result.type() == TestPartResult::kSkip) {
+            const std::string& result = test_part_result.message();
+            printf("%s\n", result.c_str());
+          }
+        }
+        fflush(stdout);
+      } else if (!Test::HasFatalFailure()) {
        for (int test_index = 0; test_index < total_test_case_count();
             test_index++) {
          GetMutableTestCase(test_index)->Run();
        }
      }

      // Tears down all environments in reverse order afterwards.
      repeater->OnEnvironmentsTearDownStart(*parent_);
      std::for_each(environments_.rbegin(), environments_.rend(),
                    TearDownEnvironment);
      repeater->OnEnvironmentsTearDownEnd(*parent_);
    }

    elapsed_time_ = GetTimeInMillis() - start;

    // Tells the unit test event listener that the tests have just finished.
    repeater->OnTestIterationEnd(*parent_, i);

    // Gets the result and clears it.
    if (!Passed()) {
      failed = true;
    }

    // Restores the original test order after the iteration. This
    // allows the user to quickly repro a failure that happens in the
    // N-th iteration without repeating the first (N - 1) iterations.
    // This is not enclosed in "if (GTEST_FLAG(shuffle)) { ... }", in
    // case the user somehow changes the value of the flag somewhere
    // (it's always safe to unshuffle the tests).
    UnshuffleTests();

    if (GTEST_FLAG(shuffle)) {
      // Picks a new random seed for each iteration.
      random_seed_ = GetNextRandomSeed(random_seed_);
    }
  }

  repeater->OnTestProgramEnd(*parent_);

  if (!gtest_is_initialized_before_run_all_tests) {
    ColoredPrintf(
        COLOR_RED,
        "\nIMPORTANT NOTICE - DO NOT IGNORE:\n"
        "This test program did NOT call " GTEST_INIT_GOOGLE_TEST_NAME_
        "() before calling RUN_ALL_TESTS(). This is INVALID. Soon " GTEST_NAME_
        " will start to enforce the valid usage. 
" "Please fix it ASAP, or IT WILL START TO FAIL.\n"); // NOLINT #if GTEST_FOR_GOOGLE_ ColoredPrintf(COLOR_RED, "For more details, see http://wiki/Main/ValidGUnitMain.\n"); #endif // GTEST_FOR_GOOGLE_ } return !failed; } // Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file // if the variable is present. If a file already exists at this location, this // function will write over it. If the variable is present, but the file cannot // be created, prints an error and exits. void WriteToShardStatusFileIfNeeded() { const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile); if (test_shard_file != NULL) { FILE* const file = posix::FOpen(test_shard_file, "w"); if (file == NULL) { ColoredPrintf(COLOR_RED, "Could not write to the test shard status file \"%s\" " "specified by the %s environment variable.\n", test_shard_file, kTestShardStatusFile); fflush(stdout); exit(EXIT_FAILURE); } fclose(file); } } // Checks whether sharding is enabled by examining the relevant // environment variable values. If the variables are present, // but inconsistent (i.e., shard_index >= total_shards), prints // an error and exits. If in_subprocess_for_death_test, sharding is // disabled because it must only be applied to the original test // process. Otherwise, we could filter out death tests we intended to execute. bool ShouldShard(const char* total_shards_env, const char* shard_index_env, bool in_subprocess_for_death_test) { if (in_subprocess_for_death_test) { return false; } const Int32 total_shards = Int32FromEnvOrDie(total_shards_env, -1); const Int32 shard_index = Int32FromEnvOrDie(shard_index_env, -1); if (total_shards == -1 && shard_index == -1) { return false; } else if (total_shards == -1 && shard_index != -1) { const Message msg = Message() << "Invalid environment variables: you have " << kTestShardIndex << " = " << shard_index << ", but have left " << kTestTotalShards << " unset.\n"; ColoredPrintf(COLOR_RED, msg.GetString().c_str()); fflush(stdout); exit(EXIT_FAILURE); } else if (total_shards != -1 && shard_index == -1) { const Message msg = Message() << "Invalid environment variables: you have " << kTestTotalShards << " = " << total_shards << ", but have left " << kTestShardIndex << " unset.\n"; ColoredPrintf(COLOR_RED, msg.GetString().c_str()); fflush(stdout); exit(EXIT_FAILURE); } else if (shard_index < 0 || shard_index >= total_shards) { const Message msg = Message() << "Invalid environment variables: we require 0 <= " << kTestShardIndex << " < " << kTestTotalShards << ", but you have " << kTestShardIndex << "=" << shard_index << ", " << kTestTotalShards << "=" << total_shards << ".\n"; ColoredPrintf(COLOR_RED, msg.GetString().c_str()); fflush(stdout); exit(EXIT_FAILURE); } return total_shards > 1; } // Parses the environment variable var as an Int32. If it is unset, // returns default_val. If it is not an Int32, prints an error // and aborts. Int32 Int32FromEnvOrDie(const char* var, Int32 default_val) { const char* str_val = posix::GetEnv(var); if (str_val == NULL) { return default_val; } Int32 result; if (!ParseInt32(Message() << "The value of environment variable " << var, str_val, &result)) { exit(EXIT_FAILURE); } return result; } // Given the total number of shards, the shard index, and the test id, // returns true iff the test should be run on this shard. The test id is // some arbitrary but unique non-negative integer assigned to each test // method. Assumes that 0 <= shard_index < total_shards. 
bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) { return (test_id % total_shards) == shard_index; } // Compares the name of each test with the user-specified filter to // decide whether the test should be run, then records the result in // each TestCase and TestInfo object. // If shard_tests == true, further filters tests based on sharding // variables in the environment - see // https://github.com/google/googletest/blob/master/googletest/docs/advanced.md // . Returns the number of tests that should run. int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) { const Int32 total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ? Int32FromEnvOrDie(kTestTotalShards, -1) : -1; const Int32 shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ? Int32FromEnvOrDie(kTestShardIndex, -1) : -1; // num_runnable_tests are the number of tests that will // run across all shards (i.e., match filter and are not disabled). // num_selected_tests are the number of tests to be run on // this shard. int num_runnable_tests = 0; int num_selected_tests = 0; for (size_t i = 0; i < test_cases_.size(); i++) { TestCase* const test_case = test_cases_[i]; const std::string &test_case_name = test_case->name(); test_case->set_should_run(false); for (size_t j = 0; j < test_case->test_info_list().size(); j++) { TestInfo* const test_info = test_case->test_info_list()[j]; const std::string test_name(test_info->name()); // A test is disabled if test case name or test name matches // kDisableTestFilter. const bool is_disabled = internal::UnitTestOptions::MatchesFilter(test_case_name, kDisableTestFilter) || internal::UnitTestOptions::MatchesFilter(test_name, kDisableTestFilter); test_info->is_disabled_ = is_disabled; const bool matches_filter = internal::UnitTestOptions::FilterMatchesTest(test_case_name, test_name); test_info->matches_filter_ = matches_filter; const bool is_runnable = (GTEST_FLAG(also_run_disabled_tests) || !is_disabled) && matches_filter; const bool is_in_another_shard = shard_tests != IGNORE_SHARDING_PROTOCOL && !ShouldRunTestOnShard(total_shards, shard_index, num_runnable_tests); test_info->is_in_another_shard_ = is_in_another_shard; const bool is_selected = is_runnable && !is_in_another_shard; num_runnable_tests += is_runnable; num_selected_tests += is_selected; test_info->should_run_ = is_selected; test_case->set_should_run(test_case->should_run() || is_selected); } } return num_selected_tests; } // Prints the given C-string on a single line by replacing all '\n' // characters with string "\\n". If the output takes more than // max_length characters, only prints the first max_length characters // and "...". static void PrintOnOneLine(const char* str, int max_length) { if (str != NULL) { for (int i = 0; *str != '\0'; ++str) { if (i >= max_length) { printf("..."); break; } if (*str == '\n') { printf("\\n"); i += 2; } else { printf("%c", *str); ++i; } } } } // Prints the names of the tests matching the user-specified filter flag. void UnitTestImpl::ListTestsMatchingFilter() { // Print at most this many characters for each type/value parameter. 
  const int kMaxParamLength = 250;

  for (size_t i = 0; i < test_cases_.size(); i++) {
    const TestCase* const test_case = test_cases_[i];
    bool printed_test_case_name = false;

    for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
      const TestInfo* const test_info =
          test_case->test_info_list()[j];
      if (test_info->matches_filter_) {
        if (!printed_test_case_name) {
          printed_test_case_name = true;
          printf("%s.", test_case->name());
          if (test_case->type_param() != NULL) {
            printf("  # %s = ", kTypeParamLabel);
            // We print the type parameter on a single line to make
            // the output easy to parse by a program.
            PrintOnOneLine(test_case->type_param(), kMaxParamLength);
          }
          printf("\n");
        }
        printf("  %s", test_info->name());
        if (test_info->value_param() != NULL) {
          printf("  # %s = ", kValueParamLabel);
          // We print the value parameter on a single line to make the
          // output easy to parse by a program.
          PrintOnOneLine(test_info->value_param(), kMaxParamLength);
        }
        printf("\n");
      }
    }
  }
  fflush(stdout);
  const std::string& output_format = UnitTestOptions::GetOutputFormat();
  if (output_format == "xml" || output_format == "json") {
    FILE* fileout = OpenFileForWriting(
        UnitTestOptions::GetAbsolutePathToOutputFile().c_str());
    std::stringstream stream;
    if (output_format == "xml") {
      XmlUnitTestResultPrinter(
          UnitTestOptions::GetAbsolutePathToOutputFile().c_str())
          .PrintXmlTestsList(&stream, test_cases_);
    } else if (output_format == "json") {
      JsonUnitTestResultPrinter(
          UnitTestOptions::GetAbsolutePathToOutputFile().c_str())
          .PrintJsonTestList(&stream, test_cases_);
    }
    fprintf(fileout, "%s", StringStreamToString(&stream).c_str());
    fclose(fileout);
  }
}

// Sets the OS stack trace getter.
//
// Does nothing if the input and the current OS stack trace getter are
// the same; otherwise, deletes the old getter and makes the input the
// current getter.
void UnitTestImpl::set_os_stack_trace_getter(
    OsStackTraceGetterInterface* getter) {
  if (os_stack_trace_getter_ != getter) {
    delete os_stack_trace_getter_;
    os_stack_trace_getter_ = getter;
  }
}

// Returns the current OS stack trace getter if it is not NULL;
// otherwise, creates an OsStackTraceGetter, makes it the current
// getter, and returns it.
OsStackTraceGetterInterface* UnitTestImpl::os_stack_trace_getter() {
  if (os_stack_trace_getter_ == NULL) {
#ifdef GTEST_OS_STACK_TRACE_GETTER_
    os_stack_trace_getter_ = new GTEST_OS_STACK_TRACE_GETTER_;
#else
    os_stack_trace_getter_ = new OsStackTraceGetter;
#endif  // GTEST_OS_STACK_TRACE_GETTER_
  }

  return os_stack_trace_getter_;
}

// Returns the most specific TestResult currently running.
TestResult* UnitTestImpl::current_test_result() {
  if (current_test_info_ != NULL) {
    return &current_test_info_->result_;
  }
  if (current_test_case_ != NULL) {
    return &current_test_case_->ad_hoc_test_result_;
  }
  return &ad_hoc_test_result_;
}

// Shuffles all test cases, and the tests within each test case,
// making sure that death tests are still run first.
void UnitTestImpl::ShuffleTests() {
  // Shuffles the death test cases.
  ShuffleRange(random(), 0, last_death_test_case_ + 1, &test_case_indices_);

  // Shuffles the non-death test cases.
  ShuffleRange(random(), last_death_test_case_ + 1,
               static_cast<int>(test_cases_.size()), &test_case_indices_);

  // Shuffles the tests inside each test case.
  for (size_t i = 0; i < test_cases_.size(); i++) {
    test_cases_[i]->ShuffleTests(random());
  }
}
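// Illustration (editor's sketch, not part of googletest): shuffling above
// permutes an index vector rather than the test cases themselves, so the
// original order can be restored exactly afterwards. A minimal sketch with
// std::shuffle standing in for gtest's own Random/ShuffleRange:
#if 0  // example only
#include <algorithm>
#include <cstdio>
#include <numeric>
#include <random>
#include <vector>

int main() {
  std::vector<int> indices(5);
  std::iota(indices.begin(), indices.end(), 0);       // 0 1 2 3 4
  std::mt19937 rng(42);                               // seeded, reproducible
  std::shuffle(indices.begin(), indices.end(), rng);  // some permutation
  for (size_t i = 0; i < indices.size(); ++i)
    std::printf("%d ", indices[i]);
  std::printf("\n");
  std::iota(indices.begin(), indices.end(), 0);       // "unshuffle": reset
}
#endif  // example only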
// Restores the test cases and tests to their order before the first shuffle.
void UnitTestImpl::UnshuffleTests() {
  for (size_t i = 0; i < test_cases_.size(); i++) {
    // Unshuffles the tests in each test case.
    test_cases_[i]->UnshuffleTests();
    // Resets the index of each test case.
    test_case_indices_[i] = static_cast<int>(i);
  }
}

// Returns the current OS stack trace as an std::string.
//
// The maximum number of stack frames to be included is specified by
// the gtest_stack_trace_depth flag.  The skip_count parameter
// specifies the number of top frames to be skipped, which doesn't
// count against the number of frames to be included.
//
// For example, if Foo() calls Bar(), which in turn calls
// GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in
// the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't.
std::string GetCurrentOsStackTraceExceptTop(UnitTest* /*unit_test*/,
                                            int skip_count) {
  // We pass skip_count + 1 to skip this wrapper function in addition
  // to what the user really wants to skip.
  return GetUnitTestImpl()->CurrentOsStackTraceExceptTop(skip_count + 1);
}

// Used by the GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_ macro to
// suppress unreachable code warnings.
namespace {
class ClassUniqueToAlwaysTrue {};
}

bool IsTrue(bool condition) { return condition; }

bool AlwaysTrue() {
#if GTEST_HAS_EXCEPTIONS
  // This condition is always false so AlwaysTrue() never actually throws,
  // but it makes the compiler think that it may throw.
  if (IsTrue(false))
    throw ClassUniqueToAlwaysTrue();
#endif  // GTEST_HAS_EXCEPTIONS
  return true;
}

// If *pstr starts with the given prefix, modifies *pstr to be right
// past the prefix and returns true; otherwise leaves *pstr unchanged
// and returns false.  None of pstr, *pstr, and prefix can be NULL.
bool SkipPrefix(const char* prefix, const char** pstr) {
  const size_t prefix_len = strlen(prefix);
  if (strncmp(*pstr, prefix, prefix_len) == 0) {
    *pstr += prefix_len;
    return true;
  }
  return false;
}

// Parses a string as a command line flag.  The string should have
// the format "--flag=value".  When def_optional is true, the "=value"
// part can be omitted.
//
// Returns the value of the flag, or NULL if the parsing failed.
static const char* ParseFlagValue(const char* str, const char* flag,
                                  bool def_optional) {
  // str and flag must not be NULL.
  if (str == NULL || flag == NULL) return NULL;

  // The flag must start with "--" followed by GTEST_FLAG_PREFIX_.
  const std::string flag_str = std::string("--") + GTEST_FLAG_PREFIX_ + flag;
  const size_t flag_len = flag_str.length();
  if (strncmp(str, flag_str.c_str(), flag_len) != 0) return NULL;

  // Skips the flag name.
  const char* flag_end = str + flag_len;

  // When def_optional is true, it's OK to not have a "=value" part.
  if (def_optional && (flag_end[0] == '\0')) {
    return flag_end;
  }

  // If def_optional is true and there are more characters after the
  // flag name, or if def_optional is false, there must be a '=' after
  // the flag name.
  if (flag_end[0] != '=') return NULL;

  // Returns the string after "=".
  return flag_end + 1;
}
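// Illustrative examples, not part of the original source.  Given that
// GTEST_FLAG_PREFIX_ is "gtest_", ParseFlagValue() behaves like this:
//
//   ParseFlagValue("--gtest_repeat=10", "repeat", false)      -> "10"
//   ParseFlagValue("--gtest_list_tests", "list_tests", true)  -> "" (the
//       "=value" part may be omitted when def_optional is true)
//   ParseFlagValue("--gtest_repeat", "repeat", false)         -> NULL ('='
//       is required when def_optional is false)
//   ParseFlagValue("--repeat=10", "repeat", false)            -> NULL (the
//       "--gtest_" prefix is missing)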
// Parses a string for a bool flag, in the form of either
// "--flag=value" or "--flag".
//
// In the former case, the value is taken as true as long as it does
// not start with '0', 'f', or 'F'.
//
// In the latter case, the value is taken as true.
//
// On success, stores the value of the flag in *value, and returns
// true.  On failure, returns false without changing *value.
static bool ParseBoolFlag(const char* str, const char* flag, bool* value) {
  // Gets the value of the flag as a string.
  const char* const value_str = ParseFlagValue(str, flag, true);

  // Aborts if the parsing failed.
  if (value_str == NULL) return false;

  // Converts the string value to a bool.
  *value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F');
  return true;
}

// Parses a string for an Int32 flag, in the form of
// "--flag=value".
//
// On success, stores the value of the flag in *value, and returns
// true.  On failure, returns false without changing *value.
bool ParseInt32Flag(const char* str, const char* flag, Int32* value) {
  // Gets the value of the flag as a string.
  const char* const value_str = ParseFlagValue(str, flag, false);

  // Aborts if the parsing failed.
  if (value_str == NULL) return false;

  // Sets *value to the value of the flag.
  return ParseInt32(Message() << "The value of flag --" << flag,
                    value_str, value);
}

// Parses a string for a string flag, in the form of
// "--flag=value".
//
// On success, stores the value of the flag in *value, and returns
// true.  On failure, returns false without changing *value.
template <typename String>
static bool ParseStringFlag(const char* str, const char* flag, String* value) {
  // Gets the value of the flag as a string.
  const char* const value_str = ParseFlagValue(str, flag, false);

  // Aborts if the parsing failed.
  if (value_str == NULL) return false;

  // Sets *value to the value of the flag.
  *value = value_str;
  return true;
}

// Determines whether a string has a prefix that Google Test uses for its
// flags, i.e., starts with GTEST_FLAG_PREFIX_ or GTEST_FLAG_PREFIX_DASH_.
// If Google Test detects that a command line flag has its prefix but is not
// recognized, it will print its help message. Flags starting with
// GTEST_INTERNAL_PREFIX_ followed by "internal_" are considered Google Test
// internal flags and do not trigger the help message.
static bool HasGoogleTestFlagPrefix(const char* str) {
  return (SkipPrefix("--", &str) ||
          SkipPrefix("-", &str) ||
          SkipPrefix("/", &str)) &&
         !SkipPrefix(GTEST_FLAG_PREFIX_ "internal_", &str) &&
         (SkipPrefix(GTEST_FLAG_PREFIX_, &str) ||
          SkipPrefix(GTEST_FLAG_PREFIX_DASH_, &str));
}

// Prints a string containing code-encoded text.  The following escape
// sequences can be used in the string to control the text color:
//
//   @@    prints a single '@' character.
//   @R    changes the color to red.
//   @G    changes the color to green.
//   @Y    changes the color to yellow.
//   @D    changes to the default terminal text color.
//
// FIXME: Write tests for this once we add stdout
// capturing to Google Test.
static void PrintColorEncoded(const char* str) {
  GTestColor color = COLOR_DEFAULT;  // The current color.

  // Conceptually, we split the string into segments divided by escape
  // sequences.  Then we print one segment at a time.  At the end of
  // each iteration, the str pointer advances to the beginning of the
  // next segment.
  for (;;) {
    const char* p = strchr(str, '@');
    if (p == NULL) {
      ColoredPrintf(color, "%s", str);
      return;
    }

    ColoredPrintf(color, "%s", std::string(str, p).c_str());

    const char ch = p[1];
    str = p + 2;
    if (ch == '@') {
      ColoredPrintf(color, "@");
    } else if (ch == 'D') {
      color = COLOR_DEFAULT;
    } else if (ch == 'R') {
      color = COLOR_RED;
    } else if (ch == 'G') {
      color = COLOR_GREEN;
    } else if (ch == 'Y') {
      color = COLOR_YELLOW;
    } else {
      --str;
    }
  }
}
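// Illustrative example, not part of the original source: given the escape
// sequences handled above,
//
//   PrintColorEncoded("@RFAILED@D: @Gexpected@D 42, got 41 (user@@host)\n");
//
// prints "FAILED" in red and "expected" in green, restores the default
// terminal color after each @D, and prints "@@" as a single literal '@'.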
static const char kColorEncodedHelpMessage[] =
"This program contains tests written using " GTEST_NAME_ ". You can use the\n"
"following command line flags to control its behavior:\n"
"\n"
"Test Selection:\n"
"  @G--" GTEST_FLAG_PREFIX_ "list_tests@D\n"
"      List the names of all tests instead of running them. The name of\n"
"      TEST(Foo, Bar) is \"Foo.Bar\".\n"
"  @G--" GTEST_FLAG_PREFIX_ "filter=@YPOSITIVE_PATTERNS"
    "[@G-@YNEGATIVE_PATTERNS]@D\n"
"      Run only the tests whose name matches one of the positive patterns but\n"
"      none of the negative patterns. '?' matches any single character; '*'\n"
"      matches any substring; ':' separates two patterns.\n"
"  @G--" GTEST_FLAG_PREFIX_ "also_run_disabled_tests@D\n"
"      Run all disabled tests too.\n"
"\n"
"Test Execution:\n"
"  @G--" GTEST_FLAG_PREFIX_ "repeat=@Y[COUNT]@D\n"
"      Run the tests repeatedly; use a negative count to repeat forever.\n"
"  @G--" GTEST_FLAG_PREFIX_ "shuffle@D\n"
"      Randomize tests' orders on every iteration.\n"
"  @G--" GTEST_FLAG_PREFIX_ "random_seed=@Y[NUMBER]@D\n"
"      Random number seed to use for shuffling test orders (between 1 and\n"
"      99999, or 0 to use a seed based on the current time).\n"
"\n"
"Test Output:\n"
"  @G--" GTEST_FLAG_PREFIX_ "color=@Y(@Gyes@Y|@Gno@Y|@Gauto@Y)@D\n"
"      Enable/disable colored output. The default is @Gauto@D.\n"
"  @G--" GTEST_FLAG_PREFIX_ "print_time=0@D\n"
"      Don't print the elapsed time of each test.\n"
"  @G--" GTEST_FLAG_PREFIX_ "output=@Y(@Gjson@Y|@Gxml@Y)[@G:@YDIRECTORY_PATH@G"
    GTEST_PATH_SEP_ "@Y|@G:@YFILE_PATH]@D\n"
"      Generate a JSON or XML report in the given directory or with the given\n"
"      file name. @YFILE_PATH@D defaults to @Gtest_details.xml@D.\n"
# if GTEST_CAN_STREAM_RESULTS_
"  @G--" GTEST_FLAG_PREFIX_ "stream_result_to=@YHOST@G:@YPORT@D\n"
"      Stream test results to the given server.\n"
# endif  // GTEST_CAN_STREAM_RESULTS_
"\n"
"Assertion Behavior:\n"
# if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
"  @G--" GTEST_FLAG_PREFIX_ "death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\n"
"      Set the default death test style.\n"
# endif  // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS
"  @G--" GTEST_FLAG_PREFIX_ "break_on_failure@D\n"
"      Turn assertion failures into debugger break-points.\n"
"  @G--" GTEST_FLAG_PREFIX_ "throw_on_failure@D\n"
"      Turn assertion failures into C++ exceptions for use by an external\n"
"      test framework.\n"
"  @G--" GTEST_FLAG_PREFIX_ "catch_exceptions=0@D\n"
"      Do not report exceptions as test failures. Instead, allow them\n"
"      to crash the program or throw a pop-up (on Windows).\n"
"\n"
"Except for @G--" GTEST_FLAG_PREFIX_ "list_tests@D, you can alternatively set "
    "the corresponding\n"
"environment variable of a flag (all letters in upper-case). For example, to\n"
"disable colored text output, you can either specify @G--" GTEST_FLAG_PREFIX_
    "color=no@D or set\n"
"the @G" GTEST_FLAG_PREFIX_UPPER_ "COLOR@D environment variable to @Gno@D.\n"
"\n"
"For more information, please read the " GTEST_NAME_ " documentation at\n"
"@G" GTEST_PROJECT_URL_ "@D. If you find a bug in " GTEST_NAME_ "\n"
"(not one in your own code or tests), please report it to\n"
"@G<" GTEST_DEV_EMAIL_ ">@D.\n";
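// Illustrative examples, not part of the original source, of the filter
// syntax documented in the help text above:
//
//   --gtest_filter=FooTest.*               all tests in test case FooTest
//   --gtest_filter=*Null*:*Constructor*    any test whose full name
//                                          contains "Null" or "Constructor"
//   --gtest_filter=FooTest.*-FooTest.Bar   all FooTest tests except
//                                          FooTest.Bar
//
// As the help text notes, GTEST_FILTER=FooTest.* in the environment is
// equivalent to passing --gtest_filter=FooTest.* on the command line.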
static bool ParseGoogleTestFlag(const char* const arg) {
  return ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag,
                       &GTEST_FLAG(also_run_disabled_tests)) ||
         ParseBoolFlag(arg, kBreakOnFailureFlag,
                       &GTEST_FLAG(break_on_failure)) ||
         ParseBoolFlag(arg, kCatchExceptionsFlag,
                       &GTEST_FLAG(catch_exceptions)) ||
         ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||
         ParseStringFlag(arg, kDeathTestStyleFlag,
                         &GTEST_FLAG(death_test_style)) ||
         ParseBoolFlag(arg, kDeathTestUseFork,
                       &GTEST_FLAG(death_test_use_fork)) ||
         ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||
         ParseStringFlag(arg, kInternalRunDeathTestFlag,
                         &GTEST_FLAG(internal_run_death_test)) ||
         ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
         ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
         ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
         ParseBoolFlag(arg, kPrintUTF8Flag, &GTEST_FLAG(print_utf8)) ||
         ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||
         ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||
         ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||
         ParseInt32Flag(arg, kStackTraceDepthFlag,
                        &GTEST_FLAG(stack_trace_depth)) ||
         ParseStringFlag(arg, kStreamResultToFlag,
                         &GTEST_FLAG(stream_result_to)) ||
         ParseBoolFlag(arg, kThrowOnFailureFlag,
                       &GTEST_FLAG(throw_on_failure));
}

#if GTEST_USE_OWN_FLAGFILE_FLAG_
static void LoadFlagsFromFile(const std::string& path) {
  FILE* flagfile = posix::FOpen(path.c_str(), "r");
  if (!flagfile) {
    GTEST_LOG_(FATAL) << "Unable to open file \"" << GTEST_FLAG(flagfile)
                      << "\"";
  }
  std::string contents(ReadEntireFile(flagfile));
  posix::FClose(flagfile);
  std::vector<std::string> lines;
  SplitString(contents, '\n', &lines);
  for (size_t i = 0; i < lines.size(); ++i) {
    if (lines[i].empty())
      continue;
    if (!ParseGoogleTestFlag(lines[i].c_str()))
      g_help_flag = true;
  }
}
#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_

// Parses the command line for Google Test flags, without initializing
// other parts of Google Test.  The type parameter CharType can be
// instantiated to either char or wchar_t.
template <typename CharType>
void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) {
  for (int i = 1; i < *argc; i++) {
    const std::string arg_string = StreamableToString(argv[i]);
    const char* const arg = arg_string.c_str();

    using internal::ParseBoolFlag;
    using internal::ParseInt32Flag;
    using internal::ParseStringFlag;

    bool remove_flag = false;
    if (ParseGoogleTestFlag(arg)) {
      remove_flag = true;
#if GTEST_USE_OWN_FLAGFILE_FLAG_
    } else if (ParseStringFlag(arg, kFlagfileFlag, &GTEST_FLAG(flagfile))) {
      LoadFlagsFromFile(GTEST_FLAG(flagfile));
      remove_flag = true;
#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_
    } else if (arg_string == "--help" || arg_string == "-h" ||
               arg_string == "-?" || arg_string == "/?" ||
               HasGoogleTestFlagPrefix(arg)) {
      // Both help flag and unrecognized Google Test flags (excluding
      // internal ones) trigger help display.
      g_help_flag = true;
    }

    if (remove_flag) {
      // Shift the remainder of the argv list left by one.  Note
      // that argv has (*argc + 1) elements, the last one always being
      // NULL.  The following loop moves the trailing NULL element as
      // well.
      for (int j = i; j != *argc; j++) {
        argv[j] = argv[j + 1];
      }

      // Decrements the argument count.
      (*argc)--;

      // We also need to decrement the iterator as we just removed
      // an element.
      i--;
    }
  }

  if (g_help_flag) {
    // We print the help here instead of in RUN_ALL_TESTS(), as the
    // latter may not be called at all if the user is using Google
    // Test with another testing framework.
    PrintColorEncoded(kColorEncodedHelpMessage);
  }
}
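// Illustrative sketch, not part of the original source: when
// GTEST_USE_OWN_FLAGFILE_FLAG_ is enabled, a hypothetical flags.txt with
// one flag per line, e.g.
//
//   --gtest_shuffle
//   --gtest_repeat=3
//
// can be pulled in with --gtest_flagfile=flags.txt; LoadFlagsFromFile()
// above feeds each non-empty line to ParseGoogleTestFlag() exactly as if
// it had appeared on the command line.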
// Parses the command line for Google Test flags, without initializing
// other parts of Google Test.
void ParseGoogleTestFlagsOnly(int* argc, char** argv) {
  ParseGoogleTestFlagsOnlyImpl(argc, argv);

  // Fix the value of *_NSGetArgc() on macOS, but iff
  // *_NSGetArgv() == argv
  // Only applicable to char** version of argv
#if GTEST_OS_MAC
#ifndef GTEST_OS_IOS
  if (*_NSGetArgv() == argv) {
    *_NSGetArgc() = *argc;
  }
#endif
#endif
}
void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv) {
  ParseGoogleTestFlagsOnlyImpl(argc, argv);
}

// The internal implementation of InitGoogleTest().
//
// The type parameter CharType can be instantiated to either char or
// wchar_t.
template <typename CharType>
void InitGoogleTestImpl(int* argc, CharType** argv) {
  // We don't want to run the initialization code twice.
  if (GTestIsInitialized()) return;

  if (*argc <= 0) return;

  g_argvs.clear();
  for (int i = 0; i != *argc; i++) {
    g_argvs.push_back(StreamableToString(argv[i]));
  }

#if GTEST_HAS_ABSL
  absl::InitializeSymbolizer(g_argvs[0].c_str());
#endif  // GTEST_HAS_ABSL

  ParseGoogleTestFlagsOnly(argc, argv);
  GetUnitTestImpl()->PostFlagParsingInit();
}

}  // namespace internal

// Initializes Google Test.  This must be called before calling
// RUN_ALL_TESTS().  In particular, it parses a command line for the
// flags that Google Test recognizes.  Whenever a Google Test flag is
// seen, it is removed from argv, and *argc is decremented.
//
// No value is returned.  Instead, the Google Test flag variables are
// updated.
//
// Calling the function for the second time has no user-visible effect.
void InitGoogleTest(int* argc, char** argv) {
#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
  GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv);
#else  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
  internal::InitGoogleTestImpl(argc, argv);
#endif  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
}

// This overloaded version can be used in Windows programs compiled in
// UNICODE mode.
void InitGoogleTest(int* argc, wchar_t** argv) {
#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
  GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv);
#else  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
  internal::InitGoogleTestImpl(argc, argv);
#endif  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
}
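// Typical usage, shown here as an illustration (it matches the test
// program added later in this commit): call InitGoogleTest() before
// RUN_ALL_TESTS() so the --gtest_* flags are parsed and stripped from argv.
//
//   #include <gtest/gtest.h>
//
//   int main(int argc, char** argv) {
//     testing::InitGoogleTest(&argc, argv);  // consumes the --gtest_* flags
//     return RUN_ALL_TESTS();
//   }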
std::string TempDir() {
#if defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_)
  return GTEST_CUSTOM_TEMPDIR_FUNCTION_();
#endif

#if GTEST_OS_WINDOWS_MOBILE
  return "\\temp\\";
#elif GTEST_OS_WINDOWS
  const char* temp_dir = internal::posix::GetEnv("TEMP");
  if (temp_dir == NULL || temp_dir[0] == '\0')
    return "\\temp\\";
  else if (temp_dir[strlen(temp_dir) - 1] == '\\')
    return temp_dir;
  else
    return std::string(temp_dir) + "\\";
#elif GTEST_OS_LINUX_ANDROID
  return "/sdcard/";
#else
  return "/tmp/";
#endif  // GTEST_OS_WINDOWS_MOBILE
}

// Class ScopedTrace

// Pushes the given source file location and message onto a per-thread
// trace stack maintained by Google Test.
void ScopedTrace::PushTrace(const char* file, int line, std::string message) {
  internal::TraceInfo trace;
  trace.file = file;
  trace.line = line;
  trace.message.swap(message);

  UnitTest::GetInstance()->PushGTestTrace(trace);
}

// Pops the info pushed by the c'tor.
ScopedTrace::~ScopedTrace()
    GTEST_LOCK_EXCLUDED_(&UnitTest::mutex_) {
  UnitTest::GetInstance()->PopGTestTrace();
}

}  // namespace testing
Index: head/contrib/googletest/googletest/test/BUILD.bazel
===================================================================
--- head/contrib/googletest/googletest/test/BUILD.bazel (revision 345769)
+++ head/contrib/googletest/googletest/test/BUILD.bazel (revision 345770)
@@ -1,527 +1,534 @@
# Copyright 2017 Google Inc.
# All Rights Reserved.
#
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: misterg@google.com (Gennadiy Civil)
#
# Bazel BUILD for The Google C++ Testing Framework (Google Test)

licenses(["notice"])

config_setting(
    name = "windows",
    values = {"cpu": "x64_windows"},
)

config_setting(
    name = "windows_msvc",
    values = {"cpu": "x64_windows_msvc"},
)

config_setting(
    name = "has_absl",
    values = {"define": "absl=1"},
)

# On Windows, exclude gtest-tuple.h and googletest-tuple-test.cc.
cc_test(
    name = "gtest_all_test",
    size = "small",
    srcs = glob(
        include = [
            "gtest-*.cc",
            "googletest-*.cc",
            "*.h",
            "googletest/include/gtest/**/*.h",
        ],
        exclude = [
            "gtest-unittest-api_test.cc",
            "googletest-tuple-test.cc",
            "googletest/src/gtest-all.cc",
            "gtest_all_test.cc",
            "gtest-death-test_ex_test.cc",
            "gtest-listener_test.cc",
            "gtest-unittest-api_test.cc",
            "googletest-param-test-test.cc",
            "googletest-catch-exceptions-test_.cc",
            "googletest-color-test_.cc",
            "googletest-env-var-test_.cc",
            "googletest-filter-unittest_.cc",
            "googletest-break-on-failure-unittest_.cc",
            "googletest-listener-test.cc",
            "googletest-output-test_.cc",
            "googletest-list-tests-unittest_.cc",
            "googletest-shuffle-test_.cc",
            "googletest-uninitialized-test_.cc",
            "googletest-death-test_ex_test.cc",
            "googletest-param-test-test",
            "googletest-throw-on-failure-test_.cc",
            "googletest-param-test-invalid-name1-test_.cc",
            "googletest-param-test-invalid-name2-test_.cc",
        ],
    ) + select({
        "//:windows": [],
        "//:windows_msvc": [],
        "//conditions:default": [
            "googletest-tuple-test.cc",
        ],
    }),
    copts = select({
        "//:windows": ["-DGTEST_USE_OWN_TR1_TUPLE=0"],
        "//:windows_msvc": ["-DGTEST_USE_OWN_TR1_TUPLE=0"],
        "//conditions:default": ["-DGTEST_USE_OWN_TR1_TUPLE=1"],
    }),
    includes = [
        "googletest",
        "googletest/include",
        "googletest/include/internal",
        "googletest/test",
    ],
    linkopts = select({
        "//:windows": [],
        "//:windows_msvc": [],
        "//conditions:default": [
            "-pthread",
        ],
    }),
    deps = ["//:gtest_main"],
)
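# Illustrative usage note, not part of the original BUILD file: the
# aggregate suite above is typically run from a googletest workspace with
# something like
#   bazel test //googletest/test:gtest_all_test
# (the exact label depends on how the workspace maps this directory).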
# Tests death tests.
cc_test(
    name = "googletest-death-test-test",
    size = "medium",
    srcs = ["googletest-death-test-test.cc"],
    deps = ["//:gtest_main"],
)

cc_test(
    name = "gtest_test_macro_stack_footprint_test",
    size = "small",
    srcs = ["gtest_test_macro_stack_footprint_test.cc"],
    deps = ["//:gtest"],
)

# These googletest tests have their own main()
cc_test(
    name = "googletest-listener-test",
    size = "small",
    srcs = ["googletest-listener-test.cc"],
    deps = ["//:gtest_main"],
)

cc_test(
    name = "gtest-unittest-api_test",
    size = "small",
    srcs = [
        "gtest-unittest-api_test.cc",
    ],
    deps = [
        "//:gtest",
    ],
)

cc_test(
    name = "googletest-param-test-test",
    size = "small",
    srcs = [
        "googletest-param-test-test.cc",
        "googletest-param-test-test.h",
        "googletest-param-test2-test.cc",
    ],
    deps = ["//:gtest"],
)

cc_test(
    name = "gtest_unittest",
    size = "small",
    srcs = ["gtest_unittest.cc"],
    args = ["--heap_check=strict"],
    shard_count = 2,
    deps = ["//:gtest_main"],
)

# Py tests

py_library(
    name = "gtest_test_utils",
    testonly = 1,
    srcs = ["gtest_test_utils.py"],
)

cc_binary(
    name = "gtest_help_test_",
    testonly = 1,
    srcs = ["gtest_help_test_.cc"],
    deps = ["//:gtest_main"],
)

py_test(
    name = "gtest_help_test",
    size = "small",
    srcs = ["gtest_help_test.py"],
    data = [":gtest_help_test_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-output-test_",
    testonly = 1,
    srcs = ["googletest-output-test_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-output-test",
    size = "small",
    srcs = ["googletest-output-test.py"],
    args = select({
        ":has_absl": [],
        "//conditions:default": ["--no_stacktrace_support"],
    }),
    data = [
        "googletest-output-test-golden-lin.txt",
        ":googletest-output-test_",
    ],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-color-test_",
    testonly = 1,
    srcs = ["googletest-color-test_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-color-test",
    size = "small",
    srcs = ["googletest-color-test.py"],
    data = [":googletest-color-test_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-env-var-test_",
    testonly = 1,
    srcs = ["googletest-env-var-test_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-env-var-test",
    size = "medium",
    srcs = ["googletest-env-var-test.py"],
    data = [":googletest-env-var-test_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-filter-unittest_",
    testonly = 1,
    srcs = ["googletest-filter-unittest_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-filter-unittest",
    size = "medium",
    srcs = ["googletest-filter-unittest.py"],
    data = [":googletest-filter-unittest_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-break-on-failure-unittest_",
    testonly = 1,
    srcs = ["googletest-break-on-failure-unittest_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-break-on-failure-unittest",
    size = "small",
    srcs = ["googletest-break-on-failure-unittest.py"],
    data = [":googletest-break-on-failure-unittest_"],
    deps = [":gtest_test_utils"],
)

cc_test(
    name = "gtest_assert_by_exception_test",
    size = "small",
    srcs = ["gtest_assert_by_exception_test.cc"],
    deps = ["//:gtest"],
)

cc_binary(
    name = "googletest-throw-on-failure-test_",
    testonly = 1,
    srcs = ["googletest-throw-on-failure-test_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-throw-on-failure-test",
    size = "small",
    srcs = ["googletest-throw-on-failure-test.py"],
    data = [":googletest-throw-on-failure-test_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-list-tests-unittest_",
    testonly = 1,
    srcs = ["googletest-list-tests-unittest_.cc"],
    deps = ["//:gtest"],
)
+cc_test(
+    name = "gtest_skip_in_environment_setup_test",
+    size = "small",
+    srcs = ["gtest_skip_in_environment_setup_test.cc"],
+    deps = ["//:gtest_main"],
+)
+
py_test(
    name = "googletest-list-tests-unittest",
    size = "small",
    srcs = ["googletest-list-tests-unittest.py"],
    data = [":googletest-list-tests-unittest_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-shuffle-test_",
    srcs = ["googletest-shuffle-test_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-shuffle-test",
    size = "small",
    srcs = ["googletest-shuffle-test.py"],
    data = [":googletest-shuffle-test_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-catch-exceptions-no-ex-test_",
    testonly = 1,
    srcs = ["googletest-catch-exceptions-test_.cc"],
    deps = ["//:gtest_main"],
)

cc_binary(
    name = "googletest-catch-exceptions-ex-test_",
    testonly = 1,
    srcs = ["googletest-catch-exceptions-test_.cc"],
    copts = ["-fexceptions"],
    deps = ["//:gtest_main"],
)

py_test(
    name = "googletest-catch-exceptions-test",
    size = "small",
    srcs = ["googletest-catch-exceptions-test.py"],
    data = [
        ":googletest-catch-exceptions-ex-test_",
        ":googletest-catch-exceptions-no-ex-test_",
    ],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "gtest_xml_output_unittest_",
    testonly = 1,
    srcs = ["gtest_xml_output_unittest_.cc"],
    deps = ["//:gtest"],
)

cc_test(
    name = "gtest_no_test_unittest",
    size = "small",
    srcs = ["gtest_no_test_unittest.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "gtest_xml_output_unittest",
    size = "small",
    srcs = [
        "gtest_xml_output_unittest.py",
        "gtest_xml_test_utils.py",
    ],
    args = select({
        ":has_absl": [],
        "//conditions:default": ["--no_stacktrace_support"],
    }),
    data = [
        # We invoke gtest_no_test_unittest to verify the XML output
        # when the test program contains no test definition.
        ":gtest_no_test_unittest",
        ":gtest_xml_output_unittest_",
    ],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "gtest_xml_outfile1_test_",
    testonly = 1,
    srcs = ["gtest_xml_outfile1_test_.cc"],
    deps = ["//:gtest_main"],
)

cc_binary(
    name = "gtest_xml_outfile2_test_",
    testonly = 1,
    srcs = ["gtest_xml_outfile2_test_.cc"],
    deps = ["//:gtest_main"],
)

py_test(
    name = "gtest_xml_outfiles_test",
    size = "small",
    srcs = [
        "gtest_xml_outfiles_test.py",
        "gtest_xml_test_utils.py",
    ],
    data = [
        ":gtest_xml_outfile1_test_",
        ":gtest_xml_outfile2_test_",
    ],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "googletest-uninitialized-test_",
    testonly = 1,
    srcs = ["googletest-uninitialized-test_.cc"],
    deps = ["//:gtest"],
)

py_test(
    name = "googletest-uninitialized-test",
    size = "medium",
    srcs = ["googletest-uninitialized-test.py"],
    data = ["googletest-uninitialized-test_"],
    deps = [":gtest_test_utils"],
)

cc_binary(
    name = "gtest_testbridge_test_",
    testonly = 1,
    srcs = ["gtest_testbridge_test_.cc"],
    deps = ["//:gtest_main"],
)

# Tests that filtering via testbridge works
py_test(
    name = "gtest_testbridge_test",
    size = "small",
    srcs = ["gtest_testbridge_test.py"],
    data = [":gtest_testbridge_test_"],
    deps = [":gtest_test_utils"],
)

py_test(
    name = "googletest-json-outfiles-test",
    size = "small",
    srcs = [
        "googletest-json-outfiles-test.py",
        "gtest_json_test_utils.py",
    ],
    data = [
        ":gtest_xml_outfile1_test_",
        ":gtest_xml_outfile2_test_",
    ],
    deps = [":gtest_test_utils"],
)
":gtest_no_test_unittest", ":gtest_xml_output_unittest_", ], args = select({ ":has_absl": [], "//conditions:default": ["--no_stacktrace_support"], }), deps = [":gtest_test_utils"], ) # Verifies interaction of death tests and exceptions. cc_test( name = "googletest-death-test_ex_catch_test", size = "medium", srcs = ["googletest-death-test_ex_test.cc"], copts = ["-fexceptions"], defines = ["GTEST_ENABLE_CATCH_EXCEPTIONS_=1"], deps = ["//:gtest"], ) cc_binary( name = "googletest-param-test-invalid-name1-test_", testonly = 1, srcs = ["googletest-param-test-invalid-name1-test_.cc"], deps = ["//:gtest"], ) cc_binary( name = "googletest-param-test-invalid-name2-test_", testonly = 1, srcs = ["googletest-param-test-invalid-name2-test_.cc"], deps = ["//:gtest"], ) py_test( name = "googletest-param-test-invalid-name1-test", size = "small", srcs = ["googletest-param-test-invalid-name1-test.py"], data = [":googletest-param-test-invalid-name1-test_"], deps = [":gtest_test_utils"], ) py_test( name = "googletest-param-test-invalid-name2-test", size = "small", srcs = ["googletest-param-test-invalid-name2-test.py"], data = [":googletest-param-test-invalid-name2-test_"], deps = [":gtest_test_utils"], ) Index: head/contrib/googletest/googletest/test/gtest_skip_in_environment_setup_test.cc =================================================================== --- head/contrib/googletest/googletest/test/gtest_skip_in_environment_setup_test.cc (nonexistent) +++ head/contrib/googletest/googletest/test/gtest_skip_in_environment_setup_test.cc (revision 345770) @@ -0,0 +1,60 @@ +// Copyright 2019, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// This test verifies that skipping in the environment results in the +// testcases being skipped. +// +// This is a reproduction case for +// https://github.com/google/googletest/issues/2189 . 
+
+#include <iostream>
+#include <gtest/gtest.h>
+
+class SetupEnvironment : public testing::Environment {
+public:
+  void SetUp() override {
+    GTEST_SKIP() << "Skipping the entire environment";
+  }
+};
+
+TEST(Test, AlwaysPasses) {
+  EXPECT_EQ(true, true);
+}
+
+TEST(Test, AlwaysFails) {
+  EXPECT_EQ(true, false);
+}
+
+int main(int argc, char **argv) {
+  testing::InitGoogleTest(&argc, argv);
+
+  testing::AddGlobalTestEnvironment(new SetupEnvironment());
+
+  return (RUN_ALL_TESTS());
+}

Property changes on: head/contrib/googletest/googletest/test/gtest_skip_in_environment_setup_test.cc
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1,2 ##
+FreeBSD=%H
+\ No newline at end of property
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/lib/googletest/gtest_main/tests/Makefile
===================================================================
--- head/lib/googletest/gtest_main/tests/Makefile (revision 345769)
+++ head/lib/googletest/gtest_main/tests/Makefile (revision 345770)
@@ -1,43 +1,44 @@
# $FreeBSD$

.include

.PATH:	${GOOGLETEST_SRCROOT}/src ${GOOGLETEST_SRCROOT}/test

GTESTS+=	googletest-death-test-test
GTESTS+=	googletest-filepath-test
GTESTS+=	googletest-linked-ptr-test
GTESTS+=	googletest-listener-test
GTESTS+=	gtest_main_unittest
GTESTS+=	googletest-message-test
GTESTS+=	googletest-options-test
GTESTS+=	googletest-port-test
GTESTS+=	gtest_pred_impl_unittest
GTESTS+=	googletest-printers-test
GTESTS+=	gtest_prod_test
GTESTS+=	gtest_sole_header_test
GTESTS+=	googletest-test-part-test
GTESTS+=	gtest-typed-test_test
GTESTS+=	gtest_skip_test
+GTESTS+=	gtest_skip_in_environment_setup_test
GTESTS+=	gtest_unittest

CXXFLAGS+=	-I${GOOGLETEST_SRCROOT}/include
CXXFLAGS+=	-I${GOOGLETEST_SRCROOT}

SRCS.gtest-typed-test_test= \
	gtest-typed-test_test.cc \
	gtest-typed-test2_test.cc
SRCS.gtest_prod_test= \
	gtest_prod_test.cc \
	production.cc

LIBADD+=	gtest gtest_main
LIBADD.googletest-port-test+=	pthread
LIBADD.gtest_unittest+=	pthread

# The next release will resolve a number of build warnings issues.
NO_WERROR=

.include