diff --git a/contrib/kyua/engine/scheduler.cpp b/contrib/kyua/engine/scheduler.cpp
index e75091a40e38..d4507a247323 100644
--- a/contrib/kyua/engine/scheduler.cpp
+++ b/contrib/kyua/engine/scheduler.cpp
@@ -1,1639 +1,1642 @@
// Copyright 2014 The Kyua Authors.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
//   notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
//   notice, this list of conditions and the following disclaimer in the
//   documentation and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors
//   may be used to endorse or promote products derived from this software
//   without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "engine/scheduler.hpp"

extern "C" {
#include
}

#include
#include
#include
#include

#include "engine/config.hpp"
#include "engine/exceptions.hpp"
#include "engine/execenv/execenv.hpp"
#include "engine/requirements.hpp"
#include "model/context.hpp"
#include "model/metadata.hpp"
#include "model/test_case.hpp"
#include "model/test_program.hpp"
#include "model/test_result.hpp"
#include "utils/config/tree.ipp"
#include "utils/datetime.hpp"
#include "utils/defs.hpp"
#include "utils/env.hpp"
#include "utils/format/macros.hpp"
#include "utils/fs/directory.hpp"
#include "utils/fs/exceptions.hpp"
#include "utils/fs/operations.hpp"
#include "utils/fs/path.hpp"
#include "utils/logging/macros.hpp"
#include "utils/noncopyable.hpp"
#include "utils/optional.ipp"
#include "utils/passwd.hpp"
#include "utils/process/executor.ipp"
#include "utils/process/status.hpp"
#include "utils/sanity.hpp"
#include "utils/stacktrace.hpp"
#include "utils/stream.hpp"
#include "utils/text/operations.ipp"

namespace config = utils::config;
namespace datetime = utils::datetime;
namespace execenv = engine::execenv;
namespace executor = utils::process::executor;
namespace fs = utils::fs;
namespace logging = utils::logging;
namespace passwd = utils::passwd;
namespace process = utils::process;
namespace scheduler = engine::scheduler;
namespace text = utils::text;

using utils::none;
using utils::optional;

/// Timeout for the test case cleanup operation.
///
/// TODO(jmmv): This is here only for testing purposes. Maybe we should expose
/// this setting as part of the user_config.
datetime::delta scheduler::cleanup_timeout(60, 0);

/// Timeout for the test case execenv cleanup operation.
datetime::delta scheduler::execenv_cleanup_timeout(60, 0); /// Timeout for the test case listing operation. /// /// TODO(jmmv): This is here only for testing purposes. Maybe we should expose /// this setting as part of the user_config. datetime::delta scheduler::list_timeout(300, 0); namespace { /// Magic exit status to indicate that the test case was probably skipped. /// /// The test case was only skipped if and only if we return this exit code and /// we find the skipped_cookie file on disk. static const int exit_skipped = 84; /// Text file containing the skip reason for the test case. /// /// This will only be present within unique_work_directory if the test case /// exited with the exit_skipped code. However, there is no guarantee that the /// file is there (say if the test really decided to exit with code exit_skipped /// on its own). static const char* skipped_cookie = "skipped.txt"; /// Mapping of interface names to interface definitions. typedef std::map< std::string, std::shared_ptr< scheduler::interface > > interfaces_map; /// Mapping of interface names to interface definitions. /// /// Use register_interface() to add an entry to this global table. static interfaces_map interfaces; /// Scans the contents of a directory and appends the file listing to a file. /// /// \param dir_path The directory to scan. /// \param output_file The file to which to append the listing. /// /// \throw engine::error If there are problems listing the files. static void append_files_listing(const fs::path& dir_path, const fs::path& output_file) { std::ofstream output(output_file.c_str(), std::ios::app); if (!output) throw engine::error(F("Failed to open output file %s for append") % output_file); try { std::set < std::string > names; const fs::directory dir(dir_path); for (fs::directory::const_iterator iter = dir.begin(); iter != dir.end(); ++iter) { if (iter->name != "." && iter->name != "..") names.insert(iter->name); } if (!names.empty()) { output << "Files left in work directory after failure: " << text::join(names, ", ") << '\n'; } } catch (const fs::error& e) { throw engine::error(F("Cannot append files listing to %s: %s") % output_file % e.what()); } } /// Maintenance data held while a test is being executed. /// /// This data structure exists from the moment when a test is executed via /// scheduler::spawn_test() or scheduler::impl::spawn_cleanup() to when it is /// cleaned up with result_handle::cleanup(). /// /// This is a base data type intended to be extended for the test and cleanup /// cases so that each contains only the relevant data. struct exec_data : utils::noncopyable { /// Test program data for this test case. const model::test_program_ptr test_program; /// Name of the test case. const std::string test_case_name; /// Constructor. /// /// \param test_program_ Test program data for this test case. /// \param test_case_name_ Name of the test case. exec_data(const model::test_program_ptr test_program_, const std::string& test_case_name_) : test_program(test_program_), test_case_name(test_case_name_) { } /// Destructor. virtual ~exec_data(void) { } }; /// Maintenance data held while a test is being executed. struct test_exec_data : public exec_data { /// Test program-specific execution interface. const std::shared_ptr< scheduler::interface > interface; /// User configuration passed to the execution of the test. We need this /// here to recover it later when chaining the execution of a cleanup /// routine (if any). 
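// A minimal standalone sketch of the append_files_listing() idea above:
// scan a work directory and append the names of any leftover files to the
// test's stderr file.  It assumes std::filesystem purely for illustration;
// the real code uses utils::fs and reports errors via engine::error.
#include <filesystem>
#include <fstream>
#include <set>
#include <string>

static void
sketch_append_leftovers(const std::filesystem::path& dir,
                        const std::filesystem::path& output_file)
{
    std::set< std::string > names;
    for (const auto& entry : std::filesystem::directory_iterator(dir))
        names.insert(entry.path().filename().string());
    if (names.empty())
        return;

    std::ofstream output(output_file, std::ios::app);
    std::string joined;
    for (const auto& name : names)
        joined += (joined.empty() ? "" : ", ") + name;
    output << "Files left in work directory after failure: " << joined << '\n';
}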
const config::tree user_config; /// Whether this test case still needs to have its cleanup routine executed. /// /// This is set externally when the cleanup routine is actually invoked to /// denote that no further attempts shall be made at cleaning this up. bool needs_cleanup; /// Whether this test case still needs to have its execenv cleanup executed. /// /// This is set externally when the cleanup routine is actually invoked to /// denote that no further attempts shall be made at cleaning this up. bool needs_execenv_cleanup; /// Original PID of the test case subprocess. /// /// This is used for the cleanup upon termination by a signal, to reap the /// leftovers and form missing exit_handle. pid_t pid; /// The exit_handle for this test once it has completed. /// /// This is set externally when the test case has finished, as we need this /// information to invoke the followup cleanup routine in the right context, /// as indicated by needs_cleanup. optional< executor::exit_handle > exit_handle; /// Constructor. /// /// \param test_program_ Test program data for this test case. /// \param test_case_name_ Name of the test case. /// \param interface_ Test program-specific execution interface. /// \param user_config_ User configuration passed to the test. test_exec_data(const model::test_program_ptr test_program_, const std::string& test_case_name_, const std::shared_ptr< scheduler::interface > interface_, const config::tree& user_config_, const pid_t pid_) : exec_data(test_program_, test_case_name_), interface(interface_), user_config(user_config_), pid(pid_) { const model::test_case& test_case = test_program->find(test_case_name); needs_cleanup = test_case.get_metadata().has_cleanup(); needs_execenv_cleanup = test_case.get_metadata().has_execenv(); } }; /// Maintenance data held while a test cleanup routine is being executed. /// /// Instances of this object are related to a previous test_exec_data, as /// cleanup routines can only exist once the test has been run. struct cleanup_exec_data : public exec_data { /// The exit handle of the test. This is necessary so that we can return /// the correct exit_handle to the user of the scheduler. executor::exit_handle body_exit_handle; /// The final result of the test's body. This is necessary to compute the /// right return value for a test with a cleanup routine: the body result is /// respected if it is a "bad" result; else the result of the cleanup /// routine is used if it has failed. model::test_result body_result; /// Constructor. /// /// \param test_program_ Test program data for this test case. /// \param test_case_name_ Name of the test case. /// \param body_exit_handle_ If not none, exit handle of the body /// corresponding to the cleanup routine represented by this exec_data. /// \param body_result_ If not none, result of the body corresponding to the /// cleanup routine represented by this exec_data. cleanup_exec_data(const model::test_program_ptr test_program_, const std::string& test_case_name_, const executor::exit_handle& body_exit_handle_, const model::test_result& body_result_) : exec_data(test_program_, test_case_name_), body_exit_handle(body_exit_handle_), body_result(body_result_) { } }; /// Maintenance data held while a test execenv cleanup is being executed. /// /// Instances of this object are related to a previous test_exec_data, as /// cleanup routines can only exist once the test has been run. struct execenv_exec_data : public exec_data { /// The exit handle of the test. 
This is necessary so that we can return /// the correct exit_handle to the user of the scheduler. executor::exit_handle body_exit_handle; /// The final result of the test's body. This is necessary to compute the /// right return value for a test with a cleanup routine: the body result is /// respected if it is a "bad" result; else the result of the cleanup /// routine is used if it has failed. model::test_result body_result; /// Constructor. /// /// \param test_program_ Test program data for this test case. /// \param test_case_name_ Name of the test case. /// \param body_exit_handle_ If not none, exit handle of the body /// corresponding to the cleanup routine represented by this exec_data. /// \param body_result_ If not none, result of the body corresponding to the /// cleanup routine represented by this exec_data. execenv_exec_data(const model::test_program_ptr test_program_, const std::string& test_case_name_, const executor::exit_handle& body_exit_handle_, const model::test_result& body_result_) : exec_data(test_program_, test_case_name_), body_exit_handle(body_exit_handle_), body_result(body_result_) { } }; /// Shared pointer to exec_data. /// /// We require this because we want exec_data to not be copyable, and thus we /// cannot just store it in the map without move constructors. typedef std::shared_ptr< exec_data > exec_data_ptr; /// Mapping of active PIDs to their maintenance data. typedef std::map< int, exec_data_ptr > exec_data_map; /// Enforces a test program to hold an absolute path. /// /// TODO(jmmv): This function (which is a pretty ugly hack) exists because we /// want the interface hooks to receive a test_program as their argument. /// However, those hooks run after the test program has been isolated, which /// means that the current directory has changed since when the test_program /// objects were created. This causes the absolute_path() method of /// test_program to return bogus values if the internal representation of their /// path is relative. We should fix somehow: maybe making the fs module grab /// its "current_path" view at program startup time; or maybe by grabbing the /// current path at test_program creation time; or maybe something else. /// /// \param program The test program to modify. /// /// \return A new test program whose internal paths are absolute. static model::test_program force_absolute_paths(const model::test_program program) { const std::string& relative = program.relative_path().str(); const std::string absolute = program.absolute_path().str(); const std::string root = absolute.substr( 0, absolute.length() - relative.length()); return model::test_program( program.interface_name(), program.relative_path(), fs::path(root), program.test_suite_name(), program.get_metadata(), program.test_cases()); } /// Functor to list the test cases of a test program. class list_test_cases { /// Interface of the test program to execute. std::shared_ptr< scheduler::interface > _interface; /// Test program to execute. const model::test_program _test_program; /// User-provided configuration variables. const config::tree& _user_config; public: /// Constructor. /// /// \param interface Interface of the test program to execute. /// \param test_program Test program to execute. /// \param user_config User-provided configuration variables. 
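// The root-derivation trick behind force_absolute_paths() above, shown in
// isolation: the test suite root is the absolute path with the relative
// suffix stripped.  Plain std::string is used here as a simplification of
// model::test_program and utils::fs::path.
#include <cassert>
#include <string>

static std::string
sketch_derive_root(const std::string& absolute, const std::string& relative)
{
    assert(absolute.size() >= relative.size());
    assert(absolute.compare(absolute.size() - relative.size(),
                            relative.size(), relative) == 0);
    return absolute.substr(0, absolute.size() - relative.size());
}

// e.g. sketch_derive_root("/usr/tests/lib/libc/t_foo", "lib/libc/t_foo")
// yields "/usr/tests/".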
list_test_cases( const std::shared_ptr< scheduler::interface > interface, const model::test_program* test_program, const config::tree& user_config) : _interface(interface), _test_program(force_absolute_paths(*test_program)), _user_config(user_config) { } /// Body of the subprocess. void operator()(const fs::path& /* control_directory */) { const config::properties_map vars = scheduler::generate_config( _user_config, _test_program.test_suite_name()); _interface->exec_list(_test_program, vars); } }; /// Functor to execute a test program in a child process. class run_test_program { /// Interface of the test program to execute. std::shared_ptr< scheduler::interface > _interface; /// Test program to execute. const model::test_program _test_program; /// Name of the test case to execute. const std::string& _test_case_name; /// User-provided configuration variables. const config::tree& _user_config; /// Verifies if the test case needs to be skipped or not. /// /// We could very well run this on the scheduler parent process before /// issuing the fork. However, doing this here in the child process is /// better for two reasons: first, it allows us to continue using the simple /// spawn/wait abstraction of the scheduler; and, second, we parallelize the /// requirements checks among tests. /// /// \post If the test's preconditions are not met, the caller process is /// terminated with a special exit code and a "skipped cookie" is written to /// the disk with the reason for the failure. /// /// \param skipped_cookie_path File to create with the skip reason details /// if this test is skipped. void do_requirements_check(const fs::path& skipped_cookie_path) { const model::test_case& test_case = _test_program.find( _test_case_name); const std::string skip_reason = engine::check_reqs( test_case.get_metadata(), _user_config, _test_program.test_suite_name(), fs::current_path()); if (skip_reason.empty()) return; std::ofstream output(skipped_cookie_path.c_str()); if (!output) { std::perror((F("Failed to open %s for write") % skipped_cookie_path).str().c_str()); std::abort(); } output << skip_reason; output.close(); // Abruptly terminate the process. We don't want to run any destructors // inherited from the parent process by mistake, which could, for // example, delete our own control files! ::_exit(exit_skipped); } public: /// Constructor. /// /// \param interface Interface of the test program to execute. /// \param test_program Test program to execute. /// \param test_case_name Name of the test case to execute. /// \param user_config User-provided configuration variables. run_test_program( const std::shared_ptr< scheduler::interface > interface, const model::test_program_ptr test_program, const std::string& test_case_name, const config::tree& user_config) : _interface(interface), _test_program(force_absolute_paths(*test_program)), _test_case_name(test_case_name), _user_config(user_config) { } /// Body of the subprocess. /// /// \param control_directory The testcase directory where files will be /// read from. void operator()(const fs::path& control_directory) { const model::test_case& test_case = _test_program.find( _test_case_name); if (test_case.fake_result()) ::_exit(EXIT_SUCCESS); do_requirements_check(control_directory / skipped_cookie); const config::properties_map vars = scheduler::generate_config( _user_config, _test_program.test_suite_name()); _interface->exec_test(_test_program, _test_case_name, vars, control_directory); } }; /// Functor to execute a test program in a child process. 
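// Child-side half of the "skipped cookie" protocol used by
// do_requirements_check() above, as a minimal standalone sketch: when the
// requirements are not met, write the reason to the cookie file and terminate
// abruptly with the magic exit code (84, matching exit_skipped earlier in
// this file) so that no inherited destructors run.
#include <unistd.h>

#include <fstream>
#include <string>

static void
sketch_skip_if_unmet(const std::string& skip_reason,
                     const std::string& cookie_path)
{
    if (skip_reason.empty())
        return;  // Requirements met; proceed with the test body.

    std::ofstream output(cookie_path.c_str());
    output << skip_reason;
    output.close();
    ::_exit(84);  // Bypass atexit handlers and destructors on purpose.
}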
class run_test_cleanup { /// Interface of the test program to execute. std::shared_ptr< scheduler::interface > _interface; /// Test program to execute. const model::test_program _test_program; /// Name of the test case to execute. const std::string& _test_case_name; /// User-provided configuration variables. const config::tree& _user_config; public: /// Constructor. /// /// \param interface Interface of the test program to execute. /// \param test_program Test program to execute. /// \param test_case_name Name of the test case to execute. /// \param user_config User-provided configuration variables. run_test_cleanup( const std::shared_ptr< scheduler::interface > interface, const model::test_program_ptr test_program, const std::string& test_case_name, const config::tree& user_config) : _interface(interface), _test_program(force_absolute_paths(*test_program)), _test_case_name(test_case_name), _user_config(user_config) { } /// Body of the subprocess. /// /// \param control_directory The testcase directory where cleanup will be /// run from. void operator()(const fs::path& control_directory) { const config::properties_map vars = scheduler::generate_config( _user_config, _test_program.test_suite_name()); _interface->exec_cleanup(_test_program, _test_case_name, vars, control_directory); } }; /// Functor to execute a test execenv cleanup in a child process. class run_execenv_cleanup { /// Test program to execute. const model::test_program _test_program; /// Name of the test case to execute. const std::string& _test_case_name; public: /// Constructor. /// /// \param test_program Test program to execute. /// \param test_case_name Name of the test case to execute. run_execenv_cleanup( const model::test_program_ptr test_program, const std::string& test_case_name) : _test_program(force_absolute_paths(*test_program)), _test_case_name(test_case_name) { } /// Body of the subprocess. /// /// \param control_directory The testcase directory where cleanup will be /// run from. void operator()(const fs::path& /* control_directory */) { auto e = execenv::get(_test_program, _test_case_name); e->cleanup(); } }; /// Obtains the right scheduler interface for a given test program. /// /// \param name The name of the interface of the test program. /// /// \return An scheduler interface. std::shared_ptr< scheduler::interface > find_interface(const std::string& name) { const interfaces_map::const_iterator iter = interfaces.find(name); PRE(interfaces.find(name) != interfaces.end()); return (*iter).second; } } // anonymous namespace void scheduler::interface::exec_cleanup( const model::test_program& /* test_program */, const std::string& /* test_case_name */, const config::properties_map& /* vars */, const utils::fs::path& /* control_directory */) const { // Most test interfaces do not support standalone cleanup routines so // provide a default implementation that does nothing. UNREACHABLE_MSG("exec_cleanup not implemented for an interface that " "supports standalone cleanup routines"); } /// Internal implementation of a lazy_test_program. struct engine::scheduler::lazy_test_program::impl : utils::noncopyable { /// Whether the test cases list has been yet loaded or not. bool _loaded; /// User configuration to pass to the test program list operation. config::tree _user_config; /// Scheduler context to use to load test cases. scheduler::scheduler_handle& _scheduler_handle; /// Constructor. /// /// \param user_config_ User configuration to pass to the test program list /// operation. 
/// \param scheduler_handle_ Scheduler context to use when loading test /// cases. impl(const config::tree& user_config_, scheduler::scheduler_handle& scheduler_handle_) : _loaded(false), _user_config(user_config_), _scheduler_handle(scheduler_handle_) { } }; /// Constructs a new test program. /// /// \param interface_name_ Name of the test program interface. /// \param binary_ The name of the test program binary relative to root_. /// \param root_ The root of the test suite containing the test program. /// \param test_suite_name_ The name of the test suite this program belongs to. /// \param md_ Metadata of the test program. /// \param user_config_ User configuration to pass to the scheduler. /// \param scheduler_handle_ Scheduler context to use to load test cases. scheduler::lazy_test_program::lazy_test_program( const std::string& interface_name_, const fs::path& binary_, const fs::path& root_, const std::string& test_suite_name_, const model::metadata& md_, const config::tree& user_config_, scheduler::scheduler_handle& scheduler_handle_) : test_program(interface_name_, binary_, root_, test_suite_name_, md_, model::test_cases_map()), _pimpl(new impl(user_config_, scheduler_handle_)) { } /// Gets or loads the list of test cases from the test program. /// /// \return The list of test cases provided by the test program. const model::test_cases_map& scheduler::lazy_test_program::test_cases(void) const { _pimpl->_scheduler_handle.check_interrupt(); if (!_pimpl->_loaded) { const model::test_cases_map tcs = _pimpl->_scheduler_handle.list_tests( this, _pimpl->_user_config); // Due to the restrictions on when set_test_cases() may be called (as a // way to lazily initialize the test cases list before it is ever // returned), this cast is valid. const_cast< scheduler::lazy_test_program* >(this)->set_test_cases(tcs); _pimpl->_loaded = true; _pimpl->_scheduler_handle.check_interrupt(); } INV(_pimpl->_loaded); return test_program::test_cases(); } /// Internal implementation for the result_handle class. struct engine::scheduler::result_handle::bimpl : utils::noncopyable { /// Generic executor exit handle for this result handle. executor::exit_handle generic; /// Mutable pointer to the corresponding scheduler state. /// /// This object references a member of the scheduler_handle that yielded /// this result_handle instance. We need this direct access to clean up /// after ourselves when the result is destroyed. exec_data_map& all_exec_data; /// Constructor. /// /// \param generic_ Generic executor exit handle for this result handle. /// \param [in,out] all_exec_data_ Global object keeping track of all active /// executions for an scheduler. This is a pointer to a member of the /// scheduler_handle object. bimpl(const executor::exit_handle generic_, exec_data_map& all_exec_data_) : generic(generic_), all_exec_data(all_exec_data_) { } /// Destructor. ~bimpl(void) { LD(F("Removing %s from all_exec_data") % generic.original_pid()); all_exec_data.erase(generic.original_pid()); } }; /// Constructor. /// /// \param pbimpl Constructed internal implementation. scheduler::result_handle::result_handle(std::shared_ptr< bimpl > pbimpl) : _pbimpl(pbimpl) { } /// Destructor. scheduler::result_handle::~result_handle(void) { } /// Cleans up the test case results. /// /// This function should be called explicitly as it provides the means to /// control any exceptions raised during cleanup. Do not rely on the destructor /// to clean things up. 
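// The lazy-loading pattern implemented by lazy_test_program::test_cases()
// above, reduced to a self-contained sketch: the expensive listing runs at
// most once, on first access, and the result is cached behind a _loaded
// flag.  The loader callable stands in for scheduler_handle::list_tests().
#include <functional>
#include <string>
#include <vector>

class sketch_lazy_list {
    bool _loaded;
    std::vector< std::string > _items;
    std::function< std::vector< std::string >(void) > _loader;

public:
    explicit sketch_lazy_list(
        std::function< std::vector< std::string >(void) > loader) :
        _loaded(false), _loader(loader)
    {
    }

    const std::vector< std::string >&
    items(void)
    {
        if (!_loaded) {
            _items = _loader();  // Expensive: fork, run the list, parse it.
            _loaded = true;
        }
        return _items;
    }
};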
/// /// \throw engine::error If the cleanup fails, especially due to the inability /// to remove the work directory. void scheduler::result_handle::cleanup(void) { _pbimpl->generic.cleanup(); } /// Returns the original PID corresponding to this result. /// /// \return An exec_handle. int scheduler::result_handle::original_pid(void) const { return _pbimpl->generic.original_pid(); } /// Returns the timestamp of when spawn_test was called. /// /// \return A timestamp. const datetime::timestamp& scheduler::result_handle::start_time(void) const { return _pbimpl->generic.start_time(); } /// Returns the timestamp of when wait_any_test returned this object. /// /// \return A timestamp. const datetime::timestamp& scheduler::result_handle::end_time(void) const { return _pbimpl->generic.end_time(); } /// Returns the path to the test-specific work directory. /// /// This is guaranteed to be clear of files created by the scheduler. /// /// \return The path to a directory that exists until cleanup() is called. fs::path scheduler::result_handle::work_directory(void) const { return _pbimpl->generic.work_directory(); } /// Returns the path to the test's stdout file. /// /// \return The path to a file that exists until cleanup() is called. const fs::path& scheduler::result_handle::stdout_file(void) const { return _pbimpl->generic.stdout_file(); } /// Returns the path to the test's stderr file. /// /// \return The path to a file that exists until cleanup() is called. const fs::path& scheduler::result_handle::stderr_file(void) const { return _pbimpl->generic.stderr_file(); } /// Internal implementation for the test_result_handle class. struct engine::scheduler::test_result_handle::impl : utils::noncopyable { /// Test program data for this test case. model::test_program_ptr test_program; /// Name of the test case. std::string test_case_name; /// The actual result of the test execution. const model::test_result test_result; /// Constructor. /// /// \param test_program_ Test program data for this test case. /// \param test_case_name_ Name of the test case. /// \param test_result_ The actual result of the test execution. impl(const model::test_program_ptr test_program_, const std::string& test_case_name_, const model::test_result& test_result_) : test_program(test_program_), test_case_name(test_case_name_), test_result(test_result_) { } }; /// Constructor. /// /// \param pbimpl Constructed internal implementation for the base object. /// \param pimpl Constructed internal implementation. scheduler::test_result_handle::test_result_handle( std::shared_ptr< bimpl > pbimpl, std::shared_ptr< impl > pimpl) : result_handle(pbimpl), _pimpl(pimpl) { } /// Destructor. scheduler::test_result_handle::~test_result_handle(void) { } /// Returns the test program that yielded this result. /// /// \return A test program. const model::test_program_ptr scheduler::test_result_handle::test_program(void) const { return _pimpl->test_program; } /// Returns the name of the test case that yielded this result. /// /// \return A test case name const std::string& scheduler::test_result_handle::test_case_name(void) const { return _pimpl->test_case_name; } /// Returns the actual result of the test execution. /// /// \return A test result. const model::test_result& scheduler::test_result_handle::test_result(void) const { return _pimpl->test_result; } /// Internal implementation for the scheduler_handle. struct engine::scheduler::scheduler_handle::impl : utils::noncopyable { /// Generic executor instance encapsulated by this one. 
executor::executor_handle generic; /// Mapping of exec handles to the data required at run time. exec_data_map all_exec_data; /// Collection of test_exec_data objects. typedef std::vector< const test_exec_data* > test_exec_data_vector; /// Constructor. impl(void) : generic(executor::setup()) { } /// Destructor. /// /// This runs any pending cleanup routines, which should only happen if the /// scheduler is abruptly terminated (aka if a signal is received). ~impl(void) { const test_exec_data_vector tests_data = tests_needing_cleanup(); for (test_exec_data_vector::const_iterator iter = tests_data.begin(); iter != tests_data.end(); ++iter) { const test_exec_data* test_data = *iter; try { sync_cleanup(test_data); } catch (const std::runtime_error& e) { LW(F("Failed to run cleanup routine for %s:%s on abrupt " "termination") % test_data->test_program->relative_path() % test_data->test_case_name); } } const test_exec_data_vector td = tests_needing_execenv_cleanup(); for (test_exec_data_vector::const_iterator iter = td.begin(); iter != td.end(); ++iter) { const test_exec_data* test_data = *iter; try { sync_execenv_cleanup(test_data); } catch (const std::runtime_error& e) { LW(F("Failed to run execenv cleanup routine for %s:%s on abrupt " "termination") % test_data->test_program->relative_path() % test_data->test_case_name); } } } /// Finds any pending exec_datas that correspond to tests needing cleanup. /// /// \return The collection of test_exec_data objects that have their /// needs_cleanup property set to true. test_exec_data_vector tests_needing_cleanup(void) { test_exec_data_vector tests_data; for (exec_data_map::const_iterator iter = all_exec_data.begin(); iter != all_exec_data.end(); ++iter) { const exec_data_ptr data = (*iter).second; try { test_exec_data* test_data = &dynamic_cast< test_exec_data& >( *data.get()); if (test_data->needs_cleanup) { tests_data.push_back(test_data); test_data->needs_cleanup = false; if (!test_data->exit_handle) test_data->exit_handle = generic.reap(test_data->pid); } } catch (const std::bad_cast& e) { // Do nothing for cleanup_exec_data objects. } } return tests_data; } /// Finds any pending exec_datas that correspond to tests needing execenv /// cleanup. /// /// \return The collection of test_exec_data objects that have their /// specific execenv property set. test_exec_data_vector tests_needing_execenv_cleanup(void) { test_exec_data_vector tests_data; for (exec_data_map::const_iterator iter = all_exec_data.begin(); iter != all_exec_data.end(); ++iter) { const exec_data_ptr data = (*iter).second; try { test_exec_data* test_data = &dynamic_cast< test_exec_data& >( *data.get()); if (test_data->needs_execenv_cleanup) { tests_data.push_back(test_data); test_data->needs_execenv_cleanup = false; if (!test_data->exit_handle) test_data->exit_handle = generic.reap(test_data->pid); } } catch (const std::bad_cast& e) { // Do nothing for other objects. } } return tests_data; } /// Cleans up a single test case synchronously. /// /// \param test_data The data of the previously executed test case to be /// cleaned up. void sync_cleanup(const test_exec_data* test_data) { // The message in this result should never be seen by the user, but use // something reasonable just in case it leaks and we need to pinpoint // the call site. 
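// A minimal sketch of the dynamic_cast-based filtering performed by
// tests_needing_cleanup() above: walk the PID-keyed map of shared_ptr< base >
// records and collect only those that really are test records still flagged
// for cleanup.  The sketch_* types are simplified stand-ins for exec_data and
// test_exec_data, and a pointer cast replaces the reference cast plus
// std::bad_cast handling used by the real code.
#include <map>
#include <memory>
#include <vector>

struct sketch_exec_data { virtual ~sketch_exec_data(void) {} };
struct sketch_test_data : sketch_exec_data { bool needs_cleanup = true; };

static std::vector< sketch_test_data* >
sketch_pending_cleanups(
    const std::map< int, std::shared_ptr< sketch_exec_data > >& all)
{
    std::vector< sketch_test_data* > pending;
    for (const auto& entry : all) {
        sketch_test_data* td =
            dynamic_cast< sketch_test_data* >(entry.second.get());
        if (td != nullptr && td->needs_cleanup)
            pending.push_back(td);
    }
    return pending;
}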
model::test_result result(model::test_result_broken, "Test case died abruptly"); const executor::exec_handle cleanup_handle = spawn_cleanup( test_data->test_program, test_data->test_case_name, test_data->user_config, test_data->exit_handle.get(), result); generic.wait(cleanup_handle); } /// Forks and executes a test case cleanup routine asynchronously. /// /// \param test_program The container test program. /// \param test_case_name The name of the test case to run. /// \param user_config User-provided configuration variables. /// \param body_handle The exit handle of the test case's corresponding /// body. The cleanup will be executed in the same context. /// \param body_result The result of the test case's corresponding body. /// /// \return A handle for the background operation. Used to match the result /// of the execution returned by wait_any() with this invocation. executor::exec_handle spawn_cleanup(const model::test_program_ptr test_program, const std::string& test_case_name, const config::tree& user_config, const executor::exit_handle& body_handle, const model::test_result& body_result) { generic.check_interrupt(); const std::shared_ptr< scheduler::interface > interface = find_interface(test_program->interface_name()); LI(F("Spawning %s:%s (cleanup)") % test_program->absolute_path() % test_case_name); const executor::exec_handle handle = generic.spawn_followup( run_test_cleanup(interface, test_program, test_case_name, user_config), body_handle, cleanup_timeout); const exec_data_ptr data(new cleanup_exec_data( test_program, test_case_name, body_handle, body_result)); LD(F("Inserting %s into all_exec_data (cleanup)") % handle.pid()); INV_MSG(all_exec_data.find(handle.pid()) == all_exec_data.end(), F("PID %s already in all_exec_data; not properly cleaned " "up or reused too fast") % handle.pid());; all_exec_data.insert(exec_data_map::value_type(handle.pid(), data)); return handle; } /// Cleans up a single test case execenv synchronously. /// /// \param test_data The data of the previously executed test case to be /// cleaned up. void sync_execenv_cleanup(const test_exec_data* test_data) { // The message in this result should never be seen by the user, but use // something reasonable just in case it leaks and we need to pinpoint // the call site. model::test_result result(model::test_result_broken, "Test case died abruptly"); const executor::exec_handle cleanup_handle = spawn_execenv_cleanup( test_data->test_program, test_data->test_case_name, test_data->exit_handle.get(), result); generic.wait(cleanup_handle); } /// Forks and executes a test case execenv cleanup asynchronously. /// /// \param test_program The container test program. /// \param test_case_name The name of the test case to run. /// \param body_handle The exit handle of the test case's corresponding /// body. The cleanup will be executed in the same context. /// \param body_result The result of the test case's corresponding body. /// /// \return A handle for the background operation. Used to match the result /// of the execution returned by wait_any() with this invocation. 
executor::exec_handle spawn_execenv_cleanup(const model::test_program_ptr test_program, const std::string& test_case_name, const executor::exit_handle& body_handle, const model::test_result& body_result) { generic.check_interrupt(); LI(F("Spawning %s:%s (execenv cleanup)") % test_program->absolute_path() % test_case_name); const executor::exec_handle handle = generic.spawn_followup( run_execenv_cleanup(test_program, test_case_name), body_handle, execenv_cleanup_timeout); const exec_data_ptr data(new execenv_exec_data( test_program, test_case_name, body_handle, body_result)); LD(F("Inserting %s into all_exec_data (execenv cleanup)") % handle.pid()); INV_MSG(all_exec_data.find(handle.pid()) == all_exec_data.end(), F("PID %s already in all_exec_data; not properly cleaned " "up or reused too fast") % handle.pid());; all_exec_data.insert(exec_data_map::value_type(handle.pid(), data)); return handle; } }; /// Constructor. scheduler::scheduler_handle::scheduler_handle(void) : _pimpl(new impl()) { } /// Destructor. scheduler::scheduler_handle::~scheduler_handle(void) { } /// Queries the path to the root of the work directory for all tests. /// /// \return A path. const fs::path& scheduler::scheduler_handle::root_work_directory(void) const { return _pimpl->generic.root_work_directory(); } /// Cleans up the scheduler state. /// /// This function should be called explicitly as it provides the means to /// control any exceptions raised during cleanup. Do not rely on the destructor /// to clean things up. /// /// \throw engine::error If there are problems cleaning up the scheduler. void scheduler::scheduler_handle::cleanup(void) { _pimpl->generic.cleanup(); } /// Checks if the given interface name is valid. /// /// \param name The name of the interface to validate. /// /// \throw engine::error If the given interface is not supported. void scheduler::ensure_valid_interface(const std::string& name) { if (interfaces.find(name) == interfaces.end()) throw engine::error(F("Unsupported test interface '%s'") % name); } /// Registers a new interface. /// /// \param name The name of the interface. Must not have yet been registered. /// \param spec Interface specification. void scheduler::register_interface(const std::string& name, const std::shared_ptr< interface > spec) { PRE(interfaces.find(name) == interfaces.end()); interfaces.insert(interfaces_map::value_type(name, spec)); } /// Returns the names of all registered interfaces. /// /// \return A collection of interface names. std::set< std::string > scheduler::registered_interface_names(void) { std::set< std::string > names; for (interfaces_map::const_iterator iter = interfaces.begin(); iter != interfaces.end(); ++iter) { names.insert((*iter).first); } return names; } /// Initializes the scheduler. /// /// \pre This function can only be called if there is no other scheduler_handle /// object alive. /// /// \return A handle to the operations of the scheduler. scheduler::scheduler_handle scheduler::setup(void) { return scheduler_handle(); } /// Retrieves the list of test cases from a test program. /// /// This operation is currently synchronous. /// /// This operation should never throw. Any errors during the processing of the /// test case list are subsumed into a single test case in the return value that /// represents the failed retrieval. /// /// \param test_program The test program from which to obtain the list of test /// cases. /// \param user_config User-provided configuration variables. /// /// \return The list of test cases. 
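// A minimal sketch of the name-to-implementation registry manipulated by
// register_interface() and ensure_valid_interface() above: a process-global
// map populated once at startup and consulted before spawning any test.
// sketch_interface is a hypothetical placeholder for scheduler::interface.
#include <cassert>
#include <map>
#include <memory>
#include <stdexcept>
#include <string>

struct sketch_interface { /* exec_list(), exec_test(), compute_result()... */ };

static std::map< std::string, std::shared_ptr< sketch_interface > >
    sketch_registry;

static void
sketch_register(const std::string& name,
                const std::shared_ptr< sketch_interface > spec)
{
    assert(sketch_registry.find(name) == sketch_registry.end());
    sketch_registry[name] = spec;
}

static std::shared_ptr< sketch_interface >
sketch_find(const std::string& name)
{
    const auto iter = sketch_registry.find(name);
    if (iter == sketch_registry.end())
        throw std::runtime_error("Unsupported test interface '" + name + "'");
    return iter->second;
}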
model::test_cases_map scheduler::scheduler_handle::list_tests( const model::test_program* test_program, const config::tree& user_config) { _pimpl->generic.check_interrupt(); const std::shared_ptr< scheduler::interface > interface = find_interface( test_program->interface_name()); try { const executor::exec_handle exec_handle = _pimpl->generic.spawn( list_test_cases(interface, test_program, user_config), list_timeout, none); executor::exit_handle exit_handle = _pimpl->generic.wait(exec_handle); const model::test_cases_map test_cases = interface->parse_list( exit_handle.status(), exit_handle.stdout_file(), exit_handle.stderr_file()); exit_handle.cleanup(); if (test_cases.empty()) throw std::runtime_error("Empty test cases list"); return test_cases; } catch (const std::runtime_error& e) { // TODO(jmmv): This is a very ugly workaround for the fact that we // cannot report failures at the test-program level. LW(F("Failed to load test cases list: %s") % e.what()); model::test_cases_map fake_test_cases; fake_test_cases.insert(model::test_cases_map::value_type( "__test_cases_list__", model::test_case( "__test_cases_list__", "Represents the correct processing of the test cases list", model::test_result(model::test_result_broken, e.what())))); return fake_test_cases; } } /// Forks and executes a test case asynchronously. /// /// Note that the caller needn't know if the test has a cleanup routine or not. /// If there indeed is a cleanup routine, we trigger it at wait_any() time. /// /// \param test_program The container test program. /// \param test_case_name The name of the test case to run. /// \param user_config User-provided configuration variables. /// /// \return A handle for the background operation. Used to match the result of /// the execution returned by wait_any() with this invocation. scheduler::exec_handle scheduler::scheduler_handle::spawn_test( const model::test_program_ptr test_program, const std::string& test_case_name, const config::tree& user_config) { _pimpl->generic.check_interrupt(); const std::shared_ptr< scheduler::interface > interface = find_interface( test_program->interface_name()); LI(F("Spawning %s:%s") % test_program->absolute_path() % test_case_name); const model::test_case& test_case = test_program->find(test_case_name); optional< passwd::user > unprivileged_user; if (user_config.is_set("unprivileged_user") && test_case.get_metadata().required_user() == "unprivileged") { unprivileged_user = user_config.lookup< engine::user_node >( "unprivileged_user"); } const executor::exec_handle handle = _pimpl->generic.spawn( run_test_program(interface, test_program, test_case_name, user_config), test_case.get_metadata().timeout(), unprivileged_user); const exec_data_ptr data(new test_exec_data( test_program, test_case_name, interface, user_config, handle.pid())); LD(F("Inserting %s into all_exec_data") % handle.pid()); INV_MSG( _pimpl->all_exec_data.find(handle.pid()) == _pimpl->all_exec_data.end(), F("PID %s already in all_exec_data; not cleaned up or reused too fast") % handle.pid());; _pimpl->all_exec_data.insert(exec_data_map::value_type(handle.pid(), data)); return handle.pid(); } /// Waits for completion of any forked test case. /// /// Note that if the terminated test case has a cleanup routine, this function /// is the one in charge of spawning the cleanup routine asynchronously. /// /// \return The result of the execution of a subprocess. 
This is a dynamically /// allocated object because the scheduler can spawn subprocesses of various /// types and, at wait time, we don't know upfront what we are going to get. scheduler::result_handle_ptr scheduler::scheduler_handle::wait_any(void) { _pimpl->generic.check_interrupt(); executor::exit_handle handle = _pimpl->generic.wait_any(); const exec_data_map::iterator iter = _pimpl->all_exec_data.find( handle.original_pid()); exec_data_ptr data = (*iter).second; utils::dump_stacktrace_if_available(data->test_program->absolute_path(), _pimpl->generic, handle); optional< model::test_result > result; // test itself try { test_exec_data* test_data = &dynamic_cast< test_exec_data& >( *data.get()); LD(F("Got %s from all_exec_data") % handle.original_pid()); test_data->exit_handle = handle; const model::test_case& test_case = test_data->test_program->find( test_data->test_case_name); result = test_case.fake_result(); if (!result && handle.status() && handle.status().get().exited() && handle.status().get().exitstatus() == exit_skipped) { // If the test's process terminated with our magic "exit_skipped" // status, there are two cases to handle. The first is the case // where the "skipped cookie" exists, in which case we never got to // actually invoke the test program; if that's the case, handle it // here. The second case is where the test case actually decided to // exit with the "exit_skipped" status; in that case, just fall back // to the regular status handling. const fs::path skipped_cookie_path = handle.control_directory() / skipped_cookie; std::ifstream input(skipped_cookie_path.c_str()); if (input) { result = model::test_result(model::test_result_skipped, utils::read_stream(input)); input.close(); // If we determined that the test needs to be skipped, we do not // want to run the cleanup routine because doing so could result // in errors. However, we still want to run the cleanup routine // if the test's body reports a skip (because actions could have // already been taken). test_data->needs_cleanup = false; test_data->needs_execenv_cleanup = false; } } if (!result) { result = test_data->interface->compute_result( handle.status(), handle.control_directory(), handle.stdout_file(), handle.stderr_file()); } INV(result); if (!result.get().good()) { append_files_listing(handle.work_directory(), handle.stderr_file()); } if (test_data->needs_cleanup) { INV(test_case.get_metadata().has_cleanup()); // The test body has completed and we have processed it. If there // is a cleanup routine, trigger it now and wait for any other test // completion. The caller never knows about cleanup routines. _pimpl->spawn_cleanup(test_data->test_program, test_data->test_case_name, test_data->user_config, handle, result.get()); // TODO(jmmv): Chaining this call is ugly. We'd be better off by // looping over terminated processes until we got a result suitable // for user consumption. For the time being this is good enough and // not a problem because the call chain won't get big: the majority // of test cases do not have cleanup routines. 
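// Parent-side half of the "skipped cookie" protocol as checked by wait_any()
// above, reduced to a standalone sketch: the test is reported as skipped only
// when the child exited with the magic code 84 (exit_skipped) *and* the
// cookie file exists; otherwise the status falls through to the interface's
// normal result computation.
#include <fstream>
#include <sstream>
#include <string>
#include <utility>

// Returns (true, reason) when the exit status plus cookie indicate a skip.
static std::pair< bool, std::string >
sketch_detect_skip(const int exit_status, const std::string& cookie_path)
{
    if (exit_status != 84)
        return { false, "" };

    std::ifstream input(cookie_path.c_str());
    if (!input)
        return { false, "" };  // The test exited 84 on its own; not a skip.

    std::ostringstream reason;
    reason << input.rdbuf();
    return { true, reason.str() };
}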
return wait_any(); } if (test_data->needs_execenv_cleanup) { INV(test_case.get_metadata().has_execenv()); _pimpl->spawn_execenv_cleanup(test_data->test_program, test_data->test_case_name, handle, result.get()); test_data->needs_execenv_cleanup = false; return wait_any(); } } catch (const std::bad_cast& e) { // ok, let's check for another type } // test cleanup try { const cleanup_exec_data* cleanup_data = &dynamic_cast< const cleanup_exec_data& >(*data.get()); LD(F("Got %s from all_exec_data (cleanup)") % handle.original_pid()); // Handle the completion of cleanup subprocesses internally: the caller // is not aware that these exist so, when we return, we must return the // data for the original test that triggered this routine. For example, // because the caller wants to see the exact same exec_handle that was // returned by spawn_test. const model::test_result& body_result = cleanup_data->body_result; if (body_result.good()) { if (!handle.status()) { result = model::test_result(model::test_result_broken, "Test case cleanup timed out"); } else { if (!handle.status().get().exited() || handle.status().get().exitstatus() != EXIT_SUCCESS) { result = model::test_result( model::test_result_broken, "Test case cleanup did not terminate successfully"); } else { result = body_result; } } } else { result = body_result; } // Untrack the cleanup process. This must be done explicitly because we // do not create a result_handle object for the cleanup, and that is the // one in charge of doing so in the regular (non-cleanup) case. LD(F("Removing %s from all_exec_data (cleanup) in favor of %s") % handle.original_pid() % cleanup_data->body_exit_handle.original_pid()); _pimpl->all_exec_data.erase(handle.original_pid()); handle = cleanup_data->body_exit_handle; const exec_data_map::iterator it = _pimpl->all_exec_data.find( handle.original_pid()); if (it != _pimpl->all_exec_data.end()) { exec_data_ptr d = (*it).second; test_exec_data* test_data = &dynamic_cast< test_exec_data& >( *d.get()); const model::test_case& test_case = cleanup_data->test_program->find(cleanup_data->test_case_name); test_data->needs_cleanup = false; if (test_data->needs_execenv_cleanup) { INV(test_case.get_metadata().has_execenv()); _pimpl->spawn_execenv_cleanup(cleanup_data->test_program, cleanup_data->test_case_name, handle, result.get()); test_data->needs_execenv_cleanup = false; return wait_any(); } } } catch (const std::bad_cast& e) { // ok, let's check for another type } // execenv cleanup try { const execenv_exec_data* execenv_data = &dynamic_cast< const execenv_exec_data& >(*data.get()); LD(F("Got %s from all_exec_data (execenv cleanup)") % handle.original_pid()); const model::test_result& body_result = execenv_data->body_result; if (body_result.good()) { if (!handle.status()) { result = model::test_result(model::test_result_broken, "Test case execenv cleanup timed out"); } else { if (!handle.status().get().exited() || handle.status().get().exitstatus() != EXIT_SUCCESS) { result = model::test_result( model::test_result_broken, "Test case execenv cleanup did not terminate successfully"); // ? 
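// The result-precedence rule applied above once a cleanup routine finishes,
// as a minimal sketch over simplified types: a bad body result is never
// overridden, while a good one (which in the real code also covers skipped
// and expected results) is downgraded to broken when the cleanup timed out or
// exited unsuccessfully.
#include <string>

enum sketch_outcome { SKETCH_PASSED, SKETCH_FAILED, SKETCH_BROKEN };

struct sketch_result {
    sketch_outcome outcome;
    std::string reason;
};

static sketch_result
sketch_merge_with_cleanup(const sketch_result& body, const bool cleanup_known,
                          const bool cleanup_exited_ok)
{
    if (body.outcome != SKETCH_PASSED)
        return body;
    if (!cleanup_known)
        return { SKETCH_BROKEN, "Test case cleanup timed out" };
    if (!cleanup_exited_ok)
        return { SKETCH_BROKEN,
                 "Test case cleanup did not terminate successfully" };
    return body;
}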
} else { result = body_result; } } } else { result = body_result; } LD(F("Removing %s from all_exec_data (execenv cleanup) in favor of %s") % handle.original_pid() % execenv_data->body_exit_handle.original_pid()); _pimpl->all_exec_data.erase(handle.original_pid()); handle = execenv_data->body_exit_handle; } catch (const std::bad_cast& e) { // ok, it was one of the types above } INV(result); std::shared_ptr< result_handle::bimpl > result_handle_bimpl( new result_handle::bimpl(handle, _pimpl->all_exec_data)); std::shared_ptr< test_result_handle::impl > test_result_handle_impl( new test_result_handle::impl( data->test_program, data->test_case_name, result.get())); return result_handle_ptr(new test_result_handle(result_handle_bimpl, test_result_handle_impl)); } /// Forks and executes a test case synchronously for debugging. /// /// \pre No other processes should be in execution by the scheduler. /// /// \param test_program The container test program. /// \param test_case_name The name of the test case to run. /// \param user_config User-provided configuration variables. /// \param stdout_target File to which to write the stdout of the test case. /// \param stderr_target File to which to write the stderr of the test case. /// /// \return The result of the execution of the test. scheduler::result_handle_ptr scheduler::scheduler_handle::debug_test( const model::test_program_ptr test_program, const std::string& test_case_name, const config::tree& user_config, const fs::path& stdout_target, const fs::path& stderr_target) { const exec_handle exec_handle = spawn_test( test_program, test_case_name, user_config); result_handle_ptr result_handle = wait_any(); // TODO(jmmv): We need to do this while the subprocess is alive. This is // important for debugging purposes, as we should see the contents of stdout // or stderr as they come in. // // Unfortunately, we cannot do so. We cannot just read and block from a // file, waiting for further output to appear... as this only works on pipes // or sockets. We need a better interface for this whole thing. { std::auto_ptr< std::ostream > output = utils::open_ostream( stdout_target); *output << utils::read_file(result_handle->stdout_file()); } { std::auto_ptr< std::ostream > output = utils::open_ostream( stderr_target); *output << utils::read_file(result_handle->stderr_file()); } INV(result_handle->original_pid() == exec_handle); return result_handle; } /// Checks if an interrupt has fired. /// /// Calls to this function should be sprinkled in strategic places through the /// code protected by an interrupts_handler object. /// /// This is just a wrapper over signals::check_interrupt() to avoid leaking this /// dependency to the caller. /// /// \throw signals::interrupted_error If there has been an interrupt. void scheduler::scheduler_handle::check_interrupt(void) const { _pimpl->generic.check_interrupt(); } /// Queries the current execution context. /// /// \return The queried context. model::context scheduler::current_context(void) { return model::context(fs::current_path(), utils::getallenv()); } /// Generates the set of configuration variables for a test program. /// /// \param user_config The configuration variables provided by the user. /// \param test_suite The name of the test suite. /// /// \return The mapping of configuration variables for the test program. 
config::properties_map
scheduler::generate_config(const config::tree& user_config,
                           const std::string& test_suite)
{
    config::properties_map props;

    try {
        props = user_config.all_properties(F("test_suites.%s") % test_suite,
                                           true);
    } catch (const config::unknown_key_error& unused_error) {
        // Ignore: not all test suites have entries in the configuration.
    }

    // TODO(jmmv): This is a hack that exists for the ATF interface only, so it
    // should be moved there.
    if (user_config.is_set("unprivileged_user")) {
        const passwd::user& user =
            user_config.lookup< engine::user_node >("unprivileged_user");
+        // The property is duplicated using both ATF and Kyua naming styles
+        // for better UX.
        props["unprivileged-user"] = user.name;
+        props["unprivileged_user"] = user.name;
    }

    return props;
}
diff --git a/contrib/kyua/engine/scheduler_test.cpp b/contrib/kyua/engine/scheduler_test.cpp
index e144761d8f01..d91c448f2e5e 100644
--- a/contrib/kyua/engine/scheduler_test.cpp
+++ b/contrib/kyua/engine/scheduler_test.cpp
@@ -1,1242 +1,1243 @@
// Copyright 2014 The Kyua Authors.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
//   notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
//   notice, this list of conditions and the following disclaimer in the
//   documentation and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors
//   may be used to endorse or promote products derived from this software
//   without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
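// A minimal sketch of what the duplicated property naming added to
// generate_config() in scheduler.cpp above buys a consumer: code written
// against the ATF spelling ("unprivileged-user") and code written against the
// Kyua spelling ("unprivileged_user") both find the value in the same
// properties map.  The lookup helper is illustrative only and not Kyua API.
#include <map>
#include <string>

typedef std::map< std::string, std::string > sketch_properties_map;

static std::string
sketch_lookup_unprivileged_user(const sketch_properties_map& props)
{
    sketch_properties_map::const_iterator iter =
        props.find("unprivileged-user");
    if (iter == props.end())
        iter = props.find("unprivileged_user");
    return iter == props.end() ? std::string() : iter->second;
}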
#include "engine/scheduler.hpp" extern "C" { #include #include #include } #include #include #include #include #include #include "engine/config.hpp" #include "engine/exceptions.hpp" #include "model/context.hpp" #include "model/metadata.hpp" #include "model/test_case.hpp" #include "model/test_program.hpp" #include "model/test_result.hpp" #include "utils/config/tree.ipp" #include "utils/datetime.hpp" #include "utils/defs.hpp" #include "utils/env.hpp" #include "utils/format/containers.ipp" #include "utils/format/macros.hpp" #include "utils/fs/operations.hpp" #include "utils/fs/path.hpp" #include "utils/optional.ipp" #include "utils/passwd.hpp" #include "utils/process/status.hpp" #include "utils/sanity.hpp" #include "utils/stacktrace.hpp" #include "utils/stream.hpp" #include "utils/test_utils.ipp" #include "utils/text/exceptions.hpp" #include "utils/text/operations.ipp" namespace config = utils::config; namespace datetime = utils::datetime; namespace fs = utils::fs; namespace passwd = utils::passwd; namespace process = utils::process; namespace scheduler = engine::scheduler; namespace text = utils::text; using utils::none; using utils::optional; namespace { /// Checks if a string starts with a prefix. /// /// \param str The string to be tested. /// \param prefix The prefix to look for. /// /// \return True if the string is prefixed as specified. static bool starts_with(const std::string& str, const std::string& prefix) { return (str.length() >= prefix.length() && str.substr(0, prefix.length()) == prefix); } /// Strips a prefix from a string and converts the rest to an integer. /// /// \param str The string to be tested. /// \param prefix The prefix to strip from the string. /// /// \return The part of the string after the prefix converted to an integer. static int suffix_to_int(const std::string& str, const std::string& prefix) { PRE(starts_with(str, prefix)); try { return text::to_type< int >(str.substr(prefix.length())); } catch (const text::value_error& error) { std::cerr << F("Failed: %s\n") % error.what(); std::abort(); } } /// Mock interface definition for testing. /// /// This scheduler interface does not execute external binaries. It is designed /// to simulate the scheduler of various programs with different exit statuses. class mock_interface : public scheduler::interface { /// Executes the subprocess simulating an exec. /// /// This is just a simple wrapper over _exit(2) because we cannot use /// std::exit on exit from this mock interface. The reason is that we do /// not want to invoke any destructors as otherwise we'd clear up the global /// scheduler state by mistake. This wouldn't be a major problem if it /// wasn't because doing so deletes on-disk files and we want to leave them /// in place so that the parent process can test for them! /// /// \param exit_code Exit code. void do_exit(const int exit_code) const UTILS_NORETURN { std::cout.flush(); std::cerr.flush(); ::_exit(exit_code); } /// Executes a test case that creates various files and then fails. void exec_create_files_and_fail(void) const UTILS_NORETURN { std::cerr << "This should not be clobbered\n"; atf::utils::create_file("first file", ""); atf::utils::create_file("second-file", ""); fs::mkdir_p(fs::path("dir1/dir2"), 0755); ::kill(::getpid(), SIGTERM); std::abort(); } /// Executes a test case that deletes all files in the current directory. /// /// This is intended to validate that the test runs in an empty directory, /// separate from any control files that the scheduler may have created. 
void exec_delete_all(void) const UTILS_NORETURN { const int exit_code = ::system("rm *") == -1 ? EXIT_FAILURE : EXIT_SUCCESS; // Recreate our own cookie. atf::utils::create_file("exec_test_was_called", ""); do_exit(exit_code); } /// Executes a test case that returns a specific exit code. /// /// \param exit_code Exit status to terminate the program with. void exec_exit(const int exit_code) const UTILS_NORETURN { do_exit(exit_code); } /// Executes a test case that just fails. void exec_fail(void) const UTILS_NORETURN { std::cerr << "This should not be clobbered\n"; ::kill(::getpid(), SIGTERM); std::abort(); } /// Executes a test case that prints all input parameters to the functor. /// /// \param test_program The test program to execute. /// \param test_case_name Name of the test case to invoke, which must be a /// number. /// \param vars User-provided variables to pass to the test program. void exec_print_params(const model::test_program& test_program, const std::string& test_case_name, const config::properties_map& vars) const UTILS_NORETURN { std::cout << F("Test program: %s\n") % test_program.relative_path(); std::cout << F("Test case: %s\n") % test_case_name; for (config::properties_map::const_iterator iter = vars.begin(); iter != vars.end(); ++iter) { std::cout << F("%s=%s\n") % (*iter).first % (*iter).second; } std::cerr << F("stderr: %s\n") % test_case_name; do_exit(EXIT_SUCCESS); } public: /// Executes a test program's list operation. /// /// This method is intended to be called within a subprocess and is expected /// to terminate execution either by exec(2)ing the test program or by /// exiting with a failure. /// /// \param test_program The test program to execute. /// \param vars User-provided variables to pass to the test program. void exec_list(const model::test_program& test_program, const config::properties_map& vars) const UTILS_NORETURN { const std::string name = test_program.absolute_path().leaf_name(); std::cerr << name; std::cerr.flush(); if (name == "check_i_exist") { if (fs::exists(test_program.absolute_path())) { std::cout << "found\n"; do_exit(EXIT_SUCCESS); } else { std::cout << "not_found\n"; do_exit(EXIT_FAILURE); } } else if (name == "empty") { do_exit(EXIT_SUCCESS); } else if (name == "misbehave") { utils::abort_without_coredump(); } else if (name == "timeout") { std::cout << "sleeping\n"; std::cout.flush(); ::sleep(100); utils::abort_without_coredump(); } else if (name == "vars") { for (config::properties_map::const_iterator iter = vars.begin(); iter != vars.end(); ++iter) { std::cout << F("%s_%s\n") % (*iter).first % (*iter).second; } do_exit(15); } else { std::abort(); } } /// Computes the test cases list of a test program. /// /// \param status The termination status of the subprocess used to execute /// the exec_test() method or none if the test timed out. /// \param stdout_path Path to the file containing the stdout of the test. /// \param stderr_path Path to the file containing the stderr of the test. /// /// \return A list of test cases. 
model::test_cases_map parse_list(const optional< process::status >& status, const fs::path& stdout_path, const fs::path& stderr_path) const { const std::string name = utils::read_file(stderr_path); if (name == "check_i_exist") { ATF_REQUIRE(status.get().exited()); ATF_REQUIRE_EQ(EXIT_SUCCESS, status.get().exitstatus()); } else if (name == "empty") { ATF_REQUIRE(status.get().exited()); ATF_REQUIRE_EQ(EXIT_SUCCESS, status.get().exitstatus()); } else if (name == "misbehave") { throw std::runtime_error("misbehaved in parse_list"); } else if (name == "timeout") { ATF_REQUIRE(!status); } else if (name == "vars") { ATF_REQUIRE(status.get().exited()); ATF_REQUIRE_EQ(15, status.get().exitstatus()); } else { ATF_FAIL("Invalid stderr contents; got " + name); } model::test_cases_map_builder test_cases_builder; std::ifstream input(stdout_path.c_str()); ATF_REQUIRE(input); std::string line; while (std::getline(input, line).good()) { test_cases_builder.add(line); } return test_cases_builder.build(); } /// Executes a test case of the test program. /// /// This method is intended to be called within a subprocess and is expected /// to terminate execution either by exec(2)ing the test program or by /// exiting with a failure. /// /// \param test_program The test program to execute. /// \param test_case_name Name of the test case to invoke. /// \param vars User-provided variables to pass to the test program. /// \param control_directory Directory where the interface may place control /// files. void exec_test(const model::test_program& test_program, const std::string& test_case_name, const config::properties_map& vars, const fs::path& control_directory) const { const fs::path cookie = control_directory / "exec_test_was_called"; std::ofstream control_file(cookie.c_str()); if (!control_file) { std::cerr << "Failed to create " << cookie << '\n'; std::abort(); } control_file << test_case_name; control_file.close(); if (test_case_name == "check_i_exist") { do_exit(fs::exists(test_program.absolute_path()) ? 0 : 1); } else if (starts_with(test_case_name, "cleanup_timeout")) { exec_exit(EXIT_SUCCESS); } else if (starts_with(test_case_name, "create_files_and_fail")) { exec_create_files_and_fail(); } else if (test_case_name == "delete_all") { exec_delete_all(); } else if (starts_with(test_case_name, "exit ")) { exec_exit(suffix_to_int(test_case_name, "exit ")); } else if (starts_with(test_case_name, "fail")) { exec_fail(); } else if (starts_with(test_case_name, "fail_body_fail_cleanup")) { exec_fail(); } else if (starts_with(test_case_name, "fail_body_pass_cleanup")) { exec_fail(); } else if (starts_with(test_case_name, "pass_body_fail_cleanup")) { exec_exit(EXIT_SUCCESS); } else if (starts_with(test_case_name, "print_params")) { exec_print_params(test_program, test_case_name, vars); } else if (starts_with(test_case_name, "skip_body_pass_cleanup")) { exec_exit(EXIT_SUCCESS); } else { std::cerr << "Unknown test case " << test_case_name << '\n'; std::abort(); } } /// Executes a test cleanup routine of the test program. /// /// This method is intended to be called within a subprocess and is expected /// to terminate execution either by exec(2)ing the test program or by /// exiting with a failure. /// /// \param test_case_name Name of the test case to invoke. 
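// Editorial aside (not part of the patch): exec_test() below records which test
// case ran by dropping a cookie file into control_directory before dispatching on
// the test case name.  A standalone sketch of that pattern; write_cookie() and the
// "." directory are illustrative names, not kyua API.

#include <cstdlib>
#include <fstream>
#include <iostream>
#include <string>

static bool
write_cookie(const std::string& control_directory, const std::string& test_case)
{
    const std::string cookie = control_directory + "/exec_test_was_called";
    std::ofstream control_file(cookie.c_str());
    if (!control_file) {
        std::cerr << "Failed to create " << cookie << '\n';
        return false;
    }
    control_file << test_case;            // The parent reads this back to know what ran.
    return true;
}

int
main(void)
{
    return write_cookie(".", "exit 0") ? EXIT_SUCCESS : EXIT_FAILURE;
}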
void exec_cleanup(const model::test_program& /* test_program */, const std::string& test_case_name, const config::properties_map& /* vars */, const fs::path& /* control_directory */) const { std::cout << "exec_cleanup was called\n"; std::cout.flush(); if (starts_with(test_case_name, "cleanup_timeout")) { ::sleep(100); std::abort(); } else if (starts_with(test_case_name, "fail_body_fail_cleanup")) { exec_fail(); } else if (starts_with(test_case_name, "fail_body_pass_cleanup")) { exec_exit(EXIT_SUCCESS); } else if (starts_with(test_case_name, "pass_body_fail_cleanup")) { exec_fail(); } else if (starts_with(test_case_name, "skip_body_pass_cleanup")) { exec_exit(EXIT_SUCCESS); } else { std::cerr << "Should not have been called for a test without " "a cleanup routine" << '\n'; std::abort(); } } /// Computes the result of a test case based on its termination status. /// /// \param status The termination status of the subprocess used to execute /// the exec_test() method or none if the test timed out. /// \param control_directory Path to the directory where the interface may /// have placed control files. /// \param stdout_path Path to the file containing the stdout of the test. /// \param stderr_path Path to the file containing the stderr of the test. /// /// \return A test result. model::test_result compute_result(const optional< process::status >& status, const fs::path& control_directory, const fs::path& stdout_path, const fs::path& stderr_path) const { // Do not use any ATF_* macros here. Some of the tests below invoke // this code in a subprocess, and terminating such subprocess due to a // failed ATF_* macro yields mysterious failures that are incredibly // hard to debug. (Case in point: the signal_handling test is racy by // nature, and the test run by exec_test() above may not have created // the cookie we expect below. We don't want to "silently" exit if the // file is not there.) if (!status) { return model::test_result(model::test_result_broken, "Timed out"); } if (status.get().exited()) { // Only sanity-check the work directory-related parameters in case // of a clean exit. In all other cases, there is no guarantee that // these were ever created. const fs::path cookie = control_directory / "exec_test_was_called"; if (!atf::utils::file_exists(cookie.str())) { return model::test_result( model::test_result_broken, "compute_result's control_directory does not seem to point " "to the right location"); } const std::string test_case_name = utils::read_file(cookie); if (!atf::utils::file_exists(stdout_path.str())) { return model::test_result( model::test_result_broken, "compute_result's stdout_path does not exist"); } if (!atf::utils::file_exists(stderr_path.str())) { return model::test_result( model::test_result_broken, "compute_result's stderr_path does not exist"); } if (test_case_name == "skip_body_pass_cleanup") { return model::test_result( model::test_result_skipped, F("Exit %s") % status.get().exitstatus()); } else { return model::test_result( model::test_result_passed, F("Exit %s") % status.get().exitstatus()); } } else { return model::test_result( model::test_result_failed, F("Signal %s") % status.get().termsig()); } } }; } // anonymous namespace /// Runs list_tests on the scheduler and returns the results. /// /// \param test_name The name of the test supported by our exec_list function. /// \param user_config Optional user settings for the test. /// /// \return The loaded list of test cases. 
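// Editorial aside (not part of the patch): the shape of the compute_result()
// mapping above, reduced to plain standard types so it compiles on its own.
// The outcome enum, describe() and the timed_out flag are illustrative stand-ins
// for model::test_result and the optional process status, not kyua API.

#include <iostream>
#include <string>

enum outcome { outcome_passed, outcome_failed, outcome_broken };

static outcome
describe(const bool timed_out, const bool exited, const int code_or_signal,
         std::string& reason)
{
    if (timed_out) {
        reason = "Timed out";                            // No status at all: broken.
        return outcome_broken;
    }
    if (exited) {
        reason = "Exit " + std::to_string(code_or_signal);
        return outcome_passed;                           // Clean exit: passed (or skipped).
    }
    reason = "Signal " + std::to_string(code_or_signal);
    return outcome_failed;                               // Killed by a signal: failed.
}

int
main(void)
{
    std::string reason;
    const outcome o = describe(false, false, 15, reason);
    std::cout << reason << '\n';                         // Prints "Signal 15".
    return o == outcome_failed ? 0 : 1;
}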
static model::test_cases_map check_integration_list(const char* test_name, const fs::path root, const config::tree& user_config = engine::empty_config()) { const model::test_program program = model::test_program_builder( "mock", fs::path(test_name), root, "the-suite") .build(); scheduler::scheduler_handle handle = scheduler::setup(); const model::test_cases_map test_cases = handle.list_tests(&program, user_config); handle.cleanup(); return test_cases; } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_some); ATF_TEST_CASE_BODY(integration__list_some) { config::tree user_config = engine::empty_config(); user_config.set_string("test_suites.the-suite.first", "test"); user_config.set_string("test_suites.the-suite.second", "TEST"); user_config.set_string("test_suites.abc.unused", "unused"); const model::test_cases_map test_cases = check_integration_list( "vars", fs::path("."), user_config); const model::test_cases_map exp_test_cases = model::test_cases_map_builder() .add("first_test").add("second_TEST").build(); ATF_REQUIRE_EQ(exp_test_cases, test_cases); } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_check_paths); ATF_TEST_CASE_BODY(integration__list_check_paths) { fs::mkdir_p(fs::path("dir1/dir2/dir3"), 0755); atf::utils::create_file("dir1/dir2/dir3/check_i_exist", ""); const model::test_cases_map test_cases = check_integration_list( "dir2/dir3/check_i_exist", fs::path("dir1")); const model::test_cases_map exp_test_cases = model::test_cases_map_builder() .add("found").build(); ATF_REQUIRE_EQ(exp_test_cases, test_cases); } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_timeout); ATF_TEST_CASE_BODY(integration__list_timeout) { scheduler::list_timeout = datetime::delta(1, 0); const model::test_cases_map test_cases = check_integration_list( "timeout", fs::path(".")); const model::test_cases_map exp_test_cases = model::test_cases_map_builder() .add("sleeping").build(); ATF_REQUIRE_EQ(exp_test_cases, test_cases); } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_fail); ATF_TEST_CASE_BODY(integration__list_fail) { const model::test_cases_map test_cases = check_integration_list( "misbehave", fs::path(".")); ATF_REQUIRE_EQ(1, test_cases.size()); const model::test_case& test_case = test_cases.begin()->second; ATF_REQUIRE_EQ("__test_cases_list__", test_case.name()); ATF_REQUIRE(test_case.fake_result()); ATF_REQUIRE_EQ(model::test_result(model::test_result_broken, "misbehaved in parse_list"), test_case.fake_result().get()); } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_empty); ATF_TEST_CASE_BODY(integration__list_empty) { const model::test_cases_map test_cases = check_integration_list( "empty", fs::path(".")); ATF_REQUIRE_EQ(1, test_cases.size()); const model::test_case& test_case = test_cases.begin()->second; ATF_REQUIRE_EQ("__test_cases_list__", test_case.name()); ATF_REQUIRE(test_case.fake_result()); ATF_REQUIRE_EQ(model::test_result(model::test_result_broken, "Empty test cases list"), test_case.fake_result().get()); } ATF_TEST_CASE_WITHOUT_HEAD(integration__run_one); ATF_TEST_CASE_BODY(integration__run_one) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("exit 41").build_ptr(); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); const scheduler::exec_handle exec_handle = handle.spawn_test( program, "exit 41", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = 
dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(exec_handle, result_handle->original_pid()); ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 41"), test_result_handle->test_result()); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__run_many); ATF_TEST_CASE_BODY(integration__run_many) { static const std::size_t num_test_programs = 30; const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); // We mess around with the "current time" below, so make sure the tests do // not spuriously exceed their deadline by bumping it to a large number. const model::metadata infinite_timeout = model::metadata_builder() .set_timeout(datetime::delta(1000000L, 0)).build(); std::size_t total_tests = 0; std::map< scheduler::exec_handle, model::test_program_ptr > exp_test_programs; std::map< scheduler::exec_handle, std::string > exp_test_case_names; std::map< scheduler::exec_handle, datetime::timestamp > exp_start_times; std::map< scheduler::exec_handle, int > exp_exit_statuses; for (std::size_t i = 0; i < num_test_programs; ++i) { const std::string test_case_0 = F("exit %s") % (i * 3 + 0); const std::string test_case_1 = F("exit %s") % (i * 3 + 1); const std::string test_case_2 = F("exit %s") % (i * 3 + 2); const model::test_program_ptr program = model::test_program_builder( "mock", fs::path(F("program-%s") % i), fs::current_path(), "the-suite") .set_metadata(infinite_timeout) .add_test_case(test_case_0) .add_test_case(test_case_1) .add_test_case(test_case_2) .build_ptr(); const datetime::timestamp start_time = datetime::timestamp::from_values( 2014, 12, 8, 9, 40, 0, i); scheduler::exec_handle exec_handle; datetime::set_mock_now(start_time); exec_handle = handle.spawn_test(program, test_case_0, user_config); exp_test_programs.insert(std::make_pair(exec_handle, program)); exp_test_case_names.insert(std::make_pair(exec_handle, test_case_0)); exp_start_times.insert(std::make_pair(exec_handle, start_time)); exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3)); ++total_tests; datetime::set_mock_now(start_time); exec_handle = handle.spawn_test(program, test_case_1, user_config); exp_test_programs.insert(std::make_pair(exec_handle, program)); exp_test_case_names.insert(std::make_pair(exec_handle, test_case_1)); exp_start_times.insert(std::make_pair(exec_handle, start_time)); exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3 + 1)); ++total_tests; datetime::set_mock_now(start_time); exec_handle = handle.spawn_test(program, test_case_2, user_config); exp_test_programs.insert(std::make_pair(exec_handle, program)); exp_test_case_names.insert(std::make_pair(exec_handle, test_case_2)); exp_start_times.insert(std::make_pair(exec_handle, start_time)); exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3 + 2)); ++total_tests; } for (std::size_t i = 0; i < total_tests; ++i) { const datetime::timestamp end_time = datetime::timestamp::from_values( 2014, 12, 8, 9, 50, 10, i); datetime::set_mock_now(end_time); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); const scheduler::exec_handle exec_handle = result_handle->original_pid(); const model::test_program_ptr test_program = exp_test_programs.find( exec_handle)->second; const std::string& test_case_name = exp_test_case_names.find( 
exec_handle)->second; const datetime::timestamp& start_time = exp_start_times.find( exec_handle)->second; const int exit_status = exp_exit_statuses.find(exec_handle)->second; ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, F("Exit %s") % exit_status), test_result_handle->test_result()); ATF_REQUIRE_EQ(test_program, test_result_handle->test_program()); ATF_REQUIRE_EQ(test_case_name, test_result_handle->test_case_name()); ATF_REQUIRE_EQ(start_time, result_handle->start_time()); ATF_REQUIRE_EQ(end_time, result_handle->end_time()); result_handle->cleanup(); ATF_REQUIRE(!atf::utils::file_exists( result_handle->stdout_file().str())); ATF_REQUIRE(!atf::utils::file_exists( result_handle->stderr_file().str())); ATF_REQUIRE(!atf::utils::file_exists( result_handle->work_directory().str())); result_handle.reset(); } handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__run_check_paths); ATF_TEST_CASE_BODY(integration__run_check_paths) { fs::mkdir_p(fs::path("dir1/dir2/dir3"), 0755); atf::utils::create_file("dir1/dir2/dir3/program", ""); const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("dir2/dir3/program"), fs::path("dir1"), "the-suite") .add_test_case("check_i_exist").build_ptr(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, "check_i_exist", engine::default_config()); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"), test_result_handle->test_result()); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__parameters_and_output); ATF_TEST_CASE_BODY(integration__parameters_and_output) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("print_params").build_ptr(); config::tree user_config = engine::empty_config(); user_config.set_string("test_suites.the-suite.one", "first variable"); user_config.set_string("test_suites.the-suite.two", "second variable"); scheduler::scheduler_handle handle = scheduler::setup(); const scheduler::exec_handle exec_handle = handle.spawn_test( program, "print_params", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(exec_handle, result_handle->original_pid()); ATF_REQUIRE_EQ(program, test_result_handle->test_program()); ATF_REQUIRE_EQ("print_params", test_result_handle->test_case_name()); ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"), test_result_handle->test_result()); const fs::path stdout_file = result_handle->stdout_file(); ATF_REQUIRE(atf::utils::compare_file( stdout_file.str(), "Test program: the-program\n" "Test case: print_params\n" "one=first variable\n" "two=second variable\n")); const fs::path stderr_file = result_handle->stderr_file(); ATF_REQUIRE(atf::utils::compare_file( stderr_file.str(), "stderr: print_params\n")); result_handle->cleanup(); ATF_REQUIRE(!fs::exists(stdout_file)); ATF_REQUIRE(!fs::exists(stderr_file)); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__fake_result); ATF_TEST_CASE_BODY(integration__fake_result) { const model::test_result 
fake_result(model::test_result_skipped, "Some fake details"); model::test_cases_map test_cases; test_cases.insert(model::test_cases_map::value_type( "__fake__", model::test_case("__fake__", "ABC", fake_result))); const model::test_program_ptr program(new model::test_program( "mock", fs::path("the-program"), fs::current_path(), "the-suite", model::metadata_builder().build(), test_cases)); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, "__fake__", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(fake_result, test_result_handle->test_result()); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__head_skips); ATF_TEST_CASE_BODY(integration__cleanup__head_skips) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("skip_me", model::metadata_builder() .add_required_config("variable-that-does-not-exist") .set_has_cleanup(true) .build()) .build_ptr(); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, "skip_me", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(model::test_result( model::test_result_skipped, "Required configuration property " "'variable-that-does-not-exist' not defined"), test_result_handle->test_result()); ATF_REQUIRE(!atf::utils::grep_file("exec_cleanup was called", result_handle->stdout_file().str())); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } /// Runs a test to verify the behavior of cleanup routines. /// /// \param test_case The name of the test case to invoke. /// \param exp_result The expected test result of the execution. 
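// Editorial aside (not part of the patch): integration__fake_result above checks
// that a test case carrying a precomputed ("fake") result is reported without ever
// forking a child.  A toy version of that short-circuit using std::map; the
// fake_results table and run_or_report() are illustrative, only the behaviour
// mirrors the test's expectation.

#include <iostream>
#include <map>
#include <string>

static std::string
run_or_report(const std::map< std::string, std::string >& fake_results,
              const std::string& test_case)
{
    const std::map< std::string, std::string >::const_iterator iter =
        fake_results.find(test_case);
    if (iter != fake_results.end())
        return iter->second;              // Precomputed result: nothing is executed.
    return "executed for real";           // Otherwise the test would be spawned.
}

int
main(void)
{
    std::map< std::string, std::string > fake_results;
    fake_results["__fake__"] = "skipped: Some fake details";
    std::cout << run_or_report(fake_results, "__fake__") << '\n';
    return 0;
}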
static void do_cleanup_test(const char* test_case, const model::test_result& exp_result) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case(test_case) .set_metadata(model::metadata_builder().set_has_cleanup(true).build()) .build_ptr(); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, test_case, user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(exp_result, test_result_handle->test_result()); ATF_REQUIRE(atf::utils::compare_file( result_handle->stdout_file().str(), "exec_cleanup was called\n")); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_skips); ATF_TEST_CASE_BODY(integration__cleanup__body_skips) { do_cleanup_test( "skip_body_pass_cleanup", model::test_result(model::test_result_skipped, "Exit 0")); } ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_bad__cleanup_ok); ATF_TEST_CASE_BODY(integration__cleanup__body_bad__cleanup_ok) { do_cleanup_test( "fail_body_pass_cleanup", model::test_result(model::test_result_failed, "Signal 15")); } ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_ok__cleanup_bad); ATF_TEST_CASE_BODY(integration__cleanup__body_ok__cleanup_bad) { do_cleanup_test( "pass_body_fail_cleanup", model::test_result(model::test_result_broken, "Test case cleanup " "did not terminate successfully")); } ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_bad__cleanup_bad); ATF_TEST_CASE_BODY(integration__cleanup__body_bad__cleanup_bad) { do_cleanup_test( "fail_body_fail_cleanup", model::test_result(model::test_result_failed, "Signal 15")); } ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__timeout); ATF_TEST_CASE_BODY(integration__cleanup__timeout) { scheduler::cleanup_timeout = datetime::delta(1, 0); do_cleanup_test( "cleanup_timeout", model::test_result(model::test_result_broken, "Test case cleanup " "timed out")); } ATF_TEST_CASE_WITHOUT_HEAD(integration__check_requirements); ATF_TEST_CASE_BODY(integration__check_requirements) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("exit 12") .set_metadata(model::metadata_builder() .add_required_config("abcde").build()) .build_ptr(); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, "exit 12", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(model::test_result( model::test_result_skipped, "Required configuration property 'abcde' not defined"), test_result_handle->test_result()); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__stacktrace); ATF_TEST_CASE_BODY(integration__stacktrace) { utils::prepare_coredump_test(this); const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("unknown-dumps-core").build_ptr(); const config::tree user_config = 
engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, "unknown-dumps-core", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(model::test_result(model::test_result_failed, F("Signal %s") % SIGABRT), test_result_handle->test_result()); ATF_REQUIRE(!atf::utils::grep_file("attempting to gather stack trace", result_handle->stdout_file().str())); ATF_REQUIRE( atf::utils::grep_file("attempting to gather stack trace", result_handle->stderr_file().str())); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } /// Runs a test to verify the dumping of the list of existing files on failure. /// /// \param test_case The name of the test case to invoke. /// \param exp_stderr Expected contents of stderr. static void do_check_list_files_on_failure(const char* test_case, const char* exp_stderr) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case(test_case).build_ptr(); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, test_case, user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); atf::utils::cat_file(result_handle->stdout_file().str(), "child stdout: "); ATF_REQUIRE(atf::utils::compare_file(result_handle->stdout_file().str(), "")); atf::utils::cat_file(result_handle->stderr_file().str(), "child stderr: "); ATF_REQUIRE(atf::utils::compare_file(result_handle->stderr_file().str(), exp_stderr)); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_files_on_failure__none); ATF_TEST_CASE_BODY(integration__list_files_on_failure__none) { do_check_list_files_on_failure("fail", "This should not be clobbered\n"); } ATF_TEST_CASE_WITHOUT_HEAD(integration__list_files_on_failure__some); ATF_TEST_CASE_BODY(integration__list_files_on_failure__some) { do_check_list_files_on_failure( "create_files_and_fail", "This should not be clobbered\n" "Files left in work directory after failure: " "dir1, first file, second-file\n"); } ATF_TEST_CASE_WITHOUT_HEAD(integration__prevent_clobbering_control_files); ATF_TEST_CASE_BODY(integration__prevent_clobbering_control_files) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("delete_all").build_ptr(); const config::tree user_config = engine::empty_config(); scheduler::scheduler_handle handle = scheduler::setup(); (void)handle.spawn_test(program, "delete_all", user_config); scheduler::result_handle_ptr result_handle = handle.wait_any(); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"), test_result_handle->test_result()); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); } ATF_TEST_CASE_WITHOUT_HEAD(debug_test); ATF_TEST_CASE_BODY(debug_test) { const model::test_program_ptr program = model::test_program_builder( "mock", fs::path("the-program"), fs::current_path(), "the-suite") .add_test_case("print_params").build_ptr(); config::tree user_config = engine::empty_config(); 
user_config.set_string("test_suites.the-suite.one", "first variable"); user_config.set_string("test_suites.the-suite.two", "second variable"); scheduler::scheduler_handle handle = scheduler::setup(); const fs::path stdout_file("custom-stdout.txt"); const fs::path stderr_file("custom-stderr.txt"); scheduler::result_handle_ptr result_handle = handle.debug_test( program, "print_params", user_config, stdout_file, stderr_file); const scheduler::test_result_handle* test_result_handle = dynamic_cast< const scheduler::test_result_handle* >( result_handle.get()); ATF_REQUIRE_EQ(program, test_result_handle->test_program()); ATF_REQUIRE_EQ("print_params", test_result_handle->test_case_name()); ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"), test_result_handle->test_result()); // The original output went to a file. It's only an artifact of // debug_test() that we later get a copy in our own files. ATF_REQUIRE(stdout_file != result_handle->stdout_file()); ATF_REQUIRE(stderr_file != result_handle->stderr_file()); result_handle->cleanup(); result_handle.reset(); handle.cleanup(); ATF_REQUIRE(atf::utils::compare_file( stdout_file.str(), "Test program: the-program\n" "Test case: print_params\n" "one=first variable\n" "two=second variable\n")); ATF_REQUIRE(atf::utils::compare_file( stderr_file.str(), "stderr: print_params\n")); } ATF_TEST_CASE_WITHOUT_HEAD(ensure_valid_interface); ATF_TEST_CASE_BODY(ensure_valid_interface) { scheduler::ensure_valid_interface("mock"); ATF_REQUIRE_THROW_RE(engine::error, "Unsupported test interface 'mock2'", scheduler::ensure_valid_interface("mock2")); scheduler::register_interface( "mock2", std::shared_ptr< scheduler::interface >(new mock_interface())); scheduler::ensure_valid_interface("mock2"); // Standard interfaces should not be present unless registered. 
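// Editorial aside (not part of the patch): ensure_valid_interface() and
// register_interface(), exercised in the test above and below, behave like a
// simple name -> implementation registry.  A self-contained toy version with
// standard types; the registry, ensure_valid() and toy_interface are illustrative
// and do not claim to be kyua's actual implementation.

#include <map>
#include <memory>
#include <stdexcept>
#include <string>

struct toy_interface { virtual ~toy_interface() {} };

static std::map< std::string, std::shared_ptr< toy_interface > > registry;

static void
register_toy(const std::string& name, const std::shared_ptr< toy_interface >& iface)
{
    registry[name] = iface;               // Later lookups by name succeed.
}

static void
ensure_valid(const std::string& name)
{
    if (registry.find(name) == registry.end())
        throw std::runtime_error("Unsupported test interface '" + name + "'");
}

int
main(void)
{
    register_toy("mock", std::make_shared< toy_interface >());
    ensure_valid("mock");                 // OK: registered above.
    try {
        ensure_valid("plain");            // Throws: never registered here.
    } catch (const std::runtime_error&) {
        return 0;
    }
    return 1;
}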
    ATF_REQUIRE_THROW_RE(engine::error, "Unsupported test interface 'plain'",
                         scheduler::ensure_valid_interface("plain"));
}


ATF_TEST_CASE_WITHOUT_HEAD(registered_interface_names);
ATF_TEST_CASE_BODY(registered_interface_names)
{
    std::set< std::string > exp_names;

    exp_names.insert("mock");
    ATF_REQUIRE_EQ(exp_names, scheduler::registered_interface_names());

    scheduler::register_interface(
        "mock2", std::shared_ptr< scheduler::interface >(new mock_interface()));
    exp_names.insert("mock2");
    ATF_REQUIRE_EQ(exp_names, scheduler::registered_interface_names());
}


ATF_TEST_CASE_WITHOUT_HEAD(current_context);
ATF_TEST_CASE_BODY(current_context)
{
    const model::context context = scheduler::current_context();
    ATF_REQUIRE_EQ(fs::current_path(), context.cwd());
    ATF_REQUIRE(utils::getallenv() == context.env());
}


ATF_TEST_CASE_WITHOUT_HEAD(generate_config__empty);
ATF_TEST_CASE_BODY(generate_config__empty)
{
    const config::tree user_config = engine::empty_config();

    const config::properties_map exp_props;

    ATF_REQUIRE_EQ(exp_props,
                   scheduler::generate_config(user_config, "missing"));
}


ATF_TEST_CASE_WITHOUT_HEAD(generate_config__no_matches);
ATF_TEST_CASE_BODY(generate_config__no_matches)
{
    config::tree user_config = engine::empty_config();
    user_config.set_string("architecture", "foo");
    user_config.set_string("test_suites.one.var1", "value 1");

    const config::properties_map exp_props;

    ATF_REQUIRE_EQ(exp_props,
                   scheduler::generate_config(user_config, "two"));
}


ATF_TEST_CASE_WITHOUT_HEAD(generate_config__some_matches);
ATF_TEST_CASE_BODY(generate_config__some_matches)
{
    std::vector< passwd::user > mock_users;
    mock_users.push_back(passwd::user("nobody", 1234, 5678));
    passwd::set_mock_users_for_testing(mock_users);

    config::tree user_config = engine::empty_config();
    user_config.set_string("architecture", "foo");
    user_config.set_string("unprivileged_user", "nobody");
    user_config.set_string("test_suites.one.var1", "value 1");
    user_config.set_string("test_suites.two.var2", "value 2");

    config::properties_map exp_props;
    exp_props["unprivileged-user"] = "nobody";
+    exp_props["unprivileged_user"] = "nobody";
    exp_props["var1"] = "value 1";

    ATF_REQUIRE_EQ(exp_props,
                   scheduler::generate_config(user_config, "one"));
}


ATF_INIT_TEST_CASES(tcs)
{
    scheduler::register_interface(
        "mock", std::shared_ptr< scheduler::interface >(new mock_interface()));

    ATF_ADD_TEST_CASE(tcs, integration__list_some);
    ATF_ADD_TEST_CASE(tcs, integration__list_check_paths);
    ATF_ADD_TEST_CASE(tcs, integration__list_timeout);
    ATF_ADD_TEST_CASE(tcs, integration__list_fail);
    ATF_ADD_TEST_CASE(tcs, integration__list_empty);
    ATF_ADD_TEST_CASE(tcs, integration__run_one);
    ATF_ADD_TEST_CASE(tcs, integration__run_many);
    ATF_ADD_TEST_CASE(tcs, integration__run_check_paths);
    ATF_ADD_TEST_CASE(tcs, integration__parameters_and_output);
    ATF_ADD_TEST_CASE(tcs, integration__fake_result);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__head_skips);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_skips);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_ok__cleanup_bad);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_bad__cleanup_ok);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_bad__cleanup_bad);
    ATF_ADD_TEST_CASE(tcs, integration__cleanup__timeout);
    ATF_ADD_TEST_CASE(tcs, integration__check_requirements);
    ATF_ADD_TEST_CASE(tcs, integration__stacktrace);
    ATF_ADD_TEST_CASE(tcs, integration__list_files_on_failure__none);
    ATF_ADD_TEST_CASE(tcs, integration__list_files_on_failure__some);
    ATF_ADD_TEST_CASE(tcs, integration__prevent_clobbering_control_files);
ATF_ADD_TEST_CASE(tcs, debug_test); ATF_ADD_TEST_CASE(tcs, ensure_valid_interface); ATF_ADD_TEST_CASE(tcs, registered_interface_names); ATF_ADD_TEST_CASE(tcs, current_context); ATF_ADD_TEST_CASE(tcs, generate_config__empty); ATF_ADD_TEST_CASE(tcs, generate_config__no_matches); ATF_ADD_TEST_CASE(tcs, generate_config__some_matches); } diff --git a/contrib/kyua/integration/cmd_test_test.sh b/contrib/kyua/integration/cmd_test_test.sh index f0862cf29582..4c2fe863a4f5 100644 --- a/contrib/kyua/integration/cmd_test_test.sh +++ b/contrib/kyua/integration/cmd_test_test.sh @@ -1,1071 +1,1114 @@ # Copyright 2011 The Kyua Authors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of Google Inc. nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. utils_test_case one_test_program__all_pass one_test_program__all_pass_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < passed [S.UUUs] simple_all_pass:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 0 failed, 1 skipped) EOF utils_cp_helper simple_all_pass . atf_check -s exit:0 -o file:expout -e empty kyua test } utils_test_case one_test_program__some_fail one_test_program__some_fail_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < failed: This fails on purpose [S.UUUs] simple_some_fail:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 1 failed, 0 skipped) EOF utils_cp_helper simple_some_fail . 
atf_check -s exit:1 -o file:expout -e empty kyua test } utils_test_case many_test_programs__all_pass many_test_programs__all_pass_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < passed [S.UUUs] first:skip -> skipped: The reason for skipping is this [S.UUUs] fourth:main -> skipped: Required file '/non-existent/foo' not found [S.UUUs] second:pass -> passed [S.UUUs] second:skip -> skipped: The reason for skipping is this [S.UUUs] third:pass -> passed [S.UUUs] third:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 3/7 passed (0 broken, 0 failed, 4 skipped) EOF utils_cp_helper simple_all_pass first utils_cp_helper simple_all_pass second utils_cp_helper simple_all_pass third echo "not executed" >fourth; chmod +x fourth atf_check -s exit:0 -o file:expout -e empty kyua test } utils_test_case many_test_programs__some_fail many_test_programs__some_fail_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < failed: This fails on purpose [S.UUUs] first:pass -> passed [S.UUUs] fourth:main -> failed: Returned non-success exit status 76 [S.UUUs] second:fail -> failed: This fails on purpose [S.UUUs] second:pass -> passed [S.UUUs] third:pass -> passed [S.UUUs] third:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 3/7 passed (0 broken, 3 failed, 1 skipped) EOF utils_cp_helper simple_some_fail first utils_cp_helper simple_some_fail second utils_cp_helper simple_all_pass third echo '#! /bin/sh' >fourth echo 'exit 76' >>fourth chmod +x fourth atf_check -s exit:1 -o file:expout -e empty kyua test } utils_test_case expect__all_pass expect__all_pass_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < expected_failure: This is the reason for death [S.UUUs] expect_all_pass:exit -> expected_failure: Exiting with correct code [S.UUUs] expect_all_pass:failure -> expected_failure: Oh no: Forced failure [S.UUUs] expect_all_pass:signal -> expected_failure: Exiting with correct signal [S.UUUs] expect_all_pass:timeout -> expected_failure: This times out [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 5/5 passed (0 broken, 0 failed, 0 skipped) EOF # CHECK_STYLE_ENABLE utils_cp_helper expect_all_pass . atf_check -s exit:0 -o file:expout -e empty kyua test } utils_test_case expect__some_fail expect__some_fail_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < failed: Test case was expected to terminate abruptly but it continued execution [S.UUUs] expect_some_fail:exit -> failed: Test case expected to exit with code 12 but got code 34 [S.UUUs] expect_some_fail:failure -> failed: Test case was expecting a failure but none were raised [S.UUUs] expect_some_fail:pass -> passed [S.UUUs] expect_some_fail:signal -> failed: Test case expected to receive signal 15 but got 9 [S.UUUs] expect_some_fail:timeout -> failed: Test case was expected to hang but it continued execution [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/6 passed (0 broken, 5 failed, 0 skipped) EOF # CHECK_STYLE_ENABLE utils_cp_helper expect_some_fail . 
atf_check -s exit:1 -o file:expout -e empty kyua test } utils_test_case premature_exit premature_exit_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < broken: Premature exit; test case received signal 9 [S.UUUs] bogus_test_cases:exit -> broken: Premature exit; test case exited with code 0 [S.UUUs] bogus_test_cases:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/3 passed (2 broken, 0 failed, 0 skipped) EOF # CHECK_STYLE_ENABLE utils_cp_helper bogus_test_cases . atf_check -s exit:1 -o file:expout -e empty kyua test } utils_test_case no_args no_args_body() { utils_install_stable_test_wrapper cat >Kyuafile <subdir/Kyuafile <expout < passed [S.UUUs] simple_all_pass:skip -> skipped: The reason for skipping is this [S.UUUs] subdir/simple_some_fail:fail -> failed: This fails on purpose [S.UUUs] subdir/simple_some_fail:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 2/4 passed (0 broken, 1 failed, 1 skipped) EOF atf_check -s exit:1 -o file:expout -e empty kyua test } utils_test_case one_arg__subdir one_arg__subdir_body() { utils_install_stable_test_wrapper cat >Kyuafile <subdir/Kyuafile <expout < passed [S.UUUs] subdir/simple_all_pass:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 0 failed, 1 skipped) EOF # CHECK_STYLE_ENABLE atf_check -s exit:0 -o file:expout -e empty kyua test subdir } utils_test_case one_arg__test_case one_arg__test_case_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 0/1 passed (0 broken, 0 failed, 1 skipped) EOF atf_check -s exit:0 -o file:expout -e empty kyua test first:skip } utils_test_case one_arg__test_program one_arg__test_program_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < failed: This fails on purpose [S.UUUs] second:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 1 failed, 0 skipped) EOF atf_check -s exit:1 -o file:expout -e empty kyua test second } utils_test_case one_arg__invalid one_arg__invalid_body() { cat >experr <experr <Kyuafile <subdir/Kyuafile <expout < passed [S.UUUs] subdir/second:fail -> failed: This fails on purpose [S.UUUs] subdir/second:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 2/3 passed (0 broken, 1 failed, 0 skipped) EOF atf_check -s exit:1 -o file:expout -e empty kyua test subdir first:pass } utils_test_case many_args__invalid many_args__invalid_body() { cat >experr <experr <Kyuafile <expout <experr <Kyuafile <expout < passed [S.UUUs] first:skip -> skipped: The reason for skipping is this [S.UUUs] third:fail -> failed: This fails on purpose [S.UUUs] third:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 2/4 passed (0 broken, 1 failed, 1 skipped) EOF cat >experr <root/Kyuafile <root/subdir/Kyuafile <expout < passed [S.UUUs] first:skip -> skipped: The reason for skipping is this [S.UUUs] subdir/fourth:fail -> failed: This fails on purpose [S.UUUs] Results file id is $(utils_results_id root) Results saved to $(utils_results_file root) 1/3 passed (0 broken, 1 failed, 1 skipped) EOF atf_check -s exit:1 -o file:expout -e empty kyua test \ -k "$(pwd)/root/Kyuafile" 
first subdir/fourth:fail } utils_test_case only_load_used_test_programs only_load_used_test_programs_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout < passed [S.UUUs] first:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 0 failed, 1 skipped) EOF CREATE_COOKIE="$(pwd)/cookie"; export CREATE_COOKIE atf_check -s exit:0 -o file:expout -e empty kyua test first if [ -f "${CREATE_COOKIE}" ]; then atf_fail "An unmatched test case has been executed, which harms" \ "performance" fi } utils_test_case config_behavior config_behavior_body() { cat >"my-config" <Kyuafile <"my-config" <Kyuafile <Kyuafile <expout < failed: This fails on purpose [S.UUUs] some-program:pass -> passed [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 1 failed, 0 skipped) EOF atf_check -s exit:1 -o file:expout -e empty kyua test cat >expout <Kyuafile <Kyuafile <Kyuafile <Kyuafile <subdir/Kyuafile <expout < passed [S.UUUs] first:skip -> skipped: The reason for skipping is this [S.UUUs] subdir/second:pass -> passed [S.UUUs] subdir/second:skip -> skipped: The reason for skipping is this [S.UUUs] subdir/third:pass -> passed [S.UUUs] subdir/third:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 3/6 passed (0 broken, 0 failed, 3 skipped) EOF mkdir build mkdir build/subdir utils_cp_helper simple_all_pass build/first utils_cp_helper simple_all_pass build/subdir/second utils_cp_helper simple_all_pass build/subdir/third atf_check -s exit:0 -o file:expout -e empty kyua test --build-root=build } utils_test_case kyuafile_flag__no_args kyuafile_flag__no_args_body() { utils_install_stable_test_wrapper cat >Kyuafile <myfile <expout < passed [S.UUUs] sometest:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 0 failed, 1 skipped) EOF atf_check -s exit:0 -o file:expout -e empty kyua test -k myfile atf_check -s exit:0 -o file:expout -e empty kyua test --kyuafile=myfile } utils_test_case kyuafile_flag__some_args kyuafile_flag__some_args_body() { utils_install_stable_test_wrapper cat >Kyuafile <myfile <expout < passed [S.UUUs] sometest:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 0 failed, 1 skipped) EOF atf_check -s exit:0 -o file:expout -e empty kyua test -k myfile sometest cat >expout < passed [S.UUUs] sometest:skip -> skipped: The reason for skipping is this [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 1/2 passed (0 broken, 0 failed, 1 skipped) EOF atf_check -s exit:0 -o file:expout -e empty kyua test --kyuafile=myfile \ sometest } utils_test_case interrupt interrupt_body() { cat >Kyuafile <stdout 2>stderr & pid=${!} echo "Kyua subprocess is PID ${pid}" while [ ! 
-f body ]; do echo "Waiting for body to start" sleep 1 done echo "Body started" sleep 1 echo "Sending INT signal to ${pid}" kill -INT ${pid} echo "Waiting for process ${pid} to exit" wait ${pid} ret=${?} sed -e 's,^,kyua stdout:,' stdout sed -e 's,^,kyua stderr:,' stderr echo "Process ${pid} exited" [ ${ret} -ne 0 ] || atf_fail 'No error code reported' [ -f cleanup ] || atf_fail 'Cleanup part not executed after signal' atf_expect_pass atf_check -s exit:0 -o ignore -e empty grep 'Signal caught' stderr atf_check -s exit:0 -o ignore -e empty \ grep 'kyua: E: Interrupted by signal' stderr } utils_test_case exclusive_tests exclusive_tests_body() { cat >Kyuafile <>Kyuafile done utils_cp_helper race . atf_check \ -s exit:0 \ -o match:"100/100 passed" \ kyua \ -v parallelism=20 \ -v test_suites.integration.shared_file="$(pwd)/shared_file" \ test } utils_test_case no_test_program_match no_test_program_match_body() { utils_install_stable_test_wrapper cat >Kyuafile <expout <experr <Kyuafile <expout <experr <experr <subdir/Kyuafile <experr <subdir/Kyuafile <experr <"${HOME}/.kyua/kyua.conf" <Kyuafile <Kyuafile <non_executable # CHECK_STYLE_DISABLE cat >expout < broken: Invalid header for test case list; expecting Content-Type for application/X-atf-tp version 1, got '' [S.UUUs] non_executable:__test_cases_list__ -> broken: Permission denied to run test program [S.UUUs] Results file id is $(utils_results_id) Results saved to $(utils_results_file) 0/2 passed (2 broken, 0 failed, 0 skipped) EOF # CHECK_STYLE_ENABLE atf_check -s exit:1 -o file:expout -e empty kyua test } utils_test_case missing_test_program missing_test_program_body() { cat >Kyuafile <subdir/Kyuafile <subdir/ok # CHECK_STYLE_DISABLE cat >experr < #include ATF_TEST_CASE(get_variable); ATF_TEST_CASE_HEAD(get_variable) { + const char* varname = ::getenv("CONFIG_VAR_NAME"); + if (varname == NULL) { + varname = "the-variable"; + } const char* output = ::getenv("CONFIG_VAR_FILE"); if (output == NULL) { - set_md_var("require.config", "the-variable"); + set_md_var("require.config", varname); } else { - if (has_config_var("the-variable")) { - atf::utils::create_file(output, get_config_var("the-variable") + + if (has_config_var(varname)) { + atf::utils::create_file(output, get_config_var(varname) + std::string("\n")); } else { atf::utils::create_file(output, "NOT DEFINED\n"); } } } ATF_TEST_CASE_BODY(get_variable) { ATF_REQUIRE_EQ("value2", get_config_var("the-variable")); } ATF_INIT_TEST_CASES(tcs) { ATF_ADD_TEST_CASE(tcs, get_variable); }
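// Editorial aside (not part of the patch): the final hunk above makes the helper
// honour CONFIG_VAR_NAME and fall back to "the-variable" when it is unset.  A
// standalone illustration of that getenv-with-default pattern; main() and the
// printed message are illustrative, the environment variable name and default are
// the ones from the hunk.

#include <cstdlib>
#include <iostream>

int
main(void)
{
    const char* varname = std::getenv("CONFIG_VAR_NAME");
    if (varname == NULL)
        varname = "the-variable";         // Same default the test used before the change.
    std::cout << "will require config variable: " << varname << '\n';
    return EXIT_SUCCESS;
}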