// Copyright 2011 The Kyua Authors.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
//   notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
//   notice, this list of conditions and the following disclaimer in the
//   documentation and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors
//   may be used to endorse or promote products derived from this software
//   without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "cli/cmd_report.hpp"

#include <algorithm>
#include <cstddef>
#include <cstdlib>
#include <map>
#include <ostream>
#include <string>
#include <vector>

#include "cli/common.ipp"
#include "drivers/scan_results.hpp"
#include "model/context.hpp"
#include "model/metadata.hpp"
#include "model/test_case.hpp"
#include "model/test_program.hpp"
#include "model/test_result.hpp"
#include "model/types.hpp"
#include "store/layout.hpp"
#include "store/read_transaction.hpp"
#include "utils/cmdline/exceptions.hpp"
#include "utils/cmdline/options.hpp"
#include "utils/cmdline/parser.ipp"
#include "utils/cmdline/ui.hpp"
#include "utils/datetime.hpp"
#include "utils/defs.hpp"
#include "utils/format/macros.hpp"
#include "utils/fs/path.hpp"
#include "utils/optional.ipp"
#include "utils/sanity.hpp"
#include "utils/stream.hpp"
#include "utils/text/operations.ipp"

namespace cmdline = utils::cmdline;
namespace config = utils::config;
namespace datetime = utils::datetime;
namespace fs = utils::fs;
namespace layout = store::layout;
namespace text = utils::text;

using cli::cmd_report;
using utils::optional;


namespace {


/// Generates a plain-text report intended to be printed to the console.
class report_console_hooks : public drivers::scan_results::base_hooks {
    /// Stream to which to write the report.
    std::ostream& _output;

    /// Whether to include details in the report or not.
    const bool _verbose;

    /// Collection of result types to include in the report.
    const cli::result_types& _results_filters;

    /// Path to the results file being read.
    const fs::path& _results_file;

    /// The start time of the first test.
    optional< utils::datetime::timestamp > _start_time;

    /// The end time of the last test.
    optional< utils::datetime::timestamp > _end_time;

    /// The total run time of the tests.  Note that we cannot compute this by
    /// subtracting _start_time from _end_time because tests may run in
    /// parallel: e.g. two tests that each take 10 seconds but run concurrently
    /// accumulate 20 seconds of run time in roughly 10 seconds of wall-clock
    /// time.
    utils::datetime::delta _runtime;

    /// Representation of a single result.
    struct result_data {
        /// The relative path to the test program.
        utils::fs::path binary_path;

        /// The name of the test case.
        std::string test_case_name;

        /// The result of the test case.
        model::test_result result;

        /// The duration of the test case execution.
        utils::datetime::delta duration;

        /// Constructs a new result_data object.
        ///
        /// \param binary_path_ The relative path to the test program.
        /// \param test_case_name_ The name of the test case.
        /// \param result_ The result of the test case.
        /// \param duration_ The duration of the test case execution.
        result_data(const utils::fs::path& binary_path_,
                    const std::string& test_case_name_,
                    const model::test_result& result_,
                    const utils::datetime::delta& duration_) :
            binary_path(binary_path_), test_case_name(test_case_name_),
            result(result_), duration(duration_)
        {
        }
    };

    /// Results received, broken down by their type.
    ///
    /// Note that this may not include all results, as keeping the whole list in
    /// memory may be too much.
    std::map< model::test_result_type, std::vector< result_data > > _results;

    /// Pretty-prints the value of an environment variable.
    ///
    /// \param indent Prefix for the lines to print.  Continuation lines
    ///     use this indentation twice.
    /// \param name Name of the variable.
    /// \param value Value of the variable.  Can have newlines.
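    ///
    /// For example (illustrative values only), with an indent of four spaces,
    /// a hypothetical variable VAR whose value spans two lines is printed as:
    ///
    ///     VAR=first line
    ///         second line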
    void
    print_env_var(const char* indent, const std::string& name,
                  const std::string& value)
    {
        const std::vector< std::string > lines = text::split(value, '\n');
        if (lines.empty()) {
            _output << F("%s%s=\n") % indent % name;
        } else {
            _output << F("%s%s=%s\n") % indent % name % lines[0];
            for (std::vector< std::string >::size_type i = 1;
                 i < lines.size(); ++i) {
                _output << F("%s%s%s\n") % indent % indent % lines[i];
            }
        }
    }

    /// Prints the execution context to the output.
    ///
    /// \param context The context to dump.
    void
    print_context(const model::context& context)
    {
        _output << "===> Execution context\n";

        _output << F("Current directory: %s\n") % context.cwd();
        const std::map< std::string, std::string >& env = context.env();
        if (env.empty())
            _output << "No environment variables recorded\n";
        else {
            _output << "Environment variables:\n";
            for (std::map< std::string, std::string >::const_iterator
                     iter = env.begin(); iter != env.end(); ++iter) {
                print_env_var("    ", (*iter).first, (*iter).second);
            }
        }
    }

    /// Dumps a detailed view of the test case.
    ///
    /// \param result_iter Results iterator pointing at the test case to be
    ///     dumped.
    void
    print_test_case_and_result(const store::results_iterator& result_iter)
    {
        const model::test_case& test_case =
            result_iter.test_program()->find(result_iter.test_case_name());
        const model::properties_map props =
            test_case.get_metadata().to_properties();

        _output << F("===> %s:%s\n") %
            result_iter.test_program()->relative_path() %
            result_iter.test_case_name();
        _output << F("Result:     %s\n") %
            cli::format_result(result_iter.result());
        _output << F("Start time: %s\n") %
            result_iter.start_time().to_iso8601_in_utc();
        _output << F("End time:   %s\n") %
            result_iter.end_time().to_iso8601_in_utc();
        _output << F("Duration:   %s\n") %
            cli::format_delta(result_iter.end_time() -
                              result_iter.start_time());

        _output << "\n";
        _output << "Metadata:\n";
        for (model::properties_map::const_iterator iter = props.begin();
             iter != props.end(); ++iter) {
            if ((*iter).second.empty()) {
                _output << F("    %s is empty\n") % (*iter).first;
            } else {
                _output << F("    %s = %s\n") % (*iter).first % (*iter).second;
            }
        }

        const std::string stdout_contents = result_iter.stdout_contents();
        if (!stdout_contents.empty()) {
            _output << "\n"
                    << "Standard output:\n"
                    << stdout_contents;
        }

        const std::string stderr_contents = result_iter.stderr_contents();
        if (!stderr_contents.empty()) {
            _output << "\n"
                    << "Standard error:\n"
                    << stderr_contents;
        }
    }

    /// Counts how many results of a given type have been received.
    ///
    /// \param type Test result type to count results for.
    ///
    /// \return The number of test results with \p type.
    std::size_t
    count_results(const model::test_result_type type)
    {
        const std::map< model::test_result_type,
                        std::vector< result_data > >::const_iterator iter =
            _results.find(type);
        if (iter == _results.end())
            return 0;
        else
            return (*iter).second.size();
    }

    /// Prints a set of results.
    ///
    /// \param type Test result type to print results for.
    /// \param title Title used when printing results.
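    ///
    /// Each result is printed on a single line of the form
    /// "<binary_path>:<test_case_name>  ->  <result>  [<duration>]", where the
    /// last two fields are rendered by cli::format_result() and
    /// cli::format_delta() respectively.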
    void
    print_results(const model::test_result_type type,
                  const char* title)
    {
        const std::map< model::test_result_type,
                        std::vector< result_data > >::const_iterator iter2 =
            _results.find(type);
        if (iter2 == _results.end())
            return;
        const std::vector< result_data >& all = (*iter2).second;

        _output << F("===> %s\n") % title;
        for (std::vector< result_data >::const_iterator iter = all.begin();
             iter != all.end(); ++iter) {
            _output << F("%s:%s  ->  %s  [%s]\n") % (*iter).binary_path %
                (*iter).test_case_name %
                cli::format_result((*iter).result) %
                cli::format_delta((*iter).duration);
        }
    }

public:
    /// Constructor for the hooks.
    ///
    /// \param [out] output_ Stream to which to write the report.
    /// \param verbose_ Whether to include details in the output or not.
    /// \param results_filters_ The result types to include in the report.
    ///     Cannot be empty.
    /// \param results_file_ Path to the results file being read.
    report_console_hooks(std::ostream& output_, const bool verbose_,
                         const cli::result_types& results_filters_,
                         const fs::path& results_file_) :
        _output(output_),
        _verbose(verbose_),
        _results_filters(results_filters_),
        _results_file(results_file_)
    {
        PRE(!results_filters_.empty());
    }

    /// Callback executed when the context is loaded.
    ///
    /// \param context The context loaded from the database.
    void
    got_context(const model::context& context)
    {
        if (_verbose)
            print_context(context);
    }

    /// Callback executed when a test result is found.
    ///
    /// \param iter Container for the test result's data.
    void
    got_result(store::results_iterator& iter)
    {
        if (!_start_time || _start_time.get() > iter.start_time())
            _start_time = iter.start_time();
        if (!_end_time || _end_time.get() < iter.end_time())
            _end_time = iter.end_time();

        const datetime::delta duration = iter.end_time() - iter.start_time();

        _runtime += duration;
        const model::test_result result = iter.result();
        _results[result.type()].push_back(
            result_data(iter.test_program()->relative_path(),
                        iter.test_case_name(), result, duration));

        if (_verbose) {
            // TODO(jmmv): _results_filters is a list and is small enough for
            // std::find to not be an expensive operation here (probably).  But
            // we should be using a std::set instead.
            if (std::find(_results_filters.begin(), _results_filters.end(),
                          result.type()) != _results_filters.end()) {
                print_test_case_and_result(iter);
            }
        }
    }

    /// Callback executed when the scan finishes; prints the tests summary.
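    ///
    /// The summary consists of one "===> <title>" section per result type
    /// selected by the results filters, followed by a "===> Summary" section
    /// with the results file path, the per-type test case counts, the start
    /// and end times (verbose mode only) and the accumulated run time.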
    void
    end(const drivers::scan_results::result& /* r */)
    {
        typedef std::map< model::test_result_type, const char* > types_map;

        types_map titles;
        titles[model::test_result_broken] = "Broken tests";
        titles[model::test_result_expected_failure] = "Expected failures";
        titles[model::test_result_failed] = "Failed tests";
        titles[model::test_result_passed] = "Passed tests";
        titles[model::test_result_skipped] = "Skipped tests";

        for (cli::result_types::const_iterator iter = _results_filters.begin();
             iter != _results_filters.end(); ++iter) {
            const types_map::const_iterator match = titles.find(*iter);
            INV_MSG(match != titles.end(), "Conditional does not match user "
                    "input validation in parse_types()");
            print_results((*match).first, (*match).second);
        }

        const std::size_t broken = count_results(model::test_result_broken);
        const std::size_t failed = count_results(model::test_result_failed);
        const std::size_t passed = count_results(model::test_result_passed);
        const std::size_t skipped = count_results(model::test_result_skipped);
        const std::size_t xfail = count_results(
            model::test_result_expected_failure);
        const std::size_t total = broken + failed + passed + skipped + xfail;

        _output << "===> Summary\n";
        _output << F("Results read from %s\n") % _results_file;
        _output << F("Test cases: %s total, %s skipped, %s expected failures, "
                     "%s broken, %s failed\n") %
            total % skipped % xfail % broken % failed;
        if (_verbose && _start_time) {
            INV(_end_time);
            _output << F("Start time: %s\n") %
                    _start_time.get().to_iso8601_in_utc();
            _output << F("End time:   %s\n") %
                    _end_time.get().to_iso8601_in_utc();
        }
        _output << F("Total time: %s\n") % cli::format_delta(_runtime);
    }
};


}  // anonymous namespace


/// Default constructor for cmd_report.
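///
/// Registers the options accepted by the "report" subcommand.  As an
/// illustration only, and assuming the standard kyua(1) front-end, an
/// invocation exercising the options defined directly in this file could
/// look like:
///
///     kyua report --verbose --output=/tmp/report.txt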
cmd_report::cmd_report(void) : cli_command(
    "report", "", 0, -1,
    "Generates a report with the results of a test suite run")
{
    add_option(results_file_open_option);
    add_option(cmdline::bool_option(
        "verbose", "Include the execution context and the details of each test "
        "case in the report"));
    add_option(cmdline::path_option("output", "Path to the output file", "path",
                                    "/dev/stdout"));
    add_option(results_filter_option);
}


/// Entry point for the "report" subcommand.
///
/// \param ui Object to interact with the I/O of the program.
/// \param cmdline Representation of the command line to the subcommand.
///
/// \return 0 if everything is OK, 1 if any of the given test case filters did
/// not match any test case or if there is any other problem.
int
cmd_report::run(cmdline::ui* ui,
                const cmdline::parsed_cmdline& cmdline,
                const config::tree& /* user_config */)
{
    std::auto_ptr< std::ostream > output = utils::open_ostream(
        cmdline.get_option< cmdline::path_option >("output"));

    const fs::path results_file = layout::find_results(
        results_file_open(cmdline));

    const result_types types = get_result_types(cmdline);
    report_console_hooks hooks(*output.get(), cmdline.has_option("verbose"),
                               types, results_file);
    const drivers::scan_results::result result = drivers::scan_results::drive(
        results_file, parse_filters(cmdline.arguments()), hooks);

    return report_unused_filters(result.unused_filters, ui) ?
        EXIT_FAILURE : EXIT_SUCCESS;
}