print profile summary and test summary

This commit is contained in:
Victoria Jeffrey 2016-09-14 12:20:32 -04:00
parent 83488cfa9a
commit ecac8ae9cb
8 changed files with 132 additions and 87 deletions

View file

@ -213,13 +213,11 @@ class Inspec::InspecCLI < Inspec::BaseCLI # rubocop:disable Metrics/ClassLength
def run_command(opts)
runner = Inspec::Runner.new(opts)
ctx = runner.create_context(opts)
res = ctx.load(opts[:command])
res = runner.eval_with_virtual_profile(opts[:command])
runner.load
return :ruby_eval, res if ctx.rules.empty?
runner.register_rules(ctx)
return :rspec_run, runner.run # rubocop:disable Style/RedundantReturn
return :ruby_eval, res if runner.all_rules.empty?
return :rspec_run, runner.run_tests # rubocop:disable Style/RedundantReturn
end
end

View file

@ -142,8 +142,12 @@ module Inspec
"Inspec::Profile<#{name}>"
end
def info
res = params.dup
# return info using uncached params
def info!
info(load_params.dup)
end
def info(res = params.dup)
# add information about the controls
controls = res[:controls].map do |id, rule|
next if id.to_s.empty?

View file

@ -17,7 +17,8 @@ module Inspec
'attributes' => attributes })
end
attr_reader :attributes, :rules, :profile_id, :resource_registry
attr_reader :attributes, :profile_id, :resource_registry
attr_accessor :rules
def initialize(profile_id, backend, conf)
if backend.nil?
fail 'ProfileContext is initiated with a backend == nil. ' \
@ -132,7 +133,12 @@ module Inspec
def register_rule(r)
# get the full ID
r.instance_variable_set(:@__file, current_load[:file])
file = if @current_load.nil?
'unknown'
else
@current_load[:file] || 'unknown'
end
r.instance_variable_set(:@__file, file)
r.instance_variable_set(:@__group_title, current_load[:title])
# add the rule to the registry

View file

@ -36,11 +36,8 @@ class InspecRspecMiniJson < RSpec::Core::Formatters::JsonFormatter
# Called after stop has been called and the run is complete.
def dump_summary(summary)
@output_hash[:version] = Inspec::VERSION
@output_hash[:summary] = {
@output_hash[:statistics] = {
duration: summary.duration,
example_count: summary.example_count,
failure_count: summary.failure_count,
skip_count: summary.pending_count,
}
end
@ -86,7 +83,7 @@ class InspecRspecMiniJson < RSpec::Core::Formatters::JsonFormatter
end
end
class InspecRspecJson < InspecRspecMiniJson
class InspecRspecJson < InspecRspecMiniJson # rubocop:disable Metrics/ClassLength
RSpec::Core::Formatters.register self, :start, :stop, :dump_summary
attr_writer :backend
@ -108,7 +105,7 @@ class InspecRspecJson < InspecRspecMiniJson
def start(_notification)
# Note that the default profile may have no name - therefore
# the hash may have a valid nil => entry.
@profiles_info ||= Hash[@profiles.map { |x| profile_info(x) }]
@profiles_info = Hash[@profiles.map { |x| profile_info(x) }]
end
def dump_one_example(example, control)
@ -133,20 +130,59 @@ class InspecRspecJson < InspecRspecMiniJson
@output_hash[:other_checks] = missing
end
def dump_summary(summary)
super(summary)
def controls_summary
failed = 0
skipped = 0
passed = 0
critical = 0
major = 0
minor = 0
@control_tests.each do |control|
next if control[:id].start_with? '(generated from '
next unless control[:results]
if control[:results].any? { |r| r[:status] == 'failed' }
failed += 1
if control[:impact] >= 0.7
critical += 1
elsif control[:impact] >= 0.4
major += 1
else
minor += 1
end
elsif control[:results].any? { |r| r[:status] == 'skipped' }
skipped += 1
else
passed += 1
end
end
total = failed + passed + skipped
{ 'total' => total,
'failed' => {
'total' => failed,
'critical' => critical,
'major' => major,
'minor' => minor,
},
'skipped' => skipped,
'passed' => passed }
end
def tests_summary
total = 0
failed = 0
skipped = 0
passed = 0
@profiles_info.each do |_name, profile|
total += profile[:controls].length
profile[:controls].each do |_control_name, control|
next unless control[:results]
if control[:results].any? { |r| r[:status] == 'failed' }
all_tests = @anonymous_tests + @control_tests
all_tests.each do |control|
next unless control[:results]
control[:results].each do |result|
if result[:status] == 'failed'
failed += 1
elsif control[:results].any? { |r| r[:status] == 'skipped' }
elsif result[:status] == 'skipped'
skipped += 1
else
passed += 1
@ -154,13 +190,13 @@ class InspecRspecJson < InspecRspecMiniJson
end
end
# TODO: provide this information in the output
{ 'total' => total, 'failed' => failed, 'skipped' => skipped, 'passed' => passed }
end
private
def profile_info(profile)
info = profile.info.dup
info = profile.info!.dup
[info[:name], info]
end
@ -246,10 +282,11 @@ class InspecRspecCli < InspecRspecJson # rubocop:disable Metrics/ClassLength
@current_profile = nil
@missing_controls = []
@anonymous_tests = []
@control_tests = []
super(*args)
end
def close(_notification)
def close(_notification) # rubocop:disable Metrics/AbcSize
flush_current_control
output.puts('') unless @current_control.nil?
print_tests
@ -266,13 +303,20 @@ class InspecRspecCli < InspecRspecJson # rubocop:disable Metrics/ClassLength
output.puts('')
end
res = @output_hash[:summary]
passed = res[:example_count] - res[:failure_count] - res[:skip_count]
s = format('Summary: %s%d successful%s, %s%d failures%s, %s%d skipped%s',
COLORS['passed'], passed, COLORS['reset'],
COLORS['failed'], res[:failure_count], COLORS['reset'],
COLORS['skipped'], res[:skip_count], COLORS['reset'])
output.puts(s)
controls_res = controls_summary
tests_res = tests_summary
s = format('Profile Summary: %s%d successful%s, %s%d failures%s, %s%d skipped%s',
COLORS['passed'], controls_res['passed'], COLORS['reset'],
COLORS['failed'], controls_res['failed']['total'], COLORS['reset'],
COLORS['skipped'], controls_res['skipped'], COLORS['reset'])
output.puts(s) if controls_res['total'] > 0
s = format('Test Summary: %s%d successful%s, %s%d failures%s, %s%d skipped%s',
COLORS['passed'], tests_res['passed'], COLORS['reset'],
COLORS['failed'], tests_res['failed'], COLORS['reset'],
COLORS['skipped'], tests_res['skipped'], COLORS['reset'])
output.puts(s) if !@anonymous_tests.empty? || @current_control.nil?
end
private
@ -425,6 +469,7 @@ class InspecRspecCli < InspecRspecJson # rubocop:disable Metrics/ClassLength
if control_id.start_with? '(generated from '
@anonymous_tests.push(@current_control)
else
@control_tests.push(@current_control)
print_line(
color: @colors[summary_indicator] || '',
indicator: @indicators[summary_indicator] || @indicators['unknown'],

View file

@ -32,7 +32,6 @@ module Inspec
extend Forwardable
def_delegator :@test_collector, :report
def_delegator :@test_collector, :reset
attr_reader :backend, :rules, :attributes
def initialize(conf = {})
@ -64,8 +63,15 @@ module Inspec
@test_collector.backend = @backend
end
def run(with = nil)
Inspec::Log.debug "Starting run with targets: #{@target_profiles.map(&:to_s)}"
def reset
@test_collector.reset
@target_profiles.each do |profile|
profile.runner_context.rules = {}
end
@rules = []
end
def load
all_controls = []
@target_profiles.each do |profile|
@ -79,7 +85,15 @@ module Inspec
all_controls.each do |rule|
register_rule(rule)
end
end
def run(with = nil)
Inspec::Log.debug "Starting run with targets: #{@target_profiles.map(&:to_s)}"
load
run_tests(with)
end
def run_tests(with = nil)
@test_collector.run(with)
end
@ -133,20 +147,6 @@ module Inspec
@target_profiles << profile if supports_profile?(profile)
end
#
# This is used by inspec-shell and inspec-detect. This should
# probably be cleaned up a bit.
#
# @params [Hash] Options
# @returns [Inspec::ProfileContext]
#
def create_context(options = {})
meta = options[:metadata]
profile_id = nil
profile_id = meta.params[:name] unless meta.nil?
Inspec::ProfileContext.new(profile_id, @backend, @conf.merge(options))
end
def supports_profile?(profile)
return true if @ignore_supports
@ -180,6 +180,14 @@ module Inspec
new_tests
end
def eval_with_virtual_profile(command)
require 'fetchers/mock'
add_target({ 'inspec.yml' => 'name: inspec-shell' })
our_profile = @target_profiles.first
ctx = our_profile.runner_context
ctx.load(command)
end
private
def block_source_info(block)

View file

@ -15,18 +15,13 @@ module Inspec
end
def start
# Create an in-memory empty runner so that we can add tests to it later.
# This context lasts for the duration of this "start" method call/pry
# session.
@ctx = @runner.create_context
configure_pry
# This will hold a single evaluation binding context as opened within
# the instance_eval context of the anonymous class that the profile
# context creates to evaluate each individual test file. We want to
# pretend like we are constantly appending to the same file and want
# to capture the local variable context from inside said class.
@ctx_binding = @ctx.load('binding')
@ctx_binding = @runner.eval_with_virtual_profile('binding')
configure_pry
@ctx_binding.pry
end
@ -51,26 +46,20 @@ module Inspec
# Track the rules currently registered and what their merge count is.
Pry.hooks.add_hook(:before_eval, 'inspec_before_eval') do
@current_eval_rules = @ctx.rules.each_with_object({}) do |(rule_id, rule), h|
h[rule_id] = Inspec::Rule.merge_count(rule)
end
@runner.reset
end
# After pry has evaluated a command within the binding context of a
# test file, register all the rules it discovered.
Pry.hooks.add_hook(:after_eval, 'inspec_after_eval') do
@current_eval_new_tests =
@runner.register_rules(@ctx) do |rule_id, rule|
@current_eval_rules[rule_id] != Inspec::Rule.merge_count(rule)
end
@runner.run if @current_eval_new_tests
@runner.load
@runner.run_tests if !@runner.all_rules.empty?
end
# Don't print out control class inspection when the user uses DSL methods.
# Instead produce a result of evaluating their control.
Pry.config.print = proc do |_output_, value, pry|
next if @current_eval_new_tests
next if !@runner.all_rules.empty?
pry.pager.open do |pager|
pager.print pry.config.output_prefix
Pry::ColorPrinter.pp(value, pager, Pry::Terminal.width! - 1)

View file

@ -18,7 +18,8 @@ describe 'inspec exec' do
\e[37m gordon-1.0: Verify the version number of Gordon (1 skipped)\e[0m
\e[37m Can't find file \"/tmp/gordon/config.yaml\"\e[0m
"
stdout.must_include "\nSummary: \e[32m4 successful\e[0m, \e[31m0 failures\e[0m, \e[37m1 skipped\e[0m\n"
stdout.must_include "\nProfile Summary: \e[32m2 successful\e[0m, \e[31m0 failures\e[0m, \e[37m1 skipped\e[0m"
stdout.must_include "\nTest Summary: \e[32m4 successful\e[0m, \e[31m0 failures\e[0m, \e[37m1 skipped\e[0m\n"
end
it 'executes a minimum metadata-only profile' do
@ -33,7 +34,7 @@ Target: local://
No tests executed.\e[0m
Summary: \e[32m0 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
Test Summary: \e[32m0 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
"
end
@ -49,7 +50,7 @@ Target: local://
No tests executed.\e[0m
Summary: \e[32m0 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
Test Summary: \e[32m0 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
"
end
@ -78,14 +79,15 @@ Summary: \e[32m0 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
out = inspec('exec ' + example_profile + ' --controls tmp-1.0')
out.stderr.must_equal ''
out.exit_status.must_equal 0
out.stdout.must_include "\nSummary: \e[32m1 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m\n"
out.stdout.must_include "\nProfile Summary: \e[32m1 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m\n"
end
it 'can execute a simple file with the default formatter' do
out = inspec('exec ' + example_control)
out.stderr.must_equal ''
out.exit_status.must_equal 0
out.stdout.must_include "\nSummary: \e[32m2 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m\n"
out.stdout.must_include "\nProfile Summary: \e[32m1 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m\n"
out.stdout.must_include "\nTest Summary: \e[32m2 successful\e[0m, \e[31m0 failures\e[0m"
end
describe 'with a profile that is not supported on this OS/platform' do
@ -137,7 +139,8 @@ Target: local://
File /tmp
\e[32m \xE2\x9C\x94 should be directory\e[0m
Summary: \e[32m2 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
Profile Summary: \e[32m1 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
Test Summary: \e[32m2 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
"
end
end

View file

@ -63,9 +63,7 @@ describe 'inspec shell tests' do
j.keys.must_include 'version'
j.keys.must_include 'profiles'
j.keys.must_include 'other_checks'
j.keys.must_include 'summary'
j['summary']['example_count'].must_equal 1
j['summary']['failure_count'].must_equal 0
j.keys.must_include 'statistics'
end
it 'runs anonymous tests that succeed' do
@ -80,9 +78,7 @@ describe 'inspec shell tests' do
j.keys.must_include 'version'
j.keys.must_include 'profiles'
j.keys.must_include 'other_checks'
j.keys.must_include 'summary'
j['summary']['example_count'].must_equal 1
j['summary']['failure_count'].must_equal 1
j.keys.must_include 'statistics'
end
it 'runs anonymous tests that fail' do
@ -97,9 +93,7 @@ describe 'inspec shell tests' do
j.keys.must_include 'version'
j.keys.must_include 'profiles'
j.keys.must_include 'other_checks'
j.keys.must_include 'summary'
j['summary']['example_count'].must_equal 1
j['summary']['failure_count'].must_equal 0
j.keys.must_include 'statistics'
end
it 'runs controls with tests' do
@ -114,14 +108,12 @@ describe 'inspec shell tests' do
j.keys.must_include 'version'
j.keys.must_include 'profiles'
j.keys.must_include 'other_checks'
j.keys.must_include 'summary'
j['summary']['example_count'].must_equal 2
j['summary']['failure_count'].must_equal 1
j.keys.must_include 'statistics'
end
it 'runs controls with multiple tests' do
out = do_shell_c("control \"test\" do describe file(\"#{__FILE__}\") do it { should exist } end; describe file(\"foo/bar/baz\") do it { should exist } end end", 1)
out.stdout.must_include '1 successful'
out.stdout.must_include '0 successful'
out.stdout.must_include '1 failures'
end
end
@ -178,7 +170,7 @@ describe 'inspec shell tests' do
it 'runs controls with multiple tests' do
out = do_shell("control \"test\" do describe file(\"#{__FILE__}\") do it { should exist } end; describe file(\"foo/bar/baz\") do it { should exist } end end")
out.stdout.must_include '1 successful'
out.stdout.must_include '0 successful'
out.stdout.must_include '1 failures'
end