Mirror of https://github.com/inspec/inspec, synced 2024-11-24 05:33:17 +00:00

Merge pull request #1034 from chef/ssd/git-fetcher: Add GitFetcher and rework Fetchers+SourceReaders

Commit 1c10d7fbb4, 40 changed files with 1158 additions and 670 deletions
|
@ -4,7 +4,6 @@
|
|||
|
||||
require 'uri'
|
||||
require 'inspec/fetcher'
|
||||
require 'fetchers/url'
|
||||
|
||||
# InSpec Target Helper for Chef Compliance
|
||||
# reuses UrlHelper, but it knows the target server and the access token already
|
||||
|
@ -14,11 +13,14 @@ module Compliance
|
|||
name 'compliance'
|
||||
priority 500
|
||||
|
||||
def self.resolve(target, _opts = {})
|
||||
return nil unless target.is_a?(String)
|
||||
# check for local scheme compliance://
|
||||
uri = URI(target)
|
||||
return nil unless URI(uri).scheme == 'compliance'
|
||||
def self.resolve(target)
|
||||
uri = if target.is_a?(String) && URI(target).scheme == 'compliance'
|
||||
URI(target)
|
||||
elsif target.respond_to?(:key?) && target.key?(:compliance)
|
||||
URI("compliance://#{target[:compliance]}")
|
||||
end
|
||||
|
||||
return nil if uri.nil?
|
||||
|
||||
# check if we have a compliance token
|
||||
config = Compliance::Configuration.new
|
||||
|
@ -27,18 +29,33 @@ module Compliance
|
|||
# verifies that the target e.g base/ssh exists
|
||||
profile = uri.host + uri.path
|
||||
Compliance::API.exist?(config, profile)
|
||||
super(target_url(config, profile), config)
|
||||
new(target_url(profile, config), config)
|
||||
rescue URI::Error => _e
|
||||
nil
|
||||
end
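For orientation (not code from this change): with the reworked resolve, the Chef Compliance fetcher accepts either a compliance:// URL string or a hash carrying a :compliance key. A minimal, hypothetical sketch, assuming inspec's lib directory is on the load path, a token has been stored via Compliance::Configuration, and 'base/ssh' is a placeholder profile name:

    require 'inspec/fetcher'

    # Both forms go through the fetcher registry; each returns a fetcher
    # instance, or nil when the scheme/key does not match or no token is set.
    by_url  = Inspec::Fetcher.resolve('compliance://base/ssh')
    by_hash = Inspec::Fetcher.resolve(compliance: 'base/ssh')

    # The lockfile deliberately records a supermarket: source (see the comment
    # above) so later runs go back through the Compliance API handling.
    by_hash.resolved_source if by_hash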
|
||||
|
||||
def self.target_url(config, profile)
|
||||
def self.target_url(profile, config)
|
||||
owner, id = profile.split('/')
|
||||
"#{config['server']}/owners/#{owner}/compliance/#{id}/tar"
|
||||
end
|
||||
|
||||
#
|
||||
# We want to save supermarket: in the lockfile rather than url: to
|
||||
# make sure we go back through the ComplianceAPI handling.
|
||||
#
|
||||
def resolved_source
|
||||
{ supermarket: supermarket_profile_name }
|
||||
end
|
||||
|
||||
def to_s
|
||||
'Chef Compliance Profile Loader'
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def supermarket_profile_name
|
||||
m = %r{^#{@config['server']}/owners/(?<owner>[^/]+)/compliance/(?<id>[^/]+)/tar$}.match(@target)
|
||||
"#{m[:owner]}/#{m[:id]}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -9,18 +9,14 @@ module Supermarket
|
|||
class API
|
||||
SUPERMARKET_URL = 'https://supermarket.chef.io'.freeze
|
||||
|
||||
def self.supermarket_url
|
||||
SUPERMARKET_URL
|
||||
end
|
||||
|
||||
# displays a list of profiles
|
||||
def self.profiles
|
||||
url = "#{SUPERMARKET_URL}/api/v1/tools-search"
|
||||
def self.profiles(supermarket_url = SUPERMARKET_URL)
|
||||
url = "#{supermarket_url}/api/v1/tools-search"
|
||||
_success, data = get(url, { q: 'compliance_profile' })
|
||||
if !data.nil?
|
||||
profiles = JSON.parse(data)
|
||||
profiles['items'].map { |x|
|
||||
m = %r{^#{Supermarket::API.supermarket_url}/api/v1/tools/(?<slug>[\w-]+)(/)?$}.match(x['tool'])
|
||||
m = %r{^#{supermarket_url}/api/v1/tools/(?<slug>[\w-]+)(/)?$}.match(x['tool'])
|
||||
x['slug'] = m[:slug]
|
||||
x
|
||||
}
|
||||
|
@ -37,10 +33,10 @@ module Supermarket
|
|||
end
|
||||
|
||||
# displays profile infos
|
||||
def self.info(profile)
|
||||
def self.info(profile, supermarket_url = SUPERMARKET_URL)
|
||||
_tool_owner, tool_name = profile_name("supermarket://#{profile}")
|
||||
return if tool_name.nil? || tool_name.empty?
|
||||
url = "#{SUPERMARKET_URL}/api/v1/tools/#{tool_name}"
|
||||
url = "#{supermarket_url}/api/v1/tools/#{tool_name}"
|
||||
_success, data = get(url, {})
|
||||
JSON.parse(data) if !data.nil?
|
||||
rescue JSON::ParserError
|
||||
|
@ -48,24 +44,24 @@ module Supermarket
|
|||
end
|
||||
|
||||
# compares a profile with the supermarket tool info
|
||||
def self.same?(profile, supermarket_tool)
|
||||
def self.same?(profile, supermarket_tool, supermarket_url = SUPERMARKET_URL)
|
||||
tool_owner, tool_name = profile_name(profile)
|
||||
tool = "#{SUPERMARKET_URL}/api/v1/tools/#{tool_name}"
|
||||
tool = "#{supermarket_url}/api/v1/tools/#{tool_name}"
|
||||
supermarket_tool['tool_owner'] == tool_owner && supermarket_tool['tool'] == tool
|
||||
end
|
||||
|
||||
def self.find(profile)
|
||||
profiles = Supermarket::API.profiles
|
||||
def self.find(profile, supermarket_url)
|
||||
profiles = Supermarket::API.profiles(supermarket_url)
|
||||
if !profiles.empty?
|
||||
index = profiles.index { |t| same?(profile, t) }
|
||||
index = profiles.index { |t| same?(profile, t, supermarket_url) }
|
||||
# return profile or nil
|
||||
profiles[index] if !index.nil? && index >= 0
|
||||
end
|
||||
end
|
||||
|
||||
# verifies that a profile exists
|
||||
def self.exist?(profile)
|
||||
!find(profile).nil?
|
||||
def self.exist?(profile, supermarket_url = SUPERMARKET_URL)
|
||||
!find(profile, supermarket_url).nil?
|
||||
end
|
||||
|
||||
def self.get(url, params)
|
||||
|
|
|
@ -8,16 +8,21 @@ require 'fetchers/url'
|
|||
|
||||
# InSpec Target Helper for Supermarket
|
||||
module Supermarket
|
||||
class Fetcher < Fetchers::Url
|
||||
class Fetcher < Inspec.fetcher(1)
|
||||
name 'supermarket'
|
||||
priority 500
|
||||
|
||||
def self.resolve(target, opts = {})
|
||||
return nil unless target.is_a?(String)
|
||||
return nil unless URI(target).scheme == 'supermarket'
|
||||
return nil unless Supermarket::API.exist?(target)
|
||||
tool_info = Supermarket::API.find(target)
|
||||
super(tool_info['tool_source_url'], opts)
|
||||
supermarket_uri, supermarket_server = if target.is_a?(String) && URI(target).scheme == 'supermarket'
|
||||
[target, Supermarket::API::SUPERMARKET_URL]
|
||||
elsif target.respond_to?(:key?) && target.key?(:supermarket)
|
||||
supermarket_server = target[:supermarket_url] || Supermarket::API::SUPERMARKET_URL
|
||||
["supermarket://#{target[:supermarket]}", supermarket_server]
|
||||
end
|
||||
return nil unless supermarket_uri
|
||||
return nil unless Supermarket::API.exist?(supermarket_uri, supermarket_server)
|
||||
tool_info = Supermarket::API.find(supermarket_uri, supermarket_server)
|
||||
resolve_next(tool_info['tool_source_url'], opts)
|
||||
rescue URI::Error
|
||||
nil
|
||||
end
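For orientation (not code from this change): the supermarket fetcher now also takes a hash form, with an optional :supermarket_url for private Supermarket instances. A hypothetical sketch; the profile name and server URL below are placeholders:

    require 'inspec/fetcher'

    # String form: resolved against the public Supermarket.
    Inspec::Fetcher.resolve('supermarket://dev-sec/ssh-baseline')

    # Hash form: :supermarket_url overrides Supermarket::API::SUPERMARKET_URL.
    Inspec::Fetcher.resolve(
      supermarket: 'dev-sec/ssh-baseline',
      supermarket_url: 'https://supermarket.internal.example.com',
    )
    # Either way, resolve_next hands the tool's tool_source_url to the next
    # fetcher in the chain (typically Fetchers::Url).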
|
||||
|
|
lib/fetchers/git.rb (new file, 162 lines)
@ -0,0 +1,162 @@
|
|||
# encoding: utf-8
|
||||
require 'tmpdir'
|
||||
require 'fileutils'
|
||||
require 'mixlib/shellout'
|
||||
require 'inspec/log'
|
||||
|
||||
module Fetchers
|
||||
#
|
||||
# The git fetcher uses the git binary to fetch remote git sources.
|
||||
# Git-based sources should be specified with the `git:` key in the
|
||||
# source hash. Additionally, we accept `:branch`, `:ref`, and `:tag`
|
||||
# keys to allow users to pin to a particular revision.
|
||||
#
|
||||
# Parts of this class are derived from:
|
||||
#
|
||||
# https://github.com/chef/omnibus/blob/master/lib/omnibus/fetchers/git_fetcher.rb
|
||||
#
|
||||
# which is Copyright 2012-2014 Chef Software, Inc. and offered under
|
||||
# the same Apache 2 software license as inspec.
|
||||
#
|
||||
# Many thanks to the omnibus authors!
|
||||
#
|
||||
# Note that we haven't replicated all of omnibus' features here. If
|
||||
# you got to this file during debugging, you may want to look at the
|
||||
# omnibus source for hints.
|
||||
#
|
||||
class Git < Inspec.fetcher(1)
|
||||
name 'git'
|
||||
priority 200
|
||||
|
||||
def self.resolve(target, opts = {})
|
||||
if target.respond_to?(:has_key?) && target.key?(:git)
|
||||
new(target[:git], opts.merge(target))
|
||||
end
|
||||
end
|
||||
|
||||
def initialize(remote_url, opts = {})
|
||||
@branch = opts[:branch]
|
||||
@tag = opts[:tag]
|
||||
@ref = opts[:ref]
|
||||
@remote_url = remote_url
|
||||
@repo_directory = nil
|
||||
end
|
||||
|
||||
def fetch(dir)
|
||||
@repo_directory = dir
|
||||
if cloned?
|
||||
checkout
|
||||
else
|
||||
Dir.mktmpdir do |tmpdir|
|
||||
checkout(tmpdir)
|
||||
Inspec::Log.debug("Checkout of #{resolved_ref} successful. Moving checkout to #{dir}")
|
||||
FileUtils.cp_r(tmpdir, @repo_directory)
|
||||
end
|
||||
end
|
||||
@repo_directory
|
||||
end
|
||||
|
||||
def archive_path
|
||||
@repo_directory
|
||||
end
|
||||
|
||||
def resolved_source
|
||||
{ git: @remote_url, ref: resolved_ref }
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def resolved_ref
|
||||
@resolved_ref ||= if @ref
|
||||
@ref
|
||||
elsif @branch
|
||||
resolve_ref(@branch)
|
||||
elsif @tag
|
||||
resolve_ref(@tag)
|
||||
else
|
||||
resolve_ref('master')
|
||||
end
|
||||
end
|
||||
|
||||
def resolve_ref(ref_name)
|
||||
cmd = shellout("git ls-remote \"#{@remote_url}\" \"#{ref_name}*\"")
|
||||
ref = parse_ls_remote(cmd.stdout, ref_name)
|
||||
if !ref
|
||||
fail "Unable to resolve #{ref_name} to a specific git commit for #{@remote_url}"
|
||||
end
|
||||
ref
|
||||
end
|
||||
|
||||
#
|
||||
# The following comment is a minor modification of the comment in
|
||||
# the omnibus source for a similar function:
|
||||
#
|
||||
# Dereference annotated tags.
|
||||
#
|
||||
# The +remote_list+ parameter is assumed to look like this:
|
||||
#
|
||||
# a2ed66c01f42514bcab77fd628149eccb4ecee28 refs/tags/rel-0.11.0
|
||||
# f915286abdbc1907878376cce9222ac0b08b12b8 refs/tags/rel-0.11.0^{}
|
||||
#
|
||||
# The SHA with ^{} is the commit pointed to by an annotated
|
||||
# tag. If ref isn't an annotated tag, there will not be a line
|
||||
# with trailing ^{}.
|
||||
#
|
||||
# @param [String] output
|
||||
# output from `git ls-remote origin` command
|
||||
# @param [String] ref_name
|
||||
# the target git ref_name
|
||||
#
|
||||
# @return [String]
|
||||
#
|
||||
def parse_ls_remote(output, ref_name)
|
||||
pairs = output.lines.map { |l| l.chomp.split("\t") }
|
||||
tagged_commit = pairs.find { |m| m[1].end_with?("#{ref_name}^{}") }
|
||||
if tagged_commit
|
||||
tagged_commit.first
|
||||
else
|
||||
found = pairs.find { |m| m[1].end_with?(ref_name.to_s) }
|
||||
if found
|
||||
found.first
|
||||
end
|
||||
end
|
||||
end
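A condensed, standalone copy of the parsing above, runnable on its own; the sample output reuses the SHAs from the comment and is illustrative only:

    output = "a2ed66c01f42514bcab77fd628149eccb4ecee28\trefs/tags/rel-0.11.0\n" \
             "f915286abdbc1907878376cce9222ac0b08b12b8\trefs/tags/rel-0.11.0^{}\n"

    def parse_ls_remote(output, ref_name)
      pairs = output.lines.map { |l| l.chomp.split("\t") }
      # Prefer the dereferenced (^{}) line so annotated tags resolve to commits.
      tagged = pairs.find { |m| m[1].end_with?("#{ref_name}^{}") }
      found = tagged || pairs.find { |m| m[1].end_with?(ref_name.to_s) }
      found && found.first
    end

    parse_ls_remote(output, 'rel-0.11.0')
    # => "f915286abdbc1907878376cce9222ac0b08b12b8" (the commit behind the annotated tag)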
|
||||
|
||||
def cloned?
|
||||
File.directory?(File.join(@repo_directory, '.git'))
|
||||
end
|
||||
|
||||
def clone(dir = @repo_directory)
|
||||
git_cmd("clone #{@remote_url} ./", dir) unless cloned?
|
||||
@repo_directory
|
||||
end
|
||||
|
||||
def checkout(dir = @repo_directory)
|
||||
clone(dir)
|
||||
git_cmd("checkout #{resolved_ref}", dir)
|
||||
@repo_directory
|
||||
end
|
||||
|
||||
def git_cmd(cmd, dir = @repo_directory)
|
||||
cmd = shellout("git #{cmd}", cwd: dir)
|
||||
cmd.error!
|
||||
cmd.status
|
||||
rescue Errno::ENOENT
|
||||
raise 'To use git sources, you must have git installed.'
|
||||
end
|
||||
|
||||
def shellout(cmd, opts = {})
|
||||
Inspec::Log.debug("Running external command: #{cmd} (#{opts})")
|
||||
cmd = Mixlib::ShellOut.new(cmd, opts)
|
||||
cmd.run_command
|
||||
Inspec::Log.debug("External command: completed with exit status: #{cmd.exitstatus}")
|
||||
Inspec::Log.debug('External command: STDOUT BEGIN')
|
||||
Inspec::Log.debug(cmd.stdout)
|
||||
Inspec::Log.debug('External command: STDOUT END')
|
||||
Inspec::Log.debug('External command: STDERR BEGIN')
|
||||
Inspec::Log.debug(cmd.stderr)
|
||||
Inspec::Log.debug('External command: STDERR END')
|
||||
cmd
|
||||
end
|
||||
end
|
||||
end
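A hypothetical usage sketch of the fetcher above (not from the change itself), assuming inspec's lib directory is on the load path; the repository URL and tag are placeholders. Note the pinning precedence: an explicit :ref wins over :branch, which wins over :tag, and master is resolved when none is given.

    require 'inspec/fetcher'

    f = Inspec::Fetcher.resolve(git: 'https://example.com/profiles/ssh.git',
                                tag: 'v1.2.0')
    f.resolved_source
    # => { git: 'https://example.com/profiles/ssh.git', ref: '<exact commit sha>' }
    #    (runs `git ls-remote` to pin the tag to a commit)
    f.fetch('/tmp/ssh-profile')
    # clones into a tmpdir, checks out the resolved ref, then copies the
    # checkout to the given path and returns it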
|
|
@ -7,11 +7,29 @@ module Fetchers
|
|||
name 'local'
|
||||
priority 0
|
||||
|
||||
attr_reader :files
|
||||
|
||||
def self.resolve(target)
|
||||
return nil unless target.is_a?(String)
|
||||
local_path = if target.is_a?(String)
|
||||
resolve_from_string(target)
|
||||
elsif target.is_a?(Hash)
|
||||
resolve_from_hash(target)
|
||||
end
|
||||
|
||||
if local_path
|
||||
new(local_path)
|
||||
end
|
||||
end
|
||||
|
||||
def self.resolve_from_hash(target)
|
||||
if target.key?(:path)
|
||||
local_path = target[:path]
|
||||
if target.key?(:cwd)
|
||||
local_path = File.expand_path(local_path, target[:cwd])
|
||||
end
|
||||
local_path
|
||||
end
|
||||
end
|
||||
|
||||
def self.resolve_from_string(target)
|
||||
# Support "urls" in the form of file://
|
||||
if target.start_with?('file://')
|
||||
target = target.gsub(%r{^file://}, '')
|
||||
|
@ -20,26 +38,25 @@ module Fetchers
|
|||
target = target.tr('\\', '/')
|
||||
end
|
||||
|
||||
if !File.exist?(target)
|
||||
nil
|
||||
else
|
||||
new(target)
|
||||
if File.exist?(target)
|
||||
target
|
||||
end
|
||||
end
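For orientation (not code from this change): the local fetcher now accepts plain paths, file:// URLs, and hashes with :path plus an optional :cwd to expand against. A hypothetical sketch; the paths are placeholders:

    require 'inspec/fetcher'

    Fetchers::Local.resolve('./my-profile')                  # plain path
    Fetchers::Local.resolve('file:///tmp/my-profile')        # file:// URL
    Fetchers::Local.resolve(path: 'my-profile', cwd: '/srv') # expands to /srv/my-profile

    # String targets return nil when the path does not exist; fetch(_path) is a
    # no-op that simply reports the local path as the archive_path.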
|
||||
|
||||
def initialize(target)
|
||||
@target = target
|
||||
if File.file?(target)
|
||||
@files = [target]
|
||||
else
|
||||
@files = Dir[File.join(target, '**', '*')]
|
||||
end
|
||||
end
|
||||
|
||||
def read(file)
|
||||
return nil unless files.include?(file)
|
||||
return nil unless File.file?(file)
|
||||
File.read(file)
|
||||
def fetch(_path)
|
||||
archive_path
|
||||
end
|
||||
|
||||
def archive_path
|
||||
@target
|
||||
end
|
||||
|
||||
def resolved_source
|
||||
{ path: @target }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -16,12 +16,16 @@ module Fetchers
|
|||
@data = data
|
||||
end
|
||||
|
||||
def files
|
||||
@data.keys
|
||||
def fetch(_path)
|
||||
archive_path
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@data[file]
|
||||
def archive_path
|
||||
{ mock: @data }
|
||||
end
|
||||
|
||||
def resolved_source
|
||||
{ mock_fetcher: true }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,53 +0,0 @@
|
|||
# encoding: utf-8
|
||||
# author: Dominik Richter
|
||||
# author: Christoph Hartmann
|
||||
|
||||
require 'rubygems/package'
|
||||
require 'zlib'
|
||||
|
||||
module Fetchers
|
||||
class Tar < Inspec.fetcher(1)
|
||||
name 'tar'
|
||||
priority 100
|
||||
|
||||
attr_reader :files
|
||||
|
||||
def self.resolve(target)
|
||||
unless target.is_a?(String) && File.file?(target) && target.end_with?('.tar.gz', '.tgz')
|
||||
return nil
|
||||
end
|
||||
new(target)
|
||||
end
|
||||
|
||||
def archive_path
|
||||
target
|
||||
end
|
||||
|
||||
def initialize(target)
|
||||
@target = target
|
||||
@contents = {}
|
||||
@files = []
|
||||
Gem::Package::TarReader.new(Zlib::GzipReader.open(@target)) do |tar|
|
||||
@files = tar.map(&:full_name)
|
||||
end
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@contents[file] ||= read_from_tar(file)
|
||||
end
|
||||
|
||||
def read_from_tar(file)
|
||||
return nil unless @files.include?(file)
|
||||
res = nil
|
||||
# NB `TarReader` includes `Enumerable` beginning with Ruby 2.x
|
||||
Gem::Package::TarReader.new(Zlib::GzipReader.open(@target)) do |tar|
|
||||
tar.each do |entry|
|
||||
next unless entry.file? && file == entry.full_name
|
||||
res = entry.read
|
||||
break
|
||||
end
|
||||
end
|
||||
res
|
||||
end
|
||||
end
|
||||
end
|
|
@ -8,21 +8,31 @@ require 'open-uri'
|
|||
|
||||
module Fetchers
|
||||
class Url < Inspec.fetcher(1)
|
||||
MIME_TYPES = {
|
||||
'application/x-zip-compressed' => '.zip',
|
||||
'application/zip' => '.zip',
|
||||
'application/x-gzip' => '.tar.gz',
|
||||
'application/gzip' => '.tar.gz',
|
||||
}.freeze
|
||||
|
||||
name 'url'
|
||||
priority 200
|
||||
|
||||
attr_reader :files
|
||||
|
||||
def self.resolve(target, opts = {})
|
||||
return nil unless target.is_a?(String)
|
||||
if target.is_a?(Hash) && target.key?(:url)
|
||||
resolve_from_string(target[:url], opts)
|
||||
elsif target.is_a?(String)
|
||||
resolve_from_string(target, opts)
|
||||
end
|
||||
end
|
||||
|
||||
def self.resolve_from_string(target, opts)
|
||||
uri = URI.parse(target)
|
||||
return nil if uri.nil? or uri.scheme.nil?
|
||||
return nil unless %w{http https}.include? uri.scheme
|
||||
target = transform(target)
|
||||
# fetch this url and hand it off
|
||||
res = new(target, opts)
|
||||
resolve_next(res.archive.path, res)
|
||||
rescue URI::Error => _e
|
||||
new(target, opts)
|
||||
rescue URI::Error
|
||||
nil
|
||||
end
|
||||
|
||||
|
@ -44,38 +54,51 @@ module Fetchers
|
|||
# https://github.com/hardening-io/tests-os-hardening/tree/48bd4388ddffde68badd83aefa654e7af3231876
|
||||
# is transformed to
|
||||
# https://github.com/hardening-io/tests-os-hardening/archive/48bd4388ddffde68badd83aefa654e7af3231876.tar.gz
|
||||
GITHUB_URL_REGEX = %r{^https?://(www\.)?github\.com/(?<user>[\w-]+)/(?<repo>[\w-]+)(\.git)?(/)?$}
|
||||
GITHUB_URL_WITH_TREE_REGEX = %r{^https?://(www\.)?github\.com/(?<user>[\w-]+)/(?<repo>[\w-]+)/tree/(?<commit>[\w\.]+)(/)?$}
|
||||
def self.transform(target)
|
||||
# support for default github url
|
||||
m = %r{^https?://(www\.)?github\.com/(?<user>[\w-]+)/(?<repo>[\w-]+)(\.git)?(/)?$}.match(target)
|
||||
return "https://github.com/#{m[:user]}/#{m[:repo]}/archive/master.tar.gz" if m
|
||||
|
||||
# support for branch and commit urls
|
||||
m = %r{^https?://(www\.)?github\.com/(?<user>[\w-]+)/(?<repo>[\w-]+)/tree/(?<commit>[\w\.]+)(/)?$}.match(target)
|
||||
return "https://github.com/#{m[:user]}/#{m[:repo]}/archive/#{m[:commit]}.tar.gz" if m
|
||||
|
||||
# if we could not find a match, return the original value
|
||||
target
|
||||
transformed_target = if m = GITHUB_URL_REGEX.match(target) # rubocop:disable Lint/AssignmentInCondition
|
||||
"https://github.com/#{m[:user]}/#{m[:repo]}/archive/master.tar.gz"
|
||||
elsif m = GITHUB_URL_WITH_TREE_REGEX.match(target) # rubocop:disable Lint/AssignmentInCondition
|
||||
"https://github.com/#{m[:user]}/#{m[:repo]}/archive/#{m[:commit]}.tar.gz"
|
||||
end
|
||||
|
||||
MIME_TYPES = {
|
||||
'application/x-zip-compressed' => '.zip',
|
||||
'application/zip' => '.zip',
|
||||
'application/x-gzip' => '.tar.gz',
|
||||
'application/gzip' => '.tar.gz',
|
||||
}.freeze
|
||||
if transformed_target
|
||||
Inspec::Log.warn("URL target #{target} transformed to #{transformed_target}. Consider using the git fetcher")
|
||||
transformed_target
|
||||
else
|
||||
target
|
||||
end
|
||||
end
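A sketch of what the rewrite above produces, assuming the class is loaded; the repository name is a placeholder:

    Fetchers::Url.transform('https://github.com/dev-sec/ssh-baseline')
    # => "https://github.com/dev-sec/ssh-baseline/archive/master.tar.gz"

    Fetchers::Url.transform('https://github.com/dev-sec/ssh-baseline/tree/2.1.0')
    # => "https://github.com/dev-sec/ssh-baseline/archive/2.1.0.tar.gz"

    # Non-GitHub URLs pass through unchanged; transformed URLs additionally log
    # a warning suggesting the git fetcher.
    Fetchers::Url.transform('https://example.com/profile.tar.gz')
    # => "https://example.com/profile.tar.gz"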
|
||||
|
||||
attr_reader :files, :archive_path
|
||||
|
||||
def initialize(url, opts)
|
||||
@target = url
|
||||
@insecure = opts['insecure']
|
||||
@token = opts['token']
|
||||
@config = opts
|
||||
end
|
||||
|
||||
def fetch(path)
|
||||
Inspec::Log.debug("Fetching URL: #{@target}")
|
||||
@archive_path = download_archive(path)
|
||||
end
|
||||
|
||||
def resolved_source
|
||||
{ url: @target }
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# download url into archive using opts,
|
||||
# returns File object and content-type from HTTP headers
|
||||
def self.download_archive(url, opts = {})
|
||||
def download_archive(path)
|
||||
http_opts = {}
|
||||
# http_opts['http_basic_authentication'] = [opts['user'] || '', opts['password'] || ''] if opts['user']
|
||||
http_opts['ssl_verify_mode'.to_sym] = OpenSSL::SSL::VERIFY_NONE if opts['insecure']
|
||||
http_opts['Authorization'] = "Bearer #{opts['token']}" if opts['token']
|
||||
http_opts['ssl_verify_mode'.to_sym] = OpenSSL::SSL::VERIFY_NONE if @insecure
|
||||
http_opts['Authorization'] = "Bearer #{@token}" if @token
|
||||
|
||||
remote = open(
|
||||
url,
|
||||
http_opts,
|
||||
)
|
||||
remote = open(@target, http_opts)
|
||||
|
||||
content_type = remote.meta['content-type']
|
||||
file_type = MIME_TYPES[content_type] ||
|
||||
|
@ -86,25 +109,16 @@ module Fetchers
|
|||
if file_type.nil?
|
||||
fail "Could not determine file type for content type #{content_type}."
|
||||
end
|
||||
|
||||
final_path = "#{path}#{file_type}"
|
||||
# download content
|
||||
archive = Tempfile.new(['inspec-dl-', file_type])
|
||||
archive.binmode
|
||||
archive.write(remote.read)
|
||||
archive.rewind
|
||||
archive.close
|
||||
archive
|
||||
end
|
||||
|
||||
attr_reader :archive
|
||||
|
||||
def initialize(url, opts)
|
||||
@target = url
|
||||
@archive = self.class.download_archive(url, opts)
|
||||
end
|
||||
|
||||
def archive_path
|
||||
@archive.path
|
||||
FileUtils.mv(archive.path, final_path)
|
||||
Inspec::Log.debug("Fetched archive moved to: #{final_path}")
|
||||
final_path
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,49 +0,0 @@
|
|||
# encoding: utf-8
|
||||
# author: Dominik Richter
|
||||
# author: Christoph Hartmann
|
||||
|
||||
require 'zip'
|
||||
|
||||
module Fetchers
|
||||
class Zip < Inspec.fetcher(1)
|
||||
name 'zip'
|
||||
priority 100
|
||||
|
||||
attr_reader :files
|
||||
|
||||
def self.resolve(target)
|
||||
unless target.is_a?(String) && File.file?(target) && target.end_with?('.zip')
|
||||
return nil
|
||||
end
|
||||
new(target)
|
||||
end
|
||||
|
||||
def initialize(target)
|
||||
@target = target
|
||||
@contents = {}
|
||||
@files = []
|
||||
::Zip::InputStream.open(@target) do |io|
|
||||
while (entry = io.get_next_entry)
|
||||
@files.push(entry.name.sub(%r{/+$}, ''))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@contents[file] ||= read_from_zip(file)
|
||||
end
|
||||
|
||||
def read_from_zip(file)
|
||||
return nil unless @files.include?(file)
|
||||
res = nil
|
||||
::Zip::InputStream.open(@target) do |io|
|
||||
while (entry = io.get_next_entry)
|
||||
next unless file == entry.name
|
||||
res = io.read
|
||||
break
|
||||
end
|
||||
end
|
||||
res
|
||||
end
|
||||
end
|
||||
end
|
|
@ -62,7 +62,7 @@ EOF
|
|||
def to_yaml
|
||||
{
|
||||
'lockfile_version' => CURRENT_LOCKFILE_VERSION,
|
||||
'depends' => @deps,
|
||||
'depends' => @deps.map { |i| stringify_keys(i) },
|
||||
}.to_yaml
|
||||
end
|
||||
|
||||
|
@ -88,7 +88,33 @@ EOF
|
|||
end
|
||||
|
||||
def parse_content_hash_0(lockfile_content_hash)
|
||||
@deps = lockfile_content_hash['depends']
|
||||
@deps = if lockfile_content_hash['depends']
|
||||
lockfile_content_hash['depends'].map { |i| symbolize_keys(i) }
|
||||
end
|
||||
end
|
||||
|
||||
def mutate_hash_keys_with(hash, fun)
|
||||
hash.each_with_object({}) do |v, memo|
|
||||
key = fun.call(v[0])
|
||||
value = if v[1].is_a?(Hash)
|
||||
mutate_hash_keys_with(v[1], fun)
|
||||
elsif v[1].is_a?(Array)
|
||||
v[1].map do |i|
|
||||
i.is_a?(Hash) ? mutate_hash_keys_with(i, fun) : i
|
||||
end
|
||||
else
|
||||
v[1]
|
||||
end
|
||||
memo[key] = value
|
||||
end
|
||||
end
|
||||
|
||||
def stringify_keys(hash)
|
||||
mutate_hash_keys_with(hash, proc { |i| i.to_s })
|
||||
end
|
||||
|
||||
def symbolize_keys(hash)
|
||||
mutate_hash_keys_with(hash, proc { |i| i.to_sym })
|
||||
end
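An illustration of the key conversion above (the data shape mirrors the lockfile tests added later in this diff); this is commentary, not code from the change:

    entry = { 'name' => 'foo',
              'resolved_source' => { 'url' => 'http://foo' },
              'dependencies' => [{ 'name' => 'bar' }] }

    # symbolize_keys(entry) walks nested hashes and arrays of hashes:
    #   { name: 'foo', resolved_source: { url: 'http://foo' },
    #     dependencies: [{ name: 'bar' }] }
    # stringify_keys reverses the mapping when the lockfile is written, so
    # to_yaml always emits string keys regardless of how @deps was built.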
|
||||
end
|
||||
end
|
||||
|
|
|
@ -8,7 +8,7 @@ module Inspec
|
|||
# Inspec::Requirement represents a given profile dependency, where
|
||||
# appropriate we delegate to Inspec::Profile directly.
|
||||
#
|
||||
class Requirement # rubocop:disable Metrics/ClassLength
|
||||
class Requirement
|
||||
attr_reader :name, :dep, :cwd, :opts
|
||||
attr_writer :dependencies
|
||||
|
||||
|
@ -20,12 +20,11 @@ module Inspec
|
|||
end
|
||||
|
||||
def self.from_lock_entry(entry, cwd, vendor_index, backend)
|
||||
req = new(entry['name'],
|
||||
entry['version_constraints'],
|
||||
req = new(entry[:name],
|
||||
entry[:version_constraints],
|
||||
vendor_index,
|
||||
cwd,
|
||||
{ url: entry['resolved_source'],
|
||||
backend: backend })
|
||||
entry[:resolved_source].merge(backend: backend))
|
||||
|
||||
locked_deps = []
|
||||
Array(entry['dependencies']).each do |dep_entry|
|
||||
|
@ -59,10 +58,14 @@ module Inspec
|
|||
@dep.match?(name, version)
|
||||
end
|
||||
|
||||
def resolved_source
|
||||
@resolved_source ||= fetcher.resolved_source
|
||||
end
|
||||
|
||||
def to_hash
|
||||
h = {
|
||||
'name' => name,
|
||||
'resolved_source' => source_url,
|
||||
'resolved_source' => resolved_source,
|
||||
'version_constraints' => @version_requirement.to_s,
|
||||
}
|
||||
|
||||
|
@ -70,9 +73,7 @@ module Inspec
|
|||
h['dependencies'] = dependencies.map(&:to_hash)
|
||||
end
|
||||
|
||||
if is_vendored?
|
||||
h['content_hash'] = content_hash
|
||||
end
|
||||
h['content_hash'] = content_hash if content_hash
|
||||
h
|
||||
end
|
||||
|
||||
|
@ -80,50 +81,21 @@ module Inspec
|
|||
@dependencies = dep_array
|
||||
end
|
||||
|
||||
def is_vendored?
|
||||
@vendor_index.exists?(@name, source_url)
|
||||
end
|
||||
|
||||
def content_hash
|
||||
@content_hash ||= begin
|
||||
archive_path = @vendor_index.archive_entry_for(@name, source_url)
|
||||
fail "No vendored archive path for #{self}, cannot take content hash" if archive_path.nil?
|
||||
archive_path = @vendor_index.archive_entry_for(fetcher.cache_key) || fetcher.archive_path
|
||||
if archive_path && File.file?(archive_path)
|
||||
Digest::SHA256.hexdigest File.read(archive_path)
|
||||
end
|
||||
end
|
||||
|
||||
def source_url
|
||||
if opts[:path]
|
||||
"file://#{File.expand_path(opts[:path], @cwd)}"
|
||||
elsif opts[:url]
|
||||
opts[:url]
|
||||
end
|
||||
end
|
||||
|
||||
def local_path
|
||||
@local_path ||= if fetcher.class == Fetchers::Local
|
||||
File.expand_path(fetcher.target, @cwd)
|
||||
else
|
||||
@vendor_index.prefered_entry_for(@name, source_url)
|
||||
end
|
||||
end
|
||||
|
||||
def fetcher
|
||||
@fetcher ||= Inspec::Fetcher.resolve(source_url)
|
||||
@fetcher ||= Inspec::Fetcher.resolve(opts)
|
||||
fail "No fetcher for #{name} (options: #{opts})" if @fetcher.nil?
|
||||
@fetcher
|
||||
end
|
||||
|
||||
def pull
|
||||
# TODO(ssd): Dispatch on the class here is gross. Seems like
|
||||
# Fetcher is missing an API we want.
|
||||
if fetcher.class == Fetchers::Local || @vendor_index.exists?(@name, source_url)
|
||||
local_path
|
||||
else
|
||||
@vendor_index.add(@name, source_url, fetcher.archive_path)
|
||||
end
|
||||
end
|
||||
|
||||
def dependencies
|
||||
@dependencies ||= profile.metadata.dependencies.map do |r|
|
||||
Inspec::Requirement.from_metadata(r, @vendor_index, cwd: @cwd, backend: @backend)
|
||||
|
@ -131,20 +103,17 @@ module Inspec
|
|||
end
|
||||
|
||||
def to_s
|
||||
"#{dep} (#{source_url})"
|
||||
end
|
||||
|
||||
def path
|
||||
@path ||= pull
|
||||
"#{dep} (#{resolved_source})"
|
||||
end
|
||||
|
||||
def profile
|
||||
return nil if path.nil?
|
||||
opts = { backend: @backend }
|
||||
opts = @opts.dup
|
||||
opts[:cache] = @vendor_index
|
||||
opts[:backend] = @backend
|
||||
if !@dependencies.nil?
|
||||
opts[:dependencies] = Inspec::DependencySet.from_array(@dependencies, @cwd, @vendor_index, @backend)
|
||||
end
|
||||
@profile ||= Inspec::Profile.for_target(path, opts)
|
||||
@profile ||= Inspec::Profile.for_target(opts, opts)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -46,17 +46,17 @@ module Inspec
|
|||
path_string + " -> #{dep.name}"
|
||||
end
|
||||
|
||||
if seen_items.key?(dep.source_url)
|
||||
if seen_items.key?(dep.resolved_source)
|
||||
fail Inspec::CyclicDependencyError, "Dependency #{dep} would cause a dependency cycle (#{path_string})"
|
||||
else
|
||||
seen_items[dep.source_url] = true
|
||||
seen_items[dep.resolved_source] = true
|
||||
end
|
||||
|
||||
if !dep.source_satisfies_spec?
|
||||
fail Inspec::UnsatisfiedVersionSpecification, "The profile #{dep.name} from #{dep.source_url} has a version #{dep.source_version} which doesn't match #{dep.required_version}"
|
||||
fail Inspec::UnsatisfiedVersionSpecification, "The profile #{dep.name} from #{dep.resolved_source} has a version #{dep.source_version} which doesn't match #{dep.required_version}"
|
||||
end
|
||||
|
||||
Inspec::Log.debug("Adding #{dep.source_url}")
|
||||
Inspec::Log.debug("Adding dependency #{dep.name} (#{dep.resolved_source})")
|
||||
graph[dep.name] = dep
|
||||
if !dep.dependencies.empty?
|
||||
# Recursively resolve any transitive dependencies.
|
||||
|
|
|
@ -23,32 +23,17 @@ module Inspec
|
|||
FileUtils.mkdir_p(@path) unless File.directory?(@path)
|
||||
end
|
||||
|
||||
def add(name, source, path_from)
|
||||
path_to = base_path_for(name, source)
|
||||
path_to = if File.directory?(path_to)
|
||||
path_to
|
||||
elsif path_from.end_with?('.zip')
|
||||
"#{path_to}.tar.gz"
|
||||
elsif path_from.end_with?('.tar.gz')
|
||||
"#{path_to}.tar.gz"
|
||||
else
|
||||
fail "Cannot add unknown archive #{path} to vendor index"
|
||||
end
|
||||
FileUtils.cp_r(path_from, path_to)
|
||||
path_to
|
||||
end
|
||||
|
||||
def prefered_entry_for(name, source_url)
|
||||
path = base_path_for(name, source_url)
|
||||
def prefered_entry_for(key)
|
||||
path = base_path_for(key)
|
||||
if File.directory?(path)
|
||||
path
|
||||
else
|
||||
archive_entry_for(name, source_url)
|
||||
archive_entry_for(key)
|
||||
end
|
||||
end
|
||||
|
||||
def archive_entry_for(name, source_url)
|
||||
path = base_path_for(name, source_url)
|
||||
def archive_entry_for(key)
|
||||
path = base_path_for(key)
|
||||
if File.exist?("#{path}.tar.gz")
|
||||
"#{path}.tar.gz"
|
||||
elsif File.exist?("#{path}.zip")
|
||||
|
@ -64,8 +49,8 @@ module Inspec
|
|||
# @param [String] source_url
|
||||
# @return [Boolean]
|
||||
#
|
||||
def exists?(name, source_url)
|
||||
path = base_path_for(name, source_url)
|
||||
def exists?(key)
|
||||
path = base_path_for(key)
|
||||
File.directory?(path) || File.exist?("#{path}.tar.gz") || File.exist?("#{path}.zip")
|
||||
end
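For orientation (not code from this change): the index is now keyed directly by a fetcher's cache_key, and an entry may be an extracted directory or an archive. A hypothetical lookup sketch; the git source is a placeholder and the class name is taken from the requires elsewhere in this diff:

    require 'inspec/fetcher'
    require 'inspec/dependencies/vendor_index'

    index   = Inspec::VendorIndex.new
    fetcher = Inspec::Fetcher.resolve(git: 'https://example.com/ssh.git', ref: 'abc123')
    key     = fetcher.cache_key

    path = if index.exists?(key)
             index.prefered_entry_for(key)          # directory wins over .tar.gz/.zip
           else
             fetcher.fetch(index.base_path_for(key)) # fetcher writes dir or archive here
           end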
|
||||
|
||||
|
@ -80,26 +65,8 @@ module Inspec
|
|||
# @param [String] source_url
|
||||
# @return [String]
|
||||
#
|
||||
def base_path_for(name, source_url)
|
||||
File.join(@path, key_for(name, source_url))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
#
|
||||
# Return the key for a given profile in the vendor index.
|
||||
#
|
||||
# The `source_url` parameter should be a URI-like string that
|
||||
# fully specifies the source of the exact version we want to pull
|
||||
# down.
|
||||
#
|
||||
# @param [String] name
|
||||
# @param [String] source_url
|
||||
# @return [String]
|
||||
#
|
||||
def key_for(name, source_url)
|
||||
source_hash = Digest::SHA256.hexdigest source_url
|
||||
"#{name}-#{source_hash}"
|
||||
def base_path_for(cache_key)
|
||||
File.join(@path, cache_key)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -17,6 +17,5 @@ module Inspec
|
|||
end
|
||||
|
||||
require 'fetchers/local'
|
||||
require 'fetchers/zip'
|
||||
require 'fetchers/tar'
|
||||
require 'fetchers/url'
|
||||
require 'fetchers/git'
|
||||
|
|
lib/inspec/file_provider.rb (new file, 219 lines)
@ -0,0 +1,219 @@
|
|||
# encoding: utf-8
|
||||
require 'rubygems/package'
|
||||
require 'zlib'
|
||||
require 'zip'
|
||||
|
||||
module Inspec
|
||||
class FileProvider
|
||||
def self.for_path(path)
|
||||
if path.is_a?(Hash)
|
||||
MockProvider.new(path)
|
||||
elsif File.directory?(path)
|
||||
DirProvider.new(path)
|
||||
elsif File.exist?(path) && path.end_with?('.tar.gz', 'tgz')
|
||||
TarProvider.new(path)
|
||||
elsif File.exist?(path) && path.end_with?('.zip')
|
||||
ZipProvider.new(path)
|
||||
elsif File.exist?(path)
|
||||
DirProvider.new(path)
|
||||
else
|
||||
fail "No file provider for the provided path: #{path}"
|
||||
end
|
||||
end
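For orientation (not code from this change): for_path dispatches purely on the shape of its argument. A hypothetical sketch; the paths are placeholders and must exist for the archive branches to be taken:

    require 'inspec/file_provider'

    Inspec::FileProvider.for_path('examples/profile')    # DirProvider (existing dir)
    Inspec::FileProvider.for_path('profile-1.0.tar.gz')  # TarProvider (existing tarball)
    Inspec::FileProvider.for_path('profile-1.0.zip')     # ZipProvider (existing zip)

    mock = Inspec::FileProvider.for_path(mock: { 'inspec.yml' => "name: demo\n" })
    mock.files              # => ["inspec.yml"]
    mock.read('inspec.yml') # => "name: demo\n"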
|
||||
|
||||
def initialize(_path)
|
||||
end
|
||||
|
||||
def read(_file)
|
||||
fail "#{self} does not implement `read(...)`. This is required."
|
||||
end
|
||||
|
||||
def files
|
||||
fail "Fetcher #{self} does not implement `files()`. This is required."
|
||||
end
|
||||
|
||||
def relative_provider
|
||||
RelativeFileProvider.new(self)
|
||||
end
|
||||
end
|
||||
|
||||
class MockProvider < FileProvider
|
||||
attr_reader :files
|
||||
def initialize(path)
|
||||
@data = path[:mock]
|
||||
@files = @data.keys
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@data[file]
|
||||
end
|
||||
end
|
||||
|
||||
class DirProvider < FileProvider
|
||||
attr_reader :files
|
||||
def initialize(path)
|
||||
@files = if File.file?(path)
|
||||
[path]
|
||||
else
|
||||
Dir[File.join(path, '**', '*')]
|
||||
end
|
||||
@path = path
|
||||
end
|
||||
|
||||
def read(file)
|
||||
return nil unless files.include?(file)
|
||||
return nil unless File.file?(file)
|
||||
File.read(file)
|
||||
end
|
||||
end
|
||||
|
||||
class ZipProvider < FileProvider
|
||||
attr_reader :files
|
||||
|
||||
def initialize(path)
|
||||
@path = path
|
||||
@contents = {}
|
||||
@files = []
|
||||
::Zip::InputStream.open(@path) do |io|
|
||||
while (entry = io.get_next_entry)
|
||||
@files.push(entry.name.sub(%r{/+$}, ''))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@contents[file] ||= read_from_zip(file)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def read_from_zip(file)
|
||||
return nil unless @files.include?(file)
|
||||
res = nil
|
||||
::Zip::InputStream.open(@path) do |io|
|
||||
while (entry = io.get_next_entry)
|
||||
next unless file == entry.name
|
||||
res = io.read
|
||||
break
|
||||
end
|
||||
end
|
||||
res
|
||||
end
|
||||
end
|
||||
|
||||
class TarProvider < FileProvider
|
||||
attr_reader :files
|
||||
|
||||
def initialize(path)
|
||||
@path = path
|
||||
@contents = {}
|
||||
@files = []
|
||||
Gem::Package::TarReader.new(Zlib::GzipReader.open(@path)) do |tar|
|
||||
@files = tar.map(&:full_name)
|
||||
end
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@contents[file] ||= read_from_tar(file)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def read_from_tar(file)
|
||||
return nil unless @files.include?(file)
|
||||
res = nil
|
||||
# NB `TarReader` includes `Enumerable` beginning with Ruby 2.x
|
||||
Gem::Package::TarReader.new(Zlib::GzipReader.open(@path)) do |tar|
|
||||
tar.each do |entry|
|
||||
next unless entry.file? && file == entry.full_name
|
||||
res = entry.read
|
||||
break
|
||||
end
|
||||
end
|
||||
res
|
||||
end
|
||||
end
|
||||
|
||||
class RelativeFileProvider
|
||||
BLACKLIST_FILES = [
|
||||
'/pax_global_header',
|
||||
'pax_global_header',
|
||||
].freeze
|
||||
|
||||
attr_reader :files
|
||||
attr_reader :prefix
|
||||
attr_reader :parent
|
||||
|
||||
def initialize(parent_provider)
|
||||
@parent = parent_provider
|
||||
@prefix = get_prefix(parent.files)
|
||||
if @prefix.nil?
|
||||
fail "Could not determine path prefix for #{parent}"
|
||||
end
|
||||
@files = parent.files.find_all { |x| x.start_with?(prefix) && x != prefix }
|
||||
.map { |x| x[prefix.length..-1] }
|
||||
end
|
||||
|
||||
def abs_path(file)
|
||||
return nil if file.nil?
|
||||
prefix + file
|
||||
end
|
||||
|
||||
def read(file)
|
||||
parent.read(abs_path(file))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def get_prefix(fs)
|
||||
return '' if fs.empty?
|
||||
|
||||
# filter blacklisted files
|
||||
fs -= BLACKLIST_FILES
|
||||
|
||||
sorted = fs.sort_by(&:length)
|
||||
get_folder_prefix(sorted)
|
||||
end
|
||||
|
||||
def prefix_candidate_for(file)
|
||||
if file.end_with?(File::SEPARATOR)
|
||||
file
|
||||
else
|
||||
file + File::SEPARATOR
|
||||
end
|
||||
end
|
||||
|
||||
def get_folder_prefix(fs)
|
||||
return get_files_prefix(fs) if fs.length == 1
|
||||
first, *rest = fs
|
||||
pre = prefix_candidate_for(first)
|
||||
|
||||
if rest.all? { |i| i.start_with? pre }
|
||||
return get_folder_prefix(rest)
|
||||
end
|
||||
get_files_prefix(fs)
|
||||
end
|
||||
|
||||
def get_files_prefix(fs)
|
||||
return '' if fs.empty?
|
||||
|
||||
file = fs[0]
|
||||
bn = File.basename(file)
|
||||
# no more prefixes
|
||||
return '' if bn == file
|
||||
|
||||
i = file.rindex(bn)
|
||||
pre = file[0..i-1]
|
||||
|
||||
rest = fs.find_all { |f| !f.start_with?(pre) }
|
||||
return pre if rest.empty?
|
||||
|
||||
new_pre = get_prefix(rest)
|
||||
return new_pre if pre.start_with? new_pre
|
||||
# edge case: completely different prefixes; retry prefix detection
|
||||
a = File.dirname(pre + 'a')
|
||||
b = File.dirname(new_pre + 'b')
|
||||
get_prefix([a, b])
|
||||
end
|
||||
end
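An illustration of the prefix stripping above, using the MockProvider defined earlier in this file; the file list mimics a typical GitHub tarball and is illustrative only:

    require 'inspec/file_provider'

    provider = Inspec::MockProvider.new(mock: {
      'pax_global_header'                  => '',
      'ssh-baseline-2.1.0/inspec.yml'      => "name: ssh-baseline\n",
      'ssh-baseline-2.1.0/controls/ssh.rb' => "# controls\n",
    })

    rel = provider.relative_provider
    rel.prefix              # => "ssh-baseline-2.1.0/" (pax_global_header is blacklisted)
    rel.files               # => ["inspec.yml", "controls/ssh.rb"]
    rel.read('inspec.yml')  # => "name: ssh-baseline\n"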
|
||||
end
|
|
@ -1,106 +1,70 @@
|
|||
# encoding: utf-8
|
||||
# author: Dominik Richter
|
||||
# author: Christoph Hartmann
|
||||
|
||||
require 'utils/plugin_registry'
|
||||
require 'digest'
|
||||
|
||||
module Inspec
|
||||
module Plugins
|
||||
#
|
||||
# An Inspec::Plugins::Fetcher is responsible for fetching a remote
|
||||
# source to a local directory or file provided by the user.
|
||||
#
|
||||
# In general, there are two kinds of fetchers. (1) Fetchers that
|
||||
# implement this entire API (see the Git or Url fetchers for
|
||||
# examples), and (2) fetchers that only implement self.resolve and
|
||||
# then call the resolve_next method with a modified target hash.
|
||||
# Fetchers in (2) do not need to implement the functions in this
|
||||
# class because the caller will never actually get an instance of
|
||||
# those fetchers.
|
||||
#
|
||||
class Fetcher < PluginRegistry::Plugin
|
||||
def self.plugin_registry
|
||||
Inspec::Fetcher
|
||||
end
|
||||
|
||||
# Provide a list of files that are available to this fetcher.
|
||||
#
|
||||
# @return [Array[String]] A list of filenames
|
||||
def files
|
||||
fail "Fetcher #{self} does not implement `files()`. This is required."
|
||||
end
|
||||
|
||||
# Read a file using this fetcher. The name must correspond to a file
|
||||
# available to this fetcher. Use #files to retrieve the list of
|
||||
# files.
|
||||
# The path to the archive on disk. This can be passed to a
|
||||
# FileProvider to get access to the files in the fetched
|
||||
# profile.
|
||||
#
|
||||
# @param [String] _file The filename you are interested in
|
||||
# @return [String] The file's contents
|
||||
def read(_file)
|
||||
fail "Fetcher #{self} does not implement `read(...)`. This is required."
|
||||
def archive_path
|
||||
fail "Fetcher #{self} does not implement `archive_path()`. This is required."
|
||||
end
|
||||
|
||||
def relative_target
|
||||
RelFetcher.new(self)
|
||||
end
|
||||
#
|
||||
# Fetches the remote source to a local source, using the
|
||||
# provided path as a partial filename. That is, if you pass
|
||||
# /foo/bar/baz, the fetcher can create:
|
||||
#
|
||||
# /foo/bar/baz/: A profile directory, or
|
||||
# /foo/bar/baz.tar.gz: A profile tarball, or
|
||||
# /foo/bar/baz.zip
|
||||
#
|
||||
def fetch(_path)
|
||||
fail "Fetcher #{self} does not implement `fetch()`. This is required."
|
||||
end
|
||||
|
||||
BLACKLIST_FILES = [
|
||||
'/pax_global_header',
|
||||
'pax_global_header',
|
||||
].freeze
|
||||
|
||||
class RelFetcher < Fetcher
|
||||
attr_reader :files
|
||||
attr_reader :prefix
|
||||
|
||||
def initialize(fetcher)
|
||||
@parent = fetcher
|
||||
@prefix = get_prefix(fetcher.files)
|
||||
@files = fetcher.files.find_all { |x| x.start_with? prefix }
|
||||
.map { |x| x[prefix.length..-1] }
|
||||
#
|
||||
# The full specification of the remote source, with any
|
||||
# ambiguous references provided by the user resolved to an exact
|
||||
# reference where possible. For example, in the Git provider, a
|
||||
# tag will be resolved to an exact revision.
|
||||
#
|
||||
def resolved_source
|
||||
fail "Fetcher #{self} does not implement `resolved_source()`. This is required for terminal fetchers."
|
||||
end
|
||||
|
||||
def abs_path(file)
|
||||
return nil if file.nil?
|
||||
prefix + file
|
||||
#
|
||||
# A string based on the components of the resolved source,
|
||||
# suitable for constructing per-source file names.
|
||||
#
|
||||
def cache_key
|
||||
key = ''
|
||||
resolved_source.each do |k, v|
|
||||
key << "#{k}:#{v}"
|
||||
end
|
||||
|
||||
def read(file)
|
||||
@parent.read(abs_path(file))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def get_prefix(fs)
|
||||
return '' if fs.empty?
|
||||
|
||||
# filter blacklisted files
|
||||
fs -= BLACKLIST_FILES
|
||||
|
||||
sorted = fs.sort_by(&:length)
|
||||
get_folder_prefix(sorted)
|
||||
end
|
||||
|
||||
def get_folder_prefix(fs, first_iteration = true)
|
||||
return get_files_prefix(fs) if fs.length == 1
|
||||
pre = fs[0] + File::SEPARATOR
|
||||
rest = fs[1..-1]
|
||||
if rest.all? { |i| i.start_with? pre }
|
||||
return get_folder_prefix(rest, false)
|
||||
end
|
||||
return get_files_prefix(fs) if first_iteration
|
||||
fs
|
||||
end
|
||||
|
||||
def get_files_prefix(fs)
|
||||
return '' if fs.empty?
|
||||
|
||||
file = fs[0]
|
||||
bn = File.basename(file)
|
||||
# no more prefixes
|
||||
return '' if bn == file
|
||||
|
||||
i = file.rindex(bn)
|
||||
pre = file[0..i-1]
|
||||
|
||||
rest = fs.find_all { |f| !f.start_with?(pre) }
|
||||
return pre if rest.empty?
|
||||
|
||||
new_pre = get_prefix(rest)
|
||||
return new_pre if pre.start_with? new_pre
|
||||
# edge case: completely different prefixes; retry prefix detection
|
||||
a = File.dirname(pre + 'a')
|
||||
b = File.dirname(new_pre + 'b')
|
||||
get_prefix([a, b])
|
||||
Digest::SHA256.hexdigest key
|
||||
end
|
||||
end
|
||||
end
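For orientation (not code from this change): cache_key concatenates the resolved source's key/value pairs and hashes them, giving the vendor index a stable per-source name. A hypothetical sketch; the repository URL is a placeholder:

    require 'inspec/fetcher'

    f = Inspec::Fetcher.resolve(git: 'https://example.com/ssh.git', ref: 'abc123')
    f.resolved_source  # => { git: 'https://example.com/ssh.git', ref: 'abc123' }
    f.cache_key        # => Digest::SHA256.hexdigest("git:https://example.com/ssh.gitref:abc123")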
|
||||
|
|
|
@ -6,11 +6,14 @@
|
|||
require 'forwardable'
|
||||
require 'inspec/polyfill'
|
||||
require 'inspec/fetcher'
|
||||
require 'inspec/file_provider'
|
||||
require 'inspec/source_reader'
|
||||
require 'inspec/metadata'
|
||||
require 'inspec/backend'
|
||||
require 'inspec/rule'
|
||||
require 'inspec/log'
|
||||
require 'inspec/profile_context'
|
||||
require 'inspec/dependencies/vendor_index'
|
||||
require 'inspec/dependencies/lockfile'
|
||||
require 'inspec/dependencies/dependency_set'
|
||||
|
||||
|
@ -18,24 +21,34 @@ module Inspec
|
|||
class Profile # rubocop:disable Metrics/ClassLength
|
||||
extend Forwardable
|
||||
|
||||
def self.resolve_target(target)
|
||||
# Fetchers retrieve file contents
|
||||
def self.resolve_target(target, cache = nil)
|
||||
cache ||= VendorIndex.new
|
||||
fetcher = Inspec::Fetcher.resolve(target)
|
||||
if fetcher.nil?
|
||||
fail("Could not fetch inspec profile in #{target.inspect}.")
|
||||
end
|
||||
# Source readers understand the target's structure and provide
|
||||
# access to tests, libraries, and metadata
|
||||
reader = Inspec::SourceReader.resolve(fetcher.relative_target)
|
||||
|
||||
if cache.exists?(fetcher.cache_key)
|
||||
Inspec::Log.debug "Using cached dependency for #{target}"
|
||||
cache.prefered_entry_for(fetcher.cache_key)
|
||||
else
|
||||
fetcher.fetch(cache.base_path_for(fetcher.cache_key))
|
||||
fetcher.archive_path
|
||||
end
|
||||
end
|
||||
|
||||
def self.for_path(path, opts)
|
||||
file_provider = FileProvider.for_path(path)
|
||||
reader = Inspec::SourceReader.resolve(file_provider.relative_provider)
|
||||
if reader.nil?
|
||||
fail("Don't understand inspec profile in #{target.inspect}, it "\
|
||||
fail("Don't understand inspec profile in #{path}, it " \
|
||||
"doesn't look like a supported profile structure.")
|
||||
end
|
||||
reader
|
||||
new(reader, opts)
|
||||
end
|
||||
|
||||
def self.for_target(target, opts = {})
|
||||
new(resolve_target(target), opts.merge(target: target))
|
||||
for_path(resolve_target(target, opts[:cache]), opts.merge(target: target))
|
||||
end
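For orientation (not code from this change): the load pipeline is now fetcher (fetch or cache hit), then FileProvider, then SourceReader, then Profile. A hypothetical sketch; the git source is a placeholder, and in a real run the Runner supplies the :backend and cache options:

    require 'inspec/profile'

    cache   = Inspec::VendorIndex.new
    profile = Inspec::Profile.for_target(
      { git: 'https://example.com/profiles/ssh.git', tag: 'v1.2.0' },
      cache: cache,
    )
    profile.name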
|
||||
|
||||
attr_reader :source_reader
|
||||
|
@ -46,18 +59,18 @@ module Inspec
|
|||
|
||||
# rubocop:disable Metrics/AbcSize
|
||||
def initialize(source_reader, options = {})
|
||||
@options = options
|
||||
@target = @options.delete(:target)
|
||||
@logger = @options[:logger] || Logger.new(nil)
|
||||
@source_reader = source_reader
|
||||
if options[:dependencies]
|
||||
@target = options.delete(:target)
|
||||
@logger = options[:logger] || Logger.new(nil)
|
||||
@locked_dependencies = options[:dependencies]
|
||||
end
|
||||
@controls = options[:controls] || []
|
||||
@profile_id = @options[:id]
|
||||
@backend = @options[:backend] || Inspec::Backend.create(options)
|
||||
@profile_id = options[:id]
|
||||
@backend = options[:backend] || Inspec::Backend.create(options)
|
||||
@source_reader = source_reader
|
||||
@tests_collected = false
|
||||
Metadata.finalize(@source_reader.metadata, @profile_id)
|
||||
@runner_context = @options[:profile_context] || Inspec::ProfileContext.for_profile(self, @backend, @options[:attributes])
|
||||
@runner_context = options[:profile_context] || Inspec::ProfileContext.for_profile(self,
|
||||
@backend,
|
||||
options[:attributes])
|
||||
end
|
||||
|
||||
def name
|
||||
|
|
|
@ -11,9 +11,6 @@ module Inspec
|
|||
class SourceReaderRegistry < PluginRegistry
|
||||
def resolve(target)
|
||||
return nil if target.nil?
|
||||
unless target.is_a? Inspec::Plugins::Fetcher
|
||||
fail "SourceReader cannot resolve targets that aren't Fetchers: #{target.class}"
|
||||
end
|
||||
super(target)
|
||||
end
|
||||
end
|
||||
|
|
test/functional/gitfetcher_test.rb (new file, 44 lines)
@ -0,0 +1,44 @@
|
|||
require 'functional/helper'
|
||||
require 'fileutils'
|
||||
require 'tmpdir'
|
||||
|
||||
describe 'profiles with git-based dependencies' do
|
||||
include FunctionalHelper
|
||||
before(:all) do
|
||||
@tmpdir = Dir.mktmpdir
|
||||
@profile_dir = File.join(@tmpdir, "test-profile")
|
||||
@git_dep_dir = File.join(@tmpdir, "git-dep")
|
||||
|
||||
Dir.chdir(@tmpdir) do
|
||||
inspec("init profile git-dep")
|
||||
inspec("init profile test-profile")
|
||||
end
|
||||
|
||||
Dir.chdir(@git_dep_dir) do
|
||||
CMD.run_command("git init")
|
||||
CMD.run_command("git add .")
|
||||
CMD.run_command("git commit -m 'initial commit'")
|
||||
CMD.run_command("git commit -m 'another commit' --allow-empty")
|
||||
CMD.run_command("git tag antag")
|
||||
end
|
||||
|
||||
File.open(File.join(@profile_dir, "inspec.yml"), 'a') do |f|
|
||||
f.write <<EOF
|
||||
depends:
|
||||
- name: git-dep
|
||||
git: #{@git_dep_dir}
|
||||
tag: antag
|
||||
EOF
|
||||
end
|
||||
end
|
||||
|
||||
after(:all) do
|
||||
FileUtils.rm_rf(@tmpdir)
|
||||
end
|
||||
|
||||
it 'executes a profile with a git based dependency' do
|
||||
out = inspec("exec #{@profile_dir}")
|
||||
out.stderr.must_equal ''
|
||||
out.exit_status.must_equal 0
|
||||
end
|
||||
end
|
|
@ -1,7 +1,6 @@
|
|||
# encoding: utf-8
|
||||
# author: Dominik Richter
|
||||
# author: Christoph Hartmann
|
||||
|
||||
require 'functional/helper'
|
||||
|
||||
describe 'example inheritance profile' do
|
||||
|
@ -11,14 +10,14 @@ describe 'example inheritance profile' do
|
|||
it 'check succeeds with --profiles-path' do
|
||||
out = inspec('check ' + path + ' --profiles-path ' + examples_path)
|
||||
out.stderr.must_equal ''
|
||||
out.stdout.must_match /Valid.*true/
|
||||
out.stdout.must_match(/Valid.*true/)
|
||||
out.exit_status.must_equal 0
|
||||
end
|
||||
|
||||
it 'check succeeds without --profiles-path using inspec.yml' do
|
||||
out = inspec('check ' + path)
|
||||
out.stderr.must_equal ''
|
||||
out.stdout.must_match /Valid.*true/
|
||||
out.stdout.must_match(/Valid.*true/)
|
||||
out.exit_status.must_equal 0
|
||||
end
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ describe 'inspec archive' do
|
|||
it 'archive is successful' do
|
||||
out = inspec('archive ' + example_profile + ' --overwrite')
|
||||
out.exit_status.must_equal 0
|
||||
out.stdout.must_match /Generate archive [^ ]*profile.tar.gz/
|
||||
out.stdout.must_match(/Generate archive [^ ]*profile.tar.gz/)
|
||||
out.stdout.must_include 'Finished archive generation.'
|
||||
end
|
||||
|
||||
|
|
|
@ -93,7 +93,7 @@ Summary: \e[32m0 successful\e[0m, \e[31m0 failures\e[0m, \e[37m0 skipped\e[0m
|
|||
let(:json) { JSON.load(out.stdout) }
|
||||
|
||||
it 'exits with an error' do
|
||||
out.stderr.must_match /^This OS\/platform \(.+\) is not supported by this profile.$/
|
||||
out.stderr.must_match(/^This OS\/platform \(.+\) is not supported by this profile.$/)
|
||||
out.exit_status.must_equal 1
|
||||
end
|
||||
end
|
||||
|
|
|
@ -72,7 +72,7 @@ describe 'inspec json' do
|
|||
end
|
||||
|
||||
it 'has the source code' do
|
||||
control['code'].must_match /\Acontrol \"tmp-1.0\" do.*end\n\Z/m
|
||||
control['code'].must_match(/\Acontrol \"tmp-1.0\" do.*end\n\Z/m)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -143,7 +143,7 @@ describe 'inspec shell tests' do
|
|||
|
||||
it 'exposes all resources' do
|
||||
out = do_shell('os')
|
||||
out.stdout.must_match /\=> .*Operating.* .*System.* .*Detection/
|
||||
out.stdout.must_match(/\=> .*Operating.* .*System.* .*Detection/)
|
||||
end
|
||||
|
||||
it 'can run ruby expressions' do
|
||||
|
|
|
@ -46,7 +46,7 @@ describe 'command tests' do
|
|||
describe 'check' do
|
||||
it 'verifies that a profile is ok' do
|
||||
out = inspec('check ' + example_profile)
|
||||
out.stdout.must_match /Valid.*true/
|
||||
out.stdout.must_match(/Valid.*true/)
|
||||
out.exit_status.must_equal 0
|
||||
end
|
||||
end
|
||||
|
|
test/unit/dependencies/lockfile_test.rb (new file, 72 lines)
@ -0,0 +1,72 @@
|
|||
require 'helper'
|
||||
|
||||
describe Inspec::Lockfile do
|
||||
# Ruby 1.9: .to_yaml format is slightly different
|
||||
if Gem::Version.new(RUBY_VERSION) < Gem::Version.new("2.0")
|
||||
let(:lockfile_content) {
|
||||
<<EOF
|
||||
---
|
||||
lockfile_version: 0
|
||||
depends:
|
||||
- name: foo
|
||||
resolved_source:
|
||||
url: http://foo
|
||||
version_constraints: ! '>= 0'
|
||||
dependencies:
|
||||
- name: bar
|
||||
resolved_source:
|
||||
url: http://bar
|
||||
version_constraints: ! '>= 0'
|
||||
EOF
|
||||
}
|
||||
else
|
||||
let(:lockfile_content) {
|
||||
<<EOF
|
||||
---
|
||||
lockfile_version: 0
|
||||
depends:
|
||||
- name: foo
|
||||
resolved_source:
|
||||
url: http://foo
|
||||
version_constraints: ">= 0"
|
||||
dependencies:
|
||||
- name: bar
|
||||
resolved_source:
|
||||
url: http://bar
|
||||
version_constraints: ">= 0"
|
||||
EOF
|
||||
}
|
||||
end
|
||||
|
||||
let(:lockfile_hash) {
|
||||
{ 'lockfile_version' => 0,
|
||||
'depends' => [
|
||||
{ 'name' => "foo", 'resolved_source' => {'url' => "http://foo"}, 'version_constraints' => ">= 0",
|
||||
'dependencies' => [{ 'name' => 'bar', 'resolved_source' => {'url' => 'http://bar' }, 'version_constraints' => ">= 0"}]
|
||||
}]}
|
||||
}
|
||||
|
||||
let(:lockfile_hash_with_symbols) {
|
||||
{ 'lockfile_version' => 0,
|
||||
'depends' => [
|
||||
{ name: "foo", resolved_source: {url: "http://foo"}, version_constraints: ">= 0",
|
||||
dependencies: [{ name: 'bar', resolved_source: {url: 'http://bar' }, version_constraints: ">= 0"}]
|
||||
}]}
|
||||
}
|
||||
|
||||
it "can generate a yaml representation of the lockfile" do
|
||||
l = Inspec::Lockfile.new(lockfile_hash)
|
||||
l.to_yaml.force_encoding("UTF-8").must_equal lockfile_content
|
||||
end
|
||||
|
||||
it "can generates a yaml representation of the lockfile even when the depends keys are symbols" do
|
||||
l = Inspec::Lockfile.new(lockfile_hash_with_symbols)
|
||||
l.to_yaml.force_encoding("UTF-8").must_equal lockfile_content
|
||||
end
|
||||
|
||||
it "uses symbol keys for the deps by default" do
|
||||
File.stubs(:read).with("testfile").returns(lockfile_content)
|
||||
l = Inspec::Lockfile.from_file("testfile")
|
||||
l.deps.must_equal lockfile_hash_with_symbols['depends']
|
||||
end
|
||||
end
|
|
@ -3,10 +3,13 @@ require 'inspec/errors'
|
|||
require 'inspec/dependencies/resolver'
|
||||
|
||||
class FakeDep
|
||||
attr_reader :name, :source_url
|
||||
attr_reader :name
|
||||
def initialize(name)
|
||||
@name = name
|
||||
@source_url = "file://#{name}"
|
||||
end
|
||||
|
||||
def resolved_source
|
||||
{ path: name }
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -19,56 +19,3 @@ describe Inspec::Fetcher do
|
|||
res.target.must_equal __FILE__
|
||||
end
|
||||
end
|
||||
|
||||
describe Inspec::Plugins::RelFetcher do
|
||||
def fetcher
|
||||
src_fetcher.expects(:files).returns(in_files).at_least_once
|
||||
Inspec::Plugins::RelFetcher.new(src_fetcher)
|
||||
end
|
||||
|
||||
let(:src_fetcher) { mock() }
|
||||
|
||||
IN_AND_OUT = {
|
||||
[] => [],
|
||||
%w{file} => %w{file},
|
||||
# don't prefix just by filename
|
||||
%w{file file_a} => %w{file file_a},
|
||||
%w{path/file path/file_a} => %w{file file_a},
|
||||
%w{path/to/file} => %w{file},
|
||||
%w{/path/to/file} => %w{file},
|
||||
%w{alice bob} => %w{alice bob},
|
||||
# mixed paths
|
||||
%w{x/a y/b} => %w{x/a y/b},
|
||||
%w{/x/a /y/b} => %w{x/a y/b},
|
||||
%w{z/x/a z/y/b} => %w{x/a y/b},
|
||||
%w{/z/x/a /z/y/b} => %w{x/a y/b},
|
||||
# mixed with relative path
|
||||
%w{a path/to/b} => %w{a path/to/b},
|
||||
%w{path/to/b a} => %w{path/to/b a},
|
||||
%w{path/to/b path/a} => %w{to/b a},
|
||||
%w{path/to/b path/a c} => %w{path/to/b path/a c},
|
||||
# mixed with absolute paths
|
||||
%w{/path/to/b /a} => %w{path/to/b a},
|
||||
%w{/path/to/b /path/a} => %w{to/b a},
|
||||
%w{/path/to/b /path/a /c} => %w{path/to/b path/a c},
|
||||
# mixing absolute and relative paths
|
||||
%w{path/a /path/b} => %w{path/a /path/b},
|
||||
%w{/path/a path/b} => %w{/path/a path/b},
|
||||
# extract folder structure buildup
|
||||
%w{/a /a/b /a/b/c} => %w{c},
|
||||
%w{/a /a/b /a/b/c/d/e} => %w{e},
|
||||
# ignore pax_global_header, which are commonly seen in github tars and are not
|
||||
# ignored by all tar streaming tools, its not extracted by GNU tar since 1.14
|
||||
%w{/pax_global_header /a/b} => %w{b},
|
||||
%w{pax_global_header a/b} => %w{b},
|
||||
}.each do |ins, outs|
|
||||
describe 'empty profile' do
|
||||
let(:in_files) { ins }
|
||||
|
||||
it 'also has no files' do
|
||||
fetcher.files.must_equal outs
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
|
test/unit/fetchers/git_test.rb (new file, 125 lines)
@ -0,0 +1,125 @@
|
|||
# encoding: utf-8
|
||||
# author: Dominik Richter
|
||||
# author: Christoph Hartmann
|
||||
require 'helper'
|
||||
|
||||
describe Fetchers::Git do
|
||||
let(:fetcher) { Fetchers::Git }
|
||||
|
||||
it 'registers with the fetchers registry' do
|
||||
reg = Inspec::Fetcher.registry
|
||||
_(reg['git']).must_equal fetcher
|
||||
end
|
||||
|
||||
it "handles sources specified by a :git key" do
|
||||
f = fetcher.resolve({git: "https://example.com/foo.gi"})
|
||||
f.wont_be_nil
|
||||
f.must_be_kind_of Fetchers::Git
end

describe "when given a valid repository" do
let(:git_dep_dir) { "test-directory" }
let(:git_master_ref) { "bf4d5774f02d24155bfc34b5897d22785a304cfa" }
let(:git_branch_ref) { "b979579e5fc8edb72511fe5d2a1230dede71eff7" }
let(:git_tag_ref) { "efc85d89ee9d5798ca93ee95db0c711b99061590" }
let(:git_output) {
out = mock()
out.stubs(:stdout).returns("")
out.stubs(:exitstatus).returns(0)
out.stubs(:stderr).returns("")
out.stubs(:status).returns(true)
out.stubs(:error!).returns(false)
out.stubs(:run_command).returns(true)
out
}

let(:git_ls_remote_output) {
out = mock()
out.stubs(:stdout).returns("9abea97db10a428709353fd582b969d0e17cb923\tHEAD
bf4d5774f02d24155bfc34b5897d22785a304cfa\trefs/heads/master
b979579e5fc8edb72511fe5d2a1230dede71eff7\trefs/heads/somebranch
d9d5a6fe85c3df709bb1c878c2de8f2cb5893ced\trefs/tags/boringtag
ad280246a1a2b3d1b1179e1a8d9e1a044e7ee94f\trefs/tags/antag
efc85d89ee9d5798ca93ee95db0c711b99061590\trefs/tags/antag^{}
be002c56b0806ea40aabf7a2b742c41182336198\trefs/tags/anothertag
a7729ce65636d6d8b80159dd5dd7a40fdb6f2501\trefs/tags/anothertag^{}\n")
out.stubs(:exitstatus).returns(0)
out.stubs(:stderr).returns("")
out.stubs(:error!).returns(false)
out.stubs(:run_command).returns(true)
out
}

before do
# git fetcher likes to make directories, let's stub that for every test
Dir.stubs(:mktmpdir).yields("test-tmp-dir")
File.stubs(:directory?).with("fetchpath/.git").returns(false)
FileUtils.stubs(:mkdir_p)
end

def expect_ls_remote(ref)
Mixlib::ShellOut.expects(:new).with("git ls-remote \"#{git_dep_dir}\" \"#{ref}*\"", {}).returns(git_ls_remote_output)
end

def expect_checkout(ref, at='test-tmp-dir')
Mixlib::ShellOut.expects(:new).with("git checkout #{ref}", {cwd: at}).returns(git_output)
end

def expect_clone
Mixlib::ShellOut.expects(:new).with("git clone #{git_dep_dir} ./", {cwd: 'test-tmp-dir'}).returns(git_output)
end

def expect_mv_into_place
FileUtils.expects(:cp_r).with('test-tmp-dir', 'fetchpath')
end

it "resolves to the revision of master by default" do
expect_ls_remote('master')
result = fetcher.resolve({git: git_dep_dir})
result.resolved_source.must_equal({git: git_dep_dir, ref: git_master_ref })
end

it "can resolve a tag" do
expect_ls_remote('antag')
result = fetcher.resolve({git: git_dep_dir, tag: 'antag'})
result.resolved_source.must_equal({git: git_dep_dir, ref: git_tag_ref })
end

it "can resolve a branch" do
expect_ls_remote('somebranch')
result = fetcher.resolve({git: git_dep_dir, branch: 'somebranch'})
result.resolved_source.must_equal({git: git_dep_dir, ref: git_branch_ref })
end

it "assumes the ref you gave it is the thing you want" do
result = fetcher.resolve({git: git_dep_dir, ref: 'a_test_ref'})
result.resolved_source.must_equal({git: git_dep_dir, ref: 'a_test_ref' })
end

it "fetches to the given location" do
expect_ls_remote('master')
expect_clone()
expect_checkout(git_master_ref)
expect_mv_into_place()
result = fetcher.resolve({git: git_dep_dir})
result.fetch("fetchpath")
end

it "doesn't refetch an already cloned repo" do
File.expects(:directory?).with("fetchpath/.git").at_least_once.returns(true)
expect_ls_remote('master')
expect_checkout(git_master_ref, 'fetchpath')
result = fetcher.resolve({git: git_dep_dir})
result.fetch("fetchpath")
end

it "returns the repo_path that we fetched to as the archive_path" do
File.expects(:directory?).with("fetchpath/.git").at_least_once.returns(true)
expect_ls_remote('master')
expect_checkout(git_master_ref, 'fetchpath')
result = fetcher.resolve({git: git_dep_dir})
result.fetch("fetchpath")
result.archive_path.must_equal 'fetchpath'
end
end
end

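The tests above pin down the new Git fetcher's contract: resolving a git: source turns a branch, tag, or explicit ref into an exact commit SHA via git ls-remote, and fetch clones into a temporary directory, checks out that SHA, and copies the checkout into the requested path. A minimal usage sketch of that flow; the repository URL and destination path are placeholders, not taken from this changeset:

    # Hypothetical usage; repository URL and destination path are placeholders.
    require 'inspec/fetcher'

    source  = { git: 'https://example.com/profiles/linux-baseline.git', tag: 'antag' }
    fetcher = Fetchers::Git.resolve(source)

    fetcher.resolved_source                  # => { git: '...', ref: '<sha reported by git ls-remote>' }
    fetcher.fetch('vendor/linux-baseline')   # clone + checkout of the resolved ref
    fetcher.archive_path                     # => 'vendor/linux-baseline'
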
@ -18,24 +18,6 @@ describe Fetchers::Local do
it 'must be resolved' do
_(res).must_be_kind_of fetcher
end

it 'must only contain this file' do
_(res.files).must_equal [__FILE__]
end

it 'must not read if the file doesnt exist' do
_(res.read('file-does-not-exist')).must_be_nil
end

it 'must not read files not covered' do
not_covered = File.expand_path('../tar_test.rb', __FILE__)
_(File.file?(not_covered)).must_equal true
_(res.read(not_covered)).must_be_nil
end

it 'must read the contents of the file' do
_(res.read(__FILE__)).must_equal File.read(__FILE__)
end
end

describe 'applied to this folder' do

@ -45,23 +27,5 @@ describe Fetchers::Local do
it 'must be resolved' do
_(res).must_be_kind_of fetcher
end

it 'must contain all files' do
_(res.files).must_include __FILE__
end

it 'must not read if the file doesnt exist' do
_(res.read('file-not-in-folder')).must_be_nil
end

it 'must not read files not covered' do
not_covered = File.expand_path('../../../helper.rb', __FILE__)
_(File.file?(not_covered)).must_equal true
_(res.read(not_covered)).must_be_nil
end

it 'must read the contents of the file' do
_(res.read(__FILE__)).must_equal File.read(__FILE__)
end
end
end

@ -24,20 +24,5 @@ describe Fetchers::Mock do
it 'must be resolved' do
fetcher.resolve({}).must_be_kind_of fetcher
end

it 'has no files on empty' do
fetcher.resolve({}).files.must_equal []
end

it 'has files' do
f = rand.to_s
fetcher.resolve({f => nil}).files.must_equal [f]
end

it 'can read a file' do
f = rand.to_s
s = rand.to_s
fetcher.resolve({f => s}).read(f).must_equal s
end
end
end

@ -1,36 +0,0 @@
# encoding: utf-8
# author: Dominik Richter
# author: Christoph Hartmann

require 'helper'

describe Fetchers::Tar do
let(:fetcher) { Fetchers::Tar }

it 'registers with the fetchers registry' do
reg = Inspec::Fetcher.registry
_(reg['tar']).must_equal fetcher
end

describe 'applied to a tar archive' do
let(:target) { MockLoader.profile_tgz('complete-profile') }
let(:res) { fetcher.resolve(target) }

it 'must be resolved' do
_(res).must_be_kind_of fetcher
end

it 'must contain all files' do
_(res.files.sort).must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
_(res.read('file-not-in-archive')).must_be_nil
end

it 'must read the contents of the file' do
_(res.read('inspec.yml')).must_match /^name: complete$/
end
end
end

@ -1,7 +1,6 @@
# encoding: utf-8
# author: Dominik Richter
# author: Christoph Hartmann

require 'helper'

describe Fetchers::Url do

@ -27,17 +26,16 @@ describe Fetchers::Url do
it 'handles a http url' do
url = 'http://chef.io/some.tar.gz'
res = fetcher.resolve(url)
_(res).must_be_kind_of Fetchers::Local
_(res.parent).must_be_kind_of Fetchers::Url
_(res.parent.target).must_equal 'http://chef.io/some.tar.gz'
_(res).must_be_kind_of Fetchers::Url
_(res.resolved_source).must_equal({url: 'http://chef.io/some.tar.gz'})
end

it 'handles a https url' do
url = 'https://chef.io/some.tar.gz'
res = fetcher.resolve(url)
_(res).must_be_kind_of Fetchers::Local
_(res.parent).must_be_kind_of Fetchers::Url
_(res.parent.target).must_equal 'https://chef.io/some.tar.gz'
_(res).must_be_kind_of Fetchers::Url
_(res.resolved_source).must_equal({url: 'https://chef.io/some.tar.gz'})

end

it 'doesnt handle other schemas' do

@ -57,7 +55,7 @@ describe Fetchers::Url do
it "resolves a github url #{github}" do
res = fetcher.resolve(github)
_(res).wont_be_nil
_(res.parent.target).must_equal 'https://github.com/chef/inspec/archive/master.tar.gz'
_(res.resolved_source).must_equal({url: 'https://github.com/chef/inspec/archive/master.tar.gz'})
end
end

@ -65,88 +63,40 @@ describe Fetchers::Url do
github = 'https://github.com/hardening-io/tests-os-hardening/tree/2.0'
res = fetcher.resolve(github)
_(res).wont_be_nil
_(res.parent.target).must_equal 'https://github.com/hardening-io/tests-os-hardening/archive/2.0.tar.gz'
_(res.resolved_source).must_equal({url: 'https://github.com/hardening-io/tests-os-hardening/archive/2.0.tar.gz'})
end

it "resolves a github commit url" do
github = 'https://github.com/hardening-io/tests-os-hardening/tree/48bd4388ddffde68badd83aefa654e7af3231876'
res = fetcher.resolve(github)
_(res).wont_be_nil
_(res.parent.target).must_equal 'https://github.com/hardening-io/tests-os-hardening/archive/48bd4388ddffde68badd83aefa654e7af3231876.tar.gz'
_(res.resolved_source).must_equal({url: 'https://github.com/hardening-io/tests-os-hardening/archive/48bd4388ddffde68badd83aefa654e7af3231876.tar.gz'})
end
end

describe 'applied to a valid url (mocked tar.gz)' do
let(:mock_file) { MockLoader.profile_tgz('complete-profile') }
let(:target) { 'http://myurl/file.tar.gz' }
let(:res) {
mock_open = Minitest::Mock.new
mock_open.expect :meta, {'content-type' => 'application/gzip'}
mock_open.expect :read, File.open(mock_file, 'rb').read
fetcher.expects(:open).returns(mock_open)
fetcher.resolve(target)
let(:subject) { fetcher.resolve(target) }
let(:mock_open) {
m = Minitest::Mock.new
m.expect :meta, {'content-type' => 'application/gzip'}
m.expect :read, File.open(mock_file, 'rb').read
m
}

it 'must be resolved to the final format' do
_(res).must_be_kind_of Fetchers::Tar
end

it 'must be resolved to the final format' do
_(res.parent).must_be_kind_of fetcher
end

it 'must contain all files' do
_(res.files.sort).must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
_(res.read('file-not-in-archive')).must_be_nil
end

it 'must read the contents of the file' do
_(res.read('inspec.yml')).must_match /^name: complete$/
end
end

describe 'applied to a valid url (mocked zip)' do
let(:mock_file) { MockLoader.profile_zip('complete-profile') }
let(:target) { 'http://myurl/file.tar.gz' }
let(:res) {
mock_open = Minitest::Mock.new
mock_open.expect :meta, {'content-type' => 'application/zip'}
mock_open.expect :read, File.open(mock_file, 'rb').read
fetcher.expects(:open).returns(mock_open)
fetcher.resolve(target)
let(:mock_dest) {
f = Tempfile.new("url-fetch-test")
f.path
}

it 'must be resolved to the final format' do
_(res).must_be_kind_of Fetchers::Zip
it 'tries to fetch the file' do
subject.expects(:open).returns(mock_open)
subject.fetch(mock_dest)
end

it 'must contain all files' do
_(res.files.sort).must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
_(res.read('file-not-in-archive')).must_be_nil
end

it 'must read the contents of the file' do
_(res.read('inspec.yml')).must_match /^name: complete$/
end
end

describe 'applied to a valid url with wrong content-type' do
let(:mock_file) { MockLoader.profile_zip('complete-profile') }
let(:target) { 'http://myurl/file.tar.gz' }

it 'must be resolved to the final format' do
mock_open = Minitest::Mock.new
mock_open.expect :meta, {'content-type' => 'wrong'}
fetcher.expects(:open).returns(mock_open)
proc { fetcher.resolve(target) }.must_throw RuntimeError
it "returns the resolved_source hash" do
subject.resolved_source.must_equal({ url: target })
end
end
end

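Taken together, these url_test.rb changes describe the reworked URL fetcher: GitHub tree/<branch-or-sha> links (or a bare repository link) are rewritten to the matching archive/....tar.gz download, resolved_source reports that final URL, and fetch streams the download to a destination path. A rough sketch of the URL-rewriting rule the tests encode; the helper name below is invented for illustration and is not an API from this PR:

    # Illustrative only; transform_github_url is not a method added by this PR.
    def transform_github_url(url)
      m = %r{^https?://github\.com/(?<org>[^/]+)/(?<repo>[^/]+)(/tree/(?<ref>.+))?$}.match(url)
      return url if m.nil?
      ref = m[:ref] || 'master'
      "https://github.com/#{m[:org]}/#{m[:repo]}/archive/#{ref}.tar.gz"
    end

    transform_github_url('https://github.com/chef/inspec')
    # => "https://github.com/chef/inspec/archive/master.tar.gz"
    transform_github_url('https://github.com/hardening-io/tests-os-hardening/tree/2.0')
    # => "https://github.com/hardening-io/tests-os-hardening/archive/2.0.tar.gz"
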
@ -1,36 +0,0 @@
# encoding: utf-8
# author: Dominik Richter
# author: Christoph Hartmann

require 'helper'

describe Fetchers::Zip do
let(:fetcher) { Fetchers::Zip }

it 'registers with the fetchers registry' do
reg = Inspec::Fetcher.registry
_(reg['zip']).must_equal fetcher
end

describe 'applied to a zipped archive' do
let(:target) { MockLoader.profile_zip('complete-profile') }
let(:res) { fetcher.resolve(target) }

it 'must be resolved' do
_(res).must_be_kind_of fetcher
end

it 'must contain all files' do
_(res.files.sort).must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
_(res.read('file-not-in-archive')).must_be_nil
end

it 'must read the contents of the file' do
_(res.read('inspec.yml')).must_match /^name: complete$/
end
end
end

205
test/unit/file_provider_test.rb
Normal file
@ -0,0 +1,205 @@
# encoding: utf-8
# author: Dominik Richter
# author: Christoph Hartmann

require 'helper'

describe Inspec::MockProvider do
let(:subject) { Inspec::MockProvider.new(target) }

describe 'without data' do
let(:target) {{ mock: {}}}
it 'has no files on empty' do
subject.files.must_equal []
end
end

describe 'with_data' do
let(:file_name) { rand.to_s }
let(:file_content) { rand.to_s }
let(:target) {{ mock: { file_name => file_content } }}

it 'has files' do
subject.files.must_equal [file_name]
end

it 'can read a file' do
subject.read(file_name).must_equal file_content
end
end
end

describe Inspec::DirProvider do
let(:subject) { Inspec::DirProvider.new(target) }

describe 'applied to this file' do
let(:target) { __FILE__ }

it 'must only contain this file' do
subject.files.must_equal [__FILE__]
end

it 'must not read if the file doesnt exist' do
subject.read('file-does-not-exist').must_be_nil
end

it 'must not read files not covered' do
not_covered = File.expand_path('../../helper.rb', __FILE__)
puts "#{not_covered}"
File.file?(not_covered).must_equal true
subject.read(not_covered).must_be_nil
end

it 'must read the contents of the file' do
subject.read(__FILE__).must_equal File.read(__FILE__)
end
end

describe 'applied to this folder' do
let(:target) { File.dirname(__FILE__) }

it 'must contain all files' do
subject.files.must_include __FILE__
end

it 'must not read if the file doesnt exist' do
subject.read('file-not-in-folder').must_be_nil
end

it 'must not read files not covered' do
not_covered = File.expand_path('../../helper.rb', __FILE__)
File.file?(not_covered).must_equal true
subject.read(not_covered).must_be_nil
end

it 'must read the contents of the file' do
subject.read(__FILE__).must_equal File.read(__FILE__)
end
end
end

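The new file providers take over the files/read interface that the fetchers used to carry: each provider exposes the same two methods over a different backing store (an in-memory hash for MockProvider, the filesystem for DirProvider). A short, hypothetical usage example; the paths and contents are placeholders:

    # Hypothetical example; profile path and contents are placeholders.
    dir = Inspec::DirProvider.new('examples/profile')
    dir.files                                  # => every file below examples/profile
    dir.read('examples/profile/inspec.yml')    # => file contents, or nil if not covered

    mock = Inspec::MockProvider.new(mock: { 'inspec.yml' => "name: demo\n" })
    mock.files               # => ["inspec.yml"]
    mock.read('inspec.yml')  # => "name: demo\n"
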
describe Inspec::ZipProvider do
let(:subject) { Inspec::ZipProvider.new(target) }

describe 'applied to a tar archive' do
let(:target) { MockLoader.profile_zip('complete-profile') }

it 'must contain all files' do
subject.files.sort.must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
subject.read('file-not-in-archive').must_be_nil
end

it 'must read the contents of the file' do
subject.read('inspec.yml').must_match(/^name: complete$/)
end
end
end


describe Inspec::ZipProvider do
let(:subject) { Inspec::ZipProvider.new(target) }

describe 'applied to a tar archive' do
let(:target) { MockLoader.profile_zip('complete-profile') }

it 'must contain all files' do
subject.files.sort.must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
subject.read('file-not-in-archive').must_be_nil
end

it 'must read the contents of the file' do
subject.read('inspec.yml').must_match(/^name: complete$/)
end
end
end

describe Inspec::TarProvider do
let(:subject) { Inspec::TarProvider.new(target) }

describe 'applied to a tar archive' do
let(:target) { MockLoader.profile_tgz('complete-profile') }

it 'must contain all files' do
subject.files.sort.must_equal %w{inspec.yml libraries libraries/testlib.rb
controls controls/filesystem_spec.rb}.sort
end

it 'must not read if the file isnt included' do
subject.read('file-not-in-archive').must_be_nil
end

it 'must read the contents of the file' do
subject.read('inspec.yml').must_match(/^name: complete$/)
end
end
end

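ZipProvider and TarProvider offer the same interface over archives, so a profile can be inspected without unpacking it to disk first. A hedged sketch; the archive names are placeholders:

    # Hypothetical example; the archive paths are placeholders.
    tar = Inspec::TarProvider.new('profile.tar.gz')
    tar.files               # => entries inside the archive
    tar.read('inspec.yml')  # => contents of inspec.yml from the archive

    zip = Inspec::ZipProvider.new('profile.zip')
    zip.read('inspec.yml')
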
describe Inspec::RelativeFileProvider do
def fetcher
src_fetcher.expects(:files).returns(in_files).at_least_once
Inspec::RelativeFileProvider.new(src_fetcher)
end

let(:src_fetcher) { mock() }

IN_AND_OUT = {
[] => [],
%w{file} => %w{file},
# don't prefix just by filename
%w{file file_a} => %w{file file_a},
%w{path/file path/file_a} => %w{file file_a},
%w{path/to/file} => %w{file},
%w{/path/to/file} => %w{file},
%w{alice bob} => %w{alice bob},
# mixed paths
%w{x/a y/b} => %w{x/a y/b},
%w{/x/a /y/b} => %w{x/a y/b},
%w{z/x/a z/y/b} => %w{x/a y/b},
%w{/z/x/a /z/y/b} => %w{x/a y/b},
# mixed with relative path
%w{a path/to/b} => %w{a path/to/b},
%w{path/to/b a} => %w{path/to/b a},
%w{path/to/b path/a} => %w{to/b a},
%w{path/to/b path/a c} => %w{path/to/b path/a c},
# When the first element is the directory
%w{path/ path/to/b path/a} => %w{to/b a},
%w{path path/to/b path/a} => %w{to/b a},
# mixed with absolute paths
%w{/path/to/b /a} => %w{path/to/b a},
%w{/path/to/b /path/a} => %w{to/b a},
%w{/path/to/b /path/a /c} => %w{path/to/b path/a c},
# mixing absolute and relative paths
%w{path/a /path/b} => %w{path/a /path/b},
%w{/path/a path/b} => %w{/path/a path/b},
# extract folder structure buildup
%w{/a /a/b /a/b/c} => %w{c},
%w{/a /a/b /a/b/c/d/e} => %w{e},
# extract folder structure buildup (relative)
%w{a a/b a/b/c} => %w{c},
%w{a a/b a/b/c/d/e} => %w{e},
# extract folder structure buildup (relative)
%w{a/ a/b/ a/b/c} => %w{c},
%w{a/ a/b/ a/b/c/d/e} => %w{e},
# ignore pax_global_header, which are commonly seen in github tars and are not
# ignored by all tar streaming tools, its not extracted by GNU tar since 1.14
%w{/pax_global_header /a/b} => %w{b},
%w{pax_global_header a/b} => %w{b},
}.each do |ins, outs|
describe 'empty profile' do
let(:in_files) { ins }

it "turns #{ins} into #{outs}" do
fetcher.files.must_equal outs
end
end
end

end

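The IN_AND_OUT table pins down what RelativeFileProvider does: it wraps another provider, drops pax_global_header entries, and strips the longest common leading directory from the file list so profile contents are addressed relative to the profile root (bare directory entries such as path/ also disappear). A rough sketch of the common-prefix rule only, written for illustration and not taken from this PR's implementation; it does not reproduce every case in the table:

    # Illustrative sketch; not the implementation merged here.
    def strip_common_prefix(files)
      names = files.reject { |f| f.sub(%r{^/}, '') == 'pax_global_header' }
      return names if names.empty?

      # shrink the shortest name until every file starts with it
      prefix = names.min_by(&:length).dup
      prefix.chop! until names.all? { |f| f.start_with?(prefix) }

      # only strip whole directories, never part of a file name
      slash  = prefix.rindex('/')
      prefix = slash ? prefix[0..slash] : ''

      names.map { |f| f[prefix.length..-1] }.reject(&:empty?)
    end

    strip_common_prefix(%w{path/to/b path/a})      # => ["to/b", "a"]
    strip_common_prefix(%w{file file_a})           # => ["file", "file_a"]
    strip_common_prefix(%w{pax_global_header a/b}) # => ["b"]
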
@ -20,7 +20,7 @@ describe Inspec::Plugins::Resource do
random_name = (0...50).map { (65 + rand(26)).chr }.join
Class.new(base) do
name random_name
instance_eval &block
instance_eval(&block)
end
Inspec::Resource.registry[random_name]
end

@ -91,7 +91,7 @@ describe Inspec::ProfileContext do
it 'supports empty describe calls' do
load('describe').must_output ''
profile.rules.keys.length.must_equal 1
profile.rules.keys[0].must_match /^\(generated from \(eval\):1 [0-9a-f]+\)$/
profile.rules.keys[0].must_match(/^\(generated from \(eval\):1 [0-9a-f]+\)$/)
profile.rules.values[0].must_be_kind_of Inspec::Rule
end

@ -99,7 +99,7 @@ describe Inspec::ProfileContext do
load('describe true do; it { should_eq true }; end')
.must_output ''
profile.rules.keys.length.must_equal 1
profile.rules.keys[0].must_match /^\(generated from \(eval\):1 [0-9a-f]+\)$/
profile.rules.keys[0].must_match(/^\(generated from \(eval\):1 [0-9a-f]+\)$/)
profile.rules.values[0].must_be_kind_of Inspec::Rule
end

@ -108,7 +108,7 @@ describe Inspec::ProfileContext do
.must_output ''
profile.rules.keys.length.must_equal 3
[0, 1, 2].each do |i|
profile.rules.keys[i].must_match /^\(generated from \(eval\):2 [0-9a-f]+\)$/
profile.rules.keys[i].must_match(/^\(generated from \(eval\):2 [0-9a-f]+\)$/)
profile.rules.values[i].must_be_kind_of Inspec::Rule
end
end

@ -96,7 +96,6 @@ describe Inspec::Profile do
let(:profile_id) { 'legacy-empty-metadata' }

it 'prints loads of warnings' do
metadata_rb = "#{home}/mock/profiles/#{profile_id}/metadata.rb"
logger.expect :info, nil, ["Checking profile in #{home}/mock/profiles/#{profile_id}"]
logger.expect :error, nil, ["Missing profile name in metadata.rb"]
logger.expect :warn, nil, ['The use of `metadata.rb` is deprecated. Use `inspec.yml`.']

@ -13,8 +13,8 @@ describe SourceReaders::Flat do
end

describe 'with a flat file' do
let(:target) { Inspec::Fetcher.resolve(__FILE__) }
let(:res) { Inspec::SourceReader.resolve(target.relative_target) }
let(:target) { Inspec::FileProvider.for_path(__FILE__) }
let(:res) { Inspec::SourceReader.resolve(target.relative_provider) }

it 'resolves the target' do
_(res).must_be_kind_of reader

@ -35,8 +35,8 @@ describe SourceReaders::Flat do
end

describe 'with a flat folder' do
let(:target) { Inspec::Fetcher.resolve(File.dirname(__FILE__)) }
let(:res) { Inspec::SourceReader.resolve(target.relative_target) }
let(:target) { Inspec::FileProvider.for_path(File.dirname(__FILE__)) }
let(:res) { Inspec::SourceReader.resolve(target.relative_provider) }

it 'resolves the target' do
_(res).must_be_kind_of reader

@ -13,8 +13,8 @@ describe SourceReaders::InspecReader do
end

describe 'with a valid profile' do
let(:mock_file) { mock_file = MockLoader.profile_tgz('complete-profile') }
let(:target) { Inspec::Fetcher.resolve(mock_file) }
let(:mock_file) { MockLoader.profile_tgz('complete-profile') }
let(:target) { Inspec::FileProvider.for_path(mock_file) }
let(:res) { Inspec::SourceReader.resolve(target) }

it 'resolves the target to inspec' do

@ -27,12 +27,12 @@ describe SourceReaders::InspecReader do

it 'retrieves all files' do
_(res.tests.keys).must_equal %w{controls/filesystem_spec.rb}
_(res.tests.values[0]).must_match /^control 'test01' do$/
_(res.tests.values[0]).must_match(/^control 'test01' do$/)
end

it 'retrieves all libraries' do
_(res.libraries.keys).must_equal %w{libraries/testlib.rb}
_(res.libraries.values[0]).must_match /^# Library resource$/
_(res.libraries.values[0]).must_match(/^# Library resource$/)
end
end
end

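With the rework, source readers no longer receive a fetcher directly; they are resolved from a file provider, typically obtained through Inspec::FileProvider.for_path and narrowed with relative_provider, as the updated tests show. A brief usage sketch along those lines; the profile path is a placeholder:

    # Hypothetical example; 'examples/profile' is a placeholder path.
    provider = Inspec::FileProvider.for_path('examples/profile')
    reader   = Inspec::SourceReader.resolve(provider.relative_provider)

    reader.tests      # control files, e.g. {"controls/filesystem_spec.rb" => "..."}
    reader.libraries  # library files, e.g. {"libraries/testlib.rb" => "..."}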