Mirror of https://github.com/inspec/inspec, synced 2024-11-10 07:04:15 +00:00
* CLEAN-UP: Remove unused rake tasks; the missing, kitchen, and version tasks are not used
* CLEAN-UP: Remove the functional rake test; test:default covers both unit and functional tests (default was introduced to cover both functional and unit in PR #4102)
* CLEAN-UP: Remove unused docs-related tasks
* CLEAN-UP: Remove tasks/* from the expeditor config
* REVERT: Undo deletion of the functional test
* RECOVER: Retain the docs:cli task
* UNDO: Revert changes to the config
* CHORE: Add a reference to this PR in a note for future use

---------

Signed-off-by: Sonu Saha <sonu.saha@progress.com>
Co-authored-by: Sonu Saha <98935583+ahasunos@users.noreply.github.com>
Parent: 4cdccae096
Commit: eda7c8f5f8
5 changed files with 6 additions and 407 deletions
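For readers unfamiliar with the combined test task the commit message refers to, a minimal sketch of that kind of wiring is shown below. It is an illustration only, not the actual inspec Rakefile; the test-file globs are assumptions.

# Minimal sketch of a test:default task that covers both unit and functional
# tests, as described in the commit message above. Illustrative only; the
# globs are assumptions, not the actual inspec test layout.
require "rake/testtask"

namespace :test do
  Rake::TestTask.new(:unit) do |t|
    t.libs << "test"
    t.test_files = Dir["test/unit/**/*_test.rb"]
  end

  Rake::TestTask.new(:functional) do |t|
    t.libs << "test"
    t.test_files = Dir["test/functional/**/*_test.rb"]
  end

  # test:default simply depends on both suites.
  task default: [:unit, :functional]
end

task default: ["test:default"]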
Rakefile (101 changed lines)

@@ -4,17 +4,11 @@ require "bundler"
 require "bundler/gem_helper"
 require "rake/testtask"
 require "train"
-require_relative "tasks/spdx"
 require "fileutils"

 Bundler::GemHelper.install_tasks name: "inspec-core"
 Bundler::GemHelper.install_tasks name: "inspec"
-
-def prompt(message)
-  print(message)
-  STDIN.gets.chomp
-end

 # The docs tasks rely on ruby-progressbar. If we can't load it, then don't
 # load the docs tasks. This is necessary to allow this Rakefile to work
 # when the "tests" gem group in the Gemfile has been excluded, such as
@@ -26,13 +20,6 @@ rescue LoadError
   puts "docs tasks are unavailable because the ruby-progressbar gem is not available."
 end

-begin
-  require "git"
-  require_relative "tasks/contrib"
-rescue LoadError
-  puts "contrib tasks are unavailable because the git gem is not available."
-end
-
 task :install do
   inspec_bin_path = ::File.join(::File.dirname(__FILE__), "inspec-bin")
   Dir.chdir(inspec_bin_path)
@@ -74,18 +61,6 @@ namespace :test do
     puts Dir[*GLOBS].sort
   end

-  task :missing do
-    missing = Dir["test/**/*"] - Dir[*GLOBS]
-
-    missing.reject! { |f| ! File.file? f }
-    missing.reject! { |f| f =~ %r{test/(integration|cookbooks)} }
-    missing.reject! { |f| f =~ %r{test/fixtures} }
-    missing.reject! { |f| f =~ /test.*helper/ }
-    missing.reject! { |f| f =~ %r{test/docker} }
-
-    puts missing.sort
-  end
-
   # rubocop:disable Style/BlockDelimiters,Layout/ExtraSpacing,Lint/AssignmentInCondition

   def n_threads_run(n_workers, jobs)
@@ -255,80 +230,6 @@ namespace :test do
   end
   # Inject a prerequisite task
   task unit: [:accept_license]
-
-  task :kitchen, [:os] do |task, args|
-    concurrency = ENV["CONCURRENCY"] || 1
-    os = args[:os] || ENV["OS"] || ""
-    ENV["DOCKER"] = "true" if ENV["docker"].nil?
-    sh("bundle exec kitchen test -c #{concurrency} #{os}")
-  end
-  # Inject a prerequisite task
-  task kitchen: [:accept_license]
-
-  task :ssh, [:target] do |_t, args|
-    tests_path = File.join(File.dirname(__FILE__), "test", "integration", "test", "integration", "default")
-    key_files = ENV["key_files"] || File.join(ENV["HOME"], ".ssh", "id_rsa")
-
-    sh_cmd = "bin/inspec exec #{tests_path}/"
-    sh_cmd += ENV["test"] ? "#{ENV["test"]}_spec.rb" : "*"
-    sh_cmd += " --sudo" unless args[:target].split("@")[0] == "root"
-    sh_cmd += " -t ssh://#{args[:target]}"
-    sh_cmd += " --key_files=#{key_files}"
-    sh_cmd += " --format=#{ENV["format"]}" if ENV["format"]
-
-    sh("sh", "-c", sh_cmd)
-  end
 end

-# Print the current version of this gem or update it.
-#
-# @param [Type] target the new version you want to set, or nil if you only want to show
-def inspec_version(target = nil)
-  path = "lib/inspec/version.rb"
-  require_relative path.sub(/.rb$/, "")
-
-  nu_version = target.nil? ? "" : " -> #{target}"
-  puts "Inspec: #{Inspec::VERSION}#{nu_version}"
-
-  unless target.nil?
-    raw = File.read(path)
-    nu = raw.sub(/VERSION.*/, "VERSION = '#{target}'.freeze")
-    File.write(path, nu)
-    load(path)
-  end
-end
-
-# Check if a command is available
-#
-# @param [Type] x the command you are interested in
-# @param [Type] msg the message to display if the command is missing
-def require_command(x, msg = nil)
-  return if system("command -v #{x} || exit 1")
-
-  msg ||= "Please install it first!"
-  puts "\033[31;1mCan't find command #{x.inspect}. #{msg}\033[0m"
-  exit 1
-end
-
-# Check if a required environment variable has been set
-#
-# @param [String] x the variable you are interested in
-# @param [String] msg the message you want to display if the variable is missing
-def require_env(x, msg = nil)
-  exists = `env | grep "^#{x}="`
-  return unless exists.empty?
-
-  puts "\033[31;1mCan't find environment variable #{x.inspect}. #{msg}\033[0m"
-  exit 1
-end
-
-# Check the requirements for running an update of this repository.
-def check_update_requirements
-  require_command "git"
-end
-
-# Show the current version of this gem.
-desc "Show the version of this gem"
-task :version do
-  inspec_version
-end
+# NOTE: Rakefile clean-up was done in PR #6367 (https://github.com/inspec/inspec/pull/6367)
tasks/contrib.rb (deleted, 70 lines)

@@ -1,70 +0,0 @@
-# Rake tasks to assist in coordinating operations with separately
-# maintained projects.
-
-require "fileutils"
-require "yaml"
-require "git"
-
-CONTRIB_DIR = File.expand_path(File.join(__dir__, "..", "contrib")).freeze
-RESOURCE_DOC_DIR = File.expand_path(File.join(__dir__, "..", "docs", "resources")).freeze
-
-namespace :contrib do # rubocop: disable Metrics/BlockLength
-  config = nil
-
-  task :read_config do
-    config = YAML.load(File.read(File.join(CONTRIB_DIR, "contrib.yaml")))
-  end
-
-  task fetch_resource_packs: [:read_config] do
-    puts "Fetching contrib resource packs..."
-    config["resource_packs"].each do |name, info|
-      clone_path = File.join(CONTRIB_DIR, name)
-      git = nil
-      verb = nil
-      if File.exist?(clone_path)
-        git = Git.open(clone_path)
-        git.fetch
-        verb = "fetched"
-      else
-        git = Git.clone(info["git_repo"], name, path: CONTRIB_DIR)
-        verb = "cloned"
-      end
-
-      sha = git.log[0].sha[0..6]
-      branch = git.current_branch
-      puts " #{name}: #{verb}, now at #{sha}" + (branch ? " (#{branch})" : "")
-    end
-  end
-
-  desc "Copy docs from resource packs into the core for doc building"
-  task copy_docs: [:fetch_resource_packs] do
-    config["resource_packs"].each do |name, info|
-      doc_sub_dir = info["doc_sub_dir"] || "docs/resources"
-      doc_src_path = File.join(CONTRIB_DIR, name, doc_sub_dir)
-      dest_path = RESOURCE_DOC_DIR
-      Dir.chdir(doc_src_path) do
-        Dir["*.md*"].sort.each do |file|
-          # TODO: check file for Availability section in markdown?
-          FileUtils.cp(file, dest_path)
-        end
-      end
-    end
-  end
-
-  desc "Cleanup docs from resource packs in core"
-  task cleanup_docs: [:read_config] do
-    # TODO: I don't see the point of this cleanup phase
-    config["resource_packs"].each do |name, info|
-      doc_sub_dir = info["doc_sub_dir"] || "docs/resources"
-      doc_src_path = File.join(CONTRIB_DIR, name, doc_sub_dir)
-      dest_path = RESOURCE_DOC_DIR
-      Dir.chdir(doc_src_path) do
-        Dir["*.md*"].sort.each do |file|
-          cruft = File.join(dest_path, file)
-          FileUtils.rm_f(cruft)
-        end
-      end
-    end
-  end
-end
-# rubocop enable: Metrics/BlockLength
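For context, the deleted contrib tasks (and the grouping code removed from tasks/docs.rb below) read contrib/contrib.yaml. A rough sketch of the expected shape is shown here; the keys are taken from the code above, while the pack name and repository URL are made-up placeholders.

# Rough shape of contrib/contrib.yaml as consumed by the deleted tasks.
# Keys come from the code above; the pack name and URL are placeholders.
require "yaml"

config = YAML.load(<<~YAML)
  resource_packs:
    inspec-example-pack:
      git_repo: https://github.com/example/inspec-example-pack.git
      doc_sub_dir: docs/resources       # optional, this is the default
      doc_group_title: Example          # used for grouping in tasks/docs.rb
      resource_file_regex: "^example_"  # used for grouping in tasks/docs.rb
YAML

config["resource_packs"].each do |name, info|
  puts "#{name}: #{info["git_repo"]} (docs in #{info["doc_sub_dir"] || "docs/resources"})"
end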
tasks/docs.rb (173 changed lines)

@@ -17,9 +17,7 @@
 require "erb"
 require "fileutils"
 require "yaml"
-require_relative "./shared"
 require "git"
-require_relative "./contrib"

 DOCS_DIR = "../docs".freeze

@@ -121,112 +119,6 @@ class RST
   end
 end
-
-class ResourceDocs
-  def initialize(root)
-    @paths = {} # cache of paths
-    @root = root # relative root path for all docs
-  end
-
-  def render(path)
-    @paths[path] ||= render_path(path)
-  end
-
-  def partial(x)
-    render(x + ".md.erb")
-  end
-
-  def overview_page(resource_doc_files) # rubocop:disable Metrics/AbcSize, Metrics/MethodLength
-    renderer = Markdown
-    markdown = renderer.meta(title: "InSpec Resources Reference")
-    markdown << renderer.h1("InSpec Resources Reference")
-    markdown << renderer.p("The following list of InSpec resources are available.")
-
-    contrib_config = YAML.load(File.read(File.join(CONTRIB_DIR, "contrib.yaml")))
-
-    # TODO: clean this up using Hash.new and friends
-
-    # Build a list of resources keyed on the group they are a part of.
-    # We'll determine the group using regexes.
-    group_regexes = [
-      # These are hardcoded present in the main repo. If they become resource
-      # packs, this should change.
-      { group_name: "AWS", regex: /^aws_/ },
-      { group_name: "Azure", regex: /^azure(rm)?_/ },
-    ]
-    # Also pick up regexes and group names from contrib resource packs.
-    contrib_config["resource_packs"].values.each do |project_info|
-      group_regexes << { group_name: project_info["doc_group_title"], regex: Regexp.new(project_info["resource_file_regex"]) }
-    end
-
-    # OK, apply the regexes we have to the resource doc file list we were passed.
-    # doc_file looks like /resources/foo.md.erb - trim off directory and file extension
-    trimmed_doc_files = resource_doc_files.dup.map { |file| File.basename(file).sub(/\.md(\.erb)?$/, "") }
-    resources_by_group = Hash[group_regexes.map { |info| [info[:group_name], []] }] # Initialize each group to an empty array
-    resources_by_group["OS"] = []
-    trimmed_doc_files.each do |doc_file|
-      matched = false
-      group_regexes.each do |group_info|
-        next if matched
-
-        if doc_file =~ group_info[:regex]
-          resources_by_group[group_info[:group_name]] << doc_file
-          matched = true
-        end
-      end
-      # Any resources that don't match a regex are assumed to be 'os' resources.
-      resources_by_group["OS"] << doc_file unless matched
-    end
-
-    # Now transform the resource lists into HTML
-    markdown_resource_links_by_group = {}
-    resources_by_group.each do |group_name, resource_list|
-      markdown_resource_links_by_group[group_name] = resource_list.map do |resource_name|
-        renderer.li(renderer.a(resource_name.gsub("_", '\\_'), "resources/" + resource_name + ".html"))
-      end.join("")
-    end
-
-    # Remove any groups that have no resource docs.
-    resources_by_group.reject! { |_, resource_list| resource_list.empty? }
-
-    # Generate the big buttons that jump to the section of the page for each group.
-    markdown << '<div class="row columns align">'
-    # "Sorted, except OS is always in first place"
-    ordered_group_names = ["OS"] + resources_by_group.keys.sort.reject { |group_name| group_name == "OS" }
-    button_template = '<a class="resources-button button btn-lg btn-purple-o shadow margin-right-xs" href="%s">%s</a>'
-    ordered_group_names.each do |group_name|
-      markdown << format(button_template, "#" + (group_name + "-resources").downcase, group_name)
-      markdown << "\n"
-    end
-    markdown << "</div>"
-
-    # Generate the actual long lists of links
-    group_section_header_template = '
-    <div class="brdr-left margin-top-sm margin-under-xs">
-      <h3 class="margin-left-xs"><a id="%s" class="a-purple"><h3 class="a-purple">%s</h3></a></h3>
-    </div>
-    '
-    ordered_group_names.each do |group_name|
-      markdown << format(group_section_header_template, (group_name + "-resources").downcase, group_name)
-      markdown << renderer.ul(markdown_resource_links_by_group[group_name])
-    end
-
-    markdown
-  end
-
-  private
-
-  def namify(n)
-    n.capitalize.gsub(/\baws\b/i, "AWS")
-  end
-
-  def render_path(path)
-    abs = File.join(@root, path)
-    raise "Can't find file to render in #{abs}" unless File.file?(abs)
-
-    ERB.new(File.read(abs)).result(binding)
-  end
-end

 namespace :docs do # rubocop:disable Metrics/BlockLength
   desc "Create cli docs"
   task :cli do
@@ -278,69 +170,14 @@ namespace :docs do # rubocop:disable Metrics/BlockLength
       res << "\n\n" if f == RST
     end

+    # TODO: The directory is broken, so we need to fix it
+    # Use the docs-chef-io directory to fix the cli doc build
+    # doc_directory = File.join(pwd, "docs-chef-io/content/inspec")
+    # dst = File.join(doc_directory , "cli#{f.suffix}")
     dst = File.join(DOCS_DIR, "cli#{f.suffix}")
     File.write(dst, res)
     puts "Documentation generated in #{dst.inspect}"
   end
-
-  desc "Create resources docs"
-  # This task injects the contrib:cleanup_docs as a followup
-  # to the actual doc building.
-  task resources: %i{resources_actual contrib:cleanup_docs}
-
-  task resources_actual: %i{clean contrib:copy_docs} do
-    src = DOCS_DIR
-    dst = File.join("source", "docs", "reference", "resources")
-    mkdir_p(dst)
-
-    docs = ResourceDocs.new(src)
-    resources =
-      Dir.chdir(src) { Dir["resources/*.md{.erb,}"] }.sort
-    puts "Found #{resources.length} resource docs"
-    puts "Rendering docs to #{dst}/"
-
-    # Render all resources
-    seen = {}
-    resources.reverse_each do |file| # bias towards .erb files?
-      dst_name = File.basename(file).sub(/\.md(\.erb)?$/, ".html.md")
-
-      next if seen[dst_name]
-
-      seen[dst_name] = true
-      res = docs.render(file)
-      File.write(File.join(dst, dst_name), res)
-    end
-
-    # Create a resource summary markdown doc
-    dst = File.join(src, "resources.md")
-    puts "Create #{dst}"
-    File.write(dst, docs.overview_page(resources))
-  end
-
-  desc "Clean all rendered docs from www/"
-  task :clean do
-    dst = File.join("source", "docs", "reference")
-    rm_rf(dst)
-    mkdir_p(dst)
-  end
-
-  desc "Copy fixed doc files"
-  task copy: %i{clean resources} do
-    src = DOCS_DIR
-    dst = File.join("source", "docs", "reference")
-    files = Dir[File.join(src, "*.md")]
-
-    files.each do |path|
-      name = File.basename(path).sub(/\.md$/, ".html.md")
-      cp(path, File.join(dst, name))
-    end
-  end
 end

-desc "Create all docs in docs/ from source code"
-task docs: %w{docs:cli docs:copy docs:resources} do
-  # TODO: remove:
-  Verify.file(File.join("source", "docs", "reference", "README.html.md"))
-  Verify.file(File.join("source", "docs", "reference", "cli.html.md"))
-  Verify.file(File.join("source", "docs", "reference", "resources.html.md"))
-end
+# NOTE: Many of the docs tasks were removed in PR #6367 (https://github.com/inspec/inspec/pull/6367)
tasks/shared.rb (deleted, 37 lines)

@@ -1,37 +0,0 @@
-# Copyright:: Copyright (c) 2015 Chef Software, Inc.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-module Log
-  def self.section(msg)
-    puts "\n\033[36;1m====> #{msg}\033[0m"
-  end
-
-  def self.info(msg)
-    puts "\033[32;1m----> #{msg}\033[0m"
-  end
-end
-
-module Verify
-  def self.file(path)
-    return print("\033[32m.\033[0m") if File.file?(path)
-
-    raise "Failed to build this step. Looking for file in #{path} but it doesn't exist."
-  end
-
-  def self.ok
-    puts "\n\033[32mAll build checks passed.\033[0m"
-  end
-end
tasks/spdx.rb (deleted, 32 lines)

@@ -1,32 +0,0 @@
-# Copyright:: Copyright (c) 2017 Chef Software, Inc.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-UTILS_DIR = File.expand_path(File.join(__dir__, "..", "lib/utils")).freeze
-
-desc "Updates the list of the spdx valid licenses"
-task :spdx do
-  # Kudos to Foodcritic for providing that idea
-  # @see https://github.com/Foodcritic/foodcritic/pull/530/files
-  # list of valid SPDX.org license strings. To build an array run this:
-  require "json"
-  require "net/http"
-  json_data = JSON.parse(Net::HTTP.get(URI("https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json")))
-  licenses = json_data["licenses"].map { |l| l["licenseId"] }
-  # "All Rights Reserved" is non-standard extra value to cover proprietary license
-  licenses.push("All Rights Reserved")
-  licenses.sort!
-  File.write(File.join(UTILS_DIR, "spdx.txt"), licenses.join("\n"))
-end
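The deleted task above wrote the SPDX license IDs, one per line, to lib/utils/spdx.txt. Checking a license string against such a list amounts to roughly the sketch below; it is illustrative only and not necessarily how InSpec itself consumes the file.

# Illustrative check of a license string against the generated spdx.txt.
# Not necessarily how InSpec itself consumes the file.
spdx_list = File.readlines("lib/utils/spdx.txt", chomp: true)

puts spdx_list.include?("Apache-2.0")          # a valid SPDX identifier
puts spdx_list.include?("All Rights Reserved") # the extra value pushed by the task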