From 8e8b0553a2ff3a1d1319dff088fc9ed4c430aeec Mon Sep 17 00:00:00 2001
From: "McAfee, Patrick"
Date: Wed, 20 Jan 2016 12:13:48 -0500
Subject: [PATCH] Started clearer messages and updated rubocop

---
 .gitignore | 3 +-
 .simplecov | 4 +-
 jenkins_pipeline_builder.gemspec | 2 +-
 lib/jenkins_pipeline_builder/cli/describe.rb | 10 +--
 lib/jenkins_pipeline_builder/cli/helper.rb | 7 +-
 lib/jenkins_pipeline_builder/compiler.rb | 78 ++++++++-----------
 lib/jenkins_pipeline_builder/extension_set.rb | 2 +-
 lib/jenkins_pipeline_builder/extensions.rb | 2 +-
 .../extensions/builders.rb | 8 +-
 .../extensions/helpers/extension_helper.rb | 10 +--
 .../extensions/job_attributes.rb | 8 +-
 lib/jenkins_pipeline_builder/generator.rb | 31 ++++----
 lib/jenkins_pipeline_builder/job.rb | 2 +-
 .../job_collection.rb | 26 +++----
 lib/jenkins_pipeline_builder/project.rb | 10 +--
 lib/jenkins_pipeline_builder/version.rb | 2 +-
 lib/jenkins_pipeline_builder/view.rb | 32 ++++----
 .../jenkins_pipeline_builder/compiler_spec.rb | 21 ++---
 .../extensions/registered_spec.rb | 10 +--
 .../test_bad_json_files/project.json | 15 ++++
 .../test_bad_json_files/view.json | 10 +++
 .../test_bad_yaml_files/project.yaml | 8 ++
 .../test_bad_yaml_files/view.yaml | 5 ++
 .../generator_spec.rb | 26 ++++++-
 24 files changed, 179 insertions(+), 153 deletions(-)
 create mode 100644 spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/project.json
 create mode 100644 spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/view.json
 create mode 100644 spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/project.yaml
 create mode 100644 spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/view.yaml

diff --git a/.gitignore b/.gitignore
index e880358..ec79633 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@ out
 Gemfile.lock
 *.gem
 .DS_Store
-.idea
\ No newline at end of file
+.idea
+.byebug_history
diff --git a/.simplecov b/.simplecov
index 7bc45a0..7e13e35 100644
--- a/.simplecov
+++ b/.simplecov
@@ -3,10 +3,10 @@ SimpleCov.profiles.define 'spec' do
   add_group 'jenkins_pipeline_builder', '/lib/'
   add_filter 'spec'
   coverage_dir 'out/coverage'
-  formatter SimpleCov::Formatter::MultiFormatter[
+  formatter SimpleCov::Formatter::MultiFormatter.new([
     SimpleCov::Formatter::Console,
     SimpleCov::Formatter::RcovFormatter,
-  ]
+  ])
 end
 
 class SimpleCov::Formatter::Console
diff --git a/jenkins_pipeline_builder.gemspec b/jenkins_pipeline_builder.gemspec
index c900bdf..266947a 100644
--- a/jenkins_pipeline_builder.gemspec
+++ b/jenkins_pipeline_builder.gemspec
@@ -33,7 +33,7 @@ automating Job & Pipeline creation from the YAML files checked-in with your appl
   spec.add_development_dependency 'bump'
   spec.add_development_dependency 'json'
   spec.add_development_dependency 'gem-release'
-  spec.add_development_dependency 'pry'
+  spec.add_development_dependency 'byebug'
   spec.add_development_dependency 'simplecov'
   spec.add_development_dependency 'simplecov-rcov'
   spec.add_development_dependency 'kwalify'
diff --git a/lib/jenkins_pipeline_builder/cli/describe.rb b/lib/jenkins_pipeline_builder/cli/describe.rb
index fa1be9a..8b0e3a8 100644
--- a/lib/jenkins_pipeline_builder/cli/describe.rb
+++ b/lib/jenkins_pipeline_builder/cli/describe.rb
@@ -28,11 +28,11 @@ module CLI
       klass_name = entry.to_s.classify
       # rubocop:disable Style/AccessModifierIndentation
       klass = Class.new(Thor) do
-        if entry == :job_attributes
-          extensions = JenkinsPipelineBuilder.registry.registry[:job].select { |_, x| x.is_a? ExtensionSet }
-        else
-          extensions = JenkinsPipelineBuilder.registry.registry[:job][entry]
-        end
+        extensions = if entry == :job_attributes
+                       JenkinsPipelineBuilder.registry.registry[:job].select { |_, x| x.is_a? ExtensionSet }
+                     else
+                       JenkinsPipelineBuilder.registry.registry[:job][entry]
+                     end
 
         extensions.each do |key, extset|
           # TODO: don't just take the first
diff --git a/lib/jenkins_pipeline_builder/cli/helper.rb b/lib/jenkins_pipeline_builder/cli/helper.rb
index 8dab7b7..d563649 100644
--- a/lib/jenkins_pipeline_builder/cli/helper.rb
+++ b/lib/jenkins_pipeline_builder/cli/helper.rb
@@ -71,11 +71,8 @@ def self.valid_cli_creds?(options)
       end
 
       def self.process_creds_file(file)
-        if file.end_with? 'json'
-          return JSON.parse(IO.read(File.expand_path(file)))
-        else
-          return YAML.load_file(File.expand_path(file))
-        end
+        return JSON.parse(IO.read(File.expand_path(file))) if file.end_with? 'json'
+        YAML.load_file(File.expand_path(file))
       end
 
       def self.process_cli_creds(options)
diff --git a/lib/jenkins_pipeline_builder/compiler.rb b/lib/jenkins_pipeline_builder/compiler.rb
index e743c1c..f555b03 100644
--- a/lib/jenkins_pipeline_builder/compiler.rb
+++ b/lib/jenkins_pipeline_builder/compiler.rb
@@ -44,9 +44,15 @@ def get_settings_bag(item_bag, settings_bag = {})
       my_settings_bag.merge(bag)
     end
 
+    def compile_job(item, settings = {})
+      new_item = compile(item, settings)
+      [true, new_item]
+    rescue => e
+      return [false, [e.message]]
+    end
+
     def compile(item, settings = {})
-      success, item = handle_enable(item, settings)
-      return false, item unless success
+      item = handle_enable(item, settings)
 
       case item
       when String
@@ -56,16 +62,16 @@ def compile(item, settings = {})
       when Array
         return compile_array item, settings
       end
-      [true, item]
+      item
     end
 
     def handle_enable(item, settings)
-      return true, item unless item.is_a? Hash
+      return item unless item.is_a? Hash
       if enable_block_present? item
         enabled_switch = resolve_value(item[:enabled], settings)
-        return [true, {}] if enabled_switch == 'false'
+        return {} if enabled_switch == 'false'
         if enabled_switch != 'true'
-          return [false, { 'value error' => "Invalid value for #{item[:enabled]}: #{enabled_switch}" }]
+          fail "Invalid value for #{item[:enabled]}: #{enabled_switch}"
         end
         if item[:parameters].is_a? Hash
           item = item.merge item[:parameters]
@@ -75,7 +81,7 @@ def handle_enable(item, settings)
           item = item[:parameters]
        end
      end
-      [true, item]
+      item
     end
 
     private
@@ -85,64 +91,44 @@ def enable_block_present?(item)
     end
 
     def compile_string(item, settings)
-      errors = {}
-      new_value = resolve_value(item, settings)
-      errors[item] = "Failed to resolve #{item}" if new_value.nil?
-      return false, errors unless errors.empty?
-      [true, new_value]
+      resolve_value(item, settings)
+    rescue => e
+      raise "Failed to resolve #{item} because: #{e.message}"
     end
 
     def compile_array(array, settings)
-      errors = {}
       result = []
       array.each do |value|
-        success, payload = compile_array_item value, settings, array
-        errors[value] = payload unless success
+        payload = compile_array_item value, settings, array
         result << payload
       end
-      return false, errors unless errors.empty?
-      [true, result]
+      result
     end
 
     def compile_array_item(item, settings, array)
-      success, payload = compile(item, settings)
-      return false, "found a nil value when processing following array:\n #{array.inspect}" if item.nil?
-      return false, payload unless success
-      return false, "Failed to resolve:\n===>item #{item}\n\n===>of list: #{array.inspect}" if payload.nil?
-      [true, payload]
+      fail "Found a nil value when processing following array:\n #{array.inspect}" if item.nil?
+      payload = compile(item, settings)
+      fail "Failed to resolve:\n===>item #{item}\n\n===>of list: #{array.inspect}" if payload.nil?
+      payload
     end
 
-    def compile_item(key, value, errors, settings)
+    def compile_item(key, value, settings)
       if value.nil?
-        errors[key] = "key: #{key} has a nil value, this is often a yaml syntax error. Skipping children and siblings"
-        return false, errors[key]
-      end
-      success, payload = compile(value, settings)
-      unless success
-        errors.merge!(payload)
-        return false, payload
+        fail "key: #{key} has a nil value, this is often a yaml syntax error. Skipping children and siblings"
       end
-      if payload.nil?
-        errors[key] = "Failed to resolve:\n===>key: #{key}\n\n===>value: #{value}\n\n===>of: #{item}"
-        return false, errors[key]
-      end
-      [true, payload]
+      payload = compile(value, settings)
+      fail "Failed to resolve:\n===>key: #{key}\n\n===>value: #{value} payload" if payload.nil?
+      payload
     end
 
     def compile_hash(item, settings)
-      success, item = handle_enable(item, settings)
-      return false, item unless success
-
-      errors = {}
+      item = handle_enable(item, settings)
       result = {}
-
       item.each do |key, value|
-        success, payload = compile_item(key, value, errors, settings)
-        next unless success
+        payload = compile_item(key, value, settings)
         result[key] = payload unless payload == {}
       end
-      return false, errors unless errors.empty?
-      [true, result]
+      result
     end
 
     def resolve_value(value, settings)
@@ -155,6 +141,7 @@ def resolve_value(value, settings)
       end
 
       settings = settings.with_indifferent_access
+      # TODO: this is actually a shallow copy and should be fixed
       value_s = value.to_s.clone
       correct_job_names! value_s
       # Then we look for normal values to replace
@@ -165,7 +152,8 @@ def resolve_value(value, settings)
       vars = value_s.scan(/{{([^{}@]+)}}/).flatten
       vars.select! do |var|
         var_val = settings[var]
-        value_s.gsub!("{{#{var}}}", var_val.to_s) unless var_val.nil?
+        fail "Could not find defined substitution variable: #{var}" if var_val.nil?
+        value_s.gsub!("{{#{var}}}", var_val.to_s)
         var_val.nil?
       end
       return nil if vars.count != 0
diff --git a/lib/jenkins_pipeline_builder/extension_set.rb b/lib/jenkins_pipeline_builder/extension_set.rb
index 5cd595e..bec42b6 100644
--- a/lib/jenkins_pipeline_builder/extension_set.rb
+++ b/lib/jenkins_pipeline_builder/extension_set.rb
@@ -7,7 +7,7 @@ class ExtensionSet
     :description,
     :announced,
     :type
-  ]
+  ].freeze
   SET_METHODS.each do |method_name|
     define_method method_name do |value = nil|
       return settings[method_name] if value.nil?
diff --git a/lib/jenkins_pipeline_builder/extensions.rb b/lib/jenkins_pipeline_builder/extensions.rb
index 399ee87..deef48d 100644
--- a/lib/jenkins_pipeline_builder/extensions.rb
+++ b/lib/jenkins_pipeline_builder/extensions.rb
@@ -35,7 +35,7 @@ class Extension
     after: false,
     xml: false,
     parameters: []
-  }
+  }.freeze
   EXT_METHODS.keys.each do |method_name|
     define_method method_name do |value = nil|
       return instance_variable_get("@#{method_name}") if value.nil?
diff --git a/lib/jenkins_pipeline_builder/extensions/builders.rb b/lib/jenkins_pipeline_builder/extensions/builders.rb
index f564a76..9076aa9 100644
--- a/lib/jenkins_pipeline_builder/extensions/builders.rb
+++ b/lib/jenkins_pipeline_builder/extensions/builders.rb
@@ -282,14 +282,10 @@
       else
         send('selector', 'class' => 'hudson.plugins.copyartifact.StatusBuildSelector')
       end
-      if params[:fingerprint].nil?
+      if params[:fingerprint].nil? || params[:fingerprint].to_s == 'true'
         doNotFingerprintArtifacts false
       else
-        if params[:fingerprint].to_s == 'true'
-          doNotFingerprintArtifacts false
-        else
-          doNotFingerprintArtifacts true
-        end
+        doNotFingerprintArtifacts true
       end
       flatten true if params[:flatten]
       optional true if params[:optional]
diff --git a/lib/jenkins_pipeline_builder/extensions/helpers/extension_helper.rb b/lib/jenkins_pipeline_builder/extensions/helpers/extension_helper.rb
index b135485..a7eb37f 100644
--- a/lib/jenkins_pipeline_builder/extensions/helpers/extension_helper.rb
+++ b/lib/jenkins_pipeline_builder/extensions/helpers/extension_helper.rb
@@ -6,11 +6,11 @@ def initialize(params, builder, defaults = {})
     # That will allow for defaults to be pulled out of the extension and it
     # will also let better enable overriding of those values that do not have
     # an option to do so currently.
-    if params.is_a? Hash
-      @params = defaults.merge params
-    else
-      @params = params
-    end
+    @params = if params.is_a? Hash
+                defaults.merge params
+              else
+                params
+              end
     @builder = builder
     super @params
   end
diff --git a/lib/jenkins_pipeline_builder/extensions/job_attributes.rb b/lib/jenkins_pipeline_builder/extensions/job_attributes.rb
index 45ab416..536877c 100644
--- a/lib/jenkins_pipeline_builder/extensions/job_attributes.rb
+++ b/lib/jenkins_pipeline_builder/extensions/job_attributes.rb
@@ -32,7 +32,7 @@
   end
 
   xml path: '//project' do |description|
-    description "#{description}"
+    description description.to_s
   end
 end
 
@@ -44,7 +44,7 @@
   announced false
 
   xml path: '//project' do |jdk|
-    jdk "#{jdk}"
+    jdk jdk.to_s
   end
 end
 
@@ -60,7 +60,7 @@
   end
 
   xml path: '//project' do |disabled|
-    disabled "#{disabled}"
+    disabled disabled.to_s
   end
 end
 
@@ -257,7 +257,7 @@
     send('hudson.model.ParametersDefinitionProperty') do
       parameterDefinitions do
         params.each do |param|
-          send(params.param_type param) do
+          send(params.param_type(param)) do
             name param[:name]
             description param[:description]
             defaultValue param[:default]
diff --git a/lib/jenkins_pipeline_builder/generator.rb b/lib/jenkins_pipeline_builder/generator.rb
index 85768c5..7f138dc 100644
--- a/lib/jenkins_pipeline_builder/generator.rb
+++ b/lib/jenkins_pipeline_builder/generator.rb
@@ -90,7 +90,7 @@ def resolve_job_by_name(name, settings = {})
       job_value = job[:value]
       logger.debug "Compiling job #{name}"
       compiler = JenkinsPipelineBuilder::Compiler.new self
-      success, payload = compiler.compile(job_value, settings)
+      success, payload = compiler.compile_job(job_value, settings)
       [success, payload]
     end
 
@@ -115,11 +115,11 @@ def load_job_collection(path)
     end
 
     def publish(project_name)
-      if job_collection.projects.any?
-        errors = publish_project(project_name)
-      else
-        errors = publish_jobs(job_collection.standalone_jobs)
-      end
+      errors = if job_collection.projects.any?
+                 publish_project(project_name)
+               else
+                 publish_jobs(job_collection.standalone_jobs)
+               end
       print_compile_errors errors
       errors
     end
@@ -142,12 +142,9 @@ def print_compile_errors(errors)
     end
 
     def print_project_errors(errors)
-      errors.each do |k, v|
-        puts "Encountered errors processing: #{k}:"
-        v.each do |key, error|
-          puts "  key: #{key} had the following error:"
-          puts "  #{error.inspect}"
-        end
+      errors.each do |error|
+        puts 'Encountered errors processing:'
+        puts error.inspect
       end
     end
 
@@ -210,12 +207,10 @@ def create_views(views)
     end
 
     def create_jobs_and_views(project)
       success, payload = resolve_project(project)
-      if success
-        logger.info 'successfully resolved project'
-        compiled_project = payload
-      else
-        return { project_name: 'Failed to resolve' }
-      end
+      return { project_name: 'Failed to resolve' } unless success
+
+      logger.info 'successfully resolved project'
+      compiled_project = payload
       errors = publish_jobs(compiled_project[:value][:jobs]) if compiled_project[:value][:jobs]
       return errors unless compiled_project[:value][:views]
diff --git a/lib/jenkins_pipeline_builder/job.rb b/lib/jenkins_pipeline_builder/job.rb
index 57a53b1..f7d5a2f 100644
--- a/lib/jenkins_pipeline_builder/job.rb
+++ b/lib/jenkins_pipeline_builder/job.rb
@@ -59,7 +59,7 @@ def job_methods
 
   def local_output(xml)
     logger.info "Will create job #{job}"
-    logger.info "#{xml}" if @debug
+    logger.info xml.to_s if @debug
     FileUtils.mkdir_p(out_dir) unless File.exist?(out_dir)
     File.open("#{out_dir}/#{name}.xml", 'w') { |f| f.write xml }
     [true, nil]
diff --git a/lib/jenkins_pipeline_builder/job_collection.rb b/lib/jenkins_pipeline_builder/job_collection.rb
index 977613e..f076914 100644
--- a/lib/jenkins_pipeline_builder/job_collection.rb
+++ b/lib/jenkins_pipeline_builder/job_collection.rb
@@ -2,7 +2,7 @@ module JenkinsPipelineBuilder
   class JobCollection
     attr_accessor :collection, :remote_dependencies
     attr_reader :loaded
-    alias_method :loaded?, :loaded
+    alias loaded? loaded
 
     def initialize
       @collection = {}
@@ -70,15 +70,17 @@ def load_from_path(path, remote = false)
     private
 
     def load_file(path, remote = false)
-      if path.end_with? 'json'
-        hash = JSON.parse(IO.read(path))
-      else # elsif path.end_with?("yml") || path.end_with?("yaml")
-        hash = YAML.load_file(path)
-      end
+      hash = if path.end_with? 'json'
+               JSON.parse(IO.read(path))
+             else # elsif path.end_with?("yml") || path.end_with?("yaml")
+               YAML.load_file(path)
+             end
       logger.info "Loading file #{path}"
       hash.each do |section|
         load_section section, remote
       end
+    rescue StandardError => err
+      raise "There was an error while parsing a file #{err.message}"
     end
 
     def load_section(section, remote)
@@ -100,14 +102,10 @@ def process_collection!(name, key, value, remote)
       if collection.key?(name)
         existing_remote = collection[name.to_s][:remote]
         # skip if the existing item is local and the new item is remote
-        if remote && !existing_remote
-          return
-        # override if the existing item is remote and the new is local
-        elsif existing_remote && !remote
-          logger.info "Duplicate item with name '#{name}' was detected from the remote folder."
-        else
-          fail "Duplicate item with name '#{name}' was detected."
-        end
+        return if remote && !existing_remote
+        fail "Duplicate item with name '#{name}' was detected." unless existing_remote && !remote
+        # override if the existing item is remote and the new is local
+        logger.info "Duplicate item with name '#{name}' was detected from the remote folder."
       end
       collection[name.to_s] = { name: name.to_s, type: key, value: value, remote: remote }
     end
diff --git a/lib/jenkins_pipeline_builder/project.rb b/lib/jenkins_pipeline_builder/project.rb
index cac8d1c..bcd6516 100644
--- a/lib/jenkins_pipeline_builder/project.rb
+++ b/lib/jenkins_pipeline_builder/project.rb
@@ -10,12 +10,10 @@ def initialize(name, input)
     end
 
     def publish
       success, payload = resolve_project(input)
-      if success
-        logger.info 'successfully resolved project'
-        compiled_project = payload
-      else
-        return { project_name: 'Failed to resolve' }
-      end
+      return { project_name: 'Failed to resolve' } unless success
+
+      logger.info 'successfully resolved project'
+      compiled_project = payload
       self.errors = publish_jobs(compiled_project[:value][:jobs]) if compiled_project[:value][:jobs]
       return unless compiled_project[:value][:views]
diff --git a/lib/jenkins_pipeline_builder/version.rb b/lib/jenkins_pipeline_builder/version.rb
index 5fe8173..144920f 100644
--- a/lib/jenkins_pipeline_builder/version.rb
+++ b/lib/jenkins_pipeline_builder/version.rb
@@ -21,5 +21,5 @@
 #
 
 module JenkinsPipelineBuilder
-  VERSION = '0.13.4'
+  VERSION = '0.13.4'.freeze
 end
diff --git a/lib/jenkins_pipeline_builder/view.rb b/lib/jenkins_pipeline_builder/view.rb
index 5559b75..0be9564 100644
--- a/lib/jenkins_pipeline_builder/view.rb
+++ b/lib/jenkins_pipeline_builder/view.rb
@@ -35,11 +35,11 @@ def initialize(generator)
     end
 
     def generate(path)
-      if path.end_with? 'json'
-        hash = JSON.parse(IO.read(path))
-      else
-        hash = YAML.load_file(path)
-      end
+      hash = if path.end_with? 'json'
+               JSON.parse(IO.read(path))
+             else
+               YAML.load_file(path)
+             end
 
       hash.each do |item|
         Utils.symbolize_keys_deep!(item)
@@ -90,16 +90,19 @@ def clean_up_views(params)
       if params[:parent_view]
         create_base_view(params[:parent_view], 'nestedView') unless exists?(params[:parent_view])
         delete(params[:name], params[:parent_view]) if exists?(params[:name], params[:parent_view])
-      else
-        delete(params[:name]) if exists?(params[:name])
+      elsif exists?(params[:name])
+        delete(params[:name])
       end
     end
 
     def post_params(params)
       payload = post_payload params
-      payload.merge!('filterQueue' => 'on') if params[:filter_queue]
-      payload.merge!('filterExecutors' => 'on') if params[:filter_executors]
-      payload.merge!('useincluderegex' => 'on', 'includeRegex' => params[:regex]) if params[:regex]
+      payload['filterQueue'] = 'on' if params[:filter_queue]
+      payload['filterExecutors'] = 'on' if params[:filter_executors]
+      if params[:regex]
+        payload['useincluderegex'] = 'on'
+        payload['includeRegex'] = params[:regex]
+      end
       payload
     end
 
@@ -113,7 +116,7 @@ def post_payload(params)
         'statusFilter' => '',
         'columns' => get_columns(params[:type])
       }
-      json.merge!('groupingRules' => params[:groupingRules]) if params[:groupingRules]
+      json['groupingRules'] = params[:groupingRules] if params[:groupingRules]
 
       {
         'name' => params[:name],
@@ -211,11 +214,8 @@ def list_children(parent_view = nil, filter = '', ignorecase = true)
       path = parent_view.nil? ? '' : "/view/#{parent_view}"
       response_json = @client.api_get_request(path)
       response_json['views'].each do |view|
-        if ignorecase
-          view_names << view['name'] if view['name'] =~ /#{filter}/i
-        else
-          view_names << view['name'] if view['name'] =~ /#{filter}/
-        end
+        filter_exp = ignorecase ? /#{filter}/i : /#{filter}/
+        view_names << view['name'] if view['name'] =~ filter_exp
       end
       view_names
     end
diff --git a/spec/lib/jenkins_pipeline_builder/compiler_spec.rb b/spec/lib/jenkins_pipeline_builder/compiler_spec.rb
index c279255..060d934 100644
--- a/spec/lib/jenkins_pipeline_builder/compiler_spec.rb
+++ b/spec/lib/jenkins_pipeline_builder/compiler_spec.rb
@@ -63,7 +63,7 @@
         builders: [{ shell_command: "echo 'Running DummyPipeline'" }]
       }
       result = compiler.compile(job, settings_bag)
-      expect(result[1]).to eq(job_compiled)
+      expect(result).to eq(job_compiled)
     end
 
     it 'compiles a job with a downstream name change' do
@@ -96,7 +96,7 @@
         publishers: [{ downstream: { project: 'DummyPipeline-02' } }]
       }
       result = compiler.compile(job, settings_bag)
-      expect(result[1]).to eq(job_compiled)
+      expect(result).to eq(job_compiled)
     end
 
     it 'compiles an enabled job with a string parameter' do
@@ -107,7 +107,7 @@
      settings_bag = { var: 'this_is_a_var', name: 'name' }
 
      result = compiler.compile(my_job, settings_bag)
-      expect(result[1]).to eq(compiled_job)
+      expect(result).to eq(compiled_job)
     end
   end
 
@@ -115,39 +115,34 @@
    it 'generates correct new jobs with true' do
      item = { enabled: '{{use1}}', parameters: { rootPom: 'path_to_pomasd' } }
      settings = { name: 'PushTest', description: 'DB Pipeline tooling', git_repo: 'git@github.roving.com:devops/DBPipeline.git', git_branch: 'master', excluded_user: 'buildmaster', hipchat_room: 'CD Builds', hipchat_auth_token: 'f3e98ed54605b36f56dd2c562e3775', discard_days: '30', discard_number: '100', maven_name: 'tools-maven-3.0.3', hipchat_jenkins_url: 'https://cd-jenkins.ad.prodcc.net/', use1: true }
-      success, item = compiler.handle_enable(item, settings)
-      expect(success).to be true
+      item = compiler.handle_enable(item, settings)
      expect(item).to eq(rootPom: 'path_to_pomasd')
    end
 
    it 'generates correct new jobs when the params are a string' do
      item = { enabled: '{{use1}}', parameters: 'path_to_pomasd' }
      settings = { name: 'PushTest', description: 'DB Pipeline tooling', git_repo: 'git@github.roving.com:devops/DBPipeline.git', git_branch: 'master', excluded_user: 'buildmaster', hipchat_room: 'CD Builds', hipchat_auth_token: 'f3e98ed54605b36f56dd2c562e3775', discard_days: '30', discard_number: '100', maven_name: 'tools-maven-3.0.3', hipchat_jenkins_url: 'https://cd-jenkins.ad.prodcc.net/', use1: true }
-      success, item = compiler.handle_enable(item, settings)
-      expect(success).to be true
+      item = compiler.handle_enable(item, settings)
      expect(item).to eq('path_to_pomasd')
    end
 
    it 'generates correct new jobs with false' do
      item = { enabled: '{{use1}}', parameters: { rootPom: 'path_to_pomasd' } }
      settings = { name: 'PushTest', description: 'DB Pipeline tooling', git_repo: 'git@github.roving.com:devops/DBPipeline.git', git_branch: 'master', excluded_user: 'buildmaster', hipchat_room: 'CD Builds', hipchat_auth_token: 'f3e98ed54605b36f56dd2c562e3775', discard_days: '30', discard_number: '100', maven_name: 'tools-maven-3.0.3', hipchat_jenkins_url: 'https://cd-jenkins.ad.prodcc.net/', use1: false }
-      success, item = compiler.handle_enable(item, settings)
-      expect(success).to be true
+      item = compiler.handle_enable(item, settings)
      expect(item).to eq({})
    end
 
    it 'fails when value not found' do
      item = { enabled: '{{use_fail}}', parameters: { rootPom: 'path_to_pomasd' } }
      settings = { name: 'PushTest', description: 'DB Pipeline tooling', git_repo: 'git@github.roving.com:devops/DBPipeline.git', git_branch: 'master', excluded_user: 'buildmaster', hipchat_room: 'CD Builds', hipchat_auth_token: 'f3e98ed54605b36f56dd2c562e3775', discard_days: '30', discard_number: '100', maven_name: 'tools-maven-3.0.3', hipchat_jenkins_url: 'https://cd-jenkins.ad.prodcc.net/', use1: true }
-      success, _item = compiler.handle_enable(item, settings)
-      expect(success).to be false
+      expect { compiler.handle_enable(item, settings) }.to raise_error(/Could not find defined substitution variable: use_fail/)
    end
 
    it 'removes empty builders' do
      item = { enabled: '{{use}}', parameters: { rootPom: 'one' } }
      settings = { name: 'PushTest', description: 'DB Pipeline tooling', git_repo: 'git@github.roving.com:devops/DBPipeline.git', git_branch: 'master', excluded_user: 'buildmaster', hipchat_room: 'CD Builds', hipchat_auth_token: 'f3e98ed54605b36f56dd2c562e3775', discard_days: '30', discard_number: '100', maven_name: 'tools-maven-3.0.3', hipchat_jenkins_url: 'https://cd-jenkins.ad.prodcc.net/', use: false }
-      success, result = compiler.handle_enable(item, settings)
-      expect(success).to be true
+      result = compiler.handle_enable(item, settings)
      expect(result).to eq({})
    end
  end
diff --git a/spec/lib/jenkins_pipeline_builder/extensions/registered_spec.rb b/spec/lib/jenkins_pipeline_builder/extensions/registered_spec.rb
index f2b5f9e..05e9b37 100644
--- a/spec/lib/jenkins_pipeline_builder/extensions/registered_spec.rb
+++ b/spec/lib/jenkins_pipeline_builder/extensions/registered_spec.rb
@@ -9,7 +9,7 @@
   rvm: ['0', '0.5'],
   timestamp: ['0'],
   xvfb: ['0']
-}
+}.freeze
 PUBLISHERS = {
   archive_artifact: ['0'],
   brakeman: ['0'],
@@ -30,7 +30,7 @@
   publish_tap_results: ['0'],
   sonar_result: ['0'],
   xunit: ['0']
-}
+}.freeze
 BUILDERS = {
   blocking_downstream: ['0'],
   copy_artifact: ['0'],
@@ -43,13 +43,13 @@
   system_groovy: ['0'],
 
   sonar_standalone: ['0']
-}
+}.freeze
 TRIGGERS = {
   git_push: ['0'],
   periodic_build: ['0'],
   scm_polling: ['0'],
   upstream: ['0']
-}
+}.freeze
 JOB_ATTRIBUTES = {
   concurrent_build: ['0'],
   description: ['0'],
@@ -64,7 +64,7 @@
   promoted_builds: ['0'],
   scm_params: ['0', '2.0'],
   throttle: ['0']
-}
+}.freeze
 
 describe 'built in extensions' do
   before :each do
diff --git a/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/project.json b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/project.json
new file mode 100644
index 0000000..3d2a10d
--- /dev/null
+++ b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/project.json
@@ -0,0 +1,15 @@
+[
+  {
+    "defaults": {
+      "name": "global",
+      "description": "Tests, all the tests"
+    }
+  },
+  {
+    "project": {
+      "name": "TestProject",
+      "jobs": [
+        "{{name}}-part1"
+      ]
+    }
+]
diff --git a/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/view.json b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/view.json
new file mode 100644
index 0000000..683ba07
--- /dev/null
+++ b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_json_files/view.json
@@ -0,0 +1,10 @@
+[
+  {
+    "view": {
+      "name": "{{name}} View",
+      "type": "listview",
+      "description": "{{description}}",
+      "regex": "{{name}}.*"
+    }
+  }
+]
diff --git a/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/project.yaml b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/project.yaml
new file mode 100644
index 0000000..92b86fd
--- /dev/null
+++ b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/project.yaml
@@ -0,0 +1,8 @@
+- defaults:
+    name: global
+    description: 'Tests, all the tests'
+ 
+- project:
+    name: BadTestProject
+    jobs:
+      - '{{name}}-part1'
diff --git a/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/view.yaml b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/view.yaml
new file mode 100644
index 0000000..4c1d451
--- /dev/null
+++ b/spec/lib/jenkins_pipeline_builder/fixtures/generator_tests/test_bad_yaml_files/view.yaml
@@ -0,0 +1,5 @@
+- view:
+    name: '{{name}} View'
+    type: 'listview'
+    description: '{{description}}'
+    regex: '{{name}}.*'
diff --git a/spec/lib/jenkins_pipeline_builder/generator_spec.rb b/spec/lib/jenkins_pipeline_builder/generator_spec.rb
index 6878cb5..3d15522 100644
--- a/spec/lib/jenkins_pipeline_builder/generator_spec.rb
+++ b/spec/lib/jenkins_pipeline_builder/generator_spec.rb
@@ -174,7 +174,7 @@ def fixture_path(fixture)
                                              git_repo_name: 'generator_tests'))
      .and_return(pr_generator)
    expect(pr_generator).to receive(:delete_closed_prs)
-    pr_generator.stub(:convert!) do |job_collection, pr|
+    allow(pr_generator).to receive(:convert!) do |job_collection, pr|
      job_collection.defaults[:value][:application_name] = "testapp-PR#{pr}"
    end
    expect(pr_generator).to receive(:open_prs).and_return [1, 2]
@@ -210,6 +210,19 @@ def fixture_path(fixture)
      path = File.expand_path('../fixtures/generator_tests/test_combo_files', __FILE__)
      @generator.job_collection.load_from_path path
    end
+
+    it 'errors when reading a bad yaml file' do
+      path = File.expand_path('../fixtures/generator_tests/test_bad_yaml_files', __FILE__)
+      expect { @generator.job_collection.load_from_path path }.to raise_error(
+        RuntimeError, /There was an error while parsing a file/
+      )
+    end
+    it 'errors when reading a bad json file' do
+      path = File.expand_path('../fixtures/generator_tests/test_bad_json_files', __FILE__)
+      expect { @generator.job_collection.load_from_path path }.to raise_error(
+        RuntimeError, /There was an error while parsing a file/
+      )
+    end
  end
 
  describe '#dump' do
@@ -224,7 +237,7 @@ def fixture_path(fixture)
        end
      end
      stub_request(:get, 'http://username:password@127.0.0.1:8080/job/test_job/config.xml')
-        .to_return(status: 200, body: "#{body}", headers: {})
+        .to_return(status: 200, body: body.to_s, headers: {})
      @generator.dump(job_name)
      expect(File.exist?("#{job_name}.xml")).to be true
      File.delete("#{job_name}.xml")
    end
  end
 
  describe '#projects' do
    it 'returns a list of projects' do
      path = File.expand_path('../fixtures/generator_tests/multi_project', __FILE__)
-      expect(@generator.projects path).to eq %w(SamplePipeline1 SamplePipeline2 SamplePipeline3)
+      expect(@generator.projects(path)).to eq %w(SamplePipeline1 SamplePipeline2 SamplePipeline3)
    end
  end
 
    before :each do
      allow(JenkinsPipelineBuilder.client).to receive(:plugin).and_return double(
        list_installed: { 'description' => '20.0', 'git' => '20.0' })
    end
+    after :each do
+      file_paths = ['out/xml/TemplatePipeline-10.xml',
+                    'out/xml/TemplatePipeline-11.xml']
+      file_paths.each do |file_path|
+        File.delete(file_path) if File.exist?(file_path)
+      end
+    end
    it 'generates xml and saves to disk without sending jobs to the server' do
      job_name = 'TemplatePipeline'
      path = File.expand_path('../fixtures/generator_tests/template_pipeline', __FILE__)
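
Usage sketch (editorial illustration, not part of the patch above): the compiler change replaces the [success, payload] tuples that Compiler#compile and its helpers used to return with raised errors, and adds Compiler#compile_job as the tuple-returning wrapper that callers such as Generator#resolve_job_by_name now use. Assuming a generator instance is available to build the compiler with, and using hypothetical variable names and sample values, the resulting contract looks roughly like this:

  # Ruby sketch of the new error-handling contract (names and values are illustrative).
  compiler = JenkinsPipelineBuilder::Compiler.new generator

  # compile_job keeps the old [success, payload] interface for callers.
  success, payload = compiler.compile_job({ name: '{{app}}-build' }, app: 'Frontend')
  # => [true, { name: 'Frontend-build' }]
  success, messages = compiler.compile_job({ name: '{{missing}}' }, {})
  # => [false, ["Failed to resolve {{missing}} because: Could not find defined substitution variable: missing"]]

  # compile itself now fails fast with the clearer messages added in this patch.
  compiler.compile('{{missing}}', {}) # raises RuntimeError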