diff --git a/Gemfile b/Gemfile index 01002a4c759..18fc0e0d973 100644 --- a/Gemfile +++ b/Gemfile @@ -32,8 +32,8 @@ gem "bootsnap", ">= 1.8.1", :require => false # gem "bundler", "~> 2.1", ">= 2.1.4", "!= 2.2.10", :require => false gem "byebug", :require => false gem "color", "~>1.8" -gem "connection_pool", :require => false # For Dalli gem "config", "~>2.2", ">=2.2.3", :require => false +gem "connection_pool", :require => false # For Dalli gem "dalli", "~>3.2.3", :require => false gem "default_value_for", "~>3.3" gem "docker-api", "~>1.33.6", :require => false @@ -61,9 +61,9 @@ gem "net-ldap", "~>0.16.1", :require => false gem "net-ping", "~>1.7.4", :require => false gem "openscap", "~>0.4.8", :require => false gem "optimist", "~>3.0", :require => false -gem "psych", ">=3.1", :require => false # 3.1 safe_load changed positional to kwargs like aliases: true: https://github.com/ruby/psych/commit/4d4439d6d0adfcbd211ea295779315f1baa7dadd gem "pg", ">=1.4.1", :require => false gem "pg-dsn_parser", "~>0.1.1", :require => false +gem "psych", ">=3.1", :require => false # 3.1 safe_load changed positional to kwargs like aliases: true: https://github.com/ruby/psych/commit/4d4439d6d0adfcbd211ea295779315f1baa7dadd gem "query_relation", "~>0.1.0", :require => false gem "rack", ">=2.2.6.4", :require => false gem "rack-attack", "~>6.5.0", :require => false diff --git a/Rakefile b/Rakefile index a27c5ab9d22..597db56446f 100644 --- a/Rakefile +++ b/Rakefile @@ -2,8 +2,8 @@ # Add your own tasks in files placed in lib/tasks ending in .rake, # for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. -require File.expand_path('../config/application', __FILE__) -require File.expand_path('../lib/tasks/evm_rake_helper', __FILE__) +require File.expand_path('config/application', __dir__) +require File.expand_path('lib/tasks/evm_rake_helper', __dir__) include Rake::DSL Vmdb::Application.load_tasks diff --git a/app/channels/application_cable/connection.rb b/app/channels/application_cable/connection.rb index 208ba3f1d27..6b6108cf030 100644 --- a/app/channels/application_cable/connection.rb +++ b/app/channels/application_cable/connection.rb @@ -10,6 +10,7 @@ def connect def find_verified_user return reject_unauthorized_connection unless cookies[:ws_token] + userid = TokenManager.new('ws').token_get_info(cookies[:ws_token], :userid) User.find_by(:userid => userid.presence) || reject_unauthorized_connection end diff --git a/app/mailers/generic_mailer.rb b/app/mailers/generic_mailer.rb index 9428fbe4f26..68b559ffcf1 100644 --- a/app/mailers/generic_mailer.rb +++ b/app/mailers/generic_mailer.rb @@ -20,7 +20,7 @@ def self.deliver(method, options = {}) rcpts.each do |rcpt| rcpt.split(',').each do |to| options[:to] = to - individual = send(method, options) + individual = send(method, options) begin individual.deliver_now rescue Net::SMTPError @@ -41,7 +41,6 @@ def self.deliver(method, options = {}) rescue => e _log.error("method: #{method} options: #{options} delivery-error #{e}") end - end msg @@ -49,13 +48,14 @@ def self.deliver(method, options = {}) def self.deliver_queue(method, options = {}) return unless MiqRegion.my_region.role_assigned?('notifier') + _log.info("starting: method: #{method} args: #{options} ") options[:attachment] &&= attachment_to_blob(options[:attachment]) MiqQueue.submit_job( :service => "notifier", :class_name => name, :method_name => 'deliver', - :args => [method, options], + :args => [method, options] ) end @@ -95,12 +95,14 @@ def 
self.blob_to_attachment(attachment) when String # Actual Body blob_to_attachment(:attachment => attachment) when Hash - attachment[:body] ||= begin - blob = BinaryBlob.find(attachment.delete(:attachment_id)) - body = blob.binary unless blob.nil? - blob.destroy unless blob.nil? - body - end if attachment[:attachment_id].kind_of?(Numeric) + if attachment[:attachment_id].kind_of?(Numeric) + attachment[:body] ||= begin + blob = BinaryBlob.find(attachment.delete(:attachment_id)) + body = blob.binary unless blob.nil? + blob.destroy unless blob.nil? + body + end + end attachment[:filename] ||= "evm_attachment" attachment else @@ -162,6 +164,7 @@ def prepare_generic_email(options) options[:attachment].each do |a| name = a[:filename] next if name.nil? + attachments[name] = {:mime_type => a[:content_type], :content => a[:body]} end mail(:subject => options[:subject], :to => options[:to], :from => options[:from], :cc => options[:cc], :bcc => options[:bcc], :date => options[:sent_on]) @@ -172,7 +175,7 @@ def prepare_generic_email(options) OPTIONAL_SMTP_KEYS = [:enable_starttls_auto, :openssl_verify_mode] def set_mailer_smtp(evm_settings = nil) evm_settings ||= ::Settings.smtp - am_settings = {} + am_settings = {} DESTINATION_SMTP_KEYS.each { |key| am_settings[key] = evm_settings[key] } am_settings[:address] ||= evm_settings[:host] # vmdb.yml has key :host, ActionMailer expects :address @@ -181,7 +184,7 @@ def set_mailer_smtp(evm_settings = nil) case evm_settings[:authentication].to_s.to_sym when :none then AUTHENTICATION_SMTP_KEYS.each { |key| am_settings[key] = nil } when :plain, :login then AUTHENTICATION_SMTP_KEYS.each { |key| am_settings[key] = evm_settings[key] } - else raise ArgumentError, "authentication value #{evm_settings[:authentication].inspect} must be one of: 'none', 'plain', 'login'" + else raise ArgumentError, "authentication value #{evm_settings[:authentication].inspect} must be one of: 'none', 'plain', 'login'" end OPTIONAL_SMTP_KEYS.each { |key| am_settings[key] = evm_settings[key] if evm_settings.key?(key) } diff --git a/app/models/account.rb b/app/models/account.rb index 2fdda58e7ff..835165b1584 100644 --- a/app/models/account.rb +++ b/app/models/account.rb @@ -67,7 +67,7 @@ def self.xml_to_hashes(xmlNode, findPath, typeName) nh[:acctid] = nh.delete("#{typeName}id".to_sym) nh[:acctid] = nil unless nh[:acctid].respond_to?(:to_int) || nh[:acctid].to_s =~ /^-?[0-9]+$/ # Convert to signed integer values for acctid - nh[:acctid] = [nh[:acctid].to_i].pack("I").unpack("i")[0] unless nh[:acctid].nil? + nh[:acctid] = [nh[:acctid].to_i].pack("I").unpack1("i") unless nh[:acctid].nil? # Find the users for this group / groups for this user nh[:members] = [] @@ -93,7 +93,7 @@ def with_valid_account_type(valid_account_type) if accttype == valid_account_type yield else - raise _("Cannot call method '%{caller}' on an Account of type '%{type}'") % {:caller => caller[0][/`.*'/][1..-2], + raise _("Cannot call method '%{caller}' on an Account of type '%{type}'") % {:caller => caller(1..1).first[/`.*'/][1..-2], :type => accttype} end end @@ -136,18 +136,18 @@ def remove_all_groups # FIXME: Why not use .pluralize? 
# def members - send("#{accttype_opposite}s") + send(:"#{accttype_opposite}s") end def add_member(member) - send("add_#{accttype_opposite}", member) + send(:"add_#{accttype_opposite}", member) end def remove_member(member) - send("remove_#{accttype_opposite}", member) + send(:"remove_#{accttype_opposite}", member) end def remove_all_members - send("remove_all_#{accttype_opposite}s") + send(:"remove_all_#{accttype_opposite}s") end end diff --git a/app/models/advanced_setting.rb b/app/models/advanced_setting.rb index 7ca95a3bcab..fd484b250f9 100644 --- a/app/models/advanced_setting.rb +++ b/app/models/advanced_setting.rb @@ -20,6 +20,7 @@ def self.xml_to_hashes(xml_node, find_path) el.each_element { |e| result << e.attributes.to_h } end + result end end diff --git a/app/models/aliases/automation_manager_configured_system.rb b/app/models/aliases/automation_manager_configured_system.rb index 9750e5de23a..15fc349869b 100644 --- a/app/models/aliases/automation_manager_configured_system.rb +++ b/app/models/aliases/automation_manager_configured_system.rb @@ -1 +1 @@ -::AutomationManagerConfiguredSystem = ::ManageIQ::Providers::AnsibleTower::AutomationManager::ConfiguredSystem +AutomationManagerConfiguredSystem = ManageIQ::Providers::AnsibleTower::AutomationManager::ConfiguredSystem diff --git a/app/models/aliases/ems_automation.rb b/app/models/aliases/ems_automation.rb index 000262634b8..750d9c36b04 100644 --- a/app/models/aliases/ems_automation.rb +++ b/app/models/aliases/ems_automation.rb @@ -1 +1 @@ -::EmsAutomation = ::ManageIQ::Providers::ExternalAutomationManager +EmsAutomation = ManageIQ::Providers::ExternalAutomationManager diff --git a/app/models/aliases/ems_cloud.rb b/app/models/aliases/ems_cloud.rb index 17a987896cb..3db8adc781e 100644 --- a/app/models/aliases/ems_cloud.rb +++ b/app/models/aliases/ems_cloud.rb @@ -1 +1 @@ -::EmsCloud = ::ManageIQ::Providers::CloudManager +EmsCloud = ManageIQ::Providers::CloudManager diff --git a/app/models/aliases/ems_configuration.rb b/app/models/aliases/ems_configuration.rb index f9effe5ca9c..2ee09788146 100644 --- a/app/models/aliases/ems_configuration.rb +++ b/app/models/aliases/ems_configuration.rb @@ -1 +1 @@ -::EmsConfiguration = ::ManageIQ::Providers::ConfigurationManager +EmsConfiguration = ManageIQ::Providers::ConfigurationManager diff --git a/app/models/aliases/ems_infra.rb b/app/models/aliases/ems_infra.rb index e1ab1a08c54..d1f57aea891 100644 --- a/app/models/aliases/ems_infra.rb +++ b/app/models/aliases/ems_infra.rb @@ -1 +1 @@ -::EmsInfra = ::ManageIQ::Providers::InfraManager +EmsInfra = ManageIQ::Providers::InfraManager diff --git a/app/models/aliases/ems_network.rb b/app/models/aliases/ems_network.rb index 518a52e42c7..34f888df35f 100644 --- a/app/models/aliases/ems_network.rb +++ b/app/models/aliases/ems_network.rb @@ -1 +1 @@ -::EmsNetwork = ::ManageIQ::Providers::NetworkManager +EmsNetwork = ManageIQ::Providers::NetworkManager diff --git a/app/models/aliases/ems_physical_infra.rb b/app/models/aliases/ems_physical_infra.rb index 3f66c0403ff..5d7fc01a357 100644 --- a/app/models/aliases/ems_physical_infra.rb +++ b/app/models/aliases/ems_physical_infra.rb @@ -1 +1 @@ -::EmsPhysicalInfra = ::ManageIQ::Providers::PhysicalInfraManager +EmsPhysicalInfra = ManageIQ::Providers::PhysicalInfraManager diff --git a/app/models/aliases/ems_refresh/refreshers/base_refresher.rb b/app/models/aliases/ems_refresh/refreshers/base_refresher.rb index 4a830f5f4f3..597c67e2350 100644 --- a/app/models/aliases/ems_refresh/refreshers/base_refresher.rb +++ 
b/app/models/aliases/ems_refresh/refreshers/base_refresher.rb @@ -1 +1 @@ -::EmsRefresh::Refreshers::BaseRefresher = ManageIQ::Providers::BaseManager::Refresher +EmsRefresh::Refreshers::BaseRefresher = ManageIQ::Providers::BaseManager::Refresher diff --git a/app/models/aliases/ems_storage.rb b/app/models/aliases/ems_storage.rb index fee7b476fd4..fda230f3ca4 100644 --- a/app/models/aliases/ems_storage.rb +++ b/app/models/aliases/ems_storage.rb @@ -1 +1 @@ -::EmsStorage = ::ManageIQ::Providers::StorageManager +EmsStorage = ManageIQ::Providers::StorageManager diff --git a/app/models/aliases/miq_ems_metrics_collector_worker.rb b/app/models/aliases/miq_ems_metrics_collector_worker.rb index 1d841035abb..f479ee36493 100644 --- a/app/models/aliases/miq_ems_metrics_collector_worker.rb +++ b/app/models/aliases/miq_ems_metrics_collector_worker.rb @@ -1 +1 @@ -::MiqEmsMetricsCollectorWorker = ManageIQ::Providers::BaseManager::MetricsCollectorWorker +MiqEmsMetricsCollectorWorker = ManageIQ::Providers::BaseManager::MetricsCollectorWorker diff --git a/app/models/aliases/miq_ems_operations_worker.rb b/app/models/aliases/miq_ems_operations_worker.rb index 1df3fc4f01c..8af2c59bc04 100644 --- a/app/models/aliases/miq_ems_operations_worker.rb +++ b/app/models/aliases/miq_ems_operations_worker.rb @@ -1 +1 @@ -::MiqEmsOperationsWorker = ManageIQ::Providers::BaseManager::OperationsWorker +MiqEmsOperationsWorker = ManageIQ::Providers::BaseManager::OperationsWorker diff --git a/app/models/aliases/miq_ems_refresh_worker.rb b/app/models/aliases/miq_ems_refresh_worker.rb index 61171a0edda..04cc59f4db0 100644 --- a/app/models/aliases/miq_ems_refresh_worker.rb +++ b/app/models/aliases/miq_ems_refresh_worker.rb @@ -1 +1 @@ -::MiqEmsRefreshWorker = ManageIQ::Providers::BaseManager::RefreshWorker +MiqEmsRefreshWorker = ManageIQ::Providers::BaseManager::RefreshWorker diff --git a/app/models/aliases/miq_event_catcher.rb b/app/models/aliases/miq_event_catcher.rb index 55d2e83ce83..19a920a9346 100644 --- a/app/models/aliases/miq_event_catcher.rb +++ b/app/models/aliases/miq_event_catcher.rb @@ -1 +1 @@ -::MiqEventCatcher = ManageIQ::Providers::BaseManager::EventCatcher +MiqEventCatcher = ManageIQ::Providers::BaseManager::EventCatcher diff --git a/app/models/aliases/miq_provision_cloud.rb b/app/models/aliases/miq_provision_cloud.rb index c02770c6905..c77244b7cfa 100644 --- a/app/models/aliases/miq_provision_cloud.rb +++ b/app/models/aliases/miq_provision_cloud.rb @@ -1 +1 @@ -::MiqProvisionCloud = ::ManageIQ::Providers::CloudManager::Provision +MiqProvisionCloud = ManageIQ::Providers::CloudManager::Provision diff --git a/app/models/aliases/miq_provision_cloud_workflow.rb b/app/models/aliases/miq_provision_cloud_workflow.rb index 3d4e3a055e2..ae08625d951 100644 --- a/app/models/aliases/miq_provision_cloud_workflow.rb +++ b/app/models/aliases/miq_provision_cloud_workflow.rb @@ -1 +1 @@ -::MiqProvisionCloudWorkflow = ::ManageIQ::Providers::CloudManager::ProvisionWorkflow +MiqProvisionCloudWorkflow = ManageIQ::Providers::CloudManager::ProvisionWorkflow diff --git a/app/models/aliases/miq_provision_infra_workflow.rb b/app/models/aliases/miq_provision_infra_workflow.rb index ad460eecac7..65add6b32b6 100644 --- a/app/models/aliases/miq_provision_infra_workflow.rb +++ b/app/models/aliases/miq_provision_infra_workflow.rb @@ -1 +1 @@ -::MiqProvisionInfraWorkflow = ::ManageIQ::Providers::InfraManager::ProvisionWorkflow +MiqProvisionInfraWorkflow = ManageIQ::Providers::InfraManager::ProvisionWorkflow diff --git 
a/app/models/aliases/template_cloud.rb b/app/models/aliases/template_cloud.rb index 05a3f732ccd..0d9890960e8 100644 --- a/app/models/aliases/template_cloud.rb +++ b/app/models/aliases/template_cloud.rb @@ -1 +1 @@ -::TemplateCloud = ManageIQ::Providers::CloudManager::Template +TemplateCloud = ManageIQ::Providers::CloudManager::Template diff --git a/app/models/aliases/template_infra.rb b/app/models/aliases/template_infra.rb index 69b0c457faa..f06bf873b8d 100644 --- a/app/models/aliases/template_infra.rb +++ b/app/models/aliases/template_infra.rb @@ -1,2 +1,2 @@ require 'manageiq/providers/infra_manager/template' -::TemplateInfra = ManageIQ::Providers::InfraManager::Template +TemplateInfra = ManageIQ::Providers::InfraManager::Template diff --git a/app/models/aliases/vm_cloud.rb b/app/models/aliases/vm_cloud.rb index 14c5dd8441b..909f4fe482b 100644 --- a/app/models/aliases/vm_cloud.rb +++ b/app/models/aliases/vm_cloud.rb @@ -1 +1 @@ -::VmCloud = ManageIQ::Providers::CloudManager::Vm +VmCloud = ManageIQ::Providers::CloudManager::Vm diff --git a/app/models/aliases/vm_infra.rb b/app/models/aliases/vm_infra.rb index 42b8d9dc1cd..16a5bc28a74 100644 --- a/app/models/aliases/vm_infra.rb +++ b/app/models/aliases/vm_infra.rb @@ -1 +1 @@ -::VmInfra = ManageIQ::Providers::InfraManager::Vm +VmInfra = ManageIQ::Providers::InfraManager::Vm diff --git a/app/models/aliases/workers/ems_refresh_worker.rb b/app/models/aliases/workers/ems_refresh_worker.rb index bad5c767353..bccf79ff0fd 100644 --- a/app/models/aliases/workers/ems_refresh_worker.rb +++ b/app/models/aliases/workers/ems_refresh_worker.rb @@ -1 +1 @@ -::EmsRefreshWorker = ManageIQ::Providers::BaseManager::RefreshWorker::Runner +EmsRefreshWorker = ManageIQ::Providers::BaseManager::RefreshWorker::Runner diff --git a/app/models/aliases/workers/event_catcher.rb b/app/models/aliases/workers/event_catcher.rb index b5f32cf0940..5daca70fa63 100644 --- a/app/models/aliases/workers/event_catcher.rb +++ b/app/models/aliases/workers/event_catcher.rb @@ -1,2 +1,2 @@ -::EventCatcher = ManageIQ::Providers::BaseManager::EventCatcher::Runner -::EventCatcherHandledException = ::EventCatcher::EventCatcherHandledException +EventCatcher = ManageIQ::Providers::BaseManager::EventCatcher::Runner +EventCatcherHandledException = EventCatcher::EventCatcherHandledException diff --git a/app/models/application_record.rb b/app/models/application_record.rb index 326cd668225..f2bb3d7782a 100644 --- a/app/models/application_record.rb +++ b/app/models/application_record.rb @@ -32,6 +32,7 @@ def self.display_name(number = 1) def self.human_attribute_name(attribute, options = {}) return super if options.delete(:ui) == true + "#{name}: #{super}" end end diff --git a/app/models/asset_tag_import.rb b/app/models/asset_tag_import.rb index e742016caaa..9fe0d107270 100644 --- a/app/models/asset_tag_import.rb +++ b/app/models/asset_tag_import.rb @@ -1,8 +1,7 @@ class AssetTagImport include Vmdb::Logging - attr_accessor :errors - attr_accessor :stats + attr_accessor :errors, :stats # The required fields list is not limited anymore, so pass nil. 
REQUIRED_COLS = {VmOrTemplate => nil, Host => nil} @@ -24,6 +23,7 @@ def self.upload(klass, fd) klass = Object.const_get(klass.to_s) raise _("%{name} not supported for upload!") % {:name => klass} unless REQUIRED_COLS.key?(klass) raise _("%{name} not supported for upload!") % {:name => klass} unless MATCH_KEYS.key?(klass) + data, keys, tags = MiqBulkImport.upload(fd, REQUIRED_COLS[klass], MATCH_KEYS[klass].dup) import = new(:data => data, :keys => keys, :tags => tags, :klass => klass) @@ -69,13 +69,13 @@ def verify end @verified_data.each do |id, data| - if data.length > 1 - obj = @klass.find_by(:id => id) - while data.length > 1 - data.shift - _log.warn("#{@klass.name} #{obj.name}, Multiple lines for the same object, the last line is applied") - @errors.add(:singlevaluedassettag, "#{@klass.name}: #{obj.name}, Multiple lines for the same object, the last line is applied") - end + next unless data.length > 1 + + obj = @klass.find_by(:id => id) + while data.length > 1 + data.shift + _log.warn("#{@klass.name} #{obj.name}, Multiple lines for the same object, the last line is applied") + @errors.add(:singlevaluedassettag, "#{@klass.name}: #{obj.name}, Multiple lines for the same object, the last line is applied") end end @@ -87,31 +87,29 @@ def verify def apply @verified_data.each do |id, data| obj = @klass.find_by(:id => id) - if obj - attrs = obj.miq_custom_attributes - new_attrs = [] - data[0].each do |key, value| - # Add custom attribute here. - attr = attrs.detect { |ca| ca.name == key } - if attr.nil? - if value.blank? - _log.info("#{@klass.name}: #{obj.name}, Skipping tag <#{key}> due to blank value") - else - _log.info("#{@klass.name}: #{obj.name}, Adding tag <#{key}>, value <#{value}>") - new_attrs << {:name => key, :value => value, :source => 'EVM'} - end + next unless obj + + attrs = obj.miq_custom_attributes + new_attrs = [] + data[0].each do |key, value| + # Add custom attribute here. + attr = attrs.detect { |ca| ca.name == key } + if attr.nil? + if value.blank? + _log.info("#{@klass.name}: #{obj.name}, Skipping tag <#{key}> due to blank value") else - if value.blank? - _log.info("#{@klass.name}: #{obj.name}, Deleting tag <#{key}> due to blank value") - attr.delete - else - _log.info("#{@klass.name}: #{obj.name}, Updating tag <#{key}>, value <#{value}>") - attr.update_attribute(:value, value) - end + _log.info("#{@klass.name}: #{obj.name}, Adding tag <#{key}>, value <#{value}>") + new_attrs << {:name => key, :value => value, :source => 'EVM'} end + elsif value.blank? + _log.info("#{@klass.name}: #{obj.name}, Deleting tag <#{key}> due to blank value") + attr.delete + else + _log.info("#{@klass.name}: #{obj.name}, Updating tag <#{key}>, value <#{value}>") + attr.update_attribute(:value, value) end - obj.custom_attributes.create(new_attrs) end + obj.custom_attributes.create(new_attrs) end end diff --git a/app/models/assigned_server_role.rb b/app/models/assigned_server_role.rb index c1ac6442894..d77d02ad265 100644 --- a/app/models/assigned_server_role.rb +++ b/app/models/assigned_server_role.rb @@ -22,7 +22,7 @@ def is_master? end def inactive? - !self.active? + !active? end def set_master @@ -37,16 +37,15 @@ def remove_master def set_priority(val) # Only allow 1 Primary in the RoleScope - if val == HIGH_PRIORITY && server_role.master_supported? - if ['zone', 'region'].include?(server_role.role_scope) - method = "find_other_servers_in_#{server_role.role_scope}" + if val == HIGH_PRIORITY && server_role.master_supported? 
&& ['zone', 'region'].include?(server_role.role_scope) + method = "find_other_servers_in_#{server_role.role_scope}" other_servers = miq_server.send(method) other_servers.each do |server| assigned = server.assigned_server_roles.find_by(:server_role_id => server_role_id) next if assigned.nil? - assigned.update_attribute(:priority, DEFAULT_PRIORITY) if assigned.priority == HIGH_PRIORITY + + assigned.update_attribute(:priority, DEFAULT_PRIORITY) if assigned.priority == HIGH_PRIORITY end - end end update_attribute(:priority, val) @@ -55,7 +54,7 @@ def set_priority(val) def activate_in_region(override = false) return unless server_role.role_scope == 'region' - if override || self.inactive? + if override || inactive? MiqRegion.my_region.lock do if server_role.master_supported? servers = MiqRegion.my_region.active_miq_servers @@ -72,7 +71,7 @@ def activate_in_region(override = false) def deactivate_in_region(override = false) return unless server_role.role_scope == 'region' - if override || self.active? + if override || active? MiqRegion.my_region.lock do deactivate(override) end @@ -82,7 +81,7 @@ def deactivate_in_region(override = false) def activate_in_zone(override = false) return unless server_role.role_scope == 'zone' - if override || self.inactive? + if override || inactive? miq_server.zone.lock do |_zone| if server_role.master_supported? servers = miq_server.zone.active_miq_servers @@ -99,7 +98,7 @@ def activate_in_zone(override = false) def deactivate_in_zone(override = false) return unless server_role.role_scope == 'zone' - if override || self.active? + if override || active? miq_server.zone.lock do |_zone| deactivate(override) end @@ -121,14 +120,14 @@ def deactivate_in_role_scope end def activate(override = false) - if override || self.inactive? + if override || inactive? _log.info("Activating Role <#{server_role.name}> on Server <#{miq_server.name}>") update(:active => true) end end def deactivate(override = false) - if override || self.active? + if override || active? _log.info("Deactivating Role <#{server_role.name}> on Server <#{miq_server.name}>") update(:active => false) end diff --git a/app/models/audit_event.rb b/app/models/audit_event.rb index a9c14626cb2..7c3a5154918 100644 --- a/app/models/audit_event.rb +++ b/app/models/audit_event.rb @@ -1,7 +1,7 @@ class AuditEvent < ApplicationRecord validates :event, :status, :message, :severity, :presence => true - validates :status, :inclusion => { :in => %w(success failure) } - validates :severity, :inclusion => { :in => %w(fatal error warn info debug) } + validates :status, :inclusion => {:in => %w[success failure]} + validates :severity, :inclusion => {:in => %w[fatal error warn info debug]} include Purging diff --git a/app/models/authentication.rb b/app/models/authentication.rb index 9e4122f3d6a..c4387513d80 100644 --- a/app/models/authentication.rb +++ b/app/models/authentication.rb @@ -60,7 +60,7 @@ def self.new(*args, &block) "incomplete" => 1, "error" => 2, "unreachable" => 2, - "invalid" => 3, + "invalid" => 3 ).freeze # Builds a case statement that case be used in a sql ORDER BY. 
@@ -79,7 +79,7 @@ def self.new(*args, &block) end end.else(-1) - RETRYABLE_STATUS = %w(error unreachable).freeze + RETRYABLE_STATUS = %w[error unreachable].freeze CREDENTIAL_TYPES = { :external_credential_types => 'ManageIQ::Providers::ExternalAutomationManager::Authentication', @@ -159,11 +159,13 @@ def native_ref def set_credentials_changed_on return unless @auth_changed + self.credentials_changed_on = Time.now.utc end def after_authentication_changed return unless @auth_changed + _log.info("[#{resource_type}] [#{resource_id}], previously valid on: [#{last_valid_on}]") raise_event(:changed) diff --git a/app/models/authenticator/base.rb b/app/models/authenticator/base.rb index 99c981290fb..bb6837b69ce 100644 --- a/app/models/authenticator/base.rb +++ b/app/models/authenticator/base.rb @@ -19,6 +19,7 @@ def self.short_name end attr_reader :config + def initialize(config) @config = config end @@ -41,6 +42,7 @@ def user_authorizable_with_system_token? def authorize_user(userid) return unless user_authorizable_without_authentication? + authenticate(userid, "", {}, {:require_user => true, :authorize_only => true}) end @@ -89,11 +91,10 @@ def authenticate(username, password, request = nil, options = {}) audit_success(audit.merge(:message => "Authentication successful for user #{username}")) else reason = failure_reason(username, request) - reason = ": #{reason}" unless reason.blank? + reason = ": #{reason}" if reason.present? audit_failure(audit.merge(:message => "Authentication failed for userid #{username}#{reason}")) raise MiqException::MiqEVMLoginError, fail_message end - rescue MiqException::MiqEVMLoginError => err _log.warn(err.message) raise @@ -107,6 +108,7 @@ def authenticate(username, password, request = nil, options = {}) if task.nil? || MiqTask.status_error?(task.status) || MiqTask.status_timeout?(task.status) raise MiqException::MiqEVMLoginError, fail_message end + user_or_taskid = case_insensitive_find_by_userid(task.userid) end @@ -123,58 +125,58 @@ def authorize(taskid, username, *args) decrypt_ldap_password(config) if MiqLdap.using_ldap? run_task(taskid, "Authorizing") do |task| - begin - identity = find_external_identity(username, args[0], args[1]) - - unless identity - msg = "Authentication failed for userid #{username}, unable to find user object in #{self.class.proper_name}" - _log.warn(msg) - audit_failure(audit.merge(:message => msg)) - task.error(msg) - task.state_finished - return nil - end - incoming_groups = groups_for(identity) - matching_groups = match_groups(incoming_groups) - userid, user = find_or_initialize_user(identity, username) - update_user_attributes(user, userid, identity) - audit_new_user(audit, user) if user.new_record? - user.miq_groups = matching_groups - - if matching_groups.empty? - msg = "Authentication failed for userid #{user.userid}, unable to match user's group membership to an EVM role. The incoming groups are: #{incoming_groups.join(", ")}" - _log.warn(msg) - audit_failure(audit.merge(:message => msg)) - task.error(msg) - task.state_finished - user.save! unless user.new_record? - return nil - end + identity = find_external_identity(username, args[0], args[1]) - user.lastlogon = Time.now.utc - if user.new_record? - User.with_lock do - user.save! - rescue ActiveRecord::RecordInvalid # Try update when catching create race condition. - userid, user = find_or_initialize_user(identity, username) - update_user_attributes(user, userid, identity) - user.miq_groups = matching_groups - user.save! 
- end - else + unless identity + msg = "Authentication failed for userid #{username}, unable to find user object in #{self.class.proper_name}" + _log.warn(msg) + audit_failure(audit.merge(:message => msg)) + task.error(msg) + task.state_finished + return nil + end + + incoming_groups = groups_for(identity) + matching_groups = match_groups(incoming_groups) + userid, user = find_or_initialize_user(identity, username) + update_user_attributes(user, userid, identity) + audit_new_user(audit, user) if user.new_record? + user.miq_groups = matching_groups + + if matching_groups.empty? + msg = "Authentication failed for userid #{user.userid}, unable to match user's group membership to an EVM role. The incoming groups are: #{incoming_groups.join(", ")}" + _log.warn(msg) + audit_failure(audit.merge(:message => msg)) + task.error(msg) + task.state_finished + user.save! unless user.new_record? + return nil + end + + user.lastlogon = Time.now.utc + if user.new_record? + User.with_lock do + user.save! + rescue ActiveRecord::RecordInvalid # Try update when catching create race condition. + userid, user = find_or_initialize_user(identity, username) + update_user_attributes(user, userid, identity) + user.miq_groups = matching_groups user.save! end + else + user.save! + end - _log.info("Authorized User: [#{user.userid}]") - task.userid = user.userid - task.update_status("Finished", "Ok", "User authorized successfully") + _log.info("Authorized User: [#{user.userid}]") + task.userid = user.userid + task.update_status("Finished", "Ok", "User authorized successfully") + + user + rescue Exception => err + audit_failure(audit.merge(:message => err.message)) + raise - user - rescue Exception => err - audit_failure(audit.merge(:message => err.message)) - raise - end end end @@ -279,7 +281,7 @@ def authorize_queue(username, _request, _options, *args) :instance_id => task.id, :method_name => :queue_callback_on_exceptions, :args => ['Finished'] - }, + } ) else authorize(task.id, username, *args) diff --git a/app/models/authenticator/httpd.rb b/app/models/authenticator/httpd.rb index d6c6bb4208f..97e2ebf09d2 100644 --- a/app/models/authenticator/httpd.rb +++ b/app/models/authenticator/httpd.rb @@ -65,7 +65,7 @@ def update_user_attributes(user, username, identity) user.userid = username user.first_name = user_attrs[:firstname] user.last_name = user_attrs[:lastname] - user.email = user_attrs[:email] unless user_attrs[:email].blank? + user.email = user_attrs[:email] if user_attrs[:email].present? user.name = user_attrs[:fullname] user.name = "#{user.first_name} #{user.last_name}" if user.name.blank? user.name = user.userid if user.name.blank? 
@@ -115,8 +115,8 @@ def find_userid_as_username(identity, username) def find_userid_as_distinguished_name(user_attrs) dn_domain = user_attrs[:domain].downcase.split(".").map { |s| "dc=#{s}" }.join(",") - user = User.in_my_region.where("userid LIKE ?", "%=#{user_attrs[:username]},%,#{dn_domain}").last - user + User.in_my_region.where("userid LIKE ?", "%=#{user_attrs[:username]},%,#{dn_domain}").last + end def username_to_upn_name(user_attrs) @@ -167,8 +167,8 @@ def user_details_from_headers(username, request) X-REMOTE-USER-EMAIL X-REMOTE-USER-DOMAIN X-REMOTE-USER-GROUPS - ].each_with_object({}) do |k, h| - h[k] = request.headers[k]&.force_encoding("UTF-8") + ].index_with do |k| + request.headers[k]&.force_encoding("UTF-8") end.delete_nils end @@ -192,10 +192,11 @@ def user_attrs_from_external_directory(username) end end - ATTRS_NEEDED = %w(mail givenname sn displayname domainname).freeze + ATTRS_NEEDED = %w[mail givenname sn displayname domainname].freeze def user_attrs_from_external_directory_via_dbus(username) return unless username + require "dbus" sysbus = DBus.system_bus @@ -210,7 +211,7 @@ def user_attrs_from_external_directory_via_dbus(username) {:user_name => username, :error => err} end - ATTRS_NEEDED.each_with_object({}) { |attr, hash| hash[attr] = Array(user_attrs[attr]).first } + ATTRS_NEEDED.index_with { |attr| Array(user_attrs[attr]).first } end def user_attrs_from_external_directory_via_dbus_api_service(username) diff --git a/app/models/authenticator/ldap.rb b/app/models/authenticator/ldap.rb index a7e1517a368..a6400454037 100644 --- a/app/models/authenticator/ldap.rb +++ b/app/models/authenticator/ldap.rb @@ -5,7 +5,7 @@ def self.proper_name end def self.authenticates_for - super + %w(ldaps) + super + %w[ldaps] end def self.validate_config(config) @@ -19,8 +19,10 @@ def self.validate_config(config) def autocreate_user(username) # when default group for ldap users is enabled, create the user return unless config[:default_group_for_users] + default_group = MiqGroup.in_my_region.find_by(:description => config[:default_group_for_users]) return unless default_group + create_user_from_ldap(username) { [default_group] } end @@ -48,7 +50,7 @@ def create_user_from_ldap(username) lobj = ldap.get_user_object(username) if lobj.nil? raise _("Unable to auto-create user because LDAP search returned no data for user: [%{name}]") % - {:name => username} + {:name => username} end groups = yield lobj @@ -91,7 +93,7 @@ def groups_for(obj) authentication = config.dup authentication[:group_memberships_max_depth] ||= DEFAULT_GROUP_MEMBERSHIPS_MAX_DEPTH - if authentication.key?(:user_proxies) && !authentication[:user_proxies].blank? && authentication.key?(:get_direct_groups) && authentication[:get_direct_groups] == false + if authentication.key?(:user_proxies) && authentication[:user_proxies].present? 
&& authentication.key?(:get_direct_groups) && authentication[:get_direct_groups] == false _log.info("Skipping getting group memberships directly assigned to user bacause it has been disabled in the configuration") groups = [] else @@ -101,11 +103,11 @@ def groups_for(obj) if authentication.key?(:user_proxies) if (sid = MiqLdap.get_attr(obj, :objectsid)) authentication[:user_proxies].each do |auth| - begin - groups += user_proxy_membership(auth, MiqLdap.sid_to_s(sid)) - rescue Exception => err - _log.warn("#{err.message} (from Authenticator#user_proxy_membership)") - end + + groups += user_proxy_membership(auth, MiqLdap.sid_to_s(sid)) + rescue Exception => err + _log.warn("#{err.message} (from Authenticator#user_proxy_membership)") + end else _log.warn("User Object has no objectSID") @@ -121,7 +123,7 @@ def update_user_attributes(user, _username, lobj) user.last_name = ldap.get_attr(lobj, :sn) email = ldap.get_attr(lobj, :mail) email = email.first if email.kind_of?(Array) - user.email = email unless email.blank? + user.email = email if email.present? user.name = ldap.get_attr(lobj, :displayname) user.name = "#{user.first_name} #{user.last_name}" if user.name.blank? user.name = user.userid if user.name.blank? @@ -129,7 +131,7 @@ def update_user_attributes(user, _username, lobj) REQUIRED_LDAP_USER_PROXY_KEYS = [:basedn, :bind_dn, :bind_pwd, :ldaphost, :ldapport, :mode] def user_proxy_membership(auth, sid) - authentication = config + authentication = config auth[:bind_dn] ||= authentication[:bind_dn] auth[:bind_pwd] ||= authentication[:bind_pwd] auth[:ldapport] ||= authentication[:ldapport] @@ -148,9 +150,11 @@ def user_proxy_membership(auth, sid) _log.info("Bind DN: [#{auth[:bind_dn]}], Host: [#{auth[:ldaphost]}], Port: [#{auth[:ldapport]}], Mode: [#{auth[:mode]}]") raise "Cannot Bind" unless ldap_up.bind(auth[:bind_dn], auth[:bind_pwd]) # now bind with bind_dn so that we can do our searches. + _log.info("User SID: [#{sid}], FSP DN: [#{fsp_dn}]") user_proxy_object = ldap_up.search(:base => fsp_dn, :scope => :base).first raise "Unable to find user proxy object in LDAP" if user_proxy_object.nil? 
+ _log.debug("UserProxy obj from LDAP: #{user_proxy_object.inspect}") ldap_up.get_memberships(user_proxy_object, auth[:group_memberships_max_depth]) end diff --git a/app/models/automate_workspace.rb b/app/models/automate_workspace.rb index 7e7cae10e7f..9dcd44cf043 100644 --- a/app/models/automate_workspace.rb +++ b/app/models/automate_workspace.rb @@ -39,8 +39,10 @@ def encrypted_value(object_name, attribute) value = fetch_value(object_name, attribute) raise ArgumentError, "#{object_name} : Attribute #{attribute} not found" unless value raise ArgumentError, "#{object_name} : Attribute #{attribute} invalid type" unless value.kind_of?(String) + match_data = /^password::(.*)/.match(value) raise ArgumentError, "Attribute #{attribute} is not a password type" unless match_data + match_data[1] end diff --git a/app/models/automation_request.rb b/app/models/automation_request.rb index bf3478d674a..cdf3903c90f 100644 --- a/app/models/automation_request.rb +++ b/app/models/automation_request.rb @@ -23,7 +23,7 @@ def self.create_from_ws(version, user, uri_parts, parameters, requester) uri_options = MiqRequestWorkflow.parse_ws_string(uri_parts) [:namespace, :class, :instance, :message].each { |key| options[key] = uri_options.delete(key) if uri_options.key?(key) } uri_options.keys.each { |key| _log.warn("invalid keyword <#{key}> specified in uri_parts") } - options[:namespace] = (options.delete(:namespace) || DEFAULT_NAMESPACE).strip.gsub(/(^\/|\/$)/, "") # Strip blanks and slashes from beginning and end of string + options[:namespace] = (options.delete(:namespace) || DEFAULT_NAMESPACE).strip.gsub(/(^\/|\/$)/, "") # Strip blanks and slashes from beginning and end of string options[:class_name] = (options.delete(:class) || DEFAULT_CLASS).strip.gsub(/(^\/|\/$)/, "") options[:instance_name] = (options.delete(:instance) || DEFAULT_INSTANCE).strip options.merge!(parse_schedule_options(parameters.select { |key, _v| key.to_s.include?('schedule') })) @@ -47,6 +47,7 @@ def self.zone(options) unless Zone.where(:name => zone_name).exists? raise ArgumentError, _("unknown zone %{zone_name}") % {:zone_name => zone_name} end + zone_name end diff --git a/app/models/binary_blob.rb b/app/models/binary_blob.rb index de67e0b53ca..605566ab5a5 100644 --- a/app/models/binary_blob.rb +++ b/app/models/binary_blob.rb @@ -7,7 +7,7 @@ class BinaryBlob < ApplicationRecord def delete_binary self.md5 = self.size = self.part_size = nil binary_blob_parts.delete_all - self.save! + save! end # Get binary file from database into a raw String @@ -19,6 +19,7 @@ def binary unless md5.nil? || md5 == Digest::MD5.hexdigest(data) raise _("md5 of %{name} id [%{number}] is incorrect") % {:name => self.class.name, :number => id} end + data end @@ -36,9 +37,7 @@ def binary=(data) buf = data.slice!(0..self.part_size) binary_blob_parts << BinaryBlobPart.new(:data => buf) end - self.save! - - self + save! end # Write binary file from the database into a file @@ -66,6 +65,7 @@ def dump_binary(path_or_io) unless md5.nil? || md5 == hasher.hexdigest raise _("md5 of %{name} id [%{number}] is incorrect") % {:name => self.class.name, :number => id} end + true end @@ -89,7 +89,7 @@ def store_binary(path) end self.md5 = hasher.hexdigest - self.save! + save! 
self end diff --git a/app/models/blacklisted_event.rb b/app/models/blacklisted_event.rb index c48ceb0fe0b..4f4dfa065e4 100644 --- a/app/models/blacklisted_event.rb +++ b/app/models/blacklisted_event.rb @@ -1,5 +1,5 @@ class BlacklistedEvent < ApplicationRecord - belongs_to :ext_management_system, :foreign_key => "ems_id" + belongs_to :ext_management_system, :foreign_key => "ems_id" attribute :enabled, :default => true after_validation :log_enabling, :if => :enabled_changed?, :unless => :new_record? diff --git a/app/models/bottleneck_event.rb b/app/models/bottleneck_event.rb index a118cb0cf94..036a4b54447 100644 --- a/app/models/bottleneck_event.rb +++ b/app/models/bottleneck_event.rb @@ -20,6 +20,7 @@ def self.generate_future_events(obj) future_event_definitions_for_obj(obj).each do |e| result = calculate_future_event(obj, e[:definition][:calculation]) next if result.blank? || result[:timestamp].nil? + # TODO: determine wheter we omit results that are in the past event = new(e[:definition][:event]) @@ -42,6 +43,7 @@ def self.generate_future_events(obj) def self.calculate_future_event(obj, options) method = "calculate_future_#{options[:name]}" raise _("'%{name}', calculation not supported") % {:name => options[:name]} unless respond_to?(method) + send(method, obj, options) end @@ -144,6 +146,7 @@ def self.remove_duplicate_find_results(recs) recs.inject([]) do |a, r| key = [r.resource_type, r.resource_id, r.event_type, r.severity, r.message].join("|") next(a) if seen.include?(key) + seen << key a << r end diff --git a/app/models/chargeable_field.rb b/app/models/chargeable_field.rb index 895aca6c511..4bf61a2ab2b 100644 --- a/app/models/chargeable_field.rb +++ b/app/models/chargeable_field.rb @@ -51,7 +51,8 @@ def measure(consumption, options, sub_metric = nil) return 1.0 if fixed? return 0 if options.method_for_allocated_metrics != :current_value && consumption.none?(metric, sub_metric) return consumption.send(options.method_for_allocated_metrics, metric, sub_metric) if allocated? - return consumption.avg(metric) if used? + + consumption.avg(metric) if used? end def fixed? @@ -75,7 +76,7 @@ def metric_key(sub_metric = nil) # fixed_compute_metric is used in report and calculations # TODO: remove and unify with metric_key def metric_column_key - fixed? ? metric_key.gsub(/\_1|\_2/, '') : metric_key + fixed? ? 
metric_key.gsub(/_1|_2/, '') : metric_key end def cost_keys(sub_metric = nil) @@ -94,7 +95,7 @@ def rate_name end def self.cols_on_metric_rollup - (%w(id tag_names resource_id) + chargeable_cols_on_metric_rollup).uniq + (%w[id tag_names resource_id] + chargeable_cols_on_metric_rollup).uniq end def self.col_index(column) diff --git a/app/models/chargeback.rb b/app/models/chargeback.rb index 98a2a4e48d7..2a133aa7e6d 100644 --- a/app/models/chargeback.rb +++ b/app/models/chargeback.rb @@ -10,7 +10,7 @@ class Chargeback < ActsAsArModel :entity => :binary, :tag_name => :string, :label_name => :string, - :fixed_compute_metric => :integer, + :fixed_compute_metric => :integer ) ALLOWED_FIELD_SUFFIXES = %w[ @@ -32,12 +32,12 @@ class Chargeback < ActsAsArModel def self.dynamic_rate_columns @chargeable_fields = {} @chargeable_fields[self.class] ||= - begin - ChargeableField.all.each_with_object({}) do |chargeable_field, result| - next unless report_col_options.keys.include?("#{chargeable_field.rate_name}_cost") + ChargeableField.all.each_with_object({}) do |chargeable_field, result| + next unless report_col_options.keys.include?("#{chargeable_field.rate_name}_cost") + result["#{chargeable_field.rate_name}_rate"] = :string - end end + end def self.refresh_dynamic_metric_columns @@ -123,7 +123,7 @@ def initialize(options, consumption, region, result_key) self.tag_name = result_key[:key_object] ? result_key[:key_object].description : _('') elsif @options[:groupby_label].present? label_value = self.class.groupby_label_value(consumption, options[:groupby_label]) - self.label_name = label_value.present? ? label_value : _('') + self.label_name = (label_value.presence || _('')) else init_extra_fields(consumption, region) end @@ -166,11 +166,11 @@ def new_chargeback_calculate_costs(consumption, rates) # TODO: duration_of_report_step is 30.days for price plans but for consumption history, # it's used for date ranges and needs to be 1.month with rails 5.1 duration = @options.interval == "monthly" ? 30.days : @options.duration_of_report_step - results = plan.calculate_list_of_costs_input(resource_type: showback_category, - data: data, - start_time: consumption.instance_variable_get("@start_time"), - end_time: consumption.instance_variable_get("@end_time"), - cycle_duration: duration) + results = plan.calculate_list_of_costs_input(:resource_type => showback_category, + :data => data, + :start_time => consumption.instance_variable_get(:@start_time), + :end_time => consumption.instance_variable_get(:@end_time), + :cycle_duration => duration) results.each do |cost_value, sb_rate| r = ChargebackRateDetail.find(sb_rate.concept) @@ -215,6 +215,7 @@ def calculate_costs(consumption, rates) end r.charge(consumption, @options).each do |field, value| next if @options.skip_field_accumulation?(field, self[field]) + _log.debug("Calculation with field: #{field} and with value: #{value}") (self[field] = self[field].kind_of?(Numeric) ? 
(self[field] || 0) + value : value) _log.debug("Accumulated value: #{self[field]}") @@ -248,7 +249,7 @@ def self.set_chargeback_report_options(rpt, group_by, header_for_tag, groupby_la when "tenant" then ["tenant_name"] else report_static_cols end - rpt.cols = %w(start_date display_range) + static_cols + rpt.cols = %w[start_date display_range] + static_cols if group_by == "date-first" rpt.col_order = ["display_range"] + static_cols rpt.sortby = (["start_date"] + static_cols) @@ -258,9 +259,9 @@ def self.set_chargeback_report_options(rpt, group_by, header_for_tag, groupby_la end rpt.col_order.each do |c| - header_column = if (c == report_tag_field && header_for_tag) + header_column = if c == report_tag_field && header_for_tag header_for_tag - elsif (c == report_label_field && groupby_label) + elsif c == report_label_field && groupby_label groupby_label else c @@ -281,7 +282,7 @@ def tags end def self.load_custom_attributes_for(cols) - chargeback_klass = report_cb_model(self.to_s).safe_constantize + chargeback_klass = report_cb_model(to_s).safe_constantize chargeback_klass.load_custom_attributes_for(cols) cols.each do |x| next unless x.include?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX) diff --git a/app/models/chargeback/consumption.rb b/app/models/chargeback/consumption.rb index 9918d09ace3..56c1c612b97 100644 --- a/app/models/chargeback/consumption.rb +++ b/app/models/chargeback/consumption.rb @@ -10,9 +10,9 @@ def consumed_hours_in_interval # 2) We cannot charge for future hours (i.e. weekly report on Monday, should charge just monday) # 3) We cannot charge for hours after the resource has been retired. @consumed_hours_in_interval ||= begin - consumed = (consumption_end - consumption_start).round / 1.hour - consumed > 0 ? consumed : 0 - end + consumed = (consumption_end - consumption_start).round / 1.hour + consumed > 0 ? consumed : 0 + end end def hours_in_month @@ -60,7 +60,7 @@ def hours_in_interval def monthly? # A heuristic. Is the interval lenght about 30 days? - (hours_in_interval * 1.hour - 30.days).abs < 3.days + ((hours_in_interval * 1.hour) - 30.days).abs < 3.days end def born_at diff --git a/app/models/chargeback/consumption_history.rb b/app/models/chargeback/consumption_history.rb index 50b6fee0204..b30edd80701 100644 --- a/app/models/chargeback/consumption_history.rb +++ b/app/models/chargeback/consumption_history.rb @@ -20,6 +20,7 @@ def self.for_report(cb_class, options, region) records = uniq_timestamp_record_map(records, options.group_by_tenant?) next if records.empty? + _log.info("Found #{records.flatten.flatten.count - records.keys.count} records for time range #{[query_start_time, query_end_time].inspect}") # we are building hash with grouped calculated values diff --git a/app/models/chargeback/consumption_with_rollups.rb b/app/models/chargeback/consumption_with_rollups.rb index 453133edce4..1a36c6256f7 100644 --- a/app/models/chargeback/consumption_with_rollups.rb +++ b/app/models/chargeback/consumption_with_rollups.rb @@ -82,6 +82,7 @@ def max(metric, sub_metric = nil) def sum_of_maxes_from_grouped_values(metric, sub_metric = nil) return max(metric, sub_metric) if sub_metric + @grouped_values ||= {} grouped_rollups = rollup_records.group_by { |x| x[ChargeableField.col_index(:resource_id)] } @@ -169,9 +170,8 @@ def sub_metric_rollups(sub_metric) def values(metric, sub_metric = nil) @values ||= {} - @values["#{metric}#{sub_metric}"] ||= begin - sub_metric ? 
sub_metric_rollups(sub_metric) : rollup_records.collect { |x| rollup_field(x, metric) }.compact - end + @values["#{metric}#{sub_metric}"] ||= sub_metric ? sub_metric_rollups(sub_metric) : rollup_records.collect { |x| rollup_field(x, metric) }.compact + end def rollup_field(rollup, metric) @@ -226,11 +226,10 @@ def tag_filtered_for_rollup_records return @rollup_records unless tag_name_filter @tag_filtered_for_rollup_records ||= {} - @tag_filtered_for_rollup_records[tag_name_filter] ||= begin - @rollup_records.select do |rollup| - (resource_tag_names(rollup) & [tag_name_filter]).present? - end + @tag_filtered_for_rollup_records[tag_name_filter] ||= @rollup_records.select do |rollup| + (resource_tag_names(rollup) & [tag_name_filter]).present? end + end end end diff --git a/app/models/chargeback/report_options.rb b/app/models/chargeback/report_options.rb index e2c030ba9a8..4530eeccec9 100644 --- a/app/models/chargeback/report_options.rb +++ b/app/models/chargeback/report_options.rb @@ -18,7 +18,7 @@ class Chargeback :ext_options, :include_metrics, # enable charging allocated resources with C & U :method_for_allocated_metrics, - :cumulative_rate_calculation, + :cumulative_rate_calculation ) do def self.new_from_h(hash) new(*hash.values_at(*members)) @@ -37,7 +37,7 @@ def cumulative_rate_calculation? !!self[:cumulative_rate_calculation] end - ALLOCATED_METHODS_WHITELIST = %i(max avg current_value).freeze + ALLOCATED_METHODS_WHITELIST = %i[max avg current_value].freeze def method_for_allocated_metrics method = (self[:method_for_allocated_metrics] || :max).to_sym @@ -47,6 +47,7 @@ def method_for_allocated_metrics end return :sum_of_maxes_from_grouped_values if method == :max && group_by_tenant? + method end diff --git a/app/models/chargeback_configured_system.rb b/app/models/chargeback_configured_system.rb index 6f24f91fdf8..adf55c7d48d 100644 --- a/app/models/chargeback_configured_system.rb +++ b/app/models/chargeback_configured_system.rb @@ -68,15 +68,14 @@ def self.report_col_options def self.configured_systems(region) @configured_systems ||= {} @configured_systems[region] ||= - begin - if @options[:entity_id] - ConfiguredSystem.where(:id => @options[:entity_id]) - elsif @options[:tag] - ConfiguredSystem.find_tagged_with(:all => @options[:tag], :ns => '*') - else - raise _('Unable to find strategy for Configured Systems selection') - end + if @options[:entity_id] + ConfiguredSystem.where(:id => @options[:entity_id]) + elsif @options[:tag] + ConfiguredSystem.find_tagged_with(:all => @options[:tag], :ns => '*') + else + raise _('Unable to find strategy for Configured Systems selection') end + end def self.display_name(number = 1) diff --git a/app/models/chargeback_container_image.rb b/app/models/chargeback_container_image.rb index b94df93425e..a069c27d03a 100644 --- a/app/models/chargeback_container_image.rb +++ b/app/models/chargeback_container_image.rb @@ -19,7 +19,7 @@ class ChargebackContainerImage < Chargeback :memory_allocated_metric => :float, :net_io_used_cost => :float, :net_io_used_metric => :float, - :total_cost => :float, + :total_cost => :float ) def self.build_results_for_report_ChargebackContainerImage(options) @@ -61,7 +61,7 @@ def self.build_results_for_report_ChargebackContainerImage(options) end def self.load_custom_attribute_groupby(groupby_label) - report_cb_model(self.name).safe_constantize.add_custom_attribute(groupby_label_method(groupby_label)) + report_cb_model(name).safe_constantize.add_custom_attribute(groupby_label_method(groupby_label)) end def 
self.groupby_label_method(groupby_label) @@ -92,7 +92,7 @@ def self.where_clause(records, _options, region) end def self.report_static_cols - %w(project_name image_name) + %w[project_name image_name] end def self.report_col_options diff --git a/app/models/chargeback_container_project.rb b/app/models/chargeback_container_project.rb index d9496cab8d4..832ab976fd6 100644 --- a/app/models/chargeback_container_project.rb +++ b/app/models/chargeback_container_project.rb @@ -15,7 +15,7 @@ class ChargebackContainerProject < Chargeback :memory_used_metric => :float, :net_io_used_cost => :float, :net_io_used_metric => :float, - :total_cost => :float, + :total_cost => :float ) def self.build_results_for_report_ChargebackContainerProject(options) @@ -51,7 +51,7 @@ def self.where_clause(records, _options, region) end def self.report_static_cols - %w(project_name) + %w[project_name] end def self.report_col_options diff --git a/app/models/chargeback_rate.rb b/app/models/chargeback_rate.rb index 89855a1170b..19e7e4369a7 100644 --- a/app/models/chargeback_rate.rb +++ b/app/models/chargeback_rate.rb @@ -37,11 +37,10 @@ def self.tag_class(klass) def rate_details_relevant_to(report_cols, allowed_cols) # we can memoize, as we get the same report_cols through the life of the object - @relevant ||= begin - chargeback_rate_details.select do |r| - r.affects_report_fields(report_cols) && allowed_cols.include?(r.metric_column_key) - end + @relevant ||= chargeback_rate_details.select do |r| + r.affects_report_fields(report_cols) && allowed_cols.include?(r.metric_column_key) end + end def self.validate_rate_type(type) @@ -117,15 +116,13 @@ def self.seed _log.info("Creating [#{cbr[:description]}] with guid=[#{cbr[:guid]}]") rec = create(cbr) rec.chargeback_rate_details.create(rates) - else - if fix_mtime > rec.created_on - _log.info("Updating [#{cbr[:description]}] with guid=[#{cbr[:guid]}]") - rec.update(cbr) - rec.chargeback_rate_details.clear - rec.chargeback_rate_details.create(rates) - rec.created_on = fix_mtime - rec.save! - end + elsif fix_mtime > rec.created_on + _log.info("Updating [#{cbr[:description]}] with guid=[#{cbr[:guid]}]") + rec.update(cbr) + rec.chargeback_rate_details.clear + rec.chargeback_rate_details.create(rates) + rec.created_on = fix_mtime + rec.save! 
end end end diff --git a/app/models/chargeback_rate_detail.rb b/app/models/chargeback_rate_detail.rb index 6ec511bd705..c26bc938ef8 100644 --- a/app/models/chargeback_rate_detail.rb +++ b/app/models/chargeback_rate_detail.rb @@ -14,7 +14,7 @@ class ChargebackRateDetail < ApplicationRecord delegate :metric_column_key, :metric_key, :cost_keys, :rate_key, :to => :chargeable_field - FORM_ATTRIBUTES = %i(description per_time per_unit metric group source metric chargeable_field_id sub_metric).freeze + FORM_ATTRIBUTES = %i[description per_time per_unit metric group source metric chargeable_field_id sub_metric].freeze PER_TIME_TYPES = { "hourly" => N_("Hourly"), "daily" => N_("Daily"), @@ -27,6 +27,7 @@ class ChargebackRateDetail < ApplicationRecord # def showback_unit(p_per_unit = nil) return '' unless chargeable_field.detail_measure + {'bytes' => '', 'kilobytes' => 'KiB', 'megabytes' => 'MiB', @@ -46,7 +47,7 @@ def showback_unit(p_per_unit = nil) def populate_showback_rate(plan, rate_detail, entity) group = rate_detail.chargeable_field.showback_measure field, _, calculation = rate_detail.chargeable_field.showback_dimension - unit = rate_detail.showback_unit + unit = rate_detail.showback_unit showback_rate = ManageIQ::Showback::Rate.find_or_create_by(:entity => entity, :group => group, @@ -144,27 +145,27 @@ def find_rate(value) } def hourly_cost(value, consumption) - return 0.0 unless self.enabled? + return 0.0 unless enabled? (fixed_rate, variable_rate) = find_rate(value) hourly_fixed_rate = hourly(fixed_rate, consumption) hourly_variable_rate = hourly(variable_rate, consumption) - hourly_fixed_rate + rate_adjustment * value * hourly_variable_rate + hourly_fixed_rate + (rate_adjustment * value * hourly_variable_rate) end def hourly(rate, consumption) - hourly_rate = case per_time - when "hourly" then rate - when "daily" then rate / 24 - when "weekly" then rate / 24 / 7 - when "monthly" then rate / consumption.hours_in_month - when "yearly" then rate / 24 / 365 - else raise "rate time unit of '#{per_time}' not supported" - end - - hourly_rate + case per_time + when "hourly" then rate + when "daily" then rate / 24 + when "weekly" then rate / 24 / 7 + when "monthly" then rate / consumption.hours_in_month + when "yearly" then rate / 24 / 365 + else raise "rate time unit of '#{per_time}' not supported" + end + + end def rate_adjustment @@ -187,7 +188,7 @@ def friendly_rate s = "" chargeback_tiers.each do |tier| # Example: Daily @ .02 per MHz from 0.0 to Infinity - s += "#{per_time.to_s.capitalize} @ #{tier.fixed_rate} + "\ + s += "#{per_time.to_s.capitalize} @ #{tier.fixed_rate} + " \ "#{tier.variable_rate} per #{per_unit_display} from #{tier.start} to #{tier.finish}\n" end s.chomp @@ -210,9 +211,9 @@ def show_rates def save_tiers(tiers) temp = self.class.new(:chargeback_tiers => tiers) if temp.contiguous_tiers? - self.chargeback_tiers.replace(tiers) + chargeback_tiers.replace(tiers) else - temp.errors.each {|error| errors.add(error.attribute, error.message)} + temp.errors.each { |error| errors.add(error.attribute, error.message) } end end @@ -225,13 +226,13 @@ def contiguous_tiers? tiers = chargeback_tiers tiers.each_with_index do |tier, index| - if single_tier?(tier,tiers) + if single_tier?(tier, tiers) error = true if !tier.starts_with_zero? || !tier.ends_with_infinity? - elsif first_tier?(tier,tiers) + elsif first_tier?(tier, tiers) error = true if !tier.starts_with_zero? || tier.ends_with_infinity? 
- elsif last_tier?(tier,tiers) + elsif last_tier?(tier, tiers) error = true if !consecutive_tiers?(tier, tiers[index - 1]) || !tier.ends_with_infinity? - elsif middle_tier?(tier,tiers) + elsif middle_tier?(tier, tiers) error = true if !consecutive_tiers?(tier, tiers[index - 1]) || tier.ends_with_infinity? end @@ -259,19 +260,19 @@ def metric_and_cost_by(consumption, options) [metric_value, cost] end - def first_tier?(tier,tiers) + def first_tier?(tier, tiers) tier == tiers.first end - def last_tier?(tier,tiers) + def last_tier?(tier, tiers) tier == tiers.last end - def single_tier?(tier,tiers) + def single_tier?(tier, tiers) first_tier?(tier, tiers) && last_tier?(tier, tiers) end - def middle_tier?(tier,tiers) + def middle_tier?(tier, tiers) !first_tier?(tier, tiers) && !last_tier?(tier, tiers) end @@ -299,17 +300,17 @@ def self.default_rate_details_for(rate_type) rate_details.push(detail_new) - if detail_new.chargeable_field.metric == 'derived_vm_allocated_disk_storage' - volume_types = CloudVolume.volume_types - volume_types.push('unclassified') if volume_types.present? - volume_types.each do |volume_type| - storage_detail_new = detail_new.dup - storage_detail_new.sub_metric = volume_type - detail[:tiers].sort_by { |tier| tier[:start] }.each do |tier| - storage_detail_new.chargeback_tiers << ChargebackTier.new(tier.slice(*ChargebackTier::FORM_ATTRIBUTES)) - end - rate_details.push(storage_detail_new) + next unless detail_new.chargeable_field.metric == 'derived_vm_allocated_disk_storage' + + volume_types = CloudVolume.volume_types + volume_types.push('unclassified') if volume_types.present? + volume_types.each do |volume_type| + storage_detail_new = detail_new.dup + storage_detail_new.sub_metric = volume_type + detail[:tiers].sort_by { |tier| tier[:start] }.each do |tier| + storage_detail_new.chargeback_tiers << ChargebackTier.new(tier.slice(*ChargebackTier::FORM_ATTRIBUTES)) end + rate_details.push(storage_detail_new) end end end diff --git a/app/models/chargeback_rate_detail_currency.rb b/app/models/chargeback_rate_detail_currency.rb index c4d2658d878..6dabcc831f8 100644 --- a/app/models/chargeback_rate_detail_currency.rb +++ b/app/models/chargeback_rate_detail_currency.rb @@ -1,14 +1,12 @@ require "money" class ChargebackRateDetailCurrency < ApplicationRecord - belongs_to :chargeback_rate_detail - validates :code, :presence => true, :length => {:maximum => 100} validates :name, :presence => true, :length => {:maximum => 100} validates :full_name, :presence => true, :length => {:maximum => 100} validates :symbol, :presence => true, :length => {:maximum => 100} - has_many :chargeback_rate_detail, :foreign_key => "chargeback_rate_detail_currency_id" + has_many :chargeback_rate_detail self.table_name = 'currencies' diff --git a/app/models/chargeback_rate_detail_measure.rb b/app/models/chargeback_rate_detail_measure.rb index e5a1774f256..440251a0d17 100644 --- a/app/models/chargeback_rate_detail_measure.rb +++ b/app/models/chargeback_rate_detail_measure.rb @@ -13,12 +13,13 @@ def measures def adjust(from_unit, to_unit) return 1 if from_unit == to_unit + jumps = units.index(to_unit) - units.index(from_unit) BigDecimal(step)**jumps end private def units_same_length - unless (units.count == units_display.count) + unless units.count == units_display.count errors.add("Units Problem", "Units_display length diferent that the units length") end end diff --git a/app/models/chargeback_tier.rb b/app/models/chargeback_tier.rb index 0b128939a74..f7008ec9163 100644 --- a/app/models/chargeback_tier.rb +++ 
b/app/models/chargeback_tier.rb @@ -7,7 +7,7 @@ class ChargebackTier < ApplicationRecord default_scope { order(:start => :asc) } - FORM_ATTRIBUTES = %i(fixed_rate variable_rate start finish).freeze + FORM_ATTRIBUTES = %i[fixed_rate variable_rate start finish].freeze def self.to_float(s) if s.to_s.include?("Infinity") @@ -18,7 +18,7 @@ def self.to_float(s) end def includes?(value) - starts_with_zero? && value.zero? || value > start && value.to_f <= finish + (starts_with_zero? && value.zero?) || (value > start && value.to_f <= finish) end def starts_with_zero? diff --git a/app/models/chargeback_vm.rb b/app/models/chargeback_vm.rb index 6f44a01584a..c0e6290e9d2 100644 --- a/app/models/chargeback_vm.rb +++ b/app/models/chargeback_vm.rb @@ -31,15 +31,15 @@ class ChargebackVm < Chargeback :storage_used_cost => :float, :storage_used_metric => :float, :storage_cost => :float, - :total_cost => :float, + :total_cost => :float ) - DEFAULT_STORAGE_METRICS = %w( + DEFAULT_STORAGE_METRICS = %w[ storage_allocated_unclassified_metric storage_allocated_unclassified_cost storage_allocated_metric storage_allocated_cost - ).freeze + ].freeze cache_with_timeout(:current_volume_types) do volume_types = CloudVolume.volume_types @@ -64,7 +64,7 @@ def self.attribute_names # } def self.dynamic_columns_for(column_type) current_volume_types.each_with_object({}) do |volume_type, result| - %i(metric cost rate).collect do |type| + %i[metric cost rate].collect do |type| result["storage_allocated_#{volume_type || 'unclassified'}_#{type}"] = column_type end end @@ -115,7 +115,7 @@ def self.extra_resources_without_rollups(region) end def self.report_static_cols - %w(vm_name) + %w[vm_name] end def self.sub_metric_columns @@ -160,24 +160,24 @@ def self.vm_owner(consumption, region) def self.vms(region) @vms ||= {} - @vms[region] ||= - begin + # Find Vms by user or by tag - if @options[:entity_id] - Vm.where(:id => @options[:entity_id]) - elsif @options[:owner] - user = User.lookup_by_userid(@options[:owner]) + @vms[region] ||= + if @options[:entity_id] + Vm.where(:id => @options[:entity_id]) + elsif @options[:owner] + user = User.lookup_by_userid(@options[:owner]) if user.nil? _log.error("Unable to find user '#{@options[:owner]}'. Calculating chargeback costs aborted.") raise MiqException::Error, _("Unable to find user '%{name}'") % {:name => @options[:owner]} end user.vms - elsif @options[:tag] - vms = Vm.find_tagged_with(:all => @options[:tag], :ns => '*') + elsif @options[:tag] + vms = Vm.find_tagged_with(:all => @options[:tag], :ns => '*') vms &= @report_user.accessible_vms if @report_user && @report_user.self_service? vms - elsif @options[:tenant_id] - tenant = Tenant.find_by(:id => @options[:tenant_id]) + elsif @options[:tenant_id] + tenant = Tenant.find_by(:id => @options[:tenant_id]) if tenant.nil? error_message = "Unable to find tenant '#{@options[:tenant_id]}'" _log.info("#{error_message}. Calculating chargeback costs skipped for #{@options[:tenant_id]} in region #{region}.") @@ -197,17 +197,17 @@ def self.vms(region) else Vm.where(:tenant_id => tenant.subtree.select(:id)) end - elsif @options[:service_id] - service = Service.find(@options[:service_id]) + elsif @options[:service_id] + service = Service.find(@options[:service_id]) if service.nil? _log.error("Unable to find service '#{@options[:service_id]}'. 
Calculating chargeback costs aborted.") raise MiqException::Error, "Unable to find service '#{@options[:service_id]}'" end service.vms - else - raise _('Unable to find strategy for VM selection') - end + else + raise _('Unable to find strategy for VM selection') end + end def self.display_name(number = 1) diff --git a/app/models/classification.rb b/app/models/classification.rb index 6014b727d50..9630e05fd12 100644 --- a/app/models/classification.rb +++ b/app/models/classification.rb @@ -31,7 +31,7 @@ class Classification < ApplicationRecord scope :is_category, -> { where(:parent_id => nil) } scope :is_entry, -> { where.not(:parent_id => nil) } - scope :with_writable_parents, -> { includes(:parent).where(:parents_classifications => { :read_only => false}) } + scope :with_writable_parents, -> { includes(:parent).where(:parents_classifications => {:read_only => false}) } DEFAULT_NAMESPACE = "/managed".freeze @@ -73,7 +73,7 @@ def self.managed with_tag_name.where(tags_arel[:name].matches_regexp("/managed/[^\\/]+$")) end - attr_writer :ns + attr_writer :ns, :name def ns @ns ||= DEFAULT_NAMESPACE if new_record? @@ -163,7 +163,7 @@ def self.bulk_reassignment(options = {}) begin d.remove_entry_from(t) - rescue StandardError => err + rescue => err _log.error("Error occurred while removing entry name: [#{d.name}] from #{options[:model]} name: #{t.name}") _log.error("#{err.class} - #{err}") failed_deletes[t] << d @@ -175,7 +175,7 @@ def self.bulk_reassignment(options = {}) begin a.assign_entry_to(t) - rescue StandardError => err + rescue => err _log.error("Error occurred while adding entry name: [#{a.name}] to #{options[:model]} name: #{t.name}") _log.error("#{err.class} - #{err}") failed_adds[t] << a @@ -217,13 +217,13 @@ def self.categories(region_id = my_region_number, ns = DEFAULT_NAMESPACE) def self.category_names_for_perf_by_tag(region_id = my_region_number, ns = DEFAULT_NAMESPACE) in_region(region_id).is_category.where(:perf_by_tag => true) - .includes(:tag) - .collect { |c| c.name if c.tag2ns(c.tag.name) == ns } - .compact + .includes(:tag) + .collect { |c| c.name if c.tag2ns(c.tag.name) == ns } + .compact end def self.find_assigned_entries(obj, ns = DEFAULT_NAMESPACE) - unless obj.respond_to?("tag_with") + unless obj.respond_to?(:tag_with) raise _("Class '%{name}' is not eligible for classification") % {:name => obj.class} end @@ -268,6 +268,7 @@ def self.tag_to_model_hash(tag) def add_entry(options) raise _("entries can only be added to classifications") unless category? + # Inherit from parent classification options.merge!(:read_only => read_only, :syntax => syntax, :single_value => single_value, :ns => ns) children.create!(options) @@ -275,8 +276,9 @@ def add_entry(options) def lookup_by_entry(type) raise _("method is only available for an entry") if category? + klass = type.constantize - unless klass.respond_to?("find_tagged_with") + unless klass.respond_to?(:find_tagged_with) raise _("Class '%{type}' is not eligible for classification") % {:type => type} end @@ -288,7 +290,7 @@ def lookup_by_entry(type) def assign_entry_to(obj, is_request = true) raise _("method is only available for an entry") if category? 
- unless obj.respond_to?("tag_with") + unless obj.respond_to?(:tag_with) raise _("Class '%{name}' is not eligible for classification") % {:name => obj.class} end @@ -331,8 +333,6 @@ def name @name ||= tag2name(tag_name || tag.name) end - attr_writer :name - def self.lookup_category_by_description(description, region_id = my_region_number) is_category.in_region(region_id).find_by(:description => description) end @@ -397,7 +397,7 @@ def self.export_to_yaml end def export_to_array - h = attributes.except(*%w(id tag_id reserved parent_id)) + h = attributes.except(*%w[id tag_id reserved parent_id]) h["name"] = name h["entries"] = entries.collect(&:export_to_array).flatten if category? [h] @@ -467,6 +467,7 @@ def self.seed category = is_category.new(c.except(:entries)) next unless category.valid? # HACK: Skip seeding if categories aren't valid/unique + _log.info("Creating category #{c[:name]}") category.save! add_entries_from_hash(category, c[:entries]) diff --git a/app/models/classification_import.rb b/app/models/classification_import.rb index ce5af66af1a..feee6a6f99f 100644 --- a/app/models/classification_import.rb +++ b/app/models/classification_import.rb @@ -54,11 +54,11 @@ def verify @verified_data[vms[0].id][line["category"]] ||= [] entry = nil cat.entries.each do |e| - if e.description == line["entry"] - @verified_data[vms[0].id][line["category"]].push(line["entry"]) - entry = e - break - end + next unless e.description == line["entry"] + + @verified_data[vms[0].id][line["category"]].push(line["entry"]) + entry = e + break end if entry.nil? bad += 1 @@ -73,13 +73,13 @@ def verify @verified_data.each do |id, data| data.each do |category, entries| cat = Classification.find_by(:description => category) - if cat.single_value && entries.length > 1 - vm = VmOrTemplate.find_by(:id => id) - while entries.length > 1 - e = entries.shift - _log.warn("Vm: #{vm.name}, Location: #{vm.location}, Category: #{category}: Multiple values given for single-valued category, value #{e} will be ignored") - @errors.add(:singlevaluedcategory, "Vm #{vm.name}, Location: #{vm.location}, Category: #{category}: Multiple values given for single-valued category, value #{e} will be ignored") - end + next unless cat.single_value && entries.length > 1 + + vm = VmOrTemplate.find_by(:id => id) + while entries.length > 1 + e = entries.shift + _log.warn("Vm: #{vm.name}, Location: #{vm.location}, Category: #{category}: Multiple values given for single-valued category, value #{e} will be ignored") + @errors.add(:singlevaluedcategory, "Vm #{vm.name}, Location: #{vm.location}, Category: #{category}: Multiple values given for single-valued category, value #{e} will be ignored") end end end @@ -91,18 +91,19 @@ def verify def apply @verified_data.each do |id, data| vm = VmOrTemplate.find_by(:id => id) - if vm - data.each do |category, entries| - cat = Classification.find_by(:description => category) - next unless cat - entries.each do |ent| - cat.entries.each do |e| - if e.description == ent - _log.info("Vm: #{vm.name}, Location: #{vm.location}, Category: #{cat.description}: Applying entry #{ent}") - e.assign_entry_to(vm) - break - end - end + next unless vm + + data.each do |category, entries| + cat = Classification.find_by(:description => category) + next unless cat + + entries.each do |ent| + cat.entries.each do |e| + next unless e.description == ent + + _log.info("Vm: #{vm.name}, Location: #{vm.location}, Category: #{cat.description}: Applying entry #{ent}") + e.assign_entry_to(vm) + break end end end diff --git 
a/app/models/cloud_network.rb b/app/models/cloud_network.rb index 6576aec0e60..83f4000db88 100644 --- a/app/models/cloud_network.rb +++ b/app/models/cloud_network.rb @@ -18,13 +18,13 @@ class CloudNetwork < ApplicationRecord has_many :floating_ips, :dependent => :destroy has_many :vms, -> { distinct }, :through => :network_ports, :source => :device, :source_type => 'VmOrTemplate' - has_many :public_network_routers, :foreign_key => :cloud_network_id, :class_name => "NetworkRouter" + has_many :public_network_routers, :class_name => "NetworkRouter" has_many :public_network_vms, -> { distinct }, :through => :public_network_routers, :source => :vms has_many :private_networks, -> { distinct }, :through => :public_network_routers, :source => :cloud_networks # TODO(lsmola) figure out what this means, like security groups used by VMs in the network? It's not being # refreshed, so we can probably delete this association - has_many :security_groups + has_many :security_groups has_many :firewall_rules, :as => :resource, :dependent => :destroy # Use for virtual columns, mainly for modeling array and hash types, we get from the API @@ -35,8 +35,8 @@ class CloudNetwork < ApplicationRecord virtual_column :qos_policy_id, :type => :string # Define all getters and setters for extra_attributes related virtual columns - %i(maximum_transmission_unit port_security_enabled qos_policy_id).each do |action| - define_method("#{action}=") do |value| + %i[maximum_transmission_unit port_security_enabled qos_policy_id].each do |action| + define_method(:"#{action}=") do |value| extra_attributes_save(action, value) end @@ -109,10 +109,10 @@ def raw_delete_cloud_network(_options) def extra_attributes_save(key, value) self.extra_attributes = {} if extra_attributes.blank? - self.extra_attributes[key] = value + extra_attributes[key] = value end def extra_attributes_load(key) - self.extra_attributes[key] unless extra_attributes.blank? + extra_attributes[key] if extra_attributes.present? end end diff --git a/app/models/cloud_object_store_container.rb b/app/models/cloud_object_store_container.rb index eac5129e8a2..7b9a676de89 100644 --- a/app/models/cloud_object_store_container.rb +++ b/app/models/cloud_object_store_container.rb @@ -41,6 +41,7 @@ def self.cloud_object_store_container_create_queue(userid, ext_management_system def self.cloud_object_store_container_create(ems_id, options) raise ArgumentError, _("ems_id cannot be nil") if ems_id.nil? + ext_management_system = ExtManagementSystem.find(ems_id) raise ArgumentError, _("ext_management_system cannot be found") if ext_management_system.nil? 
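For illustration of the pattern in the CloudNetwork hunk above (and the CloudSubnet hunk below): each listed attribute gets a generated reader and writer, via define_method with an interpolated symbol literal, that delegates to the serialized extra_attributes column. A minimal standalone sketch, with a hypothetical ExtraAttributesExample class and a plain Hash standing in for that column:

class ExtraAttributesExample
  ATTRIBUTES = %i[maximum_transmission_unit port_security_enabled qos_policy_id].freeze

  def extra_attributes
    @extra_attributes ||= {}
  end

  ATTRIBUTES.each do |attr|
    # Generated writer, e.g. #maximum_transmission_unit=
    define_method(:"#{attr}=") { |value| extra_attributes[attr] = value }
    # Generated reader, e.g. #maximum_transmission_unit
    define_method(attr) { extra_attributes[attr] }
  end
end

example = ExtraAttributesExample.new
example.maximum_transmission_unit = 1500
example.maximum_transmission_unit # => 1500

The real models route through extra_attributes_save/extra_attributes_load rather than a bare Hash, but the generated accessors work the same way.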
diff --git a/app/models/cloud_resource_quota.rb b/app/models/cloud_resource_quota.rb index a230026a7d3..ab4fca69b6e 100644 --- a/app/models/cloud_resource_quota.rb +++ b/app/models/cloud_resource_quota.rb @@ -6,7 +6,7 @@ class CloudResourceQuota < ApplicationRecord # find the currently used value for this quota def used - send("#{name}_quota_used") + send(:"#{name}_quota_used") end def method_missing(method, *args, &block) diff --git a/app/models/cloud_subnet.rb b/app/models/cloud_subnet.rb index 3f392cce906..708156b8064 100644 --- a/app/models/cloud_subnet.rb +++ b/app/models/cloud_subnet.rb @@ -33,8 +33,8 @@ class CloudSubnet < ApplicationRecord virtual_column :network_type, :type => :string # Define all getters and setters for extra_attributes related virtual columns - %i(allocation_pools host_routes ip_version subnetpool_id network_type).each do |action| - define_method("#{action}=") do |value| + %i[allocation_pools host_routes ip_version subnetpool_id network_type].each do |action| + define_method(:"#{action}=") do |value| extra_attributes_save(action, value) end @@ -84,10 +84,10 @@ def raw_delete_cloud_subnet def extra_attributes_save(key, value) self.extra_attributes = {} if extra_attributes.blank? - self.extra_attributes[key] = value + extra_attributes[key] = value end def extra_attributes_load(key) - self.extra_attributes[key] unless extra_attributes.blank? + extra_attributes[key] if extra_attributes.present? end end diff --git a/app/models/cloud_tenant.rb b/app/models/cloud_tenant.rb index b90e55f95fd..d20e21eb87f 100644 --- a/app/models/cloud_tenant.rb +++ b/app/models/cloud_tenant.rb @@ -1,6 +1,6 @@ class CloudTenant < ApplicationRecord include CloudTenancyMixin - TENANT_MAPPING_ASSOCIATIONS = %i(vms_and_templates).freeze + TENANT_MAPPING_ASSOCIATIONS = %i[vms_and_templates].freeze include NewWithTypeStiMixin include CustomActionsMixin @@ -30,7 +30,7 @@ class CloudTenant < ApplicationRecord has_many :flavors, :through => :cloud_tenant_flavors has_many :cloud_volume_types, :through => :ext_management_system - alias_method :direct_cloud_networks, :cloud_networks + alias direct_cloud_networks cloud_networks acts_as_miq_taggable @@ -178,12 +178,14 @@ def self.with_ext_management_system(ems_id) def self.post_refresh_ems(ems_id, _) ems = ExtManagementSystem.find(ems_id) - MiqQueue.put_unless_exists( - :class_name => ems.class.name, - :instance_id => ems_id, - :method_name => 'sync_cloud_tenants_with_tenants', - :zone => ems.my_zone - ) if ems.supports?(:cloud_tenant_mapping) + if ems.supports?(:cloud_tenant_mapping) + MiqQueue.put_unless_exists( + :class_name => ems.class.name, + :instance_id => ems_id, + :method_name => 'sync_cloud_tenants_with_tenants', + :zone => ems.my_zone + ) + end end def self.tenant_joins_clause(scope) diff --git a/app/models/cloud_volume.rb b/app/models/cloud_volume.rb index d6980c1bc50..795de34d31f 100644 --- a/app/models/cloud_volume.rb +++ b/app/models/cloud_volume.rb @@ -78,6 +78,7 @@ def self.create_volume_queue(userid, ext_management_system, options = {}) def self.create_volume(ems_id, options = {}) raise ArgumentError, _("ems_id cannot be nil") if ems_id.nil? + ext_management_system = ExtManagementSystem.find(ems_id) raise ArgumentError, _("ext_management_system cannot be found") if ext_management_system.nil? 
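Several hunks in this patch (CloudResourceQuota#used above, DriftState purging and EmsCluster further down) change send("#{name}_...") to send(:"#{name}_..."). Both forms dispatch to the same method; the interpolated symbol literal simply names the method as a Symbol instead of going through a String. A tiny sketch with a hypothetical QuotaExample class:

class QuotaExample
  def cores_quota_used
    7
  end

  # Dynamic dispatch on a quota name, mirroring the CloudResourceQuota#used pattern.
  def used(name)
    send(:"#{name}_quota_used") # equivalent in behavior to send("#{name}_quota_used")
  end
end

QuotaExample.new.used("cores") # => 7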
diff --git a/app/models/compliance.rb b/app/models/compliance.rb index 299560f7bb1..f7345716d94 100644 --- a/app/models/compliance.rb +++ b/app/models/compliance.rb @@ -1,6 +1,6 @@ class Compliance < ApplicationRecord include Purging - belongs_to :resource, :polymorphic => true + belongs_to :resource, :polymorphic => true has_many :compliance_details, :dependent => :destroy def self.check_compliance_queue(targets, inputs = {}) @@ -15,17 +15,17 @@ def self.check_compliance_queue(targets, inputs = {}) def self.scan_and_check_compliance_queue(targets, inputs = {}) Array.wrap(targets).each do |target| - if target.kind_of?(Host) - # Queue this with the vc-refresher taskid, so that any concurrent ems_refreshes don't clash with this one. - MiqQueue.put( - :class_name => name, - :method_name => 'scan_and_check_compliance', - :args => [[target.class.name, target.id], inputs], - :task_id => 'vc-refresher', - :role => "ems_inventory", - :zone => target.ext_management_system.try(:my_zone) - ) - end + next unless target.kind_of?(Host) + + # Queue this with the vc-refresher taskid, so that any concurrent ems_refreshes don't clash with this one. + MiqQueue.put( + :class_name => name, + :method_name => 'scan_and_check_compliance', + :args => [[target.class.name, target.id], inputs], + :task_id => 'vc-refresher', + :role => "ems_inventory", + :zone => target.ext_management_system.try(:my_zone) + ) end end @@ -67,11 +67,12 @@ def self.check_compliance(target, _inputs = {}) end end target_class = target.class.base_model.name.downcase - target_class = "vm" if target_class.match("template") + target_class = "vm" if target_class.match?("template") unless target.respond_to?(:compliances) raise _("Compliance check not supported for %{class_name} objects") % {:class_name => target.class.name} end + check_event = "#{target_class}_compliance_check" _log.info("Checking compliance...") results = MiqPolicy.enforce_policy(target, check_event) @@ -88,7 +89,6 @@ def self.check_compliance(target, _inputs = {}) event = results[:result] ? "#{target_class}_compliance_passed" : "#{target_class}_compliance_failed" _log.info("Raising EVM Event: #{event}") MiqEvent.raise_evm_event_queue(target, event) - # results[:result] end @@ -96,7 +96,7 @@ def self.set_compliancy(compliant, target, event, details) name = target.respond_to?(:name) ? target.name : "NA" _log.info("Marking as #{compliant ? "" : "Non-"}Compliant Object with Class: [#{target.class}], Id: [#{target.id}], Name: [#{name}]") - comp = create(:resource => target, :compliant => compliant, :event_type => event, :timestamp => Time.now.utc) + comp = create(:resource => target, :compliant => compliant, :event_type => event, :timestamp => Time.now.utc) details.each do |p| dhash = { diff --git a/app/models/condition.rb b/app/models/condition.rb index e27f0a9ffa2..8f631aa7b71 100644 --- a/app/models/condition.rb +++ b/app/models/condition.rb @@ -32,7 +32,7 @@ class Condition < ApplicationRecord def applies_to?(rec, inputs = {}) rec_model = rec.class.base_model.name - rec_model = "Vm" if rec_model.downcase.match("template") + rec_model = "Vm" if rec_model.downcase.match?("template") return false if towhat && rec_model != towhat return true if applies_to_exp.nil? @@ -51,9 +51,9 @@ def self.evaluate(cond, rec, _inputs = {}, attr = :expression) case mode when "tag" - unless %w(any all none).include?(expression["include"]) + unless %w[any all none].include?(expression["include"]) raise _("condition '%{name}', include value \"%{value}\", is invalid. 
Should be one of \"any, all or none\"") % - {:name => name, :value => expression["include"]} + {:name => name, :value => expression["include"]} end result = expression["include"] != "any"
@@ -61,8 +61,8 @@ def self.evaluate(cond, rec, _inputs = {}, attr = :expression) if rec.is_tagged_with?(tag, :ns => expression["ns"]) result = true if expression["include"] == "any" result = false if expression["include"] == "none" - else - result = false if expression["include"] == "all" + elsif expression["include"] == "all" + result = false end end when "tag_expr", "tag_expr_v2", "object"
@@ -113,18 +113,18 @@ def self._subst(rec, opts, tag, mode) case mode.downcase when "exist" - ref.nil? ? value = false : value = ref.is_tagged_with?(tag, :ns => "*") + value = ref.nil? ? false : ref.is_tagged_with?(tag, :ns => "*") when "value" - if ref.kind_of?(Hash) - value = ref.fetch(tag, "") - else - value = ref.nil? ? "" : Tag.list(ref, :ns => tag) - end + value = if ref.kind_of?(Hash) + ref.fetch(tag, "") + else + ref.nil? ? "" : Tag.list(ref, :ns => tag) + end value = MiqExpression.quote(value, ohash[:type]&.to_sym) when "count" - ref.nil? ? value = 0 : value = ref.tag_list(:ns => tag).length + value = ref.nil? ? 0 : ref.tag_list(:ns => tag).length when "registry" - ref.nil? ? value = "" : value = registry_data(ref, tag, ohash) + value = ref.nil? ? "" : registry_data(ref, tag, ohash) value = MiqExpression.quote(value, ohash[:type]&.to_sym) end value
@@ -165,7 +165,7 @@ def self._subst_find(rec, expr) list = l.collect do |obj| value = MiqExpression.quote(obj.send(attr), opts[:type]&.to_sym) - value = value.gsub(/\\/, '\&\&') if value.kind_of?(String) + value = value.gsub("\\", '\&\&') if value.kind_of?(String) e = search.gsub(/<value[^>]*>.+<\/value>/im, value.to_s) obj if do_eval(e) end.compact
@@ -184,7 +184,7 @@ def self._subst_find(rec, expr) if checkmode == "count" e = check.gsub(/<count>/i, list.length.to_s) left, operator, right = e.split - raise _("Illegal operator, '%{operator}'") % {:operator => operator} unless %w(== != < > <= >=).include?(operator) + raise _("Illegal operator, '%{operator}'") % {:operator => operator} unless %w[== != < > <= >=].include?(operator) MiqPolicy.logger.debug("MIQ(condition-_subst_find): Check Expression after substitution: [#{e}]") result = !!left.to_f.send(operator, right.to_f)
@@ -203,7 +203,7 @@ def self._subst_find(rec, expr) list.each do |obj| opts, _ref = options2hash(raw_opts, obj) value = MiqExpression.quote(obj.send(checkattr), opts[:type]&.to_sym) - value = value.gsub(/\\/, '\&\&') if value.kind_of?(String) + value = value.gsub("\\", '\&\&') if value.kind_of?(String) e = check.gsub(/<value[^>]*>.+<\/value>/im, value.to_s) MiqPolicy.logger.debug("MIQ(condition-_subst_find): Check Expression after substitution: [#{e}]")
@@ -219,14 +219,14 @@ def self._subst_find(rec, expr) def self.options2hash(opts, rec) ref = rec ohash = {} - unless opts.blank? + if opts.present?
val = nil opts.split(",").each do |o| attr, val = o.split("=") ohash[attr.strip.downcase.to_sym] = val.strip.downcase end - if ohash[:ref] != rec.class.to_s.downcase && !exclude_from_object_ref_substitution(ohash[:ref], rec) - ref = rec.send(val) if val && rec.respond_to?(val) + if ohash[:ref] != rec.class.to_s.downcase && !exclude_from_object_ref_substitution(ohash[:ref], rec) && (val && rec.respond_to?(val)) + ref = rec.send(val) end end return ohash, ref @@ -242,7 +242,7 @@ def self.exclude_from_object_ref_substitution(reference, rec) def self.registry_data(ref, name, ohash) # HKLM\Software\Microsoft\Windows\CurrentVersion\explorer\Shell Folders\Common AppData == 'C:\Documents and Settings\All Users\Application Data' # HKLM\Software\Microsoft\Windows\CurrentVersion\explorer\Shell Folders : Common AppData == 'C:\Documents and Settings\All Users\Application Data' - return nil unless ref.respond_to?("registry_items") + return nil unless ref.respond_to?(:registry_items) registry_items = ref.registry_items if ohash[:key_exists] @@ -295,5 +295,4 @@ def self.import_from_hash(condition, options = {}) return c, status end - end # class Condition diff --git a/app/models/configuration_profile.rb b/app/models/configuration_profile.rb index 5007d5053b3..cdb53e484f1 100644 --- a/app/models/configuration_profile.rb +++ b/app/models/configuration_profile.rb @@ -26,11 +26,11 @@ class ConfigurationProfile < ApplicationRecord delegate :my_zone, :provider, :zone, :to => :manager - virtual_has_one :configuration_architecture, :class_name => 'ConfigurationArchitecture', :uses => :configuration_tags + virtual_has_one :configuration_architecture, :class_name => 'ConfigurationArchitecture', :uses => :configuration_tags virtual_has_one :configuration_compute_profile, :class_name => 'ConfigurationProfile', :uses => :configuration_tags virtual_has_one :configuration_domain, :class_name => 'ConfigurationDomain', :uses => :configuration_tags - virtual_has_one :configuration_environment, :class_name => 'ConfigurationEnvironment', :uses => :configuration_tags - virtual_has_one :configuration_realm, :class_name => 'ConfigurationRealm', :uses => :configuration_tags + virtual_has_one :configuration_environment, :class_name => 'ConfigurationEnvironment', :uses => :configuration_tags + virtual_has_one :configuration_realm, :class_name => 'ConfigurationRealm', :uses => :configuration_tags virtual_column :total_configured_systems, :type => :integer virtual_column :my_zone, :type => :string @@ -69,7 +69,7 @@ def tag_hash @tag_hash ||= configuration_tags.index_by(&:class) end - alias_method :configuration_manager, :manager + alias configuration_manager manager def total_configured_systems Rbac.filtered(configured_systems).count diff --git a/app/models/configured_system.rb b/app/models/configured_system.rb index 4e24788bcdb..b243eac093c 100644 --- a/app/models/configured_system.rb +++ b/app/models/configured_system.rb @@ -19,8 +19,8 @@ class ConfiguredSystem < ApplicationRecord has_and_belongs_to_many :configuration_tags alias ext_management_system manager - alias_attribute :name, :hostname - alias_method :configuration_manager, :manager + alias_attribute :name, :hostname + alias configuration_manager manager delegate :name, :to => :configuration_profile, :prefix => true, :allow_nil => true delegate :name, :to => :configuration_architecture, :prefix => true, :allow_nil => true @@ -57,7 +57,7 @@ class ConfiguredSystem < ApplicationRecord scope :with_inventory_root_group, ->(group_id) { where(:inventory_root_group_id => group_id) 
} scope :with_manager, ->(manager_id) { where(:manager_id => manager_id) } scope :with_configuration_profile_id, ->(profile_id) { where(:configuration_profile_id => profile_id) } - scope :without_configuration_profile_id, -> { where(:configuration_profile_id => nil) } + scope :without_configuration_profile_id, -> { where(:configuration_profile_id => nil) } scope :under_configuration_managers, -> { where(:manager => ManageIQ::Providers::ConfigurationManager.all) } def configuration_architecture @@ -82,6 +82,7 @@ def configuration_realm def counterparts return [] unless counterpart + [counterpart] + counterpart.counterparts.where.not(:id => id) end @@ -96,6 +97,7 @@ def provisionable? def self.provisionable?(ids) cs = ConfiguredSystem.where(:id => ids) return false if cs.blank? + cs.all?(&:provisionable?) end diff --git a/app/models/container.rb b/app/models/container.rb index 9f9721decf5..f2093b1fa4e 100644 --- a/app/models/container.rb +++ b/app/models/container.rb @@ -59,6 +59,7 @@ def perf_rollup_parents(interval_name = nil) def disconnect_inv return if archived? + _log.info("Disconnecting Container [#{name}] id [#{id}] from EMS") self.deleted_on = Time.now.utc save diff --git a/app/models/container_group.rb b/app/models/container_group.rb index d6b5645f0f3..1f390aad90e 100644 --- a/app/models/container_group.rb +++ b/app/models/container_group.rb @@ -16,7 +16,7 @@ class ContainerGroup < ApplicationRecord has_many :containers, :dependent => :destroy has_many :container_images, -> { distinct }, :through => :containers - belongs_to :ext_management_system, :foreign_key => "ems_id" + belongs_to :ext_management_system, :foreign_key => "ems_id" has_many :labels, -> { where(:section => "labels") }, # rubocop:disable Rails/HasManyOrHasOneDependent :class_name => "CustomAttribute", :as => :resource, @@ -105,8 +105,9 @@ def perf_rollup_parents(interval_name = nil) def disconnect_inv return if archived? + _log.info("Disconnecting Pod [#{name}] id [#{id}] from EMS [#{ext_management_system.name}] id [#{ext_management_system.id}]") - self.containers.each(&:disconnect_inv) + containers.each(&:disconnect_inv) self.container_services = [] self.container_replicator_id = nil self.container_build_pod_id = nil diff --git a/app/models/container_image.rb b/app/models/container_image.rb index f9d4ec1ff7e..5f578e7e5d9 100644 --- a/app/models/container_image.rb +++ b/app/models/container_image.rb @@ -60,6 +60,7 @@ def perf_rollup_parents(interval_name = nil) def full_name return docker_id if image_ref && image_ref.start_with?(DOCKER_PULLABLE_PREFIX) + result = "" result << "#{container_image_registry.full_name}/" unless container_image_registry.nil? result << name @@ -81,7 +82,7 @@ def docker_id end # The guid is required by the smart analysis infrastructure - alias_method :guid, :docker_id + alias guid docker_id def display_registry container_image_registry.present? ? container_image_registry.full_name : _("Unknown image source") @@ -115,6 +116,7 @@ def openscap_failed_rules_summary def disconnect_inv return if archived? 
+ _log.info("Disconnecting Image [#{name}] id [#{id}] from EMS [#{ext_management_system.name}] id [#{ext_management_system.id}]") self.container_image_registry = nil self.deleted_on = Time.now.utc @@ -122,7 +124,7 @@ def disconnect_inv end def self.disconnect_inv(ids) - _log.info "Disconnecting Images [#{ids}]" + _log.info("Disconnecting Images [#{ids}]") where(:id => ids).update_all(:container_image_registry_id => nil, :deleted_on => Time.now.utc) end @@ -134,5 +136,5 @@ def limit_memory_bytes containers.map(&:limit_memory_bytes).compact.sum end - alias_method :perform_metadata_sync, :sync_stashed_metadata + alias perform_metadata_sync sync_stashed_metadata end diff --git a/app/models/container_node.rb b/app/models/container_node.rb index 976fe4ce1c4..86365c139cc 100644 --- a/app/models/container_node.rb +++ b/app/models/container_node.rb @@ -43,7 +43,6 @@ class ContainerNode < ApplicationRecord has_many :miq_alert_statuses, :as => :resource delegate :my_zone, :to => :ext_management_system, :allow_nil => true - virtual_column :ready_condition_status, :type => :string, :uses => :container_conditions virtual_delegate :system_distribution, :to => "operating_system.distribution", :allow_nil => true, :type => :string virtual_delegate :kernel_version, :to => :operating_system, :allow_nil => true, :type => :string @@ -110,8 +109,9 @@ def external_logging_path def disconnect_inv return if archived? + _log.info("Disconnecting Node [#{name}] id [#{id}] from EMS [#{ext_management_system.name}]" \ - "id [#{ext_management_system.id}] ") + "id [#{ext_management_system.id}] ") self.deleted_on = Time.now.utc save end diff --git a/app/models/container_quota.rb b/app/models/container_quota.rb index 227c0f782c5..456e1225124 100644 --- a/app/models/container_quota.rb +++ b/app/models/container_quota.rb @@ -21,6 +21,7 @@ class ContainerQuota < ApplicationRecord def disconnect_inv return if archived? + _log.info("Archiving Container Quota [#{name}] id [#{id}]") # This allows looking only at ContainerQuotaItem created_at..deleted_on # without also checking parent ContaineQuota is active. diff --git a/app/models/container_quota_item.rb b/app/models/container_quota_item.rb index aad9084d369..d214e3e918c 100644 --- a/app/models/container_quota_item.rb +++ b/app/models/container_quota_item.rb @@ -13,6 +13,7 @@ class ContainerQuotaItem < ApplicationRecord def disconnect_inv return if archived? + _log.info("Archiving Container Quota id [#{container_quota_id}] Item [#{resource}]") # This allows looking only at ContainerQuotaItem created_at..deleted_on # without also checking parent ContaineQuota is active. 
diff --git a/app/models/container_replicator.rb b/app/models/container_replicator.rb index 566cc2cbf1a..21ebcaef543 100644 --- a/app/models/container_replicator.rb +++ b/app/models/container_replicator.rb @@ -7,7 +7,7 @@ class ContainerReplicator < ApplicationRecord include MiqPolicyMixin include TenantIdentityMixin - belongs_to :ext_management_system, :foreign_key => "ems_id" + belongs_to :ext_management_system, :foreign_key => "ems_id" has_many :container_groups belongs_to :container_project has_many :labels, -> { where(:section => "labels") }, # rubocop:disable Rails/HasManyOrHasOneDependent diff --git a/app/models/container_service.rb b/app/models/container_service.rb index 8f736378695..6599fc8af00 100644 --- a/app/models/container_service.rb +++ b/app/models/container_service.rb @@ -4,7 +4,7 @@ class ContainerService < ApplicationRecord # :name, :uid, :creation_timestamp, :resource_version, :namespace # :labels, :selector, :protocol, :port, :container_port, :portal_ip, :session_affinity - belongs_to :ext_management_system, :foreign_key => "ems_id" + belongs_to :ext_management_system, :foreign_key => "ems_id" has_and_belongs_to_many :container_groups, :join_table => :container_groups_container_services has_many :container_routes has_many :container_service_port_configs, :dependent => :destroy diff --git a/app/models/currency.rb b/app/models/currency.rb index 68668f7cf28..088b49ee0df 100644 --- a/app/models/currency.rb +++ b/app/models/currency.rb @@ -1,6 +1,4 @@ class Currency < ApplicationRecord - belongs_to :chargeback_rate_detail - validates :code, :presence => true, :length => {:maximum => 100} validates :name, :presence => true, :length => {:maximum => 100} validates :full_name, :presence => true, :length => {:maximum => 100} diff --git a/app/models/custom_attribute.rb b/app/models/custom_attribute.rb index dd820a869cc..b8a0728446b 100644 --- a/app/models/custom_attribute.rb +++ b/app/models/custom_attribute.rb @@ -1,6 +1,6 @@ class CustomAttribute < ApplicationRecord - ALLOWED_API_VALUE_TYPES = %w(DateTime Time Date).freeze - ALLOWED_API_SECTIONS = %w(metadata cluster_settings).freeze + ALLOWED_API_VALUE_TYPES = %w[DateTime Time Date].freeze + ALLOWED_API_SECTIONS = %w[metadata cluster_settings].freeze belongs_to :resource, :polymorphic => true serialize :serialized_value diff --git a/app/models/custom_button.rb b/app/models/custom_button.rb index 95b5e6b6acc..72c297d0398 100644 --- a/app/models/custom_button.rb +++ b/app/models/custom_button.rb @@ -68,6 +68,7 @@ def self.buttons_for(other, applies_to_id = nil) applies_to_class = other else raise _("Instance has no id") if other.id.nil? + applies_to_class = other.class.base_model.name applies_to_id = other.id end @@ -95,6 +96,7 @@ def applies_to=(other) self.applies_to_id = nil else raise _("Instance has no id") if other.id.nil? + self.applies_to_class = other.class.base_model.name self.applies_to_id = other.id end @@ -203,12 +205,14 @@ def get_resource_action def evaluate_enablement_expression_for(object) return true unless enablement_expression return false if enablement_expression && !object # list + enablement_expression.lenient_evaluate(object) end def evaluate_visibility_expression_for(object) return true unless visibility_expression return false if visibility_expression && !object # object == nil, method is called for list of objects + visibility_expression.lenient_evaluate(object) end @@ -225,18 +229,20 @@ def self.button_classes def visible_for_current_user? 
return false unless visibility.key?(:roles) + visibility[:roles].include?(User.current_user.miq_user_role_name) || visibility[:roles].include?("_ALL_") end def self.get_user(user) user = User.lookup_by_userid(user) if user.kind_of?(String) raise _("Unable to find user '%{user}'") % {:user => user} if user.nil? + user end def copy(options = {}) options[:guid] = SecureRandom.uuid - options.each_with_object(dup) { |(k, v), button| button.send("#{k}=", v) }.tap(&:save!) + options.each_with_object(dup) { |(k, v), button| button.send(:"#{k}=", v) }.tap(&:save!) end def self.display_name(number = 1) diff --git a/app/models/custom_button_set.rb b/app/models/custom_button_set.rb index a4adfb16d04..4a99b4b5df4 100644 --- a/app/models/custom_button_set.rb +++ b/app/models/custom_button_set.rb @@ -107,7 +107,7 @@ def self.filter_with_visibility_expression(custom_button_sets, object) def deep_copy(options) raise ArgumentError, "options[:owner] is required" if options[:owner].blank? - options.each_with_object(dup) { |(k, v), button_set| button_set.send("#{k}=", v) }.tap do |cbs| + options.each_with_object(dup) { |(k, v), button_set| button_set.send(:"#{k}=", v) }.tap do |cbs| cbs.guid = SecureRandom.uuid cbs.name = "#{name}-#{cbs.guid}" cbs.set_data[:button_order] = [] diff --git a/app/models/customization_template_kickstart.rb b/app/models/customization_template_kickstart.rb index 838b7c01b3c..9247163fd7a 100644 --- a/app/models/customization_template_kickstart.rb +++ b/app/models/customization_template_kickstart.rb @@ -17,7 +17,7 @@ def self.kernel_args(pxe_server, pxe_image, mac_address) File.join(pxe_server.access_url, pxe_server_filepath(pxe_server, pxe_image, mac_address)) end - { :ks => ks_access_path, :ksdevice => mac_address } + {:ks => ks_access_path, :ksdevice => mac_address} end def default_filename diff --git a/app/models/customization_template_sysprep.rb b/app/models/customization_template_sysprep.rb index d65a148d1a9..bb54a98778e 100644 --- a/app/models/customization_template_sysprep.rb +++ b/app/models/customization_template_sysprep.rb @@ -1,25 +1,25 @@ class CustomizationTemplateSysprep < CustomizationTemplate DISKPART_FILENAME = "diskpart.txt".freeze - DISKPART_CONTENTS = <<-EOF -select disk 0 -clean -create partition primary -select partition 1 -format fs=ntfs label="Windows" quick -assign letter=c -active -exit -EOF + DISKPART_CONTENTS = <<~EOF + select disk 0 + clean + create partition primary + select partition 1 + format fs=ntfs label="Windows" quick + assign letter=c + active + exit + EOF IMAGE_BAT_FILENAME = "image.bat".freeze - IMAGE_BAT_CONTENTS = <<-EOF -diskpart /s diskpart.txt -s:\\<%= evm[:windows_images_directory] %>\\imagex.exe /apply s:\\<%= evm[:windows_images_directory] %>\\<%= evm[:windows_image_path] %> <%= evm[:windows_image_index] %> c: -copy unattend.xml c:\\windows\\system32\\sysprep\\ -bcdboot c:\\windows /s c: -s:\\<%= evm[:windows_images_directory] %>\\curl <%= evm[:post_install_callback_url] %> -wpeutil shutdown -EOF + IMAGE_BAT_CONTENTS = <<~EOF + diskpart /s diskpart.txt + s:\\<%= evm[:windows_images_directory] %>\\imagex.exe /apply s:\\<%= evm[:windows_images_directory] %>\\<%= evm[:windows_image_path] %> <%= evm[:windows_image_index] %> c: + copy unattend.xml c:\\windows\\system32\\sysprep\\ + bcdboot c:\\windows /s c: + s:\\<%= evm[:windows_images_directory] %>\\curl <%= evm[:post_install_callback_url] %> + wpeutil shutdown + EOF UNATTEND_FILENAME = "unattend.xml".freeze @@ -38,7 +38,7 @@ def create_files_on_server(pxe_server, pxe_image, mac_address, 
windows_image, su image_bat_options = substitution_options.merge( :windows_images_directory => pxe_server.windows_images_directory.chomp("/").gsub("/", "\\\\"), :windows_image_path => windows_image.path.chomp("/").gsub("/", "\\\\"), - :windows_image_index => windows_image.index, + :windows_image_index => windows_image.index ) image_bat_contents = self.class.substitute_erb(IMAGE_BAT_CONTENTS, image_bat_options) diff --git a/app/models/dialog.rb b/app/models/dialog.rb index a538548d107..a03f6a57f12 100644 --- a/app/models/dialog.rb +++ b/app/models/dialog.rb @@ -15,7 +15,7 @@ class Dialog < ApplicationRecord before_destroy :reject_if_has_resource_actions validates :name, :unique_within_region => true - alias_attribute :name, :label + alias_attribute :name, :label attr_accessor :target_resource @@ -69,6 +69,7 @@ def validate_children dialog_tabs.each do |dt| next if dt.valid? + dt.errors.full_messages.each do |err_msg| errors.add(:base, _("Dialog %{dialog_label} / %{error_message}") % {:dialog_label => label, :error_message => err_msg}) @@ -140,11 +141,11 @@ def field(name) end def content(target = nil, resource_action = nil, all_attributes = false) - return DialogSerializer.new.serialize(Array[self], all_attributes) if target.nil? && resource_action.nil? + return DialogSerializer.new.serialize([self], all_attributes) if target.nil? && resource_action.nil? workflow = ResourceActionWorkflow.new({}, User.current_user, resource_action, :target => target) - DialogSerializer.new.serialize(Array[workflow.dialog], all_attributes) + DialogSerializer.new.serialize([workflow.dialog], all_attributes) end # Allows you to pass dialog tabs as a hash @@ -180,7 +181,7 @@ def deep_copy(new_attributes = {}) new_dialog.dialog_tabs = dialog_tabs.collect(&:deep_copy) new_attributes.each do |attr, value| - new_dialog.send("#{attr}=", value) + new_dialog.send(:"#{attr}=", value) end new_dialog end @@ -188,7 +189,7 @@ def deep_copy(new_attributes = {}) private def dialog_field_hash - @dialog_field_hash ||= dialog_fields.each_with_object({}) { |df, hash| hash[df.name] = df } + @dialog_field_hash ||= dialog_fields.index_by { |df| df.name } end def reject_if_has_resource_actions diff --git a/app/models/dialog/ansible_playbook_service_dialog.rb b/app/models/dialog/ansible_playbook_service_dialog.rb index b8db3718ab4..c91807f2ab4 100644 --- a/app/models/dialog/ansible_playbook_service_dialog.rb +++ b/app/models/dialog/ansible_playbook_service_dialog.rb @@ -10,7 +10,7 @@ def create_dialog(label, extra_vars, hosts = 'localhost') Dialog.new(:label => label, :buttons => "submit,cancel").tap do |dialog| tab = dialog.dialog_tabs.build(:display => "edit", :label => "Basic Information", :position => 0) add_options_group(tab, 0, hosts) - unless extra_vars.blank? + if extra_vars.present? add_variables_group(tab, 1, extra_vars) end dialog.save! 
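The CustomizationTemplateSysprep hunk above switches the heredocs from <<-EOF to <<~EOF so their bodies can be indented alongside the constant definitions: <<- only allows the terminator to be indented, whereas <<~ also strips the common leading whitespace from every line of the body. A small sketch with hypothetical dash_heredoc/squiggly_heredoc methods:

def dash_heredoc
  <<-EOF
    select disk 0
    clean
  EOF
end

def squiggly_heredoc
  <<~EOF
    select disk 0
    clean
  EOF
end

dash_heredoc     # => "    select disk 0\n    clean\n" (indentation preserved)
squiggly_heredoc # => "select disk 0\nclean\n"         (common indentation stripped)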
diff --git a/app/models/dialog_field.rb b/app/models/dialog_field.rb index 0b9e2435e2d..d470bddd7ed 100644 --- a/app/models/dialog_field.rb +++ b/app/models/dialog_field.rb @@ -1,8 +1,7 @@ class DialogField < ApplicationRecord include NewWithTypeStiMixin - attr_accessor :value - attr_accessor :dialog + attr_accessor :value, :dialog belongs_to :dialog_group has_one :resource_action, :as => :resource, :dependent => :destroy @@ -25,13 +24,13 @@ class DialogField < ApplicationRecord alias_attribute :order, :position - validates_presence_of :name - validates :name, :exclusion => {:in => %w(action controller), + validates :name, :presence => true + validates :name, :exclusion => {:in => %w[action controller], :message => "Field Name %{value} is reserved."} attribute :required, :default => false attribute :visible, :default => true - validates :visible, inclusion: { in: [ true, false ] } + validates :visible, :inclusion => {:in => [true, false]} attribute :load_values_on_init, :default => true serialize :values @@ -70,7 +69,7 @@ class DialogField < ApplicationRecord "DialogFieldRadioButton" => N_("Radio Button") } - DIALOG_FIELD_DYNAMIC_CLASSES = %w( + DIALOG_FIELD_DYNAMIC_CLASSES = %w[ DialogFieldCheckBox DialogFieldDateControl DialogFieldDateTimeControl @@ -78,7 +77,7 @@ class DialogField < ApplicationRecord DialogFieldRadioButton DialogFieldTextAreaBox DialogFieldTextBox - ) + ] def self.dialog_field_types DIALOG_FIELD_TYPES @@ -151,7 +150,7 @@ def trigger_automate_value_updates def update_dialog_field_responders(id_list) dialog_field_responders.destroy_all - self.dialog_field_responders = available_dialog_field_responders(id_list) unless id_list.blank? + self.dialog_field_responders = available_dialog_field_responders(id_list) if id_list.present? end def deep_copy diff --git a/app/models/dialog_field_association_validator.rb b/app/models/dialog_field_association_validator.rb index bbdfde1dabe..ffd960ee08b 100644 --- a/app/models/dialog_field_association_validator.rb +++ b/app/models/dialog_field_association_validator.rb @@ -1,7 +1,9 @@ class DialogFieldAssociationValidator class DialogFieldAssociationCircularReferenceError < RuntimeError; end + def check_for_circular_references(hash, k, collection = []) raise DialogFieldAssociationCircularReferenceError, "#{k} already exists in #{collection}" if collection.include?(k) + collection << k hash[k]&.each do |val| check_for_circular_references(hash, val, collection.dup) diff --git a/app/models/dialog_field_check_box.rb b/app/models/dialog_field_check_box.rb index fa4f6f8d93e..1b3bc280cd3 100644 --- a/app/models/dialog_field_check_box.rb +++ b/app/models/dialog_field_check_box.rb @@ -1,5 +1,5 @@ class DialogFieldCheckBox < DialogField - AUTOMATE_VALUE_FIELDS = %w(required read_only visible description).freeze + AUTOMATE_VALUE_FIELDS = %w[required read_only visible description].freeze def checked? value == "t" @@ -15,10 +15,11 @@ def script_error_values def normalize_automate_values(automate_hash) self.class::AUTOMATE_VALUE_FIELDS.each do |key| - send("#{key}=", automate_hash[key]) if automate_hash.key?(key) + send(:"#{key}=", automate_hash[key]) if automate_hash.key?(key) end return initial_values if automate_hash["value"].blank? 
+ automate_hash["value"].to_s end diff --git a/app/models/dialog_field_date_control.rb b/app/models/dialog_field_date_control.rb index a21a0e10f15..0006523467f 100644 --- a/app/models/dialog_field_date_control.rb +++ b/app/models/dialog_field_date_control.rb @@ -1,5 +1,5 @@ class DialogFieldDateControl < DialogField - AUTOMATE_VALUE_FIELDS = %w(show_past_dates read_only visible description).freeze + AUTOMATE_VALUE_FIELDS = %w[show_past_dates read_only visible description].freeze include TimezoneMixin @@ -13,6 +13,7 @@ def show_past_dates=(value) def automate_output_value return nil if @value.blank? + Date.parse(@value).iso8601 end @@ -23,14 +24,15 @@ def value def normalize_automate_values(automate_hash) self.class::AUTOMATE_VALUE_FIELDS.each do |key| - send("#{key}=", automate_hash[key]) if automate_hash.key?(key) + send(:"#{key}=", automate_hash[key]) if automate_hash.key?(key) end return default_time if automate_hash["value"].blank? + begin - return DateTime.parse(automate_hash["value"].to_s).iso8601 + DateTime.parse(automate_hash["value"].to_s).iso8601 rescue - return default_time + default_time end end @@ -47,6 +49,6 @@ def refresh_json_value private def default_time - with_current_user_timezone { Time.zone.now + 1.day }.strftime("%m/%d/%Y") + with_current_user_timezone { 1.day.from_now }.strftime("%m/%d/%Y") end end diff --git a/app/models/dialog_field_date_time_control.rb b/app/models/dialog_field_date_time_control.rb index c4112c1f772..13dfb045abc 100644 --- a/app/models/dialog_field_date_time_control.rb +++ b/app/models/dialog_field_date_time_control.rb @@ -1,8 +1,9 @@ class DialogFieldDateTimeControl < DialogFieldDateControl - AUTOMATE_VALUE_FIELDS = %w(show_past_dates read_only visible description).freeze + AUTOMATE_VALUE_FIELDS = %w[show_past_dates read_only visible description].freeze def automate_output_value return nil if @value.blank? + with_current_user_timezone { Time.zone.parse(@value).utc.iso8601 } end @@ -28,6 +29,6 @@ def refresh_json_value private def default_time - with_current_user_timezone { Time.zone.now + 1.day }.strftime("%m/%d/%Y %H:%M") + with_current_user_timezone { 1.day.from_now }.strftime("%m/%d/%Y %H:%M") end end diff --git a/app/models/dialog_field_drop_down_list.rb b/app/models/dialog_field_drop_down_list.rb index 161862e2153..0235b1e8669 100644 --- a/app/models/dialog_field_drop_down_list.rb +++ b/app/models/dialog_field_drop_down_list.rb @@ -9,9 +9,9 @@ def show_refresh_button? end def force_multi_value - return true if options[:force_multi_value].present? && - options[:force_multi_value] != "null" && - options[:force_multi_value] + true if options[:force_multi_value].present? && + options[:force_multi_value] != "null" && + options[:force_multi_value] end def force_multi_value=(setting) @@ -49,6 +49,7 @@ def refresh_json_value(checked_value) def automate_output_value return super unless force_multi_value + a = if @value.kind_of?(Integer) [@value] elsif @value.kind_of?(Array) @@ -62,6 +63,7 @@ def automate_output_value def automate_key_name return super unless force_multi_value + MiqAeEngine.create_automation_attribute_array_key(super) end @@ -84,6 +86,7 @@ def use_first_value_as_default def default_value_included?(values_list) if force_multi_value return false if default_value.blank? 
+ converted_values_list = values_list.collect { |value_pair| value_pair[0].send(value_modifier) } converted_default_values = JSON.parse(default_value).collect { |value| value.send(value_modifier) } overlap = converted_values_list & converted_default_values @@ -95,6 +98,7 @@ def default_value_included?(values_list) def coerce_default_value_into_proper_format return unless default_value + unless JSON.parse(default_value).kind_of?(Array) self.default_value = Array.wrap(default_value).to_json end diff --git a/app/models/dialog_field_importer.rb b/app/models/dialog_field_importer.rb index 8d071d1d159..4cec8c813d6 100644 --- a/app/models/dialog_field_importer.rb +++ b/app/models/dialog_field_importer.rb @@ -48,6 +48,7 @@ def set_category_for_tag_control(dialog_field, dialog_field_attributes) def adjust_category(opts) return nil if opts[:category_description].nil? + category = find_category(opts) category.try(:id).to_s if category.try(:description) == opts[:category_description] end diff --git a/app/models/dialog_field_serializer.rb b/app/models/dialog_field_serializer.rb index b0c5ac77fd5..71acf1af4e7 100644 --- a/app/models/dialog_field_serializer.rb +++ b/app/models/dialog_field_serializer.rb @@ -29,7 +29,7 @@ def serialize(dialog_field, all_attributes = false) dialog_field.options[:force_single_value] = dialog_field.options[:force_single_value] || category.single_value end end - json_options = dialog_field.dynamic? ? {:methods => [:type], :except => [:values]} : {:methods => %i(type values)} + json_options = dialog_field.dynamic? ? {:methods => [:type], :except => [:values]} : {:methods => %i[type values]} included_attributes(dialog_field.as_json(json_options), all_attributes).merge(extra_attributes) end end diff --git a/app/models/dialog_field_sorted_item.rb b/app/models/dialog_field_sorted_item.rb index d3240abfbf2..a88928df653 100644 --- a/app/models/dialog_field_sorted_item.rb +++ b/app/models/dialog_field_sorted_item.rb @@ -1,5 +1,5 @@ class DialogFieldSortedItem < DialogField - AUTOMATE_VALUE_FIELDS = %w(sort_by sort_order data_type default_value required read_only visible description).freeze + AUTOMATE_VALUE_FIELDS = %w[sort_by sort_order data_type default_value required read_only visible description].freeze def initialize_value_context if load_values_on_init @@ -22,6 +22,7 @@ def sort_by=(value) unless [:value, :description, :none].include?(value.to_sym) raise _("Invalid sort_by type <%{value}> specified.") % {:value => value} end + options[:sort_by] = value.to_sym end @@ -33,6 +34,7 @@ def sort_order=(value) unless [:ascending, :descending].include?(value.to_sym) raise _("Invalid sort_order type <%{value}> specified.") % {:value => value} end + options[:sort_order] = value.to_sym end @@ -55,11 +57,11 @@ def script_error_values def normalize_automate_values(automate_hash) AUTOMATE_VALUE_FIELDS.each do |key| - send("#{key}=", automate_hash[key]) if automate_hash.key?(key) + send(:"#{key}=", automate_hash[key]) if automate_hash.key?(key) end result = automate_hash["values"].to_a - result.blank? ? initial_values : result + (result.presence || initial_values) end def trigger_automate_value_updates @@ -105,6 +107,7 @@ def sort_data(data_to_sort) data_to_sort = data_to_sort.sort_by { |d| d.send(value_position).send(value_modifier) } return data_to_sort.reverse! 
if sort_order == :descending + data_to_sort end diff --git a/app/models/dialog_field_tag_control.rb b/app/models/dialog_field_tag_control.rb index 6c35e22683c..04e90742a10 100644 --- a/app/models/dialog_field_tag_control.rb +++ b/app/models/dialog_field_tag_control.rb @@ -54,7 +54,7 @@ def self.allowed_tag_categories def value_from_dialog_fields(dialog_values) value = dialog_values[automate_key_name] - value.gsub(/Classification::/, '') if value + value.gsub("Classification::", '') if value end def values @@ -80,8 +80,8 @@ def values available_tags.reverse! if sort_order == :descending - available_tags = blank_value + available_tags - available_tags + blank_value + available_tags + end def automate_output_value diff --git a/app/models/dialog_field_text_box.rb b/app/models/dialog_field_text_box.rb index 06aab77c22a..fdfe703a491 100644 --- a/app/models/dialog_field_text_box.rb +++ b/app/models/dialog_field_text_box.rb @@ -9,6 +9,7 @@ def initialize_value_context def value return nil if @value.nil? + convert_value_to_type end @@ -26,22 +27,24 @@ def protected? def value_from_dialog_fields(dialog_values) value_from_dialog_field = dialog_values[automate_key_name] - self.protected? ? ManageIQ::Password.decrypt(value_from_dialog_field) : value_from_dialog_field + protected? ? ManageIQ::Password.decrypt(value_from_dialog_field) : value_from_dialog_field end def automate_output_value return nil if @value.nil? - return ManageIQ::Password.try_encrypt(@value) if self.protected? + return ManageIQ::Password.try_encrypt(@value) if protected? + convert_value_to_type end def automate_key_name - return "password::#{super}" if self.protected? + return "password::#{super}" if protected? + super end def validate_field_data(dialog_tab, dialog_group) - return if !required? && @value.blank? || !visible + return if (!required? && @value.blank?) || !visible return "#{dialog_tab.label}/#{dialog_group.label}/#{label} is required" if required? && @value.blank? return "#{dialog_tab.label}/#{dialog_group.label}/#{label} must be an integer" if value_supposed_to_be_int? @@ -50,7 +53,8 @@ def validate_field_data(dialog_tab, dialog_group) rule = validator_rule if validator_type == 'regex' return unless rule - "#{dialog_tab.label}/#{dialog_group.label}/#{label} is invalid" unless @value.to_s =~ /#{rule}/ + + "#{dialog_tab.label}/#{dialog_group.label}/#{label} is invalid" unless /#{rule}/.match?(@value.to_s) end def script_error_values @@ -63,7 +67,7 @@ def sample_text def normalize_automate_values(automate_hash) self.class::AUTOMATE_VALUE_FIELDS.each do |key| - send("#{key}=", automate_hash[key]) if automate_hash.key?(key) + send(:"#{key}=", automate_hash[key]) if automate_hash.key?(key) end automate_hash["value"].to_s.presence || initial_values diff --git a/app/models/dialog_group.rb b/app/models/dialog_group.rb index 495e343d106..2445b99b29f 100644 --- a/app/models/dialog_group.rb +++ b/app/models/dialog_group.rb @@ -39,6 +39,7 @@ def validate_children dialog_fields.each do |df| next if df.valid? 
+ df.errors.full_messages.each do |err_msg| errors.add(:base, _("Box %{box_label} / %{error_message}") % {:box_label => label, :error_message => err_msg}) end diff --git a/app/models/dialog_import_validator.rb b/app/models/dialog_import_validator.rb index b089cacdda0..87f6ff34adc 100644 --- a/app/models/dialog_import_validator.rb +++ b/app/models/dialog_import_validator.rb @@ -47,6 +47,7 @@ def check_dialog_for_validity(dialog) def check_dialog_tabs_for_validity(dialog_tabs) dialog_tabs.each do |dialog_tab| raise ParsedNonDialogYamlError unless dialog_tab["dialog_groups"] + check_dialog_groups_for_validity(dialog_tab["dialog_groups"]) end end @@ -54,6 +55,7 @@ def check_dialog_tabs_for_validity(dialog_tabs) def check_dialog_groups_for_validity(dialog_groups) dialog_groups.each do |dialog_group| raise ParsedNonDialogYamlError unless dialog_group["dialog_fields"] + check_dialog_fields_for_validity(dialog_group["dialog_fields"]) end end @@ -61,6 +63,7 @@ def check_dialog_groups_for_validity(dialog_groups) def check_dialog_fields_for_validity(dialog_fields) dialog_fields.each do |dialog_field| raise InvalidDialogFieldTypeError unless valid_dialog_field_type?(dialog_field["type"]) + check_dialog_associations_for_validity(dialog_fields) end end @@ -68,8 +71,8 @@ def check_dialog_fields_for_validity(dialog_fields) def check_dialog_associations_for_validity(dialog_fields) associations = {} dialog_fields.each { |df| associations.merge!(df["name"] => df["dialog_field_responders"]) if df["dialog_field_responders"].present? } - unless associations.blank? - associations.each_key { |k| @dialog_field_association_validator.check_for_circular_references(associations, k) } + if associations.present? + associations.each_key { |k| @dialog_field_association_validator.check_for_circular_references(associations, k) } end end diff --git a/app/models/dialog_serializer.rb b/app/models/dialog_serializer.rb index 171251ef391..84372c7e3ec 100644 --- a/app/models/dialog_serializer.rb +++ b/app/models/dialog_serializer.rb @@ -1,5 +1,5 @@ class DialogSerializer < Serializer - EXCLUDED_ATTRIBUTES = %w(created_at id updated_at) + EXCLUDED_ATTRIBUTES = %w[created_at id updated_at] def initialize(dialog_tab_serializer = DialogTabSerializer.new) @dialog_tab_serializer = dialog_tab_serializer diff --git a/app/models/dialog_tab.rb b/app/models/dialog_tab.rb index ed96c626850..5b4c4bb2b1d 100644 --- a/app/models/dialog_tab.rb +++ b/app/models/dialog_tab.rb @@ -25,6 +25,7 @@ def validate_children dialog_groups.each do |dg| next if dg.valid? + dg.errors.full_messages.each do |err_msg| errors.add(:base, _("Tab %{tab_label} / %{error_message}") % {:tab_label => label, :error_message => err_msg}) end diff --git a/app/models/dictionary.rb b/app/models/dictionary.rb index 0e3693073cd..9ea00d8d8d7 100644 --- a/app/models/dictionary.rb +++ b/app/models/dictionary.rb @@ -6,7 +6,7 @@ def self.gettext(text, opts = {}) opts[:plural] = false if opts[:plural].nil? opts[:translate] = true if opts[:translate].nil? - key, suffix = text.split("__") # HACK: Sometimes we need to add a suffix to report columns, this should probably be moved into the presenter. + key, suffix = text.split("__") # HACK: Sometimes we need to add a suffix to report columns, this should probably be moved into the presenter. i18n_result = i18n_lookup(opts[:type], key) i18n_result ||= i18n_lookup(opts[:type], key.split(".").last) @@ -21,7 +21,7 @@ def self.gettext(text, opts = {}) opts[:translate] ? 
_(i18n_result) : i18n_result end - result << " (#{suffix.titleize})" if result && suffix # HACK: continued. i.e. Adding (Min) or (Max) to a column name. + result << " (#{suffix.titleize})" if result && suffix # HACK: continued. i.e. Adding (Min) or (Max) to a column name. return result if result return text unless opts[:notfound] diff --git a/app/models/disk.rb b/app/models/disk.rb index a1210b16b3c..b9b0e3de4a7 100644 --- a/app/models/disk.rb +++ b/app/models/disk.rb @@ -9,7 +9,7 @@ class Disk < ApplicationRecord virtual_column :unallocated_space, :type => :integer, :uses => :allocated_space virtual_column :unallocated_space_percent, :type => :float, :uses => :unallocated_space virtual_column :used_percent_of_provisioned, :type => :float - virtual_column :partitions_aligned, :type => :string, :uses => {:partitions => :aligned} + virtual_column :partitions_aligned, :type => :string, :uses => {:partitions => :aligned} virtual_column :used_disk_storage, :type => :integer, :arel => (lambda do |t| t.grouping(t.coalesce([t[:size_on_disk], t[:size], 0])) end) @@ -48,21 +48,25 @@ def self.find_cdroms def allocated_space return nil if size.nil? + partitions.inject(0) { |t, p| t + p.size } end def allocated_space_percent return nil if size.nil? || size == 0 + Float(allocated_space) / size * 100 end def unallocated_space return nil if size.nil? + size - allocated_space end def unallocated_space_percent return nil if size.nil? || size == 0 + Float(unallocated_space) / size * 100 end @@ -79,11 +83,13 @@ def rdm_disk? end def partitions_aligned - return "Not Applicable" if self.rdm_disk? + return "Not Applicable" if rdm_disk? + plist = partitions return "Unknown" if plist.empty? return "True" if plist.all?(&:aligned?) return "False" if plist.any? { |p| p.aligned? 
== false } + "Unknown" end diff --git a/app/models/drift_state/purging.rb b/app/models/drift_state/purging.rb index d861f7e8f9f..43416f9642a 100644 --- a/app/models/drift_state/purging.rb +++ b/app/models/drift_state/purging.rb @@ -15,12 +15,12 @@ def purge_window_size end def purge_count(mode, value) - send("purge_count_by_#{mode}", value) + send(:"purge_count_by_#{mode}", value) end # @param mode [:date, :remaining] def purge(mode, value, window = nil, &block) - send("purge_by_#{mode}", value, window, &block) + send(:"purge_by_#{mode}", value, window, &block) end private diff --git a/app/models/ems_cluster.rb b/app/models/ems_cluster.rb index 09deef40f9a..d5c111667bb 100644 --- a/app/models/ems_cluster.rb +++ b/app/models/ems_cluster.rb @@ -118,7 +118,7 @@ def direct_vms Relationship.resources(direct_vm_rels).sort_by { |v| v.name.downcase } end - alias_method :direct_miq_templates, :miq_templates + alias direct_miq_templates miq_templates def direct_vms_and_templates (direct_vms + direct_miq_templates).sort_by { |v| v.name.downcase } @@ -128,7 +128,7 @@ def direct_vm_ids Relationship.resource_ids(direct_vm_rels) end - alias_method :direct_miq_template_ids, :miq_template_ids + alias direct_miq_template_ids miq_template_ids def direct_vm_or_template_ids direct_vm_ids + direct_miq_template_ids @@ -154,8 +154,8 @@ def resource_pools_with_default Relationship.resources(child_and_grandchild_rels(:of_type => 'ResourcePool')) end - alias_method :add_resource_pool, :set_child - alias_method :remove_resource_pool, :remove_child + alias add_resource_pool set_child + alias remove_resource_pool remove_child def remove_all_resource_pools remove_all_children(:of_type => 'ResourcePool') @@ -175,7 +175,7 @@ def all_resource_pools_with_default # Parent relationship methods def parent_folder - detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w(host vm).include?(a.name) } # TODO: Fix this to use EmsFolder#hidden? + detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w[host vm].include?(a.name) } # TODO: Fix this to use EmsFolder#hidden? end def parent_datacenter @@ -207,6 +207,7 @@ def event_where_clause(assoc = :ems_events) def ems_events ewc = event_where_clause return [] if ewc.blank? + EmsEvent.where(ewc).order("timestamp").to_a end @@ -218,7 +219,7 @@ def scan(_userid = "system") :affinity => ext_management_system, :class_name => self.class.to_s, :method_name => "save_drift_state", - :instance_id => id, + :instance_id => id ) end @@ -237,11 +238,12 @@ def memory_reserve def effective_resource(resource) resource = resource.to_s - unless %w(cpu vcpu memory).include?(resource) + unless %w[cpu vcpu memory].include?(resource) raise ArgumentError, _("Unknown resource %{name}") % {:name => resource.inspect} end + resource = "cpu" if resource == "vcpu" - send("effective_#{resource}") + send(:"effective_#{resource}") end # @@ -276,7 +278,7 @@ def self.get_perf_collection_object_list def get_perf_collection_object_list hosts = hosts_enabled_for_perf_capture - self.perf_capture_enabled? ? [self] + hosts : hosts + perf_capture_enabled? ? [self] + hosts : hosts end def perf_capture_enabled_host_ids=(ids) diff --git a/app/models/ems_event.rb b/app/models/ems_event.rb index 9324669d817..346f10fe9f4 100644 --- a/app/models/ems_event.rb +++ b/app/models/ems_event.rb @@ -212,7 +212,7 @@ def self.process_availability_zone_in_event!(event, options = {}) if vm.respond_to?(:availability_zone) availability_zone = vm.availability_zone unless availability_zone.nil? 
- event[:availability_zone_id] = availability_zone.id + event[:availability_zone_id] = availability_zone.id end end end @@ -228,6 +228,7 @@ def self.process_cluster_in_event!(event, options = {}) def self.first_chained_event(ems_id, chain_id) return nil if chain_id.nil? + EmsEvent.where(:ems_id => ems_id, :chain_id => chain_id).order(:id).first end @@ -281,7 +282,7 @@ def self.display_name(number = 1) private def self.event_allowed_ems_ref_keys - %w(vm_ems_ref dest_vm_ems_ref) + %w[vm_ems_ref dest_vm_ems_ref] end private_class_method :event_allowed_ems_ref_keys @@ -289,11 +290,11 @@ def self.create_event(event) event.delete_if { |k,| k.to_s.ends_with?("_ems_ref") && !event_allowed_ems_ref_keys.include?(k.to_s) } new_event = EmsEvent.create(event) unless EmsEvent.exists?( - :event_type => event[:event_type], - :timestamp => event[:timestamp], - :chain_id => event[:chain_id], - :ems_id => event[:ems_id], - :ems_ref => event[:ems_ref], + :event_type => event[:event_type], + :timestamp => event[:timestamp], + :chain_id => event[:chain_id], + :ems_id => event[:ems_id], + :ems_ref => event[:ems_ref] ) new_event.handle_event if new_event new_event @@ -338,7 +339,7 @@ def self.create_completed_event(event, orig_task = nil) :vm_ems_ref => source_event.vm_ems_ref, :vm_or_template_id => source_event.vm_or_template_id } - new_event[:username] = event.username unless event.username.blank? + new_event[:username] = event.username if event.username.present? # Fill in the dest information if we have it unless dest_event.nil? @@ -348,10 +349,10 @@ def self.create_completed_event(event, orig_task = nil) new_event.merge!( :dest_host_name => dest_event.host_name, :dest_host_id => dest_event.host_id, - :dest_vm_name => dest_event.send("#{dest_key}vm_name"), - :dest_vm_location => dest_event.send("#{dest_key}vm_location"), - :dest_vm_ems_ref => dest_event.send("#{dest_key}vm_ems_ref"), - :dest_vm_or_template_id => dest_event.send("#{dest_key}vm_or_template_id") + :dest_vm_name => dest_event.send(:"#{dest_key}vm_name"), + :dest_vm_location => dest_event.send(:"#{dest_key}vm_location"), + :dest_vm_ems_ref => dest_event.send(:"#{dest_key}vm_ems_ref"), + :dest_vm_or_template_id => dest_event.send(:"#{dest_key}vm_or_template_id") ) end @@ -390,7 +391,7 @@ def get_refresh_target(target_type) def vm_refresh_target (vm_or_template && vm_or_template.ext_management_system ? vm_or_template : host_refresh_target) end - alias_method :src_vm_refresh_target, :vm_refresh_target + alias src_vm_refresh_target vm_refresh_target def src_vm_or_dest_host_refresh_target vm_or_template ? vm_refresh_target : dest_host_refresh_target @@ -399,7 +400,7 @@ def src_vm_or_dest_host_refresh_target def host_refresh_target (host && host.ext_management_system ? host : ems_refresh_target) end - alias_method :src_host_refresh_target, :host_refresh_target + alias src_host_refresh_target host_refresh_target def dest_vm_refresh_target (dest_vm_or_template && dest_vm_or_template.ext_management_system ? 
dest_vm_or_template : dest_host_refresh_target) @@ -412,7 +413,7 @@ def dest_host_refresh_target def ems_cluster_refresh_target ext_management_system end - alias_method :src_ems_cluster_refresh_target, :ems_cluster_refresh_target + alias src_ems_cluster_refresh_target ems_cluster_refresh_target def ems_refresh_target ext_management_system diff --git a/app/models/ems_event/automate.rb b/app/models/ems_event/automate.rb index c2d94523736..16ee5de5c4a 100644 --- a/app/models/ems_event/automate.rb +++ b/app/models/ems_event/automate.rb @@ -18,7 +18,7 @@ def refresh(*targets, sync) targets = targets.flatten return if targets.blank? - refresh_targets = targets.collect { |t| get_target("#{t}_refresh_target") unless t.blank? }.compact.uniq + refresh_targets = targets.collect { |t| get_target("#{t}_refresh_target") if t.present? }.compact.uniq return if refresh_targets.empty? EmsRefresh.queue_refresh(refresh_targets, nil, :create_task => sync) @@ -32,8 +32,8 @@ def policy(target_str, policy_event, param = nil) return if target.nil? || policy_event.nil? || policy_src.nil? inputs = { - policy_src.class.table_name.to_sym => policy_src, - :ems_event => self + policy_src.class.table_name.to_sym => policy_src, + :ems_event => self } begin MiqEvent.raise_evm_event(target, policy_event, inputs) @@ -90,7 +90,7 @@ def src_vm_destroy_all_snapshots private def parse_policy_parameters(target_str, policy_event, param) - target = get_target(target_str) unless target_str.blank? + target = get_target(target_str) if target_str.present? policy_event ||= event_type policy_src = parse_policy_source(target, param) if target diff --git a/app/models/ems_folder.rb b/app/models/ems_folder.rb index 5e965130be5..082ad1c3b38 100644 --- a/app/models/ems_folder.rb +++ b/app/models/ems_folder.rb @@ -33,8 +33,8 @@ def folders children(:of_type => 'EmsFolder') end - alias_method :add_folder, :set_child - alias_method :remove_folder, :remove_child + alias add_folder set_child + alias remove_folder remove_child def remove_all_folders remove_all_children(:of_type => 'EmsFolder') @@ -53,8 +53,8 @@ def clusters children(:of_type => 'EmsCluster') end - alias_method :add_cluster, :set_child - alias_method :remove_cluster, :remove_child + alias add_cluster set_child + alias remove_cluster remove_child def remove_all_clusters remove_all_children(:of_type => 'EmsCluster') @@ -66,8 +66,8 @@ def hosts children(:of_type => 'Host') end - alias_method :add_host, :set_child - alias_method :remove_host, :remove_child + alias add_host set_child + alias remove_host remove_child def remove_all_hosts remove_all_children(:of_type => 'Host') @@ -87,8 +87,8 @@ def vms vms_and_templates.select { |v| v.kind_of?(Vm) } end - alias_method :add_vm, :set_child - alias_method :remove_vm, :remove_child + alias add_vm set_child + alias remove_vm remove_child def remove_all_vms remove_all_children(:of_type => 'Vm') diff --git a/app/models/ems_refresh.rb b/app/models/ems_refresh.rb index 550e3f8669e..25aeeb0f8b2 100644 --- a/app/models/ems_refresh.rb +++ b/app/models/ems_refresh.rb @@ -46,7 +46,7 @@ def self.queue_refresh(target, id = nil, opts = {}) queue_merge(ts, ems, opts[:create_task]) end - return task_ids if opts[:create_task] + task_ids if opts[:create_task] end def self.refresh(target, id = nil) @@ -60,11 +60,14 @@ def self.refresh(target, id = nil) # Split the targets into refresher groups groups = targets.group_by do |t| - ems = case - when t.respond_to?(:ext_management_system) then t.ext_management_system - when t.respond_to?(:manager_id) then 
manager_by_manager_id[t.manager_id] ||= t.manager - when t.respond_to?(:manager) then t.manager - else t + ems = if t.respond_to?(:ext_management_system) + t.ext_management_system + elsif t.respond_to?(:manager_id) + manager_by_manager_id[t.manager_id] ||= t.manager + elsif t.respond_to?(:manager) + t.manager + else + t end ems.refresher if ems.respond_to?(:refresher) end @@ -131,7 +134,7 @@ def self.queue_merge(targets, ems, create_task = false) # Items will be naturally serialized since there is a dedicated worker. MiqQueue.put_or_update(queue_options) do |msg, item| - targets = msg.nil? ? targets : msg.data.concat(targets) + targets = msg.data.concat(targets) unless msg.nil? targets = uniq_targets(targets) obj = targets.select { |t| t.kind_of?(ApplicationRecord) } if obj.present? @@ -167,7 +170,7 @@ def self.queue_merge(targets, ems, create_task = false) task_id end - def self.create_refresh_task(ems, targets) + def self.create_refresh_task(ems, _targets) task_options = { :action => "EmsRefresh(#{ems.name}) Refreshing relationships and power states", :userid => "system" @@ -220,8 +223,10 @@ def self.log_format_deletes(deletes) [:name, :product_name, :device_name].each do |k| next unless d.respond_to?(k) + v = d.send(k) next if v.nil? + s << " #{k}: [#{v}]" break end diff --git a/app/models/ems_refresh/link_inventory.rb b/app/models/ems_refresh/link_inventory.rb index 1f95976e9bc..b25e67b9fd0 100644 --- a/app/models/ems_refresh/link_inventory.rb +++ b/app/models/ems_refresh/link_inventory.rb @@ -32,12 +32,12 @@ def update_relats_by_ids(prev_ids, new_ids, disconnect_proc, connect_proc, bulk_ unless prev_ids.nil? || disconnect_proc.nil? prev_ids.each do |p| - begin - disconnect_proc.call(p) - rescue => err - _log.error("An error occurred while disconnecting id [#{p}]: #{err}") - _log.log_backtrace(err) - end + + disconnect_proc.call(p) + rescue => err + _log.error("An error occurred while disconnecting id [#{p}]: #{err}") + _log.log_backtrace(err) + end end @@ -51,12 +51,12 @@ def update_relats_by_ids(prev_ids, new_ids, disconnect_proc, connect_proc, bulk_ end elsif connect_proc new_ids.each do |n| - begin - connect_proc.call(n) - rescue => err - _log.error("EMS: [#{@ems.name}], id: [#{@ems.id}] An error occurred while connecting id [#{n}]: #{err}") - _log.log_backtrace(err) - end + + connect_proc.call(n) + rescue => err + _log.error("EMS: [#{@ems.name}], id: [#{@ems.id}] An error occurred while connecting id [#{n}]: #{err}") + _log.log_backtrace(err) + end end end diff --git a/app/models/ems_refresh/save_inventory.rb b/app/models/ems_refresh/save_inventory.rb index 47eea55df54..d9dfb93619c 100644 --- a/app/models/ems_refresh/save_inventory.rb +++ b/app/models/ems_refresh/save_inventory.rb @@ -5,10 +5,11 @@ module EmsRefresh::SaveInventory def save_vms_inventory(ems, hashes, target = nil) return if hashes.nil? + target = ems if target.nil? log_header = "EMS: [#{ems.name}], id: [#{ems.id}]" - disconnects = if (target.kind_of?(ExtManagementSystem) || target.kind_of?(Host)) + disconnects = if target.kind_of?(ExtManagementSystem) || target.kind_of?(Host) target.vms_and_templates.reload.to_a elsif target.kind_of?(Vm) [target.ruby_clone] @@ -125,6 +126,7 @@ def save_vms_inventory(ems, hashes, target = nil) _log.warn("#{log_header} Processing Vm: [#{name}] failed with error [#{err}]. Skipping Vm.") else raise if EmsRefresh.debug_failures + _log.error("#{log_header} Processing Vm: [#{name}] failed with error [#{err}]. 
Skipping Vm.") _log.log_backtrace(err) end @@ -179,10 +181,12 @@ def save_vms_inventory(ems, hashes, target = nil) # def save_tags_inventory(object, collection, _target = nil) return if collection.nil? + tags = collection.kind_of?(Hash) ? collection[:tags] : collection ProviderTagMapping.retag_entity(object, tags) rescue => err raise if EmsRefresh.debug_failures + _log.error("Auto-tagging failed on #{object.class} [#{object.name}] with error [#{err}].") _log.log_backtrace(err) end @@ -200,6 +204,7 @@ def save_operating_system_inventory(parent, hash) def save_hardware_inventory(parent, hash) return if hash.nil? + save_inventory_single(:hardware, parent, hash, [:disks, :guest_devices, :networks, :firmwares]) parent.save! end @@ -217,20 +222,20 @@ def save_guest_devices_inventory(hardware, hashes) hardware.save! if hardware.id.nil? h[:network][:hardware_id] = hardware.id end - if h[:child_devices] - # Save the hardware to force an id if not found - hardware.save! if hardware.id.nil? - h[:child_devices].each do |child_device| - child_device[:hardware_id] = hardware.id - end + next unless h[:child_devices] + + # Save the hardware to force an id if not found + hardware.save! if hardware.id.nil? + h[:child_devices].each do |child_device| + child_device[:hardware_id] = hardware.id end end deletes = hardware.guest_devices.where(:device_type => ["ethernet", "storage"]) - find_key = %i(device_type uid_ems) - child_keys = %i(network miq_scsi_targets firmwares physical_network_ports) - extra_keys = %i(switch lan) + find_key = %i[device_type uid_ems] + child_keys = %i[network miq_scsi_targets firmwares physical_network_ports] + extra_keys = %i[switch lan] save_inventory_multi(hardware.guest_devices, hashes, deletes, find_key, child_keys, extra_keys) store_ids_for_new_records(hardware.guest_devices, hashes, find_key) @@ -333,6 +338,7 @@ def save_custom_attributes_inventory(parent, hashes, mode = :refresh) def save_ems_custom_attributes_inventory(parent, hashes) return if hashes.nil? + save_inventory_multi(parent.ems_custom_attributes, hashes, :use_association, [:section, :name]) end diff --git a/app/models/ems_refresh/save_inventory_cloud.rb b/app/models/ems_refresh/save_inventory_cloud.rb index 221592b4930..d690e591091 100644 --- a/app/models/ems_refresh/save_inventory_cloud.rb +++ b/app/models/ems_refresh/save_inventory_cloud.rb @@ -121,6 +121,7 @@ def save_cloud_volume_snapshots_inventory(ems, hashes, target = nil) def link_volumes_to_base_snapshots(hashes) base_snapshot_to_volume = hashes.each_with_object({}) do |h, bsh| next unless (base_snapshot = h[:base_snapshot]) + (bsh[base_snapshot[:id]] ||= []) << h[:id] end diff --git a/app/models/ems_refresh/save_inventory_helper.rb b/app/models/ems_refresh/save_inventory_helper.rb index 4ad882a9bc2..9a29b0ad9ad 100644 --- a/app/models/ems_refresh/save_inventory_helper.rb +++ b/app/models/ems_refresh/save_inventory_helper.rb @@ -1,7 +1,7 @@ module EmsRefresh::SaveInventoryHelper class TypedIndex - attr_accessor :record_index, :key_attribute_types - attr_accessor :find_key + attr_accessor :record_index, :key_attribute_types, :find_key + def initialize(records, find_key) # Save the columns associated with the find keys, so we can coerce the hash values during fetch if records.first @@ -62,7 +62,7 @@ def save_inventory_multi(association, hashes, deletes, find_key, child_keys = [] # Delete the items no longer found deletes = deletes_index.values - unless deletes.blank? + if deletes.present? 
ActiveRecord::Base.transaction do type = association.proxy_association.reflection.name _log.info("[#{type}] Deleting #{log_format_deletes(deletes)}") @@ -86,7 +86,7 @@ def save_inventory_single(type, parent, hash, child_keys = [], extra_keys = [], if child update!(child, hash, [:type, *remove_keys]) else - child = parent.send("create_#{type}!", hash.except(*remove_keys)) + child = parent.send(:"create_#{type}!", hash.except(*remove_keys)) end save_child_inventory(child, hash, child_keys) end @@ -99,7 +99,7 @@ def save_inventory_with_findkey(association, hash, deletes, new_records, record_ new_records << found else update!(found, hash, [:id, :type]) - deletes.delete(found) unless deletes.blank? + deletes.delete(found) if deletes.present? end found end @@ -119,7 +119,7 @@ def restore_keys(hash, keys, backup) end def save_child_inventory(obj, hashes, child_keys, *args) - child_keys.each { |k| send("save_#{k}_inventory", obj, hashes[k], *args) if hashes.key?(k) } + child_keys.each { |k| send(:"save_#{k}_inventory", obj, hashes[k], *args) if hashes.key?(k) } end def store_ids_for_new_records(records, hashes, keys) diff --git a/app/models/ems_refresh/save_inventory_infra.rb b/app/models/ems_refresh/save_inventory_infra.rb index afd115b1dd1..043c208183c 100644 --- a/app/models/ems_refresh/save_inventory_infra.rb +++ b/app/models/ems_refresh/save_inventory_infra.rb @@ -19,7 +19,7 @@ def save_hosts_inventory(ems, hashes, target = nil) target = ems if target.nil? log_header = "EMS: [#{ems.name}], id: [#{ems.id}]" - disconnects = if (target == ems) + disconnects = if target == ems target.hosts.reload.to_a elsif target.kind_of?(Host) [target.clone] @@ -48,10 +48,10 @@ def save_hosts_inventory(ems, hashes, target = nil) found = ems.hosts.build(h) else _log.info("#{log_header} Updating Host [#{found.name}] id: [#{found.id}] hostname: [#{found.hostname}] IP: [#{found.ipaddress}] ems_ref: [#{h[:ems_ref]}]") - h[:ems_id] = ems.id # Steal this host from the previous EMS + h[:ems_id] = ems.id # Steal this host from the previous EMS # Adjust the names so they do not keep changing in the event of DNS problems - ip_part = /[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/ + ip_part = /[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/ ip_whole = /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/ # Keep the previous ip address if we don't have a new one or the new one is not an ip address @@ -60,7 +60,7 @@ def save_hosts_inventory(ems, hashes, target = nil) # Keep the previous hostname unless it's nil or it's an ip address h[:hostname] = found.hostname unless found.hostname.nil? || (found.hostname =~ ip_whole) - if found.name =~ /#{h[:name]} - \d+$/ + if /#{h[:name]} - \d+$/.match?(found.name) # Update the name to be found.name if it has the same ems_ref and the name # already has a '- int' suffix to work around duplicate hostnames h[:name] = found.name @@ -95,6 +95,7 @@ def save_hosts_inventory(ems, hashes, target = nil) _log.warn("#{log_header} Processing Host: [#{name}] failed with error [#{err.class}: #{err}]. Skipping Host.") else raise if EmsRefresh.debug_failures + _log.error("#{log_header} Processing Host: [#{name}] failed with error [#{err.class}: #{err}]. 
Skipping Host.") _log.log_backtrace(err) end @@ -141,7 +142,7 @@ def save_folders_inventory(ems, hashes, target = nil) save_inventory_multi(ems.ems_folders, hashes, deletes, [:uid_ems], nil, :ems_children) store_ids_for_new_records(ems.ems_folders, hashes, :uid_ems) end - alias_method :save_ems_folders_inventory, :save_folders_inventory + alias save_ems_folders_inventory save_folders_inventory def save_clusters_inventory(ems, hashes, target = nil) target = ems if target.nil? @@ -152,7 +153,7 @@ def save_clusters_inventory(ems, hashes, target = nil) save_inventory_multi(ems.ems_clusters, hashes, deletes, [:uid_ems], nil, :ems_children) store_ids_for_new_records(ems.ems_clusters, hashes, :uid_ems) end - alias_method :save_ems_clusters_inventory, :save_clusters_inventory + alias save_ems_clusters_inventory save_clusters_inventory def save_resource_pools_inventory(ems, hashes, target = nil) target = ems if target.nil? diff --git a/app/models/endpoint.rb b/app/models/endpoint.rb index 95bc4232bcb..6212be250e5 100644 --- a/app/models/endpoint.rb +++ b/app/models/endpoint.rb @@ -67,6 +67,7 @@ def resolve_verify_ssl_value(val) # Returns a list, to support concatenated PEM certs. def parse_certificate_authority return [] if certificate_authority.blank? + certificate_authority.split(/(?=-----BEGIN)/).reject(&:blank?).collect do |pem_fragment| OpenSSL::X509::Certificate.new(pem_fragment) end diff --git a/app/models/entitlement.rb b/app/models/entitlement.rb index b045a5bad13..ba4f5c9be90 100644 --- a/app/models/entitlement.rb +++ b/app/models/entitlement.rb @@ -13,6 +13,7 @@ def self.valid_filters?(filters_hash) return true unless filters_hash # nil ok return false unless filters_hash.kind_of?(Hash) # must be Hash return true if filters_hash.blank? # {} ok + filters_hash["managed"].present? || filters_hash["belongsto"].present? end @@ -65,6 +66,7 @@ def remove_tag_from_managed_filter(filter_to_remove) self.filters["managed"].each do |filter| next unless filter.first.starts_with?(category) next unless filter.include?(filter_to_remove) + filter.delete(filter_to_remove) end self.filters["managed"].reject!(&:empty?) 
diff --git a/app/models/event_stream.rb b/app/models/event_stream.rb index 68f18d626b4..0e8a36f612f 100644 --- a/app/models/event_stream.rb +++ b/app/models/event_stream.rb @@ -7,12 +7,12 @@ class EventStream < ApplicationRecord belongs_to :generating_ems, :class_name => "ExtManagementSystem" belongs_to :vm_or_template - alias_method :src_vm_or_template, :vm_or_template + alias src_vm_or_template vm_or_template belongs_to :vm, :foreign_key => :vm_or_template_id belongs_to :miq_template, :foreign_key => :vm_or_template_id belongs_to :host belongs_to :availability_zone - alias_method :src_host, :host + alias src_host host belongs_to :dest_vm_or_template, :class_name => "VmOrTemplate" belongs_to :dest_vm, :class_name => "Vm", :foreign_key => :dest_vm_or_template_id diff --git a/app/models/ext_management_system.rb b/app/models/ext_management_system.rb index 7f96a31626f..fa340cfa80e 100644 --- a/app/models/ext_management_system.rb +++ b/app/models/ext_management_system.rb @@ -88,7 +88,7 @@ def self.create_from_params(params, endpoints, authentications) has_many :physical_servers, :foreign_key => :ems_id, :inverse_of => :ext_management_system, :dependent => :destroy has_many :physical_server_profiles, :foreign_key => :ems_id, :inverse_of => :ext_management_system, :dependent => :destroy has_many :physical_server_profile_templates, :foreign_key => :ems_id, :inverse_of => :ext_management_system, :dependent => :destroy - has_many :placement_groups, :foreign_key => :ems_id, :inverse_of => :ext_management_system, :dependent => :destroy + has_many :placement_groups, :foreign_key => :ems_id, :inverse_of => :ext_management_system, :dependent => :destroy has_many :vm_and_template_labels, :through => :vms_and_templates, :source => :labels # Only taggings mapped from labels, excluding user-assigned tags. @@ -142,7 +142,7 @@ def self.create_from_params(params, endpoints, authentications) serialize :options - supports :refresh_ems + supports :refresh_ems def edit_with_params(params, endpoints, authentications) tap do |ems| @@ -164,6 +164,7 @@ def edit_with_params(params, endpoints, authentications) def hostname_uniqueness_valid? return unless hostname_required? return unless hostname.present? # Presence is checked elsewhere + # check uniqueness per provider type existing_hostnames = (self.class.all - [self]).map(&:hostname).compact.map(&:downcase) @@ -233,7 +234,7 @@ def validate_zone_not_maintenance_when_ems_enabled? :verify_ssl=, :certificate_authority, :certificate_authority=, - :to => :default_endpoint, + :to => :default_endpoint, :allow_nil => true delegate :path, :path=, :to => :default_endpoint, :prefix => "endpoint", :allow_nil => true @@ -247,7 +248,7 @@ def validate_zone_not_maintenance_when_ems_enabled? :allow_nil => true, :prefix => :default - alias_method :address, :hostname # TODO: Remove all callers of address + alias address hostname # TODO: Remove all callers of address virtual_column :ipaddress, :type => :string, :uses => :endpoints virtual_column :hostname, :type => :string, :uses => :endpoints @@ -303,7 +304,7 @@ def validate_zone_not_maintenance_when_ems_enabled? 
virtual_sum :total_cloud_vcpus, :vms, :cpu_total_cores virtual_sum :total_cloud_memory, :vms, :ram_size - alias_method :clusters, :ems_clusters # Used by web-services to return clusters as the property name + alias clusters ems_clusters # Used by web-services to return clusters as the property name alias_attribute :to_s, :name attribute :enabled, :default => true @@ -444,6 +445,7 @@ def self.provision_workflow_class def self.belongsto_descendant_class(name) return unless (descendant = BELONGS_TO_DESCENDANTS_CLASSES_BY_NAME.keys.detect { |x| name.end_with?(x) }) + BELONGS_TO_DESCENDANTS_CLASSES_BY_NAME[descendant] end @@ -552,7 +554,7 @@ def self.hostname_required? def my_zone zone.try(:name).presence || MiqServer.my_zone end - alias_method :zone_name, :my_zone + alias zone_name my_zone def emstype_description self.class.description || emstype.titleize @@ -560,6 +562,7 @@ def emstype_description def with_provider_connection(options = {}) raise _("no block given") unless block_given? + _log.info("Connecting through #{self.class.name}: [#{name}]") connection = connect(options) yield connection @@ -599,6 +602,7 @@ def refresh_ems(opts = {}) unless authentication_status_ok? raise _("Provider failed last authentication check") end + EmsRefresh.queue_refresh(self, nil, opts) end @@ -615,11 +619,11 @@ def refresh end def self.ems_infra_discovery_types - @ems_infra_discovery_types ||= %w(virtualcenter rhevm openstack_infra) + @ems_infra_discovery_types ||= %w[virtualcenter rhevm openstack_infra] end def self.ems_physical_infra_discovery_types - @ems_physical_infra_discovery_types ||= %w(lenovo_ph_infra) + @ems_physical_infra_discovery_types ||= %w[lenovo_ph_infra] end # override destroy_queue from AsyncDeleteMixin @@ -726,8 +730,8 @@ def enforce_policy(target, event) MiqEvent.raise_evm_event(target, event, inputs) end - alias_method :all_storages, :storages - alias_method :datastores, :storages # Used by web-services to return datastores as the property name + alias all_storages storages + alias datastores storages # Used by web-services to return datastores as the property name # # Relationship methods @@ -742,8 +746,8 @@ def folders children(:of_type => 'EmsFolder').sort_by { |c| c.name.downcase } end - alias_method :add_folder, :set_child - alias_method :remove_folder, :remove_child + alias add_folder set_child + alias remove_folder remove_child def remove_all_folders remove_all_children(:of_type => 'EmsFolder') @@ -753,6 +757,7 @@ def get_folder_paths(folder = nil) exclude_root_folder = folder.nil? folder ||= ems_folder_root return [] if folder.nil? + folder.child_folder_paths( :exclude_root_folder => exclude_root_folder, :exclude_datacenters => true, @@ -764,7 +769,7 @@ def resource_pools_non_default if @association_cache.include?(:resource_pools) resource_pools.select { |r| !r.is_default } else - resource_pools.where("is_default != ?", true).to_a + resource_pools.where.not(:is_default => true).to_a end end @@ -776,15 +781,15 @@ def vm_count_by_state(state) vms.inject(0) { |t, vm| vm.power_state == state ? 
t + 1 : t } end - def total_vms_on; vm_count_by_state("on"); end + def total_vms_on = vm_count_by_state("on") - def total_vms_off; vm_count_by_state("off"); end + def total_vms_off = vm_count_by_state("off") - def total_vms_unknown; vm_count_by_state("unknown"); end + def total_vms_unknown = vm_count_by_state("unknown") - def total_vms_never; vm_count_by_state("never"); end + def total_vms_never = vm_count_by_state("never") - def total_vms_suspended; vm_count_by_state("suspended"); end + def total_vms_suspended = vm_count_by_state("suspended") def get_reserve(field) (hosts + ems_clusters).inject(0) { |v, obj| v + (obj.send(field) || 0) } @@ -810,9 +815,10 @@ def perf_rollup_parents(interval_name = nil) def perf_capture_enabled? return @perf_capture_enabled unless @perf_capture_enabled.nil? + @perf_capture_enabled = ems_clusters.any?(&:perf_capture_enabled?) || host.any?(&:perf_capture_enabled?) end - alias_method :perf_capture_enabled, :perf_capture_enabled? + alias perf_capture_enabled perf_capture_enabled? Vmdb::Deprecation.deprecate_methods(self, :perf_capture_enabled => :perf_capture_enabled?) # Some workers hold open a connection to the provider and thus do not @@ -833,16 +839,19 @@ def self.event_monitor_class def event_monitor return if event_monitor_class.nil? + event_monitor_class.find_by_ems(self).first end def start_event_monitor return if event_monitor_class.nil? + event_monitor_class.start_worker_for_ems(self) end def stop_event_monitor return if event_monitor_class.nil? + _log.info("EMS [#{name}] id [#{id}]: Stopping event monitor.") event_monitor_class.stop_worker_for_ems(self) end @@ -859,14 +868,14 @@ def stop_event_monitor_queue end def stop_event_monitor_queue_on_change - if event_monitor_class && !self.new_record? && default_endpoint.changed.include_any?("hostname", "ipaddress") + if event_monitor_class && !new_record? && default_endpoint.changed.include_any?("hostname", "ipaddress") _log.info("EMS: [#{name}], Hostname or IP address has changed, stopping Event Monitor. It will be restarted by the WorkerMonitor.") stop_event_monitor_queue end end def stop_event_monitor_queue_on_credential_change - if event_monitor_class && !self.new_record? && self.credentials_changed? + if event_monitor_class && !new_record? && credentials_changed? _log.info("EMS: [#{name}], Credentials have changed, stopping Event Monitor. It will be restarted by the WorkerMonitor.") stop_event_monitor_queue end @@ -923,14 +932,14 @@ def stop_refresh_worker_queue end def stop_refresh_worker_queue_on_change - if refresh_worker_class && !self.new_record? && default_endpoint.changed.include_any?("hostname", "ipaddress") + if refresh_worker_class && !new_record? && default_endpoint.changed.include_any?("hostname", "ipaddress") _log.info("EMS: [#{name}], Hostname or IP address has changed, stopping Refresh Worker. It will be restarted by the WorkerMonitor.") stop_refresh_worker_queue end end def stop_refresh_worker_queue_on_credential_change - if refresh_worker_class && !self.new_record? && self.credentials_changed? + if refresh_worker_class && !new_record? && credentials_changed? _log.info("EMS: [#{name}], Credentials have changed, stopping Refresh Worker. It will be restarted by the WorkerMonitor.") stop_refresh_worker_queue end @@ -968,10 +977,11 @@ def self.inventory_status ] end return if data.empty? + data = data.sort_by { |e| [e[0], e[1], e[2], e[3]] } # remove 0's (except for the region) data = data.map { |row| row.each_with_index.map { |col, i| i.positive? && col.to_s == "0" ? 
nil : col } } - data.unshift(%w(region zone kind ems clusters hosts vms storages containers groups images nodes projects)) + data.unshift(%w[region zone kind ems clusters hosts vms storages containers groups images nodes projects]) # remove columns where all values (except for the header) are blank data.first.dup.each do |col_header| col = data.first.index(col_header) @@ -1010,6 +1020,7 @@ def build_connection(options = {}) def build_endpoint_by_role(options) return if options.blank? + endpoint = endpoints.detect { |e| e.role == options[:role].to_s } if endpoint endpoint.assign_attributes(options) @@ -1020,6 +1031,7 @@ def build_endpoint_by_role(options) def build_authentication_by_role(options) return if options.blank? + role = options.delete(:role) creds = {} creds[role] = options diff --git a/app/models/external_url.rb b/app/models/external_url.rb index 87f8cca7d75..49329269fbf 100644 --- a/app/models/external_url.rb +++ b/app/models/external_url.rb @@ -2,5 +2,5 @@ class ExternalUrl < ApplicationRecord belongs_to :resource, :polymorphic => true belongs_to :user - validates :url, :format => URI::regexp, :allow_nil => false + validates :url, :format => URI::DEFAULT_PARSER.make_regexp, :allow_nil => false end diff --git a/app/models/file_depot.rb b/app/models/file_depot.rb index ac48847f9a5..b67b9d5fbfd 100644 --- a/app/models/file_depot.rb +++ b/app/models/file_depot.rb @@ -7,9 +7,9 @@ class FileDepot < ApplicationRecord has_many :miq_schedules, :dependent => :nullify has_many :miq_servers, :dependent => :nullify, :foreign_key => :log_file_depot_id has_many :log_files - validates_presence_of :uri + validates :uri, :presence => true - attr_accessor :file + attr_accessor :file def self.supported_depots descendants.each_with_object({}) { |klass, hash| hash[klass.name] = klass.display_name } diff --git a/app/models/file_depot_ftp.rb b/app/models/file_depot_ftp.rb index c2a00b3568f..9039173b0cd 100644 --- a/app/models/file_depot_ftp.rb +++ b/app/models/file_depot_ftp.rb @@ -14,23 +14,23 @@ def self.validate_settings(settings) def upload_file(file) super with_connection do - begin - return if file_exists?(destination_file) - - upload(file.local_file, destination_file) - rescue => err - msg = "Error '#{err.message.chomp}', writing to FTP: [#{uri}], Username: [#{authentication_userid}]" - _log.error(msg) - raise _("Error '%{message}', writing to FTP: [%{uri}], Username: [%{id}]") % {:message => err.message.chomp, - :uri => uri, - :id => authentication_userid} - else - file.update( - :state => "available", - :log_uri => destination_file - ) - file.post_upload_tasks - end + + return if file_exists?(destination_file) + + upload(file.local_file, destination_file) + rescue => err + msg = "Error '#{err.message.chomp}', writing to FTP: [#{uri}], Username: [#{authentication_userid}]" + _log.error(msg) + raise _("Error '%{message}', writing to FTP: [%{uri}], Username: [%{id}]") % {:message => err.message.chomp, + :uri => uri, + :id => authentication_userid} + else + file.update( + :state => "available", + :log_uri => destination_file + ) + file.post_upload_tasks + end end @@ -46,11 +46,13 @@ def remove_file(file) def verify_credentials(_auth_type = nil, cred_hash = nil) res = with_connection(cred_hash, &:last_response) raise _("Depot Settings validation failed") unless res + res end def with_connection(cred_hash = nil) raise _("no block given") unless block_given? 
+ _log.info("Connecting through #{self.class.name}: [#{name}]") begin connection = connect(cred_hash) @@ -68,7 +70,7 @@ def connect(cred_hash = nil) begin _log.info("Connecting to #{self.class.name}: #{name} host: #{host}...") @ftp = Net::FTP.new(host) - @ftp.passive = true # Use passive mode to avoid firewall issues see http://slacksite.com/other/ftp.html#passive + @ftp.passive = true # Use passive mode to avoid firewall issues see http://slacksite.com/other/ftp.html#passive # @ftp.debug_mode = true if settings[:debug] # TODO: add debug option creds = cred_hash ? [cred_hash[:username], cred_hash[:password]] : login_credentials @ftp.login(*creds) diff --git a/app/models/file_depot_s3.rb b/app/models/file_depot_s3.rb index 379871514ed..31c45ddab09 100644 --- a/app/models/file_depot_s3.rb +++ b/app/models/file_depot_s3.rb @@ -14,7 +14,7 @@ def connect(options = {}) username = options[:username] || authentication_userid(options[:auth_type]) password = options[:password] || authentication_password(options[:auth_type]) - # Note: The hard-coded aws_region will be removed after manageiq-ui-class implements region selection + # NOTE: The hard-coded aws_region will be removed after manageiq-ui-class implements region selection aws_region = options[:region] || "us-east-1" $aws_log ||= Vmdb::Loggers.create_logger("aws.log") @@ -31,12 +31,12 @@ def connect(options = {}) def with_depot_connection(options = {}) raise _("no block given") unless block_given? + _log.info("Connecting through #{self.class.name}: [#{name}]") yield connect(options) end def verify_credentials(auth_type = nil, options = {}) - connection_rescue_block do # aws-sdk does Lazy Connections, so call a cheap function with_depot_connection(options.merge(:auth_type => auth_type)) do |s3| diff --git a/app/models/file_depot_smb.rb b/app/models/file_depot_smb.rb index 541849ab0ae..78ecfacd3cf 100644 --- a/app/models/file_depot_smb.rb +++ b/app/models/file_depot_smb.rb @@ -8,6 +8,7 @@ def self.uri_prefix def self.validate_settings(settings) res = MiqSmbSession.new(settings).verify raise _("Depot Settings validation failed with error: %{error}") % {:error => res.last} unless res.first + res end diff --git a/app/models/filesystem.rb b/app/models/filesystem.rb index 626e9804d01..08e9930c90d 100644 --- a/app/models/filesystem.rb +++ b/app/models/filesystem.rb @@ -86,7 +86,7 @@ def image_name unless ext.nil? ext.sub!(".", "") ext.downcase! - return ext if %w(dll exe log txt xml ini doc pdf zip).include?(ext) + return ext if %w[dll exe log txt xml ini doc pdf zip].include?(ext) end "unknown" end @@ -107,21 +107,24 @@ def contents=(val) def has_contents? !self.binary_blob.nil? end - alias_method :contents_available, :has_contents? + alias contents_available has_contents? def contents_displayable? return false if name.nil? # We will display max 20k characters in the UI textarea return false if size > 20_000 + mime_type = MIME::Types.of(name).first return has_contents? && contents.force_encoding("UTF-8").ascii_only? if mime_type.nil? + !mime_type.binary? end def displayable_contents return nil unless has_contents? + bom = contents.byteslice(0, 2).bytes - if contents_displayable? && (bom == UTF_16BE_BOM || bom == UTF_16LE_BOM) + if contents_displayable? 
&& [UTF_16BE_BOM, UTF_16LE_BOM].include?(bom) contents.force_encoding('UTF-16').encode('UTF-8') else contents @@ -129,21 +132,21 @@ def displayable_contents end [ - [:suid_bit, 04000], - [:sgid_bit, 02000], - [:sticky_bit, 01000], - [:owner_read, 00400], - [:owner_write, 00200], - [:owner_exec, 00100], - [:group_read, 00040], - [:group_write, 00020], - [:group_exec, 00010], - [:other_read, 00004], - [:other_write, 00002], - [:other_exec, 00001], + [:suid_bit, 0o4000], + [:sgid_bit, 0o2000], + [:sticky_bit, 0o1000], + [:owner_read, 0o0400], + [:owner_write, 0o0200], + [:owner_exec, 0o0100], + [:group_read, 0o0040], + [:group_write, 0o0020], + [:group_exec, 0o0010], + [:other_read, 0o0004], + [:other_write, 0o0002], + [:other_exec, 0o0001], ].each do |m, o| - define_method("permission_#{m}?") do - return permissions && permissions.to_i(8) & o != 0 + define_method(:"permission_#{m}?") do + permissions && permissions.to_i(8) & o != 0 end end diff --git a/app/models/firewall_rule.rb b/app/models/firewall_rule.rb index fa72e29ce29..f9e509e66a9 100644 --- a/app/models/firewall_rule.rb +++ b/app/models/firewall_rule.rb @@ -8,6 +8,7 @@ def operating_system def operating_system=(os) raise ArgumentError, _("must be an OperatingSystem") unless os.kind_of?(OperatingSystem) + self.resource = os end @@ -36,6 +37,7 @@ def self.xml_to_hashes(xmlNode, findPath) el.each_element { |e| result << e.attributes.to_h } end + result end end diff --git a/app/models/firmware_target.rb b/app/models/firmware_target.rb index 2a850d22fad..f00480ddb5e 100644 --- a/app/models/firmware_target.rb +++ b/app/models/firmware_target.rb @@ -2,8 +2,8 @@ class FirmwareTarget < ApplicationRecord has_many :firmware_binary_firmware_targets, :dependent => :destroy has_many :firmware_binaries, :through => :firmware_binary_firmware_targets - before_create :normalize before_save :normalize + before_create :normalize # Attributes that need to match for target physical server to be assumed compatible. MATCH_ATTRIBUTES = %i[manufacturer model].freeze diff --git a/app/models/flavor.rb b/app/models/flavor.rb index 6b1d19ea31a..151731b729c 100644 --- a/app/models/flavor.rb +++ b/app/models/flavor.rb @@ -27,18 +27,16 @@ def name_with_details else _("%{name} (%{num_cpus} CPU, %{memory_gigabytes} GB RAM, %{root_disk_gigabytes} GB Root Disk)") end + elsif root_disk_size.nil? + _("%{name} (%{num_cpus} CPUs, %{memory_gigabytes} GB RAM, Unknown Size Root Disk)") else - if root_disk_size.nil? - _("%{name} (%{num_cpus} CPUs, %{memory_gigabytes} GB RAM, Unknown Size Root Disk)") - else - _("%{name} (%{num_cpus} CPUs, %{memory_gigabytes} GB RAM, %{root_disk_gigabytes} GB Root Disk)") - end + _("%{name} (%{num_cpus} CPUs, %{memory_gigabytes} GB RAM, %{root_disk_gigabytes} GB Root Disk)") end details % { :name => name, :num_cpus => cpus, :memory_gigabytes => memory.bytes / 1.0.gigabytes, - :root_disk_gigabytes => root_disk_size && root_disk_size.bytes / 1.0.gigabytes + :root_disk_gigabytes => root_disk_size && (root_disk_size.bytes / 1.0.gigabytes) } end @@ -79,8 +77,10 @@ def self.raw_create_flavor(_ext_management_system, _options = {}) def self.create_flavor(ems_id, options) raise ArgumentError, _("ems cannot be nil") if ems_id.nil? + ext_management_system = ExtManagementSystem.find(ems_id) raise ArgumentError, _("ems cannot be found") if ext_management_system.nil? 
+ klass = ext_management_system.class_by_ems(:Flavor) klass.raw_create_flavor(ext_management_system, options) end diff --git a/app/models/generic_object.rb b/app/models/generic_object.rb index 7f41ac2d0ee..dc862e99356 100644 --- a/app/models/generic_object.rb +++ b/app/models/generic_object.rb @@ -40,6 +40,7 @@ def custom_action_buttons def property_attributes=(options) raise "generic_object_definition is nil" unless generic_object_definition + options.keys.each do |k| unless property_attribute_defined?(k) raise ActiveModel::UnknownAttributeError.new(self, k) @@ -115,7 +116,7 @@ def inspect attributes_as_string += ["associations: #{generic_object_definition.property_associations.keys}"] attributes_as_string += ["methods: #{property_methods}"] - prefix = Kernel.instance_method(:inspect).bind(self).call.split(' ', 2).first + prefix = Kernel.instance_method(:inspect).bind_call(self).split(' ', 2).first "#{prefix} #{attributes_as_string.join(", ")}>" end @@ -154,6 +155,7 @@ def method_missing(method_name, *args) def respond_to_missing?(method_name, _include_private = false) return true if property_defined?(method_name.to_s.chomp('=')) + super end @@ -185,7 +187,7 @@ def _call_automate(method_name, *args) @tenant ||= User.current_user.current_tenant raise "A user is required to send [#{method_name}] to automate." unless @user - attrs = { :method_name => method_name } + attrs = {:method_name => method_name} args.each_with_index do |item, idx| attrs["param_#{idx + 1}".to_sym] = item attrs["param_#{idx + 1}_type".to_sym] = item.class.name diff --git a/app/models/generic_object_definition.rb b/app/models/generic_object_definition.rb index 855d02e9ab2..3f17fd56c3e 100644 --- a/app/models/generic_object_definition.rb +++ b/app/models/generic_object_definition.rb @@ -20,10 +20,10 @@ class GenericObjectDefinition < ApplicationRecord :time => N_('Time') }.freeze - FEATURES = %w(attribute association method).freeze + FEATURES = %w[attribute association method].freeze REG_ATTRIBUTE_NAME = /\A[a-z][a-zA-Z_0-9]*\z/ REG_METHOD_NAME = /\A[a-z][a-zA-Z_0-9]*[!?]?\z/ - ALLOWED_ASSOCIATION_TYPES = (MiqReport.reportable_models + %w(GenericObject)).freeze + ALLOWED_ASSOCIATION_TYPES = (MiqReport.reportable_models + %w[GenericObject]).freeze serialize :properties, Hash @@ -49,15 +49,17 @@ class GenericObjectDefinition < ApplicationRecord virtual_total :generic_objects_count, :generic_objects FEATURES.each do |feature| - define_method("property_#{feature}s") do + define_method(:"property_#{feature}s") do return errors[:properties] if properties_changed? && !valid? + properties["#{feature}s".to_sym] end - define_method("property_#{feature}_defined?") do |attr| + define_method(:"property_#{feature}_defined?") do |attr| attr = attr.to_s return property_methods.include?(attr) if feature == 'method' - send("property_#{feature}s").key?(attr) + + send(:"property_#{feature}s").key?(attr) end end @@ -92,7 +94,8 @@ def find_objects(options) def property_getter(attr, val) return type_cast(attr, val) if property_attribute_defined?(attr) - return get_objects_of_association(attr, val) if property_association_defined?(attr) + + get_objects_of_association(attr, val) if property_association_defined?(attr) end def type_cast(attr, value) @@ -221,6 +224,7 @@ def property_keywords def check_not_in_use return true if generic_objects.empty? 
+ errors.add(:base, "Cannot delete the definition while it is referenced by some generic objects") throw :abort end diff --git a/app/models/generic_object_definition/import_export.rb b/app/models/generic_object_definition/import_export.rb index e4d6e857afa..2757fb1669e 100644 --- a/app/models/generic_object_definition/import_export.rb +++ b/app/models/generic_object_definition/import_export.rb @@ -9,6 +9,7 @@ def import_from_hash(god, options = nil) if god["name"].blank? || god["properties"].blank? raise _("Incorrect format.") end + existing_god = GenericObjectDefinition.find_by(:name => god["name"]) if existing_god.present? if options[:overwrite] diff --git a/app/models/git_repository.rb b/app/models/git_repository.rb index f98fcc44e1b..a0500af39b9 100644 --- a/app/models/git_repository.rb +++ b/app/models/git_repository.rb @@ -9,7 +9,7 @@ class GitRepository < ApplicationRecord attr_reader :git_lock - validates :url, :format => Regexp.union(URI.regexp(%w[http https file ssh]), /\A[-\w:.]+@.*:/), :allow_nil => false + validates :url, :format => Regexp.union(URI::DEFAULT_PARSER.make_regexp(%w[http https file ssh]), /\A[-\w:.]+@.*:/), :allow_nil => false attribute :verify_ssl, :default => OpenSSL::SSL::VERIFY_PEER validates :verify_ssl, :inclusion => {:in => [OpenSSL::SSL::VERIFY_NONE, OpenSSL::SSL::VERIFY_PEER]} @@ -18,7 +18,7 @@ class GitRepository < ApplicationRecord has_many :git_tags, :dependent => :destroy after_destroy :broadcast_repo_dir_delete - INFO_KEYS = %w(commit_sha commit_message commit_time name).freeze + INFO_KEYS = %w[commit_sha commit_message commit_time name].freeze def self.delete_repo_dir(id, directory_name) _log.info("Deleting GitRepository[#{id}] in #{directory_name} for MiqServer[#{MiqServer.my_server.id}]...") @@ -62,6 +62,7 @@ def branch_info(name) ensure_refreshed branch = git_branches.detect { |item| item.name == name } raise "Branch #{name} not found" unless branch + branch.attributes.slice(*INFO_KEYS) end @@ -69,6 +70,7 @@ def tag_info(name) ensure_refreshed tag = git_tags.detect { |item| item.name == name } raise "Tag #{name} not found" unless tag + tag.attributes.slice(*INFO_KEYS) end @@ -263,11 +265,11 @@ def proxy_url? return false unless %w[http https].include?(Settings.git_repository_proxy.scheme) repo_url_scheme = begin - URI.parse(url).scheme - rescue URI::InvalidURIError - # url is not a parsable URI, such as git@github.com:ManageIQ/manageiq.git - nil - end + URI.parse(url).scheme + rescue URI::InvalidURIError + # url is not a parsable URI, such as git@github.com:ManageIQ/manageiq.git + nil + end %w[http https].include?(repo_url_scheme) end diff --git a/app/models/guest_application.rb b/app/models/guest_application.rb index f89454e959c..e227fca5428 100644 --- a/app/models/guest_application.rb +++ b/app/models/guest_application.rb @@ -26,6 +26,7 @@ def self.xml_to_hashes(xmlNode, findPath) def v_unique_name return name if arch.blank? || arch == "noarch" + "#{name} (#{arch})" end end diff --git a/app/models/hardware.rb b/app/models/hardware.rb index 96ca638854b..15ecc2f247a 100644 --- a/app/models/hardware.rb +++ b/app/models/hardware.rb @@ -72,24 +72,24 @@ def self.add_elements(parent, xmlNode) # Excluding ethernet devices from deletes because the refresh is the master of the data and it will handle the deletes. 
deletes[:gd] = parent.hardware.guest_devices - .where.not(:device_type => "ethernet") - .select(:id, :device_type, :location, :address) - .collect { |rec| [rec.id, [rec.device_type, rec.location, rec.address]] } + .where.not(:device_type => "ethernet") + .select(:id, :device_type, :location, :address) + .collect { |rec| [rec.id, [rec.device_type, rec.location, rec.address]] } - if parent.vendor == "redhat" - deletes[:disk] = parent.hardware.disks.select(:id, :device_type, :location) - .collect { |rec| [rec.id, [rec.device_type, "0:#{rec.location}"]] } - else - deletes[:disk] = parent.hardware.disks.select(:id, :device_type, :location) - .collect { |rec| [rec.id, [rec.device_type, rec.location]] } - end + deletes[:disk] = if parent.vendor == "redhat" + parent.hardware.disks.select(:id, :device_type, :location) + .collect { |rec| [rec.id, [rec.device_type, "0:#{rec.location}"]] } + else + parent.hardware.disks.select(:id, :device_type, :location) + .collect { |rec| [rec.id, [rec.device_type, rec.location]] } + end xmlNode.root.each_recursive do |e| - begin - parent.hardware.send("m_#{e.name}", parent, e, deletes) if parent.hardware.respond_to?("m_#{e.name}") - rescue => err - _log.warn(err.to_s) - end + + parent.hardware.send(:"m_#{e.name}", parent, e, deletes) if parent.hardware.respond_to?(:"m_#{e.name}") + rescue => err + _log.warn(err.to_s) + end GuestDevice.delete(deletes[:gd].transpose[0]) @@ -122,7 +122,8 @@ def v_pct_free_disk_space virtual_attribute :v_pct_free_disk_space, :float, :arel => (lambda do |t| t.grouping(Arel::Nodes::Division.new( Arel::Nodes::NamedFunction.new("CAST", [t[:disk_free_space].as("float")]), - t[:disk_capacity]) * 100) + t[:disk_capacity] + ) * 100) end) def v_pct_used_disk_space @@ -132,9 +133,10 @@ def v_pct_used_disk_space # resulting sql: "(cast(disk_free_space as float) / (disk_capacity * -100) + 100)" # to work with arel better, put the 100 at the end virtual_attribute :v_pct_used_disk_space, :float, :arel => (lambda do |t| - t.grouping(Arel::Nodes::Division.new( + t.grouping((Arel::Nodes::Division.new( Arel::Nodes::NamedFunction.new("CAST", [t[:disk_free_space].as("float")]), - t[:disk_capacity]) * -100 + 100) + t[:disk_capacity] + ) * -100) + 100) end) def provisioned_storage @@ -180,7 +182,7 @@ def m_controller(_parent, xmlNode, deletes) da = {"device_type" => xmlNode.attributes["type"].to_s.downcase, "controller_type" => xmlNode.attributes["type"]} # Loop over the device mapping table and add attributes - @@dh.each_pair { |k, v| da.merge!(v => e.attributes[k]) if e.attributes[k] } + @@dh.each_pair { |k, v| da.merge!(v => e.attributes[k]) if e.attributes[k] } if da["device_name"] == 'disk' target = disks diff --git a/app/models/host.rb b/app/models/host.rb index 33e1acbaf37..b2f013f570a 100644 --- a/app/models/host.rb +++ b/app/models/host.rb @@ -26,9 +26,9 @@ class Host < ApplicationRecord nil => "Unknown", }.freeze - validates_presence_of :name - validates_inclusion_of :user_assigned_os, :in => ["linux_generic", "windows_generic", nil] - validates_inclusion_of :vmm_vendor, :in => VENDOR_TYPES.keys + validates :name, :presence => true + validates :user_assigned_os, :inclusion => {:in => ["linux_generic", "windows_generic", nil]} + validates :vmm_vendor, :inclusion => {:in => VENDOR_TYPES.keys} belongs_to :ext_management_system, :foreign_key => "ems_id" belongs_to :ems_cluster @@ -52,11 +52,11 @@ class Host < ApplicationRecord has_many :networks, :through => :hardware has_many :patches, :dependent => :destroy has_many :system_services, :dependent => 
:destroy - has_many :host_services, :class_name => "SystemService", :foreign_key => "host_id", :inverse_of => :host + has_many :host_services, :class_name => "SystemService", :inverse_of => :host has_many :metrics, :as => :resource # Destroy will be handled by purger has_many :metric_rollups, :as => :resource # Destroy will be handled by purger - has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger + has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger has_many :ems_events, ->(host) { where("host_id = ? OR dest_host_id = ?", host.id, host.id).order(:timestamp) }, @@ -85,8 +85,8 @@ class Host < ApplicationRecord has_many :host_service_groups, :dependent => :destroy has_many :cloud_services, :dependent => :nullify - has_many :host_cloud_services, :class_name => "CloudService", :foreign_key => "host_id", - :inverse_of => :host + has_many :host_cloud_services, :class_name => "CloudService", + :inverse_of => :host has_many :host_aggregate_hosts, :dependent => :destroy has_many :host_aggregates, :through => :host_aggregate_hosts has_many :host_hardwares, :class_name => 'Hardware', :dependent => :nullify @@ -145,7 +145,7 @@ class Host < ApplicationRecord virtual_has_many :resource_pools, :uses => :all_relationships virtual_has_many :miq_scsi_luns, :uses => {:hardware => {:storage_adapters => {:miq_scsi_targets => :miq_scsi_luns}}} - virtual_has_many :processes, :class_name => "OsProcess", :uses => {:operating_system => :processes} + virtual_has_many :processes, :class_name => "OsProcess", :uses => {:operating_system => :processes} virtual_has_many :event_logs, :uses => {:operating_system => :event_logs} virtual_has_many :firewall_rules, :uses => {:operating_system => :firewall_rules} @@ -156,10 +156,10 @@ class Host < ApplicationRecord scope :active, -> { where.not(:ems_id => nil) } scope :archived, -> { where(:ems_id => nil) } - alias_method :datastores, :storages # Used by web-services to return datastores as the property name + alias datastores storages # Used by web-services to return datastores as the property name - alias_method :parent_cluster, :ems_cluster - alias_method :owning_cluster, :ems_cluster + alias parent_cluster ems_cluster + alias owning_cluster ems_cluster include RelationshipMixin self.default_relationship_type = "ems_metadata" @@ -405,13 +405,16 @@ def service_pack def arch if vmm_product.to_s.include?('ESX') return 'x86_64' if vmm_version.to_i >= 4 + return 'x86' end return "unknown" unless hardware && !hardware.cpu_type.nil? + cpu = hardware.cpu_type.to_s.downcase return cpu if cpu.include?('x86') return "x86" if cpu.starts_with?("intel") + "unknown" end @@ -566,13 +569,13 @@ def parent_folder end def owning_folder - detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w(host vm).include?(a.name) } + detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w[host vm].include?(a.name) } end def parent_datacenter detect_ancestor(:of_type => "EmsFolder") { |a| a.kind_of?(Datacenter) } end - alias_method :owning_datacenter, :parent_datacenter + alias owning_datacenter parent_datacenter def self.save_metadata(id, dataArray) _log.info("for host [#{id}]") @@ -585,24 +588,25 @@ def self.save_metadata(id, dataArray) _log.info("for host [#{id}] host saved") rescue => err _log.log_backtrace(err) - return false + false end def self.batch_update_authentication(host_ids, creds = {}) errors = [] return true if host_ids.blank? 
+ host_ids.each do |id| - begin - host = Host.find(id) - host.update_authentication(creds) - rescue ActiveRecord::RecordNotFound => err - _log.warn("#{err.class.name}-#{err}") - next - rescue => err - errors << err.to_s - _log.error("#{err.class.name}-#{err}") - next - end + + host = Host.find(id) + host.update_authentication(creds) + rescue ActiveRecord::RecordNotFound => err + _log.warn("#{err.class.name}-#{err}") + next + rescue => err + errors << err.to_s + _log.error("#{err.class.name}-#{err}") + next + end errors.empty? ? true : errors end @@ -663,7 +667,7 @@ def verify_credentials_with_ws(_auth_type = nil, _options = {}) def verify_credentials_with_ssh(auth_type = nil, options = {}) raise MiqException::MiqHostError, _("No credentials defined") if missing_credentials?(auth_type) - unless os_image_name =~ /linux_*/ + unless /linux_*/.match?(os_image_name) raise MiqException::MiqHostError, _("Logon to platform [%{os_name}] not supported") % {:os_name => os_image_name} end @@ -759,13 +763,16 @@ def refresh_patches(ssu) sb = ssu.shell_exec("esxupdate query") t = Time.now sb.each_line do |line| - next if line =~ /-{5,}/ # skip any header/footer rows + next if /-{5,}/.match?(line) # skip any header/footer rows + data = line.split(" ") # Find the lines we should skip begin next if data[1, 2].nil? + dhash = {:name => data[0], :vendor => "VMware", :installed_on => Time.parse(data[1, 2].join(" ")).utc} next if dhash[:installed_on] - t >= 0 + dhash[:description] = data[3..-1].join(" ") unless data[3..-1].nil? patches << dhash rescue ArgumentError => err @@ -840,11 +847,11 @@ def refresh_ssh_config(ssu) self.ssh_permit_root_login = 'yes' if permit_list permit_list.each_line do |line| la = line.split(' ') - if la.length == 2 - next if la.first[0, 1] == '#' - self.ssh_permit_root_login = la.last.to_s.downcase - break - end + next unless la.length == 2 + next if la.first[0, 1] == '#' + + self.ssh_permit_root_login = la.last.to_s.downcase + break end rescue # _log.log_backtrace($!) @@ -868,7 +875,7 @@ def refresh_ipmi if ipmi.connected? self.power_state = ipmi.power_state mac = ipmi.mac_address - self.mac_address = mac unless mac.blank? + self.mac_address = mac if mac.present? hw_info = {:manufacturer => ipmi.manufacturer, :model => ipmi.model} if hardware.nil? @@ -886,10 +893,11 @@ def refresh_ipmi end def ipmi_config_valid?(include_mac_addr = false) - return false unless (ipmi_address.present? && has_credentials?(:ipmi)) + return false unless ipmi_address.present? && has_credentials?(:ipmi) + include_mac_addr == true ? mac_address.present? : true end - alias_method :ipmi_enabled, :ipmi_config_valid? + alias ipmi_enabled ipmi_config_valid? def set_custom_field(attribute, value) return unless is_vmware? @@ -927,6 +935,7 @@ def current_memory_headroom def firewall_rules return [] if operating_system.nil? + operating_system.firewall_rules end @@ -1001,7 +1010,7 @@ def scan_from_queue(taskid = nil) unless is_vmware_esxi? if hostname.blank? _log.warn("No hostname defined for #{log_target}") - task.update_status("Finished", "Warn", "Scanning incomplete due to missing hostname") if task + task.update_status("Finished", "Warn", "Scanning incomplete due to missing hostname") if task return end @@ -1009,7 +1018,7 @@ def scan_from_queue(taskid = nil) if missing_credentials? 
_log.warn("No credentials defined for #{log_target}") - task.update_status("Finished", "Warn", "Scanning incomplete due to Credential Issue") if task + task.update_status("Finished", "Warn", "Scanning incomplete due to Credential Issue") if task return end @@ -1166,8 +1175,8 @@ def get_ports(direction = nil, host_protocol = nil) conditions[:host_protocol] = host_protocol if host_protocol operating_system.firewall_rules.where(conditions) - .flat_map { |rule| rule.port_range.to_a } - .uniq.sort + .flat_map { |rule| rule.port_range.to_a } + .uniq.sort end def service_names @@ -1230,14 +1239,13 @@ def vm_scan_affinity=(list) remove_all_parents list.each { |parent| set_parent(parent) } end - true end - alias_method :set_vm_scan_affinity, :vm_scan_affinity= + alias set_vm_scan_affinity vm_scan_affinity= def vm_scan_affinity with_relationship_type("vm_scan_affinity") { parents } end - alias_method :get_vm_scan_affinity, :vm_scan_affinity + alias get_vm_scan_affinity vm_scan_affinity def processes operating_system.try(:processes) || [] @@ -1277,7 +1285,8 @@ def vcpus_per_core def domain names = hostname.to_s.split(',').first.to_s.split('.') - return names[1..-1].join('.') unless names.blank? + return names[1..-1].join('.') if names.present? + nil end @@ -1339,10 +1348,11 @@ def get_performance_metric(capture_interval, metric, range, function = nil) return values if function.nil? case function.to_sym - when :min, :max then return values.send(function) + when :min, :max then values.send(function) when :avg return 0 if values.length == 0 - return (values.compact.sum / values.length) + + (values.compact.sum / values.length) else raise _("Function %{function} is invalid, should be one of :min, :max, :avg or nil") % {:function => function} end @@ -1359,7 +1369,8 @@ def get_pct_cpu_metric_from_child_vm_performances(metric, capture_interval, time id, capture_interval.to_s, time_range[0], - time_range[1]) + time_range[1] + ) perf_hash = {} vm_perfs.each do |p| diff --git a/app/models/job.rb b/app/models/job.rb index b6faa70482b..b8697758e5a 100644 --- a/app/models/job.rb +++ b/app/models/job.rb @@ -48,7 +48,7 @@ def initialize_attributes end def check_active_on_destroy - if self.is_active? + if is_active? _log.warn("Job is active, delete not allowed - #{attributes_log}") throw :abort end @@ -71,7 +71,7 @@ def update_message(message) self.message = message save - return unless self.is_active? + return unless is_active? # Update worker heartbeat MiqQueue.get_worker(guid).try(:update_heartbeat) @@ -93,6 +93,7 @@ def dispatch_start def dispatch_finish return if @storage_dispatcher_process_finish_flag + _log.info("Dispatch Status is 'finished'") self.dispatch_status = "finished" save @@ -130,7 +131,7 @@ def process_finished(*args) def timeout! message = "job timed out after #{Time.now - updated_on} seconds of inactivity. Inactivity threshold [#{current_job_timeout} seconds]" _log.warn("Job: guid: [#{guid}], #{message}, aborting") - attributes = { :args => [message, "error"] } + attributes = {:args => [message, "error"]} MiqQueue.create_with(attributes).put_unless_exists( :class_name => self.class.base_class.name, :instance_id => id, @@ -155,9 +156,10 @@ def self.check_jobs_for_timeout # Allow jobs to run longer if the MiqQueue task is still active. (Limited to MiqServer for now.) # TODO: can we add method_name, queue_name, role, instance_id to the exists? 
-      if job.miq_server_id
-        next if MiqQueue.exists?(:state => %w(dequeue ready), :task_id => job.guid, :class_name => "MiqServer")
+      if job.miq_server_id && MiqQueue.exists?(:state => %w[dequeue ready], :task_id => job.guid, :class_name => "MiqServer")
+        next
       end
+
       job.timeout!
     end
   rescue Exception
@@ -169,7 +171,7 @@ def timeout_adjustment
     timeout_adjustment = 1
     target = target_entity
     if target.kind_of?(ManageIQ::Providers::Azure::CloudManager::Vm) ||
-       target.kind_of?(ManageIQ::Providers::Azure::CloudManager::Template)
+       target.kind_of?(ManageIQ::Providers::Azure::CloudManager::Template)
       timeout_adjustment = 4
     end
     timeout_adjustment
@@ -186,12 +188,13 @@ def self.guid_active?(job_guid, timestamp, job_not_found_delay)
     return job.is_active? unless job.nil?

     # If Job is NOT found, consider active if timestamp is newer than (now - delay)
-    if timestamp.kind_of?(String)
-      timestamp = timestamp.to_time(:utc)
-    else
-      timestamp = timestamp.to_time rescue nil
-    end
+    timestamp = if timestamp.kind_of?(String)
+                  timestamp.to_time(:utc)
+                else
+                  timestamp.to_time rescue nil
+                end
     return false if timestamp.nil?
+
     (timestamp >= job_not_found_delay.seconds.ago)
   end
@@ -211,7 +214,7 @@ def self.delete_by_id(ids)
       :class_name => name,
       :method_name => "destroy",
       :priority => MiqQueue::HIGH_PRIORITY,
-      :args => [ids],
+      :args => [ids]
     )
   end
diff --git a/app/models/job/state_machine.rb b/app/models/job/state_machine.rb
index c3bf319792f..b2d75d7013a 100644
--- a/app/models/job/state_machine.rb
+++ b/app/models/job/state_machine.rb
@@ -31,7 +31,7 @@ def transit_state(signal)
       next_state = permitted_transitions[state]

       # if current state is not explicitly permitted, is any state (referred by '*') permitted?
-      next_state = permitted_transitions['*'] unless next_state
+      next_state ||= permitted_transitions['*']
       self.state = next_state
     end
     !!next_state
diff --git a/app/models/lan.rb b/app/models/lan.rb
index bf4803f130a..ba8cc918fec 100644
--- a/app/models/lan.rb
+++ b/app/models/lan.rb
@@ -11,7 +11,7 @@ class Lan < ApplicationRecord
   belongs_to :parent, :class_name => "::Lan"

   # TODO: Should this go through switch and not guest devices?
-  has_many :hosts, :through => :guest_devices
+  has_many :hosts, :through => :guest_devices

   acts_as_miq_taggable
 end
diff --git a/app/models/lifecycle_event.rb b/app/models/lifecycle_event.rb
index 03276ad54fc..f52ed4a7624 100644
--- a/app/models/lifecycle_event.rb
+++ b/app/models/lifecycle_event.rb
@@ -7,13 +7,13 @@ def self.create_event(vm, event_hash)
     _log.debug(event_hash.inspect)

     # Update the location if not provided by getting the value from the vm
-    event_hash[:location] = vm.path if event_hash[:location].blank? && !vm.blank?
+    event_hash[:location] = vm.path if event_hash[:location].blank? && vm.present?
     event = LifecycleEvent.new(event_hash)
     event.save!

     # create the event and link it to a Vm if a vm was found
-    unless vm.blank?
-      vm.lifecycle_events << event unless vm.lifecycle_events.include?(event)
+    if vm.present? && !vm.lifecycle_events.include?(event)
+      vm.lifecycle_events << event
     end
   end
 end
diff --git a/app/models/log_file.rb b/app/models/log_file.rb
index 41b1a48438c..371884068ec 100644
--- a/app/models/log_file.rb
+++ b/app/models/log_file.rb
@@ -2,7 +2,7 @@
 require 'mount/miq_generic_mount_session'

 class LogFile < ApplicationRecord
-  belongs_to :resource, :polymorphic => true
+  belongs_to :resource, :polymorphic => true
   belongs_to :file_depot
   belongs_to :miq_task
@@ -19,7 +19,7 @@ def relative_path_for_upload(loc_file)
     date_string = "#{format_log_time(logging_started_on)}_#{format_log_time(logging_ended_on)}"
     fname = "#{File.basename(loc_file, ".*").capitalize}_"
     fname += "region_#{MiqRegion.my_region.region rescue "unknown"}_#{zone.name}_#{zone.id}_#{server.name}_#{server.id}_#{date_string}#{File.extname(loc_file)}"
-    dest = File.join("/", path, fname)
+    dest = File.join("/", path, fname)
     _log.info("Built relative path: [#{dest}] from source: [#{loc_file}]")
     dest
   end
@@ -50,13 +50,13 @@ def upload
     raise _("Log Depot settings not configured") unless file_depot

     method = get_post_method(file_depot.uri)
-    send("upload_log_file_#{method}")
+    send(:"upload_log_file_#{method}")
   end

   def remove
     method = get_post_method(log_uri)
     return if method.nil?
-    return send("remove_log_file_#{method}") if respond_to?("remove_log_file_#{method}")
+    return send(:"remove_log_file_#{method}") if respond_to?(:"remove_log_file_#{method}")

     # At this point ftp should have returned
     klass = Object.const_get("Miq#{method.capitalize}Session")
@@ -70,7 +70,7 @@ def file_exists?
     method = get_post_method(log_uri)
     return true if method.nil?
-    return send("file_exists_#{method}?") if respond_to?("file_exists_#{method}?")
+    return send(:"file_exists_#{method}?") if respond_to?(:"file_exists_#{method}?")

     # At this point ftp should have returned
     klass = Object.const_get("Miq#{method.capitalize}Session")
@@ -237,6 +237,7 @@ def self._request_logs(options)
     unless server.respond_to?(:started?)
       raise MiqException::Error, _("started? not implemented for %{server_name}") % {:server_name => server.class.name}
     end
+
     unless server.started?
       if server.respond_to?(:name)
         raise MiqException::Error,
diff --git a/app/models/manageiq/providers/ansible_runner_workflow.rb b/app/models/manageiq/providers/ansible_runner_workflow.rb
index e8f712361ab..7a489bfb702 100644
--- a/app/models/manageiq/providers/ansible_runner_workflow.rb
+++ b/app/models/manageiq/providers/ansible_runner_workflow.rb
@@ -130,6 +130,7 @@ def wait_for_runner_process_async
     if monitor.running?
       return handle_runner_timeout(monitor) if job_timeout_exceeded?
+
       queue_signal(:poll_runner, :deliver_on => deliver_on)
     else
       process_runner_result(monitor.response)
diff --git a/app/models/manageiq/providers/automation_manager/configuration_script.rb b/app/models/manageiq/providers/automation_manager/configuration_script.rb
index d87c848184f..3de54eafa72 100644
--- a/app/models/manageiq/providers/automation_manager/configuration_script.rb
+++ b/app/models/manageiq/providers/automation_manager/configuration_script.rb
@@ -1,2 +1,2 @@
-class ManageIQ::Providers::AutomationManager::ConfigurationScript < ::ConfigurationScript
+class ManageIQ::Providers::AutomationManager::ConfigurationScript < ConfigurationScript
 end
diff --git a/app/models/manageiq/providers/automation_manager/configuration_script_payload.rb b/app/models/manageiq/providers/automation_manager/configuration_script_payload.rb
index 5fef3adb337..e279798a37d 100644
--- a/app/models/manageiq/providers/automation_manager/configuration_script_payload.rb
+++ b/app/models/manageiq/providers/automation_manager/configuration_script_payload.rb
@@ -1,2 +1,2 @@
-class ManageIQ::Providers::AutomationManager::ConfigurationScriptPayload < ::ConfigurationScriptPayload
+class ManageIQ::Providers::AutomationManager::ConfigurationScriptPayload < ConfigurationScriptPayload
 end
diff --git a/app/models/manageiq/providers/automation_manager/configured_system.rb b/app/models/manageiq/providers/automation_manager/configured_system.rb
index 510b350ffc2..3a02e707d41 100644
--- a/app/models/manageiq/providers/automation_manager/configured_system.rb
+++ b/app/models/manageiq/providers/automation_manager/configured_system.rb
@@ -1,2 +1,2 @@
-class ManageIQ::Providers::AutomationManager::ConfiguredSystem < ::ConfiguredSystem
+class ManageIQ::Providers::AutomationManager::ConfiguredSystem < ConfiguredSystem
 end
diff --git a/app/models/manageiq/providers/automation_manager/orchestration_stack.rb b/app/models/manageiq/providers/automation_manager/orchestration_stack.rb
index 6124fd08c14..04eca849388 100644
--- a/app/models/manageiq/providers/automation_manager/orchestration_stack.rb
+++ b/app/models/manageiq/providers/automation_manager/orchestration_stack.rb
@@ -1,4 +1,4 @@
-class ManageIQ::Providers::AutomationManager::OrchestrationStack < ::OrchestrationStack
+class ManageIQ::Providers::AutomationManager::OrchestrationStack < OrchestrationStack
   include CiFeatureMixin

   def retireable?
diff --git a/app/models/manageiq/providers/base_manager/event_catcher/runner.rb b/app/models/manageiq/providers/base_manager/event_catcher/runner.rb
index 46f94f02adc..1df019f3bed 100644
--- a/app/models/manageiq/providers/base_manager/event_catcher/runner.rb
+++ b/app/models/manageiq/providers/base_manager/event_catcher/runner.rb
@@ -1,7 +1,7 @@
 require 'concurrent/atomic/event'
 require 'util/duplicate_blocker'

-class ManageIQ::Providers::BaseManager::EventCatcher::Runner < ::MiqWorker::Runner
+class ManageIQ::Providers::BaseManager::EventCatcher::Runner < MiqWorker::Runner
   class EventCatcherHandledException < StandardError
   end
@@ -94,6 +94,7 @@ def before_exit(message, _exit_code)
   # Called when there is any change in BlacklistedEvent
   def sync_blacklisted_events
     return unless @ems
+
     filters = @ems.blacklisted_event_names
     if @filtered_events.nil?
|| @filtered_events != filters @@ -153,19 +154,19 @@ def start_event_monitor _log.info("#{log_prefix} Starting Event Monitor Thread") tid = Thread.new do - begin - monitor_events - rescue EventCatcherHandledException - Thread.exit - rescue TemporaryFailure - raise - rescue => err - _log.error("#{log_prefix} Event Monitor Thread aborted because [#{err.message}]") - _log.log_backtrace(err) unless err.kind_of?(Errno::ECONNREFUSED) - Thread.exit - ensure - @monitor_started.set - end + + monitor_events + rescue EventCatcherHandledException + Thread.exit + rescue TemporaryFailure + raise + rescue => err + _log.error("#{log_prefix} Event Monitor Thread aborted because [#{err.message}]") + _log.log_backtrace(err) unless err.kind_of?(Errno::ECONNREFUSED) + Thread.exit + ensure + @monitor_started.set + end @monitor_started.wait diff --git a/app/models/manageiq/providers/base_manager/metrics_capture.rb b/app/models/manageiq/providers/base_manager/metrics_capture.rb index 67d3f7a50b4..a0f03491947 100644 --- a/app/models/manageiq/providers/base_manager/metrics_capture.rb +++ b/app/models/manageiq/providers/base_manager/metrics_capture.rb @@ -9,9 +9,7 @@ def initialize(target, ems = nil) @ems = ems end - def zone - ems.zone - end + delegate :zone, :to => :ems def my_zone ems.zone.name diff --git a/app/models/manageiq/providers/base_manager/metrics_collector_worker/runner.rb b/app/models/manageiq/providers/base_manager/metrics_collector_worker/runner.rb index f22ee3dc92c..6124b722eee 100644 --- a/app/models/manageiq/providers/base_manager/metrics_collector_worker/runner.rb +++ b/app/models/manageiq/providers/base_manager/metrics_collector_worker/runner.rb @@ -1,3 +1,3 @@ -class ManageIQ::Providers::BaseManager::MetricsCollectorWorker::Runner < ::MiqQueueWorkerBase::Runner +class ManageIQ::Providers::BaseManager::MetricsCollectorWorker::Runner < MiqQueueWorkerBase::Runner include ProviderWorkerRunnerMixin end diff --git a/app/models/manageiq/providers/base_manager/operations_worker/runner.rb b/app/models/manageiq/providers/base_manager/operations_worker/runner.rb index 10059364f51..5a9d72ac357 100644 --- a/app/models/manageiq/providers/base_manager/operations_worker/runner.rb +++ b/app/models/manageiq/providers/base_manager/operations_worker/runner.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::BaseManager::OperationsWorker::Runner < ::MiqQueueWorkerBase::Runner +class ManageIQ::Providers::BaseManager::OperationsWorker::Runner < MiqQueueWorkerBase::Runner include ProviderWorkerRunnerMixin def worker_roles diff --git a/app/models/manageiq/providers/base_manager/refresh_worker/runner.rb b/app/models/manageiq/providers/base_manager/refresh_worker/runner.rb index 493fce5ad63..0ca7fd2ab83 100644 --- a/app/models/manageiq/providers/base_manager/refresh_worker/runner.rb +++ b/app/models/manageiq/providers/base_manager/refresh_worker/runner.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::BaseManager::RefreshWorker::Runner < ::MiqQueueWorkerBase::Runner +class ManageIQ::Providers::BaseManager::RefreshWorker::Runner < MiqQueueWorkerBase::Runner include ProviderWorkerRunnerMixin def after_initialize diff --git a/app/models/manageiq/providers/base_manager/refresher.rb b/app/models/manageiq/providers/base_manager/refresher.rb index 310463e4b4a..07a644fee3f 100644 --- a/app/models/manageiq/providers/base_manager/refresher.rb +++ b/app/models/manageiq/providers/base_manager/refresher.rb @@ -17,6 +17,7 @@ def initialize(targets) def options return @options if defined?(@options) + @options = Settings.ems_refresh end @@ 
-99,7 +100,7 @@ def refresh_targets_for_ems(ems, targets) Benchmark.realtime_block(:save_inventory) { save_inventory(ems, target, parsed) } - _log.info "#{log_header} Refreshing target #{target.class} [#{target.name}] id [#{target.id}]...Complete" + _log.info("#{log_header} Refreshing target #{target.class} [#{target.name}] id [#{target.id}]...Complete") end end @@ -157,6 +158,7 @@ def post_refresh(ems, ems_refresh_start_time) # Do any post-operations for this EMS post_process_refresh_classes.each do |klass| next unless klass.respond_to?(:post_refresh_ems) + _log.info("#{log_ems_target} Performing post-refresh operations for #{klass} instances...") klass.post_refresh_ems(ems.id, ems_refresh_start_time) _log.info("#{log_ems_target} Performing post-refresh operations for #{klass} instances...Complete") @@ -186,10 +188,12 @@ def group_targets_by_ems(targets) ems_by_ems_id[t.manager_id] ||= t.manager targets_by_ems_id[t.manager_id] << t else - ems = case - when t.respond_to?(:ext_management_system) then t.ext_management_system - when t.respond_to?(:manager) then t.manager - else t + ems = if t.respond_to?(:ext_management_system) + t.ext_management_system + elsif t.respond_to?(:manager) + t.manager + else + t end if ems.nil? _log.warn("Unable to perform refresh for #{t.class} [#{t.name}] id [#{t.id}], since it is not on an EMS.") diff --git a/app/models/manageiq/providers/cloud_manager.rb b/app/models/manageiq/providers/cloud_manager.rb index 20d2d9317b1..294658dfbbe 100644 --- a/app/models/manageiq/providers/cloud_manager.rb +++ b/app/models/manageiq/providers/cloud_manager.rb @@ -33,9 +33,9 @@ class << model_name virtual_has_many :volume_availability_zones, :class_name => "AvailabilityZone", :uses => :availability_zones - supports :authentication_status + supports :authentication_status - validates_presence_of :zone + validates :zone, :presence => true # TODO: remove and have each manager include this include HasNetworkManagerMixin @@ -56,12 +56,13 @@ def volume_availability_zones # This method is NOT meant to be called from production code. def open_browser raise NotImplementedError unless Rails.env.development? + require 'util/miq-system' MiqSystem.open_browser(browser_url) end def stop_event_monitor_queue_on_credential_change - if event_monitor_class && !self.new_record? && self.credentials_changed? + if event_monitor_class && !new_record? && credentials_changed? _log.info("EMS: [#{name}], Credentials have changed, stopping Event Monitor. It will be restarted by the WorkerMonitor.") stop_event_monitor_queue network_manager.stop_event_monitor_queue if respond_to?(:network_manager) && network_manager @@ -70,6 +71,7 @@ def stop_event_monitor_queue_on_credential_change def sync_cloud_tenants_with_tenants return unless supports?(:cloud_tenant_mapping) + sync_root_tenant sync_tenants sync_deleted_cloud_tenants @@ -81,7 +83,7 @@ def sync_tenants _log.info("Syncing CloudTenant with Tenants...") CloudTenant.with_ext_management_system(id).walk_tree do |cloud_tenant, _| - cloud_tenant_description = cloud_tenant.description.blank? ? 
cloud_tenant.name : cloud_tenant.description + cloud_tenant_description = (cloud_tenant.description.presence || cloud_tenant.name) tenant_params = {:name => cloud_tenant.name, :description => cloud_tenant_description, :source => cloud_tenant} tenant_parent = cloud_tenant.parent.try(:source_tenant) || source_tenant diff --git a/app/models/manageiq/providers/cloud_manager/auth_key_pair.rb b/app/models/manageiq/providers/cloud_manager/auth_key_pair.rb index 1f8524787b6..3468dbe412d 100644 --- a/app/models/manageiq/providers/cloud_manager/auth_key_pair.rb +++ b/app/models/manageiq/providers/cloud_manager/auth_key_pair.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::CloudManager::AuthKeyPair < ::Authentication +class ManageIQ::Providers::CloudManager::AuthKeyPair < Authentication acts_as_miq_taggable has_and_belongs_to_many :vms, :join_table => :key_pairs_vms, :foreign_key => :authentication_id virtual_belongs_to :ext_management_system, :uses => :resource diff --git a/app/models/manageiq/providers/cloud_manager/orchestration_stack.rb b/app/models/manageiq/providers/cloud_manager/orchestration_stack.rb index f509be4a1df..2fab3d059f5 100644 --- a/app/models/manageiq/providers/cloud_manager/orchestration_stack.rb +++ b/app/models/manageiq/providers/cloud_manager/orchestration_stack.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::CloudManager::OrchestrationStack < ::OrchestrationStack +class ManageIQ::Providers::CloudManager::OrchestrationStack < OrchestrationStack belongs_to :ext_management_system, :foreign_key => :ems_id, :class_name => "ManageIQ::Providers::CloudManager" belongs_to :orchestration_template belongs_to :cloud_tenant diff --git a/app/models/manageiq/providers/cloud_manager/orchestration_template_runner.rb b/app/models/manageiq/providers/cloud_manager/orchestration_template_runner.rb index 2da900cb43f..3d05224d687 100644 --- a/app/models/manageiq/providers/cloud_manager/orchestration_template_runner.rb +++ b/app/models/manageiq/providers/cloud_manager/orchestration_template_runner.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::CloudManager::OrchestrationTemplateRunner < ::Job +class ManageIQ::Providers::CloudManager::OrchestrationTemplateRunner < Job DEFAULT_EXECUTION_TTL = 100.minutes def minimize_indirect @@ -35,7 +35,7 @@ def deploy_orchestration_stack miq_task.update(:name => name) save! my_signal(false, :poll_stack_status, 10) - rescue StandardError => err + rescue => err _log.error("Error deploying orchestration stack : #{err.class} - #{err.message}") my_signal(minimize_indirect, :abort_job, err.message, 'error') end @@ -48,7 +48,7 @@ def update_orchestration_stack miq_task.update(:name => name) save! my_signal(false, :poll_stack_status, 10) - rescue StandardError => err + rescue => err _log.error("Error updating orchestration stack : #{err.class} - #{err.message}") my_signal(minimize_indirect, :abort_job, err.message, 'error') end diff --git a/app/models/manageiq/providers/cloud_manager/provision/cloning.rb b/app/models/manageiq/providers/cloud_manager/provision/cloning.rb index 5f6cfe6bcd2..bf339a3bbb0 100644 --- a/app/models/manageiq/providers/cloud_manager/provision/cloning.rb +++ b/app/models/manageiq/providers/cloud_manager/provision/cloning.rb @@ -22,7 +22,7 @@ def prepare_for_clone_task clone_options[:availability_zone] = dest_availability_zone.ems_ref if dest_availability_zone user_data = userdata_payload - clone_options[:user_data] = user_data unless user_data.blank? + clone_options[:user_data] = user_data if user_data.present? 
clone_options end diff --git a/app/models/manageiq/providers/cloud_manager/provision/configuration.rb b/app/models/manageiq/providers/cloud_manager/provision/configuration.rb index 87cb0408f6c..f43e501bec0 100644 --- a/app/models/manageiq/providers/cloud_manager/provision/configuration.rb +++ b/app/models/manageiq/providers/cloud_manager/provision/configuration.rb @@ -1,6 +1,7 @@ module ManageIQ::Providers::CloudManager::Provision::Configuration def userdata_payload return nil unless customization_template + options = prepare_customization_template_substitution_options customization_template.script_with_substitution(options) end diff --git a/app/models/manageiq/providers/cloud_manager/provision_workflow.rb b/app/models/manageiq/providers/cloud_manager/provision_workflow.rb index 4b3a0787ad1..d68c24d5530 100644 --- a/app/models/manageiq/providers/cloud_manager/provision_workflow.rb +++ b/app/models/manageiq/providers/cloud_manager/provision_workflow.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::CloudManager::ProvisionWorkflow < ::MiqProvisionVirtWorkflow +class ManageIQ::Providers::CloudManager::ProvisionWorkflow < MiqProvisionVirtWorkflow include DialogFieldValidation include CloudInitTemplateMixin include SysprepTemplateMixin @@ -23,6 +23,7 @@ def allowed_cloud_subnets(_options = {}) targets = get_targets_for_source(cn, :cloud_filter, CloudNetwork, 'cloud_subnets') targets.each_with_object({}) do |cs, hash| next if !az_id.zero? && az_id != cs.availability_zone_id + hash[cs.id] = "#{cs.name} (#{cs.cidr}) | #{cs.availability_zone.try(:name)}" end else @@ -32,6 +33,7 @@ def allowed_cloud_subnets(_options = {}) def allowed_cloud_networks(_options = {}) return {} unless (src = provider_or_tenant_object) + targets = get_targets_for_source(src, :cloud_filter, CloudNetwork, 'all_cloud_networks') allowed_ci(:cloud_network, [:availability_zone], targets.map(&:id)) end @@ -54,6 +56,7 @@ def allowed_security_groups(_options = {}) def allowed_floating_ip_addresses(_options = {}) return {} unless (src_obj = provider_or_tenant_object) + targets = get_targets_for_source(src_obj, :cloud_filter, FloatingIp, 'floating_ips.available') targets.each_with_object({}) do |ip, h| h[ip.id] = ip.address @@ -101,6 +104,7 @@ def allowed_customization_templates(options = {}) # Optional starting set of results maybe passed in. def allowed_ci(ci, relats, filtered_ids = nil) return {} if (sources = resources_for_ui).blank? + super(ci, relats, sources, filtered_ids) end @@ -123,6 +127,7 @@ def availability_zone_to_cloud_network(src) def get_source_and_targets(refresh = false) return @target_resource if @target_resource && refresh == false + result = super return result if result.blank? @@ -143,6 +148,7 @@ def get_targets_for_ems(src, filter_name, klass, relats) ems = src.try(:ext_management_system) return {} if ems.nil? + process_filter(filter_name, klass, ems.deep_send(relats)) end @@ -154,6 +160,7 @@ def dialog_name_from_automate(message, extra_attrs) def provider_or_tenant_object src = resources_for_ui return nil if src[:ems].nil? 
+ obj = src[:cloud_tenant] || src[:ems] load_ar_obj(obj) end diff --git a/app/models/manageiq/providers/cloud_manager/resource_pool.rb b/app/models/manageiq/providers/cloud_manager/resource_pool.rb index c117d180310..de76ad5a9dc 100644 --- a/app/models/manageiq/providers/cloud_manager/resource_pool.rb +++ b/app/models/manageiq/providers/cloud_manager/resource_pool.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::CloudManager::ResourcePool < ::ResourcePool +class ManageIQ::Providers::CloudManager::ResourcePool < ResourcePool end diff --git a/app/models/manageiq/providers/cloud_manager/template.rb b/app/models/manageiq/providers/cloud_manager/template.rb index 0188e16c47a..7c6f2d5f988 100644 --- a/app/models/manageiq/providers/cloud_manager/template.rb +++ b/app/models/manageiq/providers/cloud_manager/template.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::CloudManager::Template < ::MiqTemplate +class ManageIQ::Providers::CloudManager::Template < MiqTemplate attribute :cloud, :default => true virtual_column :image?, :type => :boolean @@ -27,7 +27,7 @@ def self.create_image_queue(userid, ext_management_system, options = {}) } queue_opts = { - :class_name => self.name, + :class_name => name, :method_name => 'create_image', :role => 'ems_operations', :zone => ext_management_system.my_zone, @@ -44,6 +44,7 @@ def self.raw_create_image(_ext_management_system, _options = {}) def self.create_image(ems_id, options) raise ArgumentError, _("ems cannot be nil") if ems_id.nil? + ext_management_system = ExtManagementSystem.find(ems_id) raise ArgumentError, _("ems cannot be found") if ext_management_system.nil? diff --git a/app/models/manageiq/providers/cloud_manager/vm.rb b/app/models/manageiq/providers/cloud_manager/vm.rb index da8ca5b2dc1..88588f1269e 100644 --- a/app/models/manageiq/providers/cloud_manager/vm.rb +++ b/app/models/manageiq/providers/cloud_manager/vm.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::CloudManager::Vm < ::Vm +class ManageIQ::Providers::CloudManager::Vm < Vm belongs_to :availability_zone belongs_to :flavor belongs_to :orchestration_stack @@ -9,7 +9,7 @@ class ManageIQ::Providers::CloudManager::Vm < ::Vm has_many :network_routers, -> { distinct }, :through => :cloud_subnets # Keeping floating_ip for backwards compatibility. 
Keeping association through vm_id foreign key, because of Amazon # ec2, it allows to associate floating ips without network ports - has_one :floating_ip, :foreign_key => :vm_id + has_one :floating_ip has_many :floating_ips has_many :security_groups, -> { distinct }, :through => :network_ports has_many :cloud_volumes, :through => :disks, :source => :backing, :source_type => "CloudVolume" @@ -20,11 +20,10 @@ class ManageIQ::Providers::CloudManager::Vm < ::Vm has_many :load_balancer_health_checks, -> { distinct }, :through => :load_balancer_pool_members has_and_belongs_to_many :key_pairs, :join_table => :key_pairs_vms, - :foreign_key => :vm_id, :association_foreign_key => :authentication_id, :class_name => "ManageIQ::Providers::CloudManager::AuthKeyPair" - has_many :host_aggregates, :through => :host + has_many :host_aggregates, :through => :host attribute :cloud, :default => true diff --git a/app/models/manageiq/providers/container_manager/metrics_capture.rb b/app/models/manageiq/providers/container_manager/metrics_capture.rb index d5366938b46..7358ba459f1 100644 --- a/app/models/manageiq/providers/container_manager/metrics_capture.rb +++ b/app/models/manageiq/providers/container_manager/metrics_capture.rb @@ -2,7 +2,7 @@ class ManageIQ::Providers::ContainerManager::MetricsCapture < ManageIQ::Provider def capture_ems_targets(_options = {}) return [] unless ems.supports?(:metrics) - MiqPreloader.preload([ems], :container_images => :tags, :container_nodes => :tags, :container_groups => [:tags, :containers => :tags]) + MiqPreloader.preload([ems], :container_images => :tags, :container_nodes => :tags, :container_groups => [:tags, {:containers => :tags}]) with_archived(ems.all_container_nodes).supporting(:capture) + with_archived(ems.all_container_groups).supporting(:capture) + diff --git a/app/models/manageiq/providers/container_manager/orchestration_stack.rb b/app/models/manageiq/providers/container_manager/orchestration_stack.rb index 68b707255ca..09c5de8f73d 100644 --- a/app/models/manageiq/providers/container_manager/orchestration_stack.rb +++ b/app/models/manageiq/providers/container_manager/orchestration_stack.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::ContainerManager::OrchestrationStack < ::OrchestrationStack +class ManageIQ::Providers::ContainerManager::OrchestrationStack < OrchestrationStack belongs_to :ext_management_system, :foreign_key => :ems_id, :class_name => "ManageIQ::Providers::ContainerManager" belongs_to :container_template, :foreign_key => :orchestration_template_id, :class_name => "ContainerTemplate" diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/azure_credential.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/azure_credential.rb index 4e582727c53..43929bc87c1 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/azure_credential.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/azure_credential.rb @@ -72,7 +72,7 @@ def self.params_to_attributes(params) attrs = super.dup attrs[:auth_key] = attrs.delete(:secret) if attrs.key?(:secret) - if %i[client tenant subscription].any? {|opt| attrs.has_key? opt } + if %i[client tenant subscription].any? 
{ |opt| attrs.has_key?(opt) } attrs[:options] ||= {} attrs[:options][:client] = attrs.delete(:client) if attrs.key?(:client) attrs[:options][:tenant] = attrs.delete(:tenant) if attrs.key?(:tenant) diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source.rb index 61c70a3aa52..f9e1e1a4488 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source.rb @@ -73,7 +73,7 @@ def format_sync_error(error) private - VALID_PLAYBOOK_CHECK = /^\s*?-?\s*?(?:hosts|include|import_playbook):\s*?.*?$/.freeze + VALID_PLAYBOOK_CHECK = /^\s*?-?\s*?(?:hosts|include|import_playbook):\s*?.*?$/ # Confirms two things: # diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/google_credential.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/google_credential.rb index a963c29d2d3..1a129918001 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/google_credential.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/google_credential.rb @@ -51,8 +51,8 @@ def self.display_name(number = 1) def self.params_to_attributes(params) attrs = super.dup - attrs[:auth_key] = attrs.delete(:ssh_key_data) if attrs.key?(:ssh_key_data) - attrs[:options] = { :project => attrs.delete(:project) } if attrs[:project] + attrs[:auth_key] = attrs.delete(:ssh_key_data) if attrs.key?(:ssh_key_data) + attrs[:options] = {:project => attrs.delete(:project)} if attrs[:project] attrs end diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/job.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/job.rb index 038f52e7d59..8e38fc92b6e 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/job.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/job.rb @@ -24,7 +24,7 @@ def self.create_stack(playbook, options = {}) def self.raw_create_stack(playbook, options = {}) playbook.run(options) - rescue StandardError => e + rescue => e _log.error("Failed to create job from playbook(#{playbook.name}), error: #{e}") raise MiqException::MiqOrchestrationProvisionError, e.to_s, e.backtrace end diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/job/status.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/job/status.rb index 9c8af340997..2fdba5100fc 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/job/status.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/job/status.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Job::Status < ::OrchestrationStack::Status +class ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Job::Status < OrchestrationStack::Status attr_accessor :task_status # This is a bit confusing, but because the OrchestrationStack::Status doesn't diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/network_credential.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/network_credential.rb index 74ac76dc16d..d6a27ebd067 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/network_credential.rb +++ 
b/app/models/manageiq/providers/embedded_ansible/automation_manager/network_credential.rb @@ -84,7 +84,7 @@ def self.params_to_attributes(params) attrs[:become_password] = attrs.delete(:authorize_password) if attrs.key?(:authorize_password) if attrs[:authorize] - attrs[:options] = { :authorize => attrs.delete(:authorize) } + attrs[:options] = {:authorize => attrs.delete(:authorize)} end attrs diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/openstack_credential.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/openstack_credential.rb index 8f3ecd748ba..b491d4540f4 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/openstack_credential.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/openstack_credential.rb @@ -67,7 +67,7 @@ def self.display_name(number = 1) def self.params_to_attributes(params) attrs = super.dup - if %i[host domain project].any? {|opt| attrs.has_key? opt } + if %i[host domain project].any? { |opt| attrs.has_key?(opt) } attrs[:options] ||= {} attrs[:options][:host] = attrs.delete(:host) if attrs.key?(:host) attrs[:options][:domain] = attrs.delete(:domain) if attrs.key?(:domain) diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/playbook.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/playbook.rb index 0316cb5c3ba..4f2e1aea7fc 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/playbook.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/playbook.rb @@ -29,7 +29,7 @@ def run(vars = {}, _userid = nil) def build_extra_vars(external = {}) (external || {}).each_with_object({}) do |(k, v), hash| match_data = v.kind_of?(String) && /password::/.match(v) - hash[k] = match_data ? ManageIQ::Password.decrypt(v.gsub(/password::/, '')) : v + hash[k] = match_data ? 
ManageIQ::Password.decrypt(v.gsub("password::", '')) : v end end diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/rhv_credential.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/rhv_credential.rb index 3b1ca0ff033..2760cc967bd 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/rhv_credential.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/rhv_credential.rb @@ -44,7 +44,7 @@ def self.display_name(number = 1) def self.params_to_attributes(params) attrs = super.dup - attrs[:options] = { :host => attrs.delete(:host) } if attrs[:host] + attrs[:options] = {:host => attrs.delete(:host)} if attrs[:host] attrs end diff --git a/app/models/manageiq/providers/embedded_ansible/automation_manager/vmware_credential.rb b/app/models/manageiq/providers/embedded_ansible/automation_manager/vmware_credential.rb index 7674592e4ce..b9785b91847 100644 --- a/app/models/manageiq/providers/embedded_ansible/automation_manager/vmware_credential.rb +++ b/app/models/manageiq/providers/embedded_ansible/automation_manager/vmware_credential.rb @@ -48,7 +48,7 @@ def self.display_name(number = 1) def self.params_to_attributes(params) attrs = super.dup - attrs[:options] = { :host => attrs.delete(:host) } if attrs[:host] + attrs[:options] = {:host => attrs.delete(:host)} if attrs[:host] attrs end diff --git a/app/models/manageiq/providers/embedded_ansible/provider.rb b/app/models/manageiq/providers/embedded_ansible/provider.rb index 885c45bdb29..b4273dd8161 100644 --- a/app/models/manageiq/providers/embedded_ansible/provider.rb +++ b/app/models/manageiq/providers/embedded_ansible/provider.rb @@ -1,11 +1,10 @@ -class ManageIQ::Providers::EmbeddedAnsible::Provider < ::Provider +class ManageIQ::Providers::EmbeddedAnsible::Provider < Provider include DefaultAnsibleObjects has_one :automation_manager, - :foreign_key => "provider_id", - :class_name => "ManageIQ::Providers::EmbeddedAnsible::AutomationManager", - :dependent => :destroy, # to be removed after ansible_tower side code is updated - :autosave => true + :class_name => "ManageIQ::Providers::EmbeddedAnsible::AutomationManager", + :dependent => :destroy, # to be removed after ansible_tower side code is updated + :autosave => true before_validation :ensure_managers @@ -13,7 +12,7 @@ class ManageIQ::Providers::EmbeddedAnsible::Provider < ::Provider def ensure_managers build_automation_manager unless automation_manager - automation_manager.name = _("%{name} Automation Manager") % {:name => name} + automation_manager.name = _("%{name} Automation Manager") % {:name => name} if zone_id_changed? automation_manager.enabled = Zone.maintenance_zone&.id != zone_id automation_manager.zone_id = zone_id diff --git a/app/models/manageiq/providers/embedded_automation_manager/authentication.rb b/app/models/manageiq/providers/embedded_automation_manager/authentication.rb index c3778df700d..ce5447f5bc7 100644 --- a/app/models/manageiq/providers/embedded_automation_manager/authentication.rb +++ b/app/models/manageiq/providers/embedded_automation_manager/authentication.rb @@ -67,7 +67,7 @@ def native_ref end def set_manager_ref - self.manager_ref = self.id + self.manager_ref = id save! 
end diff --git a/app/models/manageiq/providers/embedded_automation_manager/crud_common.rb b/app/models/manageiq/providers/embedded_automation_manager/crud_common.rb index 69429d798b1..dedeb90e9e9 100644 --- a/app/models/manageiq/providers/embedded_automation_manager/crud_common.rb +++ b/app/models/manageiq/providers/embedded_automation_manager/crud_common.rb @@ -23,7 +23,7 @@ def queue(instance_id, method_name, args, action, auth_user) def notify(op_type, manager_id, params) error = nil yield - rescue StandardError => error + rescue => error _log.debug(error.result.error) if error.kind_of?(AwesomeSpawn::CommandResultError) raise ensure diff --git a/app/models/manageiq/providers/ems_refresh_workflow.rb b/app/models/manageiq/providers/ems_refresh_workflow.rb index 35969374776..ed695a9b0bd 100644 --- a/app/models/manageiq/providers/ems_refresh_workflow.rb +++ b/app/models/manageiq/providers/ems_refresh_workflow.rb @@ -59,12 +59,12 @@ def queue_signal(*args, deliver_on: nil) super(*args, :role => role, :priority => priority, :deliver_on => deliver_on) end - alias_method :initializing, :dispatch_start - alias_method :start, :run_native_op - alias_method :finish, :process_finished - alias_method :abort_job, :process_abort - alias_method :cancel, :process_cancel - alias_method :error, :process_error + alias initializing dispatch_start + alias start run_native_op + alias finish process_finished + alias abort_job process_abort + alias cancel process_cancel + alias error process_error protected diff --git a/app/models/manageiq/providers/inflector.rb b/app/models/manageiq/providers/inflector.rb index 920bbf06bed..9423dffe9a0 100644 --- a/app/models/manageiq/providers/inflector.rb +++ b/app/models/manageiq/providers/inflector.rb @@ -2,12 +2,12 @@ module ManageIQ::Providers::Inflector class ObjectNotNamespacedError < StandardError; end def self.provider_name(class_or_instance) - klass = class_or_instance.class == Class ? class_or_instance : class_or_instance.class + klass = class_or_instance.instance_of?(Class) ? class_or_instance : class_or_instance.class provider_module(klass).name.split('::').last end def self.manager_type(class_or_instance) - klass = class_or_instance.class == Class ? class_or_instance : class_or_instance.class + klass = class_or_instance.instance_of?(Class) ? 
class_or_instance : class_or_instance.class manager = (klass.name.split('::') - provider_module(klass).name.split('::')).first manager.chomp('Manager') end diff --git a/app/models/manageiq/providers/infra_manager/cluster.rb b/app/models/manageiq/providers/infra_manager/cluster.rb index ed6da07fc2d..6ec34c6530f 100644 --- a/app/models/manageiq/providers/infra_manager/cluster.rb +++ b/app/models/manageiq/providers/infra_manager/cluster.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::InfraManager::Cluster < ::EmsCluster +class ManageIQ::Providers::InfraManager::Cluster < EmsCluster end diff --git a/app/models/manageiq/providers/infra_manager/datacenter.rb b/app/models/manageiq/providers/infra_manager/datacenter.rb index 3612a8e57ab..928c3f5118f 100644 --- a/app/models/manageiq/providers/infra_manager/datacenter.rb +++ b/app/models/manageiq/providers/infra_manager/datacenter.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::InfraManager::Datacenter < ::Datacenter +class ManageIQ::Providers::InfraManager::Datacenter < Datacenter end diff --git a/app/models/manageiq/providers/infra_manager/distributed_virtual_switch.rb b/app/models/manageiq/providers/infra_manager/distributed_virtual_switch.rb index c0c9b721f79..b16aa913d31 100644 --- a/app/models/manageiq/providers/infra_manager/distributed_virtual_switch.rb +++ b/app/models/manageiq/providers/infra_manager/distributed_virtual_switch.rb @@ -1,3 +1,3 @@ -class ManageIQ::Providers::InfraManager::DistributedVirtualSwitch < ::Switch +class ManageIQ::Providers::InfraManager::DistributedVirtualSwitch < Switch belongs_to :ext_management_system, :foreign_key => :ems_id, :inverse_of => :distributed_virtual_switches, :class_name => "ManageIQ::Providers::InfraManager" end diff --git a/app/models/manageiq/providers/infra_manager/folder.rb b/app/models/manageiq/providers/infra_manager/folder.rb index 4e2340c412e..00f2b695dda 100644 --- a/app/models/manageiq/providers/infra_manager/folder.rb +++ b/app/models/manageiq/providers/infra_manager/folder.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::InfraManager::Folder < ::EmsFolder +class ManageIQ::Providers::InfraManager::Folder < EmsFolder end diff --git a/app/models/manageiq/providers/infra_manager/provision_workflow.rb b/app/models/manageiq/providers/infra_manager/provision_workflow.rb index d3859acc420..264b2a09c57 100644 --- a/app/models/manageiq/providers/infra_manager/provision_workflow.rb +++ b/app/models/manageiq/providers/infra_manager/provision_workflow.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::InfraManager::ProvisionWorkflow < ::MiqProvisionVirtWorkflow +class ManageIQ::Providers::InfraManager::ProvisionWorkflow < MiqProvisionVirtWorkflow def set_or_default_hardware_field_values(vm) update_values = { :vm_memory => vm.hardware.memory_mb.to_s, @@ -32,12 +32,14 @@ def get_cpu_values_hash(vm) def allowed_ci(ci, relats, filtered_ids = nil) return {} if get_value(@values[:placement_auto]) == true return {} if (sources = resources_for_ui).blank? + get_ems_metadata_tree(sources) super(ci, relats, sources, filtered_ids) end def get_source_and_targets(refresh = false) return @target_resource if @target_resource && refresh == false + result = super return result if result.blank? 
diff --git a/app/models/manageiq/providers/infra_manager/resource_pool.rb b/app/models/manageiq/providers/infra_manager/resource_pool.rb index 555c844a1c7..81b92230634 100644 --- a/app/models/manageiq/providers/infra_manager/resource_pool.rb +++ b/app/models/manageiq/providers/infra_manager/resource_pool.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::InfraManager::ResourcePool < ::ResourcePool +class ManageIQ::Providers::InfraManager::ResourcePool < ResourcePool end diff --git a/app/models/manageiq/providers/infra_manager/storage.rb b/app/models/manageiq/providers/infra_manager/storage.rb index 6a6a76b2bdb..af3aaf6e98b 100644 --- a/app/models/manageiq/providers/infra_manager/storage.rb +++ b/app/models/manageiq/providers/infra_manager/storage.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::InfraManager::Storage < ::Storage +class ManageIQ::Providers::InfraManager::Storage < Storage end diff --git a/app/models/manageiq/providers/infra_manager/storage_cluster.rb b/app/models/manageiq/providers/infra_manager/storage_cluster.rb index fe297468674..7a2d13ceef3 100644 --- a/app/models/manageiq/providers/infra_manager/storage_cluster.rb +++ b/app/models/manageiq/providers/infra_manager/storage_cluster.rb @@ -1,2 +1,2 @@ -class ManageIQ::Providers::InfraManager::StorageCluster < ::StorageCluster +class ManageIQ::Providers::InfraManager::StorageCluster < StorageCluster end diff --git a/app/models/manageiq/providers/infra_manager/vm.rb b/app/models/manageiq/providers/infra_manager/vm.rb index b6b18ea2cb6..223b5b67877 100644 --- a/app/models/manageiq/providers/infra_manager/vm.rb +++ b/app/models/manageiq/providers/infra_manager/vm.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::InfraManager::Vm < ::Vm +class ManageIQ::Providers::InfraManager::Vm < Vm attribute :cloud, :default => false # Show certain non-generic charts @@ -12,6 +12,7 @@ def memory_mb_available? def self.calculate_power_state(raw_power_state) return raw_power_state if raw_power_state == "wait_for_launch" + super end diff --git a/app/models/manageiq/providers/inventory.rb b/app/models/manageiq/providers/inventory.rb index 425830a205b..63e517c6edd 100644 --- a/app/models/manageiq/providers/inventory.rb +++ b/app/models/manageiq/providers/inventory.rb @@ -44,9 +44,7 @@ def parse # Returns all InventoryCollections contained in persister # # @return [Array] List of InventoryCollections objects - def inventory_collections - parse.inventory_collections - end + delegate :inventory_collections, :to => :parse # Based on the given provider/manager class, this returns correct collector class # @@ -83,7 +81,7 @@ def self.parser_class_for(ems, target = nil, manager_name = nil) # @param type [String] 'Persister' | 'Collector' | 'Parser' # @param manager_name [String, nil] @see default_manager_name def self.class_for(ems, target, type, manager_name = nil) - ems_class = ems.class == Class ? ems : ems.class + ems_class = ems.instance_of?(Class) ? ems : ems.class provider_module = ManageIQ::Providers::Inflector.provider_module(ems_class) manager_name ||= parsed_manager_name(ems, target) @@ -111,7 +109,7 @@ def self.parsed_manager_name(_ems, target) when InventoryRefresh::TargetCollection 'TargetCollection' else - klass = target.class == Class ? target : target.class + klass = target.instance_of?(Class) ? 
target : target.class suffix_arr = klass.name.split('::') - ManageIQ::Providers::Inflector.provider_module(klass).name.split("::") suffix_arr.join('::') end diff --git a/app/models/manageiq/providers/inventory/parser.rb b/app/models/manageiq/providers/inventory/parser.rb index 5c730d1701a..1e52274854a 100644 --- a/app/models/manageiq/providers/inventory/parser.rb +++ b/app/models/manageiq/providers/inventory/parser.rb @@ -1,6 +1,5 @@ class ManageIQ::Providers::Inventory::Parser - attr_accessor :collector - attr_accessor :persister + attr_accessor :collector, :persister include Vmdb::Logging diff --git a/app/models/manageiq/providers/inventory/persister.rb b/app/models/manageiq/providers/inventory/persister.rb index 50f54076265..5d4e3ac1127 100644 --- a/app/models/manageiq/providers/inventory/persister.rb +++ b/app/models/manageiq/providers/inventory/persister.rb @@ -34,7 +34,7 @@ def self.from_json(json_data) # Returns serialized Persisted object to JSON # @return [String] serialized Persisted object to JSON - def to_json + def to_json(*_args) JSON.dump(to_hash) end diff --git a/app/models/manageiq/providers/inventory/persister/builder/persister_helper.rb b/app/models/manageiq/providers/inventory/persister/builder/persister_helper.rb index 0b45bfcffa8..8cf5a78f7b4 100644 --- a/app/models/manageiq/providers/inventory/persister/builder/persister_helper.rb +++ b/app/models/manageiq/providers/inventory/persister/builder/persister_helper.rb @@ -130,7 +130,7 @@ def name_references(collection) private def add_collection_for_manager(manager_type, collection_name, extra_properties = {}, settings = {}, &block) - settings[:parent] ||= send("#{manager_type}_manager") + settings[:parent] ||= send(:"#{manager_type}_manager") builder_class = send(manager_type) add_collection(builder_class, collection_name, extra_properties, settings, &block) diff --git a/app/models/manageiq/providers/physical_infra_manager/vm.rb b/app/models/manageiq/providers/physical_infra_manager/vm.rb index 3bfe51e0458..ba68eb8f0d4 100644 --- a/app/models/manageiq/providers/physical_infra_manager/vm.rb +++ b/app/models/manageiq/providers/physical_infra_manager/vm.rb @@ -1,4 +1,4 @@ -class ManageIQ::Providers::PhysicalInfraManager::Vm < ::Vm +class ManageIQ::Providers::PhysicalInfraManager::Vm < Vm attribute :cloud, :default => false def self.display_name(number = 1) diff --git a/app/models/metering.rb b/app/models/metering.rb index dfa6f273662..ea61aba9ee5 100644 --- a/app/models/metering.rb +++ b/app/models/metering.rb @@ -1,7 +1,7 @@ module Metering extend ActiveSupport::Concern - DISALLOWED_SUFFIXES = %w(_cost chargeback_rates).freeze - METERING_ALLOCATED_FIELDS = %w(metering_allocated_cpu_cores_metric metering_allocated_cpu_metric metering_allocated_memory_metric).freeze + DISALLOWED_SUFFIXES = %w[_cost chargeback_rates].freeze + METERING_ALLOCATED_FIELDS = %w[metering_allocated_cpu_cores_metric metering_allocated_cpu_metric metering_allocated_memory_metric].freeze ALLOWED_FIELD_SUFFIXES = %w[ -beginning_of_resource_existence_in_report_interval -end_of_resource_existence_in_report_interval @@ -39,6 +39,7 @@ def calculate_costs(consumption, _) relevant_fields.each do |field| next unless self.class.report_col_options.include?(field) + group, source, * = field.split('_') if field == 'net_io_used_metric' @@ -63,6 +64,7 @@ def calculate_costs(consumption, _) chargable_field = ChargeableField.find_by(:group => group, :source => source) next if METERING_ALLOCATED_FIELDS.include?(field) || field == "existence_hours_metric" || field == 
"fixed_compute_metric" || chargable_field&.metering? + value = chargable_field.measure_metering(consumption, @options) if chargable_field self[field] = (value || 0) end diff --git a/app/models/metric/aggregation.rb b/app/models/metric/aggregation.rb index 088eb66bf0e..3cfb5487ea0 100644 --- a/app/models/metric/aggregation.rb +++ b/app/models/metric/aggregation.rb @@ -18,22 +18,25 @@ def self.column(col, *args) # args => obj, result, counts, value, default_operat class Aggregate < Common def self.summation(col, _obj, result, counts, value) return if value.nil? + result[col] += value counts[col] += 1 end class << self - alias_method :derived_vm_numvcpus, :summation - alias_method :average, :summation + alias derived_vm_numvcpus summation + alias average summation end def self.latest(col, _obj, result, _counts, value) return if value.nil? + result[col] = value end def self.cpu_usage_rate_average(col, state, result, counts, value) return if value.nil? + if state.try(:total_cpu).to_i > 0 pct = value / 100 total = state.total_cpu @@ -49,6 +52,7 @@ def self.cpu_usage_rate_average(col, state, result, counts, value) def self.mem_usage_absolute_average(col, state, result, counts, value) return if value.nil? + if state && state.total_mem pct = value / 100 total = state.total_mem @@ -63,6 +67,7 @@ def self.mem_usage_absolute_average(col, state, result, counts, value) class Process < Common def self.average(col, _dummy, result, counts, aggregate_only = false) return if aggregate_only || result[col].nil? + result[col] = result[col] / counts[col] unless counts[col] == 0 end @@ -71,11 +76,12 @@ def self.summation(*) end class << self - alias_method :latest, :summation + alias latest summation end def self.cpu_usage_rate_average(col, state, result, counts, aggregate_only = false) return if result[col].nil? + if state.try(:total_cpu).to_i > 0 total = state.total_cpu result[col] = result[col] / total * 100 unless total == 0 @@ -86,6 +92,7 @@ def self.cpu_usage_rate_average(col, state, result, counts, aggregate_only = fal def self.mem_usage_absolute_average(col, state, result, counts, aggregate_only = false) return if result[col].nil? + if state && state.total_mem total = state.total_mem result[col] = result[col] / total * 100 unless total == 0 diff --git a/app/models/metric/ci_mixin.rb b/app/models/metric/ci_mixin.rb index 0763ea8cfb1..62386a8179b 100644 --- a/app/models/metric/ci_mixin.rb +++ b/app/models/metric/ci_mixin.rb @@ -23,6 +23,7 @@ module Metric::CiMixin def has_perf_data? return @has_perf_data unless @has_perf_data.nil? + @has_perf_data = associated_metrics('hourly').exists? end @@ -45,10 +46,14 @@ def first_and_last_capture(interval_name = "hourly") .group(:resource_id) .limit(1).to_a .first - perf.nil? ? [] : [ - perf.first_ts.kind_of?(String) ? Time.parse("#{perf.first_ts} UTC") : perf.first_ts, - perf.last_ts.kind_of?(String) ? Time.parse("#{perf.last_ts} UTC") : perf.last_ts - ] + if perf.nil? + [] + else + [ + perf.first_ts.kind_of?(String) ? Time.parse("#{perf.first_ts} UTC") : perf.first_ts, + perf.last_ts.kind_of?(String) ? 
Time.parse("#{perf.last_ts} UTC") : perf.last_ts + ] + end end # @@ -58,6 +63,7 @@ def first_and_last_capture(interval_name = "hourly") def performances_maintains_value_for_duration?(options) _log.info("options: #{options.inspect}") raise _("Argument must be an options hash") unless options.kind_of?(Hash) + column = options[:column] value = options[:value].to_f duration = options[:duration] @@ -78,7 +84,7 @@ def performances_maintains_value_for_duration?(options) raise ":value required" if value.nil? raise ":duration required" if duration.nil? # TODO: Check for valid operators - unless percentage.nil? || percentage.kind_of?(Integer) && percentage >= 0 && percentage <= 100 + unless percentage.nil? || (percentage.kind_of?(Integer) && percentage >= 0 && percentage <= 100) raise _(":percentage expected integer from 0-100, received: %{number}") % {:number => percentage} end @@ -122,8 +128,8 @@ def performances_maintains_value_for_duration?(options) # Find the record at or near the starting_on timestamp to determine if we need to handle overlap rec_at_start_on = total_records.reverse.detect { |r| r.timestamp >= starting_on } return false if rec_at_start_on.nil? + start_on_idx = total_records.index { |r| r.timestamp == rec_at_start_on.timestamp } - # colvalue = rec_at_start_on.send(column) if colvalue && colvalue.send(operator, value) # If there is a match at the start_on timestamp then we need to check the records going backwards to find the first one that doesnt match. @@ -202,8 +208,8 @@ def performances_maintains_value_for_duration?(options) matches_in_window += match_history[i] _log.info("Matched?: true, Index: #{i}, Window start index: #{i - recs_in_window}, matches_in_window: #{matches_in_window}, ts: #{rec.timestamp}, #{column}: #{rec.send(column)}") if debug_trace return true if matches_in_window >= recs_to_match - else - _log.info("Matched?: false, Index: #{i}, Window start index: #{i - recs_in_window}, matches_in_window: #{matches_in_window}, ts: #{rec.timestamp}, #{column}: #{rec.send(column)}") if debug_trace + elsif debug_trace + _log.info("Matched?: false, Index: #{i}, Window start index: #{i - recs_in_window}, matches_in_window: #{matches_in_window}, ts: #{rec.timestamp}, #{column}: #{rec.send(column)}") end end false @@ -211,6 +217,7 @@ def performances_maintains_value_for_duration?(options) def get_daily_time_profile_in_my_region_from_tz(tz) return if tz.nil? + TimeProfile.in_region(region_id).rollup_daily_metrics.find_all_with_entire_tz.detect { |p| p.tz_or_default == tz } end diff --git a/app/models/metric/ci_mixin/capture.rb b/app/models/metric/ci_mixin/capture.rb index aab039f249d..425feb1bbf5 100644 --- a/app/models/metric/ci_mixin/capture.rb +++ b/app/models/metric/ci_mixin/capture.rb @@ -1,6 +1,6 @@ module Metric::CiMixin::Capture def perf_capture_object(targets = nil) - if self.kind_of?(ExtManagementSystem) + if kind_of?(ExtManagementSystem) self.class::MetricsCapture.new(targets, ext_management_system) else self.class.module_parent::MetricsCapture.new(targets || self, ext_management_system) diff --git a/app/models/metric/ci_mixin/long_term_averages.rb b/app/models/metric/ci_mixin/long_term_averages.rb index b7c4464c959..4cf0a13dd37 100644 --- a/app/models/metric/ci_mixin/long_term_averages.rb +++ b/app/models/metric/ci_mixin/long_term_averages.rb @@ -6,7 +6,7 @@ module Metric::CiMixin::LongTermAverages Metric::LongTermAverages::AVG_METHODS_WITHOUT_OVERHEAD_INFO.each do |meth, info| define_method(meth) do base = send(info[:base_meth]) - base.nil? || self.kind_of?(Vm) ? 
base : [base - Metric::ConfigSettings.send("host_overhead_#{info[:overhead_type]}"), 0.0].max + base.nil? || kind_of?(Vm) ? base : [base - Metric::ConfigSettings.send(:"host_overhead_#{info[:overhead_type]}"), 0.0].max end end diff --git a/app/models/metric/ci_mixin/processing.rb b/app/models/metric/ci_mixin/processing.rb index b0d9c3c3a1a..fb612b67fd9 100644 --- a/app/models/metric/ci_mixin/processing.rb +++ b/app/models/metric/ci_mixin/processing.rb @@ -189,6 +189,7 @@ def transform_parameters_row_with_all_metrics(resources, interval_name, start_ti def normalize_value(value, counter) return counter[:rollup] == 'latest' ? nil : 0 if value < 0 + value = value.to_f * counter[:precision] message = nil diff --git a/app/models/metric/ci_mixin/rollup.rb b/app/models/metric/ci_mixin/rollup.rb index 20a2266a7e3..ac8533d076f 100644 --- a/app/models/metric/ci_mixin/rollup.rb +++ b/app/models/metric/ci_mixin/rollup.rb @@ -5,7 +5,7 @@ def perf_rollup_to_parents(interval_name, start_time, end_time = nil) when 'hourly', 'historical' then [perf_rollup_parents('hourly'), 'daily'] when 'daily' then [nil, nil] else raise ArgumentError, _("invalid interval name %{name}") % - {:name => interval_name} + {:name => interval_name} end parents = parent_rollups.to_a.compact.flat_map { |p| [p, interval_name] } @@ -15,14 +15,14 @@ def perf_rollup_to_parents(interval_name, start_time, end_time = nil) next if parent.nil? case new_interval - when 'hourly', 'historical' then + when 'hourly', 'historical' times = Metric::Helper.hours_from_range(start_time, end_time) log_header = "Queueing [#{new_interval}] rollup to #{parent.class.name} id: [#{parent.id}] for times: #{times.inspect}" _log.info("#{log_header}...") times.each { |t| parent.perf_rollup_queue(t, new_interval) } _log.info("#{log_header}...Complete") - when 'daily' then + when 'daily' times_by_tp = Metric::Helper.days_from_range_by_time_profile(start_time, end_time) times_by_tp.each do |tp, times| log_header = "Queueing [#{new_interval}] rollup to #{parent.class.name} id: [#{parent.id}] in time profile: [#{tp.description}] for times: #{times.inspect}" @@ -42,6 +42,7 @@ def perf_rollup_queue(time, interval_name, time_profile = nil) if interval_name == 'daily' && time_profile.nil? raise ArgumentError, _("time_profile must be passed if interval name is 'daily'") end + time_profile = TimeProfile.extract_objects(time_profile) deliver_on = case interval_name @@ -73,6 +74,7 @@ def perf_rollup(time, interval_name, time_profile = nil) if interval_name == 'daily' && time_profile.nil? raise ArgumentError, _("time_profile must be passed if interval name is 'daily'") end + time_profile = TimeProfile.extract_objects(time_profile) _klass, meth = Metric::Helper.class_and_association_for_interval_name(interval_name) @@ -93,7 +95,7 @@ def perf_rollup(time, interval_name, time_profile = nil) end Benchmark.realtime_block(:rollup_perfs) do - new_perf = Metric::Rollup.send("rollup_#{interval_name}", self, time, interval_name, time_profile, new_perf, perf.attributes.symbolize_keys) + new_perf = Metric::Rollup.send(:"rollup_#{interval_name}", self, time, interval_name, time_profile, new_perf, perf.attributes.symbolize_keys) end Benchmark.realtime_block(:db_update_perf) { perf.update(new_perf) } @@ -119,8 +121,10 @@ def perf_rollup_range(start_time, end_time, interval_name, time_profile = nil) Metric::Helper.hours_from_range(start_time, end_time) when 'daily' raise ArgumentError, _("time_profile must be passed if interval name is 'daily'") if time_profile.nil? 
+ time_profile = TimeProfile.extract_objects(time_profile) return if time_profile.nil? || !time_profile.rollup_daily_metrics + Metric::Helper.days_from_range(start_time, end_time, time_profile.tz_or_default) end diff --git a/app/models/metric/ci_mixin/state_finders.rb b/app/models/metric/ci_mixin/state_finders.rb index 0f59ff9068a..1f0aa4ba4e8 100644 --- a/app/models/metric/ci_mixin/state_finders.rb +++ b/app/models/metric/ci_mixin/state_finders.rb @@ -14,7 +14,7 @@ module Metric::CiMixin::StateFinders def vim_performance_state_for_ts(ts) ts = Time.parse(ts).utc if ts.kind_of?(String) ts_iso = ts.utc.iso8601 - return nil unless self.respond_to?(:vim_performance_states) + return nil unless respond_to?(:vim_performance_states) @states_by_ts ||= {} state = @states_by_ts[ts_iso] diff --git a/app/models/metric/ci_mixin/targets.rb b/app/models/metric/ci_mixin/targets.rb index 1591402c7c7..c697ccd587d 100644 --- a/app/models/metric/ci_mixin/targets.rb +++ b/app/models/metric/ci_mixin/targets.rb @@ -9,16 +9,16 @@ def perf_capture_always? when Service then true # going to treat an availability_zone like a host wrt perf_capture settings when Host, EmsCluster, AvailabilityZone, HostAggregate then Metric::Targets.perf_capture_always[:host_and_cluster] - when Storage then Metric::Targets.perf_capture_always[:storage] - else; false + when Storage then Metric::Targets.perf_capture_always[:storage] + else; false end end - alias_method :perf_capture_always, :perf_capture_always? + alias perf_capture_always perf_capture_always? def perf_capture_enabled? - @perf_capture_enabled ||= (perf_capture_always? || self.is_tagged_with?("capture_enabled", :ns => "/performance")) + @perf_capture_enabled ||= (perf_capture_always? || is_tagged_with?("capture_enabled", :ns => "/performance")) end - alias_method :perf_capture_enabled, :perf_capture_enabled? + alias perf_capture_enabled perf_capture_enabled? Vmdb::Deprecation.deprecate_methods(self, :perf_capture_enabled => :perf_capture_enabled?) # TODO: Should enabling a Host also enable the cluster? diff --git a/app/models/metric/common.rb b/app/models/metric/common.rb index e2096956f09..95902d623be 100644 --- a/app/models/metric/common.rb +++ b/app/models/metric/common.rb @@ -47,6 +47,7 @@ module Metric::Common def v_derived_storage_used return nil if derived_storage_total.nil? || derived_storage_free.nil? + derived_storage_total - derived_storage_free end @@ -91,6 +92,7 @@ def v_calc_pct_of_cpu_time(vcol) # We need to divide by the number of running VMs since the is an aggregation of the millisend values of all the child VMs unless resource_type == 'VmOrTemplate' return 0 if derived_vm_count_on.nil? || derived_vm_count_on == 0 + raw_val = (raw_val / derived_vm_count_on) end @@ -119,16 +121,19 @@ def v_derived_host_count def v_derived_cpu_reserved_pct return nil if derived_cpu_reserved.nil? || derived_cpu_available.nil? || derived_cpu_available == 0 + (derived_cpu_reserved / derived_cpu_available * 100) end def v_derived_memory_reserved_pct return nil if derived_memory_reserved.nil? || derived_memory_available.nil? || derived_memory_available == 0 + (derived_memory_reserved / derived_memory_available * 100) end def v_derived_cpu_total_cores_used return nil if cpu_usage_rate_average.nil? || derived_vm_numvcpus.nil? 
|| derived_vm_numvcpus == 0 + (cpu_usage_rate_average * derived_vm_numvcpus) / 100.0 end @@ -146,12 +151,12 @@ def apply_time_profile(profile) end def nil_out_values_for_apply_time_profile - (Metric::Rollup::ROLLUP_COLS + ["assoc_ids", "min_max"]).each { |c| send("#{c}=", nil) } + (Metric::Rollup::ROLLUP_COLS + ["assoc_ids", "min_max"]).each { |c| send(:"#{c}=", nil) } end class_methods do def for_tag_names(args) - where(args.map { |t| "tag_names like ?" }.join(" OR "), *(args.map { |t| "%" + t.join("/") + "%" })) + where(args.map { |_t| "tag_names like ?" }.join(" OR "), *(args.map { |t| "%" + t.join("/") + "%" })) end def for_time_range(start_time, end_time) diff --git a/app/models/metric/finders.rb b/app/models/metric/finders.rb index 4359dba9cea..7bf30b42f75 100644 --- a/app/models/metric/finders.rb +++ b/app/models/metric/finders.rb @@ -24,13 +24,14 @@ def self.hash_by_capture_interval_name_and_timestamp(resource, start_time, end_t def self.find_all_by_range(resource, start_time, end_time, interval_name) return [] if resource.blank? + klass, meth = Metric::Helper.class_and_association_for_interval_name(interval_name) - if !resource.kind_of?(Array) && !resource.kind_of?(ActiveRecord::Relation) - scope = resource.send(meth) - else - scope = klass.where(:resource => resource) - end + scope = if !resource.kind_of?(Array) && !resource.kind_of?(ActiveRecord::Relation) + resource.send(meth) + else + klass.where(:resource => resource) + end scope = scope.where(:capture_interval_name => interval_name) if interval_name != "realtime" scope.for_time_range(start_time, end_time) end diff --git a/app/models/metric/helper.rb b/app/models/metric/helper.rb index b47a3bc82e0..d46bb4572d9 100644 --- a/app/models/metric/helper.rb +++ b/app/models/metric/helper.rb @@ -19,10 +19,12 @@ def self.nearest_realtime_timestamp(ts) return ts if ['00', '20', '40'].include?(sec) sec = sec.to_i - case - when sec < 20 then ts[17, 2] = '20' - when sec < 40 then ts[17, 2] = '40' - else ts = (Time.parse(ts) + (60 - sec)).iso8601 + if sec < 20 + ts[17, 2] = '20' + elsif sec < 40 + ts[17, 2] = '40' + else + ts = (Time.parse(ts) + (60 - sec)).iso8601 end ts end @@ -90,6 +92,7 @@ def self.days_from_range(start_time, end_time = nil, tz = nil) # @return Range def self.time_range_from_hash(range) return range unless range.kind_of?(Hash) + end_time = (range[:end_date] || Time.now.utc).utc days = range[:days] || 20 start_time = (range[:start_date] || (end_time - days.days)).utc @@ -151,6 +154,7 @@ def self.max_count(counts) def self.get_time_zone(options = nil) return TimeProfile::DEFAULT_TZ if options.nil? return options[:time_profile].tz if options[:time_profile] && options[:time_profile].tz + options[:tz] || TimeProfile::DEFAULT_TZ end diff --git a/app/models/metric/long_term_averages.rb b/app/models/metric/long_term_averages.rb index 84d534f41ab..01152ab4622 100644 --- a/app/models/metric/long_term_averages.rb +++ b/app/models/metric/long_term_averages.rb @@ -17,14 +17,14 @@ module Metric::LongTermAverages :column => col, :type => type } - unless AVG_COLS_TO_OVERHEAD_TYPE[col].nil? - AVG_METHODS_WITHOUT_OVERHEAD_INFO[:"#{meth}_without_overhead"] = { - :column => col, - :type => type, - :base_meth => meth, - :overhead_type => AVG_COLS_TO_OVERHEAD_TYPE[col] - } - end + next if AVG_COLS_TO_OVERHEAD_TYPE[col].nil? 
+ + AVG_METHODS_WITHOUT_OVERHEAD_INFO[:"#{meth}_without_overhead"] = { + :column => col, + :type => type, + :base_meth => meth, + :overhead_type => AVG_COLS_TO_OVERHEAD_TYPE[col] + } end AVG_METHODS = AVG_METHODS_INFO.keys @@ -54,7 +54,7 @@ def self.get_averages_over_time_period(obj, options = {}) results[:avg][c] ||= 0 counts[c] ||= 0 - val = p.send(c) || 0 + val = p.send(c) || 0 vals[c] << val val *= 1.0 unless val.nil? Metric::Aggregation::Aggregate.average(c, self, results[:avg], counts, val) diff --git a/app/models/metric/processing.rb b/app/models/metric/processing.rb index 6a3057e7c15..cf895e89e76 100644 --- a/app/models/metric/processing.rb +++ b/app/models/metric/processing.rb @@ -57,14 +57,15 @@ def self.process_derived_columns(obj, attrs, ts = nil) DERIVED_COLS.each do |col| _dummy, group, typ, mode = col.to_s.split("_") next if group == "vm" && obj.kind_of?(Service) && typ != "count" + case typ when "available" # Do not derive "available" values if there haven't been any usage # values collected if group == "cpu" result[col] = total_cpu if have_cpu_metrics && total_cpu > 0 - else - result[col] = total_mem if have_mem_metrics && total_mem > 0 + elsif have_mem_metrics && total_mem > 0 + result[col] = total_mem end when "allocated" method = col.to_s.split("_")[1..-1].join("_") @@ -91,11 +92,11 @@ def self.process_derived_columns(obj, attrs, ts = nil) result[col] = state.send(method) if state.respond_to?(method) end when "rate" - if col.to_s == "cpu_usagemhz_rate_average" && attrs[:cpu_usagemhz_rate_average].blank? + if col.to_s == "cpu_usagemhz_rate_average" && attrs[:cpu_usagemhz_rate_average].blank? && !(total_cpu == 0 || attrs[:cpu_usage_rate_average].nil?) # TODO(lsmola) for some reason, this column is used in chart, although from processing code above, it should # be named derived_cpu_used. Investigate what is the right solution and make it right. For now lets fill # the column shown in charts. - result[col] = (attrs[:cpu_usage_rate_average] / 100 * total_cpu) unless total_cpu == 0 || attrs[:cpu_usage_rate_average].nil? + result[col] = (attrs[:cpu_usage_rate_average] / 100 * total_cpu) end when "reserved" method = group == "cpu" ? :reserve_cpu : :reserve_mem @@ -155,12 +156,14 @@ def self.create_new_metric(klass, last_perf, perf, interval) new_perf = klass.new(attrs) Metric::Rollup::ROLLUP_COLS.each do |c| next if new_perf.send(c).nil? || perf.send(c).nil? + new_perf.send(c.to_s + "=", (new_perf.send(c) + perf.send(c)) / 2) end unless perf.assoc_ids.nil? Metric::Rollup::ASSOC_KEYS.each do |assoc| next if new_perf.assoc_ids.nil? || new_perf.assoc_ids[assoc].blank? || perf.assoc_ids[assoc].blank? + new_perf.assoc_ids[assoc][:on] ||= [] new_perf.assoc_ids[assoc][:off] ||= [] new_perf.assoc_ids[assoc][:on] = (new_perf.assoc_ids[assoc][:on] + perf.assoc_ids[assoc][:on]).uniq! diff --git a/app/models/metric/purging.rb b/app/models/metric/purging.rb index 91c3327963e..581e5150350 100644 --- a/app/models/metric/purging.rb +++ b/app/models/metric/purging.rb @@ -133,6 +133,7 @@ def self.purge_in_batches(scope, window, total = 0, total_limit = nil) query.pluck(:id) end break if batch_ids.empty? + current_window = batch_ids.size else batch_ids = query @@ -143,6 +144,7 @@ def self.purge_in_batches(scope, window, total = 0, total_limit = nil) scope.unscoped.where(:id => batch_ids).delete_all end break if count == 0 + total += count yield(count, total) if block_given? 
diff --git a/app/models/metric/rollup.rb b/app/models/metric/rollup.rb index 51ac792d0a5..aa84f3f3adb 100644 --- a/app/models/metric/rollup.rb +++ b/app/models/metric/rollup.rb @@ -10,12 +10,12 @@ module Metric::Rollup VM_ROLLUP_COLS = [:cpu_usage_rate_average, :derived_memory_used, :disk_usage_rate_average, :net_usage_rate_average].freeze AGGREGATE_COLS = { - :MiqEnterprise_miq_regions => ROLLUP_COLS, - :MiqRegion_ext_management_systems => NON_STORAGE_ROLLUP_COLS, - :MiqRegion_storages => STORAGE_COLS, - :ExtManagementSystem_hosts => NON_STORAGE_ROLLUP_COLS, - :ExtManagementSystem_container_nodes => NON_STORAGE_ROLLUP_COLS, - :EmsCluster_hosts => [ + :MiqEnterprise_miq_regions => ROLLUP_COLS, + :MiqRegion_ext_management_systems => NON_STORAGE_ROLLUP_COLS, + :MiqRegion_storages => STORAGE_COLS, + :ExtManagementSystem_hosts => NON_STORAGE_ROLLUP_COLS, + :ExtManagementSystem_container_nodes => NON_STORAGE_ROLLUP_COLS, + :EmsCluster_hosts => [ :cpu_ready_delta_summation, :cpu_system_delta_summation, :cpu_usage_rate_average, @@ -32,7 +32,7 @@ module Metric::Rollup :mem_usage_absolute_average, :net_usage_rate_average, ], - :Host_vms => [ + :Host_vms => [ :cpu_ready_delta_summation, :cpu_system_delta_summation, :cpu_used_delta_summation, @@ -194,7 +194,7 @@ def self.rollup_hourly(obj, hour, _interval_name, _time_profile, new_perf, orig_ end class << self - alias_method :rollup_historical, :rollup_hourly + alias rollup_historical rollup_hourly end def self.rollup_daily(obj, day, interval_name, time_profile, new_perf, orig_perf) @@ -226,6 +226,7 @@ def self.rollup_realtime_perfs(obj, rt_perfs, new_perf = {}) end next unless obj.kind_of?(VmOrTemplate) + new_perf[:min_max] ||= {} BURST_COLS.each do |col| value = rt.send(col) @@ -276,6 +277,7 @@ def self.rollup_child_metrics(obj, timestamp, interval_name, assoc) recs.each do |rec| perf = perf_recs.fetch_path(rec.class.base_class.name, rec.id, interval_name, ts) next unless perf + state = rec.vim_performance_state_for_ts(timestamp) agg_cols.each do |c| result[c] ||= 0 @@ -329,13 +331,16 @@ def self.rollup_max(c, result, value) def self.rollup_assoc(c, result, value) return if value.nil? + ASSOC_KEYS.each do |assoc| next if value[assoc].nil? + result[c] ||= {} result[c][assoc] ||= {} [:on, :off].each do |mode| next if value[assoc][mode].nil? + result[c][assoc][mode] ||= [] result[c][assoc][mode].concat(value[assoc][mode]).uniq! end @@ -344,6 +349,7 @@ def self.rollup_assoc(c, result, value) def self.rollup_tags(c, result, value) return if value.blank? 
+ result[c] ||= "" result[c] = result[c].split(TAG_SEP).concat(value.split(TAG_SEP)).uniq.join(TAG_SEP) end diff --git a/app/models/metric_rollup.rb b/app/models/metric_rollup.rb index 7c74bd7bb85..5eb9ff7fce4 100644 --- a/app/models/metric_rollup.rb +++ b/app/models/metric_rollup.rb @@ -6,15 +6,15 @@ class MetricRollup < ApplicationRecord include Metric::Common include Metric::ChargebackHelper - CHARGEBACK_METRIC_FIELDS = %w(derived_vm_numvcpus cpu_usagemhz_rate_average + CHARGEBACK_METRIC_FIELDS = %w[derived_vm_numvcpus cpu_usagemhz_rate_average cpu_usage_rate_average disk_usage_rate_average derived_memory_available derived_memory_used net_usage_rate_average derived_vm_used_disk_storage - derived_vm_allocated_disk_storage).freeze + derived_vm_allocated_disk_storage].freeze - METERING_USED_METRIC_FIELDS = %w(cpu_usagemhz_rate_average derived_memory_used net_usage_rate_average).freeze + METERING_USED_METRIC_FIELDS = %w[cpu_usagemhz_rate_average derived_memory_used net_usage_rate_average].freeze - CAPTURE_INTERVAL_NAMES = %w(hourly daily).freeze + CAPTURE_INTERVAL_NAMES = %w[hourly daily].freeze # # min_max column getters @@ -47,9 +47,9 @@ def extract_from_min_max(col) # This should really be done by subclassing where each subclass can define reservations or # changing the reports to allow for optional reservations. if val.to_i == 0 && col.to_s =~ /(.+)_reserved$/ - return send("#{$1}_available") + send(:"#{$1}_available") else - return val + val end end @@ -63,7 +63,7 @@ def self.latest_rollups(resource_type, resource_ids = nil, capture_interval_name def self.rollups_in_range(resource_type, resource_ids, capture_interval_name, start_date, end_date = nil) capture_interval_name ||= 'hourly' - end_date = end_date.nil? ? Time.zone.today : end_date + end_date = Time.zone.today if end_date.nil? 
metrics = where(:resource_type => resource_type, :capture_interval_name => capture_interval_name, :timestamp => start_date.beginning_of_day...end_date.end_of_day) diff --git a/app/models/miq_action.rb b/app/models/miq_action.rb index 1e769502e3b..188d51d2ea5 100644 --- a/app/models/miq_action.rb +++ b/app/models/miq_action.rb @@ -3,11 +3,11 @@ class MiqAction < ApplicationRecord include UuidMixin before_validation :default_name_to_guid, :on => :create - before_destroy :check_policy_contents_empty_on_destroy before_save :round_if_memory_reconfigured + before_destroy :check_policy_contents_empty_on_destroy silence_warnings do - const_set("TYPES", + const_set(:TYPES, "create_snapshot" => N_("Create a Snapshot"), "email" => N_("Send an E-mail"), "snmp_trap" => N_("Send an SNMP Trap"), @@ -21,14 +21,13 @@ class MiqAction < ApplicationRecord "inherit_parent_tags" => N_("Inherit Parent Tags"), "remove_tags" => N_("Remove Tags"), "delete_snapshots_by_age" => N_("Delete Snapshots by Age"), - "run_ansible_playbook" => N_("Run Ansible Playbook") - ) + "run_ansible_playbook" => N_("Run Ansible Playbook")) end validates :action_type, :presence => true validates :name, :description, :presence => true, :uniqueness_when_changed => true - validates_format_of :name, :with => /\A[a-z0-9_\-]+\z/i, - :allow_nil => true, :message => "must only contain alpha-numeric, underscore and hyphen chatacters without spaces" + validates :name, :format => {:with => /\A[a-z0-9_-]+\z/i, + :allow_nil => true, :message => "must only contain alpha-numeric, underscore and hyphen chatacters without spaces"} acts_as_miq_taggable acts_as_miq_set_member @@ -53,7 +52,7 @@ class MiqAction < ApplicationRecord } SH_PREAMBLE = begin - preamble = "\#!/bin/sh\n" + preamble = "#!/bin/sh\n" RC_HASH.each { |k, v| preamble += "#{v}=#{k}\n" } preamble end @@ -80,11 +79,11 @@ def validate self.options ||= {} self.options[:to] ||= "" [:from, :to].each do |k| - if self.options && self.options[k] - next if k == :from && self.options[k].blank? # allow blank from addres, we use the default. - match = self.options[k] =~ /^\A([\w\.\-\+]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z$/i - errors.add(k, "must be a valid email address") unless match - end + next unless self.options && self.options[k] + next if k == :from && self.options[k].blank? # allow blank from addres, we use the default. + + match = self.options[k] =~ /^\A([\w.\-+]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z$/i + errors.add(k, "must be a valid email address") unless match end when "tag" errors.add("tag", "no tags provided") unless self.options && self.options[:tags] @@ -109,7 +108,7 @@ def self.invoke_actions(apply_policies_to, inputs, succeeded, failed) succeeded.each do |p| actions = case p when MiqPolicy then p.actions_for_event(inputs[:event], :success).uniq - else p.actions_for_event + else p.actions_for_event end actions.each do |a| @@ -129,6 +128,7 @@ def self.invoke_actions(apply_policies_to, inputs, succeeded, failed) failed.each do |p| next unless p.kind_of?(MiqPolicy) # built-in policies are OpenStructs whose actions will be invoked only on success + actions = p.actions_for_event(inputs[:event], :failure).uniq actions.each do |a| @@ -169,16 +169,16 @@ def invoke(rec, inputs) atype = action_type atype = name if atype.nil? 
|| atype == "default" method = "action_" + atype - unless self.respond_to?(method) + unless respond_to?(method) MiqPolicy.logger.info("MIQ(action-invoke) '#{name}', not supported") return end - if inputs[:result] - phrase = "for successful policy" - else - phrase = "for failed policy" - end + phrase = if inputs[:result] + "for successful policy" + else + "for failed policy" + end MiqPolicy.logger.info("MIQ(action-invoke) Invoking action [#{description}] #{phrase} [#{inputs[:policy].description}], event: [#{inputs[:event].description}], entity name: [#{rec.name}], entity type: [#{Dictionary.gettext(rec.class.to_s, :type => :model)}], sequence: [#{inputs[:sequence]}], synchronous? [#{inputs[:synchronous]}]") send(method.to_sym, self, rec, inputs) end @@ -187,7 +187,7 @@ def invoke_action_for_built_in_policy(rec, inputs) atype = action_type atype ||= name method = "action_" + atype - unless self.respond_to?(method) + unless respond_to?(method) MiqPolicy.logger.info("MIQ(action-invoke) '#{name}', not supported") return end @@ -219,10 +219,10 @@ def action_audit(_action, rec, inputs) def action_run_ansible_playbook(action, rec, inputs) service_template = ServiceTemplate.find(action.options[:service_template_id]) - dialog_options = { :hosts => target_hosts(action, rec) } - request_options = { :manageiq_extra_vars => { 'event_target' => rec.href_slug, - 'event_name' => inputs[:event].try(:name) }, - :initiator => 'control' } + dialog_options = {:hosts => target_hosts(action, rec)} + request_options = {:manageiq_extra_vars => {'event_target' => rec.href_slug, + 'event_name' => inputs[:event].try(:name)}, + :initiator => 'control'} service_template.provision_request(target_user(rec), dialog_options, request_options) end @@ -236,53 +236,56 @@ def action_snmp_trap(action, rec, inputs) snmp_inputs = {} snmp_inputs[:host] = action.options[:host] - trap_id_key = (snmp_version == 1) ? :specific_trap : :trap_oid - snmp_inputs[trap_id_key] = action.options[:trap_id] + trap_id_key = snmp_version == 1 ? :specific_trap : :trap_oid + snmp_inputs[trap_id_key] = action.options[:trap_id] vars = [] - action.options[:variables].each do |h| - value = h[:value] - - value = value.gsub(RE_SUBST) do |_s| - # s is ${anything_in_between} - # $1 is anything_in_between - subst = "" - what, method = $1.strip.split(".") - - what = what.strip.downcase unless what.nil? - method = method.strip.downcase unless method.nil? - # ${Cause.Description} - if what == "cause" - if method == "description" - subst = "Policy: #{inputs[:policy].description}" if inputs[:policy].kind_of?(MiqPolicy) - subst = "Alert: #{inputs[:policy].description}" if inputs[:policy].kind_of?(MiqAlert) + unless action.options[:variables].nil? + action.options[:variables].each do |h| + value = h[:value] + + unless value.nil? + value = value.gsub(RE_SUBST) do |_s| + # s is ${anything_in_between} + # $1 is anything_in_between + subst = "" + what, method = $1.strip.split(".") + + what = what.strip.downcase unless what.nil? + method = method.strip.downcase unless method.nil? 
+ # ${Cause.Description} + if what == "cause" && (method == "description") + subst = "Policy: #{inputs[:policy].description}" if inputs[:policy].kind_of?(MiqPolicy) + subst = "Alert: #{inputs[:policy].description}" if inputs[:policy].kind_of?(MiqAlert) + end + + # ${Object.method} + if what == "object" + if method == "type" + subst = rec.class.to_s + elsif method == "ems" && rec.respond_to?(:ext_management_system) + ems = rec.ext_management_system + subst = "vCenter #{ems.hostname}/#{ems.ipaddress}" unless ems.nil? + elsif rec.respond_to?(method) + subst = rec.send(method) + end + end + + subst end end - # ${Object.method} - if what == "object" - if method == "type" - subst = rec.class.to_s - elsif method == "ems" && rec.respond_to?(:ext_management_system) - ems = rec.ext_management_system - subst = "vCenter #{ems.hostname}/#{ems.ipaddress}" unless ems.nil? - elsif rec.respond_to?(method) - subst = rec.send(method) - end - end - - subst - end unless value.nil? - - h[:value] = value - vars << h - end unless action.options[:variables].nil? + h[:value] = value + vars << h + end + end snmp_inputs[:object_list] = vars invoke_or_queue( inputs[:synchronous], __method__, "notifier", nil, MiqSnmp, method_name, [snmp_inputs], - "SNMP Trap [#{rec[:name]}]") + "SNMP Trap [#{rec[:name]}]" + ) end def action_email(action, rec, inputs) @@ -439,6 +442,7 @@ def action_remove_tags(_action, rec, _inputs) def self.inheritable_cats Classification.in_my_region.categories.inject([]) do |arr, c| next(arr) if c.name.starts_with?("folder_path_") || c.entries.empty? + arr << c end end @@ -457,7 +461,7 @@ def run_script(rec) Tempfile.open('miq_action', SCRIPT_DIR) do |fd| fd.puts ruby_file ? RB_PREAMBLE : SH_PREAMBLE fd.puts File.read(filename) - fd.chmod(0755) + fd.chmod(0o755) MiqPolicy.logger.info("MIQ(action_script): Executing: [#{filename}]") @@ -479,7 +483,7 @@ def run_script(rec) info_msg = "MIQ(action_script): Result: #{command_result.output}, rc: #{rc_verbose}" fail_msg = _("Action script exited with rc=%{rc_value}, error=%{error_text}") % - {:rc_value => rc_verbose, :error_text => command_result.error} + {:rc_value => rc_verbose, :error_text => command_result.error} case rc when 0 # Success @@ -529,7 +533,8 @@ def action_script(action, rec, inputs) invoke_or_queue( inputs[:synchronous], action_method, vm_method == "scan" ? "smartstate" : "ems_operations", rec.my_zone, - rec, vm_method, [], "[#{action.description}] of VM [#{rec.name}]") + rec, vm_method, [], "[#{action.description}] of VM [#{rec.name}]" + ) end end @@ -577,7 +582,8 @@ def action_vm_clone(action, rec, inputs) action.options[:datastore], action.options[:powerOn], action.options[:template], action.options[:transform], action.options[:config], action.options[:customization], action.options[:disk] ], - "[#{action.description}] of VM [#{rec.name}]") + "[#{action.description}] of VM [#{rec.name}]" + ) end # Legacy: Replaces by action_vm_analyze @@ -594,8 +600,9 @@ def action_vm_retire(action, rec, inputs) target = inputs[:synchronous] ? 
VmOrTemplate : rec.class invoke_or_queue( inputs[:synchronous], __method__, "ems_operations", rec.my_zone, target, 'retire', - [[rec], :date => Time.zone.now - 1.day], - "VM Retire for VM [#{rec.name}]") + [[rec], {:date => 1.day.ago}], + "VM Retire for VM [#{rec.name}]" + ) end def action_create_snapshot(action, rec, inputs) @@ -645,7 +652,7 @@ def action_delete_most_recent_snapshot(action, rec, _inputs) end log_prefix += " VM: [#{rec.name}] Id: [#{rec.id}]" - snap = nil + snap = nil rec.snapshots.order("create_time DESC").each do |s| next if s.is_a_type?(:evm_snapshot) @@ -710,19 +717,19 @@ def action_ems_refresh(action, rec, inputs) def action_container_image_analyze(action, rec, inputs) unless rec.kind_of?(ContainerImage) - MiqPolicy.logger.error("MIQ(#{__method__}): Unable to perform action [#{action.description}],"\ - " object [#{rec.inspect}] is not a Container Image") + MiqPolicy.logger.error("MIQ(#{__method__}): Unable to perform action [#{action.description}], " \ + "object [#{rec.inspect}] is not a Container Image") return end if inputs[:event].name == "request_containerimage_scan" - MiqPolicy.logger.warn("MIQ(#{__method__}): Invoking action [#{action.description}] for event"\ - " [#{inputs[:event].description}] would cause infinite loop, skipping") + MiqPolicy.logger.warn("MIQ(#{__method__}): Invoking action [#{action.description}] for event " \ + "[#{inputs[:event].description}] would cause infinite loop, skipping") return end - MiqPolicy.logger.info("MIQ(#{__method__}): Now executing [#{action.description}] of Container Image "\ - "[#{rec.name}]") + MiqPolicy.logger.info("MIQ(#{__method__}): Now executing [#{action.description}] of Container Image " \ + "[#{rec.name}]") rec.scan end @@ -740,11 +747,11 @@ def action_container_image_annotate_scan_results(action, rec, inputs) end if inputs[:synchronous] - MiqPolicy.logger.info("MIQ(#{__method__}): Now executing [#{action.description}] for event "\ + MiqPolicy.logger.info("MIQ(#{__method__}): Now executing [#{action.description}] for event " \ "[#{inputs[:event].description}]") rec.annotate_scan_policy_results(inputs[:policy].name, inputs[:result]) else - MiqPolicy.logger.info("MIQ(#{__method__}): Queueing [#{action.description}] for event "\ + MiqPolicy.logger.info("MIQ(#{__method__}): Queueing [#{action.description}] for event " \ "[#{inputs[:event].description}]") MiqQueue.submit_job( :service => "ems_operations", @@ -753,7 +760,7 @@ def action_container_image_annotate_scan_results(action, rec, inputs) :method_name => :annotate_scan_policy_results, :args => [inputs[:policy].name, inputs[:result]], :instance_id => rec.id, - :priority => MiqQueue::HIGH_PRIORITY, + :priority => MiqQueue::HIGH_PRIORITY ) end end @@ -781,7 +788,7 @@ def action_host_analyze(action, rec, inputs) :class_name => "Host", :method_name => "scan_from_queue", :instance_id => rec.id, - :priority => MiqQueue::HIGH_PRIORITY, + :priority => MiqQueue::HIGH_PRIORITY ) end end @@ -816,7 +823,7 @@ def action_custom_automation(action, rec, inputs) user = rec.tenant_identity unless user raise _("A user is needed to raise an action to automate. 
[%{name}] id:[%{id}] action: [%{description}]") % - {:name => rec.class.name, :id => rec.id, :description => action.description} + {:name => rec.class.name, :id => rec.id, :description => action.description} end args = { @@ -868,15 +875,14 @@ def action_evaluate_alerts(action, rec, inputs) end end - def action_assign_scan_profile(action, _rec, _inputs) - ScanItem # Cause the ScanItemSet class to load, if not already loaded + def action_assign_scan_profile(action, _rec, _inputs) # Cause the ScanItemSet class to load, if not already loaded profile = ScanItemSet.find_by(:name => action.options[:scan_item_set_name]) if profile MiqPolicy.logger.info("MIQ(action_assign_scan_profile): Action [#{action.description}], using analysis profile: [#{profile.description}]") - return ScanItem.get_profile(profile.name) + ScanItem.get_profile(profile.name) else MiqPolicy.logger.warn("MIQ(action_assign_scan_profile): Unable to perform action [#{action.description}], unable to find analysis profile: [#{action.options[:scan_item_set_name]}]") - return + nil end end diff --git a/app/models/miq_ae_class.rb b/app/models/miq_ae_class.rb index 259cc55a59d..105fa595fde 100644 --- a/app/models/miq_ae_class.rb +++ b/app/models/miq_ae_class.rb @@ -7,9 +7,9 @@ class MiqAeClass < ApplicationRecord belongs_to :domain, :class_name => "MiqAeDomain", :inverse_of => false has_many :ae_fields, -> { order(:priority) }, :class_name => "MiqAeField", :foreign_key => :class_id, :dependent => :destroy, :autosave => true, :inverse_of => :ae_class - has_many :ae_instances, -> { preload(:ae_values) }, :class_name => "MiqAeInstance", :foreign_key => :class_id, + has_many :ae_instances, -> { preload(:ae_values) }, :class_name => "MiqAeInstance", :foreign_key => :class_id, :dependent => :destroy, :inverse_of => :ae_class - has_many :ae_methods, :class_name => "MiqAeMethod", :foreign_key => :class_id, + has_many :ae_methods, :class_name => "MiqAeMethod", :foreign_key => :class_id, :dependent => :destroy, :inverse_of => :ae_class validates :namespace_id, :domain_id, :presence => true @@ -75,25 +75,26 @@ def to_export_xml(options = {}) self.class.column_names.each do |cname| # Remove any columns that we do not want to export - next if %w(id created_on updated_on updated_by).include?(cname) || cname.ends_with?("_id") + next if %w[id created_on updated_on updated_by].include?(cname) || cname.ends_with?("_id") # Skip any columns that we process explicitly - next if %w(name namespace).include?(cname) + next if %w[name namespace].include?(cname) # Process the column - xml_attrs[cname.to_sym] = send(cname) unless send(cname).blank? + xml_attrs[cname.to_sym] = send(cname) if send(cname).present? end xml.MiqAeClass(xml_attrs) do ae_methods.sort_by(&:fqname).each { |m| m.to_export_xml(:builder => xml) } - xml.MiqAeSchema do - ae_fields.sort_by(&:priority).each { |f| f.to_export_xml(:builder => xml) } - end unless ae_fields.empty? + unless ae_fields.empty? + xml.MiqAeSchema do + ae_fields.sort_by(&:priority).each { |f| f.to_export_xml(:builder => xml) } + end + end ae_instances.sort_by(&:fqname).each { |i| i.to_export_xml(:builder => xml) } end end - # my class's fqname is /domain/namespace1/namespace2/class def namespace return nil if ae_namespace.nil? @@ -103,6 +104,7 @@ def namespace def namespace=(ns) raise ArgumentError, "ns cannot be blank" if ns.blank? 
+ self.ae_namespace = MiqAeNamespace.find_or_create_by_fqname(ns) end @@ -124,16 +126,20 @@ def self.get_homonymic_across_domains(user, fqname, enabled = nil) def self.find_homonymic_instances_across_domains(user, fqname) return [] if fqname.blank? + path = MiqAeEngine::MiqAeUri.path(fqname, "miqaedb") ns, klass, inst = MiqAeEngine::MiqAePath.split(path) return [] if ns.blank? || klass.blank? || inst.blank? + get_same_instance_from_classes(get_sorted_homonym_class_across_domains(user, ns, klass), inst) end def self.find_distinct_instances_across_domains(user, fqname) return [] if fqname.blank? + ns, klass = fqname.starts_with?('/') ? parse_fqname(fqname[1..-1]) : parse_fqname(fqname) return [] if ns.blank? || klass.blank? + get_unique_instances_from_classes(get_sorted_homonym_class_across_domains(user, ns, klass)) end @@ -146,6 +152,7 @@ def field_names def field_hash(name) field = ae_fields.detect { |f| f.name.casecmp(name) == 0 } raise "field #{name} not found in class #{@name}" if field.nil? + field.attributes end @@ -153,14 +160,12 @@ def self.copy(options) if options[:new_name] MiqAeClassCopy.new(options[:fqname]).as(options[:new_name], options[:namespace], - options[:overwrite_location] - ) + options[:overwrite_location]) else MiqAeClassCopy.copy_multiple(options[:ids], options[:domain], options[:namespace], - options[:overwrite_location] - ) + options[:overwrite_location]) end end @@ -193,6 +198,7 @@ def set_children_relative_path def self.sub_namespaces(ns_obj, ids) loop do break if ns_obj.nil? || ids.include?("#{ns_obj.class.name}::#{ns_obj.id}") + ids << "#{ns_obj.class.name}::#{ns_obj.id}" ns_obj = ns_obj.parent end @@ -211,6 +217,7 @@ def self.get_sorted_homonym_class_across_domains(user, ns = nil, klass) fq_ns = domain + "/" + partial_ns ae_ns = MiqAeNamespace.lookup_by_fqname(fq_ns) next if ae_ns.nil? + ae_ns.ae_classes.select { |c| File.fnmatch(klass, c.name, File::FNM_CASEFOLD) } end.compact.flatten if class_array.empty? && ns_obj @@ -234,8 +241,10 @@ def self.get_unique_instances_from_classes(klass_array) klass_array.collect do |klass| cls = find_by(:id => klass.id) next if cls.nil? + cls.ae_instances.sort_by(&:fqname).collect do |inst| next if name_set.include?(inst.name) + name_set << inst.name inst end.compact.flatten @@ -248,6 +257,7 @@ def self.get_same_instance_from_classes(klass_array, instance) klass_array.collect do |klass| cls = find_by(:id => klass.id) next if cls.nil? 
+ cls.ae_instances.select { |a| File.fnmatch(instance, a.name, File::FNM_CASEFOLD) } end.compact.flatten end diff --git a/app/models/miq_ae_field.rb b/app/models/miq_ae_field.rb index 49ef913939c..c29675fbc2a 100644 --- a/app/models/miq_ae_field.rb +++ b/app/models/miq_ae_field.rb @@ -11,17 +11,17 @@ class MiqAeField < ApplicationRecord :presence => true, :format => {:with => /\A[\w]+\z/i, :message => N_("may contain only alphanumeric and _ characters")} - validates_inclusion_of :substitute, :in => [true, false] + validates :substitute, :inclusion => {:in => [true, false]} NULL_COALESCING_DATATYPE = "null coalescing".freeze - AVAILABLE_SCOPES = ["class", "instance", "local"] - validates_inclusion_of :scope, :in => AVAILABLE_SCOPES, :allow_nil => true # nil => instance - AVAILABLE_AETYPES = ["assertion", "attribute", "method", "relationship", "state"] - validates_inclusion_of :aetype, :in => AVAILABLE_AETYPES, :allow_nil => true # nil => attribute + AVAILABLE_SCOPES = ["class", "instance", "local"] + validates :scope, :inclusion => {:in => AVAILABLE_SCOPES, :allow_nil => true} # nil => instance + AVAILABLE_AETYPES = ["assertion", "attribute", "method", "relationship", "state"] + validates :aetype, :inclusion => {:in => AVAILABLE_AETYPES, :allow_nil => true} # nil => attribute AVAILABLE_DATATYPES_FOR_UI = ["string", "symbol", "integer", "float", "boolean", "time", "array", "password", NULL_COALESCING_DATATYPE].freeze AVAILABLE_DATATYPES = AVAILABLE_DATATYPES_FOR_UI + - %w(host + %w[host vm storage ems @@ -29,10 +29,10 @@ class MiqAeField < ApplicationRecord server request provision - user) - validates_inclusion_of :datatype, :in => AVAILABLE_DATATYPES, :allow_nil => true # nil => string + user] + validates :datatype, :inclusion => {:in => AVAILABLE_DATATYPES, :allow_nil => true} # nil => string - before_save :set_message_and_default_value + before_save :set_message_and_default_value DEFAULTS = {:substitute => true, :datatype => "string", :aetype => "attribute", :scope => "instance", :message => "create"} @@ -44,7 +44,7 @@ def self.available_datatypes AVAILABLE_DATATYPES end - class < field.id) - val.send("#{attribute}=", value) + val.send(:"#{attribute}=", value) val.save! end @@ -93,13 +93,13 @@ def to_export_xml(options = {}) self.class.column_names.each do |cname| # Remove any columns that we do not want to export - next if %w(id created_on updated_on updated_by).include?(cname) || cname.ends_with?("_id") + next if %w[id created_on updated_on updated_by].include?(cname) || cname.ends_with?("_id") # Skip any columns that we process explicitly - next if %w(name).include?(cname) + next if %w[name].include?(cname) # Process the column - xml_attrs[cname.to_sym] = send(cname) unless send(cname).blank? + xml_attrs[cname.to_sym] = send(cname) if send(cname).present? end xml.MiqAeInstance(xml_attrs) do @@ -123,8 +123,10 @@ def field_names def field_value_hash(name) field = ae_class.ae_fields.detect { |f| f.name.casecmp(name) == 0 } raise "Field #{name} not found in class #{ae_class.fqname}" if field.nil? + value = ae_values.detect { |v| v.field_id == field.id } raise "Field #{name} not found in instance #{self.name} in class #{ae_class.fqname}" if value.nil? 
+ value.attributes end @@ -132,14 +134,12 @@ def self.copy(options) if options[:new_name] MiqAeInstanceCopy.new(options[:fqname]).as(options[:new_name], options[:namespace], - options[:overwrite_location] - ) + options[:overwrite_location]) else MiqAeInstanceCopy.copy_multiple(options[:ids], options[:domain], options[:namespace], - options[:overwrite_location] - ) + options[:overwrite_location]) end end diff --git a/app/models/miq_ae_method.rb b/app/models/miq_ae_method.rb index c57bf52f9ac..e1c6745e12e 100644 --- a/app/models/miq_ae_method.rb +++ b/app/models/miq_ae_method.rb @@ -23,12 +23,12 @@ class MiqAeMethod < ApplicationRecord :format => {:with => /\A[\w]+\z/i, :message => N_("may contain only alphanumeric and _ characters")} - AVAILABLE_LANGUAGES = ["ruby", "perl"] # someday, add sh, perl, python, tcl and any other scripting language - validates_inclusion_of :language, :in => AVAILABLE_LANGUAGES - AVAILABLE_LOCATIONS = %w(builtin inline expression playbook ansible_job_template ansible_workflow_template).freeze - validates_inclusion_of :location, :in => AVAILABLE_LOCATIONS - AVAILABLE_SCOPES = ["class", "instance"] - validates_inclusion_of :scope, :in => AVAILABLE_SCOPES + AVAILABLE_LANGUAGES = ["ruby", "perl"] # someday, add sh, perl, python, tcl and any other scripting language + validates :language, :inclusion => {:in => AVAILABLE_LANGUAGES} + AVAILABLE_LOCATIONS = %w[builtin inline expression playbook ansible_job_template ansible_workflow_template].freeze + validates :location, :inclusion => {:in => AVAILABLE_LOCATIONS} + AVAILABLE_SCOPES = ["class", "instance"] + validates :scope, :inclusion => {:in => AVAILABLE_SCOPES} def self.available_languages AVAILABLE_LANGUAGES @@ -50,6 +50,7 @@ def self.available_expression_objects def self.validate_syntax(code_text) result = ManageIQ::AutomationEngine::SyntaxChecker.check(code_text) return nil if result.valid? + [[result.error_line, result.error_text]] # Array of arrays for future multi-line support end @@ -65,10 +66,10 @@ def data_for_expression end def self.default_method_text - <<-DEFAULT_METHOD_TEXT -# -# Description: -# + <<~DEFAULT_METHOD_TEXT + # + # Description: + # DEFAULT_METHOD_TEXT end @@ -90,13 +91,13 @@ def to_export_xml(options = {}) self.class.column_names.each do |cname| # Remove any columns that we do not want to export - next if %w(id created_on updated_on updated_by).include?(cname) || cname.ends_with?("_id") + next if %w[id created_on updated_on updated_by].include?(cname) || cname.ends_with?("_id") # Skip any columns that we process explicitly - next if %w(name language scope location data).include?(cname) + next if %w[name language scope location data].include?(cname) # Process the column - xml_attrs[cname.to_sym] = send(cname) unless send(cname).blank? + xml_attrs[cname.to_sym] = send(cname) if send(cname).present? end xml.MiqAeMethod(xml_attrs) do @@ -115,6 +116,7 @@ def field_names def field_value_hash(name) field = inputs.detect { |f| f.name.casecmp(name) == 0 } raise "Field #{name} not found in method #{self.name}" if field.nil? 
+ field.attributes end @@ -122,14 +124,12 @@ def self.copy(options) if options[:new_name] MiqAeMethodCopy.new(options[:fqname]).as(options[:new_name], options[:namespace], - options[:overwrite_location] - ) + options[:overwrite_location]) else MiqAeMethodCopy.copy_multiple(options[:ids], options[:domain], options[:namespace], - options[:overwrite_location] - ) + options[:overwrite_location]) end end diff --git a/app/models/miq_ae_namespace.rb b/app/models/miq_ae_namespace.rb index 726cba19291..a36940d786d 100644 --- a/app/models/miq_ae_namespace.rb +++ b/app/models/miq_ae_namespace.rb @@ -16,7 +16,7 @@ class MiqAeNamespace < ApplicationRecord :foreign_key => :namespace_id, :dependent => :destroy, :inverse_of => :ae_namespace validates :name, - :format => {:with => /\A[\w\.\-\$]+\z/i, :message => N_("may contain only alphanumeric and _ . - $ characters")}, + :format => {:with => /\A[\w.\-$]+\z/i, :message => N_("may contain only alphanumeric and _ . - $ characters")}, :presence => true, :uniqueness_when_changed => {:scope => :ancestry, :case_sensitive => false} @@ -59,6 +59,7 @@ def self.find_or_create_by_fqname(fqname, include_classes = true) found = lookup_by_fqname(parts.join('/'), include_classes) break unless found.nil? + new_parts.unshift(parts.pop) end @@ -72,9 +73,8 @@ def self.find_or_create_by_fqname(fqname, include_classes = true) # TODO: broken since 2017 def self.find_tree(find_options = {}) namespaces = where(find_options) - ns_lookup = namespaces.inject({}) do |h, ns| - h[ns.id] = ns - h + ns_lookup = namespaces.index_by do |ns| + ns.id end roots = [] @@ -100,11 +100,13 @@ def editable?(user = User.current_user) raise ArgumentError, "User not provided to editable?" unless user return false if domain? && user.current_tenant.id != tenant_id return source == MiqAeDomain::USER_SOURCE if domain? + ancestors.all? { |a| a.editable?(user) } end def ns_fqname return nil if fqname == domain_name + fqname.sub(domain_name.to_s, '') end diff --git a/app/models/miq_ae_value.rb b/app/models/miq_ae_value.rb index 1f589f000e0..5f3bc2f706d 100644 --- a/app/models/miq_ae_value.rb +++ b/app/models/miq_ae_value.rb @@ -11,13 +11,13 @@ def to_export_xml(options = {}) self.class.column_names.each do |cname| # Remove any columns that we do not want to export - next if %w(id created_on updated_on updated_by).include?(cname) || cname.ends_with?("_id") + next if %w[id created_on updated_on updated_by].include?(cname) || cname.ends_with?("_id") # Skip any columns that we process explicitly - next if %w(name value).include?(cname) + next if %w[name value].include?(cname) # Process the column - xml_attrs[cname.to_sym] = send(cname) unless send(cname).blank? + xml_attrs[cname.to_sym] = send(cname) if send(cname).present? end xml.MiqAeField(xml_attrs) do @@ -31,7 +31,7 @@ def to_export_yaml end def value=(value) - write_attribute(:value, (ae_field.datatype == "password") ? MiqAePassword.encrypt(value) : value) + write_attribute(:value, ae_field.datatype == "password" ? MiqAePassword.encrypt(value) : value) end def self.display_name(number = 1) diff --git a/app/models/miq_ae_workspace.rb b/app/models/miq_ae_workspace.rb index 90201694fa5..5b8bd921315 100644 --- a/app/models/miq_ae_workspace.rb +++ b/app/models/miq_ae_workspace.rb @@ -19,7 +19,7 @@ def evmset(uri, value) if workspace.varset(uri, value) self.setters ||= [] self.setters << [uri, value] - self.save! + save! 
end end @@ -30,6 +30,7 @@ def self.display_name(number = 1) def self.workspace_from_token(token) ws = MiqAeWorkspace.find_by(:guid => token) raise MiqAeException::WorkspaceNotFound, "Workspace Not Found for token=[#{token}]" if ws.nil? + ws end private_class_method(:workspace_from_token) diff --git a/app/models/miq_alert.rb b/app/models/miq_alert.rb index b2780f7e7b2..22c917dfa46 100644 --- a/app/models/miq_alert.rb +++ b/app/models/miq_alert.rb @@ -11,14 +11,14 @@ class MiqAlert < ApplicationRecord validates :description, :presence => true, :uniqueness_when_changed => true, :length => {:maximum => 255} validate :validate_automate_expressions validate :validate_single_expression - validates :severity, :inclusion => { :in => SEVERITIES } + validates :severity, :inclusion => {:in => SEVERITIES} has_many :miq_alert_statuses, :dependent => :destroy before_save :set_responds_to_events attr_accessor :reserved - BASE_TABLES = %w( + BASE_TABLES = %w[ Vm Host Storage @@ -28,7 +28,7 @@ class MiqAlert < ApplicationRecord ContainerNode ContainerProject PhysicalServer - ) + ] def self.base_tables BASE_TABLES @@ -36,11 +36,11 @@ def self.base_tables acts_as_miq_set_member - ASSIGNMENT_PARENT_ASSOCIATIONS = %i(host ems_cluster ext_management_system my_enterprise physical_server).freeze + ASSIGNMENT_PARENT_ASSOCIATIONS = %i[host ems_cluster ext_management_system my_enterprise physical_server].freeze - HOURLY_TIMER_EVENT = "_hourly_timer_" + HOURLY_TIMER_EVENT = "_hourly_timer_" - cache_with_timeout(:alert_assignments) { Hash.new } + cache_with_timeout(:alert_assignments) { {} } virtual_column :based_on, :type => :string virtual_column :evaluation_description, :type => :string @@ -79,7 +79,7 @@ def evaluation_description # Define methods for notify_* virtual columns [:automate, :email, :evm_event, :snmp].each do |n| - define_method("notify_#{n}") do + define_method(:"notify_#{n}") do (options || {}).has_key_path?(:notifications, n) end end @@ -87,8 +87,8 @@ def evaluation_description def miq_actions [] end - alias_method :actions, :miq_actions - alias_method :owning_miq_actions, :miq_actions + alias actions miq_actions + alias owning_miq_actions miq_actions def set_responds_to_events events = responds_to_events_from_expression @@ -123,7 +123,7 @@ def self.assigned_to_target(target, event = nil) log_target = "Target: #{target.class.name} Name: [#{target.name}], Id: [#{target.id}]" # event can be nil, so the compact removes event if it is nil - key = [target.class.base_model.name, target.id, event].compact.join("_") + key = [target.class.base_model.name, target.id, event].compact.join("_") alert_assignments[key] ||= begin profiles = MiqAlertSet.assigned_to_target(target) @@ -170,6 +170,7 @@ def self.evaluate_alerts(target, event, inputs = {}) enabled_assigned_alerts.each do |a| next if a.postpone_evaluation?(target) + _log.info("#{log_header} #{log_target} Queuing evaluation of Alert: [#{a.description}]") a.evaluate_queue(target, inputs) end @@ -195,7 +196,7 @@ def self.evaluate_hourly_timer # ems_clusters, storages, hosts, ext_management_systems, miq_servers, vms table_name = a.db.constantize.table_name targets += zone.public_send(table_name) - targets += Zone.public_send("#{table_name}_without_a_zone") if Zone.respond_to?("#{table_name}_without_a_zone") + targets += Zone.public_send(:"#{table_name}_without_a_zone") if Zone.respond_to?(:"#{table_name}_without_a_zone") end end @@ -226,20 +227,20 @@ def postpone_evaluation?(target) delay_next_evaluation = (options || {}).fetch_path(:notifications, 
:delay_next_evaluation) start_skipping_at = Time.now.utc - (delay_next_evaluation || 10.minutes).to_i statuses_not_expired = miq_alert_statuses.where(:resource => target, :result => true) - .where(miq_alert_statuses.arel_table[:evaluated_on].gt(start_skipping_at)) + .where(miq_alert_statuses.arel_table[:evaluated_on].gt(start_skipping_at)) if statuses_not_expired.count > 0 _log.info("Skipping re-evaluation of Alert [#{description}] for target: [#{target.name}] with delay_next_evaluation [#{delay_next_evaluation}]") - return true + true else - return false + false end end def evaluate(target, inputs = {}) target = self.class.normalize_target(target) - return if self.postpone_evaluation?(target) + return if postpone_evaluation?(target) _log.info("Evaluating Alert [#{description}] for target: [#{target.name}]...") result = eval_expression(target, inputs) @@ -258,12 +259,12 @@ def add_status_post_evaluate(target, result, event) status = miq_alert_statuses.find_or_initialize_by(:resource => target, :event_ems_ref => ems_ref) status.result = result status.ems_id = target.try(:ems_id) - status.ems_id ||= target.id if target.is_a?(ExtManagementSystem) + status.ems_id ||= target.id if target.kind_of?(ExtManagementSystem) status.description = status_description || description status.severity = severity - status.severity = event_severity unless event_severity.blank? - status.url = url unless url.blank? - status.event_ems_ref = ems_ref unless ems_ref.blank? + status.severity = event_severity if event_severity.present? + status.url = url if url.present? + status.event_ems_ref = ems_ref if ems_ref.present? status.resolved = resolved status.evaluated_on = Time.now.utc status.save! @@ -277,8 +278,9 @@ def invoke_actions(target, inputs = {}) a.invoke(target, inputs.merge(:result => true, :sequence => a.sequence, :synchronous => false)) else next if a == :delay_next_evaluation + method = "invoke_#{a}" - unless self.respond_to?(method) + unless respond_to?(method) _log.warn("Unknown notification type: [#{a}], skipping invocation") next end @@ -287,7 +289,7 @@ def invoke_actions(target, inputs = {}) end rescue MiqException::StopAction => err _log.error("Stopping action invocation [#{err.message}]") - return + nil rescue MiqException::UnknownActionRc => err _log.error("Aborting action invocation [#{err.message}]") raise @@ -297,7 +299,7 @@ def invoke_actions(target, inputs = {}) end def invoke_automate(target, inputs) - event = options.fetch_path(:notifications, :automate, :event_name) + event = options.fetch_path(:notifications, :automate, :event_name) event_obj = CustomEvent.create( :event_type => event, :target => target, @@ -372,48 +374,43 @@ def eval_expression(target, inputs = {}) raise "unable to evaluate expression: [#{miq_expression.inspect}], unknown format" unless hash_expression case hash_expression[:mode] - when "internal" then return evaluate_internal(target, inputs) - when "automate" then return evaluate_in_automate(target, inputs) - when "script" then return evaluate_script + when "internal" then evaluate_internal(target, inputs) + when "automate" then evaluate_in_automate(target, inputs) + when "script" then evaluate_script else raise "unable to evaluate expression: [#{hash_expression.inspect}], unknown mode" end end def self.rt_perf_model_details(dbs) - dbs.inject({}) do |h, db| - h[db] = Metric::Rollup.const_get("#{db.underscore.upcase}_REALTIME_COLS").inject({}) do |hh, c| + dbs.index_with do |db| + Metric::Rollup.const_get("#{db.underscore.upcase}_REALTIME_COLS").each_with_object({}) do 
|c, hh| db_column = "#{db}Performance.#{c}" # this is to prevent the string from being collected during string extraction hh[c.to_s] = Dictionary.gettext(db_column) - hh end - h end end def self.operating_range_perf_model_details(dbs) - dbs.inject({}) do |h, db| - h[db] = Metric::LongTermAverages::AVG_COLS.inject({}) do |hh, c| + dbs.index_with do |db| + Metric::LongTermAverages::AVG_COLS.each_with_object({}) do |c, hh| db_column = "#{db}Performance.#{c}" # this is to prevent the string from being collected during string extraction hh[c.to_s] = Dictionary.gettext(db_column) - hh end - h end end def self.hourly_perf_model_details(dbs) - dbs.inject({}) do |h, db| + dbs.each_with_object({}) do |db, h| perf_model = "#{db}Performance" - h[db] = MiqExpression.model_details(perf_model, :include_model => false, :interval => "hourly").inject({}) do |hh, a| + h[db] = MiqExpression.model_details(perf_model, :include_model => false, :interval => "hourly").each_with_object({}) do |a, hh| d, c = a model, col = c.split("-") next(hh) unless model == perf_model next(hh) if ["timestamp", "v_date", "v_time", "resource_name"].include?(col) next(hh) if col.starts_with?("abs_") && col.ends_with?("_timestamp") + hh[col] = d - hh end - h end end @@ -431,7 +428,7 @@ def self.automate_expressions {:name => :time_threshold, :description => N_("How Far Back to Check"), :required => true}, {:name => :freq_threshold, :description => N_("Event Count Threshold"), :required => true, :numeric => true} ]}, - {:name => "event_log_threshold", :description => N_("Event Log Threshold"), :db => ["Vm"], :responds_to_events => "vm_scan_complete", + {:name => "event_log_threshold", :description => N_("Event Log Threshold"), :db => ["Vm"], :responds_to_events => "vm_scan_complete", :options => [ {:name => :event_log_message_filter_type, :description => N_("Message Filter Type"), :values => ["STARTS WITH", "ENDS WITH", "INCLUDES", "REGULAR EXPRESSION"], :required => true}, {:name => :event_log_message_filter_value, :description => N_("Message Filter"), :required => true}, @@ -503,7 +500,7 @@ def self.automate_expressions # TODO: vmware specific def self.ems_alarms(db, ems = nil) ems = ExtManagementSystem.extract_objects(ems) - raise "Unable to find Management System with id: [#{id}]" if ems.nil? + raise "Unable to find Management System with id: [#{id}]" if ems.nil? to = 30 alarms = [] @@ -518,26 +515,27 @@ def self.ems_alarms(db, ems = nil) raise end - alarms.inject({}) do |h, a| + alarms.each_with_object({}) do |a, h| exp = a.fetch_path("info", "expression", "expression") next(h) unless exp next(h) unless exp.detect { |e| e["type"] == EVM_TYPE_TO_VIM_TYPE[db] || e["objectType"] == EVM_TYPE_TO_VIM_TYPE[db] } + h[a["MOR"]] = a["info"]["name"] - h end end def self.expression_types(db = nil) - automate_expressions.inject({}) do |h, e| + automate_expressions.each_with_object({}) do |e, h| next(h) unless db.nil? || e[:db].nil? 
|| e[:db].include?(db) + h[e[:name]] = e[:description] - h end end def self.expression_options(name) exp = expression_by_name(name) return nil unless exp + exp[:options] end @@ -547,7 +545,7 @@ def self.expression_by_name(name) def self.raw_events @raw_events ||= expression_by_name("event_threshold")[:options].find { |h| h[:name] == :event_types }[:values] + - %w(datawarehouse_alert) + %w[datawarehouse_alert] end def self.event_alertable?(event) @@ -596,7 +594,7 @@ def evaluate_internal(target, _inputs = {}) method = "evaluate_method_#{hash_expression[:eval_method]}" options = hash_expression[:options] || {} - raise "Evaluation method '#{hash_expression[:eval_method]}' does not exist" unless self.respond_to?(method) + raise "Evaluation method '#{hash_expression[:eval_method]}' does not exist" unless respond_to?(method) send(method, target, options) end @@ -727,7 +725,7 @@ def evaluate_performance(target, eval_options) status = target.miq_alert_statuses.first if status since_last_eval = (Time.now.utc - status.evaluated_on) - eval_options[:starting_on] = if (since_last_eval >= eval_options[:duration]) + eval_options[:starting_on] = if since_last_eval >= eval_options[:duration] (status.evaluated_on + 1) else (Time.now.utc - status.evaluated_on).seconds.ago.utc @@ -769,6 +767,7 @@ def validate exp_type.each do |fld| next if fld[:required] != true + if hash_expression[:options][fld[:name]].blank? errors.add("field", "'#{fld[:description]}' is required") next @@ -800,17 +799,17 @@ def self.seed alist.each do |alert_hash| guid = alert_hash["guid"] || alert_hash[:guid] rec = find_by(:guid => guid) - if rec.nil? - alert_hash[:read_only] = true - alert = create(alert_hash) - _log.info("Added sample Alert: #{alert.description}") - if action - alert.options ||= {} - alert.options[:notifications] ||= {} - alert.options[:notifications][action.action_type.to_sym] = action.options - alert.save - end - end + next unless rec.nil? + + alert_hash[:read_only] = true + alert = create(alert_hash) + _log.info("Added sample Alert: #{alert.description}") + next unless action + + alert.options ||= {} + alert.options[:notifications] ||= {} + alert.options[:notifications][action.action_type.to_sym] = action.options + alert.save end end end diff --git a/app/models/miq_alert_set.rb b/app/models/miq_alert_set.rb index 016776d0398..bfb8bdce1df 100644 --- a/app/models/miq_alert_set.rb +++ b/app/models/miq_alert_set.rb @@ -29,6 +29,7 @@ def notes def notes=(data) return if data.nil? + self.set_data ||= {} self.set_data[:notes] = data[0..511] end @@ -60,6 +61,7 @@ def self.import_from_yaml(fd, options = {}) def self.seed fixture_file = File.join(FIXTURE_DIR, "miq_alert_sets.yml") return unless File.exist?(fixture_file) + File.open(fixture_file) { |fd| MiqAlertSet.import_from_yaml(fd, :save => true) } end diff --git a/app/models/miq_alert_status.rb b/app/models/miq_alert_status.rb index 602e02d04c6..c2b32135950 100644 --- a/app/models/miq_alert_status.rb +++ b/app/models/miq_alert_status.rb @@ -1,5 +1,5 @@ class MiqAlertStatus < ApplicationRecord - SEVERITY_LEVELS = %w(error warning info).freeze + SEVERITY_LEVELS = %w[error warning info].freeze belongs_to :miq_alert belongs_to :resource, :polymorphic => true @@ -9,14 +9,14 @@ class MiqAlertStatus < ApplicationRecord virtual_column :assignee, :type => :string virtual_column :hidden, :type => :boolean - validates :severity, :acceptance => { :accept => SEVERITY_LEVELS } + validates :severity, :acceptance => {:accept => SEVERITY_LEVELS} def assigned? assignee_id.present? 
end def hidden? - miq_alert_status_actions.where(:action_type => %w(hide show)).last.try(:action_type) == 'hide' + miq_alert_status_actions.where(:action_type => %w[hide show]).last.try(:action_type) == 'hide' end def self.display_name(number = 1) diff --git a/app/models/miq_alert_status_action.rb b/app/models/miq_alert_status_action.rb index 6cc9173b8a8..441c9b17f87 100644 --- a/app/models/miq_alert_status_action.rb +++ b/app/models/miq_alert_status_action.rb @@ -1,10 +1,10 @@ class MiqAlertStatusAction < ApplicationRecord - ACTION_TYPES = %w(assign acknowledge comment unassign unacknowledge hide show).freeze + ACTION_TYPES = %w[assign acknowledge comment unassign unacknowledge hide show].freeze belongs_to :miq_alert_status belongs_to :assignee, :class_name => 'User' belongs_to :user - validates :action_type, :acceptance => { :accept => ACTION_TYPES }, :presence => true + validates :action_type, :acceptance => {:accept => ACTION_TYPES}, :presence => true validates :user, :presence => true validates :miq_alert_status, :presence => true validates :comment, :presence => true, :if => (->(masa) { masa.action_type == 'comment' }) @@ -17,13 +17,13 @@ class MiqAlertStatusAction < ApplicationRecord def only_assignee_can_acknowledge if ['acknowledge', 'unacknowledge', 'hide', 'show'].include?(action_type) && - miq_alert_status.assignee.try(:id) != user.id + miq_alert_status.assignee.try(:id) != user.id errors.add(:user, "that is not assigned cannot #{action_type}") end end def update_status_acknowledgement - if %w(assign unassign unacknowledge).include?(action_type) + if %w[assign unassign unacknowledge].include?(action_type) miq_alert_status.update!(:acknowledged => false) elsif "acknowledge" == action_type miq_alert_status.update!(:acknowledged => true) diff --git a/app/models/miq_approval.rb b/app/models/miq_approval.rb index 1e9d4d92b3f..df1d73d5faa 100644 --- a/app/models/miq_approval.rb +++ b/app/models/miq_approval.rb @@ -64,6 +64,7 @@ def execute_approval(user) def user_validate(userid) user = userid.kind_of?(User) ? userid : User.lookup_by_userid(userid) raise "not authorized" unless authorized?(user) + user end end diff --git a/app/models/miq_automate.rb b/app/models/miq_automate.rb index de8ccc4f979..b8981a121b0 100644 --- a/app/models/miq_automate.rb +++ b/app/models/miq_automate.rb @@ -11,7 +11,7 @@ def self.async_datastore_reset :method_name => "_async_datastore_reset", :args => [task.id], :priority => MiqQueue::HIGH_PRIORITY, - :msg_timeout => 3600, + :msg_timeout => 3600 ) task.update_status("Queued", "Ok", "Task has been queued") end @@ -21,7 +21,7 @@ def self.async_datastore_reset def self._async_datastore_reset(taskid) task = MiqTask.find_by(:id => taskid) - task.update_status("Active", "Ok", "Resetting Automate Datastore") if task + task.update_status("Active", "Ok", "Resetting Automate Datastore") if task MiqAeDatastore.reset_to_defaults task.update_status("Finished", "Ok", "Resetting Automate Datastore complete") if task end diff --git a/app/models/miq_bulk_import.rb b/app/models/miq_bulk_import.rb index 20338490fd4..74955368199 100644 --- a/app/models/miq_bulk_import.rb +++ b/app/models/miq_bulk_import.rb @@ -5,7 +5,8 @@ def self.upload(fd, tags, keys) _log.info("Uploading CSV file") data = fd.read raise _("File is empty") if data.empty? - data.gsub!(/\r/, "\n") + + data.tr!("\r", "\n") begin reader = CSV.parse(data) rescue CSV::IllegalFormatError @@ -39,9 +40,11 @@ def self.upload(fd, tags, keys) result = [] reader.each do |row| next if row.first.nil? 
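# -- Illustrative sketch, not part of the patch -----------------------------
# Several hunks earlier in this patch (e.g. rt_perf_model_details,
# expression_types, marshal_dump) replace hand-rolled `inject({})`
# accumulators with ActiveSupport's `index_with` or Ruby's `each_with_object`.
# A minimal standalone example; the sample data below is made up:
require "active_support/core_ext/enumerable"

dbs = %w[Vm Host]

# index_with: keys are the collection elements, values come from the block
labels = dbs.index_with { |db| "#{db}Performance" }
# => {"Vm" => "VmPerformance", "Host" => "HostPerformance"}

# each_with_object: explicit accumulator; the block need not return it
lengths = dbs.each_with_object({}) { |db, h| h[db] = db.length }
# => {"Vm" => 2, "Host" => 4}
# ---------------------------------------------------------------------------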
+ line = {} header.each_index do |i| next unless tags.include?(header[i]) + line[header[i]] = row[i].strip if row[i] end result.push(line) @@ -75,6 +78,7 @@ def self.find_entry_by_keys(klass, keys) def self.get_sub_key_values(rec, sub_key) unless sub_key.include?(".") return [] unless rec.respond_to?(sub_key) + return rec.send(sub_key).downcase end @@ -89,13 +93,14 @@ def self.get_sub_key_values(rec, sub_key) current = current.send(p) end - current = current.kind_of?(ActiveRecord::Base) ? [current] : current + current = [current] if current.kind_of?(ActiveRecord::Base) - results = current.collect do |c| + current.collect do |c| return [] unless c.respond_to?(attr) + c.send(attr) end.compact - results + end end diff --git a/app/models/miq_compare.rb b/app/models/miq_compare.rb index 1aac54f4a2d..9cbddc969d3 100644 --- a/app/models/miq_compare.rb +++ b/app/models/miq_compare.rb @@ -3,13 +3,7 @@ class MiqCompare EMPTY = '(empty)' TAG_PREFIX = '_tag_' - attr_reader :report - attr_reader :mode - attr_reader :ids - attr_reader :records - - attr_reader :master_list - attr_reader :results + attr_reader :report, :mode, :ids, :records, :master_list, :results attr_accessor :include @@ -144,7 +138,7 @@ def self.sections(report) # 'guest_devices' section below it, would create a 'hardware.guest_devices' # section in the resultant include. def self.build_sections(section, all_sections, full_name = '') - return unless section.key?('include') && !section['include'].blank? + return unless section.key?('include') && section['include'].present? section['include'].each do |name, data| group = data['group'] @@ -172,7 +166,7 @@ def self.build_section(all_sections, name, key = nil, group = nil) name = name.to_sym all_sections[name] = {:fetch => false, :fetched => false, :checked => false} all_sections[name][:key] = key.empty? ? key : key.to_sym unless key.nil? - all_sections[name][:group] = group unless group.blank? + all_sections[name][:group] = group if group.present? end def section_header_text(model) @@ -202,10 +196,10 @@ def prepare_master_list if @report.cols.include?(c) section, column = :_model_, c.to_sym - else - # Determine the section and column based on the last '.' - section, column = $1.to_sym, $2.to_sym if c =~ /(.+)\.([^\.]+)$/ + elsif c =~ /(.+)\.([^.]+)$/ + section, column = $1.to_sym, $2.to_sym end + # Determine the section and column based on the last '.' # See if this section has a key if section == :_model_ @@ -215,7 +209,7 @@ def prepare_master_list column = column.to_s section = "#{TAG_PREFIX}#{column}".to_sym c = Classification.lookup_by_name(column) - section_header = (c.nil? || c.description.blank?) ? column.titleize : c.description + section_header = c.nil? || c.description.blank? ? column.titleize : c.description column = nil # columns will be filled in dynamically when we fetch the section data key = nil else @@ -350,7 +344,7 @@ def fetch_record_section(id, section, sub_sections, columns) sub_rec.each do |r| if key_name.blank? 
- key = "\##{key_counter}" + key = "##{key_counter}" key_counter += 1 else key = r.send(key_name) @@ -422,8 +416,8 @@ def calculate_record(id) # Determine the base and result records base_id = case @mode - when :compare then @ids[0] # For compare, we are comparing to the first record - when :drift then @ids.each_cons(2) { |x, y| break(x) if y == id } # For drift, we are comparing to the previous timestamp + when :compare then @ids[0] # For compare, we are comparing to the first record + when :drift then @ids.each_cons(2) { |x, y| break(x) if y == id } # For drift, we are comparing to the previous timestamp end base = @results[base_id] result = @results[id] @@ -520,12 +514,12 @@ def clear_calculations(result) # Retrieve all records from the source for the set of ids (mode agnostic) def get_records - send("get_#{@mode}_records") + send(:"get_#{@mode}_records") end # Retrieve the record from the source (mode agnostic) def get_record(id) - send("get_#{@mode}_record", id) + send(:"get_#{@mode}_record", id) end # Find the record for the specified id @@ -538,6 +532,7 @@ def find_record(id) # Retrieve all records from the source for the set of ids (compare mode) def get_compare_records return unless @mode == :compare + recs = @model.where(:id => @ids) error_recs = [] @@ -549,12 +544,13 @@ def get_compare_records new_rec end - _log.error("No record was found for compare object #{@model}, ids: [#{error_recs.join(", ")}]") unless error_recs.blank? + _log.error("No record was found for compare object #{@model}, ids: [#{error_recs.join(", ")}]") if error_recs.present? end # Retrieve the record from the source (compare mode) def get_compare_record(id) return unless @mode == :compare + new_rec = @model.find_by(:id => id) _log.error("No record was found for compare object #{@model}, id: [#{id}]") if new_rec.nil? new_rec @@ -565,12 +561,14 @@ def get_compare_record(id) # Retrieve all records from the source for the set of ids (drift mode) def get_drift_records return unless @mode == :drift + @records = drift_model_record.drift_states.where(:timestamp => @ids).collect(&:data_obj) end # Retrieve the record from the source (drift mode) def get_drift_record(ts) return unless @mode == :drift + new_rec = drift_model_record.drift_states.find_by(:timestamp => ts).data_obj _log.error("No data was found for drift object #{@model} [#{@model_record_id}] at [#{ts}]") if new_rec.nil? 
new_rec @@ -578,6 +576,7 @@ def get_drift_record(ts) def drift_model_record return unless @mode == :drift + @model_record ||= @model.find_by(:id => @model_record_id) end @@ -591,7 +590,7 @@ def drift_model_record def marshal_dump ivs = instance_variables.reject { |iv| iv.in?(IVS_TO_REMOVE_ON_DUMP) } - ivs.each_with_object({}) { |iv, h| h[iv] = instance_variable_get(iv) } + ivs.index_with { |iv| instance_variable_get(iv) } end def marshal_load(data) diff --git a/app/models/miq_database.rb b/app/models/miq_database.rb index 92a57e6bcc0..5fa12dfe418 100644 --- a/app/models/miq_database.rb +++ b/app/models/miq_database.rb @@ -3,7 +3,7 @@ class MiqDatabase < ApplicationRecord encrypt_column :csrf_secret_token encrypt_column :session_secret_token - validates_presence_of :session_secret_token, :csrf_secret_token + validates :session_secret_token, :csrf_secret_token, :presence => true def self.seed db = first || new @@ -25,7 +25,7 @@ def size end def self.adapter - @adapter ||= ActiveRecord::Base.connection.instance_variable_get("@config")[:adapter] + @adapter ||= ActiveRecord::Base.connection.instance_variable_get(:@config)[:adapter] end def self.display_name(number = 1) diff --git a/app/models/miq_dialog.rb b/app/models/miq_dialog.rb index 06811b6d0bd..a56f353ab76 100644 --- a/app/models/miq_dialog.rb +++ b/app/models/miq_dialog.rb @@ -2,7 +2,7 @@ class MiqDialog < ApplicationRecord include Seeding validates :name, :description, :presence => true - validates :name, :unique_within_region => { :scope => :dialog_type, :match_case => false } + validates :name, :unique_within_region => {:scope => :dialog_type, :match_case => false} scope :with_dialog_type, ->(dialog_type) { where(:dialog_type => dialog_type) } diff --git a/app/models/miq_enterprise.rb b/app/models/miq_enterprise.rb index 27178f1e084..b33350b0d40 100644 --- a/app/models/miq_enterprise.rb +++ b/app/models/miq_enterprise.rb @@ -1,7 +1,7 @@ class MiqEnterprise < ApplicationRecord has_many :metrics, :as => :resource # Destroy will be handled by purger has_many :metric_rollups, :as => :resource # Destroy will be handled by purger - has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger + has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger virtual_has_many :miq_regions, :class_name => "MiqRegion" virtual_has_many :ext_management_systems, :class_name => "ExtManagementSystem" @@ -64,7 +64,7 @@ def policy_events PolicyEvent.all end - alias_method :all_storages, :storages + alias all_storages storages def get_reserve(field) ext_management_systems.inject(0) { |v, obj| v + (obj.send(field) || 0) } @@ -91,7 +91,7 @@ def perf_rollup_parents(_interval_name = nil) def perf_capture_enabled? @perf_capture_enabled ||= ext_management_systems.any?(&:perf_capture_enabled?) end - alias_method :perf_capture_enabled, :perf_capture_enabled? + alias perf_capture_enabled perf_capture_enabled? Vmdb::Deprecation.deprecate_methods(self, :perf_capture_enabled => :perf_capture_enabled?) 
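# -- Illustrative sketch, not part of the patch -----------------------------
# Hunks such as `alias perf_capture_enabled perf_capture_enabled?` above swap
# `alias_method` for the `alias` keyword. Both define the same method; the
# class and method names below are invented for illustration:
class Probe
  def enabled?
    true
  end

  alias enabled enabled?             # keyword form preferred by this patch
  # alias_method :enabled, :enabled? # equivalent form being replaced
end

Probe.new.enabled # => true
# ---------------------------------------------------------------------------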
def self.display_name(number = 1) diff --git a/app/models/miq_event.rb b/app/models/miq_event.rb index 70e2933f085..04e730b376f 100644 --- a/app/models/miq_event.rb +++ b/app/models/miq_event.rb @@ -157,6 +157,7 @@ def update_with_policy_result(result = {}) def self.normalize_event(event) return event if MiqEventDefinition.find_by(:name => event) + "unknown" end @@ -178,11 +179,13 @@ def self.raise_evm_event_queue(target, raw_event, inputs = {}) end def self.raise_evm_alert_event_queue(target, raw_event, inputs = {}) - MiqQueue.put_unless_exists( - :class_name => "MiqAlert", - :method_name => 'evaluate_alerts', - :args => [[target.class.name, target.id], raw_event, inputs] - ) if MiqAlert.alarm_has_alerts?(raw_event) + if MiqAlert.alarm_has_alerts?(raw_event) + MiqQueue.put_unless_exists( + :class_name => "MiqAlert", + :method_name => 'evaluate_alerts', + :args => [[target.class.name, target.id], raw_event, inputs] + ) + end end def self.raise_evm_job_event(target, options = {}, inputs = {}, q_options = {}) @@ -194,7 +197,7 @@ def self.raise_evm_job_event(target, options = {}, inputs = {}, q_options = {}) ) target_model = target.class.base_model.name.downcase - target_model = "vm" if target_model.match("template") + target_model = "vm" if target_model.match?("template") base_event = [target_model, options[:type]].join("_") evm_event = [options[:prefix], base_event, options[:suffix]].compact.join("_") @@ -208,6 +211,7 @@ def self.raise_event_for_children(target, raw_event, inputs = {}) child_event = "#{raw_event}_parent_#{target.class.base_model.name.underscore}" child_assocs.each do |assoc| next unless target.respond_to?(assoc) + children = target.send(assoc) children.each do |child| _log.info("Raising Event [#{child_event}] for Child [(#{child.class}) #{child.name}] of Parent [(#{target.class}) #{target.name}]") diff --git a/app/models/miq_event_definition.rb b/app/models/miq_event_definition.rb index 893aabd9080..6a39551a3d7 100644 --- a/app/models/miq_event_definition.rb +++ b/app/models/miq_event_definition.rb @@ -4,9 +4,9 @@ class MiqEventDefinition < ApplicationRecord validates :name, :uniqueness_when_changed => true, :presence => true, :allow_nil => true, - :format => {:with => /\A[a-z0-9_\-]+\z/i, + :format => {:with => /\A[a-z0-9_-]+\z/i, :message => "must only contain alpha-numeric, underscore and hyphen characters without spaces"} - validates_presence_of :description + validates :description, :presence => true acts_as_miq_set_member acts_as_miq_taggable @@ -101,7 +101,7 @@ def self.add_elements(_vm, xmlNode) eventData.delete("id") # Remove elements that do not belong in the event table - %w( src_vm_guid dest_vm_guid vm_guid ).each do |field| + %w[src_vm_guid dest_vm_guid vm_guid].each do |field| eventData.delete(field) end diff --git a/app/models/miq_event_definition_set.rb b/app/models/miq_event_definition_set.rb index aa5f9de0715..72c6757b0fe 100644 --- a/app/models/miq_event_definition_set.rb +++ b/app/models/miq_event_definition_set.rb @@ -8,7 +8,6 @@ def self.set_definitions_from_path(path) def self.seed existing = all.group_by(&:name) set_definitions_from_path(fixture_path).each do |set| - rec = existing[set['name']].try(:first) if rec.nil? 
_log.info("Creating [#{set['name']}]") @@ -31,5 +30,5 @@ def self.display_name(number = 1) n_('Event Definition Set', 'Event Definition Sets', number) end - alias_method :events, :miq_event_definitions + alias events miq_event_definitions end diff --git a/app/models/miq_filter.rb b/app/models/miq_filter.rb index 1282347b6d0..85b0869f534 100644 --- a/app/models/miq_filter.rb +++ b/app/models/miq_filter.rb @@ -42,6 +42,7 @@ def self.find_object_by_name(klass, name) def self.belongsto2object_list(tag) # /belongsto/ExtManagementSystem|/EmsCluster|/EmsFolder| raise _("invalid tag: %{tag}") % {:tag => tag} unless tag.starts_with?("/belongsto/ExtManagementSystem") + parts = tag.split("/")[2..-1] depth = parts.size - 1 # ancestry uses 0 based depth @@ -61,8 +62,8 @@ def self.belongsto2object_list(tag) # Get the leaf node object for this EMS leaf_class, leaf_name = parts.last.split("|", 2) leaves = leaf_class.constantize - .includes(:all_relationships) - .where(:name => leaf_name, :ems_id => ems.id) + .includes(:all_relationships) + .where(:name => leaf_name, :ems_id => ems.id) # If multiple leaves come back, filter by depth, and then find which one has # the valid path. It's possible multiple leaves could be at the same depth. diff --git a/app/models/miq_group.rb b/app/models/miq_group.rb index a6c91740133..596b15a464d 100644 --- a/app/models/miq_group.rb +++ b/app/models/miq_group.rb @@ -7,7 +7,7 @@ class MiqGroup < ApplicationRecord has_one :entitlement, :dependent => :destroy, :autosave => true has_one :miq_user_role, :through => :entitlement has_and_belongs_to_many :users - has_many :vms, :dependent => :nullify + has_many :vms, :dependent => :nullify has_many :miq_templates, :dependent => :nullify has_many :miq_reports, :dependent => :nullify has_many :miq_report_results, :dependent => :nullify @@ -17,12 +17,12 @@ class MiqGroup < ApplicationRecord has_many :authentications, :dependent => :nullify virtual_delegate :miq_user_role_name, :to => :entitlement, :allow_nil => true, :type => :string - virtual_column :read_only, :type => :boolean + virtual_column :read_only, :type => :boolean virtual_has_one :sui_product_features, :class_name => "Array" delegate :self_service?, :limited_self_service?, :to => :miq_user_role, :allow_nil => true - validates :description, :presence => true, :unique_within_region => { :match_case => false } + validates :description, :presence => true, :unique_within_region => {:match_case => false} validate :validate_default_tenant, :on => :update, :if => :tenant_id_changed? before_destroy :ensure_can_be_destroyed after_destroy :reset_current_group_for_users @@ -42,7 +42,7 @@ class MiqGroup < ApplicationRecord include CustomActionsMixin include ExternalUrlMixin - alias_method :current_tenant, :tenant + alias current_tenant tenant def name description @@ -112,7 +112,7 @@ def self.seed root_tenant = Tenant.root_tenant groups = where(:group_type => SYSTEM_GROUP, :tenant_id => Tenant.root_tenant) - .includes(:entitlement).index_by(&:description) + .includes(:entitlement).index_by(&:description) roles = MiqUserRole.where("name like 'EvmRole-%'").index_by(&:name) role_map.each_with_index do |(group_name, role_name), index| @@ -121,7 +121,7 @@ def self.seed if user_role.nil? 
raise StandardError, _("Unable to find user_role 'EvmRole-%{role_name}' for group '%{group_name}'") % - {:role_name => role_name, :group_name => group_name} + {:role_name => role_name, :group_name => group_name} end group.miq_user_role = user_role if group.entitlement.try(:miq_user_role_id) != user_role.id group.sequence = index + 1 @@ -129,11 +129,11 @@ def self.seed group.group_type = SYSTEM_GROUP group.tenant = root_tenant - if group.changed? - mode = group.new_record? ? "Created" : "Updated" - group.save! - _log.info("#{mode} Group: #{group.description} with Role: #{user_role.name}") - end + next unless group.changed? + + mode = group.new_record? ? "Created" : "Updated" + group.save! + _log.info("#{mode} Group: #{group.description} with Role: #{user_role.name}") end # find any default tenant groups that do not have a role @@ -162,6 +162,7 @@ def self.get_ldap_groups_by_user(user, bind_dn, bind_pwd) unless ldap.bind(ldap.fqusername(bind_dn), bind_pwd) raise _("Bind failed for user %{user_name}") % {:user_name => bind_dn} end + user_obj = ldap.get_user_object(ldap.normalize(ldap.fqusername(username))) raise _("Unable to find user %{user_name} in directory") % {:user_name => username} if user_obj.nil? @@ -239,7 +240,7 @@ def referenced_by_tenant? def read_only system_group? || tenant_group? end - alias_method :read_only?, :read_only + alias read_only? read_only virtual_total :user_count, :users @@ -270,7 +271,7 @@ def self.create_tenant_group(tenant) :description => "Tenant #{tenant_full_name} access" ).find_or_create_by!( :group_type => TENANT_GROUP, - :tenant_id => tenant.id, + :tenant_id => tenant.id ) end @@ -298,11 +299,13 @@ def self.with_groups(miq_group_ids) def single_group_users? group_user_ids = user_ids return false if group_user_ids.empty? 
+ users.includes(:miq_groups).where(:id => group_user_ids).where.not(:miq_groups => {:id => id}).count != group_user_ids.size end def sui_product_features return [] unless miq_user_role.allows?(:identifier => 'sui') + MiqProductFeature.feature_all_children('sui').each_with_object([]) do |sui_feature, sui_features| sui_features << sui_feature if miq_user_role.allows?(:identifier => sui_feature) end diff --git a/app/models/miq_policy.rb b/app/models/miq_policy.rb index dd2e34cbb9a..eac3c97d8da 100644 --- a/app/models/miq_policy.rb +++ b/app/models/miq_policy.rb @@ -3,7 +3,7 @@ class MiqPolicy < ApplicationRecord include ReadOnlyMixin - TOWHAT_APPLIES_TO_CLASSES = %w(ContainerGroup + TOWHAT_APPLIES_TO_CLASSES = %w[ContainerGroup ContainerImage ContainerNode ContainerProject @@ -11,7 +11,7 @@ class MiqPolicy < ApplicationRecord ExtManagementSystem Host PhysicalServer - Vm).freeze + Vm].freeze CONDITION_SUCCESS = N_("Success") CONDITION_FAILURE = N_("Failure") @@ -22,7 +22,7 @@ def self.policy_towhat_applies_to_classes def self.policy_modes { - "control" => _("Control"), + "control" => _("Control"), "compliance" => _("Compliance") } end @@ -54,9 +54,9 @@ def self.policy_modes validates :description, :presence => true, :uniqueness_when_changed => true validates :name, :presence => true, :uniqueness_when_changed => true - validates :mode, :inclusion => { :in => %w(compliance control) } - validates :towhat, :inclusion => { :in => TOWHAT_APPLIES_TO_CLASSES, - :message => "should be one of #{TOWHAT_APPLIES_TO_CLASSES.join(", ")}" } + validates :mode, :inclusion => {:in => %w[compliance control]} + validates :towhat, :inclusion => {:in => TOWHAT_APPLIES_TO_CLASSES, + :message => "should be one of #{TOWHAT_APPLIES_TO_CLASSES.join(", ")}"} scope :with_mode, ->(mode) { where(:mode => mode) } scope :with_towhat, ->(towhat) { where(:towhat => towhat) } @@ -74,7 +74,7 @@ def self.policy_modes def self.built_in_policies return @@built_in_policies.dup unless @@built_in_policies.nil? - policy_hashes = YAML.load_file(Rails.root.join("product", "policy", "built_in_policies.yml")) + policy_hashes = YAML.load_file(Rails.root.join("product/policy/built_in_policies.yml")) @@built_in_policies = policy_hashes.collect do |p_hash| policy = OpenStruct.new(p_hash) @@ -105,7 +105,7 @@ def self.built_in_policies @@built_in_policies.dup end - CLEAN_ATTRS = %w(id guid name created_on updated_on miq_policy_id description) + CLEAN_ATTRS = %w[id guid name created_on updated_on miq_policy_id description] def self.clean_attrs(attrs) CLEAN_ATTRS.each { |a| attrs.delete(a) } attrs @@ -127,17 +127,18 @@ def copy(new_fields) def miq_event_definitions miq_policy_contents.collect(&:miq_event_definition).compact.uniq end - alias_method :events, :miq_event_definitions + alias events miq_event_definitions def miq_actions miq_policy_contents.collect(&:miq_action).compact.uniq end - alias_method :actions, :miq_actions + alias actions miq_actions def actions_for_event(event, on = :failure) order = on == :success ? "success_sequence" : "failure_sequence" miq_policy_contents.where(:miq_event_definition => event).order(order).collect do |pe| next unless pe.qualifier == on.to_s + pe.get_action(on) end.compact end @@ -166,7 +167,7 @@ def replace_actions_for_event(event, action_list) fail_seq = 0 action_list.each do |action, opts| opts[:qualifier] ||= "failure" - opts[:sequence] = opts[:qualifier].to_s == "success" ? succes_seq += 1 : fail_seq += 1 + opts[:sequence] = opts[:qualifier].to_s == "success" ? 
succes_seq += 1 : fail_seq += 1 add_action_for_event(event, action, opts) end end @@ -285,6 +286,7 @@ def self.build_results(policies, profiles, event, status) # ] policies.collect do |p| next unless p.kind_of?(self) # skip built-in policies + { :miq_policy => p, :result => status.to_s, @@ -307,10 +309,12 @@ def self.resolve(rec, list = nil, event = nil) policy_hash['result'], policy_hash['conditions'] = resolve_policy_conditions(p, rec) action_on = policy_hash["result"] == "deny" ? :failure : :success - policy_hash["actions"] = - p.actions_for_event(event, action_on).uniq.collect do |a| - {"id" => a.id, "name" => a.name, "description" => a.description, "result" => policy_hash["result"]} - end unless event.nil? + unless event.nil? + policy_hash["actions"] = + p.actions_for_event(event, action_on).uniq.collect do |a| + {"id" => a.id, "name" => a.name, "description" => a.description, "result" => policy_hash["result"]} + end + end end p.attributes.merge(policy_hash) end.compact @@ -321,7 +325,7 @@ def self.resolve_policy_conditions(policy, rec) conditions = policy.conditions.collect do |c| rec_model = rec.class.base_model.name - rec_model = "Vm" if rec_model.downcase.match("template") + rec_model = "Vm" if rec_model.downcase.match?("template") next unless rec_model == c["towhat"] resolve_condition(c, rec).tap do |cond_hash| @@ -355,7 +359,7 @@ def self.resolve_condition(cond, rec) def applies_to?(rec, inputs = {}) rec_model = rec.class.base_model.name - rec_model = "Vm" if rec_model.downcase.match("template") + rec_model = "Vm" if rec_model.downcase.match?("template") return false if towhat && rec_model != towhat return true if expression.nil? @@ -378,19 +382,18 @@ def self.all_policy_events def self.all_policy_events_filter # Todo Convert to SQL if possible filter_hash = { - "AND" => [ - {"=" => {"field" => "MiqEventDefinition-event_type", "value" => "Default"}}, - {"!=" => {"field" => "MiqEventDefinition-event_group_name", "value" => ""}} - ] - } - EVENT_GROUPS_EXCLUDED.each do |e| - filter_hash["AND"] << {"!=" => {"field" => "MiqEventDefinition-event_group_name", "value" => e}} - end + "AND" => [ + {"=" => {"field" => "MiqEventDefinition-event_type", "value" => "Default"}}, + {"!=" => {"field" => "MiqEventDefinition-event_group_name", "value" => ""}} + ] + } + EVENT_GROUPS_EXCLUDED.each do |e| + filter_hash["AND"] << {"!=" => {"field" => "MiqEventDefinition-event_group_name", "value" => e}} + end - MiqExpression.new(filter_hash) + MiqExpression.new(filter_hash) end - def self.logger $policy_log end @@ -415,13 +418,13 @@ def self.get_policies_for_target(target, mode, event, inputs = {}) plist = built_in_policies.concat(plist).uniq towhat = target.class.base_model.name - towhat = "Vm" if towhat.downcase.match("template") + towhat = "Vm" if towhat.downcase.match?("template") plist.keep_if do |p| p.mode == mode && - p.towhat == towhat && - policy_for_event?(p, event) && - policy_active?(p) && - policy_applicable?(p, target, inputs) + p.towhat == towhat && + policy_for_event?(p, event) && + policy_active?(p) && + policy_applicable?(p, target, inputs) end [profiles, plist] @@ -507,11 +510,11 @@ def add_action_for_event(event, action, opt_hash = nil) opt_hash.delete(:synchronous) pevent = miq_policy_contents.build(opt_hash) - pevent.miq_event_definition = event + pevent.miq_event_definition = event pevent.miq_action = action pevent.save - self.save! + save! 
end private :add_action_for_event end diff --git a/app/models/miq_policy/import_export.rb b/app/models/miq_policy/import_export.rb index 823fb9380e5..80752306699 100644 --- a/app/models/miq_policy/import_export.rb +++ b/app/models/miq_policy/import_export.rb @@ -1,11 +1,12 @@ module MiqPolicy::ImportExport extend ActiveSupport::Concern - IMPORT_CLASS_NAMES = %w(MiqPolicy MiqPolicySet MiqAlert).freeze + IMPORT_CLASS_NAMES = %w[MiqPolicy MiqPolicySet MiqAlert].freeze module ClassMethods def import_from_hash(policy, options = {}) raise _("No Policy to Import") if policy.nil? + pe = policy.delete("MiqPolicyContent") { |_k| raise "No contents for Policy == #{policy.inspect}" } pc = policy.delete("Condition") || [] @@ -31,7 +32,7 @@ def import_from_hash(policy, options = {}) event = e["MiqEventDefinition"] || e["MiqEvent"] ekey = event["name"] - eventsHash[ekey] = MiqEventDefinition.import_from_hash(event, options) unless eventsHash.key?(ekey) + eventsHash[ekey] = MiqEventDefinition.import_from_hash(event, options) unless eventsHash.key?(ekey) e2a[ekey] = [] unless e2a.key?(ekey) e2a[ekey].push([akey, opts]) @@ -43,11 +44,11 @@ def import_from_hash(policy, options = {}) event_status[:children] ||= [] e2a[ekey].each do |arr| akey, opts = arr - unless akey.nil? - action, s = actionsHash[akey] - actions.push([action, opts]) - event_status[:children].push(s) - end + next if akey.nil? + + action, s = actionsHash[akey] + actions.push([action, opts]) + event_status[:children].push(s) end events.push([event, actions]) @@ -62,7 +63,7 @@ def import_from_hash(policy, options = {}) conditions.push(condition) end - policy["towhat"] ||= "Vm" # Default "towhat" value to "Vm" to support older export decks that don't have a value set. + policy["towhat"] ||= "Vm" # Default "towhat" value to "Vm" to support older export decks that don't have a value set. # Default "active" value to true to support older export decks that don't have a value set. policy["active"] = true if policy["active"].nil? policy["mode"] ||= "control" # Default "mode" value to true to support older export decks that don't have a value set. diff --git a/app/models/miq_policy_set.rb b/app/models/miq_policy_set.rb index 6bbca378b0b..d3683316d74 100644 --- a/app/models/miq_policy_set.rb +++ b/app/models/miq_policy_set.rb @@ -13,6 +13,7 @@ def notes def notes=(data) return if data.nil? + self.set_data ||= {} self.set_data[:notes] = data[0..511] end @@ -23,7 +24,7 @@ def active? 
def destroy_policy_tags # handle policy assignment removal for deleted policy profile - Tag.find_by(:name => "/miq_policy/assignment/#{self.class.to_s.underscore}/#{id}").try!(:destroy) + Tag.find_by(:name => "/miq_policy/assignment/#{self.class.to_s.underscore}/#{id}")&.destroy end def add_policy(policy) diff --git a/app/models/miq_priority_worker.rb b/app/models/miq_priority_worker.rb index cf8b3ac79cb..113bab28c71 100644 --- a/app/models/miq_priority_worker.rb +++ b/app/models/miq_priority_worker.rb @@ -1,7 +1,7 @@ class MiqPriorityWorker < MiqQueueWorkerBase include MiqWorker::ReplicaPerWorker - self.default_queue_name = "generic" + self.default_queue_name = "generic" def self.queue_priority MiqQueue::HIGH_PRIORITY diff --git a/app/models/miq_product_feature.rb b/app/models/miq_product_feature.rb index 71ddcbfe418..c1c618d6960 100644 --- a/app/models/miq_product_feature.rb +++ b/app/models/miq_product_feature.rb @@ -28,14 +28,14 @@ class MiqProductFeature < ApplicationRecord :tenant_id ] - FEATURE_TYPE_ORDER = %w(view control admin node).freeze + FEATURE_TYPE_ORDER = %w[view control admin node].freeze validates :feature_type, :inclusion => FEATURE_TYPE_ORDER REQUIRED_ATTRIBUTES = [:feature_type, :identifier].freeze - OPTIONAL_ATTRIBUTES = %i(name description children hidden protected).freeze + OPTIONAL_ATTRIBUTES = %i[name description children hidden protected].freeze ALLOWED_ATTRIBUTES = (REQUIRED_ATTRIBUTES + OPTIONAL_ATTRIBUTES).freeze - MY_TENANT_FEATURE_ROOT_IDENTIFIERS = %w(rbac_tenant_manage_quotas).freeze - TENANT_FEATURE_ROOT_IDENTIFIERS = (%w(dialog_new_editor dialog_edit_editor dialog_copy_editor dialog_delete) + MY_TENANT_FEATURE_ROOT_IDENTIFIERS).freeze + MY_TENANT_FEATURE_ROOT_IDENTIFIERS = %w[rbac_tenant_manage_quotas].freeze + TENANT_FEATURE_ROOT_IDENTIFIERS = (%w[dialog_new_editor dialog_edit_editor dialog_copy_editor dialog_delete] + MY_TENANT_FEATURE_ROOT_IDENTIFIERS).freeze def name value = self[:name] @@ -85,7 +85,8 @@ def self.feature_children(identifier, sort = true) def self.feature_all_children(identifier, sort = true) children = feature_children(identifier, false) return [] if children.empty? - result = children + children.flat_map { |c| feature_all_children(c, false) } + + result = children + children.flat_map { |c| feature_all_children(c, false) } sort ? 
sort_children(result) : result end @@ -129,7 +130,7 @@ def self.features .includes(:tenant) .each_with_object({}) do |f, h| parent_ident = f.parent_identifier - details = DETAIL_ATTRS.each_with_object({}) { |a, dh| dh[a] = f.send(a) } + details = DETAIL_ATTRS.index_with { |a| f.send(a) } h[f.identifier] = {:parent => parent_ident, :children => [], :details => details} end # populate the children based upon parent identifier @@ -298,11 +299,10 @@ def seed_vm_explorer_for_custom_roles end def details - @details ||= begin - attributes.symbolize_keys.slice(*DETAIL_ATTRS).merge( - :children => children.where(:hidden => [false, nil]) + @details ||= attributes.symbolize_keys.slice(*DETAIL_ATTRS).merge( + :children => children.where(:hidden => [false, nil]) ) - end + end def self.display_name(number = 1) diff --git a/app/models/miq_provision.rb b/app/models/miq_provision.rb index 76d18253b60..6c820e8ed07 100644 --- a/app/models/miq_provision.rb +++ b/app/models/miq_provision.rb @@ -23,7 +23,7 @@ class MiqProvision < MiqProvisionTask before_create :set_template_and_networking - virtual_belongs_to :miq_provision_request # Legacy provisioning support + virtual_belongs_to :miq_provision_request # Legacy provisioning support virtual_belongs_to :vm virtual_belongs_to :vm_template virtual_column :placement_auto, :type => :boolean diff --git a/app/models/miq_provision/automate.rb b/app/models/miq_provision/automate.rb index 4665e042a46..4276353ccfc 100644 --- a/app/models/miq_provision/automate.rb +++ b/app/models/miq_provision/automate.rb @@ -65,7 +65,7 @@ def get_network_scope def get_network_details related_vm = vm || source - related_vm_description = (related_vm == vm) ? "VM" : "Template" + related_vm_description = related_vm == vm ? "VM" : "Template" if related_vm.nil? _log.error("No VM or Template Found for Provision Object") @@ -101,19 +101,23 @@ def get_network_details networks = ws.root("networks") - networks.each do |network| - next unless network.kind_of?(Hash) - next unless network[:vc_id] == vc_id - next unless vlan_name.casecmp(network[:vlan]) == 0 - - # Remove passwords - network[:dhcp_servers].each do |dhcp| - domain = dhcp[:domain] - domain.delete(:bind_password) if domain.kind_of?(Hash) - end if network[:dhcp_servers].kind_of?(Array) - - return network - end if networks.kind_of?(Array) + if networks.kind_of?(Array) + networks.each do |network| + next unless network.kind_of?(Hash) + next unless network[:vc_id] == vc_id + next unless vlan_name.casecmp(network[:vlan]) == 0 + + # Remove passwords + if network[:dhcp_servers].kind_of?(Array) + network[:dhcp_servers].each do |dhcp| + domain = dhcp[:domain] + domain.delete(:bind_password) if domain.kind_of?(Hash) + end + end + + return network + end + end _log.warn("- No Network matched in Automate Results: #{ws.to_expanded_xml}") nil @@ -122,11 +126,13 @@ def get_network_details def get_domain return options[:linux_domain_name] unless options[:linux_domain_name].nil? 
return options[:sysprep_domain_name].first if options[:sysprep_domain_name].kind_of?(Array) + nil end def automate_attributes(message, objects = [get_user]) MiqAeEngine.set_automation_attributes_from_objects( - objects, 'request' => 'UI_PROVISION_INFO', 'message' => message) + objects, 'request' => 'UI_PROVISION_INFO', 'message' => message + ) end end diff --git a/app/models/miq_provision/custom_attributes.rb b/app/models/miq_provision/custom_attributes.rb index d6c61696477..427dd9c27fa 100644 --- a/app/models/miq_provision/custom_attributes.rb +++ b/app/models/miq_provision/custom_attributes.rb @@ -18,12 +18,12 @@ def set_ems_custom_attributes(vm, custom_attrs) return if custom_attrs.blank? custom_attrs.each do |k, v| - begin - _log.info("Setting EMS Custom Attribute key=#{k.to_s.inspect}, value=#{v.to_s.inspect}") - vm.set_custom_field(k.to_s, v.to_s) - rescue => err - _log.warn("Failed to set EMS Custom Attribute <#{k}> to <#{v}>. Reason:<#{err}>") - end + + _log.info("Setting EMS Custom Attribute key=#{k.to_s.inspect}, value=#{v.to_s.inspect}") + vm.set_custom_field(k.to_s, v.to_s) + rescue => err + _log.warn("Failed to set EMS Custom Attribute <#{k}> to <#{v}>. Reason:<#{err}>") + end end end diff --git a/app/models/miq_provision/helper.rb b/app/models/miq_provision/helper.rb index a9d7323e36e..7166a8a9ebc 100644 --- a/app/models/miq_provision/helper.rb +++ b/app/models/miq_provision/helper.rb @@ -1,6 +1,6 @@ module MiqProvision::Helper def hostname_cleanup(name) - hostname_length = (source.platform == 'linux') ? 63 : 15 + hostname_length = source.platform == 'linux' ? 63 : 15 name.strip.gsub(/ +|_+/, "-")[0, hostname_length] end end diff --git a/app/models/miq_provision/naming.rb b/app/models/miq_provision/naming.rb index f51bad3b5a7..73b09387a8a 100644 --- a/app/models/miq_provision/naming.rb +++ b/app/models/miq_provision/naming.rb @@ -47,8 +47,10 @@ def get_vm_full_name(unresolved_vm_name, prov_obj, determine_index) def check_vm_name_uniqueness(fullname, prov_obj) return nil if prov_obj.vm_template.nil? + ems = prov_obj.vm_template.ext_management_system return nil if ems.nil? + VmOrTemplate.find_by("ems_id = ? and lower(name) = ?", ems.id, fullname.downcase) end end diff --git a/app/models/miq_provision/options_helper.rb b/app/models/miq_provision/options_helper.rb index dead030e9e0..1dac1d3b2fb 100644 --- a/app/models/miq_provision/options_helper.rb +++ b/app/models/miq_provision/options_helper.rb @@ -39,25 +39,25 @@ def get_source unless ems.supports?(:provisioning) raise MiqException::MiqProvisionError, _("%{class_name} [%{name}] is attached to <%{ems_class_name}: %{ems_name}> that does not support Provisioning") % - {:class_name => source.class.name, - :name => source.name, - :ems_class_name => ems.class.name, - :ems_name => ems.name} + {:class_name => source.class.name, + :name => source.name, + :ems_class_name => ems.class.name, + :ems_name => ems.name} end if ems.missing_credentials? raise MiqException::MiqProvisionError, _("%{class_name} [%{name}] is attached to <%{ems_class_name}: %{ems_name}> with missing credentials") % - {:class_name => source.class.name, - :name => source.name, - :ems_class_name => ems.class.name, - :ems_name => ems.name} + {:class_name => source.class.name, + :name => source.name, + :ems_class_name => ems.class.name, + :ems_name => ems.name} end source end def get_hostname(dest_vm_name) - name_key = (source.platform == 'windows') ? :sysprep_computer_name : :linux_host_name - computer_name = (get_option(:number_of_vms) > 1) ? 
nil : get_option(name_key).to_s.strip + name_key = source.platform == 'windows' ? :sysprep_computer_name : :linux_host_name + computer_name = get_option(:number_of_vms) > 1 ? nil : get_option(name_key).to_s.strip computer_name = dest_vm_name if computer_name.blank? hostname_cleanup(computer_name) end @@ -66,8 +66,10 @@ def set_static_ip_address(pass = nil) pass ||= get_option(:pass).to_i pass -= 1 return if pass <= 0 + ip_address = get_option(:ip_addr) return unless ip_address.to_s.ipv4? + ip_seg = ip_address.split('.') ip_seg[-1] = ip_seg[-1].to_i + pass options[:ip_addr] = ip_seg.join('.') diff --git a/app/models/miq_provision/pxe.rb b/app/models/miq_provision/pxe.rb index 327c3baed26..d43ee05bee6 100644 --- a/app/models/miq_provision/pxe.rb +++ b/app/models/miq_provision/pxe.rb @@ -40,14 +40,14 @@ def cidr Integer(32 - Math.log2((IPAddr.new(subnet_mask.to_s, Socket::AF_INET).to_i ^ 0xffffffff) + 1)) rescue ArgumentError => err _log.warn("Cannot convert subnet #{subnet_mask.inspect} to CIDR because #{err.message}") - return nil + nil end def prepare_customization_template_substitution_options(mac_address = nil) substitution_options = nil if customization_template substitution_options = options.dup - substitution_options[:miq_provision_id] = id + substitution_options[:miq_provision_id] = id substitution_options[:post_install_callback_url] = post_install_callback_url substitution_options[:cidr] = cidr substitution_options[:mac_address] = mac_address if mac_address diff --git a/app/models/miq_provision/service.rb b/app/models/miq_provision/service.rb index e7dd7f98d62..0764be14488 100644 --- a/app/models/miq_provision/service.rb +++ b/app/models/miq_provision/service.rb @@ -10,8 +10,8 @@ def get_service_and_service_resource svc_guid = get_option(:service_guid) sr_id = get_option(:service_resource_id) - svc = ::Service.find_by(:guid => svc_guid) unless svc_guid.blank? - sr = ServiceResource.find_by(:id => sr_id) unless sr_id.blank? + svc = ::Service.find_by(:guid => svc_guid) if svc_guid.present? + sr = ServiceResource.find_by(:id => sr_id) if sr_id.present? [svc, sr] end diff --git a/app/models/miq_provision/tagging.rb b/app/models/miq_provision/tagging.rb index cb4d4ca0140..4dba07d1023 100644 --- a/app/models/miq_provision/tagging.rb +++ b/app/models/miq_provision/tagging.rb @@ -8,6 +8,7 @@ def allowed_tags_by_category(category_name) user_tags = get_user_managed_filters category = Classification.lookup_by_name(category_name) raise MiqException::MiqProvisionError, "unknown category, '#{category_name}'" if category.nil? + category.entries.each_with_object({}) do |entry, h| if user_tags.blank? 
|| user_tags.include?(entry.to_tag) h[entry.name] = entry.description diff --git a/app/models/miq_provision_configured_system_request.rb b/app/models/miq_provision_configured_system_request.rb index 3170a74c50f..2218f6d1343 100644 --- a/app/models/miq_provision_configured_system_request.rb +++ b/app/models/miq_provision_configured_system_request.rb @@ -2,8 +2,8 @@ class MiqProvisionConfiguredSystemRequest < MiqRequest TASK_DESCRIPTION = N_('Configured System Provisioning') SOURCE_CLASS_NAME = 'ConfiguredSystem' - validates_inclusion_of :request_state, :in => %w(pending finished) + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" - validate :must_have_user + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} + validate :must_have_user def host_name options[:src_configured_system_ids].length == 1 ? src_configured_systems.pluck(:hostname).first : "Multiple Hosts" diff --git a/app/models/miq_provision_orch_workflow.rb b/app/models/miq_provision_orch_workflow.rb index 918ae1e054a..6e141ce0e67 100644 --- a/app/models/miq_provision_orch_workflow.rb +++ b/app/models/miq_provision_orch_workflow.rb @@ -7,9 +7,9 @@ def initialize(values, requester, options = {}) # Check if the caller passed the source VM as part of the initial call if initial_pass == true src_obj_id = get_value(@values[:src_vm_id]) - unless src_obj_id.blank? + if src_obj_id.present? src_obj = OrchestrationTemplate.find_by(:id => src_obj_id) - @values[:src_vm_id] = [src_obj.id, src_obj.name] unless src_obj.blank? + @values[:src_vm_id] = [src_obj.id, src_obj.name] if src_obj.present? end end @@ -19,8 +19,8 @@ def initialize(values, requester, options = {}) @dialogs = get_pre_dialogs if initial_pass && options[:use_pre_dialog] != false if @dialogs.nil? @dialogs = get_dialogs - else - @running_pre_dialog = true if options[:use_pre_dialog] != false + elsif options[:use_pre_dialog] != false + @running_pre_dialog = true end normalize_numeric_fields unless @dialogs.nil? end @@ -75,6 +75,7 @@ def get_source_and_targets(refresh = false) def allowed_ci(ci, relats, filtered_ids = nil) return {} if get_value(@values[:placement_auto]) == true return {} if (sources = resources_for_ui).blank? 
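# -- Illustrative sketch, not part of the patch -----------------------------
# Many hunks in these provisioning workflow/request files rewrite
# `unless x.blank?` as `if x.present?`. The two forms are equivalent; a tiny
# example using only activesupport, with a made-up value:
require "active_support/core_ext/object/blank"

name = "demo"
puts "hello #{name}" unless name.blank? # negated form being replaced
puts "hello #{name}" if name.present?   # form preferred by this patch
# ---------------------------------------------------------------------------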
+ get_ems_metadata_tree(sources) super(ci, relats, sources, filtered_ids) end diff --git a/app/models/miq_provision_request.rb b/app/models/miq_provision_request.rb index a5835fd9b49..42ad39b97de 100644 --- a/app/models/miq_provision_request.rb +++ b/app/models/miq_provision_request.rb @@ -8,13 +8,13 @@ class MiqProvisionRequest < MiqRequest TASK_DESCRIPTION = N_('VM Provisioning') SOURCE_CLASS_NAME = 'Vm' - ACTIVE_STATES = %w(migrated) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[migrated] + base_class::ACTIVE_STATES - validates_inclusion_of :request_state, - :in => %w(pending provisioned finished) + ACTIVE_STATES, - :message => "should be pending, #{ACTIVE_STATES.join(", ")}, provisioned, or finished" + validates :request_state, + :inclusion => {:in => %w[pending provisioned finished] + ACTIVE_STATES, + :message => "should be pending, #{ACTIVE_STATES.join(", ")}, provisioned, or finished"} validates :source, :presence => true - validate :must_have_user + validate :must_have_user default_value_for(:source_id) { |r| r.get_option(:src_vm_id) || r.get_option(:source_id) } attribute :source_type, :default => "VmOrTemplate" @@ -46,6 +46,7 @@ def self.source_vm_or_template!(source_id) if vm_or_template.ext_management_system.nil? raise MiqException::MiqProvisionError, "Source Template/Vm with id [#{source_id}] has no EMS, unable to provision" end + vm_or_template end @@ -71,6 +72,7 @@ def post_create_request_tasks def update_description_from_tasks return unless requested_task_idx.length == 1 + update(:description => miq_request_tasks.reload.first.description) end @@ -102,7 +104,7 @@ def self.ready?(userid) return false if dept.empty? || env.empty? prov.options[:environment] = "prod" # Set env to prod to get service levels - svc = prov.allowed(:service_level) # Get service levels + svc = prov.allowed(:service_level) # Get service levels return false if env.include?("prod") && svc.empty? # Make sure we have at least one true @@ -134,17 +136,20 @@ def template_valid_error_message end def validate_template - return {:valid => false, - :message => "Unable to find VM with Id [#{source_id}]" - } if source.nil? + if source.nil? + return {:valid => false, + :message => "Unable to find VM with Id [#{source_id}]"} + end - return {:valid => false, - :message => "VM/Template <#{source.name}> with Id <#{source.id}> is archived and cannot be used with provisioning." - } if source.archived? + if source.archived? + return {:valid => false, + :message => "VM/Template <#{source.name}> with Id <#{source.id}> is archived and cannot be used with provisioning."} + end - return {:valid => false, - :message => "VM/Template <#{source.name}> with Id <#{source.id}> is orphaned and cannot be used with provisioning." - } if source.orphaned? + if source.orphaned? + return {:valid => false, + :message => "VM/Template <#{source.name}> with Id <#{source.id}> is orphaned and cannot be used with provisioning."} + end {:valid => true, :message => nil} end diff --git a/app/models/miq_provision_request_template.rb b/app/models/miq_provision_request_template.rb index d2b53593ad6..bce277eb50b 100644 --- a/app/models/miq_provision_request_template.rb +++ b/app/models/miq_provision_request_template.rb @@ -66,7 +66,7 @@ def number_of_vms(service_task, parent_svc, template_service_resource) parent_task = get_parent_task(service_task) root_svc = get_root_svc(parent_svc) value = number_of_vms_from_dialog(root_svc, parent_task) if root_svc && parent_task - vm_count = value.to_i unless value.blank? + vm_count = value.to_i if value.present? 
resource = template_service_resource.resource vm_count ||= resource.get_option(:number_of_vms) if resource.respond_to?(:get_option) end @@ -75,7 +75,8 @@ def number_of_vms(service_task, parent_svc, template_service_resource) def get_root_svc(parent_svc) return nil unless parent_svc - parent_svc.parent ? parent_svc.parent : parent_svc + + parent_svc.parent || parent_svc end def get_parent_task(service_task) @@ -84,6 +85,7 @@ def get_parent_task(service_task) def number_of_vms_from_dialog(root_svc, parent_task) return nil unless root_svc.options[:dialog] + value = root_svc.options[:dialog]["dialog_option_0_number_of_vms"] if parent_task.service_resource index = parent_task.service_resource.provision_index diff --git a/app/models/miq_provision_task.rb b/app/models/miq_provision_task.rb index 6638288ff61..96b578c3034 100644 --- a/app/models/miq_provision_task.rb +++ b/app/models/miq_provision_task.rb @@ -2,7 +2,7 @@ class MiqProvisionTask < MiqRequestTask include MiqProvisionQuotaMixin include Tagging - validates_inclusion_of :state, :in => %w(pending queued active provisioned finished), :message => "should be pending, queued, active, provisioned or finished" + validates :state, :inclusion => {:in => %w[pending queued active provisioned finished], :message => "should be pending, queued, active, provisioned or finished"} AUTOMATE_DRIVES = true diff --git a/app/models/miq_provision_virt_workflow.rb b/app/models/miq_provision_virt_workflow.rb index 7d8abb9c12c..e77eb287b8b 100644 --- a/app/models/miq_provision_virt_workflow.rb +++ b/app/models/miq_provision_virt_workflow.rb @@ -13,9 +13,9 @@ def initialize(values, requester, options = {}) # Check if the caller passed the source VM as part of the initial call if initial_pass == true src_vm_id = get_value(@values[:src_vm_id]) - unless src_vm_id.blank? + if src_vm_id.present? vm = VmOrTemplate.find_by(:id => src_vm_id) - @values[:src_vm_id] = [vm.id, vm.name] unless vm.blank? + @values[:src_vm_id] = [vm.id, vm.name] if vm.present? end end @@ -25,8 +25,8 @@ def initialize(values, requester, options = {}) @dialogs = get_pre_dialogs if initial_pass && options[:use_pre_dialog] != false if @dialogs.nil? @dialogs = get_dialogs - else - @running_pre_dialog = true if options[:use_pre_dialog] != false + elsif options[:use_pre_dialog] != false + @running_pre_dialog = true end normalize_numeric_fields unless @dialogs.nil? end @@ -57,7 +57,7 @@ def make_request(request, values, requester = nil, auto_approve = false) end if request - request = request.kind_of?(MiqRequest) ? request : MiqRequest.find(request) + request = MiqRequest.find(request) unless request.kind_of?(MiqRequest) request.src_vm_id = get_value(values[:src_vm_id]) end @@ -69,7 +69,7 @@ def refresh_field_values(values) new_src = get_value(values[:src_vm_id]) vm_changed = @last_vm_id != new_src - # Note: This makes a copy of the values hash so we have a copy of the object to modify + # NOTE: This makes a copy of the values hash so we have a copy of the object to modify @values = values get_source_and_targets(true) @@ -131,6 +131,7 @@ def set_on_vm_id_changed if vm.ext_management_system.nil? raise _("Source VM [%{name}] does not belong to a Provider") % {:name => vm.name} end + set_or_default_hardware_field_values(vm) # Record the nic/lan setting on the template for validation checks at provision time. 
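# -- Illustrative sketch, not part of the patch -----------------------------
# The hunks above convert `validates_inclusion_of` / `validates_presence_of`
# to the hash form of `validates`. A standalone example using only
# ActiveModel; the ExampleTask class and its states are invented:
require "active_model"

class ExampleTask
  include ActiveModel::Model

  attr_accessor :state

  # hash form, matching the style adopted throughout this patch
  validates :state, :inclusion => {:in => %w[pending queued finished]}
end

ExampleTask.new(:state => "queued").valid? # => true
ExampleTask.new(:state => "bogus").valid?  # => false
# ---------------------------------------------------------------------------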
@@ -183,6 +184,7 @@ def update_field_read_only(options = {}) exclude_list = [:sysprep_spec_override, :sysprep_custom_spec, :sysprep_enabled, :sysprep_upload_file, :sysprep_upload_text] fields(:customize) { |fn, f, _dn, _d| f[:read_only] = read_only unless exclude_list.include?(fn) } return unless options[:read_only_fields] + fields(:hardware) { |fn, f, _dn, _d| f[:read_only] = true if options[:read_only_fields].include?(fn) } end @@ -266,7 +268,7 @@ def filter_by_tags(target, options) else f.each do |fd| selected_tag.each do |st| - filters << fd if fd[:tag] =~ st + filters << fd if fd[:tag]&.match?(st) end end end @@ -292,7 +294,7 @@ def selected_tags_by_cat_and_name allowed_tags_and_pre_tags.each_with_object({}) do |cat, hsh| children = cat[:children].each_with_object({}) { |value, result| result[value.first] = value.last } selected_ids = (children.keys & tag_ids) - hsh[cat[:name]] = selected_ids.collect { |t_id| children[t_id][:name] } unless selected_ids.blank? + hsh[cat[:name]] = selected_ids.collect { |t_id| children[t_id][:name] } if selected_ids.present? end end @@ -319,23 +321,23 @@ def allowed_templates(options = {}) templates = MiqTemplate.non_deprecated.in_my_region condition = allowed_template_condition - unless options[:tag_filters].blank? + if options[:tag_filters].present? tag_filters = options[:tag_filters].collect(&:to_s) selected_tags = (Array.wrap(@values[:vm_tags].presence) + Array.wrap(@values[:pre_dialog_vm_tags].presence)).uniq tag_conditions = [] # Collect the filter tags by category - unless selected_tags.blank? + if selected_tags.present? allowed_tags_and_pre_tags.each do |cat| if tag_filters.include?(cat[:name]) children_keys = cat[:children].each_with_object({}) { |t, h| h[t.first] = t.last } conditions = (children_keys.keys & selected_tags).collect { |t_id| "#{cat[:name]}/#{children_keys[t_id][:name]}" } end - tag_conditions << conditions unless conditions.blank? + tag_conditions << conditions if conditions.present? end end - unless tag_conditions.blank? + if tag_conditions.present? _log.info("Filtering VM templates with the following tag_filters: <#{tag_conditions.inspect}>") templates = templates.where(condition).find_tags_by_grouping(tag_conditions, :ns => "/managed") end @@ -367,7 +369,7 @@ def allowed_template_condition end def source_vm_rbac_filter(vms, condition = nil, *extra_cols) - opts = { :user => @requester, :conditions => condition } + opts = {:user => @requester, :conditions => condition} opts[:extra_cols] = extra_cols unless extra_cols.empty? MiqSearch.filtered(get_value(@values[:vm_filter]).to_i, VmOrTemplate, vms, opts) end @@ -379,18 +381,21 @@ def allowed_provision_types(_options = {}) def allowed_snapshots(_options = {}) result = {} return result if (vm = get_source_vm).blank? + vm.snapshots.each { |ss| result[ss.id.to_s] = ss.current? ? "#{ss.name} (Active)" : ss.name } - result["__CURRENT__"] = _(" Use the snapshot that is active at time of provisioning") unless result.blank? + result["__CURRENT__"] = _(" Use the snapshot that is active at time of provisioning") if result.present? result end def allowed_tags(options = {}) return {} if (source = load_ar_obj(get_source_vm)).blank? + super(options.merge(:region_number => source.region_number)) end def allowed_pxe_servers(_options = {}) return {} if (source = load_ar_obj(get_source_vm)).blank? 
+ PxeServer.in_region(source.region_number).each_with_object({}) { |p, h| h[p.id] = p.name } end @@ -418,7 +423,6 @@ def get_source_and_targets(refresh = false) result end - def resources_for_ui auto_placement_enabled? ? {} : super end @@ -454,7 +458,7 @@ def allowed_customization(_options = {}) result = {"disabled" => ""} case src[:vm].platform - when 'windows' then result.merge!("fields" => "Specification", "file" => "Sysprep Answer File") + when 'windows' then result.merge!("fields" => "Specification", "file" => "Sysprep Answer File") when 'linux' then result["fields"] = "Specification" end @@ -466,11 +470,12 @@ def allowed_number_of_vms(options = {}) min, max = options[:min].to_i, options[:max].to_i min = 1 if min < 1 max = min if max < 1 - (min..max).each_with_object({}) { |i, h| h[i] = i.to_s } + (min..max).index_with { |i| i.to_s } end def load_test_ous_data return @ldap_ous unless @ldap_ous.nil? + ous = YAML.load_file("ous.yaml") @ldap_ous = {} ous.each { |ou| @ldap_ous[ou[0].dup] = ou[1].dup } @@ -521,6 +526,7 @@ def build_ou_path_name(ou) paths.each do |path| parts = path.split('=') next if parts.first == 'DC' + path_name = path_name.blank? ? parts.last : File.join(path_name, parts.last) ou[1].replace(path_name) end @@ -531,14 +537,12 @@ def create_ou_tree(ou, h, path) type, pathname = idx.split('=') if type == "DC" create_ou_tree(ou, h, path) + elsif path.blank? + entry = (h[pathname] ||= {}) + entry[:path] = ou[0] + entry[:ou] = ou else - if path.blank? - entry = (h[pathname] ||= {}) - entry[:path] = ou[0] - entry[:ou] = ou - else - create_ou_tree(ou, h[pathname] ||= {}, path) - end + create_ou_tree(ou, h[pathname] ||= {}, path) end end @@ -552,6 +556,7 @@ def allowed_domains(options = {}) # Filter by host platform or is proxy is active next unless options[:platform].nil? || options[:platform].include?(host.platform) next unless options[:active_proxy].nil? || host.is_proxy_active? == options[:active_proxy] + domains[domain] = domain end else @@ -564,6 +569,7 @@ def allowed_domains(options = {}) def update_custom_spec vm = get_source_vm return if vm.nil? + if @customize_option.nil? 
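allowed_number_of_vms now builds its hash with index_with. A sketch assuming ActiveSupport 6.0+ is on the load path (where Enumerable#index_with was introduced); it produces exactly the hash the old each_with_object block did:

require "active_support/core_ext/enumerable"

min, max = 1, 4
old_form = (min..max).each_with_object({}) { |i, h| h[i] = i.to_s }
new_form = (min..max).index_with { |i| i.to_s }

p new_form             # => {1=>"1", 2=>"2", 3=>"3", 4=>"4"}
p new_form == old_form # => true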
@current_spec = get_value(@values[:sysprep_custom_spec]) @customize_option = get_value(@values[:sysprep_enabled]) @@ -608,13 +614,13 @@ def update_custom_spec end # Call platform specific method - send("update_fields_from_spec_#{cs_data[:typ].downcase}", cs_data) + send(:"update_fields_from_spec_#{cs_data[:typ].downcase}", cs_data) # Call generic networking method update_fields_from_spec_networking(cs_data) end - else - @values[:sysprep_upload_text] = nil if @customize_option == 'file' + elsif @customize_option == 'file' + @values[:sysprep_upload_text] = nil end @current_spec = selected_spec @@ -626,17 +632,17 @@ def update_fields_from_spec_windows(cs_data) spec = cs_data[:spec] dialog = @dialogs.fetch_path(:dialogs, :customize) - collect_customization_spec_settings(spec, spec_hash, %w(identity guiUnattended), + collect_customization_spec_settings(spec, spec_hash, %w[identity guiUnattended], [:sysprep_timezone, 'timeZone', :sysprep_auto_logon, 'autoLogon', :sysprep_auto_logon_count, 'autoLogonCount']) - collect_customization_spec_settings(spec, spec_hash, %w(identity identification), + collect_customization_spec_settings(spec, spec_hash, %w[identity identification], [:sysprep_domain_name, 'joinDomain', :sysprep_domain_admin, 'domainAdmin', :sysprep_workgroup_name, 'joinWorkgroup']) # PATH:[identity][userData][computerName][name] (VimString) = "VI25Test" - collect_customization_spec_settings(spec, spec_hash, %w(identity userData), + collect_customization_spec_settings(spec, spec_hash, %w[identity userData], [:sysprep_organization, 'orgName', :sysprep_full_name, 'fullName', :sysprep_product_id, 'productId']) - collect_customization_spec_settings(spec, spec_hash, %w(identity licenseFilePrintData), + collect_customization_spec_settings(spec, spec_hash, %w[identity licenseFilePrintData], [:sysprep_server_license_mode, 'autoMode', :sysprep_per_server_max_connections, 'autoUsers']) collect_customization_spec_settings(spec, spec_hash, ['options'], @@ -671,7 +677,7 @@ def update_fields_from_spec_networking(cs_data) spec_hash[:subnet_mask] = adapter['subnetMask'].to_s spec_hash[:ip_addr] = adapter.fetch_path('ip', 'ipAddress').to_s # Combine the WINS server fields into 1 comma separated field list - spec_hash[:wins_servers] = [adapter['primaryWINS'], adapter['secondaryWINS']].collect { |s| s unless s.blank? 
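The send calls above now take a dynamic symbol (:"update_fields_from_spec_#{...}") instead of a string. Both dispatch the same method; the symbol literal just skips the string-to-symbol conversion send would otherwise do. Illustrative sketch with a stand-in class:

class SpecUpdater
  def update_fields_from_spec_windows(cs_data)
    "windows spec: #{cs_data[:spec]}"
  end
end

cs_data = {:typ => "Windows", :spec => "VI25Test"}
updater = SpecUpdater.new

puts updater.send("update_fields_from_spec_#{cs_data[:typ].downcase}", cs_data)  # string name
puts updater.send(:"update_fields_from_spec_#{cs_data[:typ].downcase}", cs_data) # dynamic symbol, same call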
}.compact.join(', ') + spec_hash[:wins_servers] = [adapter['primaryWINS'], adapter['secondaryWINS']].collect { |s| s.presence }.compact.join(', ') end # In Linux, DNS server settings are global, not per adapter @@ -685,6 +691,7 @@ def update_fields_from_spec_networking(cs_data) def collect_customization_spec_settings(spec, spec_hash, spec_path, fields) return unless (section = spec.fetch_path(spec_path)) + fields.each_slice(2) { |dlg_field, prop| spec_hash[dlg_field] = section[prop] } end @@ -693,7 +700,7 @@ def set_customization_field_from_spec(data_value, dlg_field, dialog) data_type = field_hash[:data_type] cust_method = "custom_#{dlg_field}" - if self.respond_to?(cust_method) + if respond_to?(cust_method) send(cust_method, field_hash, data_value) else value = case data_type @@ -732,7 +739,7 @@ def self.from_ws(*args) prov_options = OpenStruct.new( :values => args[6], :ems_custom_attributes => args[7], - :miq_custom_attributes => args[8], + :miq_custom_attributes => args[8] ) prov_args = args[0, 6] prov_args << prov_options @@ -746,6 +753,7 @@ def self.from_ws_ver_1_0(version, user, src_name, target_name, auto_approve, tag src_name_down = src_name.downcase src = p.allowed_templates.detect { |v| v.name.downcase == src_name_down } raise _("Source template [%{name}] was not found") % {:name => src_name} if src.nil? + p = class_for_source(src.id).new(values, user, :use_pre_dialog => false) # Populate required fields @@ -775,7 +783,7 @@ def ws_template_fields(values, fields, ws_values) data = parse_ws_string(fields) ws_values = parse_ws_string(ws_values) placement_cluster_name = ws_values[:cluster] - unless placement_cluster_name.blank? + if placement_cluster_name.present? data[:placement_cluster_name] = placement_cluster_name.to_s.downcase _log.info("placement_cluster_name:<#{data[:placement_cluster_name].inspect}>") data[:data_centers] = EmsCluster.where("lower(name) = ?", data[:placement_cluster_name]).collect(&:v_parent_datacenter) @@ -798,11 +806,13 @@ def ws_template_fields(values, fields, ws_values) if srcs.length > 1 raise _("Multiple source template were found from input data:<%{data}>") % {:data => data.inspect} end + src = srcs.first end if src.nil? raise _("No source template was found from input data:<%{data}>") % {:data => data.inspect} end + _log.info("VM Found: <#{src.name}> <#{src.guid}> <#{src.uid_ems}> Datacenter:<#{src.datacenter_name}>") src end @@ -864,6 +874,7 @@ def ws_hardware_fields(values, _fields, data) ws_hardware_disk_fields(values, data) ws_hardware_network_fields(values, data) return if (dlg_fields = get_ws_dialog_fields(dialog_name = :hardware)).nil? + data.keys.each { |key| set_ws_field_value(values, key, data, dialog_name, dlg_fields) if dlg_fields.key?(key) } end @@ -871,11 +882,13 @@ def ws_hardware_network_fields(values, data) parse_ws_hardware_fields(:networks, /^network(\d{1,2})$/, values, data) { |n, v, _i| n[:network] = v } # Check and remove invalid networks specifications - values[:networks].delete_if do |d| - result = d[:network].blank? - _log.warn("Skipping network due to blank name: <#{d.inspect}>") if result == true - result - end unless values[:networks].blank? + if values[:networks].present? + values[:networks].delete_if do |d| + result = d[:network].blank? 
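The WINS-server line now relies on Object#presence, which returns the receiver when it is present? and nil otherwise, so compact drops blank entries just as the old `s unless s.blank?` block did. Sketch with made-up addresses:

require "active_support/core_ext/object/blank"

primary_wins   = "192.168.1.10"
secondary_wins = ""

wins_servers = [primary_wins, secondary_wins].collect { |s| s.presence }.compact.join(", ")
p wins_servers # => "192.168.1.10"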
+ _log.warn("Skipping network due to blank name: <#{d.inspect}>") if result == true + result + end + end end def ws_hardware_scsi_controller_fields(values, data) @@ -893,17 +906,20 @@ def ws_hardware_disk_fields(values, data) end # Check and remove invalid disk specifications - values[:disk_scsi].delete_if do |d| - result = d[:sizeInMB].to_i == 0 - _log.warn("Skipping disk due to invalid size: <#{d.inspect}>") if result == true - result - end unless values[:disk_scsi].blank? + if values[:disk_scsi].present? + values[:disk_scsi].delete_if do |d| + result = d[:sizeInMB].to_i == 0 + _log.warn("Skipping disk due to invalid size: <#{d.inspect}>") if result == true + result + end + end end def parse_ws_hardware_fields(hw_key, regex_filter, values, data) data.keys.each do |k| key_name = k.to_s.split('.').first next unless key_name =~ regex_filter + item_id = Regexp.last_match(1).to_i v = data.delete(k) _log.info("processing key with value <#{v.inspect}>") @@ -926,6 +942,7 @@ def parse_ws_hardware_fields(hw_key, regex_filter, values, data) def ws_network_fields(values, _fields, data) return if (dlg_fields = get_ws_dialog_fields(dialog_name = :network)).nil? + data.keys.each { |key| set_ws_field_value(values, key, data, dialog_name, dlg_fields) if dlg_fields.key?(key) } end @@ -953,6 +970,7 @@ def self.from_ws_ver_1_x(version, user, template_fields, vm_fields, requester, t p = new(values = {}, user, init_options) src = p.ws_template_fields(values, template_fields, options.values) raise _("Source template [%{name}] was not found") % {:name => src_name} if src.nil? + # Allow new workflow class to determine dialog name instead of using the stored value from the first call. values.delete(:miq_request_dialog_name) values[:placement_auto] = [true, 1] @@ -965,7 +983,7 @@ def self.from_ws_ver_1_x(version, user, template_fields, vm_fields, requester, t p.ws_vm_fields(values, vm_fields) p.ws_requester_fields(values, requester) - values[:vm_tags] = p.ws_tags(tags) # Tags are passed as category=value|cat2=value2... Example: cc=001|environment=test + values[:vm_tags] = p.ws_tags(tags) # Tags are passed as category=value|cat2=value2... Example: cc=001|environment=test values[:ws_values] = p.ws_values(options.values) values[:ws_ems_custom_attributes] = p.ws_values(options.ems_custom_attributes, :parse_ws_string, :modify_key_name => false) values[:ws_miq_custom_attributes] = p.ws_values(options.miq_custom_attributes, :parse_ws_string, :modify_key_name => false) @@ -1113,6 +1131,7 @@ def all_provider_hosts(src) def selected_host(src) raise _("Unable to find Host with Id: [%{id}]") % {:id => src[:host_id]} if src[:host].nil? + [load_ar_obj(src[:host])] end diff --git a/app/models/miq_provision_virt_workflow/dialog_field_validation.rb b/app/models/miq_provision_virt_workflow/dialog_field_validation.rb index 910fb82686a..010599d9e52 100644 --- a/app/models/miq_provision_virt_workflow/dialog_field_validation.rb +++ b/app/models/miq_provision_virt_workflow/dialog_field_validation.rb @@ -27,34 +27,39 @@ def validate_memory_reservation(_field, values, dlg, fld, _value) def validate_pxe_image_id(_field, _values, dlg, fld, _value) return nil unless supports_pxe? return nil unless get_pxe_image.nil? + _("%{description} is required") % {:description => required_description(dlg, fld)} end def validate_pxe_server_id(_field, _values, dlg, fld, _value) return nil unless supports_pxe? return nil unless get_pxe_server.nil? 
+ _("%{description} is required") % {:description => required_description(dlg, fld)} end def validate_placement(field, values, dlg, fld, value) # check the :placement_auto flag, then make sure the field is not blank - return nil unless value.blank? + return nil if value.present? return nil if get_value(values[:placement_auto]) == true - return nil unless get_value(values[field]).blank? + return nil if get_value(values[field]).present? + _("%{description} is required") % {:description => required_description(dlg, fld)} end def validate_sysprep_upload(field, values, dlg, fld, value) - return nil unless value.blank? + return nil if value.present? return nil unless get_value(values[:sysprep_enabled]) == 'file' - return nil unless get_value(values[field]).blank? + return nil if get_value(values[field]).present? + _("%{description} is required") % {:description => required_description(dlg, fld)} end def validate_sysprep_field(field, values, dlg, fld, value) - return nil unless value.blank? + return nil if value.present? return nil unless get_value(values[:sysprep_enabled]) == 'fields' - return nil unless get_value(values[field]).blank? + return nil if get_value(values[field]).present? + _("%{description} is required") % {:description => required_description(dlg, fld)} end end diff --git a/app/models/miq_provision_workflow.rb b/app/models/miq_provision_workflow.rb index 10e8d486241..897bdafc61f 100644 --- a/app/models/miq_provision_workflow.rb +++ b/app/models/miq_provision_workflow.rb @@ -34,6 +34,7 @@ def self.class_for_source(source_or_id) else VmOrTemplate.find_by(:id => source_or_id) end return nil if source.nil? + source.class.manager_class.provision_workflow_class end diff --git a/app/models/miq_queue.rb b/app/models/miq_queue.rb index 5c0e8fa1aeb..b33931c55ac 100644 --- a/app/models/miq_queue.rb +++ b/app/models/miq_queue.rb @@ -114,7 +114,7 @@ def self.lower_priority?(p1, p2) STATE_ERROR = 'error'.freeze STATE_TIMEOUT = 'timeout'.freeze STATE_EXPIRED = "expired".freeze - validates_inclusion_of :state, :in => [STATE_READY, STATE_DEQUEUE, STATE_WARN, STATE_ERROR, STATE_TIMEOUT, STATE_EXPIRED] + validates :state, :inclusion => {:in => [STATE_READY, STATE_DEQUEUE, STATE_WARN, STATE_ERROR, STATE_TIMEOUT, STATE_EXPIRED]} FINISHED_STATES = [STATE_WARN, STATE_ERROR, STATE_TIMEOUT, STATE_EXPIRED].freeze STATUS_OK = 'ok'.freeze @@ -137,7 +137,7 @@ def self.put(options) :zone => Zone.determine_queue_zone(options), :state => STATE_READY, :handler_type => nil, - :handler_id => nil, + :handler_id => nil ) if Zone.maintenance?(options[:zone]) @@ -261,7 +261,7 @@ def self.submit_job(options) # # TODO: Review if other services in submit_job, such as event/smart proxy should follow this pattern. def self.queue_name_for_priority_service(service, priority) - (priority.nil? || MiqQueue.lower_priority?(priority, HIGH_PRIORITY)) ? service.to_s : "generic" + priority.nil? || MiqQueue.lower_priority?(priority, HIGH_PRIORITY) ? 
service.to_s : "generic" end def self.where_queue_name(is_array) @@ -302,17 +302,17 @@ def self.get(options = {}) result = nil msgs.each do |msg| - begin - _log.info("#{MiqQueue.format_short_log_msg(msg)} previously timed out, retrying...") if msg.state == STATE_TIMEOUT - handler = MiqWorker.my_worker || MiqServer.my_server - msg.update!(:state => STATE_DEQUEUE, :handler => handler) - _log.info("#{MiqQueue.format_full_log_msg(msg)}, Dequeued in: [#{Time.now.utc - msg.created_on}] seconds") - return msg - rescue ActiveRecord::StaleObjectError - result = :stale - rescue => err - raise _("%{log_message} \"%{error}\" attempting to get next message") % {:log_message => _log.prefix, :error => err} - end + + _log.info("#{MiqQueue.format_short_log_msg(msg)} previously timed out, retrying...") if msg.state == STATE_TIMEOUT + handler = MiqWorker.my_worker || MiqServer.my_server + msg.update!(:state => STATE_DEQUEUE, :handler => handler) + _log.info("#{MiqQueue.format_full_log_msg(msg)}, Dequeued in: [#{Time.now.utc - msg.created_on}] seconds") + return msg + rescue ActiveRecord::StaleObjectError + result = :stale + rescue => err + raise _("%{log_message} \"%{error}\" attempting to get next message") % {:log_message => _log.prefix, :error => err} + end _log.debug("All #{prefetch_max_per_worker} messages stale, returning...") if result == :stale result @@ -330,7 +330,7 @@ def unget(options = {}) _log.info("#{MiqQueue.format_full_log_msg(self)}, Requeued") end - # TODO (juliancheal) This is a hack. Brakeman was giving us an SQL injection + # TODO: (juliancheal) This is a hack. Brakeman was giving us an SQL injection # warning when we concatonated the queue_name string onto the query. # Creating two seperate queries like this, resolves the Brakeman issue, but # isn't ideal. This will need to be rewritten using Arel queries at some point. @@ -460,11 +460,11 @@ def deliver(requester = nil, &block) if instance_id begin - if (class_name == requester.class.name) && requester.respond_to?(:id) && (instance_id == requester.id) - obj = requester - else - obj = obj.find(instance_id) - end + obj = if (class_name == requester.class.name) && requester.respond_to?(:id) && (instance_id == requester.id) + requester + else + obj.find(instance_id) + end rescue ActiveRecord::RecordNotFound => err _log.warn("#{MiqQueue.format_short_log_msg(self)} will not be delivered because #{err.message}") return STATUS_WARN, nil, nil @@ -526,7 +526,7 @@ def delivered_in_error(msg = nil) def delivered(state, msg, result) self.state = state _log.info("#{MiqQueue.format_short_log_msg(self)}, State: [#{state}], Delivered in [#{Time.now - delivered_on}] seconds") - m_callback(msg, result) unless miq_callback.blank? + m_callback(msg, result) if miq_callback.present? rescue => err _log.error("#{MiqQueue.format_short_log_msg(self)}, #{err.message}") ensure @@ -551,7 +551,7 @@ def m_callback(msg, result) miq_callback[:args] ||= [] log_args = result.inspect - log_args = "#{log_args[0, 500]}..." if log_args.length > 500 # Trim long results + log_args = "#{log_args[0, 500]}..." if log_args.length > 500 # Trim long results log_args = miq_callback[:args] + [state, msg, log_args] _log.info("#{MiqQueue.format_short_log_msg(self)}, Invoking Callback with args: #{log_args.inspect}") unless obj.nil? @@ -604,23 +604,23 @@ def self.format_full_log_msg(msg) data = msg.data.nil? ? 
"" : "#{msg.data.length} bytes" args = ManageIQ::Password.sanitize_string(msg.args.inspect) - "Message id: [#{msg.id}], " \ - "Zone: [#{msg.zone}], " \ - "Role: [#{msg.role}], " \ - "Server: [#{msg.server_guid}], " \ - "MiqTask id: [#{msg.miq_task_id}], " \ - "Handler id: [#{handler}], " \ - "Ident: [#{msg.queue_name}], " \ - "Target id: [#{msg.target_id}], " \ - "Instance id: [#{msg.instance_id}], " \ - "Task id: [#{msg.task_id}], " \ - "Command: [#{msg.class_name}.#{msg.method_name}], " \ - "Timeout: [#{msg.msg_timeout}], " \ - "Priority: [#{msg.priority}], " \ - "State: [#{msg.state}], " \ - "Deliver On: [#{msg.deliver_on}], " \ - "Data: [#{data}], " \ - "Args: #{args}" + "Message id: [#{msg.id}], " \ + "Zone: [#{msg.zone}], " \ + "Role: [#{msg.role}], " \ + "Server: [#{msg.server_guid}], " \ + "MiqTask id: [#{msg.miq_task_id}], " \ + "Handler id: [#{handler}], " \ + "Ident: [#{msg.queue_name}], " \ + "Target id: [#{msg.target_id}], " \ + "Instance id: [#{msg.instance_id}], " \ + "Task id: [#{msg.task_id}], " \ + "Command: [#{msg.class_name}.#{msg.method_name}], " \ + "Timeout: [#{msg.msg_timeout}], " \ + "Priority: [#{msg.priority}], " \ + "State: [#{msg.state}], " \ + "Deliver On: [#{msg.deliver_on}], " \ + "Data: [#{data}], " \ + "Args: #{args}" end def self.format_short_log_msg(msg) @@ -649,7 +649,7 @@ def validate_zone_name end end - cache_with_timeout(:valid_zone_names, 1.minute) { Hash.new } + cache_with_timeout(:valid_zone_names, 1.minute) { {} } def activate_miq_task(args) MiqTask.update_status(miq_task_id, MiqTask::STATE_ACTIVE, MiqTask::STATUS_OK, "Task starting") if miq_task_id @@ -693,13 +693,13 @@ def self.optional_values(options, keys = [:zone]) private_class_method :optional_values private_class_method def self.messaging_options_from_env - return unless ENV["MESSAGING_HOSTNAME"] && ENV["MESSAGING_PORT"] && ENV["MESSAGING_USERNAME"] && ENV["MESSAGING_PASSWORD"] + return unless ENV.fetch("MESSAGING_HOSTNAME", nil) && ENV.fetch("MESSAGING_PORT", nil) && ENV.fetch("MESSAGING_USERNAME", nil) && ENV["MESSAGING_PASSWORD"] options = { - :host => ENV["MESSAGING_HOSTNAME"], + :host => ENV.fetch("MESSAGING_HOSTNAME", nil), :port => ENV["MESSAGING_PORT"].to_i, - :username => ENV["MESSAGING_USERNAME"], - :password => ENV["MESSAGING_PASSWORD"], + :username => ENV.fetch("MESSAGING_USERNAME", nil), + :password => ENV.fetch("MESSAGING_PASSWORD", nil), :protocol => ENV.fetch("MESSAGING_PROTOCOL", "Kafka"), :encoding => ENV.fetch("MESSAGING_ENCODING", "json"), :sasl_mechanism => ENV.fetch("MESSAGING_SASL_MECHANISM", "PLAIN") @@ -713,7 +713,7 @@ def self.optional_values(options, keys = [:zone]) options end - MESSAGING_CONFIG_FILE = Rails.root.join("config", "messaging.yml") + MESSAGING_CONFIG_FILE = Rails.root.join("config/messaging.yml") private_class_method def self.messaging_options_from_file return unless MESSAGING_CONFIG_FILE.file? diff --git a/app/models/miq_queue_worker_base/runner.rb b/app/models/miq_queue_worker_base/runner.rb index acf4d717089..b749c0e70e4 100644 --- a/app/models/miq_queue_worker_base/runner.rb +++ b/app/models/miq_queue_worker_base/runner.rb @@ -143,6 +143,7 @@ def do_work heartbeat msg = get_message break if msg.nil? 
+ deliver_message(msg) end end @@ -184,7 +185,7 @@ def miq_listener_thread def miq_messaging_listener_thread loop do - send("miq_messaging_subscribe_#{@worker.class.miq_messaging_subscribe_mode}") do |msg| + send(:"miq_messaging_subscribe_#{@worker.class.miq_messaging_subscribe_mode}") do |msg| @message_queue << msg end rescue => err diff --git a/app/models/miq_region.rb b/app/models/miq_region.rb index 55b97877f10..c601c3169a8 100644 --- a/app/models/miq_region.rb +++ b/app/models/miq_region.rb @@ -29,7 +29,7 @@ class MiqRegion < ApplicationRecord include SupportsFeatureMixin include Metric::CiMixin - alias_method :all_storages, :storages + alias all_storages storages PERF_ROLLUP_CHILDREN = [:ext_management_systems, :storages] @@ -122,6 +122,7 @@ def self.destroy_region(conn, region, tables = nil) else id_cols = connection.columns(t).select { |c| c.name.ends_with?("_id") } next if id_cols.empty? + conditions = id_cols.collect { |c| "(#{sanitize_sql(region_to_conditions(region, c.name))})" }.join(" OR ") end @@ -244,8 +245,8 @@ def self.api_system_auth_token_for_region(region_id, user) VALID_CAPTURE_ALWAYS_TYPES = [:storage, :host_and_cluster] def perf_capture_always - @perf_capture_always ||= VALID_CAPTURE_ALWAYS_TYPES.each_with_object({}) do |type, h| - h[type] = self.is_tagged_with?("capture_enabled", :ns => "/performance/#{type}") + @perf_capture_always ||= VALID_CAPTURE_ALWAYS_TYPES.index_with do |type| + is_tagged_with?("capture_enabled", :ns => "/performance/#{type}") end.freeze end @@ -269,7 +270,7 @@ def perf_capture_always=(options) # Set @perf_capture_always since we already know all the answers options = options.dup (VALID_CAPTURE_ALWAYS_TYPES - options.keys).each do |type| - options[type] = self.is_tagged_with?("capture_enabled", :ns => "/performance/#{type}") + options[type] = is_tagged_with?("capture_enabled", :ns => "/performance/#{type}") end @perf_capture_always = options.freeze end diff --git a/app/models/miq_region_remote.rb b/app/models/miq_region_remote.rb index 3a537113fff..ca02bb7a8de 100644 --- a/app/models/miq_region_remote.rb +++ b/app/models/miq_region_remote.rb @@ -12,6 +12,7 @@ def self.validate_connection_settings(host, port, username, password, database = log_details = "Host: [#{host}]}, Database: [#{database}], Adapter: [#{adapter}], User: [#{username}]" return [_("Validation failed due to missing port")] if port.blank? + begin with_remote_connection(host, port, username, password, database, adapter) do |c| _log.info("Attempting to connection to: #{log_details}...") @@ -26,7 +27,7 @@ def self.validate_connection_settings(host, port, username, password, database = return nil else return [_("Validation failed because region %{region_name} has already been used") % - {:region_name => region.region}] + {:region_name => region.region}] end else _log.info("Attempting to connection to: #{log_details}...Failed") @@ -35,7 +36,7 @@ def self.validate_connection_settings(host, port, username, password, database = end rescue => err _log.warn("Attempting to connection to: #{log_details}...Failed with error: '#{err.message}") - return [_("Validation failed with error: '%{error_message}") % {:error_message => err.message}] + [_("Validation failed with error: '%{error_message}") % {:error_message => err.message}] end end @@ -83,6 +84,7 @@ def self.with_remote_connection(host, port, username, password, database, adapte # connect to localhost, so don't allow that at all. host = host.to_s.strip raise ArgumentError, _("host cannot be blank") if host.blank? 
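MiqRegion now uses the alias keyword instead of alias_method. Both define an extra name for an existing method; alias_method remains handy when the names are computed at runtime. Illustrative stand-in class, not ManageIQ code:

class RegionLike
  def storages
    ["datastore1", "datastore2"]
  end

  alias all_storages storages            # keyword form used in the patch
  alias_method :every_storage, :storages # method form, accepts dynamic symbols
end

r = RegionLike.new
p r.all_storages == r.every_storage # => true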
+ if [nil, "", "localhost", "localhost.localdomain", "127.0.0.1", "0.0.0.0"].include?(host) cfg = ActiveRecord::Base.configurations.configs_for(:env_name => Rails.env)&.first local_database = cfg && cfg.database.to_s.strip diff --git a/app/models/miq_report.rb b/app/models/miq_report.rb index 511938dc31b..bd66e952a62 100644 --- a/app/models/miq_report.rb +++ b/app/models/miq_report.rb @@ -30,9 +30,9 @@ class MiqReport < ApplicationRecord serialize :rpt_options serialize :display_filter - validates_presence_of :name, :title, :db, :rpt_group + validates :name, :title, :db, :rpt_group, :presence => true validates :name, :uniqueness_when_changed => true - validates_inclusion_of :rpt_type, :in => %w( Default Custom ) + validates :rpt_type, :inclusion => {:in => %w[Default Custom]} has_many :miq_report_results, :dependent => :destroy belongs_to :time_profile @@ -46,6 +46,7 @@ class MiqReport < ApplicationRecord alias_attribute :menu_name, :name attr_accessor :ext_options + attr_accessor_that_yamls :table, :sub_table, :filter_summary, :extras, :ids, :scoped_association, :html_title, :file_name, :extras, :record_id, :tl_times, :user_categories, :trend_data, :performance, :include_for_find, :report_run_time, :chart @@ -54,7 +55,7 @@ class MiqReport < ApplicationRecord GROUPINGS = [[:min, N_("Minimum"), N_("Minima")], [:avg, N_("Average"), N_("Averages")], [:max, N_("Maximum"), N_("Maxima")], [:total, N_("Total"), N_("Totals")]].freeze PIVOTS = [[:min, "Minimum"], [:avg, "Average"], [:max, "Maximum"], [:total, "Total"]] - IMPORT_CLASS_NAMES = %w(MiqReport).freeze + IMPORT_CLASS_NAMES = %w[MiqReport].freeze scope :for_user, lambda { |user| if user.report_admin_user? @@ -300,7 +301,7 @@ def format_row(row, allowed_columns = nil, expand_value_format = nil) tz = get_time_zone(User.current_user.settings.fetch_path(:display, :timezone).presence || Time.zone) row.map do |key, _| value = allowed_columns.nil? || allowed_columns&.include?(key) ? format_column(key, row, tz, col_format_hash[key]) : row[key] - [key, expand_value_format.present? ? { :value => value, :style_class => get_style_class(key, row, tz) } : value] + [key, expand_value_format.present? ? {:value => value, :style_class => get_style_class(key, row, tz)} : value] end.to_h end diff --git a/app/models/miq_report/formatters/csv.rb b/app/models/miq_report/formatters/csv.rb index 0b387df3965..51f40f53550 100644 --- a/app/models/miq_report/formatters/csv.rb +++ b/app/models/miq_report/formatters/csv.rb @@ -1,13 +1,14 @@ module MiqReport::Formatters::Csv def to_csv return if (@sub_table || @table).nil? - csv_table = @sub_table ? @sub_table.dup : @table.dup # Duplicate table/sub_table since we will be deleting the ID column + + csv_table = @sub_table ? @sub_table.dup : @table.dup # Duplicate table/sub_table since we will be deleting the ID column csv_table.column_names.delete("id") hidden_columns = csv_table.column_names.select { |column| column_is_hidden?(column) } rpt_options ||= {} - csv_table = csv_table.sub_table(0..rpt_options[:row_limit] - 1) unless rpt_options[:row_limit].blank? # Get only row_limit rows + csv_table = csv_table.sub_table(0..rpt_options[:row_limit] - 1) if rpt_options[:row_limit].present? 
# Get only row_limit rows csv_table.data.each do |key| key.data.each do |k| if k[0] == "v_date" diff --git a/app/models/miq_report/formatters/text.rb b/app/models/miq_report/formatters/text.rb index 8a1941b3351..4261194d1d7 100644 --- a/app/models/miq_report/formatters/text.rb +++ b/app/models/miq_report/formatters/text.rb @@ -5,5 +5,5 @@ def to_text e.options.ignore_table_width = true end end - alias_method :to_txt, :to_text + alias to_txt to_text end diff --git a/app/models/miq_report/formatting.rb b/app/models/miq_report/formatting.rb index 9d054b7e231..cbd2ed906f6 100644 --- a/app/models/miq_report/formatting.rb +++ b/app/models/miq_report/formatting.rb @@ -1,4 +1,4 @@ -# Note: when changing a formatter, please consider also changing the corresponding entry in miq_formatters.js +# NOTE: when changing a formatter, please consider also changing the corresponding entry in miq_formatters.js module MiqReport::Formatting extend ActiveSupport::Concern @@ -19,7 +19,8 @@ def javascript_format(col, format_name) function_name = format[:function][:name] options = format.merge(format[:function]).slice( - *%i(delimiter separator precision length tz column format prefix suffix description unit)) + *%i[delimiter separator precision length tz column format prefix suffix description unit] + ) [function_name, options] end @@ -27,10 +28,8 @@ def javascript_format(col, format_name) def formatter_by(column) formatter = nil if Chargeback.db_is_chargeback?(db) - if db.to_s == "ChargebackContainerProject" # override format: default is mhz but cores needed for containers - if %w[cpu_used_metric cpu_metric].include?(column) - formatter = :cores - end + if db.to_s == "ChargebackContainerProject" && %w[cpu_used_metric cpu_metric].include?(column) + formatter = :cores end formatter = :_none_ if Chargeback.rate_column?(column.to_s) @@ -104,6 +103,7 @@ def format(col, value, options = {}) def apply_format_precision(value, precision) return value if precision.nil? || !(value.kind_of?(Integer) || value.kind_of?(Float)) + Kernel.format("%.#{precision}f", value) end @@ -135,7 +135,7 @@ def format_currency_with_delimiter(val, options = {}) helper_options = {} helper_options[:delimiter] = options[:delimiter] if options.key?(:delimiter) helper_options[:separator] = options[:separator] if options.key?(:separator) - helper_options[:unit] = options [:unit] if options.key?(:unit) + helper_options[:unit] = options[:unit] if options.key?(:unit) val = apply_format_precision(val, options[:precision]) val = ApplicationController.helpers.number_to_currency(val, helper_options) apply_prefix_and_suffix(val, options) @@ -143,7 +143,7 @@ def format_currency_with_delimiter(val, options = {}) def format_bytes_to_human_size(val, options = {}) helper_options = {} - helper_options[:precision] = options[:precision] || 0 # Precision of 0 returns the significant digits + helper_options[:precision] = options[:precision] || 0 # Precision of 0 returns the significant digits val = ApplicationController.helpers.number_to_human_size(val, helper_options) apply_prefix_and_suffix(val, options) end @@ -171,15 +171,15 @@ def format_boolean(val, options = {}) case options[:format] when "yes_no" - return val == true ? "Yes" : "No" + val == true ? "Yes" : "No" when "y_n" - return val == true ? "Y" : "N" + val == true ? "Y" : "N" when "t_f" - return val == true ? "T" : "F" + val == true ? "T" : "F" when "pass_fail" - return val == true ? "Pass" : "Fail" + val == true ? 
"Pass" : "Fail" else - return val.to_s.titleize + val.to_s.titleize end end @@ -188,6 +188,7 @@ def format_datetime(val, options) val = val.in_time_zone(options[:tz]) if val.kind_of?(Time) && options[:tz] return val if options[:format].nil? + val.strftime(options[:format]) end @@ -205,22 +206,23 @@ def format_datetime_range(val, options) _col, sfx = col.to_s.split("__") # The suffix (month, quarter, year) defines the range val = val.in_time_zone(get_time_zone("UTC")) - if val.respond_to?("beginning_of_#{sfx}") - stime = val.send("beginning_of_#{sfx}") - etime = val.send("end_of_#{sfx}") + if val.respond_to?(:"beginning_of_#{sfx}") + stime = val.send(:"beginning_of_#{sfx}") + etime = val.send(:"end_of_#{sfx}") else stime = etime = val end if options[:description].to_s.include?("Start") - return stime.strftime(options[:format]) + stime.strftime(options[:format]) else - return "(#{stime.strftime(options[:format])} - #{etime.strftime(options[:format])})" + "(#{stime.strftime(options[:format])} - #{etime.strftime(options[:format])})" end end def format_set(val, options) return val unless val.kind_of?(Array) + options[:delimiter] ||= ", " val.join(options[:delimiter]) end @@ -237,7 +239,7 @@ def format_number_ordinal(val, _options) def format_elapsed_time_human(val, _options) val = val.to_i - names = %w(day hour minute second) + names = %w[day hour minute second] days = (val / 86400) hours = (val / 3600) - (days * 24) @@ -268,6 +270,7 @@ def format_string_truncate(val, options = {}) def format_large_number_to_exponential_form(val, _options = {}) return val if val.to_f < 1.0e+15 + val.to_f.to_s end diff --git a/app/models/miq_report/generator.rb b/app/models/miq_report/generator.rb index 0bb41a87c6c..4659ca75e34 100644 --- a/app/models/miq_report/generator.rb +++ b/app/models/miq_report/generator.rb @@ -65,17 +65,16 @@ def col_to_expression_col(col) def table2class(table) @table2class ||= {} - @table2class[table] ||= begin - case table.to_sym - when :ports, :nics, :storage_adapters - "GuestDevice" - when :"" - self.class.name - else - ref = db_class.reflection_with_virtual(table.to_sym) + @table2class[table] ||= case table.to_sym + when :ports, :nics, :storage_adapters + "GuestDevice" + when :"" + self.class.name + else + ref = db_class.reflection_with_virtual(table.to_sym) ref ? 
ref.class_name : table.singularize.camelize - end - end + end + @table2class[table] end @@ -119,8 +118,10 @@ def invent_includes # will go away when we drop build_reportable_data def invent_report_includes return {} unless col_order + col_order.each_with_object({}) do |col, ret| next unless col.include?(".") + *rels, column = col.split(".") if col !~ /managed\./ && col !~ /virtual_custom/ (rels.inject(ret) { |h, rel| h[rel] ||= {} }["columns"] ||= []) << column @@ -213,6 +214,7 @@ def generate_table(options = {}) def _generate_table(options = {}) return build_table_from_report(options) if db == self.class.name # Build table based on data from passed in report object + _generate_table_prep results = if custom_results_method @@ -261,7 +263,7 @@ def generate_performance_results(options = {}) results, extras[:group_by_tag_cols], extras[:group_by_tags] = db_class.group_by_tags( db_class.find_entries(ext_options).where(where_clause).where(options[:where_clause]), :category => performance[:group_by_category], - :cat_model => options[:cat_model], + :cat_model => options[:cat_model] ) build_correlate_tag_cols end @@ -342,7 +344,7 @@ def generate_basic_results(options = {}) ) ## add in virtual attributes that can be calculated from sql - rbac_opts[:extra_cols] = va_sql_cols unless va_sql_cols.blank? + rbac_opts[:extra_cols] = va_sql_cols if va_sql_cols.present? rbac_opts[:use_sql_view] = if db_options.nil? || db_options[:use_sql_view].nil? MiqReport.default_use_sql_view else @@ -491,7 +493,7 @@ def get_data_from_report(rpt) end def generate_rows_from_data(data) - data.inject([]) do |arr, d| + data.each_with_object([]) do |d, arr| generate_rows.each do |gen_row| row = {} gen_row.each_with_index do |col_def, col_idx| @@ -500,7 +502,6 @@ def generate_rows_from_data(data) end arr << row end - arr end end @@ -509,9 +510,10 @@ def generate_col_from_data(col_def, data) unless data.key?(col_def[:col_name]) raise _("Column '%{name} does not exist in data") % {:name => col_def[:col_name]} end - return col_def.key?(:function) ? apply_col_function(col_def, data) : data[col_def[:col_name]] + + col_def.key?(:function) ? apply_col_function(col_def, data) : data[col_def[:col_name]] else - return col_def + col_def end end @@ -524,9 +526,10 @@ def apply_col_function(col_def, data) unless data.key?(col_def[:pct_col_name]) raise _("Column '%{name} does not exist in data") % {:name => gen_row[:pct_col_name]} end + col_val = data[col_def[:col_name]] || 0 pct_val = data[col_def[:pct_col_name]] || 0 - return pct_val == 0 ? 0 : (col_val / pct_val * 100.0) + pct_val == 0 ? 0 : (col_val / pct_val * 100.0) else raise _("Column function '%{name}' not supported") % {:name => col_def[:function]} end @@ -534,27 +537,28 @@ def apply_col_function(col_def, data) def build_correlate_tag_cols tags2desc = {} - arr = self.cols.inject([]) do |a, c| + arr = self.cols.each_with_object([]) do |c, a| self.extras[:group_by_tag_cols].each do |tc| tag = tc[(c.length + 1)..-1] - if tc.starts_with?(c) - unless tags2desc.key?(tag) - if tag == "_none_" - tags2desc[tag] = "[None]" - else - entry = Classification.lookup_by_name([performance[:group_by_category], tag].join("/")) - tags2desc[tag] = entry.nil? ? tag.titleize : entry.description - end + next unless tc.starts_with?(c) + + unless tags2desc.key?(tag) + if tag == "_none_" + tags2desc[tag] = "[None]" + else + entry = Classification.lookup_by_name([performance[:group_by_category], tag].join("/")) + tags2desc[tag] = entry.nil? ? 
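generate_rows_from_data (and several other methods in this file) move from inject([]) to each_with_object([]); the latter passes the accumulator as a block argument and returns it itself, so the trailing `arr` line disappears. Sketch with made-up row data:

data = [{"name" => "vm1", "cpu" => 2}, {"name" => "vm2", "cpu" => 4}]

rows_via_inject = data.inject([]) do |arr, d|
  arr << {"name" => d["name"]}
  arr # inject needs the accumulator handed back each iteration
end

rows_via_ewo = data.each_with_object([]) do |d, arr|
  arr << {"name" => d["name"]} # accumulator is returned automatically
end

p rows_via_ewo == rows_via_inject # => true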
tag.titleize : entry.description end - a << [tc, tags2desc[tag]] end + a << [tc, tags2desc[tag]] + end + end + arr.sort_by! { |a| a[1] } + unless arr.blank? || (arr.first[1] == "[None]" && arr.last[1] == "[None]") + while arr.first[1] == "[None]" + arr.push(arr.shift) end - a end - arr.sort! { |a, b| a[1] <=> b[1] } - while arr.first[1] == "[None]" - arr.push(arr.shift) - end unless arr.blank? || (arr.first[1] == "[None]" && arr.last[1] == "[None]") arr.each do |c, h| self.cols.push(c) col_order.push(c) @@ -562,9 +566,11 @@ def build_correlate_tag_cols end tarr = Array(tags2desc).sort_by { |t| t[1] } - while tarr.first[1] == "[None]" - tarr.push(tarr.shift) - end unless tarr.blank? || (tarr.first[1] == "[None]" && tarr.last[1] == "[None]") + unless tarr.blank? || (tarr.first[1] == "[None]" && tarr.last[1] == "[None]") + while tarr.first[1] == "[None]" + tarr.push(tarr.shift) + end + end self.extras[:group_by_tags] = tarr.collect { |a| a[0] } self.extras[:group_by_tag_descriptions] = tarr.collect { |a| a[1] } end @@ -578,7 +584,7 @@ def build_add_missing_timestamps(recs) klass = recs.first.class last_rec = nil - results = recs.sort_by { |r| [r.resource_type, r.resource_id.to_s, r.timestamp.iso8601] }.inject([]) do |arr, rec| + recs.sort_by { |r| [r.resource_type, r.resource_id.to_s, r.timestamp.iso8601] }.each_with_object([]) do |rec, arr| last_rec ||= rec while (rec.timestamp - last_rec.timestamp) > int base_attrs = last_rec.attributes.reject { |k, _v| !base_cols.include?(k) } @@ -588,13 +594,13 @@ def build_add_missing_timestamps(recs) end arr << rec last_rec = rec - arr end - results + end def build_apply_time_profile(results) return unless time_profile + # Apply time profile if one was provided results.each { |rec| rec.apply_time_profile(time_profile) if rec.respond_to?(:apply_time_profile) } end @@ -646,28 +652,27 @@ def build_pivot(data) data = sort_table(data, rpt_options[:pivot][:group_cols].collect(&:to_s), :order => :ascending) # build grouping options for subtotal - options = col_order.inject({}) do |h, col| + options = col_order.each_with_object({}) do |col, h| next(h) unless col.include?("__") c, g = col.split("__") h[c] ||= {} h[c][:grouping] ||= [] h[c][:grouping] << g.to_sym - h end - group_key = rpt_options[:pivot][:group_cols] + group_key = rpt_options[:pivot][:group_cols] data = generate_subtotals(data, group_key, options) data.inject([]) do |a, (k, v)| next(a) if k == :_total_ - row = col_order.inject({}) do |h, col| + + row = col_order.each_with_object({}) do |col, h| if col.include?("__") c, g = col.split("__") h[col] = v[g.to_sym][c] else h[col] = v[:row][col] end - h end a << row end @@ -682,6 +687,7 @@ def cols_for_report(extra_cols = []) def build_cols_from_include(hash, parent_association = nil) return [] if hash.blank? 
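build_correlate_tag_cols now sorts with sort_by! and a key extractor, which orders the pairs the same way the old two-argument sort! block did and is usually faster for non-trivial keys. Sketch with made-up tag pairs:

arr = [["tag_b", "Production"], ["tag_a", "[None]"], ["tag_c", "Development"]]

arr.sort_by! { |a| a[1] } # same ordering as arr.sort! { |a, b| a[1] <=> b[1] }
p arr.map(&:last)         # => ["Development", "Production", "[None]"]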
+ hash.inject([]) do |a, (k, v)| full_path = get_full_path(parent_association, k) v["columns"].each { |c| a << get_full_path(full_path, c) } if v.key?("columns") @@ -731,18 +737,20 @@ def build_get_attributes_with_options(rec, options = {}) {:only => options[:only], :except => options[:except]} end return {} unless only_or_except + attrs = {} options[:only].each do |a| if self.class.is_trend_column?(a) attrs[a] = build_calculate_trend_point(rec, a) - else - attrs[a] = rec.send(a) if rec.respond_to?(a) + elsif rec.respond_to?(a) + attrs[a] = rec.send(a) + end + end + if options[:qualify_attribute_names] + attrs = attrs.each_with_object({}) do |(k, v), h| + h["#{options[:qualify_attribute_names]}.#{k}"] = v end end - attrs = attrs.inject({}) do |h, (k, v)| - h["#{options[:qualify_attribute_names]}.#{k}"] = v - h - end if options[:qualify_attribute_names] attrs end @@ -754,14 +762,14 @@ def build_add_includes(data_records, entry, includes, parent_association) existing_records = data_records.dup data_records = [] full_path = get_full_path(parent_association, association) - if include_has_options - assoc_options = includes[association].merge(:qualify_attribute_names => full_path, + assoc_options = if include_has_options + includes[association].merge(:qualify_attribute_names => full_path, :only => includes[association]["columns"]) - else - assoc_options = {:qualify_attribute_names => full_path, :only => includes[association]["columns"]} - end + else + {:qualify_attribute_names => full_path, :only => includes[association]["columns"]} + end - if association == "categories" || association == "managed" + if ["categories", "managed"].include?(association) association_objects = [] assochash = {} @descriptions_by_tag_id ||= Classification.is_entry.each_with_object({}) do |c, h| @@ -773,6 +781,7 @@ def build_add_includes(data_records, entry, includes, parent_association) entry[:obj].tags.each do |t| next unless t.name.starts_with?("/managed/#{c}/") next unless @descriptions_by_tag_id.key?(t.id) + entarr << @descriptions_by_tag_id[t.id] end assochash[full_path + "." + c] = entarr unless entarr.empty? @@ -796,11 +805,11 @@ def build_add_includes(data_records, entry, includes, parent_association) data_records << existing_record else association_objects.each do |obj| - if association == "categories" || association == "managed" - association_records = [obj] - else - association_records = build_reportable_data(obj, assoc_options, full_path) - end + association_records = if ["categories", "managed"].include?(association) + [obj] + else + build_reportable_data(obj, assoc_options, full_path) + end association_records.each do |assoc_record| data_records << existing_record.merge(assoc_record) end @@ -849,13 +858,14 @@ def build_report_result(taskid, options, res_opts = {}) end end - res_last_run_on = Time.now.utc + res_last_run_on = Time.now.utc # If a scheduler :at time was provided, convert that to a Time object, otherwise use the current time if res_opts[:at] unless res_opts[:at].kind_of?(Numeric) raise _("Expected scheduled time 'at' to be 'numeric', received '%{type}'") % {:type => res_opts[:at].class} end + at = Time.at(res_opts[:at]).utc else at = res_last_run_on @@ -896,7 +906,8 @@ def append_to_title!(title_suffix) def append_user_filters_to_title(user) return unless user && user.has_filters? 
- self.append_to_title!(" (filtered for #{user.name})") + + append_to_title!(" (filtered for #{user.name})") end def get_time_zone(default_tz = nil) diff --git a/app/models/miq_report/generator/aggregation.rb b/app/models/miq_report/generator/aggregation.rb index 80ecf0e7f71..35315dff492 100644 --- a/app/models/miq_report/generator/aggregation.rb +++ b/app/models/miq_report/generator/aggregation.rb @@ -1,6 +1,6 @@ module MiqReport::Generator::Aggregation def build_subtotals(all_dims = false) - return unless group == "c" || (!col_options.blank? && col_options.any? { |_c, h| h.key?(:grouping) }) + return unless group == "c" || (col_options.present? && col_options.any? { |_c, h| h.key?(:grouping) }) return if sortby.blank? grouping_keys = all_dims ? sortby : sortby.first @@ -59,6 +59,7 @@ def aggregate_totals(row, group, total, options) def process_totals(group) group.each_key do |g| next if g == :count + group[g].each_key do |c| case g when :total, :count, :min, :max diff --git a/app/models/miq_report/generator/async.rb b/app/models/miq_report/generator/async.rb index 17425bbc904..2ed5a7e383c 100644 --- a/app/models/miq_report/generator/async.rb +++ b/app/models/miq_report/generator/async.rb @@ -6,14 +6,16 @@ def async_generate_tables(options = {}) sync = ::Settings.product.report_sync task = MiqTask.create(:name => "Generate Reports: #{options[:reports].collect(&:name).inspect}") - MiqQueue.submit_job( - :service => "reporting", - :class_name => to_s, - :method_name => "_async_generate_tables", - :args => [task.id, options], - :priority => MiqQueue::HIGH_PRIORITY, - :msg_timeout => default_queue_timeout.to_i_with_method - ) unless sync # Only queued if sync reporting disabled (default) + unless sync + MiqQueue.submit_job( + :service => "reporting", + :class_name => to_s, + :method_name => "_async_generate_tables", + :args => [task.id, options], + :priority => MiqQueue::HIGH_PRIORITY, + :msg_timeout => default_queue_timeout.to_i_with_method + ) + end # Only queued if sync reporting disabled (default) AuditEvent.success(:event => "generate_tables", :target_class => base_class.name, :userid => options[:userid], :message => "#{task.name}, successfully initiated") task.update_status("Queued", "Ok", "Task has been queued") _async_generate_tables(task.id, options) if sync # Only runs if sync reporting enabled @@ -52,7 +54,7 @@ def async_generate_table(options = {}) task = MiqTask.create(:name => _("Generate Report: '%{name}'") % {:name => name}) unless sync # Only queued if sync reporting disabled (default) cb = {:class_name => task.class.name, :instance_id => task.id, :method_name => :queue_callback_on_exceptions, :args => ['Finished']} - if self.new_record? + if new_record? MiqQueue.submit_job( :service => "reporting", :class_name => self.class.to_s, diff --git a/app/models/miq_report/generator/html.rb b/app/models/miq_report/generator/html.rb index c34174c55d5..1f43b43fea0 100644 --- a/app/models/miq_report/generator/html.rb +++ b/app/models/miq_report/generator/html.rb @@ -20,16 +20,15 @@ def build_html_rows(clickable_rows = false) save_val = :_undefined_ # Hang on to the current group value break_label = col_options.fetch_path(sortby[0], :break_label) unless sortby.nil? || col_options.nil? || in_a_widget group_text = nil # Optionally override what gets displayed for the group (i.e. Chargeback) - use_table = sub_table ? 
sub_table : table + use_table = sub_table || table use_table.data.each_with_index do |d, d_idx| break if row_limit != 0 && d_idx > row_limit - 1 + output = "" if ["y", "c"].include?(group) && !sortby.nil? && save_val != d.data[sortby[0]].to_s - unless d_idx == 0 # If not the first row, we are at a group break - unless group_limit && group_counter >= group_limit # If not past the limit - html_rows += build_group_html_rows(save_val, col_order.length, break_label, group_text) + if !(d_idx == 0) && !(group_limit && group_counter >= group_limit) # If not past the limit + html_rows += build_group_html_rows(save_val, col_order.length, break_label, group_text) group_counter += 1 - end end save_val = d.data[sortby[0]].to_s # Chargeback, sort by date, but show range @@ -37,11 +36,11 @@ def build_html_rows(clickable_rows = false) end # Build click thru if string can be created - if clickable_rows && onclick = build_row_onclick(d.data) - output << "" - else - output << "" - end + output << if clickable_rows && onclick = build_row_onclick(d.data) + "" + else + "" + end row = 1 - row col_order.each_with_index do |c, c_idx| @@ -55,11 +54,9 @@ def build_html_rows(clickable_rows = false) html_rows << output unless hide_detail_rows end - if ["y", "c"].include?(group) && !sortby.nil? - unless group_limit && group_counter >= group_limit - html_rows += build_group_html_rows(save_val, col_order.length, break_label, group_text) + if ["y", "c"].include?(group) && !sortby.nil? && !(group_limit && group_counter >= group_limit) + html_rows += build_group_html_rows(save_val, col_order.length, break_label, group_text) html_rows += build_group_html_rows(:_total_, col_order.length) - end end end @@ -94,7 +91,7 @@ def open_td(style_class, text_align = nil) def build_html_col(output, col_name, col_format, row_data, time_zone) style = get_style_class(col_name, row_data, time_zone) style_class = style.present? ? " class='#{style}'" : nil - alignment_style = if db == 'Tenant' && TenantQuota.can_format_field?(col_name, row_data['tenant_quotas.name']) || row_data[col_name].kind_of?(Integer) || row_data[col_name].kind_of?(Float) + alignment_style = if (db == 'Tenant' && TenantQuota.can_format_field?(col_name, row_data['tenant_quotas.name'])) || row_data[col_name].kind_of?(Integer) || row_data[col_name].kind_of?(Float) :right elsif row_data[col_name].kind_of?(Time) :center @@ -113,18 +110,18 @@ def build_row_onclick(data_row) if ['EmsCluster', 'ExtManagementSystem', 'Host', 'Storage', 'Vm', 'Service'].include?(db) && data_row['id'] controller = db == "ExtManagementSystem" ? "management_system" : db.underscore donav = "DoNav('/#{controller}/show/#{data_row['id']}');" - title = data_row['name'] ? 
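use_table (like parent_svc.parent earlier in the patch) replaces the `x ? x : y` ternary with `x || y`, which evaluates the receiver once and falls back to the alternative when it is nil or false. Tiny sketch:

sub_table = nil
table     = "full table"

use_table = sub_table || table # same result as: sub_table ? sub_table : table
puts use_table                 # => "full table"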
- "View #{ui_lookup(:model => db)} \"#{data_row['name']}\"" : - "View this #{ui_lookup(:model => db)}" + title = if data_row['name'] + "View #{ui_lookup(:model => db)} \"#{data_row['name']}\"" + else + "View this #{ui_lookup(:model => db)}" + end onclick = "onclick=\"#{donav}\" onKeyPress=\"#{donav}\" tabindex='0' style='cursor:hand' title='#{title}'" end # Handle CI performance report rows - if db.ends_with?("Performance") - if data_row['resource_id'] && data_row['resource_type'] # Base click thru on the related resource - donav = "DoNav('/#{data_row['resource_type'].underscore}/show/#{data_row['resource_id']}');" + if db.ends_with?("Performance") && (data_row['resource_id'] && data_row['resource_type']) # Base click thru on the related resource + donav = "DoNav('/#{data_row['resource_type'].underscore}/show/#{data_row['resource_id']}');" onclick = "onclick=\"#{donav}\" onKeyPress=\"#{donav}\" tabindex='0' style='cursor:hand' title='View #{ui_lookup(:model => data_row['resource_type'])} \"#{data_row['resource_name']}\"'" - end end onclick @@ -148,29 +145,29 @@ def build_group_html_rows(group, col_count, label = nil, group_text = nil) if (self.group == 'c') && extras && extras[:grouping] && extras[:grouping][group] display_count = _("Count: %{number}") % {:number => extras[:grouping][group][:count]} end - content << " | #{display_count}" unless display_count.blank? + content << " | #{display_count}" if display_count.present? html_rows << "#{CGI.escapeHTML(content)}" if extras && extras[:grouping] && extras[:grouping][group] # See if group key exists - MiqReport::GROUPINGS.each do |calc| # Add an output row for each group calculation - if extras[:grouping][group].key?(calc.first) # Only add a row if there are calcs of this type for this group value - grp_output = "" - grp_output << "" - grp_output << "#{_(calc.last)}:" - col_order.each_with_index do |c, c_idx| # Go through the columns - next if c_idx == 0 # Skip first column - grp_output << "" + MiqReport::GROUPINGS.each do |calc| # Add an output row for each group calculation + next unless extras[:grouping][group].key?(calc.first) # Only add a row if there are calcs of this type for this group value + + grp_output = "" + grp_output << "" + grp_output << "#{_(calc.last)}:" + col_order.each_with_index do |c, c_idx| # Go through the columns + next if c_idx == 0 # Skip first column + + grp_output << "" + if extras[:grouping][group].key?(calc.first) grp_output << CGI.escapeHTML( - format( - c.split("__").first, extras[:grouping][group][calc.first][c], - :format => self.col_formats[c_idx] ? self.col_formats[c_idx] : :_default_ - ) - ) if extras[:grouping][group].key?(calc.first) - grp_output << "" + c.split("__").first % [extras[:grouping][group][calc.first][c], {:format => self.col_formats[c_idx] || :_default_}] + ) end - grp_output << "" - html_rows << grp_output + grp_output << "" end + grp_output << "" + html_rows << grp_output end end html_rows << " " unless group == :_total_ @@ -182,7 +179,7 @@ def get_style_class(col, row, tz = nil) return if atoms.nil? 
nh = {} - row.each { |k, v| nh[col_to_expression_col(k).sub(/-/, ".")] = v } # Convert keys to match expression fields + row.each { |k, v| nh[col_to_expression_col(k).sub("-", ".")] = v } # Convert keys to match expression fields field = col_to_expression_col(col) atoms.each do |atom| diff --git a/app/models/miq_report/generator/sorting.rb b/app/models/miq_report/generator/sorting.rb index 6f6fc8a9798..9a034b02ec6 100644 --- a/app/models/miq_report/generator/sorting.rb +++ b/app/models/miq_report/generator/sorting.rb @@ -9,7 +9,7 @@ def sort_table(table, col_names, order) end def build_sort_table - return if sortby.nil? # Are there any sort fields + return if sortby.nil? # Are there any sort fields new_sortby = build_sort_suffix_data sb_nil_sub = [] @@ -17,7 +17,7 @@ def build_sort_table base_col_name = sb.split(SORT_COL_SUFFIX).first ctype = MiqExpression::Target.parse(col_to_expression_col(base_col_name)).column_type sb_nil_sub[idx] = case ctype - when :string, :text, :boolean, nil then "00ff".hex.chr # "\xFF" + when :string, :text, :boolean, nil then "00ff".hex.chr # "\xFF" when :integer, :fixnum, :decimal, :float then @table.data.collect { |d| d.data[sb] }.compact.max.to_i + 1 when :datetime then Time.at(@table.data.collect { |d| d.data[sb] }.compact.max.to_i + 1).utc when :date then @table.data.collect { |d| d.data[sb] }.compact.max.try(:+, 1) @@ -39,6 +39,7 @@ def build_sort_table # Remove any subtituted values we put in the table earlier new_sortby.each_with_index do |sb, idx| next if sb_nil_sub[idx].nil? + @table.data.each { |d| d.data[sb] = nil if d.data[sb] == sb_nil_sub[idx] } end end @@ -52,7 +53,7 @@ def build_sort_suffix_data sort_col = "#{sb}#{SORT_COL_SUFFIX}" @table.add_column(sort_col) { |d| build_value_for_sort_suffix(d.data[col], sfx) } - @table.add_column(sb) { |d| format(sb, d.data[col], :format => col_options ? col_options.fetch_path(sb, :break_format) : nil) } + @table.add_column(sb) { |d| sb % [d.data[col], {:format => col_options ? col_options.fetch_path(sb, :break_format) : nil}] } new_sortby << sort_col @@ -68,7 +69,7 @@ def build_sort_suffix_data def build_value_for_sort_suffix(value, suffix) value = value.in_time_zone(get_time_zone("UTC")) if value && value.kind_of?(Time) - value = value.to_time.utc.beginning_of_day if value && value.kind_of?(Date) + value = value.to_time.utc.beginning_of_day if value && value.kind_of?(Date) suffix = suffix.to_sym if suffix case suffix diff --git a/app/models/miq_report/generator/trend.rb b/app/models/miq_report/generator/trend.rb index 86b99b9e616..a2ecdf7554a 100644 --- a/app/models/miq_report/generator/trend.rb +++ b/app/models/miq_report/generator/trend.rb @@ -65,9 +65,9 @@ def build_calculate_trend_point(rec, col) begin val = Math.slope_y_intercept(rec.send(CHART_X_AXIS_COLUMN_ADJUSTED).to_i, @trend_data[col][:slope], @trend_data[col][:yint]) - return val > 0 ? val : 0 + val > 0 ? 
val : 0 rescue ZeroDivisionError - return nil + nil end end @@ -80,17 +80,19 @@ def build_trend_data(recs) cols.each do |c| next unless self.class.is_trend_column?(c) + @trend_data[c] = {} coordinates = recs.each_with_object([]) do |r, arr| next unless r.respond_to?(CHART_X_AXIS_COLUMN) && r.respond_to?(c[6..-1]) + if r.respond_to?(:inside_time_profile) && r.inside_time_profile == false _log.debug("Timestamp: [#{r.timestamp}] is outside of time profile: [#{time_profile.description}]") next end y = r.send(c[6..-1]).to_f # y = r.send(CHART_X_AXIS_COLUMN).to_i # Calculate normal way by using the integer value of the timestamp - r.send("#{CHART_X_AXIS_COLUMN_ADJUSTED}=", (recs.first.send(CHART_X_AXIS_COLUMN).to_i + arr.length.days.to_i)) + r.send(:"#{CHART_X_AXIS_COLUMN_ADJUSTED}=", (recs.first.send(CHART_X_AXIS_COLUMN).to_i + arr.length.days.to_i)) x = r.send(CHART_X_AXIS_COLUMN_ADJUSTED).to_i # Calculate by using the number of days out from the first timestamp arr << [x, y] end @@ -98,7 +100,7 @@ def build_trend_data(recs) @trend_data[c][:slope], @trend_data[c][:yint], @trend_data[c][:corr] = begin Math.linear_regression(*coordinates) - rescue StandardError => err + rescue => err _log.warn("#{err.message}, calculating slope") unless err.kind_of?(ZeroDivisionError) nil end @@ -107,6 +109,7 @@ def build_trend_data(recs) def build_trend_limits(recs) return if cols.nil? || @trend_data.blank? + cols.each do |c| # XXX: TODO: Hardcoding column names for now until we have more time to extend the model and allow defining these in YAML case c.to_sym @@ -140,25 +143,25 @@ def build_trend_limits(recs) def calc_value_at_target(limit, trend_data_key, trend_data) unknown = _("Trending Down") if limit.nil? || trend_data[trend_data_key].nil? || trend_data[trend_data_key][:slope].nil? || trend_data[trend_data_key][:yint].nil? || trend_data[trend_data_key][:slope] <= 0 # can't project with a negative slope value - return unknown + unknown else begin result = Math.slope_x_intercept(limit, trend_data[trend_data_key][:slope], trend_data[trend_data_key][:yint]) if result <= 1.year.from_now.to_i if Time.at(result).utc <= Time.now.utc - return Time.at(result).utc.strftime("%m/%d/%Y") + Time.at(result).utc.strftime("%m/%d/%Y") else options = {:days => ((Time.at(result).utc - Time.now.utc) / 1.day).round, :date => Time.at(result).utc.strftime("%m/%d/%Y"), :timezone => get_time_zone("UTC")} - return _("%{days} days, on %{date} (%{timezone})") % options + _("%{days} days, on %{date} (%{timezone})") % options end else - return _("after 1 year") + _("after 1 year") end rescue RangeError - return unknown + unknown rescue => err _log.warn("#{err.message}, calculating trend limit for column: [#{trend_data_key}]") - return unknown + unknown end end end diff --git a/app/models/miq_report/import_export.rb b/app/models/miq_report/import_export.rb index 5da4f72653e..100a0ea885a 100644 --- a/app/models/miq_report/import_export.rb +++ b/app/models/miq_report/import_export.rb @@ -3,12 +3,11 @@ module MiqReport::ImportExport module ClassMethods def view_paths - @view_paths ||= ( - Vmdb::Plugins.map do |engine| - path = engine.root.join('product/views') + @view_paths ||= Vmdb::Plugins.map do |engine| + path = engine.root.join('product/views') path if path.directory? 
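build_trend_data switches to a bare rescue, which already captures StandardError and its subclasses; only non-StandardError exceptions (SignalException, NoMemoryError, and friends) keep propagating. Stand-in method below, not the real Math.linear_regression call:

def slope_or_nil(rise, run)
  rise / run
rescue => err # equivalent to: rescue StandardError => err
  warn "#{err.message}, calculating slope"
  nil
end

p slope_or_nil(10, 2) # => 5
p slope_or_nil(10, 0) # => nil (ZeroDivisionError is a StandardError)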
- end.compact - ) + end.compact + end def resolve_view_path(file_name, file_name_no_suffix = nil) @@ -117,7 +116,7 @@ def load_from_view_options(db, current_user = nil, options = {}, cache = {}) def load_from_filename(filename, cache) yaml = cache[filename] ||= YAML.load_file(filename) view = MiqReport.new(yaml) - view.extras ||= {} # Always add in the extras hash + view.extras ||= {} # Always add in the extras hash view.extras[:filename] = File.basename(filename, '.yaml') view end @@ -128,14 +127,12 @@ def view_yaml_filename(db, current_user, options) role = current_user.try(:miq_user_role) # Special code to build the view file name for users of VM restricted roles - if %w(ManageIQ::Providers::CloudManager::Template ManageIQ::Providers::InfraManager::Template - ManageIQ::Providers::CloudManager::Vm ManageIQ::Providers::InfraManager::Vm VmOrTemplate).include?(db) - if role && role.settings && role.settings.fetch_path(:restrictions, :vms) - viewfilerestricted = resolve_view_path('Vm__restricted.yaml') - end + if %w[ManageIQ::Providers::CloudManager::Template ManageIQ::Providers::InfraManager::Template + ManageIQ::Providers::CloudManager::Vm ManageIQ::Providers::InfraManager::Vm VmOrTemplate].include?(db) && (role && role.settings && role.settings.fetch_path(:restrictions, :vms)) + viewfilerestricted = resolve_view_path('Vm__restricted.yaml') end - db = db.gsub(/::/, '_') + db = db.gsub("::", '_') role = role.name.split("-").last if role.try(:read_only?) diff --git a/app/models/miq_report/notification.rb b/app/models/miq_report/notification.rb index bf69f8405a9..8f7c803cd1c 100644 --- a/app/models/miq_report/notification.rb +++ b/app/models/miq_report/notification.rb @@ -4,7 +4,7 @@ def notify_user_of_report(run_on, result, options) url = options[:email_url_prefix] user = User.lookup_by_userid(userid) - from = options[:email] && !options[:email][:from].blank? ? options[:email][:from] : ::Settings.smtp.from + from = options[:email] && options[:email][:from].present? ? options[:email][:from] : ::Settings.smtp.from to = options[:email] ? options[:email][:to] : user.try(:email) msg = nil @@ -18,7 +18,7 @@ def notify_user_of_report(run_on, result, options) send_if_empty = options.fetch_path(:email, :send_if_empty) send_if_empty = true if send_if_empty.nil? - if !self.table_has_records? && !send_if_empty + if !table_has_records? && !send_if_empty _log.info("No records found for scheduled report and :send_if_empty option is false, no Email will be sent. ") return end @@ -31,14 +31,14 @@ def notify_user_of_report(run_on, result, options) curr_tz = Time.zone # Save current time zone setting Time.zone = user ? user.get_timezone : MiqServer.my_server.server_timezone - if self.table_has_records? + if table_has_records? attach_types = options.fetch_path(:email, :attach) || [:pdf] # support legacy schedules attachments = attach_types.collect do |atype| target = atype == :pdf ? result : self { :content_type => "application/#{atype}", :filename => "#{title} #{run_on.utc.iso8601}.#{atype}", - :body => target.send("to_#{atype}") + :body => target.send(:"to_#{atype}") } end end @@ -60,7 +60,7 @@ def notify_user_of_report(run_on, result, options) end def notify_email_body(_url, _result, recipients) - if self.table_has_records? + if table_has_records? _("Please find attached scheduled report \"%{name}\". 
This report was sent to: %{recipients}.") % {:name => name, :recipients => recipients.join(", ")} else diff --git a/app/models/miq_report/search.rb b/app/models/miq_report/search.rb index a2eba60b4d2..179f99a6743 100644 --- a/app/models/miq_report/search.rb +++ b/app/models/miq_report/search.rb @@ -17,6 +17,7 @@ def association_column(assoc) klass = db_class.follow_associations_with_virtual(parts) # Column is valid if it is accessible via virtual relations or directly. raise _("Invalid reflection <%{item}> on model <%{name}>") % {:item => assoc, :name => db_class} if klass.nil? + # only return attribute if it is accessible directly (not through virtual columns) [klass.arel_table[col.to_sym], klass.type_for_attribute(col).type] if db_class.follow_associations(parts) end @@ -43,10 +44,12 @@ def get_cached_page(ids, includes, options) # @return [Array] for sorting in sql def get_order_info return [] if sortby.nil? # apply limits (note: without order it is non-deterministic) + # Convert sort cols from sub-tables from the form of assoc_name.column to arel Array.wrap(sortby).collect do |c| sql_col, sql_type = association_column(c) return nil if sql_col.nil? + sql_col = Arel::Nodes::NamedFunction.new('LOWER', [sql_col]) if [:string, :text].include?(sql_type) if order.nil? sql_col @@ -94,8 +97,7 @@ def paged_view_search(options = {}) search_options = options.merge(:class => db, :conditions => conditions, :include_for_find => includes, - :references => get_include - ) + :references => get_include) search_options.merge!(:limit => limit, :offset => offset, :order => order) if order search_options[:extra_cols] = va_sql_cols if va_sql_cols.present? search_options[:use_sql_view] = if db_options.nil? || db_options[:use_sql_view].nil? @@ -104,11 +106,11 @@ def paged_view_search(options = {}) db_options[:use_sql_view] end - if options[:parent] - targets = get_parent_targets(options[:parent], options[:association] || options[:parent_method]) - else - targets = db_class - end + targets = if options[:parent] + get_parent_targets(options[:parent], options[:association] || options[:parent_method]) + else + db_class + end if selected_ids.present? targets = targets.first.kind_of?(Integer) ? targets & selected_ids : targets.where(:id => selected_ids) @@ -143,6 +145,7 @@ def paged_view_search(options = {}) def filter_results(results, supported_features_filter) return results if supported_features_filter.nil? 
+ results.select { |result| result.send(supported_features_filter) } end end diff --git a/app/models/miq_report/seeding.rb b/app/models/miq_report/seeding.rb index 61aa8229ffe..d9cc323d85c 100644 --- a/app/models/miq_report/seeding.rb +++ b/app/models/miq_report/seeding.rb @@ -1,8 +1,8 @@ module MiqReport::Seeding extend ActiveSupport::Concern - REPORT_DIR = Rails.root.join("product", "reports").freeze - COMPARE_DIR = Rails.root.join("product", "compare").freeze + REPORT_DIR = Rails.root.join("product/reports").freeze + COMPARE_DIR = Rails.root.join("product/compare").freeze module ClassMethods def seed diff --git a/app/models/miq_report_result.rb b/app/models/miq_report_result.rb index 339302f39c5..547c665705d 100644 --- a/app/models/miq_report_result.rb +++ b/app/models/miq_report_result.rb @@ -181,9 +181,11 @@ def build_html_rows_for_legacy ######################################################################################################### def self.parse_userid(userid) return userid unless userid.to_s.include?("|") + parts = userid.to_s.split("|") - return parts[0] if (parts.last == 'adhoc') - return parts[1] if (parts.last == 'schedule') + return parts[0] if parts.last == 'adhoc' + return parts[1] if parts.last == 'schedule' + raise _("Cannot parse userid %{user_id}") % {:user_id => userid.inspect} end @@ -205,12 +207,12 @@ def purge_for_user def to_pdf # Create the pdf header section html_string = generate_pdf_header( - :title => name.gsub(/'/, '\\\\\&'), # Escape single quotes + :title => name.gsub("'", '\\\\\&'), # Escape single quotes :page_size => report.page_size, :run_date => format_timezone(last_run_on, user_timezone, "gtl") ) - html_string << report_build_html_table(report_results, html_rows.join) # Build the html report table using all html rows + html_string << report_build_html_table(report_results, html_rows.join) # Build the html report table using all html rows PdfGenerator.pdf_from_string(html_string, "pdf_report.css") end @@ -225,8 +227,8 @@ def generate_pdf_header(options = {}) hdr << "@page{size: #{page_size} landscape}" hdr << "@page{margin: 40pt 30pt 40pt 30pt}" hdr << "@page{@top{content: '#{title}';color:blue}}" - hdr << "@page{@bottom-center{font-size: 75%;content: '" + _("Report date: %{report_date}") % {:report_date => run_date} + "'}}" - hdr << "@page{@bottom-right{font-size: 75%;content: '" + _("Page %{page_number} of %{total_pages}") % {:page_number => " ' counter(page) '", :total_pages => " ' counter(pages)}}"} + hdr << ("@page{@bottom-center{font-size: 75%;content: '" + (_("Report date: %{report_date}") % {:report_date => run_date}) + "'}}") + hdr << ("@page{@bottom-right{font-size: 75%;content: '" + (_("Page %{page_number} of %{total_pages}") % {:page_number => " ' counter(page) '", :total_pages => " ' counter(pages)}}"})) hdr << "" end @@ -248,15 +250,17 @@ def async_generate_result(result_type, options = {}) sync = ::Settings.product.report_sync - MiqQueue.submit_job( - :service => "reporting", - :class_name => self.class.name, - :instance_id => id, - :method_name => "_async_generate_result", - :msg_timeout => report.queue_timeout, - :args => [task.id, result_type.to_sym, options], - :priority => MiqQueue::HIGH_PRIORITY - ) unless sync + unless sync + MiqQueue.submit_job( + :service => "reporting", + :class_name => self.class.name, + :instance_id => id, + :method_name => "_async_generate_result", + :msg_timeout => report.queue_timeout, + :args => [task.id, result_type.to_sym, options], + :priority => MiqQueue::HIGH_PRIORITY + ) + end 
_async_generate_result(task.id, result_type.to_sym, options) if sync AuditEvent.success( @@ -348,7 +352,7 @@ def get_generated_result(result_type) def self.counts_by_userid where("userid NOT LIKE 'widget%'").select("userid, COUNT(id) as count").group("userid") - .collect { |rr| {:userid => rr.userid, :count => rr.count.to_i} } + .collect { |rr| {:userid => rr.userid, :count => rr.count.to_i} } end def self.orphaned_counts_by_userid @@ -362,7 +366,7 @@ def self.delete_by_userid(userids) :class_name => name, :method_name => "destroy_all", :priority => MiqQueue::HIGH_PRIORITY, - :args => [["userid IN (?)", userids]], + :args => [["userid IN (?)", userids]] ) end diff --git a/app/models/miq_report_result/purging.rb b/app/models/miq_report_result/purging.rb index 934043aa9d1..0b781a3165d 100644 --- a/app/models/miq_report_result/purging.rb +++ b/app/models/miq_report_result/purging.rb @@ -15,11 +15,11 @@ def purge_window_size end def purge_count(mode, value) - send("purge_count_by_#{mode}", value) + send(:"purge_count_by_#{mode}", value) end def purge(mode, value, window = nil, &block) - send("purge_by_#{mode}", value, window, &block) + send(:"purge_by_#{mode}", value, window, &block) end def purge_associated_records(ids) diff --git a/app/models/miq_report_result_detail.rb b/app/models/miq_report_result_detail.rb index b8fda13481d..3ebaf36560b 100644 --- a/app/models/miq_report_result_detail.rb +++ b/app/models/miq_report_result_detail.rb @@ -1,5 +1,5 @@ class MiqReportResultDetail < ApplicationRecord - belongs_to :miq_report_result + belongs_to :miq_report_result def self.display_name(number = 1) n_('Report Result Detail', 'Report Result Details', number) diff --git a/app/models/miq_request.rb b/app/models/miq_request.rb index b44f0be8cac..104bfa2174f 100644 --- a/app/models/miq_request.rb +++ b/app/models/miq_request.rb @@ -1,8 +1,8 @@ class MiqRequest < ApplicationRecord extend InterRegionApiMethodRelay - ACTIVE_STATES = %w(active queued) - REQUEST_UNIQUE_KEYS = %w(id state status created_on updated_on type).freeze + ACTIVE_STATES = %w[active queued] + REQUEST_UNIQUE_KEYS = %w[id state status created_on updated_on type].freeze CANCEL_STATUS_REQUESTED = "cancel_requested".freeze CANCEL_STATUS_PROCESSING = "canceling".freeze @@ -22,21 +22,21 @@ class MiqRequest < ApplicationRecord alias_attribute :state, :request_state - serialize :options, Hash + serialize :options, Hash - default_value_for(:message) { |r| "#{r.class::TASK_DESCRIPTION} - Request Created" } + default_value_for(:message) { |r| "#{r.class::TASK_DESCRIPTION} - Request Created" } attribute :request_state, :default => 'pending' - default_value_for(:request_type) { |r| r.request_types.first } + default_value_for(:request_type) { |r| r.request_types.first } attribute :status, :default => 'Ok' attribute :process, :default => true - validates_inclusion_of :approval_state, :in => %w(pending_approval approved denied), :message => "should be 'pending_approval', 'approved' or 'denied'" - validates_inclusion_of :status, :in => %w(Ok Warn Error Timeout Denied) + validates :approval_state, :inclusion => {:in => %w[pending_approval approved denied], :message => "should be 'pending_approval', 'approved' or 'denied'"} + validates :status, :inclusion => {:in => %w[Ok Warn Error Timeout Denied]} - validates :initiated_by, :inclusion => { :in => %w[user system] }, :allow_blank => true - validates :cancelation_status, :inclusion => { :in => CANCEL_STATUS, - :allow_nil => true, - :message => "should be one of #{CANCEL_STATUS.join(", ")}" } + 
validates :initiated_by, :inclusion => {:in => %w[user system]}, :allow_blank => true + validates :cancelation_status, :inclusion => {:in => CANCEL_STATUS, + :allow_nil => true, + :message => "should be one of #{CANCEL_STATUS.join(", ")}"} validate :validate_class, :validate_request_type @@ -52,7 +52,7 @@ class MiqRequest < ApplicationRecord virtual_column :resource_type, :type => :string virtual_column :state, :type => :string - delegate :allowed_tags, :to => :workflow, :prefix => :v, :allow_nil => true + delegate :allowed_tags, :to => :workflow, :prefix => :v, :allow_nil => true delegate :class, :to => :workflow, :prefix => :v_workflow delegate :deny, :reason, :stamped_on, :to => :first_approval delegate :userid, :to => :requester, :prefix => true @@ -72,12 +72,12 @@ class MiqRequest < ApplicationRecord scope :with_requester, ->(id) { where(:requester_id => User.in_all_regions(id).select(:id)) } MODEL_REQUEST_TYPES = { - :Automate => { + :Automate => { :AutomationRequest => { :automation => N_("Automation") } }, - :Service => { + :Service => { :MiqProvisionConfiguredSystemRequest => { :provision_via_foreman => N_("%{config_mgr_type} Provision") % {:config_mgr_type => ui_lookup(:ui_title => 'foreman')} }, @@ -120,7 +120,7 @@ class MiqRequest < ApplicationRecord }.freeze REQUEST_TYPES_BACKEND_ONLY = { - :MiqProvisionRequestTemplate => {:template => "VM Provision Template"}, + :MiqProvisionRequestTemplate => {:template => "VM Provision Template"}, } REQUEST_TYPES = MODEL_REQUEST_TYPES.values.each_with_object(REQUEST_TYPES_BACKEND_ONLY) { |i, h| i.each { |k, v| h[k] = v } } @@ -186,6 +186,7 @@ def initialize_attributes miq_approvals << build_default_approval if miq_approvals.empty? return unless requester + self.requester_name ||= requester.name self.userid ||= requester.userid self.tenant ||= requester.current_tenant @@ -214,8 +215,7 @@ def build_request_event(event_name) ) {'EventStream::event_stream' => event_obj.id, - :event_stream_id => event_obj.id - } + :event_stream_id => event_obj.id} end def call_automate_event(event_name, synchronous: false) @@ -250,7 +250,7 @@ def pending end def approval_approved - unless self.approved? + unless approved? _log.info("Request: [#{description}] has outstanding approvals") return false end @@ -301,6 +301,7 @@ def self.request_types def request_status return status if self.approval_state == 'approved' && !status.nil? + case self.approval_state when 'pending_approval' then 'Unknown' when 'denied' then 'Error' @@ -319,7 +320,7 @@ def first_approval end def approve(userid, reason) - first_approval.approve(userid, reason) unless self.approved? + first_approval.approve(userid, reason) unless approved? end api_relay_method(:approve) { |_userid, reason| {:reason => reason} } api_relay_method(:deny) { |_userid, reason| {:reason => reason} } @@ -331,7 +332,7 @@ def stamped_by def approver first_approval.approver.try(:name) end - alias_method :approver_role, :approver # TODO: Is this needed anymore? + alias approver_role approver # TODO: Is this needed anymore? 
def workflow_class klass = self.class.workflow_class @@ -344,14 +345,13 @@ def self.workflow_class end def self.request_task_class - @request_task_class ||= begin - case name - when 'MiqProvisionRequest' - name.underscore.chomp('_request').camelize.constantize - else - name.underscore.gsub(/_request$/, "_task").camelize.constantize - end - end + @request_task_class ||= case name + when 'MiqProvisionRequest' + name.underscore.chomp('_request').camelize.constantize + else + name.underscore.gsub(/_request$/, "_task").camelize.constantize + end + end def requested_task_idx @@ -381,14 +381,14 @@ def update_request_status end msg = states.sort.collect { |s| "#{s[0].capitalize} = #{s[1]}" }.join("; ") - req_state = (states.length == 1) ? states.keys.first : "active" + req_state = states.length == 1 ? states.keys.first : "active" # Determine status to report req_status = status.slice('Error', 'Timeout', 'Warn').keys.first || 'Ok' if req_state == "finished" update_attribute(:fulfilled_on, Time.now.utc) - msg = (req_status == 'Ok') ? "Request complete" : "Request completed with errors" + msg = req_status == 'Ok' ? "Request complete" : "Request completed with errors" end # If there is only 1 request_task, set the parent message the same @@ -473,7 +473,7 @@ def create_request_tasks post_create_request_tasks rescue _log.log_backtrace($ERROR_INFO) # TODO: Add to Request Logs - request_state, status = request_task_created.zero? ? %w(finished Error) : %w(active Warn) + request_state, status = request_task_created.zero? ? %w[finished Error] : %w[active Warn] update(:request_state => request_state, :status => status, :message => "Error: #{$ERROR_INFO}") end end @@ -549,7 +549,7 @@ def post_create(auto_approve) # Helper method when not using workflow def self.update_request(request, values, requester) - request = request.kind_of?(MiqRequest) ? request : MiqRequest.find(request) + request = MiqRequest.find(request) unless request.kind_of?(MiqRequest) request.update_request(values, requester) end @@ -575,7 +575,7 @@ def audit_request_success(requester_id, mode) :event => event_name(mode), :target_class => self.class::SOURCE_CLASS_NAME, :userid => requester_id, - :message => event_message, + :message => event_message ) end @@ -624,12 +624,13 @@ def canceled? end # Helper method to log the request to both the request_logs table and $log - def self.request_log(severity, message = nil, resource_id: nil, &block) + def self.request_log(severity, message = nil, resource_id: nil) formatted_severity = severity.to_s.upcase level = Logger.const_get(formatted_severity) # Partially copied from Logger#add return true if level < $log.level + message = yield if message.nil? && block_given? 
RequestLog.create(:message => message, :severity => formatted_severity, :resource_id => resource_id) if resource_id @@ -637,7 +638,7 @@ def self.request_log(severity, message = nil, resource_id: nil, &block) end def request_log(severity, message = nil, resource_id: nil, &block) - self.class.request_log(severity, message, resource_id: resource_id, &block) + self.class.request_log(severity, message, :resource_id => resource_id, &block) end private diff --git a/app/models/miq_request_task.rb b/app/models/miq_request_task.rb index b48c9f7968c..05d60cdde6e 100644 --- a/app/models/miq_request_task.rb +++ b/app/models/miq_request_task.rb @@ -18,7 +18,7 @@ class MiqRequestTask < ApplicationRecord delegate :request_class, :task_description, :to => :class - validates_inclusion_of :status, :in => %w( Ok Warn Error Timeout ) + validates :status, :inclusion => {:in => %w[Ok Warn Error Timeout]} include MiqRequestMixin include TenancyMixin @@ -28,9 +28,9 @@ class MiqRequestTask < ApplicationRecord CANCEL_STATUS_FINISHED = "canceled".freeze CANCEL_STATUS = [CANCEL_STATUS_REQUESTED, CANCEL_STATUS_PROCESSING, CANCEL_STATUS_FINISHED].freeze - validates :cancelation_status, :inclusion => { :in => CANCEL_STATUS, - :allow_nil => true, - :message => "should be one of #{CANCEL_STATUS.join(", ")}" } + validates :cancelation_status, :inclusion => {:in => CANCEL_STATUS, + :allow_nil => true, + :message => "should be one of #{CANCEL_STATUS.join(", ")}"} def approved? if miq_request.class.name.include?('Template') && miq_request_task @@ -69,7 +69,7 @@ def update_request_status states["unknown"] = unknown_state unless unknown_state.zero? msg = states.sort.collect { |s| "#{s[0].capitalize} = #{s[1]}" }.join("; ") - req_state = (states.length == 1) ? states.keys.first : "active" + req_state = states.length == 1 ? states.keys.first : "active" # Determine status to report req_status = status.slice('Error', 'Timeout', 'Warn').keys.first || 'Ok' @@ -80,7 +80,7 @@ def update_request_status end if req_state == "finished" - msg = (req_status == 'Ok') ? "Task complete" : "Task completed with errors" + msg = req_status == 'Ok' ? 
"Task complete" : "Task completed with errors" end # If there is only 1 request_task, set the parent message the same @@ -122,6 +122,7 @@ def task_check_on_delivery if request_class::ACTIVE_STATES.include?(state) raise _("%{task} request is already being processed") % {:task => request_class::TASK_DESCRIPTION} end + task_check_on_execute end @@ -213,13 +214,12 @@ def execute # Process the request do_request - rescue => err message = "Error: #{err.message}" _log.error("[#{message}] encountered during #{request_class::TASK_DESCRIPTION}") _log.log_backtrace(err) update_and_notify_parent(:state => "finished", :status => "Error", :message => message) - return + nil end end @@ -259,7 +259,7 @@ def validate_state end def valid_states - %w(pending finished) + request_class::ACTIVE_STATES + %w[pending finished] + request_class::ACTIVE_STATES end def dialog_values diff --git a/app/models/miq_request_task/dumping.rb b/app/models/miq_request_task/dumping.rb index 34761a09215..8e6c969952c 100644 --- a/app/models/miq_request_task/dumping.rb +++ b/app/models/miq_request_task/dumping.rb @@ -5,7 +5,7 @@ module ClassMethods def dump_obj(obj, prefix = nil, print_obj = STDOUT, print_method = :puts, &block) meth = "dump_#{obj.class.name.underscore}".to_sym - if self.respond_to?(meth) + if respond_to?(meth) return send(meth, obj, prefix, print_obj, print_method, &block) end diff --git a/app/models/miq_request_task/post_install_callback.rb b/app/models/miq_request_task/post_install_callback.rb index 7f9e556e1cc..705d4e101b7 100644 --- a/app/models/miq_request_task/post_install_callback.rb +++ b/app/models/miq_request_task/post_install_callback.rb @@ -15,6 +15,7 @@ def post_install_callback(id) def post_install_callback_url remote_ui_url = MiqRegion.my_region.remote_ui_url(:ipaddress) return nil if remote_ui_url.nil? + "#{File.join(remote_ui_url, "miq_request/post_install_callback")}?task_id=#{id}" end @@ -25,7 +26,7 @@ def provision_completed_queue :method_name => 'provision_completed', :zone => my_zone, :role => my_role, - :tracking_label => tracking_label_id, + :tracking_label => tracking_label_id ) end diff --git a/app/models/miq_request_task/state_machine.rb b/app/models/miq_request_task/state_machine.rb index c5a776763de..98b8b162f8e 100644 --- a/app/models/miq_request_task/state_machine.rb +++ b/app/models/miq_request_task/state_machine.rb @@ -1,6 +1,6 @@ module MiqRequestTask::StateMachine delegate :my_role, :to => :miq_request - delegate :my_zone, :to => :source, :allow_nil => true + delegate :my_zone, :to => :source, :allow_nil => true delegate :my_queue_name, :to => :miq_request def tracking_label_id @@ -58,7 +58,7 @@ def signal_queue(phase) :zone => my_zone, :role => my_role, :queue_name => my_queue_name, - :tracking_label => tracking_label_id, + :tracking_label => tracking_label_id ) end @@ -82,7 +82,7 @@ def requeue_phase :role => my_role, :queue_name => my_queue_name, :tracking_label => tracking_label_id, - :miq_callback => {:class_name => self.class.name, :instance_id => id, :method_name => :execute_callback} + :miq_callback => {:class_name => self.class.name, :instance_id => id, :method_name => :execute_callback} ) end diff --git a/app/models/miq_request_workflow.rb b/app/models/miq_request_workflow.rb index 230ee106dad..51ac990ffff 100644 --- a/app/models/miq_request_workflow.rb +++ b/app/models/miq_request_workflow.rb @@ -36,6 +36,7 @@ def self.all_encrypted_options_fields def self.update_requester_from_parameters(data, user) return user if data[:user_name].blank? 
+ new_user = User.lookup_by_identity(data[:user_name]) unless new_user @@ -56,8 +57,8 @@ def initialize(values, requester, options = {}) if @dialogs.nil? @dialogs = get_dialogs normalize_numeric_fields - else - @running_pre_dialog = true if options[:use_pre_dialog] != false + elsif options[:use_pre_dialog] != false + @running_pre_dialog = true end end @@ -76,12 +77,13 @@ def instance_var_init(values, requester, options) @requester = @requester.clone @requester.current_group_by_description = group_description end - @values.merge!(options) unless options.blank? + @values.merge!(options) if options.present? end # Helper method when not using workflow def make_request(request, values, _requester = nil, auto_approve = false) return false unless validate(values) + password_helper(values, true) # Ensure that tags selected in the pre-dialog get applied to the request values[:vm_tags] = (Array.wrap(values[:vm_tags]) + @values[:pre_dialog_vm_tags]).uniq if @values.try(:[], :pre_dialog_vm_tags).present? @@ -92,6 +94,7 @@ def make_request(request, values, _requester = nil, auto_approve = false) else req = request_class.new(:options => values, :requester => @requester, :request_type => request_type.to_s) return req unless req.valid? # TODO: CatalogController#atomic_req_submit is the only one that enumerates over the errors + values[:__request_type__] = request_type.to_s.presence # Pass this along to MiqRequest#create_request request_class.create_request(values, @requester, auto_approve) end @@ -114,6 +117,7 @@ def init_from_dialog(init_values) else field_values[:values].each do |tz| next unless tz.kind_of?(Array) + if tz[1].to_i_with_method == val.to_i_with_method # Save [value, description] for timezones array init_values[field_name] = [val, tz[0]] @@ -144,24 +148,23 @@ def validate(values) # Check the disabled flag here so we reset the "error" value on each field next if dialog_disabled || fld[:display] == :hide - value = fld[:data_type] =~ /array_/ ? values[f] : get_value(values[f]) + + value = /array_/.match?(fld[:data_type]) ? values[f] : get_value(values[f]) if fld[:required] == true # If :required_method is defined let it determine if the field is value if fld[:required_method].nil? default_require_method = "default_require_#{f}".to_sym - if self.respond_to?(default_require_method) + if respond_to?(default_require_method) fld[:error] = send(default_require_method, f, values, dlg, fld, value) unless fld[:error].nil? valid = false next end - else - if value.blank? - fld[:error] = "#{required_description(dlg, fld)} is required" - valid = false - next - end + elsif value.blank? + fld[:error] = "#{required_description(dlg, fld)} is required" + valid = false + next end else Array.wrap(fld[:required_method]).each do |method| @@ -177,11 +180,9 @@ def validate(values) end end - if fld[:validation_method] && respond_to?(fld[:validation_method]) - if (fld[:error] = send(fld[:validation_method], f, values, dlg, fld, value)) - valid = false + if fld[:validation_method] && respond_to?(fld[:validation_method]) && (fld[:error] = send(fld[:validation_method], f, values, dlg, fld, value)) + valid = false next - end end next if value.blank? @@ -305,14 +306,15 @@ def get_field(field_name, dialog_name = nil, refresh_values = true) end # TODO: Return list in defined ordered - def dialogs - @dialogs[:dialogs].each_pair { |n, d| yield(n, d) } + def dialogs(&block) + @dialogs[:dialogs].each_pair(&block) end def fields(dialog = nil) dialog = [*dialog] unless dialog.nil? 
@dialogs[:dialogs].each_pair do |dn, d| next unless dialog.blank? || dialog.include?(dn) + d[:fields].each_pair do |fn, f| yield(fn, f, dn, d) end @@ -322,8 +324,8 @@ def fields(dialog = nil) def normalize_numeric_fields fields do |_fn, f, _dn, _d| if f[:data_type] == :integer - f[:default] = f[:default].to_i_with_method unless f[:default].blank? - unless f[:values].blank? + f[:default] = f[:default].to_i_with_method if f[:default].present? + if f[:values].present? keys = f[:values].keys.dup keys.each { |k| f[:values][k.to_i_with_method] = f[:values].delete(k) } end @@ -351,8 +353,10 @@ def self.parse_ws_string(text_input, options = {}) result = {} text_input.split('|').each do |value| next if value.blank? + idx = value.index('=') next if idx.nil? + key = options[:modify_key_name] == false ? value[0, idx].strip : value[0, idx].strip.downcase.to_sym result[key] = value[idx + 1..-1].strip end @@ -390,6 +394,7 @@ def parse_ws_string_v1(values, _options = {}) na = [] values.to_s.split("|").each_slice(2) do |k, v| next if v.nil? + na << [k.strip, v.strip] end na @@ -416,11 +421,9 @@ def set_or_default_field_values(values) selected_key = values[fn] elsif f.key?(:default) && f[:values].key?(f[:default]) selected_key = f[:default] - else - unless f[:values].blank? - sorted_values = f[:values].sort - selected_key = sorted_values.first.first - end + elsif f[:values].present? + sorted_values = f[:values].sort + selected_key = sorted_values.first.first end @values[fn] = [selected_key, f[:values][selected_key]] unless selected_key.nil? else @@ -453,8 +456,8 @@ def set_value_from_list(fn, f, value, values = nil, partial_key = false) if @values[fn].nil? _log.info("set_value_from_list did not matched an item") if partial_key @values[fn] = [nil, nil] - else - _log.info("set_value_from_list matched item value:[#{value}] to item:[#{@values[fn][0]}]") if partial_key + elsif partial_key + _log.info("set_value_from_list matched item value:[#{value}] to item:[#{@values[fn][0]}]") end end end @@ -478,6 +481,7 @@ def required_description(dlg, fld) def allowed_filters(options = {}) model_name = options[:category] return @filters[model_name] unless @filters[model_name].nil? + rails_logger("allowed_filters - #{model_name}", 0) @filters[model_name] = @requester.get_expressions(model_name).invert rails_logger("allowed_filters - #{model_name}", 1) @@ -490,14 +494,15 @@ def dialog_active?(name, config, values) enabled_field = "#{name}_enabled".to_sym # Check if the fields hash contains a _enabled field enabled = get_value(values[enabled_field]) - return false if enabled == false || enabled == "disabled" + return false if [false, "disabled"].include?(enabled) + true end def show_fields(display_flag, field_names, display_field = :display) fields do |fn, f, _dn, _d| if field_names.include?(fn) - flag = f[:display_override].blank? ? display_flag : f[:display_override] + flag = (f[:display_override].presence || display_flag) f[display_field] = flag end end @@ -505,10 +510,11 @@ def show_fields(display_flag, field_names, display_field = :display) def retrieve_ldap(_options = {}) email = get_value(@values[:owner_email]) - unless email.blank? + if email.present? l = MiqLdap.new if l.bind_with_default == true raise _("No information returned for %{email}") % {:email => email} if (d = l.get_user_info(email, "mail")).nil? 
+ [:first_name, :last_name, :address, :city, :state, :zip, :country, :title, :company, :department, :office, :phone, :phone_mobile, :manager, :manager_mail, :manager_phone].each do |prop| @values["owner_#{prop}".to_sym] = d[prop].try(:dup) @@ -534,6 +540,7 @@ def values_less_then(options) field, include_equals = options[:field], options[:include_equals] max_value = field.nil? ? options[:value].to_i_with_method : get_value(@values[field]).to_i_with_method return results if max_value <= 0 + results.reject { |k, _v| include_equals == true ? max_value < k : max_value <= k } end @@ -578,6 +585,7 @@ def allowed_tags(options = {}) cats.each do |t| next if exclude_list.include?(t.name) next unless include_list.blank? || include_list.include?(t.name) + # Force passed tags to be single select single_value = single_select.include?(t.name) ? true : t.single_value? @tags[t.id] = {:name => t.name, :description => t.description, :single_value => single_value, :children => {}, :id => t.id} @@ -588,6 +596,7 @@ def allowed_tags(options = {}) ents.each do |t| full_tag_name = "#{@tags[t.parent_id][:name]}/#{t.name}" next if exclude_list.include?(full_tag_name) + @tags[t.parent_id][:children][t.id] = {:name => t.name, :description => t.description} end @@ -605,7 +614,7 @@ def allowed_tags(options = {}) @tags = tag_results.compact + tags_to_sort @tags.each do |tag| - tag[:children] = if tag[:children].first.last[:name] =~ /^\d/ + tag[:children] = if /^\d/.match?(tag[:children].first.last[:name]) tag[:children].sort_by { |_k, v| v[:name].to_i } else tag[:children].sort_by { |_k, v| v[:description] } @@ -643,7 +652,7 @@ def tag_symbol def build_ci_hash_struct(ci, props) nh = OpenStruct.new(:id => ci.id, :evm_object_class => ci.class.base_class.name.to_sym) - props.each { |p| nh.send("#{p}=", ci.send(p)) } + props.each { |p| nh.send(:"#{p}=", ci.send(p)) } nh end @@ -663,7 +672,7 @@ def get_dialogs def get_pre_dialogs pre_dialogs = nil pre_dialog_name = dialog_name_from_automate('get_pre_dialog_name') - unless pre_dialog_name.blank? + if pre_dialog_name.present? pre_dialog_name = File.basename(pre_dialog_name, ".rb") d = MiqDialog.find_by(:name => pre_dialog_name, :dialog_type => self.class.base_model.name) unless d.nil? @@ -687,7 +696,7 @@ def dialog_name_from_automate(message = 'get_dialog_name', input_fields = [:requ if attrs.key?(key) _log.info("Skipping key=<#{key}> because already set to <#{attrs[key]}>") else - value = (k == :vm_tags) ? get_tags : get_value(@values[k]).to_s + value = k == :vm_tags ? get_tags : get_value(@values[k]).to_s _log.info("Setting attrs[#{key}]=<#{value}>") attrs[key] = value end @@ -703,6 +712,7 @@ def dialog_name_from_automate(message = 'get_dialog_name', input_fields = [:requ ws.root.attributes.each do |key, value| next unless key.downcase.starts_with?(dialog_option_prefix) next unless key.length > dialog_option_prefix_length + key = key[dialog_option_prefix_length..-1].downcase _log.info("Setting @values[#{key}]=<#{value}>") @values[key.to_sym] = value @@ -726,6 +736,7 @@ def request_type def request_class req_class = self.class.request_class return req_class unless get_value(@values[:service_template_request]) == true + (req_class.name + "Template").constantize end @@ -760,6 +771,7 @@ def set_request_values(values) def password_helper(values = @values, encrypt = true) self.class.encrypted_options_fields.each do |pwd_key| next if values[pwd_key].blank? 
+ if encrypt values[pwd_key].replace(ManageIQ::Password.try_encrypt(values[pwd_key])) else @@ -804,7 +816,7 @@ def allowed_ci(ci, relats, sources, filtered_ids = nil) result = nil relats.each do |rsc_type| rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 0) - rc = send("#{rsc_type}_to_#{ci}", sources) + rc = send(:"#{rsc_type}_to_#{ci}", sources) rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 1) unless rc.nil? rc = rc.to_a @@ -821,8 +833,7 @@ def process_filter(filter_prop, ci_klass, targets) filter_id = get_value(@values[filter_prop]).to_i MiqSearch.filtered(filter_id, ci_klass, targets, :user => @requester, - :miq_group => @requester.current_group, - ).tap { rails_logger("process_filter - [#{ci_klass}]", 1) } + :miq_group => @requester.current_group).tap { rails_logger("process_filter - [#{ci_klass}]", 1) } end def find_all_ems_of_type(klass, src = nil) @@ -842,6 +853,7 @@ def find_respools_under_ci(item) def find_classes_under_ci(item, klass) results = [] return results if item.nil? + @_find_classes_under_ci_prefix ||= _log.prefix node = load_ems_node(item, @_find_classes_under_ci_prefix) each_ems_metadata(node.attributes[:object], klass) { |ci| results << ci } unless node.nil? @@ -863,6 +875,7 @@ def load_ems_node(item, log_header) def ems_has_clusters? found = each_ems_metadata(nil, EmsCluster) { |ci| break(ci) } return found.evm_object_class == :EmsCluster if found.kind_of?(OpenStruct) + false end @@ -892,6 +905,7 @@ def get_ems_folders(folder, dh = {}, full_path = "") def get_ems_respool(node, dh = {}, full_path = "") return if node.nil? + if node.kind_of?(XmlHash::Element) folder = node.attributes[:object] if node.name == :ResourcePool @@ -912,7 +926,7 @@ def find_datacenter_for_ci(item, ems_src = nil) def find_hosts_for_respool(item, ems_src = nil) hosts = find_class_above_ci(item, Host, ems_src) - return [hosts] unless hosts.blank? + return [hosts] if hosts.present? cluster = find_cluster_above_ci(item) find_hosts_under_ci(cluster) @@ -930,7 +944,7 @@ def find_class_above_ci(item, klass, _ems_src = nil, datacenter = false) # Walk the xml document parents to find the requested class while node.kind_of?(XmlHash::Element) ci = node.attributes[:object] - if node.name == klass_name && (datacenter == false || datacenter == true && ci.datacenter?) + if node.name == klass_name && (datacenter == false || (datacenter == true && ci.datacenter?)) result = ci break end @@ -958,6 +972,7 @@ def each_ems_metadata(ems_ci = nil, klass = nil, &_blk) def get_ems_metadata_tree(src) @ems_metadata_tree ||= begin return if src[:ems].nil? + st = Time.zone.now result = load_ar_obj(src[:ems]).fulltree_arranged(:except_type => "VmOrTemplate") ems_metadata_tree_add_hosts_under_clusters!(result) @@ -994,8 +1009,10 @@ def add_target(dialog_key, key, klass, result) def ci_to_hash_struct(ci) return if ci.nil? 
return ci.collect { |c| ci_to_hash_struct(c) } if ci.respond_to?(:collect) + method_name = "#{ci.class.base_class.name.underscore}_to_hash_struct".to_sym return send(method_name, ci) if respond_to?(method_name, true) + default_ci_to_hash_struct(ci) end @@ -1011,7 +1028,7 @@ def vm_or_template_to_hash_struct(ci) def ems_folder_to_hash_struct(ci) build_ci_hash_struct(ci, [:name, :type, :hidden]).tap do |nh| - nh.send("datacenter?=", ci.kind_of?(Datacenter)) + nh.send(:"datacenter?=", ci.kind_of?(Datacenter)) end end @@ -1033,6 +1050,7 @@ def customization_spec_to_hash_struct(ci) def load_ar_obj(ci) return load_ar_objs(ci) if ci.kind_of?(Array) return ci unless ci.kind_of?(OpenStruct) + ci.evm_object_class.to_s.camelize.constantize.find_by(:id => ci.id) end @@ -1048,6 +1066,7 @@ def resources_for_ui def allowed_hosts_obj(options = {}) return [] if (src = resources_for_ui).blank? || src[:ems].nil? + datacenter = src[:datacenter] || options[:datacenter] rails_logger('allowed_hosts_obj', 0) st = Time.now @@ -1070,6 +1089,7 @@ def allowed_hosts_obj(options = {}) def allowed_storages(_options = {}) return [] if (src = resources_for_ui).blank? || src[:ems].nil? + hosts = src[:host].nil? ? allowed_hosts_obj({}) : [load_ar_obj(src[:host])] return [] if hosts.blank? @@ -1114,7 +1134,7 @@ def allowed_respools(_options = {}) filtered_targets = process_filter(:rp_filter, ResourcePool, all_resource_pools) allowed_ci(:respool, [:cluster, :host, :folder], filtered_targets.collect(&:id)) end - alias_method :allowed_resource_pools, :allowed_respools + alias allowed_resource_pools allowed_respools def allowed_folders(_options = {}) allowed_ci(:folder, [:cluster, :host, :respool]) @@ -1122,6 +1142,7 @@ def allowed_folders(_options = {}) def cluster_to_datacenter(src) return nil unless ems_has_clusters? + ci_to_datacenter(src, :cluster, EmsCluster) end @@ -1135,6 +1156,7 @@ def host_to_datacenter(src) def folder_to_datacenter(src) return nil if src[:folder].nil? + ci_to_datacenter(src, :folder, EmsFolder) end @@ -1145,24 +1167,28 @@ def ci_to_datacenter(src, ci, ci_type) def respool_to_cluster(src) return nil unless ems_has_clusters? + sources = src[:respool].nil? ? find_all_ems_of_type(ResourcePool) : [src[:respool]] build_id_to_name_hash(sources.collect { |rp| find_cluster_above_ci(rp) }.compact) end def host_to_cluster(src) return nil unless ems_has_clusters? + sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]] build_id_to_name_hash(sources.collect { |h| find_cluster_above_ci(h) }.compact) end def folder_to_cluster(src) return nil unless ems_has_clusters? + source = find_all_ems_of_type(EmsCluster) build_id_to_name_hash(filter_to_objects_in_same_datacenter(source, src)) end def cluster_to_respool(src) return nil unless ems_has_clusters? + targets = src[:cluster].nil? ? find_all_ems_of_type(ResourcePool) : find_respools_under_ci(src[:cluster]) res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src)) targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] } @@ -1170,6 +1196,7 @@ def cluster_to_respool(src) def folder_to_respool(src) return nil if src[:folder].nil? + datacenter = find_datacenter_for_ci(src[:folder]) targets = find_respools_under_ci(datacenter) res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src)) @@ -1189,6 +1216,7 @@ def host_to_respool(src) def cluster_to_host(src) return nil unless ems_has_clusters? + hosts = src[:cluster].nil? ? 
find_all_ems_of_type(Host) : find_hosts_under_ci(src[:cluster]) build_id_to_name_hash(hosts) end @@ -1221,6 +1249,7 @@ def host_to_folder(src) def cluster_to_folder(src) return nil unless ems_has_clusters? return nil if src[:cluster].nil? + sources = [src[:cluster]] datacenters = sources.collect { |h| find_datacenter_for_ci(h) }.compact datacenters.each_with_object({}) { |dc, folders| folders.merge!(get_ems_folders(dc)) } @@ -1228,6 +1257,7 @@ def cluster_to_folder(src) def respool_to_folder(src) return nil if src[:respool].nil? + sources = [src[:respool]] datacenters = sources.collect { |h| find_datacenter_for_ci(h) }.compact datacenters.each_with_object({}) { |dc, folders| folders.merge!(get_ems_folders(dc)) } @@ -1252,8 +1282,8 @@ def set_ws_field_value(values, key, data, dialog_name, dlg_fields) [found.id, found.name] if found elsif data_type == :array_integer field_values.keys & set_value - else - [set_value, field_values[set_value]] if field_values.key?(set_value) + elsif field_values.key?(set_value) + [set_value, field_values[set_value]] end set_value = apply_result(result, data_type) @@ -1269,7 +1299,7 @@ def cast_value(value, data_type) case data_type when :integer then value.to_i_with_method when :float then value.to_f - when :boolean then value.to_s.downcase.in?(%w(true t)) + when :boolean then value.to_s.downcase.in?(%w[true t]) when :time then Time.zone.parse(value) when :button then value # Ignore when :array_integer then Array.wrap(value).map!(&:to_i) @@ -1309,7 +1339,7 @@ def set_ws_field_value_by_display_name(values, key, data, dialog_name, dlg_field def set_ws_field_value_by_id_or_name(values, dlg_field, data, dialog_name, dlg_fields, data_key = nil, id_klass = nil) data_key = dlg_field if data_key.blank? if data.key?(data_key) - data[data_key] = "#{id_klass}::#{data[data_key]}" unless id_klass.blank? + data[data_key] = "#{id_klass}::#{data[data_key]}" if id_klass.present? data[dlg_field] = data.delete(data_key) set_ws_field_value(values, dlg_field, data, dialog_name, dlg_fields) else @@ -1359,6 +1389,7 @@ def get_pxe_image def get_image_by_type(image_type) klass, id = get_value(@values[image_type]).to_s.split('::') return nil if id.blank? + klass.constantize.find_by(:id => id) end @@ -1373,10 +1404,12 @@ def allowed_pxe_servers(_options = {}) def allowed_pxe_images(_options = {}) pxe_server = get_pxe_server return [] if pxe_server.nil? + prov_typ = "vm" pxe_server.pxe_images.collect do |p| next if p.pxe_image_type.nil? || p.default_for_windows + # filter pxe images by provision_type to show vm/any or host/any build_ci_hash_struct(p, [:name, :description]) if p.pxe_image_type.provision_type.blank? || p.pxe_image_type.provision_type == prov_typ end.compact @@ -1449,7 +1482,7 @@ def ws_requester_fields(values, fields) def ws_schedule_fields(values, _fields, data) return if (dlg_fields = get_ws_dialog_fields(dialog_name = :schedule)).nil? - unless data[:schedule_time].blank? + if data[:schedule_time].present? values[:schedule_type] = 'schedule' [:schedule_time, :retirement_time].each do |key| data_type = :time @@ -1476,6 +1509,7 @@ def raise_validate_errors def apply_result(result, data_type) return result if data_type == :array_integer + [result.first, result.last] unless result.nil? 
end diff --git a/app/models/miq_request_workflow/dialog_field_validation.rb b/app/models/miq_request_workflow/dialog_field_validation.rb index 9ea400deabe..d6e438e7db5 100644 --- a/app/models/miq_request_workflow/dialog_field_validation.rb +++ b/app/models/miq_request_workflow/dialog_field_validation.rb @@ -8,35 +8,38 @@ def validate_tags(field, values, _dlg, fld, _value) required_tags = Array.wrap(fld[:required_tags].presence).collect(&:to_sym) missing_tags = required_tags - selected_tags_categories missing_categories_names = missing_tags.collect do |category| - begin - Classification.lookup_by_name(category.to_s).description - rescue StandardError - nil - end + + Classification.lookup_by_name(category.to_s).description + rescue + nil + end.compact return nil if missing_categories_names.blank? + _("Required tag(s): %{names}") % {:names => missing_categories_names.join(', ')} end def validate_length(_field, _values, dlg, fld, value) return _("%{name} is required") % {:name => required_description(dlg, fld)} if value.blank? + if fld[:min_length] && value.to_s.length < fld[:min_length] return _("%{name} must be at least %{length} characters") % {:name => required_description(dlg, fld), :length => fld[:min_length]} end if fld[:max_length] && value.to_s.length > fld[:max_length] - return _("%{name} must not be greater than %{length} characters") % {:name => required_description(dlg, fld), - :length => fld[:max_length]} + _("%{name} must not be greater than %{length} characters") % {:name => required_description(dlg, fld), + :length => fld[:max_length]} end end def validate_regex(_field, _values, dlg, fld, value) regex = fld[:required_regex] return _("%{name} is required") % {:name => required_description(dlg, fld)} if value.blank? + unless value.match(regex) error = _("%{name} must be correctly formatted") % {:name => required_description(dlg, fld)} - error << _(". %{details}") % {:details => fld[:required_regex_fail_details] } if fld[:required_regex_fail_details] + error << (_(". %{details}") % {:details => fld[:required_regex_fail_details]}) if fld[:required_regex_fail_details] error end @@ -45,6 +48,7 @@ def validate_regex(_field, _values, dlg, fld, value) def validate_blacklist(_field, _values, dlg, fld, value) blacklist = fld[:blacklist] return _("%{name} is required") % {:name => required_description(dlg, fld)} if value.blank? 
+ if blacklist && blacklist.include?(value) _("%{name} may not contain blacklisted value") % {:name => required_description(dlg, fld)} end diff --git a/app/models/miq_retire_request.rb b/app/models/miq_retire_request.rb index 56b263e2e98..09f0da89c8e 100644 --- a/app/models/miq_retire_request.rb +++ b/app/models/miq_retire_request.rb @@ -1,9 +1,8 @@ class MiqRetireRequest < MiqRequest - - validates :request_state, :inclusion => { :in => %w(pending finished) + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" } + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} validate :must_have_user - default_value_for(:source_id) { |r| r.get_option(:src_id) } + default_value_for(:source_id) { |r| r.get_option(:src_id) } def my_zone end diff --git a/app/models/miq_retire_task.rb b/app/models/miq_retire_task.rb index f78844ac93c..bfa345745f6 100644 --- a/app/models/miq_retire_task.rb +++ b/app/models/miq_retire_task.rb @@ -60,7 +60,7 @@ def after_ae_delivery(ae_result) def before_ae_starts(_options) reload - if state.to_s.downcase.in?(%w(pending queued)) + if state.to_s.downcase.in?(%w[pending queued]) _log.info("Executing #{request_class::TASK_DESCRIPTION} request: [#{description}]") update_and_notify_parent(:state => "active", :status => "Ok", :message => "In Process") end @@ -68,8 +68,8 @@ def before_ae_starts(_options) def mark_pending_items_as_finished miq_request.miq_request_tasks.each do |s| - if s.state == 'pending' - s.update_and_notify_parent(:state => "finished", :status => "Warn", :message => "Error in Request: #{miq_request.id}. Setting pending Task: #{id} to finished.") unless id == s.id + if s.state == 'pending' && !(id == s.id) + s.update_and_notify_parent(:state => "finished", :status => "Warn", :message => "Error in Request: #{miq_request.id}. Setting pending Task: #{id} to finished.") end end end diff --git a/app/models/miq_schedule.rb b/app/models/miq_schedule.rb index b7017f0b108..fb63c86599d 100644 --- a/app/models/miq_schedule.rb +++ b/app/models/miq_schedule.rb @@ -57,7 +57,7 @@ def run_at val = self[:run_at] if val.kind_of?(Hash) st = val[:start_time] - if st && String === st + if st && st.kind_of?(String) val[:start_time] = st.to_time(:utc).utc end end @@ -127,6 +127,7 @@ def target_ids # Let RBAC evaluate the filter's MiqExpression, and return the first value (the target ids) my_filter = get_filter return [] if my_filter.nil? + Rbac.filtered(resource_type, :filter => my_filter).pluck(:id) end @@ -152,11 +153,11 @@ def next_run_on return nil if enabled == false # calculate what the next run on time should be - if run_at[:interval][:unit].downcase != "once" - time = next_interval_time - else - time = (last_run_on && (last_run_on > run_at[:start_time])) ? nil : run_at[:start_time] - end + time = if run_at[:interval][:unit].downcase != "once" + next_interval_time + else + last_run_on && (last_run_on > run_at[:start_time]) ? 
nil : run_at[:start_time] + end time.try(:utc) end @@ -186,17 +187,15 @@ def run_at_to_human(timezone) if run_at[:interval][:unit].downcase == "once" _("Run %{interval} on %{start_time}") % {:interval => interval, :start_time => start_time} + elsif run_at[:interval][:value].to_i == 1 + _("Run %{interval} starting on %{start_time}") % {:interval => interval, + :start_time => start_time} else - if run_at[:interval][:value].to_i == 1 - _("Run %{interval} starting on %{start_time}") % {:interval => interval, - :start_time => start_time} - else - return _("Run %{interval} every %{value} %{unit} starting on %{start_time}") % - {:interval => interval, - :value => run_at[:interval][:value], - :unit => unit, - :start_time => start_time} - end + _("Run %{interval} every %{value} %{unit} starting on %{start_time}") % + {:interval => interval, + :value => run_at[:interval][:value], + :unit => unit, + :start_time => start_time} end end @@ -298,7 +297,7 @@ def validate_run_at end end - def verify_file_depot(params) # TODO: This logic belongs in the UI, not sure where + def verify_file_depot(params) # TODO: This logic belongs in the UI, not sure where depot_class = FileDepot.supported_protocols[params[:uri_prefix]] depot = file_depot.class.name == depot_class ? file_depot : build_file_depot(:type => depot_class) depot.name = params[:name] @@ -319,7 +318,7 @@ def verify_file_depot(params) # TODO: This logic belongs in the UI, not sure wh end def next_interval_time - unless self.valid? || errors[:run_at].blank? + unless valid? || errors[:run_at].blank? _log.warn("Invalid schedule [#{id}] [#{name}]: #{Array.wrap(errors[:run_at]).join(", ")}") return nil end @@ -343,7 +342,7 @@ def next_interval_time meth = rails_interval if meth.nil? raise _("Schedule: [%{id}] [%{name}], cannot calculate next run with past start_time using: %{path}") % - {:id => id, :name => name, :path => run_at.fetch_path(:interval, :unit)} + {:id => id, :name => name, :path => run_at.fetch_path(:interval, :unit)} end if meth == :months @@ -359,11 +358,11 @@ def next_interval_time # scheduled start_time and jump there instead of creating thousands of time objects # until we've found the first future run time missed_intervals = (seconds_since_start / interval_value.send(meth)).to_i - while now > (sch_start_time + ((interval_value * missed_intervals).send(meth))) + while now > (sch_start_time + (interval_value * missed_intervals).send(meth)) missed_intervals += 1 end - next_time = sch_start_time + ((interval_value * missed_intervals).send(meth)) + next_time = sch_start_time + (interval_value * missed_intervals).send(meth) next_time += interval_value.send(meth) if next_time < now && interval_value end end @@ -384,7 +383,7 @@ def rails_interval end def interval - unless self.valid? || errors[:run_at].blank? + unless valid? || errors[:run_at].blank? _log.warn("Invalid schedule [#{id}] [#{name}]: #{Array.wrap(errors[:run_at]).join(", ")}") return nil end @@ -413,14 +412,13 @@ def self.preload_schedules def v_interval_unit if run_at[:interval] && run_at[:interval][:unit] - return run_at[:interval][:unit] - else - return nil + run_at[:interval][:unit] end end def v_zone_name return "" if zone.nil? 
+ zone.name end diff --git a/app/models/miq_schedule_worker/jobs.rb b/app/models/miq_schedule_worker/jobs.rb index 9652ffa211b..3f45dea118e 100644 --- a/app/models/miq_schedule_worker/jobs.rb +++ b/app/models/miq_schedule_worker/jobs.rb @@ -22,7 +22,7 @@ def vmdb_database_connection_log_statistics end def job_check_jobs_for_timeout - queue_work_on_each_zone(:class_name => "Job", :method_name => "check_jobs_for_timeout") + queue_work_on_each_zone(:class_name => "Job", :method_name => "check_jobs_for_timeout") end def retirement_check @@ -39,7 +39,7 @@ def ems_authentication_check_schedule end def session_check_session_timeout - queue_work(:class_name => "Session", :method_name => "check_session_timeout", :server_guid => MiqServer.my_guid) + queue_work(:class_name => "Session", :method_name => "check_session_timeout", :server_guid => MiqServer.my_guid) end def job_check_for_evm_snapshots(job_not_found_delay) @@ -78,7 +78,7 @@ def miq_alert_evaluate_hourly_timer end def storage_scan_timer - queue_work(:class_name => "Storage", :method_name => "scan_timer") + queue_work(:class_name => "Storage", :method_name => "scan_timer") end def metric_capture_perf_capture_timer @@ -204,6 +204,7 @@ def check_for_timed_out_active_tasks def queue_work(options) return if options.nil? + options = {:zone => MiqServer.my_zone, :priority => MiqQueue::MEDIUM_PRIORITY}.merge(options) # always has class_name, method_name, zone, priority [often has role] MiqQueue.put_unless_exists(options) diff --git a/app/models/miq_schedule_worker/runner.rb b/app/models/miq_schedule_worker/runner.rb index 083a67b11db..db3fce89533 100644 --- a/app/models/miq_schedule_worker/runner.rb +++ b/app/models/miq_schedule_worker/runner.rb @@ -24,6 +24,7 @@ def dst? def check_dst return if @dst == dst? 
+ run_callbacks(:dst_change) do reset_dst end @@ -59,7 +60,7 @@ def schedule_enabled?(role) def scheduler_for(role) @schedules[role] ||= [] - ::MiqScheduleWorker::Scheduler.new(self._log, @schedules[role], @system_scheduler) + ::MiqScheduleWorker::Scheduler.new(_log, @schedules[role], @system_scheduler) end def schedules_for_all_roles @@ -119,6 +120,7 @@ def schedules_for_all_roles def schedules_for_scheduler_role # These schedules need to run only once in a region per interval, so let the single scheduler role handle them return unless schedule_enabled?(:scheduler) + scheduler = scheduler_for(:scheduler) # Schedule - Check for timed out jobs @@ -325,7 +327,7 @@ def schedule_check_for_task_timeout scheduler.schedule_every( :check_for_timed_out_active_tasks, every, - :first_at => Time.current + 1.minute + :first_at => 1.minute.from_now ) do enqueue(:check_for_timed_out_active_tasks) end @@ -336,26 +338,27 @@ def schedule_chargeback_report_for_service_daily every = worker_settings[:chargeback_generation_interval] at = worker_settings[:chargeback_generation_time_utc] time_at = Time.current.strftime("%Y-%m-%d #{at}").to_time(:utc) - time_at += 1.day if time_at < Time.current + 1.hour + time_at += 1.day if time_at < 1.hour.from_now scheduler = scheduler_for(:scheduler) scheduler.schedule_every( :generate_chargeback_for_service, every, :first_at => time_at ) do - enqueue([:generate_chargeback_for_service, :report_source => "Daily scheduler"]) + enqueue([:generate_chargeback_for_service, {:report_source => "Daily scheduler"}]) end end def schedules_for_database_operations_role return unless schedule_enabled?(:database_operations) + scheduler = scheduler_for(:database_operations) # Schedule - Database reindexing scheduler.schedule_cron( :database_maintenance_reindex_timer, ::Settings.database.maintenance.reindex_schedule, - :tags => %i(database_operations database_maintenance_reindex_schedule), + :tags => %i[database_operations database_maintenance_reindex_schedule] ) do enqueue(:database_maintenance_reindex_timer) end @@ -364,7 +367,7 @@ def schedules_for_database_operations_role scheduler.schedule_cron( :database_maintenance_vacuum_timer, ::Settings.database.maintenance.vacuum_schedule, - :tags => %i(database_operations database_maintenance_vacuum_schedule), + :tags => %i[database_operations database_maintenance_vacuum_schedule] ) do enqueue(:database_maintenance_vacuum_timer) end @@ -394,6 +397,7 @@ def schedules_for_database_operations_role def schedules_for_ems_metrics_coordinator_role return unless schedule_enabled?("ems_metrics_coordinator") + scheduler = scheduler_for(:ems_metrics_coordinator) # Schedule - Performance Collection @@ -412,6 +416,7 @@ def schedules_for_ems_metrics_coordinator_role def schedules_for_event_role # These schedules need to run by the servers with the event role return unless schedule_enabled?(:event) + scheduler = scheduler_for(:event) # Schedule - Purging of event streams @@ -549,7 +554,8 @@ def check_roles_changed begin added.each do |r| m = "schedules_for_#{r}_role" - next unless self.respond_to?(m) + next unless respond_to?(m) + _log.info("Adding Schedules for Role=[#{r}]") send(m) end @@ -559,6 +565,7 @@ def check_roles_changed removed.each do |r| rs = r.to_sym next unless @schedules.key?(rs) + _log.info("Removing Schedules for Role=[#{r}]") @schedules[rs].each do |j| # In Rufus::Scheduler Version 1, schedule returns a JobID @@ -567,13 +574,11 @@ def check_roles_changed # passed to opts if j.kind_of?(Integer) @system_scheduler.unschedule(j) - else - if 
j.respond_to?(:tags) - if j.tags.any? { |t| t.to_s.starts_with?("miq_schedules_") } - _log.info("Removing user schedule with Tags: #{j.tags.inspect}") - end - j.unschedule + elsif j.respond_to?(:tags) + if j.tags.any? { |t| t.to_s.starts_with?("miq_schedules_") } + _log.info("Removing user schedule with Tags: #{j.tags.inspect}") end + j.unschedule end end @schedules.delete(rs) diff --git a/app/models/miq_schedule_worker/scheduler.rb b/app/models/miq_schedule_worker/scheduler.rb index 8b7a4e45092..98e969e5d3d 100644 --- a/app/models/miq_schedule_worker/scheduler.rb +++ b/app/models/miq_schedule_worker/scheduler.rb @@ -17,13 +17,13 @@ def schedule_every(name, duration = nil, opts = {}, &block) log_schedule(name, opts.merge(:duration => duration)) if duration.blank? - logger.warn("Duration is empty, scheduling ignored. Called from: #{caller[1]}.") + logger.warn("Duration is empty, scheduling ignored. Called from: #{caller(2..2).first}.") return end role_schedule << rufus_scheduler.schedule_every(duration, nil, opts, &block) rescue ArgumentError => err - logger.error("#{err.class} for schedule_every with [#{duration}, #{opts.inspect}]. Called from: #{caller[1]}.") + logger.error("#{err.class} for schedule_every with [#{duration}, #{opts.inspect}]. Called from: #{caller(2..2).first}.") end def schedule_cron(name, cronline, opts = {}, &block) diff --git a/app/models/miq_search.rb b/app/models/miq_search.rb index 019489f4113..a2fbb3163a2 100644 --- a/app/models/miq_search.rb +++ b/app/models/miq_search.rb @@ -76,10 +76,10 @@ def self.get_expressions(options) end def self.descriptions - Hash[*all.select(:id, :description).flat_map {|x| [x.id.to_s, x.description] }] + Hash[*all.select(:id, :description).flat_map { |x| [x.id.to_s, x.description] }] end - FIXTURE_DIR = File.join(Rails.root, "db/fixtures") + FIXTURE_DIR = Rails.root.join("db/fixtures").to_s def self.seed searches = where("name like 'default%'").index_by { |ms| "#{ms.name}-#{ms.db}" } fixture_file = File.join(FIXTURE_DIR, "miq_searches.yml") diff --git a/app/models/miq_server.rb b/app/models/miq_server.rb index 3fb5930d90f..f9e048e45f5 100644 --- a/app/models/miq_server.rb +++ b/app/models/miq_server.rb @@ -18,7 +18,7 @@ class MiqServer < ApplicationRecord belongs_to :vm, :inverse_of => :miq_server belongs_to :zone - has_many :messages, :as => :handler, :class_name => 'MiqQueue' + has_many :messages, :as => :handler, :class_name => 'MiqQueue' has_many :miq_events, :as => :target has_many :miq_workers, :dependent => :destroy @@ -92,7 +92,7 @@ def self.kill_all_workers end def self.pidfile - @pidfile ||= "#{Rails.root}/tmp/pids/evm.pid" + @pidfile ||= "#{Rails.root.join("tmp/pids/evm.pid")}" end def self.running? @@ -112,7 +112,7 @@ def self.seed end def validate_is_deleteable - unless self.is_deleteable? + unless is_deleteable? _log.error(@errors.full_messages) throw :abort end @@ -199,11 +199,13 @@ def monitor Benchmark.realtime_block(:heartbeat) { heartbeat } if threshold_exceeded?(:heartbeat_frequency, now) Benchmark.realtime_block(:server_dequeue) { process_miq_queue } if threshold_exceeded?(:server_dequeue_frequency, now) - Benchmark.realtime_block(:server_monitor) do - server_monitor.monitor_servers - monitor_server_roles if self.is_master? - messaging_health_check - end if threshold_exceeded?(:server_monitor_frequency, now) + if threshold_exceeded?(:server_monitor_frequency, now) + Benchmark.realtime_block(:server_monitor) do + server_monitor.monitor_servers + monitor_server_roles if is_master? 
+ messaging_health_check + end + end Benchmark.realtime_block(:log_active_servers) { log_active_servers } if threshold_exceeded?(:server_log_frequency, now) Benchmark.realtime_block(:role_monitor) { monitor_active_roles } if threshold_exceeded?(:server_role_monitor_frequency, now) @@ -247,7 +249,7 @@ def monitor_myself end def stop(sync = false) - return if self.stopped? + return if stopped? shutdown_and_exit_queue wait_for_stopped if sync @@ -256,7 +258,8 @@ def stop(sync = false) def wait_for_stopped loop do reload - break if self.stopped? + break if stopped? + sleep stop_poll end end @@ -274,7 +277,7 @@ def kill # Then kill this server _log.info("initiated for #{format_full_log_msg}") update(:stopped_on => Time.now.utc, :status => "killed", :is_master => false) - (pid == Process.pid) ? shutdown_and_exit : Process.kill(9, pid) + pid == Process.pid ? shutdown_and_exit : Process.kill(9, pid) end def self.kill @@ -351,13 +354,13 @@ def is_recently_active? def is_deleteable? return true if MiqEnvironment::Command.is_podified? - if self.is_local? + if is_local? message = N_("Cannot delete currently used %{log_message}") % {:log_message => format_short_log_msg} @errors ||= ActiveModel::Errors.new(self) @errors.add(:base, message) return false end - return true if self.stopped? + return true if stopped? if is_recently_active? message = N_("Cannot delete recently active %{log_message}") % {:log_message => format_short_log_msg} @@ -386,7 +389,8 @@ def alive? end def logon_status - return :ready if self.started? + return :ready if started? + started_on < (Time.now.utc - ::Settings.server.startup_timeout) ? :timed_out_starting : status.to_sym end diff --git a/app/models/miq_server/environment_management.rb b/app/models/miq_server/environment_management.rb index a0ef4a5cfc1..87389d9a535 100644 --- a/app/models/miq_server/environment_management.rb +++ b/app/models/miq_server/environment_management.rb @@ -34,6 +34,7 @@ def check_disk_usage(disks) disks.each do |disk| next unless disk[:used_bytes_percent].to_i > threshold + disk_usage_event = case disk[:mount_point].chomp when '/' then 'evm_server_system_disk_high_usage' when '/boot' then 'evm_server_boot_disk_high_usage' @@ -48,6 +49,7 @@ def check_disk_usage(disks) end next unless disk_usage_event + msg = "Filesystem: #{disk[:filesystem]} (#{disk[:type]}) on #{disk[:mount_point]} is #{disk[:used_bytes_percent]}% full with #{ActiveSupport::NumberHelper.number_to_human_size(disk[:available_bytes])} free." MiqEvent.raise_evm_event_queue(self, disk_usage_event, :event_details => msg) end diff --git a/app/models/miq_server/log_management.rb b/app/models/miq_server/log_management.rb index 416553871ef..a47a9ca882c 100644 --- a/app/models/miq_server/log_management.rb +++ b/app/models/miq_server/log_management.rb @@ -18,7 +18,7 @@ def _post_my_logs(options) :instance_id => id, :method_name => "post_logs", :server_guid => guid, - :zone => my_zone, + :zone => my_zone ) do |msg| _log.info("Previous adhoc log collection is still running, skipping...Resource: [#{self.class.name}], id: [#{id}]") unless msg.nil? nil @@ -37,11 +37,12 @@ def last_log_sync_on def last_log_sync_message last_log = log_files.order(:updated_on => :desc).first - last_log.try(:miq_task).try!(:message) + last_log.try(:miq_task)&.message end def include_automate_models_and_dialogs?(value) return value unless value.nil? 
+    Settings.log.collection.include_automate_models_and_dialogs   end  @@ -52,6 +53,7 @@ def post_logs(options)    # the current queue item and task must be errored out on exceptions so re-raise any caught errors    raise _("Log depot settings not configured") unless context_log_depot +     context_log_depot.update(:support_case => options[:support_case].presence)      if include_automate_models_and_dialogs?(options[:include_automate_models_and_dialogs]) @@ -121,14 +123,14 @@ def post_one_log_pattern(pattern, logfile, log_type)     begin       local_file = VMDB::Util.zip_logs(log_type.to_s.downcase.concat(".zip"), log_patterns(log_type, pattern), "system")  -      self.log_files << logfile +      log_files << logfile        logfile.update(         :local_file          => local_file,         :logging_started_on  => log_start,         :logging_ended_on    => log_end,         :name                => LogFile.logfile_name(self, log_type, date_string), -        :description         => "Logs for Zone #{zone.name rescue nil} Server #{self.name} #{date_string}", +        :description         => "Logs for Zone #{zone.name rescue nil} Server #{name} #{date_string}"       )        logfile.upload @@ -146,7 +148,7 @@ def post_one_log_pattern(pattern, logfile, log_type)   end    def post_automate_models(taskid, log_depot) -    domain_zip = Rails.root.join("log", "domain.zip") +    domain_zip = Rails.root.join("log/domain.zip")     backup_automate_models(domain_zip)      now = Time.zone.now @@ -167,7 +169,7 @@ def backup_automate_models(domain_zip)   end    def post_automate_dialogs(taskid, log_depot) -    dialog_directory = Rails.root.join("log", "service_dialogs") +    dialog_directory = Rails.root.join("log/service_dialogs")     FileUtils.mkdir_p(dialog_directory)     backup_automate_dialogs(dialog_directory)     now = Time.zone.now @@ -240,15 +242,15 @@ def delete_active_log_collections_queue    def delete_active_log_collections     log_files.each do |lf| -      if lf.state == 'collecting' -        _log.info("Deleting #{lf.description}") -        lf.miq_task&.update(:state => 'Finished', :status => 'Error', :message => 'Log Collection Incomplete during Server Startup') -        lf.destroy -      end +      next unless lf.state == 'collecting' + +      _log.info("Deleting #{lf.description}") +      lf.miq_task&.update(:state => 'Finished', :status => 'Error', :message => 'Log Collection Incomplete during Server Startup') +      lf.destroy     end      # Since a task is created before a logfile, there's a chance we have a task without a logfile -    MiqTask.where(:miq_server_id => id).where("name like ?", "Zipped log retrieval for %").where("state != ?", "Finished").each do |task| +    MiqTask.where(:miq_server_id => id).where("name like ?", "Zipped log retrieval for %").where.not(:state => "Finished").each do |task|       task.update(:state => 'Finished', :status => 'Error', :message => 'Log Collection Incomplete during Server Startup')     end   end @@ -256,11 +258,13 @@ def delete_active_log_collections   def log_collection_active_recently?(since = nil)     since ||= 15.minutes.ago.utc     return true if log_files.exists?(["created_on > ? AND state = ?", since, "collecting"]) +      MiqTask.exists?(["miq_server_id = ? and name like ? and state != ? and created_on > ?", id, "Zipped log retrieval for %", "Finished", since])   end    def log_collection_active?     return true if log_files.exists?(:state => "collecting") +      MiqTask.exists?(["miq_server_id = ? and name like ? 
and state != ?", id, "Zipped log retrieval for %", "Finished"]) end diff --git a/app/models/miq_server/queue_management.rb b/app/models/miq_server/queue_management.rb index a0b720ecd6a..55fa602427c 100644 --- a/app/models/miq_server/queue_management.rb +++ b/app/models/miq_server/queue_management.rb @@ -60,7 +60,7 @@ def shutdown_and_exit_queue # Tell the remote or local server to restart def restart_queue log_message = "Server restart requested" - log_message += ", remote server: [#{name}], GUID: [#{guid}], initiated from: [#{MiqServer.my_server.name}], GUID: [#{MiqServer.my_server.guid}]" if self.is_remote? + log_message += ", remote server: [#{name}], GUID: [#{guid}], initiated from: [#{MiqServer.my_server.name}], GUID: [#{MiqServer.my_server.guid}]" if is_remote? _log.info(log_message) enqueue_for_server('restart') end diff --git a/app/models/miq_server/role_management.rb b/app/models/miq_server/role_management.rb index aa280a8362d..167042369ac 100644 --- a/app/models/miq_server/role_management.rb +++ b/app/models/miq_server/role_management.rb @@ -52,7 +52,7 @@ def sync_assigned_roles end def ensure_default_roles - MiqServer.my_server.add_settings_for_resource(:server => {:role => ENV["MIQ_SERVER_DEFAULT_ROLES"]}) if role.blank? && ENV["MIQ_SERVER_DEFAULT_ROLES"].present? + MiqServer.my_server.add_settings_for_resource(:server => {:role => ENV.fetch("MIQ_SERVER_DEFAULT_ROLES", nil)}) if role.blank? && ENV["MIQ_SERVER_DEFAULT_ROLES"].present? sync_assigned_roles end @@ -123,8 +123,8 @@ def check_server_roles def server_role_names server_roles.pluck(:name).sort end - alias_method :my_roles, :server_role_names - alias_method :assigned_role_names, :server_role_names + alias my_roles server_role_names + alias assigned_role_names server_role_names def server_role_names=(roles) zone.lock do @@ -156,15 +156,13 @@ def server_role_names=(roles) end end end - - roles end def role server_role_names.join(',') end - alias_method :my_role, :role - alias_method :assigned_role, :role + alias my_role role + alias assigned_role role def role=(val) self.server_role_names = val == "*" ? val : val.split(",") @@ -208,7 +206,7 @@ def licensed_role def has_assigned_role?(role) assigned_role_names.include?(role.to_s.strip.downcase) end - alias_method :has_role?, :has_assigned_role? + alias has_role? has_assigned_role? def has_active_role?(role) active_role_names.include?(role.to_s.strip.downcase) @@ -259,13 +257,13 @@ def synchronize_active_roles(servers, roles_to_sync) end active.each do |s, p| - if (inactive.length > 0) && (p > inactive.first.last) - s2, p2 = inactive.shift - _log.info("Migrating Role <#{role_name}> Active on Server <#{s.name}> with Priority <#{p}> to Server <#{s2.name}> with Priority <#{p2}>") - s.deactivate_roles(role_name) - s2.activate_roles(role_name) - active << [s2, p2] - end + next unless (inactive.length > 0) && (p > inactive.first.last) + + s2, p2 = inactive.shift + _log.info("Migrating Role <#{role_name}> Active on Server <#{s.name}> with Priority <#{p}> to Server <#{s2.name}> with Priority <#{p2}>") + s.deactivate_roles(role_name) + s2.activate_roles(role_name) + active << [s2, p2] end end diff --git a/app/models/miq_server/server_monitor.rb b/app/models/miq_server/server_monitor.rb index 74e8772981b..68000dfc369 100644 --- a/app/models/miq_server/server_monitor.rb +++ b/app/models/miq_server/server_monitor.rb @@ -121,6 +121,7 @@ def monitor_servers_as_non_master parent.miq_servers.each do |s| next unless s.status == 'started' next if s.is_master? 
+ @last_servers[s.id] = {:last_hb_change => Time.now.utc, :record => s} end diff --git a/app/models/miq_server/server_smart_proxy.rb b/app/models/miq_server/server_smart_proxy.rb index bafe6d4cefd..28cf6e79d4b 100644 --- a/app/models/miq_server/server_smart_proxy.rb +++ b/app/models/miq_server/server_smart_proxy.rb @@ -3,18 +3,18 @@ module MiqServer::ServerSmartProxy extend ActiveSupport::Concern - SMART_ROLES = %w(smartproxy smartstate).freeze + SMART_ROLES = %w[smartproxy smartstate].freeze def is_a_proxy? - self.has_role?(:SmartProxy) + has_role?(:SmartProxy) end def is_proxy_active? - self.started? && self.has_active_role?(:SmartProxy) + started? && has_active_role?(:SmartProxy) end def is_vix_disk? - has_vix_disk_lib? && self.has_active_role?(:SmartProxy) + has_vix_disk_lib? && has_active_role?(:SmartProxy) end def vm_scan_host_affinity? @@ -97,7 +97,7 @@ def scan_metadata(ost) _log.error(err.to_s) _log.log_backtrace(err, :debug) job.signal(:abort_retry, err.to_s, "error", true) - return + nil end end @@ -113,7 +113,7 @@ def sync_metadata(ost) _log.error(err.to_s) _log.log_backtrace(err, :debug) job.signal(:abort_retry, err.to_s, "error", true) - return + nil end end @@ -132,7 +132,7 @@ def forceVmScan end def concurrent_job_max - return 0 unless self.is_a_proxy? + return 0 unless is_a_proxy? MiqSmartProxyWorker.fetch_worker_settings_from_server(self)[:count].to_i end diff --git a/app/models/miq_server/status_management.rb b/app/models/miq_server/status_management.rb index e3a1db64a9d..4673d6748b9 100644 --- a/app/models/miq_server/status_management.rb +++ b/app/models/miq_server/status_management.rb @@ -34,9 +34,7 @@ module ClassMethods # 2. Delegate and/or deprecate these class methods # 3. Change callers (app/models/miq_schedule_worker/jobs.rb) to use an instance. # 4. Cleanup any existing queue messages. - def status_update - my_server.status_update - end + delegate :status_update, :to => :my_server def log_status log_system_status @@ -58,35 +56,11 @@ def log_system_status unless disks.empty? 
_log.info("[#{svr_name}] Disk Usage:") format_string = "%-12s %6s %12s %12s %12s %12s %12s %12s %12s %12s %12s" - header = format(format_string, - "Filesystem", - "Type", - "Total", - "Used", - "Available", - "%Used", - "iTotal", - "iUsed", - "iFree", - "%iUsed", - "Mounted on" - ) + header = format_string % ["Filesystem", "Type", "Total", "Used", "Available", "%Used", "iTotal", "iUsed", "iFree", "%iUsed", "Mounted on"] _log.info("[#{svr_name}] #{header}") disks.each do |disk| - formatted = format(format_string, - disk[:filesystem], - disk[:type], - ActiveSupport::NumberHelper.number_to_human_size(disk[:total_bytes]), - ActiveSupport::NumberHelper.number_to_human_size(disk[:used_bytes]), - ActiveSupport::NumberHelper.number_to_human_size(disk[:available_bytes]), - "#{disk[:used_bytes_percent]}%", - disk[:total_inodes], - disk[:used_inodes], - disk[:available_inodes], - "#{disk[:used_inodes_percent]}%", - disk[:mount_point] - ) + formatted = format_string % [disk[:filesystem], disk[:type], ActiveSupport::NumberHelper.number_to_human_size(disk[:total_bytes]), ActiveSupport::NumberHelper.number_to_human_size(disk[:used_bytes]), ActiveSupport::NumberHelper.number_to_human_size(disk[:available_bytes]), "#{disk[:used_bytes_percent]}%", disk[:total_inodes], disk[:used_inodes], disk[:available_inodes], "#{disk[:used_inodes_percent]}%", disk[:mount_point]] _log.info("[#{svr_name}] #{formatted}") end @@ -94,11 +68,11 @@ def log_system_status svr.check_disk_usage(disks) end - queue_count = MiqQueue.nested_count_by(%w(state zone role)) + queue_count = MiqQueue.nested_count_by(%w[state zone role]) states = queue_count.keys.sort_by(&:to_s) states.each { |state| _log.info("[#{svr_name}] MiqQueue count for state=[#{state.inspect}] by zone and role: #{queue_count[state].inspect}") } - job_count = Job.nested_count_by(%w(state zone type)) + job_count = Job.nested_count_by(%w[state zone type]) states = job_count.keys.sort_by(&:to_s) states.each { |state| _log.info("[#{svr_name}] Job count for state=[#{state.inspect}] by zone and process_type: #{job_count[state].inspect}") } end diff --git a/app/models/miq_server/worker_management.rb b/app/models/miq_server/worker_management.rb index 378ea784fa2..c80091edea6 100644 --- a/app/models/miq_server/worker_management.rb +++ b/app/models/miq_server/worker_management.rb @@ -53,7 +53,7 @@ def start_drb_server require 'drb' require 'drb/acl' - acl = ACL.new(%w( deny all allow 127.0.0.1/32 )) + acl = ACL.new(%w[deny all allow 127.0.0.1/32]) DRb.install_acl(acl) require 'tmpdir' diff --git a/app/models/miq_server/worker_management/dequeue.rb b/app/models/miq_server/worker_management/dequeue.rb index 46ea5b1c7f5..4c1f3a2299d 100644 --- a/app/models/miq_server/worker_management/dequeue.rb +++ b/app/models/miq_server/worker_management/dequeue.rb @@ -39,6 +39,7 @@ def get_queue_message_for_worker(w) next if msg.nil? next if MiqQueue.lower_priority?(msg[:priority], get_queue_priority_for_worker(w)) next unless w[:class].required_roles.blank? || msg[:role].blank? || Array.wrap(w[:class].required_roles).include?(msg[:role]) + return messages.delete_at(index) end @@ -47,12 +48,14 @@ def get_queue_message_for_worker(w) end def get_queue_message(pid) - @workers_lock.synchronize(:SH) do - w = @workers[pid] + unless @workers_lock.nil? + @workers_lock.synchronize(:SH) do + w = @workers[pid] - msg = get_queue_message_for_worker(w) - [msg[:id], msg[:lock_version]] if msg - end unless @workers_lock.nil? 
+ msg = get_queue_message_for_worker(w) + [msg[:id], msg[:lock_version]] if msg + end + end end def prefetch_stale_threshold @@ -62,6 +65,7 @@ def prefetch_stale_threshold def prefetch_below_threshold?(queue_name, wcount) @queue_messages_lock.synchronize(:SH) do return false unless @queue_messages.key_path?(queue_name, :messages) + return (@queue_messages[queue_name][:messages].length <= (::Settings.server.prefetch_min_per_worker_dequeue * wcount)) end end @@ -69,6 +73,7 @@ def prefetch_below_threshold?(queue_name, wcount) def prefetch_stale?(queue_name) @queue_messages_lock.synchronize(:SH) do return true if @queue_messages[queue_name].nil? + return ((Time.now.utc - @queue_messages[queue_name][:timestamp]) > prefetch_stale_threshold) end end @@ -76,50 +81,57 @@ def prefetch_stale?(queue_name) def prefetch_has_lower_priority_than_miq_queue?(queue_name) @queue_messages_lock.synchronize(:SH) do return true if @queue_messages[queue_name].nil? || @queue_messages[queue_name][:messages].nil? + msg = @queue_messages[queue_name][:messages].first return true if msg.nil? + return peek(queue_name, MiqQueue.priority(msg[:priority], :higher, 1), 1).any? end end def get_worker_count_and_priority_by_queue_name queue_names = {} - @workers_lock.synchronize(:SH) do - @workers.each do |_pid, w| - next if w[:queue_name].nil? - next if w[:class].nil? - next unless get_worker_dequeue_method(w[:class]) == :drb - options = (queue_names[w[:queue_name]] ||= [0, MiqQueue::MAX_PRIORITY]) - options[0] += 1 - options[1] = MiqQueue.lower_priority(get_queue_priority_for_worker(w), options[1]) + unless @workers_lock.nil? + @workers_lock.synchronize(:SH) do + @workers.each do |_pid, w| + next if w[:queue_name].nil? + next if w[:class].nil? + next unless get_worker_dequeue_method(w[:class]) == :drb + + options = (queue_names[w[:queue_name]] ||= [0, MiqQueue::MAX_PRIORITY]) + options[0] += 1 + options[1] = MiqQueue.lower_priority(get_queue_priority_for_worker(w), options[1]) + end end - end unless @workers_lock.nil? + end queue_names end def register_worker(worker_pid, worker_class, queue_name) worker_class = worker_class.constantize if worker_class.kind_of?(String) - @workers_lock.synchronize(:EX) do - worker_add(worker_pid) - h = @workers[worker_pid] - h[:class] ||= worker_class - h[:queue_name] ||= queue_name - end unless @workers_lock.nil? + unless @workers_lock.nil? 
+ @workers_lock.synchronize(:EX) do + worker_add(worker_pid) + h = @workers[worker_pid] + h[:class] ||= worker_class + h[:queue_name] ||= queue_name + end + end end def populate_queue_messages queue_names = get_worker_count_and_priority_by_queue_name @queue_messages_lock.synchronize(:EX) do queue_names.each do |queue_name, (wcount, priority)| - if prefetch_below_threshold?(queue_name, wcount) || prefetch_stale?(queue_name) || prefetch_has_lower_priority_than_miq_queue?(queue_name) - @queue_messages[queue_name] ||= {} - @queue_messages[queue_name][:timestamp] = Time.now.utc - @queue_messages[queue_name][:messages] = peek(queue_name, priority, (::Settings.server.prefetch_max_per_worker_dequeue * wcount)).collect do |q| - {:id => q.id, :lock_version => q.lock_version, :priority => q.priority, :role => q.role} - end - _log.info("Fetched #{@queue_messages[queue_name][:messages].length} miq_queue rows for queue_name=#{queue_name}, wcount=#{wcount.inspect}, priority=#{priority.inspect}") if @queue_messages[queue_name][:messages].length > 0 + next unless prefetch_below_threshold?(queue_name, wcount) || prefetch_stale?(queue_name) || prefetch_has_lower_priority_than_miq_queue?(queue_name) + + @queue_messages[queue_name] ||= {} + @queue_messages[queue_name][:timestamp] = Time.now.utc + @queue_messages[queue_name][:messages] = peek(queue_name, priority, (::Settings.server.prefetch_max_per_worker_dequeue * wcount)).collect do |q| + {:id => q.id, :lock_version => q.lock_version, :priority => q.priority, :role => q.role} end + _log.info("Fetched #{@queue_messages[queue_name][:messages].length} miq_queue rows for queue_name=#{queue_name}, wcount=#{wcount.inspect}, priority=#{priority.inspect}") if @queue_messages[queue_name][:messages].length > 0 end end end diff --git a/app/models/miq_server/worker_management/heartbeat.rb b/app/models/miq_server/worker_management/heartbeat.rb index 9231f3f92a9..faa2c107d4e 100644 --- a/app/models/miq_server/worker_management/heartbeat.rb +++ b/app/models/miq_server/worker_management/heartbeat.rb @@ -17,7 +17,7 @@ def persist_last_heartbeat(w) end def clean_heartbeat_files - Dir.glob(Rails.root.join("tmp", "*.hb")).each { |f| File.delete(f) } + Dir.glob(Rails.root.join("tmp/*.hb")).each { |f| File.delete(f) } end private diff --git a/app/models/miq_server/worker_management/kubernetes.rb b/app/models/miq_server/worker_management/kubernetes.rb index 3aff19f85ce..ca677285791 100644 --- a/app/models/miq_server/worker_management/kubernetes.rb +++ b/app/models/miq_server/worker_management/kubernetes.rb @@ -127,7 +127,7 @@ def start_kube_monitor(resource = :pods) Thread.new do _log.info("Started new #{resource} monitor thread of #{Thread.list.length} total") begin - send("monitor_#{resource}") + send(:"monitor_#{resource}") rescue HTTP::ConnectionError => e _log.error("Exiting #{resource} monitor thread due to [#{e.class.name}]: #{e}") rescue => e @@ -141,16 +141,16 @@ def ensure_kube_monitors_started [:deployments, :pods].each do |resource| getter = "#{resource}_monitor_thread" thread = send(getter) - if thread.nil? || !thread.alive? - if !thread.nil? && thread.status.nil? - dead_thread = thread - send("#{getter}=", nil) - _log.info("Waiting for the #{getter} Monitor Thread to exit...") - dead_thread.join - end + next unless thread.nil? || !thread.alive? - send("#{getter}=", start_kube_monitor(resource)) + if !thread.nil? && thread.status.nil? 
+ dead_thread = thread + send(:"#{getter}=", nil) + _log.info("Waiting for the #{getter} Monitor Thread to exit...") + dead_thread.join end + + send(:"#{getter}=", start_kube_monitor(resource)) end end @@ -186,18 +186,18 @@ def monitor_pods end def collect_initial(resource = :pods) - objects = orchestrator.send("get_#{resource}") - objects.each { |p| send("save_#{resource.to_s.singularize}", p) } + objects = orchestrator.send(:"get_#{resource}") + objects.each { |p| send(:"save_#{resource.to_s.singularize}", p) } objects.resourceVersion end def watch_for_events(resource, resource_version) - orchestrator.send("watch_#{resource}", resource_version).each do |event| + orchestrator.send(:"watch_#{resource}", resource_version).each do |event| case event.type.downcase when "added", "modified" - send("save_#{resource.to_s.singularize}", event.object) + send(:"save_#{resource.to_s.singularize}", event.object) when "deleted" - send("delete_#{resource.to_s.singularize}", event.object) + send(:"delete_#{resource.to_s.singularize}", event.object) when "error" if (status = event.object) # ocp 3 appears to return 'ERROR' watch events with the object containing the 410 code and "Gone" reason like below: diff --git a/app/models/miq_server/worker_management/monitor.rb b/app/models/miq_server/worker_management/monitor.rb index 9bd1998d76c..1b9e396bc06 100644 --- a/app/models/miq_server/worker_management/monitor.rb +++ b/app/models/miq_server/worker_management/monitor.rb @@ -63,6 +63,7 @@ def check_pending_stop miq_workers.each do |w| next unless w.is_stopped? next unless worker_get_monitor_status(w.pid) == :waiting_for_stop + worker_set_monitor_status(w.pid, nil) end end diff --git a/app/models/miq_server/worker_management/monitor/quiesce.rb b/app/models/miq_server/worker_management/monitor/quiesce.rb index 2e5fa9a3199..bcd653c26da 100644 --- a/app/models/miq_server/worker_management/monitor/quiesce.rb +++ b/app/models/miq_server/worker_management/monitor/quiesce.rb @@ -19,7 +19,7 @@ def quiesce_workers_loop @quiesce_started_on = Time.now.utc @worker_monitor_settings ||= {} @quiesce_loop_timeout = @worker_monitor_settings[:quiesce_loop_timeout] || 5.minutes - worker_monitor_poll = (@worker_monitor_settings[:poll] || 1.seconds).to_i_with_method + worker_monitor_poll = (@worker_monitor_settings[:poll] || 1.second).to_i_with_method miq_workers.each do |w| if w.containerized_worker? @@ -33,7 +33,8 @@ def quiesce_workers_loop my_server.reload # Reload from SQL this MiqServer AND its miq_workers association my_server.heartbeat - break if self.workers_quiesced? + break if workers_quiesced? + sleep worker_monitor_poll end end diff --git a/app/models/miq_server/worker_management/monitor/reason.rb b/app/models/miq_server/worker_management/monitor/reason.rb index ce57db6f98d..81e535087bc 100644 --- a/app/models/miq_server/worker_management/monitor/reason.rb +++ b/app/models/miq_server/worker_management/monitor/reason.rb @@ -5,9 +5,11 @@ module MiqServer::WorkerManagement::Monitor::Reason NOT_RESPONDING = :not_responding def worker_set_monitor_reason(pid, reason) - @workers_lock.synchronize(:EX) do - @workers[pid][:monitor_reason] = reason if @workers.key?(pid) - end unless @workers_lock.nil? + unless @workers_lock.nil? 
+ @workers_lock.synchronize(:EX) do + @workers[pid][:monitor_reason] = reason if @workers.key?(pid) + end + end end def worker_get_monitor_reason(pid) diff --git a/app/models/miq_server/worker_management/monitor/status.rb b/app/models/miq_server/worker_management/monitor/status.rb index e36ea30280a..1de638ba58c 100644 --- a/app/models/miq_server/worker_management/monitor/status.rb +++ b/app/models/miq_server/worker_management/monitor/status.rb @@ -2,9 +2,11 @@ module MiqServer::WorkerManagement::Monitor::Status extend ActiveSupport::Concern def worker_set_monitor_status(pid, status) - @workers_lock.synchronize(:EX) do - @workers[pid][:monitor_status] = status if @workers.key?(pid) - end unless @workers_lock.nil? + unless @workers_lock.nil? + @workers_lock.synchronize(:EX) do + @workers[pid][:monitor_status] = status if @workers.key?(pid) + end + end end def worker_get_monitor_status(pid) diff --git a/app/models/miq_server/worker_management/monitor/system_limits.rb b/app/models/miq_server/worker_management/monitor/system_limits.rb index 3e0b77a0412..489ef32734a 100644 --- a/app/models/miq_server/worker_management/monitor/system_limits.rb +++ b/app/models/miq_server/worker_management/monitor/system_limits.rb @@ -28,6 +28,7 @@ def kill_algorithm_used_swap_percent_gt_value(options) sys = MiqSystem.memory return false if sys[:SwapTotal].nil? || sys[:SwapFree].nil? || sys[:MemFree].nil? || sys[:SwapTotal] == 0 + used = sys[:SwapTotal] - sys[:SwapFree] - sys[:MemFree] pct_used = used / sys[:SwapTotal].to_f * 100 rescue => err @@ -48,6 +49,7 @@ def start_algorithm_used_swap_percent_lt_value(options) sys = MiqSystem.memory return true if sys[:SwapTotal].nil? || sys[:SwapFree].nil? || sys[:MemFree].nil? || sys[:SwapTotal] == 0 + used = sys[:SwapTotal] - sys[:SwapFree] - sys[:MemFree] pct_used = used / sys[:SwapTotal].to_f * 100 rescue => err @@ -114,7 +116,7 @@ def invoke_algorithm(options) def build_algorithm_name(name, type) real_algorithm_name = "#{type}_algorithm_#{name}" if name && type - unless real_algorithm_name && self.respond_to?(real_algorithm_name) + unless real_algorithm_name && respond_to?(real_algorithm_name) default = TYPE_TO_DEFAULT_ALGORITHM[type] _log.warn("Using default algorithm: [#{default}] since [#{name}] is not a valid algorithm") real_algorithm_name = "#{type}_algorithm_#{default}" diff --git a/app/models/miq_server/worker_management/monitor/validation.rb b/app/models/miq_server/worker_management/monitor/validation.rb index 74d4c1f0004..ad2815ae7c1 100644 --- a/app/models/miq_server/worker_management/monitor/validation.rb +++ b/app/models/miq_server/worker_management/monitor/validation.rb @@ -40,6 +40,7 @@ def usage_exceeds_threshold?(usage, threshold) return false unless usage.kind_of?(Numeric) return false unless threshold.kind_of?(Numeric) return false unless threshold > 0 + usage > threshold end end diff --git a/app/models/miq_shortcut.rb b/app/models/miq_shortcut.rb index bc90f9137b6..cea56789329 100644 --- a/app/models/miq_shortcut.rb +++ b/app/models/miq_shortcut.rb @@ -29,6 +29,7 @@ def self.seed db_data.each do |name, rec| next if seed_records_by_name[name] + _log.info("Deleting #{rec.inspect}") rec.destroy end diff --git a/app/models/miq_smart_proxy_worker/runner.rb b/app/models/miq_smart_proxy_worker/runner.rb index e429705e126..ae1c4253857 100644 --- a/app/models/miq_smart_proxy_worker/runner.rb +++ b/app/models/miq_smart_proxy_worker/runner.rb @@ -25,15 +25,15 @@ def start_heartbeat_thread _log.info("#{log_prefix} Starting Heartbeat Thread") tid = Thread.new do 
- begin - heartbeat_thread - rescue => err - _log.error("#{log_prefix} Heartbeat Thread aborted because [#{err.message}]") - _log.log_backtrace(err) - Thread.exit - ensure - @heartbeat_started.set - end + + heartbeat_thread + rescue => err + _log.error("#{log_prefix} Heartbeat Thread aborted because [#{err.message}]") + _log.log_backtrace(err) + Thread.exit + ensure + @heartbeat_started.set + end @heartbeat_started.wait diff --git a/app/models/miq_snmp.rb b/app/models/miq_snmp.rb index 2f0981b4bf6..8e2094585c1 100644 --- a/app/models/miq_snmp.rb +++ b/app/models/miq_snmp.rb @@ -72,6 +72,7 @@ def self.trap_v2(inputs) # trap_oid: An ObjectId or String with the OID identifier for this trap. trap_oid = inputs[:trap_oid] || inputs['trap_oid'] raise MiqException::Error, _("MiqSnmp.trap_v2: Ensure that a trap object id is provided") if trap_oid.nil? + trap_oid = subst_oid(trap_oid) # A list of additional varbinds to send with the trap. @@ -107,7 +108,7 @@ def self.create_var_bind_list(object_list, base = nil) value = tuple[:value] type = tuple[:type] || tuple[:var_type] snmpType = AVAILABLE_TYPES_HASH[type] - snmpVal = (snmpType == SNMP::Null) ? SNMP::Null.new : snmpType.new(value) + snmpVal = snmpType == SNMP::Null ? SNMP::Null.new : snmpType.new(value) vars << SNMP::VarBind.new(oid, snmpVal) end vars @@ -124,24 +125,24 @@ def self.subst_oid(oid, base = nil) return "#{base}#{oid}" if oid[0, 1] == "." # Need to move these to ManageIQ MIB - oid = case oid.downcase - when "info" then "#{enterprise_oid_string}.1" - when "warn", "warning" then "#{enterprise_oid_string}.2" - when "crit", "critical", "error" then "#{enterprise_oid_string}.3" - when "description" then "#{base}.1" - when "category" then "#{base}.2" - when "message" then "#{base}.3" - when "object" then "#{base}.4" - when "location" then "#{base}.5" - when "platform" then "#{base}.6" - when "url" then "#{base}.7" - when "source" then "#{base}.8" - when "custom1" then "#{base}.9" - when "custom2" then "#{base}.10" - else oid - end - - oid + case oid.downcase + when "info" then "#{enterprise_oid_string}.1" + when "warn", "warning" then "#{enterprise_oid_string}.2" + when "crit", "critical", "error" then "#{enterprise_oid_string}.3" + when "description" then "#{base}.1" + when "category" then "#{base}.2" + when "message" then "#{base}.3" + when "object" then "#{base}.4" + when "location" then "#{base}.5" + when "platform" then "#{base}.6" + when "url" then "#{base}.7" + when "source" then "#{base}.8" + when "custom1" then "#{base}.9" + when "custom2" then "#{base}.10" + else oid + end + + end private_class_method :subst_oid diff --git a/app/models/miq_task.rb b/app/models/miq_task.rb index c85f3847b16..86681546561 100644 --- a/app/models/miq_task.rb +++ b/app/models/miq_task.rb @@ -40,8 +40,8 @@ class MiqTask < ApplicationRecord before_validation :initialize_attributes, :on => :create - before_destroy :check_active, :check_associations before_save :ensure_started + before_destroy :check_active, :check_associations virtual_has_one :task_results virtual_attribute :state_or_status, :string, :arel => (lambda do |t| @@ -56,8 +56,8 @@ class MiqTask < ApplicationRecord scope :with_updated_on_between, ->(from, to) { where("miq_tasks.updated_on BETWEEN ? 
AND ?", from, to) } scope :with_state, ->(state) { where(:state => state) } scope :finished, -> { with_state('Finished') } - scope :running, -> { where.not(:state => %w(Finished Waiting_to_start Queued)) } - scope :queued, -> { with_state(%w(Waiting_to_start Queued)) } + scope :running, -> { where.not(:state => %w[Finished Waiting_to_start Queued]) } + scope :queued, -> { with_state(%w[Waiting_to_start Queued]) } scope :completed_ok, -> { finished.where(:status => 'Ok') } scope :completed_warn, -> { finished.where(:status => 'Warn') } scope :completed_error, -> { finished.where(:status => 'Error') } @@ -229,14 +229,14 @@ def human_status end def results_ready? - status == STATUS_OK && !task_results.blank? + status == STATUS_OK && task_results.present? end def queue_callback(state, status, message, result) if status.casecmp(STATUS_OK) == 0 message = MESSAGE_TASK_COMPLETED_SUCCESSFULLY - else - message = MESSAGE_TASK_COMPLETED_UNSUCCESSFULLY if message.blank? + elsif message.blank? + message = MESSAGE_TASK_COMPLETED_UNSUCCESSFULLY end self.task_results = result unless result.nil? @@ -255,6 +255,7 @@ def task_results # support legacy task that saved results in the results column return Marshal.load(Base64.decode64(results.split("\n").join)) unless results.nil? return miq_report_result.report_results unless miq_report_result.nil? + if binary_blob result = binary_blob.data return result.kind_of?(String) ? result.force_encoding("UTF-8") : result @@ -324,6 +325,7 @@ def self.wait_for_taskid(task_id, options = {}) options[:timeout] ||= 0 task = MiqTask.find(task_id) return nil if task.nil? + begin Timeout.timeout(options[:timeout]) do while task.state != STATE_FINISHED @@ -345,7 +347,7 @@ def self.delete_older(ts, condition) MiqQueue.submit_job( :class_name => name, :method_name => "destroy_older_by_condition", - :args => [ts, condition], + :args => [ts, condition] ) end @@ -356,11 +358,12 @@ def self.destroy_older_by_condition(ts, condition) def self.delete_by_id(ids) return if ids.empty? + _log.info("Queuing deletion of tasks with the following ids: #{ids.inspect}") MiqQueue.submit_job( :class_name => name, :method_name => "destroy", - :args => [ids], + :args => [ids] ) end diff --git a/app/models/miq_template.rb b/app/models/miq_template.rb index 37fb5a62a9a..29fda2ddc11 100644 --- a/app/models/miq_template.rb +++ b/app/models/miq_template.rb @@ -19,10 +19,10 @@ def self.base_model def self.corresponding_model module_parent::Vm end - class << self; alias_method :corresponding_vm_model, :corresponding_model; end + class << self; alias corresponding_vm_model corresponding_model; end delegate :corresponding_model, :to => :class - alias_method :corresponding_vm_model, :corresponding_model + alias corresponding_vm_model corresponding_model def scan_via_ems? true @@ -37,7 +37,7 @@ def self.without_volume_templates "ManageIQ::Providers::Openstack::CloudManager::VolumeSnapshotTemplate"]) end - def active?; false; end + def active? 
= false def self.display_name(number = 1) n_('Template and Image', 'Templates and Images', number) diff --git a/app/models/miq_template/operations.rb b/app/models/miq_template/operations.rb index 8e52b9dd41c..aa146fc8877 100644 --- a/app/models/miq_template/operations.rb +++ b/app/models/miq_template/operations.rb @@ -2,6 +2,6 @@ module MiqTemplate::Operations extend ActiveSupport::Concern included do - supports :clone + supports :clone end end diff --git a/app/models/miq_user_scope.rb b/app/models/miq_user_scope.rb index f95737c1177..4529b5a1957 100644 --- a/app/models/miq_user_scope.rb +++ b/app/models/miq_user_scope.rb @@ -1,13 +1,12 @@ class MiqUserScope FEATURE_TYPES = [:view, :control, :admin] - attr_accessor *FEATURE_TYPES - attr_accessor :scope + attr_accessor(*FEATURE_TYPES, :scope) FILTER_TYPES = [:managed, :belongsto, :expression] def initialize(scope) @scope = scope - FEATURE_TYPES.each { |f| instance_variable_set("@#{f}", scope[f]) } + FEATURE_TYPES.each { |f| instance_variable_set(:"@#{f}", scope[f]) } end def get_filters(options = {}) @@ -36,9 +35,9 @@ def filters_by_class_feature_filter(klass, feature_type, filter_type) all = filter[:_all_] for_class = filter[klass.to_s.downcase.to_sym] - result = send("merge_#{filter_type}", all, for_class) + result = send(:"merge_#{filter_type}", all, for_class) - result.blank? ? nil : result + result.presence end def merge_managed(*args) @@ -68,8 +67,8 @@ def self.hash_to_scope(hash) return if managed.blank? && belongs.blank? newh = {:view => {}} - newh[:view][:managed] = {:_all_ => managed} unless managed.blank? - newh[:view][:belongsto] = {:_all_ => belongs} unless belongs.blank? + newh[:view][:managed] = {:_all_ => managed} if managed.present? + newh[:view][:belongsto] = {:_all_ => belongs} if belongs.present? 
new(newh) end diff --git a/app/models/miq_widget.rb b/app/models/miq_widget.rb index a1f9f244fd1..c344cf6eb23 100644 --- a/app/models/miq_widget.rb +++ b/app/models/miq_widget.rb @@ -8,7 +8,7 @@ class MiqWidget < ApplicationRecord attribute :read_only, :default => false DEFAULT_ROW_COUNT = 5 - IMPORT_CLASS_NAMES = %w(MiqWidget).freeze + IMPORT_CLASS_NAMES = %w[MiqWidget].freeze belongs_to :resource, :polymorphic => true belongs_to :miq_schedule @@ -19,10 +19,10 @@ class MiqWidget < ApplicationRecord has_many :miq_widget_shortcuts, :dependent => :destroy has_many :miq_shortcuts, :through => :miq_widget_shortcuts - validates_presence_of :title, :description + validates :title, :description, :presence => true validates :description, :uniqueness_when_changed => true - VALID_CONTENT_TYPES = %w( report chart rss menu ) - validates_inclusion_of :content_type, :in => VALID_CONTENT_TYPES, :message => "should be one of #{VALID_CONTENT_TYPES.join(", ")}" + VALID_CONTENT_TYPES = %w[report chart rss menu] + validates :content_type, :inclusion => {:in => VALID_CONTENT_TYPES, :message => "should be one of #{VALID_CONTENT_TYPES.join(", ")}"} serialize :visibility serialize :options @@ -51,10 +51,10 @@ def prevent_orphaned_dashboard end end - virtual_column :status, :type => :string, :uses => :miq_task + virtual_column :status, :type => :string, :uses => :miq_task virtual_delegate :status_message, :to => "miq_task.message", :allow_nil => true, :default => "Unknown", :type => :string virtual_delegate :queued_at, :to => "miq_task.created_on", :allow_nil => true, :type => :datetime - virtual_column :last_run_on, :type => :datetime, :uses => :miq_schedule + virtual_column :last_run_on, :type => :datetime, :uses => :miq_schedule def row_count(row_count_param = nil) row_count_param.try(:to_i) || options.try(:[], :row_count) || DEFAULT_ROW_COUNT @@ -79,7 +79,7 @@ def status def create_task(num_targets, userid = User.current_userid) userid ||= "system" - context_data = {:targets => num_targets, :complete => 0} + context_data = {:targets => num_targets, :complete => 0} miq_task = MiqTask.create( :name => "Generate Widget: '#{title}'", :state => MiqTask::STATE_QUEUED, @@ -93,7 +93,7 @@ def create_task(num_targets, userid = User.current_userid) _log.info("Created MiqTask ID: [#{miq_task.id}], Name: [#{miq_task.name}] for: [#{num_targets}] groups") self.miq_task_id = miq_task.id - self.save! + save! miq_task end @@ -107,6 +107,7 @@ def timeout_stalled_task !MiqQueue.where(:method_name => "generate_content", :class_name => self.class.name, :instance_id => id).any?(&:unfinished?) + miq_task.update_status(MiqTask::STATE_FINISHED, MiqTask::STATUS_TIMEOUT, "Timed out stalled task.") end @@ -162,7 +163,7 @@ def generate_content_complete_callback(status, _message, _result) end def generate_content_complete! - self.update!(:last_generated_content_on => Time.now.utc) + update!(:last_generated_content_on => Time.now.utc) end def generate_content_complete_message @@ -185,8 +186,9 @@ def log_prefix def queue_generate_content return if content_type == "menu" + # Called from schedule - unless self.enabled? + unless enabled? 
_log.info("#{log_prefix} is disabled, content will NOT be generated") return end @@ -199,7 +201,7 @@ def queue_generate_content MiqPreloader.preload(group_hash_visibility_agnostic.keys, [:miq_user_role]) - group_hash = group_hash_visibility_agnostic.select { |k, _v| available_for_group?(k) } # Process users grouped by LDAP group membership of whether they have RBAC + group_hash = group_hash_visibility_agnostic.select { |k, _v| available_for_group?(k) } # Process users grouped by LDAP group membership of whether they have RBAC if group_hash.length == 0 _log.info("#{log_prefix} is not subscribed, content will NOT be generated") @@ -238,6 +240,7 @@ def queue_generate_content def generate_content(klass, group_description, userids, timezones = nil) return if content_type == "menu" + miq_task.state_active if miq_task content_generator.generate(self, klass, group_description, userids, timezones) end @@ -344,7 +347,7 @@ def find_or_build_contents_for_user(group, user, timezone = nil) # TODO: group/user support def create_initial_content_for_user(user, group = nil) - return unless contents_for_user(user).blank? && content_type != "menu" # Menu widgets have no content + return unless contents_for_user(user).blank? && content_type != "menu" # Menu widgets have no content user = self.class.get_user(user) group = self.class.get_group(group) @@ -388,6 +391,7 @@ def contents_for_user(user) def last_run_on_for_user(user) contents = contents_for_user(user) return nil if contents.nil? + contents.miq_report_result.nil? ? contents.updated_at : contents.miq_report_result.last_run_on end @@ -404,12 +408,13 @@ def grouped_subscribers users_by_userid = User.in_my_region.where(:userid => grouped_users.values.flatten.uniq).index_by(&:userid) grouped_users.each_with_object({}) do |(k, v), h| user_objs = users_by_userid.values_at(*v).reject(&:blank?) - h[groups_by_id[k]] = user_objs unless user_objs.blank? + h[groups_by_id[k]] = user_objs if user_objs.present? end end def available_for_group?(group) return false unless group + has_visibility?(:roles, group.miq_user_role_name) || has_visibility?(:groups, group.description) end @@ -497,7 +502,7 @@ def self.sync_from_hash(attrs) end else $log.info("Widget: [#{attrs["description"]}] file has been added to disk, adding to model") - widget = self.create!(attrs) + widget = create!(attrs) end widget.sync_schedule(schedule_info) @@ -535,13 +540,13 @@ def sync_schedule(schedule_info) :filter => MiqExpression.new(filter_for_schedule), :resource_type => self.class.name, :run_at => { - :interval => {:value => value, :unit => unit}, + :interval => {:value => value, :unit => unit}, :tz => server_tz, :start_time => sched_time - }, + } ) self.miq_schedule = sched - self.save! + save! _log.info("Created schedule for Widget: [#{title}]") _log.debug("Widget: [#{title}] created schedule: [#{sched.inspect}]") @@ -566,14 +571,14 @@ def self.seed sync_from_dir end - def save_with_shortcuts(shortcuts) # [[, ], ...] + def save_with_shortcuts(shortcuts) # [[, ], ...] transaction do - ws = [] # Create an array of widget shortcuts + ws = [] # Create an array of widget shortcuts shortcuts.each_with_index do |s, s_idx| ws.push(MiqWidgetShortcut.new(:sequence => s_idx, :description => s.last, :miq_shortcut_id => s.first)) end self.miq_widget_shortcuts = ws - save # .save! raises exception if validate_uniqueness fails + save # .save! raises exception if validate_uniqueness fails end errors.empty? 
# True if no errors end @@ -587,6 +592,7 @@ def delete_legacy_contents_for_group(group) # TODO: detect date field in the report? def timezone_matters? return true unless options + options.fetch(:timezone_matters, true) end diff --git a/app/models/miq_widget/content_generation.rb b/app/models/miq_widget/content_generation.rb index 08412abcccb..8c1a273b69a 100644 --- a/app/models/miq_widget/content_generation.rb +++ b/app/models/miq_widget/content_generation.rb @@ -4,10 +4,8 @@ def initialize(options) options.each do |k, v| class_eval { attr_accessor k.to_sym } - instance_variable_set("@#{k}", v) + instance_variable_set(:"@#{k}", v) end - - self end def self.based_on_miq_report? diff --git a/app/models/miq_widget/content_option_generator.rb b/app/models/miq_widget/content_option_generator.rb index 27edbe33ad5..ab24e70141e 100644 --- a/app/models/miq_widget/content_option_generator.rb +++ b/app/models/miq_widget/content_option_generator.rb @@ -1,7 +1,7 @@ class MiqWidget::ContentOptionGenerator def generate(group, users, need_timezone = true) if group.kind_of?(MiqGroup) && !group.self_service? - return "MiqGroup", group.description, nil, need_timezone ? timezones_for_users(users) : %w(UTC) + return "MiqGroup", group.description, nil, need_timezone ? timezones_for_users(users) : %w[UTC] else return "User", group.description, userids_for_users(users), nil end diff --git a/app/models/miq_widget/import_export.rb b/app/models/miq_widget/import_export.rb index 9a6ae957cb9..1a794a80fe9 100644 --- a/app/models/miq_widget/import_export.rb +++ b/app/models/miq_widget/import_export.rb @@ -11,11 +11,11 @@ def import_from_hash(widget, _options = {}) def export_to_array h = attributes - %w(id created_at updated_at last_generated_content_on miq_schedule_id miq_task_id).each { |k| h.delete(k) } + %w[id created_at updated_at last_generated_content_on miq_schedule_id miq_task_id].each { |k| h.delete(k) } h["MiqReportContent"] = resource.export_to_array if resource if miq_schedule miq_schedule_attributes = miq_schedule.attributes - %w(id created_on updated_at last_run_on miq_search_id zone_id).each { |key| miq_schedule_attributes.delete(key) } + %w[id created_on updated_at last_run_on miq_search_id zone_id].each { |key| miq_schedule_attributes.delete(key) } h["MiqSchedule"] = miq_schedule_attributes end [self.class.to_s => h] diff --git a/app/models/miq_widget/report_content.rb b/app/models/miq_widget/report_content.rb index 031468663d7..085b496db54 100644 --- a/app/models/miq_widget/report_content.rb +++ b/app/models/miq_widget/report_content.rb @@ -23,7 +23,7 @@ def generate(user_or_group) report.rpt_options[:group_limit] = row_count report.build_html_rows.join else - report.group = nil # Ignore groupings for widgets, unless hiding details + report.group = nil # Ignore groupings for widgets, unless hiding details report.build_html_rows(true)[0..row_count - 1].join # clickable_rows = true end end diff --git a/app/models/miq_widget_set.rb b/app/models/miq_widget_set.rb index 8697f181124..42c70f0cb86 100644 --- a/app/models/miq_widget_set.rb +++ b/app/models/miq_widget_set.rb @@ -3,11 +3,11 @@ class MiqWidgetSet < ApplicationRecord acts_as_miq_set + before_validation :keep_group_when_saving before_destroy :ensure_can_be_destroyed before_destroy :destroy_user_versions before_destroy :delete_from_dashboard_order - before_validation :keep_group_when_saving after_save :add_to_dashboard_order after_save :update_members @@ -29,7 +29,7 @@ class MiqWidgetSet < ApplicationRecord order(Arel.sql(order)) } - WIDGET_DIR = 
File.expand_path(File.join(Rails.root, "product/dashboard/dashboards")) + WIDGET_DIR = File.expand_path(Rails.root.join("product/dashboard/dashboards").to_s) def self.default_dashboard find_by(:name => 'default', :read_only => true) @@ -118,14 +118,13 @@ def self.sync_from_file(filename) if ws.nil? || ws.updated_on.utc < File.mtime(filename).utc # Convert widget descriptions to ids in set_data members = [] - attrs["set_data"] = attrs.delete("set_data_by_description").inject({}) do |h, k| + attrs["set_data"] = attrs.delete("set_data_by_description").each_with_object({}) do |k, h| col, arr = k h[col] = arr.collect do |d| w = MiqWidget.find_by(:description => d) members << w if w w.try(:id) end.compact - h end owner_description = attrs.delete("owner_description") diff --git a/app/models/miq_widget_set/set_data.rb b/app/models/miq_widget_set/set_data.rb index c9df165cd12..2c60a10c1e6 100644 --- a/app/models/miq_widget_set/set_data.rb +++ b/app/models/miq_widget_set/set_data.rb @@ -26,7 +26,7 @@ def has_widget_id_member?(widget_id) private def init_set_data - old_set_data = self.set_data.to_h.symbolize_keys + old_set_data = set_data.to_h.symbolize_keys new_set_data = {} SET_DATA_COLS.each do |col_key| diff --git a/app/models/miq_worker.rb b/app/models/miq_worker.rb index be328ac4c43..24191eb0640 100644 --- a/app/models/miq_worker.rb +++ b/app/models/miq_worker.rb @@ -12,7 +12,7 @@ class MiqWorker < ApplicationRecord has_many :messages, :as => :handler, :class_name => 'MiqQueue' has_many :active_messages, -> { where(["state = ?", "dequeue"]) }, :as => :handler, :class_name => 'MiqQueue' has_many :ready_messages, -> { where(["state = ?", "ready"]) }, :as => :handler, :class_name => 'MiqQueue' - has_many :processed_messages, -> { where(["state != ?", "ready"]) }, :as => :handler, :class_name => 'MiqQueue', :dependent => :destroy + has_many :processed_messages, -> { where.not(:state => "ready") }, :as => :handler, :class_name => 'MiqQueue', :dependent => :destroy virtual_column :friendly_name, :type => :string virtual_column :uri_or_queue_name, :type => :string @@ -36,8 +36,8 @@ class MiqWorker < ApplicationRecord STATUSES_CURRENT = [STATUS_STARTED, STATUS_READY, STATUS_WORKING] STATUSES_STOPPED = [STATUS_STOPPED, STATUS_KILLED, STATUS_ABORTED] STATUSES_CURRENT_OR_STARTING = STATUSES_CURRENT + STATUSES_STARTING - STATUSES_ALIVE = STATUSES_CURRENT_OR_STARTING + [STATUS_STOPPING] - PROCESS_INFO_FIELDS = %i(priority memory_usage percent_memory percent_cpu memory_size cpu_time proportional_set_size unique_set_size) + STATUSES_ALIVE = STATUSES_CURRENT_OR_STARTING + [STATUS_STOPPING] + PROCESS_INFO_FIELDS = %i[priority memory_usage percent_memory percent_cpu memory_size cpu_time proportional_set_size unique_set_size] PROCESS_TITLE_PREFIX = "MIQ:".freeze @@ -61,6 +61,7 @@ def self.workers return 0 unless has_required_role? return @workers.call if @workers.kind_of?(Proc) return @workers unless @workers.nil? + workers_configured_count end @@ -81,8 +82,8 @@ def scalable? 
def self.workers_configured_count count = worker_settings[:count] - if maximum_workers_count.kind_of?(Integer) - count = maximum_workers_count if maximum_workers_count < count + if maximum_workers_count.kind_of?(Integer) && (maximum_workers_count < count) + count = maximum_workers_count end count end @@ -98,6 +99,7 @@ def self.concrete_subclasses def self.server_scope return current_scope if current_scope && current_scope.where_values_hash.include?('miq_server_id') + where(:miq_server_id => MiqServer.my_server&.id) end @@ -263,7 +265,8 @@ def self.reload_worker_settings end def self.start_workers - return unless self.has_required_role? + return unless has_required_role? + workers.times { start_worker } end @@ -360,6 +363,7 @@ def self.build_command_line(guid, ems_id = nil) def self.runner_script script = ManageIQ.root.join("lib/workers/bin/run_single_worker.rb") raise "script not found: #{script}" unless File.exist?(script) + script end @@ -371,7 +375,7 @@ def start_runner_via_spawn pid = Kernel.spawn( {"BUNDLER_GROUPS" => self.class.bundler_groups.join(",")}, command_line, - [:out, :err] => [Rails.root.join("log", "evm.log"), "a"] + [:out, :err] => [Rails.root.join("log/evm.log"), "a"] ) Process.detach(pid) pid @@ -393,7 +397,7 @@ def stop end # Let the worker monitor start a new worker - alias_method :restart, :stop + alias restart stop def kill kill_process @@ -427,6 +431,7 @@ def kill_process Process.kill(9, pid) loop do break unless is_alive? + sleep(0.01) end rescue Errno::ESRCH @@ -555,8 +560,8 @@ def abbreviated_class_name def self.minimal_class_name abbreviated_class_name - .sub(/Miq/, "") - .sub(/Worker/, "") + .sub("Miq", "") + .sub("Worker", "") end def minimal_class_name diff --git a/app/models/miq_worker/container_common.rb b/app/models/miq_worker/container_common.rb index 4cfa93f6f27..76b82fbff05 100644 --- a/app/models/miq_worker/container_common.rb +++ b/app/models/miq_worker/container_common.rb @@ -12,11 +12,11 @@ def configure_worker_deployment(definition, replicas = 0) container = definition[:spec][:template][:spec][:containers].first - if container_image_tag.include?("latest") - container[:imagePullPolicy] = "Always" - else - container[:imagePullPolicy] = "IfNotPresent" - end + container[:imagePullPolicy] = if container_image_tag.include?("latest") + "Always" + else + "IfNotPresent" + end container[:image] = container_image container[:env] << {:name => "WORKER_CLASS_NAME", :value => self.class.name} @@ -53,8 +53,8 @@ def patch_deployment end def zone_selector - product = Vmdb::Appliance.PRODUCT_NAME.downcase.gsub(/[^-a-z0-9\.]/, "-") - zone = MiqServer.my_zone.chomp.strip.gsub(/[^-A-Za-z0-9_\.\/]/, "-") + product = Vmdb::Appliance.PRODUCT_NAME.downcase.gsub(/[^-a-z0-9.]/, "-") + zone = MiqServer.my_zone.chomp.strip.gsub(/[^-A-Za-z0-9_.\/]/, "-") {"#{product}/zone-#{zone}" => "true"} end @@ -90,7 +90,7 @@ def resource_constraints end def container_image_namespace - ENV["CONTAINER_IMAGE_NAMESPACE"] + ENV.fetch("CONTAINER_IMAGE_NAMESPACE", nil) end def container_image_name diff --git a/app/models/miq_worker/runner.rb b/app/models/miq_worker/runner.rb index f3e730a00da..fdecf6f9686 100644 --- a/app/models/miq_worker/runner.rb +++ b/app/models/miq_worker/runner.rb @@ -20,12 +20,14 @@ def self.start_worker(*args) def poll_method return @poll_method unless @poll_method.nil? 
+ self.poll_method = worker_settings[:poll_method]&.to_sym end def poll_method=(val) val = "sleep_poll_#{val}" raise ArgumentError, _("poll method '%{value}' not defined") % {:value => val} unless respond_to?(val) + @poll_method = val.to_sym end @@ -76,14 +78,15 @@ def set_connection_pool_size def worker_monitor_drb @worker_monitor_drb ||= begin raise _("%{log} No MiqServer found to establishing DRb Connection to") % {:log => log_prefix} if server.nil? + drb_uri = server.reload.drb_uri if drb_uri.blank? raise _("%{log} Blank DRb_URI for MiqServer with ID=[%{number}], NAME=[%{name}], PID=[%{pid_number}], GUID=[%{guid_number}]") % - {:log => log_prefix, - :number => server.id, - :name => server.name, - :pid_number => server.pid, - :guid_number => server.guid} + {:log => log_prefix, + :number => server.id, + :name => server.name, + :pid_number => server.pid, + :guid_number => server.guid} end _log.info("#{log_prefix} Initializing DRb Connection to MiqServer with ID=[#{server.id}], NAME=[#{server.name}], PID=[#{server.pid}], GUID=[#{server.guid}] DRb URI=[#{drb_uri}]") require 'drb' @@ -194,10 +197,10 @@ def reload_worker_record # def self.safe_log(worker, message = nil, exit_code = 0) - meth = (exit_code == 0) ? :info : :error + meth = exit_code == 0 ? :info : :error prefix = "#{log_prefix} " rescue "" - pid = "PID [#{Process.pid}] " rescue "" + pid = "PID [#{Process.pid}] " rescue "" guid = worker.nil? ? '' : "GUID [#{worker.guid}] " rescue "" id = worker.nil? ? '' : "ID [#{worker.id}] " rescue "" logmsg = "#{prefix}#{id}#{pid}#{guid}#{message}" @@ -228,6 +231,7 @@ def update_worker_record_at_exit(exit_code) def do_exit(message = nil, exit_code = 0) return if @exiting # Prevent running the do_exit logic more than one time + @exiting = true begin @@ -294,8 +298,8 @@ def do_work_loop heartbeat do_work rescue TemporaryFailure => error - msg = "#{log_prefix} Temporary failure (message: '#{error}') caught"\ - " during #do_work. Sleeping for a while before resuming." + msg = "#{log_prefix} Temporary failure (message: '#{error}') caught " \ + "during #do_work. Sleeping for a while before resuming." _log.warn(msg) recover_from_temporary_failure rescue SystemExit @@ -465,7 +469,7 @@ def setup_sigterm_trap def process_message(message, *args) meth = "message_#{message}" - if self.respond_to?(meth) + if respond_to?(meth) send(meth, *args) else _log.warn("#{log_prefix} Message [#{message}] is not recognized, ignoring") @@ -519,7 +523,7 @@ def worker_cmdline end def skip_heartbeat? 
- ENV["DISABLE_MIQ_WORKER_HEARTBEAT"] + ENV.fetch("DISABLE_MIQ_WORKER_HEARTBEAT", nil) end def warn_about_heartbeat_skipping diff --git a/app/models/miq_worker/systemd_common.rb b/app/models/miq_worker/systemd_common.rb index 5f518bd03b6..ee48a933854 100644 --- a/app/models/miq_worker/systemd_common.rb +++ b/app/models/miq_worker/systemd_common.rb @@ -56,13 +56,9 @@ def sd_notify_started sd_notify.ready end - def sd_notify_stopping - sd_notify.stopping - end + delegate :stopping, :to => :sd_notify, :prefix => true - def sd_notify_watchdog - sd_notify.watchdog - end + delegate :watchdog, :to => :sd_notify, :prefix => true def sd_notify_watchdog_usec(timeout_in_seconds) usec = timeout_in_seconds * 1_000_000 @@ -130,8 +126,7 @@ def unit_config_file def unit_environment_variables # Override this in a child class to add env vars - [ - ] + [] end end end diff --git a/app/models/mixins/active_vm_aggregation_mixin.rb b/app/models/mixins/active_vm_aggregation_mixin.rb index 73595d505df..587c3a38094 100644 --- a/app/models/mixins/active_vm_aggregation_mixin.rb +++ b/app/models/mixins/active_vm_aggregation_mixin.rb @@ -6,7 +6,7 @@ module ActiveVmAggregationMixin virtual_column :allocated_storage, :type => :integer, :uses => :active_vms virtual_column :provisioned_storage, :type => :integer, :uses => :active_vms - virtual_has_many :active_vms, :class_name => "VmOrTemplate", :uses => :vms + virtual_has_many :active_vms, :class_name => "VmOrTemplate", :uses => :vms end def active_vms diff --git a/app/models/mixins/aggregation_mixin/methods.rb b/app/models/mixins/aggregation_mixin/methods.rb index 195f32f916f..1165993a15a 100644 --- a/app/models/mixins/aggregation_mixin/methods.rb +++ b/app/models/mixins/aggregation_mixin/methods.rb @@ -41,7 +41,7 @@ def all_storages def aggregate_hardware(from, field, targets = nil) from = from.to_s.singularize select = field == :aggregate_cpu_speed ? "cpu_total_cores, cpu_speed" : field - targets ||= send("all_#{from.pluralize}") + targets ||= send(:"all_#{from.pluralize}") hdws = Hardware.where(from.singularize => targets).select(select) hdws.inject(0) { |t, hdw| t + hdw.send(field).to_i } end diff --git a/app/models/mixins/alert_mixin.rb b/app/models/mixins/alert_mixin.rb index 7893773cc2e..3c55123160b 100644 --- a/app/models/mixins/alert_mixin.rb +++ b/app/models/mixins/alert_mixin.rb @@ -6,10 +6,10 @@ def event_log_threshold?(options) raise _("option :message_filter_type is required") unless options[:message_filter_type] raise _("option :message_filter_value is required") unless options[:message_filter_value] - allowed_types = %w(STARTS\ WITH ENDS\ WITH INCLUDES REGULAR\ EXPRESSION) + allowed_types = %w[STARTS\ WITH ENDS\ WITH INCLUDES REGULAR\ EXPRESSION] unless allowed_types.include?(options[:message_filter_type]) raise _("option :message_filter_type: %{options}, invalid, expected one of %{type}") % - {:options => options[:message_filter_type], :type => allowed_types} + {:options => options[:message_filter_type], :type => allowed_types} end options.reverse_merge!({:time_threshold => 10.days, :freq_threshold => 2}) @@ -45,13 +45,13 @@ def build_conditions_and_selects(options) end [:source, :event_id, :level, :name].each do |col| - if options[col] - sel_conj = sel.empty? ? "" : ", " - sel << "#{sel_conj}#{col}" - conjunction = cond[0].empty? ? "" : " and " - cond[0] << "#{conjunction}#{col} = ?" - cond << options[col] - end + next unless options[col] + + sel_conj = sel.empty? ? "" : ", " + sel << "#{sel_conj}#{col}" + conjunction = cond[0].empty? ? 
"" : " and " + cond[0] << "#{conjunction}#{col} = ?" + cond << options[col] end return cond, sel end diff --git a/app/models/mixins/ansible_playbook_mixin.rb b/app/models/mixins/ansible_playbook_mixin.rb index 0ad21d8c86c..5683a1f9683 100644 --- a/app/models/mixins/ansible_playbook_mixin.rb +++ b/app/models/mixins/ansible_playbook_mixin.rb @@ -37,7 +37,7 @@ def playbook_log_stdout(log_option, job) return if log_option == 'on_error' && job.raw_status.succeeded? $log.info("Stdout from ansible job #{job.name}: #{job.raw_stdout('txt_download')}") - rescue StandardError => err + rescue => err if job.nil? $log.error("Job was nil, must pass a valid job") else diff --git a/app/models/mixins/archived_mixin.rb b/app/models/mixins/archived_mixin.rb index 104f96ecb95..5c70bfcd45f 100644 --- a/app/models/mixins/archived_mixin.rb +++ b/app/models/mixins/archived_mixin.rb @@ -4,7 +4,7 @@ module ArchivedMixin included do scope :archived, -> { where.not(:deleted_on => nil) } scope :active, -> { where(:deleted_on => nil) } - scope :not_archived_before, ->(timestamp) { + scope :not_archived_before, lambda { |timestamp| unscope(:where => :deleted_on).where(arel_table[:deleted_on].eq(nil).or(arel_table[:deleted_on].gteq(timestamp))) } end @@ -12,12 +12,12 @@ module ArchivedMixin def archived? !active? end - alias_method :archived, :archived? + alias archived archived? def active? deleted_on.nil? end - alias_method :active, :active? + alias active active? def archive! update!(:deleted_on => Time.now.utc) diff --git a/app/models/mixins/assignment_mixin.rb b/app/models/mixins/assignment_mixin.rb index 43b48cbfd87..74d50398c5c 100644 --- a/app/models/mixins/assignment_mixin.rb +++ b/app/models/mixins/assignment_mixin.rb @@ -13,10 +13,10 @@ def all_assignments(tag = nil) end module_function :all_assignments - included do #:nodoc: + included do # :nodoc: acts_as_miq_taggable - const_set("ASSIGNMENT_PARENT_ASSOCIATIONS", %i(parent_blue_folders parent_resource_pool host ems_cluster ext_management_system my_enterprise physical_server)) unless const_defined?("ASSIGNMENT_PARENT_ASSOCIATIONS") + const_set(:ASSIGNMENT_PARENT_ASSOCIATIONS, %i[parent_blue_folders parent_resource_pool host ems_cluster ext_management_system my_enterprise physical_server]) unless const_defined?(:ASSIGNMENT_PARENT_ASSOCIATIONS) cache_with_timeout(:assignments_cached, 1.minute) { assignments } end @@ -50,6 +50,7 @@ def assign_to_tags(objects, klass) Array.wrap(objects).each do |obj| tag = build_tag_tagging_path(obj, klass) next if tag.nil? + tag_add(tag, :ns => namespace) end reload @@ -59,6 +60,7 @@ def unassign_tags(objects, klass) Array.wrap(objects).each do |obj| tag = build_tag_tagging_path(obj, klass) next if tag.nil? + tag_remove(tag, :ns => namespace) end reload @@ -169,7 +171,7 @@ module ClassMethods def assignments # Get all assigned, enabled instances for type klass records = kind_of?(Class) ? all : self - assignment_map = records.each_with_object({}) { |a, h| h[a.id] = a } + assignment_map = records.index_by { |a| a.id } Tag .includes(:taggings).references(:taggings) .where("taggings.taggable_type = ? 
and tags.name like ?", name, "#{namespace}/%") @@ -204,7 +206,7 @@ def get_assigned_for_target(target, options = {}) parents.each { |parent| _log.debug("parent id: #{parent.id} class: #{parent.class}") } if parents.kind_of?(Array) - tlist = parents.collect { |p| "#{p.class.base_model.name.underscore}/id/#{p.id}" } # Assigned directly to parents + tlist = parents.collect { |p| "#{p.class.base_model.name.underscore}/id/#{p.id}" } # Assigned directly to parents if options[:tag_list] # Assigned to target (passed in) tlist += options[:tag_list] _log.debug("Using tag list: #{options[:tag_list].join(', ')}") @@ -249,6 +251,7 @@ def build_object_tag_path(obj, klass = nil) "#{obj.class.base_model.name.underscore}/id/#{obj.id}" else # obj is the id of an instance of raise _("Class must be specified when object is an integer") if klass.nil? + "#{klass.underscore}/id/#{obj}" end end diff --git a/app/models/mixins/authentication_mixin.rb b/app/models/mixins/authentication_mixin.rb index fae8e8c2788..680d76de995 100644 --- a/app/models/mixins/authentication_mixin.rb +++ b/app/models/mixins/authentication_mixin.rb @@ -74,7 +74,7 @@ def self.authentication_check_schedule end def supported_auth_attributes - %w(userid password) + %w[userid password] end def default_authentication_type @@ -163,18 +163,21 @@ def authentication_status_ok?(type = nil) def auth_user_pwd(type = nil) cred = authentication_best_fit(type) return nil if cred.nil? || cred.userid.blank? + [cred.userid, cred.password] end def auth_user_token(type = nil) cred = authentication_best_fit(type) return nil if cred.nil? || cred.userid.blank? + [cred.userid, cred.auth_key] end def auth_user_keypair(type = nil) cred = authentication_best_fit(type) return nil if cred.nil? || cred.userid.blank? + [cred.userid, cred.auth_key] end @@ -186,7 +189,7 @@ def update_authentication(data, options = {}) @orig_credentials ||= auth_user_pwd || "none" # Invoke before callback - before_update_authentication if self.respond_to?(:before_update_authentication) && options[:save] + before_update_authentication if respond_to?(:before_update_authentication) && options[:save] data.each_pair do |type, value| cred = authentication_type(type) @@ -219,6 +222,7 @@ def update_authentication(data, options = {}) if value.key?(:userid) && value[:userid].blank? current[:new] = nil next if options[:save] == false + authentication_delete(type) next end @@ -226,18 +230,18 @@ def update_authentication(data, options = {}) # Update or create if cred.nil? 
# FIXME: after we completely move to DDF and revise the REST API for providers, this will probably be something to delete - if self.kind_of?(ManageIQ::Providers::Openstack::InfraManager) && value[:auth_key] + if kind_of?(ManageIQ::Providers::Openstack::InfraManager) && value[:auth_key] # TODO(lsmola) investigate why build throws an exception, that it needs to be subclass of AuthUseridPassword cred = ManageIQ::Providers::Openstack::InfraManager::AuthKeyPair.new(:name => "#{self.class.name} #{name}", :authtype => type.to_s, - :resource_id => id, :resource_type => "ExtManagementSystem") + :resource_id => id, :resource_type => "ExtManagementSystem") authentications << cred elsif value[:auth_key] cred = AuthToken.new(:name => "#{self.class.name} #{name}", :authtype => type.to_s, - :resource_id => id, :resource_type => "ExtManagementSystem") + :resource_id => id, :resource_type => "ExtManagementSystem") authentications << cred else cred = authentications.build(:name => "#{self.class.name} #{name}", :authtype => type.to_s, - :type => "AuthUseridPassword") + :type => "AuthUseridPassword") end end cred.userid = value[:userid] @@ -249,7 +253,7 @@ def update_authentication(data, options = {}) end # Invoke callback - after_update_authentication if self.respond_to?(:after_update_authentication) && options[:save] + after_update_authentication if respond_to?(:after_update_authentication) && options[:save] @orig_credentials = nil if options[:save] end @@ -261,6 +265,7 @@ def credentials_changed? def authentication_type(type) return nil if type.nil? + available_authentications.detect do |a| a.authentication_type.to_s == type.to_s end @@ -298,8 +303,8 @@ def authentication_check_types_queue(*args) end def authentication_check_attributes(types, method_options) - role = authentication_check_role if self.respond_to?(:authentication_check_role) - zone = my_zone if self.respond_to?(:my_zone) + role = authentication_check_role if respond_to?(:authentication_check_role) + zone = my_zone if respond_to?(:my_zone) # FIXME: Via schedule, a message is created with args = [], so all authentications will be checked, # while an authentication change will create a message with args [:default] or whatever @@ -347,6 +352,7 @@ def authentication_check_types(*args) def retry_scheduled_authentication_check(auth_type, options) return unless options[:attempt] + auth = authentication_best_fit(auth_type) if auth.try(:retryable_status?) @@ -400,6 +406,7 @@ def change_password(current_password, new_password, auth_type = :default) unless supports?(:change_password) raise MiqException::Error, _("Change Password is not supported for %{class_description} provider") % {:class_description => self.class.description} end + if change_password_params_valid?(current_password, new_password) raw_change_password(current_password, new_password) update_authentication(auth_type => {:userid => authentication_userid, :password => new_password}) @@ -459,9 +466,9 @@ def assign_nested_authentication(attributes) private def authentication_check_no_validation(type, options) - header = "type: [#{type.inspect}] for [#{id}] [#{name}]" + header = "type: [#{type.inspect}] for [#{id}] [#{name}]" status, details = - if self.missing_credentials?(type) + if missing_credentials?(type) [:incomplete, "Missing credentials"] else begin @@ -491,7 +498,7 @@ def authentication_component(type, method) return nil if cred.nil? value = cred.public_send(method) - value.blank? ? 
nil : value + value.presence end def available_authentications diff --git a/app/models/mixins/custom_attribute_mixin.rb b/app/models/mixins/custom_attribute_mixin.rb index 6dedcb99fcd..48797f579d7 100644 --- a/app/models/mixins/custom_attribute_mixin.rb +++ b/app/models/mixins/custom_attribute_mixin.rb @@ -6,7 +6,7 @@ module CustomAttributeMixin DEFAULT_SECTION_NAME = 'Custom Attribute'.freeze CUSTOM_ATTRIBUTE_INVALID_NAME_WARNING = "A custom attribute name must begin with a letter (a-z, but also letters with diacritical marks and non-Latin letters) or an underscore (_). Subsequent characters can be letters, underscores, digits (0-9), or dollar signs ($)".freeze - CUSTOM_ATTRIBUTE_VALID_NAME_REGEXP = /\A[\p{Alpha}_][\p{Alpha}_\d\$]*\z/ + CUSTOM_ATTRIBUTE_VALID_NAME_REGEXP = /\A[\p{Alpha}_][\p{Alpha}_\d$]*\z/ included do has_many :custom_attributes, :as => :resource, :dependent => :destroy @@ -25,7 +25,7 @@ module CustomAttributeMixin define_method(getter) do miq_custom_get(custom_str) end - virtual_column getter, :type => :string # uses not set since miq_custom_get re-queries + virtual_column getter, :type => :string # uses not set since miq_custom_get re-queries define_method(setter) do |value| miq_custom_set(custom_str, value) @@ -50,7 +50,8 @@ def self.invalid_custom_attribute_message(attribute) def self.add_custom_attribute(custom_attribute) return if respond_to?(custom_attribute) - ActiveSupport::Deprecation.warn(invalid_custom_attribute_message(custom_attribute)) unless custom_attribute.to_s =~ CUSTOM_ATTRIBUTE_VALID_NAME_REGEXP + + ActiveSupport::Deprecation.warn(invalid_custom_attribute_message(custom_attribute)) unless CUSTOM_ATTRIBUTE_VALID_NAME_REGEXP.match?(custom_attribute.to_s) ca_sym = custom_attribute.to_sym without_prefix = custom_attribute.sub(CUSTOM_ATTRIBUTES_PREFIX, "") @@ -90,11 +91,12 @@ def self.custom_attribute_arel(name_val, section) def self.to_human(column) col_name, section = column.gsub(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX, '').split(SECTION_SEPARATOR) - _("%{section}: %{custom_key}") % { :custom_key => col_name, :section => section.try(:titleize) || DEFAULT_SECTION_NAME} + _("%{section}: %{custom_key}") % {:custom_key => col_name, :section => section.try(:titleize) || DEFAULT_SECTION_NAME} end def self.column_name(custom_key) return if custom_key.nil? + CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX + custom_key end @@ -112,7 +114,8 @@ def miq_custom_get(key) def miq_custom_set(key, value) return miq_custom_delete(key) if value.blank? - ActiveSupport::Deprecation.warn(self.class.invalid_custom_attribute_message(key)) unless key.to_s =~ self.class::CUSTOM_ATTRIBUTE_VALID_NAME_REGEXP + + ActiveSupport::Deprecation.warn(self.class.invalid_custom_attribute_message(key)) unless self.class::CUSTOM_ATTRIBUTE_VALID_NAME_REGEXP.match?(key.to_s) record = miq_custom_attributes.find_by(:name => key.to_s) if record.nil? diff --git a/app/models/mixins/deprecation_mixin.rb b/app/models/mixins/deprecation_mixin.rb index 969fdcce85e..95626acdd90 100644 --- a/app/models/mixins/deprecation_mixin.rb +++ b/app/models/mixins/deprecation_mixin.rb @@ -7,8 +7,8 @@ module ClassMethods def deprecate_belongs_to(old_belongs_to, new_belongs_to) deprecate_attribute_methods(old_belongs_to, new_belongs_to) ["_id", "_id=", "_id?"].each do |suffix| - define_method("#{old_belongs_to}#{suffix}") do |*args| - args.present? ? send("#{new_belongs_to}#{suffix}", *args) : send("#{new_belongs_to}#{suffix}") + define_method(:"#{old_belongs_to}#{suffix}") do |*args| + args.present? ? 
send(:"#{new_belongs_to}#{suffix}", *args) : send(:"#{new_belongs_to}#{suffix}") end Vmdb::Deprecation.deprecate_methods(self, "#{old_belongs_to}#{suffix}" => "#{new_belongs_to}#{suffix}") end diff --git a/app/models/mixins/dialog_mixin.rb b/app/models/mixins/dialog_mixin.rb index 74059f2ac0f..07beffdc177 100644 --- a/app/models/mixins/dialog_mixin.rb +++ b/app/models/mixins/dialog_mixin.rb @@ -2,7 +2,7 @@ module DialogMixin extend ActiveSupport::Concern included do - validates_presence_of :label + validates :label, :presence => true end def remove_all_resources diff --git a/app/models/mixins/event_mixin.rb b/app/models/mixins/event_mixin.rb index ead1aeb5ab1..cadeac1fb68 100644 --- a/app/models/mixins/event_mixin.rb +++ b/app/models/mixins/event_mixin.rb @@ -25,6 +25,7 @@ def has_events?(assoc = :ems_events) # It should be considered for removal. @has_events ||= {} return @has_events[assoc] if @has_events.key?(assoc) + @has_events[assoc] = events_assoc_class(assoc).where(event_where_clause(assoc)).exists? end @@ -55,11 +56,9 @@ def miq_event_filter filter end - private - def find_one_event(assoc, order) ewc = event_where_clause(assoc) - events_assoc_class(assoc).where(ewc).order(order).first unless ewc.blank? + events_assoc_class(assoc).where(ewc).order(order).first if ewc.present? end module ClassMethods diff --git a/app/models/mixins/file_depot_mixin.rb b/app/models/mixins/file_depot_mixin.rb index d14888cdced..6eafaa93d7d 100644 --- a/app/models/mixins/file_depot_mixin.rb +++ b/app/models/mixins/file_depot_mixin.rb @@ -22,6 +22,7 @@ def verify_depot_settings(settings) settings["password"] ||= find(settings["id"]).authentication_password if settings["id"] res = mnt_instance(settings).verify raise _("Connection Settings validation failed with error: %{error}") % {:error => res.last} unless res.first + res end @@ -72,7 +73,7 @@ def requires_credentials? def validate_depot_credentials # This only checks that credentials are present - errors.add(:file_depot, "is missing credentials") if self.requires_credentials? && self.missing_credentials? + errors.add(:file_depot, "is missing credentials") if requires_credentials? && missing_credentials? end def verify_depot_credentials(_auth_type = nil) @@ -81,6 +82,7 @@ def verify_depot_credentials(_auth_type = nil) def depot_settings(reload = false) return @depot_settings if !reload && @depot_settings + @depot_settings = { :uri => uri, :uri_prefix => uri_prefix, @@ -93,6 +95,7 @@ def mnt raise _("No credentials defined") if requires_credentials? && missing_credentials? return @mnt if @mnt + @mnt = self.class.mnt_instance(depot_settings) end @@ -109,10 +112,11 @@ def connect_depot def disconnect_depot @connected ||= 0 return if @connected == 0 + mnt.disconnect if @connected == 1 @connected -= 1 end - alias_method :close, :disconnect_depot # TODO: Do we still need this alias? Since this is a mixin, close is a bad override. + alias close disconnect_depot # TODO: Do we still need this alias? Since this is a mixin, close is a bad override. def with_depot connect_depot @@ -162,11 +166,11 @@ def file_delete(file) mnt.delete(file) end end - alias_method :directory_delete, :file_delete + alias directory_delete file_delete - def file_open(*args, &block) + def file_open(...) with_depot do - mnt.open(*args, &block) + mnt.open(...) 
end end diff --git a/app/models/mixins/filterable_mixin.rb b/app/models/mixins/filterable_mixin.rb index 864b5aecac1..6bd8267396e 100644 --- a/app/models/mixins/filterable_mixin.rb +++ b/app/models/mixins/filterable_mixin.rb @@ -13,8 +13,8 @@ def authorized_for_user?(userid) return false if recs.nil? end - if db.respond_to?(:apply_belongsto_filters) - result = false unless MiqFilter.apply_belongsto_filters([self], bfilters) == [self] + if db.respond_to?(:apply_belongsto_filters) && !(MiqFilter.apply_belongsto_filters([self], bfilters) == [self]) + result = false end result diff --git a/app/models/mixins/inter_region_api_method_relay.rb b/app/models/mixins/inter_region_api_method_relay.rb index 9def3c4b8ea..c0ef24afc58 100644 --- a/app/models/mixins/inter_region_api_method_relay.rb +++ b/app/models/mixins/inter_region_api_method_relay.rb @@ -5,19 +5,19 @@ class InterRegionApiMethodRelayError < RuntimeError; end MAX_INSTANCE_WAIT = 1.minute def self.extended(klass) - unless klass.const_defined?("InstanceMethodRelay") - instance_relay = klass.const_set("InstanceMethodRelay", Module.new) + unless klass.const_defined?(:InstanceMethodRelay) + instance_relay = klass.const_set(:InstanceMethodRelay, Module.new) klass.prepend(instance_relay) end - unless klass.const_defined?("ClassMethodRelay") - class_relay = klass.const_set("ClassMethodRelay", Module.new) + unless klass.const_defined?(:ClassMethodRelay) + class_relay = klass.const_set(:ClassMethodRelay, Module.new) klass.singleton_class.prepend(class_relay) end end def api_relay_method(method, action = method) - relay = const_get("InstanceMethodRelay") + relay = const_get(:InstanceMethodRelay) collection_name = collection_for_class relay.class_eval do @@ -34,7 +34,7 @@ def api_relay_method(method, action = method) end def api_relay_class_method(method, action = method) - relay = const_get("ClassMethodRelay") + relay = const_get(:ClassMethodRelay) collection_name = collection_for_class raise ArgumentError, "A block is required to determine target object region and API arguments" unless block_given? @@ -79,6 +79,7 @@ def self.exec_api_call(region, collection_name, action, api_args = nil, id = nil case result when ManageIQ::API::Client::ActionResult raise InterRegionApiMethodRelayError, result.message if result.failed? + result.attributes when ManageIQ::API::Client::Resource instance_for_resource(result) @@ -99,6 +100,7 @@ def self.instance_for_resource(resource) while wait < MAX_INSTANCE_WAIT instance = klass.find_by(:id => resource.id) return instance if instance + sleep(wait) wait *= 2 end diff --git a/app/models/mixins/miq_ae_yaml_import_export_mixin.rb b/app/models/mixins/miq_ae_yaml_import_export_mixin.rb index 1bd49fa86fd..ad8da018175 100644 --- a/app/models/mixins/miq_ae_yaml_import_export_mixin.rb +++ b/app/models/mixins/miq_ae_yaml_import_export_mixin.rb @@ -55,7 +55,7 @@ def add_instance(class_obj, instance_yaml) def add_method(class_obj, method_yaml) method_attributes = method_yaml.fetch_path('object', 'attributes') _log.info("Importing method: <#{method_attributes['name']}>") - fields = method_yaml.fetch_path('object', 'inputs') + fields = method_yaml.fetch_path('object', 'inputs') method_attributes['class_id'] = class_obj.id MiqAeMethod.new(method_attributes).tap do |method_obj| method_obj.inputs = process_fields(fields) unless fields.nil? 
@@ -71,6 +71,7 @@ def process_field_value(instance_obj, field) field.each do |fname, value| ae_field = instance_obj.ae_class.ae_fields.detect { |f| fname.casecmp(f.name) == 0 } raise MiqAeException::FieldNotFound, "Field [#{fname}] not found in MiqAeDatastore" if ae_field.nil? + instance_obj.ae_values << MiqAeValue.new({'ae_field' => ae_field}.merge(value)) end end diff --git a/app/models/mixins/miq_policy_mixin.rb b/app/models/mixins/miq_policy_mixin.rb index 802af7f12f9..a4fb7bf72a7 100644 --- a/app/models/mixins/miq_policy_mixin.rb +++ b/app/models/mixins/miq_policy_mixin.rb @@ -65,7 +65,7 @@ def resolve_profiles(list, event = nil) end def passes_policy?(list = nil) - list.nil? ? plist = policies : plist = resolve_policies(list) + plist = list.nil? ? policies : resolve_policies(list) result = true plist.each do |policy| result = false if policy["result"] == "deny" @@ -129,11 +129,13 @@ def rsop(event, targets) targets.each do |t| profiles = (t.get_policies + MiqPolicy.associations_to_get_policies.collect do |assoc| next unless t.respond_to?(assoc) + t.send(assoc).get_policies unless t.send(assoc).nil? end).compact.flatten.uniq presults = t.resolve_profiles(profiles.collect(&:id), eventobj) target_result = presults.inject("allow") do |s, r| break "deny" if r["result"] == "deny" + s end @@ -148,7 +150,7 @@ def rsop_async(event, targets, userid = nil) eventobj = event.kind_of?(String) ? MiqEventDefinition.find_by(:name => event) : MiqEventDefinition.extract_objects(event) raise _("No event found for [%{event}]") % {:event => event} if eventobj.nil? - targets = targets.first.kind_of?(self) ? targets.collect(&:id) : targets + targets = targets.collect(&:id) if targets.first.kind_of?(self) opts = { :action => "#{name} - Resultant Set of Policy, Event: [#{eventobj.description}]", diff --git a/app/models/mixins/miq_provision_mixin.rb b/app/models/mixins/miq_provision_mixin.rb index d0c3671f77b..1fbdca79984 100644 --- a/app/models/mixins/miq_provision_mixin.rb +++ b/app/models/mixins/miq_provision_mixin.rb @@ -55,6 +55,7 @@ def get_owner @owner ||= begin email = get_option(:owner_email).try(:downcase) return if email.blank? + User.lookup_by_lower_email(email, get_user).tap do |owner| owner.current_group_by_description = get_option(:owner_group) if owner end @@ -163,8 +164,10 @@ def get_folder_paths def get_source_vm vm_id = get_option(:src_vm_id) raise _("Source VM not provided") if vm_id.nil? + svm = VmOrTemplate.find_by(:id => vm_id) raise _("Unable to find VM with Id: [%{vm_id}]") % {:vm_id => vm_id} if svm.nil? + svm end @@ -190,7 +193,7 @@ def set_customization_spec(custom_spec_name, override = false) custom_spec_name = custom_spec_name.name unless custom_spec_name.kind_of?(String) options = self.options.dup workflow do |prov_wf| - options[:sysprep_enabled] = %w(fields Specification) + options[:sysprep_enabled] = %w[fields Specification] prov_wf.init_from_dialog(options) prov_wf.get_all_dialogs prov_wf.allowed_customization_specs @@ -229,6 +232,7 @@ def disable_customization_spec def target_type return 'template' if provision_type == 'clone_to_template' + 'vm' end @@ -279,6 +283,7 @@ def request_options def format_web_service_property(key, value) return nil if value.kind_of?(Hash) return nil if value.blank? 
+ value = value.iso8601 if value.kind_of?(Time) {:key => key.to_s, :value => value.to_s} end @@ -294,7 +299,8 @@ def class_to_resource_type_and_key(rsc_class) end def resource_display_name(rsc) - return rsc.name if rsc.respond_to?(:name) + return rsc.name if rsc.respond_to?(:name) + '' end @@ -330,6 +336,7 @@ def resource_class(rsc) if rsc.kind_of?(MiqAeMethodService::MiqAeServiceManageIQ_Providers_CloudManager_AuthKeyPair) return 'ManageIQ::Providers::CloudManager::AuthKeyPair' end + $1 if rsc.class.base_class.name =~ /::MiqAeService(.*)/ end end diff --git a/app/models/mixins/miq_provision_quota_mixin.rb b/app/models/mixins/miq_provision_quota_mixin.rb index 5f3edda1df4..a34e0a272ec 100644 --- a/app/models/mixins/miq_provision_quota_mixin.rb +++ b/app/models/mixins/miq_provision_quota_mixin.rb @@ -9,6 +9,7 @@ def check_quota(quota_type = :vms_by_owner, options = {}) unless respond_to?(quota_method) raise _("check_quota called with an invalid provisioning quota method <%{type}>") % {:type => quota_type} end + send(quota_method, options) end @@ -123,7 +124,7 @@ def quota_find_vms_by_owner(options) vms = [] prov_owner = get_owner unless prov_owner.nil? - cond_str, cond_args = "evm_owner_id = ? AND template = ? AND host_id is not NULL", [prov_owner.id, false] + cond_str, cond_args = "evm_owner_id = ? AND template = ? AND host_id is not NULL", [prov_owner.id, false] # Default return includes retired VMs that are still on a host if options[:retired_vms_only] == true @@ -181,11 +182,11 @@ def quota_find_provisions(options) :class_name => 'MiqProvisionRequest', :method_name => 'create_request_tasks', :state => 'ready', - :deliver_on => scheduled_range, + :deliver_on => scheduled_range ) # Make sure we skip the current MiqProvisionRequest in the calculation. - skip_id = self.class.name == "MiqProvisionRequest" ? id : miq_provision_request.id + skip_id = instance_of?(::MiqProvisionRequest) ? id : miq_provision_request.id load_ids = queued_requests.pluck(:instance_id) load_ids.delete(skip_id) provisions = MiqProvisionRequest.where(:id => load_ids).to_a @@ -195,6 +196,7 @@ def quota_find_provisions(options) today_range = (scheduled_range.first..scheduled_range.last) MiqProvisionRequest.where.not(:request_state => 'pending').where(:updated_on => today_range).each do |prov_req| next if prov_req.id == skip_id + provisions << prov_req if today_range.include?(prov_req.options[:delivered_on]) end end @@ -229,7 +231,7 @@ def quota_find_prov_requests(_options) requests = MiqRequest.where("type = ? and approval_state != ? and (created_on >= ? and created_on < ?)", MiqProvisionRequest.name, 'denied', *today_time_range) # Make sure we skip the current MiqProvisionRequest in the calculation. - skip_id = self.class.name == "MiqProvisionRequest" ? id : miq_provision_request.id + skip_id = instance_of?(::MiqProvisionRequest) ? 
id : miq_provision_request.id requests.collect { |request| request unless request.id == skip_id }.compact end @@ -277,8 +279,8 @@ def quota_find_active_prov_request_by_tenant(options) def quota_find_active_prov_request(_options) MiqRequest.where( :approval_state => 'approved', - :type => %w(MiqProvisionRequest ServiceTemplateProvisionRequest), - :request_state => %w(active queued pending), + :type => %w[MiqProvisionRequest ServiceTemplateProvisionRequest], + :request_state => %w[active queued pending], :status => 'Ok', :process => true ).where.not(:id => id) @@ -291,6 +293,7 @@ def quota_find_active_prov_request(_options) def vm_quota_values(pr, result) num_vms_for_request = number_of_vms(pr) return if num_vms_for_request.zero? + flavor_obj = flavor(pr) result[:count] += num_vms_for_request result[:memory] += memory(pr, cloud?(pr), vendor(pr), flavor_obj) * num_vms_for_request @@ -300,6 +303,7 @@ def vm_quota_values(pr, result) pr.miq_request_tasks.each do |p| next unless p.state == 'Active' + host_id, storage_id = p.get_option(:dest_host).to_i, p.get_option(:dest_storage).to_i active = result[:active] active[:memory_by_host_id][host_id] += memory(p, cloud?(pr), vendor(pr), flavor_obj) @@ -312,11 +316,13 @@ def vm_quota_values(pr, result) def service_quota_values(request, result) return unless request.service_template + request.service_template.service_resources.each do |sr| if request.service_template.service_type == ServiceTemplate::SERVICE_TYPE_COMPOSITE bundle_quota_values(sr, result) else next if request.service_template.prov_type.starts_with?("generic") + vm_quota_values(sr.resource, result) end end @@ -324,6 +330,7 @@ def service_quota_values(request, result) def bundle_quota_values(service_resource, result) return if service_resource.resource.prov_type.starts_with?('generic') + service_resource.resource.service_resources.each do |sr| vm_quota_values(sr.resource, result) end @@ -333,10 +340,9 @@ def quota_provision_stats(prov_method, options) result = {:count => 0, :memory => 0, :cpu => 0, :storage => 0, :ids => [], :class_name => "MiqProvisionRequest", :active => { :class_name => "MiqProvision", :ids => [], :storage_by_id => Hash.new { |k, v| k[v] = 0 }, - :memory_by_host_id => Hash.new { |k, v| k[v] = 0 }, :cpu_by_host_id => Hash.new { |k, v| k[v] = 0 }, + :memory_by_host_id => Hash.new { |k, v| k[v] = 0 }, :cpu_by_host_id => Hash.new { |k, v| k[v] = 0 }, :vms_by_storage_id => Hash.new { |k, v| k[v] = [] } - } - } + }} send(prov_method, options).each do |pr| service_request?(pr) ? service_quota_values(pr, result) : vm_quota_values(pr, result) @@ -379,6 +385,7 @@ def storage(prov, cloud, vendor, flavor_obj = nil) return prov.get_option(:boot_disk_size).to_i.gigabytes end return nil unless flavor_obj + flavor_obj.root_disk_size.to_i + flavor_obj.ephemeral_disk_size.to_i + flavor_obj.swap_disk_size.to_i else prov.kind_of?(MiqRequest) ? prov.vm_template.provisioned_storage : prov.miq_request.vm_template.provisioned_storage @@ -391,6 +398,6 @@ def memory(prov, cloud, vendor, flavor_obj = nil) request = prov.kind_of?(MiqRequest) ? prov : prov.miq_request memory = request.get_option(:vm_memory).to_i - %w(amazon openstack google).include?(vendor) ? memory : memory.megabytes + %w[amazon openstack google].include?(vendor) ? 
memory : memory.megabytes end end diff --git a/app/models/mixins/miq_request_mixin.rb b/app/models/mixins/miq_request_mixin.rb index 39f04ffc961..c48936f6116 100644 --- a/app/models/mixins/miq_request_mixin.rb +++ b/app/models/mixins/miq_request_mixin.rb @@ -39,12 +39,12 @@ def get_user @user = User.super_admin end end - alias_method :tenant_identity, :get_user + alias tenant_identity get_user def tags Array.wrap(tag_ids).each do |tag_id| tag = Classification.find(tag_id) - yield(tag.name, tag.parent.name) unless tag.nil? # yield the tag's name and category + yield(tag.name, tag.parent.name) unless tag.nil? # yield the tag's name and category end end @@ -57,7 +57,7 @@ def get_tags tags do |tag, cat| cat = cat.to_sym if vm_tags.key?(cat) - vm_tags[cat] = [vm_tags[cat]] unless vm_tags[cat].kind_of?(Array) + vm_tags[cat] = [vm_tags[cat]] unless vm_tags[cat].kind_of?(Array) vm_tags[cat] << tag else vm_tags[cat] = tag @@ -74,10 +74,11 @@ def clear_tag(category = nil, tag_name = nil) Array.wrap(tag_ids).each do |tag_id| tag = Classification.find(tag_id) next if category.to_s.casecmp(tag.parent.name) != 0 - next if !tag_name.blank? && tag_name.to_s.casecmp(tag.name) != 0 + next if tag_name.present? && tag_name.to_s.casecmp(tag.name) != 0 + deletes << tag_id end - unless deletes.blank? + if deletes.present? self.tag_ids -= deletes update_attribute(:options, options) end @@ -87,8 +88,10 @@ def clear_tag(category = nil, tag_name = nil) def add_tag(category, tag_name) cat = Classification.lookup_by_name(category.to_s) return if cat.nil? + tag = cat.children.detect { |t| t.name.casecmp(tag_name.to_s) == 0 } return if tag.nil? + self.tag_ids ||= [] unless self.tag_ids.include?(tag.id) self.tag_ids << tag.id @@ -99,7 +102,7 @@ def add_tag(category, tag_name) def classifications Array.wrap(self.tag_ids).each do |tag_id| classification = Classification.find(tag_id) - yield(classification) unless classification.nil? # yield the whole classification + yield(classification) unless classification.nil? # yield the whole classification end end @@ -113,7 +116,7 @@ def get_classifications cat = classification.parent.name.to_sym tuple = {:name => classification.name, :description => classification.description} if vm_classifications.key?(cat) - vm_classifications[cat] = [vm_classifications[cat]] unless vm_classifications[cat].kind_of?(Array) + vm_classifications[cat] = [vm_classifications[cat]] unless vm_classifications[cat].kind_of?(Array) vm_classifications[cat] << tuple else vm_classifications[cat] = tuple @@ -137,10 +140,12 @@ def request_tags classifications do |c| tag_name = c.to_tag next unless tag_name.starts_with?(ns) + tag_path = tag_name.split('/')[2..-1].join('/') parts = tag_path.split('/') cat = Classification.lookup_by_name(parts.first) next if cat.show? == false + cat_descript = cat.description tag_descript = Classification.lookup_by_name(tag_path).description ws_tag_data << {:category => parts.first, :category_display_name => cat_descript, @@ -167,10 +172,11 @@ def workflow(request_options = options, flags = {}) def request_dialog(action_name) st = service_template return {} if st.blank? 
+
     ra = st.resource_actions.find_by(:action => action_name)
     values = options[:dialog]
     dialog = ResourceActionWorkflow.new(values, get_user, ra, {}).dialog
-    DialogSerializer.new.serialize(Array[dialog]).first
+    DialogSerializer.new.serialize([dialog]).first
   end
 
   def dialog_zone
diff --git a/app/models/mixins/miq_web_server_worker_mixin.rb b/app/models/mixins/miq_web_server_worker_mixin.rb
index 3def48504d8..8d7c95e0e85 100644
--- a/app/models/mixins/miq_web_server_worker_mixin.rb
+++ b/app/models/mixins/miq_web_server_worker_mixin.rb
@@ -24,6 +24,7 @@ def preload_for_console
 
     def preload_for_worker_role
       raise "Expected database to be seeded via `rake db:seed`." unless EvmDatabase.seeded_primordially?
+
       configure_secret_token
     end
 
@@ -104,6 +105,7 @@ def port_range
     def reserve_port(ports)
       free_ports = port_range.to_a - ports
       raise NoFreePortError if free_ports.empty?
+
       free_ports.first
     end
   end
@@ -152,7 +154,7 @@ def port
   end
 
   def release_db_connection
-    self.update_spid!(nil)
+    update_spid!(nil)
     self.class.release_db_connection
   end
 end
diff --git a/app/models/mixins/new_with_type_sti_mixin.rb b/app/models/mixins/new_with_type_sti_mixin.rb
index fbe5abc32b5..5e853ff518f 100644
--- a/app/models/mixins/new_with_type_sti_mixin.rb
+++ b/app/models/mixins/new_with_type_sti_mixin.rb
@@ -10,6 +10,7 @@ def new(*args, &block)
         unless klass <= self
           raise _("%{class_name} is not a subclass of %{name}") % {:class_name => klass.name, :name => name}
         end
+
         args.unshift(args.shift.except(inheritance_column.to_sym, inheritance_column.to_s))
         klass.new(*args, &block)
       else
diff --git a/app/models/mixins/ownership_mixin.rb b/app/models/mixins/ownership_mixin.rb
index 8c7ec3bc4dc..68007d48c00 100644
--- a/app/models/mixins/ownership_mixin.rb
+++ b/app/models/mixins/ownership_mixin.rb
@@ -58,20 +58,19 @@ def set_ownership(ids, options)
       errors.add(:missing_ids, "Unable to find #{name.pluralize} with the following ids #{missing.inspect}") unless missing.empty?
 
       objects.each do |obj|
-        begin
-          options.each_key do |k|
-            col = case k
-                  when :owner then :evm_owner
-                  when :group then :miq_group
-                  else
-                    raise _("Unknown option, '%{name}'") % {:name => k}
-                  end
-            obj.send("#{col}=", options[k])
-          end
-          obj.save
-        rescue => err
-          errors.add(:error_updating, "Error, '#{err.message}, updating #{name}: Name: [#{obj.name}], Id: [#{obj.id}]")
+
+        options.each_key do |k|
+          col = case k
+                when :owner then :evm_owner
+                when :group then :miq_group
+                else
+                  raise _("Unknown option, '%{name}'") % {:name => k}
+                end
+          obj.send(:"#{col}=", options[k])
         end
+        obj.save
+      rescue => err
+        errors.add(:error_updating, "Error, '#{err.message}, updating #{name}: Name: [#{obj.name}], Id: [#{obj.id}]")
       end
 
       errors.empty? ? true : errors
diff --git a/app/models/mixins/process_tasks_mixin.rb b/app/models/mixins/process_tasks_mixin.rb
index 707d9ba2f7c..3dcc991d065 100644
--- a/app/models/mixins/process_tasks_mixin.rb
+++ b/app/models/mixins/process_tasks_mixin.rb
@@ -6,12 +6,13 @@ module ClassMethods
     # Processes tasks received from the UI and queues them
     def process_tasks(options)
       raise _("No ids given to process_tasks") if options[:ids].blank?
+ if options[:task] == 'retire_now' options[:ids].each do |id| $log.info("Creating retire request for id [#{id}] with user [#{User.current_user.userid}]") name.constantize.make_retire_request(id, User.current_user) end - elsif options[:task] == "refresh_ems" && respond_to?("refresh_ems") + elsif options[:task] == "refresh_ems" && respond_to?(:refresh_ems) refresh_ems(options[:ids]) msg = "'#{options[:task]}' initiated for #{options[:ids].length} #{ui_lookup(:table => base_class.name).pluralize}" task_audit_event(:success, options, :message => msg) @@ -77,7 +78,7 @@ def invoke_tasks_remote(options) next rescue => err # Handle specific error case, until we can figure out how it occurs - if err.class == ArgumentError && err.message == "cannot interpret as DNS name: nil" + if err.instance_of?(ArgumentError) && err.message == "cannot interpret as DNS name: nil" $log.error("An error occurred while invoking remote tasks...") $log.log_backtrace(err) next @@ -139,7 +140,7 @@ def send_action(action, collection_name, collection, remote_options, id = nil) _log.info("Invoking task #{action} on #{msg_desination}") destination.send(action, post_args) task_audit_event(:success, remote_options, :message => "'#{action}' successfully initiated on #{msg_desination}") - rescue StandardError => err + rescue => err task_audit_event(:failure, remote_options, :message => "'#{action}' failed to be initiated on #{msg_desination}") _log.error(err.message) raise err unless err.kind_of?(NoMethodError) || err.kind_of?(ManageIQ::API::Client::ResourceNotFound) @@ -161,12 +162,14 @@ def task_arguments(options) # default implementation, can be overridden def invoke_task_local(task, instance, options, args) - cb = { - :class_name => task.class.to_s, - :instance_id => task.id, - :method_name => :queue_callback, - :args => ["Finished"] - } if task + if task + cb = { + :class_name => task.class.to_s, + :instance_id => task.id, + :method_name => :queue_callback, + :args => ["Finished"] + } + end q_hash = { :class_name => name, @@ -208,6 +211,7 @@ def validate_task(task, instance, options) end return true unless options[:task] == "retire_now" && instance.retired? + task.error("#{instance.name} is already retired") false end diff --git a/app/models/mixins/provider_object_mixin.rb b/app/models/mixins/provider_object_mixin.rb index c5a1952008f..3cbcf589d5a 100644 --- a/app/models/mixins/provider_object_mixin.rb +++ b/app/models/mixins/provider_object_mixin.rb @@ -1,21 +1,20 @@ module ProviderObjectMixin - def with_provider_connection(options = {}) - raise _("no block given") unless block_given? + def with_provider_connection(options = {}, &block) + raise _("no block given") unless block - connection_source(options).with_provider_connection(options) do |connection| - yield connection - end + connection_source(options).with_provider_connection(options, &block) end def with_provider_object(options = {}) raise _("no block given") unless block_given? 
+ connection_source(options).with_provider_connection(options) do |connection| - begin - handle = provider_object(connection) - yield handle - ensure - provider_object_release(handle) if handle && self.respond_to?(:provider_object_release) - end + + handle = provider_object(connection) + yield handle + ensure + provider_object_release(handle) if handle && respond_to?(:provider_object_release) + end end @@ -28,6 +27,7 @@ def provider_object(_connection = nil) def connection_source(options = {}) source = options[:connection_source] || connection_manager raise _("no connection source available") if source.nil? + source end diff --git a/app/models/mixins/purging_mixin.rb b/app/models/mixins/purging_mixin.rb index 652d0abb13e..178df34b641 100644 --- a/app/models/mixins/purging_mixin.rb +++ b/app/models/mixins/purging_mixin.rb @@ -203,6 +203,7 @@ def purge_in_batches(conditions, window, total = 0, total_limit = nil) # pull back ids - will slow performance batch_ids = query.pluck(:id) break if batch_ids.empty? + current_window = batch_ids.size else batch_ids = query @@ -211,6 +212,7 @@ def purge_in_batches(conditions, window, total = 0, total_limit = nil) _log.info("Purging #{current_window} #{table_name.humanize}.") count = unscoped.where(:id => batch_ids).delete_all break if count == 0 + total += count purge_associated_records(batch_ids) if respond_to?(:purge_associated_records) diff --git a/app/models/mixins/relationship_mixin.rb b/app/models/mixins/relationship_mixin.rb index 753c0de64ea..4b5131d0b1c 100644 --- a/app/models/mixins/relationship_mixin.rb +++ b/app/models/mixins/relationship_mixin.rb @@ -69,7 +69,6 @@ def relationship_type=(rel) relationship_types.push(rel) clear_relationships_cache(:except => :relationships_of) end - rel end def with_relationship_type(rel) @@ -151,6 +150,7 @@ def parent_count(*args) def parent_rel(*args) rels = parent_rels(*args).take(2) raise _("Multiple parents found.") if rels.length > 1 + rels.first end @@ -160,6 +160,7 @@ def parent(*args) rels = parent_rels(*args).take(2) raise _("Multiple parents found.") if rels.length > 1 + rels.first.try(:resource) end @@ -169,12 +170,13 @@ def parent_id(*args) rels = parent_ids(*args).take(2) raise _("Multiple parents found.") if rels.length > 1 + rels.first end # Returns the relationship of the root of the tree the record is in def root_rel - rel = relationship.try!(:root) + rel = relationship&.root # micro-optimization: if the relationship is us, "load" the resource rel.resource = self if rel && rel.resource_id == id && rel.resource_type == self.class.base_class.name.to_s rel || relationship_for_isolated_root @@ -379,7 +381,8 @@ def descendant_count(*args) def descendant_rels_arranged(*args) options = args.extract_options! rel = relationship(:raise_on_multiple => true) - return {} if rel.nil? # TODO: Should this return nil or init_relationship or Relationship.new in a Hash? + return {} if rel.nil? # TODO: Should this return nil or init_relationship or Relationship.new in a Hash? + Relationship.filter_by_resource_type(rel.descendants, options).arrange end @@ -427,6 +430,7 @@ def subtree_rels_arranged(*args) options = args.extract_options! rel = relationship(:raise_on_multiple => true) return {relationship_for_isolated_root => {}} if rel.nil? 
+ Relationship.filter_by_resource_type(rel.subtree, options).arrange end @@ -479,6 +483,7 @@ def relationship(*args) if options[:raise_on_multiple] rels = relationships.take(2) raise _("Multiple relationships found") if rels.length > 1 + rels.first else relationships.first @@ -553,6 +558,7 @@ def fulltree_rels_arranged(*args) options = args.extract_options! root_id = relationship.try(:root_id) return {relationship_for_isolated_root => {}} if root_id.nil? + Relationship.filter_by_resource_type(Relationship.subtree_of(root_id), options).arrange end @@ -611,7 +617,7 @@ def add_children(*child_objs) child_objs end - alias_method :add_child, :add_children + alias add_child add_children def parent=(parent) if parent.nil? @@ -635,14 +641,14 @@ def parent=(parent) clear_relationships_cache end - alias_method :replace_parent, :parent= + alias replace_parent parent= # # Backward compatibility methods # - alias_method :set_parent, :parent= - alias_method :set_child, :add_children + alias set_parent parent= + alias set_child add_children def replace_children(*child_objs) child_objs = child_objs.flatten @@ -700,7 +706,7 @@ def remove_children(*child_objs) remove_all_relationships(to_del) end end - alias_method :remove_child, :remove_children + alias remove_child remove_children def remove_all_parents(*args) return update!(:parent => nil) if use_ancestry? @@ -716,7 +722,7 @@ def remove_all_children(*args) of_type = Array.wrap(options[:of_type]) all_children_removed = of_type.empty? || (child_types - of_type).empty? - if self.is_root? && all_children_removed + if is_root? && all_children_removed remove_all_relationships else remove_all_relationships(child_rels(*args)) diff --git a/app/models/mixins/retirement_mixin.rb b/app/models/mixins/retirement_mixin.rb index 75ab90596a8..78329120878 100644 --- a/app/models/mixins/retirement_mixin.rb +++ b/app/models/mixins/retirement_mixin.rb @@ -32,9 +32,9 @@ def set_retirement_requester(obj_ids, requester) existing_objects = where(:id => obj_ids) updated_count = existing_objects.update_all(:retirement_requester => requester.userid) if updated_count != obj_ids.count - _log.info("Retirement requester for #{self.name} #{(obj_ids - existing_objects.pluck(:id))} not set because objects not found") + _log.info("Retirement requester for #{name} #{obj_ids - existing_objects.pluck(:id)} not set because objects not found") else - _log.info("Retirement requester for #{self.name} #{obj_ids} being set to #{requester.userid}") + _log.info("Retirement requester for #{name} #{obj_ids} being set to #{requester.userid}") end end end @@ -75,6 +75,7 @@ def retires_on=(timestamp) def extend_retires_on(days, date = Time.zone.now) raise _("Invalid Date specified: %{date}") % {:date => date} unless date.kind_of?(ActiveSupport::TimeWithZone) + _log.info("Extending Retirement Date on #{self.class.name} for id:<#{id}>, name:<#{name}> ") new_retires_date = date.in_time_zone + days.to_i.days _log.info("Original Date: #{date} Extend days: #{days} New Retirement Date: #{new_retires_date} for id:<#{id}>, name:<#{name}>") @@ -92,22 +93,22 @@ def retire(options = {}) date = options[:date].in_time_zone unless options[:date].nil? 
self.retires_on = date - if date - message += " is scheduled to retire on: [#{retires_on.strftime("%x %R %Z")}]" - else - message += " is no longer scheduled to retire" - end + message += if date + " is scheduled to retire on: [#{retires_on.strftime("%x %R %Z")}]" + else + " is no longer scheduled to retire" + end end if options.key?(:warn) message += " and" if options.key?(:date) warn = options[:warn] self.retirement_warn = warn - if warn - message += " has a value for retirement warning days of: [#{retirement_warn}]" - else - message += " has no value for retirement warning days" - end + message += if warn + " has a value for retirement warning days of: [#{retirement_warn}]" + else + " has no value for retirement warning days" + end end save @@ -123,6 +124,7 @@ def raise_retire_audit_event(message) def retirement_check return if retired? || retiring? || retirement_initialized? + requester = system_context_requester if !retirement_warned? && retirement_warning_due? @@ -151,6 +153,7 @@ def allow_retire_request_creation? def retire_now(requester = nil) if retired return if retired_validated? + _log.info("#{retirement_object_title}: [id:<#{id}>, name:<#{name}>], Retires On: [#{retires_on.strftime("%x %R %Z")}], was previously retired, but currently #{retired_invalid_reason}") elsif retiring? _log.info("#{retirement_object_title}: [id:<#{id}>, name:<#{name}>] retirement in progress") @@ -175,6 +178,7 @@ def retire_now(requester = nil) def finish_retirement raise _("%{name} already retired") % {:name => name} if retired? + $log.info("Finishing Retirement for [#{name}]") requester = retirement_requester mark_retired @@ -191,6 +195,7 @@ def mark_retired def start_retirement return if retired? || retiring? + $log.info("Starting Retirement for [id:<#{id}>, name:<#{name}>]") update(:retirement_state => "retiring") end diff --git a/app/models/mixins/scanning_mixin.rb b/app/models/mixins/scanning_mixin.rb index 87afed90739..a6db52a075e 100644 --- a/app/models/mixins/scanning_mixin.rb +++ b/app/models/mixins/scanning_mixin.rb @@ -15,7 +15,7 @@ def default_scan_categories_no_profile end def default_scan_categories - %w(vmconfig accounts software services system profiles) + %w[vmconfig accounts software services system profiles] end # Stash metadata before sync. @@ -34,14 +34,15 @@ def save_metadata(target_id, data_array) taskid = doc.root.attributes["taskid"] _log.info("TaskId = [#{taskid}]") - unless taskid.blank? - name = begin - File.basename(doc.root.elements[1].elements[1].attributes["original_filename"], ".*") - rescue - "vmscan" - end + if taskid.present? + name = begin + File.basename(doc.root.elements[1].elements[1].attributes["original_filename"], ".*") + rescue + "vmscan" + end job = Job.find_by(:guid => taskid) raise _("Unable to process data for job with id <%{number}>. Job not found.") % {:number => taskid} if job.nil? + begin job.signal(:data, xml_file) rescue => err @@ -76,6 +77,7 @@ def save_metadata(target_id, data_array) # Process XML documents from VM scans def add_elements(xml_node) return if xml_node.nil? + _log.info("Adding XML elements for [#{id}] from [#{xml_node.root.name}]") updated = false @@ -103,7 +105,7 @@ def add_elements(xml_node) # Update the last sync time if we did something self.last_sync_on = Time.at(xml_node.root.attributes["created_on"].to_i).utc if updated == true && xml_node.root.attributes["created_on"] save - hardware.save if self.respond_to?(:hardware) && !hardware.nil? + hardware.save if respond_to?(:hardware) && !hardware.nil? 
end def scan_queue(userid = "system", options = {}) @@ -161,8 +163,9 @@ def scan_metadata(category, options = {}) end def path_arg - return path if self.respond_to?(:path) - return name if self.respond_to?(:name) + return path if respond_to?(:path) + return name if respond_to?(:name) + nil end private :path_arg @@ -215,13 +218,15 @@ def scan_via_miq_vm(miqVm, ost) xml_node_scan.add_attributes("start_time" => ost.scanTime.iso8601) xml_summary.root.add_attributes("taskid" => ost.taskid) - data_dir = File.join(File.expand_path(Rails.root), "data/metadata") + data_dir = Rails.root.join("data/metadata").to_s _log.debug("creating #{data_dir}") - begin - Dir.mkdir(data_dir) - rescue Errno::EEXIST - # Ignore if the directory was created by another thread. - end unless File.exist?(data_dir) + unless File.exist?(data_dir) + begin + Dir.mkdir(data_dir) + rescue Errno::EEXIST + # Ignore if the directory was created by another thread. + end + end ost.skipConfig = true ost.config = OpenStruct.new( :dataDir => data_dir, @@ -306,9 +311,10 @@ def sync_stashed_metadata(ost) xml_summary = nil begin raise _("No synchronize category specified") if ost.category.nil? + categories = ost.category.split(",") ost.scanTime = Time.now.utc - ost.compress = true # Request that data returned from the blackbox is compressed + ost.compress = true # Request that data returned from the blackbox is compressed ost.xml_class = REXML::Document # TODO: if from_time is not a string (see sync_metadata() above), loadXmlData fails. # Just clear it for now, until we figure out the right thing to do. @@ -320,7 +326,7 @@ def sync_stashed_metadata(ost) xml_node = xml_summary.root.add_element("syncmetadata") xml_summary.root.add_attributes("scan_time" => ost.scanTime, "taskid" => ost.taskid) ost.skipConfig = true - data_dir = File.join(File.expand_path(Rails.root), "data/metadata") + data_dir = Rails.root.join("data/metadata").to_s ost.config = OpenStruct.new( :dataDir => data_dir, :forceFleeceDefault => false @@ -346,13 +352,11 @@ def sync_stashed_metadata(ost) _log.info("Starting: Sending target data for [#{c}] to server. Size:[#{data.length}] TaskId:[#{ost.taskid}] target:[#{name}]") save_metadata_op(data, "b64,zlib,xml", ost.taskid) _log.info("Completed: Sending target data for [#{c}] to server. Size:[#{data.length}] TaskId:[#{ost.taskid}] target:[#{name}]") - else + elsif items_total.zero? # Do not send empty XMLs. Warn if there is not data at all, or just not items selected. - if items_total.zero? - _log.warn("Synchronize: No data found for [#{c}]. Items:Total[#{items_total}] Selected[#{items_selected}] TaskId:[#{ost.taskid}] VM:[#{name}]") - else - _log.warn("Synchronize: No data selected for [#{c}]. Items:Total[#{items_total}] Selected[#{items_selected}] TaskId:[#{ost.taskid}] VM:[#{name}]") - end + _log.warn("Synchronize: No data found for [#{c}]. Items:Total[#{items_total}] Selected[#{items_selected}] TaskId:[#{ost.taskid}] VM:[#{name}]") + else + _log.warn("Synchronize: No data selected for [#{c}]. 
Items:Total[#{items_total}] Selected[#{items_selected}] TaskId:[#{ost.taskid}] VM:[#{name}]") end end rescue => syncErr @@ -384,7 +388,7 @@ def update_job_message(ost, message) :class_name => "Job", :method_name => "update_message", :args => [ost.taskid, message], - :task_id => "job_message_#{Time.now.to_i}", + :task_id => "job_message_#{Time.now.to_i}" ) end end diff --git a/app/models/mixins/scanning_operations_mixin.rb b/app/models/mixins/scanning_operations_mixin.rb index 9a749e56576..3ae14d9d7d3 100644 --- a/app/models/mixins/scanning_operations_mixin.rb +++ b/app/models/mixins/scanning_operations_mixin.rb @@ -13,11 +13,11 @@ def save_metadata_op(xmlFile, type, jobid = nil) :class_name => self.class.base_class.name, :method_name => "save_metadata", :data => Marshal.dump([xmlFile, type]), - :task_id => jobid, + :task_id => jobid ) _log.info("target [#{guid}] data put on queue, job [#{jobid}]") true - rescue StandardError => err + rescue => err _log.log_backtrace(err) false end diff --git a/app/models/mixins/service_mixin.rb b/app/models/mixins/service_mixin.rb index 5fd0d7b231f..20ba49584e3 100644 --- a/app/models/mixins/service_mixin.rb +++ b/app/models/mixins/service_mixin.rb @@ -53,20 +53,19 @@ def combined_group_delay(action) def delay_type(action) return :start_delay if action == :start - return :stop_delay if action == :stop + + :stop_delay if action == :stop end - def each_group_resource(grp_idx = nil) - return enum_for(:each_group_resource) unless block_given? + def each_group_resource(grp_idx = nil, &block) + return enum_for(:each_group_resource) unless block if children.present? && service_resources.empty? children.each do |child| - child.service_resources.each { |sr| yield(sr) } + child.service_resources.each(&block) end elsif grp_idx.nil? - service_resources.each do |sr| - yield(sr) - end + service_resources.each(&block) else service_resources.each do |sr| yield(sr) if sr.group_idx == grp_idx @@ -97,6 +96,7 @@ def next_group_index(current_idx, direction = 1) def parent_services(svc = self) return svc.ancestors if svc.kind_of?(Service) + srs = ServiceResource.where(:resource => svc) srs.collect { |sr| sr.public_send(sr.resource_type.underscore) }.compact end @@ -128,6 +128,7 @@ def create_service_resource(rsc, options) def circular_reference?(child_svc) return true if child_svc == self + if child_svc.kind_of?(Service) ancestor_ids.include?(child_svc.id) elsif child_svc.kind_of?(ServiceTemplate) @@ -138,8 +139,10 @@ def circular_reference?(child_svc) def circular_reference_check(child_svc, parent_svc = self) return child_svc if child_svc == parent_svc return nil unless child_svc.kind_of?(ServiceTemplate) + parent_services(parent_svc).each do |service| return(service) if service.id == child_svc.id + result = circular_reference_check(child_svc, service) return(result) unless result.nil? 
end diff --git a/app/models/mixins/service_orchestration_options_mixin.rb b/app/models/mixins/service_orchestration_options_mixin.rb index 2fbb21c44a2..e00453bca70 100644 --- a/app/models/mixins/service_orchestration_options_mixin.rb +++ b/app/models/mixins/service_orchestration_options_mixin.rb @@ -31,7 +31,8 @@ def dup_and_process_password(opts, encrypt = :encrypt) proc = ManageIQ::Password.method(encrypt) opts_dump.each do |_opt_name, opt_val| next unless opt_val.kind_of?(Hash) - opt_val.each { |param_key, param_val| opt_val[param_key] = proc.call(param_val) if param_key =~ /password/i } + + opt_val.each { |param_key, param_val| opt_val[param_key] = proc.call(param_val) if /password/i.match?(param_key) } end opts_dump diff --git a/app/models/mixins/storage_mixin.rb b/app/models/mixins/storage_mixin.rb index 62057f4b43c..9a9d1b9decb 100644 --- a/app/models/mixins/storage_mixin.rb +++ b/app/models/mixins/storage_mixin.rb @@ -5,7 +5,7 @@ module StorageMixin # Used to extend classes that utilize the StorageFiles class (Storage and Vm) included do STORAGE_FILE_TYPES.each do |m| - virtual_column "#{m}_size", :type => :integer, :uses => :"#{m}_files" + virtual_column "#{m}_size", :type => :integer, :uses => :"#{m}_files" virtual_has_many :"#{m}_files", :class_name => "StorageFile", :uses => :storage_files_files end end @@ -17,6 +17,7 @@ module StorageMixin def storage_files_by_type return @storage_files_by_type unless @storage_files_by_type.nil? + @storage_files_by_type = StorageFile.split_file_types(storage_files_files) end diff --git a/app/models/mixins/supports_feature_mixin.rb b/app/models/mixins/supports_feature_mixin.rb index b2270f04486..e68a2be1fe6 100644 --- a/app/models/mixins/supports_feature_mixin.rb +++ b/app/models/mixins/supports_feature_mixin.rb @@ -62,7 +62,7 @@ module SupportsFeatureMixin end def self.reason_or_default(reason) - reason.present? ? reason : _("Feature not available/supported") + (reason.presence || _("Feature not available/supported")) end # query instance for the reason why the feature is unsupported @@ -180,7 +180,7 @@ def define_supports_feature_methods(feature, is_supported: true, reason: nil, &b # defines the method on the instance define_method(method_name) do unsupported.delete(feature) - if block_given? + if block begin result = instance_eval(&block) # if no errors yet but result was an error message diff --git a/app/models/mixins/sysprep_template_mixin.rb b/app/models/mixins/sysprep_template_mixin.rb index 38b1a313b1a..98111bb441c 100644 --- a/app/models/mixins/sysprep_template_mixin.rb +++ b/app/models/mixins/sysprep_template_mixin.rb @@ -9,7 +9,7 @@ def allowed_sysprep_customization_templates(_options = {}) result = CustomizationTemplateSysprep.in_region(source.region_number).all.collect do |c| @values[:customization_template_script] = c.script if c.id == customization_template_id - build_ci_hash_struct(c, %i(name description updated_at)) + build_ci_hash_struct(c, %i[name description updated_at]) end result.compact! 
diff --git a/app/models/mixins/timezone_mixin.rb b/app/models/mixins/timezone_mixin.rb index a4ab8a1fceb..30dc86e0731 100644 --- a/app/models/mixins/timezone_mixin.rb +++ b/app/models/mixins/timezone_mixin.rb @@ -11,9 +11,9 @@ def with_a_timezone(timezone) end end - def with_current_user_timezone + def with_current_user_timezone(&block) timezone = User.current_user.try(:get_timezone) || self.class.server_timezone - with_a_timezone(timezone) { yield } + with_a_timezone(timezone, &block) end module ClassMethods diff --git a/app/models/mixins/yaml_import_export_mixin.rb b/app/models/mixins/yaml_import_export_mixin.rb index 6bf9956bf12..377f3ea949f 100644 --- a/app/models/mixins/yaml_import_export_mixin.rb +++ b/app/models/mixins/yaml_import_export_mixin.rb @@ -4,7 +4,7 @@ module YamlImportExportMixin module ClassMethods def export_to_array(list, klass) begin - klass = klass.kind_of?(Class) ? klass : Object.const_get(klass) + klass = Object.const_get(klass) unless klass.kind_of?(Class) rescue => err _log.error("List: [#{list}], Class: [#{klass}] - #{err.message}") return [] @@ -29,7 +29,7 @@ def export_to_yaml(list, klass) # @return [Array, Array] The array of objects to be imported, # and the array of importing status. def import(fd, options = {}) - fd.rewind # ensure to be at the beginning as the file is read multiple times + fd.rewind # ensure to be at the beginning as the file is read multiple times begin reps = YAML.load(fd.read) validate_import_data_class(reps) diff --git a/app/models/network_port.rb b/app/models/network_port.rb index 06ca1ebe10c..59eec6cc1ac 100644 --- a/app/models/network_port.rb +++ b/app/models/network_port.rb @@ -51,9 +51,9 @@ def cloud_subnets_names end # Define all getters and setters for extra_attributes related virtual columns - %i(binding_virtual_interface_details binding_virtual_nic_type binding_profile extra_dhcp_opts - allowed_address_pairs fixed_ips).each do |action| - define_method("#{action}=") do |value| + %i[binding_virtual_interface_details binding_virtual_nic_type binding_profile extra_dhcp_opts + allowed_address_pairs fixed_ips].each do |action| + define_method(:"#{action}=") do |value| extra_attributes_save(action, value) end @@ -66,10 +66,10 @@ def cloud_subnets_names def extra_attributes_save(key, value) self.extra_attributes = {} if extra_attributes.blank? - self.extra_attributes[key] = value + extra_attributes[key] = value end def extra_attributes_load(key) - self.extra_attributes[key] unless extra_attributes.blank? + extra_attributes[key] if extra_attributes.present? 
end end diff --git a/app/models/network_router.rb b/app/models/network_router.rb index 52a706bc64e..e680bd5729f 100644 --- a/app/models/network_router.rb +++ b/app/models/network_router.rb @@ -26,15 +26,15 @@ class NetworkRouter < ApplicationRecord serialize :extra_attributes virtual_column :external_gateway_info, :type => :string # :hash - virtual_column :distributed , :type => :boolean - virtual_column :routes , :type => :string # :array - virtual_column :propagating_vgws , :type => :string # :array - virtual_column :main_route_table , :type => :boolean # :array - virtual_column :high_availability , :type => :boolean + virtual_column :distributed, :type => :boolean + virtual_column :routes, :type => :string # :array + virtual_column :propagating_vgws, :type => :string # :array + virtual_column :main_route_table, :type => :boolean # :array + virtual_column :high_availability, :type => :boolean # Define all getters and setters for extra_attributes related virtual columns - %i(external_gateway_info distributed routes propagating_vgws main_route_table high_availability).each do |action| - define_method("#{action}=") do |value| + %i[external_gateway_info distributed routes propagating_vgws main_route_table high_availability].each do |action| + define_method(:"#{action}=") do |value| extra_attributes_save(action, value) end @@ -53,10 +53,10 @@ def self.class_by_ems(ext_management_system) def extra_attributes_save(key, value) self.extra_attributes = {} if extra_attributes.blank? - self.extra_attributes[key] = value + extra_attributes[key] = value end def extra_attributes_load(key) - self.extra_attributes[key] unless extra_attributes.blank? + extra_attributes[key] if extra_attributes.present? end end diff --git a/app/models/network_service.rb b/app/models/network_service.rb index 372e8e998dd..96d45444d87 100644 --- a/app/models/network_service.rb +++ b/app/models/network_service.rb @@ -10,7 +10,7 @@ class NetworkService < ApplicationRecord belongs_to :cloud_tenant belongs_to :orchestration_stack - has_many :network_service_entries, :foreign_key => :network_service_id, :dependent => :destroy + has_many :network_service_entries, :dependent => :destroy alias entries network_service_entries has_many :security_policy_rule_network_services, :dependent => :destroy diff --git a/app/models/notification.rb b/app/models/notification.rb index 723d8032cf9..44d177e83dc 100644 --- a/app/models/notification.rb +++ b/app/models/notification.rb @@ -9,12 +9,11 @@ class Notification < ApplicationRecord has_many :recipients, :class_name => "User", :through => :notification_recipients, :source => :user accepts_nested_attributes_for :notification_recipients + before_save :backup_subject_name before_create :set_notification_recipients # Do not emit notifications if they are not enabled for the server after_commit :emit_message, :on => :create - before_save :backup_subject_name - serialize :options, Hash validate :complete_bindings @@ -27,8 +26,10 @@ def type=(typ) def self.emit_for_event(event) return unless NotificationType.names.include?(event.event_type) + type = NotificationType.find_by(:name => event.event_type) return unless type.enabled? + Notification.create(:notification_type => type, :options => event.full_data, :subject => event.target) @@ -49,6 +50,7 @@ def seen_by_all_recipients? 
def self.notification_text(name, message_params) return unless message_params && NotificationType.names.include?(name) + type = NotificationType.find_by(:name => name) type.message % message_params end @@ -61,11 +63,12 @@ def complete_bindings # 1. Deprecate now # 2. Fail validation going forward via errors.add(error_args) error_args = [:options, "text bindings for notification_type: '#{notification_type.name}' failed with error: '#{e.message}' with options: '#{options.inspect}' and message #{notification_type.message.inspect}. Next release will not allow a notification without complete bindings."] - ActiveSupport::Deprecation.warn(error_args.join(' '), caller_locations[1..-1].reject {|location| location.label.include?("emit_for_event") }) + ActiveSupport::Deprecation.warn(error_args.join(' '), caller_locations[1..-1].reject { |location| location.label.include?("emit_for_event") }) end def emit_message return unless ::Settings.server.asynchronous_notifications + notification_recipients.pluck(:id, :user_id).each do |id, user| ActionCable.server.broadcast("notifications_#{user}", to_h.merge(:id => id.to_s)) end @@ -78,16 +81,17 @@ def set_notification_recipients Rbac.filtered_object(subject, :user => User.find(subscriber_id)).blank? end end - self.notification_recipients_attributes = subscribers.collect { |id| {:user_id => id } } + self.notification_recipients_attributes = subscribers.collect { |id| {:user_id => id} } end def backup_subject_name return unless subject + backup_name = (subject.try(:name) || subject.try(:description)) # Note, options are read in text_bindings_dynamic and used in text_bindings # if the subject is no longer there such as when a vm is deleted. - self.options[:subject] = backup_name if backup_name + options[:subject] = backup_name if backup_name end def text_bindings @@ -96,10 +100,12 @@ def text_bindings next unless value # Set the link based on the notification_type.link_to - result[:link] = { - :id => value.id, - :model => value.class.name - } if notification_type.link_to.try(:to_sym) == key + if notification_type.link_to.try(:to_sym) == key + result[:link] = { + :id => value.id, + :model => value.class.name + } + end result[key] = { :text => value.try(:name) || value.try(:description) diff --git a/app/models/notification_type.rb b/app/models/notification_type.rb index 3127b7df25a..ba50e1eaaf7 100644 --- a/app/models/notification_type.rb +++ b/app/models/notification_type.rb @@ -8,8 +8,8 @@ class NotificationType < ApplicationRecord AUDIENCE_NONE = 'none'.freeze has_many :notifications validates :message, :presence => true - validates :level, :inclusion => { :in => %w(success error warning info) } - validates :link_to, :inclusion => { :in => %w(subject initiator cause) }, :allow_blank => true + validates :level, :inclusion => {:in => %w[success error warning info]} + validates :link_to, :inclusion => {:in => %w[subject initiator cause]}, :allow_blank => true validates :audience, :inclusion => { :in => [AUDIENCE_USER, AUDIENCE_GROUP, AUDIENCE_TENANT, AUDIENCE_GLOBAL, AUDIENCE_SUPERADMIN, AUDIENCE_NONE] } diff --git a/app/models/openscap_result.rb b/app/models/openscap_result.rb index 720ba9f800b..4db22d47af9 100644 --- a/app/models/openscap_result.rb +++ b/app/models/openscap_result.rb @@ -59,6 +59,7 @@ def ascii8bit_to_utf8(string) def with_openscap_arf(raw) return unless self.class.openscap_available? + begin OpenSCAP.oscap_init # ARF - nist standardized 'Asset Reporting Format' Full representation if a scap scan result. 
@@ -72,16 +73,17 @@ def with_openscap_arf(raw) def with_openscap_objects(raw) raise "no block given" unless block_given? + with_openscap_arf(raw) do |arf| - begin - test_results = arf.test_result - source_datastream = arf.report_request - bench_source = source_datastream.select_checklist! - benchmark = OpenSCAP::Xccdf::Benchmark.new(bench_source) - yield(test_results.rr, benchmark.items) - ensure - [benchmark, source_datastream, test_results].each { |obj| obj.try(:destroy) } - end + + test_results = arf.test_result + source_datastream = arf.report_request + bench_source = source_datastream.select_checklist! + benchmark = OpenSCAP::Xccdf::Benchmark.new(bench_source) + yield(test_results.rr, benchmark.items) + ensure + [benchmark, source_datastream, test_results].each { |obj| obj.try(:destroy) } + end end end diff --git a/app/models/orchestration_stack.rb b/app/models/orchestration_stack.rb index 510290f7c7b..6e1768b0dc6 100644 --- a/app/models/orchestration_stack.rb +++ b/app/models/orchestration_stack.rb @@ -21,8 +21,6 @@ class OrchestrationStack < ApplicationRecord belongs_to :tenant belongs_to :cloud_tenant - has_many :authentication_orchestration_stacks - has_many :authentications, :through => :authentication_orchestration_stacks has_many :parameters, :dependent => :destroy, :foreign_key => :stack_id, :class_name => "OrchestrationStackParameter" has_many :outputs, :dependent => :destroy, :foreign_key => :stack_id, :class_name => "OrchestrationStackOutput" has_many :resources, :dependent => :destroy, :foreign_key => :stack_id, :class_name => "OrchestrationStackResource" @@ -52,9 +50,9 @@ class OrchestrationStack < ApplicationRecord scope :without_type, ->(type) { where.not(:type => type) } - alias_method :orchestration_stack_parameters, :parameters - alias_method :orchestration_stack_outputs, :outputs - alias_method :orchestration_stack_resources, :resources + alias orchestration_stack_parameters parameters + alias orchestration_stack_outputs outputs + alias orchestration_stack_resources resources supports :retire diff --git a/app/models/orchestration_stack/status.rb b/app/models/orchestration_stack/status.rb index 47cb97dd44d..0e0d97f93f2 100644 --- a/app/models/orchestration_stack/status.rb +++ b/app/models/orchestration_stack/status.rb @@ -1,7 +1,6 @@ class OrchestrationStack class Status - attr_accessor :status - attr_accessor :reason + attr_accessor :status, :reason def initialize(status, reason) self.status = status diff --git a/app/models/orchestration_template.rb b/app/models/orchestration_template.rb index acddcf985e7..28c3f2273ca 100644 --- a/app/models/orchestration_template.rb +++ b/app/models/orchestration_template.rb @@ -14,7 +14,7 @@ class OrchestrationTemplate < ApplicationRecord validates :md5, :uniqueness_when_changed => {:scope => :draft, :message => "of content already exists (content must be unique)"}, :if => :unique_md5? - validates_presence_of :name + validates :name, :presence => true scope :orderable, -> { where(:orderable => true) } @@ -220,6 +220,7 @@ def with_universal_newline(text) def check_not_in_use return true unless in_use? 
+ errors.add(:base, "Cannot delete the template while it is used by some orchestration stacks") throw :abort end diff --git a/app/models/orchestration_template/orchestration_parameter.rb b/app/models/orchestration_template/orchestration_parameter.rb index 54d3fdcea7c..697a8a6ed3f 100644 --- a/app/models/orchestration_template/orchestration_parameter.rb +++ b/app/models/orchestration_template/orchestration_parameter.rb @@ -20,19 +20,12 @@ class OrchestrationParameter # comma_delimited_list # boolean - attr_accessor :name - attr_accessor :label - attr_accessor :description - attr_accessor :data_type - attr_accessor :default_value - attr_accessor :hidden - attr_accessor :required - attr_accessor :reconfigurable + attr_accessor :name, :label, :description, :data_type, :default_value, :hidden, :required, :reconfigurable attr_writer :constraints def initialize(hash = {}) @reconfigurable = true - hash.each { |key, value| public_send("#{key}=", value) } + hash.each { |key, value| public_send(:"#{key}=", value) } end def constraints diff --git a/app/models/orchestration_template/orchestration_parameter_allowed.rb b/app/models/orchestration_template/orchestration_parameter_allowed.rb index 3974ab1da18..0a2203bcf62 100644 --- a/app/models/orchestration_template/orchestration_parameter_allowed.rb +++ b/app/models/orchestration_template/orchestration_parameter_allowed.rb @@ -1,6 +1,5 @@ class OrchestrationTemplate class OrchestrationParameterAllowed < OrchestrationParameterConstraint - attr_accessor :allowed_values - attr_accessor :allow_multiple + attr_accessor :allowed_values, :allow_multiple end end diff --git a/app/models/orchestration_template/orchestration_parameter_constraint.rb b/app/models/orchestration_template/orchestration_parameter_constraint.rb index 1792913e2eb..e4bd742b3d8 100644 --- a/app/models/orchestration_template/orchestration_parameter_constraint.rb +++ b/app/models/orchestration_template/orchestration_parameter_constraint.rb @@ -3,7 +3,7 @@ class OrchestrationParameterConstraint attr_accessor :description def initialize(hash = {}) - hash.each { |key, value| public_send("#{key}=", value) } + hash.each { |key, value| public_send(:"#{key}=", value) } end end end diff --git a/app/models/orchestration_template/orchestration_parameter_group.rb b/app/models/orchestration_template/orchestration_parameter_group.rb index dbf3064b42e..0b84bb0351d 100644 --- a/app/models/orchestration_template/orchestration_parameter_group.rb +++ b/app/models/orchestration_template/orchestration_parameter_group.rb @@ -1,11 +1,9 @@ class OrchestrationTemplate class OrchestrationParameterGroup - attr_accessor :description - attr_accessor :label - attr_accessor :parameters + attr_accessor :description, :label, :parameters def initialize(hash = {}) - hash.each { |key, value| public_send("#{key}=", value) } + hash.each { |key, value| public_send(:"#{key}=", value) } end end end diff --git a/app/models/orchestration_template/orchestration_parameter_length.rb b/app/models/orchestration_template/orchestration_parameter_length.rb index a30958c4f39..7166d0552e1 100644 --- a/app/models/orchestration_template/orchestration_parameter_length.rb +++ b/app/models/orchestration_template/orchestration_parameter_length.rb @@ -1,6 +1,5 @@ class OrchestrationTemplate class OrchestrationParameterLength < OrchestrationParameterConstraint - attr_accessor :min_length - attr_accessor :max_length + attr_accessor :min_length, :max_length end end diff --git a/app/models/orchestration_template/orchestration_parameter_range.rb 
b/app/models/orchestration_template/orchestration_parameter_range.rb index 02801595ce1..8b9b7a7c652 100644 --- a/app/models/orchestration_template/orchestration_parameter_range.rb +++ b/app/models/orchestration_template/orchestration_parameter_range.rb @@ -1,6 +1,5 @@ class OrchestrationTemplate class OrchestrationParameterRange < OrchestrationParameterConstraint - attr_accessor :min_value - attr_accessor :max_value + attr_accessor :min_value, :max_value end end diff --git a/app/models/orchestration_template/orchestration_resource.rb b/app/models/orchestration_template/orchestration_resource.rb index 8c5c52736d0..7abbef0e13c 100644 --- a/app/models/orchestration_template/orchestration_resource.rb +++ b/app/models/orchestration_template/orchestration_resource.rb @@ -1,10 +1,9 @@ class OrchestrationTemplate class OrchestrationResource - attr_accessor :name - attr_accessor :type + attr_accessor :name, :type def initialize(hash = {}) - hash.each { |key, value| public_send("#{key}=", value) } + hash.each { |key, value| public_send(:"#{key}=", value) } end end end diff --git a/app/models/partition.rb b/app/models/partition.rb index fb4d236f10d..c838e77014f 100644 --- a/app/models/partition.rb +++ b/app/models/partition.rb @@ -6,8 +6,8 @@ class Partition < ApplicationRecord p = Partition.quoted_table_name v = Volume.quoted_table_name Volume.select("DISTINCT #{v}.*") - .joins("JOIN #{p} ON #{v}.hardware_id = #{p}.hardware_id AND #{v}.volume_group = #{p}.volume_group") - .where("#{p}.id" => id) + .joins("JOIN #{p} ON #{v}.hardware_id = #{p}.hardware_id AND #{v}.volume_group = #{p}.volume_group") + .where("#{p}.id" => id) }, :foreign_key => :volume_group virtual_column :aligned, :type => :boolean @@ -16,6 +16,7 @@ def volume_group # Override volume_group getter to prevent the special physical linkage from coming through vg = read_attribute(:volume_group) return nil if vg.respond_to?(:starts_with?) && vg.starts_with?(Volume::PHYSICAL_VOLUME_GROUP) + vg end @@ -123,6 +124,7 @@ def volume_group def self.partition_type_name(partition_type) return PARTITION_TYPE_NAMES[partition_type] if PARTITION_TYPE_NAMES.key?(partition_type) + UNKNOWN_PARTITION_TYPE end @@ -142,5 +144,5 @@ def aligned? # The alignment of hidden volumes affects the performance of the logical volumes that are based on them. start_address % alignment_boundary == 0 end - alias_method :aligned, :aligned? + alias aligned aligned? end diff --git a/app/models/patch.rb b/app/models/patch.rb index 3ee1b02c6dd..ec5b50005f1 100644 --- a/app/models/patch.rb +++ b/app/models/patch.rb @@ -18,7 +18,7 @@ def self.refresh_patches(host, hashes) end def self.process_array(parent, hashes) - EmsRefresh.save_patches_inventory(parent, hashes) unless hashes.blank? + EmsRefresh.save_patches_inventory(parent, hashes) if hashes.present? end def self.highest_patch_level diff --git a/app/models/pglogical_subscription.rb b/app/models/pglogical_subscription.rb index a85ff53d164..3f6d026f935 100644 --- a/app/models/pglogical_subscription.rb +++ b/app/models/pglogical_subscription.rb @@ -50,11 +50,11 @@ def save def self.save_all!(subscription_list) errors = [] subscription_list.each do |s| - begin - s.save!(false) - rescue => e - errors << "Failed to save subscription to #{s.host}: #{e.message}" - end + + s.save!(false) + rescue => e + errors << "Failed to save subscription to #{s.host}: #{e.message}" + end EvmDatabase.restart_failover_monitor_service_queue @@ -62,6 +62,7 @@ def self.save_all!(subscription_list) unless errors.empty? 
raise errors.join("\n") end + subscription_list end @@ -152,7 +153,7 @@ def self.dsn_attributes(dsn) attrs = PG::DSNParser.parse(dsn) attrs.select! { |k, _v| [:dbname, :host, :user, :port].include?(k) } port = attrs.delete(:port) - attrs[:port] = port.to_i unless port.blank? + attrs[:port] = port.to_i if port.present? attrs end private_class_method :dsn_attributes @@ -203,6 +204,7 @@ def safe_delete self.class.with_connection_error_handling { pglogical.drop_subscription(id, true) } rescue PG::InternalError => e raise unless e.message =~ /could not connect to publisher/ || e.message =~ /replication slot .* does not exist/ + connection.transaction do disable self.class.with_connection_error_handling do @@ -235,6 +237,7 @@ def update_subscription # sets this instance's password field to the one in the subscription dsn in the database def find_password return password if password.present? + s = subscription_attributes.symbolize_keys dsn_hash = PG::DSNParser.parse(s.delete(:subscription_dsn)) self.password = dsn_hash[:password] @@ -260,7 +263,7 @@ def create_subscription end self.class.with_connection_error_handling do - pglogical.create_subscription(subscription, conn_info_hash, [MiqPglogical::PUBLICATION_NAME], create_slot: false).check + pglogical.create_subscription(subscription, conn_info_hash, [MiqPglogical::PUBLICATION_NAME], :create_slot => false).check end self end @@ -289,11 +292,9 @@ def remote_region_lsn with_remote_connection(5.seconds) { |conn| conn.xlog_location } end - def with_remote_connection(connect_timeout = 0) + def with_remote_connection(connect_timeout = 0, &block) find_password - MiqRegionRemote.with_remote_connection(host, port || 5432, user, decrypted_password, dbname, "postgresql", connect_timeout) do |conn| - yield conn - end + MiqRegionRemote.with_remote_connection(host, port || 5432, user, decrypted_password, dbname, "postgresql", connect_timeout, &block) end def with_remote_pglogical_client(connect_timeout = 0) diff --git a/app/models/physical_chassis.rb b/app/models/physical_chassis.rb index 08909e7bd1e..cff55be815d 100644 --- a/app/models/physical_chassis.rb +++ b/app/models/physical_chassis.rb @@ -8,7 +8,7 @@ class PhysicalChassis < ApplicationRecord belongs_to :ext_management_system, :foreign_key => :ems_id, :inverse_of => :physical_chassis, :class_name => "ManageIQ::Providers::PhysicalInfraManager" - belongs_to :physical_rack, :foreign_key => :physical_rack_id, :inverse_of => :physical_chassis + belongs_to :physical_rack, :inverse_of => :physical_chassis belongs_to :parent_physical_chassis, :class_name => "PhysicalChassis", :inverse_of => :child_physical_chassis diff --git a/app/models/physical_disk.rb b/app/models/physical_disk.rb index 66407aec67b..a1767f91225 100644 --- a/app/models/physical_disk.rb +++ b/app/models/physical_disk.rb @@ -1,6 +1,6 @@ class PhysicalDisk < ApplicationRecord - belongs_to :physical_storage, :foreign_key => :physical_storage_id, :inverse_of => :physical_disks - belongs_to :canister, :foreign_key => :canister_id, :inverse_of => :physical_disks + belongs_to :physical_storage, :inverse_of => :physical_disks + belongs_to :canister, :inverse_of => :physical_disks acts_as_miq_taggable end diff --git a/app/models/physical_server.rb b/app/models/physical_server.rb index 36059b27271..80b47fbfcbb 100644 --- a/app/models/physical_server.rb +++ b/app/models/physical_server.rb @@ -23,8 +23,8 @@ class PhysicalServer < ApplicationRecord validates :vendor, :inclusion =>{:in => VENDOR_TYPES} belongs_to :ext_management_system, :foreign_key => 
:ems_id, :inverse_of => :physical_servers, :class_name => "ManageIQ::Providers::PhysicalInfraManager" - belongs_to :physical_rack, :foreign_key => :physical_rack_id, :inverse_of => :physical_servers - belongs_to :physical_chassis, :foreign_key => :physical_chassis_id, :inverse_of => :physical_servers + belongs_to :physical_rack, :inverse_of => :physical_servers + belongs_to :physical_chassis, :inverse_of => :physical_servers has_one :computer_system, :as => :managed_entity, :dependent => :destroy has_one :hardware, :through => :computer_system diff --git a/app/models/physical_server_provision_task/state_machine.rb b/app/models/physical_server_provision_task/state_machine.rb index 83149c526af..a72d6d24454 100644 --- a/app/models/physical_server_provision_task/state_machine.rb +++ b/app/models/physical_server_provision_task/state_machine.rb @@ -1,6 +1,7 @@ module PhysicalServerProvisionTask::StateMachine def run_provision raise MiqException::MiqProvisionError, "Unable to find #{model_class} with id #{source_id.inspect}" if source.blank? + dump_obj(options, "MIQ(#{self.class.name}##{__method__}) options: ", $log, :info) signal :start_provisioning end diff --git a/app/models/physical_storage.rb b/app/models/physical_storage.rb index 7653642903b..e09a1e8c1cc 100644 --- a/app/models/physical_storage.rb +++ b/app/models/physical_storage.rb @@ -166,9 +166,9 @@ def raw_update_physical_storage(_options = {}) def event_where_clause(assoc = :ems_events) case assoc.to_sym when :ems_events, :event_streams - return ["#{events_table_name(assoc)}.physical_storage_id = ?", id] + ["#{events_table_name(assoc)}.physical_storage_id = ?", id] when :policy_events - return ["target_id = ? and target_class = ? ", id, self.class.base_class.name] + ["target_id = ? and target_class = ? ", id, self.class.base_class.name] end end end diff --git a/app/models/picture.rb b/app/models/picture.rb index caf001d1a93..309556fc85f 100644 --- a/app/models/picture.rb +++ b/app/models/picture.rb @@ -6,7 +6,7 @@ class Picture < ApplicationRecord virtual_has_one :image_href, :class_name => "String" - URL_ROOT = Rails.root.join("public").to_s.freeze + URL_ROOT = Rails.public_path.to_s.freeze DEFAULT_DIRECTORY = File.join(URL_ROOT, "pictures").freeze def self.directory @@ -50,6 +50,7 @@ def size def basename @basename ||= begin raise _("must have a numeric id") unless id.kind_of?(Numeric) + "#{id}.#{extension}" end end diff --git a/app/models/policy_event.rb b/app/models/policy_event.rb index 0ef4730fa2f..6aa236f39cd 100644 --- a/app/models/policy_event.rb +++ b/app/models/policy_event.rb @@ -3,7 +3,7 @@ class PolicyEvent < ApplicationRecord belongs_to :miq_event_definition belongs_to :miq_policy - has_many :contents, :class_name => "PolicyEventContent", :dependent => :destroy + has_many :contents, :class_name => "PolicyEventContent", :dependent => :destroy virtual_has_many :miq_actions, :uses => {:contents => :resource} virtual_has_many :miq_policy_sets, :uses => {:contents => :resource} @@ -25,7 +25,7 @@ def self.create_events(target, event, result) :target_class => target.class.base_class.name, :target_name => target.name, :chain_id => chain_id - # TODO: username, + # TODO: username, ) pe.host_id = target.try(:host)&.id diff --git a/app/models/provider.rb b/app/models/provider.rb index 271d91295a8..24b68bd0954 100644 --- a/app/models/provider.rb +++ b/app/models/provider.rb @@ -52,6 +52,7 @@ def default_endpoint def with_provider_connection(options = {}) raise _("no block given") unless block_given? 
+ _log.info("Connecting through #{self.class.name}: [#{name}]") yield connect(options) end @@ -59,7 +60,7 @@ def with_provider_connection(options = {}) def my_zone zone.try(:name).presence || MiqServer.my_zone end - alias_method :zone_name, :my_zone + alias zone_name my_zone def refresh_ems(opts = {}) if missing_credentials? @@ -68,6 +69,7 @@ def refresh_ems(opts = {}) unless authentication_status_ok? raise _("Provider failed last authentication check") end + managers.flat_map { |manager| EmsRefresh.queue_refresh(manager, nil, opts) } end @@ -83,7 +85,7 @@ def destroy_queue :name => msg, :state => MiqTask::STATE_QUEUED, :status => MiqTask::STATUS_OK, - :message => msg, + :message => msg ) self.class._queue_task('destroy', [id], task.id) task.id diff --git a/app/models/provider_tag_mapping.rb b/app/models/provider_tag_mapping.rb index eb66255adad..e604c6a5951 100644 --- a/app/models/provider_tag_mapping.rb +++ b/app/models/provider_tag_mapping.rb @@ -35,6 +35,7 @@ def self.mapper(mapper_parameters = {}) # TODO: expensive. def self.controls_tag?(tag) return false unless tag.classification.try(:read_only) # never touch user-assignable tags. + tag_ids = [tag.id, tag.category.tag_id].uniq where(:tag_id => tag_ids).any? end diff --git a/app/models/provider_tag_mapping/mapper.rb b/app/models/provider_tag_mapping/mapper.rb index b116611ac5c..31c08b36171 100644 --- a/app/models/provider_tag_mapping/mapper.rb +++ b/app/models/provider_tag_mapping/mapper.rb @@ -1,4 +1,5 @@ # coding: utf-8 + class ProviderTagMapping # Performs most of the work of ProviderTagMapping - holds current mappings, # computes applicable tags, and creates/finds Tag records - except actually [un]assigning. @@ -26,7 +27,7 @@ def initialize(mappings, mapper_parameters = {}) :model_class => Tag, # more than needed to identify, doesn't matter much as we use custom save :manager_ref => [:category_tag_id, :entry_name, :entry_description], - #:arel => Tag.all, + # :arel => Tag.all, :custom_save_block => lambda do |_ems, inv_collection| # TODO: O(N) queries, optimize. inv_collection.each do |inv_object| @@ -38,7 +39,7 @@ def initialize(mappings, mapper_parameters = {}) @specific_tags_collection = ::InventoryRefresh::InventoryCollection.new( :name => :mapped_specific_tags, :model_class => Tag, - :manager_ref => [:id], + :manager_ref => [:id] ) end @@ -116,19 +117,17 @@ def map_name_type_value(name, type, value) any_value = @mappings[[name, type, nil]] || [] if !specific_value.empty? specific_value.map { |tag_id| emit_specific_reference(tag_id) } + elsif value.empty? + [] else - if value.empty? - [] # Don't map empty value to any tag. - else - # Note: if the way we compute `entry_name` changes, + # NOTE: if the way we compute `entry_name` changes, # consider what will happen to previously created tags. - any_value.map do |tag_id| - emit_tag_reference( - :category_tag_id => tag_id, - :entry_name => Classification.sanitize_name(value), - :entry_description => value, - ) - end + any_value.map do |tag_id| + emit_tag_reference( + :category_tag_id => tag_id, + :entry_name => Classification.sanitize_name(value), + :entry_description => value + ) end end end @@ -150,13 +149,13 @@ def find_or_create_tag(tag_hash) entry = category.find_entry_by_name(tag_hash[:entry_name]) unless entry category.lock(:exclusive) do - begin - entry = category.add_entry(:name => tag_hash[:entry_name], - :description => tag_hash[:entry_description]) - entry.save! 
- rescue ActiveRecord::RecordInvalid - entry = category.find_entry_by_name(tag_hash[:entry_name]) - end + + entry = category.add_entry(:name => tag_hash[:entry_name], + :description => tag_hash[:entry_description]) + entry.save! + rescue ActiveRecord::RecordInvalid + entry = category.find_entry_by_name(tag_hash[:entry_name]) + end end entry.tag_id diff --git a/app/models/pxe_image.rb b/app/models/pxe_image.rb index 93e64e5117e..136cf95a895 100644 --- a/app/models/pxe_image.rb +++ b/app/models/pxe_image.rb @@ -8,7 +8,7 @@ class PxeImage < ApplicationRecord acts_as_miq_taggable before_validation do - if self.default_for_windows_changed? && self.default_for_windows? + if default_for_windows_changed? && default_for_windows? pxe_server.pxe_images.where(:default_for_windows => true).update_all(:default_for_windows => false) end true @@ -49,7 +49,7 @@ def create_files_on_server(pxe_server, mac_address, customization_template = nil # arguments from final kernel command line. kernel_args = customization_template&.class&.kernel_args( pxe_server, self, mac_address - ) || { :ks => nil, :ksdevice => nil } + ) || {:ks => nil, :ksdevice => nil} contents = build_pxe_contents(kernel_args) pxe_server.write_file(filepath, contents) diff --git a/app/models/pxe_image_ipxe.rb b/app/models/pxe_image_ipxe.rb index a5d296386e6..fe0065bfb21 100644 --- a/app/models/pxe_image_ipxe.rb +++ b/app/models/pxe_image_ipxe.rb @@ -7,7 +7,7 @@ def build_pxe_contents(kernel_args) end def self.pxe_server_filename(mac_address) - mac_address.gsub(/:/, "-").downcase.strip + mac_address.tr(':', "-").downcase.strip end def self.parse_contents(contents, label) diff --git a/app/models/pxe_image_pxelinux.rb b/app/models/pxe_image_pxelinux.rb index 0057868dfc9..74a276ed18d 100644 --- a/app/models/pxe_image_pxelinux.rb +++ b/app/models/pxe_image_pxelinux.rb @@ -1,23 +1,23 @@ class PxeImagePxelinux < PxeImage def build_pxe_contents(kernel_args) options = super - options.insert(0, "initrd=#{initrd} ") unless initrd.blank? + options.insert(0, "initrd=#{initrd} ") if initrd.present? - pxe = <<-PXE -timeout 0 -default #{name} + pxe = <<~PXE + timeout 0 + default #{name} -label #{name} - menu label #{description} -PXE + label #{name} + menu label #{description} + PXE pxe << " kernel #{kernel}\n" unless kernel.nil? - pxe << " append #{options}\n" unless options.blank? + pxe << " append #{options}\n" if options.present? pxe << "\n" end def self.pxe_server_filename(mac_address) - "01-#{mac_address.gsub(/:/, "-").downcase.strip}" + "01-#{mac_address.tr(':', "-").downcase.strip}" end def self.parse_contents(contents, _label = nil) diff --git a/app/models/pxe_menu.rb b/app/models/pxe_menu.rb index 5b3d7f6b751..52e2572b3b9 100644 --- a/app/models/pxe_menu.rb +++ b/app/models/pxe_menu.rb @@ -6,7 +6,8 @@ class PxeMenu < ApplicationRecord def self.class_from_contents(contents) line = contents.to_s.each_line { |l| break l } - return PxeMenuIpxe if line =~ /^#!\s*ipxe\s*$/ + return PxeMenuIpxe if /^#!\s*ipxe\s*$/.match?(line) + PxeMenuPxelinux end @@ -23,7 +24,7 @@ def synchronize klass = self.class.class_from_contents(contents) if klass != self.class - self.save! + save! 
# If sublass changes type to a different subclass pxe_images.destroy_all if self.class != PxeMenu diff --git a/app/models/pxe_menu_ipxe.rb b/app/models/pxe_menu_ipxe.rb index 1e973bb4919..0373d6b3b0c 100644 --- a/app/models/pxe_menu_ipxe.rb +++ b/app/models/pxe_menu_ipxe.rb @@ -34,6 +34,7 @@ def self.parse_labels(contents, labels) case key when :label next unless labels.include?(value) + current_item = {:label => value} when :kernel current_item[:kernel], current_item[:kernel_options] = parse_kernel(value) diff --git a/app/models/pxe_menu_pxelinux.rb b/app/models/pxe_menu_pxelinux.rb index c660fc5f874..515df98d8db 100644 --- a/app/models/pxe_menu_pxelinux.rb +++ b/app/models/pxe_menu_pxelinux.rb @@ -48,7 +48,7 @@ def self.parse_append(append) initrd = options.detect { |o| o.starts_with?("initrd=") } initrd &&= initrd[7..-1] - rejects = %w( initrd= ) + rejects = %w[initrd=] options.reject! { |o| o.blank? || rejects.any? { |r| o.starts_with?(r) } } return options.join(' '), initrd end diff --git a/app/models/pxe_server.rb b/app/models/pxe_server.rb index 821a9dc4608..01aabb02bb4 100644 --- a/app/models/pxe_server.rb +++ b/app/models/pxe_server.rb @@ -16,7 +16,7 @@ class PxeServer < ApplicationRecord has_many :pxe_menus, :dependent => :destroy has_many :pxe_images, :dependent => :destroy - has_many :advertised_pxe_images, -> { where("pxe_menu_id IS NOT NULL") }, :class_name => "PxeImage" + has_many :advertised_pxe_images, -> { where.not(:pxe_menu_id => nil) }, :class_name => "PxeImage" has_many :discovered_pxe_images, -> { where(:pxe_menu_id => nil) }, :class_name => "PxeImage" has_many :windows_images, :dependent => :destroy @@ -84,42 +84,42 @@ def sync_pxe_images current = pxe_images.where(:pxe_menu_id => nil).index_by { |i| [i.path, i.name] } with_depot do - begin - file_glob("#{pxe_directory}/*").each do |f| - next unless self.file_file?(f) - - relative_path = Pathname.new(f).relative_path_from(Pathname.new(pxe_directory)).to_s - - contents = file_read(f) - menu_class = PxeMenu.class_from_contents(contents) - image_class = menu_class.corresponding_image - image_list = image_class.parse_contents(contents, File.basename(f)) - - # Deal with multiple images with the same label in a file - incoming = image_list.group_by { |h| h[:label] } - incoming.each_key do |name| - array = incoming[name] - _log.warn("duplicate name <#{name}> in file <#{relative_path}> on PXE Server <#{self.name}>") if array.length > 1 - incoming[name] = array.first - end - incoming.each do |_name, ihash| - image = current.delete([relative_path, ihash[:label]]) - if image.nil? - image = image_class.new - pxe_images << image - end - stats[image.new_record? ? :adds : :updates] += 1 - - image.path = relative_path - image.parsed_contents = ihash - image.save! 
+ file_glob("#{pxe_directory}/*").each do |f| + next unless file_file?(f) + + relative_path = Pathname.new(f).relative_path_from(Pathname.new(pxe_directory)).to_s + + contents = file_read(f) + menu_class = PxeMenu.class_from_contents(contents) + image_class = menu_class.corresponding_image + image_list = image_class.parse_contents(contents, File.basename(f)) + + # Deal with multiple images with the same label in a file + incoming = image_list.group_by { |h| h[:label] } + incoming.each_key do |name| + array = incoming[name] + _log.warn("duplicate name <#{name}> in file <#{relative_path}> on PXE Server <#{self.name}>") if array.length > 1 + incoming[name] = array.first + end + + incoming.each do |_name, ihash| + image = current.delete([relative_path, ihash[:label]]) + if image.nil? + image = image_class.new + pxe_images << image end + stats[image.new_record? ? :adds : :updates] += 1 + + image.path = relative_path + image.parsed_contents = ihash + image.save! end - rescue => err - _log.error("Synchronizing PXE images on PXE Server [#{self.name}]: #{err.class.name}: #{err}") - _log.log_backtrace(err) end + rescue => err + _log.error("Synchronizing PXE images on PXE Server [#{self.name}]: #{err.class.name}: #{err}") + _log.log_backtrace(err) + end stats[:deletes] = current.length @@ -137,31 +137,31 @@ def sync_windows_images current = windows_images.index_by { |i| [i.path, i.index] } with_depot do - begin - file_glob("#{windows_images_directory}/*.wim").each do |f| - next unless self.file_file?(f) - path = Pathname.new(f).relative_path_from(Pathname.new(windows_images_directory)).to_s + file_glob("#{windows_images_directory}/*.wim").each do |f| + next unless file_file?(f) - wim_parser = WimParser.new(File.join(depot_root, f)) - wim_parser.xml_data["images"].each do |image_hash| - index = image_hash["index"] + path = Pathname.new(f).relative_path_from(Pathname.new(windows_images_directory)).to_s - image = current.delete([path, index]) || windows_images.build - stats[image.new_record? ? :adds : :updates] += 1 + wim_parser = WimParser.new(File.join(depot_root, f)) + wim_parser.xml_data["images"].each do |image_hash| + index = image_hash["index"] - image.update( - :name => image_hash["name"], - :description => image_hash["description"].blank? ? nil : image_hash["description"], - :path => path, - :index => index - ) - end + image = current.delete([path, index]) || windows_images.build + stats[image.new_record? ? :adds : :updates] += 1 + + image.update( + :name => image_hash["name"], + :description => image_hash["description"].presence, + :path => path, + :index => index + ) end - rescue => err - _log.error("Synchronizing Windows images on PXE Server [#{name}]: #{err.class.name}: #{err}") - _log.log_backtrace(err) end + rescue => err + _log.error("Synchronizing Windows images on PXE Server [#{name}]: #{err.class.name}: #{err}") + _log.log_backtrace(err) + end stats[:deletes] = current.length diff --git a/app/models/registry_item.rb b/app/models/registry_item.rb index e0624d32d67..94c2fd1d16d 100644 --- a/app/models/registry_item.rb +++ b/app/models/registry_item.rb @@ -69,7 +69,8 @@ def key_name end def image_name - return "registry_string_items" if !format.blank? && format.include?("_SZ") + return "registry_string_items" if format.present? 
&& format.include?("_SZ") + "registry_binary_items" end end diff --git a/app/models/relationship.rb b/app/models/relationship.rb index c4a1c9b9916..0e00578b052 100644 --- a/app/models/relationship.rb +++ b/app/models/relationship.rb @@ -27,6 +27,7 @@ def self.filter_by_resource_type(relationships, options) of_type = Array.wrap(options[:of_type]) except_type = Array.wrap(options[:except_type]) return relationships if of_type.empty? && except_type.empty? + if relationships.kind_of?(Array) || relationships.try(:loaded?) relationships.reject { |r| r.filtered?(of_type, except_type) } else @@ -39,7 +40,7 @@ def self.filter_by_resource_type(relationships, options) # def self.resource(relationship) - relationship.try!(:resource) + relationship&.resource end def self.resources(relationships) diff --git a/app/models/resource_action.rb b/app/models/resource_action.rb index 35bda5e15bd..dd34df0e0ca 100644 --- a/app/models/resource_action.rb +++ b/app/models/resource_action.rb @@ -20,6 +20,7 @@ def ensure_configuration_script_or_automate def readonly? return true if super + resource.readonly? if resource.kind_of?(ServiceTemplate) end @@ -66,15 +67,15 @@ def fqname :ae_instance => ae_instance ).to_s end - alias_method :ae_path, :fqname + alias ae_path fqname def ae_uri uri = ae_path - unless ae_attributes.blank? + if ae_attributes.present? uri << "?" uri << MiqAeEngine::MiqAeUri.hash2query(ae_attributes) end - unless ae_message.blank? + if ae_message.present? uri << "#" uri << ae_message end diff --git a/app/models/resource_action_serializer.rb b/app/models/resource_action_serializer.rb index f288a263528..ffb42b9197c 100644 --- a/app/models/resource_action_serializer.rb +++ b/app/models/resource_action_serializer.rb @@ -1,6 +1,6 @@ class ResourceActionSerializer < Serializer - EXCLUDED_ATTRIBUTES = %w(created_at updated_at id dialog_id resource_id - configuration_template_id configuration_template_type).freeze + EXCLUDED_ATTRIBUTES = %w[created_at updated_at id dialog_id resource_id + configuration_template_id configuration_template_type].freeze def serialize(resource_action) included_attributes(resource_action.attributes) diff --git a/app/models/resource_action_workflow.rb b/app/models/resource_action_workflow.rb index 2bb38cbdccb..f1300ccce5b 100644 --- a/app/models/resource_action_workflow.rb +++ b/app/models/resource_action_workflow.rb @@ -1,6 +1,5 @@ class ResourceActionWorkflow < MiqRequestWorkflow - attr_accessor :dialog - attr_accessor :request_options + attr_accessor :dialog, :request_options attr_reader :target @@ -36,7 +35,7 @@ def update_dialog_field_values(data) def process_request(state) result = {:errors => validate_dialog} - return result unless result[:errors].blank? + return result if result[:errors].present? values = create_values if create_request?(values) @@ -64,7 +63,7 @@ def validate_dialog def create_values create_values_hash.tap do |value| value[:src_id] = @target.id - value[:request_options] = request_options unless request_options.blank? + value[:request_options] = request_options if request_options.present? 
end end @@ -94,7 +93,7 @@ def create_values_hash end def init_field_hash - @dialog.dialog_fields.each_with_object({}) { |df, result| result[df.name] = df } + @dialog.dialog_fields.index_by { |df| df.name } end def set_value(name, value) diff --git a/app/models/resource_pool.rb b/app/models/resource_pool.rb index aaf4943a6eb..5101408c464 100644 --- a/app/models/resource_pool.rb +++ b/app/models/resource_pool.rb @@ -44,8 +44,8 @@ def resource_pools children(:of_type => 'ResourcePool') end - alias_method :add_resource_pool, :set_child - alias_method :remove_resource_pool, :remove_child + alias add_resource_pool set_child + alias remove_resource_pool remove_child def remove_all_resource_pools remove_all_children(:of_type => 'ResourcePool') @@ -60,17 +60,17 @@ def root_resource_pool def vms_and_templates children(:of_type => 'VmOrTemplate') end - alias_method :direct_vms_and_templates, :vms_and_templates + alias direct_vms_and_templates vms_and_templates def miq_templates vms_and_templates.select { |v| v.kind_of?(MiqTemplate) } end - alias_method :direct_miq_templates, :miq_templates + alias direct_miq_templates miq_templates def vms vms_and_templates.select { |v| v.kind_of?(Vm) } end - alias_method :direct_vms, :vms + alias direct_vms vms def vm_and_template_ids Relationship.resource_pairs_to_ids(child_ids(:of_type => 'VmOrTemplate')) @@ -79,12 +79,12 @@ def vm_and_template_ids def miq_template_ids miq_templates.collect(&:id) end - alias_method :direct_miq_template_ids, :miq_template_ids + alias direct_miq_template_ids miq_template_ids def vm_ids vms.collect(&:id) end - alias_method :direct_vm_ids, :vm_ids + alias direct_vm_ids vm_ids def total_direct_vms_and_templates child_count(:of_type => 'VmOrTemplate') @@ -104,8 +104,8 @@ def total_vms_and_templates descendant_count(:of_type => 'VmOrTemplate') end - alias_method :add_vm, :set_child - alias_method :remove_vm, :remove_child + alias add_vm set_child + alias remove_vm remove_child def remove_all_vms remove_all_children(:of_type => 'Vm') @@ -140,7 +140,7 @@ def parent_datacenter end def parent_folder - detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w(host vm).include?(a.name) } # TODO: Fix this to use EmsFolder#hidden? + detect_ancestor(:of_type => "EmsFolder") { |a| !a.kind_of?(Datacenter) && !%w[host vm].include?(a.name) } # TODO: Fix this to use EmsFolder#hidden? end # Overridden from AggregationMixin to provide hosts related to this RP @@ -182,8 +182,8 @@ def v_parent_folder p ? 
p.name : "" end - alias_method :v_direct_vms, :total_direct_vms - alias_method :v_direct_miq_templates, :total_direct_miq_templates + alias v_direct_vms total_direct_vms + alias v_direct_miq_templates total_direct_miq_templates alias total_vms v_total_vms alias total_miq_templates v_total_miq_templates diff --git a/app/models/scan_item/seeding.rb b/app/models/scan_item/seeding.rb index 2e1a456bac9..7282c792d2b 100644 --- a/app/models/scan_item/seeding.rb +++ b/app/models/scan_item/seeding.rb @@ -1,7 +1,7 @@ module ScanItem::Seeding extend ActiveSupport::Concern - SCAN_ITEMS_DIR = Rails.root.join("product", "scan_items") + SCAN_ITEMS_DIR = Rails.root.join("product/scan_items") # Default ScanItemSets SAMPLE_VM_PROFILE = {:name => "sample", :description => "VM Sample", :mode => 'Vm', :read_only => true}.freeze diff --git a/app/models/security_policy.rb b/app/models/security_policy.rb index 81c6e4ca082..e882b218159 100644 --- a/app/models/security_policy.rb +++ b/app/models/security_policy.rb @@ -10,7 +10,7 @@ class SecurityPolicy < ApplicationRecord belongs_to :cloud_tenant belongs_to :orchestration_stack - has_many :security_policy_rules, :foreign_key => :security_policy_id, :dependent => :destroy + has_many :security_policy_rules, :dependent => :destroy alias rules security_policy_rules virtual_total :rules_count, :security_policy_rules diff --git a/app/models/serializer.rb b/app/models/serializer.rb index d8ceebdc00e..12c45d4ae4a 100644 --- a/app/models/serializer.rb +++ b/app/models/serializer.rb @@ -3,6 +3,7 @@ class Serializer def included_attributes(attributes, all_attributes = false) return attributes if all_attributes + attributes.reject { |key, _| self.class::EXCLUDED_ATTRIBUTES.include?(key) } end end diff --git a/app/models/service.rb b/app/models/service.rb index 111df16de42..5deeb33fcf7 100644 --- a/app/models/service.rb +++ b/app/models/service.rb @@ -94,8 +94,8 @@ class Service < ApplicationRecord attribute :lifecycle_state, :default => 'unprovisioned' attribute :retired, :default => false - validates :visible, :inclusion => { :in => [true, false] } - validates :retired, :inclusion => { :in => [true, false] } + validates :visible, :inclusion => {:in => [true, false]} + validates :retired, :inclusion => {:in => [true, false]} scope :displayed, -> { where(:visible => true) } scope :retired, ->(bool = true) { where(:retired => bool) } @@ -113,7 +113,7 @@ def power_states end # renaming method from custom_actions_mixin - alias_method :custom_service_actions, :custom_actions + alias custom_service_actions custom_actions def custom_actions service_template ? service_template.custom_actions(self) : custom_service_actions(self) end @@ -129,6 +129,7 @@ def power_state 'off' if power_states_match?(:stop) else return 'on' if power_states_match?(:start) + 'off' if power_states_match?(:stop) end end @@ -290,6 +291,7 @@ def map_power_states(action) def update_power_status(action) expected_status = "#{action}_complete" return true if options[:power_status] == expected_status + options[:power_status] = expected_status update(:options => options) end @@ -300,21 +302,21 @@ def update_power_status(action) def process_group_action(action, group_idx, direction) each_group_resource(group_idx) do |svc_rsc| - begin - rsc = svc_rsc.resource - rsc_action = service_action(action, svc_rsc) - rsc_name = "#{rsc.class.name}:#{rsc.id}" + (rsc.respond_to?(:name) ? ":#{rsc.name}" : "") - if rsc_action.nil? 
- _log.info("Not Processing action for Service:<#{name}:#{id}>, RSC:<#{rsc_name}}> in Group Idx:<#{group_idx}>") - elsif rsc.respond_to?(rsc_action) - _log.info("Processing action <#{rsc_action}> for Service:<#{name}:#{id}>, RSC:<#{rsc_name}}> in Group Idx:<#{group_idx}>") - rsc.send(rsc_action) - else - _log.info("Skipping action <#{rsc_action}> for Service:<#{name}:#{id}>, RSC:<#{rsc.class.name}:#{rsc.id}> in Group Idx:<#{group_idx}>") - end - rescue => err - _log.error("Error while processing Service:<#{name}> Group Idx:<#{group_idx}> Resource<#{rsc_name}>. Message:<#{err}>") + + rsc = svc_rsc.resource + rsc_action = service_action(action, svc_rsc) + rsc_name = "#{rsc.class.name}:#{rsc.id}" + (rsc.respond_to?(:name) ? ":#{rsc.name}" : "") + if rsc_action.nil? + _log.info("Not Processing action for Service:<#{name}:#{id}>, RSC:<#{rsc_name}}> in Group Idx:<#{group_idx}>") + elsif rsc.respond_to?(rsc_action) + _log.info("Processing action <#{rsc_action}> for Service:<#{name}:#{id}>, RSC:<#{rsc_name}}> in Group Idx:<#{group_idx}>") + rsc.send(rsc_action) + else + _log.info("Skipping action <#{rsc_action}> for Service:<#{name}:#{id}>, RSC:<#{rsc.class.name}:#{rsc.id}> in Group Idx:<#{group_idx}>") end + rescue => err + _log.error("Error while processing Service:<#{name}> Group Idx:<#{group_idx}> Resource<#{rsc_name}>. Message:<#{err}>") + end # Setup processing for the next group @@ -362,12 +364,13 @@ def reconfigure_resource_action def reconfigure_dialog return nil unless supports?(:reconfigure) + resource_action = reconfigure_resource_action options = {:target => self, :reconfigure => true} workflow = ResourceActionWorkflow.new(self.options[:dialog], User.current_user, resource_action, options) - DialogSerializer.new.serialize(Array[workflow.dialog], true) + DialogSerializer.new.serialize([workflow.dialog], true) end def raise_final_process_event(action) @@ -434,7 +437,7 @@ def generate_chargeback_report(options = {}) end def chargeback_yaml - yaml = YAML.load_file(Rails.root.join('product', 'chargeback', 'chargeback_vm_monthly.yaml')) + yaml = YAML.load_file(Rails.root.join("product/chargeback/chargeback_vm_monthly.yaml")) yaml["db_options"][:options][:service_id] = id yaml["title"] = chargeback_report_name yaml @@ -457,14 +460,14 @@ def queue_chargeback_report_generation(options = {}) } MiqQueue.submit_job( - :service => "reporting", - :class_name => self.class.name, - :instance_id => id, - :task_id => task.id, + :service => "reporting", + :class_name => self.class.name, + :instance_id => id, + :task_id => task.id, :miq_task_id => task.id, :miq_callback => cb, - :method_name => "generate_chargeback_report", - :args => options + :method_name => "generate_chargeback_report", + :args => options ) _log.info("Added to queue: #{msg}") task diff --git a/app/models/service/dialog_properties/retirement.rb b/app/models/service/dialog_properties/retirement.rb index 0c8bfdaea2e..e794f13c0f2 100644 --- a/app/models/service/dialog_properties/retirement.rb +++ b/app/models/service/dialog_properties/retirement.rb @@ -1,7 +1,7 @@ class Service class DialogProperties class Retirement - RETIREMENT_WARN_FIELD_NAMES = %w(warn_on warn_in_days warn_in_hours warn_offset_days warn_offset_hours).freeze + RETIREMENT_WARN_FIELD_NAMES = %w[warn_on warn_in_days warn_in_hours warn_offset_days warn_offset_hours].freeze def initialize(options, user) @attributes = {} @@ -27,7 +27,7 @@ def parse_options field_name = 'dialog_service_retires_in_days' retires_in_duration(@options[field_name], :days) end - rescue 
StandardError + rescue $log.error("Error parsing dialog retirement property [#{field_name}] with value [#{@options[field_name].inspect}]. Error: #{$!}") end @@ -87,10 +87,10 @@ def time_now with_user_timezone { Time.zone.now.utc } end - def with_user_timezone + def with_user_timezone(&block) user = @user || User.current_user - user ? user.with_my_timezone { yield } : yield + user ? user.with_my_timezone(&block) : yield end end end diff --git a/app/models/service/linking_workflow.rb b/app/models/service/linking_workflow.rb index 9bd5b8347b5..4872f1358b4 100644 --- a/app/models/service/linking_workflow.rb +++ b/app/models/service/linking_workflow.rb @@ -46,14 +46,14 @@ def run_native_op queue_signal(:refresh) end end - alias_method :start, :run_native_op + alias start run_native_op def post_refresh _log.info("Enter") found_vms = linking_targets not_found_vms = options[:uid_ems_array] - found_vms.pluck(:uid_ems) - _log.warn("VMs not found for linking to service ID [#{service.id}], name [#{service.name}]: #{not_found_vms}") unless not_found_vms.blank? + _log.warn("VMs not found for linking to service ID [#{service.id}], name [#{service.name}]: #{not_found_vms}") if not_found_vms.present? service = linking_service found_vms.each { |vm| service.add_resource!(vm) } diff --git a/app/models/service_awx.rb b/app/models/service_awx.rb index 8d7a43f2907..c80d76607c4 100644 --- a/app/models/service_awx.rb +++ b/app/models/service_awx.rb @@ -3,10 +3,10 @@ class ServiceAwx < Service include ServiceConfigurationMixin include ServiceOrchestrationOptionsMixin - alias_method :job_template, :configuration_script - alias_method :job_template=, :configuration_script= - alias_method :job_options, :stack_options - alias_method :job_options=, :stack_options= + alias job_template configuration_script + alias job_template= configuration_script= + alias job_options stack_options + alias job_options= stack_options= def launch_job job_class = "#{job_template.class.module_parent.name}::#{job_template.class.stack_type}".constantize @@ -32,7 +32,7 @@ def job(_action = nil) def build_stack_options_from_dialog(dialog_options) {:extra_vars => extra_vars_from_dialog(dialog_options)}.tap do |launch_options| - launch_options[:limit] = dialog_options['dialog_limit'] unless dialog_options['dialog_limit'].blank? + launch_options[:limit] = dialog_options['dialog_limit'] if dialog_options['dialog_limit'].present? end end @@ -75,7 +75,7 @@ def service_manageiq_env { 'service' => href_slug }.merge(manageiq_env(evm_owner, miq_group, miq_request_task)) - .merge(request_options_extra_vars) + .merge(request_options_extra_vars) end def request_options_extra_vars diff --git a/app/models/service_container_template.rb b/app/models/service_container_template.rb index ae419de4a99..553c9200423 100644 --- a/app/models/service_container_template.rb +++ b/app/models/service_container_template.rb @@ -5,7 +5,7 @@ class ServiceContainerTemplate < ServiceGeneric def preprocess(action, new_options = {}) return unless action == ResourceAction::PROVISION - unless new_options.blank? + if new_options.present? _log.info("Override with new options:") $log.log_hashes(new_options) end @@ -87,11 +87,11 @@ def action_option_key(action) def parameters_from_dialog params = options[:dialog].each_with_object({}) do |(attr, val), obj| - var_key = attr.sub(/dialog_param_/, '') + var_key = attr.sub("dialog_param_", '') obj[var_key] = val unless var_key == attr end - params.blank? ? 
{} : params + (params.presence || {}) end def project_name(overrides) @@ -104,6 +104,7 @@ def project_name(overrides) project_name = new_project_name || existing_name raise _("A project is required for the container template provisioning") unless project_name + project_name end diff --git a/app/models/service_orchestration.rb b/app/models/service_orchestration.rb index 479ff5e701d..e3b148753cc 100644 --- a/app/models/service_orchestration.rb +++ b/app/models/service_orchestration.rb @@ -83,6 +83,7 @@ def build_stack_options_from_dialog(dialog_options) def indirect_vms return [] if orchestration_stack.nil? || orchestration_stack.new_record? + orchestration_stack.indirect_vms end @@ -98,11 +99,13 @@ def direct_vms def all_vms return [] if orchestration_stack.nil? || orchestration_stack.new_record? + orchestration_stack.vms end def add_resource(rsc, _options = {}) raise "Service Orchestration subclass does not support add_resource for #{rsc.class.name}" unless rsc.try(:valid_service_orchestration_resource) + super end @@ -131,6 +134,7 @@ def add_stack_to_resource def link_orchestration_template # some orchestration stacks do not have associations with their templates in their provider, we can link them here return if orchestration_stack.nil? || orchestration_stack.orchestration_template + orchestration_stack.update(:orchestration_template => orchestration_template) end diff --git a/app/models/service_order.rb b/app/models/service_order.rb index 06dfdeab7fa..624591f132d 100644 --- a/app/models/service_order.rb +++ b/app/models/service_order.rb @@ -2,9 +2,11 @@ class ServiceOrder < ApplicationRecord STATE_CART = 'cart'.freeze STATE_WISH = 'wish'.freeze STATE_ORDERED = 'ordered'.freeze - REQUEST_ATTRIBUTES = %w(id name approval_state request_state message - created_on fulfilled_on updated_on placed_on).freeze + REQUEST_ATTRIBUTES = %w[id name approval_state request_state message + created_on fulfilled_on updated_on placed_on].freeze + before_create :assign_user + after_create :create_order_name before_destroy :destroy_unprocessed_requests belongs_to :tenant belongs_to :user @@ -14,11 +16,8 @@ class ServiceOrder < ApplicationRecord validates :state, :uniqueness_when_changed => {:scope => [:user_id, :tenant_id]}, :if => :cart? validates :name, :presence => true, :on => :update - before_create :assign_user - after_create :create_order_name - def initialize(*args) - raise NotImplementedError, _("must be implemented in a subclass") if self.class == ServiceOrder + raise NotImplementedError, _("must be implemented in a subclass") if instance_of?(ServiceOrder) super end @@ -51,17 +50,18 @@ def cart? def checkout raise "Invalid operation [checkout] for Service Order in state [#{state}]" if ordered? + _log.info("Service Order checkout for service: #{name}") process_checkout(miq_requests) update(:state => STATE_ORDERED, - :placed_at => Time.zone.now) + :placed_at => Time.zone.now) end def checkout_immediately _log.info("Service Order checkout immediately for service: #{name}") process_checkout(miq_requests) update(:state => STATE_ORDERED, - :placed_at => Time.zone.now) + :placed_at => Time.zone.now) end def process_checkout(miq_requests) @@ -73,6 +73,7 @@ def process_checkout(miq_requests) def clear raise "Invalid operation [clear] for Service Order in state [#{state}]" if ordered? 
+ _log.info("Service Order clear for service: #{name}") destroy_unprocessed_requests end @@ -108,6 +109,7 @@ def self.order_immediately(request, requester) def deep_copy(new_attributes = {}) raise _("Cannot copy a service order in the %{state} state") % {:state => STATE_CART} if state == STATE_CART + dup.tap do |new_service_order| # Set it to nil - the after_create hook will give it the correct name new_service_order.name = nil @@ -117,7 +119,7 @@ def deep_copy(new_attributes = {}) request.class.send(:create, request.attributes.except(*REQUEST_ATTRIBUTES)) end new_attributes.each do |attr, value| - new_service_order.send("#{attr}=", value) if self.class.attribute_names.include?(attr.to_s) + new_service_order.send(:"#{attr}=", value) if self.class.attribute_names.include?(attr.to_s) end new_service_order.save! end @@ -127,6 +129,7 @@ def deep_copy(new_attributes = {}) def destroy_unprocessed_requests return if ordered? + miq_requests.destroy_all end end diff --git a/app/models/service_reconfigure_request.rb b/app/models/service_reconfigure_request.rb index b8c4bdeffd1..c5930befadb 100644 --- a/app/models/service_reconfigure_request.rb +++ b/app/models/service_reconfigure_request.rb @@ -2,8 +2,8 @@ class ServiceReconfigureRequest < MiqRequest TASK_DESCRIPTION = N_('Service Reconfigure') SOURCE_CLASS_NAME = 'Service' - validates_inclusion_of :request_state, :in => %w(pending finished) + ACTIVE_STATES, - :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, + :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} validate :must_have_user delegate :service_template, :to => :source, :allow_nil => true virtual_has_one :provision_dialog diff --git a/app/models/service_reconfigure_task.rb b/app/models/service_reconfigure_task.rb index e146e865620..fce1e37f870 100644 --- a/app/models/service_reconfigure_task.rb +++ b/app/models/service_reconfigure_task.rb @@ -25,7 +25,7 @@ def deliver_to_automate(req_type = request_type, zone = nil) :namespace => ra.ae_namespace, :class_name => ra.ae_class, :instance_name => ra.ae_instance, - :automate_message => ra.ae_message.blank? ? 'create' : ra.ae_message, + :automate_message => (ra.ae_message.presence || 'create'), :attrs => dialog_values, :user_id => get_user.id, :miq_group_id => get_user.current_group_id, @@ -40,7 +40,7 @@ def deliver_to_automate(req_type = request_type, zone = nil) :zone => zone, :tracking_label => tracking_label_id ) - update_and_notify_parent(:state => "pending", :status => "Ok", :message => "Automation Starting") + update_and_notify_parent(:state => "pending", :status => "Ok", :message => "Automation Starting") else update_and_notify_parent(:state => "finished", :status => "Ok", diff --git a/app/models/service_resource.rb b/app/models/service_resource.rb index bad2f55593a..b31484b4e15 100644 --- a/app/models/service_resource.rb +++ b/app/models/service_resource.rb @@ -37,6 +37,7 @@ def resource_description def virtual_column_resource_value(key) return "" if resource.nil? 
return "" unless resource.respond_to?(key) + resource.send(key) end end diff --git a/app/models/service_retire_request.rb b/app/models/service_retire_request.rb index 5c21a2dab1b..9c1565decf1 100644 --- a/app/models/service_retire_request.rb +++ b/app/models/service_retire_request.rb @@ -1,7 +1,7 @@ class ServiceRetireRequest < MiqRetireRequest TASK_DESCRIPTION = N_('Service Retire').freeze SOURCE_CLASS_NAME = 'Service'.freeze - ACTIVE_STATES = %w(retired) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[retired] + base_class::ACTIVE_STATES delegate :service_template, :to => :source, :allow_nil => true end diff --git a/app/models/service_retire_task.rb b/app/models/service_retire_task.rb index 5a7aef24bff..a08c0869bec 100644 --- a/app/models/service_retire_task.rb +++ b/app/models/service_retire_task.rb @@ -39,6 +39,7 @@ def create_retire_subtasks(parent_service, parent_task) parent_service.service_resources.collect do |svc_rsc| next if svc_rsc.resource.respond_to?(:retired?) && svc_rsc.resource.retired? next unless svc_rsc.resource.try(:retireable?) + # TODO: the next line deals with the filtering for provisioning # (https://github.com/ManageIQ/manageiq/blob/3921e87915b5a69937b9d4a70bb24ab71b97c165/app/models/service_template/filter.rb#L5) # which should be extended to retirement as part of later work @@ -61,7 +62,7 @@ def create_task(svc_rsc, parent_service, nh, parent_task) :src_ids => [svc_rsc.resource.id], :service_resource_id => svc_rsc.id, :parent_service_id => parent_service.id, - :parent_task_id => parent_task.id, + :parent_task_id => parent_task.id ) task.request_type = task_type.name.underscore[0..-6] task.source = svc_rsc.resource diff --git a/app/models/service_template.rb b/app/models/service_template.rb index c8e8d5ad868..719c704c40f 100644 --- a/app/models/service_template.rb +++ b/app/models/service_template.rb @@ -103,8 +103,8 @@ def self.all_catalog_item_types } ExtManagementSystem.subclasses_supporting(:catalog) - .flat_map(&:catalog_types) - .reduce(builtin_catalog_item_types, :merge) + .flat_map(&:catalog_types) + .reduce(builtin_catalog_item_types, :merge) end end @@ -192,6 +192,7 @@ def destroy def archive raise _("Cannot archive while in use") unless active_requests.empty? + archive! end @@ -218,7 +219,7 @@ def create_service(service_task, parent_svc = nil) nh['visible'] = nh.delete('display') if nh.key?('display') nh['options'][:dialog] = service_task.options[:dialog] - (nh.keys - Service.column_names + %w(created_at guid service_template_id updated_at id type prov_type)).each { |key| nh.delete(key) } + (nh.keys - Service.column_names + %w[created_at guid service_template_id updated_at id type prov_type]).each { |key| nh.delete(key) } # Hide child services by default nh['visible'] = false if parent_svc @@ -237,7 +238,7 @@ def create_service(service_task, parent_svc = nil) service_resources.each do |sr| nh = sr.attributes.dup - %w(id created_at updated_at service_template_id).each { |key| nh.delete(key) } + %w[id created_at updated_at service_template_id].each { |key| nh.delete(key) } svc.add_resource(sr.resource, nh) unless sr.resource.nil? 
end end @@ -269,11 +270,12 @@ def type_display end def create_tasks_for_service(service_task, parent_svc) - unless parent_svc - return [] unless self.class.include_service_template?(service_task, + if !parent_svc && !self.class.include_service_template?(service_task, service_task.source_id, parent_svc) + return [] end + svc = create_service(service_task, parent_svc) service_task.destination = svc @@ -288,7 +290,7 @@ def create_subtasks(parent_service_task, parent_service) scaling_min = child_svc_rsc.scaling_min 1.upto(scaling_min).each do |scaling_idx| nh = parent_service_task.attributes.dup - %w(id created_on updated_on type state status message).each { |key| nh.delete(key) } + %w[id created_on updated_on type state status message].each { |key| nh.delete(key) } nh['options'] = parent_service_task.options.dup nh['options'].delete(:child_tasks) # Initial Options[:dialog] to an empty hash so we do not pass down dialog values to child services tasks @@ -297,6 +299,7 @@ def create_subtasks(parent_service_task, parent_service) !self.class.include_service_template?(parent_service_task, child_svc_rsc.resource.id, parent_service) + new_task = parent_service_task.class.new(nh) new_task.options.merge!( :src_id => child_svc_rsc.resource.id, @@ -304,7 +307,7 @@ def create_subtasks(parent_service_task, parent_service) :scaling_min => scaling_min, :service_resource_id => child_svc_rsc.id, :parent_service_id => parent_service.id, - :parent_task_id => parent_service_task.id, + :parent_task_id => parent_service_task.id ) new_task.state = 'pending' new_task.status = 'Ok' @@ -321,6 +324,7 @@ def create_subtasks(parent_service_task, parent_service) def set_ownership(service, user) return if user.nil? + service.evm_owner = user if user.current_group $log.info("Setting Service Owning User to Name=#{user.name}, ID=#{user.id}, Group to Name=#{user.current_group.name}, ID=#{user.current_group.id}") @@ -399,13 +403,14 @@ def create_resource_actions(ae_endpoints) resource_action_list.each do |action| ae_endpoint = ae_endpoints[action[:param_key]] next unless ae_endpoint + build_resource_action(ae_endpoint, action) end save! end def self.create_from_options(options) - create!(options.except(:config_info).merge(:options => { :config_info => options[:config_info] })) + create!(options.except(:config_info).merge(:options => {:config_info => options[:config_info]})) end private_class_method :create_from_options @@ -414,6 +419,7 @@ def provision_request(user, options = nil, request_options = {}) request_options[:parent_id] = options.delete('param_parent_request_id') unless options['param_parent_request_id'].nil? result = order(user, options, request_options) raise result[:errors].join(", ") if result[:errors].any? 
+ result[:request] end @@ -430,7 +436,7 @@ def queue_order(user_id, options, request_options) :class_name => self.class.name, :instance_id => id, :method_name => "order", - :args => [user_id, options, request_options], + :args => [user_id, options, request_options] ) end @@ -454,7 +460,7 @@ def order(user_or_id, options = nil, request_options = {}, schedule_time = nil) :interval => {:unit => "once"}, :start_time => time, :tz => "UTC", - }, + } ) {:schedule => schedule} else @@ -517,6 +523,7 @@ def validate_update_config_info(options) if options[:prov_type] && options[:prov_type] != prov_type raise _('prov_type cannot be changed') end + options[:config_info] end diff --git a/app/models/service_template_ansible_playbook.rb b/app/models/service_template_ansible_playbook.rb index e5b1078db29..e22764c8ed5 100644 --- a/app/models/service_template_ansible_playbook.rb +++ b/app/models/service_template_ansible_playbook.rb @@ -109,6 +109,7 @@ def retirement_potential? def check_retirement_potential return true unless retirement_potential? + error_text = 'Destroy aborted. Active Services require retirement resources associated with this instance.' errors.add(:base, error_text) throw :abort @@ -118,6 +119,7 @@ def create_dialogs(config_info) [:provision, :retirement, :reconfigure].each_with_object({}) do |action, hash| info = config_info[action] next unless new_dialog_required?(info) + hash[action] = {:dialog_id => create_new_dialog(info[:new_dialog_name], info[:extra_vars], info[:hosts]).id} end end diff --git a/app/models/service_template_ansible_tower.rb b/app/models/service_template_ansible_tower.rb index 5dec54294ae..7dc93c4778e 100644 --- a/app/models/service_template_ansible_tower.rb +++ b/app/models/service_template_ansible_tower.rb @@ -52,6 +52,7 @@ def self.validate_config_info(options) unless config_info[:configuration_script_id] || config_info[:configuration] raise _('Must provide configuration_script_id or configuration') end + config_info end @@ -74,6 +75,7 @@ def update_service_resources(config_info, _auth_user = nil) def validate_update_config_info(options) super return unless options.key?(:config_info) + self.class.validate_config_info(options) end diff --git a/app/models/service_template_awx.rb b/app/models/service_template_awx.rb index b64acc0a875..df0ffbff7ad 100644 --- a/app/models/service_template_awx.rb +++ b/app/models/service_template_awx.rb @@ -52,6 +52,7 @@ def self.validate_config_info(options) unless config_info[:configuration_script_id] || config_info[:configuration] raise _('Must provide configuration_script_id or configuration') end + config_info end @@ -74,6 +75,7 @@ def update_service_resources(config_info, _auth_user = nil) def validate_update_config_info(options) super return unless options.key?(:config_info) + self.class.validate_config_info(options) end diff --git a/app/models/service_template_catalog.rb b/app/models/service_template_catalog.rb index 4c19f8e0fa9..7835d78fb28 100644 --- a/app/models/service_template_catalog.rb +++ b/app/models/service_template_catalog.rb @@ -3,7 +3,7 @@ class ServiceTemplateCatalog < ApplicationRecord validates :name, :presence => true, :uniqueness_when_changed => {:scope => :tenant_id} belongs_to :tenant - has_many :service_templates, :dependent => :nullify + has_many :service_templates, :dependent => :nullify acts_as_miq_taggable diff --git a/app/models/service_template_orchestration.rb b/app/models/service_template_orchestration.rb index c14841df229..1c6c41ef3cf 100644 --- a/app/models/service_template_orchestration.rb +++ 
b/app/models/service_template_orchestration.rb @@ -51,6 +51,7 @@ def self.validate_config_info(options) unless (config_info[:template_id] && config_info[:manager_id]) || (config_info[:template] && config_info[:manager]) raise _('Must provide both template_id and manager_id or manager and template') end + config_info end @@ -73,6 +74,7 @@ def update_service_resources(config_info, _auth_user = nil) def validate_update_config_info(options) super return unless options.key?(:config_info) + self.class.validate_config_info(options) end diff --git a/app/models/service_template_provision_request.rb b/app/models/service_template_provision_request.rb index e6a3a12539b..6b8a0a628e3 100644 --- a/app/models/service_template_provision_request.rb +++ b/app/models/service_template_provision_request.rb @@ -1,11 +1,11 @@ class ServiceTemplateProvisionRequest < MiqRequest TASK_DESCRIPTION = N_('Service_Template_Provisioning') SOURCE_CLASS_NAME = 'ServiceTemplate' - ACTIVE_STATES = %w( migrated ) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[migrated] + base_class::ACTIVE_STATES SERVICE_ORDER_CLASS = '::ServiceOrderCart'.freeze - validates_inclusion_of :request_state, :in => %w( pending finished ) + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" - validate :must_have_user + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} + validate :must_have_user after_create :process_service_order @@ -22,7 +22,7 @@ class ServiceTemplateProvisionRequest < MiqRequest delegate :picture, :to => :service_template, :allow_nil => true - alias_method :user, :get_user + alias user get_user include MiqProvisionQuotaMixin def process_service_order diff --git a/app/models/service_template_provision_task.rb b/app/models/service_template_provision_task.rb index 0cd642eae4a..c7e79b3a060 100644 --- a/app/models/service_template_provision_task.rb +++ b/app/models/service_template_provision_task.rb @@ -15,12 +15,14 @@ def my_zone def provision_priority return 0 if service_resource.nil? + service_resource.provision_index end def sibling_sequence_run_now? return true if miq_request_task.nil? || miq_request_task.miq_request_tasks.count == 1 return false if miq_request_task.miq_request_tasks.detect { |t| t.provision_priority < provision_priority && t.state != "finished" } + true end @@ -29,6 +31,7 @@ def group_sequence_run_now? return true if parent.nil? return false unless parent.group_sequence_run_now? return false unless sibling_sequence_run_now? + true end @@ -105,7 +108,7 @@ def queue_post_provision :instance_id => id, :method_name => "do_post_provision", :zone => my_zone, - :deliver_on => 1.minutes.from_now.utc, + :deliver_on => 1.minute.from_now.utc, :tracking_label => tracking_label_id, :miq_callback => {:class_name => self.class.name, :instance_id => id, :method_name => :execute_callback} ) @@ -160,20 +163,21 @@ def resource_action def service_resource return nil if options[:service_resource_id].blank? + ServiceResource.find_by(:id => options[:service_resource_id]) end def mark_pending_items_as_finished miq_request.miq_request_tasks.each do |s| - if s.state == 'pending' - s.update_and_notify_parent(:state => "finished", :status => "Warn", :message => "Error in Request: #{miq_request.id}. 
Setting pending Task: #{id} to finished.") unless id == s.id + if s.state == 'pending' && !(id == s.id) + s.update_and_notify_parent(:state => "finished", :status => "Warn", :message => "Error in Request: #{miq_request.id}. Setting pending Task: #{id} to finished.") end end end def before_ae_starts(_options) reload - if state.to_s.downcase.in?(%w(pending queued)) + if state.to_s.downcase.in?(%w[pending queued]) _log.info("Executing #{request_class::TASK_DESCRIPTION} request: [#{description}]") update_and_notify_parent(:state => "active", :status => "Ok", :message => "In Process") end diff --git a/app/models/session.rb b/app/models/session.rb index 40c67d9c9cb..e4be3e380f8 100644 --- a/app/models/session.rb +++ b/app/models/session.rb @@ -27,6 +27,7 @@ def self.purge(ttl, batch_size = 100) def self.purge_one_batch(ttl, batch_size) sessions = where("updated_at <= ?", ttl.seconds.ago.utc).limit(batch_size) return 0 if sessions.size.zero? + sessions = sessions.reject do |s| expires_on = s.raw_data[:expires_on] rescue nil expires_on && expires_on >= Time.zone.now @@ -39,11 +40,11 @@ def self.purge_one_batch(ttl, batch_size) def self.log_off_user_sessions(sessions) # Log off the users associated with the sessions that are eligible for deletion userids = sessions.each_with_object([]) do |s, a| - begin - a << s.raw_data[:userid] - rescue => err - _log.warn("Error '#{err.message}', attempting to load session with id [#{s.id}]") - end + + a << s.raw_data[:userid] + rescue => err + _log.warn("Error '#{err.message}', attempting to load session with id [#{s.id}]") + end User.where(:userid => userids).each do |user| diff --git a/app/models/share.rb b/app/models/share.rb index 086a45e7f45..cb8a13cee51 100644 --- a/app/models/share.rb +++ b/app/models/share.rb @@ -13,9 +13,9 @@ class Share < ApplicationRecord attribute :allow_tenant_inheritance, :default => false - scope :by_tenant_inheritance, ->(tenant) do - where(:tenant => tenant.accessible_tenant_ids(:ancestor_ids), + scope :by_tenant_inheritance, lambda { |tenant| + where(:tenant => tenant.accessible_tenant_ids(:ancestor_ids), :allow_tenant_inheritance => true) .or(where(:tenant => tenant)) - end + } end diff --git a/app/models/snapshot.rb b/app/models/snapshot.rb index 17c85323715..4f74ebf5b4d 100644 --- a/app/models/snapshot.rb +++ b/app/models/snapshot.rb @@ -7,12 +7,12 @@ class Snapshot < ApplicationRecord serialize :disks, Array - after_create :after_create_callback + after_create :after_create_callback EVM_SNAPSHOT_NAME = "EvmSnapshot".freeze def after_create_callback - MiqEvent.raise_evm_event_queue(vm_or_template, "vm_snapshot_complete", attributes) unless self.is_a_type?(:system_snapshot) || self.not_recently_created? + MiqEvent.raise_evm_event_queue(vm_or_template, "vm_snapshot_complete", attributes) unless is_a_type?(:system_snapshot) || not_recently_created? end def self.add_elements(parentObj, xmlNode) @@ -54,11 +54,11 @@ def is_a_type?(stype) end if value == :system_snapshot - return self.is_a_type?(:evm_snapshot) + is_a_type?(:evm_snapshot) elsif value.kind_of?(Regexp) - return !!(value =~ name) + !!(value =~ name) else - return name == value + name == value end end @@ -83,11 +83,11 @@ def self.remove_unused_evm_snapshots(delay) def recently_created? create_time >= ::Settings.ems_refresh.raise_vm_snapshot_complete_if_created_within.to_i_with_method - .seconds.ago.utc + .seconds.ago.utc end def not_recently_created? - !self.recently_created? + !recently_created? 
end def self.xml_to_hashes(xmlNode, vm_or_template_id) @@ -113,22 +113,22 @@ def self.xml_to_hashes(xmlNode, vm_or_template_id) nh[:disks] = [] e.each_recursive do |e1| total_size += e1.attributes['size_on_disk'].to_i - if e1.name == "disk" - nh[:disks] << e1.attributes.to_h + next unless e1.name == "disk" - # If we do not get a snapshot create time in the header use the file create time - if e.attributes['create_time'].blank? && nh[:create_time].blank? - nh[:create_time] = e1.attributes['cdate_on_disk'] unless e1.attributes['cdate_on_disk'].blank? - end + nh[:disks] << e1.attributes.to_h + + # If we do not get a snapshot create time in the header use the file create time + if e.attributes['create_time'].blank? && nh[:create_time].blank? && e1.attributes['cdate_on_disk'].present? + nh[:create_time] = e1.attributes['cdate_on_disk'] end end nh[:uid] = e.attributes['uid'] - nh[:parent_uid] = e.attributes['parent'] unless e.attributes['parent'].blank? + nh[:parent_uid] = e.attributes['parent'] if e.attributes['parent'].present? nh[:name] = e.attributes['displayname'] nh[:filename] = e.attributes['filename'] nh[:description] = e.attributes['description'] - nh[:create_time] = e.attributes['create_time'] unless e.attributes['create_time'].blank? + nh[:create_time] = e.attributes['create_time'] if e.attributes['create_time'].present? nh[:current] = current == e.attributes['uid'] ? 1 : 0 nh[:total_size] = total_size # We are setting the vm_or_template_id relationship here because the tree relationship @@ -165,6 +165,7 @@ def self.add_snapshot_size_for_ems(parentObj, hashes) # we don't skip linking up data because of a format change. (IE 2009-09-25T20:11:14.000000Z to 2009-09-25T20:11:14.299742Z) def self.normalize_ss_uid(uid) return uid[0, 20] if !uid.nil? && uid.length == 27 && uid[-1, 1] == 'Z' + uid end private_class_method :normalize_ss_uid diff --git a/app/models/storage.rb b/app/models/storage.rb index 26facf53bbb..944030536f0 100644 --- a/app/models/storage.rb +++ b/app/models/storage.rb @@ -3,9 +3,9 @@ class Storage < ApplicationRecord belongs_to :ext_management_system, :foreign_key => :ems_id, :inverse_of => :storages - has_many :vms_and_templates, :foreign_key => :storage_id, :dependent => :nullify, :class_name => "VmOrTemplate" - has_many :miq_templates, :foreign_key => :storage_id - has_many :vms, :foreign_key => :storage_id + has_many :vms_and_templates, :dependent => :nullify, :class_name => "VmOrTemplate" + has_many :miq_templates + has_many :vms has_many :host_storages, :dependent => :destroy has_many :hosts, :through => :host_storages has_many :storage_profile_storages, :dependent => :destroy @@ -17,7 +17,7 @@ class Storage < ApplicationRecord has_many :metrics, :as => :resource # Destroy will be handled by purger has_many :metric_rollups, :as => :resource # Destroy will be handled by purger - has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger + has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger has_many :storage_files, :dependent => :destroy has_many :storage_files_files, -> { where("rsc_type = 'file'") }, :class_name => "StorageFile", :foreign_key => "storage_id" @@ -30,7 +30,7 @@ class Storage < ApplicationRecord scope :available, -> { where(:maintenance => [nil, false]) } - validates_presence_of :name + validates :name, :presence => true include RelationshipMixin self.default_relationship_type = "ems_metadata" @@ -63,7 +63,7 @@ class Storage < ApplicationRecord virtual_column 
:v_provisioned_percent_of_total, :type => :float virtual_column :total_managed_unregistered_vms, :type => :integer virtual_column :total_managed_registered_vms, :type => :integer - virtual_column :total_unmanaged_vms, :type => :integer # uses is handled via class method that aggregates + virtual_column :total_unmanaged_vms, :type => :integer # uses is handled via class method that aggregates virtual_column :count_of_vmdk_disk_files, :type => :integer delegate :queue_name_for_ems_operations, :to => :ext_management_system, :allow_nil => true @@ -136,10 +136,8 @@ def scan_complete_callback(miq_task_id, status, _message, result) end miq_task.lock(:exclusive) do |locked_miq_task| - if locked_miq_task.context_data[:targets].length == 1 - unless MiqTask.status_ok?(status) - self.task_results = result unless result.nil? - end + if locked_miq_task.context_data[:targets].length == 1 && !MiqTask.status_ok?(status) && !result.nil? + self.task_results = result end if MiqTask.status_error?(status) @@ -181,7 +179,7 @@ def scan_queue_item(miq_task_id) :method_name => 'smartstate_analysis', :args => [miq_task_id], :msg_timeout => self.class.scan_collection_timeout, - :miq_callback => cb, + :miq_callback => cb ) end @@ -269,12 +267,12 @@ def self.scan_watchdog(miq_task_id) miq_task.lock(:exclusive) do |locked_miq_task| locked_miq_task.context_data[:pending].each do |storage_id, qitem_id| qitem = MiqQueue.find_by(:id => qitem_id) - if qitem.nil? - _log.warn("Pending Scan for Storage ID: [#{storage_id}] is missing MiqQueue ID: [#{qitem_id}] - will requeue") - locked_miq_task.context_data[:pending].delete(storage_id) - locked_miq_task.save! - scan_queue(locked_miq_task) - end + next unless qitem.nil? + + _log.warn("Pending Scan for Storage ID: [#{storage_id}] is missing MiqQueue ID: [#{qitem_id}] - will requeue") + locked_miq_task.context_data[:pending].delete(storage_id) + locked_miq_task.save! + scan_queue(locked_miq_task) end end scan_queue_watchdog(miq_task.id) @@ -311,7 +309,7 @@ def self.scan_eligible_storages(zone_name = nil) end def self.create_scan_task(task_name, userid, storages) - context_data = {:targets => storages.collect(&:id).sort, :complete => [], :pending => {}} + context_data = {:targets => storages.collect(&:id).sort, :complete => [], :pending => {}} miq_task = MiqTask.create( :name => task_name, :state => MiqTask::STATE_QUEUED, @@ -483,6 +481,7 @@ def unmanaged_paths(vms = nil) def qmessage?(method_name) return false if $_miq_worker_current_msg.nil? 
+ ($_miq_worker_current_msg.class_name == self.class.name) && ($_miq_worker_current_msg.instance_id = id) && ($_miq_worker_current_msg.method_name == method_name) end @@ -514,12 +513,12 @@ def smartstate_analysis(miq_task_id = nil) _log.warn(message) raise MiqException::MiqUnreachableStorage, _("There are no EMSs with valid credentials connected to Storage: [%{name}] in Zone: [%{zone}].") % - {:name => name, :zone => MiqServer.my_zone} + {:name => name, :zone => MiqServer.my_zone} end ems = ext_management_system - unless smartstate_analysis_count_for_ems_id(ems.id) < ::Settings.storage.max_parallel_scans_per_ems - raise MiqException::MiqQueueRetryLater.new(:deliver_on => Time.now.utc + 1.minute) if qmessage?(method_name) + if !(smartstate_analysis_count_for_ems_id(ems.id) < ::Settings.storage.max_parallel_scans_per_ems) && qmessage?(method_name) + raise MiqException::MiqQueueRetryLater.new(:deliver_on => Time.now.utc + 1.minute) end $_miq_worker_current_msg.update!(:target_id => ems.id) if qmessage?(method_name) @@ -553,9 +552,8 @@ def vm_ids_by_path host_ids = hosts.collect(&:id) return {} if host_ids.empty? - Vm.where(:host_id => host_ids).includes(:storage).inject({}) do |h, v| + Vm.where(:host_id => host_ids).includes(:storage).each_with_object({}) do |v, h| h[File.dirname(v.path)] = v.id - h end end @@ -563,6 +561,7 @@ def vm_ids_by_path def self.get_common_refresh_targets(storages) storages = Array.wrap(storages) return [] if storages.empty? + storages = find(storages) unless storages[0].kind_of?(Storage) objs = storages.collect do |s| @@ -577,17 +576,17 @@ def self.get_common_refresh_targets(storages) def used_space total_space.to_i.zero? ? 0 : total_space.to_i - free_space.to_i end - alias_method :v_used_space, :used_space + alias v_used_space used_space def used_space_percent_of_total total_space.to_i.zero? ? 0.0 : (used_space.to_f / total_space * 1000.0).round / 10.0 end - alias_method :v_used_space_percent_of_total, :used_space_percent_of_total + alias v_used_space_percent_of_total used_space_percent_of_total def free_space_percent_of_total total_space.to_i.zero? ? 0.0 : (free_space.to_f / total_space * 1000.0).round / 10.0 end - alias_method :v_free_space_percent_of_total, :free_space_percent_of_total + alias v_free_space_percent_of_total free_space_percent_of_total def v_total_hosts if @association_cache.include?(:hosts) @@ -610,11 +609,11 @@ def v_total_vms end end - alias_method :v_total_debris_size, :debris_size - alias_method :v_total_snapshot_size, :snapshot_size - alias_method :v_total_memory_size, :vm_ram_size - alias_method :v_total_vm_misc_size, :vm_misc_size - alias_method :v_total_disk_size, :disk_size + alias v_total_debris_size debris_size + alias v_total_snapshot_size snapshot_size + alias v_total_memory_size vm_ram_size + alias v_total_vm_misc_size vm_misc_size + alias v_total_disk_size disk_size def v_debris_percent_of_used used_space.to_i.zero? ? 0.0 : (debris_size.to_f / used_space * 1000.0).round / 10.0 @@ -737,7 +736,7 @@ def perf_capture(interval_name, *_args) ['registered', 'unregistered'].each do |mode| attrs["derived_storage_used_#{mode}".to_sym] ||= 0 - send("#{mode}_vms").each do |vm| + send(:"#{mode}_vms").each do |vm| vm_attrs = {:capture_interval => interval, :resource_name => vm.name} vm_attrs[:derived_storage_vm_count_managed] = 1 vm_attrs["derived_storage_vm_count_#{mode}".to_sym] = 1 @@ -766,13 +765,13 @@ def perf_capture(interval_name, *_args) attrs[col] ||= 0 vm_attrs[col] ||= 0 - unless val.nil? 
- attrs[col] += val - attrs["derived_storage_used_#{mode}".to_sym] += val - attrs[:derived_storage_used_managed] += val - vm_attrs[col] += val - vm_attrs[:derived_storage_used_managed] += val - end + next if val.nil? + + attrs[col] += val + attrs["derived_storage_used_#{mode}".to_sym] += val + attrs[:derived_storage_used_managed] += val + vm_attrs[col] += val + vm_attrs[:derived_storage_used_managed] += val end vm_perf = obj_perfs.fetch_path(vm.class.name, vm.id, interval_name, hour) diff --git a/app/models/storage_file.rb b/app/models/storage_file.rb index 2103f99b8f0..3680225d262 100644 --- a/app/models/storage_file.rb +++ b/app/models/storage_file.rb @@ -8,6 +8,7 @@ class StorageFile < ApplicationRecord def self.is_snapshot_disk_file(file) return false unless file.ext_name == "vmdk" + basename = File.basename(file.name, ".vmdk") i = basename.rindex('-') test_str = i.nil? ? basename : basename[i + 1..-1] @@ -51,6 +52,7 @@ def self.link_storage_files_to_vms(files, vm_ids_by_path, update = true) path = f.is_directory? ? f.name : File.dirname(f.name) vm_id = vm_ids_by_path[path] next if vm_id.nil? + if update f.update_attribute(:vm_or_template_id, vm_id) else diff --git a/app/models/system_console.rb b/app/models/system_console.rb index 38ae9ce52a0..5deb118be71 100644 --- a/app/models/system_console.rb +++ b/app/models/system_console.rb @@ -31,13 +31,14 @@ def self.allocate_port (port_range_start..port_range_end).each do |port_number| return port_number if used_ports[0].nil? || used_ports[0] > port_number + used_ports.shift if used_ports[0] == port_number end nil end def self.local_address - MiqServer.my_server.ipaddress.blank? ? local_address_fallback : MiqServer.my_server.ipaddress + (MiqServer.my_server.ipaddress.presence || local_address_fallback) end def self.local_address_fallback @@ -59,7 +60,7 @@ def self.launch_proxy(remote_address, remote_port) Process.detach(pid) - return [local_address, local_port, pid] + [local_address, local_port, pid] end def self.kill_proxy_process(pid) @@ -67,8 +68,9 @@ def self.kill_proxy_process(pid) end def self.cleanup_proxy_processes - SystemConsole.where.not(:proxy_pid => nil).where(:host_name => local_address).each do |console| - next unless %w(websocket_closed ticket_invalid).include?(console.proxy_status) + SystemConsole.where.not(:proxy_pid => nil).where(:host_name => local_address).each do |console| + next unless %w[websocket_closed ticket_invalid].include?(console.proxy_status) + kill_proxy_process(console.proxy_pid) console.destroy end @@ -94,8 +96,8 @@ def self.launch_proxy_if_not_local(console_args, originating_server, host_addres if ::Settings.server.console_proxy_disabled || SystemConsole.is_local?(originating_server) console_args.update( - :host_name => host_address, - :port => host_port, + :host_name => host_address, + :port => host_port ) else SystemConsole.cleanup_proxy_processes diff --git a/app/models/tag.rb b/app/models/tag.rb index b6f2f476b54..d8a1258ed76 100644 --- a/app/models/tag.rb +++ b/app/models/tag.rb @@ -18,7 +18,7 @@ class Tag < ApplicationRecord def self.list(object, options = {}) ns = get_namespace(options) if ns[0..7] == "/virtual" - ns.gsub!('/virtual/','') # throw away /virtual + ns.gsub!('/virtual/', '') # throw away /virtual ns, virtual_custom_attribute = MiqExpression.escape_virtual_custom_attribute(ns) predicate = ns.split("/") @@ -61,7 +61,7 @@ def self.tags(options = {}) def self.parse(list) if list.kind_of?(Array) tag_names = list.collect { |tag| tag.try(:to_s) } - return tag_names.compact + tag_names.compact 
else tag_names = [] @@ -69,7 +69,7 @@ def self.parse(list) list = list.dup # first, pull out the quoted tags - list.gsub!(/\"(.*?)\"\s*/) do + list.gsub!(/"(.*?)"\s*/) do tag_names << $1 "" end @@ -86,7 +86,7 @@ def self.parse(list) # delete any blank tag names tag_names = tag_names.delete_if(&:empty?) - return tag_names.uniq + tag_names.uniq end end @@ -110,8 +110,9 @@ def self.filter_ns(tags, ns) list = [] tags.collect do |tag| next unless tag.name =~ %r{^#{ns}/(.*)$}i + name = $1.include?(" ") ? "'#{$1}'" : $1 - list.push(name) unless name.blank? + list.push(name) if name.present? end list end @@ -129,8 +130,8 @@ def self.lookup_by_classification_name(name) singleton_class.send(:alias_method, :find_by_classification_name, :lookup_by_classification_name) Vmdb::Deprecation.deprecate_methods(singleton_class, :find_by_classification_name => :lookup_by_classification_name) - def ==(comparison_object) - super || name.downcase == comparison_object.to_s.downcase + def ==(other) + super || name.downcase == other.to_s.downcase end def show diff --git a/app/models/tenant.rb b/app/models/tenant.rb index e499bb26aed..799cca8c89b 100644 --- a/app/models/tenant.rb +++ b/app/models/tenant.rb @@ -17,6 +17,8 @@ class Tenant < ApplicationRecord attribute :divisible, :default => true attribute :use_config_for_attributes, :default => false + before_save :nil_blanks + after_create :create_tenant_group, :create_miq_product_features_for_tenant_nodes, :update_miq_product_features_for_tenant_nodes before_destroy :ensure_can_be_destroyed has_ancestry(:orphan_strategy => :restrict) @@ -27,7 +29,6 @@ class Tenant < ApplicationRecord has_many :vms, :inverse_of => :tenant has_many :miq_templates, :inverse_of => :tenant has_many :service_template_catalogs - has_many :service_templates has_many :tenant_quotas has_many :miq_groups @@ -61,9 +62,7 @@ class Tenant < ApplicationRecord virtual_column :parent_name, :type => :string virtual_column :display_type, :type => :string - before_save :nil_blanks after_save -> { MiqProductFeature.invalidate_caches } - after_create :create_tenant_group, :create_miq_product_features_for_tenant_nodes, :update_miq_product_features_for_tenant_nodes def self.scope_by_tenant? true @@ -279,7 +278,7 @@ def self.tenant_and_project_names tenants_by_id = all_tenants_and_projects.index_by(&:id) tenants_and_projects = Rbac.filtered(Tenant.in_my_region.select(:id, :ancestry, :divisible, :use_config_for_attributes, :name)) - .to_a.sort_by { |t| [t.ancestry || "", t.name] } + .to_a.sort_by { |t| [t.ancestry || "", t.name] } tenants_and_projects.partition(&:divisible?).map do |tenants| tenants.map do |t| @@ -303,6 +302,7 @@ def build_tenant_tree data_tenant = [] all_subtenants.each do |subtenant| next unless subtenant.parent_name == name + data_tenant.push(:name => subtenant.name, :id => subtenant.id, :parent => id) if subtenant.all_subtenants.count > 0 data_tenant.concat(subtenant.build_tenant_tree) diff --git a/app/models/tenant_quota.rb b/app/models/tenant_quota.rb index d556765fcb2..f5cc428b2d1 100644 --- a/app/models/tenant_quota.rb +++ b/app/models/tenant_quota.rb @@ -193,10 +193,8 @@ def check_for_over_allocation # Check if the parent has enough quota available to give to the child parent_quota = tenant.parent.tenant_quotas.send(name).take - unless parent_quota.nil? - if parent_quota.available < diff - errors.add(name, "quota is over allocated, parent tenant does not have enough quota") - end + if !parent_quota.nil? 
&& (parent_quota.available < diff) + errors.add(name, "quota is over allocated, parent tenant does not have enough quota") end end end diff --git a/app/models/user.rb b/app/models/user.rb index 2662a956a2e..a6547033ebf 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -8,6 +8,8 @@ class User < ApplicationRecord include CustomActionsMixin include ExternalUrlMixin + before_validation :nil_email_field_if_blank + before_validation :dummy_password_for_external_auth before_destroy :check_reference, :prepend => true has_many :miq_approvals, :as => :approver @@ -29,9 +31,9 @@ class User < ApplicationRecord has_many :sessions, :dependent => :destroy belongs_to :current_group, :class_name => "MiqGroup" has_and_belongs_to_many :miq_groups - scope :superadmins, lambda { + scope :superadmins, lambda { joins(:miq_groups => {:miq_user_role => :miq_product_features}) - .where(:miq_product_features => {:identifier => MiqProductFeature::SUPER_ADMIN_FEATURE }) + .where(:miq_product_features => {:identifier => MiqProductFeature::SUPER_ADMIN_FEATURE}) } virtual_has_many :active_vms, :class_name => "VmOrTemplate" @@ -52,9 +54,9 @@ class User < ApplicationRecord # use authenticate_bcrypt rather than .authenticate to avoid confusion # with the class method of the same name (User.authenticate) - alias_method :authenticate_bcrypt, :authenticate + alias authenticate_bcrypt authenticate - serialize :settings, Hash # Implement settings column as a hash + serialize :settings, Hash # Implement settings column as a hash attribute :failed_login_attempts, :default => 0 @@ -146,8 +148,6 @@ def validate errors.add(:userid, "'system' is reserved for EVM internal operations") unless (userid =~ /^system$/i).nil? end - before_validation :nil_email_field_if_blank - before_validation :dummy_password_for_external_auth before_destroy :destroy_subscribed_widget_sets def check_reference @@ -189,7 +189,7 @@ def change_password(oldpwd, newpwd) end if auth.authenticate(userid, oldpwd) self.password = newpwd - self.save! + save! end end @@ -210,7 +210,7 @@ def fail_login! def ldap_group current_group.try(:description) end - alias_method :miq_group_description, :ldap_group + alias miq_group_description ldap_group def role_allows?(**options) Rbac.role_allows?(:user => self, **options) @@ -282,6 +282,7 @@ def self.lookup_by_identity(username) def self.authorize_user(userid) return if userid.blank? || admin?(userid) + authenticator(userid).authorize_user(userid) end @@ -299,8 +300,7 @@ def logoff def get_expressions(db = nil) sql = ["((search_type=? and search_key is null) or (search_type=? and search_key is null) or (search_type=? and search_key=?))", - 'default', 'global', 'user', userid - ] + 'default', 'global', 'user', userid] unless db.nil? sql[0] += "and db=?" sql << db.to_s @@ -325,6 +325,7 @@ def change_current_group user_groups = miq_group_ids user_groups.delete(current_group_id) raise _("The user's current group cannot be changed because the user does not belong to any other group") if user_groups.empty? + self.current_group = MiqGroup.find_by(:id => user_groups.first) save! end @@ -385,6 +386,7 @@ def self.with_user(user, userid = nil) def self.with_user_group(user, group, &block) return yield if user.nil? 
+ user = User.find(user) unless user.kind_of?(User) if group && group.kind_of?(MiqGroup) user.current_group = group @@ -444,6 +446,7 @@ def self.seed seed_data.each do |user_attributes| user_id = user_attributes[:userid] next if in_my_region.find_by_userid(user_id) + log_attrs = user_attributes.slice(:name, :userid, :group) _log.info("Creating user with parameters #{log_attrs.inspect}") @@ -473,10 +476,10 @@ def self.seed_data def unlock_queue MiqQueue.create_with(:deliver_on => Time.now.utc + ::Settings.authentication.locked_account_timeout.to_i) .put_unless_exists( - :class_name => self.class.name, - :instance_id => id, - :method_name => 'unlock!', - :priority => MiqQueue::MAX_PRIORITY - ) + :class_name => self.class.name, + :instance_id => id, + :method_name => 'unlock!', + :priority => MiqQueue::MAX_PRIORITY + ) end end diff --git a/app/models/vim_performance_analysis.rb b/app/models/vim_performance_analysis.rb index 51ce8e94b31..f8136439c62 100644 --- a/app/models/vim_performance_analysis.rb +++ b/app/models/vim_performance_analysis.rb @@ -105,7 +105,7 @@ def storages_for_compute_target(target) return target.storages if target.kind_of?(Host) if target.kind_of?(EmsCluster) - return target.hosts.collect(&:storages).flatten.compact + target.hosts.collect(&:storages).flatten.compact else raise _("unable to get storages for %{name}") % {:name => target.class} end @@ -116,25 +116,25 @@ def storages_for_compute_target(target) :used => {:metric => :max_cpu_usagemhz_rate_average, :mode => :perf_trend}, :reserved => {:metric => :cpu_reserve, :mode => :current}, :allocated => nil, - :manual => {:value => nil, :mode => :manual} + :manual => {:value => nil, :mode => :manual} }, :vcpus => { :used => {:metric => :num_cpu, :mode => :current}, :reserved => {:metric => :num_cpu, :mode => :current}, :allocated => {:metric => :num_cpu, :mode => :current}, - :manual => {:value => nil, :mode => :manual} + :manual => {:value => nil, :mode => :manual} }, :memory => { :used => {:metric => :max_derived_memory_used, :mode => :perf_trend}, :reserved => {:metric => :memory_reserve, :mode => :current}, :allocated => {:metric => :ram_size, :mode => :current}, - :manual => {:value => nil, :mode => :manual} + :manual => {:value => nil, :mode => :manual} }, :storage => { :used => {:metric => :used_disk_storage, :mode => :current}, :reserved => {:metric => :provisioned_storage, :mode => :current}, :allocated => {:metric => :allocated_disk_storage, :mode => :current}, - :manual => {:value => nil, :mode => :manual} + :manual => {:value => nil, :mode => :manual} } } ########################################################## @@ -157,9 +157,9 @@ def get_vm_needs end vm_perf = VimPerformanceAnalysis.get_daily_perf(@vm, options[:range], options[:ext_options], perf_cols) - vm_ts = vm_perf.last.timestamp unless vm_perf.blank? - [:cpu, :vcpus, :memory, :storage].each_with_object({}) do |type, vm_needs| - vm_needs[type] = vm_consumes(vm_perf, vm_ts, options[:vm_options][type], type) + vm_ts = vm_perf.last.timestamp if vm_perf.present? + [:cpu, :vcpus, :memory, :storage].index_with do |type| + vm_consumes(vm_perf, vm_ts, options[:vm_options][type], type) end end @@ -251,13 +251,14 @@ def offers(perf, ts, options, type, target) avail = measure_object(target, options[:mode], options[:limit_col], perf, ts, type) || 0 avail = (avail * (options[:limit_pct] / 100.0)) unless avail.nil? || options[:limit_pct].blank? end - usage = (usage > reserve) ? 
usage : reserve # Take the greater of usage or total reserve of child VMs + usage = reserve unless usage > reserve # Take the greater of usage or total reserve of child VMs [avail, usage] end def can_fit(avail, usage, need) return nil if avail.nil? || usage.nil? || need.nil? return 0 unless avail > usage && need > 0 + fits = (avail - usage) / need fits.truncate end @@ -333,16 +334,16 @@ def self.find_child_perf_for_time_period(obj, interval_name, options = {}) rel = Metric::Helper.find_for_interval_name(interval_name, ext_options[:time_profile] || ext_options[:tz], ext_options[:class]) case obj - when MiqEnterprise, MiqRegion then + when MiqEnterprise, MiqRegion rel = rel.where(:resource => obj.storages).or(rel.where(:resource => obj.ext_management_systems)) - when Host then + when Host rel = rel.where(:parent_host_id => obj.id) when EmsCluster rel = rel.where(:parent_ems_cluster_id => obj.id) - when Storage then + when Storage rel = rel.where(:parent_storage_id => obj.id) - when ExtManagementSystem then - rel = rel.where(:parent_ems_id => obj.id).where(:resource_type => %w(Host EmsCluster)) + when ExtManagementSystem + rel = rel.where(:parent_ems_id => obj.id).where(:resource_type => %w[Host EmsCluster]) else raise _("unknown object type: %{class}") % {:class => obj.class} end @@ -362,9 +363,10 @@ def self.find_child_perf_for_time_period(obj, interval_name, options = {}) def self.child_tags_over_time_period(obj, interval_name, options = {}) classifications = Classification.hash_all_by_type_and_name - find_child_perf_for_time_period(obj, interval_name, options.merge(:conditions => "resource_type != 'VmOrTemplate' AND tag_names IS NOT NULL", :select => "resource_type, tag_names")).inject({}) do |h, p| + find_child_perf_for_time_period(obj, interval_name, options.merge(:conditions => "resource_type != 'VmOrTemplate' AND tag_names IS NOT NULL", :select => "resource_type, tag_names")).each_with_object({}) do |p, h| p.tag_names.split("|").each do |t| next if t.starts_with?("power_state") + tag = "#{p.resource_type}/#{t}" next if h.key?(tag) @@ -375,7 +377,6 @@ def self.child_tags_over_time_period(obj, interval_name, options = {}) ent_desc = ent.nil? ? e.titleize : ent.description h[tag] = "#{ui_lookup(:model => p.resource_type)}: #{cat_desc}: #{ent_desc}" end - h end end @@ -410,23 +411,22 @@ def self.group_perf_by_timestamp(obj, perfs, cols = nil) end result.each do |_k, h| - h[:min_max] = h.keys.find_all { |k| k.to_s.starts_with?("min", "max") }.inject({}) do |mm, k| + h[:min_max] = h.keys.find_all { |k| k.to_s.starts_with?("min", "max") }.each_with_object({}) do |k, mm| val = h.delete(k) mm[k] = val unless val.nil? - mm end h.reject! 
{ |k, _v| perf_klass.virtual_attribute?(k) } end - result.inject([]) do |recs, k| + result.each_with_object([]) do |k, recs| _ts, v = k cols.each do |c| next unless v[c].kind_of?(Float) + Metric::Aggregation::Process.column(c, nil, v, counts[k], true, :average) end recs.push(perf_klass.new(v)) - recs end end @@ -435,6 +435,7 @@ def self.calc_slope_from_data(recs, x_attr, y_attr) coordinates = recs.each_with_object([]) do |r, arr| next unless r.respond_to?(x_attr) && r.respond_to?(y_attr) + if r.respond_to?(:inside_time_profile) && r.inside_time_profile == false _log.debug("Class: [#{r.class}], [#{r.resource_type} - #{r.resource_id}], Timestamp: [#{r.timestamp}] is outside of time profile") next @@ -443,7 +444,7 @@ def self.calc_slope_from_data(recs, x_attr, y_attr) # y = r.send(x_attr).to_i # Calculate normal way by using the integer value of the timestamp adj_x_attr = "time_profile_adjusted_#{x_attr}" if r.respond_to?(adj_x_attr) - r.send("#{adj_x_attr}=", (recs.first.send(x_attr).to_i + arr.length.days.to_i)) + r.send(:"#{adj_x_attr}=", (recs.first.send(x_attr).to_i + arr.length.days.to_i)) x = r.send(adj_x_attr).to_i # Calculate by using the number of days out from the first timestamp else x = r.send(x_attr).to_i @@ -453,7 +454,7 @@ def self.calc_slope_from_data(recs, x_attr, y_attr) begin Math.linear_regression(*coordinates) - rescue StandardError => err + rescue => err _log.warn("#{err.message}, calculating slope") unless err.kind_of?(ZeroDivisionError) nil end @@ -464,7 +465,7 @@ def self.get_daily_perf(obj, range, ext_options, perf_cols) ext_options ||= {} Metric::Helper.find_for_interval_name("daily", ext_options[:time_profile] || ext_options[:tz], ext_options[:class]) - .order("timestamp") #.select(perf_cols) - Currently passing perf_cols to select is broken because it includes virtual cols. This is actively being worked on. + .order("timestamp") # .select(perf_cols) - Currently passing perf_cols to select is broken because it includes virtual cols. This is actively being worked on. .where(:resource => obj, :timestamp => Metric::Helper.time_range_from_hash(range)) end @@ -473,12 +474,12 @@ def self.calc_trend_value_at_timestamp(recs, attr, timestamp) return nil if slope.nil? begin - return Math.slope_y_intercept(timestamp.to_f, slope, yint) + Math.slope_y_intercept(timestamp.to_f, slope, yint) rescue RangeError - return nil + nil rescue => err _log.warn("#{err.message}, calculating trend value") - return nil + nil end end @@ -487,12 +488,12 @@ def self.calc_timestamp_at_trend_value(recs, attr, value) return nil if slope.nil? begin - return Time.at(Math.slope_x_intercept(value.to_f, slope, yint)).utc + Time.at(Math.slope_x_intercept(value.to_f, slope, yint)).utc rescue RangeError - return nil + nil rescue => err _log.warn("#{err.message}, calculating timestamp at trend value") - return nil + nil end end end # module VimPerformanceAnalysis diff --git a/app/models/vim_performance_daily.rb b/app/models/vim_performance_daily.rb index b5a8c613f42..fae6f17dc26 100644 --- a/app/models/vim_performance_daily.rb +++ b/app/models/vim_performance_daily.rb @@ -1,5 +1,5 @@ class VimPerformanceDaily < MetricRollup - def self.instances_are_derived?; true; end + def self.instances_are_derived? 
= true INFO_COLS = [:resource_type, :resource_id, :resource_name] PARENT_COLS = [:parent_host_id, :parent_ems_cluster_id, :parent_storage_id, :parent_ems_id].freeze @@ -34,7 +34,7 @@ def self.process_hourly_for_one_day(recs, options = {}) rid = perf.resource_id key = [perf.capture_interval_name, rtype, rid] - result[key] ||= INFO_COLS.each_with_object({}) { |c, h| h[c] = perf.send(c) } + result[key] ||= INFO_COLS.index_with { |c| perf.send(c) } counts[key] ||= {} if tp && tp.ts_in_profile?(perf.timestamp) == false @@ -79,11 +79,13 @@ def self.process_hourly_for_one_day(recs, options = {}) (options[:reflections] || []).each do |assoc| next if perf.class.virtual_field?(assoc) + result[key][assoc.to_sym] = perf.send(assoc) if perf.respond_to?(assoc) end end return [] if result.empty? + ts_utc = ts.utc.to_time # Don't bother rolling up values if day is outside of time profile @@ -120,10 +122,9 @@ def self.process_hourly_for_one_day(recs, options = {}) results.each do |h| min_max = h.delete(:min_max) - h[:min_max] = h.keys.find_all { |k| k.to_s.starts_with?("min", "max") }.inject({}) do |mm, k| + h[:min_max] = h.keys.find_all { |k| k.to_s.starts_with?("min", "max") }.each_with_object({}) do |k, mm| val = h.delete(k) mm[k] = val unless val.nil? - mm end h[:min_max].merge!(min_max) if min_max.kind_of?(Hash) end @@ -138,6 +139,7 @@ def self.relevant_cols(cols, only_cols) def self.process_only_cols(recs) only_cols = recs.select_values.collect(&:to_sym).presence return unless only_cols + only_cols += only_cols.select { |c| c.to_s.starts_with?("min_", "max_") }.collect { |c| c.to_s[4..-1].to_sym } only_cols += only_cols.select { |c| c.to_s.starts_with?("abs_") }.collect { |c| c.to_s.split("_")[2..-2].join("_").to_sym } if only_cols.detect { |c| c.to_s.starts_with?("v_pct_") } diff --git a/app/models/vim_performance_state.rb b/app/models/vim_performance_state.rb index 2fbad3ac430..398a6a45223 100644 --- a/app/models/vim_performance_state.rb +++ b/app/models/vim_performance_state.rb @@ -27,8 +27,8 @@ class VimPerformanceState < ApplicationRecord :allocated_disk_types, :vm_used_disk_storage ].each do |m| - define_method(m) { state_data[m] } - define_method("#{m}=") { |value| state_data[m] = value } + define_method(m) { state_data[m] } + define_method(:"#{m}=") { |value| state_data[m] = value } end # state_data: @@ -177,6 +177,7 @@ def capture_disk_types if hardware self.allocated_disk_types = hardware.disks.each_with_object({}) do |disk, res| next if disk.size.nil? + type = disk.backing.try(:volume_type) || 'unclassified' res[type] = (res[type] || 0) + disk.size end @@ -189,7 +190,7 @@ def capture_totals end def capture_total(field) - return resource.send("aggregate_#{field}") if resource.respond_to?("aggregate_#{field}") + return resource.send(:"aggregate_#{field}") if resource.respond_to?(:"aggregate_#{field}") field == :memory ? 
hardware.try(:memory_mb) : hardware.try(:aggregate_cpu_speed) end @@ -278,7 +279,7 @@ def capture_image_tag_names def capture_vm_disk_storage if resource.kind_of?(VmOrTemplate) [:used_disk, :allocated_disk].each do |type| - send("vm_#{type}_storage=", resource.send("#{type}_storage")) + send(:"vm_#{type}_storage=", resource.send(:"#{type}_storage")) end end end diff --git a/app/models/vim_performance_state/purging.rb b/app/models/vim_performance_state/purging.rb index 964e8b1e382..d3dee077cb3 100644 --- a/app/models/vim_performance_state/purging.rb +++ b/app/models/vim_performance_state/purging.rb @@ -5,7 +5,7 @@ module Purging module ClassMethods def purge_mode_and_value - %w(orphaned resource) + %w[orphaned resource] end # remove anything where the resource no longer exists AND diff --git a/app/models/vim_performance_tag.rb b/app/models/vim_performance_tag.rb index c8bbdca1603..650cf332244 100644 --- a/app/models/vim_performance_tag.rb +++ b/app/models/vim_performance_tag.rb @@ -15,37 +15,37 @@ def self.find_and_group_by_tags(options) def self.group_by_tags(recs, options) raise _("no category provided") if options[:category].blank? raise _("option :cat_model must have a value") unless options[:cat_model] + cat_assoc = Object.const_get(options[:cat_model].to_s).table_name.to_sym tp = options.fetch_path(:ext_options, :time_profile) - results = recs.inject(:res => [], :tags => [], :tcols => []) do |h, rec| + results = recs.each_with_object(:res => [], :tags => [], :tcols => []) do |rec, h| tvrecs = build_tag_value_recs(rec, options) - if rec.class.name == "VimPerformanceTag" - rec.inside_time_profile = tp ? tp.ts_in_profile?(rec.timestamp) : true - else - rec.inside_time_profile = tp ? tp.ts_day_in_profile?(rec.timestamp) : true - end + rec.inside_time_profile = if rec.instance_of?(::VimPerformanceTag) + tp ? tp.ts_in_profile?(rec.timestamp) : true + else + tp ? tp.ts_day_in_profile?(rec.timestamp) : true + end tvrecs.each do |tv| - if rec.inside_time_profile == false + if rec.inside_time_profile == false tv.value = tv.assoc_ids = nil _log.debug("Timestamp: [#{rec.timestamp}] is outside of time profile") else tv.value ||= 0 end c = [tv.column_name, tv.tag_name].join("_").to_sym - rec.class.class_eval("attr_accessor #{c.inspect}") + rec.class.class_eval("attr_accessor #{c.inspect}", __FILE__, __LINE__) rec.send(c.to_s + "=", tv.column_name == "assoc_ids" ? tv.assoc_ids : tv.value) h[:tags].push(tv.tag_name).uniq! h[:tcols].push(c.to_s).uniq! end h[:res].push(rec) - h end results[:res].each do |rec| # Default nil values in tag cols to 0 for records with timestamp that falls inside the time profile - results[:tcols].each { |c| rec.send("#{c}=", 0) if rec.send(c).nil? } if rec.inside_time_profile == true + results[:tcols].each { |c| rec.send(:"#{c}=", 0) if rec.send(c).nil? } if rec.inside_time_profile == true # Fill in missing assos ids fill_assoc_ids(rec.timestamp, rec, cat_assoc, results[:tags]) @@ -75,7 +75,7 @@ def self.fill_assoc_ids(_ts, result, assoc, tags) tags.each do |t| assoc_ids_meth = ["assoc_ids", t].join("_").to_s if result.send(assoc_ids_meth).nil? 
- result.send("#{assoc_ids_meth}=", assoc => {:on => []}) + result.send(:"#{assoc_ids_meth}=", assoc => {:on => []}) end end end diff --git a/app/models/vim_performance_tag_value.rb b/app/models/vim_performance_tag_value.rb index cae3d38faac..fced4abd9e5 100644 --- a/app/models/vim_performance_tag_value.rb +++ b/app/models/vim_performance_tag_value.rb @@ -42,7 +42,7 @@ class VimPerformanceTagValue } def initialize(options = {}) - options.each { |k, v| public_send("#{k}=", v) } + options.each { |k, v| public_send(:"#{k}=", v) } end def self.build_from_performance_record(parent_perf, options = {}) @@ -58,6 +58,7 @@ def self.build_for_association(parent_perf, assoc, options = {}) ts = parent_perf.timestamp children = parent_perf.resource.vim_performance_state_association(ts, assoc).to_a return [] if children.empty? + vim_performance_daily = parent_perf.kind_of?(VimPerformanceDaily) recs = get_metrics(children, ts, parent_perf.capture_interval_name, vim_performance_daily, options[:category]) @@ -79,14 +80,15 @@ def self.build_for_association(parent_perf, assoc, options = {}) association_type = perf.resource_type cats_to_process.each do |category| - if !perf.tag_names.nil? && perf.tag_names.include?(category) - tag_names = perf.tag_names.split(TAG_SEP).select { |t| t.starts_with?(category) } - else - tag_names = ["#{category}/_none_"] - end + tag_names = if !perf.tag_names.nil? && perf.tag_names.include?(category) + perf.tag_names.split(TAG_SEP).select { |t| t.starts_with?(category) } + else + ["#{category}/_none_"] + end tag_names.each do |tag| next if tag.starts_with?("power_state") next if tag.starts_with?("folder_path") + tag_cols.each do |c| value = perf.send(c) c = [c.to_s, tag].join(TAG_SEP).to_sym @@ -126,11 +128,11 @@ def self.build_for_association(parent_perf, assoc, options = {}) def self.get_metrics(resources, timestamp, capture_interval_name, vim_performance_daily, category) if vim_performance_daily - MetricRollup.with_interval_and_time_range("hourly", (timestamp)..(timestamp+1.day)).where(:resource => resources) - .for_tag_names([[category, ""]]) # append trailing slash + MetricRollup.with_interval_and_time_range("hourly", timestamp..(timestamp + 1.day)).where(:resource => resources) + .for_tag_names([[category, ""]]) # append trailing slash else Metric::Helper.class_for_interval_name(capture_interval_name).where(:resource => resources) - .with_interval_and_time_range(capture_interval_name, timestamp) + .with_interval_and_time_range(capture_interval_name, timestamp) end end @@ -138,6 +140,7 @@ def self.get_metrics(resources, timestamp, capture_interval_name, vim_performanc def self.tag_cols(name) return TAG_COLS[name.to_sym] if TAG_COLS.key?(name.to_sym) + TAG_COLS[:default] end end # class VimPerformanceTagValue diff --git a/app/models/vim_performance_trend.rb b/app/models/vim_performance_trend.rb index b1bfee04989..022bd439b6e 100644 --- a/app/models/vim_performance_trend.rb +++ b/app/models/vim_performance_trend.rb @@ -38,27 +38,25 @@ def self.build(perfs, options) # :target_pcts => [70, 80, 90, 100], # } - options[:limit_col] ? 
options[:limit_col] : "limit" + options[:limit_col] || "limit" # group data by resource name - grouped_objs = perfs.inject({}) do |h, o| + grouped_objs = perfs.each_with_object({}) do |o, h| name = o.resource.name if o.resource h[name] ||= [] h[name].push(o) - h end # calculate trend data for each group - trend_data = grouped_objs.inject({}) do |h, group| + trend_data = grouped_objs.each_with_object({}) do |group, h| name, olist = group h[name] = build_trend_data(options[:trend_col], olist) - h end # build table data - table_data = grouped_objs.inject([]) do |arr, group| + grouped_objs.inject([]) do |arr, group| name, olist = group - olist.sort! { |a, b| a.timestamp <=> b.timestamp } + olist.sort_by!(&:timestamp) limit = olist.last.send(options[:limit_col]) if options[:limit_col] limit ||= options[:limit_val].to_f @@ -71,15 +69,15 @@ def self.build(perfs, options) col_name = "limit_pct_value_#{options[:target_pcts].index(pct) + 1}" pct_of_limit = (limit * pct * 0.01) row[col_name] = calc_value_at_target(pct_of_limit, trend_data[name]) - if row[col_name].nil? - row[col_name] = "Unknown" - elsif row[col_name] < Time.now.utc - row[col_name] = "--------------" - elsif row[col_name] > Time.now.utc + 2.years - row[col_name] = "Over 2 Years" - else - row[col_name] = row[col_name].strftime("%m/%d/%Y") - end + row[col_name] = if row[col_name].nil? + "Unknown" + elsif row[col_name] < Time.now.utc + "--------------" + elsif row[col_name] > Time.now.utc + 2.years + "Over 2 Years" + else + row[col_name].strftime("%m/%d/%Y") + end end # Need to exclude records that are outside time profile when calculating range min and max values @@ -101,7 +99,7 @@ def self.build(perfs, options) row[:max_trend_value] = ordered_by_trend_col.last.send(options[:trend_col]) # calculate start/end trend values - ordered_by_timestamp = olist_in_time_profile.sort_by(&:timestamp) + ordered_by_timestamp = olist_in_time_profile.sort_by(&:timestamp) row[:start_trend_value] = ordered_by_timestamp.first.send(options[:trend_col]) row[:end_trend_value] = ordered_by_timestamp.last.send(options[:trend_col]) @@ -131,21 +129,21 @@ def self.build(perfs, options) arr.push(new(row)) end - table_data + end def self.calc_value_at_target(limit, trend_data) if trend_data.nil? || trend_data[:slope].nil? 
- return nil + nil else begin result = Math.slope_x_intercept(limit, trend_data[:slope], trend_data[:yint]) - return Time.at(result).utc + Time.at(result).utc rescue RangeError - return nil + nil rescue => err _log.warn("#{err.message}, calculating trend limit for limit=#{limit}, trend_data=#{trend_data.inspect}, intermediate=#{result.inspect}") - return nil + nil end end end @@ -155,6 +153,7 @@ def self.build_trend_data(col, recs) coordinates = recs.collect do |r| next unless r.respond_to?(CHART_X_AXIS_COL) && r.respond_to?(col) + [r.send(CHART_X_AXIS_COL).to_i, r.send(col).to_f] end.compact @@ -162,7 +161,7 @@ def self.build_trend_data(col, recs) trend_data[:slope], trend_data[:yint], trend_data[:corr] = begin Math.linear_regression(*coordinates) - rescue StandardError => err + rescue => err _log.warn("#{err.message}, calculating slope") unless err.kind_of?(ZeroDivisionError) nil end @@ -171,21 +170,21 @@ def self.build_trend_data(col, recs) end TREND_COLS = { - :VmPerformance => { + :VmPerformance => { :cpu_usagemhz_rate_average => {}, :cpu_usage_rate_average => {}, :disk_usage_rate_average => {}, :net_usage_rate_average => {}, :derived_memory_used => {:limit_cols => ["derived_memory_available"]} }, - :HostPerformance => { + :HostPerformance => { :cpu_usagemhz_rate_average => {:limit_cols => ["derived_cpu_available", "derived_cpu_reserved"]}, :cpu_usage_rate_average => {}, :disk_usage_rate_average => {}, :net_usage_rate_average => {}, :derived_memory_used => {:limit_cols => ["derived_memory_available", "derived_memory_reserved"]} }, - :EmsClusterPerformance => { + :EmsClusterPerformance => { :cpu_usagemhz_rate_average => {:limit_cols => ["derived_cpu_available", "derived_cpu_reserved"]}, :cpu_usage_rate_average => {}, :disk_usage_rate_average => {}, @@ -193,13 +192,13 @@ def self.build_trend_data(col, recs) :derived_memory_used => {:limit_cols => ["derived_memory_available", "derived_memory_reserved"]} }, :ExtManagementSystemPerformance => { - :cpu_usagemhz_rate_average => {:limit_cols => %w(derived_cpu_available derived_cpu_reserved)}, + :cpu_usagemhz_rate_average => {:limit_cols => %w[derived_cpu_available derived_cpu_reserved]}, :cpu_usage_rate_average => {}, :disk_usage_rate_average => {}, :net_usage_rate_average => {}, - :derived_memory_used => {:limit_cols => %w(derived_memory_available derived_memory_reserved)} + :derived_memory_used => {:limit_cols => %w[derived_memory_available derived_memory_reserved]} }, - :StoragePerformance => { + :StoragePerformance => { :derived_storage_free => {:limit_cols => ["derived_storage_total"]}, :v_derived_storage_used => {:limit_cols => ["derived_storage_total"]} } @@ -218,14 +217,14 @@ def self.trend_model_details(interval) end def self.trend_limit_cols(db, col, interval) - col = col.starts_with?("min_", "max_") ? col[4..-1] : col + col = col[4..-1] if col.starts_with?("min_", "max_") return [] unless TREND_COLS[db.to_sym] return [] unless TREND_COLS[db.to_sym][col.to_sym] return [] unless TREND_COLS[db.to_sym][col.to_sym][:limit_cols] - TREND_COLS[db.to_sym][col.to_sym][:limit_cols].inject([]) do |arr, col| + + TREND_COLS[db.to_sym][col.to_sym][:limit_cols].each_with_object([]) do |col, arr| cols = interval == "daily" ? 
["max_#{col}"] : [col] # add in max if daily cols.each { |c| arr.push([Dictionary.gettext([db, c.to_s].join("."), :type => "column"), c]) } - arr end end @@ -242,11 +241,11 @@ def self.report_cols(options) "slope" ] col_order.each do |c| - if c.ends_with?("_trend_value") - col_headers << "#{Dictionary.gettext([options[:trend_db], c].join("."), :type => "column", :notfound => :titleize)} - #{Dictionary.gettext([options[:trend_db], options[:trend_col]].join("."), :type => "column", :notfound => :titleize)}" - else - col_headers << Dictionary.gettext([options[:trend_db], c].join("."), :type => "column", :notfound => :titleize) - end + col_headers << if c.ends_with?("_trend_value") + "#{Dictionary.gettext([options[:trend_db], c].join("."), :type => "column", :notfound => :titleize)} - #{Dictionary.gettext([options[:trend_db], options[:trend_col]].join("."), :type => "column", :notfound => :titleize)}" + else + Dictionary.gettext([options[:trend_db], c].join("."), :type => "column", :notfound => :titleize) + end end if options[:limit_col] diff --git a/app/models/vm.rb b/app/models/vm.rb index f5fee297dc0..9134a40e4d5 100644 --- a/app/models/vm.rb +++ b/app/models/vm.rb @@ -21,10 +21,10 @@ def self.corresponding_model module_parent::Template end end - class << self; alias_method :corresponding_template_model, :corresponding_model; end + class << self; alias corresponding_template_model corresponding_model; end delegate :corresponding_model, :to => :class - alias_method :corresponding_template_model, :corresponding_model + alias corresponding_template_model corresponding_model def validate_remote_console_vmrc_support raise(MiqException::RemoteConsoleNotSupportedError, @@ -87,7 +87,8 @@ def running_processes require 'win32/miq-wmi' cred = my_zone_obj.auth_user_pwd(:windows_domain) ipaddresses.each do |ipaddr| - break unless pl.blank? + break if pl.present? + _log.info("Running processes for VM:[#{id}:#{name}] IP:[#{ipaddr}] Logon:[#{cred[0]}]") begin wmi = WMIHelper.connectServer(ipaddr, *cred) @@ -117,9 +118,9 @@ def remote_console_url=(url, user_id) def supported_consoles { - :html5 => html5_support, - :vmrc => vmrc_support, - :native => native_support + :html5 => html5_support, + :vmrc => vmrc_support, + :native => native_support } end diff --git a/app/models/vm/operations.rb b/app/models/vm/operations.rb index 879297cffc0..a7d5049f06e 100644 --- a/app/models/vm/operations.rb +++ b/app/models/vm/operations.rb @@ -24,22 +24,22 @@ module Vm::Operations end supports :launch_vmrc_console do - begin - validate_remote_console_vmrc_support - rescue => err - _('VM VMRC Console error: %{error}') % {:error => err} - end + + validate_remote_console_vmrc_support + rescue => err + _('VM VMRC Console error: %{error}') % {:error => err} + end supports :launch_native_console do validate_native_console_support - rescue StandardError => err + rescue => err _('VM NATIVE Console error: %{error}') % {:error => err} end supports :collect_running_processes do reason = N_('VM Process collection is only available for Windows VMs.') unless ['windows'].include?(platform) - reason ||= N_('VM Process collection is only available for Runnable VMs.') unless self.runnable? + reason ||= N_('VM Process collection is only available for Runnable VMs.') unless runnable? reason ||= N_('VM Process collection is only available while the VM is powered on.') unless state == "on" reason ||= N_('VM Process collection requires credentials set at the Zone level.') if my_zone.nil? || my_zone_obj.auth_user_pwd(:windows_domain).nil? 
reason ||= N_('VM Process collection requires an IP address for the VM.') if ipaddresses.blank? @@ -50,6 +50,7 @@ module Vm::Operations def ipv4_address return public_address unless public_address.nil? + ipaddresses.find { |ip| IPAddr.new(ip).ipv4? && !ip.starts_with?('192') } end diff --git a/app/models/vm_cloud_reconfigure_request.rb b/app/models/vm_cloud_reconfigure_request.rb index feaf4260967..6461bedf53c 100644 --- a/app/models/vm_cloud_reconfigure_request.rb +++ b/app/models/vm_cloud_reconfigure_request.rb @@ -1,10 +1,10 @@ class VmCloudReconfigureRequest < MiqRequest TASK_DESCRIPTION = N_('VM Cloud Reconfigure').freeze SOURCE_CLASS_NAME = 'Vm'.freeze - ACTIVE_STATES = %w(reconfigured) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[reconfigured] + base_class::ACTIVE_STATES - validates :request_state, :inclusion => { :in => %w(pending finished) + ACTIVE_STATES, - :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, + :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} validate :must_have_user include MiqProvisionQuotaMixin diff --git a/app/models/vm_migrate_request.rb b/app/models/vm_migrate_request.rb index 19798a45134..3c822023e88 100644 --- a/app/models/vm_migrate_request.rb +++ b/app/models/vm_migrate_request.rb @@ -1,10 +1,10 @@ class VmMigrateRequest < MiqRequest TASK_DESCRIPTION = N_('VM Migrate') SOURCE_CLASS_NAME = 'Vm' - ACTIVE_STATES = %w( migrated ) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[migrated] + base_class::ACTIVE_STATES - validates_inclusion_of :request_state, :in => %w( pending finished ) + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" - validate :must_have_user + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} + validate :must_have_user include MiqProvisionQuotaMixin def vm diff --git a/app/models/vm_migrate_task.rb b/app/models/vm_migrate_task.rb index d3e8ffc14d5..32b1d179e7d 100644 --- a/app/models/vm_migrate_task.rb +++ b/app/models/vm_migrate_task.rb @@ -26,13 +26,13 @@ def self.get_description(req_obj) new_settings = [] host_name = req_obj.get_option_last(:placement_host_name) - new_settings << "Host: #{host_name}" unless host_name.blank? + new_settings << "Host: #{host_name}" if host_name.present? respool_name = req_obj.get_option_last(:placement_rp_name) - new_settings << "Resource Pool: #{respool_name}" unless respool_name.blank? + new_settings << "Resource Pool: #{respool_name}" if respool_name.present? folder_name = req_obj.get_option_last(:placement_folder_name) new_settings << "Folder: #{folder_name}" if folder_name.present? storage = req_obj.get_option_last(:placement_ds_name) - new_settings << "Storage: #{storage}" unless storage.blank? + new_settings << "Storage: #{storage}" if storage.present? 
"#{request_class::TASK_DESCRIPTION} for: #{name} - #{new_settings.join(", ")}" end diff --git a/app/models/vm_migrate_workflow/dialog_field_validation.rb b/app/models/vm_migrate_workflow/dialog_field_validation.rb index ff14ffac36a..c40e67f7733 100644 --- a/app/models/vm_migrate_workflow/dialog_field_validation.rb +++ b/app/models/vm_migrate_workflow/dialog_field_validation.rb @@ -1,8 +1,9 @@ module VmMigrateWorkflow::DialogFieldValidation def validate_placement(field, values, dlg, fld, value) # check the :placement_auto flag, then make sure the field is not blank - return nil unless value.blank? - return nil unless get_value(values[field]).blank? + return nil if value.present? + return nil if get_value(values[field]).present? + "#{required_description(dlg, fld)} is required" end end diff --git a/app/models/vm_or_template.rb b/app/models/vm_or_template.rb index 3a3cc2e515d..131743110ca 100644 --- a/app/models/vm_or_template.rb +++ b/app/models/vm_or_template.rb @@ -20,6 +20,7 @@ class VmOrTemplate < ApplicationRecord include Snapshotting attr_accessor :surrogate_host + @surrogate_host = nil include ProviderObjectMixin @@ -55,10 +56,10 @@ class VmOrTemplate < ApplicationRecord "unknown" => "Unknown" } - POWER_OPS = %w(start stop suspend reset shutdown_guest standby_guest reboot_guest) - REMOTE_REGION_TASKS = POWER_OPS + %w(retire_now) + POWER_OPS = %w[start stop suspend reset shutdown_guest standby_guest reboot_guest] + REMOTE_REGION_TASKS = POWER_OPS + %w[retire_now] - validates_presence_of :name, :location + validates :name, :location, :presence => true validates :vendor, :inclusion => {:in => VENDOR_TYPES.keys} has_one :operating_system, :dependent => :destroy @@ -102,7 +103,7 @@ class VmOrTemplate < ApplicationRecord has_many :filesystems, :as => :resource, :dependent => :destroy has_many :directories, -> { where("rsc_type = 'dir'") }, :as => :resource, :class_name => "Filesystem" - has_many :files, -> { where("rsc_type = 'file'") }, :as => :resource, :class_name => "Filesystem" + has_many :files, -> { where("rsc_type = 'file'") }, :as => :resource, :class_name => "Filesystem" has_many :scan_histories, :dependent => :destroy has_many :lifecycle_events, :class_name => "LifecycleEvent" @@ -113,12 +114,11 @@ class VmOrTemplate < ApplicationRecord has_many :metrics, :as => :resource # Destroy will be handled by purger has_many :metric_rollups, :as => :resource # Destroy will be handled by purger - has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger + has_many :vim_performance_states, :as => :resource # Destroy will be handled by purger has_many :storage_files, :dependent => :destroy has_many :storage_files_files, -> { where("rsc_type = 'file'") }, :class_name => "StorageFile" - # EMS Events has_many :ems_events, ->(vmt) { unscope(:where => :vm_or_template_id).where(["vm_or_template_id = ? 
OR dest_vm_or_template_id = ?", vmt.id, vmt.id]).order(:timestamp) }, :class_name => "EmsEvent", :inverse_of => :vm_or_template @@ -174,7 +174,7 @@ class VmOrTemplate < ApplicationRecord virtual_column :has_rdm_disk, :type => :boolean, :uses => {:hardware => :disks} virtual_column :disks_aligned, :type => :string, :uses => {:hardware => {:hard_disks => :partitions_aligned}} - virtual_has_many :processes, :class_name => "OsProcess", :uses => {:operating_system => :processes} + virtual_has_many :processes, :class_name => "OsProcess", :uses => {:operating_system => :processes} virtual_has_many :event_logs, :uses => {:operating_system => :event_logs} virtual_has_many :lans, :uses => {:hardware => {:nics => :lan}} virtual_has_many :child_resources, :class_name => "VmOrTemplate" @@ -232,10 +232,10 @@ def from_infra_manager? where(arel_table[:template].eq(true).and(arel_table[:ems_id].eq(nil)).or(arel_table[:host_id].eq(nil))) end) - alias_method :datastores, :storages # Used by web-services to return datastores as the property name + alias datastores storages # Used by web-services to return datastores as the property name - alias_method :parent_cluster, :ems_cluster - alias_method :owning_cluster, :ems_cluster + alias parent_cluster ems_cluster + alias owning_cluster ems_cluster # Add virtual columns/methods for specific things derived from advanced_settings REQUIRED_ADVANCED_SETTINGS = { @@ -256,12 +256,12 @@ def from_infra_manager? as = advanced_settings.detect { |setting| setting.name == k } return nil if as.nil? || as.value.nil? - return case t - when :boolean then ActiveRecord::Type::Boolean.new.cast(as.value) - when :integer then as.value.to_i - when :float then as.value.to_f - else as.value.to_s - end + case t + when :boolean then ActiveRecord::Type::Boolean.new.cast(as.value) + when :integer then as.value.to_i + when :float then as.value.to_f + else as.value.to_s + end end virtual_column m, :type => t, :uses => :advanced_settings @@ -279,11 +279,12 @@ def from_infra_manager? ] disk_methods.each do |k, t| - m = "disk_#{i}_#{k}".to_sym + m = "disk_#{i}_#{k}".to_sym define_method(m) do return nil if hardware.nil? return nil if hardware.hard_disks.length < i + hardware.hard_disks[i - 1].send(k) end @@ -335,12 +336,13 @@ def to_s def is_evm_appliance? !!miq_server end - alias_method :is_evm_appliance, :is_evm_appliance? + alias is_evm_appliance is_evm_appliance? # Determines if the VM is on an EMS or Host def registered? # TODO: Vmware specific return false if template? && ems_id.nil? + host_id.present? end @@ -429,27 +431,27 @@ def self.validate_task(task, vm, options) # VM has no host or storage affiliation if vm.storage.nil? - task.error("#{vm.name}: There is no owning Host or Datastore for this VM, "\ + task.error("#{vm.name}: There is no owning Host or Datastore for this VM, " \ "'#{options[:task]}' is not allowed") return false end # VM belongs to a storage/repository location # TODO: The following never gets run since the invoke tasks invokes it as a job, and only tasks get to this point ? - unless %w(scan sync).include?(options[:task]) + unless %w[scan sync].include?(options[:task]) task.error("#{vm.name}: There is no owning Host for this VM, '#{options[:task]}' is not allowed") return false end spid = ::Settings.repository_scanning.defaultsmartproxy - if spid.nil? # No repo scanning SmartProxy configured + if spid.nil? 
# No repo scanning SmartProxy configured task.error("#{vm.name}: No Default Repository SmartProxy is configured, contact your EVM administrator") return false elsif MiqProxy.exists?(spid) == false task.error("#{vm.name}: The Default Repository SmartProxy no longer exists, contact your EVM Administrator") return false end - if MiqProxy.find(spid).state != "on" # Repo scanning host iagent s not running - task.error("#{vm.name}: The Default Repository SmartProxy, '#{sp.name}', is not running. "\ + if MiqProxy.find(spid).state != "on" # Repo scanning host iagent s not running + task.error("#{vm.name}: The Default Repository SmartProxy, '#{sp.name}', is not running. " \ "'#{options[:task]}' not attempted") return false end @@ -459,18 +461,18 @@ def self.validate_task(task, vm, options) # override def self.task_invoked_by(options) - %w(scan sync).include?(options[:task]) ? :job : super + %w[scan sync].include?(options[:task]) ? :job : super end private_class_method :task_invoked_by # override def self.task_arguments(options) case options[:task] - when "scan", "sync" then + when "scan", "sync" [options[:userid]] - when "remove_snapshot", "revert_to_snapshot" then + when "remove_snapshot", "revert_to_snapshot" [options[:snap_selected]] - when "create_snapshot" then + when "create_snapshot" [options[:name], options[:description], options[:memory]] else super @@ -605,22 +607,22 @@ def service_pack def self.rss_fails_policy(_name, options) order(options[:orderby]).limit(options[:limit_to_count]).each_with_object([]) do |vm, result| rec = OpenStruct.new(vm.attributes) - if vm.host.nil? - rec.host_name = "unknown" - else - rec.host_name = vm.host.name - end + rec.host_name = if vm.host.nil? + "unknown" + else + vm.host.name + end rec.vm_id = vm.id rec.reason = [] presult = vm.enforce_policy("rsop") - if presult[:result] == false - presult[:details].each do |p| - rec.reason.push(p["description"]) unless p["result"] - end - if rec.reason != [] - rec.reason = rec.reason.join(", ") - result.push(rec) - end + next unless presult[:result] == false + + presult[:details].each do |p| + rec.reason.push(p["description"]) unless p["result"] + end + if rec.reason != [] + rec.reason = rec.reason.join(", ") + result.push(rec) end end end @@ -636,7 +638,7 @@ def vendor_display # TODO: Vmware specific URI methods? Next 3 methods def self.location2uri(location, scheme = "file") pat = %r{^(file|http|miq)://([^/]*)/(.+)$} - unless pat =~ location + unless pat&.match?(location) # location = scheme<<"://"< path} unless path =~ %r{^//[^/].*/.+$} + raise _("path, '%{path}', is malformed") % {:path => path} unless %r{^//[^/].*/.+$}.match?(path) + # path is a UNC storage_name = path.split("/")[0..3].join("/") path.split("/")[4..path.length].join("/") if path.length > 4 - #VMFS + # VMFS elsif path.starts_with?("[") - raise _("path, '%{path}', is malformed") % {:path => path} unless path =~ /^\[[^\]].+\].*$/ + raise _("path, '%{path}', is malformed") % {:path => path} unless /^\[[^\]].+\].*$/.match?(path) + # path is a VMWare storage name /^\[(.*)\](.*)$/ =~ path storage_name = $1 temp_path = $2.strip # Some esx servers add a leading "/". 
# This needs to be stripped off to allow matching on location - temp_path.sub(/^\//,'') + temp_path.sub(/^\//, '') # local else raise _("path, '%{path}', is malformed") % {:path => path} @@ -789,14 +793,14 @@ def parent_resource_pool parent(:of_type => "ResourcePool") end end - alias_method :owning_resource_pool, :parent_resource_pool + alias owning_resource_pool parent_resource_pool def parent_blue_folder with_relationship_type('ems_metadata') do parent(:of_type => "EmsFolder") end end - alias_method :owning_blue_folder, :parent_blue_folder + alias owning_blue_folder parent_blue_folder def parent_blue_folders(*args) f = parent_blue_folder @@ -805,6 +809,7 @@ def parent_blue_folders(*args) def under_blue_folder?(folder) return false unless folder.kind_of?(EmsFolder) + parent_blue_folders.any? { |f| f == folder } end @@ -812,36 +817,36 @@ def parent_blue_folder_path(*args) f = parent_blue_folder f.nil? ? "" : f.folder_path(*args) end - alias_method :owning_blue_folder_path, :parent_blue_folder_path + alias owning_blue_folder_path parent_blue_folder_path def parent_folder ems_cluster.try(:parent_folder) end - alias_method :owning_folder, :parent_folder - alias_method :parent_yellow_folder, :parent_folder + alias owning_folder parent_folder + alias parent_yellow_folder parent_folder def parent_folders(*args) f = parent_folder f.nil? ? [] : f.folder_path_objs(*args) end - alias_method :parent_yellow_folders, :parent_folders + alias parent_yellow_folders parent_folders def parent_folder_path(*args) f = parent_folder f.nil? ? "" : f.folder_path(*args) end - alias_method :owning_folder_path, :parent_folder_path - alias_method :parent_yellow_folder_path, :parent_folder_path + alias owning_folder_path parent_folder_path + alias parent_yellow_folder_path parent_folder_path def parent_datacenter ems_cluster.try(:parent_datacenter) end - alias_method :owning_datacenter, :parent_datacenter + alias owning_datacenter parent_datacenter def parent_blue_folder_display_path parent_blue_folder_path(:exclude_non_display_folders => true) end - alias_method :v_parent_blue_folder_display_path, :parent_blue_folder_display_path + alias v_parent_blue_folder_display_path parent_blue_folder_display_path def lans !hardware.nil? ? hardware.nics.collect(&:lan).compact : [] @@ -851,16 +856,16 @@ def lans def ems_host_list params = {} [ext_management_system, "ems", host, "host"].each_slice(2) do |ems, type| - if ems - params[type] = { - :hostname => ems.hostname, - :ipaddress => ems.ipaddress, - :username => ems.authentication_userid, - :password => ems.authentication_password_encrypted, - :class_name => ems.class.name - } - params[type][:port] = ems.port if ems.respond_to?(:port) && !ems.port.blank? - end + next unless ems + + params[type] = { + :hostname => ems.hostname, + :ipaddress => ems.ipaddress, + :username => ems.authentication_userid, + :password => ems.authentication_password_encrypted, + :class_name => ems.class.name + } + params[type][:port] = ems.port if ems.respond_to?(:port) && ems.port.present? end params end @@ -969,14 +974,15 @@ def log_proxies_vm_config def log_proxies_format_instance(object) return 'Nil' if object.nil? 
+ "#{object.class.name}:#{object.id}-#{object.name}:#{object.try(:state)}" end def storage2proxies - @storage_proxies ||= begin + # Support vixDisk scanning of VMware VMs from the vmdb server - miq_server_proxies - end + @storage_proxies ||= miq_server_proxies + end def storage2active_proxies(all_proxy_list = nil) @@ -987,8 +993,8 @@ def storage2active_proxies(all_proxy_list = nil) # MiqServer coresident proxy needs to contact the host and provide credentials. # Remove any MiqServer instances if we do not have credentials - rsc = self.scan_via_ems? ? ext_management_system : host - proxies.delete_if { |p| MiqServer === p } if rsc && !rsc.authentication_status_ok? + rsc = scan_via_ems? ? ext_management_system : host + proxies.delete_if { |p| p.kind_of?(MiqServer) } if rsc && !rsc.authentication_status_ok? _log.debug("proxies2.length = #{proxies.length}") proxies @@ -1011,7 +1017,7 @@ def miq_server_proxies case vendor when 'vmware' # VM cannot be scanned by server if they are on a repository - return [] if storage_id.blank? || self.repository_vm? + return [] if storage_id.blank? || repository_vm? when 'microsoft' return [] if storage_id.blank? else @@ -1033,7 +1039,7 @@ def miq_server_proxies miq_servers = srs.select do |svr| (svr.vm_scan_host_affinity? ? host_server_ids.detect { |id| id == svr.id } : host_server_ids.empty?) && - (svr.vm_scan_storage_affinity? ? all_storage_server_ids.detect { |id| id == svr.id } : storage_server_ids.empty?) + (svr.vm_scan_storage_affinity? ? all_storage_server_ids.detect { |id| id == svr.id } : storage_server_ids.empty?) end _log.debug("miq_servers1.length = #{miq_servers.length}") @@ -1058,10 +1064,15 @@ def repository_vm? # TODO: Vmware specfic def template=(val) return val unless val ^ template # Only continue if toggling setting + write_attribute(:template, val) - self.type = corresponding_model.name if (self.template? && self.kind_of?(Vm)) || (!self.template? && self.kind_of?(MiqTemplate)) - d = self.template? ? [/\.vmx$/, ".vmtx", 'never'] : [/\.vmtx$/, ".vmx", state == 'never' ? 'unknown' : raw_power_state] + self.type = corresponding_model.name if (template? && kind_of?(Vm)) || (!template? && kind_of?(MiqTemplate)) + d = if template? + [/\.vmx$/, ".vmtx", 'never'] + else + [/\.vmtx$/, ".vmx", state == 'never' ? 'unknown' : raw_power_state] + end self.location = location.sub(d[0], d[1]) unless location.nil? self.raw_power_state = d[2] end @@ -1163,13 +1174,14 @@ def rhevm_config_path # /rhev/data-center//mastersd/master/vms//.ovf/ datacenter = parent_datacenter return location if datacenter.blank? + File.join('/rhev/data-center', datacenter.uid_ems, 'mastersd/master/vms', uid_ems, location) end def state (power_state || "unknown").downcase end - alias_method :current_state, :state + alias current_state state # Override raw_power_state= attribute setter in order to impose side effects # of setting previous_state and updating state_changed_on @@ -1180,19 +1192,18 @@ def raw_power_state=(new_state) self.previous_state = raw_power_state self.state_changed_on = Time.now.utc super - self.power_state = calculate_power_state + self.power_state = calculate_power_state end - new_state end def self.calculate_power_state(raw_power_state) - (raw_power_state == "never") ? "never" : "unknown" + raw_power_state == "never" ? "never" : "unknown" end def archived? ems_id.nil? && storage_id.nil? end - alias_method :archived, :archived? + alias archived archived? 
virtual_attribute :archived, :boolean, :arel => (lambda do |t| t.grouping(t[:ems_id].eq(nil).and(t[:storage_id].eq(nil))) end) @@ -1200,7 +1211,7 @@ def archived? def orphaned? ems_id.nil? && !storage_id.nil? end - alias_method :orphaned, :orphaned? + alias orphaned orphaned? virtual_attribute :orphaned, :boolean, :arel => (lambda do |t| t.grouping(t[:ems_id].eq(nil).and(t[:storage_id].not_eq(nil))) end) @@ -1208,7 +1219,7 @@ def orphaned? def active? !archived? && !orphaned? && !retired? && !template? end - alias_method :active, :active? + alias active active? # in sql nil != false ==> false virtual_attribute :active, :boolean, :arel => (lambda do |t| t.grouping(t[:ems_id].not_eq(nil) @@ -1224,28 +1235,29 @@ def disconnected? virtual_attribute :disconnected, :boolean, :arel => (lambda do |t| t.grouping(t[:connection_state].not_eq(nil).and(t[:connection_state].not_eq("connected"))) end) - alias_method :disconnected, :disconnected? + alias disconnected disconnected? def normalized_state return self["normalized_state"] if has_attribute?("normalized_state") - %w(archived orphaned template retired disconnected).each do |s| - return s if send("#{s}?") + %w[archived orphaned template retired disconnected].each do |s| + return s if send(:"#{s}?") end return power_state.downcase unless power_state.nil? + "unknown" end virtual_attribute :normalized_state, :string, :arel => (lambda do |t| t.grouping( Arel::Nodes::Case.new - .when(arel_table[:archived]).then(Arel.sql("\'archived\'")) - .when(arel_table[:orphaned]).then(Arel.sql("\'orphaned\'")) - .when(t[:template].eq(t.create_true)).then(Arel.sql("\'template\'")) - .when(t[:retired].eq(t.create_true)).then(Arel.sql("\'retired\'")) - .when(arel_table[:disconnected]).then(Arel.sql("\'disconnected\'")) + .when(arel_table[:archived]).then(Arel.sql("'archived'")) + .when(arel_table[:orphaned]).then(Arel.sql("'orphaned'")) + .when(t[:template].eq(t.create_true)).then(Arel.sql("'template'")) + .when(t[:retired].eq(t.create_true)).then(Arel.sql("'retired'")) + .when(arel_table[:disconnected]).then(Arel.sql("'disconnected'")) .else(t.lower( - t.coalesce([t[:power_state], Arel.sql("\'unknown\'")]) - )) + t.coalesce([t[:power_state], Arel.sql("'unknown'")]) + )) ) end) @@ -1261,7 +1273,7 @@ def classify_with_parent_folder_path_queue(add = true) def classify_with_parent_folder_path(add = true) [:blue, :yellow].each do |folder_type| - path = send("parent_#{folder_type}_folder_path") + path = send(:"parent_#{folder_type}_folder_path") next if path.blank? cat = self.class.folder_category(folder_type) @@ -1300,9 +1312,9 @@ def self.folder_entry(ent_desc, cat) def event_where_clause(assoc = :ems_events) case assoc.to_sym when :ems_events, :event_streams - return ["vm_or_template_id = ? OR dest_vm_or_template_id = ? ", id, id] + ["vm_or_template_id = ? OR dest_vm_or_template_id = ? ", id, id] when :policy_events - return ["target_id = ? and target_class = ? ", id, self.class.base_class.name] + ["target_id = ? and target_class = ? ", id, self.class.base_class.name] end end @@ -1322,14 +1334,14 @@ def v_owning_folder o ? o.name : "" end - alias_method :v_owning_folder_path, :owning_folder_path + alias v_owning_folder_path owning_folder_path def v_owning_blue_folder o = owning_blue_folder o ? 
o.name : "" end - alias_method :v_owning_blue_folder_path, :owning_blue_folder_path + alias v_owning_blue_folder_path owning_blue_folder_path def v_owning_datacenter o = owning_datacenter @@ -1337,7 +1349,7 @@ def v_owning_datacenter end def v_is_a_template - self.template?.to_s.capitalize + template?.to_s.capitalize end # technically it is capitalized, but for sorting, not a concern # but we do need nil to become false @@ -1354,6 +1366,7 @@ def event_threshold?(options = {:time_threshold => 30.minutes, :event_types => [ raise _("option :event_types is required") unless options[:event_types] raise _("option :time_threshold is required") unless options[:time_threshold] raise _("option :freq_threshold is required") unless options[:freq_threshold] + EmsEvent .where(:event_type => options[:event_types]) .where("vm_or_template_id = :id OR dest_vm_or_template_id = :id", :id => id) @@ -1366,7 +1379,11 @@ def reconfigured_hardware_value?(options) raise _(":hdw_attr required") if attr.nil? operator = options[:operator] || ">" - operator = operator.downcase == "increased" ? ">" : operator.downcase == "decreased" ? "<" : operator + operator = if operator.downcase == "increased" + ">" + else + operator.downcase == "decreased" ? "<" : operator + end current_state, prev_state = drift_states.order("timestamp DESC").limit(2) if current_state.nil? || prev_state.nil? @@ -1454,11 +1471,13 @@ def disks_aligned return "Unknown" if dlist.empty? return "True" if dlist.all? { |d| d.partitions_aligned == "True" } return "False" if dlist.any? { |d| d.partitions_aligned == "False" } + "Unknown" end def memory_exceeds_current_host_headroom return false if host.nil? + (ram_size > host.current_memory_headroom) end @@ -1501,6 +1520,7 @@ def has_required_host? def has_active_ems? return true unless ext_management_system.nil? + false end @@ -1518,11 +1538,11 @@ def perf_rollup_parents(interval_name = nil) def self.vms_by_ipaddress(ipaddress) ipaddresses = ipaddress.split(',') Network.where("ipaddress in (?)", ipaddresses).each do |network| - begin - vm = network.hardware.vm - yield(vm) - rescue - end + + vm = network.hardware.vm + yield(vm) + rescue + end end @@ -1550,7 +1570,7 @@ def self.miq_expression_includes_any_ipaddresses_arel(ipaddress) hardwares = Hardware.arel_table match_grouping = networks[:ipaddress].matches("%#{ipaddress}%") - .or(networks[:ipv6address].matches("%#{ipaddress}%")) + .or(networks[:ipv6address].matches("%#{ipaddress}%")) query = hardwares.project(1) .join(networks).on(networks[:hardware_id].eq(hardwares[:id])) @@ -1622,10 +1642,10 @@ def console_supported?(_type) def non_generic_charts_available? false end - alias_method :cpu_ready_available?, :non_generic_charts_available? - alias_method :cpu_mhz_available?, :non_generic_charts_available? - alias_method :cpu_percent_available?, :non_generic_charts_available? - alias_method :memory_mb_available?, :non_generic_charts_available? + alias cpu_ready_available? non_generic_charts_available? + alias cpu_mhz_available? non_generic_charts_available? + alias cpu_percent_available? non_generic_charts_available? + alias memory_mb_available? non_generic_charts_available? def self.includes_template?(ids) MiqTemplate.where(:id => ids).exists? @@ -1641,10 +1661,11 @@ def reconfigurable? def self.reconfigurable?(ids) vms = VmOrTemplate.where(:id => ids) return false if vms.blank? + vms.all?(&:reconfigurable?) 
end - PUBLIC_TEMPLATE_CLASSES = %w(ManageIQ::Providers::Openstack::CloudManager::Template).freeze + PUBLIC_TEMPLATE_CLASSES = %w[ManageIQ::Providers::Openstack::CloudManager::Template].freeze def self.tenant_id_clause(user_or_group) template_tenant_ids = MiqTemplate.accessible_tenant_ids(user_or_group, Rbac.accessible_tenant_ids_strategy(MiqTemplate)) @@ -1700,7 +1721,7 @@ def calculate_power_state end # deprecated, use unsupported_reason(:action) instead - def check_feature_support(message_prefix) + def check_feature_support(_message_prefix) reason = unsupported_reason(:action) [!reason, reason] end diff --git a/app/models/vm_or_template/operations.rb b/app/models/vm_or_template/operations.rb index 05369c4c149..ee987e98a17 100644 --- a/app/models/vm_or_template/operations.rb +++ b/app/models/vm_or_template/operations.rb @@ -6,7 +6,7 @@ module VmOrTemplate::Operations include Relocation include Snapshot - alias_method :ruby_clone, :clone + alias ruby_clone clone def raw_clone(_name, _folder, _pool = nil, _host = nil, _datastore = nil, _powerOn = false, _template_flag = false, _transform = nil, _config = nil, _customization = nil, _disk = nil) raise NotImplementedError, _("must be implemented in a subclass") @@ -118,8 +118,6 @@ def log_user_event(user_event) $log.info(user_event) end - private - # # UI button validation methods # diff --git a/app/models/vm_or_template/operations/relocation.rb b/app/models/vm_or_template/operations/relocation.rb index f8aea1dce39..2786b399505 100644 --- a/app/models/vm_or_template/operations/relocation.rb +++ b/app/models/vm_or_template/operations/relocation.rb @@ -66,6 +66,7 @@ def raw_move_into_folder(_folder) def move_into_folder(folder_or_id) raise _("VM has no EMS, unable to move VM into a new folder") unless ext_management_system + folder = folder_or_id.kind_of?(Integer) ? EmsFolder.find(folder_or_id) : folder_or_id if parent_blue_folder == folder @@ -94,6 +95,7 @@ def move_into_folder_queue(userid, folder) def migrate_via_ids(host_id, pool_id = nil, priority = "defaultPriority", state = nil) host = Host.find_by(:id => host_id) raise _("Host with ID=%{host_id} was not found") % {:host_id => host_id} if host.nil? + pool = pool_id && ResourcePool.find_by(:id => pool_id) migrate(host, pool, priority, state) end diff --git a/app/models/vm_or_template/right_sizing.rb b/app/models/vm_or_template/right_sizing.rb index 3fd1f7a5369..aec4477ca6d 100644 --- a/app/models/vm_or_template/right_sizing.rb +++ b/app/models/vm_or_template/right_sizing.rb @@ -52,10 +52,10 @@ def metric_rollup_vattr_arel(col) where_clause = metric_rollup_table[:time_profile_id].in(tp_ids) - .and(metric_rollup_table[:capture_interval_name].eq("daily")) - .and(metric_rollup_table[:timestamp].between(timestamp)) - .and(metric_rollup_table[:resource_type].eq("VmOrTemplate")) - .and(metric_rollup_table[:resource_id].eq(t[:id])) + .and(metric_rollup_table[:capture_interval_name].eq("daily")) + .and(metric_rollup_table[:timestamp].between(timestamp)) + .and(metric_rollup_table[:resource_type].eq("VmOrTemplate")) + .and(metric_rollup_table[:resource_id].eq(t[:id])) t.grouping( metric_rollup_table.project(select_clause) @@ -115,38 +115,38 @@ def mem_recommendation_minimum MEMORY_RECOMMENDATION_ROUND_TO_NEAREST = 4 RIGHT_SIZING_MODES.each do |mode, meth| - define_method("#{mode}_recommended_vcpus") do - base_recommended(send(meth[:cpu]), cpu_total_cores, self.class.cpu_recommendation_minimum) unless cpu_total_cores.nil? 
+ define_method(:"#{mode}_recommended_vcpus") do + base_recommended(send(meth[:cpu]), cpu_total_cores, self.class.cpu_recommendation_minimum) unless cpu_total_cores.nil? end - define_method("#{mode}_recommended_mem") do + define_method(:"#{mode}_recommended_mem") do base_recommended(send(meth[:mem]), ram_size, self.class.mem_recommendation_minimum, MEMORY_RECOMMENDATION_ROUND_TO_NEAREST) unless ram_size.nil? end - define_method("#{mode}_vcpus_recommended_change_pct") do - base_change_percentage(send("#{mode}_recommended_vcpus"), cpu_total_cores) unless cpu_total_cores.nil? + define_method(:"#{mode}_vcpus_recommended_change_pct") do + base_change_percentage(send(:"#{mode}_recommended_vcpus"), cpu_total_cores) unless cpu_total_cores.nil? end - define_method("#{mode}_mem_recommended_change_pct") do - base_change_percentage(send("#{mode}_recommended_mem"), ram_size) unless ram_size.nil? + define_method(:"#{mode}_mem_recommended_change_pct") do + base_change_percentage(send(:"#{mode}_recommended_mem"), ram_size) unless ram_size.nil? end - define_method("#{mode}_vcpus_recommended_change") do - base_change(send("#{mode}_recommended_vcpus"), cpu_total_cores) unless cpu_total_cores.nil? + define_method(:"#{mode}_vcpus_recommended_change") do + base_change(send(:"#{mode}_recommended_vcpus"), cpu_total_cores) unless cpu_total_cores.nil? end - define_method("#{mode}_mem_recommended_change") do - base_change(send("#{mode}_recommended_mem"), ram_size) unless ram_size.nil? + define_method(:"#{mode}_mem_recommended_change") do + base_change(send(:"#{mode}_recommended_mem"), ram_size) unless ram_size.nil? end end ##################################################### # BACKWARD COMPATIBILITY for REPORTS THAT USE THESE ##################################################### - alias_method :recommended_vcpus, :aggressive_recommended_vcpus - alias_method :recommended_mem, :aggressive_recommended_mem - alias_method :overallocated_vcpus_pct, :aggressive_vcpus_recommended_change_pct - alias_method :overallocated_mem_pct, :aggressive_mem_recommended_change_pct + alias recommended_vcpus aggressive_recommended_vcpus + alias recommended_mem aggressive_recommended_mem + alias overallocated_vcpus_pct aggressive_vcpus_recommended_change_pct + alias overallocated_mem_pct aggressive_mem_recommended_change_pct def max_cpu_usage_rate_average_max_over_time_period end_date = Time.now.utc.beginning_of_day - 1 @@ -154,17 +154,18 @@ def max_cpu_usage_rate_average_max_over_time_period perfs.collect do |p| # Ignore any CPU bursts to 100% 15 minutes after VM booted next if (p.abs_max_cpu_usage_rate_average_value == 100.0) && boot_time && (p.abs_max_cpu_usage_rate_average_timestamp <= (boot_time + 15.minutes)) + p.abs_max_cpu_usage_rate_average_value end.compact.max end - alias_method :cpu_usage_rate_average_max_over_time_period, :max_cpu_usage_rate_average_max_over_time_period + alias cpu_usage_rate_average_max_over_time_period max_cpu_usage_rate_average_max_over_time_period def max_mem_usage_absolute_average_max_over_time_period end_date = Time.now.utc.beginning_of_day - 1 perfs = VimPerformanceAnalysis.find_perf_for_time_period(self, "daily", :end_date => end_date, :days => Metric::LongTermAverages::AVG_DAYS) perfs.collect(&:abs_max_mem_usage_absolute_average_value).compact.max end - alias_method :mem_usage_absolute_average_max_over_time_period, :max_mem_usage_absolute_average_max_over_time_period + alias mem_usage_absolute_average_max_over_time_period max_mem_usage_absolute_average_max_over_time_period def 
cpu_usagemhz_rate_average_max_over_time_period end_date = Time.now.utc.beginning_of_day - 1 @@ -191,6 +192,7 @@ def base_recommended(max, actual, min = nil, round_to_nearest = nil) def base_change(recommended, actual) return if actual.nil? || recommended.nil? + actual - recommended end diff --git a/app/models/vm_or_template/scanning.rb b/app/models/vm_or_template/scanning.rb index fbd54cd8826..6f40edf8c7c 100644 --- a/app/models/vm_or_template/scanning.rb +++ b/app/models/vm_or_template/scanning.rb @@ -5,9 +5,9 @@ module VmOrTemplate::Scanning def scan(userid = "system", options = {}) # Check if there are any current scan jobs already waiting to run j = VmScan.where(:state => 'waiting_to_start') - .where(:sync_key => guid) - .pluck(:id) - unless j.blank? + .where(:sync_key => guid) + .pluck(:id) + if j.present? _log.info("VM scan job will not be added due to existing scan job waiting to be processed. VM ID:[#{id}] Name:[#{name}] Guid:[#{guid}] Existing Job IDs [#{j.join(", ")}]") return nil end @@ -33,8 +33,8 @@ def raw_scan(userid = "system", options = {}) self.last_scan_attempt_on = Time.now.utc save - job = scan_job_class.create_job(options) - return job + scan_job_class.create_job(options) + rescue => err _log.log_backtrace(err) raise diff --git a/app/models/vm_or_template/snapshotting.rb b/app/models/vm_or_template/snapshotting.rb index 5d6feb81cd5..c05900643f9 100644 --- a/app/models/vm_or_template/snapshotting.rb +++ b/app/models/vm_or_template/snapshotting.rb @@ -26,48 +26,56 @@ def oldest_snapshot def v_snapshot_oldest_name oldest = oldest_snapshot return nil if oldest.nil? + oldest.name end def v_snapshot_oldest_description oldest = oldest_snapshot return nil if oldest.nil? + oldest.description end def v_snapshot_oldest_total_size oldest = oldest_snapshot return nil if oldest.nil? + oldest.total_size end def v_snapshot_oldest_timestamp oldest = oldest_snapshot return nil if oldest.nil? + oldest.create_time end def v_snapshot_newest_name newest = newest_snapshot return nil if newest.nil? + newest.name end def v_snapshot_newest_description newest = newest_snapshot return nil if newest.nil? + newest.description end def v_snapshot_newest_total_size newest = newest_snapshot return nil if newest.nil? + newest.total_size end def v_snapshot_newest_timestamp newest = newest_snapshot return nil if newest.nil? 
+ newest.create_time end diff --git a/app/models/vm_reconfigure_request.rb b/app/models/vm_reconfigure_request.rb index 5fd62340e95..300d4bfa5fb 100644 --- a/app/models/vm_reconfigure_request.rb +++ b/app/models/vm_reconfigure_request.rb @@ -1,10 +1,10 @@ class VmReconfigureRequest < MiqRequest TASK_DESCRIPTION = N_('VM Reconfigure') SOURCE_CLASS_NAME = 'Vm' - ACTIVE_STATES = %w( reconfigured ) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[reconfigured] + base_class::ACTIVE_STATES - validates_inclusion_of :request_state, :in => %w( pending finished ) + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished" - validate :must_have_user + validates :request_state, :inclusion => {:in => %w[pending finished] + ACTIVE_STATES, :message => "should be pending, #{ACTIVE_STATES.join(", ")} or finished"} + validate :must_have_user include MiqProvisionQuotaMixin def self.request_limits(options) @@ -31,7 +31,7 @@ def self.request_limits(options) end result[:max__number_of_sockets] = all_vcpus.min - result[:max__vm_memory] = all_memory.min + result[:max__vm_memory] = all_memory.min result[:max__cores_per_socket] = all_cores_per_socket.min result[:max__total_vcpus] = all_total_vcpus.min @@ -47,7 +47,7 @@ def self.validate_request(options) # Check if memory value is divisible by 4 and within the allowed limits mem = options[:vm_memory] - unless mem.blank? + if mem.present? mem = mem.to_i errors << "Memory value must be less than #{limits[:max__vm_memory]} MB. Current value: #{mem} MB" if mem > limits[:max__vm_memory] errors << "Memory value must be greater than #{limits[:min__vm_memory]} MB. Current value: #{mem} MB" if mem < limits[:min__vm_memory] @@ -56,7 +56,7 @@ def self.validate_request(options) # Check if cpu value is within the allowed limits cpus = options[:number_of_sockets] - unless cpus.blank? + if cpus.present? cpus = cpus.to_i errors << "Processor value must be less than #{limits[:max__number_of_sockets]}. Current value: #{cpus}" if cpus > limits[:max__number_of_sockets] errors << "Processor value must be greater than #{limits[:min__number_of_sockets]}. Current value: #{cpus}" if cpus < limits[:min__number_of_sockets] @@ -64,7 +64,7 @@ def self.validate_request(options) # Check if cpu value is within the allowed limits cores = options[:cores_per_socket] - unless cores.blank? + if cores.present? cores = cores.to_i errors << "The Cores per Socket value must be less than #{limits[:max__cores_per_socket]}. Current value: #{cores}" if cores > limits[:max__cores_per_socket] errors << "The Cores per Socket value must be greater than #{limits[:min__cores_per_socket]}. Current value: #{cores}" if cores < limits[:min__cores_per_socket] @@ -73,10 +73,11 @@ def self.validate_request(options) # Check if the total number of cpu value is within the allowed limits unless cpus.blank? || cores.blank? total_vcpus = (cores * cpus) - errors << "The total number of cpus must be less than #{limits[:max__total_vcpus]}. Current value: #{total_vcpus}" if total_vcpus > limits[:max__total_vcpus] + errors << "The total number of cpus must be less than #{limits[:max__total_vcpus]}. Current value: #{total_vcpus}" if total_vcpus > limits[:max__total_vcpus] end return false if errors.blank? 
+ errors end diff --git a/app/models/vm_retire_request.rb b/app/models/vm_retire_request.rb index ba31b1116d7..f0f8518706e 100644 --- a/app/models/vm_retire_request.rb +++ b/app/models/vm_retire_request.rb @@ -1,7 +1,7 @@ class VmRetireRequest < MiqRetireRequest TASK_DESCRIPTION = N_('VM Retire').freeze SOURCE_CLASS_NAME = 'Vm'.freeze - ACTIVE_STATES = %w(retired) + base_class::ACTIVE_STATES + ACTIVE_STATES = %w[retired] + base_class::ACTIVE_STATES default_value_for(:source_id) { |r| r.get_option(:src_ids) } attribute :source_type, :default => SOURCE_CLASS_NAME diff --git a/app/models/vm_scan.rb b/app/models/vm_scan.rb index f5dad89bfe0..ec47de3806e 100644 --- a/app/models/vm_scan.rb +++ b/app/models/vm_scan.rb @@ -15,22 +15,22 @@ def self.current_job_timeout(timeout_adjustment = 1) def load_transitions self.state ||= 'initialize' { - :initializing => {'initialize' => 'waiting_to_start'}, - :start => {'waiting_to_start' => 'checking_policy'}, - :before_scan => {'checking_policy' => 'before_scan'}, - :start_scan => {'before_scan' => 'scanning'}, - :after_scan => {'scanning' => 'after_scan'}, - :synchronize => {'after_scan' => 'synchronizing'}, - :finish => {'synchronizing' => 'finished', - 'aborting' => 'finished'}, - :data => {'scanning' => 'scanning', - 'synchronizing' => 'synchronizing', - 'finished' => 'finished'}, - :scan_retry => {'scanning' => 'scanning'}, - :abort_retry => {'scanning' => 'scanning'}, - :abort_job => {'*' => 'aborting'}, - :cancel => {'*' => 'canceling'}, - :error => {'*' => '*'}, + :initializing => {'initialize' => 'waiting_to_start'}, + :start => {'waiting_to_start' => 'checking_policy'}, + :before_scan => {'checking_policy' => 'before_scan'}, + :start_scan => {'before_scan' => 'scanning'}, + :after_scan => {'scanning' => 'after_scan'}, + :synchronize => {'after_scan' => 'synchronizing'}, + :finish => {'synchronizing' => 'finished', + 'aborting' => 'finished'}, + :data => {'scanning' => 'scanning', + 'synchronizing' => 'synchronizing', + 'finished' => 'finished'}, + :scan_retry => {'scanning' => 'scanning'}, + :abort_retry => {'scanning' => 'scanning'}, + :abort_job => {'*' => 'aborting'}, + :cancel => {'*' => 'canceling'}, + :error => {'*' => '*'}, } end @@ -72,7 +72,7 @@ def check_policy_complete(from_zone, status, message, result) if prof_policies scan_profiles = [] prof_policies.each { |p| scan_profiles += p[:result] unless p[:result].nil? } - options[:scan_profiles] = scan_profiles unless scan_profiles.blank? + options[:scan_profiles] = scan_profiles if scan_profiles.present? save end end @@ -129,7 +129,7 @@ def config_ems_list end def create_scan_args - scan_args = { 'ems' => config_ems_list } + scan_args = {'ems' => config_ems_list} # Check if Policy returned scan profiles to use, otherwise use the default profile if available. scan_args["vmScanProfiles"] = options[:scan_profiles] || vm.scan_profile_list @@ -149,8 +149,7 @@ def call_synchronize options[:categories] = vm.scan_profile_categories(scan_args["vmScanProfiles"]) vm.sync_metadata(options[:categories], "taskid" => jobid, - "host" => host - ) + "host" => host) rescue Timeout::Error message = "timed out attempting to synchronize, aborting" _log.error(message) @@ -222,6 +221,7 @@ def process_data(*args) begin raise _("Unable to find Vm") if vm.nil? 
+ inputs = {:vm => vm, :host => vm.host} MiqEvent.raise_evm_job_event(vm, {:type => "scan", :suffix => "complete"}, inputs) rescue => err @@ -283,6 +283,7 @@ def call_abort_retry(*args) if message.to_s.include?("Could not find VM: [") && options[:scan_count].to_i.zero? # We may need to skip calling the retry if this method is called twice. return if skip_retry == true + options[:scan_count] = options[:scan_count].to_i + 1 EmsRefresh.refresh(vm) vm.reload @@ -327,22 +328,21 @@ def abort_retry(*args) end # All other signals - alias_method :initializing, :dispatch_start - alias_method :start, :call_check_policy - alias_method :synchronize, :call_synchronize - alias_method :abort_job, :process_abort - alias_method :cancel, :process_cancel - alias_method :finish, :process_finished - alias_method :error, :process_error + alias initializing dispatch_start + alias start call_check_policy + alias synchronize call_synchronize + alias abort_job process_abort + alias cancel process_cancel + alias finish process_finished + alias error process_error private def log_user_event(user_event) - begin - vm.log_user_event(user_event) - rescue => err - _log.warn("Failed to log user event with EMS. Error: [#{err.class.name}]: #{err} Event message [#{user_event}]") - end - end + vm.log_user_event(user_event) + rescue => err + _log.warn("Failed to log user event with EMS. Error: [#{err.class.name}]: #{err} Event message [#{user_event}]") + + end end diff --git a/app/models/vmdb_database_lock.rb b/app/models/vmdb_database_lock.rb index 2759dd8e59f..1796874e9e0 100644 --- a/app/models/vmdb_database_lock.rb +++ b/app/models/vmdb_database_lock.rb @@ -4,8 +4,9 @@ class VmdbDatabaseLock < ApplicationRecord def blocking_lock return unless granted == false + blocking_lock_relation.where(:granted => true) - .find_by(['pid != ?', pid]) + .find_by(['pid != ?', pid]) end def self.display_name(number = 1) diff --git a/app/models/volume.rb b/app/models/volume.rb index c49756fdffc..f083fc36f30 100644 --- a/app/models/volume.rb +++ b/app/models/volume.rb @@ -4,8 +4,8 @@ class Volume < ApplicationRecord p = Partition.quoted_table_name v = Volume.quoted_table_name Partition.select("DISTINCT #{p}.*") - .joins("JOIN #{v} ON #{v}.hardware_id = #{p}.hardware_id AND #{v}.volume_group = #{p}.volume_group") - .where("#{v}.id" => id).to_sql + .joins("JOIN #{v} ON #{v}.hardware_id = #{p}.hardware_id AND #{v}.volume_group = #{p}.volume_group") + .where("#{v}.id" => id).to_sql }, :foreign_key => :volume_group virtual_column :free_space_percent, :type => :float @@ -17,16 +17,19 @@ def volume_group # Override volume_group getter to prevent the special physical linkage from coming through vg = read_attribute(:volume_group) return nil if vg.respond_to?(:starts_with?) && vg.starts_with?(PHYSICAL_VOLUME_GROUP) + vg end def free_space_percent return nil if size.nil? || size == 0 || free_space.nil? + Float(free_space) / size * 100 end def used_space_percent return nil if size.nil? || size == 0 || used_space.nil? + Float(used_space) / size * 100 end @@ -68,25 +71,25 @@ def self.add_elements(parent, xmlNode) end nhv = nh[:volume] - unless nhv.nil? - name = nhv[:name] - found = parent.hardware.volumes.where(:name => name).order(:id) - - # Handle duplicate volume names (Generally only in the case of Windows with blank volume names) - if found.length > 1 - dup_volumes[name] = found.collect(&:id) if dup_volumes[name].nil? - found_id = dup_volumes[name].shift - found = found.detect { |f| f.id == found_id } - else - found = found[0] - end - found.nil? ? 
new_volumes << nhv : found.update(nhv) + next if nhv.nil? + + name = nhv[:name] + found = parent.hardware.volumes.where(:name => name).order(:id) + + # Handle duplicate volume names (Generally only in the case of Windows with blank volume names) + if found.length > 1 + dup_volumes[name] = found.collect(&:id) if dup_volumes[name].nil? + found_id = dup_volumes[name].shift + found = found.detect { |f| f.id == found_id } + else + found = found[0] + end + found.nil? ? new_volumes << nhv : found.update(nhv) - deletes[:volumes].each_with_index do |ele, i| - if ele[1] == name - deletes[:volumes].delete_at(i) - break - end + deletes[:volumes].each_with_index do |ele, i| + if ele[1] == name + deletes[:volumes].delete_at(i) + break end end end @@ -156,6 +159,7 @@ def self.xml_to_hashes(parent, xmlNode) def self.find_disk_by_controller(parent, controller) return parent.hardware.disks.find_by(:controller_type => $1, :location => $2) if controller =~ /^([^0-9]+)([0-9]:[0-9]):[0-9]$/ + nil end end diff --git a/app/models/zone.rb b/app/models/zone.rb index 33e000db171..4499730ec49 100644 --- a/app/models/zone.rb +++ b/app/models/zone.rb @@ -1,5 +1,5 @@ class Zone < ApplicationRecord - validates_presence_of :name, :description + validates :name, :description, :presence => true validates :name, :unique_within_region => true serialize :settings, Hash @@ -31,9 +31,9 @@ class Zone < ApplicationRecord has_many :vm_hardwares, :class_name => 'Hardware', :through => :vms_and_templates, :source => :hardware virtual_has_many :active_miq_servers, :class_name => "MiqServer" + after_create :create_server_if_podified before_destroy :remove_servers_if_podified before_destroy :check_zone_in_use_on_destroy - after_create :create_server_if_podified include AuthenticationMixin @@ -70,6 +70,7 @@ def self.create_maintenance_zone MiqRegion.my_region.update(:maintenance_zone => zone) rescue ActiveRecord::RecordInvalid raise if zone.errors[:name].blank? + retry end _log.info("Creating maintenance zone...") @@ -243,6 +244,7 @@ def message_for_invalid_delete return _("cannot delete default zone") if name == "default" return _("cannot delete maintenance zone") if maintenance? return _("zone name '%{name}' is used by a server") % {:name => name} if !MiqEnvironment::Command.is_podified? && miq_servers.present? + _("zone name '%{name}' is used by a provider") % {:name => name} if ext_management_systems.present? end diff --git a/bin/bundle b/bin/bundle index 66e9889e8b4..f19acf5b5cc 100755 --- a/bin/bundle +++ b/bin/bundle @@ -1,3 +1,3 @@ #!/usr/bin/env ruby -ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) +ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__) load Gem.bin_path('bundler', 'bundle') diff --git a/bin/rails b/bin/rails index 5191e6927af..07396602377 100755 --- a/bin/rails +++ b/bin/rails @@ -1,4 +1,4 @@ #!/usr/bin/env ruby -APP_PATH = File.expand_path('../../config/application', __FILE__) +APP_PATH = File.expand_path('../config/application', __dir__) require_relative '../config/boot' require 'rails/commands' diff --git a/bin/setup b/bin/setup index ac5c016f20e..7548fa6168e 100755 --- a/bin/setup +++ b/bin/setup @@ -2,15 +2,15 @@ require_relative '../lib/manageiq/environment' if ARGV.any? - puts <<-EOS -Usage: bin/setup - -Environment Variable Options: - SKIP_DATABASE_SETUP Skip the creation, migration, and seeding of the database. - SKIP_UI_UPDATE Skip the update of UI assets. - SKIP_TEST_RESET Skip the creation of the test enviroment. 
Defaults to - true in production mode since the tasks do not exist. -EOS + puts <<~EOS + Usage: bin/setup + + Environment Variable Options: + SKIP_DATABASE_SETUP Skip the creation, migration, and seeding of the database. + SKIP_UI_UPDATE Skip the update of UI assets. + SKIP_TEST_RESET Skip the creation of the test enviroment. Defaults to + true in production mode since the tasks do not exist. + EOS exit 1 end diff --git a/bin/update b/bin/update index 45d897941be..385218a377b 100755 --- a/bin/update +++ b/bin/update @@ -2,16 +2,16 @@ require_relative '../lib/manageiq/environment' if ARGV.any? - puts <<-EOS -Usage: bin/update - -Environment Variable Options: - SKIP_DATABASE_SETUP Skip the migration and seeding of the database. - SKIP_UI_UPDATE Skip the update of UI assets. - SKIP_AUTOMATE_RESET Skip the reset of the automate domain. - SKIP_TEST_RESET Skip the creation of the test enviroment. Defaults to - true in production mode since the tasks do not exist. -EOS + puts <<~EOS + Usage: bin/update + + Environment Variable Options: + SKIP_DATABASE_SETUP Skip the migration and seeding of the database. + SKIP_UI_UPDATE Skip the update of UI assets. + SKIP_AUTOMATE_RESET Skip the reset of the automate domain. + SKIP_TEST_RESET Skip the creation of the test enviroment. Defaults to + true in production mode since the tasks do not exist. + EOS exit 1 end @@ -23,7 +23,7 @@ Dir.chdir(ManageIQ::Environment::APP_ROOT) do puts '== Installing dependencies ==' ManageIQ::Environment.install_bundler ManageIQ::Environment.bundle_config - ManageIQ::Environment.bundle_update(force: true) + ManageIQ::Environment.bundle_update(:force => true) ui_thread = ManageIQ::Environment.update_ui_thread unless ENV["SKIP_UI_UPDATE"] diff --git a/config.ru b/config.ru index 44c9cbf33c2..749b1c540ab 100644 --- a/config.ru +++ b/config.ru @@ -1,4 +1,4 @@ # This file is used by Rack-based servers to start the application. 
-require ::File.expand_path('../config/environment', __FILE__) +require File.expand_path('config/environment', __dir__) run(Vmdb::Application) diff --git a/config/application.rb b/config/application.rb index 657c30be383..671a9495d9f 100644 --- a/config/application.rb +++ b/config/application.rb @@ -1,5 +1,5 @@ -require File.expand_path('../boot', __FILE__) -require File.expand_path('../preinitializer', __FILE__) +require File.expand_path('boot', __dir__) +require File.expand_path('preinitializer', __dir__) require 'rails' require 'active_record/railtie' require 'action_controller/railtie' @@ -80,7 +80,7 @@ class Application < Rails::Application config.assets.version = '1.0' # Set the manifest file name so that we are sure it gets overwritten on updates - config.assets.manifest = Rails.root.join("public/assets/.sprockets-manifest.json").to_s + config.assets.manifest = Rails.public_path.join('assets/.sprockets-manifest.json').to_s # Disable ActionCable's request forgery protection # This is basically matching a set of allowed origins which is not good for us @@ -117,15 +117,15 @@ class Application < Rails::Application # # https://bugs.ruby-lang.org/issues/14372 # - config.autoload_paths << Rails.root.join("app", "models", "aliases").to_s - config.autoload_paths << Rails.root.join("app", "models", "mixins").to_s + config.autoload_paths << Rails.root.join("app/models/aliases").to_s + config.autoload_paths << Rails.root.join("app/models/mixins").to_s config.autoload_paths << Rails.root.join("lib").to_s - config.autoload_paths << Rails.root.join("lib", "services").to_s + config.autoload_paths << Rails.root.join("lib/services").to_s - config.autoload_once_paths << Rails.root.join("lib", "vmdb", "console_methods.rb").to_s + config.autoload_once_paths << Rails.root.join("lib/vmdb/console_methods.rb").to_s require_relative '../lib/request_started_on_middleware' - config.middleware.use RequestStartedOnMiddleware + config.middleware.use(RequestStartedOnMiddleware) # enable to log session id for every request # require_relative '../lib/request_log_session_middleware' @@ -156,7 +156,7 @@ class Application < Rails::Application require 'vmdb_helper' end - # Note: If an initializer doesn't have an after, Rails will add one based + # NOTE: If an initializer doesn't have an after, Rails will add one based # on the top to bottom order of initializer calls in the file. # Because this is easy to mess up, keep your initializers in order. initializer :load_inflections, :before => :init_vmdb_plugins do diff --git a/config/boot.rb b/config/boot.rb index 4e61be7a686..b202572d5fe 100644 --- a/config/boot.rb +++ b/config/boot.rb @@ -1,7 +1,7 @@ require 'rubygems' # Set up gems listed in the Gemfile. -ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) +ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__) require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) # add the lib dir of the engine if we are running as a dummy app for an engine diff --git a/config/environment.rb b/config/environment.rb index b00e88bf54e..2285c782855 100644 --- a/config/environment.rb +++ b/config/environment.rb @@ -1,5 +1,5 @@ # Load the rails application -require File.expand_path('../application', __FILE__) +require File.expand_path('application', __dir__) # Initialize the rails application Vmdb::Application.initialize! 
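The __FILE__ to __dir__ conversions in the bin and config hunks above resolve to the same absolute path; __dir__ just drops the extra '..' hop. A small sketch, assuming a file located at /app/config/environment.rb:

    # old form: __FILE__ is the file itself, so one extra '..' is needed
    File.expand_path('../application', __FILE__)  # => "/app/config/application"

    # new form: __dir__ is already the containing directory
    File.expand_path('application', __dir__)      # => "/app/config/application"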
diff --git a/config/environments/test.rb b/config/environments/test.rb index 356c5734f04..49abae0a37a 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -15,7 +15,7 @@ # Configure static asset server for tests with Cache-Control for performance config.public_file_server.enabled = true - config.public_file_server.headers = { 'Cache-Control' => 'public, max-age=3600' } + config.public_file_server.headers = {'Cache-Control' => 'public, max-age=3600'} # Avoid potential warnings and race conditions config.assets.configure do |env| @@ -35,7 +35,7 @@ config.action_dispatch.show_exceptions = true # Disable request forgery protection in test environment - config.action_controller.allow_forgery_protection = false + config.action_controller.allow_forgery_protection = false # Tell Action Mailer not to deliver emails to the real world. # The :test delivery method accumulates sent emails in the @@ -49,8 +49,8 @@ # Any exception that gets past our ApplicationController's rescue_from # should just be raised intact - config.middleware.delete(::ActionDispatch::ShowExceptions) - config.middleware.delete(::ActionDispatch::DebugExceptions) + config.middleware.delete(ActionDispatch::ShowExceptions) + config.middleware.delete(ActionDispatch::DebugExceptions) # Customize any additional options below... diff --git a/config/initializers/active_metrics.rb b/config/initializers/active_metrics.rb index 14aa45db64c..51b498e3392 100644 --- a/config/initializers/active_metrics.rb +++ b/config/initializers/active_metrics.rb @@ -1,2 +1,2 @@ -# Note: The legacy Postgresql adapter ignores everything except the adapter name here, just stealing the current ActiveRecord connection. +# NOTE: The legacy Postgresql adapter ignores everything except the adapter name here, just stealing the current ActiveRecord connection. 
ActiveMetrics::Base.establish_connection(:adapter => "miq_postgres", :database => "manageiq_metrics") diff --git a/config/initializers/as_to_time.rb b/config/initializers/as_to_time.rb index f75513d4abf..5ea24e97d11 100644 --- a/config/initializers/as_to_time.rb +++ b/config/initializers/as_to_time.rb @@ -2,7 +2,7 @@ require 'active_support/deprecation' class String - alias_method :old_to_time, :to_time + alias old_to_time to_time OBJ = Object.new diff --git a/config/initializers/fast_gettext.rb b/config/initializers/fast_gettext.rb index 35508934264..34345e87eb9 100644 --- a/config/initializers/fast_gettext.rb +++ b/config/initializers/fast_gettext.rb @@ -12,7 +12,7 @@ Vmdb::FastGettextHelper.register_locales Vmdb::FastGettextHelper.register_human_localenames - gettext_options = %w(--sort-by-msgid --location --no-wrap) + gettext_options = %w[--sort-by-msgid --location --no-wrap] Rails.application.config.gettext_i18n_rails.msgmerge = gettext_options + ["--no-fuzzy-matching"] Rails.application.config.gettext_i18n_rails.xgettext = gettext_options + ["--add-comments=TRANSLATORS"] diff --git a/config/initializers/marshal_autoloader.rb b/config/initializers/marshal_autoloader.rb index f9d2b5aa212..57d75c1d8da 100644 --- a/config/initializers/marshal_autoloader.rb +++ b/config/initializers/marshal_autoloader.rb @@ -17,6 +17,7 @@ def load(data) raise error end end + module Marshal class << self prepend MarshalAutoloader diff --git a/config/initializers/postgres_required_versions.rb b/config/initializers/postgres_required_versions.rb index 94dc7071432..cb7d2945016 100644 --- a/config/initializers/postgres_required_versions.rb +++ b/config/initializers/postgres_required_versions.rb @@ -1,4 +1,4 @@ -ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend Module.new { +ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend(Module.new do def initialize(*args) super check_version if respond_to?(:check_version) @@ -12,7 +12,7 @@ def check_version end if postgresql_version >= 14_00_00 - $stderr.puts msg + warn msg end end -} +end) diff --git a/config/initializers/rack_attack.rb b/config/initializers/rack_attack.rb index a5ebe6bbe4b..8d6f4d77bb2 100644 --- a/config/initializers/rack_attack.rb +++ b/config/initializers/rack_attack.rb @@ -6,12 +6,12 @@ # to be done manually. 
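The prepend(Module.new do ... end) blocks in the initializer hunk above and in the yaml_autoloader and ar_types hunks below patch an existing method while keeping access to the original via super; only the block and brace style changes in this patch. A minimal sketch of the pattern, using an invented Greeter class:

    class Greeter
      def hello
        "hello"
      end
    end

    # the anonymous module sits in front of Greeter in the ancestor chain,
    # so its hello runs first and can reach the original with super
    Greeter.prepend(Module.new do
      def hello
        super.upcase
      end
    end)

    Greeter.new.hello # => "HELLO"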
Rails.application.middleware.use(Rack::Attack) - api_login_limit = proc { ::Settings.server.rate_limiting.api_login.limit } - api_login_period = proc { ::Settings.server.rate_limiting.api_login.period.to_i_with_method } - request_limit = proc { ::Settings.server.rate_limiting.request.limit } - request_period = proc { ::Settings.server.rate_limiting.request.period.to_i_with_method } - ui_login_limit = proc { ::Settings.server.rate_limiting.ui_login.limit } - ui_login_period = proc { ::Settings.server.rate_limiting.ui_login.period.to_i_with_method } + api_login_limit = proc { Settings.server.rate_limiting.api_login.limit } + api_login_period = proc { Settings.server.rate_limiting.api_login.period.to_i_with_method } + request_limit = proc { Settings.server.rate_limiting.request.limit } + request_period = proc { Settings.server.rate_limiting.request.period.to_i_with_method } + ui_login_limit = proc { Settings.server.rate_limiting.ui_login.limit } + ui_login_period = proc { Settings.server.rate_limiting.ui_login.period.to_i_with_method } # Throttle all requests by IP # diff --git a/config/initializers/yaml_autoloader.rb b/config/initializers/yaml_autoloader.rb index 525cc114fa7..a981af2fa0c 100644 --- a/config/initializers/yaml_autoloader.rb +++ b/config/initializers/yaml_autoloader.rb @@ -10,8 +10,8 @@ # # This is still needed in some areas for zeitwerk, such as YAML files for tests in the manageiq-providers-vmware # that reference a constant: RbVmomi::VIM::TaskEvent -Psych::Visitors::ToRuby.prepend Module.new { +Psych::Visitors::ToRuby.prepend(Module.new do def resolve_class(klass_name) (class_loader.class != Psych::ClassLoader::Restricted && klass_name && klass_name.safe_constantize) || super end -} +end) diff --git a/config/preinitializer.rb b/config/preinitializer.rb index 1b5fb0914b5..8e603180613 100644 --- a/config/preinitializer.rb +++ b/config/preinitializer.rb @@ -1,6 +1,6 @@ # Optional logging of requires if ENV["REQUIRE_LOG"] - $req_log_path = File.join(File.dirname(__FILE__), %w(.. log)) + $req_log_path = File.join(File.dirname(__FILE__), %w[.. log]) require 'require_with_logging' end diff --git a/config/routes.rb b/config/routes.rb index bb7f41b11e8..fb58b86ac00 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -1,7 +1,7 @@ Vmdb::Application.routes.draw do if Rails.env.development? 
&& ENV['MOUNT_REMOTE_CONSOLE_PROXY'] logger = Logger.new(STDOUT) - logger.level = Logger.const_get(::Settings.log.level_remote_console.upcase) + logger.level = Logger.const_get(Settings.log.level_remote_console.upcase) mount RemoteConsole::RackServer.new(:logger => logger) => '/ws/console' end end diff --git a/db/fixtures/tools/create_customization_templates_fixture.rb b/db/fixtures/tools/create_customization_templates_fixture.rb index 25f978bdee9..749b6d5914f 100644 --- a/db/fixtures/tools/create_customization_templates_fixture.rb +++ b/db/fixtures/tools/create_customization_templates_fixture.rb @@ -4,6 +4,4 @@ attrs[:system] = true attrs end -File.open(CustomizationTemplate.seed_file_name, "w") do |f| - f.write(recs.to_yaml) -end +File.write(CustomizationTemplate.seed_file_name, recs.to_yaml) diff --git a/db/fixtures/tools/create_pxe_image_types_fixture.rb b/db/fixtures/tools/create_pxe_image_types_fixture.rb index 5d293fadbbe..89670c12b71 100644 --- a/db/fixtures/tools/create_pxe_image_types_fixture.rb +++ b/db/fixtures/tools/create_pxe_image_types_fixture.rb @@ -1,4 +1,2 @@ recs = PxeImageType.order(Arel.sql("LOWER(name)")).collect { |r| r.attributes.except("id").symbolize_keys } -File.open(PxeImageType.seed_file_name, "w") do |f| - f.write(recs.to_yaml) -end +File.write(PxeImageType.seed_file_name, recs.to_yaml) diff --git a/lib/active_metrics/connection_adapters/hawkular_metrics_adapter.rb b/lib/active_metrics/connection_adapters/hawkular_metrics_adapter.rb index fa77cdba14a..cf729a99aee 100644 --- a/lib/active_metrics/connection_adapters/hawkular_metrics_adapter.rb +++ b/lib/active_metrics/connection_adapters/hawkular_metrics_adapter.rb @@ -12,7 +12,7 @@ def self.create_connection(config) Hawkular::Metrics::Client.new( URI::HTTP.build(:host => hostname, :port => port).to_s, config.slice(:username, :password), - :tenant => db, + :tenant => db ) end diff --git a/lib/active_metrics/connection_adapters/influxdb_adapter.rb b/lib/active_metrics/connection_adapters/influxdb_adapter.rb index ef9600b67c1..5c595e25ce7 100644 --- a/lib/active_metrics/connection_adapters/influxdb_adapter.rb +++ b/lib/active_metrics/connection_adapters/influxdb_adapter.rb @@ -32,7 +32,7 @@ def build_point(timestamp:, metric_name:, value:, resource: nil, resource_type: { :series => SERIES, :timestamp => (timestamp.to_f * 1000).to_i, # ms precision - :values => { metric_name.to_sym => value }, + :values => {metric_name.to_sym => value}, :tags => tags.symbolize_keys.merge( :resource_type => resource ? resource.class.base_class.name : resource_type, :resource_id => resource ? 
resource.id : resource_id diff --git a/lib/active_metrics/connection_adapters/miq_postgres_adapter.rb b/lib/active_metrics/connection_adapters/miq_postgres_adapter.rb index 4b1ebbd8a82..212f44d2934 100644 --- a/lib/active_metrics/connection_adapters/miq_postgres_adapter.rb +++ b/lib/active_metrics/connection_adapters/miq_postgres_adapter.rb @@ -5,7 +5,7 @@ module ConnectionAdapters class MiqPostgresAdapter < AbstractAdapter include Vmdb::Logging - # TODO Use the actual configuration from the initializer or whatever + # TODO: Use the actual configuration from the initializer or whatever def self.create_connection(_config) ActiveRecord::Base.connection end @@ -29,7 +29,7 @@ def write_rows(resources, interval_name, start_time, end_time, data) :saver_strategy => :batch, :arel => samples_arel, :complete => false, - :model_class => klass, + :model_class => klass ) log_header = "[#{interval_name}]" diff --git a/lib/active_metrics/connection_adapters/miq_postgres_legacy_adapter.rb b/lib/active_metrics/connection_adapters/miq_postgres_legacy_adapter.rb index fa8c9d961e3..7f72f6670ed 100644 --- a/lib/active_metrics/connection_adapters/miq_postgres_legacy_adapter.rb +++ b/lib/active_metrics/connection_adapters/miq_postgres_legacy_adapter.rb @@ -5,7 +5,7 @@ module ConnectionAdapters class MiqPostgresLegacyAdapter < AbstractAdapter include Vmdb::Logging - # TODO Use the actual configuration from the initializer or whatever + # TODO: Use the actual configuration from the initializer or whatever def self.create_connection(_config) ActiveRecord::Base.connection end diff --git a/lib/acts_as_ar_model.rb b/lib/acts_as_ar_model.rb index 70aa219a9fc..e7835663b5a 100644 --- a/lib/acts_as_ar_model.rb +++ b/lib/acts_as_ar_model.rb @@ -26,7 +26,7 @@ def self.includes_to_references(_inc) [] end - class << self; alias_method :base_model, :base_class; end + class << self; alias base_model base_class; end # # Column methods @@ -82,7 +82,7 @@ def initialize def attributes=(values) values.each do |attr, value| - send("#{attr}=", value) + send(:"#{attr}=", value) end end @@ -93,12 +93,12 @@ def attributes def [](attr) @attributes[attr.to_s] end - alias_method :read_attribute, :[] + alias read_attribute [] def []=(attr, value) @attributes[attr.to_s] = value end - alias_method :write_attribute, :[]= + alias write_attribute []= end def self.set_columns_hash(hash) @@ -111,7 +111,7 @@ def self.set_columns_hash(hash) read_attribute(attribute) end - define_method("#{attribute}=") do |val| + define_method(:"#{attribute}=") do |val| write_attribute(attribute, val) end end diff --git a/lib/acts_as_ar_scope.rb b/lib/acts_as_ar_scope.rb index 733b8c6da6b..e23d6399890 100644 --- a/lib/acts_as_ar_scope.rb +++ b/lib/acts_as_ar_scope.rb @@ -13,7 +13,7 @@ class << self end def self.all(*args) - if args.empty? || args.size == 1 && args.first.respond_to?(:empty?) && args.first.empty? + if args.empty? || (args.size == 1 && args.first.respond_to?(:empty?) && args.first.empty?) # avoid warnings aar_scope else diff --git a/lib/ansible/content.rb b/lib/ansible/content.rb index 31b36b09f06..93574b1a0b9 100644 --- a/lib/ansible/content.rb +++ b/lib/ansible/content.rb @@ -1,6 +1,6 @@ module Ansible class Content - PLUGIN_CONTENT_DIR = Rails.root.join("content", "ansible_consolidated").to_s.freeze + PLUGIN_CONTENT_DIR = Rails.root.join("content/ansible_consolidated").to_s.freeze attr_accessor :path @@ -12,7 +12,7 @@ def fetch_galaxy_roles return true unless requirements_file.exist? 
require "awesome_spawn" - AwesomeSpawn.run!("ansible-galaxy", :params => ["install", :roles_path= => roles_dir, :role_file= => requirements_file]) + AwesomeSpawn.run!("ansible-galaxy", :params => ["install", {:roles_path= => roles_dir, :role_file= => requirements_file}]) end def self.fetch_plugin_galaxy_roles diff --git a/lib/ansible/runner/credential/machine_credential.rb b/lib/ansible/runner/credential/machine_credential.rb index 6ef20e000b9..a7b48f13f3c 100644 --- a/lib/ansible/runner/credential/machine_credential.rb +++ b/lib/ansible/runner/credential/machine_credential.rb @@ -30,7 +30,7 @@ def become_args SSH_KEY = "^SSH [pP]assword".freeze BECOME_KEY = "^BECOME [pP]assword".freeze - SSH_UNLOCK_KEY = "^Enter passphrase for [a-zA-Z0-9\-\/]+\/ssh_key_data:".freeze + SSH_UNLOCK_KEY = "^Enter passphrase for [a-zA-Z0-9-/]+/ssh_key_data:".freeze def write_password_file password_hash = initialize_password_data password_hash[SSH_KEY] = auth.password if auth.password diff --git a/lib/ansible/runner/credential/network_credential.rb b/lib/ansible/runner/credential/network_credential.rb index d0f4313586c..89c37ca1c36 100644 --- a/lib/ansible/runner/credential/network_credential.rb +++ b/lib/ansible/runner/credential/network_credential.rb @@ -28,7 +28,7 @@ def write_config_files private - SSH_UNLOCK_KEY = "^Enter passphrase for [a-zA-Z0-9\-\/]+\/ssh_key_data:".freeze + SSH_UNLOCK_KEY = "^Enter passphrase for [a-zA-Z0-9-/]+/ssh_key_data:".freeze def write_password_file password_data = initialize_password_data password_data[SSH_UNLOCK_KEY] ||= auth.ssh_key_unlock || "" diff --git a/lib/ansible/runner/credential/openstack_credential.rb b/lib/ansible/runner/credential/openstack_credential.rb index 44f334580b8..e59e21a85bf 100644 --- a/lib/ansible/runner/credential/openstack_credential.rb +++ b/lib/ansible/runner/credential/openstack_credential.rb @@ -10,7 +10,7 @@ def self.auth_type # https://github.com/ansible/awx/blob/1242ee2b/awx/main/models/credential/injectors.py#L70-L96 # def env_vars - { "OS_CLIENT_CONFIG_FILE" => os_credentials_file } + {"OS_CLIENT_CONFIG_FILE" => os_credentials_file} end def write_config_files diff --git a/lib/ansible/runner/credential/vault_credential.rb b/lib/ansible/runner/credential/vault_credential.rb index b8baf922475..3140fe6fe1f 100644 --- a/lib/ansible/runner/credential/vault_credential.rb +++ b/lib/ansible/runner/credential/vault_credential.rb @@ -7,7 +7,7 @@ def self.auth_type def env_vars if auth.vault_password.present? 
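The AwesomeSpawn.run! change above only adds explicit braces around a hash that was already the last element of the :params array; both literals build the same structure. A tiny sketch with placeholder keys and values:

    ["install", :roles_path => "/tmp/roles"]    # implicit trailing hash
    # => ["install", {:roles_path => "/tmp/roles"}]

    ["install", {:roles_path => "/tmp/roles"}]  # explicit braces, same array
    # => ["install", {:roles_path => "/tmp/roles"}]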
- { "ANSIBLE_VAULT_PASSWORD_FILE" => vault_password_file } + {"ANSIBLE_VAULT_PASSWORD_FILE" => vault_password_file} else {} end diff --git a/lib/ansible/runner/response.rb b/lib/ansible/runner/response.rb index 41b9e7aa232..3970001e76d 100644 --- a/lib/ansible/runner/response.rb +++ b/lib/ansible/runner/response.rb @@ -72,15 +72,15 @@ def parse_stdout(stdout) # output is JSON per new line stdout.each_line do |line| - # TODO(lsmola) we can remove exception handling when this is fixed - # https://github.com/ansible/ansible-runner/issues/89#issuecomment-404236832 , so it fails early if there is - # a non json line - begin - data = JSON.parse(line) - parsed_stdout << data if data.kind_of?(Hash) - rescue => e - _log.warn("Couldn't parse JSON from: #{e}") - end + # TODO(lsmola) we can remove exception handling when this is fixed + # https://github.com/ansible/ansible-runner/issues/89#issuecomment-404236832 , so it fails early if there is + # a non json line + + data = JSON.parse(line) + parsed_stdout << data if data.kind_of?(Hash) + rescue => e + _log.warn("Couldn't parse JSON from: #{e}") + end parsed_stdout diff --git a/lib/charting.rb b/lib/charting.rb index 64204cd5d79..df50b0ea2f7 100644 --- a/lib/charting.rb +++ b/lib/charting.rb @@ -3,4 +3,4 @@ # * We assign the old toplevel constant to the new constant. # * We can't include rails deprecate_constant globally, so we use ruby's. Charting = ManageIQ::Reporting::Charting -Object.deprecate_constant :Charting +Object.deprecate_constant(:Charting) diff --git a/lib/code_coverage.rb b/lib/code_coverage.rb index 8c7a14d0916..fe8ac57629d 100644 --- a/lib/code_coverage.rb +++ b/lib/code_coverage.rb @@ -5,7 +5,7 @@ module CodeCoverage # invoked the 'run_hook' so they must reset the environment for the new process. HOOK_FILE = Pathname.new(__dir__).join("..", "config", "coverage_hook.rb").freeze def self.run_hook - # Note: We use 'load' here because require would only load the hook once, + # NOTE: We use 'load' here because require would only load the hook once, # in the server but not when the child fork starts. Shared memory is hard. load HOOK_FILE if File.exist?(HOOK_FILE) end diff --git a/lib/container_orchestrator.rb b/lib/container_orchestrator.rb index 9aa2f8ccc3a..13332bea5c6 100644 --- a/lib/container_orchestrator.rb +++ b/lib/container_orchestrator.rb @@ -26,7 +26,7 @@ def create_deployment(name) yield(definition) if block_given? kube_apps_connection.create_deployment(definition) rescue KubeException => e - raise unless e.message =~ /already exists/ + raise unless /already exists/.match?(e.message) end def create_service(name, selector, port) @@ -34,7 +34,7 @@ def create_service(name, selector, port) yield(definition) if block_given? kube_connection.create_service(definition) rescue KubeException => e - raise unless e.message =~ /already exists/ + raise unless /already exists/.match?(e.message) end def create_secret(name, data) @@ -42,7 +42,7 @@ def create_secret(name, data) yield(definition) if block_given? 
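The e.message =~ /.../ to /.../.match?(e.message) rewrites above and just below keep the same branch logic; match? returns a plain boolean and skips creating MatchData. A quick sketch:

    msg = 'service "foo" already exists'

    msg =~ /already exists/        # => 14, the match offset; also sets $~
    /already exists/.match?(msg)   # => true, without allocating MatchData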
kube_connection.create_secret(definition) rescue KubeException => e - raise unless e.message =~ /already exists/ + raise unless /already exists/.match?(e.message) end def delete_deployment(name) @@ -50,19 +50,19 @@ def delete_deployment(name) scale(name, 0) kube_apps_connection.delete_deployment(name, my_namespace) rescue KubeException => e - raise unless e.message =~ /not found/ + raise unless /not found/.match?(e.message) end def delete_service(name) kube_connection.delete_service(name, my_namespace) rescue KubeException => e - raise unless e.message =~ /not found/ + raise unless /not found/.match?(e.message) end def delete_secret(name) kube_connection.delete_secret(name, my_namespace) rescue KubeException => e - raise unless e.message =~ /not found/ + raise unless /not found/.match?(e.message) end def get_deployments @@ -92,7 +92,7 @@ def get_pod_by_namespace_and_hostname(namespace, hostname) # container orchestrator process itself, as it uses environment info # that only the running orchestrator pod will have. def my_pod - get_pod_by_namespace_and_hostname(my_namespace, ENV["HOSTNAME"]) + get_pod_by_namespace_and_hostname(my_namespace, ENV.fetch("HOSTNAME", nil)) end def my_node_affinity_arch_values @@ -100,7 +100,7 @@ def my_node_affinity_arch_values i.matchExpressions&.each { |a| return(a.values) if a.key == "kubernetes.io/arch" } end - return ["amd64"] + ["amd64"] end private @@ -125,15 +125,15 @@ def raw_connect(uri) Kubeclient::Client.new( uri, - :auth_options => { :bearer_token_file => TOKEN_FILE }, + :auth_options => {:bearer_token_file => TOKEN_FILE}, :ssl_options => ssl_options ) end def manager_uri(path) URI::HTTPS.build( - :host => ENV["KUBERNETES_SERVICE_HOST"], - :port => ENV["KUBERNETES_SERVICE_PORT"], + :host => ENV.fetch("KUBERNETES_SERVICE_HOST", nil), + :port => ENV.fetch("KUBERNETES_SERVICE_PORT", nil), :path => path ) end diff --git a/lib/container_orchestrator/object_definition.rb b/lib/container_orchestrator/object_definition.rb index 1811318cb02..f5e10b84225 100644 --- a/lib/container_orchestrator/object_definition.rb +++ b/lib/container_orchestrator/object_definition.rb @@ -15,18 +15,18 @@ def deployment_definition(name) :template => { :metadata => {:name => name, :labels => common_labels.merge(:name => name)}, :spec => { - :affinity => { + :affinity => { :nodeAffinity => { :requiredDuringSchedulingIgnoredDuringExecution => { :nodeSelectorTerms => [{ :matchExpressions => [ {:key => "kubernetes.io/arch", :operator => "In", :values => ContainerOrchestrator.new.my_node_affinity_arch_values} - ]} - ] + ] + }] } } }, - :serviceAccountName => ENV["WORKER_SERVICE_ACCOUNT"], + :serviceAccountName => ENV.fetch("WORKER_SERVICE_ACCOUNT", nil), :containers => [{ :name => name, :env => default_environment, @@ -44,7 +44,7 @@ def deployment_definition(name) {:name => "encryption-key", :readOnly => true, :mountPath => "/run/secrets/manageiq/application"}, ] }], - :volumes => [ + :volumes => [ { :name => "database-secret", :secret => { @@ -170,17 +170,17 @@ def default_environment [ {:name => "GUID", :value => MiqServer.my_guid}, {:name => "HOME", :value => Rails.root.join("tmp").to_s}, - {:name => "APPLICATION_DOMAIN", :value => ENV["APPLICATION_DOMAIN"]}, - {:name => "MEMCACHED_SERVER", :value => ENV["MEMCACHED_SERVER"]}, - {:name => "MEMCACHED_SERVICE_NAME", :value => ENV["MEMCACHED_SERVICE_NAME"]}, - {:name => "WORKER_HEARTBEAT_FILE", :value => Rails.root.join("tmp", "worker.hb").to_s}, + {:name => "APPLICATION_DOMAIN", :value => ENV.fetch("APPLICATION_DOMAIN", nil)}, + 
{:name => "MEMCACHED_SERVER", :value => ENV.fetch("MEMCACHED_SERVER", nil)}, + {:name => "MEMCACHED_SERVICE_NAME", :value => ENV.fetch("MEMCACHED_SERVICE_NAME", nil)}, + {:name => "WORKER_HEARTBEAT_FILE", :value => Rails.root.join("tmp/worker.hb").to_s}, {:name => "WORKER_HEARTBEAT_METHOD", :value => "file"}, ] + database_environment + memcached_environment + messaging_environment end def database_environment [ - {:name => "DATABASE_SSL_MODE", :value => ENV["DATABASE_SSL_MODE"]}, + {:name => "DATABASE_SSL_MODE", :value => ENV.fetch("DATABASE_SSL_MODE", nil)}, ] end @@ -188,8 +188,8 @@ def memcached_environment return [] unless ENV["MEMCACHED_ENABLE_SSL"].present? [ - {:name => "MEMCACHED_ENABLE_SSL", :value => ENV["MEMCACHED_ENABLE_SSL"]}, - {:name => "MEMCACHED_SSL_CA", :value => ENV["MEMCACHED_SSL_CA"]}, + {:name => "MEMCACHED_ENABLE_SSL", :value => ENV.fetch("MEMCACHED_ENABLE_SSL", nil)}, + {:name => "MEMCACHED_SSL_CA", :value => ENV.fetch("MEMCACHED_SSL_CA", nil)}, ] end @@ -197,13 +197,13 @@ def messaging_environment return [] unless ENV["MESSAGING_TYPE"].present? [ - {:name => "MESSAGING_PORT", :value => ENV["MESSAGING_PORT"]}, - {:name => "MESSAGING_TYPE", :value => ENV["MESSAGING_TYPE"]}, - {:name => "MESSAGING_SSL_CA", :value => ENV["MESSAGING_SSL_CA"]}, - {:name => "MESSAGING_SASL_MECHANISM", :value => ENV["MESSAGING_SASL_MECHANISM"]}, - {:name => "MESSAGING_HOSTNAME", :value => ENV["MESSAGING_HOSTNAME"]}, - {:name => "MESSAGING_PASSWORD", :value => ENV["MESSAGING_PASSWORD"]}, - {:name => "MESSAGING_USERNAME", :value => ENV["MESSAGING_USERNAME"]} + {:name => "MESSAGING_PORT", :value => ENV.fetch("MESSAGING_PORT", nil)}, + {:name => "MESSAGING_TYPE", :value => ENV.fetch("MESSAGING_TYPE", nil)}, + {:name => "MESSAGING_SSL_CA", :value => ENV.fetch("MESSAGING_SSL_CA", nil)}, + {:name => "MESSAGING_SASL_MECHANISM", :value => ENV.fetch("MESSAGING_SASL_MECHANISM", nil)}, + {:name => "MESSAGING_HOSTNAME", :value => ENV.fetch("MESSAGING_HOSTNAME", nil)}, + {:name => "MESSAGING_PASSWORD", :value => ENV.fetch("MESSAGING_PASSWORD", nil)}, + {:name => "MESSAGING_USERNAME", :value => ENV.fetch("MESSAGING_USERNAME", nil)} ] end @@ -222,7 +222,7 @@ def my_namespace end def app_name - ENV["APP_NAME"] + ENV.fetch("APP_NAME", nil) end def app_name_label @@ -257,11 +257,11 @@ def owner_references end def pod_name - ENV['POD_NAME'] + ENV.fetch('POD_NAME', nil) end def pod_uid - ENV["POD_UID"] + ENV.fetch("POD_UID", nil) end end end diff --git a/lib/content_exporter.rb b/lib/content_exporter.rb index a2d7acd7ed9..0668525ad7c 100644 --- a/lib/content_exporter.rb +++ b/lib/content_exporter.rb @@ -1,7 +1,7 @@ module ContentExporter def self.export_to_hash(initial_hash, key, elements) hash = initial_hash.dup - %w(id created_on updated_on).each { |k| hash.delete(k) } + %w[id created_on updated_on].each { |k| hash.delete(k) } hash[key] = elements.collect { |e| e.export_to_array.first[key] unless e.nil? } hash end diff --git a/lib/ems_event_helper.rb b/lib/ems_event_helper.rb index ebac715ae4e..9f2ea509dc4 100644 --- a/lib/ems_event_helper.rb +++ b/lib/ems_event_helper.rb @@ -3,6 +3,7 @@ class EmsEventHelper def initialize(event) raise ArgumentError, "event must be an EmsEvent" unless event.kind_of?(EmsEvent) + @event = event end diff --git a/lib/evm_database.rb b/lib/evm_database.rb index 592435d7906..9faab84d25c 100644 --- a/lib/evm_database.rb +++ b/lib/evm_database.rb @@ -96,6 +96,7 @@ def self.seed_primordial def self.seed_rest return if skip_seeding? 
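The ENV["X"] to ENV.fetch("X", nil) conversions in the surrounding hunks return the same value when the variable is unset; fetch just makes the nil default explicit and allows a different fallback, or a KeyError, when wanted. A short sketch with a made-up variable name:

    ENV["SOME_UNSET_VAR"]               # => nil
    ENV.fetch("SOME_UNSET_VAR", nil)    # => nil, same result, explicit default
    ENV.fetch("SOME_UNSET_VAR", "none") # => "none"
    ENV.fetch("SOME_UNSET_VAR")         # raises KeyError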
+ seed(OTHER_SEEDABLE_CLASSES + seedable_plugin_classes) end @@ -118,7 +119,7 @@ def self.seeded? end def self.skip_seeding? - ENV['SKIP_SEEDING'] && seeded_primordially? + ENV.fetch('SKIP_SEEDING', nil) && seeded_primordially? end private_class_method :skip_seeding? @@ -142,14 +143,14 @@ def self.seed_classes(classes) rescue Timeout::Error _log.error("Seeding... Timed out after #{lock_timeout} seconds") raise - rescue StandardError => err + rescue => err _log.log_backtrace(err) raise end private_class_method :seed_classes def self.host - (ActiveRecord::Base.configurations[ENV['RAILS_ENV']] || {})['host'] + (ActiveRecord::Base.configurations[ENV.fetch('RAILS_ENV', nil)] || {})['host'] end def self.local? @@ -185,7 +186,7 @@ def self.run_failover_monitor(monitor = nil) require 'manageiq-postgres_ha_admin' ManageIQ::PostgresHaAdmin.logger = Vmdb.logger - monitor ||= ManageIQ::PostgresHaAdmin::FailoverMonitor.new(Rails.root.join("config", "ha_admin.yml")) + monitor ||= ManageIQ::PostgresHaAdmin::FailoverMonitor.new(Rails.root.join("config/ha_admin.yml")) configure_rails_handler(monitor) configure_logical_replication_handlers(monitor) @@ -195,11 +196,11 @@ def self.run_failover_monitor(monitor = nil) end def self.configure_rails_handler(monitor) - file_path = Rails.root.join("config", "database.yml") + file_path = Rails.root.join("config/database.yml") rails_handler = ManageIQ::PostgresHaAdmin::RailsConfigHandler.new(:file_path => file_path, :environment => Rails.env) _log.info("Configuring database failover for #{file_path}'s #{Rails.env} environment") - rails_handler.after_failover do |new_conn_info| + rails_handler.after_failover do |_new_conn_info| # refresh the rails connection info after the config handler changed database.yml begin ActiveRecord::Base.remove_connection diff --git a/lib/extensions/ar_adapter/ar_dba.rb b/lib/extensions/ar_adapter/ar_dba.rb index a35e95ff8cd..7461c499c99 100644 --- a/lib/extensions/ar_adapter/ar_dba.rb +++ b/lib/extensions/ar_adapter/ar_dba.rb @@ -29,7 +29,7 @@ def client_connections , query FROM pg_stat_activity ORDER BY 1, 2 - SQL + SQL end # Taken from: https://github.com/bucardo/check_postgres/blob/2.19.0/check_postgres.pl#L3492 @@ -106,9 +106,9 @@ def table_bloat ) AS sml WHERE schemaname = 'public' ORDER BY 1 - SQL + SQL - integer_columns = %w( + integer_columns = %w[ otta pages pagesize @@ -116,11 +116,11 @@ def table_bloat wasted_bytes wasted_pages wasted_size - ) + ] - float_columns = %w( + float_columns = %w[ percent_bloat - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -201,9 +201,9 @@ def index_bloat ) AS sml WHERE schemaname = 'public' ORDER BY 1, 2 - SQL + SQL - integer_columns = %w( + integer_columns = %w[ otta pages pagesize @@ -211,11 +211,11 @@ def index_bloat wasted_bytes wasted_pages wasted_size - ) + ] - float_columns = %w( + float_columns = %w[ percent_bloat - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -360,9 +360,9 @@ def database_bloat ) AS sml WHERE schemaname = 'public' ORDER BY 1, 2 - SQL + SQL - integer_columns = %w( + integer_columns = %w[ otta pages pagesize @@ -370,11 +370,11 @@ def database_bloat wasted_bytes wasted_pages wasted_size - ) + ] - float_columns = %w( + float_columns = %w[ percent_bloat - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -404,9 +404,9 @@ def table_statistics FROM pg_stat_all_tables WHERE schemaname NOT IN ('pg_catalog', 'information_schema') ORDER BY relname ASC ; - SQL + SQL - 
integer_columns = %w( + integer_columns = %w[ table_scans sequential_rows_read index_scans @@ -417,14 +417,14 @@ def table_statistics rows_hot_updated rows_live rows_dead - ) + ] - timestamp_columns = %w( + timestamp_columns = %w[ last_vacuum_date last_autovacuum_date last_analyze_date last_autoanalyze_date - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -464,7 +464,7 @@ def table_size AND relname NOT LIKE 'pg_%' ORDER BY reltuples DESC , relpages DESC ; - SQL + SQL stats.each do |s| s["rows"] = s["rows"].to_f.to_i @@ -582,20 +582,20 @@ def table_metrics_bloat(table_name) WHERE schemaname = 'public' AND tablename = '#{table_name}' ORDER BY 1 - SQL + SQL - integer_columns = %w( + integer_columns = %w[ otta pages rows wasted_bytes wasted_pages wasted_size - ) + ] - float_columns = %w( + float_columns = %w[ percent_bloat - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -625,9 +625,9 @@ def table_metrics_analysis(table_name) WHERE schemaname NOT IN ('pg_catalog', 'information_schema') AND relname = '#{table_name}' ORDER BY relname ASC ; - SQL + SQL - integer_columns = %w( + integer_columns = %w[ table_scans sequential_rows_read index_scans @@ -638,14 +638,14 @@ def table_metrics_analysis(table_name) rows_hot_updated rows_live rows_dead - ) + ] - timestamp_columns = %w( + timestamp_columns = %w[ last_vacuum_date last_autovacuum_date last_analyze_date last_autoanalyze_date - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -658,14 +658,14 @@ def table_metrics_analysis(table_name) def table_metrics_total_size(table_name) select_value(<<-SQL, "Table Metrics Total Size").to_i SELECT pg_total_relation_size('#{table_name}'::regclass) AS total_table_size; - SQL + SQL end def number_of_db_connections select_value(<<-SQL, "DB Client Connections").to_i SELECT count(*) as active_connections FROM pg_stat_activity - SQL + SQL end def index_metrics_bloat(index_name) @@ -732,9 +732,9 @@ def index_metrics_bloat(index_name) ) AS sml WHERE iname = '#{index_name}' ORDER BY 1, 2 - SQL + SQL - integer_columns = %w( + integer_columns = %w[ otta pages pagesize @@ -742,11 +742,11 @@ def index_metrics_bloat(index_name) wasted_bytes wasted_pages wasted_size - ) + ] - float_columns = %w( + float_columns = %w[ percent_bloat - ) + ] data.each do |datum| integer_columns.each { |c| datum[c] = datum[c].to_i } @@ -769,18 +769,18 @@ def index_metrics_analysis(index_name) FROM pg_stat_user_indexes WHERE schemaname NOT IN ('pg_catalog', 'information_schema') AND indexrelname = '#{index_name}' ; - SQL + SQL - integer_columns = %w( + integer_columns = %w[ table_id index_id index_scans index_rows_read index_rows_fetched - ) + ] data.each do |datum| - integer_columns.each { |c| datum[c] = datum[c].to_i } + integer_columns.each { |c| datum[c] = datum[c].to_i } end data.to_a @@ -789,9 +789,8 @@ def index_metrics_analysis(index_name) def index_metrics_total_size(_index_name) select_value(<<-SQL, "Index Metrics - Size").to_i SELECT pg_total_relation_size('#{table_name}'::regclass) - pg_relation_size('#{table_name}') AS index_size; - SQL + SQL end - # # DBA operations # @@ -802,14 +801,14 @@ def data_directory SELECT setting AS path FROM pg_settings WHERE name = 'data_directory' - SQL + SQL end # Fetch PostgreSQL last start date/time def last_start_time start_time = select_value(<<-SQL, "Select last start date/time") SELECT pg_postmaster_start_time() - SQL + SQL ActiveRecord::Type::DateTime.new.deserialize(start_time) end diff 
--git a/lib/extensions/ar_adapter/ar_kill.rb b/lib/extensions/ar_adapter/ar_kill.rb index b7a79ee69e4..0edc18fd117 100644 --- a/lib/extensions/ar_adapter/ar_kill.rb +++ b/lib/extensions/ar_adapter/ar_kill.rb @@ -13,19 +13,19 @@ def kill(pid) FROM pg_stat_activity WHERE pid = #{pid_numeric} AND datname = #{quote(current_database)} - SQL + SQL item = data.first if item.nil? _log.info("SPID=[#{pid_numeric}] not found") else _log.info("Sending CANCEL Request for SPID=[#{pid_numeric}], age=[#{item['age']}], query=[#{item['query']}]") - result = select(<<-SQL, "Cancel SPID") + select(<<-SQL, "Cancel SPID") SELECT pg_cancel_backend(#{pid_numeric}) FROM pg_stat_activity WHERE datname = #{quote(current_database)} - SQL - result + SQL + end end end diff --git a/lib/extensions/ar_base_model.rb b/lib/extensions/ar_base_model.rb index 7f435fbe528..b9e06a08c41 100644 --- a/lib/extensions/ar_base_model.rb +++ b/lib/extensions/ar_base_model.rb @@ -1,7 +1,7 @@ module ActiveRecord class Base class << self - alias_method :base_model, :base_class + alias base_model base_class def model_suffix name[base_model.name.length..-1] diff --git a/lib/extensions/ar_migration.rb b/lib/extensions/ar_migration.rb index bca22756220..52578284c7e 100644 --- a/lib/extensions/ar_migration.rb +++ b/lib/extensions/ar_migration.rb @@ -24,11 +24,11 @@ def self.update_local_migrations_ran(version, direction) return unless schema_migrations_ran_exists? if direction == :up - if version == SCHEMA_MIGRATIONS_RAN_MIGRATION - to_add = ActiveRecord::SchemaMigration.normalized_versions << version - else - to_add = [version] - end + to_add = if version == SCHEMA_MIGRATIONS_RAN_MIGRATION + ActiveRecord::SchemaMigration.normalized_versions << version + else + [version] + end to_add.each do |v| binds = [ @@ -77,6 +77,7 @@ def region_number def wait_for_migration? return false unless schema_migrations_ran_class + # We need to unscope here since in_region doesn't override the default scope of in_my_region # see https://github.com/ManageIQ/activerecord-id_regions/issues/11 !schema_migrations_ran_class.unscoped.in_region(region_number).where(:version => version).exists? diff --git a/lib/extensions/ar_miq_set.rb b/lib/extensions/ar_miq_set.rb index 980eca04699..c726fa90f4c 100644 --- a/lib/extensions/ar_miq_set.rb +++ b/lib/extensions/ar_miq_set.rb @@ -97,8 +97,7 @@ module ClassMethods # this class save the *_type column with the full sub-class's name as opposed to the # base model class. This is needed so that tagging works properly. Once tagging is reworked # to handle the base model class name this can be removed and real STI can be used. - def descends_from_active_record?; false; end - # + def descends_from_active_record? 
= false def model_class @model_class ||= name[0..-4].constantize diff --git a/lib/extensions/ar_number_of.rb b/lib/extensions/ar_number_of.rb index b83a6fee1c2..4f598215436 100644 --- a/lib/extensions/ar_number_of.rb +++ b/lib/extensions/ar_number_of.rb @@ -2,7 +2,7 @@ module ActiveRecord class Base def number_of(assoc) @number_of ||= {} - @number_of[assoc.to_sym] ||= send(assoc).try!(:size) || 0 + @number_of[assoc.to_sym] ||= send(assoc)&.size || 0 end end end diff --git a/lib/extensions/ar_order.rb b/lib/extensions/ar_order.rb index 8b330682795..c0c7296ad84 100644 --- a/lib/extensions/ar_order.rb +++ b/lib/extensions/ar_order.rb @@ -4,32 +4,32 @@ module ActiveRecord module ConnectionAdapters module PostgreSQL module SchemaStatements - def columns_for_distinct(columns, orders) #:nodoc: - order_columns = orders.reject(&:blank?).map { |s| - # Convert Arel node to string - unless s.is_a?(String) - if s.kind_of?(Arel::Nodes::Ordering) - s = s.expr - keep_order = true - end - if s.respond_to?(:to_sql) - s = s.to_sql - else # for Arel::Nodes::Attribute - engine = Arel::Table.engine - collector = Arel::Collectors::SQLString.new - collector = engine.connection.visitor.accept s, collector - s = collector.value - end - end + def columns_for_distinct(columns, orders) # :nodoc: + order_columns = orders.reject(&:blank?).map do |s| + # Convert Arel node to string + unless s.kind_of?(String) + if s.kind_of?(Arel::Nodes::Ordering) + s = s.expr + keep_order = true + end + if s.respond_to?(:to_sql) + s = s.to_sql + else # for Arel::Nodes::Attribute + engine = Arel::Table.engine + collector = Arel::Collectors::SQLString.new + collector = engine.connection.visitor.accept(s, collector) + s = collector.value + end + end # If we haven't already removed the order clause, # Remove any ASC/DESC modifiers - if keep_order - s - else - s.gsub(/\s+(?:ASC|DESC)\b/i, "") - .gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, "") - end - }.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" } + if keep_order + s + else + s.gsub(/\s+(?:ASC|DESC)\b/i, "") + .gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, "") + end + end.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" } (order_columns << super).join(", ") end diff --git a/lib/extensions/ar_preloader.rb b/lib/extensions/ar_preloader.rb index d1b23f44f82..9055a3919e9 100644 --- a/lib/extensions/ar_preloader.rb +++ b/lib/extensions/ar_preloader.rb @@ -3,7 +3,7 @@ module ActiveRecordPreloadScopes def records_for(ids) # use our logic if passing in [ActiveRecord::Base] or passing in a loaded Relation/scope unless (preload_scope.kind_of?(Array) && preload_scope.first.kind_of?(ActiveRecord::Base)) || - preload_scope.try(:loaded?) + preload_scope.try(:loaded?) 
return super end diff --git a/lib/extensions/ar_region.rb b/lib/extensions/ar_region.rb index b8957ae5b36..71be3b16e6d 100644 --- a/lib/extensions/ar_region.rb +++ b/lib/extensions/ar_region.rb @@ -2,7 +2,7 @@ module ArRegion extend ActiveSupport::Concern included do - cache_with_timeout(:id_to_miq_region) { Hash.new } + cache_with_timeout(:id_to_miq_region) { {} } end module ClassMethods diff --git a/lib/extensions/ar_table_lock.rb b/lib/extensions/ar_table_lock.rb index 88a58ec816b..6edeee7be5c 100644 --- a/lib/extensions/ar_table_lock.rb +++ b/lib/extensions/ar_table_lock.rb @@ -9,7 +9,7 @@ module ArTableLock # details on locks can be found on postgres docs: # http://www.postgresql.org/docs/9.5/static/explicit-locking.html # - def with_lock(timeout = 60.seconds) + def with_lock(timeout = 60.seconds, &block) lock = "SHARE ROW EXCLUSIVE" transaction do @@ -18,7 +18,7 @@ def with_lock(timeout = 60.seconds) _log.debug("Acquired lock on #{name} (table: #{table_name}...") begin - Timeout.timeout(timeout) { yield } + Timeout.timeout(timeout, &block) ensure _log.debug("Releasing lock on #{name} (table: #{table_name}...") end diff --git a/lib/extensions/ar_taggable.rb b/lib/extensions/ar_taggable.rb index 0aa2dd1af08..012fe8409e4 100644 --- a/lib/extensions/ar_taggable.rb +++ b/lib/extensions/ar_taggable.rb @@ -43,6 +43,7 @@ def find_tagged_with(options = {}) tag_ids = Tag.for_names(tag_names, Tag.get_namespace(options)).pluck(:id) if options[:all] return none if tag_ids.length != tag_names.length + with_all_tags(tag_ids) else with_any_tags(tag_ids) @@ -96,7 +97,7 @@ def tag_attribute(attribute_name, namespace) ) end) - define_method("#{attribute_name}_tags") do + define_method(:"#{attribute_name}_tags") do Tag.filter_ns(tags, namespace) end @@ -108,7 +109,7 @@ def tag_attribute(attribute_name, namespace) end end - alias_method "#{plural_attribute_name}?", plural_attribute_name + alias_method :"#{plural_attribute_name}?", plural_attribute_name end end # module SingletonMethods @@ -120,11 +121,11 @@ def tag_with(list, options = {}) tag = Tag.arel_table tagging = Tagging.arel_table Tagging.joins(:tag) - .where(:taggable_id => id) - .where(:taggable_type => self.class.base_class.name) - .where(tagging[:tag_id].eq(tag[:id])) - .where(tag[:name].matches("#{ns}/%")) - .destroy_all + .where(:taggable_id => id) + .where(:taggable_type => self.class.base_class.name) + .where(tagging[:tag_id].eq(tag[:id])) + .where(tag[:name].matches("#{ns}/%")) + .destroy_all # Apply new tags Tag.parse(list).each do |name| @@ -140,7 +141,8 @@ def tag_add(list, options = {}) # Apply new tags Tag.transaction do Tag.parse(list).each do |name| - next if self.is_tagged_with?(name, options) + next if is_tagged_with?(name, options) + name = File.join(ns, name) tag = Tag.where(:name => name).first_or_create tag.taggings.create(:taggable => self) @@ -157,6 +159,7 @@ def tag_remove(list, options = {}) name = File.join(ns, name) tag = Tag.find_by(:name => name) next if tag.nil? 
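The with_lock change in the ar_table_lock hunk above forwards the caller's block with &block instead of wrapping yield in a fresh block; both shapes run the same code inside Timeout.timeout. A minimal sketch, with illustrative method names:

    require "timeout"

    def run_with_deadline(seconds)
      Timeout.timeout(seconds) { yield }   # old shape: a new block that re-yields
    end

    def run_with_deadline2(seconds, &block)
      Timeout.timeout(seconds, &block)     # new shape: pass the block straight through
    end

    run_with_deadline(1) { 2 + 2 }   # => 4
    run_with_deadline2(1) { 2 + 2 }  # => 4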
+ tag.taggings.where(:taggable => self).destroy_all end end @@ -174,7 +177,8 @@ def tagged_with(options = {}) def is_tagged_with?(tag, options = {}) ns = Tag.get_namespace(options) - return is_vtagged_with?(tag, options) if ns[0..7] == "/virtual" || tag[0..7] == "/virtual" + return is_vtagged_with?(tag, options) if ns[0..7] == "/virtual" || tag[0..7] == "/virtual" + # self.tagged_with(options).include?(File.join(ns ,tag)) Array(tags).include?(File.join(ns, tag)) end @@ -185,7 +189,7 @@ def is_vtagged_with?(tag, options = {}) subject = self parts = File.join(ns, tag.split("/")).split("/")[2..-1] # throw away /virtual object = parts.pop - object = object.gsub(/%2f/, "/") unless object.nil? # decode embedded slashes + object = object.gsub("%2f", "/") unless object.nil? # decode embedded slashes attr = parts.pop begin # resolve any intermediate relationships, throw an error if any of them return multiple results @@ -201,14 +205,14 @@ def is_vtagged_with?(tag, options = {}) relationship = "self" macro = :has_one end - if macro == :has_one || macro == :belongs_to + if [:has_one, :belongs_to].include?(macro) value = subject.public_send(relationship).public_send(attr) - return object.downcase == value.to_s.downcase + object.downcase == value.to_s.downcase else subject.send(relationship).any? { |o| o.send(attr).to_s == object } end rescue NoMethodError - return false + false end end @@ -217,7 +221,7 @@ def is_tagged_with_grouping?(list, options = {}) list.each do |inner_list| inner_result = false inner_list.each do |tag| - if self.is_tagged_with?(tag, options) + if is_tagged_with?(tag, options) inner_result = true break end diff --git a/lib/extensions/ar_to_model_hash.rb b/lib/extensions/ar_to_model_hash.rb index 86b56690dbc..b2e287901d7 100644 --- a/lib/extensions/ar_to_model_hash.rb +++ b/lib/extensions/ar_to_model_hash.rb @@ -23,7 +23,7 @@ def to_model_hash_options_fixup(options) cols = (options["cols"] || options["columns"] || []) cols += (options["key"] || []).compact - ret[:columns] = cols.uniq.sort.collect(&:to_sym) unless cols.blank? + ret[:columns] = cols.uniq.sort.collect(&:to_sym) if cols.present? includes = options["include"] if includes @@ -34,7 +34,7 @@ def to_model_hash_options_fixup(options) ret[:include] = includes.each_with_object({}) do |(k, v), h| sub_options = to_model_hash_options_fixup(v) - h[k.to_sym] = sub_options.blank? ? nil : sub_options + h[k.to_sym] = sub_options.presence end end @@ -59,7 +59,8 @@ def to_model_hash_attrs(options) columns << :id columns.each_with_object({:class => self.class.name}) do |c, h| - next unless self.respond_to?(c) + next unless respond_to?(c) + value = send(c) h[c.to_sym] = value unless value.nil? 
end @@ -72,7 +73,7 @@ def to_model_hash_recursive(options, result = nil) case spec when Symbol, String - if self.respond_to?(spec) + if respond_to?(spec) recs = send(spec) if recs.kind_of?(ActiveRecord::Base) || (recs.kind_of?(Array) && recs.first.kind_of?(ActiveRecord::Base)) single_rec = !recs.kind_of?(Array) @@ -85,7 +86,8 @@ def to_model_hash_recursive(options, result = nil) spec.each { |s| to_model_hash_recursive(s, result) } when Hash spec.each do |k, v| - next unless self.respond_to?(k) + next unless respond_to?(k) + if k == :tags recs = tags.collect { |t| Classification.tag_to_model_hash(t) } else diff --git a/lib/extensions/ar_types.rb b/lib/extensions/ar_types.rb index a7eb7822216..7c7dcd959be 100644 --- a/lib/extensions/ar_types.rb +++ b/lib/extensions/ar_types.rb @@ -1,9 +1,9 @@ require 'active_record/connection_adapters/postgresql_adapter' ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.module_eval do - prepend Module.new { + prepend(Module.new do def initialize_type_map(m = type_map) super m.alias_type('xid', 'varchar') end - } + end) end diff --git a/lib/extensions/ar_yaml.rb b/lib/extensions/ar_yaml.rb index 60a75589510..93b818b36b6 100644 --- a/lib/extensions/ar_yaml.rb +++ b/lib/extensions/ar_yaml.rb @@ -5,12 +5,12 @@ module AttributeAccessorThatYamls def encode_with(coder) super - Array(self.class.attrs_that_yaml).each { |attr| coder[attr.to_s] = instance_variable_get("@#{attr}") } + Array(self.class.attrs_that_yaml).each { |attr| coder[attr.to_s] = instance_variable_get(:"@#{attr}") } end def init_with(coder) super - Array(self.class.attrs_that_yaml).each { |attr| instance_variable_set("@#{attr}", coder[attr.to_s]) } + Array(self.class.attrs_that_yaml).each { |attr| instance_variable_set(:"@#{attr}", coder[attr.to_s]) } self end @@ -20,17 +20,17 @@ def init_with(coder) module ClassMethods def attr_accessor_that_yamls(*args) - module_eval { attr_accessor *args } + module_eval { attr_accessor(*args) } append_to_attrs_that_yaml(*args) end def attr_reader_that_yamls(*args) - module_eval { attr_reader *args } + module_eval { attr_reader(*args) } append_to_attrs_that_yaml(*args) end def attr_writer_that_yamls(*args) - module_eval { attr_writer *args } + module_eval { attr_writer(*args) } append_to_attrs_that_yaml(*args) end diff --git a/lib/extensions/as_include_concern.rb b/lib/extensions/as_include_concern.rb index 901efad923e..e87c5c1ea4b 100644 --- a/lib/extensions/as_include_concern.rb +++ b/lib/extensions/as_include_concern.rb @@ -51,6 +51,7 @@ def include_concern(mod) require_dependency to_include.underscore rescue LoadError => err raise unless err.message.include?(to_include.underscore) + to_include = mod require_dependency to_include.underscore end diff --git a/lib/extensions/descendant_loader.rb b/lib/extensions/descendant_loader.rb index 488ab2500f0..f1f8f52326c 100644 --- a/lib/extensions/descendant_loader.rb +++ b/lib/extensions/descendant_loader.rb @@ -90,11 +90,11 @@ def classes_in(filename) # will definitely be defined inside the innermost containining # scope. We're just not sure how that scope plays out relative # to its parents. 
- if (container_name = scope_names.pop) - define_combos = scoped_name(container_name, name_combinations(scope_names)) - else - define_combos = search_combos.dup - end + define_combos = if (container_name = scope_names.pop) + scoped_name(container_name, name_combinations(scope_names)) + else + search_combos.dup + end [search_combos, define_combos, flatten_name(name), flatten_name(sklass)] end.compact @@ -200,8 +200,8 @@ def save_cache! def classes_in(filename) t = File.mtime(filename) - if (entry = cache[filename]) - return entry[:parsed] if entry[:mtime] == t + if (entry = cache[filename]) && (entry[:mtime] == t) + return entry[:parsed] end super.tap do |data| @@ -219,7 +219,7 @@ def descendants_paths def class_inheritance_relationships @class_inheritance_relationships ||= begin children = Hash.new { |h, k| h[k] = [] } - Dir.glob(descendants_paths.map{|path| Pathname.new(path).join('**/*.rb')}) do |file| + Dir.glob(descendants_paths.map { |path| Pathname.new(path).join('**/*.rb') }) do |file| classes_in(file).each do |search_scopes, define_scopes, name, sklass| possible_names = scoped_name(name, define_scopes) possible_superklasses = scoped_name(sklass, search_scopes) diff --git a/lib/extensions/require_nested.rb b/lib/extensions/require_nested.rb index 907daf8f428..86819912ea8 100644 --- a/lib/extensions/require_nested.rb +++ b/lib/extensions/require_nested.rb @@ -7,6 +7,7 @@ def require_nested(name) filename = name.to_s.underscore if self == Object if Rails.application.config.cache_classes raise LoadError, "No such file to load -- #{filename}" unless ActiveSupport::Dependencies.search_for_file(filename) + autoload name, filename else require_dependency filename diff --git a/lib/extensions/yaml_load_aliases.rb b/lib/extensions/yaml_load_aliases.rb index 7b87d208175..aa1d6faaf79 100644 --- a/lib/extensions/yaml_load_aliases.rb +++ b/lib/extensions/yaml_load_aliases.rb @@ -4,7 +4,7 @@ module YamlLoadAliases def safe_load(yaml, permitted_classes: [], aliases: false, **kwargs) # permitted_classes kwarg is provided because rails 6.1.7.x expects it as a defined kwarg. See: https://github.com/rails/rails/blob/9ab33753b6bab1809fc73d35b98a5c1d0c96ba1b/activerecord/lib/active_record/coders/yaml_column.rb#L52 permitted_classes += YamlPermittedClasses.permitted_classes - super(yaml, permitted_classes: permitted_classes, aliases: true, **kwargs) + super(yaml, :permitted_classes => permitted_classes, :aliases => true, **kwargs) rescue Psych::DisallowedClass => err # Temporary hack to fallback to psych 3 behavior to go back to unsafe load if it's a disallowed class. 
# See: https://stackoverflow.com/questions/71191685/visit-psych-nodes-alias-unknown-alias-default-psychbadalias/71192990#71192990 diff --git a/lib/generators/manageiq/plugin/plugin_generator.rb b/lib/generators/manageiq/plugin/plugin_generator.rb index d83014c78dd..5f2d215abf5 100644 --- a/lib/generators/manageiq/plugin/plugin_generator.rb +++ b/lib/generators/manageiq/plugin/plugin_generator.rb @@ -43,7 +43,7 @@ def create_plugin_files template "bin/setup" template "bin/update" template "bin/before_install" - chmod "bin", 0755 & ~File.umask, :verbose => false + chmod "bin", 0o755 & ~File.umask, :verbose => false empty_directory_with_keep_file "bundler.d" template "config/secrets.defaults.yml" template "config/settings.yml" @@ -232,7 +232,7 @@ def capture(*args) with_output_buffer { value = yield(*args) }.presence || value end - def with_output_buffer(buf = nil) #:nodoc: + def with_output_buffer(buf = nil) # :nodoc: unless buf buf = "" if output_buffer && output_buffer.respond_to?(:encoding) diff --git a/lib/generators/manageiq/provider/provider_generator.rb b/lib/generators/manageiq/provider/provider_generator.rb index 54867d45cd0..24b0e2074fc 100644 --- a/lib/generators/manageiq/provider/provider_generator.rb +++ b/lib/generators/manageiq/provider/provider_generator.rb @@ -26,7 +26,7 @@ def self.manager_types :desc => "Generate default class scaffolding (Default: --scaffolding)" class_option :manager_type, :type => :string, - :desc => "What type of manager to create, required if building scaffolding (Options: #{manager_types.keys.join(", ")})" + :desc => "What type of manager to create, required if building scaffolding (Options: #{manager_types.keys.join(", ")})" def create_provider_files empty_directory "spec/models/#{plugin_path}" diff --git a/lib/git_worktree.rb b/lib/git_worktree.rb index 564c594473c..26da3804c30 100644 --- a/lib/git_worktree.rb +++ b/lib/git_worktree.rb @@ -2,8 +2,9 @@ class GitWorktree attr_accessor :name, :email, :base_name + ENTRY_KEYS = [:path, :dev, :ino, :mode, :gid, :uid, :ctime, :mtime] - DEFAULT_FILE_MODE = 0100644 + DEFAULT_FILE_MODE = 0o100644 LOCK_REFERENCE = 'refs/locks' def self.checkout_at(url, directory, options = {}) @@ -53,6 +54,7 @@ def initialize(options = {}) def delete_repo return false unless @repo + @repo.close FileUtils.rm_rf(@path) true @@ -71,12 +73,14 @@ def branches(where = nil) def branch=(name) branch = find_branch(name) raise GitWorktreeException::BranchMissing, name unless branch + @commit_sha = branch.target.oid end def branch_info(name) branch = find_branch(name) raise GitWorktreeException::BranchMissing, name unless branch + ref = branch.resolve {:time => ref.target.time, :message => ref.target.message, :commit_sha => ref.target.oid} end @@ -92,12 +96,14 @@ def tags def tag=(name) tag = find_tag(name) raise GitWorktreeException::TagMissing, name unless tag + @commit_sha = tag.target.oid end def tag_info(name) tag = find_tag(name) raise GitWorktreeException::TagMissing, name unless tag + {:time => tag.target.time, :message => tag.target.message, :commit_sha => tag.target.oid} end @@ -123,19 +129,15 @@ def add(path, data, default_entry_keys = {}) entry = {} entry[:path] = path ENTRY_KEYS.each { |key| entry[key] = default_entry_keys[key] if default_entry_keys.key?(key) } - entry[:oid] = @repo.write(data, :blob) + entry[:oid] = @repo.write(data, :blob) entry[:mode] ||= DEFAULT_FILE_MODE entry[:mtime] ||= Time.now current_index.add(entry) end - def remove(path) - current_index.remove(path) - end + delegate :remove, :to => 
:current_index - def remove_dir(path) - current_index.remove_dir(path) - end + delegate :remove_dir, :to => :current_index def file_exists?(path) !!find_entry(path) @@ -184,12 +186,14 @@ def file_attributes(fname) walker.push(@repo.ref(local_ref).target.oid) commit = walker.find { |c| c.diff(:paths => [fname]).size > 0 } return {} unless commit + {:updated_on => commit.time.gmtime, :updated_by => commit.author[:name]} end def file_list tree = lookup_commit_tree return [] unless tree + tree.walk(:preorder).collect { |root, entry| "#{root}#{entry[:name]}" } end @@ -215,6 +219,7 @@ def mv_file_with_new_contents(old_file, new_path, new_data, default_entry_keys = def mv_file(old_file, new_file) entry = current_index[old_file] return unless entry + entry[:path] = new_file current_index.add(entry) remove(old_file) @@ -222,6 +227,7 @@ def mv_file(old_file, new_file) def mv_dir(old_dir, new_dir) raise GitWorktreeException::DirectoryAlreadyExists, new_dir if find_entry(new_dir) + old_dir = fix_path_mv(old_dir) new_dir = fix_path_mv(new_dir) updates = current_index.entries.select { |entry| entry[:path].start_with?(old_dir) } @@ -346,7 +352,7 @@ def merge(commit, rebase = false) end def rebase(commit, merge_index, parent) - commit_obj = commit if commit.class == Rugged::Commit + commit_obj = commit if commit.instance_of?(Rugged::Commit) commit_obj ||= @repo.lookup(commit) Rugged::Commit.create(@repo, :author => commit_obj.author, :committer => commit_obj.author, @@ -373,7 +379,7 @@ def process_repo(options) def create_repo @repo = @bare ? Rugged::Repository.init_at(@path, :bare) : Rugged::Repository.init_at(@path) - @repo.config['user.name'] = @username if @username + @repo.config['user.name'] = @username if @username @repo.config['user.email'] = @email if @email @repo.config['merge.ff'] = 'only' if @fast_forward_merge end @@ -403,14 +409,17 @@ def fix_path_mv(dir_name) def get_tree(path) return lookup_commit_tree if path.empty? + entry = get_tree_entry(path) raise GitWorktreeException::GitEntryMissing, path unless entry raise GitWorktreeException::GitEntryNotADirectory, path unless entry[:type] == :tree + @repo.lookup(entry[:oid]) end def lookup_commit_tree return nil if !@commit_sha && !@repo.branches['master'] + ct = @commit_sha ? @repo.lookup(@commit_sha) : @repo.branches['master'].target ct.tree if ct end @@ -433,6 +442,7 @@ def current_index unless @repo.empty? tree = lookup_commit_tree raise ArgumentError, "Cannot locate commit tree" unless tree + @current_tree_oid = tree.oid index.read_tree(tree) end @@ -481,6 +491,7 @@ def differences_with_current(commit) result = [] delta.diff.each_line do |line| next unless line.addition? || line.deletion? + result << "+ #{line.content.to_str}" if line.addition? result << "- #{line.content.to_str}" if line.deletion? 
end diff --git a/lib/git_worktree_exception.rb b/lib/git_worktree_exception.rb index 0cfa6515789..13f5d42923c 100644 --- a/lib/git_worktree_exception.rb +++ b/lib/git_worktree_exception.rb @@ -1,6 +1,7 @@ module GitWorktreeException class GitConflicts < RuntimeError attr_reader :conflicts + def initialize(conflicts) @conflicts = conflicts super diff --git a/lib/httpd_dbus_api.rb b/lib/httpd_dbus_api.rb index d80b0babb95..4c9869ca342 100644 --- a/lib/httpd_dbus_api.rb +++ b/lib/httpd_dbus_api.rb @@ -21,8 +21,8 @@ def user_groups(userid) private def dbus_api(request_url) - host = ENV["HTTPD_DBUS_API_SERVICE_HOST"] - port = ENV["HTTPD_DBUS_API_SERVICE_PORT"] + host = ENV.fetch("HTTPD_DBUS_API_SERVICE_HOST", nil) + port = ENV.fetch("HTTPD_DBUS_API_SERVICE_PORT", nil) conn = Faraday.new(:url => "http://#{host}:#{port}") do |faraday| faraday.options[:open_timeout] = @options[:open_timeout] || 5 # Net::HTTP open_timeout faraday.options[:timeout] = @options[:timeout] || 30 # Net::HTTP read_timeout @@ -44,6 +44,7 @@ def dbus_api(request_url) end raise(body["error"]) if response.status >= 400 + body["result"] end end diff --git a/lib/manageiq.rb b/lib/manageiq.rb index b5071edc02c..b674c9e5752 100644 --- a/lib/manageiq.rb +++ b/lib/manageiq.rb @@ -5,22 +5,20 @@ module ManageIQ # Defined in the same fashion as Rails.env def self.env - @_env ||= begin - if defined?(Rails) - Rails.env - else - ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "development") - end - end + @_env ||= if defined?(Rails) + Rails.env + else + ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "development") + end + end def self.root - @_root ||= begin - if defined?(Rails) - Rails.root - else - Pathname.new(File.expand_path("../..", __FILE__)) - end - end + @_root ||= if defined?(Rails) + Rails.root + else + Pathname.new(File.expand_path('..', __dir__)) + end + end end diff --git a/lib/manageiq/deep_delete.rb b/lib/manageiq/deep_delete.rb index dbde63871aa..07f74966c3e 100644 --- a/lib/manageiq/deep_delete.rb +++ b/lib/manageiq/deep_delete.rb @@ -160,17 +160,17 @@ def refs_callbacks(klass) # in_batches for delete or destroy (not nullify) # similar to: # scope.in_batches(of: batch_size, :load => true).destroy_all.count - # + # # @block takes a subscope and returns a count - def batch(scope, batch_size: 1000, &block) - pk = scope.primary_key + def batch(scope, batch_size: 1000) + pk = scope.primary_key total = 0 loop do if (id = scope.order(pk).limit(1).offset(batch_size).pluck(pk).first) - total += block.call(scope.where("#{pk} < ?", id)) + total += yield(scope.where("#{pk} < ?", id)) else - return total += block.call(scope) + return total += yield(scope) end end end diff --git a/lib/manageiq/environment.rb b/lib/manageiq/environment.rb index c8d70174cb4..0f20a27ecf3 100644 --- a/lib/manageiq/environment.rb +++ b/lib/manageiq/environment.rb @@ -11,7 +11,7 @@ def self.manageiq_plugin_setup(plugin_root = nil) # determine plugin root dir. Assume we are called from a 'bin/' script in the plugin root plugin_root ||= Pathname.new(caller_locations.last.absolute_path).dirname.parent - manageiq_plugin_update(plugin_root, force_bundle_update: false) + manageiq_plugin_update(plugin_root, :force_bundle_update => false) end def self.manageiq_plugin_update(plugin_root = nil, force_bundle_update: true) @@ -24,7 +24,7 @@ def self.manageiq_plugin_update(plugin_root = nil, force_bundle_update: true) setup_gemfile_lock if ci? 
install_bundler(plugin_root) bundle_config(plugin_root) - bundle_update(plugin_root, force: force_bundle_update) + bundle_update(plugin_root, :force => force_bundle_update) unless skip_database_reset? # Update the local development database @@ -46,6 +46,7 @@ def self.ensure_config_files config_files.each do |source, dest| file = APP_ROOT.join(dest) next if file.exist? + puts "Copying #{file} from template..." FileUtils.cp(APP_ROOT.join(source), file) end @@ -82,6 +83,7 @@ def self.setup_gemfile_lock !ENV["GITHUB_REF_NAME"].to_s.start_with?("dependabot/") # Dependabot makes branches in the core repo raise "Missing Gemfile.lock.release" unless APP_ROOT.join("Gemfile.lock.release").file? + FileUtils.cp(APP_ROOT.join("Gemfile.lock.release"), APP_ROOT.join("Gemfile.lock")) end diff --git a/lib/manageiq/network_discovery/discovery.rb b/lib/manageiq/network_discovery/discovery.rb index 6c0ca7fb3bc..a24dd71f13e 100644 --- a/lib/manageiq/network_discovery/discovery.rb +++ b/lib/manageiq/network_discovery/discovery.rb @@ -28,9 +28,11 @@ def self.scan_host(ost) if ping raise ArgumentError, "must pass discover_types" if ost.discover_types.blank? + # Trigger probes ost.discover_types.each do |type| next unless PROVIDERS_BY_TYPE.include?(type) + klass = Object.const_get(PROVIDERS_BY_TYPE[type]) $log&.info("#{klass}: probing ip = #{ost.ipaddr}") klass.probe(ost) diff --git a/lib/manageiq/network_discovery/port.rb b/lib/manageiq/network_discovery/port.rb index 81e442dbd97..1274fc0d40a 100644 --- a/lib/manageiq/network_discovery/port.rb +++ b/lib/manageiq/network_discovery/port.rb @@ -23,7 +23,7 @@ def self.open?(ost, port) rescue Timeout::Error => err $log&.debug("Port scan timeout: ip = #{ost.ipaddr}, port = #{port}, #{err}") false - rescue StandardError => err + rescue => err $log&.debug("Port scan error: ip = #{ost.ipaddr}, port = #{port}, #{err}") false end diff --git a/lib/manageiq/reporting/charting.rb b/lib/manageiq/reporting/charting.rb index 96d24c2fe93..4100543fc66 100644 --- a/lib/manageiq/reporting/charting.rb +++ b/lib/manageiq/reporting/charting.rb @@ -15,7 +15,7 @@ class << self :serialized, :deserialized, :js_load_statement # javascript statement to reload charts - ] => :instance + ] => :instance end # discovery diff --git a/lib/manageiq/reporting/charting/c3_charting.rb b/lib/manageiq/reporting/charting/c3_charting.rb index 5a93a6d8c22..afc21454787 100644 --- a/lib/manageiq/reporting/charting/c3_charting.rb +++ b/lib/manageiq/reporting/charting/c3_charting.rb @@ -29,7 +29,7 @@ def format # called from each ApplicationController instance def load_helpers(klass) klass.instance_eval do - helper ManageIQ::Reporting::Formatter::C3Helper + helper(ManageIQ::Reporting::Formatter::C3Helper) end end @@ -42,7 +42,7 @@ def data_ok?(data) def sample_chart(options, _report_theme) sample = { - :data => { + :data => { :axis => {}, :tooltip => {}, :columns => [ @@ -52,7 +52,7 @@ def sample_chart(options, _report_theme) ], }, :miqChart => options[:graph_type], - :miq => { :zoomed => false } + :miq => {:zoomed => false} } sample[:data][:groups] = [['data1', 'data2', 'data3']] if options[:graph_type].include?('Stacked') sample @@ -69,7 +69,7 @@ def chart_names_for_select # list of themes - in options_for_select format def chart_themes_for_select - [%w(Default default)] + [%w[Default default]] end def serialized(data) diff --git a/lib/manageiq/reporting/formatter.rb b/lib/manageiq/reporting/formatter.rb index 6d06c26b1ae..ae94d645f92 100644 --- a/lib/manageiq/reporting/formatter.rb +++ 
b/lib/manageiq/reporting/formatter.rb @@ -21,19 +21,19 @@ module Formatter # Deprecate the constants within ReportFormatter with a helpful replacement. module ReportFormatter include ActiveSupport::Deprecation::DeprecatedConstantAccessor - deprecate_constant 'BLANK_VALUE', 'ManageIQ::Reporting::Formatter::BLANK_VALUE' - deprecate_constant 'CRLF', 'ManageIQ::Reporting::Formatter::CRLF' - deprecate_constant 'LABEL_LENGTH', 'ManageIQ::Reporting::Formatter::LABEL_LENGTH' - deprecate_constant 'LEGEND_LENGTH', 'ManageIQ::Reporting::Formatter::LEGEND_LENGTH' + deprecate_constant :BLANK_VALUE, 'ManageIQ::Reporting::Formatter::BLANK_VALUE' + deprecate_constant :CRLF, 'ManageIQ::Reporting::Formatter::CRLF' + deprecate_constant :LABEL_LENGTH, 'ManageIQ::Reporting::Formatter::LABEL_LENGTH' + deprecate_constant :LEGEND_LENGTH, 'ManageIQ::Reporting::Formatter::LEGEND_LENGTH' - deprecate_constant 'C3Formatter', 'ManageIQ::Reporting::Formatter::C3' - deprecate_constant 'C3Series', 'ManageIQ::Reporting::Formatter::C3Series' - deprecate_constant 'C3Charting', 'ManageIQ::Reporting::Formatter::C3Charting' - deprecate_constant 'ChartCommon', 'ManageIQ::Reporting::Formatter::ChartCommon' - deprecate_constant 'Converter', 'ManageIQ::Reporting::Formatter::Converter' - deprecate_constant 'ReportHTML', 'ManageIQ::Reporting::Formatter::Html' - deprecate_constant 'ReportRenderer', 'ManageIQ::Reporting::Formatter::ReportRenderer' - deprecate_constant 'ReportText', 'ManageIQ::Reporting::Formatter::Text' - deprecate_constant 'ReportTimeline', 'ManageIQ::Reporting::Formatter::Timeline' - deprecate_constant 'TimelineMessage', 'ManageIQ::Reporting::Formatter::TimelineMessage' + deprecate_constant :C3Formatter, 'ManageIQ::Reporting::Formatter::C3' + deprecate_constant :C3Series, 'ManageIQ::Reporting::Formatter::C3Series' + deprecate_constant :C3Charting, 'ManageIQ::Reporting::Formatter::C3Charting' + deprecate_constant :ChartCommon, 'ManageIQ::Reporting::Formatter::ChartCommon' + deprecate_constant :Converter, 'ManageIQ::Reporting::Formatter::Converter' + deprecate_constant :ReportHTML, 'ManageIQ::Reporting::Formatter::Html' + deprecate_constant :ReportRenderer, 'ManageIQ::Reporting::Formatter::ReportRenderer' + deprecate_constant :ReportText, 'ManageIQ::Reporting::Formatter::Text' + deprecate_constant :ReportTimeline, 'ManageIQ::Reporting::Formatter::Timeline' + deprecate_constant :TimelineMessage, 'ManageIQ::Reporting::Formatter::TimelineMessage' end diff --git a/lib/manageiq/reporting/formatter/c3.rb b/lib/manageiq/reporting/formatter/c3.rb index bcccf692c53..0e94d90db19 100644 --- a/lib/manageiq/reporting/formatter/c3.rb +++ b/lib/manageiq/reporting/formatter/c3.rb @@ -97,6 +97,7 @@ def build_document_header end return if mri.graph[:columns].blank? + column = grouped_by_tag_category? ? mri.graph[:columns][0].split(/_+/)[0..-2].join('_') : mri.graph[:columns][0] format, options = javascript_format(column, nil) return unless format @@ -116,7 +117,7 @@ def chart_is_2d? end def chart_is_stacked? 
- %w(StackedBar StackedColumn StackedArea).include?(mri.graph[:type]) + %w[StackedBar StackedColumn StackedArea].include?(mri.graph[:type]) end # change structure of chart JSON to performance chart with timeseries data diff --git a/lib/manageiq/reporting/formatter/c3_helper.rb b/lib/manageiq/reporting/formatter/c3_helper.rb index d1db93d3856..4bd71aaaa78 100644 --- a/lib/manageiq/reporting/formatter/c3_helper.rb +++ b/lib/manageiq/reporting/formatter/c3_helper.rb @@ -13,7 +13,7 @@ def c3chart_remote(url, opts = {}) ManageIQ.charts.c3["#{chart_id}"] = chart; miqSparkleOff(); }); - EOJ + EOJ end def c3chart_local(data, opts = {}) @@ -24,7 +24,7 @@ def c3chart_local(data, opts = {}) var data = #{data.to_json}; var chart = c3.generate(chartData('#{data[:miqChart]}', data, { bindto: "##{chart_id}" })); ManageIQ.charts.c3["#{chart_id}"] = chart; - EOJ + EOJ end end end diff --git a/lib/manageiq/reporting/formatter/chart_common.rb b/lib/manageiq/reporting/formatter/chart_common.rb index 6549303cb89..c8488a9e434 100644 --- a/lib/manageiq/reporting/formatter/chart_common.rb +++ b/lib/manageiq/reporting/formatter/chart_common.rb @@ -7,7 +7,7 @@ def slice_legend(string, limit = LEGEND_LENGTH) when Date, Time, DateTime, ActiveSupport::TimeWithZone string.iso8601(3) else - string.to_s.gsub(/\n/, ' ').truncate(limit) + string.to_s.tr("\n", ' ').truncate(limit) end end @@ -28,6 +28,7 @@ def build_document_header def build_document_body return no_records_found_chart if mri.table.nil? || mri.table.data.blank? + maxcols = 8 fun = case graph_options[:chart_type] when :performance then :build_performance_chart # performance chart (time based) @@ -48,11 +49,11 @@ def build_performance_chart_area(maxcols) tz = mri.get_time_zone(Time.zone.name) mri.graph[:columns].each_with_index do |col, col_idx| - next if col_idx >= maxcols + allnil = true tip = graph_options[:trendtip] if col.starts_with?("trend") && graph_options[:trendtip] - categories = [] # Store categories and series counts in an array of arrays + categories = [] # Store categories and series counts in an array of arrays series = series_class.new mri.table.data.each_with_index do |r, d_idx| rec_time = r["timestamp"].in_time_zone(tz) @@ -77,6 +78,7 @@ def build_performance_chart_area(maxcols) def rounded_value(value) return 0 if value.blank? + value.round(graph_options[:decimals] || 0) end @@ -90,6 +92,7 @@ def build_performance_chart_pie(_maxcols) cat = cat_cnt > 6 ? 'Others' : r["resource_name"] val = rounded_value(r[col]) next if val == 0 + if cat.starts_with?("Others") && categories[-1].starts_with?("Others") # Are we past the top 10? 
categories[-1] = "Others" series.add_to_value(-1, val) # Accumulate the series value @@ -135,7 +138,8 @@ def build_util_ts_chart_column :function => { :name => "mhz_to_human_size", :precision => "1" - }}) + } + }) when "Memory" mri.format(tip_key, r[tip_key].to_f * 1024 * 1024, :format => format_bytes_human_size_1) when "Disk" @@ -191,13 +195,11 @@ def build_reporting_chart_dim2 save2_nonblank = nonblank_or_default(save2) counts[save1_nonblank] = Hash.new(0) counter = 0 - else - if save2 != r[sort2].to_s # only the second sort field changed, save the count - counts[save1_nonblank][save2_nonblank] = counter - save2 = r[sort2].to_s - save2_nonblank = nonblank_or_default(save2) - counter = 0 - end + elsif save2 != r[sort2].to_s + counts[save1_nonblank][save2_nonblank] = counter + save2 = r[sort2].to_s + save2_nonblank = nonblank_or_default(save2) + counter = 0 # only the second sort field changed, save the count end counter += 1 end @@ -227,13 +229,13 @@ def build_reporting_chart_dim2 a.push(:value => hash1[val2[0]], :tooltip => "#{key1} / #{val2[0]}") end - val2[0] = val2[0].to_s.gsub(/\\/, ' \ ') + val2[0] = val2[0].to_s.gsub("\\", ' \ ') add_series(val2[0].to_s, series) end if other.present? && show_other # Sum up the other sort2 counts by sort1 value series = series_class.new - counts.each do |key1, hash1| # Go thru each sort1 key and hash count + counts.each do |key1, hash1| # Go thru each sort1 key and hash count # Add in all of the remaining sort2 key counts ocount = other.reduce(0) { |a, e| a + hash1[e[0]] } series.push(:value => ocount, @@ -286,7 +288,7 @@ def build_numeric_chart_simple sorted_data = mri.table.data.sort_by { |row| row[data_column_name] || 0 } series = sorted_data.reverse.take(keep) - .each_with_object(series_class.new(pie_type? ? :pie : :flat)) do |row, a| + .each_with_object(series_class.new(pie_type? ? :pie : :flat)) do |row, a| tooltip = row[sort1] tooltip = _('no value') if tooltip.blank? a.push(:value => row[data_column_name], @@ -316,7 +318,7 @@ def build_numeric_chart_grouped categories = [] series = sorted_data.reverse.take(keep) - .each_with_object(series_class.new(pie_type? ? :pie : :flat)) do |(key, data), a| + .each_with_object(series_class.new(pie_type? ? :pie : :flat)) do |(key, data), a| tooltip = key tooltip = _('no value') if key.blank? a.push(:value => data[aggreg][raw_column_name], @@ -351,8 +353,8 @@ def build_numeric_chart_grouped_2dim def_range_key2 = subtotals.keys.map { |key| key.split('__')[1] || '' }.sort.uniq - group_sums = groups.keys.each_with_object({}) do |key1, h| - h[key1] = def_range_key2.inject(0) do |sum, key2| + group_sums = groups.keys.index_with do |key1| + def_range_key2.inject(0) do |sum, key2| sub_key = "#{key1}__#{key2}" subtotals.key?(sub_key) ? sum + subtotals[sub_key][aggreg][raw_column_name] : sum end @@ -389,14 +391,16 @@ def build_numeric_chart_grouped_2dim :tooltip => "#{key1} / #{val2}") end - series.push(:value => other[val2], - :tooltip => "Other / #{val2}") if show_other + if show_other + series.push(:value => other[val2], + :tooltip => "Other / #{val2}") + end label = val2 if val2.kind_of?(String) - label = label.to_s.gsub(/\\/, ' \ ') + label = label.to_s.gsub("\\", ' \ ') label = _('no value') if label.blank? add_series(label, series) end - groups.keys.collect { |k| k.blank? ? _('no value') : k } + groups.keys.collect { |k| (k.presence || _('no value')) } end def pie_type? @@ -406,7 +410,7 @@ def pie_type? 
def build_reporting_chart_other save_key = nil counter = 0 - categories = [] # Store categories and series counts in an array of arrays + categories = [] # Store categories and series counts in an array of arrays mri.table.data.each_with_index do |r, d_idx| category_changed = save_key != r[mri.sortby[0]] not_first_iteration = d_idx > 0 @@ -428,7 +432,8 @@ def build_reporting_chart_other kept_categories.map { |cat| [nonblank_or_default(cat.first), cat.last] } series = kept_categories.each_with_object( - series_class.new(pie_type? ? :pie : :flat)) do |cat, a| + series_class.new(pie_type? ? :pie : :flat) + ) do |cat, a| a.push(:value => cat.last, :tooltip => cat.first) end @@ -450,11 +455,12 @@ def build_performance_chart(maxcols) # Utilization timestamp charts def build_util_ts_chart(_maxcols) - build_util_ts_chart_column if %w(Column ColumnThreed).index(mri.graph[:type]) + build_util_ts_chart_column if %w[Column ColumnThreed].index(mri.graph[:type]) end def build_reporting_chart_numeric(_maxcols) return no_records_found_chart(_('Invalid chart definition')) unless mri.graph[:column].present? + if mri.group.nil? build_numeric_chart_simple else diff --git a/lib/manageiq/reporting/formatter/html.rb b/lib/manageiq/reporting/formatter/html.rb index 108cd73283f..8e75f57bf44 100644 --- a/lib/manageiq/reporting/formatter/html.rb +++ b/lib/manageiq/reporting/formatter/html.rb @@ -18,6 +18,7 @@ def build_html_title def pad(str, len) return "".ljust(len) if str.nil? + str = str.slice(0, len) # truncate long strings str.ljust(len) # pad with whitespace end diff --git a/lib/manageiq/reporting/formatter/text.rb b/lib/manageiq/reporting/formatter/text.rb index c1acad79bd9..1de2dfe53a5 100644 --- a/lib/manageiq/reporting/formatter/text.rb +++ b/lib/manageiq/reporting/formatter/text.rb @@ -16,16 +16,16 @@ def calculate_max_col_widths end end mri.table.data.each do |r| - mri.col_formats ||= [] # Backward compat - create empty array for formats + mri.col_formats ||= [] # Backward compat - create empty array for formats mri.col_order.each_with_index do |f, i| - unless ["", ""].include?(mri.db) - data = mri.format(f, + data = if ["", ""].include?(mri.db) + r[f].to_s + else + mri.format(f, r[f], - :format => mri.col_formats[i] ? mri.col_formats[i] : :_default_, + :format => mri.col_formats[i] || :_default_, :tz => tz) - else - data = r[f].to_s - end + end if !@max_col_width[i] || data.length > @max_col_width[i] @max_col_width[i] = data.length end @@ -56,6 +56,7 @@ def calculate_filter_names(tag) def build_document_header mri = options.mri raise "No settings configured for Table" if mri.table.nil? + calculate_max_col_widths @hr = hr @@ -74,6 +75,7 @@ def build_document_header end return if mri.headers.empty? + c = mri.headers.dup # Remove headers of hidden columns mri.col_order.each_with_index do |f, i| @@ -101,9 +103,10 @@ def build_document_body counter = 0 row_limit = mri.rpt_options && mri.rpt_options[:row_limit] ? mri.rpt_options[:row_limit] : 0 - use_table = mri.sub_table ? 
mri.sub_table : mri.table + use_table = mri.sub_table || mri.table use_table.data.each_with_index do |r, d_idx| break if row_limit != 0 && d_idx > row_limit - 1 + line = [] line_wrapper = false # Clear line wrapper flag if [""].include?(mri.db) && r[0] == "% Match:" @@ -111,19 +114,19 @@ def build_document_body elsif [""].include?(mri.db) && r[0] == "Changed:" line_wrapper = true # Wrap drift changed lines with header rows end - mri.col_formats ||= [] # Backward compat - create empty array for formats + mri.col_formats ||= [] # Backward compat - create empty array for formats mri.col_order.each_with_index do |f, i| next if mri.column_is_hidden?(f) - unless ["", ""].include?(mri.db) - data = mri.format(f, + data = if ["", ""].include?(mri.db) + r[f].to_s + else + mri.format(f, r[f], - :format => mri.col_formats[i] ? mri.col_formats[i] : :_default_, + :format => mri.col_formats[i] || :_default_, :tz => tz) - else - data = r[f].to_s - end - if options.alignment.eql? :center + end + if options.alignment.eql?(:center) line << data.center(@max_col_width[i]) else align = data.kind_of?(Numeric) ? :rjust : :ljust @@ -153,12 +156,10 @@ def build_document_body end # see if a final group line needs to be written - if ["y", "c"].include?(mri.group) && !mri.sortby.nil? - if mri.group == "c" - s += @hr + if ["y", "c"].include?(mri.group) && !mri.sortby.nil? && (mri.group == "c") + s += @hr t = " Total for #{save_val}: #{counter} ".center(@line_len - 2) s += fit_to_width("|#{t}|" + CRLF) - end end s += @hr @@ -168,33 +169,33 @@ def build_document_body def build_document_footer mri = options.mri tz = mri.get_time_zone(Time.zone.name) - if !mri.user_categories.blank? || !mri.categories.blank? || !mri.conditions.nil? || !mri.display_filter.nil? + if mri.user_categories.present? || mri.categories.present? || !mri.conditions.nil? || !mri.display_filter.nil? output << fit_to_width(@hr) - unless mri.user_categories.blank? + if mri.user_categories.present? user_filters = mri.user_categories.flatten - unless user_filters.blank? + if user_filters.present? customer_name = Tenant.root_tenant.name user_filter = "User assigned " + customer_name + " Tag filters:" t = user_filter.ljust(@line_len - 2) output << fit_to_width("|#{t}|" + CRLF) user_filters.each do |filters| tag_val = " " + calculate_filter_names(filters) - tag_val1 = tag_val + " " * (@line_len - tag_val.length - 2) + tag_val1 = tag_val + (" " * (@line_len - tag_val.length - 2)) output << fit_to_width("|#{tag_val1}|" + CRLF) end end end - unless mri.categories.blank? + if mri.categories.present? categories = mri.categories.flatten - unless categories.blank? + if categories.present? 
customer_name = Tenant.root_tenant.name customer_name_title = "Report based " + customer_name + " Tag filters:" - t = customer_name_title + " " * (@line_len - customer_name_title.length - 2) + t = customer_name_title + (" " * (@line_len - customer_name_title.length - 2)) output << fit_to_width("|#{t}|" + CRLF) categories.each do |filters| tag_val = " " + calculate_filter_names(filters) - tag_val1 = tag_val + " " * (@line_len - tag_val.length - 2) + tag_val1 = tag_val + (" " * (@line_len - tag_val.length - 2)) output << fit_to_width("|#{tag_val1}|" + CRLF) end end @@ -207,7 +208,7 @@ def build_document_footer output << fit_to_width("|#{t}|" + CRLF) # Clean up the conditions for display - tables = mri.conditions[:field].split("-")[0].split(".") # Get the model and tables + tables = mri.conditions[:field].split("-")[0].split(".") # Get the model and tables field = Dictionary.gettext(tables[0], :type => :model, :notfound => :titleize) # Start with the model tables[1..-1].each do |t| # Add on any tables field += "." + Dictionary.gettext(t, :type => :table, :notfound => :titleize) @@ -216,7 +217,7 @@ def build_document_footer field += " : " + Dictionary.gettext(mri.conditions[:field].split("-")[1], :type => :column, :notfound => :titleize) filter_val = " " + field + " " + mri.conditions[:operator] + " " + mri.conditions[:string].to_s - t = filter_val + " " * (@line_len - filter_val.length - 2) + t = filter_val + (" " * (@line_len - filter_val.length - 2)) output << fit_to_width("|#{t}|" + CRLF) else filter_fields = "Report based filter fields:" @@ -233,14 +234,14 @@ def build_document_footer t = filter_fields.ljust(@line_len - 2) output << fit_to_width("|#{t}|" + CRLF) filter_val = mri.display_filter.to_human - t = filter_val + " " * (@line_len - filter_val.length - 2) + t = filter_val + (" " * (@line_len - filter_val.length - 2)) output << fit_to_width("|#{t}|" + CRLF) end end output << fit_to_width(@hr) # Label footer with last run on time of selected report or current time for other downloads - last_run_on = mri.rpt_options && mri.rpt_options[:last_run_on] || Time.zone.now + last_run_on = (mri.rpt_options && mri.rpt_options[:last_run_on]) || Time.zone.now cr = format_timezone(last_run_on, tz).to_s f = cr.center(@line_len - 2) output << fit_to_width("|#{f}|" + CRLF) @@ -253,12 +254,12 @@ def build_document_footer # "+------------------+" def hr columns = options.mri.table.column_names - if columns.include?("id") # Use 1 less column if "id" is present - @line_len = @max_col_width.inject((columns.length - 1) * 3) { |s, e| s + e } + 1 - else - @line_len = @max_col_width.inject(columns.length * 3) { |s, e| s + e } - end - "+" + "-" * (@line_len - 2) + "+" + CRLF + @line_len = if columns.include?("id") # Use 1 less column if "id" is present + @max_col_width.inject((columns.length - 1) * 3) { |s, e| s + e } + 1 + else + @max_col_width.inject(columns.length * 3) { |s, e| s + e } + end + "+" + ("-" * (@line_len - 2)) + "+" + CRLF end end end diff --git a/lib/manageiq/reporting/formatter/timeline.rb b/lib/manageiq/reporting/formatter/timeline.rb index 4e205279cc7..dc2c22e9305 100644 --- a/lib/manageiq/reporting/formatter/timeline.rb +++ b/lib/manageiq/reporting/formatter/timeline.rb @@ -29,11 +29,11 @@ def build_document_body @events = [] @events_data = [] tlfield = mri.timeline[:field].split("-") # Split the table and field - if tlfield.first.include?(".") # If table has a period (from a sub table) - col = tlfield.first.split(".").last + "." 
+ tlfield.last # use subtable.field - else - col = tlfield.last # Not a subtable, just grab the field name - end + col = if tlfield.first.include?(".") # If table has a period (from a sub table) + tlfield.first.split(".").last + "." + tlfield.last # use subtable.field + else + tlfield.last # Not a subtable, just grab the field name + end # some of the OOTB reports have db as EventStream or PolicyEvent, # those do not have event categories, so need to go thru else block for such reports. @@ -58,7 +58,7 @@ def build_document_body end else mri.table.data.each_with_index do |row, _d_idx| - tl_event(row, col) # Add this row to the tl event xml + tl_event(row, col) # Add this row to the tl event xml end @events.push(:data => [@events_data]) end @@ -72,8 +72,9 @@ def tl_event(row, col) mri = options.mri tz = mri.get_time_zone(Time.zone.name) etime = row[col] - return if etime.nil? # Skip nil dates - Sprint 41 + return if etime.nil? # Skip nil dates - Sprint 41 return if !@start_time.nil? && etime < @start_time # Skip if before start time limit + # START of TIMELINE TIMEZONE Code mri.extras[:tl_position] ||= format_timezone(etime.to_time, tz, 'raw') if mri.timeline[:position] && mri.timeline[:position] == "First" @@ -82,14 +83,14 @@ def tl_event(row, col) # if there is item with current time or greater then use that else, use right most one. if format_timezone(etime.to_time, tz, 'raw') >= format_timezone(Time.now, tz, 'raw') && format_timezone(etime.to_time, tz, 'raw') <= format_timezone(mri.extras[:tl_position], tz, 'raw') mri.extras[:tl_position] = format_timezone(etime.to_time, tz, 'raw') - else - mri.extras[:tl_position] = format_timezone(etime.to_time, tz, 'raw') if format_timezone(etime.to_time, tz, 'raw') > format_timezone(mri.extras[:tl_position], tz, 'raw') + elsif format_timezone(etime.to_time, tz, 'raw') > format_timezone(mri.extras[:tl_position], tz, 'raw') + mri.extras[:tl_position] = format_timezone(etime.to_time, tz, 'raw') end - else - mri.extras[:tl_position] = format_timezone(etime.to_time, tz, 'raw') if format_timezone(etime.to_time, tz, 'raw') > format_timezone(mri.extras[:tl_position], tz, 'raw') + elsif format_timezone(etime.to_time, tz, 'raw') > format_timezone(mri.extras[:tl_position], tz, 'raw') + mri.extras[:tl_position] = format_timezone(etime.to_time, tz, 'raw') end # END of TIMELINE TIMEZONE Code - if row["id"] # Make sure id column is present + if row["id"] # Make sure id column is present rec = mri.db.constantize.find_by_id(row['id']) end unless rec.nil? @@ -103,7 +104,7 @@ def tl_event(row, col) ems_storage = false if rec[:ems_id] && ExtManagementSystem.exists?(rec[:ems_id]) ems = ExtManagementSystem.find(rec[:ems_id]) - ems_cloud = true if ems.kind_of?(EmsCloud) + ems_cloud = true if ems.kind_of?(EmsCloud) ems_container = true if ems.kind_of?(::ManageIQ::Providers::ContainerManager) ems_storage = true if ems.kind_of?(::ManageIQ::Providers::StorageManager) end @@ -125,7 +126,7 @@ def tl_event(row, col) end end else - e_title = rec[:name] ? rec[:name] : row[mri.col_order.first].to_s + e_title = rec[:name] || row[mri.col_order.first].to_s end end e_title ||= ems ? ems.name : "No VM, Host, or MS" @@ -173,6 +174,7 @@ def tl_event(row, col) col_order.each_with_index do |co, co_idx| value = tl_message.message_html(co) next if value.to_s.empty? 
|| co == "id" + event_data[co] = { :value => value, :text => headers[co_idx] diff --git a/lib/manageiq/reporting/formatter/timeline_message.rb b/lib/manageiq/reporting/formatter/timeline_message.rb index 7a391ade087..86bde6a19dd 100644 --- a/lib/manageiq/reporting/formatter/timeline_message.rb +++ b/lib/manageiq/reporting/formatter/timeline_message.rb @@ -2,7 +2,7 @@ module ManageIQ module Reporting module Formatter class TimelineMessage - TIMELINE_TIME_COLUMNS = %w(created_on timestamp).freeze + TIMELINE_TIME_COLUMNS = %w[created_on timestamp].freeze def initialize(row, event, flags, db) @row, @event, @flags, @db = row, event, flags, db diff --git a/lib/manageiq/session/memory_store_adapter.rb b/lib/manageiq/session/memory_store_adapter.rb index cff59f599f3..cfd21dbaef4 100644 --- a/lib/manageiq/session/memory_store_adapter.rb +++ b/lib/manageiq/session/memory_store_adapter.rb @@ -8,7 +8,7 @@ module Session # data will only persist for as long as the ruby interpreter # instance does. class MemoryStore < AbstractStore - GLOBAL_HASH_TABLE = {} #:nodoc: + GLOBAL_HASH_TABLE = {} # :nodoc: def find_session(_req, session_id) session_id ||= generate_sid diff --git a/lib/manageiq/util/memory_logging.rb b/lib/manageiq/util/memory_logging.rb index 1a5269ac599..bcc2ca34ed5 100644 --- a/lib/manageiq/util/memory_logging.rb +++ b/lib/manageiq/util/memory_logging.rb @@ -6,13 +6,13 @@ module ManageIQ::Util::MemoryLogging def memory_logger(message, &block) debug(message) if block - yield if block_given? + yield if block debug(message) end def memory_logger_with_gc(message, &block) debug(message) if block - yield if block_given? + yield if block GC.start GC.start GC.start diff --git a/lib/miq_environment.rb b/lib/miq_environment.rb index 32cb8e16ea3..32515395480 100644 --- a/lib/miq_environment.rb +++ b/lib/miq_environment.rb @@ -47,11 +47,13 @@ class Command def self.supports_systemd? return @supports_systemd unless @supports_systemd.nil? + @supports_systemd = is_appliance? && !is_container? && supports_command?('systemctl') end def self.supports_nohup_and_backgrounding? return @supports_nohup unless @supports_nohup.nil? + @supports_nohup = is_appliance? && supports_command?('nohup') end @@ -61,6 +63,7 @@ def self.is_production_build? def self.is_container? return @is_container unless @is_container.nil? + @is_container = ENV["CONTAINER"] == "true" end @@ -74,6 +77,7 @@ def self.is_podified? def self.is_appliance? return @is_appliance unless @is_appliance.nil? + @is_appliance = ENV["APPLIANCE"] == "true" end @@ -87,6 +91,7 @@ def self.is_production? def self.is_linux? return @is_linux unless @is_linux.nil? + @is_linux = (Sys::Platform::IMPL == :linux) end diff --git a/lib/miq_expression.rb b/lib/miq_expression.rb index 7b3cae7e9f7..182d73dcb95 100644 --- a/lib/miq_expression.rb +++ b/lib/miq_expression.rb @@ -8,7 +8,7 @@ class MiqExpression include Vmdb::Logging attr_accessor :exp, :context_type, :preprocess_options - config = YAML.load(ERB.new(File.read(Rails.root.join("config", "miq_expression.yml"))).result) # rubocop:disable Security/YAMLLoad + config = YAML.load(ERB.new(File.read(Rails.root.join("config/miq_expression.yml"))).result) # rubocop:disable Security/YAMLLoad BASE_TABLES = config[:base_tables] INCLUDE_TABLES = config[:include_tables] EXCLUDE_COLUMNS = config[:exclude_columns] @@ -43,7 +43,7 @@ def valid?(component = exp) when "not", "!" 
valid?(component[operator]) when "find" - validate_set = Set.new(%w(checkall checkany checkcount search)) + validate_set = Set.new(%w[checkall checkany checkcount search]) validate_keys = component[operator].keys.select { |k| validate_set.include?(k) } validate_keys.all? { |k| valid?(component[operator][k]) } else @@ -62,6 +62,7 @@ def valid?(component = exp) def set_tagged_target(model, associations = []) each_atom(exp) do |atom| next unless atom.key?("tag") + tag = Tag.parse(atom["tag"]) tag.model = model tag.associations = associations @@ -71,6 +72,7 @@ def set_tagged_target(model, associations = []) def self.proto? return @proto if defined?(@proto) + @proto = ::Settings.product.proto end @@ -84,9 +86,9 @@ def self.to_human(exp) when "tag" tag = [exp["ns"], exp["tag"]].join("/") if exp["include"] == "none" - return "Not Tagged With #{tag}" + "Not Tagged With #{tag}" else - return "Tagged With #{tag}" + "Tagged With #{tag}" end when "script" if exp["expr"] == "true" @@ -132,6 +134,7 @@ def self._to_human(exp, options = {}) check = "checkany" if exp[operator].include?("checkany") check = "checkcount" if exp[operator].include?("checkcount") raise _("expression malformed, must contain one of 'checkall', 'checkany', 'checkcount'") unless check + check =~ /^check(.*)$/ mode = $1.upcase clause = "FIND" + " " + _to_human(exp[operator]["search"]) + " CHECK " + mode + " " + _to_human(exp[operator][check], :include_table => false).strip @@ -269,6 +272,7 @@ def self._to_ruby(exp, context_type, tz) # So we have to trick it by replacing the value with the description. description = MiqExpression.get_entry_details(op_args["tag"]).inject("") do |s, t| break(t.first) if t.last == op_args["value"] + s end val = op_args["tag"].split(".").last.split("-").join(".") @@ -287,10 +291,11 @@ def self._to_ruby(exp, context_type, tz) op_args[check][op]["field"] = "" end raise _("expression malformed, must contain one of 'checkall', 'checkany', 'checkcount'") unless check + check =~ /^check(.*)$/ mode = $1.downcase clause = "" + _to_ruby(op_args["search"], context_type, tz) + "" \ - "" + _to_ruby(op_args[check], context_type, tz) + "" + "" + _to_ruby(op_args[check], context_type, tz) + "" when "key exists" clause, = operands2rubyvalue(operator, op_args, context_type) when "value exists" @@ -324,7 +329,7 @@ def to_sql(tz = nil) pexp, seen = prune_exp(pexp, MODE_SQL) attrs = {:supported_by_sql => (seen == MODE_SQL)} sql = to_arel(pexp, tz).to_sql if pexp.present? - incl = includes_for_sql unless sql.blank? + incl = includes_for_sql if sql.present? [sql, incl, attrs] end @@ -470,27 +475,27 @@ def sql_supports_atom?(exp) elsif exp[operator].key?("field") Field.parse(exp[operator]["field"]).attribute_supported_by_sql? 
else - return false + false end when "includes" # Support includes operator using "LIKE" only if first operand is in main table if exp[operator].key?("field") && (!exp[operator]["field"].include?(".") || (exp[operator]["field"].include?(".") && exp[operator]["field"].split(".").length == 2)) - return field_in_sql?(exp[operator]["field"]) + field_in_sql?(exp[operator]["field"]) else # TODO: Support includes operator for sub-sub-tables - return false + false end when "includes any", "includes all", "includes only" # Support this only from the main model (for now) if exp[operator].keys.include?("field") && exp[operator]["field"].split(".").length == 1 model, field = exp[operator]["field"].split("-") method = "miq_expression_#{operator.downcase.tr(' ', '_')}_#{field}_arel" - return model.constantize.respond_to?(method) + model.constantize.respond_to?(method) else - return false + false end when "find", "regular expression matches", "regular expression does not match", "key exists", "value exists" - return false + false else # => false if operand is a tag return false if exp[operator].keys.include?("tag") @@ -501,7 +506,7 @@ def sql_supports_atom?(exp) # => TODO: support count of child relationship return false if exp[operator].key?("count") - return field_in_sql?(exp[operator]["field"]) && value_in_sql?(exp[operator]["value"]) + field_in_sql?(exp[operator]["field"]) && value_in_sql?(exp[operator]["value"]) end end @@ -520,6 +525,7 @@ def field_in_sql?(field) def attribute_supported_by_sql?(field) return false unless col_details[field] + col_details[field][:sql_support] end # private attribute_supported_by_sql? -- tests only @@ -580,16 +586,16 @@ def evaluate(obj, tz = nil) end def self.evaluate_atoms(exp, obj) - exp = exp.kind_of?(self) ? copy_hash(exp.exp) : exp + exp = copy_hash(exp.exp) if exp.kind_of?(self) exp["result"] = new(exp).evaluate(obj) operators = exp.keys operators.each do |k| - if %w(and or).include?(k.to_s.downcase) # and/or atom is an array of atoms + if %w[and or].include?(k.to_s.downcase) # and/or atom is an array of atoms exp[k].each do |atom| evaluate_atoms(atom, obj) end - elsif %w(not !).include?(k.to_s.downcase) # not atom is a hash expression + elsif %w[not !].include?(k.to_s.downcase) # not atom is a hash expression evaluate_atoms(exp[k], obj) else next @@ -664,6 +670,7 @@ def self.value2human(val, options = {}) elsif first first = nil next unless options[:include_model] == true + Dictionary.gettext(t, :type => :model, :notfound => :titleize) else Dictionary.gettext(t, :type => :table, :notfound => :titleize) @@ -744,9 +751,11 @@ def self.quote(val, typ) val.to_s.inspect when :date return "nil" if val.blank? # treat nil value as empty string + "Date.new(#{val.year},#{val.month},#{val.day})" when :datetime return "nil" if val.blank? # treat nil value as empty string + val = val.utc "Time.utc(#{val.year},#{val.month},#{val.day},#{val.hour},#{val.min},#{val.sec})" when :integer, :decimal, :fixnum @@ -796,7 +805,8 @@ def self.quote_human(val, typ) case typ&.to_sym when :integer, :decimal, :fixnum, :float return val.to_i unless val.to_s.number_with_method? 
|| typ == :float - if val =~ /^([0-9\.,]+)\.([a-z]+)$/ + + if val =~ /^([0-9.,]+)\.([a-z]+)$/ val, sfx = $1, $2 if sfx.ends_with?("bytes") && FORMAT_BYTE_SUFFIXES.key?(sfx.to_sym) "#{val} #{FORMAT_BYTE_SUFFIXES[sfx.to_sym]}" @@ -825,12 +835,12 @@ def self.quote_human(val, typ) # eval("/" + regexp_string + "/") # ``` def self.re_escape(s) - Regexp.escape(s).gsub(/\//, '\/') + Regexp.escape(s).gsub("/", '\/') end # Escape any unescaped forward slashes and/or interpolation def self.sanitize_regular_expression(string) - string.gsub(%r{\\*/}, "\\/").gsub(/\\*#/, "\\\#") + string.gsub(%r{\\*/}, "\\/").gsub(/\\*#/, "\\#") end def self.escape_virtual_custom_attribute(attribute) @@ -897,11 +907,12 @@ def self.model_details(model, opts = {:typ => "all", :include_model => true, :in result = [] TAG_CLASSES.invert.each do |name, tc| next if tc.constantize.base_class == model.constantize.base_class + path = [model, name].join(".") result.concat(tag_details(path, opts)) end @classifications = nil - return tags_for_model.concat(result.sort! { |a, b| a.to_s <=> b.to_s }) + return tags_for_model.concat(result.sort_by!(&:to_s)) end relats = get_relats(model) @@ -911,7 +922,7 @@ def self.model_details(model, opts = {:typ => "all", :include_model => true, :in @column_cache ||= {} key = "#{model}_#{opts[:interval]}_#{opts[:include_model] || false}" @column_cache[key] = nil if model == "ChargebackVm" - @column_cache[key] ||= get_column_details(relats[:columns], model, model, opts).sort! { |a, b| a.to_s <=> b.to_s } + @column_cache[key] ||= get_column_details(relats[:columns], model, model, opts).sort_by!(&:to_s) result.concat(@column_cache[key]) unless opts[:disallow_loading_virtual_custom_attributes] @@ -987,7 +998,7 @@ def self.tag_details(path, opts) field = [prefix, opts[:userid]].join("_") result.push([value2human(field, opts), field]) end - result.sort! 
{ |a, b| a.to_s <=> b.to_s } + result.sort_by!(&:to_s) end def self.get_relats(model) @@ -1002,7 +1013,7 @@ def self.miq_adv_search_lists(model, what, extra_options = {}) options = {:include_model => true}.merge(extra_options) case what.to_sym - when :exp_available_fields then + when :exp_available_fields @miq_adv_search_lists[model.to_s][:exp_available_fields] ||= MiqExpression.model_details(model, options.merge(:typ => "field", :disallow_loading_virtual_custom_attributes => false)) when :exp_available_counts then @miq_adv_search_lists[model.to_s][:exp_available_counts] ||= MiqExpression.model_details(model, options.merge(:typ => "count")) when :exp_available_finds then @miq_adv_search_lists[model.to_s][:exp_available_finds] ||= MiqExpression.model_details(model, options.merge(:typ => "find")) @@ -1074,6 +1085,7 @@ def self.build_relats(model, parent = {}, seen = []) parent[:assoc_path].include?(assoc.to_s.singularize) || parent[:direction] == :up || parent[:multivalue] + seen.push(seen_key) result[:reflections][assoc] = build_relats(assoc_class, new_parent, seen) end @@ -1111,6 +1123,7 @@ def self.get_column_details(column_names, class_path, assoc_path, opts) includes = ["^.*derived_storage.*$", "^timestamp$", "v_date", "v_time", "resource_name"] column_names = column_names.collect do |c| next(c) if includes.include?(c) + c if includes.detect { |incl| c.match(incl) } end.compact when base_model.starts_with?("Container") @@ -1132,6 +1145,7 @@ def self.get_column_details(column_names, class_path, assoc_path, opts) end end next unless col + field_class_path = "#{class_path}-#{col}" field_assoc_path = "#{assoc_path}-#{col}" [value2human(field_class_path, :include_model => include_model), field_assoc_path] @@ -1140,7 +1154,7 @@ def self.get_column_details(column_names, class_path, assoc_path, opts) def self.get_col_operators(field) col_type = - if field == :count || field == :regkey + if [:count, :regkey].include?(field) field else Target.parse(field.to_s).column_type || :string @@ -1174,15 +1188,15 @@ def self.get_entry_details(field) if ns == "managed" cat = field.split("-").last catobj = Classification.lookup_by_name(cat) - return catobj ? catobj.entries.collect { |e| [e.description, e.name] } : [] - elsif ns == "user_tag" || ns == "user" + catobj ? catobj.entries.collect { |e| [e.description, e.name] } : [] + elsif ["user_tag", "user"].include?(ns) cat = field.split("-").last - return ::Tag.where("name like ?", "/user/#{cat}%").select(:name).collect do |t| + ::Tag.where("name like ?", "/user/#{cat}%").select(:name).collect do |t| tag_name = t.name.split("/").last [tag_name, tag_name] end else - return field + field end end @@ -1205,7 +1219,7 @@ def self.atom_error(field, operator, value) case dt when :string, :text - return false + false when :integer, :fixnum, :decimal, :float return false if send((dt == :float ? :numeric? : :integer?), value) @@ -1218,7 +1232,7 @@ def self.atom_error(field, operator, value) value = "#{value.split(".")[0..-2].join(".")} #{sfx}" end - return _("Value '%{value}' is not a valid %{value_name}") % {:value => value, :value_name => dt_human} + _("Value '%{value}' is not a valid %{value_name}") % {:value => value, :value_name => dt_human} when :date, :datetime return false if operator.downcase.include?("empty") @@ -1228,24 +1242,27 @@ def self.atom_error(field, operator, value) values_converted = values.collect do |v| return _("Date/Time value must not be blank") if value.blank? 
+ v_cvt = begin - RelativeDatetime.normalize(v, "UTC") - rescue - nil - end + RelativeDatetime.normalize(v, "UTC") + rescue + nil + end return _("Value '%{value}' is not valid") % {:value => v} if v_cvt.nil? + v_cvt end if values_converted.length > 1 && values_converted[0] > values_converted[1] return _("Invalid Date/Time range, %{first_value} comes before %{second_value}") % {:first_value => values[1], :second_value => values[0]} end - return false + false when :boolean - unless operator.downcase.include?("null") || %w(true false).include?(value) + unless operator.downcase.include?("null") || %w[true false].include?(value) return _("Value must be true or false") end - return false + + false when :regexp begin Regexp.new(value).match("foo") @@ -1253,9 +1270,9 @@ def self.atom_error(field, operator, value) return _("Regular expression '%{value}' is invalid, '%{error_message}'") % {:value => value, :error_message => err.message} end - return false + false else - return false + false end end @@ -1288,14 +1305,15 @@ def self.integer?(n) n2 = n.delete(',') # strip out commas begin Integer(n2) - return true + true rescue return false unless n.number_with_method? + begin n2 = n.to_f_with_method - return (n2.to_i == n2) + (n2.to_i == n2) rescue - return false + false end end end @@ -1305,14 +1323,15 @@ def self.numeric?(n) n2 = n.delete(',') # strip out commas begin Float(n2) - return true + true rescue return false unless n.number_with_method? + begin n.to_f_with_method - return true + true rescue - return false + false end end end @@ -1320,6 +1339,7 @@ def self.numeric?(n) # Is an MiqExpression or an expression hash a quick_search def self.quick_search?(exp) return exp.quick_search? if exp.kind_of?(self) + _quick_search?(exp) end @@ -1334,6 +1354,7 @@ def self._quick_search?(e) e.any? { |e_exp| _quick_search?(e_exp) } when Hash return true if e["value"] == :user_input + e.values.any? { |e_exp| _quick_search?(e_exp) } else false @@ -1376,7 +1397,7 @@ def fields(expression = exp) def convert_size_in_units_to_integer(exp) return if (column_details = col_details[exp.values.first["field"]]).nil? # attempt to do conversion only if db type of column is integer and value to compare to is String - return unless column_details[:data_type] == :integer && (value = exp.values.first["value"]).class == String + return unless column_details[:data_type] == :integer && (value = exp.values.first["value"]).instance_of?(String) sub_type = column_details[:format_sub_type] @@ -1464,16 +1485,20 @@ def to_arel(exp, tz) when "and" operands = exp[operator].each_with_object([]) do |operand, result| next if operand.blank? + arel = to_arel(operand, tz) next if arel.blank? + result << arel end Arel::Nodes::Grouping.new(Arel::Nodes::And.new(operands)) when "or" operands = exp[operator].each_with_object([]) do |operand, result| next if operand.blank? + arel = to_arel(operand, tz) next if arel.blank? 
+ result << arel end first, *rest = operands diff --git a/lib/miq_expression/count_field.rb b/lib/miq_expression/count_field.rb index 1f84a41dbde..163b850f662 100644 --- a/lib/miq_expression/count_field.rb +++ b/lib/miq_expression/count_field.rb @@ -1,7 +1,7 @@ class MiqExpression::CountField < MiqExpression::Target REGEX = / (?([[:upper:]][[:alnum:]]*(::)?)+) -\.(?[a-z_\.]+) +\.(?[a-z_.]+) /x def self.parse(field) diff --git a/lib/miq_expression/field.rb b/lib/miq_expression/field.rb index 6d0a83ec972..cacaddacb13 100644 --- a/lib/miq_expression/field.rb +++ b/lib/miq_expression/field.rb @@ -3,13 +3,13 @@ class MiqExpression::Field < MiqExpression::Target \A (?(?>[[:upper:]][[:alnum:]]+(?:::[[:upper:]][[:alnum:]]+)*)) (?!.*\b(managed|user_tag)\b) -(?:\.(?[a-z][0-9a-z_\.]+))? +(?:\.(?[a-z][0-9a-z_.]+))? - (?: (?#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}[a-z0-9A-Z]+[:_\-.\/[:alnum:]]*)| (?[a-z]+(_[[:alnum:]]+)*) ) -/x.freeze +/x def self.parse(field) parsed_params = parse_params(field) || return diff --git a/lib/miq_expression/relative_datetime.rb b/lib/miq_expression/relative_datetime.rb index dab2bffe610..e8006454eeb 100644 --- a/lib/miq_expression/relative_datetime.rb +++ b/lib/miq_expression/relative_datetime.rb @@ -32,14 +32,14 @@ def self.normalize(rel_time, tz, mode = "beginning", is_date = nil) if interval == "quarters" ts = Time.now.in_time_zone(tz).beginning_of_quarter - coerce((ts - (value.to_i * 3.months)).send("#{mode}_of_quarter"), is_date) + coerce((ts - (value.to_i * 3.months)).send(:"#{mode}_of_quarter"), is_date) else - coerce(value.to_i.send(interval).ago.in_time_zone(tz).send("#{mode}_of_#{interval.singularize}"), is_date) + coerce(value.to_i.send(interval).ago.in_time_zone(tz).send(:"#{mode}_of_#{interval.singularize}"), is_date) end elsif rt == "today" - coerce(Time.now.in_time_zone(tz).send("#{mode}_of_day"), is_date) + coerce(Time.now.in_time_zone(tz).send(:"#{mode}_of_day"), is_date) elsif rt == "yesterday" - coerce(1.day.ago.in_time_zone(tz).send("#{mode}_of_day"), is_date) + coerce(1.day.ago.in_time_zone(tz).send(:"#{mode}_of_day"), is_date) elsif rt == "now" t = Time.now.in_time_zone(tz) coerce(mode == "beginning" ? t.beginning_of_hour : t.end_of_hour, is_date) @@ -47,13 +47,14 @@ def self.normalize(rel_time, tz, mode = "beginning", is_date = nil) # Assume it's an absolute date or time value_is_date = !rel_time.include?(":") ts = Time.use_zone(tz) { Time.zone.parse(rel_time) } - ts = ts.send("#{mode}_of_day") if mode && value_is_date + ts = ts.send(:"#{mode}_of_day") if mode && value_is_date coerce(ts, is_date) end end def self.coerce(value, is_date) return value if is_date.nil? + is_date ? value.to_date : value.utc end end diff --git a/lib/miq_expression/subst_mixin.rb b/lib/miq_expression/subst_mixin.rb index 7ca9a13fe23..ed0c6798399 100644 --- a/lib/miq_expression/subst_mixin.rb +++ b/lib/miq_expression/subst_mixin.rb @@ -2,6 +2,7 @@ module MiqExpression::SubstMixin include ActiveSupport::Concern def exp_build_table_or_nil(exp) return nil if exp == {"???" => "???"} + exp_build_table(exp) end @@ -28,9 +29,9 @@ def exp_build_table(exp, quick_search = false) @exp_token += 1 exp[:token] = @exp_token exp_table.push(quick_search ? 
"NOT" : ["NOT", @exp_token]) # No token if building quick search exp - exp_table.push("(") unless %w(and or).include?(exp["not"].keys.first) # No parens if and/or under me + exp_table.push("(") unless %w[and or].include?(exp["not"].keys.first) # No parens if and/or under me exp_table += exp_build_table(exp["not"], quick_search) - exp_table.push(")") unless %w(and or).include?(exp["not"].keys.first) # No parens if and/or under me + exp_table.push(")") unless %w[and or].include?(exp["not"].keys.first) # No parens if and/or under me else @exp_token ||= 0 @exp_token += 1 @@ -58,7 +59,7 @@ def exp_build_table(exp, quick_search = false) # Go thru an expression and replace the quick search tokens def exp_replace_qs_tokens(exp, tokens) key = exp.keys.first - if %w(and or).include?(key) + if %w[and or].include?(key) exp[key].each { |e| exp_replace_qs_tokens(e, tokens) } elsif key == "not" exp_replace_qs_tokens(exp[key], tokens) @@ -77,7 +78,7 @@ def exp_replace_qs_tokens(exp, tokens) # Find an expression atom based on the token def exp_find_by_token(exp, token, parent_is_not = false) - if exp.kind_of?(Array) # Is this and AND or OR + if exp.kind_of?(Array) # Is this and AND or OR result = nil exp.find do |e| result = exp_find_by_token(e, token) # Look for token diff --git a/lib/miq_expression/tag.rb b/lib/miq_expression/tag.rb index c894042901b..c9c24654cb7 100644 --- a/lib/miq_expression/tag.rb +++ b/lib/miq_expression/tag.rb @@ -13,6 +13,7 @@ class MiqExpression::Tag < MiqExpression::Target def self.parse(field) return unless field.include?('managed') || field.include?('user_tag') + parsed_params = parse_params(field) || return managed = parsed_params[:namespace] == self::MANAGED_NAMESPACE new(parsed_params[:model_name], parsed_params[:associations], parsed_params[:column], managed) diff --git a/lib/miq_expression/target.rb b/lib/miq_expression/target.rb index d5d9c34b74a..c8f058599fd 100644 --- a/lib/miq_expression/target.rb +++ b/lib/miq_expression/target.rb @@ -68,11 +68,12 @@ def tag? end def numeric? - %i(fixnum integer decimal float).include?(column_type) + %i[fixnum integer decimal float].include?(column_type) end def plural? return false if reflections.empty? + [:has_many, :has_and_belongs_to_many].include?(reflections.last.macro) end @@ -120,7 +121,7 @@ def target def tag_path_with(value = nil) # encode embedded / characters in values since / is used as a tag seperator - "#{tag_path}#{value.nil? ? '' : '/' + value.to_s.gsub(/\//, "%2f")}" + "#{tag_path}#{value.nil? ? 
'' : '/' + value.to_s.gsub("/", "%2f")}" end def exclude_col_by_preprocess_options?(options) diff --git a/lib/miq_ldap.rb b/lib/miq_ldap.rb index cbe50e62fca..869e54c0d6f 100644 --- a/lib/miq_ldap.rb +++ b/lib/miq_ldap.rb @@ -82,20 +82,20 @@ def resolve_host(hosts, port) end addresses.each do |address| - begin - $log.info("MiqLdap.connection: Connecting to IP Address [#{address}]") if $log - @conn = TCPSocket.new(address, port) - valid_address = true - break - rescue => err - $log.debug("Warning: '#{err.message}', connecting to IP Address [#{address}]") - end + + $log.info("MiqLdap.connection: Connecting to IP Address [#{address}]") if $log + @conn = TCPSocket.new(address, port) + valid_address = true + break + rescue => err + $log.debug("Warning: '#{err.message}', connecting to IP Address [#{address}]") + end return selected_host if valid_address end - raise Net::LDAP::Error.new("unable to establish a connection to server") + raise Net::LDAP::Error, "unable to establish a connection to server" end def bind(username, password) @@ -113,7 +113,7 @@ def bind(username, password) end rescue Exception => err _log.error("Binding to LDAP: Host: [#{@ldap.host}], User: [#{username}], '#{err.message}'") - return false + false end end @@ -131,14 +131,16 @@ def get(dn, attrs = nil) _log.error("'#{err.message}'") end return nil unless result + # puts "result: #{result.inspect}" result.first end def self.get_attr(obj, attr) return nil unless obj.attribute_names.include?(attr) + val = obj.send(attr) - val = val.length == 1 ? val.first : val + val = val.first if val.length == 1 # The BERParser#read_ber adds the method "ber_identifier" to strings and arrays (line 122 in ber.rb) via instance_eval # This singleton method causes TypeError: singleton can't be dumped during Marshal.dump @@ -159,30 +161,31 @@ def search(opts, &blk) def _search(opts, seen = nil, &_blk) raw_opts = opts.dup - opts[:scope] = scope(opts[:scope]) if opts[:scope] - if opts[:filter] - opts[:filter] = filter_construct(opts[:filter]) unless opts[:filter].kind_of?(Net::LDAP::Filter) + opts[:scope] = scope(opts[:scope]) if opts[:scope] + if opts[:filter] && !opts[:filter].kind_of?(Net::LDAP::Filter) + opts[:filter] = filter_construct(opts[:filter]) end opts[:return_referrals] = @follow_referrals seen ||= {:objects => [], :referrals => {}} _log.debug("opts: #{opts.inspect}") - if block_given? + if _blk opts[:return_result] = false - return @ldap.search(opts) { |entry| yield entry if block_given? } + @ldap.search(opts) { |entry| yield entry if _blk } else result = @ldap.search(opts) unless ldap_result_ok? _log.warn("LDAP Search unsuccessful, '#{@ldap.get_operation_result.message}', Code: [#{@ldap.get_operation_result.code}], Host: [#{@ldap.host}]") return [] end - return @follow_referrals ? chase_referrals(result, raw_opts, seen) : result + @follow_referrals ? chase_referrals(result, raw_opts, seen) : result end end def ldap_result_ok?(follow_referrals = @follow_referrals) return true if @ldap.get_operation_result.code == 0 return true if @ldap.get_operation_result.code == 10 && follow_referrals + false end @@ -264,6 +267,7 @@ def self.filter_groups_only def normalize(dn) return if dn.nil? + dn.split(",").collect { |i| i.downcase.strip }.join(",") end @@ -303,21 +307,22 @@ def fqusername(username) user_prefix = "cn" if user_prefix == "dn" case user_type when "samaccountname" - return "#{@domain_prefix}\\#{username}" unless @domain_prefix.blank? - return username + return "#{@domain_prefix}\\#{username}" if @domain_prefix.present? 
+ + username when "upn", "userprincipalname" return username if @user_suffix.blank? - return "#{username}@#{@user_suffix}" + "#{username}@#{@user_suffix}" when "mail" username = "#{username}@#{@user_suffix}" unless @user_suffix.blank? || upn?(username) dbuser = User.lookup_by_email(username.downcase) dbuser ||= User.lookup_by_userid(username.downcase) return dbuser.userid if dbuser && dbuser.userid - return username + username when "dn" - return "#{user_prefix}=#{username},#{@user_suffix}" + "#{user_prefix}=#{username},#{@user_suffix}" end end @@ -379,14 +384,14 @@ def get_user_info(username, user_type = nil) udata[:sid] = MiqLdap.get_sid(user) managers = [] - user[:manager].each { |m| managers << get(m) } unless user[:manager].blank? + user[:manager].each { |m| managers << get(m) } if user[:manager].present? udata[:manager] = managers.empty? ? nil : MiqLdap.get_attr(managers.first, :displayname) udata[:manager_phone] = managers.empty? ? nil : MiqLdap.get_attr(managers.first, :telephonenumber) udata[:manager_mail] = managers.empty? ? nil : MiqLdap.get_attr(managers.first, :mail) assistants = [] delegates = user[:publicdelegates] - delegates.each { |d| assistants << get(d) } unless delegates.nil? + delegates.each { |d| assistants << get(d) } unless delegates.nil? udata[:assistant] = assistants.empty? ? nil : MiqLdap.get_attr(assistants.first, :displayname) udata[:assistant_phone] = assistants.empty? ? nil : MiqLdap.get_attr(assistants.first, :telephonenumber) udata[:assistant_mail] = assistants.empty? ? nil : MiqLdap.get_attr(assistants.first, :mail) @@ -441,6 +446,7 @@ def get_organizationalunits(basedn = nil, filter = nil) filter ||= "(ObjectCategory=organizationalUnit)" result = search(:base => basedn, :scope => :sub, :filter => filter) return nil unless result + result.collect { |o| [get_attr(o, :dn), get_attr(o, :name)] } end @@ -472,7 +478,7 @@ def self.sid_to_s(data) sid << data.ord.to_s rid = "" - (6).downto(1) do |i| + 6.downto(1) do |i| rid += byte2hex(data[i, 1].ord) end sid << rid.to_i.to_s diff --git a/lib/miq_memcached.rb b/lib/miq_memcached.rb index d6f185694cb..f7de2c0ef00 100644 --- a/lib/miq_memcached.rb +++ b/lib/miq_memcached.rb @@ -17,7 +17,7 @@ def self.default_client_options # Direct Dalli Clients won't use connection pool but will be threadsafe. # ActiveSupport::Cache::MemCacheStore and ManageIQ::Session::MemCacheStoreAdapter # use threadsafe but also accept connection pool options. - :threadsafe => true + :threadsafe => true } if ENV["MEMCACHED_ENABLE_SSL"] diff --git a/lib/miq_pglogical.rb b/lib/miq_pglogical.rb index eb925f99c52..024a0024f6c 100644 --- a/lib/miq_pglogical.rb +++ b/lib/miq_pglogical.rb @@ -6,7 +6,7 @@ class MiqPglogical include ConnectionHandling PUBLICATION_NAME = 'miq'.freeze - ALWAYS_EXCLUDED_TABLES = %w(ar_internal_metadata schema_migrations repl_events repl_monitor repl_nodes).freeze + ALWAYS_EXCLUDED_TABLES = %w[ar_internal_metadata schema_migrations repl_events repl_monitor repl_nodes].freeze # :nodoc: # @@ -33,11 +33,13 @@ def provider? def configure_provider return if provider? + create_replication_set end def destroy_provider return unless provider? 
+ self.class.with_connection_error_handling { pglogical.drop_publication(PUBLICATION_NAME) } end @@ -95,7 +97,7 @@ def excludes end def self.excludes - YAML.load_file(Rails.root.join("config", "replication_exclude_tables.yml"))[:exclude_tables] | ALWAYS_EXCLUDED_TABLES + YAML.load_file(Rails.root.join("config/replication_exclude_tables.yml"))[:exclude_tables] | ALWAYS_EXCLUDED_TABLES end def self.save_global_region(subscriptions_to_save, subscriptions_to_remove) diff --git a/lib/miq_pglogical/connection_handling.rb b/lib/miq_pglogical/connection_handling.rb index 1bb33267443..710807b8083 100644 --- a/lib/miq_pglogical/connection_handling.rb +++ b/lib/miq_pglogical/connection_handling.rb @@ -41,7 +41,7 @@ def with_connection_error_handling retry end - def pglogical(refresh = false) + def pglogical(_refresh = false) # TODO: Review if the reasons behind the previous caching / refreshing # of the PG::LogicalReplication::Client # diff --git a/lib/patches/ruport_patch.rb b/lib/patches/ruport_patch.rb index 72cd1f2b3c2..7d1243218d2 100644 --- a/lib/patches/ruport_patch.rb +++ b/lib/patches/ruport_patch.rb @@ -14,7 +14,6 @@ def sort_rows_by(col_names = nil, options = {}, &block) end module Ruport - # Handles preventing CSV injection attacks by adding an apostrophe to all # fields that could potentially be a formula that executes a function. # @@ -29,8 +28,8 @@ module Ruport # just using the raw CSV in a scripting language like Ruby/Python. # class Formatter::SafeCSV < Formatter::CSV - renders :csv, :for => [ Controller::Row, Controller::Table, - Controller::Group, Controller::Grouping ] + renders :csv, :for => [Controller::Row, Controller::Table, + Controller::Group, Controller::Grouping] def build_table_body data.each do |row| diff --git a/lib/pdf_generator.rb b/lib/pdf_generator.rb index b4f917af821..c210612a227 100644 --- a/lib/pdf_generator.rb +++ b/lib/pdf_generator.rb @@ -4,7 +4,7 @@ def self.new end def self.instance - @instance ||= self.new + @instance ||= new end def self.pdf_from_string(html_string, stylesheet) @@ -30,7 +30,7 @@ def available? end def self.detect_available_generator - self.subclasses.detect(&:available?) || NullPdfGenerator + subclasses.detect(&:available?) || NullPdfGenerator end private_class_method :detect_available_generator @@ -38,7 +38,7 @@ def self.sanitize_html(html_string) # strip out bad attachment_fu URLs # and remove asset ids on images html_string.gsub('.com:/', '.com/') - .gsub(/src=["'](\S+)\?\d*["']/i, 'src="\1"') + .gsub(/src=["'](\S+)\?\d*["']/i, 'src="\1"') end private_class_method :sanitize_html diff --git a/lib/pdf_generator/prince_pdf_generator.rb b/lib/pdf_generator/prince_pdf_generator.rb index a1b6281784d..db84139bebe 100644 --- a/lib/pdf_generator/prince_pdf_generator.rb +++ b/lib/pdf_generator/prince_pdf_generator.rb @@ -2,11 +2,12 @@ class PrincePdfGenerator < PdfGenerator include Vmdb::Logging def self.executable return @executable if defined?(@executable) + @executable = `which prince 2> /dev/null`.chomp end def self.available? - !executable.blank? + executable.present? 
end def executable @@ -18,7 +19,7 @@ def pdf_from_string(html_string, stylesheet) :params => { :input => "html", :style => stylesheet, - :log => Rails.root.join("log", "prince.log"), + :log => Rails.root.join("log/prince.log"), :output => "-", # Write to stdout "-" => nil # Read from stdin }, diff --git a/lib/pid_file.rb b/lib/pid_file.rb index 990a7254d4b..bbe59842cde 100644 --- a/lib/pid_file.rb +++ b/lib/pid_file.rb @@ -15,8 +15,10 @@ def self.remove(fname) def pid return nil unless File.file?(@fname) + data = File.read(@fname).strip return nil if data.empty? || !/\d+/.match(data) + data.to_i end @@ -26,15 +28,17 @@ def remove def create(remove_on_exit = true) FileUtils.mkdir_p(File.dirname(@fname)) - File.open(@fname, "w") { |f| f.write(Process.pid) } + File.write(@fname, Process.pid) at_exit { PidFile.remove(@fname) } if remove_on_exit end def running?(regexp = nil) pid = self.pid return false if pid.nil? + command_line = MiqProcess.command_line(pid) return false if command_line.blank? + unless regexp.nil? regexp = Regexp.new(regexp) if regexp.kind_of?(String) return false if regexp.match(command_line).nil? diff --git a/lib/rbac/authorizer.rb b/lib/rbac/authorizer.rb index 7a2b66684ed..a5f2f262e10 100644 --- a/lib/rbac/authorizer.rb +++ b/lib/rbac/authorizer.rb @@ -53,6 +53,7 @@ def user_role_allows?(user, **options) def user_role_allows_any?(user, **options) return false if user.miq_user_role.nil? + user.miq_user_role.allows_any?(**options) end end diff --git a/lib/rbac/filterer.rb b/lib/rbac/filterer.rb index 7f98250ee4f..69f29e4c779 100644 --- a/lib/rbac/filterer.rb +++ b/lib/rbac/filterer.rb @@ -6,7 +6,7 @@ class Filterer # Classes should be added to this list ONLY after: # 1. Tagging has been enabled in the UI # 2. Class contains acts_as_miq_taggable - CLASSES_THAT_PARTICIPATE_IN_RBAC = %w( + CLASSES_THAT_PARTICIPATE_IN_RBAC = %w[ Authentication AvailabilityZone CloudNetwork @@ -67,11 +67,11 @@ class Filterer Switch VmOrTemplate WindowsImage - ) + ] - TAGGABLE_FILTER_CLASSES = CLASSES_THAT_PARTICIPATE_IN_RBAC - %w(EmsFolder MiqRequest) + %w(MiqGroup User Tenant) + TAGGABLE_FILTER_CLASSES = CLASSES_THAT_PARTICIPATE_IN_RBAC - %w[EmsFolder MiqRequest] + %w[MiqGroup User Tenant] - NETWORK_MODELS_FOR_BELONGSTO_FILTER = %w( + NETWORK_MODELS_FOR_BELONGSTO_FILTER = %w[ CloudNetwork CloudSubnet FloatingIp @@ -79,9 +79,9 @@ class Filterer NetworkPort NetworkRouter SecurityGroup - ).freeze + ].freeze - BELONGSTO_FILTER_CLASSES = %w( + BELONGSTO_FILTER_CLASSES = %w[ Container ContainerBuild ContainerGroup @@ -101,7 +101,7 @@ class Filterer ResourcePool Storage VmOrTemplate - ) + NETWORK_MODELS_FOR_BELONGSTO_FILTER + ] + NETWORK_MODELS_FOR_BELONGSTO_FILTER # key: descendant::klass # value: @@ -163,10 +163,10 @@ class Filterer # Classes inherited from these classes or mixins are allowing ownership feature on the target model, # scope user_or_group_owned is required on target model - OWNERSHIP_CLASSES = %w( + OWNERSHIP_CLASSES = %w[ OwnershipMixin MiqRequest - ).freeze + ].freeze ADDITIONAL_TENANT_CLASSES = %w[ServiceTemplate].freeze PRODUCT_FEATURE_CLASSES = %w[MiqShortcut].freeze @@ -227,6 +227,7 @@ def search(options = {}) if options.key?(:targets) && options[:targets].kind_of?(Array) && options[:targets].empty? 
return [], {:auth_count => 0} end + targets = options[:targets] scope = options[:named_scope] @@ -430,6 +431,7 @@ def include_references(scope, klass, references, exp_includes) # @param includes [Array, Hash] def add_joins(klass, scope, includes) return scope unless includes + includes = Array(includes) unless includes.kind_of?(Enumerable) includes.each do |association, value| reflection = klass.reflect_on_association(association) @@ -495,6 +497,7 @@ def rbac_class(scope) # VmPerformance => VmOrTemplate return klass.name[0..-12].constantize.base_class end + nil end @@ -587,6 +590,7 @@ def scope_by_ids(scope, filtered_ids) def get_belongsto_filter_object_ids(klass, filter) return nil if !BELONGSTO_FILTER_CLASSES.include?(safe_base_class(klass).name) || filter.blank? + get_belongsto_matches(filter, rbac_class(klass)).collect(&:id) end @@ -594,6 +598,7 @@ def get_managed_filter_object_ids(scope, filter) klass = scope.respond_to?(:klass) ? scope.klass : scope return nil if !TAGGABLE_FILTER_CLASSES.include?(safe_base_class(klass).name) || filter.blank? return scope.where(filter.to_sql.first) if filter.kind_of?(MiqExpression) + scope.find_tags_by_grouping(filter, :ns => '*').reorder(nil) end @@ -641,7 +646,7 @@ def scope_to_cloud_tenant(scope, user, miq_group) def scope_for_user_role_group(klass, scope, miq_group, user, managed_filters) user_or_group = miq_group || user - if user_or_group.try!(:self_service?) && klass != MiqUserRole + if user_or_group&.self_service? && klass != MiqUserRole scope.where(:id => klass == User ? user.id : miq_group.id) else role = user_or_group.miq_user_role @@ -748,7 +753,7 @@ def get_user_info(user, userid, miq_group, miq_group_id) def lookup_user_group(user, userid, miq_group, miq_group_id) user ||= (userid && User.lookup_by_userid(userid)) || User.current_user - miq_group_id ||= miq_group.try!(:id) + miq_group_id ||= miq_group&.id return [user, user.current_group] if user && user.current_group_id.to_s == miq_group_id.to_s group = if user @@ -768,7 +773,7 @@ def lookup_user_group(user, userid, miq_group, miq_group_id) # for reports, user is currently nil, so use the group filter # the user.get_filters delegates to user.current_group anyway def lookup_user_filters(miq_group) - filters = miq_group.try!(:get_filters).try!(:dup) || {} + filters = miq_group&.get_filters&.dup || {} filters["managed"] ||= [] filters["belongsto"] ||= [] filters @@ -839,11 +844,13 @@ def apply_select(klass, scope, extra_cols) def get_belongsto_matches(blist, klass) return get_belongsto_matches_for_host(blist) if klass <= Host return get_belongsto_matches_for_storage(blist) if klass == Storage + association_name = klass.base_model.to_s.tableize blist.flat_map do |bfilter| vcmeta_list = MiqFilter.belongsto2object_list(bfilter) next [] if vcmeta_list.empty? + # typically, this is the only one we want: vcmeta = vcmeta_list.last @@ -856,13 +863,13 @@ def get_belongsto_matches(blist, klass) end def belongsto_association_filtered?(vcmeta, klass) - if [ExtManagementSystem, Host].any? { |x| vcmeta.kind_of?(x) } + if [ExtManagementSystem, Host].any? { |x| vcmeta.kind_of?(x) } && associated_belongsto_models.any? do |associated| + klass <= associated && vcmeta.respond_to?(associated.base_model.to_s.tableize) + end # Eject early if klass(requested for RBAC check) is allowed to be filtered by # belongsto filtering generally and whether relation (based on the klass) exists on object # from belongsto filter at all. - return true if associated_belongsto_models.any? 
do |associated| - klass <= associated && vcmeta.respond_to?(associated.base_model.to_s.tableize) - end + return true end if vcmeta.kind_of?(ManageIQ::Providers::NetworkManager) @@ -897,7 +904,7 @@ def get_belongsto_matches_for_host(blist) vcmeta = MiqFilter.belongsto2object(bfilter) next unless vcmeta - subtree = vcmeta.subtree + subtree = vcmeta.subtree clusters += subtree.grep(EmsCluster) hosts += subtree.grep(Host) end diff --git a/lib/remote_console/rack_server.rb b/lib/remote_console/rack_server.rb index 2372a994780..a9233932d5b 100644 --- a/lib/remote_console/rack_server.rb +++ b/lib/remote_console/rack_server.rb @@ -42,13 +42,13 @@ def initialize(options = {}) @proxy.select(1000) @proxy.each_ready do |left, right| - begin - @adapters[left].fetch(64.kilobytes) { |data| @adapters[right].issue(data) } # left -> right - rescue IOError, IO::WaitReadable, IO::WaitWritable - cleanup(:info, "Closing RemoteConsole proxy for VM %{vm_id}", left, right) - rescue StandardError => ex - cleanup(:error, "RemoteConsole proxy for VM %{vm_id} errored with #{ex} #{ex.backtrace.join("\n")}", left, right) - end + + @adapters[left].fetch(64.kilobytes) { |data| @adapters[right].issue(data) } # left -> right + rescue IOError, IO::WaitReadable, IO::WaitWritable + cleanup(:info, "Closing RemoteConsole proxy for VM %{vm_id}", left, right) + rescue => ex + cleanup(:error, "RemoteConsole proxy for VM %{vm_id} errored with #{ex} #{ex.backtrace.join("\n")}", left, right) + end end end @@ -72,7 +72,7 @@ def call(env) # Determine if the transmitter thread is alive or crashed def healthy? - %w(run sleep).include?(@transmitter.status) + %w[run sleep].include?(@transmitter.status) end private @@ -93,7 +93,7 @@ def init_proxy(env, secret) @adapters[ws_sock] = ServerAdapter.new(record, env, ws_sock) @proxy.push(ws_sock, console_sock) - rescue StandardError => ex + rescue => ex cleanup(:error, "RemoteConsole proxy for VM %{vm_id} errored with #{ex} #{ex.backtrace.join("\n")}", console_sock, ws_sock, record) RACK_404 else diff --git a/lib/report_formatter.rb b/lib/report_formatter.rb index 4ab3b28ac10..6667ffb68ae 100644 --- a/lib/report_formatter.rb +++ b/lib/report_formatter.rb @@ -3,4 +3,4 @@ # * We assign the old toplevel constant to the new constant. # * We can't include rails deprecate_constant globally, so we use ruby's. 
ReportFormatter = ManageIQ::Reporting::Formatter -Object.deprecate_constant :ReportFormatter +Object.deprecate_constant(:ReportFormatter) diff --git a/lib/request_log_session_middleware.rb b/lib/request_log_session_middleware.rb index 59fbcddb4a3..8aff3561d4d 100644 --- a/lib/request_log_session_middleware.rb +++ b/lib/request_log_session_middleware.rb @@ -20,7 +20,7 @@ def cookies(env) env["HTTP_COOKIE"].split(/\s*;\s*/).map do |keyval| keyval.split('=') end.to_h - rescue StandardError + rescue {} end end diff --git a/lib/services/dialog_field_visibility_service.rb b/lib/services/dialog_field_visibility_service.rb index af407237f3e..c382b71c0fe 100644 --- a/lib/services/dialog_field_visibility_service.rb +++ b/lib/services/dialog_field_visibility_service.rb @@ -1,15 +1,5 @@ class DialogFieldVisibilityService - attr_accessor :auto_placement_visibility_service - attr_accessor :number_of_vms_visibility_service - attr_accessor :service_template_fields_visibility_service - attr_accessor :network_visibility_service - attr_accessor :sysprep_auto_logon_visibility_service - attr_accessor :retirement_visibility_service - attr_accessor :customize_fields_visibility_service - attr_accessor :sysprep_custom_spec_visibility_service - attr_accessor :request_type_visibility_service - attr_accessor :pxe_iso_visibility_service - attr_accessor :linked_clone_visibility_service + attr_accessor :auto_placement_visibility_service, :number_of_vms_visibility_service, :service_template_fields_visibility_service, :network_visibility_service, :sysprep_auto_logon_visibility_service, :retirement_visibility_service, :customize_fields_visibility_service, :sysprep_custom_spec_visibility_service, :request_type_visibility_service, :pxe_iso_visibility_service, :linked_clone_visibility_service def initialize( auto_placement_visibility_service = AutoPlacementVisibilityService.new, diff --git a/lib/services/dialog_import_service.rb b/lib/services/dialog_import_service.rb index c24b2690c8c..9d561bf2b51 100644 --- a/lib/services/dialog_import_service.rb +++ b/lib/services/dialog_import_service.rb @@ -114,9 +114,10 @@ def build_associations(dialog, association_list) association_list.each do |association| association.each_value do |value| value.each do |responder| - next if fields.select { |field| field.name == responder }.empty? + next if fields.none? { |field| field.name == responder } + DialogFieldAssociation.create!(:trigger_id => fields.find { |field| field.name.include?(association.keys.first) }.id, - :respond_id => fields.find { |field| field.name == responder }.id) + :respond_id => fields.find { |field| field.name == responder }.id) end end end @@ -127,7 +128,7 @@ def build_association_list(dialog) dialog["dialog_tabs"].flat_map do |tab| tab["dialog_groups"].flat_map do |group| group["dialog_fields"].flat_map do |field| - associations << { field["name"] => field["dialog_field_responders"] } if field["dialog_field_responders"].present? + associations << {field["name"] => field["dialog_field_responders"]} if field["dialog_field_responders"].present? end end end @@ -144,6 +145,7 @@ def create_import_file_upload(file_contents) def import_from_dialogs(dialogs) raise ParsedNonDialogYamlError if dialogs.empty? 
+ dialogs.each do |dialog| dialog.except!(:blueprint_id, 'blueprint_id') # blueprint_id might appear in some old dialogs, but no longer exists new_or_existing_dialog = Dialog.where(:label => dialog["label"]).first_or_create @@ -182,7 +184,7 @@ def absolute_position(dialog_fields) field_position = f.position dialog_group_position = f.dialog_group.position dialog_tab_position = f.dialog_group.dialog_tab.position - index = field_position + dialog_group_position * 1000 + dialog_tab_position * 100_000 + index = field_position + (dialog_group_position * 1000) + (dialog_tab_position * 100_000) {:name => f.name, :position => index} end end diff --git a/lib/services/locale_resolver.rb b/lib/services/locale_resolver.rb index d7caa92f02f..b41065525fa 100644 --- a/lib/services/locale_resolver.rb +++ b/lib/services/locale_resolver.rb @@ -13,6 +13,7 @@ def initialize(user, headers = {}) def resolve return user_locale if set?(user_locale) return server_locale if set?(server_locale) + headers["Accept-Language"] end diff --git a/lib/services/resource_sharer.rb b/lib/services/resource_sharer.rb index 3ddd343c4e1..7e60b0942f1 100644 --- a/lib/services/resource_sharer.rb +++ b/lib/services/resource_sharer.rb @@ -1,8 +1,8 @@ class ResourceSharer - WHITELISTED_RESOURCE_TYPES = %w( + WHITELISTED_RESOURCE_TYPES = %w[ VmOrTemplate ServiceTemplate - ).freeze + ].freeze include ActiveModel::Model @@ -24,8 +24,7 @@ def self.valid_share?(share) new(:user => share.user, :resource => share.resource, :tenants => [share.tenant], - :features => share.miq_product_features - ).valid? + :features => share.miq_product_features).valid? end ## @@ -63,6 +62,7 @@ def share def rbac_visibility return unless user && resource + unless Rbac::Filterer.filtered_object(resource, :user => user).present? errors.add(:user, "is not authorized to share this resource") end @@ -94,6 +94,7 @@ def allowed_resource_type def valid_tenants return unless tenants + unless tenants.respond_to?(:all?) && tenants.all? 
{ |t| t.kind_of?(Tenant) } errors.add(:tenants, "must be an array of Tenant objects") end diff --git a/lib/task_helpers/development/replication.rb b/lib/task_helpers/development/replication.rb index 83af37ffc80..4ee8ff41b8d 100644 --- a/lib/task_helpers/development/replication.rb +++ b/lib/task_helpers/development/replication.rb @@ -47,7 +47,7 @@ def teardown regions = REMOTES + [GLOBAL] regions.each do |r| - run_command("dropdb -U '#{PG_USER}' -h #{PG_HOST} #{database(r)}", env: {"PGPASSWORD" => PG_PASS}, raise_on_error: false) + run_command("dropdb -U '#{PG_USER}' -h #{PG_HOST} #{database(r)}", :env => {"PGPASSWORD" => PG_PASS}, :raise_on_error => false) end end @@ -92,18 +92,18 @@ def command_environment(region) end def configure_global_region(region) - run_command("bin/rails r 'TaskHelpers::Development::Replication.configure_global_region_script'", env: command_environment(region)) - run_command("psql #{database_url(region)} -c 'SELECT * FROM pg_subscription;'", raise_on_error: false) + run_command("bin/rails r 'TaskHelpers::Development::Replication.configure_global_region_script'", :env => command_environment(region)) + run_command("psql #{database_url(region)} -c 'SELECT * FROM pg_subscription;'", :raise_on_error => false) end def configure_remote_region(region) - run_command("bin/rails r 'MiqRegion.replication_type = :remote'", env: command_environment(region)) + run_command("bin/rails r 'MiqRegion.replication_type = :remote'", :env => command_environment(region)) run_command("psql #{database_url(region)} -c 'SELECT * FROM pg_publication;'") end def create_region(region) - run_command("bin/rake evm:db:region", env: command_environment(region).merge("DISABLE_DATABASE_ENVIRONMENT_CHECK" => "true")) - run_command("bin/rails r 'EvmDatabase.seed_primordial'", env: command_environment(region)) + run_command("bin/rake evm:db:region", :env => command_environment(region).merge("DISABLE_DATABASE_ENVIRONMENT_CHECK" => "true")) + run_command("bin/rails r 'EvmDatabase.seed_primordial'", :env => command_environment(region)) ensure FileUtils.rm_f(guid_file) end @@ -124,11 +124,11 @@ def run_command(command, raise_on_error: true, env: {}) end def teardown_global_subscription_for_region(region) - run_command("psql #{database_url(GLOBAL)} -c 'DROP SUBSCRIPTION region_#{region}_subscription;'", raise_on_error: false) + run_command("psql #{database_url(GLOBAL)} -c 'DROP SUBSCRIPTION region_#{region}_subscription;'", :raise_on_error => false) end def teardown_remote_publication(region) - run_command("psql #{database_url(region)} -c 'DROP PUBLICATION miq;'", raise_on_error: false) + run_command("psql #{database_url(region)} -c 'DROP PUBLICATION miq;'", :raise_on_error => false) end end end diff --git a/lib/task_helpers/exports/customization_templates.rb b/lib/task_helpers/exports/customization_templates.rb index 1372bb4221a..ea62ed3718a 100644 --- a/lib/task_helpers/exports/customization_templates.rb +++ b/lib/task_helpers/exports/customization_templates.rb @@ -1,7 +1,7 @@ module TaskHelpers class Exports class CustomizationTemplates - EXCLUDE_ATTRS = %i(created_at updated_at id pxe_image_type_id class).freeze + EXCLUDE_ATTRS = %i[created_at updated_at id pxe_image_type_id class].freeze def export(options = {}) export_dir = options[:directory] @@ -23,9 +23,9 @@ def export(options = {}) def pxe_image_type_hash(pxe_image_type) if pxe_image_type - { :pxe_image_type => pxe_image_type.to_model_hash.reject { |key| EXCLUDE_ATTRS.include?(key) } } + {:pxe_image_type => pxe_image_type.to_model_hash.reject { |key| 
EXCLUDE_ATTRS.include?(key) }} else - { :pxe_image_type => {} } + {:pxe_image_type => {}} end end end diff --git a/lib/task_helpers/exports/provision_dialogs.rb b/lib/task_helpers/exports/provision_dialogs.rb index 70daefb500c..b22618612a6 100644 --- a/lib/task_helpers/exports/provision_dialogs.rb +++ b/lib/task_helpers/exports/provision_dialogs.rb @@ -1,7 +1,7 @@ module TaskHelpers class Exports class ProvisionDialogs - EXCLUDE_ATTRS = %i(file_mtime created_at updated_at id class).freeze + EXCLUDE_ATTRS = %i[file_mtime created_at updated_at id class].freeze def export(options = {}) export_dir = options[:directory] diff --git a/lib/task_helpers/exports/roles.rb b/lib/task_helpers/exports/roles.rb index a9cdc3bc696..58f832deddc 100644 --- a/lib/task_helpers/exports/roles.rb +++ b/lib/task_helpers/exports/roles.rb @@ -1,7 +1,7 @@ module TaskHelpers class Exports class Roles - EXCLUDE_ATTRS = %w(created_at updated_at id).freeze + EXCLUDE_ATTRS = %w[created_at updated_at id].freeze def export(options = {}) export_dir = options[:directory] diff --git a/lib/task_helpers/exports/scan_profiles.rb b/lib/task_helpers/exports/scan_profiles.rb index 5325a1b8a29..4937e48da55 100644 --- a/lib/task_helpers/exports/scan_profiles.rb +++ b/lib/task_helpers/exports/scan_profiles.rb @@ -11,9 +11,9 @@ def export(options = {}) profile = ScanItem.get_profile(scan_item_set.name).first.dup - %w(id created_on updated_on).each { |key| profile.delete(key) } + %w[id created_on updated_on].each { |key| profile.delete(key) } profile['definition'].each do |dd| - %w(id created_on updated_on description).each { |key| dd.delete(key) } + %w[id created_on updated_on description].each { |key| dd.delete(key) } end scan_profile = profile.to_yaml diff --git a/lib/task_helpers/exports/tags.rb b/lib/task_helpers/exports/tags.rb index cfc9cd54604..de50ee072ea 100644 --- a/lib/task_helpers/exports/tags.rb +++ b/lib/task_helpers/exports/tags.rb @@ -2,7 +2,7 @@ module TaskHelpers class Exports class Tags # Tag Categories that are not visible in the UI and should not be exported - SPECIAL_TAGS = %w(/managed/folder_path_yellow /managed/folder_path_blue /managed/user/role).freeze + SPECIAL_TAGS = %w[/managed/folder_path_yellow /managed/folder_path_blue /managed/user/role].freeze def export(options = {}) export_dir = options[:directory] diff --git a/lib/task_helpers/imports/alert_sets.rb b/lib/task_helpers/imports/alert_sets.rb index 0c082a67f08..d140e7ee492 100644 --- a/lib/task_helpers/imports/alert_sets.rb +++ b/lib/task_helpers/imports/alert_sets.rb @@ -9,7 +9,7 @@ def import(options) begin alertsets = YAML.load_file(filename) import_alert_sets(alertsets) - rescue StandardError => err + rescue => err $log.error("Error importing #{filename} : #{err.message}") warn("Error importing #{filename} : #{err.message}") end diff --git a/lib/task_helpers/imports/alerts.rb b/lib/task_helpers/imports/alerts.rb index f753339a422..c87e73886c1 100644 --- a/lib/task_helpers/imports/alerts.rb +++ b/lib/task_helpers/imports/alerts.rb @@ -9,7 +9,7 @@ def import(options) begin alerts = YAML.load_file(filename) import_alerts(alerts) - rescue StandardError => err + rescue => err $log.error("Error importing #{filename} : #{err.message}") warn("Error importing #{filename} : #{err.message}") end diff --git a/lib/task_helpers/imports/custom_buttons.rb b/lib/task_helpers/imports/custom_buttons.rb index a805970d944..1ecc789de17 100644 --- a/lib/task_helpers/imports/custom_buttons.rb +++ b/lib/task_helpers/imports/custom_buttons.rb @@ -10,7 +10,7 @@ def 
import(options) begin import_custom_buttons(filename, options[:connect_dialog_by_name]) - rescue StandardError + rescue raise StandardError, "Error importing #{filename} at #{$@}" end end @@ -60,10 +60,10 @@ def add_associations(obj, new_obj) # may contain dialog_label,delete it, then find and connect dialog (optionally) dialog_label = assoc.last.first['attributes'].delete('dialog_label') resource_action = create_object(*assoc).first - if @connect_dialog - resource_action.dialog = Dialog.in_region(MiqRegion.my_region_number).find_by(:label => dialog_label) if dialog_label + if @connect_dialog && dialog_label + resource_action.dialog = Dialog.in_region(MiqRegion.my_region_number).find_by(:label => dialog_label) end - new_obj.send("#{assoc.first}=", resource_action) + new_obj.send(:"#{assoc.first}=", resource_action) end end diff --git a/lib/task_helpers/imports/customization_templates.rb b/lib/task_helpers/imports/customization_templates.rb index b3b8184a20a..1518860f8f7 100644 --- a/lib/task_helpers/imports/customization_templates.rb +++ b/lib/task_helpers/imports/customization_templates.rb @@ -70,7 +70,7 @@ def get_pxe_image_type(pxe_image_hash) ["Cannot import because there is no :name for :pxe_image_type"]) end - if pxe_image_hash.key?(:provision_type) && !%w(vm host).include?(pxe_image_hash[:provision_type]) + if pxe_image_hash.key?(:provision_type) && !%w[vm host].include?(pxe_image_hash[:provision_type]) raise CustomizationTemplateYamlError.new("Customization Template error", ["Cannot import because :provision_type for :pxe_image_type must be vm or host"]) end diff --git a/lib/task_helpers/imports/policies.rb b/lib/task_helpers/imports/policies.rb index 452f381c731..0db4ebcc6e3 100644 --- a/lib/task_helpers/imports/policies.rb +++ b/lib/task_helpers/imports/policies.rb @@ -11,7 +11,7 @@ def import(options = {}) begin policies = YAML.load_file(filename) import_policies(policies) - rescue StandardError => err + rescue => err warn("Error importing #{filename} : #{err.message}") end end diff --git a/lib/task_helpers/imports/policy_sets.rb b/lib/task_helpers/imports/policy_sets.rb index 2016a394001..0e7b2929f2e 100644 --- a/lib/task_helpers/imports/policy_sets.rb +++ b/lib/task_helpers/imports/policy_sets.rb @@ -11,7 +11,7 @@ def import(options = {}) begin policysets = YAML.load_file(filename) import_policysets(policysets) - rescue StandardError => err + rescue => err warn("Error importing #{filename} : #{err.message}") end end diff --git a/lib/task_helpers/imports/reports.rb b/lib/task_helpers/imports/reports.rb index 1f369865c1f..7e5ca7aab8e 100644 --- a/lib/task_helpers/imports/reports.rb +++ b/lib/task_helpers/imports/reports.rb @@ -8,9 +8,9 @@ def import(options = {}) Dir.glob(glob) do |filename| $log.info("Importing Reports from: #{filename}") - report_options = { :userid => 'admin', - :overwrite => options[:overwrite], - :save => true } + report_options = {:userid => 'admin', + :overwrite => options[:overwrite], + :save => true} begin report_fd = File.open(filename, 'r') diff --git a/lib/task_helpers/imports/scan_profiles.rb b/lib/task_helpers/imports/scan_profiles.rb index fdcd683bbcd..e0f8cbe841c 100644 --- a/lib/task_helpers/imports/scan_profiles.rb +++ b/lib/task_helpers/imports/scan_profiles.rb @@ -10,7 +10,7 @@ def import(options) begin import_scan_profile(filename) - rescue StandardError => err + rescue => err $log.error("Error importing #{filename} : #{err.message}") warn("Error importing #{filename} : #{err.message}") end @@ -37,6 +37,7 @@ def import_scan_profile(filename) 
items.each do |item| next if item['filename'] + if item['guid'].nil? item['guid'] = SecureRandom.uuid end diff --git a/lib/task_helpers/imports/schedules.rb b/lib/task_helpers/imports/schedules.rb index eb1c08c414a..eb38d6f17cf 100644 --- a/lib/task_helpers/imports/schedules.rb +++ b/lib/task_helpers/imports/schedules.rb @@ -10,7 +10,7 @@ def import(options = {}) begin MiqSchedule.import(File.open(filename, 'r')) - rescue StandardError => err + rescue => err warn("Error importing #{filename} : #{err.message}") end end diff --git a/lib/task_helpers/imports/tags.rb b/lib/task_helpers/imports/tags.rb index 21036414ad0..c9f238266ec 100644 --- a/lib/task_helpers/imports/tags.rb +++ b/lib/task_helpers/imports/tags.rb @@ -37,10 +37,10 @@ def import(options = {}) private # Tag Categories that are not visible in the UI and should not be imported - SPECIAL_TAGS = %w(/managed/folder_path_yellow /managed/folder_path_blue /managed/user/role).freeze + SPECIAL_TAGS = %w[/managed/folder_path_yellow /managed/folder_path_blue /managed/user/role].freeze - UPDATE_CAT_FIELDS = %w(description example_text show perf_by_tag).freeze - UPDATE_ENTRY_FIELDS = %w(description name).freeze + UPDATE_CAT_FIELDS = %w[description example_text show perf_by_tag].freeze + UPDATE_ENTRY_FIELDS = %w[description name].freeze REGION_NUMBER = MiqRegion.my_region_number.freeze @@ -48,6 +48,7 @@ def import_tags(tag_categories) tag_categories.each do |tag_category| tag = tag_category["ns"] ? "#{tag_category["ns"]}/#{tag_category["name"]}" : "/managed/#{tag_category["name"]}" next if SPECIAL_TAGS.include?(tag) + Classification.transaction do import_classification(tag_category) end @@ -55,7 +56,7 @@ def import_tags(tag_categories) end def import_classification(tag_category) - ns = tag_category["ns"] ? tag_category["ns"] : "/managed" + ns = tag_category["ns"] || "/managed" tag_category["name"] = tag_category["name"].to_s tag_category.delete("parent_id") @@ -88,6 +89,7 @@ def import_entries(classification, entries) end next if tag_entry.valid? 
+ tag_entry.errors.full_messages.each do |message| errors << "Entry #{index}: #{message}" end diff --git a/lib/task_helpers/imports/widgets.rb b/lib/task_helpers/imports/widgets.rb index fbdc231e6b5..a54e82b1728 100644 --- a/lib/task_helpers/imports/widgets.rb +++ b/lib/task_helpers/imports/widgets.rb @@ -8,9 +8,9 @@ def import(options = {}) Dir.glob(glob) do |filename| $log.info("Importing Widgets from: #{filename}") - widget_options = { :userid => 'admin', - :overwrite => options[:overwrite], - :save => true } + widget_options = {:userid => 'admin', + :overwrite => options[:overwrite], + :save => true} begin widget_fd = File.open(filename, 'r') diff --git a/lib/tasks/datasize.rake b/lib/tasks/datasize.rake index e8598e4a29c..7a4dd3557be 100644 --- a/lib/tasks/datasize.rake +++ b/lib/tasks/datasize.rake @@ -2,7 +2,7 @@ namespace :db do desc 'Print data size for entire database' task :size => :environment do - database_name = ActiveRecord::Base.connection.instance_variable_get("@config")[:database] + database_name = ActiveRecord::Base.connection.instance_variable_get(:@config)[:database] sql = "SELECT pg_size_pretty(pg_database_size('#{database_name}'));" puts ActiveRecord::Base.connection.execute(sql)[0]["pg_size_pretty"] end diff --git a/lib/tasks/evm.rake b/lib/tasks/evm.rake index 4fb032a800b..98477dbfd6c 100644 --- a/lib/tasks/evm.rake +++ b/lib/tasks/evm.rake @@ -64,6 +64,7 @@ namespace :evm do desc "Determine if the configured encryption key is valid" task :validate_encryption_key => :environment do raise "Invalid encryption key" unless EvmApplication.encryption_key_valid? + puts "Encryption key valid" end diff --git a/lib/tasks/evm_application.rb b/lib/tasks/evm_application.rb index f58a9ba90f5..fe472aa095b 100644 --- a/lib/tasks/evm_application.rb +++ b/lib/tasks/evm_application.rb @@ -6,7 +6,7 @@ class EvmApplication def self.start puts "Running EVM in background..." - command_line = "#{Gem.ruby} #{Rails.root.join(*%w(lib workers bin evm_server.rb)).expand_path}" + command_line = "#{Gem.ruby} #{Rails.root.join(*%w[lib workers bin evm_server.rb]).expand_path}" env_options = {} env_options["EVMSERVER"] = "true" if MiqEnvironment::Command.is_appliance? @@ -30,7 +30,7 @@ def self.kill def self.server_state MiqServer.my_server.status rescue => error - :no_db if error.message =~ /Connection refused/i + :no_db if /Connection refused/i.match?(error.message) end def self.status(include_remotes = false) @@ -55,6 +55,7 @@ def self.status(include_remotes = false) def self.output_status(data, footnote = nil) return if data.blank? + duplicate_columns = redundant_columns(data) duplicate_columns.delete("Status") # always show status puts data.tableize(:columns => (data.first.keys - duplicate_columns.keys)) @@ -71,6 +72,7 @@ def self.output_status(data, footnote = nil) def self.redundant_columns(data, column_names = nil, dups = {}) return dups if data.size <= 1 + column_names ||= data.first.keys column_names.each do |col_header| values = data.collect { |row| row[col_header] }.uniq @@ -82,6 +84,7 @@ def self.redundant_columns(data, column_names = nil, dups = {}) def self.compact_date(date) return "" unless date + date < 1.day.ago ? date.strftime("%Y-%m-%d") : date.strftime("%H:%M:%S%Z") end private_class_method :compact_date @@ -104,7 +107,7 @@ def self.compact_queue_uri(queue_name, uri) def self.servers_status(servers) data = servers.collect do |s| { - "Region" => s.region_number, + "Region" => s.region_number, "Zone" => s.zone.name, "Server" => (s.name || "UNKNOWN") + (s.is_master ? 
"*" : ""), "Status" => s.status, @@ -128,18 +131,18 @@ def self.workers_status(servers) mb_threshold = w.worker_settings[:memory_threshold] simple_type = w.type&.gsub(/(ManageIQ::Providers::|Manager|Worker|Miq)/, '') { - "Region" => s.region_number, - "Zone" => s.zone.name, - "Type" => simple_type, - "Status" => w.status.sub("stopping", "stop pending"), - "PID" => w.pid, - "SPID" => w.sql_spid, - "Server" => s.name, - "Queue" => compact_queue_uri(w.queue_name, w.uri), - "Started" => compact_date(w.started_on), - "Heartbeat" => compact_date(w.last_heartbeat), + "Region" => s.region_number, + "Zone" => s.zone.name, + "Type" => simple_type, + "Status" => w.status.sub("stopping", "stop pending"), + "PID" => w.pid, + "SPID" => w.sql_spid, + "Server" => s.name, + "Queue" => compact_queue_uri(w.queue_name, w.uri), + "Started" => compact_date(w.started_on), + "Heartbeat" => compact_date(w.last_heartbeat), "System UID" => w.system_uid, - "MB Usage" => mb_usage ? "#{mb_usage / 1.megabyte}/#{mb_threshold / 1.megabyte}" : "" + "MB Usage" => mb_usage ? "#{mb_usage / 1.megabyte}/#{mb_threshold / 1.megabyte}" : "" } end end @@ -178,6 +181,7 @@ def self.deployment_status return "new_deployment" if context.current_version.zero? || MiqServer.none? return "new_replica" if MiqServer.my_server.nil? return "upgrade" if context.needs_migration? + "redeployment" rescue PG::ConnectionBad, ActiveRecord::NoDatabaseError => err raise unless err.message.match?(/database "[^"]+" does not exist/) diff --git a/lib/tasks/evm_automate.rake b/lib/tasks/evm_automate.rake index 6bc06537dfc..a7d08e83133 100644 --- a/lib/tasks/evm_automate.rake +++ b/lib/tasks/evm_automate.rake @@ -1,5 +1,5 @@ module EvmAutomate - $:.push File.expand_path(File.join(Rails.root, %w{.. lib util xml})) + $:.push File.expand_path(Rails.root.join(%w[.. lib util xml]).to_s) def self.log(level, msg) $log.send(level, msg) @@ -12,6 +12,7 @@ module EvmAutomate def self.simulate(domain, namespace, class_name, instance_name) user = User.super_admin raise "Need a admin user to run simulation" unless user + MiqAeEngine.resolve_automation_object(instance_name, user, {}, @@ -35,6 +36,7 @@ module EvmAutomate def self.extract_methods(method_folder) MiqAeMethod.all.sort_by(&:fqname).each do |m| next unless m.location == 'inline' + write_method_data(File.join(method_folder, m.ae_class.fqname), m['name'], m['data']) end end @@ -42,17 +44,17 @@ end namespace :evm do namespace :automate do - desc 'Backup all automate domains to a zip file or backup folder.' task :backup => :environment do raise 'Must specify a backup zip file' if ENV['BACKUP_ZIP_FILE'].blank? 
+ puts "Datastore backup starting" - zip_file = ENV['BACKUP_ZIP_FILE'] + zip_file = ENV.fetch('BACKUP_ZIP_FILE', nil) begin MiqAeDatastore.backup('zip_file' => zip_file, 'overwrite' => (ENV['OVERWRITE'].to_s.downcase == 'true')) rescue => err - STDERR.puts err.message + warn err.message exit(1) end end @@ -68,10 +70,10 @@ namespace :evm do task :usage => :environment do puts "The following automate tasks are available" puts " Import - Usage: rake evm:automate:import PREVIEW=true DOMAIN=domain_name " \ - "IMPORT_AS=new_domain_name IMPORT_DIR=./model_export|ZIP_FILE=filename|YAML_FILE=filename " \ - "SYSTEM=true|false ENABLED=true|false OVERWRITE=true|false" - puts " Export - Usage: rake evm:automate:export DOMAIN=domain_name " \ - "EXPORT_AS=new_domain_name NAMESPACE=sample CLASS=methods EXPORT_DIR=./model_export|ZIP_FILE=filename|YAML_FILE=filename" + "IMPORT_AS=new_domain_name IMPORT_DIR=./model_export|ZIP_FILE=filename|YAML_FILE=filename " \ + "SYSTEM=true|false ENABLED=true|false OVERWRITE=true|false" + puts " Export - Usage: rake evm:automate:export DOMAIN=domain_name " \ + "EXPORT_AS=new_domain_name NAMESPACE=sample CLASS=methods EXPORT_DIR=./model_export|ZIP_FILE=filename|YAML_FILE=filename" puts " Backup - Usage: rake evm:automate:backup BACKUP_ZIP_FILE=filename OVERWRITE=false" puts " Restore - Usage: rake evm:automate:restore BACKUP_ZIP_FILE=filename" puts " Clear - Usage: rake evm:automate:clear" @@ -91,93 +93,96 @@ namespace :evm do desc 'Lists automate classes' task :list_class => :environment do - namespace = ENV["NAMESPACE"] + namespace = ENV.fetch("NAMESPACE", nil) puts "Listing automate classes#{" in #{namespace}" if namespace}" EvmAutomate.list_class(namespace) end desc 'Export automate model information to a folder or zip file. ENV options DOMAIN,NAMESPACE,CLASS,EXPORT_DIR|ZIP_FILE|YAML_FILE' task :export => :environment do - begin - domain = ENV['DOMAIN'] - raise "Must specify domain for export:" if domain.nil? - zip_file = ENV['ZIP_FILE'] - export_dir = ENV['EXPORT_DIR'] - yaml_file = ENV['YAML_FILE'] - if zip_file.nil? && export_dir.nil? && yaml_file.nil? - zip_file = "./#{domain}.zip" - puts "No export location specified. Exporting domain: #{domain} to: #{zip_file}" - end - export_options = {'export_dir' => export_dir, - 'zip_file' => zip_file, - 'yaml_file' => yaml_file, - 'namespace' => ENV['NAMESPACE'], - 'class' => ENV['CLASS'], - 'overwrite' => ENV['OVERWRITE'].to_s.downcase == 'true'} - export_options['export_as'] = ENV['EXPORT_AS'] if ENV['EXPORT_AS'].present? - MiqAeExport.new(domain, export_options).export - rescue => err - STDERR.puts err.backtrace - STDERR.puts err.message - exit(1) + + domain = ENV.fetch('DOMAIN', nil) + raise "Must specify domain for export:" if domain.nil? + + zip_file = ENV.fetch('ZIP_FILE', nil) + export_dir = ENV.fetch('EXPORT_DIR', nil) + yaml_file = ENV.fetch('YAML_FILE', nil) + if zip_file.nil? && export_dir.nil? && yaml_file.nil? + zip_file = "./#{domain}.zip" + puts "No export location specified. Exporting domain: #{domain} to: #{zip_file}" end + export_options = {'export_dir' => export_dir, + 'zip_file' => zip_file, + 'yaml_file' => yaml_file, + 'namespace' => ENV.fetch('NAMESPACE', nil), + 'class' => ENV.fetch('CLASS', nil), + 'overwrite' => ENV['OVERWRITE'].to_s.downcase == 'true'} + export_options['export_as'] = ENV['EXPORT_AS'] if ENV['EXPORT_AS'].present? 
+ MiqAeExport.new(domain, export_options).export + rescue => err + warn err.backtrace + warn err.message + exit(1) + end desc 'Import automate model information from an export folder or zip file. ' task :import => :environment do - begin - raise "Must specify domain for import:" if ENV['DOMAIN'].blank? && ENV['GIT_URL'].blank? - if ENV['YAML_FILE'].blank? && ENV['IMPORT_DIR'].blank? && ENV['ZIP_FILE'].blank? && ENV['GIT_URL'].blank? - raise 'Must specify either a directory with exported automate model or a zip file or a http based git url' - end - preview = ENV['PREVIEW'] ||= 'true' - raise 'Preview must be true or false' unless %w{true false}.include?(preview) - mode = ENV['MODE'] ||= 'add' - import_as = ENV['IMPORT_AS'] - overwrite = (ENV['OVERWRITE'] ||= 'false').casecmp('true').zero? - import_options = {'preview' => (preview.to_s.downcase == 'true'), - 'mode' => mode.to_s.downcase, - 'namespace' => ENV['NAMESPACE'], - 'class' => ENV['CLASS'], - 'overwrite' => overwrite, - 'import_as' => import_as} - if ENV['ZIP_FILE'].present? - puts "Importing automate domain: #{ENV['DOMAIN']} from file #{ENV['ZIP_FILE']}" - import_options['zip_file'] = ENV['ZIP_FILE'] - elsif ENV['IMPORT_DIR'].present? - puts "Importing automate domain: #{ENV['DOMAIN']} from directory #{ENV['IMPORT_DIR']}" - import_options['import_dir'] = ENV['IMPORT_DIR'] - elsif ENV['YAML_FILE'].present? - puts "Importing automate domain: #{ENV['DOMAIN']} from file #{ENV['YAML_FILE']}" - import_options['yaml_file'] = ENV['YAML_FILE'] - elsif ENV['GIT_URL'].present? - puts "Importing automate domain from url #{ENV['GIT_URL']}" - ENV['DOMAIN'] = nil - import_options['git_url'] = ENV['GIT_URL'] - import_options['overwrite'] = true - import_options['userid'] = ENV['USERID'] - import_options['password'] = ENV['PASSWORD'] - import_options['ref'] = ENV['REF'] || MiqAeGitImport::DEFAULT_BRANCH - import_options['ref_type'] = ENV['REF_TYPE'] || MiqAeGitImport::BRANCH - import_options['verify_ssl'] = ENV['VERIFY_SSL'] || OpenSSL::SSL::VERIFY_PEER - end - %w(SYSTEM ENABLED).each do |name| - if ENV[name].present? - raise "#{name} must be true or false" unless %w(true false).include?(ENV[name]) - import_options[name.downcase] = ENV[name] - end - end - MiqAeImport.new(ENV['DOMAIN'], import_options).import - rescue => err - STDERR.puts err.backtrace - STDERR.puts err.message - exit(1) + + raise "Must specify domain for import:" if ENV['DOMAIN'].blank? && ENV['GIT_URL'].blank? + if ENV['YAML_FILE'].blank? && ENV['IMPORT_DIR'].blank? && ENV['ZIP_FILE'].blank? && ENV['GIT_URL'].blank? + raise 'Must specify either a directory with exported automate model or a zip file or a http based git url' + end + + preview = ENV['PREVIEW'] ||= 'true' + raise 'Preview must be true or false' unless %w[true false].include?(preview) + + mode = ENV['MODE'] ||= 'add' + import_as = ENV.fetch('IMPORT_AS', nil) + overwrite = (ENV['OVERWRITE'] ||= 'false').casecmp('true').zero? + import_options = {'preview' => (preview.to_s.downcase == 'true'), + 'mode' => mode.to_s.downcase, + 'namespace' => ENV.fetch('NAMESPACE', nil), + 'class' => ENV.fetch('CLASS', nil), + 'overwrite' => overwrite, + 'import_as' => import_as} + if ENV['ZIP_FILE'].present? + puts "Importing automate domain: #{ENV.fetch('DOMAIN', nil)} from file #{ENV['ZIP_FILE']}" + import_options['zip_file'] = ENV['ZIP_FILE'] + elsif ENV['IMPORT_DIR'].present? 
+ puts "Importing automate domain: #{ENV.fetch('DOMAIN', nil)} from directory #{ENV['IMPORT_DIR']}" + import_options['import_dir'] = ENV['IMPORT_DIR'] + elsif ENV['YAML_FILE'].present? + puts "Importing automate domain: #{ENV.fetch('DOMAIN', nil)} from file #{ENV['YAML_FILE']}" + import_options['yaml_file'] = ENV['YAML_FILE'] + elsif ENV['GIT_URL'].present? + puts "Importing automate domain from url #{ENV['GIT_URL']}" + ENV['DOMAIN'] = nil + import_options['git_url'] = ENV['GIT_URL'] + import_options['overwrite'] = true + import_options['userid'] = ENV.fetch('USERID', nil) + import_options['password'] = ENV.fetch('PASSWORD', nil) + import_options['ref'] = ENV['REF'] || MiqAeGitImport::DEFAULT_BRANCH + import_options['ref_type'] = ENV['REF_TYPE'] || MiqAeGitImport::BRANCH + import_options['verify_ssl'] = ENV['VERIFY_SSL'] || OpenSSL::SSL::VERIFY_PEER + end + %w[SYSTEM ENABLED].each do |name| + next unless ENV[name].present? + raise "#{name} must be true or false" unless %w[true false].include?(ENV[name]) + + import_options[name.downcase] = ENV.fetch(name, nil) end + MiqAeImport.new(ENV.fetch('DOMAIN', nil), import_options).import + rescue => err + warn err.backtrace + warn err.message + exit(1) + end desc 'Extract automate methods' task :extract_methods => :environment do - method_folder = ENV["FOLDER"] ||= './automate_methods' + method_folder = ENV["FOLDER"] ||= './automate_methods' puts "Extracting automate methods from database to folder: #{method_folder} ..." EvmAutomate.extract_methods(method_folder) puts "The automate methods have been extracted." @@ -185,51 +190,53 @@ namespace :evm do desc 'Method simulation' task :simulate => :environment do - begin - puts "Automate simulation starting" - domain = ENV["DOMAIN"] - namespace = ENV["NAMESPACE"] - class_name = ENV["CLASS"] - instance_name = ENV["INSTANCE"] - err_msg = "" - err_msg << "Must specify automate model domain\n" if domain.nil? - err_msg << "Must specify automate model namespace\n" if namespace.nil? - err_msg << "Must specify automate model class\n" if class_name.nil? - err_msg << "Must specify automate model instance\n" if instance_name.nil? - unless err_msg.empty? - err_msg << "Usage DOMAIN=customer NAMESPACE=sample CLASS=Methods INSTANCE=Inspectme\n " - raise err_msg - end - EvmAutomate.simulate(domain, namespace, class_name, instance_name) - puts "Automate simulation ending" - rescue => err - STDERR.puts err.message - exit(1) + + puts "Automate simulation starting" + domain = ENV.fetch("DOMAIN", nil) + namespace = ENV.fetch("NAMESPACE", nil) + class_name = ENV.fetch("CLASS", nil) + instance_name = ENV.fetch("INSTANCE", nil) + err_msg = "" + err_msg << "Must specify automate model domain\n" if domain.nil? + err_msg << "Must specify automate model namespace\n" if namespace.nil? + err_msg << "Must specify automate model class\n" if class_name.nil? + err_msg << "Must specify automate model instance\n" if instance_name.nil? + unless err_msg.empty? + err_msg << "Usage DOMAIN=customer NAMESPACE=sample CLASS=Methods INSTANCE=Inspectme\n " + raise err_msg end + EvmAutomate.simulate(domain, namespace, class_name, instance_name) + puts "Automate simulation ending" + rescue => err + warn err.message + exit(1) + end desc 'Restore automate domains from a backup zip file or folder.' task :restore => :environment do - begin - raise 'Must specify a backup zip file' if ENV['BACKUP_ZIP_FILE'].blank? 
- puts "Importing automate domains from file #{ENV['BACKUP_ZIP_FILE']}" - MiqAeDatastore.restore(ENV['BACKUP_ZIP_FILE']) - rescue => err - STDERR.puts err.message - exit(1) - end + + raise 'Must specify a backup zip file' if ENV['BACKUP_ZIP_FILE'].blank? + + puts "Importing automate domains from file #{ENV.fetch('BACKUP_ZIP_FILE', nil)}" + MiqAeDatastore.restore(ENV.fetch('BACKUP_ZIP_FILE', nil)) + rescue => err + warn err.message + exit(1) + end desc 'Convert the legacy automation model to new format ENV options FILE,DOMAIN,EXPORT_DIR|ZIP_FILE|YAML_FILE' task :convert => :environment do puts "Convert automation model from the legacy xml file" - domain_name = ENV["DOMAIN"] + domain_name = ENV.fetch("DOMAIN", nil) raise "Must specify the DOMAIN name to convert as" if domain_name.nil? + export_options = {} - zip_file = ENV['ZIP_FILE'] - export_dir = ENV['EXPORT_DIR'] - yaml_file = ENV['YAML_FILE'] - overwrite = (ENV['OVERWRITE'] ||= 'false').downcase.==('true') + zip_file = ENV.fetch('ZIP_FILE', nil) + export_dir = ENV.fetch('EXPORT_DIR', nil) + yaml_file = ENV.fetch('YAML_FILE', nil) + overwrite = (ENV['OVERWRITE'] ||= 'false').downcase == ('true') export_options['zip_file'] = zip_file if zip_file export_options['export_dir'] = export_dir if export_dir @@ -238,11 +245,15 @@ namespace :evm do raise "Must specify the ZIP_FILE or EXPORT_DIR or YAML_FILE to store converted model" if zip_file.nil? && export_dir.nil? && yaml_file.nil? - model_filename = ENV["FILE"] - raise "Must specify legacy automation backup file xml to " + \ - "convert to the new automate model: - Usage FILE='xml_filename'" if model_filename.nil? - raise "Automation file to use for conversion does not " + \ - "exist: #{model_filename}" unless File.exist?(model_filename) + model_filename = ENV.fetch("FILE", nil) + if model_filename.nil? 
+ raise "Must specify legacy automation backup file xml to " + \ + "convert to the new automate model: - Usage FILE='xml_filename'" + end + unless File.exist?(model_filename) + raise "Automation file to use for conversion does not " + \ + "exist: #{model_filename}" + end puts "Converting the automation model from the xml file: #{model_filename}" MiqAeDatastore.convert(model_filename, domain_name, export_options) puts "The automate model has been converted from : #{model_filename}" diff --git a/lib/tasks/evm_dba.rake b/lib/tasks/evm_dba.rake index a687b96c403..21ba7d46395 100644 --- a/lib/tasks/evm_dba.rake +++ b/lib/tasks/evm_dba.rake @@ -15,20 +15,20 @@ namespace :evm do # Start the EVM Database silently - not to be a visible rake task task :silent_start do - begin - LinuxAdmin::Service.new(ENV.fetch("APPLIANCE_PG_SERVICE")).start - rescue AwesomeSpawn::CommandResultError - # ignore issues (ala silent) - end + + LinuxAdmin::Service.new(ENV.fetch("APPLIANCE_PG_SERVICE")).start + rescue AwesomeSpawn::CommandResultError + # ignore issues (ala silent) + end # Stop the EVM Database silently - not to be a visible rake task task :silent_stop do - begin - LinuxAdmin::Service.new(ENV.fetch("APPLIANCE_PG_SERVICE")).stop - rescue AwesomeSpawn::CommandResultError - # ignore issues (ala silent) - end + + LinuxAdmin::Service.new(ENV.fetch("APPLIANCE_PG_SERVICE")).stop + rescue AwesomeSpawn::CommandResultError + # ignore issues (ala silent) + end desc "Seed the ManageIQ EVM Database (VMDB) with defaults" @@ -54,13 +54,13 @@ namespace :evm do desc 'Set the region of the current ManageIQ EVM Database (VMDB)' task :region => "evm:db:reset" do - region = ENV["REGION"] + region = ENV.fetch("REGION", nil) puts "Initializing region and database..." AwesomeSpawn.run!("bin/rails runner", :params => ["MiqDatabase.seed; MiqRegion.seed"]) rescue => err message = err.kind_of?(AwesomeSpawn::CommandResultError) ? err.result.error : err.message - STDERR.puts "Encountered issue setting up Database using region #{region}: #{message}\n" + warn "Encountered issue setting up Database using region #{region}: #{message}\n" raise end end diff --git a/lib/tasks/evm_plugins.rake b/lib/tasks/evm_plugins.rake index bd7b3870dd3..f2a3356b5ab 100644 --- a/lib/tasks/evm_plugins.rake +++ b/lib/tasks/evm_plugins.rake @@ -13,9 +13,9 @@ namespace :evm do when "human" details.each_value do |detail| puts "#{detail[:name]}:" - puts detail + puts(detail .except(:name) - .map { |k, v| " #{k}: #{v}" } + .map { |k, v| " #{k}: #{v}" }) end else raise "Invalid format #{format.inspect}" diff --git a/lib/tasks/evm_rake_helper.rb b/lib/tasks/evm_rake_helper.rb index 6d81ef4280a..166df6a5d53 100644 --- a/lib/tasks/evm_rake_helper.rb +++ b/lib/tasks/evm_rake_helper.rb @@ -3,8 +3,8 @@ module EvmRakeHelper # For some rake tasks, the database.yml may not yet be setup and is not required anyway. # Note: Rails will not actually use the configuration and connect until you issue a query. 
def self.with_dummy_database_url_configuration - before, ENV["DATABASE_URL"] = ENV["DATABASE_URL"], "postgresql:///not_existing_db?host=/var/lib/postgresql" - before_db_check, ENV["PERFORM_DB_CONNECTABLE_CHECK"] = ENV["PERFORM_DB_CONNECTABLE_CHECK"], "false" + before, ENV["DATABASE_URL"] = ENV.fetch("DATABASE_URL", nil), "postgresql:///not_existing_db?host=/var/lib/postgresql" + before_db_check, ENV["PERFORM_DB_CONNECTABLE_CHECK"] = ENV.fetch("PERFORM_DB_CONNECTABLE_CHECK", nil), "false" yield ensure # ENV['x'] = nil deletes the key because ENV accepts only string values diff --git a/lib/tasks/evm_settings.rake b/lib/tasks/evm_settings.rake index 80bc298097a..f5ecd679bc5 100644 --- a/lib/tasks/evm_settings.rake +++ b/lib/tasks/evm_settings.rake @@ -49,7 +49,7 @@ module EvmSettings def self.log(level, msg) $log.send(level, "EVM:Settings Task: #{msg}") - STDERR.puts "#{level}: #{msg}" if level != INFO + warn "#{level}: #{msg}" if level != INFO end private_class_method :log diff --git a/lib/tasks/locale.rake b/lib/tasks/locale.rake index 29b69c6b51b..e5810378335 100644 --- a/lib/tasks/locale.rake +++ b/lib/tasks/locale.rake @@ -135,7 +135,7 @@ namespace :locale do desc "Extract model attribute names and virtual column names" task "store_model_attributes" => :environment do require 'gettext_i18n_rails/model_attributes_finder' - require_relative 'model_attribute_override.rb' + require_relative 'model_attribute_override' attributes_file = 'locale/model_attributes.rb' File.unlink(attributes_file) if File.exist?(attributes_file) @@ -195,7 +195,7 @@ namespace :locale do # TODO: Rake tasks such as delete_pot_file, plugin:find, and report_changes take arguments and conflict with # the assumption that rake makes: already invoked tasks should not be invoked again as the result should be the same. # We should make these methods with arguments and not use rake tasks in this way. 
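The TODO comment above refers to Rake's invocation memoization: once a task has been invoked, invoking it again is a no-op unless it is re-enabled first, which is what the loop just below does for every already-invoked non-file task before re-running the per-plugin tasks. A minimal sketch of that behaviour (the 'report' task is purely illustrative):

    require 'rake'
    include Rake::DSL

    task :report do
      puts 'running report'
    end

    Rake::Task['report'].invoke    # runs the task
    Rake::Task['report'].invoke    # no-op: rake remembers already_invoked
    Rake::Task['report'].reenable  # clear the flag, as the loop below does
    Rake::Task['report'].invoke    # runs again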
- Rake.application.tasks.each { |t| t.reenable if (t.already_invoked && !t.kind_of?(Rake::FileTask)) } + Rake.application.tasks.each { |t| t.reenable if t.already_invoked && !t.kind_of?(Rake::FileTask) } Rake::Task['locale:delete_pot_file'].invoke(plugin.root) # Delete plugin's pot file if it exists to avoid weird file timestamp issues Rake::Task['locale:plugin:find'].invoke(plugin.to_s.sub('::Engine', '')) # will warn and exit 1 if any engine fails pot_file = Dir.glob("#{plugin.root.join('locale')}/*.pot")[0] @@ -208,7 +208,7 @@ namespace :locale do "https://raw.githubusercontent.com/ManageIQ/react-ui-components/#{checkout_branch}/locale/react-ui-components.pot" ] - tmp_dir = Rails.root.join('locale', 'tmp').to_s + tmp_dir = Rails.root.join("locale/tmp").to_s Dir.mkdir(tmp_dir, 0o700) extra_pots.each do |url| pot_file = "#{tmp_dir}/#{url.split('/')[-1]}" @@ -216,10 +216,10 @@ namespace :locale do pot_files << pot_file end - system('rmsgcat', '--sort-by-msgid', '-o', Rails.root.join('locale', 'manageiq-all.pot').to_s, Rails.root.join('locale', 'manageiq.pot').to_s, *pot_files) - system('mv', '-v', Rails.root.join('locale', 'manageiq-all.pot').to_s, Rails.root.join('locale', 'manageiq.pot').to_s) - system('rmsgmerge', '--sort-by-msgid', '--no-fuzzy-matching', '-o', Rails.root.join('locale', 'en', 'manageiq-all.po').to_s, Rails.root.join('locale', 'en', 'manageiq.po').to_s, Rails.root.join('locale', 'manageiq.pot').to_s) - system('mv', '-v', Rails.root.join('locale', 'en', 'manageiq-all.po').to_s, Rails.root.join('locale', 'en', 'manageiq.po').to_s) + system('rmsgcat', '--sort-by-msgid', '-o', Rails.root.join("locale/manageiq-all.pot").to_s, Rails.root.join("locale/manageiq.pot").to_s, *pot_files) + system('mv', '-v', Rails.root.join("locale/manageiq-all.pot").to_s, Rails.root.join("locale/manageiq.pot").to_s) + system('rmsgmerge', '--sort-by-msgid', '--no-fuzzy-matching', '-o', Rails.root.join("locale/en/manageiq-all.po").to_s, Rails.root.join("locale/en/manageiq.po").to_s, Rails.root.join("locale/manageiq.pot").to_s) + system('mv', '-v', Rails.root.join("locale/en/manageiq-all.po").to_s, Rails.root.join("locale/en/manageiq.po").to_s) system('rm', '-rf', tmp_dir) remove_line_numbers(Rails.root.join('locale/manageiq.pot')) @@ -230,9 +230,9 @@ namespace :locale do task "report_changes", [:verbose] do |_t, args| require 'poparser' - old_pot = PoParser.parse(File.read(Rails.root.join('locale', 'manageiq.pot'))).to_h.collect { |item| item[:msgid] }.sort + old_pot = PoParser.parse(File.read(Rails.root.join("locale/manageiq.pot"))).to_h.collect { |item| item[:msgid] }.sort Rake::Task['locale:update_all'].invoke - new_pot = PoParser.parse(File.read(Rails.root.join('locale', 'manageiq.pot'))).to_h.collect { |item| item[:msgid] }.sort + new_pot = PoParser.parse(File.read(Rails.root.join("locale/manageiq.pot"))).to_h.collect { |item| item[:msgid] }.sort diff = new_pot - old_pot puts "--------------------------------------------------" puts "Current string / word count: %{str} / %{word}" % {:str => old_pot.length, :word => old_pot.join(' ').split.size} @@ -291,56 +291,56 @@ namespace :locale do desc "Convert PO files from all plugins to JS files" task "po_to_json" => :environment do - begin - require_relative 'gettext_task_override.rb' - require_relative 'po_to_json_override.rb' - require Rails.root.join('lib/manageiq/environment') - require Rails.root.join("lib/vmdb/gettext/domains") - - po_files = {} - - Vmdb::Gettext::Domains.po_paths.each do |path| - files = ::Pathname.glob(::File.join(path, "**", 
"*.po")).sort - files.each do |file| - locale = file.dirname.basename.to_s - po_files[locale] ||= [] - po_files[locale].push(file) - end - end - combined_dir = File.join(Rails.root, "locale/combined") - Dir.mkdir(combined_dir, 0o700) - po_files.each do |locale, files| - files.each do |file| - unless system "msgfmt --check #{file}" - puts "Fatal error running 'msgfmt --check' on file: #{file}. Review the output above." - exit 1 - end - end + require_relative 'gettext_task_override' + require_relative 'po_to_json_override' + require Rails.root.join('lib/manageiq/environment') + require Rails.root.join("lib/vmdb/gettext/domains") - dir = File.join(combined_dir, locale) - po = File.join(dir, 'manageiq.po') - Dir.mkdir(dir, 0o700) - puts "Generating po from\n#{files.sort.map { |f| "- #{f}" }.join("\n")}" - system "rmsgcat --sort-by-msgid -o #{po} #{files.join(' ')}" - puts - end + po_files = {} - # create webpack file for including bootstrap-datepicker language packs - File.open(::ManageIQ::UI::Classic::Engine.root.join('app/javascript/packs/bootstrap-datepicker-languages.js'), "w+") do |f| - f.puts("// This file is automatically generated by rake task 'locale:po_to_json'") - po_files.keys.sort.each do |lang| - next if lang == 'en' + Vmdb::Gettext::Domains.po_paths.each do |path| + files = Pathname.glob(File.join(path, "**", "*.po")).sort + files.each do |file| + locale = file.dirname.basename.to_s + po_files[locale] ||= [] + po_files[locale].push(file) + end + end - f.puts("require('bootstrap-datepicker/dist/locales/bootstrap-datepicker." + lang.sub('_', '-') + ".min.js');") + combined_dir = Rails.root.join("locale/combined").to_s + Dir.mkdir(combined_dir, 0o700) + po_files.each do |locale, files| + files.each do |file| + unless system "msgfmt --check #{file}" + puts "Fatal error running 'msgfmt --check' on file: #{file}. Review the output above." + exit 1 end end - # This depends on PoToJson overrides as defined in lib/tasks/po_to_json_override.rb - Rake::Task['gettext:po_to_json'].invoke - ensure - system "rm -rf #{combined_dir}" + dir = File.join(combined_dir, locale) + po = File.join(dir, 'manageiq.po') + Dir.mkdir(dir, 0o700) + puts "Generating po from\n#{files.sort.map { |f| "- #{f}" }.join("\n")}" + system "rmsgcat --sort-by-msgid -o #{po} #{files.join(' ')}" + puts end + + # create webpack file for including bootstrap-datepicker language packs + File.open(ManageIQ::UI::Classic::Engine.root.join('app/javascript/packs/bootstrap-datepicker-languages.js'), "w+") do |f| + f.puts("// This file is automatically generated by rake task 'locale:po_to_json'") + po_files.keys.sort.each do |lang| + next if lang == 'en' + + f.puts("require('bootstrap-datepicker/dist/locales/bootstrap-datepicker." + lang.sub('_', '-') + ".min.js');") + end + end + + # This depends on PoToJson overrides as defined in lib/tasks/po_to_json_override.rb + Rake::Task['gettext:po_to_json'].invoke + ensure + system "rm -rf #{combined_dir}" + end desc "Create display names for models" diff --git a/lib/tasks/model_attribute_override.rb b/lib/tasks/model_attribute_override.rb index 50170327daf..fe7b97afa44 100644 --- a/lib/tasks/model_attribute_override.rb +++ b/lib/tasks/model_attribute_override.rb @@ -4,13 +4,13 @@ def model_attributes(model, ignored_tables, ignored_cols) return [] if model.abstract_class? && Rails::VERSION::MAJOR < 3 if model.abstract_class? 
- model.direct_descendants.reject {|m| ignored?(m.table_name, ignored_tables)}.inject([]) do |attrs, m| + model.direct_descendants.reject { |m| ignored?(m.table_name, ignored_tables) }.inject([]) do |attrs, m| attrs.push(model_attributes(m, ignored_tables, ignored_cols)).flatten.uniq end elsif !ignored?(model.table_name, ignored_tables) && @existing_tables.include?(model.table_name) - list = model.virtual_attribute_names + + model.virtual_attribute_names + model.columns.reject { |c| ignored?(c.name, ignored_cols) }.collect { |c| c.name } - list + else [] end diff --git a/lib/tasks/po_to_json_override.rb b/lib/tasks/po_to_json_override.rb index 0f11672a2d5..8198cf8e749 100644 --- a/lib/tasks/po_to_json_override.rb +++ b/lib/tasks/po_to_json_override.rb @@ -22,6 +22,7 @@ def parse_header values[""][0].split("\n").each do |line| next if line.empty? + build_header_for(line) end diff --git a/lib/tasks/release.rake b/lib/tasks/release.rake index 482f915c59c..7308adeb33f 100644 --- a/lib/tasks/release.rake +++ b/lib/tasks/release.rake @@ -2,15 +2,15 @@ desc "Release a new project version" task :release do require 'pathname' - version = ENV["RELEASE_VERSION"] + version = ENV.fetch("RELEASE_VERSION", nil) if version.nil? || version.empty? - STDERR.puts "ERROR: You must set the env var RELEASE_VERSION to the proper value." + warn "ERROR: You must set the env var RELEASE_VERSION to the proper value." exit 1 end branch = `git rev-parse --abbrev-ref HEAD`.chomp if branch == "master" - STDERR.puts "ERROR: You cannot cut a release from the master branch." + warn "ERROR: You cannot cut a release from the master branch." exit 1 end @@ -72,21 +72,21 @@ namespace :release do task :new_branch do require 'pathname' - branch = ENV["RELEASE_BRANCH"] + branch = ENV.fetch("RELEASE_BRANCH", nil) if branch.nil? || branch.empty? - STDERR.puts "ERROR: You must set the env var RELEASE_BRANCH to the proper value." + warn "ERROR: You must set the env var RELEASE_BRANCH to the proper value." exit 1 end - next_branch = ENV["RELEASE_BRANCH_NEXT"] + next_branch = ENV.fetch("RELEASE_BRANCH_NEXT", nil) if next_branch.nil? || next_branch.empty? - STDERR.puts "ERROR: You must set the env var RELEASE_BRANCH_NEXT to the proper value." + warn "ERROR: You must set the env var RELEASE_BRANCH_NEXT to the proper value." exit 1 end current_branch = `git rev-parse --abbrev-ref HEAD`.chomp if current_branch == "master" - STDERR.puts "ERROR: You cannot do new branch tasks from the master branch." + warn "ERROR: You cannot do new branch tasks from the master branch." exit 1 end @@ -133,21 +133,21 @@ namespace :release do task :new_branch_master do require 'pathname' - branch = ENV["RELEASE_BRANCH"] + branch = ENV.fetch("RELEASE_BRANCH", nil) if branch.nil? || branch.empty? - STDERR.puts "ERROR: You must set the env var RELEASE_BRANCH to the proper value." + warn "ERROR: You must set the env var RELEASE_BRANCH to the proper value." exit 1 end - next_branch = ENV["RELEASE_BRANCH_NEXT"] + next_branch = ENV.fetch("RELEASE_BRANCH_NEXT", nil) if next_branch.nil? || next_branch.empty? - STDERR.puts "ERROR: You must set the env var RELEASE_BRANCH_NEXT to the proper value." + warn "ERROR: You must set the env var RELEASE_BRANCH_NEXT to the proper value." exit 1 end current_branch = `git rev-parse --abbrev-ref HEAD`.chomp if current_branch != "master" - STDERR.puts "ERROR: You cannot do master branch tasks from a non-master branch (#{current_branch})." + warn "ERROR: You cannot do master branch tasks from a non-master branch (#{current_branch})." 
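Two mechanical substitutions recur through these release and rake task hunks: STDERR.puts becomes Kernel#warn, which also writes to standard error (appending a newline when one is missing), and bare ENV[...] reads become ENV.fetch(..., nil), which returns the same value but makes the nil default explicit, presumably to satisfy RuboCop's Style/FetchEnvVar cop. A minimal sketch of the equivalence, modeled on the RELEASE_VERSION check above:

    # Reading an env var: both forms return nil when the key is unset;
    # the fetch form just spells the default out.
    version = ENV.fetch("RELEASE_VERSION", nil)   # same value as ENV["RELEASE_VERSION"]

    # Reporting an error: warn writes to $stderr, as STDERR.puts did.
    if version.nil? || version.empty?
      warn "ERROR: You must set the env var RELEASE_VERSION to the proper value."
      exit 1
    end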
exit 1 end @@ -178,9 +178,9 @@ namespace :release do desc "Generate the Gemfile.lock.release file" task :generate_lockfile do - branch = ENV["RELEASE_BRANCH"] + branch = ENV.fetch("RELEASE_BRANCH", nil) if branch.nil? || branch.empty? - STDERR.puts "ERROR: You must set the env var RELEASE_BRANCH to the proper value." + warn "ERROR: You must set the env var RELEASE_BRANCH to the proper value." exit 1 end @@ -193,7 +193,7 @@ namespace :release do global_bundler_d = Pathname.new(Dir.home).join(".bundler.d") if (local_bundler_d.exist? && local_bundler_d.glob("*.rb").any?) || (global_bundler_d.exist? && global_bundler_d.glob("*.rb").any?) - STDERR.puts "ERROR: You cannot run generate_lockfile with bundler-inject files present." + warn "ERROR: You cannot run generate_lockfile with bundler-inject files present." exit 1 end diff --git a/lib/tasks/test.rake b/lib/tasks/test.rake index 505350cc095..6b61fefc4a8 100644 --- a/lib/tasks/test.rake +++ b/lib/tasks/test.rake @@ -1,4 +1,4 @@ -require_relative './evm_test_helper' +require_relative 'evm_test_helper' if defined?(RSpec) namespace :test do @@ -19,13 +19,13 @@ if defined?(RSpec) end EvmRakeHelper.with_dummy_database_url_configuration do - begin - puts "** Confirming rails environment does not connect to the database" - Rake::Task['environment'].invoke - rescue ActiveRecord::NoDatabaseError - STDERR.write "Detected Rails environment trying to connect to the database! Check the backtrace for an initializer trying to access the database.\n\n" - raise - end + + puts "** Confirming rails environment does not connect to the database" + Rake::Task['environment'].invoke + rescue ActiveRecord::NoDatabaseError + STDERR.write "Detected Rails environment trying to connect to the database! Check the backtrace for an initializer trying to access the database.\n\n" + raise + end end @@ -35,8 +35,8 @@ if defined?(RSpec) end task :setup_region do - ENV["REGION"] ||= (rand(99) + 1).to_s # Ensure we have a random, non-0, region - puts "** Preparing database with REGION #{ENV["REGION"]}" + ENV["REGION"] ||= rand(1..99).to_s # Ensure we have a random, non-0, region + puts "** Preparing database with REGION #{ENV.fetch("REGION", nil)}" end task :spec_deps => [:initialize, 'evm:compile_sti_loader'] diff --git a/lib/tasks/test_providers_common.rake b/lib/tasks/test_providers_common.rake index 1dccaab96f6..a53addde309 100644 --- a/lib/tasks/test_providers_common.rake +++ b/lib/tasks/test_providers_common.rake @@ -1,4 +1,4 @@ -require_relative './evm_test_helper' +require_relative 'evm_test_helper' if defined?(RSpec) namespace :test do diff --git a/lib/tasks/test_vmdb.rake b/lib/tasks/test_vmdb.rake index a3ab25c7907..58a9f2d53c7 100644 --- a/lib/tasks/test_vmdb.rake +++ b/lib/tasks/test_vmdb.rake @@ -1,4 +1,4 @@ -require_relative "./evm_test_helper" +require_relative "evm_test_helper" if defined?(RSpec) && defined?(RSpec::Core::RakeTask) diff --git a/lib/token_store/sql_store.rb b/lib/token_store/sql_store.rb index 112c256ee52..b6c6bb62728 100644 --- a/lib/token_store/sql_store.rb +++ b/lib/token_store/sql_store.rb @@ -18,6 +18,7 @@ def write(token, data, _options = nil) def read(token, _options = nil) record = Session.find_by(:session_id => session_key(token)) return nil unless record + data = record.raw_data if data[:expires_on] > Time.zone.now data @@ -30,6 +31,7 @@ def read(token, _options = nil) def delete(token) record = Session.find_by(:session_id => session_key(token)) return nil unless record + record.destroy! 
end diff --git a/lib/unique_within_region_validator.rb b/lib/unique_within_region_validator.rb index 39181b2d73f..196d3b776a1 100644 --- a/lib/unique_within_region_validator.rb +++ b/lib/unique_within_region_validator.rb @@ -13,7 +13,7 @@ # class UniqueWithinRegionValidator < ActiveModel::EachValidator def validate_each(record, attribute, value) - return if value.nil? || !record.send("#{attribute}_changed?") + return if value.nil? || !record.send(:"#{attribute}_changed?") match_case = options.key?(:match_case) ? options[:match_case] : true record_base_class = record.class.base_class.default_scoped diff --git a/lib/uniqueness_when_changed_validator.rb b/lib/uniqueness_when_changed_validator.rb index c01f0854c30..838d5d67b50 100644 --- a/lib/uniqueness_when_changed_validator.rb +++ b/lib/uniqueness_when_changed_validator.rb @@ -7,7 +7,7 @@ class UniquenessWhenChangedValidator < ActiveRecord::Validations::UniquenessVali # Examples: # validates :name, :uniqueness_when_changed => true def validate_each(record, attribute, value) - return if value.nil? || !record.send("#{attribute}_changed?") + return if value.nil? || !record.send(:"#{attribute}_changed?") super end diff --git a/lib/uuid_mixin.rb b/lib/uuid_mixin.rb index f9f3b2c3093..27a94c4cd51 100644 --- a/lib/uuid_mixin.rb +++ b/lib/uuid_mixin.rb @@ -12,6 +12,6 @@ def dup private def default_name_to_guid - self.name ||= self.guid if self.respond_to?(:guid) && self.respond_to?(:name) && self.respond_to?(:name=) + self.name ||= guid if respond_to?(:guid) && respond_to?(:name) && respond_to?(:name=) end end diff --git a/lib/vmdb/appliance.rb b/lib/vmdb/appliance.rb index d1042e2ce41..b9ee12f4131 100644 --- a/lib/vmdb/appliance.rb +++ b/lib/vmdb/appliance.rb @@ -3,7 +3,7 @@ module Vmdb module Appliance def self.VERSION - @EVM_VERSION ||= File.read(File.join(File.expand_path(Rails.root), "VERSION")).strip + @EVM_VERSION ||= File.read(Rails.root.join("VERSION").to_s).strip end def self.BUILD @@ -15,7 +15,7 @@ def self.CODENAME end def self.RELEASE - release_file = File.join(File.expand_path(Rails.root), "RELEASE") + release_file = Rails.root.join("RELEASE").to_s File.exist?(release_file) ? File.read(release_file).strip : self.CODENAME end @@ -83,11 +83,12 @@ def self.log_config(*args) def self.log_server_identity return unless MiqEnvironment::Command.is_appliance? + # this is the request to overwrite a small file in the vmdb/log directory for each time the evm server is restarted. # the file must not be named ".log" or it will be removed by logrotate, and it must contain the Server GUID (by which the appliance is known in the vmdb, # the build identifier of the appliance as it is being started, the appliance hostname and the name of the appliance as configured from our configuration screen. - last_startup_file = Rails.root.join("log", "last_startup.txt") + last_startup_file = Rails.root.join("log/last_startup.txt") last_startup_file.delete if last_startup_file.exist? begin @@ -105,7 +106,7 @@ def self.log_server_identity s.assigned_server_roles.includes(:server_role).each { |r| startup.info("Role: #{r.server_role.name}, Priority: #{r.priority}") } issue = `cat /etc/issue 2> /dev/null` rescue nil - startup.info("OS: #{issue.chomp}") unless issue.blank? + startup.info("OS: #{issue.chomp}") if issue.present? network = get_network unless network.empty? 
@@ -113,13 +114,13 @@ def self.log_server_identity network.each { |k, v| startup.info("#{k}: #{v}") } end mem = `cat /proc/meminfo 2> /dev/null` rescue nil - startup.info("System Memory Information:\n#{mem}") unless mem.blank? + startup.info("System Memory Information:\n#{mem}") if mem.present? cpu = `cat /proc/cpuinfo 2> /dev/null` rescue nil - startup.info("CPU Information:\n#{cpu}") unless cpu.blank? + startup.info("CPU Information:\n#{cpu}") if cpu.present? fstab = `cat /etc/fstab 2> /dev/null` rescue nil - startup.info("fstab information:\n#{fstab}") unless fstab.blank? + startup.info("fstab information:\n#{fstab}") if fstab.present? ensure startup.close rescue nil end @@ -131,21 +132,21 @@ def self.log_diagnostics init_diagnostics @diags.each do |diag| begin - if diag[:cmd].kind_of?(Proc) - res = diag[:cmd].call - else - res = AwesomeSpawn.run(diag[:cmd], :params => diag[:params]).output - end + res = if diag[:cmd].kind_of?(Proc) + diag[:cmd].call + else + AwesomeSpawn.run(diag[:cmd], :params => diag[:params]).output + end rescue => e $log.warn("Diagnostics: [#{diag[:msg]}] command [#{diag[:cmd]}] failed with error [#{e}]") - next # go to next diagnostic command if this one blew up + next # go to next diagnostic command if this one blew up end - $log.info("Diagnostics: [#{diag[:msg]}]\n#{res}") unless res.blank? + $log.info("Diagnostics: [#{diag[:msg]}]\n#{res}") if res.present? end end def self.get_build - build_file = File.join(File.expand_path(Rails.root), "BUILD") + build_file = Rails.root.join("BUILD").to_s if File.exist?(build_file) build = File.read(build_file).strip.split("-").last @@ -199,7 +200,7 @@ def self.init_diagnostics @diags ||= [ {:cmd => "top", :params => [:b, {:n => 1}], :msg => "Uptime, top processes, and memory usage"}, # batch mode - 1 iteration {:cmd => "pstree", :params => [:a, :p], :msg => "Process tree"}, - {:cmd => "df", :params => [:all, :local, :human_readable, :print_type], :msg => "File system disk usage"}, # All including dummy fs, local, human readable, file system type + {:cmd => "df", :params => [:all, :local, :human_readable, :print_type], :msg => "File system disk usage"}, # All including dummy fs, local, human readable, file system type {:cmd => "mount", :msg => "Mounted file systems"}, {:cmd => "ifconfig", :params => [:a], :msg => "Currently active interfaces"}, # -a display all interfaces which are currently available, even if down {:cmd => "route", :msg => "IP Routing table"}, @@ -207,8 +208,8 @@ def self.init_diagnostics {:cmd => "netstat", :params => [:statistics], :msg => "Network statistics"}, {:cmd => -> { File.read('/etc/hosts') if File.exist?('/etc/hosts') }, :msg => "Hosts file contents"}, {:cmd => -> { File.read('/etc/fstab') if File.exist?('/etc/fstab') }, :msg => "FStab file contents"}, - {:cmd => -> { installed_gems }, :msg => "Installed Ruby Gems" }, - {:cmd => -> { installed_rpms }, :msg => "Installed RPMs" }, + {:cmd => -> { installed_gems }, :msg => "Installed Ruby Gems"}, + {:cmd => -> { installed_rpms }, :msg => "Installed RPMs"}, ] end private_class_method :init_diagnostics diff --git a/lib/vmdb/deprecation.rb b/lib/vmdb/deprecation.rb index 81b3f7e9294..9d547939adb 100644 --- a/lib/vmdb/deprecation.rb +++ b/lib/vmdb/deprecation.rb @@ -4,8 +4,8 @@ def self.instance @instance ||= ActiveSupport::Deprecation.new("T-release", "ManageIQ").tap { |d| d.behavior = default_behavior } end - def self.method_missing(method_name, *args, &block) - instance.respond_to?(method_name) ? 
instance.send(method_name, *args, &block) : super + def self.method_missing(method_name, ...) + instance.respond_to?(method_name) ? instance.send(method_name, ...) : super end def self.respond_to_missing?(method, _include_private = false) @@ -28,6 +28,7 @@ def self.default_log def self.proc_for_default_log return unless default_log + proc do |message, callstack| default_log.warn(message) default_log.debug { callstack.join("\n ") } diff --git a/lib/vmdb/fast_gettext_helper.rb b/lib/vmdb/fast_gettext_helper.rb index 9c8ff5ce06f..397cbf98497 100644 --- a/lib/vmdb/fast_gettext_helper.rb +++ b/lib/vmdb/fast_gettext_helper.rb @@ -10,7 +10,7 @@ def self.register_human_localenames FastGettext.available_locales.each do |locale| FastGettext.human_available_locales << [human_locale_names[locale] || "locale_name", locale] end - FastGettext.human_available_locales.sort! { |a, b| a[0] <=> b[0] } + FastGettext.human_available_locales.sort_by! { |a| a[0] } end def self.fix_i18n_available_locales @@ -35,8 +35,8 @@ def self.supported_locales private_class_method def self.supported_locales_files Vmdb::Plugins.to_a.unshift(Rails) - .map { |source| source.root.join("config", "supported_locales.yml") } - .select(&:exist?) + .map { |source| source.root.join("config", "supported_locales.yml") } + .select(&:exist?) end def self.find_available_locales diff --git a/lib/vmdb/gettext/domains.rb b/lib/vmdb/gettext/domains.rb index bd40ec5d337..3035f0e416c 100644 --- a/lib/vmdb/gettext/domains.rb +++ b/lib/vmdb/gettext/domains.rb @@ -25,7 +25,7 @@ def self.add_domain(name, path, type = :po) case type.to_sym when :po po_paths << path - else :mo + else mo_paths << path end end diff --git a/lib/vmdb/global_methods.rb b/lib/vmdb/global_methods.rb index 2199f22d28e..c23366261f5 100644 --- a/lib/vmdb/global_methods.rb +++ b/lib/vmdb/global_methods.rb @@ -30,8 +30,8 @@ def copy_array(arrayin) # Had to add timezone methods here, they are being called from models # returns formatted time in specified timezone and format def format_timezone(time, timezone = Time.zone.name, ftype = "view") - timezone = timezone.name if timezone.kind_of?(ActiveSupport::TimeZone) # If a Timezone object comes in, just get the name - if !time.blank? + timezone = timezone.name if timezone.kind_of?(ActiveSupport::TimeZone) # If a Timezone object comes in, just get the name + if time.present? new_time = time.in_time_zone(timezone) case ftype when "gtl" # for gtl views @@ -52,7 +52,7 @@ def format_timezone(time, timezone = Time.zone.name, ftype = "view") else # for summary screens new_time = I18n.l(new_time) end - else # if time is nil + else # if time is nil new_time = "" end new_time diff --git a/lib/vmdb/initializer.rb b/lib/vmdb/initializer.rb index 1eae159f817..cb95da3deae 100644 --- a/lib/vmdb/initializer.rb +++ b/lib/vmdb/initializer.rb @@ -1,7 +1,7 @@ module Vmdb module Initializer def self.init - _log.info("Initializing Application: Program Name: #{$PROGRAM_NAME}, PID: #{Process.pid}, ENV['EVMSERVER']: #{ENV['EVMSERVER']}") + _log.info("Initializing Application: Program Name: #{$PROGRAM_NAME}, PID: #{Process.pid}, ENV['EVMSERVER']: #{ENV.fetch('EVMSERVER', nil)}") check_db_connectable if perform_db_connectable_check? 
# UiWorker called in Development Mode diff --git a/lib/vmdb/loggers.rb b/lib/vmdb/loggers.rb index 50b51806654..e58840fb466 100644 --- a/lib/vmdb/loggers.rb +++ b/lib/vmdb/loggers.rb @@ -14,6 +14,7 @@ def self.rails_logger module Loggers def self.init return if @initialized + create_loggers @initialized = true end diff --git a/lib/vmdb/loggers/fog_logger.rb b/lib/vmdb/loggers/fog_logger.rb index 2ed4fad9e34..f97bdc3d567 100644 --- a/lib/vmdb/loggers/fog_logger.rb +++ b/lib/vmdb/loggers/fog_logger.rb @@ -1,4 +1,4 @@ -require_relative "./instrument" +require_relative "instrument" module Vmdb::Loggers class FogLogger < ManageIQ::Loggers::Base diff --git a/lib/vmdb/loggers/instrument.rb b/lib/vmdb/loggers/instrument.rb index ebdf52999cd..3ee3791df65 100644 --- a/lib/vmdb/loggers/instrument.rb +++ b/lib/vmdb/loggers/instrument.rb @@ -10,7 +10,7 @@ def instrument(name, params = {}) when "excon.request" then [:debug, message_for_excon_request(params)] when "excon.response" then [:debug, message_for_excon_response(params)] when "excon.error" then [:debug, message_for_excon_error(params)] - else [:debug, message_for_other(params)] + else [:debug, message_for_other(params)] end send(method, "#{name.ljust(14)} #{message}") diff --git a/lib/vmdb/loggers/io_logger.rb b/lib/vmdb/loggers/io_logger.rb index 64bc7461723..e93ff6e471f 100644 --- a/lib/vmdb/loggers/io_logger.rb +++ b/lib/vmdb/loggers/io_logger.rb @@ -24,6 +24,7 @@ def <<(string) def dump_buffer @buffer.each_line do |l| next if l.empty? + line = [@prefix, l].join(" ").strip @logger.send(@level, line) end diff --git a/lib/vmdb/loggers/provider_sdk_logger.rb b/lib/vmdb/loggers/provider_sdk_logger.rb index e9d3f6e165f..b28495963fb 100644 --- a/lib/vmdb/loggers/provider_sdk_logger.rb +++ b/lib/vmdb/loggers/provider_sdk_logger.rb @@ -16,10 +16,10 @@ def <<(msg) class Formatter < ManageIQ::Loggers::Base::Formatter def call(severity, datetime, progname, msg) - msg = msg.sub(/Bearer(.*?)\"/, 'Bearer [FILTERED] "') - msg = msg.sub(/SharedKey(.*?)\"/, 'SharedKey [FILTERED] "') + msg = msg.sub(/Bearer(.*?)"/, 'Bearer [FILTERED] "') + msg = msg.sub(/SharedKey(.*?)"/, 'SharedKey [FILTERED] "') msg = msg.sub(/client_secret=(.*?)&/, "client_secret=[FILTERED]&") - msg = msg.sub(/apikey=(.*?)\"/, 'apikey=[FILTERED]"') + msg = msg.sub(/apikey=(.*?)"/, 'apikey=[FILTERED]"') super(severity, datetime, progname, msg) end end diff --git a/lib/vmdb/logging.rb b/lib/vmdb/logging.rb index ca143864b77..e236664295d 100644 --- a/lib/vmdb/logging.rb +++ b/lib/vmdb/logging.rb @@ -49,13 +49,13 @@ def _log module ClassLogging def instance_logger - @instance_logger ||= LogProxy.new(name, '#', Hash.new) + @instance_logger ||= LogProxy.new(name, '#', {}) end def _log - @_log ||= LogProxy.new(name, '.', Hash.new) + @_log ||= LogProxy.new(name, '.', {}) end end - ::Module.send(:include, ClassLogging) + ::Module.include ClassLogging end diff --git a/lib/vmdb/permission_stores.rb b/lib/vmdb/permission_stores.rb index 5dc8e5b9a95..7af11e2959a 100644 --- a/lib/vmdb/permission_stores.rb +++ b/lib/vmdb/permission_stores.rb @@ -12,8 +12,8 @@ def self.unsupported private_class_method def self.permission_files Vmdb::Plugins.to_a.unshift(Rails) - .map { |source| source.root.join("config", "permissions.yml") } - .select(&:exist?) + .map { |source| source.root.join("config", "permissions.yml") } + .select(&:exist?) 
end attr_reader :unsupported diff --git a/lib/vmdb/plugins.rb b/lib/vmdb/plugins.rb index 4edcc4d07a1..0cb79c75822 100644 --- a/lib/vmdb/plugins.rb +++ b/lib/vmdb/plugins.rb @@ -7,8 +7,8 @@ class Plugins include Enumerable - def self.method_missing(m, *args, &block) - instance.respond_to?(m) ? instance.send(m, *args, &block) : super + def self.method_missing(m, ...) + instance.respond_to?(m) ? instance.send(m, ...) : super end def self.respond_to_missing?(*args) @@ -30,8 +30,8 @@ def init end def details - each_with_object({}) do |engine, hash| - hash[engine] = { + index_with do |engine| + { :name => engine.name, :version => version(engine), :path => engine.root.to_s @@ -70,14 +70,13 @@ def ansible_content # Ansible content (playbooks and roles) for internal use by provider plugins, # not exposed to Automate, and to be run by ansible_runner def ansible_runner_content - @ansible_runner_content ||= begin - map do |engine| - content_dir = engine.root.join("content", "ansible_runner") + @ansible_runner_content ||= map do |engine| + content_dir = engine.root.join("content", "ansible_runner") next unless File.exist?(content_dir.join("roles/requirements.yml")) [engine, content_dir] - end.compact - end + end.compact + end def automate_domains @@ -90,7 +89,7 @@ def automate_domains end def miq_widgets_content - @miq_widgets_content ||= Dir.glob(Rails.root.join("product/dashboard/widgets", "*")) + flat_map { |engine| content_directories(engine, "dashboard/widgets") } + @miq_widgets_content ||= Dir.glob(Rails.root.join("product/dashboard/widgets/*")) + flat_map { |engine| content_directories(engine, "dashboard/widgets") } end def provider_plugins @@ -105,9 +104,8 @@ def asset_paths end def systemd_units - @systemd_units ||= begin - flat_map { |engine| engine.root.join("systemd").glob("*.*") } - end + @systemd_units ||= flat_map { |engine| engine.root.join("systemd").glob("*.*") } + end def load_inflections diff --git a/lib/vmdb/plugins/ansible_content.rb b/lib/vmdb/plugins/ansible_content.rb index 520b7b2e231..bc5b93827ac 100644 --- a/lib/vmdb/plugins/ansible_content.rb +++ b/lib/vmdb/plugins/ansible_content.rb @@ -5,6 +5,7 @@ class AnsibleContent def initialize(path) raise "#{path} does not exist" unless File.directory?(path) + roles_path = Pathname.new(path) @path = roles_path.split.first end diff --git a/lib/vmdb/plugins/asset_path.rb b/lib/vmdb/plugins/asset_path.rb index 6574c8069ea..93bcc416969 100644 --- a/lib/vmdb/plugins/asset_path.rb +++ b/lib/vmdb/plugins/asset_path.rb @@ -4,10 +4,7 @@ module Vmdb class Plugins class AssetPath - attr_reader :name - attr_reader :path - attr_reader :namespace - attr_reader :node_modules + attr_reader :name, :path, :namespace, :node_modules def self.asset_path(engine) engine.root.join('app', 'javascript') @@ -39,7 +36,7 @@ def development_gem? 
# also used in update:ui task to determine where to copy config files def self.node_root - Rails.root.join('vendor', 'node_root') + Rails.root.join("vendor/node_root") end end end diff --git a/lib/vmdb/plugins/automate_domain.rb b/lib/vmdb/plugins/automate_domain.rb index dfe7809730d..e7a1c5066b8 100644 --- a/lib/vmdb/plugins/automate_domain.rb +++ b/lib/vmdb/plugins/automate_domain.rb @@ -1,12 +1,11 @@ module Vmdb class Plugins class AutomateDomain - attr_reader :datastores_path - attr_reader :name - attr_reader :path + attr_reader :datastores_path, :name, :path def initialize(path) raise "#{path} is not a directory" unless File.directory?(path) + @path = Pathname.new(path) @datastores_path = @path.split.first @name = config.fetch_path("object", "attributes", "name") diff --git a/lib/vmdb/settings.rb b/lib/vmdb/settings.rb index 1a3b1bc1099..6020c15e6df 100644 --- a/lib/vmdb/settings.rb +++ b/lib/vmdb/settings.rb @@ -84,6 +84,7 @@ def self.save_yaml!(resource, contents) def self.destroy!(resource, keys) return if keys.blank? + settings_path = File.join("/", keys.collect(&:to_s)) resource.settings_changes.where("key LIKE ?", "#{settings_path}%").destroy_all end diff --git a/lib/vmdb/settings/activator.rb b/lib/vmdb/settings/activator.rb index 9e4effa36a9..6fb99891246 100644 --- a/lib/vmdb/settings/activator.rb +++ b/lib/vmdb/settings/activator.rb @@ -41,7 +41,7 @@ def session(data) def server(data) MiqServer.my_server&.config_activated(data) - rescue StandardError + rescue nil end end diff --git a/lib/vmdb/settings/database_source.rb b/lib/vmdb/settings/database_source.rb index 65e150820e1..90c7a76781a 100644 --- a/lib/vmdb/settings/database_source.rb +++ b/lib/vmdb/settings/database_source.rb @@ -24,6 +24,7 @@ def initialize(resource, class_name) def self.sources_for(resource) return [] if resource.nil? + resource_class = resource == :my_server ? "MiqServer" : resource.class.name hierarchy_index = SETTINGS_HIERARCHY.index(resource_class) SETTINGS_HIERARCHY[0..hierarchy_index].collect do |class_name| @@ -37,6 +38,7 @@ def self.parent_sources_for(resource) def resource return my_server if @resource_instance == :my_server + @resource_instance.reload if @resource_instance.persisted? @resource_instance end @@ -55,12 +57,13 @@ def load private - SETTINGS_HIERARCHY = %w(MiqRegion Zone MiqServer).freeze + SETTINGS_HIERARCHY = %w[MiqRegion Zone MiqServer].freeze def settings_holder resource = self.resource return nil if resource.nil? return resource if resource.class.name == settings_holder_class_name + resource.public_send(settings_holder_class_name.underscore) end diff --git a/lib/vmdb/settings/validator.rb b/lib/vmdb/settings/validator.rb index 92be5a21ed5..5a9c718c4d3 100644 --- a/lib/vmdb/settings/validator.rb +++ b/lib/vmdb/settings/validator.rb @@ -36,12 +36,12 @@ def webservices(data) keys = data.each_pair.to_a.transpose.first.to_set - if keys.include?(:mode) && !%w(invoke disable).include?(data.mode) + if keys.include?(:mode) && !%w[invoke disable].include?(data.mode) valid = false errors << [:mode, "webservices mode, \"#{data.mode}\", invalid. Should be one of: invoke or disable"] end - if keys.include?(:contactwith) && !%w(ipaddress hostname).include?(data.contactwith) + if keys.include?(:contactwith) && !%w[ipaddress hostname].include?(data.contactwith) valid = false errors << [:contactwith, "webservices contactwith, \"#{data.contactwith}\", invalid. 
Should be one of: ipaddress or hostname"] end @@ -118,23 +118,19 @@ def server(data) keys = data.each_pair.to_a.transpose.first.to_set - if keys.include?(:listening_port) - unless is_numeric?(data.listening_port) || data.listening_port.blank? - valid = false + if keys.include?(:listening_port) && !(is_numeric?(data.listening_port) || data.listening_port.blank?) + valid = false errors << [:listening_port, "listening_port, \"#{data.listening_port}\", invalid. Should be numeric"] - end end - if keys.include?(:session_store) && !%w(sql memory cache).include?(data.session_store) + if keys.include?(:session_store) && !%w[sql memory cache].include?(data.session_store) valid = false errors << [:session_store, "session_store, \"#{data.session_store}\", invalid. Should be one of \"sql\", \"memory\", \"cache\""] end - if keys.include?(:zone) - unless Zone.in_my_region.find_by(:name => data.zone) - valid = false + if keys.include?(:zone) && !Zone.in_my_region.find_by(:name => data.zone) + valid = false errors << [:zone, "zone, \"#{data.zone}\", invalid. Should be a valid Zone"] - end end if keys.include?(:rate_limiting) @@ -164,7 +160,7 @@ def smtp(data) keys = data.each_pair.to_a.transpose.first.to_set - if keys.include?(:authentication) && !%w(login plain none).include?(data.authentication) + if keys.include?(:authentication) && !%w[login plain none].include?(data.authentication) valid = false errors << [:mode, "authentication, \"#{data.mode}\", invalid. Should be one of: login, plain, or none"] end @@ -179,7 +175,7 @@ def smtp(data) errors << [:port, "\"#{data.port}\", invalid. Should be numeric"] end - if keys.include?(:from) && data.from !~ /^\A([\w\.\-\+]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z$/i + if keys.include?(:from) && data.from !~ /^\A([\w.\-+]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z$/i valid = false errors << [:from, "\"#{data.from}\", invalid. Should be a valid email address"] end diff --git a/lib/vmdb/settings_walker.rb b/lib/vmdb/settings_walker.rb index 6f452d60b54..d4386338fb8 100644 --- a/lib/vmdb/settings_walker.rb +++ b/lib/vmdb/settings_walker.rb @@ -51,7 +51,7 @@ def walk(settings, path = [], &block) # @param settings (see .walk) def walk_passwords(settings) walk(settings) do |key, value, _path, owner| - yield(key, value, owner) if PASSWORD_FIELDS.any? { |p| key.to_s.include?(p.to_s) } && !(value.is_a?(settings.class) || value.is_a?(Array)) + yield(key, value, owner) if PASSWORD_FIELDS.any? { |p| key.to_s.include?(p.to_s) } && !(value.kind_of?(settings.class) || value.kind_of?(Array)) end end diff --git a/lib/vmdb/util.rb b/lib/vmdb/util.rb index 7d0328c041f..03cfc9c8bde 100644 --- a/lib/vmdb/util.rb +++ b/lib/vmdb/util.rb @@ -27,13 +27,12 @@ def self.http_proxy_uri(proxy_config = :default) def self.compressed_log_patterns # From a log file create an array of strings containing the date patterns - log_dir = File.join(Rails.root, "log") + log_dir = Rails.root.join("log").to_s gz_pattern = File.join(log_dir, "*[0-9][0-9].gz") - Dir.glob(gz_pattern).inject([]) do |arr, f| - f.match(/.+-(\d+\.gz)/) + Dir.glob(gz_pattern).each_with_object([]) do |f, arr| + f =~ /.+-(\d+\.gz)/ name = File.join(log_dir, "*#{$1}") arr << name unless $1.nil? 
|| arr.include?(name) - arr end end @@ -43,11 +42,12 @@ def self.get_evm_log_for_date(pattern) files.find { |f| f.match(/\/evm\.log/) } end - LOG_TIMESTAMP_REGEX = /\[(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6})\s#/.freeze + LOG_TIMESTAMP_REGEX = /\[(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6})\s#/ def self.log_timestamp(str) return nil unless str - t = Time.parse(str) + + t = Time.parse(str) Time.utc(t.year, t.month, t.day, t.hour, t.min, t.sec, 0) end @@ -83,6 +83,7 @@ def self.log_duration_gz(filename) gz.each_line do |line| line_count += 1 next unless line =~ LOG_TIMESTAMP_REGEX + start_time_str ||= $1 end_time_str = $1 end @@ -98,7 +99,7 @@ def self.log_duration_gz(filename) end rescue Exception => e _log.error(e.to_s) - return [] + [] end end @@ -118,11 +119,11 @@ def self.zip_logs(zip_filename, dirs, userid = "system") dirs.each do |dir| dir = Rails.root.join(dir) unless Pathname.new(dir).absolute? Dir.glob(dir).each do |file| - begin - entry, _mtime = add_zip_entry(zip, file, zfile) - rescue => e - _log.error("Failed to add file: [#{entry}]. Error information: #{e.message}") - end + + entry, _mtime = add_zip_entry(zip, file, zfile) + rescue => e + _log.error("Failed to add file: [#{entry}]. Error information: #{e.message}") + end end zip.close @@ -154,8 +155,8 @@ def self.add_zip_entry(zip, file_path, zfile) def self.zip_entry_from_path(path) rails_root_directories = Rails.root.to_s.split("/") within_rails_root = path.split("/")[0, rails_root_directories.length] == rails_root_directories - entry = within_rails_root ? Pathname.new(path).relative_path_from(Rails.root).to_s : "ROOT#{path}" - entry + within_rails_root ? Pathname.new(path).relative_path_from(Rails.root).to_s : "ROOT#{path}" + end private_class_method :zip_entry_from_path diff --git a/lib/workers/bin/run_single_worker.rb b/lib/workers/bin/run_single_worker.rb index 8abf72067dc..d13b2233060 100755 --- a/lib/workers/bin/run_single_worker.rb +++ b/lib/workers/bin/run_single_worker.rb @@ -74,7 +74,7 @@ def all_role_names opt_parser.abort(opt_parser.help) unless worker_class unless MiqWorkerType.find_by(:worker_type => worker_class) - STDERR.puts "ERR: `#{worker_class}` WORKER CLASS NOT FOUND! Please run with `-l` to see possible worker class names." + warn "ERR: `#{worker_class}` WORKER CLASS NOT FOUND! Please run with `-l` to see possible worker class names." exit 1 end @@ -90,7 +90,7 @@ def all_role_names worker_class = worker_class.constantize unless worker_class.has_required_role? - STDERR.puts "ERR: Server roles are not sufficient for `#{worker_class}` worker." + warn "ERR: Server roles are not sufficient for `#{worker_class}` worker." exit 1 end @@ -130,7 +130,7 @@ def all_role_names raise rescue Exception => err MiqWorker::Runner.safe_log(worker, "An unhandled error has occurred: #{err}\n#{err.backtrace.join("\n")}", :error) - STDERR.puts("ERROR: An unhandled error has occurred: #{err}. See log for details.") rescue nil + warn("ERROR: An unhandled error has occurred: #{err}. 
See log for details.") rescue nil exit 1 ensure FileUtils.rm_f(worker.heartbeat_file) diff --git a/lib/workers/evm_server.rb b/lib/workers/evm_server.rb index ad25a0bf8fc..2125317e963 100644 --- a/lib/workers/evm_server.rb +++ b/lib/workers/evm_server.rb @@ -80,7 +80,7 @@ def refresh_servers_to_monitor end end - def self.start(*args) + def self.start(*_args) new.start end @@ -165,16 +165,16 @@ def save_local_network_info ipaddr, hostname, mac_address = MiqServer.get_network_information - if ipaddr =~ Regexp.union(Resolv::IPv4::Regex, Resolv::IPv6::Regex).freeze + if ipaddr&.match?(Regexp.union(Resolv::IPv4::Regex, Resolv::IPv6::Regex).freeze) server_hash[:ipaddress] = config_hash[:host] = ipaddr end if hostname.present? && hostname.hostname? - hostname = nil if hostname =~ /.*localhost.*/ + hostname = nil if /.*localhost.*/.match?(hostname) server_hash[:hostname] = config_hash[:hostname] = hostname end - unless mac_address.blank? + if mac_address.present? server_hash[:mac_address] = mac_address end @@ -210,9 +210,9 @@ def reset_server_runtime_info end def log_server_info - _log.info("Server IP Address: #{@current_server.ipaddress}") unless @current_server.ipaddress.blank? - _log.info("Server Hostname: #{@current_server.hostname}") unless @current_server.hostname.blank? - _log.info("Server MAC Address: #{@current_server.mac_address}") unless @current_server.mac_address.blank? + _log.info("Server IP Address: #{@current_server.ipaddress}") if @current_server.ipaddress.present? + _log.info("Server Hostname: #{@current_server.hostname}") if @current_server.hostname.present? + _log.info("Server MAC Address: #{@current_server.mac_address}") if @current_server.mac_address.present? _log.info("Server GUID: #{MiqServer.my_guid}") _log.info("Server Zone: #{MiqServer.my_zone}") _log.info("Server Role: #{MiqServer.my_role}") diff --git a/product/script/export_policy_profiles.rb b/product/script/export_policy_profiles.rb index a7616d17c23..ba655d8416d 100644 --- a/product/script/export_policy_profiles.rb +++ b/product/script/export_policy_profiles.rb @@ -13,16 +13,16 @@ dir = Dir.pwd MiqPolicySet.all.each do |ps| - begin - contents = ps.export_to_yaml if ext == "yaml" - contents = ps.export_to_xml if ext == "xml" - fname = File.join(dir, "policy_profile#{ps.id}.#{ext}") - puts "Creating #{fname}" - f = File.new(fname, "w") - f << contents - f.close - rescue ActiveRecord::RecordNotFound - next - end + contents = ps.export_to_yaml if ext == "yaml" + contents = ps.export_to_xml if ext == "xml" + + fname = File.join(dir, "policy_profile#{ps.id}.#{ext}") + puts "Creating #{fname}" + f = File.new(fname, "w") + f << contents + f.close +rescue ActiveRecord::RecordNotFound + next + end diff --git a/product/script/reports_to_multi_yamls.rb b/product/script/reports_to_multi_yamls.rb index abaf73ad7b3..0d2afdc62ae 100644 --- a/product/script/reports_to_multi_yamls.rb +++ b/product/script/reports_to_multi_yamls.rb @@ -17,7 +17,7 @@ reports.each do |r| ctr += 1 name = File.join(Dir.pwd, "#{ctr}_#{r["name"]}.yaml") - File.open(name, "w") { |f| f.write(YAML.dump(r)) } + File.write(name, YAML.dump(r)) puts "Created file '#{name}'" end diff --git a/spec/database/database_schema_spec.rb b/spec/database/database_schema_spec.rb index 30570bb6c90..112a338ff39 100644 --- a/spec/database/database_schema_spec.rb +++ b/spec/database/database_schema_spec.rb @@ -43,6 +43,7 @@ ActiveRecord::Base.connection.select_all(query).each do |col| column_whitelist = whitelist[col["table_name"]] next if column_whitelist && 
column_whitelist.include?(col["column_name"]) + message << "Column #{col["column_name"]} in table #{col["table_name"]} is either named improperly (_id is reserved for actual id columns) or needs to be of type bigint\n" end raise message unless message.empty? diff --git a/spec/factories/availability_zone.rb b/spec/factories/availability_zone.rb index 987bf61db18..1f45440c559 100644 --- a/spec/factories/availability_zone.rb +++ b/spec/factories/availability_zone.rb @@ -1,6 +1,6 @@ FactoryBot.define do factory :availability_zone do - sequence(:name) { |n| "availability_zone_#{seq_padded_for_sorting(n)}" } + sequence(:name) { |n| "availability_zone_#{seq_padded_for_sorting(n)}" } end factory :availability_zone_amazon, :parent => :availability_zone, :class => "ManageIQ::Providers::Amazon::CloudManager::AvailabilityZone" diff --git a/spec/factories/chargeback_rate.rb b/spec/factories/chargeback_rate.rb index 1e93129b08d..a8744e18443 100644 --- a/spec/factories/chargeback_rate.rb +++ b/spec/factories/chargeback_rate.rb @@ -14,7 +14,8 @@ evaluator.detail_params.each do |factory_name, chargeback_rate_params| next unless chargeback_rate_params next unless chargeback_rate_params[:tiers].kind_of?(Array) - params_hash = { :tiers_params => chargeback_rate_params[:tiers], :per_time => evaluator.per_time } + + params_hash = {:tiers_params => chargeback_rate_params[:tiers], :per_time => evaluator.per_time} params_hash.merge!(chargeback_rate_params[:detail]) if chargeback_rate_params[:detail] params = [factory_name, :tiers, params_hash] @@ -31,7 +32,7 @@ trait :with_compute_details do after(:create) do |chargeback_rate, evaluator| - %i( + %i[ chargeback_rate_detail_cpu_used chargeback_rate_detail_cpu_allocated chargeback_rate_detail_cpu_cores_used @@ -41,10 +42,10 @@ chargeback_rate_detail_memory_allocated chargeback_rate_detail_memory_used chargeback_rate_detail_net_io_used - ).each do |factory_name| + ].each do |factory_name| chargeback_rate.chargeback_rate_details << FactoryBot.create(factory_name, - :tiers_with_three_intervals, - :per_time => evaluator.per_time) + :tiers_with_three_intervals, + :per_time => evaluator.per_time) end end end @@ -53,15 +54,15 @@ rate_type { 'Storage' } after(:create) do |chargeback_rate, evaluator| - %i( + %i[ chargeback_rate_detail_storage_used chargeback_rate_detail_storage_allocated chargeback_rate_detail_fixed_storage_cost chargeback_rate_detail_fixed_storage_cost - ).each do |factory_name| + ].each do |factory_name| chargeback_rate.chargeback_rate_details << FactoryBot.create(factory_name, - :tiers_with_three_intervals, - :per_time => evaluator.per_time) + :tiers_with_three_intervals, + :per_time => evaluator.per_time) end end end diff --git a/spec/factories/chargeback_rate_detail.rb b/spec/factories/chargeback_rate_detail.rb index 3e93b2bbc41..2191a385b6b 100644 --- a/spec/factories/chargeback_rate_detail.rb +++ b/spec/factories/chargeback_rate_detail.rb @@ -11,7 +11,7 @@ after(:create) do |chargeback_rate_detail, evaluator| if evaluator.tiers_params evaluator.tiers_params.each do |tier| - chargeback_rate_detail.chargeback_tiers << FactoryBot.create(*[:chargeback_tier, tier]) + chargeback_rate_detail.chargeback_tiers << FactoryBot.create(:chargeback_tier, tier) end else chargeback_rate_detail.chargeback_tiers << FactoryBot.create(:chargeback_tier) @@ -51,23 +51,23 @@ end factory :chargeback_rate_detail_cpu_used, :parent => :chargeback_rate_detail do - per_unit { "megahertz" } + per_unit { "megahertz" } chargeable_field { FactoryBot.build(:chargeable_field_cpu_used) } 
end factory :chargeback_rate_detail_cpu_cores_used, :parent => :chargeback_rate_detail do - per_unit { "cores" } + per_unit { "cores" } chargeable_field { FactoryBot.build(:chargeable_field_cpu_cores_used) } end factory :chargeback_rate_detail_cpu_cores_allocated, :parent => :chargeback_rate_detail do - per_unit { "cores" } + per_unit { "cores" } chargeable_field { FactoryBot.build(:chargeable_field_cpu_cores_allocated) } end factory :chargeback_rate_detail_cpu_allocated, :traits => [:daily], :parent => :chargeback_rate_detail do - per_unit { "cpu" } + per_unit { "cpu" } chargeable_field { FactoryBot.build(:chargeable_field_cpu_allocated) } end diff --git a/spec/factories/chargeback_rate_detail_measure.rb b/spec/factories/chargeback_rate_detail_measure.rb index 0f451fdc1d6..245c49cd5d5 100644 --- a/spec/factories/chargeback_rate_detail_measure.rb +++ b/spec/factories/chargeback_rate_detail_measure.rb @@ -2,8 +2,8 @@ factory :chargeback_rate_detail_measure do step { "1024" } name { "Bytes Units" } - units_display { %w(B KB MB GB TB) } - units { %w(bytes kilobytes megabytes gigabytes terabytes) } + units_display { %w[B KB MB GB TB] } + units { %w[bytes kilobytes megabytes gigabytes terabytes] } end factory :chargeback_measure_bytes, :parent => :chargeback_rate_detail_measure @@ -11,14 +11,14 @@ factory :chargeback_measure_hz, :parent => :chargeback_rate_detail_measure do name { 'Hz Units' } step { '1000' } - units_display { %w(Hz KHz MHz GHz THz) } - units { %w(hertz kilohertz megahertz gigahertz teraherts) } + units_display { %w[Hz KHz MHz GHz THz] } + units { %w[hertz kilohertz megahertz gigahertz teraherts] } end factory :chargeback_measure_bps, :parent => :chargeback_rate_detail_measure do name { 'Bytes per Second Units' } step { '1000' } - units_display { %w(Bps KBps MBps GBps) } - units { %w(bps kbps mbps gbps) } + units_display { %w[Bps KBps MBps GBps] } + units { %w[bps kbps mbps gbps] } end end diff --git a/spec/factories/cloud_tenants.rb b/spec/factories/cloud_tenants.rb index 10e2a28a8dd..3cdcb2795fd 100644 --- a/spec/factories/cloud_tenants.rb +++ b/spec/factories/cloud_tenants.rb @@ -6,9 +6,9 @@ end factory :cloud_tenant_openstack, - :class => "ManageIQ::Providers::Openstack::CloudManager::CloudTenant", + :class => "ManageIQ::Providers::Openstack::CloudManager::CloudTenant", :parent => :cloud_tenant factory :cloud_tenant_nsxt, - :class => "ManageIQ::Providers::Nsxt::CloudManager::CloudTenant", + :class => "ManageIQ::Providers::Nsxt::CloudManager::CloudTenant", :parent => :cloud_tenant end diff --git a/spec/factories/compliance_detail.rb b/spec/factories/compliance_detail.rb index 970d40e6e86..cc42da430e3 100644 --- a/spec/factories/compliance_detail.rb +++ b/spec/factories/compliance_detail.rb @@ -1,7 +1,7 @@ FactoryBot.define do factory :compliance_detail do sequence(:id) { |n| 10_000_000 + n } - created_on { DateTime .current } + created_on { DateTime.current } updated_on { DateTime.current } miq_policy_desc { 'Policy description' } miq_policy_result { true } diff --git a/spec/factories/configuration_script.rb b/spec/factories/configuration_script.rb index ba874879175..c0204bca334 100644 --- a/spec/factories/configuration_script.rb +++ b/spec/factories/configuration_script.rb @@ -2,7 +2,7 @@ factory :configuration_script_base do sequence(:name) { |n| "Configuration_script_base_#{seq_padded_for_sorting(n)}" } sequence(:manager_ref) { SecureRandom.random_number(100) } - variables { { :instance_ids => ['i-3434'] } } + variables { {:instance_ids => ['i-3434']} } end factory 
:configuration_script_payload, :class => "ConfigurationScriptPayload", :parent => :configuration_script_base diff --git a/spec/factories/customization_spec_factory.rb b/spec/factories/customization_spec_factory.rb index eda1afc9f27..3790bcb1706 100644 --- a/spec/factories/customization_spec_factory.rb +++ b/spec/factories/customization_spec_factory.rb @@ -3,6 +3,6 @@ sequence(:name) { |n| "customization_spec_#{seq_padded_for_sorting(n)}" } typ { "Windows" } sequence(:description) { |n| "Customization spec #{seq_padded_for_sorting(n)}" } - spec { { :options => {} } } + spec { {:options => {}} } end end diff --git a/spec/factories/dialog.rb b/spec/factories/dialog.rb index 96e993f853b..ffe67141f40 100644 --- a/spec/factories/dialog.rb +++ b/spec/factories/dialog.rb @@ -9,7 +9,7 @@ # skip validate_children callback for general dialog testing to_create do |instance| class << instance - def validate_children; true; end + def validate_children = true end instance.save! end diff --git a/spec/factories/dialog_group.rb b/spec/factories/dialog_group.rb index 2f13fc56515..1e9f476355e 100644 --- a/spec/factories/dialog_group.rb +++ b/spec/factories/dialog_group.rb @@ -9,7 +9,7 @@ # skip validate_children callback for general dialog testing to_create do |instance| class << instance - def validate_children; true; end + def validate_children = true end instance.save! end diff --git a/spec/factories/dialog_tab.rb b/spec/factories/dialog_tab.rb index 1c8a4839f71..fc46349a355 100644 --- a/spec/factories/dialog_tab.rb +++ b/spec/factories/dialog_tab.rb @@ -9,7 +9,7 @@ # skip validate_children callback for general dialog testing to_create do |instance| class << instance - def validate_children; true; end + def validate_children = true end instance.save! end diff --git a/spec/factories/entitlement.rb b/spec/factories/entitlement.rb index d457d51e375..39e53dcf50b 100644 --- a/spec/factories/entitlement.rb +++ b/spec/factories/entitlement.rb @@ -8,8 +8,8 @@ after :build do |entitlement, e| if e.role || e.features entitlement.miq_user_role = FactoryBot.create(:miq_user_role, - :features => e.features, - :role => e.role) + :features => e.features, + :role => e.role) end end end diff --git a/spec/factories/ext_management_system.rb b/spec/factories/ext_management_system.rb index abe8eccd194..3ddde34cffa 100644 --- a/spec/factories/ext_management_system.rb +++ b/spec/factories/ext_management_system.rb @@ -241,7 +241,7 @@ factory :ems_openstack_infra_with_authentication, :parent => :ems_openstack_infra do - authtype { %w(default amqp) } + authtype { %w[default amqp] } end factory :ems_vmware_cloud, @@ -305,7 +305,7 @@ factory :ems_openstack_with_authentication, :parent => :ems_openstack do - authtype { %w(default amqp) } + authtype { %w[default amqp] } end factory :ems_openstack_network, @@ -318,7 +318,6 @@ :class => "ManageIQ::Providers::Nuage::NetworkManager", :parent => :ems_network - factory :ems_nsxt_network, :aliases => ["manageiq/providers/nsxt/network_manager"], :class => "ManageIQ::Providers::Nsxt::NetworkManager", @@ -382,7 +381,7 @@ trait(:configuration_workflow) do after(:create) do |x| - type = (x.type.split("::")[0..2] + %w(AutomationManager ConfigurationWorkflow)).join("::") + type = (x.type.split("::")[0..2] + %w[AutomationManager ConfigurationWorkflow]).join("::") x.configuration_scripts << FactoryBot.create(:configuration_workflow, :type => type) end end @@ -397,9 +396,9 @@ :aliases => ["manageiq/providers/embedded_ansible/automation_manager"], :class => 
"ManageIQ::Providers::EmbeddedAnsible::AutomationManager", :parent => :embedded_automation_manager do - provider { + provider do raise "DO NOT USE! Use :provider_embedded_ansible and reference the automation_manager from that record" - } + end end # Leaf classes for provisioning_manager diff --git a/spec/factories/filesystem.rb b/spec/factories/filesystem.rb index 8ab29a14671..09073d50bcf 100644 --- a/spec/factories/filesystem.rb +++ b/spec/factories/filesystem.rb @@ -1,6 +1,6 @@ FactoryBot.define do factory :filesystem do - sequence(:name) { |n| "filesystem_#{seq_padded_for_sorting(n)}" } + sequence(:name) { |n| "filesystem_#{seq_padded_for_sorting(n)}" } size { 200 } end diff --git a/spec/factories/generic_object_definition.rb b/spec/factories/generic_object_definition.rb index e69f2b415cd..51c82499c1b 100644 --- a/spec/factories/generic_object_definition.rb +++ b/spec/factories/generic_object_definition.rb @@ -5,9 +5,9 @@ trait :with_methods_attributes_associations do properties do { - :methods => %w(add_vms remove_vms), - :attributes => { 'powered_on' => 'boolean', 'widget' => 'string' }, - :associations => { 'vms' => 'Vm', 'services' => 'Service' } + :methods => %w[add_vms remove_vms], + :attributes => {'powered_on' => 'boolean', 'widget' => 'string'}, + :associations => {'vms' => 'Vm', 'services' => 'Service'} } end end diff --git a/spec/factories/host.rb b/spec/factories/host.rb index dca3ecafd82..10f760bf57c 100644 --- a/spec/factories/host.rb +++ b/spec/factories/host.rb @@ -40,7 +40,7 @@ end # Type specific subclasses - factory(:host_vmware, :parent => :host, :class => "ManageIQ::Providers::Vmware::InfraManager::Host") do + factory(:host_vmware, :parent => :host, :class => "ManageIQ::Providers::Vmware::InfraManager::Host") do ems_ref_type { "HostSystem" } end factory(:host_vmware_esx, :parent => :host_vmware, :class => "ManageIQ::Providers::Vmware::InfraManager::HostEsx") do @@ -60,7 +60,7 @@ vmm_vendor { "unknown" } ems_ref { "openstack-perf-host" } uid_ems { "openstack-perf-host-nova-instance" } - association :ems_cluster, factory: :ems_cluster_openstack + association :ems_cluster, :factory => :ems_cluster_openstack end factory :host_openstack_infra_compute, :parent => :host_openstack_infra, diff --git a/spec/factories/miq_ae_class.rb b/spec/factories/miq_ae_class.rb index 6dddbd240d5..be3edf3c098 100644 --- a/spec/factories/miq_ae_class.rb +++ b/spec/factories/miq_ae_class.rb @@ -16,15 +16,15 @@ evaluator.ae_instances.each do |name, values| FactoryBot.create(:miq_ae_instance, - :class_id => aeclass.id, - :name => name, - 'values' => values) + :class_id => aeclass.id, + :name => name, + 'values' => values) end evaluator.ae_methods.each do |name, aemethod| FactoryBot.create(:miq_ae_method, - {:class_id => aeclass.id, - :name => name}.merge(aemethod)) + {:class_id => aeclass.id, + :name => name}.merge(aemethod)) end end end diff --git a/spec/factories/miq_ae_domain.rb b/spec/factories/miq_ae_domain.rb index 8ad8e435b61..6570d33d838 100644 --- a/spec/factories/miq_ae_domain.rb +++ b/spec/factories/miq_ae_domain.rb @@ -77,12 +77,12 @@ after :create do |aedomain, evaluator| args = {} - args[:name] = evaluator.ae_class if evaluator.respond_to?('ae_class') - args[:namespace] = "#{aedomain.name}/#{evaluator.ae_namespace}" if evaluator.respond_to?('ae_namespace') - items = %w(ae_fields ae_instances ae_methods) + args[:name] = evaluator.ae_class if evaluator.respond_to?(:ae_class) + args[:namespace] = "#{aedomain.name}/#{evaluator.ae_namespace}" if evaluator.respond_to?(:ae_namespace) + 
items = %w[ae_fields ae_instances ae_methods] items.each { |f| args[f] = evaluator.respond_to?(f) ? evaluator.send(f) : {} } - FactoryBot.create(:miq_ae_class, :with_instances_and_methods, args) if evaluator.respond_to?('ae_class') + FactoryBot.create(:miq_ae_class, :with_instances_and_methods, args) if evaluator.respond_to?(:ae_class) end end end diff --git a/spec/factories/miq_ae_instance.rb b/spec/factories/miq_ae_instance.rb index 3820b0c16b5..543d5f7e790 100644 --- a/spec/factories/miq_ae_instance.rb +++ b/spec/factories/miq_ae_instance.rb @@ -14,6 +14,7 @@ unless evaluator.values.empty? aeinstance.ae_values << aeinstance.ae_class.ae_fields.collect do |field| next unless evaluator.values.key?(field.name) + FactoryBot.build(:miq_ae_value, {:field_id => field.id}.merge(evaluator.values[field.name])) end end diff --git a/spec/factories/miq_alert.rb b/spec/factories/miq_alert.rb index 87bd2d723ab..779cc3ff2fd 100644 --- a/spec/factories/miq_alert.rb +++ b/spec/factories/miq_alert.rb @@ -5,12 +5,12 @@ end factory :miq_alert_vm, :parent => :miq_alert do - options { { :notifications => {} } } + options { {:notifications => {}} } db { "Vm" } end factory :miq_alert_host, :parent => :miq_alert do - options { { :notifications => {} } } + options { {:notifications => {}} } db { "Host" } end end diff --git a/spec/factories/miq_group.rb b/spec/factories/miq_group.rb index 6e978a8fd96..e6424453937 100644 --- a/spec/factories/miq_group.rb +++ b/spec/factories/miq_group.rb @@ -18,10 +18,10 @@ after :build do |g, e| if e.role || e.features || e.miq_user_role_id || e.miq_user_role g.entitlement = FactoryBot.create(:entitlement, - :features => e.features, - :role => e.role, - :miq_user_role_id => e.miq_user_role_id, - :miq_user_role => e.miq_user_role) + :features => e.features, + :role => e.role, + :miq_user_role_id => e.miq_user_role_id, + :miq_user_role => e.miq_user_role) end end diff --git a/spec/factories/miq_report.rb b/spec/factories/miq_report.rb index 8e97f5d6617..1dc25a1fe2c 100644 --- a/spec/factories/miq_report.rb +++ b/spec/factories/miq_report.rb @@ -13,9 +13,9 @@ sequence(:name) { |n| "Files #{seq_padded_for_sorting(n)}" } db { 'Filesystem' } title { 'Files' } - cols { %w(name base_name file_version size contents_available permissions updated_on mtime) } - col_order { %w(name base_name file_version size contents_available permissions updated_on mtime) } - headers { %w(Name File\ Name File\ Version Size Contents\ Available Permissions Collected\ On Last\ Modified) } + cols { %w[name base_name file_version size contents_available permissions updated_on mtime] } + col_order { %w[name base_name file_version size contents_available permissions updated_on mtime] } + headers { %w[Name File\ Name File\ Version Size Contents\ Available Permissions Collected\ On Last\ Modified] } sortby { ["name"] } order { "Ascending" } end diff --git a/spec/factories/miq_request_task.rb b/spec/factories/miq_request_task.rb index be9dbf81987..cf6f4df4492 100644 --- a/spec/factories/miq_request_task.rb +++ b/spec/factories/miq_request_task.rb @@ -44,10 +44,10 @@ end # Retire Tasks - factory :service_retire_task, :parent => :miq_retire_task, :class => "ServiceRetireTask" do + factory :service_retire_task, :parent => :miq_retire_task, :class => "ServiceRetireTask" do state { 'pending' } end - factory :vm_retire_task, :parent => :miq_retire_task, :class => "VmRetireTask" do + factory :vm_retire_task, :parent => :miq_retire_task, :class => "VmRetireTask" do state { 'pending' } end factory 
:orchestration_stack_retire_task, :parent => :miq_retire_task, :class => "OrchestrationStackRetireTask" do diff --git a/spec/factories/miq_request_workflow.rb b/spec/factories/miq_request_workflow.rb index 2cca394e129..d959fa5dbac 100644 --- a/spec/factories/miq_request_workflow.rb +++ b/spec/factories/miq_request_workflow.rb @@ -20,8 +20,8 @@ factory :miq_provision_virt_workflow, :class => "MiqProvisionVirtWorkflow", :parent => :miq_provision_workflow factory :miq_provision_virt_workflow_vmware, - :class => "ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow", - :parent => :miq_provision_virt_workflow + :class => "ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow", + :parent => :miq_provision_virt_workflow factory :miq_provision_orch_workflow, :class => "MiqProvisionOrchWorkflow", :parent => :miq_provision_virt_workflow end diff --git a/spec/factories/miq_schedule.rb b/spec/factories/miq_schedule.rb index 32af6a5a504..6a01a16e840 100644 --- a/spec/factories/miq_schedule.rb +++ b/spec/factories/miq_schedule.rb @@ -20,7 +20,7 @@ factory :miq_automate_schedule, :class => :MiqSchedule do run_at = {:start_time => "2010-07-08 04:10:00 Z", :interval => {:unit => "daily", :value => "1"}} sched_action = {:method => "automation_request"} - filter = {:uri_parts => {:instance => 'test', :message => 'create'}, :ui => { :ui_attrs => [], :ui_object => {} }, :parameters => {'request' => 'test_request', 'key1' => 'value1'}} + filter = {:uri_parts => {:instance => 'test', :message => 'create'}, :ui => {:ui_attrs => [], :ui_object => {}}, :parameters => {'request' => 'test_request', 'key1' => 'value1'}} sequence(:name) { |n| "automate_schedule_#{seq_padded_for_sorting(n)}" } description { "test_automation" } resource_type { "AutomationRequest" } diff --git a/spec/factories/miq_server.rb b/spec/factories/miq_server.rb index bf75df0fcae..26c91adc196 100644 --- a/spec/factories/miq_server.rb +++ b/spec/factories/miq_server.rb @@ -9,7 +9,7 @@ version { '9.9.9.9' } factory :miq_server_in_default_zone do - zone { FactoryBot.build(:zone, :name => "default") } + zone { FactoryBot.build(:zone, :name => "default") } end end end diff --git a/spec/factories/miq_user_role.rb b/spec/factories/miq_user_role.rb index a31a4280e5c..39d84e8ca84 100644 --- a/spec/factories/miq_user_role.rb +++ b/spec/factories/miq_user_role.rb @@ -21,11 +21,9 @@ user.read_only = seeded_role[:read_only] user.settings = seeded_role[:settings] end - if e_features.blank? - # admins now using a feature instead of a roll - if evaluator.role == "super_administrator" - e_features = MiqProductFeature::SUPER_ADMIN_FEATURE - end +# admins now using a feature instead of a roll + if e_features.blank? 
&& (evaluator.role == "super_administrator") + e_features = MiqProductFeature::SUPER_ADMIN_FEATURE end end diff --git a/spec/factories/network_service.rb b/spec/factories/network_service.rb index 9f06b7b2aaf..eeff6520dd0 100644 --- a/spec/factories/network_service.rb +++ b/spec/factories/network_service.rb @@ -1,8 +1,8 @@ FactoryBot.define do factory :network_service do - sequence(:name) {|n| "network_service_#{seq_padded_for_sorting(n)}"} - sequence(:description) {|n| "network_service_description_#{seq_padded_for_sorting(n)}"} - sequence(:ems_ref) {|n| "ems_ref_#{seq_padded_for_sorting(n)}"} + sequence(:name) { |n| "network_service_#{seq_padded_for_sorting(n)}" } + sequence(:description) { |n| "network_service_description_#{seq_padded_for_sorting(n)}" } + sequence(:ems_ref) { |n| "ems_ref_#{seq_padded_for_sorting(n)}" } end factory :network_service_nsxt, diff --git a/spec/factories/network_service_entry.rb b/spec/factories/network_service_entry.rb index 82798b11786..0c3b5294830 100644 --- a/spec/factories/network_service_entry.rb +++ b/spec/factories/network_service_entry.rb @@ -1,8 +1,8 @@ FactoryBot.define do factory :network_service_entry do - sequence(:name) {|n| "network_service_entry_#{seq_padded_for_sorting(n)}"} - sequence(:description) {|n| "network_service_entry_description_#{seq_padded_for_sorting(n)}"} - sequence(:ems_ref) {|n| "ems_ref_#{seq_padded_for_sorting(n)}"} + sequence(:name) { |n| "network_service_entry_#{seq_padded_for_sorting(n)}" } + sequence(:description) { |n| "network_service_entry_description_#{seq_padded_for_sorting(n)}" } + sequence(:ems_ref) { |n| "ems_ref_#{seq_padded_for_sorting(n)}" } end factory :network_service_entry_nsxt, diff --git a/spec/factories/provider.rb b/spec/factories/provider.rb index 92a5b6fd208..d71a71ff108 100644 --- a/spec/factories/provider.rb +++ b/spec/factories/provider.rb @@ -9,8 +9,8 @@ after(:build) do |provider| provider.authentications << FactoryBot.build(:authentication, - :userid => "admin", - :password => "smartvm") + :userid => "admin", + :password => "smartvm") end end diff --git a/spec/factories/provider_tag_mapping.rb b/spec/factories/provider_tag_mapping.rb index 98687272287..29f49e494ee 100644 --- a/spec/factories/provider_tag_mapping.rb +++ b/spec/factories/provider_tag_mapping.rb @@ -20,10 +20,10 @@ tag do category = FactoryBot.create(:classification, - :name => category_name, - :description => category_description, - :single_value => true, - :read_only => true) + :name => category_name, + :description => category_description, + :single_value => true, + :read_only => true) category.tag end end diff --git a/spec/factories/scan_item.rb b/spec/factories/scan_item.rb index 3c0aefd6015..3a92af74c6f 100644 --- a/spec/factories/scan_item.rb +++ b/spec/factories/scan_item.rb @@ -2,7 +2,7 @@ factory :scan_item factory :scan_item_category_default, :parent => :scan_item do - attributes do + attributes do { "item_type" => "category", "definition" => { @@ -13,7 +13,7 @@ end factory :scan_item_category_test, :parent => :scan_item do - attributes do + attributes do { "item_type" => "category", "definition" => { @@ -24,6 +24,6 @@ end factory :scan_item_file, :parent => :scan_item do - attributes { {"item_type" => "file", "definition" => {}} } + attributes { {"item_type" => "file", "definition" => {}} } end end diff --git a/spec/factories/security_policy.rb b/spec/factories/security_policy.rb index bbcfb8e29d8..2e4d71a1956 100644 --- a/spec/factories/security_policy.rb +++ b/spec/factories/security_policy.rb @@ -1,8 +1,8 @@ 
FactoryBot.define do factory :security_policy do - sequence(:name) {|n| "security_policy_#{seq_padded_for_sorting(n)}"} - sequence(:description) {|n| "security_policy_description_#{seq_padded_for_sorting(n)}"} - sequence(:ems_ref) {|n| "ems_ref_#{seq_padded_for_sorting(n)}"} + sequence(:name) { |n| "security_policy_#{seq_padded_for_sorting(n)}" } + sequence(:description) { |n| "security_policy_description_#{seq_padded_for_sorting(n)}" } + sequence(:ems_ref) { |n| "ems_ref_#{seq_padded_for_sorting(n)}" } end factory :security_policy_nsxt, diff --git a/spec/factories/security_policy_rule.rb b/spec/factories/security_policy_rule.rb index 020491b9b69..680508af8d2 100644 --- a/spec/factories/security_policy_rule.rb +++ b/spec/factories/security_policy_rule.rb @@ -1,8 +1,8 @@ FactoryBot.define do factory :security_policy_rule do - sequence(:name) {|n| "security_policy_rule_#{seq_padded_for_sorting(n)}"} - sequence(:description) {|n| "security_policy_rule_description_#{seq_padded_for_sorting(n)}"} - sequence(:ems_ref) {|n| "ems_ref_#{seq_padded_for_sorting(n)}"} + sequence(:name) { |n| "security_policy_rule_#{seq_padded_for_sorting(n)}" } + sequence(:description) { |n| "security_policy_rule_description_#{seq_padded_for_sorting(n)}" } + sequence(:ems_ref) { |n| "ems_ref_#{seq_padded_for_sorting(n)}" } end factory :security_policy_rule_nsxt, diff --git a/spec/factories/server_role.rb b/spec/factories/server_role.rb index d6cdba93d95..539bcb2516f 100644 --- a/spec/factories/server_role.rb +++ b/spec/factories/server_role.rb @@ -1,5 +1,5 @@ FactoryBot.define do factory :server_role do - sequence(:name) { |i| "role#{i}"} + sequence(:name) { |i| "role#{i}" } end end diff --git a/spec/factories/service_template_catalog.rb b/spec/factories/service_template_catalog.rb index 8c1cfc3721e..c60ca1825b5 100644 --- a/spec/factories/service_template_catalog.rb +++ b/spec/factories/service_template_catalog.rb @@ -1,5 +1,5 @@ FactoryBot.define do factory :service_template_catalog do - sequence(:name) { |num| "service_template_catalog_#{num}" } + sequence(:name) { |num| "service_template_catalog_#{num}" } end end diff --git a/spec/factories/storage_file.rb b/spec/factories/storage_file.rb index fd946bae1f0..45ff8e9ded9 100644 --- a/spec/factories/storage_file.rb +++ b/spec/factories/storage_file.rb @@ -1,6 +1,6 @@ FactoryBot.define do factory :storage_file do - sequence(:name) { |n| "path/to/file#{seq_padded_for_sorting(n)}/file#{n}.log" } - vm_or_template_id { 1000 } + sequence(:name) { |n| "path/to/file#{seq_padded_for_sorting(n)}/file#{n}.log" } + vm_or_template_id { 1000 } end end diff --git a/spec/factories/user.rb b/spec/factories/user.rb index c3deb54025f..2841315199b 100644 --- a/spec/factories/user.rb +++ b/spec/factories/user.rb @@ -24,14 +24,16 @@ end trait :with_miq_edit_features do - features { %w( - miq_ae_class_edit + features do + %w[ + miq_ae_class_edit miq_ae_domain_edit miq_ae_class_copy miq_ae_instance_copy miq_ae_method_copy miq_ae_namespace_edit - ) } + ] + end end end diff --git a/spec/factories/vim_performance_state.rb b/spec/factories/vim_performance_state.rb index db02222af7c..14d18fe2cdc 100644 --- a/spec/factories/vim_performance_state.rb +++ b/spec/factories/vim_performance_state.rb @@ -1,6 +1,6 @@ FactoryBot.define do factory :vim_performance_state, :class => :VimPerformanceState do timestamp { Time.now.utc } - state_data {{}} + state_data { {} } end end diff --git a/spec/factories/vm_or_template.rb b/spec/factories/vm_or_template.rb index 50702a17ea3..1dd1d4baff6 100644 --- 
a/spec/factories/vm_or_template.rb +++ b/spec/factories/vm_or_template.rb @@ -19,7 +19,7 @@ end factory(:vm, :class => "Vm", :parent => :vm_or_template) - factory(:vm_cloud, :class => "VmCloud", :parent => :vm) { cloud { true } } + factory(:vm_cloud, :class => "VmCloud", :parent => :vm) { cloud { true } } factory(:vm_infra, :class => "VmInfra", :parent => :vm) factory(:vm_server, :class => "VmServer", :parent => :vm) factory(:template_cloud, :class => "TemplateCloud", :parent => :template) { cloud { true } } @@ -53,7 +53,6 @@ factory(:template_redhat, :class => "ManageIQ::Providers::Redhat::InfraManager::Template", :parent => :template_infra) { vendor { "redhat" } } factory(:template_ovirt, :class => "ManageIQ::Providers::Ovirt::InfraManager::Template", :parent => :template_infra) { vendor { "ovirt" } } - factory :template_vmware, :class => "ManageIQ::Providers::Vmware::InfraManager::Template", :parent => "template_infra" do location { |x| "[storage] #{x.name}/#{x.name}.vmtx" } vendor { "vmware" } diff --git a/spec/i18n/locale_name_spec.rb b/spec/i18n/locale_name_spec.rb index c838d773f78..62d763adee5 100644 --- a/spec/i18n/locale_name_spec.rb +++ b/spec/i18n/locale_name_spec.rb @@ -9,14 +9,14 @@ end it "all entries in human_locale_names.yml are valid" do - YAML.load_file(Rails.root.join('config', 'human_locale_names.yaml'))['human_locale_names'].each do |locale_name, human_locale_name| + YAML.load_file(Rails.root.join("config/human_locale_names.yaml"))['human_locale_names'].each do |locale_name, human_locale_name| expect(human_locale_name).not_to be_empty expect(human_locale_name).not_to eq(locale_name) end end it "all languages have properly set human_locale_name" do - human_locale_names = YAML.load_file(Rails.root.join('config', 'human_locale_names.yaml'))['human_locale_names'] + human_locale_names = YAML.load_file(Rails.root.join("config/human_locale_names.yaml"))['human_locale_names'] locales = Vmdb::FastGettextHelper.find_available_locales expect(human_locale_names.keys.sort).to eq(locales.sort) diff --git a/spec/lib/acts_as_ar_model_spec.rb b/spec/lib/acts_as_ar_model_spec.rb index e043bbe1906..699e62c5d0b 100644 --- a/spec/lib/acts_as_ar_model_spec.rb +++ b/spec/lib/acts_as_ar_model_spec.rb @@ -1,14 +1,14 @@ RSpec.describe ActsAsArModel do # id is a default column included regardless if it's in the set_columns_hash - let(:col_names_strs) { %w(str id int flt dt) } + let(:col_names_strs) { %w[str id int flt dt] } let(:base_class) do Class.new(ActsAsArModel) do set_columns_hash( - :str => :string, - :int => :integer, - :flt => :float, - :dt => :datetime, + :str => :string, + :int => :integer, + :flt => :float, + :dt => :datetime ) end end diff --git a/spec/lib/acts_as_ar_scope_spec.rb b/spec/lib/acts_as_ar_scope_spec.rb index c1c09188c4f..145d99f32aa 100644 --- a/spec/lib/acts_as_ar_scope_spec.rb +++ b/spec/lib/acts_as_ar_scope_spec.rb @@ -7,8 +7,8 @@ def self.vm_ids @vm_ids ||= [] end - def self.vm_ids=(new_ids) - @vm_ids = new_ids + class << self + attr_writer :vm_ids end def self.aar_scope diff --git a/spec/lib/ansible/content_spec.rb b/spec/lib/ansible/content_spec.rb index 77a94d61969..e492f476438 100644 --- a/spec/lib/ansible/content_spec.rb +++ b/spec/lib/ansible/content_spec.rb @@ -20,8 +20,8 @@ expected_params = [ "install", - :roles_path= => roles_dir, - :role_file= => roles_requirements + {:roles_path= => roles_dir, + :role_file= => roles_requirements} ] expect(AwesomeSpawn).to receive(:run!).with("ansible-galaxy", :params => expected_params) @@ -34,8 +34,8 @@ 
expected_params = [ "install", - :roles_path= => roles_dir, - :role_file= => roles_requirements + {:roles_path= => roles_dir, + :role_file= => roles_requirements} ] expect(AwesomeSpawn).to receive(:run!).with("ansible-galaxy", :params => expected_params) diff --git a/spec/lib/ansible/runner/credential/azure_credential_spec.rb b/spec/lib/ansible/runner/credential/azure_credential_spec.rb index 535e1362d4a..8ad92573901 100644 --- a/spec/lib/ansible/runner/credential/azure_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/azure_credential_spec.rb @@ -113,7 +113,7 @@ end it "adds AWS_SECURITY_TOKEN if present" do - auth.update!(:options => { :subscription => "subscription_id" }) + auth.update!(:options => {:subscription => "subscription_id"}) expected = { "AZURE_AD_USER" => "manageiq-azure", "AZURE_PASSWORD" => "azure_password", diff --git a/spec/lib/ansible/runner/credential/google_credential_spec.rb b/spec/lib/ansible/runner/credential/google_credential_spec.rb index 133b78b491b..57123b659e5 100644 --- a/spec/lib/ansible/runner/credential/google_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/google_credential_spec.rb @@ -19,7 +19,7 @@ { :userid => "manageiq@gmail.com", :auth_key => "key_data", - :options => { :project => "google_project" } + :options => {:project => "google_project"} } end diff --git a/spec/lib/ansible/runner/credential/machine_credential_spec.rb b/spec/lib/ansible/runner/credential/machine_credential_spec.rb index 60e70d12092..bb77f82a6a6 100644 --- a/spec/lib/ansible/runner/credential/machine_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/machine_credential_spec.rb @@ -86,9 +86,9 @@ def password_hash cred.write_config_files expect(password_hash).to eq( - "^SSH [pP]assword" => "secret", - "^BECOME [pP]assword" => "othersecret", - "^Enter passphrase for [a-zA-Z0-9\-\/]+\/ssh_key_data:" => "keypass" + "^SSH [pP]assword" => "secret", + "^BECOME [pP]assword" => "othersecret", + "^Enter passphrase for [a-zA-Z0-9-/]+/ssh_key_data:" => "keypass" ) expect(File.read(key_file)).to eq("key_data\n") @@ -110,18 +110,18 @@ def password_hash end context "with an existing password_file" do - let(:ssh_unlock_key) { "^Enter passphrase for [a-zA-Z0-9\-\/]+\/ssh_key_data:" } + let(:ssh_unlock_key) { "^Enter passphrase for [a-zA-Z0-9-/]+/ssh_key_data:" } def existing_env_password_file(data) cred # initialize the dir File.write(password_file, data.to_yaml) end it "clobbers existing ssh key unlock keys" do - existing_data = { ssh_unlock_key => "hunter2" } + existing_data = {ssh_unlock_key => "hunter2"} expected_data = { "^SSH [pP]assword" => "secret", "^BECOME [pP]assword" => "othersecret", - ssh_unlock_key => "keypass" + ssh_unlock_key => "keypass" } existing_env_password_file(existing_data) cred.write_config_files @@ -131,7 +131,7 @@ def existing_env_password_file(data) it "appends data if not setting ssh_unlock_key" do auth.update!(:auth_key_password => nil) - existing_data = { ssh_unlock_key => "hunter2" } + existing_data = {ssh_unlock_key => "hunter2"} added_data = { "^SSH [pP]assword" => "secret", "^BECOME [pP]assword" => "othersecret" diff --git a/spec/lib/ansible/runner/credential/network_credential_spec.rb b/spec/lib/ansible/runner/credential/network_credential_spec.rb index e595382abaf..b7cb9072bc3 100644 --- a/spec/lib/ansible/runner/credential/network_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/network_credential_spec.rb @@ -69,7 +69,7 @@ { :userid => "user", :password => "pass", - :options => { :authorize => true } + :options => 
{:authorize => true} } end @@ -110,7 +110,7 @@ def password_hash end context "with an auth_key" do - let(:auth_attributes) { { :auth_key => "key_data" } } + let(:auth_attributes) { {:auth_key => "key_data"} } it "writes the network_ssh_key_file" do cred.write_config_files @@ -127,13 +127,13 @@ def password_hash end context "with authorize set" do - let(:ssh_unlock_key) { "^Enter passphrase for [a-zA-Z0-9\-\/]+\/ssh_key_data:" } + let(:ssh_unlock_key) { "^Enter passphrase for [a-zA-Z0-9-/]+/ssh_key_data:" } let(:auth_attributes) do { :userid => "user", :password => "pass", :auth_key_password => "key_pass", - :options => { :authorize => true } + :options => {:authorize => true} } end diff --git a/spec/lib/ansible/runner/credential/openstack_credential_spec.rb b/spec/lib/ansible/runner/credential/openstack_credential_spec.rb index 32046c01693..29ef4185d85 100644 --- a/spec/lib/ansible/runner/credential/openstack_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/openstack_credential_spec.rb @@ -41,7 +41,7 @@ describe "#env_vars" do it "sets OS_CLIENT_CONFIG_FILE" do filename = File.join(@base_dir, "os_credentials") - expected = { "OS_CLIENT_CONFIG_FILE" => filename } + expected = {"OS_CLIENT_CONFIG_FILE" => filename} expect(cred.env_vars).to eq(expected) end end diff --git a/spec/lib/ansible/runner/credential/rhv_credential_spec.rb b/spec/lib/ansible/runner/credential/rhv_credential_spec.rb index ce25e042d3f..97035484a68 100644 --- a/spec/lib/ansible/runner/credential/rhv_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/rhv_credential_spec.rb @@ -19,7 +19,7 @@ { :userid => "manageiq-rhv", :password => "rhv_password", - :options => { :host => "rhv_host" } + :options => {:host => "rhv_host"} } end diff --git a/spec/lib/ansible/runner/credential/vault_credential_spec.rb b/spec/lib/ansible/runner/credential/vault_credential_spec.rb index 42b86de9e50..bec51882d6c 100644 --- a/spec/lib/ansible/runner/credential/vault_credential_spec.rb +++ b/spec/lib/ansible/runner/credential/vault_credential_spec.rb @@ -16,7 +16,7 @@ let(:auth) { FactoryBot.create(:embedded_ansible_vault_credential, auth_attributes) } let(:cred) { described_class.new(auth.id, @base_dir) } - let(:auth_attributes) { { :password => "vault_secret" } } + let(:auth_attributes) { {:password => "vault_secret"} } let(:vault_filename) { File.join(@base_dir, "vault_password") } describe "#command_line" do @@ -28,7 +28,7 @@ describe "#env_vars" do context "with a password" do it "passes --vault-password-file" do - expected = { "ANSIBLE_VAULT_PASSWORD_FILE" => vault_filename } + expected = {"ANSIBLE_VAULT_PASSWORD_FILE" => vault_filename} expect(cred.env_vars).to eq(expected) end end diff --git a/spec/lib/ansible/runner_spec.rb b/spec/lib/ansible/runner_spec.rb index 03e0d0c09bb..cf052f1d8fa 100644 --- a/spec/lib/ansible/runner_spec.rb +++ b/spec/lib/ansible/runner_spec.rb @@ -120,7 +120,7 @@ expect(described_class).to receive(:python_path).and_call_original expect(described_class).to receive(:manageiq_venv_path).and_return(manageiq_venv_path) - stub_ansible_raw(ansible_exists: true) + stub_ansible_raw(:ansible_exists => true) expect(AwesomeSpawn).to receive(:run) do |command, options| expect(command).to eq("ansible-runner") @@ -327,28 +327,28 @@ it "with manageiq_venv_path valid and ansible_python_version valid" do expect(described_class).to receive(:manageiq_venv_path).and_return(manageiq_venv_path) - stub_ansible_raw(ansible_exists: true) + stub_ansible_raw(:ansible_exists => true) 
expect(described_class.send(:python_path)).to eq([manageiq_venv_path, ansible_python_path].join(File::PATH_SEPARATOR)) end it "with manageiq_venv_path valid and ansible_python_version nil" do expect(described_class).to receive(:manageiq_venv_path).and_return(manageiq_venv_path) - stub_ansible_raw(ansible_exists: false) + stub_ansible_raw(:ansible_exists => false) expect(described_class.send(:python_path)).to eq(manageiq_venv_path) end it "with manageiq_venv_path nil and ansible_python_version valid" do expect(described_class).to receive(:manageiq_venv_path).and_return(nil) - stub_ansible_raw(ansible_exists: true) + stub_ansible_raw(:ansible_exists => true) expect(described_class.send(:python_path)).to eq(ansible_python_path) end it "with manageiq_venv_path nil and ansible_python_version nil" do expect(described_class).to receive(:manageiq_venv_path).and_return(nil) - stub_ansible_raw(ansible_exists: false) + stub_ansible_raw(:ansible_exists => false) expect(described_class.send(:python_path)).to eq("") end diff --git a/spec/lib/container_orchestrator_spec.rb b/spec/lib/container_orchestrator_spec.rb index 4ef9d7fd3a5..9b3145c6668 100644 --- a/spec/lib/container_orchestrator_spec.rb +++ b/spec/lib/container_orchestrator_spec.rb @@ -94,9 +94,9 @@ it "sets database environment variables" do stub_const("ENV", ENV.to_h.merge( - "DATABASE_NAME" => "vmdb_production", - "DATABASE_SSL_MODE" => "verify-full", - )) + "DATABASE_NAME" => "vmdb_production", + "DATABASE_SSL_MODE" => "verify-full" + )) expect(subject.send(:default_environment)).to include({:name => "DATABASE_SSL_MODE", :value => "verify-full"}) expect(subject.send(:default_environment)).not_to include({:name => "DATABASE_NAME", :valueFrom => {:secretKeyRef => {:key => "dbname", :name => "postgresql-secrets"}}}) @@ -120,7 +120,7 @@ it "sets the messaging env vars" do expect(subject.send(:default_environment)).to include( {:name => "MEMCACHED_ENABLE_SSL", :value => "true"}, - {:name => "MEMCACHED_SSL_CA", :value => "/etc/pki/ca-trust/source/anchors/root.crt"}, + {:name => "MEMCACHED_SSL_CA", :value => "/etc/pki/ca-trust/source/anchors/root.crt"} ) end end @@ -150,20 +150,20 @@ deployment_definition = subject.send(:deployment_definition, "test") expect(deployment_definition.fetch_path(:spec, :template, :spec, :containers, 0, :volumeMounts)).to include({ - :mountPath => "/.postgresql", - :name => "pg-root-certificate", - :readOnly => true - }) + :mountPath => "/.postgresql", + :name => "pg-root-certificate", + :readOnly => true + }) expect(deployment_definition.fetch_path(:spec, :template, :spec, :volumes)).to include({ - :name => "pg-root-certificate", - :secret => { - :secretName => "postgresql-secrets", - :items => [ + :name => "pg-root-certificate", + :secret => { + :secretName => "postgresql-secrets", + :items => [ :key => "rootcertificate", :path => "root.crt", ], - } - }) + } + }) end it "mounts the root CA certificate" do @@ -172,30 +172,30 @@ deployment_definition = subject.send(:deployment_definition, "test") expect(deployment_definition.fetch_path(:spec, :template, :spec, :containers, 0, :volumeMounts)).to include({ - :mountPath => "/etc/pki/ca-trust/source/anchors", - :name => "internal-root-certificate", - :readOnly => true - }) + :mountPath => "/etc/pki/ca-trust/source/anchors", + :name => "internal-root-certificate", + :readOnly => true + }) expect(deployment_definition.fetch_path(:spec, :template, :spec, :volumes)).to include({ - :name => "internal-root-certificate", - :secret => { - :secretName => "some-secret-name", - :items 
=> [ + :name => "internal-root-certificate", + :secret => { + :secretName => "some-secret-name", + :items => [ :key => "root_crt", :path => "root.crt", ], - } - }) + } + }) end it "includes node affinities" do deployment_definition = subject.send(:deployment_definition, "test") expect(deployment_definition.fetch_path(:spec, :template, :spec, :affinity, :nodeAffinity, :requiredDuringSchedulingIgnoredDuringExecution, :nodeSelectorTerms, 0, :matchExpressions, 0)).to include({ - :key => "kubernetes.io/arch", - :operator => "In", - :values => ["amd64", "arm64"], - }) + :key => "kubernetes.io/arch", + :operator => "In", + :values => ["amd64", "arm64"], + }) end end diff --git a/spec/lib/ems_event_helper_spec.rb b/spec/lib/ems_event_helper_spec.rb index b901f71e84e..90e2575270c 100644 --- a/spec/lib/ems_event_helper_spec.rb +++ b/spec/lib/ems_event_helper_spec.rb @@ -5,120 +5,112 @@ @zone = FactoryBot.create(:zone) @ems = FactoryBot.create(:ems_vmware, - :zone => @zone, - :name => 'vc7', - :hostname => 'vc7.manageiq.com', - :ipaddress => '10.10.10.2' - ) + :zone => @zone, + :name => 'vc7', + :hostname => 'vc7.manageiq.com', + :ipaddress => '10.10.10.2') @storage = FactoryBot.create(:storage, - :name => 'StarM1-Demo5', - :store_type => 'VMFS' - ) + :name => 'StarM1-Demo5', + :store_type => 'VMFS') @host = FactoryBot.create(:host, - :name => 'host7', - :ext_management_system => @ems, - :vmm_vendor => 'vmware', - :vmm_version => '4.0.0', - :vmm_product => 'ESX', - :vmm_buildnumber => 261974, - :ipaddress => '192.168.252.28', - :hostname => 'host7.manageiq.com' - ) + :name => 'host7', + :ext_management_system => @ems, + :vmm_vendor => 'vmware', + :vmm_version => '4.0.0', + :vmm_product => 'ESX', + :vmm_buildnumber => 261974, + :ipaddress => '192.168.252.28', + :hostname => 'host7.manageiq.com') @vm = FactoryBot.create(:vm_vmware, - :ext_management_system => @ems, - :name => 'vm42', - :location => 'vm42/vm42.vmx', - :storage => @storage - ) + :ext_management_system => @ems, + :name => 'vm42', + :location => 'vm42/vm42.vmx', + :storage => @storage) @username = 'fred' @chain_id = 12345 @ems_events = [] @ems_events << FactoryBot.create(:ems_event, - :event_type => 'PowerOnVM_Task', - :message => 'Task: Power On virtual machine', - :host_name => @host.ipaddress, - :timestamp => Time.now, - :ext_management_system => @ems, - :host => @host, - :vm => @vm, - :vm_name => @vm.name, - :vm_location => @vm.path, - :source => 'VC', - :chain_id => @chain_id, - :is_task => false, - :username => @username - ) + :event_type => 'PowerOnVM_Task', + :message => 'Task: Power On virtual machine', + :host_name => @host.ipaddress, + :timestamp => Time.now, + :ext_management_system => @ems, + :host => @host, + :vm => @vm, + :vm_name => @vm.name, + :vm_location => @vm.path, + :source => 'VC', + :chain_id => @chain_id, + :is_task => false, + :username => @username) @ems_events << FactoryBot.create(:ems_event, - :event_type => 'VmStartingEvent', - :message => "#{@vm.name} on host #{@host.ipaddress} in DC1 is starting", - :host_name => @host.ipaddress, - :timestamp => Time.now, - :ext_management_system => @ems, - :host => @host, - :vm => @vm, - :vm_name => @vm.name, - :vm_location => @vm.path, - :source => 'VC', - :chain_id => @chain_id, - :is_task => false, - :username => @username - ) + :event_type => 'VmStartingEvent', + :message => "#{@vm.name} on host #{@host.ipaddress} in DC1 is starting", + :host_name => @host.ipaddress, + :timestamp => Time.now, + :ext_management_system => @ems, + :host => @host, + :vm => @vm, + :vm_name 
=> @vm.name, + :vm_location => @vm.path, + :source => 'VC', + :chain_id => @chain_id, + :is_task => false, + :username => @username) @ems_events << FactoryBot.create(:ems_event, - :event_type => 'VmPoweredOnEvent', - :message => "#{@vm.name} on #{@host.ipaddress} in DC1 is powered on", - :host_name => @host.ipaddress, - :timestamp => Time.now, - :ext_management_system => @ems, - :host => @host, - :vm => @vm, - :vm_name => @vm.name, - :vm_location => @vm.path, - :source => 'VC', - :chain_id => @chain_id, - :is_task => false, - :username => @username - ) + :event_type => 'VmPoweredOnEvent', + :message => "#{@vm.name} on #{@host.ipaddress} in DC1 is powered on", + :host_name => @host.ipaddress, + :timestamp => Time.now, + :ext_management_system => @ems, + :host => @host, + :vm => @vm, + :vm_name => @vm.name, + :vm_location => @vm.path, + :source => 'VC', + :chain_id => @chain_id, + :is_task => false, + :username => @username) @ems_events << FactoryBot.create(:ems_event, - :event_type => 'PowerOnVM_Task_Complete', - :message => 'PowerOnVM_Task Completed', - :host_name => @host.ipaddress, - :timestamp => Time.now, - :ext_management_system => @ems, - :host => @host, - :vm => @vm, - :vm_name => @vm.name, - :vm_location => @vm.path, - :source => 'EVM', - :chain_id => 12345, - :is_task => false, - :username => @username - ) + :event_type => 'PowerOnVM_Task_Complete', + :message => 'PowerOnVM_Task Completed', + :host_name => @host.ipaddress, + :timestamp => Time.now, + :ext_management_system => @ems, + :host => @host, + :vm => @vm, + :vm_name => @vm.name, + :vm_location => @vm.path, + :source => 'EVM', + :chain_id => 12345, + :is_task => false, + :username => @username) @miq_event_vm_start = FactoryBot.create(:miq_event_definition, :name => 'vm_start', :description => 'VM Power On') @policy_set = FactoryBot.create(:miq_policy_set) @policy = FactoryBot.create(:miq_policy, :towhat => 'Vm', :active => true, :mode => 'control') automate_options = {:ae_message => 'create', :ae_hash => {"kevin" => "1", "q" => "1"}} - @action = FactoryBot.create(:miq_action, :description => 'create_incident', :action_type => 'custom_automation', :options => automate_options) + @action = FactoryBot.create(:miq_action, :description => 'create_incident', :action_type => 'custom_automation', :options => automate_options) @policy_set.add_member(@policy) - @policy_content = FactoryBot.create(:miq_policy_content, - :miq_policy => @policy, - :miq_action => @action, - :miq_event_definition => @miq_event_vm_start, - :qualifier => 'success', - :success_sequence => 1, - :success_synchronous => true) + @policy_content = FactoryBot.create(:miq_policy_content, + :miq_policy => @policy, + :miq_action => @action, + :miq_event_definition => @miq_event_vm_start, + :qualifier => 'success', + :success_sequence => 1, + :success_synchronous => true) @vm.add_policy(@policy) end it "should handle event properly" do - routine = [ { "policy" => ["src_vm", "vm_start"] } ] + routine = [{"policy" => ["src_vm", "vm_start"]}] policy = routine.first["policy"] event = @ems_events.last diff --git a/spec/lib/evm_database_spec.rb b/spec/lib/evm_database_spec.rb index db5c7aa761b..146a18a11e4 100644 --- a/spec/lib/evm_database_spec.rb +++ b/spec/lib/evm_database_spec.rb @@ -136,7 +136,7 @@ def simulate_full_seed EvmSpecHelper.local_miq_server described_class.raise_server_event("db_failover_executed") record = MiqQueue.last - expect(record.class_name). 
to eq "MiqEvent" + expect(record.class_name).to eq "MiqEvent" expect(record.method_name).to eq "raise_evm_event" expect(record.args[1]).to eq "db_failover_executed" end @@ -213,8 +213,8 @@ def simulate_full_seed expect(handlers.count).to eq(3) handlers.select! { |h| h.kind_of?(ManageIQ::PostgresHaAdmin::LogicalReplicationConfigHandler) } expect(handlers.count).to eq(2) - expect(%w(sub_id_1 sub_id_2)).to include(handlers.first.subscription) - expect(%w(sub_id_1 sub_id_2)).to include(handlers.last.subscription) + expect(%w[sub_id_1 sub_id_2]).to include(handlers.first.subscription) + expect(%w[sub_id_1 sub_id_2]).to include(handlers.last.subscription) expect(handlers.first.subscription).not_to eq(handlers.last.subscription) end end diff --git a/spec/lib/extensions/ar_base_model_spec.rb b/spec/lib/extensions/ar_base_model_spec.rb index b84aabc7c98..66cc27d285c 100644 --- a/spec/lib/extensions/ar_base_model_spec.rb +++ b/spec/lib/extensions/ar_base_model_spec.rb @@ -2,7 +2,7 @@ context "with a test class" do let(:test_class) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" end end @@ -17,7 +17,7 @@ def self.name; "TestClass"; end context "with a subclass" do let(:test_class_foo) do Class.new(test_class) do - def self.name; "TestClassFoo"; end + def self.name = "TestClassFoo" end end diff --git a/spec/lib/extensions/ar_base_spec.rb b/spec/lib/extensions/ar_base_spec.rb index d8db3016d3e..5d2550c5a2d 100644 --- a/spec/lib/extensions/ar_base_spec.rb +++ b/spec/lib/extensions/ar_base_spec.rb @@ -2,7 +2,7 @@ context "with a test class" do let(:test_class) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" end end diff --git a/spec/lib/extensions/ar_dba_spec.rb b/spec/lib/extensions/ar_dba_spec.rb index 5501387ff44..68bac413082 100644 --- a/spec/lib/extensions/ar_dba_spec.rb +++ b/spec/lib/extensions/ar_dba_spec.rb @@ -9,7 +9,7 @@ describe "#xlog_location_diff" do it "returns the correct xlog difference" do - expect(connection.xlog_location_diff("18/72F84A48", "18/72F615B8")). 
to eq(144_528) + expect(connection.xlog_location_diff("18/72F84A48", "18/72F615B8")).to eq(144_528) end end diff --git a/spec/lib/extensions/ar_nested_count_by_spec.rb b/spec/lib/extensions/ar_nested_count_by_spec.rb index fabd1406459..993a727dce2 100644 --- a/spec/lib/extensions/ar_nested_count_by_spec.rb +++ b/spec/lib/extensions/ar_nested_count_by_spec.rb @@ -19,7 +19,7 @@ end it "should count by state, zone and role" do - expect(MiqQueue.nested_count_by(%w(state zone role))).to eq( + expect(MiqQueue.nested_count_by(%w[state zone role])).to eq( MiqQueue::STATE_READY => { zone1.name => {"role1" => 3}, zone3.name => {"role3" => 1}, @@ -41,15 +41,15 @@ it "should respect nested where, and support individual args (vs an array)" do expect(MiqQueue.where(:zone => zone3.name).nested_count_by("role", "state")).to eq( "role3" => {MiqQueue::STATE_READY => 1}, - "role2" => {MiqQueue::STATE_ERROR => 1}, + "role2" => {MiqQueue::STATE_ERROR => 1} ) end it "should count by role and state" do - expect(MiqQueue.nested_count_by(%w(role state))).to eq( + expect(MiqQueue.nested_count_by(%w[role state])).to eq( "role1" => {"dequeue" => 4, "ready" => 3, "error" => 1}, "role2" => {"dequeue" => 1, "error" => 1}, - "role3" => {"ready" => 1, "warn" => 1}, + "role3" => {"ready" => 1, "warn" => 1} ) end end diff --git a/spec/lib/extensions/ar_taggable_spec.rb b/spec/lib/extensions/ar_taggable_spec.rb index b84f23129f5..42f58ebf152 100644 --- a/spec/lib/extensions/ar_taggable_spec.rb +++ b/spec/lib/extensions/ar_taggable_spec.rb @@ -210,8 +210,8 @@ class TestModel < ApplicationRecord end it "#tag_list" do - expect(Host.find_by(:name => "HOST1").tag_list(:ns => "/test", :cat => "tags").split).to match_array %w(red blue yellow) - expect(Vm.find_by(:name => "VM1").tag_list(:ns => "/test/tags").split).to match_array %w(red blue yellow) + expect(Host.find_by(:name => "HOST1").tag_list(:ns => "/test", :cat => "tags").split).to match_array %w[red blue yellow] + expect(Vm.find_by(:name => "VM1").tag_list(:ns => "/test/tags").split).to match_array %w[red blue yellow] end it "#to_tag" do diff --git a/spec/lib/extensions/ar_yaml_spec.rb b/spec/lib/extensions/ar_yaml_spec.rb index 58799bbc290..14c3f2503d5 100644 --- a/spec/lib/extensions/ar_yaml_spec.rb +++ b/spec/lib/extensions/ar_yaml_spec.rb @@ -10,22 +10,22 @@ inst = Vm.new inst.access1 = 1 inst.access2 = 2 - result = YAML.safe_load(YAML.dump(inst), :permitted_classes => [Vm, ActiveModel::Attribute.const_get(:FromDatabase), ActiveModel::Attribute::const_get(:FromUser), ActiveModel::Attribute::UserProvidedDefault, ActiveModel::Type::String]) + result = YAML.safe_load(YAML.dump(inst), :permitted_classes => [Vm, ActiveModel::Attribute.const_get(:FromDatabase), ActiveModel::Attribute.const_get(:FromUser), ActiveModel::Attribute::UserProvidedDefault, ActiveModel::Type::String]) expect(result.access1).to eq(1) expect(result.access2).to eq(2) end it "attr_reader_that_yamls" do inst = Vm.new - inst.instance_variable_set("@read1", 1) - result = YAML.safe_load(YAML.dump(inst), :permitted_classes => [Vm, ActiveModel::Attribute.const_get(:FromDatabase), ActiveModel::Attribute::const_get(:FromUser), ActiveModel::Attribute::UserProvidedDefault, ActiveModel::Type::String]) + inst.instance_variable_set(:@read1, 1) + result = YAML.safe_load(YAML.dump(inst), :permitted_classes => [Vm, ActiveModel::Attribute.const_get(:FromDatabase), ActiveModel::Attribute.const_get(:FromUser), ActiveModel::Attribute::UserProvidedDefault, ActiveModel::Type::String]) expect(result.read1).to eq(1) end it 
"attr_writer_that_yamls" do inst = Vm.new inst.write1 = 1 - result = YAML.safe_load(YAML.dump(inst), :permitted_classes => [Vm, ActiveModel::Attribute.const_get(:FromDatabase), ActiveModel::Attribute::const_get(:FromUser), ActiveModel::Attribute::UserProvidedDefault, ActiveModel::Type::String]) - expect(result.instance_variable_get("@write1")).to eq(1) + result = YAML.safe_load(YAML.dump(inst), :permitted_classes => [Vm, ActiveModel::Attribute.const_get(:FromDatabase), ActiveModel::Attribute.const_get(:FromUser), ActiveModel::Attribute::UserProvidedDefault, ActiveModel::Type::String]) + expect(result.instance_variable_get(:@write1)).to eq(1) end end diff --git a/spec/lib/extensions/database_configuration_spec.rb b/spec/lib/extensions/database_configuration_spec.rb index a07a898bd43..09a41847371 100644 --- a/spec/lib/extensions/database_configuration_spec.rb +++ b/spec/lib/extensions/database_configuration_spec.rb @@ -25,14 +25,14 @@ context "when DATABASE_URL is set" do around(:each) do |example| - begin - old_env = ENV.delete('DATABASE_URL') - ENV['DATABASE_URL'] = 'postgres://' - example.run - ensure - # ENV['x'] = nil deletes the key because ENV accepts only string values - ENV['DATABASE_URL'] = old_env - end + + old_env = ENV.delete('DATABASE_URL') + ENV['DATABASE_URL'] = 'postgres://' + example.run + ensure + # ENV['x'] = nil deletes the key because ENV accepts only string values + ENV['DATABASE_URL'] = old_env + end it "ignores a missing file" do diff --git a/spec/lib/git_worktree_spec.rb b/spec/lib/git_worktree_spec.rb index 697b0d7f588..412a8058c07 100644 --- a/spec/lib/git_worktree_spec.rb +++ b/spec/lib/git_worktree_spec.rb @@ -4,10 +4,10 @@ @git_db = "TestGit.git" @ae_db_dir = Dir.mktmpdir @default_hash = {:a => "one", :b => "two", :c => "three"} - @dirnames = %w(A B c) + @dirnames = %w[A B c] @repo_path = File.join(@ae_db_dir, @git_db) - @filenames = %w(A/File1.YamL B/File2.YamL c/File3.YAML) - @deleted_names = %w(A A/File1.YamL) + @filenames = %w[A/File1.YamL B/File2.YamL c/File3.YAML] + @deleted_names = %w[A A/File1.YamL] @conflict_file = 'A/File1.YamL' @master_url = "file://#{@repo_path}" @repo_options = {:path => @repo_path, @@ -26,7 +26,7 @@ def add_files_to_bare_repo(flist) flist.each { |f| @ae_db.add(f, YAML.dump(@default_hash.merge(:fname => f))) } @ae_db.save_changes("files added") - @ae_db.instance_variable_get('@repo').head.target.oid + @ae_db.instance_variable_get(:@repo).head.target.oid end def clone(url, add_options = {}) @@ -91,7 +91,7 @@ def open_existing_repo end it "#entries in A" do - expect(@ae_db.entries("A")).to match_array(%w(File1.YamL)) + expect(@ae_db.entries("A")).to match_array(%w[File1.YamL]) end it "get list of files" do @@ -109,8 +109,8 @@ def open_existing_repo end it "rename directory" do - filenames = %w(AAA/File1.YamL B/File2.YamL c/File3.YAML) - dirnames = %w(AAA B c) + filenames = %w[AAA/File1.YamL B/File2.YamL c/File3.YAML] + dirnames = %w[AAA B c] @ae_db.mv_dir('A', "AAA") @ae_db.save_changes("directories moved") expect(@ae_db.file_list).to match_array(filenames + dirnames) @@ -121,24 +121,24 @@ def open_existing_repo end it "move directories with similar names" do - filenames = %w(A/A/A/File1.YamL A/A/Aile2.YamL) + filenames = %w[A/A/A/File1.YamL A/A/Aile2.YamL] filenames.each { |f| @ae_db.add(f, YAML.dump(@default_hash.merge(:fname => f))) } @ae_db.send(:commit, "extra files_added").tap { |cid| @ae_db.send(:merge, cid) } @ae_db.mv_dir('A', "AAA") @ae_db.save_changes("directories moved") - filenames = %w(AAA/File1.YamL B/File2.YamL 
c/File3.YAML AAA/A/A/File1.YamL AAA/A/Aile2.YamL) - dirnames = %w(AAA B c AAA/A AAA/A/A) + filenames = %w[AAA/File1.YamL B/File2.YamL c/File3.YAML AAA/A/A/File1.YamL AAA/A/Aile2.YamL] + dirnames = %w[AAA B c AAA/A AAA/A/A] expect(@ae_db.file_list).to match_array(filenames + dirnames) end it "move intermediate directories with similar names" do - filenames = %w(A/A/A/File1.YamL A/A/Aile2.YamL) + filenames = %w[A/A/A/File1.YamL A/A/Aile2.YamL] filenames.each { |f| @ae_db.add(f, YAML.dump(@default_hash.merge(:fname => f))) } @ae_db.send(:commit, "extra files_added").tap { |cid| @ae_db.send(:merge, cid) } @ae_db.mv_dir('A/A', "AAA") @ae_db.save_changes("directories moved") - filenames = %w(A/File1.YamL B/File2.YamL c/File3.YAML AAA/A/File1.YamL AAA/Aile2.YamL) - dirnames = %w(AAA B c AAA/A A) + filenames = %w[A/File1.YamL B/File2.YamL c/File3.YAML AAA/A/File1.YamL AAA/Aile2.YamL] + dirnames = %w[AAA B c AAA/A A] expect(@ae_db.file_list).to match_array(filenames + dirnames) end @@ -159,14 +159,14 @@ def open_existing_repo end it "rename file with new contents" do - filenames = %w(A/File11.YamL B/File2.YamL c/File3.YAML) + filenames = %w[A/File11.YamL B/File2.YamL c/File3.YAML] @ae_db.mv_file_with_new_contents('A/File1.YamL', 'A/File11.YamL', "Hello") @ae_db.save_changes("file renamed") expect(@ae_db.file_list).to match_array(filenames + @dirnames) end it "rename file" do - filenames = %w(A/File11.YamL B/File2.YamL c/File3.YAML) + filenames = %w[A/File11.YamL B/File2.YamL c/File3.YAML] @ae_db.mv_file('A/File1.YamL', 'A/File11.YamL') @ae_db.save_changes("file renamed") expect(@ae_db.file_list).to match_array(filenames + @dirnames) @@ -179,9 +179,9 @@ def open_existing_repo new_db = open_existing_repo new_db.add(@conflict_file, YAML.dump(@default_hash.merge(:fname => "second_one"))) new_db.save_changes("overlapping commit") - expect { @ae_db.send(:merge, commit) }.to raise_error { |error| + expect { @ae_db.send(:merge, commit) }.to(raise_error do |error| expect(error).to be_a(GitWorktreeException::GitConflicts) - } + end) end it "clone repo" do @@ -266,11 +266,11 @@ def open_existing_repo describe "#branches" do it "all branches" do - expect(test_repo.branches).to match_array(%w(master branch1 branch2 symbolic)) + expect(test_repo.branches).to match_array(%w[master branch1 branch2 symbolic]) end it "local branches only" do - expect(test_repo.branches(:local)).to match_array(%w(master branch1 branch2 symbolic)) + expect(test_repo.branches(:local)).to match_array(%w[master branch1 branch2 symbolic]) end it "remote branches only" do @@ -282,7 +282,7 @@ def open_existing_repo it "get list of files in a branch" do test_repo.branch = 'branch2' - expect(test_repo.file_list).to match_array(%w(file1 file2 file3 file4)) + expect(test_repo.file_list).to match_array(%w[file1 file2 file3 file4]) end end @@ -308,7 +308,7 @@ def open_existing_repo describe "#branches" do it "all branches" do - expect(test_repo.branches).to match_array(%w(branch1 branch2)) + expect(test_repo.branches).to match_array(%w[branch1 branch2]) end end @@ -316,7 +316,7 @@ def open_existing_repo it "get list of files in a branch" do test_repo.branch = 'branch2' - expect(test_repo.file_list).to match_array(%w(file1 file2 file3 file4)) + expect(test_repo.file_list).to match_array(%w[file1 file2 file3 file4]) end end end @@ -327,14 +327,14 @@ def open_existing_repo describe "#tags" do it "get list of tags" do - expect(test_repo.tags).to match_array(%w(tag1 tag2)) + expect(test_repo.tags).to match_array(%w[tag1 tag2]) end end describe 
"#file_list" do it "get list of files in a tag" do test_repo.tag = 'tag2' - expect(test_repo.file_list).to match_array(%w(file1 file2 file3 file4)) + expect(test_repo.file_list).to match_array(%w[file1 file2 file3 file4]) end end @@ -352,20 +352,20 @@ def open_existing_repo end describe "#new" do - let(:git_repo_path) { Rails.root.join("spec", "fixtures", "git_repos", "branch_and_tag.git") } + let(:git_repo_path) { Rails.root.join("spec/fixtures/git_repos/branch_and_tag.git") } it "raises an exception if SSH requested, but rugged is not compiled with SSH support" do require "rugged" expect(Rugged).to receive(:features).and_return([:threads, :https]) - expect { + expect do GitWorktree.new(:path => git_repo_path, :ssh_private_key => "fake key\nfile content") - }.to raise_error(GitWorktreeException::InvalidCredentialType) + end.to raise_error(GitWorktreeException::InvalidCredentialType) end end describe "#with_remote_options" do - let(:git_repo_path) { Rails.root.join("spec", "fixtures", "git_repos", "branch_and_tag.git") } + let(:git_repo_path) { Rails.root.join("spec/fixtures/git_repos/branch_and_tag.git") } subject do repo.with_remote_options do |cred_options| diff --git a/spec/lib/httpd_dbus_api_spec.rb b/spec/lib/httpd_dbus_api_spec.rb index 6f286a97940..ccc5b465365 100644 --- a/spec/lib/httpd_dbus_api_spec.rb +++ b/spec/lib/httpd_dbus_api_spec.rb @@ -14,7 +14,7 @@ } end - let(:jdoe_user_groups) { %w(evmgroup-super_administrator evmgroup-user) } + let(:jdoe_user_groups) { %w[evmgroup-super_administrator evmgroup-user] } let(:jim_userid) { "jim" } let(:jim_attrs_error) { "Unable to get attributes for user #{jim_userid} - No such user" } @@ -25,19 +25,19 @@ ENV["HTTPD_DBUS_API_SERVICE_PORT"] = "3400" stub_request(:get, "http://1.2.3.4:3400/api/user_attrs/#{jdoe_userid}") - .to_return(:status => 200, :body => { "result" => jdoe_user_attrs }.to_json) + .to_return(:status => 200, :body => {"result" => jdoe_user_attrs}.to_json) stub_request(:get, "http://1.2.3.4:3400/api/user_attrs/#{jdoe_userid}?attributes=givenname,sn") - .to_return(:status => 200, :body => { "result" => jdoe_user_attrs.slice("givenname", "sn") }.to_json) + .to_return(:status => 200, :body => {"result" => jdoe_user_attrs.slice("givenname", "sn")}.to_json) stub_request(:get, "http://1.2.3.4:3400/api/user_attrs/#{jim_userid}") - .to_return(:status => 400, :body => { "error" => jim_attrs_error }.to_json) + .to_return(:status => 400, :body => {"error" => jim_attrs_error}.to_json) stub_request(:get, "http://1.2.3.4:3400/api/user_groups/#{jdoe_userid}") - .to_return(:status => 200, :body => { "result" => jdoe_user_groups }.to_json) + .to_return(:status => 200, :body => {"result" => jdoe_user_groups}.to_json) stub_request(:get, "http://1.2.3.4:3400/api/user_groups/#{jim_userid}") - .to_return(:status => 400, :body => { "error" => jim_groups_error }.to_json) + .to_return(:status => 400, :body => {"error" => jim_groups_error}.to_json) end context "user_attrs" do @@ -46,7 +46,7 @@ end it "converts attribute list to comma separated attributes parameter" do - expect(described_class.new.user_attrs(jdoe_userid, %w(givenname sn))) + expect(described_class.new.user_attrs(jdoe_userid, %w[givenname sn])) .to match(jdoe_user_attrs.slice("givenname", "sn")) end diff --git a/spec/lib/manageiq/reporting/formatter/c3_spec.rb b/spec/lib/manageiq/reporting/formatter/c3_spec.rb index 5098a69acc7..cd94b732ae5 100644 --- a/spec/lib/manageiq/reporting/formatter/c3_spec.rb +++ b/spec/lib/manageiq/reporting/formatter/c3_spec.rb @@ -100,7 +100,7 @@ context 
'#C&U charts without grouping' do let(:report) { cu_chart_without_grouping } before do - render_report(report, &proc { |e| e.options.graph_options = { :chart_type => :performance } }) + render_report(report, &proc { |e| e.options.graph_options = {:chart_type => :performance} }) end it "has right data" do @@ -127,7 +127,7 @@ context '#C&U charts with grouping' do let(:report) { cu_chart_with_grouping } before do - render_report(report, &proc { |e| e.options.graph_options = { :chart_type => :performance } }) + render_report(report, &proc { |e| e.options.graph_options = {:chart_type => :performance} }) end it "has right data" do @@ -154,7 +154,7 @@ context '#C&U charts with no data' do let(:report) { cu_chart_with_grouping } before do - render_report(report, &proc { |e| e.options.graph_options = { :chart_type => :performance } }) + render_report(report, &proc { |e| e.options.graph_options = {:chart_type => :performance} }) end it "has right empty data description" do diff --git a/spec/lib/manageiq/reporting/formatter/chart_common_spec.rb b/spec/lib/manageiq/reporting/formatter/chart_common_spec.rb index cd56330f03c..e9c1672d1d5 100644 --- a/spec/lib/manageiq/reporting/formatter/chart_common_spec.rb +++ b/spec/lib/manageiq/reporting/formatter/chart_common_spec.rb @@ -19,10 +19,10 @@ it "builds a daily chart with all nils" do report = MiqReport.new( :db => "VimPerformanceDaily", - :cols => cols = %w(timestamp cpu_usagemhz_rate_average min_cpu_usagemhz_rate_average max_cpu_usagemhz_rate_average trend_max_cpu_usagemhz_rate_average resource.cpu_usagemhz_rate_average_high_over_time_period resource.cpu_usagemhz_rate_average_low_over_time_period), + :cols => cols = %w[timestamp cpu_usagemhz_rate_average min_cpu_usagemhz_rate_average max_cpu_usagemhz_rate_average trend_max_cpu_usagemhz_rate_average resource.cpu_usagemhz_rate_average_high_over_time_period resource.cpu_usagemhz_rate_average_low_over_time_period], :include => { "resource" => { - "columns" => %w(cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period derived_memory_used_high_over_time_period derived_memory_used_low_over_time_period), + "columns" => %w[cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period derived_memory_used_high_over_time_period derived_memory_used_low_over_time_period], } }, :col_order => cols, @@ -32,18 +32,18 @@ :group => "n", :graph => { :type => "Line", - :columns => %w(cpu_usagemhz_rate_average min_cpu_usagemhz_rate_average max_cpu_usagemhz_rate_average trend_max_cpu_usagemhz_rate_average resource.cpu_usagemhz_rate_average_high_over_time_period resource.cpu_usagemhz_rate_average_low_over_time_period), + :columns => %w[cpu_usagemhz_rate_average min_cpu_usagemhz_rate_average max_cpu_usagemhz_rate_average trend_max_cpu_usagemhz_rate_average resource.cpu_usagemhz_rate_average_high_over_time_period resource.cpu_usagemhz_rate_average_low_over_time_period], :legends => nil, :max_col_size => nil }, :dims => nil, :col_formats => nil, :col_options => nil, - :rpt_options => nil, + :rpt_options => nil ) report.table = Ruport::Data::Table.new( - :column_names => %w(timestamp cpu_usagemhz_rate_average min_cpu_usagemhz_rate_average max_cpu_usagemhz_rate_average trend_max_cpu_usagemhz_rate_average), + :column_names => %w[timestamp cpu_usagemhz_rate_average min_cpu_usagemhz_rate_average max_cpu_usagemhz_rate_average trend_max_cpu_usagemhz_rate_average], :data => [["Sun, 20 Mar 2016 00:00:00 UTC +00:00", 0.0, nil, nil, 0]] ) diff --git 
a/spec/lib/manageiq/reporting/formatter/text_spec.rb b/spec/lib/manageiq/reporting/formatter/text_spec.rb index 334bf5372b8..31b89f84da4 100644 --- a/spec/lib/manageiq/reporting/formatter/text_spec.rb +++ b/spec/lib/manageiq/reporting/formatter/text_spec.rb @@ -24,7 +24,7 @@ +--------------------------------------------------------------------+ | #{described_class.format_timezone(Time.zone.now)} | +--------------------------------------------------------------------+ - TABLE + TABLE result.lines.each_with_index do |line, index| expect(line.strip).to eq(expected.lines[index].strip) end @@ -45,7 +45,7 @@ +--------------------------------------------------------------------+ | #{described_class.format_timezone(Time.zone.now)} | +--------------------------------------------------------------------+ - TABLE + TABLE result.lines.each_with_index do |line, index| expect(line.strip).to eq(expected.lines[index].strip) end @@ -69,7 +69,7 @@ +--------------------------------------------------------------------+ | #{described_class.format_timezone(Time.zone.now)} | +--------------------------------------------------------------------+ - TABLE + TABLE result.lines.each_with_index do |line, index| expect(line.strip).to eq(expected.lines[index].strip) end diff --git a/spec/lib/manageiq/reporting/formatter/timeline_spec.rb b/spec/lib/manageiq/reporting/formatter/timeline_spec.rb index 68534917662..adbfe9543ee 100644 --- a/spec/lib/manageiq/reporting/formatter/timeline_spec.rb +++ b/spec/lib/manageiq/reporting/formatter/timeline_spec.rb @@ -4,12 +4,12 @@ let(:ems) { FactoryBot.create(:ems_redhat, :id => 42) } let(:event) do FactoryBot.create(:ems_event, - :event_type => 'CONTAINER_CREATED', - :ems_id => 6, - :container_group_name => 'hawkular-cassandra-1-wb1z6', - :container_namespace => 'openshift-infra', - :container_name => 'hawkular-cassandra-1', - :ext_management_system => ems) + :event_type => 'CONTAINER_CREATED', + :ems_id => 6, + :container_group_name => 'hawkular-cassandra-1-wb1z6', + :container_namespace => 'openshift-infra', + :container_name => 'hawkular-cassandra-1', + :ext_management_system => ems) end flags = {:ems_cloud => false, @@ -33,8 +33,8 @@ let(:vm) { FactoryBot.create(:vm_redhat, :id => 42) } let(:event) do FactoryBot.create(:ems_event, - :event_type => 'VM_CREATED', - :vm_or_template => vm) + :event_type => 'VM_CREATED', + :vm_or_template => vm) end flags = {:ems_cloud => false, @@ -58,10 +58,10 @@ let(:vm) { FactoryBot.create(:vm_redhat, :id => 42, :name => 'Test VM') } let(:event) do FactoryBot.create(:policy_event, - :event_type => 'vm_poweroff', - :target_id => 42, - :target_name => vm.name, - :target_class => 'VmOrTemplate') + :event_type => 'vm_poweroff', + :target_id => 42, + :target_name => vm.name, + :target_class => 'VmOrTemplate') end tests = {'event_type' => 'vm_poweroff', @@ -98,15 +98,15 @@ def stub_ems_event(event_type) before do @report = FactoryBot.create(:miq_report, - :db => "EventStream", - :col_order => %w(id name event_type timestamp), - :headers => %w(id name event_type timestamp), - :timeline => {:field => "EmsEvent-timestamp", :position => "Last"}) + :db => "EventStream", + :col_order => %w[id name event_type timestamp], + :headers => %w[id name event_type timestamp], + :timeline => {:field => "EmsEvent-timestamp", :position => "Last"}) @report.rpt_options = {:categories => {:power => {:display_name => "Power Activity", - :include_set => %w(VmPoweredOffEvent VmPoweredOnEvent), + :include_set => %w[VmPoweredOffEvent VmPoweredOnEvent], :regexes => []}, 
:snapshot => {:display_name => "Snapshot Activity", - :include_set => %w(AlarmCreatedEvent AlarmRemovedEvent), + :include_set => %w[AlarmCreatedEvent AlarmRemovedEvent], :regexes => []}}} data = [] @@ -125,7 +125,7 @@ def stub_ems_event(event_type) end @report.table = Ruport::Data::Table.new( - :column_names => %w(id name event_type timestamp), + :column_names => %w[id name event_type timestamp], :data => data ) end @@ -166,8 +166,8 @@ def stub_ems_event(event_type) @report = FactoryBot.create( :miq_report, :db => "EventStream", - :col_order => %w(id name event_type timestamp), - :headers => %w(id name event_type timestamp), + :col_order => %w[id name event_type timestamp], + :headers => %w[id name event_type timestamp], :timeline => {:field => "EmsEvent-timestamp", :position => "Last"} ) @report.rpt_options = { @@ -179,7 +179,7 @@ def stub_ems_event(event_type) }, :snapshot => { :display_name => "Snapshot Activity", - :include_set => %w(AlarmCreatedEvent AlarmRemovedEvent), + :include_set => %w[AlarmCreatedEvent AlarmRemovedEvent], :regexes => [] } } @@ -210,7 +210,7 @@ def stub_ems_event(event_type) end @report.table = Ruport::Data::Table.new( - :column_names => %w(id name event_type timestamp), + :column_names => %w[id name event_type timestamp], :data => data ) end @@ -250,15 +250,15 @@ def stub_ems_event(event_type) before do @report = FactoryBot.create(:miq_report, - :db => "EventStream", - :col_order => %w(id name event_type timestamp vm_location), - :headers => %w(id name event_type timestamp vm_location), - :timeline => {:field => "EmsEvent-timestamp", :position => "Last"}) + :db => "EventStream", + :col_order => %w[id name event_type timestamp vm_location], + :headers => %w[id name event_type timestamp vm_location], + :timeline => {:field => "EmsEvent-timestamp", :position => "Last"}) @report.rpt_options = {:categories => {:power => {:display_name => "Power Activity", - :include_set => %w(VmPoweredOffEvent VmPoweredOnEvent), + :include_set => %w[VmPoweredOffEvent VmPoweredOnEvent], :regexes => []}, :snapshot => {:display_name => "Snapshot Activity", - :include_set => %w(AlarmCreatedEvent AlarmRemovedEvent), + :include_set => %w[AlarmCreatedEvent AlarmRemovedEvent], :regexes => []}}} data = [Ruport::Data::Record.new("id" => stub_ems_event("VmPoweredOffEvent").id, @@ -268,7 +268,7 @@ def stub_ems_event(event_type) "timestamp" => Time.zone.now)] @report.table = Ruport::Data::Table.new( - :column_names => %w(id name event_type timestamp vm_location), + :column_names => %w[id name event_type timestamp vm_location], :data => data ) end diff --git a/spec/lib/manageiq_spec.rb b/spec/lib/manageiq_spec.rb index dedfaf3f116..47c7ced5bbb 100644 --- a/spec/lib/manageiq_spec.rb +++ b/spec/lib/manageiq_spec.rb @@ -10,7 +10,7 @@ RSpec.describe ManageIQ do def without_rails(rb_cmd) - miq_lib_file = Rails.root.join("lib", "manageiq.rb") + miq_lib_file = Rails.root.join("lib/manageiq.rb") `#{Gem.ruby} -e 'require "#{miq_lib_file}"; print #{rb_cmd}'` end diff --git a/spec/lib/miq_expression/count_field_spec.rb b/spec/lib/miq_expression/count_field_spec.rb index 5f08f0f3a03..92d70c8ecfc 100644 --- a/spec/lib/miq_expression/count_field_spec.rb +++ b/spec/lib/miq_expression/count_field_spec.rb @@ -34,7 +34,7 @@ end it "can handle multiple associations" do - count_field = described_class.new(Vm, %w(hardware disks)) + count_field = described_class.new(Vm, %w[hardware disks]) expect(count_field.to_s).to eq("Vm.hardware.disks") end end diff --git a/spec/lib/miq_expression/field_spec.rb 
b/spec/lib/miq_expression/field_spec.rb index ed4ec0be5d0..555782a690e 100644 --- a/spec/lib/miq_expression/field_spec.rb +++ b/spec/lib/miq_expression/field_spec.rb @@ -69,7 +69,7 @@ it "can parse the associations when there are many present" do field = "Vm.host.hardware-id" - expect(described_class.parse(field).associations).to eq(%w(host hardware)) + expect(described_class.parse(field).associations).to eq(%w[host hardware]) end it "will return nil when given a field with unsupported syntax" do @@ -94,7 +94,7 @@ it 'parses field with numbers in association' do field = 'Vm.win32_services-dependencies' expect(described_class.parse(field)).to have_attributes(:model => Vm, - :associations => %w(win32_services), + :associations => %w[win32_services], :column => 'dependencies') end @@ -163,7 +163,7 @@ end it "returns the reflections of fields with multiple associations" do - field = described_class.new(Vm, %w(host hardware), "guest_os") + field = described_class.new(Vm, %w[host hardware], "guest_os") expect(field.reflections).to match([an_object_having_attributes(:klass => Host), an_object_having_attributes(:klass => Hardware)]) end @@ -179,7 +179,7 @@ end it "raises an error if the field has invalid associations" do - field = described_class.new(Vm, %w(foo bar), "name") + field = described_class.new(Vm, %w[foo bar], "name") expect { field.reflections }.to raise_error(/One or more associations are invalid: foo, bar/) end end diff --git a/spec/lib/miq_expression/subst_mixin_spec.rb b/spec/lib/miq_expression/subst_mixin_spec.rb index bc96a25bc50..c22b93bd2c1 100644 --- a/spec/lib/miq_expression/subst_mixin_spec.rb +++ b/spec/lib/miq_expression/subst_mixin_spec.rb @@ -15,15 +15,14 @@ exp = { "and" => - [ - {"=" => {"field" => "ManageIQ::Providers::InfraManager::Vm-active", "value" => "true"}, :token => 1}, + [ + {"=" => {"field" => "ManageIQ::Providers::InfraManager::Vm-active", "value" => "true"}, :token => 1}, {"or" => - [ - {"=" => {"count" => "ManageIQ::Providers::InfraManager::Vm.advanced_settings", "value" => "1"}, :token => 2}, + [ + {"=" => {"count" => "ManageIQ::Providers::InfraManager::Vm.advanced_settings", "value" => "1"}, :token => 2}, {"=" => {"count" => "ManageIQ::Providers::InfraManager::Vm.storages", "value" => "1"}, :token => 3} - ] - } - ] + ]} + ] } result = test_obj.exp_find_by_token(exp, 2) expect(result).to eq("=" => {"count" => "ManageIQ::Providers::InfraManager::Vm.advanced_settings", "value" => "1"}, :token => 2) @@ -33,11 +32,11 @@ exp = { "and" => - [ - {"=" => {"field" => "ManageIQ::Providers::InfraManager::Vm-active", "value" => "true"}, :token => 1}, + [ + {"=" => {"field" => "ManageIQ::Providers::InfraManager::Vm-active", "value" => "true"}, :token => 1}, {"CONTAINS" => {"tag" => "ManageIQ::Providers::InfraManager::Vm.managed-prov_max_cpu", "value" => "2"}, :token => 2}, {"CONTAINS" => {"tag" => "ManageIQ::Providers::InfraManager::Vm.managed-prov_max_retirement_days", "value" => "60"}, :token => 3} - ] + ] } result = test_obj.exp_find_by_token(exp, 3) expect(result).to eq("CONTAINS" => {"tag" => "ManageIQ::Providers::InfraManager::Vm.managed-prov_max_retirement_days", "value" => "60"}, :token => 3) diff --git a/spec/lib/miq_expression/tag_spec.rb b/spec/lib/miq_expression/tag_spec.rb index 2e7b76a5157..56ace60eb03 100644 --- a/spec/lib/miq_expression/tag_spec.rb +++ b/spec/lib/miq_expression/tag_spec.rb @@ -59,7 +59,7 @@ it "with model.associations.associations.managed-in_tag" do tag = "Vm.service.user.managed-service_level" expect(described_class.parse(tag)).to 
have_attributes(:model => Vm, - :associations => %w(service user), + :associations => %w[service user], :namespace => "/managed/service_level") end diff --git a/spec/lib/miq_expression/target_spec.rb b/spec/lib/miq_expression/target_spec.rb index 413ab7ae6f9..89bee6a83d7 100644 --- a/spec/lib/miq_expression/target_spec.rb +++ b/spec/lib/miq_expression/target_spec.rb @@ -3,15 +3,15 @@ subject { described_class.parse(@field)&.column_type } let(:string_custom_attribute) do FactoryBot.create(:custom_attribute, - :name => "foo", - :value => "string", - :resource_type => 'ExtManagementSystem') + :name => "foo", + :value => "string", + :resource_type => 'ExtManagementSystem') end let(:date_custom_attribute) do FactoryBot.create(:custom_attribute, - :name => "foo", - :value => DateTime.current, - :resource_type => 'ExtManagementSystem') + :name => "foo", + :value => DateTime.current, + :resource_type => 'ExtManagementSystem') end it "with model-field__with_pivot_table_suffix" do diff --git a/spec/lib/miq_expression_spec.rb b/spec/lib/miq_expression_spec.rb index 6a3ffc4514d..fcc0a83deb6 100644 --- a/spec/lib/miq_expression_spec.rb +++ b/spec/lib/miq_expression_spec.rb @@ -3,7 +3,7 @@ let(:vm) { FactoryBot.create(:vm) } let!(:custom_attribute) { FactoryBot.create(:custom_attribute, :name => 'my_attribute_1', :resource => vm) } let(:extra_fields) do - %w(start_date + %w[start_date end_date interval_name display_range @@ -12,7 +12,7 @@ label_name id vm_id - vm_name) + vm_name] end it 'lists custom attributes in ChargebackVm' do @@ -463,12 +463,12 @@ it "generates the SQL for an INCLUDES ANY with expression method" do sql, * = MiqExpression.new("INCLUDES ANY" => {"field" => "Vm-ipaddresses", "value" => "foo"}).to_sql - expected_sql = <<-EXPECTED.strip_heredoc.split("\n").join(" ") + expected_sql = <<~EXPECTED.split("\n").join(" ") 1 = (SELECT 1 FROM "hardwares" INNER JOIN "networks" ON "networks"."hardware_id" = "hardwares"."id" WHERE "hardwares"."vm_or_template_id" = "vms"."id" - AND (\"networks\".\"ipaddress\" ILIKE '%foo%' OR \"networks\".\"ipv6address\" ILIKE '%foo%') + AND ("networks"."ipaddress" ILIKE '%foo%' OR "networks"."ipv6address" ILIKE '%foo%') LIMIT 1) EXPECTED expect(sql).to eq(expected_sql) @@ -593,7 +593,7 @@ it "generates the SQL for a CONTAINS expression with has_many field" do sql, * = MiqExpression.new("CONTAINS" => {"field" => "Vm.guest_applications-name", "value" => "foo"}).to_sql - expected = "\"vms\".\"id\" IN (SELECT \"vms\".\"id\" FROM \"vms\" INNER JOIN \"guest_applications\" ON "\ + expected = "\"vms\".\"id\" IN (SELECT \"vms\".\"id\" FROM \"vms\" INNER JOIN \"guest_applications\" ON " \ "\"guest_applications\".\"vm_or_template_id\" = \"vms\".\"id\" WHERE \"guest_applications\".\"name\" = 'foo')" expect(sql).to eq(expected) end @@ -790,7 +790,7 @@ end it "generates the SQL for a FROM expression with a value of 'Yesterday'/'Today' for a date field" do - exp = described_class.new("FROM" => {"field" => "Vm-retires_on", "value" => %w(Yesterday Today)}) + exp = described_class.new("FROM" => {"field" => "Vm-retires_on", "value" => %w[Yesterday Today]}) sql, * = exp.to_sql("Asia/Jakarta") expect(sql).to eq(%q("vms"."retires_on" BETWEEN '2011-01-10 17:00:00' AND '2011-01-12 16:59:59.999999')) end @@ -1121,7 +1121,9 @@ "FIND" => { "checkany" => {"FROM" => {"field" => "Host.vms-last_scan_on", "value" => ["2011-01-08 17:00", "2011-01-09 23:30:59"]}}, - "search" => {"IS NOT NULL" => {"field" => "Host.vms-description"}}}) + "search" => {"IS NOT NULL" => {"field" => 
"Host.vms-description"}} + } + ) result = Host.all.to_a.select { |rec| filter.lenient_evaluate(rec) } expect(result).to contain_exactly(host3, host5) end @@ -1146,7 +1148,8 @@ "FIND" => { "search" => {"FROM" => {"field" => "Host.vms-last_scan_on", "value" => ["2011-01-08 17:00", "2011-01-09 23:30:59"]}}, - "checkall" => {"IS NOT NULL" => {"field" => "Host.vms-description"}}} + "checkall" => {"IS NOT NULL" => {"field" => "Host.vms-description"}} + } ) result = Host.all.to_a.select { |rec| filter.lenient_evaluate(rec) } expect(result).to eq([host2]) @@ -1535,7 +1538,7 @@ end it "does not escape escaped forward slashes for values in REGULAR EXPRESSION MATCHES expressions" do - value = "\/foo\/bar" + value = "/foo/bar" actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby expected = "/virtual/name =~ /\\/foo\\/bar/" expect(actual).to eq(expected) @@ -1591,13 +1594,13 @@ end it "does not escape escaped forward slashes for values in REGULAR EXPRESSION DOES NOT MATCH expressions" do - value = "\/foo\/bar" + value = "/foo/bar" actual = described_class.new("REGULAR EXPRESSION DOES NOT MATCH" => {"field" => "Vm-name", "value" => value}).to_ruby expected = "/virtual/name !~ /\\/foo\\/bar/" expect(actual).to eq(expected) end - # Note: To debug these tests, the following may be helpful: + # NOTE: To debug these tests, the following may be helpful: # puts "Expression Raw: #{filter.exp.inspect}" # puts "Expression in Human: #{filter.to_human}" # puts "Expression in Ruby: #{filter.to_ruby}" @@ -2101,7 +2104,7 @@ end it "generates the RUBY for a FROM expression with a value of 'Yesterday'/'Today' for a date field" do - exp = described_class.new("FROM" => {"field" => "Vm-retires_on", "value" => %w(Yesterday Today)}) + exp = described_class.new("FROM" => {"field" => "Vm-retires_on", "value" => %w[Yesterday Today]}) ruby, * = exp.to_ruby("Asia/Jakarta") expect(ruby).to eq("!(val=/virtual/retires_on&.to_time).nil? 
and val >= Time.utc(2011,1,10,17,0,0) and val <= Time.utc(2011,1,12,16,59,59)") end @@ -2408,8 +2411,8 @@ "value" => "X"}}, "checkall" => {"=" => {"field" => "Vm.advanced_settings-read_only", "value" => "true"}}}) - expect(exp.to_human).to eq('FIND VM and Instance.Advanced Settings : '\ - 'Name STARTS WITH "X" CHECK ALL Read Only = "true"') + expect(exp.to_human).to eq('FIND VM and Instance.Advanced Settings : ' \ + 'Name STARTS WITH "X" CHECK ALL Read Only = "true"') end it "generates a human readable string for a FIND/CHECK expression with alias" do @@ -2544,7 +2547,7 @@ exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 8:00", "2011-01-10 17:00"]}) expect(exp.to_human).to eq('VM and Instance : Last Analysis Time ' \ - 'FROM "2011-01-10 8:00" THROUGH "2011-01-10 17:00"') + 'FROM "2011-01-10 8:00" THROUGH "2011-01-10 17:00"') end end end @@ -2604,19 +2607,17 @@ Tenant.seed cat = FactoryBot.create(:classification, - :description => "Auto Approve - Max CPU", - :name => "prov_max_cpu", - :single_value => true, - :show => true, - ) + :description => "Auto Approve - Max CPU", + :name => "prov_max_cpu", + :single_value => true, + :show => true) cat.add_entry(:description => "1", :read_only => "0", :syntax => "string", :name => "1", :example_text => nil, :default => true, - :single_value => "1" - ) + :single_value => "1") end context "with :typ=>tag" do @@ -2636,8 +2637,7 @@ :typ => "tag", :include_model => true, :include_my_tags => true, - :userid => "admin" - ) + :userid => "admin") expect(result.map(&:first)).to include("VM or Template.My Company Tags : Auto Approve - Max CPU") end @@ -2797,13 +2797,11 @@ expect(subject).to contain_exactly("=", "!=", "<", "<=", ">=", ">") end -=begin - # there is no example of fields with fixnum datatype available for expression builder - it "returns list of available operations for field type 'fixnum'" do - @field = ? - expect(subject).to eq(["=", "!=", "<", "<=", ">=", ">", "RUBY"]) - end -=end + # # there is no example of fields with fixnum datatype available for expression builder + # it "returns list of available operations for field type 'fixnum'" do + # @field = ? 
+ # expect(subject).to eq(["=", "!=", "<", "<=", ">=", ">", "RUBY"]) + # end it "returns list of available operations for field type 'string_set'" do @field = "ManageIQ::Providers::InfraManager::Vm-hostnames" @@ -2855,7 +2853,7 @@ :format_sub_type => nil, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end @@ -2868,7 +2866,7 @@ :format_sub_type => :bytes, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end @@ -2881,7 +2879,7 @@ :format_sub_type => :boolean, :include => {}, :tag => false, - :sql_support => true, + :sql_support => true ) end @@ -2894,7 +2892,7 @@ :format_sub_type => nil, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end @@ -2908,7 +2906,7 @@ :format_sub_type => :string, :include => {}, :tag => true, - :sql_support => false, + :sql_support => false ) end @@ -2921,7 +2919,7 @@ :format_sub_type => :string, :include => {}, :tag => true, - :sql_support => true, + :sql_support => true ) end @@ -2934,7 +2932,7 @@ :format_sub_type => :string, :include => {:host => {}}, :tag => true, - :sql_support => true, + :sql_support => true ) end @@ -2947,7 +2945,7 @@ :format_sub_type => :integer, :include => {}, :tag => false, - :sql_support => true, + :sql_support => true ) end @@ -2960,7 +2958,7 @@ :format_sub_type => :string, :include => {:guest_applications => {}}, :tag => false, - :sql_support => true, + :sql_support => true ) end @@ -2973,7 +2971,7 @@ :format_sub_type => :bytes, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end @@ -2986,7 +2984,7 @@ :format_sub_type => nil, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end @@ -2999,7 +2997,7 @@ :format_sub_type => nil, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end @@ -3012,7 +3010,7 @@ :format_sub_type => :boolean, :include => {}, :tag => false, - :sql_support => false, + :sql_support => false ) end end @@ -3257,12 +3255,13 @@ expect(result).to include( ">=" => {"field" => "Vm-num_cpu", "value" => "2"}, - "result" => false) + "result" => false + ) end end describe ".operands2rubyvalue" do - RSpec.shared_examples :coerces_value_to_integer do |value| + RSpec.shared_examples :coerces_value_to_integer do |_value| it 'coerces the value to an integer' do expect(subject.last).to eq(0) end @@ -3431,16 +3430,16 @@ it "returns the added classification when no_cache option is used" do Tenant.seed FactoryBot.create(:classification, - :name => "first_classification", - :description => "First Classification", - :children => [FactoryBot.create(:classification)]) + :name => "first_classification", + :description => "First Classification", + :children => [FactoryBot.create(:classification)]) actual = described_class.tag_details(nil, {}) expect(actual).to eq([["My Company Tags : First Classification", "managed-first_classification"]]) FactoryBot.create(:classification, - :name => "second_classification", - :description => "Second Classification", - :children => [FactoryBot.create(:classification)]) + :name => "second_classification", + :description => "Second Classification", + :children => [FactoryBot.create(:classification)]) actual = described_class.tag_details(nil, :no_cache => true) expect(actual).to eq([["My Company Tags : First Classification", "managed-first_classification"], ["My Company Tags : Second Classification", "managed-second_classification"]]) end diff --git a/spec/lib/miq_ldap_spec.rb b/spec/lib/miq_ldap_spec.rb index 
3296a48c7b4..252ef07598d 100644 --- a/spec/lib/miq_ldap_spec.rb +++ b/spec/lib/miq_ldap_spec.rb @@ -42,7 +42,7 @@ end end - let(:users) { %w(rock@mycompany.com smith@mycompany.com will@mycompany.com john@mycompany.com) } + let(:users) { %w[rock@mycompany.com smith@mycompany.com will@mycompany.com john@mycompany.com] } it "gets user information" do if @userid @@ -52,6 +52,7 @@ users.sort.each do |u| udata = ldap.get_user_info(u) next if udata.nil? + # puts "\nUser Data for #{udata[:display_name]}:" udata.sort_by { |k, _v| k.to_s }.each { |k, v| puts "\t#{k}: #{v}" } diff --git a/spec/lib/postponed_translation_spec.rb b/spec/lib/postponed_translation_spec.rb index 904e3f92ddb..a0e80bee2bd 100644 --- a/spec/lib/postponed_translation_spec.rb +++ b/spec/lib/postponed_translation_spec.rb @@ -8,8 +8,8 @@ expect(pt.translate).to eq("Test foo5") pt = PostponedTranslation.new("Test %{bar}") do - {:bar => "foo"} - end + {:bar => "foo"} + end expect(pt.translate).to eq("Test foo") end end diff --git a/spec/lib/rbac/filterer_spec.rb b/spec/lib/rbac/filterer_spec.rb index cf9b908103b..66e58805f3d 100644 --- a/spec/lib/rbac/filterer_spec.rb +++ b/spec/lib/rbac/filterer_spec.rb @@ -152,7 +152,7 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt # MiqRequest for owner group let!(:miq_request_user_owner) { FactoryBot.create(:miq_provision_request, :tenant => owner_tenant, :requester => owner_user) } # User for owner group - let(:user_a) { FactoryBot.create(:user, :miq_groups => [owner_group]) } + let(:user_a) { FactoryBot.create(:user, :miq_groups => [owner_group]) } # MiqRequests for other group let!(:miq_request_user_a) { FactoryBot.create(:miq_provision_request, :tenant => owner_tenant, :requester => other_user) } @@ -371,10 +371,10 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt end end - %w( + %w[ automation_manager_authentication ManageIQ::Providers::AutomationManager::Authentication embedded_automation_manager_authentication ManageIQ::Providers::EmbeddedAutomationManager::Authentication - ).slice(2) do |factory, klass| + ].slice(2) do |factory, klass| context "searching for instances of #{klass}" do let!(:automation_manager_authentication) { FactoryBot.create(factory) } automation_manager_authentication.tag_with('/managed/environment/prod', :ns => '*') @@ -631,7 +631,7 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt let(:nonsql_expression) { {"=" => {"field" => "Vm-vendor_display", "value" => "VMware"}} } let(:raw_expression) { nonsql_expression } let(:expression) { MiqExpression.new(raw_expression) } - let(:search_attributes) { { :class => "Vm", :filter => expression } } + let(:search_attributes) { {:class => "Vm", :filter => expression} } let(:results) { subject.search(search_attributes).first } before { [owned_vm, other_vm] } @@ -647,8 +647,8 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt end context "with a partial non-sql filter" do - let(:sql_expression) { { "IS EMPTY" => { "field" => "Vm.host-name" } } } - let(:raw_expression) { { "AND" => [nonsql_expression, sql_expression] } } + let(:sql_expression) { {"IS EMPTY" => {"field" => "Vm.host-name"}} } + let(:raw_expression) { {"AND" => [nonsql_expression, sql_expression]} } it "finds the Vms" do expect(results.to_a).to match_array [owned_vm, other_vm] @@ -656,7 +656,7 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt end it "includes references" do - expect(subject).to 
receive(:include_references).with(anything, ::Vm, nil, {:host => {}}) + expect(subject).to receive(:include_references).with(anything, Vm, nil, {:host => {}}) .and_call_original expect(subject).to receive(:warn).never results @@ -702,7 +702,7 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt context "with a miq_expression filter on vms" do let(:expression) { MiqExpression.new("=" => {"field" => "Vm-vendor", "value" => "vmware"}) } - let(:search_attributes) { { :class => "Vm", :filter => expression } } + let(:search_attributes) { {:class => "Vm", :filter => expression} } let(:results) { described_class.search(search_attributes).first } before { [owned_vm, other_vm] } @@ -733,7 +733,7 @@ def combine_filtered_ids(user_filtered_ids, belongsto_filtered_ids, managed_filt context "with :extra_cols on a Service" do let(:extra_cols) { [:owned_by_current_user] } - let(:search_attributes) { { :class => "Service", :extra_cols => extra_cols } } + let(:search_attributes) { {:class => "Service", :extra_cols => extra_cols} } let(:results) { described_class.search(search_attributes).first } before { FactoryBot.create :service, :evm_owner => owner_user } @@ -1506,11 +1506,10 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) @timestamps.each do |t, v| [@host1, @host2].each do |h| h.metric_rollups << FactoryBot.create(:metric_rollup_host_hr, - :timestamp => t, - :cpu_usage_rate_average => v, - :cpu_ready_delta_summation => v * 1000, # Multiply by a factor of 1000 to make it more realistic and enable testing virtual col v_pct_cpu_ready_delta_summation - :sys_uptime_absolute_latest => v - ) + :timestamp => t, + :cpu_usage_rate_average => v, + :cpu_ready_delta_summation => v * 1000, # Multiply by a factor of 1000 to make it more realistic and enable testing virtual col v_pct_cpu_ready_delta_summation + :sys_uptime_absolute_latest => v) end end end @@ -2065,11 +2064,13 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) describe ".search" do let!(:network_object) do return network_manager if network_model == ManageIQ::Providers::NetworkManager + FactoryBot.create(network_model.underscore, :ext_management_system => network_manager) end let!(:network_object_with_different_network_manager) do return network_manager_1 if network_model == ManageIQ::Providers::NetworkManager + FactoryBot.create(network_model.underscore, :ext_management_system => network_manager_1) end @@ -2158,7 +2159,7 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) end context 'with network models' do - NETWORK_MODELS = %w( + NETWORK_MODELS = %w[ CloudNetwork CloudSubnet FloatingIp @@ -2166,7 +2167,7 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) NetworkPort NetworkRouter SecurityGroup - ).freeze + ].freeze NETWORK_MODELS.each do |network_model| describe ".search" do @@ -2214,7 +2215,7 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) FactoryBot.create(:host, :name => "Host4", :hostname => "host4.local") ].each_with_index do |host, i| grp = i + 1 - guest_os = %w(_none_ windows ubuntu windows ubuntu)[grp] + guest_os = %w[_none_ windows ubuntu windows ubuntu][grp] vm = FactoryBot.build(:vm_vmware, :name => "Test Group #{grp} VM #{i}") vm.hardware = FactoryBot.build(:hardware, :cpu_sockets => (grp * 2), :memory_mb => (grp * 1.megabytes), :guest_os => guest_os) vm.host = host @@ -2410,9 +2411,9 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) before do 4.times do |i| 
FactoryBot.create(:vm_vmware, - :name => "Test VM #{i}", - :connection_state => i < 2 ? 'connected' : 'disconnected', - :miq_group => i.even? ? group : group2) + :name => "Test VM #{i}", + :connection_state => i < 2 ? 'connected' : 'disconnected', + :miq_group => i.even? ? group : group2) end end @@ -2637,7 +2638,7 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) FactoryBot.create(:vm_vmware, :location => "a") FactoryBot.create(:vm_vmware, :location => "b") FactoryBot.create(:vm_vmware, :location => "a") - expect(described_class.filtered(Vm.all.order(:location)).map(&:location)).to eq(%w(a a b)) + expect(described_class.filtered(Vm.all.order(:location)).map(&:location)).to eq(%w[a a b]) end it "returns empty array for out-of-bounds condition when limits cannot be applied in SQL" do @@ -2664,8 +2665,8 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) let(:klass) { VmOrTemplate } let(:scope) { klass.all } - let(:include_for_find) { { :miq_server => {} } } - let(:exp_includes) { { :host => {} } } + let(:include_for_find) { {:miq_server => {}} } + let(:exp_includes) { {:host => {}} } it "adds include_for_find .references to the scope" do method_args = [scope, klass, include_for_find, nil] @@ -2690,7 +2691,7 @@ def get_rbac_results_for_and_expect_objects(klass, expected_objects) context "if the include is polymorphic" do let(:klass) { MetricRollup } - let(:include_for_find) { { :resource => {} } } + let(:include_for_find) { {:resource => {}} } it "does not add .references to the scope" do method_args = [scope, klass, include_for_find, nil] diff --git a/spec/lib/rbac_spec.rb b/spec/lib/rbac_spec.rb index 7fe4bf829ec..9f4565d3412 100644 --- a/spec/lib/rbac_spec.rb +++ b/spec/lib/rbac_spec.rb @@ -4,9 +4,9 @@ describe ".resources_shared_with" do let(:user) do FactoryBot.create(:user, - :role => "user", - :tenant => FactoryBot.create(:tenant, :name => "Tenant under root"), - :features => user_allowed_feature) + :role => "user", + :tenant => FactoryBot.create(:tenant, :name => "Tenant under root"), + :features => user_allowed_feature) end let(:user_allowed_feature) { "service" } let(:resource_to_be_shared) { FactoryBot.create(:vm_vmware, :tenant => user.current_tenant) } @@ -20,8 +20,8 @@ end let(:sharee) do FactoryBot.create(:user, - :miq_groups => [FactoryBot.create(:miq_group, - :tenant => FactoryBot.create(:tenant, :name => "Sibling tenant"))]) + :miq_groups => [FactoryBot.create(:miq_group, + :tenant => FactoryBot.create(:tenant, :name => "Sibling tenant"))]) end before { Tenant.seed } @@ -43,14 +43,14 @@ let(:siblings_child) { FactoryBot.create(:tenant, :parent => sibling_tenant, :name => "Sibling's child tenant") } let(:sharee) do FactoryBot.create(:user, - :miq_groups => [FactoryBot.create(:miq_group, - :tenant => siblings_child)]) + :miq_groups => [FactoryBot.create(:miq_group, + :tenant => siblings_child)]) end let!(:share) do - ResourceSharer.new(:user => user, - :resource => resource_to_be_shared, - :tenants => tenants, - :features => features, + ResourceSharer.new(:user => user, + :resource => resource_to_be_shared, + :tenants => tenants, + :features => features, :allow_tenant_inheritance => allow_tenant_inheritance) end let(:tenants) { [sibling_tenant] } diff --git a/spec/lib/services/auto_placement_visibility_service_spec.rb b/spec/lib/services/auto_placement_visibility_service_spec.rb index 2432c39a5e1..d158cc5c5e8 100644 --- a/spec/lib/services/auto_placement_visibility_service_spec.rb +++ 
b/spec/lib/services/auto_placement_visibility_service_spec.rb @@ -7,7 +7,7 @@ it "adds values to the field names to hide" do expect(subject.determine_visibility(auto_placement)).to eq( - :hide => %i( + :hide => %i[ placement_host_name placement_ds_name host_filter @@ -17,7 +17,7 @@ rp_filter placement_rp_name placement_dc_name - ), + ], :edit => [] ) end @@ -29,7 +29,7 @@ it "adds values to the field names to edit" do expect(subject.determine_visibility(auto_placement)).to eq( :hide => [], - :edit => %i( + :edit => %i[ placement_host_name placement_ds_name host_filter @@ -39,7 +39,7 @@ rp_filter placement_rp_name placement_dc_name - ) + ] ) end end diff --git a/spec/lib/services/customize_fields_visibility_service_spec.rb b/spec/lib/services/customize_fields_visibility_service_spec.rb index 83b94d744a1..123fb912e90 100644 --- a/spec/lib/services/customize_fields_visibility_service_spec.rb +++ b/spec/lib/services/customize_fields_visibility_service_spec.rb @@ -10,7 +10,7 @@ it "returns a list of pxe customization fields to edit" do expect(subject.determine_visibility(platform, supports_customization_template, customize_fields_list)).to eq( :hide => [], - :edit => %i( + :edit => %i[ addr_mode customization_template_id customization_template_script @@ -33,7 +33,7 @@ sysprep_product_key sysprep_timezone sysprep_domain_admin - ) + ] ) end end @@ -43,7 +43,7 @@ context "when the customize_fields_list contains only items from exclude list" do let(:customize_fields_list) do - %i( + %i[ sysprep_spec_override sysprep_custom_spec sysprep_enabled @@ -55,7 +55,7 @@ gateway dns_servers dns_suffixes - ) + ] end let(:platform) { "linux" } @@ -67,7 +67,7 @@ end context "when the customize_fields_list contains linux_domain_name" do - let(:customize_fields_list) { %i(linux_domain_name potato) } + let(:customize_fields_list) { %i[linux_domain_name potato] } context "when the platform is linux" do let(:platform) { "linux" } @@ -77,9 +77,9 @@ platform, supports_customization_template, customize_fields_list - )).to eq( - :hide => [:potato], :edit => [:linux_domain_name] - ) + )).to eq( + :hide => [:potato], :edit => [:linux_domain_name] + ) end end @@ -91,9 +91,9 @@ platform, supports_customization_template, customize_fields_list - )).to eq( - :hide => [], :edit => [] - ) + )).to eq( + :hide => [], :edit => [] + ) end end end diff --git a/spec/lib/services/dialog_field_visibility_service_spec.rb b/spec/lib/services/dialog_field_visibility_service_spec.rb index 1b1bd83e972..4960e4790eb 100644 --- a/spec/lib/services/dialog_field_visibility_service_spec.rb +++ b/spec/lib/services/dialog_field_visibility_service_spec.rb @@ -171,8 +171,8 @@ .to receive(:determine_visibility).with( platform, supports_customization_template, customize_fields_list ).and_return( - :hide => %i(customize_fields_hide number_hide), # Forces uniq - :edit => %i(customize_fields_edit number_edit retirement_hide) # Forces uniq and removal of intersection + :hide => %i[customize_fields_hide number_hide], # Forces uniq + :edit => %i[customize_fields_edit number_edit retirement_hide] # Forces uniq and removal of intersection ) allow(sysprep_custom_spec_visibility_service) @@ -200,7 +200,7 @@ it "adds the values to the field names to hide, edit, and show without duplicates or intersections" do result = subject.determine_visibility(options) - expect(result[:hide]).to match_array(%i( + expect(result[:hide]).to match_array(%i[ auto_hide customize_fields_hide linked_clone_hide @@ -211,8 +211,8 @@ service_template_request_hide sysprep_auto_logon_hide 
sysprep_custom_spec_hide - )) - expect(result[:edit]).to match_array(%i( + ]) + expect(result[:edit]).to match_array(%i[ auto_edit customize_fields_edit linked_clone_edit @@ -223,7 +223,7 @@ retirement_edit sysprep_auto_logon_edit sysprep_custom_spec_edit - )) + ]) expect(result[:show]).to match_array([:linked_clone_show]) end end diff --git a/spec/lib/services/dialog_import_service_spec.rb b/spec/lib/services/dialog_import_service_spec.rb index 3a9c7f4d5a0..d3266bfb0e4 100644 --- a/spec/lib/services/dialog_import_service_spec.rb +++ b/spec/lib/services/dialog_import_service_spec.rb @@ -9,7 +9,7 @@ shared_context "DialogImportService dialog setup" do let(:dialog_fields) do [{"name" => "FavoriteColor", "label" => "Favorite Color"}, - {"name" => "dialog_field_2", "dialog_field_responders" => ["dialog_field"] }] + {"name" => "dialog_field_2", "dialog_field_responders" => ["dialog_field"]}] end let(:dialog_groups) do @@ -258,7 +258,7 @@ let(:import_file_upload) do double("ImportFileUpload", :id => 123, :uploaded_content => yaml_data) end - let(:dialogs_to_import) { %w(Test Test2) } + let(:dialogs_to_import) { %w[Test Test2] } before do allow(import_file_upload).to receive(:destroy) @@ -528,7 +528,7 @@ let(:field1) { instance_double("DialogField", :id => 123, :name => "field1") } let(:field2) { instance_double("DialogField", :id => 321, :name => "field2") } let(:dialog_fields) { [field1, field2] } - let(:association_list) { [{"field1" => %w(responder1 field2)}] } + let(:association_list) { [{"field1" => %w[responder1 field2]}] } it "creates dialog field associations" do expect do @@ -548,13 +548,13 @@ } end - let(:field1) { {"name" => "field1", "dialog_field_responders" => %w(field2 field3)} } - let(:field2) { {"name" => "field2", "dialog_field_responders" => %w(field3)} } + let(:field1) { {"name" => "field1", "dialog_field_responders" => %w[field2 field3]} } + let(:field2) { {"name" => "field2", "dialog_field_responders" => %w[field3]} } let(:field3) { {"name" => "field3", "dialog_field_responders" => []} } it "creates an association list of ids based on names" do expect(dialog_import_service.build_association_list(dialog)).to eq( - [{"field1" => %w(field2 field3)}, {"field2" => %w(field3)}] + [{"field1" => %w[field2 field3]}, {"field2" => %w[field3]}] ) end diff --git a/spec/lib/services/linked_clone_visibility_service_spec.rb b/spec/lib/services/linked_clone_visibility_service_spec.rb index 2eb9c68ce43..ca9faefa779 100644 --- a/spec/lib/services/linked_clone_visibility_service_spec.rb +++ b/spec/lib/services/linked_clone_visibility_service_spec.rb @@ -14,7 +14,7 @@ it "adds values to the field names to edit" do expect(subject.determine_visibility(provision_type, linked_clone, snapshot_count)).to eq( :hide => [], - :edit => %i(linked_clone snapshot), + :edit => %i[linked_clone snapshot], :show => [] ) end @@ -54,7 +54,7 @@ it "adds values to the field names to hide" do expect(subject.determine_visibility(provision_type, linked_clone, snapshot_count)).to eq( - :hide => %i(linked_clone snapshot), + :hide => %i[linked_clone snapshot], :edit => [], :show => [] ) diff --git a/spec/lib/services/network_visibility_service_spec.rb b/spec/lib/services/network_visibility_service_spec.rb index 8755f3d63da..980b715c2b0 100644 --- a/spec/lib/services/network_visibility_service_spec.rb +++ b/spec/lib/services/network_visibility_service_spec.rb @@ -10,7 +10,7 @@ it "adds the network values to the edit values" do expect(subject.determine_visibility(sysprep_enabled, supports_pxe, supports_iso, addr_mode)).to 
eq( :hide => [], - :edit => %i(addr_mode dns_suffixes dns_servers ip_addr subnet_mask gateway) + :edit => %i[addr_mode dns_suffixes dns_servers ip_addr subnet_mask gateway] ) end end @@ -45,8 +45,8 @@ it "adds the correct values to the edit and hide values" do expect(subject.determine_visibility(sysprep_enabled, supports_pxe, supports_iso, addr_mode)).to eq( - :hide => %i(ip_addr subnet_mask gateway), - :edit => %i(addr_mode dns_suffixes dns_servers) + :hide => %i[ip_addr subnet_mask gateway], + :edit => %i[addr_mode dns_suffixes dns_servers] ) end end @@ -119,7 +119,7 @@ it "adds the correct values to the hide values" do expect(subject.determine_visibility(sysprep_enabled, supports_pxe, supports_iso, addr_mode)).to eq( - :hide => %i(addr_mode ip_addr subnet_mask gateway dns_servers dns_suffixes), + :hide => %i[addr_mode ip_addr subnet_mask gateway dns_servers dns_suffixes], :edit => [] ) end diff --git a/spec/lib/services/number_of_vms_visibility_service_spec.rb b/spec/lib/services/number_of_vms_visibility_service_spec.rb index 84ac32604db..33001a422b6 100644 --- a/spec/lib/services/number_of_vms_visibility_service_spec.rb +++ b/spec/lib/services/number_of_vms_visibility_service_spec.rb @@ -10,7 +10,7 @@ it "adds values to field names to hide and edit" do expect(subject.determine_visibility(number_of_vms, platform)).to eq( - :hide => %i(sysprep_computer_name linux_host_name floating_ip_address), + :hide => %i[sysprep_computer_name linux_host_name floating_ip_address], :edit => [:ip_addr] ) end @@ -21,7 +21,7 @@ it "adds values to field names to hide and edit" do expect(subject.determine_visibility(number_of_vms, platform)).to eq( - :hide => %i(sysprep_computer_name linux_host_name floating_ip_address), + :hide => %i[sysprep_computer_name linux_host_name floating_ip_address], :edit => [:ip_addr] ) end @@ -36,8 +36,8 @@ it "adds values to field names to hide and edit" do expect(subject.determine_visibility(number_of_vms, platform)).to eq( - :hide => %i(ip_addr sysprep_computer_name), - :edit => %i(floating_ip_address linux_host_name) + :hide => %i[ip_addr sysprep_computer_name], + :edit => %i[floating_ip_address linux_host_name] ) end end @@ -47,8 +47,8 @@ it "adds values to field names to hide and edit" do expect(subject.determine_visibility(number_of_vms, platform)).to eq( - :hide => %i(ip_addr linux_host_name), - :edit => %i(floating_ip_address sysprep_computer_name) + :hide => %i[ip_addr linux_host_name], + :edit => %i[floating_ip_address sysprep_computer_name] ) end end diff --git a/spec/lib/services/pxe_iso_visibility_service_spec.rb b/spec/lib/services/pxe_iso_visibility_service_spec.rb index cfbc3d15696..348cd879470 100644 --- a/spec/lib/services/pxe_iso_visibility_service_spec.rb +++ b/spec/lib/services/pxe_iso_visibility_service_spec.rb @@ -11,7 +11,7 @@ it "returns the values to be edit and hidden" do expect(subject.determine_visibility(supports_iso, supports_pxe)).to eq( :hide => [], - :edit => %i(pxe_image_id pxe_server_id iso_image_id) + :edit => %i[pxe_image_id pxe_server_id iso_image_id] ) end end @@ -22,7 +22,7 @@ it "returns the values to be edit and hidden" do expect(subject.determine_visibility(supports_iso, supports_pxe)).to eq( :hide => [:iso_image_id], - :edit => %i(pxe_image_id pxe_server_id) + :edit => %i[pxe_image_id pxe_server_id] ) end end @@ -36,7 +36,7 @@ it "returns the values to be edit and hidden" do expect(subject.determine_visibility(supports_iso, supports_pxe)).to eq( - :hide => %i(pxe_image_id pxe_server_id), + :hide => %i[pxe_image_id pxe_server_id], 
:edit => [:iso_image_id] ) end @@ -47,7 +47,7 @@ it "returns the values to be edit and hidden" do expect(subject.determine_visibility(supports_iso, supports_pxe)).to eq( - :hide => %i(pxe_image_id pxe_server_id iso_image_id), + :hide => %i[pxe_image_id pxe_server_id iso_image_id], :edit => [] ) end diff --git a/spec/lib/services/request_type_visibility_service_spec.rb b/spec/lib/services/request_type_visibility_service_spec.rb index 3d8827ebd1f..4b406baa52b 100644 --- a/spec/lib/services/request_type_visibility_service_spec.rb +++ b/spec/lib/services/request_type_visibility_service_spec.rb @@ -6,7 +6,7 @@ let(:request_type) { :clone_to_template } it "returns the values to be hidden" do - expect(subject.determine_visibility(request_type)).to eq(:hide => %i(vm_filter vm_auto_start)) + expect(subject.determine_visibility(request_type)).to eq(:hide => %i[vm_filter vm_auto_start]) end end diff --git a/spec/lib/services/resource_sharer_spec.rb b/spec/lib/services/resource_sharer_spec.rb index 3389f24839b..4b8f3727f00 100644 --- a/spec/lib/services/resource_sharer_spec.rb +++ b/spec/lib/services/resource_sharer_spec.rb @@ -11,8 +11,8 @@ let(:user) do FactoryBot.create(:user, - :role => "user", - :features => user_allowed_feature) + :role => "user", + :features => user_allowed_feature) end let(:user_allowed_feature) { "service" } let(:resource_to_be_shared) { FactoryBot.create(:miq_template) } @@ -42,7 +42,7 @@ let(:features) { MiqProductFeature.find_by(:identifier => "host") } let(:user_allowed_feature) { "service" } - before { EvmSpecHelper.seed_specific_product_features(%w(host service)) } + before { EvmSpecHelper.seed_specific_product_features(%w[host service]) } it "is invalid" do expect(subject).not_to be_valid @@ -54,7 +54,7 @@ let(:features) { MiqProductFeature.find_by(:identifier => "host") } let(:user_allowed_feature) { "host_edit" } - before { EvmSpecHelper.seed_specific_product_features(%w(host)) } + before { EvmSpecHelper.seed_specific_product_features(%w[host]) } it "is invalid" do expect(subject).not_to be_valid @@ -66,7 +66,7 @@ let(:features) { MiqProductFeature.find_by(:identifier => "host_edit") } let(:user_allowed_feature) { "everything" } - before { EvmSpecHelper.seed_specific_product_features(%w(host_edit everything)) } + before { EvmSpecHelper.seed_specific_product_features(%w[host_edit everything]) } it "is valid" do expect(subject).to be_valid @@ -88,14 +88,14 @@ context "attempting to share a resource the user doesn't have access to via RBAC" do let(:user) do FactoryBot.create(:user, - :role => "user", - :features => user_allowed_feature, - :tenant => FactoryBot.create(:tenant, :name => "Tenant under root")) + :role => "user", + :features => user_allowed_feature, + :tenant => FactoryBot.create(:tenant, :name => "Tenant under root")) end let(:resource_to_be_shared) do FactoryBot.create(:miq_template, - :tenant => FactoryBot.create(:tenant, - :name => "Sibling tenant")) + :tenant => FactoryBot.create(:tenant, + :name => "Sibling tenant")) end let(:tenants) { [user.current_tenant] } # Attempt to share a resource in Sibling tenant to one's own tenant diff --git a/spec/lib/services/service_template_fields_visibility_service_spec.rb b/spec/lib/services/service_template_fields_visibility_service_spec.rb index 6d4f0a76a03..6084796d71d 100644 --- a/spec/lib/services/service_template_fields_visibility_service_spec.rb +++ b/spec/lib/services/service_template_fields_visibility_service_spec.rb @@ -7,7 +7,7 @@ it "adds values to field names to hide" do 
expect(subject.determine_visibility(service_template_request)).to eq( - :hide => %i(vm_description schedule_type schedule_time) + :hide => %i[vm_description schedule_type schedule_time] ) end end diff --git a/spec/lib/task_helpers/exports/custom_buttons_spec.rb b/spec/lib/task_helpers/exports/custom_buttons_spec.rb index 453ed3cedb1..0089e96b524 100644 --- a/spec/lib/task_helpers/exports/custom_buttons_spec.rb +++ b/spec/lib/task_helpers/exports/custom_buttons_spec.rb @@ -244,8 +244,8 @@ }], }, }], - "custom_button" => [{ - "attributes" => { + "custom_button" => [{ + "attributes" => { "guid" => custom_button2.guid, "description" => "Button Two", "applies_to_class" => "Vm", diff --git a/spec/lib/task_helpers/exports/customization_templates_spec.rb b/spec/lib/task_helpers/exports/customization_templates_spec.rb index 3b318c403b8..3ad783449c5 100644 --- a/spec/lib/task_helpers/exports/customization_templates_spec.rb +++ b/spec/lib/task_helpers/exports/customization_templates_spec.rb @@ -8,33 +8,33 @@ let(:provision_type2) { "vm" } let(:content1) do - { :name => template_name, - :description => template_desc, - :script => template_script, - :type => template_type, - :pxe_image_type => { + {:name => template_name, + :description => template_desc, + :script => template_script, + :type => template_type, + :pxe_image_type => { :name => image_type_name1 - } } + }} end let(:content2) do - { :name => template_name, - :description => template_desc, - :script => template_script, - :type => template_type, - :pxe_image_type => { + {:name => template_name, + :description => template_desc, + :script => template_script, + :type => template_type, + :pxe_image_type => { :name => image_type_name2, :provision_type => provision_type2 - } } + }} end let(:content3) do - { :name => template_name, - :description => template_desc, - :script => template_script, - :type => template_type, - :system => true, - :pxe_image_type => {} } + {:name => template_name, + :description => template_desc, + :script => template_script, + :type => template_type, + :system => true, + :pxe_image_type => {}} end let(:export_dir) do @@ -43,25 +43,25 @@ before do pit1 = FactoryBot.create(:pxe_image_type, - :name => image_type_name1) + :name => image_type_name1) pit2 = FactoryBot.create(:pxe_image_type, - :name => image_type_name2, - :provision_type => provision_type2) + :name => image_type_name2, + :provision_type => provision_type2) FactoryBot.create(:customization_template, - :name => template_name, - :type => template_type, - :description => template_desc, - :script => template_script, - :pxe_image_type => pit1) + :name => template_name, + :type => template_type, + :description => template_desc, + :script => template_script, + :pxe_image_type => pit1) FactoryBot.create(:customization_template, - :name => template_name, - :type => template_type, - :description => template_desc, - :script => template_script, - :pxe_image_type => pit2) + :name => template_name, + :type => template_type, + :description => template_desc, + :script => template_script, + :pxe_image_type => pit2) CustomizationTemplate.create!(:name => template_name, :type => template_type, diff --git a/spec/lib/task_helpers/exports/provision_dialogs_spec.rb b/spec/lib/task_helpers/exports/provision_dialogs_spec.rb index 9a00e9c388e..fc5a4964f89 100644 --- a/spec/lib/task_helpers/exports/provision_dialogs_spec.rb +++ b/spec/lib/task_helpers/exports/provision_dialogs_spec.rb @@ -41,7 +41,7 @@ let(:content2) do { :dialogs => { - :buttons => %i(submit cancel) + :buttons => %i[submit 
cancel] } } end @@ -52,18 +52,18 @@ before do FactoryBot.create(:miq_dialog, - :dialog_type => dialog_type1, - :name => dialog_name1, - :description => dialog_desc1, - :content => content, - :default => true) + :dialog_type => dialog_type1, + :name => dialog_name1, + :description => dialog_desc1, + :content => content, + :default => true) FactoryBot.create(:miq_dialog, - :dialog_type => dialog_type2, - :name => dialog_name2, - :description => dialog_desc2, - :content => content, - :default => false) + :dialog_type => dialog_type2, + :name => dialog_name2, + :description => dialog_desc2, + :content => content, + :default => false) end after do @@ -105,11 +105,11 @@ before do FactoryBot.create(:miq_dialog, - :dialog_type => dialog_type3, - :name => dialog_name2, - :description => dialog_desc2, - :content => content2, - :default => false) + :dialog_type => dialog_type3, + :name => dialog_name2, + :description => dialog_desc2, + :content => content2, + :default => false) end it 'exports the dialogs to different files' do diff --git a/spec/lib/task_helpers/exports/reports_spec.rb b/spec/lib/task_helpers/exports/reports_spec.rb index 515d44af4e6..473f1b03ab4 100644 --- a/spec/lib/task_helpers/exports/reports_spec.rb +++ b/spec/lib/task_helpers/exports/reports_spec.rb @@ -8,26 +8,26 @@ :name => "Test Report", :rpt_type => "Custom", :tz => "Eastern Time (US & Canada)", - :col_order => %w(name boot_time disks_aligned), - :cols => %w(name boot_time disks_aligned), - :db_options => { :rpt_type => "ChargebackContainerProject" }, - "include" => { "columns" => %w(col1 col2) }) + :col_order => %w[name boot_time disks_aligned], + :cols => %w[name boot_time disks_aligned], + :db_options => {:rpt_type => "ChargebackContainerProject"}, + "include" => {"columns" => %w[col1 col2]}) FactoryBot.create(:miq_report, :name => "Test Report 2", :rpt_type => "Custom", :tz => "Eastern Time (US & Canada)", - :col_order => %w(name boot_time disks_aligned), - :cols => %w(name boot_time disks_aligned), - :db_options => { :rpt_type => "ChargebackContainerProject" }, - "include" => { "columns" => %w(col1 col2) }) + :col_order => %w[name boot_time disks_aligned], + :cols => %w[name boot_time disks_aligned], + :db_options => {:rpt_type => "ChargebackContainerProject"}, + "include" => {"columns" => %w[col1 col2]}) FactoryBot.create(:miq_report, :name => "Default Test Report", :rpt_type => "Default", :tz => "Eastern Time (US & Canada)", - :col_order => %w(name boot_time disks_aligned), - :cols => %w(name boot_time disks_aligned), - :db_options => { :rpt_type => "ChargebackContainerProject" }, - "include" => { "columns" => %w(col1 col2) }) + :col_order => %w[name boot_time disks_aligned], + :cols => %w[name boot_time disks_aligned], + :db_options => {:rpt_type => "ChargebackContainerProject"}, + "include" => {"columns" => %w[col1 col2]}) end after do diff --git a/spec/lib/task_helpers/exports_spec.rb b/spec/lib/task_helpers/exports_spec.rb index 076cc976b4d..27e90b42c1d 100644 --- a/spec/lib/task_helpers/exports_spec.rb +++ b/spec/lib/task_helpers/exports_spec.rb @@ -69,16 +69,16 @@ describe '.exclude_attributes' do let(:all_attributes) do - { "id" => 1, - "name" => "EvmRole-super_administrator", - "read_only" => true, - "created_at" => Time.zone.now, - "updated_at" => Time.zone.now, - "settings" => nil } + {"id" => 1, + "name" => "EvmRole-super_administrator", + "read_only" => true, + "created_at" => Time.zone.now, + "updated_at" => Time.zone.now, + "settings" => nil} end it 'removes selected attributes' do - filtered_attributes 
= TaskHelpers::Exports.exclude_attributes(all_attributes, %w(created_at updated_at id)) + filtered_attributes = TaskHelpers::Exports.exclude_attributes(all_attributes, %w[created_at updated_at id]) expect(filtered_attributes).to match("name" => "EvmRole-super_administrator", "read_only" => true, "settings" => nil) end end diff --git a/spec/lib/task_helpers/imports/alert_sets_spec.rb b/spec/lib/task_helpers/imports/alert_sets_spec.rb index 4490a6b59b4..92035b2a5c0 100644 --- a/spec/lib/task_helpers/imports/alert_sets_spec.rb +++ b/spec/lib/task_helpers/imports/alert_sets_spec.rb @@ -6,7 +6,7 @@ let(:alert_set_two_guid) { "a16168b2-2605-11e7-a475-02420ebf1c88" } it 'should import all .yaml files in a specified directory' do - options = { :source => data_dir } + options = {:source => data_dir} expect do TaskHelpers::Imports::AlertSets.new.import(options) end.to_not output.to_stderr @@ -16,7 +16,7 @@ end it 'should import a specified alert export file' do - options = { :source => "#{data_dir}/#{alert_set_file}" } + options = {:source => "#{data_dir}/#{alert_set_file}"} expect do TaskHelpers::Imports::AlertSets.new.import(options) end.to_not output.to_stderr @@ -26,7 +26,7 @@ end it 'should fail to import a specified alert file' do - options = { :source => "#{data_dir}/#{bad_alert_set_file}" } + options = {:source => "#{data_dir}/#{bad_alert_set_file}"} expect do TaskHelpers::Imports::AlertSets.new.import(options) end.to output.to_stderr diff --git a/spec/lib/task_helpers/imports/alerts_spec.rb b/spec/lib/task_helpers/imports/alerts_spec.rb index 457ec2a2946..d15804014d7 100644 --- a/spec/lib/task_helpers/imports/alerts_spec.rb +++ b/spec/lib/task_helpers/imports/alerts_spec.rb @@ -6,7 +6,7 @@ let(:alert_two_guid) { "d2dcbbf8-25fb-11e7-a475-02420ebf1c88" } it 'should import all .yaml files in a specified directory' do - options = { :source => data_dir } + options = {:source => data_dir} expect do TaskHelpers::Imports::Alerts.new.import(options) end.to_not output.to_stderr @@ -15,7 +15,7 @@ end it 'should import a specified alert export file' do - options = { :source => "#{data_dir}/#{alert_file}" } + options = {:source => "#{data_dir}/#{alert_file}"} expect do TaskHelpers::Imports::Alerts.new.import(options) end.to_not output.to_stderr @@ -25,7 +25,7 @@ end it 'should fail to import a specified alert file' do - options = { :source => "#{data_dir}/#{bad_alert_file}" } + options = {:source => "#{data_dir}/#{bad_alert_file}"} expect do TaskHelpers::Imports::Alerts.new.import(options) end.to output.to_stderr diff --git a/spec/lib/task_helpers/imports/custom_buttons_spec.rb b/spec/lib/task_helpers/imports/custom_buttons_spec.rb index 9554e855ac6..e4832491234 100644 --- a/spec/lib/task_helpers/imports/custom_buttons_spec.rb +++ b/spec/lib/task_helpers/imports/custom_buttons_spec.rb @@ -154,7 +154,7 @@ def assert_test_custom_button_set_present expect(cbs.custom_buttons.count).to eq(3) expect(cbs.description).to eq(custom_button_set_description) expect(cbs.custom_buttons.first.resource_action.ae_namespace).to eq(resource_action_ae_namespace) - expect(cbs.custom_buttons.pluck(:userid)).to eq(%w(admin admin admin)) + expect(cbs.custom_buttons.pluck(:userid)).to eq(%w[admin admin admin]) end def assert_imports_only_custom_button_set_one diff --git a/spec/lib/task_helpers/imports/customization_templates_spec.rb b/spec/lib/task_helpers/imports/customization_templates_spec.rb index ed6d55911a6..2dfc8d345c5 100644 --- a/spec/lib/task_helpers/imports/customization_templates_spec.rb +++ 
b/spec/lib/task_helpers/imports/customization_templates_spec.rb @@ -7,7 +7,7 @@ let(:existing_pit_name) { "RHEL-6" } let(:new_pit_name) { "RHEL-7" } let(:new_pit_pt) { "vm" } - let(:options) { { :source => source } } + let(:options) { {:source => source} } describe "when the source is a directory" do let(:source) { data_dir } diff --git a/spec/lib/task_helpers/imports/generic_object_definitions_spec.rb b/spec/lib/task_helpers/imports/generic_object_definitions_spec.rb index ec08a2c33c6..0a4b694af7c 100644 --- a/spec/lib/task_helpers/imports/generic_object_definitions_spec.rb +++ b/spec/lib/task_helpers/imports/generic_object_definitions_spec.rb @@ -1,7 +1,7 @@ RSpec.describe TaskHelpers::Imports::GenericObjectDefinitions do describe "#import" do let(:data_dir) { File.join(File.expand_path(__dir__), 'data', 'generic_object_definitions') } - let(:options) { { :source => source, :overwrite => overwrite } } + let(:options) { {:source => source, :overwrite => overwrite} } let(:god_name1) { "Apep" } let(:god_name2) { "Apophis" } let(:god_file1) { "apep.yaml" } @@ -18,7 +18,7 @@ 'created' => :datetime, 'retirement' => :datetime }, - :associations => { 'cloud_tenant' => 'CloudTenant' }, + :associations => {'cloud_tenant' => 'CloudTenant'}, :methods => ['kick', 'laugh_at', 'punch', 'parseltongue'] } end diff --git a/spec/lib/task_helpers/imports/provision_dialogs_spec.rb b/spec/lib/task_helpers/imports/provision_dialogs_spec.rb index 15f27c65c76..8eb3a3c2cd1 100644 --- a/spec/lib/task_helpers/imports/provision_dialogs_spec.rb +++ b/spec/lib/task_helpers/imports/provision_dialogs_spec.rb @@ -8,7 +8,7 @@ let(:dialog_two_desc) { 'Test2 Sample VM Provisioning Dialog (Template)' } describe "#import" do - let(:options) { { :source => source } } + let(:options) { {:source => source} } describe "when the source is a directory" do let(:source) { data_dir } diff --git a/spec/lib/task_helpers/imports/reports_spec.rb b/spec/lib/task_helpers/imports/reports_spec.rb index 0c9cdd8780f..605b529e2a8 100644 --- a/spec/lib/task_helpers/imports/reports_spec.rb +++ b/spec/lib/task_helpers/imports/reports_spec.rb @@ -11,7 +11,7 @@ let(:rpt_db2) { "ChargebackVm" } let(:attr_err_file) { "Test_Report_attr_error.yml" } let(:runt_err_file) { "Test_Report_runtime_error.yml" } - let(:options) { { :source => source, :overwrite => overwrite } } + let(:options) { {:source => source, :overwrite => overwrite} } before do FactoryBot.create(:user_admin, :userid => "admin") diff --git a/spec/lib/task_helpers/imports/roles_spec.rb b/spec/lib/task_helpers/imports/roles_spec.rb index c0da9922a48..b96be1d4a5b 100644 --- a/spec/lib/task_helpers/imports/roles_spec.rb +++ b/spec/lib/task_helpers/imports/roles_spec.rb @@ -6,7 +6,7 @@ let(:role_two_name) { 'Role Import Test 2' } before do - EvmSpecHelper.seed_specific_product_features(%w( + EvmSpecHelper.seed_specific_product_features(%w[ dashboard dashboard_add dashboard_view @@ -17,7 +17,7 @@ miq_policy vm about - )) + ]) end describe "#import" do @@ -71,7 +71,7 @@ def assert_test_role_two_present r = MiqUserRole.find_by(:name => role_two_name) expect(r.name).to eq(role_two_name) expect(r.read_only).to be false - expect(r.feature_identifiers).to match_array(%w(dashboard vm)) + expect(r.feature_identifiers).to match_array(%w[dashboard vm]) expect(r.settings).to be nil end end diff --git a/spec/lib/task_helpers/imports/widgets_spec.rb b/spec/lib/task_helpers/imports/widgets_spec.rb index 9d91eb0056b..8d6905d4ca4 100644 --- a/spec/lib/task_helpers/imports/widgets_spec.rb +++ 
b/spec/lib/task_helpers/imports/widgets_spec.rb @@ -7,11 +7,11 @@ let(:widget_name2) { "Test Widget Import" } let(:widget_title1) { "Test Widget" } let(:widget_title2) { "Test Widget Import" } - let(:widget_cols1) { %w(name power_state last_scan_on) } - let(:widget_cols2) { %w(name power_state) } + let(:widget_cols1) { %w[name power_state last_scan_on] } + let(:widget_cols2) { %w[name power_state] } let(:attr_err_file) { "Test_Widget_attr_error.yml" } let(:runt_err_file) { "Test_Widget_runtime_error.yml" } - let(:options) { { :source => source } } + let(:options) { {:source => source} } before do EvmSpecHelper.local_miq_server diff --git a/spec/lib/tasks/evm_application_spec.rb b/spec/lib/tasks/evm_application_spec.rb index 2254ee3d3fa..05d02c594ce 100644 --- a/spec/lib/tasks/evm_application_spec.rb +++ b/spec/lib/tasks/evm_application_spec.rb @@ -1,6 +1,6 @@ require "tempfile" require "fileutils" -require Rails.root.join("lib", "tasks", "evm_application") +require Rails.root.join("lib/tasks/evm_application") RSpec.describe EvmApplication do context ".server_state" do @@ -18,7 +18,7 @@ end describe ".servers_status" do - let(:local_zone) { FactoryBot.create(:zone, :name => 'A Zone') } + let(:local_zone) { FactoryBot.create(:zone, :name => 'A Zone') } let(:local) { EvmSpecHelper.local_miq_server(:started_on => 1.hour.ago, :last_heartbeat => 2.days.ago, :zone => local_zone) } let(:remote) { EvmSpecHelper.remote_miq_server(:is_master => true, :last_heartbeat => nil) } let!(:ui) { FactoryBot.create(:miq_ui_worker, :miq_server => local, :pid => 80_000) } @@ -62,7 +62,7 @@ end describe ".worker_status" do - let(:local_zone) { FactoryBot.create(:zone, :name => 'A Zone') } + let(:local_zone) { FactoryBot.create(:zone, :name => 'A Zone') } let(:local) { EvmSpecHelper.local_miq_server(:started_on => 1.hour.ago, :last_heartbeat => 2.days.ago, :zone => local_zone) } let(:remote) { EvmSpecHelper.remote_miq_server(:is_master => true, :last_heartbeat => nil) } let!(:ui) { FactoryBot.create(:miq_ui_worker, :miq_server => local, :pid => 80_000, :system_uid => "1-ui-7f658c8654-5ln9g") } @@ -73,46 +73,46 @@ expect(described_class.workers_status([local, remote])).to eq( [ { - "Region" => local.region_number, - "Zone" => local.zone.name, - "Type" => "Ui", - "Status" => "ready", - "PID" => ui.pid, - "SPID" => nil, - "Server" => local.name, - "Queue" => "", - "Started" => "", - "Heartbeat" => "", + "Region" => local.region_number, + "Zone" => local.zone.name, + "Type" => "Ui", + "Status" => "ready", + "PID" => ui.pid, + "SPID" => nil, + "Server" => local.name, + "Queue" => "", + "Started" => "", + "Heartbeat" => "", "System UID" => "1-ui-7f658c8654-5ln9g", - "MB Usage" => "", + "MB Usage" => "", }, { - "Region" => remote.region_number, - "Zone" => remote.zone.name, - "Type" => "Base::Refresh", - "Status" => "ready", - "PID" => refresh.pid, - "SPID" => nil, - "Server" => remote.name, - "Queue" => "", - "Started" => "", - "Heartbeat" => "", + "Region" => remote.region_number, + "Zone" => remote.zone.name, + "Type" => "Base::Refresh", + "Status" => "ready", + "PID" => refresh.pid, + "SPID" => nil, + "Server" => remote.name, + "Queue" => "", + "Started" => "", + "Heartbeat" => "", "System UID" => nil, - "MB Usage" => "", + "MB Usage" => "", }, { - "Region" => remote.region_number, - "Zone" => remote.zone.name, - "Type" => "Generic", - "Status" => "ready", - "PID" => generic.pid, - "SPID" => nil, - "Server" => remote.name, - "Queue" => "", - "Started" => "", - "Heartbeat" => "", + "Region" => remote.region_number, + 
"Zone" => remote.zone.name, + "Type" => "Generic", + "Status" => "ready", + "PID" => generic.pid, + "SPID" => nil, + "Server" => remote.name, + "Queue" => "", + "Started" => "", + "Heartbeat" => "", "System UID" => "1-generic-5cf45d7656-h6jzs", - "MB Usage" => "", + "MB Usage" => "", }, ] ) @@ -122,18 +122,18 @@ describe ".status" do def header(col, adjust = :rjust) hdr = col == :WID ? "ID" : col.to_s # edge case - hdr.gsub("_", " ").send(adjust, send("#{col.downcase}_padding")) + hdr.tr("_", " ").send(adjust, send(:"#{col.downcase}_padding")) end def line_for(col) - "-" * send("#{col.downcase}_padding") + "-" * send(:"#{col.downcase}_padding") end def pad(val, col, adjust = :rjust) - val.to_s.send(adjust, send("#{col.downcase}_padding")) + val.to_s.send(adjust, send(:"#{col.downcase}_padding")) end - let(:local_zone) { FactoryBot.create(:zone, :name => 'A Zone') } + let(:local_zone) { FactoryBot.create(:zone, :name => 'A Zone') } let(:local) { EvmSpecHelper.local_miq_server(:started_on => 1.hour.ago, :last_heartbeat => 2.days.ago, :zone => local_zone) } let(:remote) { EvmSpecHelper.remote_miq_server(:is_master => true, :last_heartbeat => nil) } let(:rgn) { local.region_number } @@ -152,21 +152,20 @@ def pad(val, col, adjust = :rjust) context "for just the local server" do it "displays server status for the local server and its workers" do - expected_output = <<~SERVER_INFO Checking EVM status... - #{header(:Region) } | #{header(:Zone, :ljust)} | Server | Status | PID | SPID | Workers | Version | #{header(:Started, :ljust) } | #{header(:Heartbeat, :ljust) } | MB Usage | Roles - -#{line_for(:Region)}-|-#{line_for(:Zone) }-|--------------------------|---------|-----|------|---------|---------|-#{line_for(:Started) }-|-#{line_for(:Heartbeat) }-|----------|------- - #{pad(rgn, :Region)} | #{local.zone.name } | #{ local.name } | started | | | 1 | 9.9.9.9 | #{local_started_on } | #{local_heartbeat } | | + #{header(:Region)} | #{header(:Zone, :ljust)} | Server | Status | PID | SPID | Workers | Version | #{header(:Started, :ljust)} | #{header(:Heartbeat, :ljust)} | MB Usage | Roles + -#{line_for(:Region)}-|-#{line_for(:Zone)}-|--------------------------|---------|-----|------|---------|---------|-#{line_for(:Started)}-|-#{line_for(:Heartbeat)}-|----------|------- + #{pad(rgn, :Region)} | #{local.zone.name} | #{local.name} | started | | | 1 | 9.9.9.9 | #{local_started_on} | #{local_heartbeat} | | * marks a master appliance - #{header(:Region) } | #{header(:Zone, :ljust)} | Type | Status | #{header(:PID) } | SPID | Server | Queue | Started | Heartbeat | System UID | MB Usage - -#{line_for(:Region)}-|-#{line_for(:Zone) }-|------|--------|-#{line_for(:PID) }-|------|--------------------------|-------|---------|-----------|------------|---------- - #{pad(rgn, :Region)} | #{local.zone.name } | Ui | ready | #{pad(ui.pid, :PID) } | | #{ local.name } | | | | | + #{header(:Region)} | #{header(:Zone, :ljust)} | Type | Status | #{header(:PID)} | SPID | Server | Queue | Started | Heartbeat | System UID | MB Usage + -#{line_for(:Region)}-|-#{line_for(:Zone)}-|------|--------|-#{line_for(:PID)}-|------|--------------------------|-------|---------|-----------|------------|---------- + #{pad(rgn, :Region)} | #{local.zone.name} | Ui | ready | #{pad(ui.pid, :PID)} | | #{local.name} | | | | | SERVER_INFO - expect { EvmApplication.status }.to output(expected_output).to_stdout + expect { EvmApplication.status }.to output(expected_output).to_stdout end end @@ -180,19 +179,19 @@ def pad(val, col, adjust = :rjust) it 
"displays server status for the all servers and workers" do expected_output = <<~SERVER_INFO Checking EVM status... - #{header(:Zone, :ljust) } | Server | Status | Workers | #{header(:Started, :ljust) } | #{header(:Heartbeat, :ljust).rstrip} - -#{line_for(:Zone) }-|---------------------------|---------|---------|-#{line_for(:Started) }-|-#{line_for(:Heartbeat)}- - #{pad(local.zone.name, :Zone, :ljust) } | #{ local.name } | started | 1 | #{local_started_on } | #{local_heartbeat} - #{pad(remote.zone.name, :Zone, :ljust)} | #{ remote.name }* | started | 2 | #{remote_started_on } | + #{header(:Zone, :ljust)} | Server | Status | Workers | #{header(:Started, :ljust)} | #{header(:Heartbeat, :ljust).rstrip} + -#{line_for(:Zone)}-|---------------------------|---------|---------|-#{line_for(:Started)}-|-#{line_for(:Heartbeat)}- + #{pad(local.zone.name, :Zone, :ljust)} | #{local.name} | started | 1 | #{local_started_on} | #{local_heartbeat} + #{pad(remote.zone.name, :Zone, :ljust)} | #{remote.name}* | started | 2 | #{remote_started_on} | All rows have the values: Region=#{rgn}, Version=9.9.9.9 * marks a master appliance - #{header(:Zone, :ljust) } | Type | Status | #{header(:PID) } | Server - -#{line_for(:Zone) }-|---------------|--------|-#{line_for(:PID) }-|-------------------------- - #{pad(local.zone.name, :Zone, :ljust) } | Ui | ready | #{pad(ui.pid, :PID) } | #{ local.name } - #{pad(remote.zone.name, :Zone, :ljust)} | Base::Refresh | ready | #{pad(refresh.pid, :PID)} | #{ remote.name } - #{pad(remote.zone.name, :Zone, :ljust)} | Generic | ready | #{pad(generic.pid, :PID)} | #{ remote.name } + #{header(:Zone, :ljust)} | Type | Status | #{header(:PID)} | Server + -#{line_for(:Zone)}-|---------------|--------|-#{line_for(:PID)}-|-------------------------- + #{pad(local.zone.name, :Zone, :ljust)} | Ui | ready | #{pad(ui.pid, :PID)} | #{local.name} + #{pad(remote.zone.name, :Zone, :ljust)} | Base::Refresh | ready | #{pad(refresh.pid, :PID)} | #{remote.name} + #{pad(remote.zone.name, :Zone, :ljust)} | Generic | ready | #{pad(generic.pid, :PID)} | #{remote.name} All rows have the values: Region=#{rgn} SERVER_INFO diff --git a/spec/lib/unique_within_region_validator_spec.rb b/spec/lib/unique_within_region_validator_spec.rb index 5a873f8af29..324d37a7fc0 100644 --- a/spec/lib/unique_within_region_validator_spec.rb +++ b/spec/lib/unique_within_region_validator_spec.rb @@ -22,26 +22,26 @@ end end - let(:test_name) { "thename" } + let(:test_name) { "thename" } let(:in_first_region_id) do case_sensitive_class.create!( - :id => case_sensitive_class.id_in_region(1, 0), - :name => test_name, + :id => case_sensitive_class.id_in_region(1, 0), + :name => test_name ).id end let(:also_in_first_region_id) do case_sensitive_class.create!( - :id => case_sensitive_class.id_in_region(2, 0), - :name => test_name.upcase, + :id => case_sensitive_class.id_in_region(2, 0), + :name => test_name.upcase ).id end let(:in_second_region_id) do case_sensitive_class.create!( - :id => case_sensitive_class.id_in_region(2, 1), - :name => test_name, + :id => case_sensitive_class.id_in_region(2, 1), + :name => test_name ).id end diff --git a/spec/lib/uuid_mixin_spec.rb b/spec/lib/uuid_mixin_spec.rb index 48ae7973b30..9aba095927b 100644 --- a/spec/lib/uuid_mixin_spec.rb +++ b/spec/lib/uuid_mixin_spec.rb @@ -1,7 +1,7 @@ RSpec.describe UuidMixin do let(:test_class) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" self.table_name = "service_templates" include UuidMixin end diff --git 
a/spec/lib/vmdb/appliance_spec.rb b/spec/lib/vmdb/appliance_spec.rb index b2011b58547..4a06466913e 100644 --- a/spec/lib/vmdb/appliance_spec.rb +++ b/spec/lib/vmdb/appliance_spec.rb @@ -12,13 +12,13 @@ :password => "passw0rd", :ldaphost => "my_ldap_host", }, - :database => { + :database => { :maintenance => { :reindex_schedule => "1 * * * *", :reindex_tables => %w[Metric MiqQueue] } }, - :log => { + :log => { :secret_filter => secret_filter } } @@ -26,7 +26,7 @@ before do stub_settings(fake_settings) - allow(::Settings).to receive(:to_hash).and_return(fake_settings) + allow(Settings).to receive(:to_hash).and_return(fake_settings) described_class.log_config(:logger => ManageIQ::Loggers::Base.new(logger_io)) end diff --git a/spec/lib/vmdb/gettext/domains_spec.rb b/spec/lib/vmdb/gettext/domains_spec.rb index d9eda3176dd..12a1b8886e9 100644 --- a/spec/lib/vmdb/gettext/domains_spec.rb +++ b/spec/lib/vmdb/gettext/domains_spec.rb @@ -1,7 +1,7 @@ RSpec.describe Vmdb::Gettext::Domains do context ".add_domain" do let(:name) { "test" } - let(:path) { "/dev/null"} + let(:path) { "/dev/null" } it "sets mo_paths" do described_class.add_domain(name, path, :mo) diff --git a/spec/lib/vmdb/plugins_spec.rb b/spec/lib/vmdb/plugins_spec.rb index 1b4b2602a47..280e40d4abc 100644 --- a/spec/lib/vmdb/plugins_spec.rb +++ b/spec/lib/vmdb/plugins_spec.rb @@ -14,14 +14,14 @@ describe ".plugin_for_class" do context "with a class in core" do it "returns nil" do - expect(described_class.plugin_for_class(::MiqGenericWorker)).to be_nil + expect(described_class.plugin_for_class(MiqGenericWorker)).to be_nil end end context "with a class in a plugin" do it "returns the plugin" do - expect(described_class.plugin_for_class(::ApplicationController)).to eq(ManageIQ::UI::Classic::Engine) - expect(described_class.plugin_for_class(::Api::BaseController)).to eq(ManageIQ::Api::Engine) + expect(described_class.plugin_for_class(ApplicationController)).to eq(ManageIQ::UI::Classic::Engine) + expect(described_class.plugin_for_class(Api::BaseController)).to eq(ManageIQ::Api::Engine) end end @@ -135,12 +135,12 @@ def clear_versions_caches before { clear_versions_caches } after { clear_versions_caches } - def with_temp_dir(options) + def with_temp_dir(options, &block) Dir.mktmpdir("plugins_spec") do |dir| allow(engine).to receive(:root).and_return(Pathname.new(dir)) if options[:symlinked] - with_temp_symlink(dir) { |ln| yield ln } + with_temp_symlink(dir, &block) else yield dir end @@ -183,7 +183,7 @@ def with_temp_git_dir(options) end def with_spec(type, options = {}) - raise "Unexpected type '#{type}'" unless %i(git path_with_git path).include?(type) + raise "Unexpected type '#{type}'" unless %i[git path_with_git path].include?(type) source = if type == :git diff --git a/spec/lib/vmdb/settings_spec.rb b/spec/lib/vmdb/settings_spec.rb index bc6d141eb17..130393a4973 100644 --- a/spec/lib/vmdb/settings_spec.rb +++ b/spec/lib/vmdb/settings_spec.rb @@ -7,10 +7,10 @@ described_class.walk do |key, value, path, owning| expect(owning).to be_kind_of(Config::Options) - if %i(a d e).include?(key) + if %i[a d e].include?(key) expect(value).to be_kind_of(Config::Options) value = value.to_hash - elsif %i(i).include?(key) + elsif %i[i].include?(key) expect(value).to be_kind_of(Array) value.each { |v| expect(v).to be_kind_of(Config::Options) } value = value.collect(&:to_hash) @@ -20,7 +20,7 @@ end expect(walked).to eq [ - #key value path + # key value path [:a, {:b => 'c'}, [:a]], [:b, 'c', [:a, :b]], [:d, {:e => {:f => 'g'}}, [:d]], @@ -38,7 +38,7 @@ 
:hash: - &1 A: *1 - CONFIG + CONFIG expect { described_class.walk(y) { |_k, _v, _p, _o| } }.not_to raise_error end @@ -127,16 +127,15 @@ _delete = miq_server.settings_changes.create!(:key => "/api/authentication_timeout", :value => "1.hour") described_class.save!(miq_server, - :api => { - :token_ttl => "2.hours", # Updated - :authentication_timeout => default, # Deleted (back to default) - }, - :drift_states => { - :history => { - :keep_drift_states => "1.hour" # Added - } - } - ) + :api => { + :token_ttl => "2.hours", # Updated + :authentication_timeout => default, # Deleted (back to default) + }, + :drift_states => { + :history => { + :keep_drift_states => "1.hour" # Added + } + }) miq_server.reload expect(miq_server.settings_changes.count).to eq 2 @@ -207,13 +206,12 @@ encrypted = ManageIQ::Password.encrypt(password) described_class.save!(miq_server, - :authentication => { - :mode => "ldap", - :ldaphost => "localhost", - :bind_pwd => password, - :user_proxies => [{:bind_pwd => password}] - } - ) + :authentication => { + :mode => "ldap", + :ldaphost => "localhost", + :bind_pwd => password, + :user_proxies => [{:bind_pwd => password}] + }) miq_server.reload @@ -695,7 +693,7 @@ {"api" => {"token_ttl" => "2.minutes"}}.to_yaml ) - expect(::Settings.api.token_ttl).to eq("2.minutes") + expect(Settings.api.token_ttl).to eq("2.minutes") end it ".validate", :providers_common => true do diff --git a/spec/lib/vmdb/util_spec.rb b/spec/lib/vmdb/util_spec.rb index dc0d34c9795..59ed981d60b 100644 --- a/spec/lib/vmdb/util_spec.rb +++ b/spec/lib/vmdb/util_spec.rb @@ -1,7 +1,7 @@ RSpec.describe VMDB::Util do context ".http_proxy_uri" do it "without config settings" do - stub_settings(:http_proxy => { :default => {} }) + stub_settings(:http_proxy => {:default => {}}) expect(described_class.http_proxy_uri).to be_nil end @@ -33,7 +33,7 @@ end it "with host, port, user" do - stub_settings_merge(:http_proxy => {:default => {:host => "1.2.3.4", :port => 4321, :user => "testuser", + stub_settings_merge(:http_proxy => {:default => {:host => "1.2.3.4", :port => 4321, :user => "testuser", :password => nil}}) expect(described_class.http_proxy_uri).to eq(URI::Generic.build(:scheme => "http", :host => "1.2.3.4", :port => 4321, :userinfo => "testuser")) diff --git a/spec/lib/workers/evm_server_spec.rb b/spec/lib/workers/evm_server_spec.rb index 49d39088211..f036cea38eb 100644 --- a/spec/lib/workers/evm_server_spec.rb +++ b/spec/lib/workers/evm_server_spec.rb @@ -80,7 +80,7 @@ expect(subject.servers_to_monitor.map(&:id)).not_to include(server.id) end - # Note: this is a very important spec + # NOTE: this is a very important spec # A lot of the data about the current server is stored as instance variables # so losing the particular instance we're using to do worker management would # be a big problem @@ -183,7 +183,7 @@ end received_ids = [] - subject.send(:as_each_server) { received_ids << ::Settings.special.settings[:id] } + subject.send(:as_each_server) { received_ids << Settings.special.settings[:id] } expect(received_ids).to match_array(MiqServer.pluck(:id)) end end diff --git a/spec/models/audit_event_spec.rb b/spec/models/audit_event_spec.rb index 5c106af3cbf..812fd19efa9 100644 --- a/spec/models/audit_event_spec.rb +++ b/spec/models/audit_event_spec.rb @@ -30,13 +30,13 @@ bad = ["bad", "worse"] ok.each do |sev| - event = AuditEvent.new(:event => "test_valid_severity", :message => "test_valid_severity - message", :status => "success") + event = AuditEvent.new(:event => "test_valid_severity", :message => 
"test_valid_severity - message", :status => "success") event.severity = sev expect(event).to be_valid end bad.each do |sev| - event = AuditEvent.new(:event => "test_invalid_severity", :message => "test_invalid_severity - message", :status => "success") + event = AuditEvent.new(:event => "test_invalid_severity", :message => "test_invalid_severity - message", :status => "success") event.severity = sev expect(event).not_to be_valid end diff --git a/spec/models/authentication_spec.rb b/spec/models/authentication_spec.rb index a15754050c8..7e639073249 100644 --- a/spec/models/authentication_spec.rb +++ b/spec/models/authentication_spec.rb @@ -26,8 +26,8 @@ end it "should create the authentication events and event sets" do - events = %w(ems_auth_changed ems_auth_valid ems_auth_invalid ems_auth_unreachable ems_auth_incomplete ems_auth_error - host_auth_changed host_auth_valid host_auth_invalid host_auth_unreachable host_auth_incomplete host_auth_error) + events = %w[ems_auth_changed ems_auth_valid ems_auth_invalid ems_auth_unreachable ems_auth_incomplete ems_auth_error + host_auth_changed host_auth_valid host_auth_invalid host_auth_unreachable host_auth_incomplete host_auth_error] events.each { |event| expect(MiqEventDefinition.exists?(:name => event)).to be_truthy } expect(MiqEventDefinitionSet.exists?(:name => 'auth_validation')).to be_truthy end diff --git a/spec/models/authenticator/httpd_spec.rb b/spec/models/authenticator/httpd_spec.rb index 50c01542b0c..c0467c6c158 100644 --- a/spec/models/authenticator/httpd_spec.rb +++ b/spec/models/authenticator/httpd_spec.rb @@ -123,26 +123,26 @@ describe '#find_or_initialize_user' do let(:user_attrs_simple) do - { :username => "sal", - :fullname => "Test User Sal", - :firstname => "Salvadore", - :lastname => "Bigs", - :email => "sal_email@example.com", - :domain => "example.com" } + {:username => "sal", + :fullname => "Test User Sal", + :firstname => "Salvadore", + :lastname => "Bigs", + :email => "sal_email@example.com", + :domain => "example.com"} end - let(:identity_simple) { [user_attrs_simple, %w(mumble bumble bee)] } + let(:identity_simple) { [user_attrs_simple, %w[mumble bumble bee]] } let(:user_attrs_upn) do - { :username => "sal@example.com", - :fullname => "Test User Sal", - :firstname => "Salvadore", - :lastname => "Bigs", - :email => "sal_email@example.com", - :domain => "example.com" } + {:username => "sal@example.com", + :fullname => "Test User Sal", + :firstname => "Salvadore", + :lastname => "Bigs", + :email => "sal_email@example.com", + :domain => "example.com"} end - let(:identity_upn) { [user_attrs_upn, %w(mumble bumble bee)] } + let(:identity_upn) { [user_attrs_upn, %w[mumble bumble bee]] } let!(:upn_sal) { FactoryBot.create(:user, :userid => 'sal@example.com') } @@ -461,7 +461,7 @@ def authenticate context "when user record is for a different region" do let(:my_region_number) { ApplicationRecord.my_region_number } let(:other_region) { ApplicationRecord.my_region_number + 1 } - let(:other_region_id) { other_region * ApplicationRecord.rails_sequence_factor + 1 } + let(:other_region_id) { (other_region * ApplicationRecord.rails_sequence_factor) + 1 } it "does not modify the user record when userid is in username format" do sally_username = FactoryBot.create(:user, :userid => 'sally', :id => other_region_id) @@ -652,7 +652,7 @@ def authenticate end it "should return user attributes hash for valid user" do - requested_attrs = %w(mail givenname sn displayname domainname) + requested_attrs = %w[mail givenname sn displayname domainname] 
jdoe_attrs = [{"mail" => ["jdoe@example.com"], "givenname" => ["John"], @@ -708,12 +708,12 @@ def authenticate super().merge('X-Remote-User-Groups' => 'wibble@fqdn,bubble@fqdn') end let(:user_attrs) do - { :username => "testuser", - :fullname => "Test User", - :firstname => "Alice", - :lastname => "Aardvark", - :email => "testuser@example.com", - :domain => "example.com" } + {:username => "testuser", + :fullname => "Test User", + :firstname => "Alice", + :lastname => "Aardvark", + :email => "testuser@example.com", + :domain => "example.com"} end it "handles a comma separated grouplist" do @@ -728,12 +728,12 @@ def authenticate super().merge('X-Remote-User-Groups' => CGI.escape('spécial_char@fqdn:moré@fqdn')) end let(:user_attrs) do - { :username => "testuser", - :fullname => "Test User", - :firstname => "Alice", - :lastname => "Aardvark", - :email => "testuser@example.com", - :domain => "example.com" } + {:username => "testuser", + :fullname => "Test User", + :firstname => "Alice", + :lastname => "Aardvark", + :email => "testuser@example.com", + :domain => "example.com"} end it "handles group names with escaped special characters" do @@ -756,12 +756,12 @@ def authenticate } end let(:user_attrs) do - { :username => "testuser", - :fullname => "Test User", - :firstname => "Alice", - :lastname => "Aardvark", - :email => "testuser@example.com", - :domain => "example.com" } + {:username => "testuser", + :fullname => "Test User", + :firstname => "Alice", + :lastname => "Aardvark", + :email => "testuser@example.com", + :domain => "example.com"} end it "handles nil group names" do diff --git a/spec/models/authenticator/ldap_spec.rb b/spec/models/authenticator/ldap_spec.rb index e0cd8056f63..f8bf6e38f29 100644 --- a/spec/models/authenticator/ldap_spec.rb +++ b/spec/models/authenticator/ldap_spec.rb @@ -78,7 +78,7 @@ def normalize(dn) :givenname => 'Alice', :sn => 'Aardvark', :mail => ['alice@example.com', 'a.aardvark@example.com'], - :groups => %w(wibble bubble), + :groups => %w[wibble bubble], } end let(:bob_data) do @@ -89,7 +89,7 @@ def normalize(dn) :givenname => 'Bob', :sn => 'Builderson', :mail => 'bob@example.com', - :groups => %w(wibble bubble), + :groups => %w[wibble bubble], } end let(:betty_data) do @@ -100,7 +100,7 @@ def normalize(dn) :givenname => 'Betty', :sn => 'Builderson', :mail => 'betty@example.com', - :groups => %w(wibble bubble), + :groups => %w[wibble bubble], } end let(:sam_data) do @@ -111,7 +111,7 @@ def normalize(dn) :givenname => nil, :sn => nil, :mail => 'sam@example.com', - :groups => %w(wibble bubble), + :groups => %w[wibble bubble], } end @@ -285,10 +285,9 @@ def authenticate expect(MiqQueue.count).to eq 1 expect(MiqQueue.first.args.last(2)).to eq( ["user_created", - { - :event_details => "User creation successful for User: Bob Builderson with ID: bob" - } - ] + { + :event_details => "User creation successful for User: Bob Builderson with ID: bob" + }] ) end end @@ -456,7 +455,7 @@ def authenticate end context "with no matching groups" do - let(:bob_data) { super().merge(:groups => %w(bubble trouble)) } + let(:bob_data) { super().merge(:groups => %w[bubble trouble]) } it "enqueues an authorize task" do expect(subject).to receive(:authorize_queue).and_return(123) diff --git a/spec/models/automate_workspace_spec.rb b/spec/models/automate_workspace_spec.rb index 5eb29b131c6..72f7ed6f5c3 100644 --- a/spec/models/automate_workspace_spec.rb +++ b/spec/models/automate_workspace_spec.rb @@ -4,14 +4,14 @@ let(:password) { "ca$hc0w" } let(:encrypted) { 
ManageIQ::Password.encrypt(password) } let(:input) do - { "objects" => { - "root" => { - "var1" => "1", - "var2" => "password::#{encrypted}", - "var3" => "password::v2:{c8qTeiuz6JgbBOiDqp3eiQ==}" - } - }, - "method_parameters" => {"arg1" => "password::#{encrypted}"} } + {"objects" => { + "root" => { + "var1" => "1", + "var2" => "password::#{encrypted}", + "var3" => "password::v2:{c8qTeiuz6JgbBOiDqp3eiQ==}" + } + }, + "method_parameters" => {"arg1" => "password::#{encrypted}"}} end it "raises error on invalid hash" do @@ -20,7 +20,7 @@ it "properly merges the hash with the new output" do hash = {'objects' => {'root' => {'a' => 1}}, 'state_vars' => {'b' => 2}} - partial_hash = {'objects' => {'root' => {'c' => 1}}, 'state_vars' => {} } + partial_hash = {'objects' => {'root' => {'c' => 1}}, 'state_vars' => {}} merged_hash = {'objects' => {'root' => {'a' => 1, 'c' => 1}}, 'state_vars' => {'b' => 2}} aw.merge_output!(hash) diff --git a/spec/models/automation_request_spec.rb b/spec/models/automation_request_spec.rb index b28b9cf7a69..28df3af5af6 100644 --- a/spec/models/automation_request_spec.rb +++ b/spec/models/automation_request_spec.rb @@ -132,7 +132,7 @@ "request_state" => "pending", "status" => "Ok", "approval_state" => "approved", - "userid" => admin.userid.to_s, + "userid" => admin.userid.to_s ) expect(ar.options).to include( :namespace => "SYSTEM", diff --git a/spec/models/binary_blob_spec.rb b/spec/models/binary_blob_spec.rb index 98813dec0a5..4befa74034f 100644 --- a/spec/models/binary_blob_spec.rb +++ b/spec/models/binary_blob_spec.rb @@ -34,7 +34,7 @@ it '#binary= with more data than the max parts size' do data = "test log data" - data *= ((BinaryBlobPart.default_part_size) / data.length * 2) + data *= (BinaryBlobPart.default_part_size / data.length * 2) @blob.binary = data.dup # binary= destroys the source data, so dup it expect(@blob.binary.length).to eq(data.length) end diff --git a/spec/models/blacklisted_event_spec.rb b/spec/models/blacklisted_event_spec.rb index 80cd74d6a41..f88881d26b0 100644 --- a/spec/models/blacklisted_event_spec.rb +++ b/spec/models/blacklisted_event_spec.rb @@ -20,9 +20,8 @@ it 'does not re-seed existing event filters' do User.current_user = FactoryBot.create(:user) filter = FactoryBot.create(:blacklisted_event, - :event_name => 'AlarmActionTriggeredEvent', - :provider_model => 'ManageIQ::Providers::Vmware::InfraManager' - ) + :event_name => 'AlarmActionTriggeredEvent', + :provider_model => 'ManageIQ::Providers::Vmware::InfraManager') filter_attrs = filter.attributes described_class.seed @@ -70,6 +69,5 @@ expect($audit_log).to receive(:info).with(a_string_including("changed")).never f.update(:enabled => f.enabled) end - end end diff --git a/spec/models/chargeable_field_spec.rb b/spec/models/chargeable_field_spec.rb index 1aaaa69a19e..edd6ebe983f 100644 --- a/spec/models/chargeable_field_spec.rb +++ b/spec/models/chargeable_field_spec.rb @@ -19,7 +19,7 @@ end it 'returns list of columns for main chargeback metric rollup query' do - expected_columns = %w( + expected_columns = %w[ id tag_names resource_id @@ -32,7 +32,7 @@ derived_vm_used_disk_storage disk_usage_rate_average net_usage_rate_average - ) + ] expect(described_class.cols_on_metric_rollup).to eq(expected_columns) end diff --git a/spec/models/chargeback/consumption_with_rollups_spec.rb b/spec/models/chargeback/consumption_with_rollups_spec.rb index eb9757f6de9..50355c31505 100644 --- a/spec/models/chargeback/consumption_with_rollups_spec.rb +++ 
b/spec/models/chargeback/consumption_with_rollups_spec.rb @@ -57,7 +57,7 @@ def pluck_rollup(metric_rollup_records) let(:consumption) { described_class.new(pluck_rollup([metric_rollup]), starting_date, starting_date + 1.day) } it 'returns array of tags' do - expect(consumption.tag_list_with_prefix).to match_array(%w(vm/tag/managed/operations/analysis_failed vm/tag/managed/environment/prod vm/tag/managed/environment/dev)) + expect(consumption.tag_list_with_prefix).to match_array(%w[vm/tag/managed/operations/analysis_failed vm/tag/managed/environment/prod vm/tag/managed/environment/dev]) end end end @@ -76,7 +76,7 @@ def pluck_rollup(metric_rollup_records) let(:consumption) { described_class.new(pluck_rollup([metric_rollup_container]), starting_date, starting_date + 1.day) } it 'returns array of tags' do - expect(consumption.tag_list_with_prefix).to match_array(%w(container_image/tag/managed/environment/cont container_image/tag/managed/environment/cust container_image/tag/managed/environment/stage container_image/label/managed/version/1.2/_label-1/test/1.0.0\ \ rc_2 container_image/label/managed/escaped:{version%2F1%2E2%2F%5Flabel%2D1}/escaped:{test%2F1%2E0%2E0%20%20rc%5F2})) + expect(consumption.tag_list_with_prefix).to match_array(%w[container_image/tag/managed/environment/cont container_image/tag/managed/environment/cust container_image/tag/managed/environment/stage container_image/label/managed/version/1.2/_label-1/test/1.0.0\ \ rc_2 container_image/label/managed/escaped:{version%2F1%2E2%2F%5Flabel%2D1}/escaped:{test%2F1%2E0%2E0%20%20rc%5F2}]) end end end diff --git a/spec/models/chargeback_container_image_spec.rb b/spec/models/chargeback_container_image_spec.rb index c548b267e9d..6ee9f86fa28 100644 --- a/spec/models/chargeback_container_image_spec.rb +++ b/spec/models/chargeback_container_image_spec.rb @@ -1,7 +1,7 @@ RSpec.describe ChargebackContainerImage do include Spec::Support::ChargebackHelper - let(:base_options) { {:interval_size => 2, :end_interval_offset => 0, :ext_options => {:tz => 'UTC'} } } + let(:base_options) { {:interval_size => 2, :end_interval_offset => 0, :ext_options => {:tz => 'UTC'}} } let(:hourly_rate) { 0.01 } let(:count_hourly_rate) { 1.00 } let(:starting_date) { Time.parse('2012-09-01 23:59:59Z').utc } @@ -45,7 +45,7 @@ :limit_memory_bytes => 1.megabytes, :limit_cpu_cores => 1.0) cat = FactoryBot.create(:classification, :description => "Environment", :name => "environment", :single_value => true, :show => true) c = FactoryBot.create(:classification, :name => "prod", :description => "Production", :parent_id => cat.id) - ChargebackRate.set_assignments(:compute, [{ :cb_rate => chargeback_rate, :tag => [c, "container_image"] }]) + ChargebackRate.set_assignments(:compute, [{:cb_rate => chargeback_rate, :tag => [c, "container_image"]}]) @tag = c.tag @project.tag_with(@tag.name, :ns => '*') @@ -136,7 +136,7 @@ before do @image.docker_labels << @label @image.save - ChargebackRate.set_assignments(:compute, [{ :cb_rate => chargeback_rate, :label => [@label, "container_image"] }]) + ChargebackRate.set_assignments(:compute, [{:cb_rate => chargeback_rate, :label => [@label, "container_image"]}]) add_metric_rollups_for(@image, month_beginning...month_end, 12.hours, metric_rollup_params) @@ -174,7 +174,7 @@ end end - let(:rate_assignment_options) { {:cb_rate => chargeback_rate, :object => MiqEnterprise.first } } + let(:rate_assignment_options) { {:cb_rate => chargeback_rate, :object => MiqEnterprise.first} } before do ChargebackRate.set_assignments(:compute, 
[rate_assignment_options]) diff --git a/spec/models/chargeback_container_project_spec.rb b/spec/models/chargeback_container_project_spec.rb index d39fe6342fa..b835be57911 100644 --- a/spec/models/chargeback_container_project_spec.rb +++ b/spec/models/chargeback_container_project_spec.rb @@ -1,7 +1,7 @@ RSpec.describe ChargebackContainerProject do include Spec::Support::ChargebackHelper - let(:base_options) { {:interval_size => 2, :end_interval_offset => 0, :ext_options => {:tz => 'UTC'} } } + let(:base_options) { {:interval_size => 2, :end_interval_offset => 0, :ext_options => {:tz => 'UTC'}} } let(:hourly_rate) { 0.01 } let(:starting_date) { Time.parse('2012-09-01 23:59:59Z').utc } let(:ts) { starting_date.in_time_zone(Metric::Helper.get_time_zone(base_options[:ext_options])) } diff --git a/spec/models/chargeback_rate_detail_measure_spec.rb b/spec/models/chargeback_rate_detail_measure_spec.rb index b65700d7e70..c4acadb76c9 100644 --- a/spec/models/chargeback_rate_detail_measure_spec.rb +++ b/spec/models/chargeback_rate_detail_measure_spec.rb @@ -25,7 +25,7 @@ it "is invalid with a units_display lenght diferent that the units lenght" do expect(FactoryBot.build(:chargeback_rate_detail_measure, - :units => %w(Bs KBs GBs), - :units_display => %w(kbps mbps))).not_to be_valid + :units => %w[Bs KBs GBs], + :units_display => %w[kbps mbps])).not_to be_valid end end diff --git a/spec/models/chargeback_rate_detail_spec.rb b/spec/models/chargeback_rate_detail_spec.rb index 24a5375537c..aec72fa0035 100644 --- a/spec/models/chargeback_rate_detail_spec.rb +++ b/spec/models/chargeback_rate_detail_spec.rb @@ -4,8 +4,8 @@ it "is invalid without a valid chargeback_rate" do invalid_chargeback_rate_id = (ChargebackRate.maximum(:id) || -1) + 1 chargeback_rate_detail = FactoryBot.build(:chargeback_rate_detail, - :chargeable_field => field, - :chargeback_rate_id => invalid_chargeback_rate_id) + :chargeable_field => field, + :chargeback_rate_id => invalid_chargeback_rate_id) expect(chargeback_rate_detail).to be_invalid expect(chargeback_rate_detail.errors[:chargeback_rate]).to include(/can't be blank/) end @@ -19,7 +19,7 @@ it 'loads chargeback rates from yml for Compute metrics' do rates = ChargebackRateDetail.default_rate_details_for('Compute') - expected_metrics = %w( + expected_metrics = %w[ derived_vm_numvcpus derived_vm_numvcpu_cores cpu_usagemhz_rate_average @@ -30,19 +30,19 @@ derived_memory_available derived_memory_used net_usage_rate_average - ) + ] expect(rates.map { |x| x.chargeable_field.metric }).to match_array(expected_metrics) end it 'loads chargeback rates from yml for Storage metrics' do rates = ChargebackRateDetail.default_rate_details_for('Storage') - expected_metrics = %w( + expected_metrics = %w[ fixed_storage_1 fixed_storage_2 derived_vm_allocated_disk_storage derived_vm_used_disk_storage - ) + ] expect(rates.map { |x| x.chargeable_field.metric }).to match_array(expected_metrics) end @@ -69,7 +69,7 @@ let(:cbt3) { FactoryBot.build(:chargeback_tier, :start => 50, :finish => Float::INFINITY, :fixed_rate => 1.0, :variable_rate => 0.1) } let(:cbd) do FactoryBot.build(:chargeback_rate_detail, :chargeback_tiers => [cbt3, cbt2, cbt1], - :chargeable_field => field) + :chargeable_field => field) end it "finds proper rate according the value" do @@ -82,16 +82,16 @@ context 'with rate adjustment' do let(:measure) do FactoryBot.build(:chargeback_rate_detail_measure, - :units_display => %w(B KB MB GB TB), - :units => %w(bytes kilobytes megabytes gigabytes terabytes)) + :units_display => %w[B KB MB GB 
TB], + :units => %w[bytes kilobytes megabytes gigabytes terabytes]) end let(:field) { FactoryBot.create(:chargeable_field_storage_allocated, :detail_measure => measure) } let(:cbd) do # This charges per gigabyte, tiers are per gigabytes FactoryBot.build(:chargeback_rate_detail, - :chargeback_tiers => [cbt1, cbt2, cbt3], - :chargeable_field => field, - :per_unit => 'gigabytes') + :chargeback_tiers => [cbt1, cbt2, cbt3], + :chargeable_field => field, + :per_unit => 'gigabytes') end it 'finds proper tier for the value' do expect(cbd.find_rate(0.0)).to eq([cbt1.fixed_rate, cbt1.variable_rate]) @@ -106,7 +106,7 @@ let(:consumption) { instance_double('Consumption', :hours_in_month => (30.days / 1.hour)) } it '#hourly_cost' do - cvalue = 42.0 + cvalue = 42.0 fixed_rate = 5.0 variable_rate = 8.26 tier_start = 0 @@ -114,18 +114,18 @@ per_time = 'monthly' per_unit = 'megabytes' cbd = FactoryBot.build(:chargeback_rate_detail, - :chargeable_field => field, - :per_time => per_time, - :per_unit => per_unit, - :enabled => true) + :chargeable_field => field, + :per_time => per_time, + :per_unit => per_unit, + :enabled => true) cbt = FactoryBot.create(:chargeback_tier, - :chargeback_rate_detail_id => cbd.id, - :start => tier_start, - :finish => tier_finish, - :fixed_rate => fixed_rate, - :variable_rate => variable_rate) + :chargeback_rate_detail_id => cbd.id, + :start => tier_start, + :finish => tier_finish, + :fixed_rate => fixed_rate, + :variable_rate => variable_rate) cbd.update(:chargeback_tiers => [cbt]) - expect(cbd.hourly_cost(cvalue, consumption)).to eq(cvalue * cbd.hourly(variable_rate, consumption) + cbd.hourly(fixed_rate, consumption)) + expect(cbd.hourly_cost(cvalue, consumption)).to eq((cvalue * cbd.hourly(variable_rate, consumption)) + cbd.hourly(fixed_rate, consumption)) cbd.chargeable_field = FactoryBot.build(:chargeable_field_fixed_compute_1) expect(cbd.hourly_cost(1, consumption)).to eq(cbd.hourly(variable_rate, consumption) + cbd.hourly(fixed_rate, consumption)) @@ -167,7 +167,7 @@ end let(:monthly_rate) { FactoryBot.build(:chargeback_rate_detail, :per_time => 'monthly') } - let(:weekly_consumption) { Chargeback::ConsumptionWithRollups.new([], Time.current - 1.week, Time.current) } + let(:weekly_consumption) { Chargeback::ConsumptionWithRollups.new([], 1.week.ago, Time.current) } it 'monhtly rate returns correct hourly(_rate) when consumption slice is weekly' do expect(monthly_rate.hourly(rate, weekly_consumption)).to eq(rate / (30.days / 1.hour)) end @@ -191,8 +191,8 @@ expect(cbd.friendly_rate).to eq(friendly_rate) cbd = FactoryBot.build(:chargeback_rate_detail, - :per_time => 'monthly', - :chargeable_field => FactoryBot.build(:chargeable_field_fixed_compute_1)) + :per_time => 'monthly', + :chargeable_field => FactoryBot.build(:chargeable_field_fixed_compute_1)) cbt = FactoryBot.create(:chargeback_tier, :start => 0, :chargeback_rate_detail_id => cbd.id, :finish => Float::INFINITY, :fixed_rate => 1.0, :variable_rate => 2.0) cbd.update(:chargeback_tiers => [cbt]) @@ -228,8 +228,8 @@ it "#per_unit_display_with_measurements" do cbdm = FactoryBot.create(:chargeback_rate_detail_measure, - :units_display => %w(B KB MB GB TB), - :units => %w(bytes kilobytes megabytes gigabytes terabytes)) + :units_display => %w[B KB MB GB TB], + :units => %w[bytes kilobytes megabytes gigabytes terabytes]) field = FactoryBot.create(:chargeable_field, :detail_measure => cbdm) cbd = FactoryBot.build(:chargeback_rate_detail, :per_unit => 'megabytes', :chargeable_field => field) expect(cbd.per_unit_display).to 
eq('MB') @@ -245,14 +245,14 @@ it 'is valid without per_unit' do ['cpu', nil].each do |per_unit| cbd = FactoryBot.build(:chargeback_rate_detail, - :chargeable_field => field, - :per_unit => per_unit) + :chargeable_field => field, + :per_unit => per_unit) cbt = FactoryBot.create(:chargeback_tier, - :chargeback_rate_detail_id => cbd.id, - :start => 0, - :finish => Float::INFINITY, - :fixed_rate => 0.0, - :variable_rate => 0.0) + :chargeback_rate_detail_id => cbd.id, + :start => 0, + :finish => Float::INFINITY, + :fixed_rate => 0.0, + :variable_rate => 0.0) cbd.update(:chargeback_tiers => [cbt]) expect(cbd).to be_valid end @@ -261,13 +261,13 @@ it "diferents_per_units_rates_should_have_the_same_cost" do # should be the same cost. bytes to megabytes and gigabytes to megabytes cbd_bytes = FactoryBot.build(:chargeback_rate_detail, - :chargeable_field => field, - :per_unit => 'bytes', - :per_time => 'monthly') + :chargeable_field => field, + :per_unit => 'bytes', + :per_time => 'monthly') cbd_gigabytes = FactoryBot.build(:chargeback_rate_detail, - :chargeable_field => field, - :per_unit => 'gigabytes', - :per_time => 'monthly') + :chargeable_field => field, + :per_unit => 'gigabytes', + :per_time => 'monthly') expect(cbd_bytes.hourly_cost(100, consumption)).to eq(cbd_gigabytes.hourly_cost(100, consumption)) end diff --git a/spec/models/chargeback_vm/ongoing_time_period_spec.rb b/spec/models/chargeback_vm/ongoing_time_period_spec.rb index df29f1f05b7..eb849947179 100644 --- a/spec/models/chargeback_vm/ongoing_time_period_spec.rb +++ b/spec/models/chargeback_vm/ongoing_time_period_spec.rb @@ -19,24 +19,23 @@ :created_on => start_of_all_intervals) vm.tag_with(tag.name, :ns => '*') host = FactoryBot.create(:host, :hardware => FactoryBot.create(:hardware, - :memory_mb => 8124, :cpu_total_cores => 1, - :cpu_speed => 9576), :vms => [vm]) + :memory_mb => 8124, :cpu_total_cores => 1, + :cpu_speed => 9576), :vms => [vm]) ems_cluster = FactoryBot.create(:ems_cluster, :ext_management_system => ems) ems_cluster.hosts << host storage = FactoryBot.create(:storage_vmware) Range.new(start_of_all_intervals, midle_of_the_first_day, true).step_value(1.hour).each do |time| vm.metric_rollups << FactoryBot.create(:metric_rollup_vm_hr, - :derived_vm_numvcpus => number_of_cpus, - :cpu_usagemhz_rate_average => cpu_usagemhz, - :timestamp => time, - :tag_names => 'environment/prod', - :parent_host_id => host.id, - :parent_ems_cluster_id => ems_cluster.id, - :parent_ems_id => ems.id, - :parent_storage_id => storage.id, - :resource_name => vm.name, - ) + :derived_vm_numvcpus => number_of_cpus, + :cpu_usagemhz_rate_average => cpu_usagemhz, + :timestamp => time, + :tag_names => 'environment/prod', + :parent_host_id => host.id, + :parent_ems_cluster_id => ems_cluster.id, + :parent_ems_id => ems.id, + :parent_storage_id => storage.id, + :resource_name => vm.name) end vm end @@ -56,7 +55,7 @@ :single_value => true, :show => true) c = FactoryBot.create(:classification, :name => 'prod', :description => 'Production', :parent_id => cat.id) chargeback_rate = FactoryBot.create(:chargeback_rate, :detail_params => detail_params) - temp = { :cb_rate => chargeback_rate, :tag => [c, 'vm'] } + temp = {:cb_rate => chargeback_rate, :tag => [c, 'vm']} ChargebackRate.set_assignments(:compute, [temp]) end diff --git a/spec/models/chargeback_vm_spec.rb b/spec/models/chargeback_vm_spec.rb index 3a27ab621f0..71056570355 100644 --- a/spec/models/chargeback_vm_spec.rb +++ b/spec/models/chargeback_vm_spec.rb @@ -224,7 +224,7 @@ def 
pluck_rollup(metric_rollup_records) let(:parent_classification) { FactoryBot.create(:classification) } let(:classification) { FactoryBot.create(:classification, :parent_id => parent_classification.id) } - let(:rate_assignment_options) { {:cb_rate => storage_chargeback_rate, :object => MiqEnterprise.first } } + let(:rate_assignment_options) { {:cb_rate => storage_chargeback_rate, :object => MiqEnterprise.first} } let(:options) { base_options.merge(:interval => 'daily', :tag => nil, :entity_id => resource.id, :include_metrics => false) } before do @@ -914,7 +914,7 @@ def result_row_by(chargeback_result, date) context "by owner" do let(:user) { FactoryBot.create(:user, :name => 'Test VM Owner', :userid => 'test_user') } - let(:options) { {:interval_size => 4, :owner => user.userid, :ext_options => {:tz => 'Eastern Time (US & Canada)'} } } + let(:options) { {:interval_size => 4, :owner => user.userid, :ext_options => {:tz => 'Eastern Time (US & Canada)'}} } before do @vm1.update_attribute(:evm_owner, user) end @@ -978,7 +978,7 @@ def result_row_by(chargeback_result, date) ChargebackRate.set_assignments(:storage, [rate_assignment_options]) end - it "chooses rate according to cloud_volume\'s tag" do + it "chooses rate according to cloud_volume's tag" do cloud_volume_sdd.tag_with([classification.tag.name], :ns => '*') expect(subject).to eq(storage_chargeback_rate) @@ -1102,7 +1102,7 @@ def result_row_by(chargeback_result, date) before do # fix fixed computes cost tier - we are not using variable part - detail_params[:chargeback_rate_detail_fixed_compute_cost][:tiers] = [{:fixed_rate => count_hourly_rate.to_s }] + detail_params[:chargeback_rate_detail_fixed_compute_cost][:tiers] = [{:fixed_rate => count_hourly_rate.to_s}] vm.tag_with([classification_1_1.tag.name, classification_2_1.tag.name], :ns => '*') @@ -1199,7 +1199,7 @@ def result_row_by(chargeback_result, date) expect(subject.memory_allocated_metric).to eq(memory_available) memory_cost_rate1 = memory_available * hourly_rate * hours_in_month - memory_cost_rate2 = fixed_rate * hours_in_month + memory_available * hourly_rate_2 * hours_in_month + memory_cost_rate2 = (fixed_rate * hours_in_month) + (memory_available * hourly_rate_2 * hours_in_month) expect(subject.memory_allocated_cost).to eq(memory_cost_rate1 + memory_cost_rate2) @@ -1207,7 +1207,7 @@ def result_row_by(chargeback_result, date) expect(subject.memory_used_metric).to eq(used_metric) memory_cost_rate1 = used_metric * hourly_rate * hours_in_month - memory_cost_rate2 = fixed_rate * hours_in_month + used_metric * hourly_rate_2 * hours_in_month + memory_cost_rate2 = (fixed_rate * hours_in_month) + (used_metric * hourly_rate_2 * hours_in_month) expect(subject.memory_used_cost).to eq(memory_cost_rate1 + memory_cost_rate2) expect(subject.memory_cost).to eq(subject.memory_allocated_cost + subject.memory_used_cost) @@ -1351,7 +1351,6 @@ def result_row_by(chargeback_result, date) end context "Group by single tag category" do - let(:options) { base_options.merge(:interval => 'monthly', :groupby_tag => 'environment') } before do add_metric_rollups_for(@vm1, month_beginning...month_end, 12.hours, metric_rollup_params) @@ -1515,7 +1514,7 @@ def find_result_by_vm_name_and_region(chargeback_result, vm_name, region) let(:cores) { 7 } let(:mem_mb) { 1777 } let(:disk_gb) { 7 } - let(:disk_b) { disk_gb * 1024**3 } + let(:disk_b) { disk_gb * (1024**3) } let(:hardware) do FactoryBot.create(:hardware, diff --git a/spec/models/classification_spec.rb b/spec/models/classification_spec.rb index 
ed68996a7b4..7ef518e585d 100644 --- a/spec/models/classification_spec.rb +++ b/spec/models/classification_spec.rb @@ -234,8 +234,7 @@ ['', 'My Name', 'My_Name_is...', - '123456789_123456789_123456789_123456789_123456789_1' - ].each do |name| + '123456789_123456789_123456789_123456789_123456789_1'].each do |name| cat = Classification.is_category.new(:name => name) expect(cat).to_not be_valid @@ -247,8 +246,7 @@ ['', 'My Name', 'My_Name_is...', - '123456789_123456789_123456789_123456789_123456789_1' - ].each do |name| + '123456789_123456789_123456789_123456789_123456789_1'].each do |name| good_name = Classification.sanitize_name(name) cat = Classification.is_category.new(:name => good_name, :description => name) expect(cat).to be_valid @@ -307,8 +305,7 @@ expect(all_tagged_with(Host, [[full_tag_name(ent11)], [full_tag_name(ent11)]])).to eq([host2]) # failure - expect(all_tagged_with(Host, [[full_tag_name(ent12), full_tag_name(ent11)], [full_tag_name(ent21)]]) - ).not_to eq([host2]) + expect(all_tagged_with(Host, [[full_tag_name(ent12), full_tag_name(ent11)], [full_tag_name(ent21)]])).not_to eq([host2]) expect(all_tagged_with(Host, [[full_tag_name(ent11)], [full_tag_name(ent22)]])).not_to eq([host2]) expect(all_tagged_with(Host, [[full_tag_name(ent12)], [full_tag_name(ent21)]])).not_to eq([host2]) end @@ -446,18 +443,17 @@ describe ".seed" do before do allow(YAML).to receive(:load_file).and_return([ - {:name => "cc", - :description => "Cost Center", - :example_text => "Cost Center", - :read_only => "0", - :syntax => "string", - :show => true, - :default => true, - :single_value => "1", - :entries => [{:description => "Cost Center 001", :name => "001"}, - {:description => "Cost Center 002", :name => "002"}] - }] - ) + {:name => "cc", + :description => "Cost Center", + :example_text => "Cost Center", + :read_only => "0", + :syntax => "string", + :show => true, + :default => true, + :single_value => "1", + :entries => [{:description => "Cost Center 001", :name => "001"}, + {:description => "Cost Center 002", :name => "002"}]} + ]) end context "after seeding" do @@ -491,18 +487,18 @@ cat = Classification.is_category.find_by!(:description => "Cost Center") allow(YAML).to receive(:load_file).and_call_original cat.update!(:name => "new_name") - expect { + expect do 2.times.each { Classification.seed } - }.to_not raise_error + end.to_not raise_error end end it "does not re-seed existing categories" do category = FactoryBot.create(:classification_cost_center, - :description => "user defined", - :example_text => "user defined", - :show => false, - :single_value => "0") + :description => "user defined", + :example_text => "user defined", + :show => false, + :single_value => "0") category_attrs = category.attributes Classification.seed @@ -515,7 +511,7 @@ describe '.find_by_name' do let(:my_region_number) { Classification.my_region_number } let(:other_region) { Classification.my_region_number + 1 } - let(:other_region_id) { other_region * Classification.rails_sequence_factor + 1 } + let(:other_region_id) { (other_region * Classification.rails_sequence_factor) + 1 } before do @local = FactoryBot.create(:classification, :name => "test_category1") @@ -558,7 +554,7 @@ describe '.find_by_names' do let(:my_region_number) { Classification.my_region_number } let(:other_region) { Classification.my_region_number + 1 } - let(:other_region_id) { other_region * Classification.rails_sequence_factor + 1 } + let(:other_region_id) { (other_region * Classification.rails_sequence_factor) + 1 } before do @local = 
FactoryBot.create(:classification, :name => "test_category1") diff --git a/spec/models/compliance_spec.rb b/spec/models/compliance_spec.rb index a8eb62b4354..055ac91ac11 100644 --- a/spec/models/compliance_spec.rb +++ b/spec/models/compliance_spec.rb @@ -141,10 +141,10 @@ context "no condition" do let(:policy) do FactoryBot.create(:miq_policy, - :mode => 'compliance', - :towhat => 'Vm', - :expression => MiqExpression.new("=" => {"field" => "Vm-retired", "value" => "true"}), - :active => true) + :mode => 'compliance', + :towhat => 'Vm', + :expression => MiqExpression.new("=" => {"field" => "Vm-retired", "value" => "true"}), + :active => true) end let(:policy_set) { FactoryBot.create(:miq_policy_set) } let(:event_definition) { MiqEventDefinition.find_by(:name => "vm_compliance_check") } diff --git a/spec/models/container_group_spec.rb b/spec/models/container_group_spec.rb index b0e573e031c..a78274fbdae 100644 --- a/spec/models/container_group_spec.rb +++ b/spec/models/container_group_spec.rb @@ -11,7 +11,7 @@ group = FactoryBot.create( :container_group, - :name => "group", + :name => "group" ) ems = FactoryBot.create( diff --git a/spec/models/container_quota/purging_spec.rb b/spec/models/container_quota/purging_spec.rb index d229061e008..d8ce7b3ac28 100644 --- a/spec/models/container_quota/purging_spec.rb +++ b/spec/models/container_quota/purging_spec.rb @@ -27,16 +27,16 @@ @old_quota = FactoryBot.create(:container_quota, :deleted_on => deleted_date - 1.day) @old_quota_scope = FactoryBot.create(:container_quota_scope, :container_quota => @old_quota) @old_quota_old_item = FactoryBot.create(:container_quota_item, :container_quota => @old_quota, - :deleted_on => deleted_date - 1.day) + :deleted_on => deleted_date - 1.day) @old_quota_active_item = FactoryBot.create(:container_quota_item, :container_quota => @old_quota, - :deleted_on => nil) + :deleted_on => nil) @purge_date_quota = FactoryBot.create(:container_quota, :deleted_on => deleted_date) @new_quota = FactoryBot.create(:container_quota, :deleted_on => deleted_date + 1.day) @new_quota_scope = FactoryBot.create(:container_quota_scope, :container_quota => @new_quota) @new_quota_old_item = FactoryBot.create(:container_quota_item, :container_quota => @new_quota, - :deleted_on => deleted_date - 1.day) + :deleted_on => deleted_date - 1.day) end def assert_unpurged_ids(model, unpurged_ids) diff --git a/spec/models/container_quota_item/purging_spec.rb b/spec/models/container_quota_item/purging_spec.rb index 97160e917b9..5c3f3494f4e 100644 --- a/spec/models/container_quota_item/purging_spec.rb +++ b/spec/models/container_quota_item/purging_spec.rb @@ -27,23 +27,23 @@ @old_quota = FactoryBot.create(:container_quota, :deleted_on => deleted_date - 1.day) @old_quota_scope = FactoryBot.create(:container_quota_scope, :container_quota => @old_quota) @old_quota_old_item = FactoryBot.create(:container_quota_item, :container_quota => @old_quota, - :deleted_on => deleted_date - 1.day) + :deleted_on => deleted_date - 1.day) @old_quota_purge_date_item = FactoryBot.create(:container_quota_item, :container_quota => @old_quota, - :deleted_on => deleted_date) + :deleted_on => deleted_date) @old_quota_new_item = FactoryBot.create(:container_quota_item, :container_quota => @old_quota, - :deleted_on => deleted_date + 1.day) + :deleted_on => deleted_date + 1.day) # Quota items may get archived as result of quota edits, while parent quota remains active. 
@active_quota = FactoryBot.create(:container_quota, :deleted_on => nil) @active_quota_scope = FactoryBot.create(:container_quota_scope, :container_quota => @active_quota) @active_quota_old_item = FactoryBot.create(:container_quota_item, :container_quota => @active_quota, - :deleted_on => deleted_date - 1.day) + :deleted_on => deleted_date - 1.day) @active_quota_purge_date_item = FactoryBot.create(:container_quota_item, :container_quota => @active_quota, - :deleted_on => deleted_date) + :deleted_on => deleted_date) @active_quota_new_item = FactoryBot.create(:container_quota_item, :container_quota => @active_quota, - :deleted_on => deleted_date + 1.day) + :deleted_on => deleted_date + 1.day) @active_quota_active_item = FactoryBot.create(:container_quota_item, :container_quota => @active_quota, - :deleted_on => nil) + :deleted_on => nil) end def assert_unpurged_ids(model, unpurged_ids) diff --git a/spec/models/custom_button_event_spec.rb b/spec/models/custom_button_event_spec.rb index 1dc653c3498..7b19b03bfce 100644 --- a/spec/models/custom_button_event_spec.rb +++ b/spec/models/custom_button_event_spec.rb @@ -3,11 +3,11 @@ let(:ae_entry_point) { "/SYSTEM/PROCESS/Request" } let(:cb_event) do FactoryBot.create(:custom_button_event, - :full_data => { - :automate_entry_point => ae_entry_point, - :button_id => custom_button.id, - :button_name => custom_button.name - }) + :full_data => { + :automate_entry_point => ae_entry_point, + :button_id => custom_button.id, + :button_name => custom_button.name + }) end context '#automate_entry_point' do diff --git a/spec/models/custom_button_spec.rb b/spec/models/custom_button_spec.rb index 40afae1620d..4c351126511 100644 --- a/spec/models/custom_button_spec.rb +++ b/spec/models/custom_button_spec.rb @@ -7,13 +7,13 @@ context 'by bigint column' do it 'orders by memory_shares column' do - expect(CustomButton.with_array_order(%w(300 100 200), :applies_to_id).ids).to eq([custom_button_3.id, custom_button_1.id, custom_button_2.id]) + expect(CustomButton.with_array_order(%w[300 100 200], :applies_to_id).ids).to eq([custom_button_3.id, custom_button_1.id, custom_button_2.id]) end end context 'by string column' do it 'orders by name column' do - expect(CustomButton.with_array_order(%w(BBB AAA CCC), :name, :text).ids).to eq([custom_button_2.id, custom_button_1.id, custom_button_3.id]) + expect(CustomButton.with_array_order(%w[BBB AAA CCC], :name, :text).ids).to eq([custom_button_2.id, custom_button_1.id, custom_button_3.id]) end end end @@ -70,16 +70,15 @@ @userid = "guest" uri_path, uri_attributes, uri_message = CustomButton.parse_uri(@ae_uri) @button = FactoryBot.create(:custom_button, - :name => @button_name, - :description => @button_text, - :applies_to_class => @button_class, - :applies_to_id => @target_id, - :uri => @ae_uri, - :uri_path => uri_path, - :uri_attributes => uri_attributes, - :uri_message => uri_message, - :userid => @userid - ) + :name => @button_name, + :description => @button_text, + :applies_to_class => @button_class, + :applies_to_id => @target_id, + :uri => @ae_uri, + :uri_path => uri_path, + :uri_attributes => uri_attributes, + :uri_message => uri_message, + :userid => @userid) end it "creates the proper button" do @@ -129,23 +128,23 @@ vm = FactoryBot.create(:vm_vmware) vm_other = FactoryBot.create(:vm_vmware) button1all = FactoryBot.create(:custom_button, - :applies_to => vm.class, - :name => "foo", - :description => "foo foo") + :applies_to => vm.class, + :name => "foo", + :description => "foo foo") button1vm = 
FactoryBot.create(:custom_button, - :applies_to => vm, - :name => "bar", - :description => "bar bar") + :applies_to => vm, + :name => "bar", + :description => "bar bar") button2vm = FactoryBot.create(:custom_button, - :applies_to => vm, - :name => "foo", - :description => "foo foo") + :applies_to => vm, + :name => "foo", + :description => "foo foo") expect(described_class.buttons_for(Host)).to eq([]) expect(described_class.buttons_for(Vm)).to eq([button1all]) - expect(described_class.buttons_for(vm)).to match_array([button1vm, button2vm]) + expect(described_class.buttons_for(vm)).to match_array([button1vm, button2vm]) expect(described_class.buttons_for(vm_other)).to eq([]) end @@ -168,39 +167,44 @@ it "applies_to_class" do button_for_all_vms = FactoryBot.create(:custom_button, - :applies_to_class => 'Vm', - :name => @default_name, - :description => @default_description) + :applies_to_class => 'Vm', + :name => @default_name, + :description => @default_description) expect(button_for_all_vms).to be_valid new_host_button = described_class.new( :applies_to_class => 'Host', :name => @default_name, - :description => @default_description) + :description => @default_description + ) expect(new_host_button).to be_valid dup_vm_button = described_class.new( :applies_to_class => 'Vm', :name => @default_name, - :description => @default_description) + :description => @default_description + ) expect(dup_vm_button).not_to be_valid dup_vm_name_button = described_class.new( :applies_to_class => 'Vm', :name => @default_name, - :description => "hello world") + :description => "hello world" + ) expect(dup_vm_name_button).not_to be_valid dup_vm_desc_button = described_class.new( :applies_to_class => 'Vm', :name => "hello", - :description => @default_description) + :description => @default_description + ) expect(dup_vm_desc_button).not_to be_valid new_vm_button = described_class.new( :applies_to_class => 'Vm', :name => "hello", - :description => "hello world") + :description => "hello world" + ) expect(new_vm_button).to be_valid end @@ -208,59 +212,67 @@ vm_other = FactoryBot.create(:vm_vmware) button_for_single_vm = FactoryBot.create(:custom_button, - :applies_to => @vm, - :name => @default_name, - :description => @default_description) + :applies_to => @vm, + :name => @default_name, + :description => @default_description) expect(button_for_single_vm).to be_valid # For same VM dup_vm_button = described_class.new( :applies_to => @vm, :name => @default_name, - :description => @default_description) + :description => @default_description + ) expect(dup_vm_button).not_to be_valid dup_vm_name_button = described_class.new( :applies_to => @vm, :name => @default_name, - :description => "hello world") + :description => "hello world" + ) expect(dup_vm_name_button).not_to be_valid dup_vm_desc_button = described_class.new( :applies_to => @vm, :name => "hello", - :description => @default_description) + :description => @default_description + ) expect(dup_vm_desc_button).not_to be_valid new_vm_button = described_class.new( :applies_to => @vm, :name => "hello", - :description => "hello world") + :description => "hello world" + ) expect(new_vm_button).to be_valid # For other VM dup_vm_button = described_class.new( :applies_to => vm_other, :name => @default_name, - :description => @default_description) + :description => @default_description + ) expect(dup_vm_button).to be_valid dup_vm_name_button = described_class.new( :applies_to => vm_other, :name => @default_name, - :description => "hello world") + :description => "hello world" + 
) expect(dup_vm_name_button).to be_valid dup_vm_desc_button = described_class.new( :applies_to => vm_other, :name => "hello", - :description => @default_description) + :description => @default_description + ) expect(dup_vm_desc_button).to be_valid new_vm_button = described_class.new( :applies_to => vm_other, :name => "hello", - :description => "hello world") + :description => "hello world" + ) expect(new_vm_button).to be_valid end end @@ -329,7 +341,7 @@ EvmSpecHelper.local_miq_server(:is_master => true, :zone => Zone.seed) end - %i(invoke invoke_async).each do |method| + %i[invoke invoke_async].each do |method| describe "##{method}", "publishes CustomButtonEvent(s)" do it "with a single VM" do Timecop.freeze(Time.now.utc) do @@ -358,10 +370,10 @@ expect(CustomButtonEvent.count).to eq(3) expect(CustomButtonEvent.find_by(:target_id => vm.id, :target_type => "VmOrTemplate", :source => 'UI')).to have_attributes( - :type => 'CustomButtonEvent', - :event_type => 'button.trigger.start', - :user_id => user.id, - :full_data => a_hash_including(:automate_entry_point => "/SYSTEM/PROCESS/Request") + :type => 'CustomButtonEvent', + :event_type => 'button.trigger.start', + :user_id => user.id, + :full_data => a_hash_including(:automate_entry_point => "/SYSTEM/PROCESS/Request") ) end @@ -374,10 +386,10 @@ expect(CustomButtonEvent.count).to eq(3) expect(CustomButtonEvent.find_by(:target_id => vm.id, :target_type => "VmOrTemplate", :source => 'UI')).to have_attributes( - :type => 'CustomButtonEvent', - :event_type => 'button.trigger.start', - :user_id => user.id, - :full_data => a_hash_including(:automate_entry_point => "/SYSTEM/PROCESS/Request") + :type => 'CustomButtonEvent', + :event_type => 'button.trigger.start', + :user_id => user.id, + :full_data => a_hash_including(:automate_entry_point => "/SYSTEM/PROCESS/Request") ) end end diff --git a/spec/models/dialog/orchestration_template_service_dialog_spec.rb b/spec/models/dialog/orchestration_template_service_dialog_spec.rb index 84cdb667943..ea8e5ba7e21 100644 --- a/spec/models/dialog/orchestration_template_service_dialog_spec.rb +++ b/spec/models/dialog/orchestration_template_service_dialog_spec.rb @@ -59,7 +59,7 @@ :parameters => [ template_param(:label => 'Param 1'), template_param(:label => 'Param 2') - ], + ] ) ] }, @@ -84,14 +84,14 @@ :parameters => [ template_param(:label => 'Param 1'), template_param(:label => 'Param 2') - ], + ] ), OrchestrationTemplate::OrchestrationParameterGroup.new( :label => 'Parameter Group 2', :parameters => [ template_param(:label => 'Param 3'), template_param(:label => 'Param 4') - ], + ] ) ] }, @@ -120,7 +120,7 @@ :parameters => [ template_param(:label => 'Param 3'), template_param(:label => 'Param 4') - ], + ] ) ] }, @@ -146,7 +146,7 @@ :parameters => [ template_param(:label => 'Param 1'), template_param(:label => 'Param 2') - ], + ] ) ] }, @@ -158,7 +158,7 @@ :parameters => [ template_param(:label => 'Param 3'), template_param(:label => 'Param 4') - ], + ] ) ] }, @@ -179,7 +179,7 @@ describe "creation of dropdown parameter fields" do context "when allowed values are given" do let(:param_options) do - constraint = OrchestrationTemplate::OrchestrationParameterAllowed.new(:allowed_values => %w(val1 val2), :allow_multiple => true) + constraint = OrchestrationTemplate::OrchestrationParameterAllowed.new(:allowed_values => %w[val1 val2], :allow_multiple => true) {:default_value => '["val1"]', :constraints => [constraint]} end @@ -188,7 +188,7 @@ DialogFieldDropDownList, :name => "param_user", :default_value => 
"[\"val1\"]", - :values => [%w(val1 val1), %w(val2 val2)], + :values => [%w[val1 val1], %w[val2 val2]], :reconfigurable => true, :force_multi_value => true) end @@ -205,7 +205,7 @@ DialogFieldDropDownList, :name => "param_user", :default_value => "val1", - :values => [[nil, ""], %w(key1 val1), %w(key2 val2)]) + :values => [[nil, ""], %w[key1 val1], %w[key2 val2]]) end end @@ -269,7 +269,7 @@ def assert_tab_attributes(tab, attributes) def assert_stack_group(group) expect(group).to have_attributes( :label => "Options", - :display => "edit", + :display => "edit" ) fields = group.dialog_fields diff --git a/spec/models/dialog_field_association_validator_spec.rb b/spec/models/dialog_field_association_validator_spec.rb index 77560f32aa9..43842d2ee12 100644 --- a/spec/models/dialog_field_association_validator_spec.rb +++ b/spec/models/dialog_field_association_validator_spec.rb @@ -7,7 +7,7 @@ let(:trivial_associations) { {"a" => ["b"]} } it "doesn't blow up and returns nil" do - expect(dialog_field_association_validator.check_for_circular_references({"a" => []} , [])).to eq(nil) + expect(dialog_field_association_validator.check_for_circular_references({"a" => []}, [])).to eq(nil) expect(dialog_field_association_validator.check_for_circular_references(trivial_associations, "a")).to eq(nil) expect(dialog_field_association_validator.check_for_circular_references(associations, "e")).to eq(nil) expect(dialog_field_association_validator.check_for_circular_references(associations, "c")).to eq(nil) @@ -17,8 +17,8 @@ context "when there are circular references" do let(:trivial_associations) { {"a" => ["b"], "b" => ["a"]} } - let(:associations) { {"a" => %w(b d), "b" => ["c"], "c" => %w(e d), "e" => ["b"]} } - let(:associations1) { {"a" => %w(b d), "b" => ["c"], "d" => ["a"]} } + let(:associations) { {"a" => %w[b d], "b" => ["c"], "c" => %w[e d], "e" => ["b"]} } + let(:associations1) { {"a" => %w[b d], "b" => ["c"], "d" => ["a"]} } it "raises circular reference error and returns problematic fields" do expect { dialog_field_association_validator.check_for_circular_references(trivial_associations, "a") }.to raise_error(DialogFieldAssociationValidator::DialogFieldAssociationCircularReferenceError, 'a already exists in ["a", "b"]') diff --git a/spec/models/dialog_field_drop_down_list_spec.rb b/spec/models/dialog_field_drop_down_list_spec.rb index 73188dc5053..8b699a2598f 100644 --- a/spec/models/dialog_field_drop_down_list_spec.rb +++ b/spec/models/dialog_field_drop_down_list_spec.rb @@ -29,7 +29,7 @@ describe "sorting #values" do before do - df.values = [%w(2 Y), %w(1 Z), %w(3 X)] + df.values = [%w[2 Y], %w[1 Z], %w[3 X]] end context "when the data type is a string" do @@ -38,7 +38,7 @@ context "when sorting by description" do context "when sorting ascending" do it "returns the sorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(3 X), %w(2 Y), %w(1 Z)]) + expect(df.values).to eq([[nil, ""], %w[3 X], %w[2 Y], %w[1 Z]]) end end @@ -46,7 +46,7 @@ let(:sort_order) { :descending } it "returns the sorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(1 Z), %w(2 Y), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[1 Z], %w[2 Y], %w[3 X]]) end end end @@ -56,7 +56,7 @@ context "when sorting ascending" do it "returns the sorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(1 Z), %w(2 Y), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[1 Z], %w[2 Y], %w[3 X]]) end end @@ -64,7 +64,7 @@ let(:sort_order) { 
:descending } it "returns the sorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(3 X), %w(2 Y), %w(1 Z)]) + expect(df.values).to eq([[nil, ""], %w[3 X], %w[2 Y], %w[1 Z]]) end end end @@ -74,7 +74,7 @@ context "when sorting ascending" do it "returns the unsorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(2 Y), %w(1 Z), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[2 Y], %w[1 Z], %w[3 X]]) end end @@ -82,7 +82,7 @@ let(:sort_order) { :descending } it "returns the unsorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(2 Y), %w(1 Z), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[2 Y], %w[1 Z], %w[3 X]]) end end end @@ -96,7 +96,7 @@ context "when sorting ascending" do it "returns the sorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(1 Z), %w(2 Y), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[1 Z], %w[2 Y], %w[3 X]]) end end @@ -104,7 +104,7 @@ let(:sort_order) { :descending } it "returns the sorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(3 X), %w(2 Y), %w(1 Z)]) + expect(df.values).to eq([[nil, ""], %w[3 X], %w[2 Y], %w[1 Z]]) end end end @@ -114,7 +114,7 @@ context "when sorting ascending" do it "returns the unsorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(2 Y), %w(1 Z), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[2 Y], %w[1 Z], %w[3 X]]) end end @@ -122,7 +122,7 @@ let(:sort_order) { :descending } it "returns the unsorted values with a nil option prepended" do - expect(df.values).to eq([[nil, ""], %w(2 Y), %w(1 Z), %w(3 X)]) + expect(df.values).to eq([[nil, ""], %w[2 Y], %w[1 Z], %w[3 X]]) end end end @@ -139,7 +139,7 @@ let(:dialog_field) do described_class.new(:default_value => default_value, :options => {:force_multi_value => true}, :dynamic => true) end - let(:values) { [%w(test test), %w(test2 test2)] } + let(:values) { [%w[test test], %w[test2 test2]] } let(:default_value) { "test2" } before do @@ -254,28 +254,28 @@ context "when the raw values are already set" do before do - dialog_field.instance_variable_set(:@raw_values, %w(potato potato)) + dialog_field.instance_variable_set(:@raw_values, %w[potato potato]) end it "returns the raw values" do - expect(dialog_field.values).to eq(%w(potato potato)) + expect(dialog_field.values).to eq(%w[potato potato]) end end context "when the raw values are not already set" do context "when the values returned are strings" do before do - allow(DynamicDialogFieldValueProcessor).to receive(:values_from_automate).with(dialog_field).and_return(%w(automate values)) + allow(DynamicDialogFieldValueProcessor).to receive(:values_from_automate).with(dialog_field).and_return(%w[automate values]) end it "returns the values from automate" do - expect(dialog_field.values).to eq(%w(automate values)) + expect(dialog_field.values).to eq(%w[automate values]) end context "when the values returned contain a nil" do before do allow(DynamicDialogFieldValueProcessor).to receive(:values_from_automate).with(dialog_field).and_return( - [[nil, "Choose something!"], %w(1 one), %w(2 two), %w(abc def)] + [[nil, "Choose something!"], %w[1 one], %w[2 two], %w[abc def]] ) end @@ -330,7 +330,7 @@ end it "returns the values from automate" do - expect(dialog_field.values).to eq([[nil, "Choose something!"], %w(1 one), %w(2 two), %w(abc def)]) + expect(dialog_field.values).to eq([[nil, "Choose something!"], %w[1 one], %w[2 two], %w[abc def]]) 
end end end @@ -377,7 +377,7 @@ context "when the raw values are already set" do before do - dialog_field.instance_variable_set(:@raw_values, [%w(potato potato)]) + dialog_field.instance_variable_set(:@raw_values, [%w[potato potato]]) end context 'and this is a multiselect' do @@ -445,7 +445,7 @@ context "when the raw values are not already set" do before do - dialog_field.values = [%w(original values)] + dialog_field.values = [%w[original values]] end context 'and this is a multiselect' do @@ -479,23 +479,23 @@ before do allow(DynamicDialogFieldValueProcessor).to receive(:values_from_automate).with(dialog_field).and_return( - %w(automate values) + %w[automate values] ) end context "when the raw values are already set" do before do - dialog_field.instance_variable_set(:@raw_values, %w(potato potato)) + dialog_field.instance_variable_set(:@raw_values, %w[potato potato]) end it "updates with the values from automate" do - expect(dialog_field.trigger_automate_value_updates).to eq(%w(automate values)) + expect(dialog_field.trigger_automate_value_updates).to eq(%w[automate values]) end end context "when the raw values are not already set" do it "returns the values from automate" do - expect(dialog_field.trigger_automate_value_updates).to eq(%w(automate values)) + expect(dialog_field.trigger_automate_value_updates).to eq(%w[automate values]) end end end @@ -505,22 +505,22 @@ context "when the raw values are already set" do before do - dialog_field.instance_variable_set(:@raw_values, %w(potato potato)) - dialog_field.values = [%w(original values)] + dialog_field.instance_variable_set(:@raw_values, %w[potato potato]) + dialog_field.values = [%w[original values]] end it "returns the raw values" do - expect(dialog_field.trigger_automate_value_updates).to eq([[nil, ""], %w(original values)]) + expect(dialog_field.trigger_automate_value_updates).to eq([[nil, ""], %w[original values]]) end end context "when the raw values are not already set" do before do - dialog_field.values = [%w(original values)] + dialog_field.values = [%w[original values]] end it "returns the values" do - expect(dialog_field.trigger_automate_value_updates).to eq([[nil, ""], %w(original values)]) + expect(dialog_field.trigger_automate_value_updates).to eq([[nil, ""], %w[original values]]) end it "sets up the default value" do diff --git a/spec/models/dialog_field_importer_spec.rb b/spec/models/dialog_field_importer_spec.rb index 4d9153a238a..dbbce022a35 100644 --- a/spec/models/dialog_field_importer_spec.rb +++ b/spec/models/dialog_field_importer_spec.rb @@ -278,8 +278,8 @@ context "#visible?" 
do let(:dialog_field) do { - "type" => "DialogFieldTextBox", - "name" => "Something else" + "type" => "DialogFieldTextBox", + "name" => "Something else" } end diff --git a/spec/models/dialog_field_radio_button_spec.rb b/spec/models/dialog_field_radio_button_spec.rb index c2d4d722471..08c34efb05f 100644 --- a/spec/models/dialog_field_radio_button_spec.rb +++ b/spec/models/dialog_field_radio_button_spec.rb @@ -50,7 +50,7 @@ end context "when the checked value is in the list of refreshed values" do - let(:refreshed_values_from_automate) { [%w(123 123), %w(456 456)] } + let(:refreshed_values_from_automate) { [%w[123 123], %w[456 456]] } it "returns the list of refreshed values and checked value as a hash" do expect(dialog_field_radio_button.refresh_json_value("123")).to eq( @@ -63,7 +63,7 @@ end context "when the checked value is not in the list of refreshed values" do - let(:refreshed_values_from_automate) { [%w(123 123)] } + let(:refreshed_values_from_automate) { [%w[123 123]] } it "returns the list of refreshed values and no checked (default) value as a hash" do expect(dialog_field_radio_button.refresh_json_value("321")).to eq( diff --git a/spec/models/dialog_field_serializer_spec.rb b/spec/models/dialog_field_serializer_spec.rb index 4fb4eb0475c..33ffaea9538 100644 --- a/spec/models/dialog_field_serializer_spec.rb +++ b/spec/models/dialog_field_serializer_spec.rb @@ -61,11 +61,11 @@ it 'serializes the dialog_field with the correct attributes' do expect(dialog_field_serializer.serialize(dialog_field, all_attributes)) - .to eq(expected_serialized_values.merge( - "resource_action" => "serialized resource action", - "values" => "dynamic values", - "dialog_field_responders" => dialog_field_responders - )) + .to eq(expected_serialized_values.merge( + "resource_action" => "serialized resource action", + "values" => "dynamic values", + "dialog_field_responders" => dialog_field_responders + )) end end @@ -74,12 +74,12 @@ it 'serializes the dialog_field with all attributes' do expect(dialog_field_serializer.serialize(dialog_field, all_attributes)) - .to include(expected_serialized_values.merge( - 'id' => dialog_field.id, - 'resource_action' => 'serialized resource action', - 'dialog_field_responders' => [], - 'values' => 'dynamic values' - )) + .to include(expected_serialized_values.merge( + 'id' => dialog_field.id, + 'resource_action' => 'serialized resource action', + 'dialog_field_responders' => [], + 'values' => 'dynamic values' + )) end end @@ -93,7 +93,7 @@ 'resource_action' => 'serialized resource action', 'dialog_field_responders' => [], 'values' => nil - )) + )) end end @@ -113,8 +113,8 @@ .to eq(expected_serialized_values.merge( "resource_action" => "serialized resource action", "dialog_field_responders" => dialog_field_responders, - "default_value" => "automate default value", - )) + "default_value" => "automate default value" + )) end end @@ -128,7 +128,7 @@ 'resource_action' => 'serialized resource action', 'dialog_field_responders' => [], "default_value" => "automate default value" - )) + )) end end end @@ -148,7 +148,7 @@ "resource_action" => "serialized resource action", "dialog_field_responders" => dialog_field_responders, "values" => nil - )) + )) end end @@ -160,8 +160,8 @@ .to include(expected_serialized_values.merge( 'id' => dialog_field.id, 'resource_action' => 'serialized resource action', - 'dialog_field_responders' => [], - )) + 'dialog_field_responders' => [] + )) end context 'with associations' do @@ -172,7 +172,7 @@ .to include(expected_serialized_values.merge( 
"resource_action" => "serialized resource action", "dialog_field_responders" => ["Dialog Field"] - )) + )) end end end @@ -191,7 +191,7 @@ "resource_action" => "serialized resource action", "dialog_field_responders" => dialog_field_responders, "values" => [[nil, ""], %w[one one], %w[two two]] - )) + )) end end @@ -204,7 +204,7 @@ 'id' => dialog_field.id, 'resource_action' => 'serialized resource action', 'dialog_field_responders' => [] - )) + )) end end end @@ -213,9 +213,9 @@ context "when the dialog_field is a tag control type" do let(:dialog_field) do DialogFieldTagControl.new(expected_serialized_values.merge( - :resource_action => resource_action, - :dialog_field_responders => dialog_field_responders - )) + :resource_action => resource_action, + :dialog_field_responders => dialog_field_responders + )) end let(:type) { "DialogFieldTagControl" } @@ -246,7 +246,7 @@ }, "default_value" => "[\"one\", \"two\"]", "values" => "values" - )) + )) end end end diff --git a/spec/models/dialog_field_sorted_item_spec.rb b/spec/models/dialog_field_sorted_item_spec.rb index 4088e926733..74d8146d47b 100644 --- a/spec/models/dialog_field_sorted_item_spec.rb +++ b/spec/models/dialog_field_sorted_item_spec.rb @@ -7,11 +7,11 @@ :dynamic => true, :load_values_on_init => load_values_on_init, :show_refresh_button => show_refresh_button, - :values => [%w(test test), %w(test2 test2)] + :values => [%w[test test], %w[test2 test2]] ) end let(:default_value) { "test2" } - let(:automate_values) { [%w(test1 test1), %w(test2 test2), %w(test3 test3)] } + let(:automate_values) { [%w[test1 test1], %w[test2 test2], %w[test3 test3]] } let(:empty_values) { [[nil, ""]] } before do @@ -137,7 +137,7 @@ let(:dialog_field) do described_class.new(:default_value => default_value, :dynamic => true) end - let(:values) { [%w(test test), %w(test2 test2)] } + let(:values) { [%w[test test], %w[test2 test2]] } let(:default_value) { "test2" } before do @@ -160,7 +160,7 @@ :values => values ) end - let(:values) { [%w(test test), %w(abc abc)] } + let(:values) { [%w[test test], %w[abc abc]] } let(:required) { false } context "when the field is dynamic" do @@ -225,23 +225,23 @@ let(:required) { true } it "returns the values without the first option being a nil option" do - expect(dialog_field.values).to eq([%w(test test), %w(abc abc)]) + expect(dialog_field.values).to eq([%w[test test], %w[abc abc]]) end end context "when the field is not required" do it "returns the values with the first option being a nil 'None' option" do - expect(dialog_field.values).to eq([[nil, ""], %w(test test), %w(abc abc)]) + expect(dialog_field.values).to eq([[nil, ""], %w[test test], %w[abc abc]]) end context "when the values are in a seemingly random order" do - let(:values) { [%w(3 Three), %w(1 One), %w(2 Two)] } + let(:values) { [%w[3 Three], %w[1 One], %w[2 Two]] } before do dialog_field.options[:sort_by] = "none" end it "does not attempt to sort them" do - expect(dialog_field.values).to eq([[nil, ""], %w(3 Three), %w(1 One), %w(2 Two)]) + expect(dialog_field.values).to eq([[nil, ""], %w[3 Three], %w[1 One], %w[2 Two]]) end end end @@ -257,7 +257,7 @@ context "when there is a default value that matches a value in the values list" do let(:default_value) { "2" } - let(:values) { [%w(1 1), %w(2 2), %w(3 3)] } + let(:values) { [%w[1 1], %w[2 2], %w[3 3]] } it "sets the default value to the matching value" do dialog_field.values @@ -265,13 +265,13 @@ end it "returns the values with the first option being a nil 'None' option" do - expect(dialog_field.values).to 
eq([[nil, ""], %w(1 1), %w(2 2), %w(3 3)]) + expect(dialog_field.values).to eq([[nil, ""], %w[1 1], %w[2 2], %w[3 3]]) end end context "when there is a default value that does not match a value in the values list" do let(:default_value) { "4" } - let(:values) { [%w(1 1), %w(2 2), %w(3 3)] } + let(:values) { [%w[1 1], %w[2 2], %w[3 3]] } it "sets the default value to nil" do dialog_field.values @@ -279,7 +279,7 @@ end it "returns the values with the first option being a nil 'None' option" do - expect(dialog_field.values).to eq([[nil, ""], %w(1 1), %w(2 2), %w(3 3)]) + expect(dialog_field.values).to eq([[nil, ""], %w[1 1], %w[2 2], %w[3 3]]) end end @@ -290,13 +290,13 @@ let(:required) { true } it "returns the values with the first option being a nil 'Choose' option" do - expect(dialog_field.values).to eq([[nil, ""], %w(test test), %w(abc abc)]) + expect(dialog_field.values).to eq([[nil, ""], %w[test test], %w[abc abc]]) end end context "when the field is not required" do it "returns the values with the first option being a nil 'None' option" do - expect(dialog_field.values).to eq([[nil, ""], %w(test test), %w(abc abc)]) + expect(dialog_field.values).to eq([[nil, ""], %w[test test], %w[abc abc]]) end end end @@ -306,7 +306,7 @@ describe "#get_default_value" do let(:dialog_field) { described_class.new(:default_value => default_value, :values => values) } - let(:values) { [%w(value1 text1), %w(value2 text2)] } + let(:values) { [%w[value1 text1], %w[value2 text2]] } context "when the default value is set to nil" do let(:default_value) { nil } @@ -408,7 +408,7 @@ it_behaves_like "DialogFieldSortedItem#normalize_automate_values" it "normalizes the values to an array" do - expect(dialog_field.normalize_automate_values(automate_hash)).to eq([%w(lol 123)]) + expect(dialog_field.normalize_automate_values(automate_hash)).to eq([%w[lol 123]]) end end end diff --git a/spec/models/dialog_field_spec.rb b/spec/models/dialog_field_spec.rb index a5728a3062a..5300ba9587b 100644 --- a/spec/models/dialog_field_spec.rb +++ b/spec/models/dialog_field_spec.rb @@ -1,6 +1,5 @@ RSpec.describe DialogField do context "legacy tests" do - let(:df) { FactoryBot.create(:dialog_field) } it "sets default value for required attribute" do expect(df.required).to eq(false) diff --git a/spec/models/dialog_field_tag_control_spec.rb b/spec/models/dialog_field_tag_control_spec.rb index b4da5bbb129..0476ca0bd5d 100644 --- a/spec/models/dialog_field_tag_control_spec.rb +++ b/spec/models/dialog_field_tag_control_spec.rb @@ -1,6 +1,7 @@ RSpec.describe DialogFieldTagControl do def add_entry(cat, options) raise "entries can only be added to classifications" unless cat.category? 
+ # Inherit from parent classification options.merge!(:read_only => cat.read_only, :syntax => cat.syntax, :single_value => cat.single_value, :ns => cat.ns) options.merge!(:parent_id => cat.id) # Ugly way to set up a child @@ -64,8 +65,7 @@ def add_entry(cat, options) context "dialog field tag control with with options hash and category" do before do @df = FactoryBot.create(:dialog_field_tag_control, :label => 'test tag category', :name => 'test tag category', - :options => {:force_single_value => true, :category_id => 1, :category_name => 'category', :category_description => 'description'} - ) + :options => {:force_single_value => true, :category_id => 1, :category_name => 'category', :category_description => 'description'}) end it "#category" do @@ -89,8 +89,7 @@ def add_entry(cat, options) before do @cat = FactoryBot.create(:classification, :description => "Auto Approve - Max CPU", :name => "prov_max_cpu", :single_value => 1) @df = FactoryBot.create(:dialog_field_tag_control, :label => 'test tag category', :name => 'test tag category', - :options => {:category_id => @cat.id, :category_name => 'category', :category_description => 'description'} - ) + :options => {:category_id => @cat.id, :category_name => 'category', :category_description => 'description'}) end it "#single_value?" do @@ -98,8 +97,7 @@ def add_entry(cat, options) cat = FactoryBot.create(:classification, :description => "Auto Approve - Max Memory", :name => "prov_max_memory", :single_value => 0) df = FactoryBot.create(:dialog_field_tag_control, :label => 'test tag category', :name => 'test tag category', - :options => {:category_id => cat.id, :category_name => 'category', :category_description => 'description'} - ) + :options => {:category_id => cat.id, :category_name => 'category', :category_description => 'description'}) expect(df.single_value?).to be_falsey end @@ -144,7 +142,7 @@ def add_entry(cat, options) it "automate_output_value with multiple values" do tags = [Classification.first, Classification.last] @df.value = tags.collect(&:id).join(",") - expect(@df.automate_output_value.split("\x1F")).to match_array tags.collect { |tag| "#{tag.class.name}::#{tag.id}" } + expect(@df.automate_output_value.split("\x1F")).to match_array(tags.collect { |tag| "#{tag.class.name}::#{tag.id}" }) end end end @@ -203,10 +201,10 @@ def add_entry(cat, options) it "sorts reverse by name converting to integer and adds a blank value to the front" do expect(dialog_field.values).to eq([ - {:id => nil, :name => "", :description => ""}, - {:id => 321, :name => "2dog", :description => "Dog"}, - {:id => 312, :name => "1cat", :description => "Cat"} - ]) + {:id => nil, :name => "", :description => ""}, + {:id => 321, :name => "2dog", :description => "Dog"}, + {:id => 312, :name => "1cat", :description => "Cat"} + ]) end end @@ -217,10 +215,10 @@ def add_entry(cat, options) it "sorts by name converting to integer and adds a blank value to the front" do expect(dialog_field.values).to eq([ - {:id => nil, :name => "", :description => ""}, - {:id => 312, :name => "1cat", :description => "Cat"}, - {:id => 321, :name => "2dog", :description => "Dog"} - ]) + {:id => nil, :name => "", :description => ""}, + {:id => 312, :name => "1cat", :description => "Cat"}, + {:id => 321, :name => "2dog", :description => "Dog"} + ]) end end end @@ -233,10 +231,10 @@ def add_entry(cat, options) it "sorts reverse by name and adds a blank value to the front" do expect(dialog_field.values).to eq([ - {:id => nil, :name => "", :description => ""}, - {:id => 321, :name => 
"dog", :description => "Dog"}, - {:id => 312, :name => "cat", :description => "Cat"} - ]) + {:id => nil, :name => "", :description => ""}, + {:id => 321, :name => "dog", :description => "Dog"}, + {:id => 312, :name => "cat", :description => "Cat"} + ]) end end @@ -247,10 +245,10 @@ def add_entry(cat, options) it "sorts by name and adds a blank value to the front" do expect(dialog_field.values).to eq([ - {:id => nil, :name => "", :description => ""}, - {:id => 312, :name => "cat", :description => "Cat"}, - {:id => 321, :name => "dog", :description => "Dog"} - ]) + {:id => nil, :name => "", :description => ""}, + {:id => 312, :name => "cat", :description => "Cat"}, + {:id => 321, :name => "dog", :description => "Dog"} + ]) end end end @@ -261,10 +259,10 @@ def add_entry(cat, options) it "returns the available tags in whatever order they came in as with a blank value first" do expect(dialog_field.values).to eq([ - {:id => nil, :name => "", :description => ""}, - {:id => 321, :name => "dog", :description => "Dog"}, - {:id => 312, :name => "cat", :description => "Cat"}, - ]) + {:id => nil, :name => "", :description => ""}, + {:id => 321, :name => "dog", :description => "Dog"}, + {:id => 312, :name => "cat", :description => "Cat"}, + ]) end end end diff --git a/spec/models/dialog_field_text_box_spec.rb b/spec/models/dialog_field_text_box_spec.rb index 5a68851a3c3..41e325b1dea 100644 --- a/spec/models/dialog_field_text_box_spec.rb +++ b/spec/models/dialog_field_text_box_spec.rb @@ -44,7 +44,6 @@ context "when show_refresh_button is false" do let(:show_refresh_button) { false } - context "when load_values_on_init is true" do let(:load_values_on_init) { true } @@ -109,7 +108,7 @@ end context "dialog field text box with protected field" do - let(:df) { FactoryBot.build(:dialog_field_text_box, :label => 'test field', :name => 'test field', :options => {:protected => true}) } + let(:df) { FactoryBot.build(:dialog_field_text_box, :label => 'test field', :name => 'test field', :options => {:protected => true}) } it "#protected?" 
do expect(df).to be_protected diff --git a/spec/models/dialog_group_serializer_spec.rb b/spec/models/dialog_group_serializer_spec.rb index 6a632d0ac83..10f5916c0e1 100644 --- a/spec/models/dialog_group_serializer_spec.rb +++ b/spec/models/dialog_group_serializer_spec.rb @@ -12,7 +12,8 @@ :display_method => "display method", :display_method_options => "display method options", :label => "label", - :position => 1) + :position => 1 + ) end let(:expected_serialized_values) do @@ -50,7 +51,7 @@ 'dialog_tab_id' => nil, 'id' => nil, 'updated_at' => nil - )) + )) end end end diff --git a/spec/models/dialog_group_spec.rb b/spec/models/dialog_group_spec.rb index f970cfb6867..6793cd1508d 100644 --- a/spec/models/dialog_group_spec.rb +++ b/spec/models/dialog_group_spec.rb @@ -29,9 +29,9 @@ context 'a collection of dialog fields containing two objects with ids and one without an id' do let(:updated_fields) do [ - { 'id' => dialog_fields.first.id, 'label' => 'updated_field_label', 'dialog_field_responders' => []}, - { 'id' => dialog_fields.last.id, 'label' => 'updated_field_label', 'dialog_field_responders' => []}, - { 'name' => 'new field', 'label' => 'new field label', 'dialog_field_responders' => [] } + {'id' => dialog_fields.first.id, 'label' => 'updated_field_label', 'dialog_field_responders' => []}, + {'id' => dialog_fields.last.id, 'label' => 'updated_field_label', 'dialog_field_responders' => []}, + {'name' => 'new field', 'label' => 'new field label', 'dialog_field_responders' => []} ] end it 'creates or updates the dialog fields' do @@ -45,9 +45,9 @@ context 'a collection of dialog fields with resource actions' do let(:updated_fields) do [ - { 'id' => dialog_fields.first.id, 'label' => 'updated_field_label', 'resource_action' => - {'resource_type' => 'DialogField', 'ae_attributes' => {}}, 'dialog_field_responders' => [] }, - { 'id' => dialog_fields.last.id, 'label' => 'updated_field_label', 'resource_action' => + {'id' => dialog_fields.first.id, 'label' => 'updated_field_label', 'resource_action' => + {'resource_type' => 'DialogField', 'ae_attributes' => {}}, 'dialog_field_responders' => []}, + {'id' => dialog_fields.last.id, 'label' => 'updated_field_label', 'resource_action' => {'id' => resource_action.id, 'resource_type' => 'DialogField'}, 'dialog_field_responders' => []} ] end @@ -55,14 +55,14 @@ dialog_group.update_dialog_fields(updated_fields) dialog_group.reload expect(dialog_group.dialog_fields.collect(&:resource_action).collect(&:resource_type)) - .to match_array(%w(DialogField DialogField)) + .to match_array(%w[DialogField DialogField]) end end context 'with a dialog field removed from the dialog fields' do let(:updated_fields) do [ - { 'id' => dialog_fields.first.id, 'dialog_field_responders' => [] } + {'id' => dialog_fields.first.id, 'dialog_field_responders' => []} ] end @@ -75,8 +75,8 @@ context 'it symbolizes a dialog fields options' do let(:updated_fields) do - [{ 'name' => 'new', 'label' => 'new field label', 'type' => 'DialogFieldTagControl', 'options' => - { 'name' => 'foo', 'description' => 'bar'} }] + [{'name' => 'new', 'label' => 'new field label', 'type' => 'DialogFieldTagControl', 'options' => + {'name' => 'foo', 'description' => 'bar'}}] end it 'dialog_field.options has symbolized keys' do diff --git a/spec/models/dialog_spec.rb b/spec/models/dialog_spec.rb index 903d37be730..bb29e211ab1 100644 --- a/spec/models/dialog_spec.rb +++ b/spec/models/dialog_spec.rb @@ -315,15 +315,15 @@ 'id' => dialog_tab.first.id, 'label' => 'updated_label', 'dialog_groups' => [ - { 'id' => 
dialog_group.first.id, - 'dialog_tab_id' => dialog_tab.first.id, - 'dialog_fields' => - [{ + {'id' => dialog_group.first.id, + 'dialog_tab_id' => dialog_tab.first.id, + 'dialog_fields' => + [{ 'id' => dialog_field.first.id, 'name' => dialog_field.first.name, 'dialog_group_id' => dialog_group.first.id, - 'dialog_field_responders' => %w(dialog_field2) - }] }, + 'dialog_field_responders' => %w[dialog_field2] + }]}, { 'label' => 'group 2', 'dialog_fields' => [{ @@ -372,7 +372,7 @@ [ 'id' => dialog_tab.first.id, 'dialog_groups' => [ - { 'id' => dialog_group.first.id, 'dialog_fields' => [{ 'id' => dialog_field.first.id }] } + {'id' => dialog_group.first.id, 'dialog_fields' => [{'id' => dialog_field.first.id}]} ] ] end diff --git a/spec/models/dialog_tab_serializer_spec.rb b/spec/models/dialog_tab_serializer_spec.rb index f979ad9df37..bbb17dafa1a 100644 --- a/spec/models/dialog_tab_serializer_spec.rb +++ b/spec/models/dialog_tab_serializer_spec.rb @@ -30,7 +30,7 @@ before do allow(dialog_group_serializer).to receive(:serialize).with(dialog_group, boolean) - .and_return("serialized dialog group") + .and_return("serialized dialog group") end context 'when wanting the excluded set of attributes' do @@ -51,7 +51,7 @@ 'dialog_id' => nil, 'id' => nil, 'updated_at' => nil - )) + )) end end end diff --git a/spec/models/dialog_tab_spec.rb b/spec/models/dialog_tab_spec.rb index bfdb4f3dcb5..79ab8635fad 100644 --- a/spec/models/dialog_tab_spec.rb +++ b/spec/models/dialog_tab_spec.rb @@ -38,14 +38,12 @@ context 'a collection of dialog groups containing two objects with ids and one without an id' do let(:updated_groups) do [ - { 'id' => dialog_groups.first.id, - 'label' => 'updated_label', - 'dialog_fields' => [{ 'id' => dialog_fields.first.id}] - }, - { 'id' => dialog_groups.last.id, - 'label' => 'updated_label', - 'dialog_fields' => [{'id' => dialog_fields.last.id}] - }, + {'id' => dialog_groups.first.id, + 'label' => 'updated_label', + 'dialog_fields' => [{'id' => dialog_fields.first.id}]}, + {'id' => dialog_groups.last.id, + 'label' => 'updated_label', + 'dialog_fields' => [{'id' => dialog_fields.last.id}]}, { 'label' => 'a new label', 'dialog_fields' => [{'name' => 'field name', 'label' => 'field label'}] @@ -71,7 +69,7 @@ context 'with a dialog group removed from the dialog groups' do let(:updated_groups) do [ - { 'id' => dialog_groups.first.id, 'dialog_fields' => []} + {'id' => dialog_groups.first.id, 'dialog_fields' => []} ] end diff --git a/spec/models/drift_state/purging_spec.rb b/spec/models/drift_state/purging_spec.rb index ca267ff9ff4..c710c25510d 100644 --- a/spec/models/drift_state/purging_spec.rb +++ b/spec/models/drift_state/purging_spec.rb @@ -13,16 +13,16 @@ stub_settings(@vmdb_config) @rr1 = [ - FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 1, :timestamp => (6.months + 1.days).to_i.seconds.ago.utc), - FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 1, :timestamp => (6.months - 1.days).to_i.seconds.ago.utc) + FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 1, :timestamp => (6.months + 1.day).to_i.seconds.ago.utc), + FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 1, :timestamp => (6.months - 1.day).to_i.seconds.ago.utc) ] @rr2 = [ FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 2, :timestamp => (6.months + 2.days).to_i.seconds.ago.utc), - FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 2, 
:timestamp => (6.months + 1.days).to_i.seconds.ago.utc), - FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 2, :timestamp => (6.months - 1.days).to_i.seconds.ago.utc) + FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 2, :timestamp => (6.months + 1.day).to_i.seconds.ago.utc), + FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => 2, :timestamp => (6.months - 1.day).to_i.seconds.ago.utc) ] @rr_orphaned = [ - FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => nil, :timestamp => (6.months - 1.days).to_i.seconds.ago.utc) + FactoryBot.create(:drift_state, :resource_type => 'VmOrTemplate', :resource_id => nil, :timestamp => (6.months - 1.day).to_i.seconds.ago.utc) ] end diff --git a/spec/models/ems_cluster_spec.rb b/spec/models/ems_cluster_spec.rb index bb3ba159ac3..991e39c2860 100644 --- a/spec/models/ems_cluster_spec.rb +++ b/spec/models/ems_cluster_spec.rb @@ -44,9 +44,9 @@ it('#total_vms_and_templates') { expect(@cluster.total_vms_and_templates).to eq(4) } - it('#total_vms') { expect(@cluster.total_vms).to eq(2) } + it('#total_vms') { expect(@cluster.total_vms).to eq(2) } - it('#total_miq_templates') { expect(@cluster.total_miq_templates).to eq(2) } + it('#total_miq_templates') { expect(@cluster.total_miq_templates).to eq(2) } it('ResourcePool#v_direct_vms') { expect(@rp1.v_direct_vms).to eq(1) } it('ResourcePool#v_total_vms') { expect(@rp1.v_total_vms).to eq(2) } @@ -98,7 +98,7 @@ it('#total_vms_and_templates') { expect(@cluster.total_vms_and_templates).to eq(4) } - it('#total_vms') { expect(@cluster.total_vms).to eq(2) } + it('#total_vms') { expect(@cluster.total_vms).to eq(2) } it('#total_miq_templates') { expect(@cluster.total_miq_templates).to eq(2) } it('#hosts') { expect(@cluster.hosts).to match_array [@host1, @host2] } @@ -221,8 +221,8 @@ describe "#event_where_clause" do let(:cluster) { FactoryBot.create(:ems_cluster) } # just doing one to avoid db random ordering - let(:vms) { FactoryBot.create_list(:vm, 1, :ems_cluster => cluster)} - let(:hosts) { FactoryBot.create_list(:host, 1, :ems_cluster => cluster)} + let(:vms) { FactoryBot.create_list(:vm, 1, :ems_cluster => cluster) } + let(:hosts) { FactoryBot.create_list(:host, 1, :ems_cluster => cluster) } it "handles empty cluster" do expect(cluster.event_where_clause).to eq(["ems_cluster_id = ?", cluster.id]) end diff --git a/spec/models/ems_event_spec.rb b/spec/models/ems_event_spec.rb index e4710f11da0..9c7d5d1ab6d 100644 --- a/spec/models/ems_event_spec.rb +++ b/spec/models/ems_event_spec.rb @@ -13,7 +13,7 @@ context "container events" do let(:ems_ref) { "test_ems_ref" } let(:ems) { FactoryBot.create(:ems_kubernetes) } - let(:event_hash) { { :ems_ref => "event-ref", :ems_id => ems.id, :event_type => "STUFF_HAPPENED" } } + let(:event_hash) { {:ems_ref => "event-ref", :ems_id => ems.id, :event_type => "STUFF_HAPPENED"} } let(:container_project) { FactoryBot.create(:container_project, :ext_management_system => ems) } context "on node" do @@ -76,7 +76,7 @@ end context ".process_availability_zone_in_event!" 
do
-      let(:event_hash) { { :vm_or_template_id => vm.id } }
+      let(:event_hash) { {:vm_or_template_id => vm.id} }
       context "when the event has an availability zone" do
         before do
           event_hash[:availability_zone_ems_ref] = @availability_zone.ems_ref
         end
@@ -588,7 +588,7 @@
       before do
         allow(ems).to receive(:allow_targeted_refresh?).and_return(true)
-        allow(ems.class.const_get('EventTargetParser')).to receive(:new).and_return(target_parser)
+        allow(ems.class.const_get(:EventTargetParser)).to receive(:new).and_return(target_parser)
         expect(target_parser).to receive(:parse).and_return(targets)
       end
@@ -622,9 +622,9 @@
   end
   context 'Physical Storage Events' do
-    let(:ems) { FactoryBot.create(:ems_storage) }
+    let(:ems) { FactoryBot.create(:ems_storage) }
     let(:physical_storage) { FactoryBot.create(:physical_storage, :name => "my-storage", :ems_ref => "ems1", :ext_management_system => ems) }
-    let(:event_hash) {
+    let(:event_hash) do
       {
         :event_type => "physical_storage_alert",
         :ems_ref => "1",
@@ -632,7 +632,7 @@
         :ems_id => ems.id,
         :message => "description"
       }
-    }
+    end
     it "test process_physical_storage_in_event!" do
       event = EmsEvent.add(ems.id, event_hash)
diff --git a/spec/models/ems_folder_spec.rb b/spec/models/ems_folder_spec.rb
index 2a09ec6b82f..6fa9f0231e6 100644
--- a/spec/models/ems_folder_spec.rb
+++ b/spec/models/ems_folder_spec.rb
@@ -36,7 +36,7 @@
   before do
     @root = FactoryBot.create(:ems_folder, :name => "root")
-    @dc = FactoryBot.create(:datacenter, :name => "dc")
+    @dc = FactoryBot.create(:datacenter, :name => "dc")
     @dc.parent = @root
     @vm = FactoryBot.create(:ems_folder, :name => "vm", :hidden => true)
diff --git a/spec/models/ems_refresh/save_inventory/save_tags_inventory_spec.rb b/spec/models/ems_refresh/save_inventory/save_tags_inventory_spec.rb
index 7bcef24f22e..5160760d98e 100644
--- a/spec/models/ems_refresh/save_inventory/save_tags_inventory_spec.rb
+++ b/spec/models/ems_refresh/save_inventory/save_tags_inventory_spec.rb
@@ -4,8 +4,8 @@
 # @return [Tag] a tag in a category linked to a mapping.
def mapped_tag(category_name, tag_name) mapping = FactoryBot.create(:tag_mapping_with_category, - :category_name => category_name, - :category_description => category_name) + :category_name => category_name, + :category_description => category_name) category = mapping.tag.classification entry = category.add_entry(:name => tag_name, :description => tag_name) entry.tag diff --git a/spec/models/ems_refresh/save_inventory_spec.rb b/spec/models/ems_refresh/save_inventory_spec.rb index 18b71ae13b6..3b2191eaeb2 100644 --- a/spec/models/ems_refresh/save_inventory_spec.rb +++ b/spec/models/ems_refresh/save_inventory_spec.rb @@ -147,7 +147,7 @@ context "with disconnected non-dup in the database" do before do - @uid = SecureRandom.uuid + @uid = SecureRandom.uuid @vm1 = FactoryBot.create(:vm_with_ref, :ext_management_system => nil, :uid_ems => @uid) @vm2 = FactoryBot.build(:vm_with_ref, :ext_management_system => @ems, :uid_ems => @uid) end @@ -172,7 +172,7 @@ @ems_ref1 = @vm1.ems_ref @ems_ref2 = @vm2.ems_ref - @vm1.ems_ref = @vm2.ems_ref = nil + @vm1.ems_ref = @vm2.ems_ref = nil @vm1.save @vm2.save end @@ -229,7 +229,7 @@ def raw_data_process(*args) args.collect do |v| - RAW_DATA_ATTRS.each_with_object({}) { |s, h| h[s] = v.send(s) } + RAW_DATA_ATTRS.index_with { |s| v.send(s) } end end diff --git a/spec/models/ems_refresh_spec.rb b/spec/models/ems_refresh_spec.rb index bd9091472d9..c4d2765dfd2 100644 --- a/spec/models/ems_refresh_spec.rb +++ b/spec/models/ems_refresh_spec.rb @@ -62,7 +62,7 @@ let(:targets) do targets = [] - (0..996).each do |i| + 997.times do |i| targets << InventoryRefresh::Target.load( :manager_id => @ems.id, :association => :vms, @@ -215,9 +215,9 @@ def assert_queue_item(expected_targets) end EmsRefresh.refresh([ - [vm1.class, vm1.id], - [vm2.class, vm2.id], - ]) + [vm1.class, vm1.id], + [vm2.class, vm2.id], + ]) end it "ignores an EMS-less (archived) VM" do @@ -226,9 +226,9 @@ def assert_queue_item(expected_targets) vm2 = FactoryBot.create(:vm_vmware, :name => "vm_vmware2", :ext_management_system => nil) expect(ManageIQ::Providers::Vmware::InfraManager::Refresher).to receive(:refresh).with([vm1]) EmsRefresh.refresh([ - [vm1.class, vm1.id], - [vm2.class, vm2.id], - ]) + [vm1.class, vm1.id], + [vm2.class, vm2.id], + ]) end end diff --git a/spec/models/endpoint_spec.rb b/spec/models/endpoint_spec.rb index 634fe7a26f8..2dd491d15dc 100644 --- a/spec/models/endpoint_spec.rb +++ b/spec/models/endpoint_spec.rb @@ -79,7 +79,7 @@ context "certificate_authority" do # openssl req -x509 -newkey rsa:512 -out cert.pem -nodes, all defaults, twice let(:pem1) do - <<-EOPEM.strip_heredoc + <<~EOPEM -----BEGIN CERTIFICATE----- MIIBzTCCAXegAwIBAgIJAOgErvCo3YfDMA0GCSqGSIb3DQEBCwUAMEIxCzAJBgNV BAYTAlhYMRUwEwYDVQQHDAxEZWZhdWx0IENpdHkxHDAaBgNVBAoME0RlZmF1bHQg @@ -95,7 +95,7 @@ EOPEM end let(:pem2) do - <<-EOPEM.strip_heredoc + <<~EOPEM -----BEGIN CERTIFICATE----- MIIBzTCCAXegAwIBAgIJAOpKKx6qCHdIMA0GCSqGSIb3DQEBCwUAMEIxCzAJBgNV BAYTAlhYMRUwEwYDVQQHDAxEZWZhdWx0IENpdHkxHDAaBgNVBAoME0RlZmF1bHQg @@ -125,7 +125,7 @@ endpoint.certificate_authority = "NONSENSE" expect(endpoint).not_to be_valid - endpoint.certificate_authority = <<-EOPEM.strip_heredoc + endpoint.certificate_authority = <<~EOPEM -----BEGIN CERTIFICATE----- ValidBase64InvalidCert== -----END CERTIFICATE----- diff --git a/spec/models/ext_management_system_spec.rb b/spec/models/ext_management_system_spec.rb index f063c05d574..c9e94da1811 100644 --- a/spec/models/ext_management_system_spec.rb +++ b/spec/models/ext_management_system_spec.rb @@ -127,7 
+127,7 @@ end it ".ems_infra_discovery_types" do - expected_types = %w(rhevm virtualcenter openstack_infra) + expected_types = %w[rhevm virtualcenter openstack_infra] expect(described_class.ems_infra_discovery_types).to match_array(expected_types) end @@ -241,9 +241,9 @@ context "with multiple endpoints using connection_configurations" do let(:ems) do FactoryBot.build(:ems_openstack, - :hostname => "example.org", - :connection_configurations => [{:endpoint => {:role => "amqp", - :hostname => "amqp.example.org"}}]) + :hostname => "example.org", + :connection_configurations => [{:endpoint => {:role => "amqp", + :hostname => "amqp.example.org"}}]) end it "will contain seperate ampq endpoint" do @@ -260,9 +260,9 @@ context "with multiple endpoints using connection_configurations (string keys)" do let(:ems) do FactoryBot.build(:ems_openstack, - "hostname" => "example.org", - "connection_configurations" => [{"endpoint" => {"role" => "amqp", - "hostname" => "amqp.example.org"}}]) + "hostname" => "example.org", + "connection_configurations" => [{"endpoint" => {"role" => "amqp", + "hostname" => "amqp.example.org"}}]) end it "will contain seperate ampq endpoint" do @@ -279,14 +279,14 @@ context "with multiple endpoints using explicit authtype" do let(:ems) do FactoryBot.build(:ems_openshift, - :connection_configurations => [{:endpoint => {:role => "default", - :hostname => "openshift.example.org"}, - :authentication => {:role => "bearer", - :auth_key => "SomeSecret"}}, - {:endpoint => {:role => "hawkular", - :hostname => "openshift.example.org"}, - :authentication => {:role => "hawkular", - :auth_key => "SomeSecret"}}]) + :connection_configurations => [{:endpoint => {:role => "default", + :hostname => "openshift.example.org"}, + :authentication => {:role => "bearer", + :auth_key => "SomeSecret"}}, + {:endpoint => {:role => "hawkular", + :hostname => "openshift.example.org"}, + :authentication => {:role => "hawkular", + :auth_key => "SomeSecret"}}]) end it "will contain the bearer authentication as default" do @@ -300,12 +300,12 @@ context "with multiple endpoints using implicit default authtype" do let(:ems) do FactoryBot.build(:ems_openshift, - :connection_configurations => [{:endpoint => {:role => "default", - :hostname => "openshift.example.org"}, - :authentication => {:auth_key => "SomeSecret"}}, - {:endpoint => {:role => "hawkular", - :hostname => "openshift.example.org"}, - :authentication => {:auth_key => "SomeSecret"}}]) + :connection_configurations => [{:endpoint => {:role => "default", + :hostname => "openshift.example.org"}, + :authentication => {:auth_key => "SomeSecret"}}, + {:endpoint => {:role => "hawkular", + :hostname => "openshift.example.org"}, + :authentication => {:auth_key => "SomeSecret"}}]) end it "will contain the default authentication (bearer) for default endpoint" do @@ -370,17 +370,17 @@ @ems = FactoryBot.create(:ems_vmware) 2.times do FactoryBot.create(:vm_vmware, - :ext_management_system => @ems, - :hardware => FactoryBot.create(:hardware, - :cpu1x2, - :ram1GB)) + :ext_management_system => @ems, + :hardware => FactoryBot.create(:hardware, + :cpu1x2, + :ram1GB)) end 2.times do FactoryBot.create(:host, - :ext_management_system => @ems, - :hardware => FactoryBot.create(:hardware, - :cpu2x2, - :ram1GB)) + :ext_management_system => @ems, + :hardware => FactoryBot.create(:hardware, + :cpu2x2, + :ram1GB)) end end @@ -432,7 +432,7 @@ expect(@ems.total_vms_suspended).to eq(2) end - %w(total_vms_on total_vms_off total_vms_unknown total_vms_never total_vms_suspended).each do 
|vcol| + %w[total_vms_on total_vms_off total_vms_unknown total_vms_never total_vms_suspended].each do |vcol| it "should have virtual column #{vcol} " do expect(described_class).to have_virtual_column vcol.to_s, :integer end @@ -763,7 +763,7 @@ def deliver_queue_message(queue_message = MiqQueue.order(:id).first) result = ExtManagementSystem.inventory_status expect(result.size).to eq(2) - expect(result[0]).to eq(%w(region zone kind ems hosts vms)) + expect(result[0]).to eq(%w[region zone kind ems hosts vms]) expect(result[1][4..-1]).to eq([1, 2]) end @@ -773,7 +773,7 @@ def deliver_queue_message(queue_message = MiqQueue.order(:id).first) FactoryBot.create(:container, :ems_id => ems.id) result = ExtManagementSystem.inventory_status expect(result.size).to eq(2) - expect(result[0]).to eq(%w(region zone kind ems containers)) + expect(result[0]).to eq(%w[region zone kind ems containers]) expect(result[1][4..-1]).to eq([2]) end end diff --git a/spec/models/file_depot_swift_spec.rb b/spec/models/file_depot_swift_spec.rb index 501313f5a7e..aa8895c52c8 100644 --- a/spec/models/file_depot_swift_spec.rb +++ b/spec/models/file_depot_swift_spec.rb @@ -20,10 +20,10 @@ it "should return a merged uri with query strings given an empty port" do expect(file_depot_swift.merged_uri(uri, nil)).to eq merged_default_uri - end + end it "should return a merged uri with query strings when given a valid port" do expect(file_depot_swift.merged_uri(uri, "5678")).to eq merged_uri - end + end end end diff --git a/spec/models/filesystem_spec.rb b/spec/models/filesystem_spec.rb index cdf4b10cf19..da6f3761b93 100644 --- a/spec/models/filesystem_spec.rb +++ b/spec/models/filesystem_spec.rb @@ -1,21 +1,21 @@ RSpec.describe Filesystem do let(:filesystem_conf_file_ascii) do - <<-EOT -## NB: Unpolished config file -## This config file was taken directly from the upstream repo, and tweaked just enough to work. -## It has not been audited to ensure that everything present is either Heat controlled or a mandatory as-is setting. -## Please submit patches for any setting that should be deleted or Heat-configurable. '"-"' -## https://git.openstack.org/cgit/openstack/tripleo-image-elements + <<~EOT + ## NB: Unpolished config file + ## This config file was taken directly from the upstream repo, and tweaked just enough to work. + ## It has not been audited to ensure that everything present is either Heat controlled or a mandatory as-is setting. + ## Please submit patches for any setting that should be deleted or Heat-configurable. 
'"-"' + ## https://git.openstack.org/cgit/openstack/tripleo-image-elements -[DEFAULT] + [DEFAULT] -s3_host=192.0.2.10 -ec2_dmz_host=192.0.2.10 -ec2_url=http://192.0.2.10:8773/services/Cloud + s3_host=192.0.2.10 + ec2_dmz_host=192.0.2.10 + ec2_url=http://192.0.2.10:8773/services/Cloud -my_ip=192.0.2.13 + my_ip=192.0.2.13 EOT end diff --git a/spec/models/firmware_registry/rest_api_depot_spec.rb b/spec/models/firmware_registry/rest_api_depot_spec.rb index dbe4de2c56f..9b9193d1ce8 100644 --- a/spec/models/firmware_registry/rest_api_depot_spec.rb +++ b/spec/models/firmware_registry/rest_api_depot_spec.rb @@ -213,8 +213,8 @@ def assert_counts(counts) end end - def with_vcr(suffix) + def with_vcr(suffix, &block) path = "#{described_class.name.underscore}_#{suffix}" - VCR.use_cassette(path, :match_requests_on => [:method, :path]) { yield } + VCR.use_cassette(path, :match_requests_on => [:method, :path], &block) end end diff --git a/spec/models/firmware_target_spec.rb b/spec/models/firmware_target_spec.rb index f971d33db62..628fb954510 100644 --- a/spec/models/firmware_target_spec.rb +++ b/spec/models/firmware_target_spec.rb @@ -1,6 +1,6 @@ RSpec.describe FirmwareTarget do - let(:attrs) { { :manufacturer => 'manu', :model => 'model' } } - let(:other_attrs) { { :manufacturer => 'other-manu', :model => 'other-model' } } + let(:attrs) { {:manufacturer => 'manu', :model => 'model'} } + let(:other_attrs) { {:manufacturer => 'other-manu', :model => 'other-model'} } subject! { FactoryBot.create(:firmware_target, **attrs) } diff --git a/spec/models/generic_object_definition_spec.rb b/spec/models/generic_object_definition_spec.rb index 8ca9f53217b..bc17a4d16ef 100644 --- a/spec/models/generic_object_definition_spec.rb +++ b/spec/models/generic_object_definition_spec.rb @@ -108,8 +108,8 @@ describe '#add_property_attribute' do let(:definition) do FactoryBot.create(:generic_object_definition, - :name => 'test', - :properties => { :attributes => {:status => "string"}}) + :name => 'test', + :properties => {:attributes => {:status => "string"}}) end it 'adds a new attribute' do @@ -135,8 +135,8 @@ describe '#delete_property_attribute' do let(:definition) do FactoryBot.create(:generic_object_definition, - :name => 'test', - :properties => { :attributes => {:status => "string"}}) + :name => 'test', + :properties => {:attributes => {:status => "string"}}) end it 'does nothing for non-existing attribute' do @@ -159,26 +159,26 @@ describe '#add_property_method' do let(:definition) do FactoryBot.create(:generic_object_definition, - :name => 'test', - :properties => { :methods => %w(method1) }) + :name => 'test', + :properties => {:methods => %w[method1]}) end it 'adds a new method' do definition.add_property_method("add_vms") - expect(definition.properties).to include(:methods => %w(method1 add_vms)) + expect(definition.properties).to include(:methods => %w[method1 add_vms]) end it 'does nothing for existing method' do expect { definition.add_property_method("method1") }.to make_database_queries(:count => 4..8) - expect(definition.properties).to include(:methods => %w(method1)) + expect(definition.properties).to include(:methods => %w[method1]) end end describe '#delete_property_method' do let(:definition) do FactoryBot.create(:generic_object_definition, - :name => 'test', - :properties => { :methods => %w(method1) }) + :name => 'test', + :properties => {:methods => %w[method1]}) end it 'deletes an existing method' do @@ -188,15 +188,15 @@ it 'does nothing for non-existing method' do 
definition.delete_property_method(:method2) - expect(definition.properties).to include(:methods => %w(method1)) + expect(definition.properties).to include(:methods => %w[method1]) end end describe '#add_property_association' do let(:definition) do FactoryBot.create(:generic_object_definition, - :name => 'test', - :properties => { :associations => { :vms => 'Vm' } }) + :name => 'test', + :properties => {:associations => {:vms => 'Vm'}}) end it 'adds a new association' do @@ -227,8 +227,8 @@ describe '#delete_property_association' do let(:definition) do FactoryBot.create(:generic_object_definition, - :name => 'test', - :properties => {:associations => {:vms => 'Vm'}}) + :name => 'test', + :properties => {:associations => {:vms => 'Vm'}}) end it 'deletes an existing association' do @@ -261,7 +261,7 @@ subject { definition.find_objects(@options) } it 'finds multiple objects' do - @options = { :max_number => 10 } + @options = {:max_number => 10} expect(subject.size).to eq(2) end @@ -354,23 +354,23 @@ before do FactoryBot.create(:custom_button, - :name => "visible button on Generic Object", - :applies_to_class => "GenericObject", - :visibility_expression => true_expression_on_generic) + :name => "visible button on Generic Object", + :applies_to_class => "GenericObject", + :visibility_expression => true_expression_on_generic) FactoryBot.create(:custom_button, - :name => "hidden button on Generic Object", - :applies_to_class => "GenericObject", - :visibility_expression => false_expression_on_generic) + :name => "hidden button on Generic Object", + :applies_to_class => "GenericObject", + :visibility_expression => false_expression_on_generic) FactoryBot.create(:custom_button, - :name => "visible button on Generic Object Definition", - :applies_to_class => "GenericObjectDefinition", - :applies_to_id => definition.id, - :visibility_expression => true_expression_on_definition) + :name => "visible button on Generic Object Definition", + :applies_to_class => "GenericObjectDefinition", + :applies_to_id => definition.id, + :visibility_expression => true_expression_on_definition) FactoryBot.create(:custom_button, - :name => "hidden button on Generic Object Definition", - :applies_to_class => "GenericObjectDefinition", - :applies_to_id => definition.id, - :visibility_expression => false_expression_on_definition) + :name => "hidden button on Generic Object Definition", + :applies_to_class => "GenericObjectDefinition", + :applies_to_id => definition.id, + :visibility_expression => false_expression_on_definition) end it "uses appropriate object: parameter, which is GenericObject or definition for expression evaluation" do diff --git a/spec/models/generic_object_spec.rb b/spec/models/generic_object_spec.rb index 6bf51c73084..a33742be6c3 100644 --- a/spec/models/generic_object_spec.rb +++ b/spec/models/generic_object_spec.rb @@ -20,7 +20,7 @@ :s_time => "datetime" }, :associations => {"vms" => "Vm", "hosts" => "Host"}, - :methods => %w(my_host some_method) + :methods => %w[my_host some_method] } ) end @@ -89,12 +89,12 @@ } go.save! 
- expect(go.property_attributes['s_time']).to be_within(0.001).of (s_time - 2.days) + expect(go.property_attributes['s_time']).to be_within(0.001).of(s_time - 2.days) expect(go.property_attributes).to include( "flag" => false, "data_read" => data_read + 100.50, "max_number" => max_number + 100, - "server" => "#{server_name}_2", + "server" => "#{server_name}_2" ) end @@ -172,7 +172,7 @@ end describe 'property methods' do - let(:ws) { double("MiqAeWorkspaceRuntime", :root => {"method_result" => "some_return_value"}) } + let(:ws) { double("MiqAeWorkspaceRuntime", :root => {"method_result" => "some_return_value"}) } before { go.ae_user_identity(user) } @@ -213,13 +213,13 @@ end it 'one array parameter' do - options = {:attrs => attrs.merge(:param_1 => %w(p1 p2), :param_1_type=>"Array")} + options = {:attrs => attrs.merge(:param_1 => %w[p1 p2], :param_1_type => "Array")} expect(MiqAeEngine).to receive(:deliver).with(hash_including(options)).and_return(ws) - go.my_host(%w(p1 p2)) + go.my_host(%w[p1 p2]) end it 'one hash parameter' do - options = {:attrs => attrs.merge(:param_1 => {:p1 => 1, :p2 => 2}, :param_1_type=>"Hash")} + options = {:attrs => attrs.merge(:param_1 => {:p1 => 1, :p2 => 2}, :param_1_type => "Hash")} expect(MiqAeEngine).to receive(:deliver).with(hash_including(options)).and_return(ws) go.my_host(:p1 => 1, :p2 => 2) end diff --git a/spec/models/git_repository_spec.rb b/spec/models/git_repository_spec.rb index ef569438973..1d3e68aa255 100644 --- a/spec/models/git_repository_spec.rb +++ b/spec/models/git_repository_spec.rb @@ -37,18 +37,16 @@ context "repo" do let(:gwt) { instance_double('GitWorktree') } let(:verify_ssl) { OpenSSL::SSL::VERIFY_PEER } - let(:branch_list) { %w(b1 b2) } - let(:tag_list) { %w(t1 t2) } + let(:branch_list) { %w[b1 b2] } + let(:tag_list) { %w[t1 t2] } let(:info) { {:time => Time.now.utc, :message => "R2D2", :commit_sha => "abcdef"} } let(:branch_info_hash) do {'b1' => {:time => Time.now.utc, :message => "B1", :commit_sha => "abcdef"}, - 'b2' => {:time => Time.now.utc - 5, :message => "B2", :commit_sha => "123456"} - } + 'b2' => {:time => Time.now.utc - 5, :message => "B2", :commit_sha => "123456"}} end let(:tag_info_hash) do {'t1' => {:time => Time.now.utc, :message => "T1", :commit_sha => "abc12f"}, - 't2' => {:time => Time.now.utc + 5, :message => "T2", :commit_sha => "123456"} - } + 't2' => {:time => Time.now.utc + 5, :message => "T2", :commit_sha => "123456"}} end let(:repo) { FactoryBot.create(:git_repository, :verify_ssl => verify_ssl) } let(:userid) { 'user' } diff --git a/spec/models/host_spec.rb b/spec/models/host_spec.rb index efe370e2ad1..33802cfc521 100644 --- a/spec/models/host_spec.rb +++ b/spec/models/host_spec.rb @@ -48,25 +48,25 @@ expect(DriftState.count).to eq(1) expect(host.drift_states.first.data).to eq({ - :class => "ManageIQ::Providers::Vmware::InfraManager::Host", - :id => host.id, - :name => host.name, - :vmm_vendor_display => "VMware", - :v_total_vms => 0, - - :advanced_settings => [], - :filesystems => [], - :filesystems_custom_attributes => [], - :groups => [], - :guest_applications => [], - :lans => [], - :patches => [], - :switches => [], - :system_services => [], - :tags => [], - :users => [], - :vms => [], - }) + :class => "ManageIQ::Providers::Vmware::InfraManager::Host", + :id => host.id, + :name => host.name, + :vmm_vendor_display => "VMware", + :v_total_vms => 0, + + :advanced_settings => [], + :filesystems => [], + :filesystems_custom_attributes => [], + :groups => [], + :guest_applications => [], + :lans => [], + 
:patches => [], + :switches => [], + :system_services => [], + :tags => [], + :users => [], + :vms => [], + }) end it "emits cluster policy event when the cluster changes" do @@ -226,7 +226,7 @@ :class_name => @host.class.name, :role => "ems_operations", :zone => @ems.zone.name, - :queue_name => @ems.queue_name_for_ems_operations, + :queue_name => @ems.queue_name_for_ems_operations ) end @@ -315,8 +315,7 @@ it "validate remote, then validate default" do data = {:default => {:userid => "", :password => ""}, - :remote => {:userid => "root", :password => password}, - } + :remote => {:userid => "root", :password => password},} @host.update_authentication(data, :save => false) assert_remote_credentials_validated @@ -385,8 +384,8 @@ def assert_remote_credentials_validated(options = {}) let(:ems) { FactoryBot.build(:ext_management_system) } let(:host) do FactoryBot.build(:host, - :ext_management_system => ems, - :ems_cluster => FactoryBot.build(:ems_cluster)) + :ext_management_system => ems, + :ems_cluster => FactoryBot.build(:ems_cluster)) end it "clears ems and cluster" do host.disconnect_ems(ems) diff --git a/spec/models/import_file_upload_spec.rb b/spec/models/import_file_upload_spec.rb index b3a9f489121..e405d581dae 100644 --- a/spec/models/import_file_upload_spec.rb +++ b/spec/models/import_file_upload_spec.rb @@ -61,13 +61,13 @@ describe "#widget_list" do before do import_file_upload.create_binary_blob( - :binary => <<-BINARY ---- -- MiqWidget: - title: Widget1 -- MiqWidget: - title: widget - not_name: test + :binary => <<~BINARY + --- + - MiqWidget: + title: Widget1 + - MiqWidget: + title: widget + not_name: test BINARY ) allow(MiqWidget).to receive(:exists?).with(:title => "widget").and_return(exists?) diff --git a/spec/models/job/state_machine_spec.rb b/spec/models/job/state_machine_spec.rb index f9b4d38bd8f..25360a50eeb 100644 --- a/spec/models/job/state_machine_spec.rb +++ b/spec/models/job/state_machine_spec.rb @@ -2,7 +2,7 @@ subject(:job) do # Job is expected to be subclassed by something # that implements load_transitions - Class.new(described_class) { + Class.new(described_class) do def load_transitions self.state ||= 'initialize' { @@ -14,7 +14,7 @@ def load_transitions :error => {'*' => '*'} } end - }.new + end.new end it "should transition from one state to another by a signal" do diff --git a/spec/models/log_collection_spec.rb b/spec/models/log_collection_spec.rb index a1bf039abb8..649b83dc2b6 100644 --- a/spec/models/log_collection_spec.rb +++ b/spec/models/log_collection_spec.rb @@ -9,9 +9,8 @@ @log_file = FactoryBot.create(:log_file, :state => "collecting") @miq_server.log_files << @log_file @task = FactoryBot.create(:miq_task, - :miq_server_id => @miq_server.id, - :name => "Zipped log retrieval for #{@miq_server.name}" - ) + :miq_server_id => @miq_server.id, + :name => "Zipped log retrieval for #{@miq_server.name}") end it { expect(@miq_server).to be_log_collection_active } @@ -71,7 +70,7 @@ end end - %w(models dialogs current archive).each do |log_type| + %w[models dialogs current archive].each do |log_type| context "using a #{log_type} log file" do before do @fname = "#{log_type}.zip" diff --git a/spec/models/manageiq/providers/ansible_playbook_workflow_spec.rb b/spec/models/manageiq/providers/ansible_playbook_workflow_spec.rb index 11b7c0cd665..92a7b53190d 100644 --- a/spec/models/manageiq/providers/ansible_playbook_workflow_spec.rb +++ b/spec/models/manageiq/providers/ansible_playbook_workflow_spec.rb @@ -119,7 +119,7 @@ end context "with 
configuration_script_source_id + playbook_relative_path" do - let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {:configuration_script_source_id => css.id, :playbook_relative_path => playbook_relative_path}, %w[192.0.2.0 192.0.2.1], :poll_interval => 5.minutes] } + let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {:configuration_script_source_id => css.id, :playbook_relative_path => playbook_relative_path}, %w[192.0.2.0 192.0.2.1], {:poll_interval => 5.minutes}] } it "will checkout the git repository to a temp dir before proceeding" do expect_any_instance_of(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::ConfigurationScriptSource).to receive(:checkout_git_repository) @@ -133,7 +133,7 @@ end context "with neither playbook_path nor a configuration_script_source_id, playbook_relative_path pair" do - let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {}, %w[192.0.2.0 192.0.2.1], :poll_interval => 5.minutes] } + let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {}, %w[192.0.2.0 192.0.2.1], {:poll_interval => 5.minutes}] } it "fails" do expect(job).to_not receive(:queue_signal).with(:execute) @@ -143,7 +143,7 @@ end context "with only configuration_script_source_id" do - let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {:configuration_script_source_id => css.id}, %w[192.0.2.0 192.0.2.1], :poll_interval => 5.minutes] } + let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {:configuration_script_source_id => css.id}, %w[192.0.2.0 192.0.2.1], {:poll_interval => 5.minutes}] } it "fails" do expect(job).to_not receive(:queue_signal).with(:execute) @@ -153,7 +153,7 @@ end context "with only playbook_relative_path" do - let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {:playbook_relative_path => playbook_relative_path}, %w[192.0.2.0 192.0.2.1], :poll_interval => 5.minutes] } + let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, {:playbook_relative_path => playbook_relative_path}, %w[192.0.2.0 192.0.2.1], {:poll_interval => 5.minutes}] } it "fails" do expect(job).to_not receive(:queue_signal).with(:execute) diff --git a/spec/models/manageiq/providers/ansible_role_workflow_spec.rb b/spec/models/manageiq/providers/ansible_role_workflow_spec.rb index 246085b431b..489816f5aec 100644 --- a/spec/models/manageiq/providers/ansible_role_workflow_spec.rb +++ b/spec/models/manageiq/providers/ansible_role_workflow_spec.rb @@ -1,6 +1,6 @@ RSpec.describe ManageIQ::Providers::AnsibleRoleWorkflow do let(:job) { described_class.create_job(*options, **job_kwargs).tap { |job| job.state = state } } - let(:role_options) { {:role_name => 'role_name', :roles_path => '/path/to/role', :role_skip_facts => true } } + let(:role_options) { {:role_name => 'role_name', :roles_path => '/path/to/role', :role_skip_facts => true} } let(:options) { [{"ENV" => "VAR"}, {"arg1" => "val1"}, role_options] } let(:job_kwargs) { {:verbosity => 4} } let(:state) { "waiting_to_start" } diff --git a/spec/models/manageiq/providers/base_manager_spec.rb b/spec/models/manageiq/providers/base_manager_spec.rb index 8a9dc8f55a4..c92d33ec936 100644 --- a/spec/models/manageiq/providers/base_manager_spec.rb +++ b/spec/models/manageiq/providers/base_manager_spec.rb @@ -8,12 +8,12 @@ stub_settings_merge( :ems => { :ems_some_provider => { - :blacklisted_event_names => %w(ev1 ev2) + :blacklisted_event_names => %w[ev1 ev2] } } ) allow(described_class).to receive(:provider_name).and_return('SomeProvider') - expect(described_class.default_blacklisted_event_names).to eq(%w(ev1 ev2)) + 
expect(described_class.default_blacklisted_event_names).to eq(%w[ev1 ev2]) end end diff --git a/spec/models/manageiq/providers/cloud_manager/auth_key_pair_spec.rb b/spec/models/manageiq/providers/cloud_manager/auth_key_pair_spec.rb index bfe4fa4b532..50e672dd32c 100644 --- a/spec/models/manageiq/providers/cloud_manager/auth_key_pair_spec.rb +++ b/spec/models/manageiq/providers/cloud_manager/auth_key_pair_spec.rb @@ -5,8 +5,8 @@ context 'create and delete actions' do it "has methods" do - expect(subject.class.respond_to? :create_key_pair).to be true - expect(subject.respond_to? :delete_key_pair).to be true + expect(subject.class.respond_to?(:create_key_pair)).to be true + expect(subject.respond_to?(:delete_key_pair)).to be true end # TODO(maufart): do we have any special approach to test module methods separately? diff --git a/spec/models/manageiq/providers/cloud_manager/metrics_capture_spec.rb b/spec/models/manageiq/providers/cloud_manager/metrics_capture_spec.rb index 9eccef33ab8..efd689a047b 100644 --- a/spec/models/manageiq/providers/cloud_manager/metrics_capture_spec.rb +++ b/spec/models/manageiq/providers/cloud_manager/metrics_capture_spec.rb @@ -2,7 +2,7 @@ include Spec::Support::MetricHelper let(:miq_server) { EvmSpecHelper.local_miq_server } - let(:ems) { FactoryBot.create(:ems_openstack, :zone => miq_server.zone) } + let(:ems) { FactoryBot.create(:ems_openstack, :zone => miq_server.zone) } describe ".capture_ems_targets" do before do diff --git a/spec/models/manageiq/providers/cloud_manager/provision_workflow_spec.rb b/spec/models/manageiq/providers/cloud_manager/provision_workflow_spec.rb index dc2f26217a3..87c873a61c1 100644 --- a/spec/models/manageiq/providers/cloud_manager/provision_workflow_spec.rb +++ b/spec/models/manageiq/providers/cloud_manager/provision_workflow_spec.rb @@ -30,7 +30,7 @@ end it "should retrieve cloud-init templates when cloning" do - options = {'key' => 'value' } + options = {'key' => 'value'} result = workflow.allowed_customization_templates(options) customization_template = workflow.instance_variable_get(:@values)[:customization_template_script] @@ -38,13 +38,13 @@ expect(customization_template).to eq cloud_init_template.script expect(template_hash).to be_a(Hash) - %i(id name description).each do |attr| + %i[id name description].each do |attr| expect(template_hash[attr]).to eq cloud_init_template.send(attr) end end it "should retrieve sysprep templates when cloning" do - options = {'key' => 'value' } + options = {'key' => 'value'} allow(sysprep_workflow).to receive(:supports_sysprep?).and_return(true) allow(sysprep_workflow).to receive(:load_ar_obj).and_return(template) allow(template).to receive(:platform).and_return('windows') @@ -55,7 +55,7 @@ expect(customization_template).to eq sysprep_template.script expect(template_hash).to be_a(Hash) - %i(id name description).each do |attr| + %i[id name description].each do |attr| expect(template_hash[attr]).to eq sysprep_template.send(attr) end end @@ -109,7 +109,7 @@ context "floating_ips" do it "#get_targets_for_source" do fip1 = FactoryBot.create(:floating_ip, :cloud_network_only => true, - :ext_management_system => ems.network_manager) + :ext_management_system => ems.network_manager) filtered = workflow.send(:get_targets_for_source, ems, :cloud_filter, FloatingIp, 'floating_ips.available') expect(filtered.size).to eq(1) @@ -171,15 +171,15 @@ @cn1 = FactoryBot.create(:cloud_network, :ext_management_system => ems.network_manager, :cidr => "10.0.0./8") @cs1 = FactoryBot.create(:cloud_subnet, :cloud_network => 
@cn1, - :availability_zone => @az1, - :ext_management_system => ems.network_manager) + :availability_zone => @az1, + :ext_management_system => ems.network_manager) @cs2 = FactoryBot.create(:cloud_subnet, :cloud_network => @cn1, - :availability_zone => @az2, - :ext_management_system => ems.network_manager) - @ip1 = FactoryBot.create(:floating_ip, :cloud_network_only => true, + :availability_zone => @az2, :ext_management_system => ems.network_manager) + @ip1 = FactoryBot.create(:floating_ip, :cloud_network_only => true, + :ext_management_system => ems.network_manager) @ip2 = FactoryBot.create(:floating_ip, :cloud_network_only => false, - :ext_management_system => ems.network_manager) + :ext_management_system => ems.network_manager) end context "#allowed_cloud_networks" do diff --git a/spec/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source_spec.rb b/spec/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source_spec.rb index 582f9b2727b..f0848542fc4 100644 --- a/spec/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source_spec.rb +++ b/spec/models/manageiq/providers/embedded_ansible/automation_manager/configuration_script_source_spec.rb @@ -23,8 +23,6 @@ repo = Spec::Support::FakeAnsibleRepo.new(local_repo, repo_dir_structure) repo.generate repo.git_branch_create("other_branch") - - GitRepository stub_const("GitRepository::GIT_REPO_DIRECTORY", repo_dir) EvmSpecHelper.assign_embedded_ansible_role @@ -104,11 +102,11 @@ def files_in_repository(git_repo_dir) expect(Notification).to receive(:create!).with(notify_creation_args) expect(Notification).to receive(:create!).with(sync_notification_args) - allow_any_instance_of(GitRepository).to receive(:with_worktree).and_raise(::Rugged::NetworkError) + allow_any_instance_of(GitRepository).to receive(:with_worktree).and_raise(Rugged::NetworkError) expect do described_class.create_in_provider(manager.id, params) - end.to raise_error(::Rugged::NetworkError) + end.to raise_error(Rugged::NetworkError) end it "sets the status to 'error' if syncing has a network error" do @@ -331,7 +329,7 @@ def playbooks_for(repo) end describe "#update_in_provider" do - let(:update_params) { { :scm_branch => "other_branch" } } + let(:update_params) { {:scm_branch => "other_branch"} } let(:notify_update_args) { notification_args('update', update_params) } context "with valid params" do @@ -375,12 +373,12 @@ def playbooks_for(repo) expect(Notification).to receive(:create!).with(notify_update_args) expect(Notification).to receive(:create!).with(sync_notification_args) - expect(record.git_repository).to receive(:update_repo).and_raise(::Rugged::NetworkError) + expect(record.git_repository).to receive(:update_repo).and_raise(Rugged::NetworkError) expect do # described_class.last.update_in_provider update_params record.update_in_provider update_params - end.to raise_error(::Rugged::NetworkError) + end.to raise_error(Rugged::NetworkError) end it "sets the status to 'error' if syncing has a network error" do diff --git a/spec/models/manageiq/providers/embedded_ansible/automation_manager/credential_spec.rb b/spec/models/manageiq/providers/embedded_ansible/automation_manager/credential_spec.rb index 51e1e5b3104..aeabb619a91 100644 --- a/spec/models/manageiq/providers/embedded_ansible/automation_manager/credential_spec.rb +++ b/spec/models/manageiq/providers/embedded_ansible/automation_manager/credential_spec.rb @@ -88,9 +88,9 @@ it "#update_in_provider to succeed" do 
expect(Notification).to receive(:create!).never - previous_params_to_attrs = params_to_attrs.each_with_object({}) do |key, attrs| - attrs[key] = ansible_cred.send(key) - end + previous_params_to_attrs = params_to_attrs.index_with do |key| + ansible_cred.send(key) + end result = ansible_cred.update_in_provider update_params @@ -429,12 +429,12 @@ let(:params_to_attrs) { [:password] } let(:update_params) do { - :name => "Updated Credential", + :name => "Updated Credential", } end let(:update_queue_params) do { - :name => "Updated Credential", + :name => "Updated Credential", } end end diff --git a/spec/models/manageiq/providers/embedded_ansible/provider_spec.rb b/spec/models/manageiq/providers/embedded_ansible/provider_spec.rb index b4860f24e19..150ea99b789 100644 --- a/spec/models/manageiq/providers/embedded_ansible/provider_spec.rb +++ b/spec/models/manageiq/providers/embedded_ansible/provider_spec.rb @@ -10,13 +10,13 @@ context "DefaultAnsibleObjects concern" do context "with no attributes" do - %w(organization credential inventory host).each do |obj_name| + %w[organization credential inventory host].each do |obj_name| it "#default_#{obj_name} returns nil" do - expect(subject.public_send("default_#{obj_name}")).to be_nil + expect(subject.public_send(:"default_#{obj_name}")).to be_nil end it "#default_#{obj_name}= creates a new custom attribute" do - subject.public_send("default_#{obj_name}=", obj_name.length) + subject.public_send(:"default_#{obj_name}=", obj_name.length) expect(subject.default_ansible_objects.find_by(:name => obj_name).value.to_i).to eq(obj_name.length) end end @@ -24,18 +24,18 @@ context "with attributes saved" do before do - %w(organization credential inventory host).each do |obj_name| + %w[organization credential inventory host].each do |obj_name| subject.default_ansible_objects.create(:name => obj_name, :value => obj_name.length) end end - %w(organization credential inventory host).each do |obj_name| + %w[organization credential inventory host].each do |obj_name| it "#default_#{obj_name} returns the saved value" do - expect(subject.public_send("default_#{obj_name}")).to eq(obj_name.length) + expect(subject.public_send(:"default_#{obj_name}")).to eq(obj_name.length) end it "#default_#{obj_name}= doesn't create a second object if we pass the same value" do - subject.public_send("default_#{obj_name}=", obj_name.length) + subject.public_send(:"default_#{obj_name}=", obj_name.length) expect(subject.default_ansible_objects.where(:name => obj_name).count).to eq(1) end end diff --git a/spec/models/manageiq/providers/ems_refresh_workflow_spec.rb b/spec/models/manageiq/providers/ems_refresh_workflow_spec.rb index 1740771676a..e08f2d9d019 100644 --- a/spec/models/manageiq/providers/ems_refresh_workflow_spec.rb +++ b/spec/models/manageiq/providers/ems_refresh_workflow_spec.rb @@ -13,7 +13,7 @@ @job = described_class.create_job({}) end - %w(start poll_native_task refresh poll_refresh post_refresh finish abort_job cancel error).each do |signal| + %w[start poll_native_task refresh poll_refresh post_refresh finish abort_job cancel error].each do |signal| shared_examples_for "allows #{signal} signal" do it signal.to_s do expect(@job).to receive(signal.to_sym) @@ -22,7 +22,7 @@ end end - %w(start poll_native_task refresh poll_refresh post_refresh).each do |signal| + %w[start poll_native_task refresh poll_refresh post_refresh].each do |signal| shared_examples_for "doesn't allow #{signal} signal" do it signal.to_s do expect { @job.signal(signal.to_sym) }.to raise_error(RuntimeError, 
/#{signal} is not permitted at state #{@job.state}/) diff --git a/spec/models/manageiq/providers/inventory/persister/builder_spec.rb b/spec/models/manageiq/providers/inventory/persister/builder_spec.rb index 1b78ffbb5ea..a363872faab 100644 --- a/spec/models/manageiq/providers/inventory/persister/builder_spec.rb +++ b/spec/models/manageiq/providers/inventory/persister/builder_spec.rb @@ -6,8 +6,8 @@ let(:ems) { FactoryBot.create(:ems_cloud) } let(:adv_settings) { {:strategy => :local_db_find_missing_references, :saver_strategy => :concurrent_safe_batch} } - let(:cloud) { ::ManageIQ::Providers::Inventory::Persister::Builder::CloudManager } - let(:network) { ::ManageIQ::Providers::Inventory::Persister::Builder::NetworkManager } + let(:cloud) { ManageIQ::Providers::Inventory::Persister::Builder::CloudManager } + let(:network) { ManageIQ::Providers::Inventory::Persister::Builder::NetworkManager } # --- association --- @@ -26,15 +26,15 @@ it "derives existing model_class without persister's class" do data = cloud.prepare_data(:vms, persister, :without_sti => true).to_hash - expect(data[:model_class]).to eq ::Vm + expect(data[:model_class]).to eq Vm end it "replaces derived model_class if model_class defined manually" do data = cloud.prepare_data(:vms, persister, :without_sti => true) do |builder| - builder.add_properties(:model_class => ::MiqTemplate) + builder.add_properties(:model_class => MiqTemplate) end.to_hash - expect(data[:model_class]).to eq ::MiqTemplate + expect(data[:model_class]).to eq MiqTemplate end it "doesn't try to derive model_class when disabled" do @@ -45,14 +45,14 @@ it "throws exception if model_class should be namespaced but isn't" do expect { cloud.prepare_data(:vms, persister) }.to raise_error( - ::ManageIQ::Providers::Inventory::Persister::Builder::NotSubclassedError + ManageIQ::Providers::Inventory::Persister::Builder::NotSubclassedError ) end it 'throws exception if model_class not specified' do builder = cloud.prepare_data(:non_existing_ic, persister) - expect { builder.to_inventory_collection }.to raise_error(::ManageIQ::Providers::Inventory::Persister::Builder::MissingModelClassError, /NonExistingIc/) + expect { builder.to_inventory_collection }.to raise_error(ManageIQ::Providers::Inventory::Persister::Builder::MissingModelClassError, /NonExistingIc/) end # --- adv. 
settings (TODO: link to gui)--- @@ -169,24 +169,24 @@ it 'can add inventory_object_attributes manually' do data = cloud.prepare_data(:tmp, persister, :without_model_class => true) do |builder| - builder.add_inventory_attributes(%i(attr1 attr2 attr3)) + builder.add_inventory_attributes(%i[attr1 attr2 attr3]) end.to_hash - expect(data[:inventory_object_attributes]).to match_array(%i(attr1 attr2 attr3)) + expect(data[:inventory_object_attributes]).to match_array(%i[attr1 attr2 attr3]) end it 'can remove inventory_object_attributes' do data = cloud.prepare_data(:tmp, persister, :without_model_class => true) do |builder| - builder.add_inventory_attributes(%i(attr1 attr2 attr3)) - builder.remove_inventory_attributes(%i(attr2)) + builder.add_inventory_attributes(%i[attr1 attr2 attr3]) + builder.remove_inventory_attributes(%i[attr2]) end.to_hash - expect(data[:inventory_object_attributes]).to match_array(%i(attr1 attr3)) + expect(data[:inventory_object_attributes]).to match_array(%i[attr1 attr3]) end it 'can clear all inventory_object_attributes' do data = cloud.prepare_data(:vms, persister, :without_sti => true) do |builder| - builder.add_inventory_attributes(%i(attr1 attr2 attr3)) + builder.add_inventory_attributes(%i[attr1 attr2 attr3]) builder.clear_inventory_attributes! end.to_hash diff --git a/spec/models/manageiq/providers/inventory/persister/finders_spec.rb b/spec/models/manageiq/providers/inventory/persister/finders_spec.rb index 200b5b7e385..985e5e6f8f3 100644 --- a/spec/models/manageiq/providers/inventory/persister/finders_spec.rb +++ b/spec/models/manageiq/providers/inventory/persister/finders_spec.rb @@ -133,8 +133,8 @@ persister.vms.build(vm_data(1)) - expected_error = "Wrong index for key :vm_or_template, all references under this index must point to default :ref"\ - " called :manager_ref. Any other :ref is not valid. This applies also to nested lazy links." + expected_error = "Wrong index for key :vm_or_template, all references under this index must point to default :ref " \ + "called :manager_ref. Any other :ref is not valid. This applies also to nested lazy links." expect do persister.hardwares.build(hardware_data(1, :vm_or_template => vm_lazy)) end.to(raise_error(expected_error)) @@ -147,8 +147,8 @@ persister.vms.build(vm_data(1)) - expected_error = "Wrong index for key :hardware, all references under this index must point to default :ref called"\ - " :manager_ref. Any other :ref is not valid. This applies also to nested lazy links." + expected_error = "Wrong index for key :hardware, all references under this index must point to default :ref called " \ + ":manager_ref. Any other :ref is not valid. This applies also to nested lazy links." 
expect do persister.disks.build(disk_data(1, :hardware => hardware_lazy)) end.to(raise_error(expected_error)) diff --git a/spec/models/manageiq/providers/inventory/persister/helpers/spec_mocked_data.rb b/spec/models/manageiq/providers/inventory/persister/helpers/spec_mocked_data.rb index c268762ec7f..6fa0456d09d 100644 --- a/spec/models/manageiq/providers/inventory/persister/helpers/spec_mocked_data.rb +++ b/spec/models/manageiq/providers/inventory/persister/helpers/spec_mocked_data.rb @@ -42,7 +42,7 @@ def initialize_mocked_records :genealogy_parent => @image1, :key_pairs => [@key_pair1], :location => 'host_10_10_10_1.com', - :ext_management_system => @ems, + :ext_management_system => @ems ) ) @vm12 = FactoryBot.create( @@ -52,7 +52,7 @@ def initialize_mocked_records :genealogy_parent => @image1, :key_pairs => [@key_pair1, @key_pair12], :location => 'host_10_10_10_1.com', - :ext_management_system => @ems, + :ext_management_system => @ems ) ) @vm2 = FactoryBot.create( @@ -62,7 +62,7 @@ def initialize_mocked_records :genealogy_parent => @image2, :key_pairs => [@key_pair2], :location => 'host_10_10_10_2.com', - :ext_management_system => @ems, + :ext_management_system => @ems ) ) @vm4 = FactoryBot.create( @@ -98,38 +98,38 @@ def initialize_mocked_records @disk1 = FactoryBot.create( :disk, disk_data(1).merge( - :hardware => @hardware1, + :hardware => @hardware1 ) ) @disk12 = FactoryBot.create( :disk, disk_data(12).merge( - :hardware => @hardware12, + :hardware => @hardware12 ) ) @disk13 = FactoryBot.create( :disk, disk_data(13).merge( - :hardware => @hardware12, + :hardware => @hardware12 ) ) @disk2 = FactoryBot.create( :disk, disk_data(2).merge( - :hardware => @hardware2, + :hardware => @hardware2 ) ) @public_network1 = FactoryBot.create( :network, public_network_data(1).merge( - :hardware => @hardware1, + :hardware => @hardware1 ) ) @public_network12 = FactoryBot.create( :network, public_network_data(12).merge( - :hardware => @hardware12, + :hardware => @hardware12 ) ) @public_network13 = FactoryBot.create( @@ -142,7 +142,7 @@ def initialize_mocked_records @public_network2 = FactoryBot.create( :network, public_network_data(2).merge( - :hardware => @hardware2, + :hardware => @hardware2 ) ) diff --git a/spec/models/manageiq/providers/inventory/persister/local_db_finders_spec.rb b/spec/models/manageiq/providers/inventory/persister/local_db_finders_spec.rb index b6ccf3c654d..5ddf02758c8 100644 --- a/spec/models/manageiq/providers/inventory/persister/local_db_finders_spec.rb +++ b/spec/models/manageiq/providers/inventory/persister/local_db_finders_spec.rb @@ -52,21 +52,21 @@ :flavor => persister.flavors.lazy_find(:ems_ref => flavor_data(1)[:name]), :genealogy_parent => persister.miq_templates.lazy_find(:ems_ref => image_data(1)[:ems_ref]), :key_pairs => [persister.auth_key_pairs.lazy_find(:name => key_pair_data(1)[:name])], - :location => lazy_find_network1, + :location => lazy_find_network1 ) @vm_data102 = vm_data(102).merge( :flavor => persister.flavors.lazy_find(:ems_ref => flavor_data(1)[:name]), :genealogy_parent => persister.miq_templates.lazy_find(:ems_ref => image_data(1)[:ems_ref]), :key_pairs => [persister.auth_key_pairs.lazy_find(:name => key_pair_data(1)[:name])], - :location => lazy_find_network2, + :location => lazy_find_network2 ) @vm_data160 = vm_data(160).merge( :flavor => persister.flavors.lazy_find(:ems_ref => flavor_data(1)[:name]), :genealogy_parent => persister.miq_templates.lazy_find(:ems_ref => image_data(1)[:ems_ref]), :key_pairs => 
[persister.auth_key_pairs.lazy_find(:name => key_pair_data(1)[:name])], - :location => lazy_find_network60, + :location => lazy_find_network60 ) persister.vms.build(@vm_data101) @@ -180,7 +180,7 @@ :ems_ref => orchestration_stack_resource_data("1_12_1")[:ems_ref] }, :ref => :by_stack_and_ems_ref, - :key => :stack, + :key => :stack ) @network_port1 = network_port_data(1).merge( @@ -310,7 +310,7 @@ :ems_ref => orchestration_stack_resource_data("1_12_1")[:ems_ref] }, :ref => :by_stack_and_ems_ref, - :key => :stack, + :key => :stack ).load expect(persister.orchestration_stacks_resources.index_proxy.send(:local_db_indexes)[:by_stack_and_ems_ref].send(:index).keys).to( @@ -419,12 +419,12 @@ it "checks relation is allowed in index" do persister.add_collection(persister.send(:cloud), :vms) do |builder| builder.add_properties( - :model_class => ::ManageIQ::Providers::CloudManager::Vm, + :model_class => ManageIQ::Providers::CloudManager::Vm, :secondary_refs => {:by_availability_zone_and_name => %i[availability_zone name]} ) end - expect(persister.vms.index_proxy.send(:data_indexes).keys).to match_array(%i(manager_ref by_availability_zone_and_name)) + expect(persister.vms.index_proxy.send(:data_indexes).keys).to match_array(%i[manager_ref by_availability_zone_and_name]) end it "checks relation is on model class" do @@ -439,7 +439,7 @@ persister.add_collection(persister.send(:cloud), :vms, {}, {:without_sti => true}) do |builder| builder.add_properties( :custom_save_block => ->(ems, _ic) { ems }, - :manager_ref => %i(a b c) + :manager_ref => %i[a b c] ) end diff --git a/spec/models/manageiq/providers/inventory/persister/serializing_spec.rb b/spec/models/manageiq/providers/inventory/persister/serializing_spec.rb index f821e414cdf..1d72ecb795f 100644 --- a/spec/models/manageiq/providers/inventory/persister/serializing_spec.rb +++ b/spec/models/manageiq/providers/inventory/persister/serializing_spec.rb @@ -99,7 +99,7 @@ def populate_test_data(persister) {:hardware => lazy_find_hardware, :description => "public"}, :key => :hostname, :default => 'default_value_unknown' - ), + ) ) @hardware_data_1 = hardware_data(1).merge( @@ -113,11 +113,11 @@ def populate_test_data(persister) ) @public_network_data_1 = public_network_data(1).merge( - :hardware => persister.hardwares.lazy_find(:vm_or_template => lazy_find_vm), + :hardware => persister.hardwares.lazy_find(:vm_or_template => lazy_find_vm) ) @disk_data_1 = disk_data(1).merge( - :hardware => persister.hardwares.lazy_find(:vm_or_template => lazy_find_vm), + :hardware => persister.hardwares.lazy_find(:vm_or_template => lazy_find_vm) ) persister.miq_templates.build(@image_data_1) diff --git a/spec/models/manageiq/providers/inventory/persister/test_persister.rb b/spec/models/manageiq/providers/inventory/persister/test_persister.rb index 2d91644be6a..0fbf6d494ca 100644 --- a/spec/models/manageiq/providers/inventory/persister/test_persister.rb +++ b/spec/models/manageiq/providers/inventory/persister/test_persister.rb @@ -2,12 +2,11 @@ class TestPersister < ManageIQ::Providers::Inventory::Persister def initialize_inventory_collections ######### Cloud ########## # Top level models with direct references for Cloud - %i(vms - miq_templates).each do |name| - + %i[vms + miq_templates].each do |name| add_collection(cloud, name, {}, {:without_sti => true}) do |builder| builder.add_properties( - :secondary_refs => {:by_name => [:name], :by_uid_ems_and_name => %i(uid_ems name)} + :secondary_refs => {:by_name => [:name], :by_uid_ems_and_name => %i[uid_ems name]} ) end end @@ 
-15,7 +14,7 @@ def initialize_inventory_collections add_auth_key_pairs # Child models with references in the Parent InventoryCollections for Cloud - %i(availability_zones + %i[availability_zones hardwares networks disks @@ -23,8 +22,7 @@ def initialize_inventory_collections orchestration_stacks orchestration_templates orchestration_stacks_outputs - orchestration_stacks_parameters).each do |name| - + orchestration_stacks_parameters].each do |name| add_collection(cloud, name, {}, {:without_sti => true}) end @@ -32,11 +30,10 @@ def initialize_inventory_collections ######### Network ################ # Top level models with direct references for Network - %i(cloud_networks + %i[cloud_networks cloud_subnets security_groups - load_balancers).each do |name| - + load_balancers].each do |name| add_collection(network, name, {}, {:without_sti => true}) do |builder| builder.add_properties(:parent => manager.network_manager) end @@ -47,7 +44,7 @@ def initialize_inventory_collections add_floating_ips # Child models with references in the Parent InventoryCollections for Network - %i(firewall_rules + %i[firewall_rules cloud_subnet_network_ports load_balancer_pools load_balancer_pool_members @@ -55,8 +52,7 @@ def initialize_inventory_collections load_balancer_listeners load_balancer_listener_pools load_balancer_health_checks - load_balancer_health_check_members).each do |name| - + load_balancer_health_check_members].each do |name| add_collection(network, name, {}, {:without_sti => true}) do |builder| builder.add_properties(:parent => manager.network_manager) end @@ -66,9 +62,8 @@ def initialize_inventory_collections add_flavors ######## Custom processing of Ancestry ########## - %i(vm_and_miq_template_ancestry - orchestration_stack_ancestry).each do |name| - + %i[vm_and_miq_template_ancestry + orchestration_stack_ancestry].each do |name| add_collection(cloud, name, {}, {:without_model_class => true}) end end @@ -86,7 +81,7 @@ def add_auth_key_pairs # Cloud InventoryCollection def add_orchestration_stacks_resources add_collection(cloud, :orchestration_stacks_resources) do |builder| - builder.add_properties(:secondary_refs => {:by_stack_and_ems_ref => %i(stack ems_ref)}) + builder.add_properties(:secondary_refs => {:by_stack_and_ems_ref => %i[stack ems_ref]}) end end @@ -103,7 +98,7 @@ def add_network_ports builder.add_properties( :manager_uuids => references(:vms) + references(:network_ports) + references(:load_balancers), :parent => manager.network_manager, - :secondary_refs => {:by_device => [:device], :by_device_and_name => %i(device name)} + :secondary_refs => {:by_device => [:device], :by_device_and_name => %i[device name]} ) end end diff --git a/spec/models/manageiq/providers/physical_infra_manager/physical_infra_manager_spec.rb b/spec/models/manageiq/providers/physical_infra_manager/physical_infra_manager_spec.rb index ab9eb21429e..6b4ae83c297 100644 --- a/spec/models/manageiq/providers/physical_infra_manager/physical_infra_manager_spec.rb +++ b/spec/models/manageiq/providers/physical_infra_manager/physical_infra_manager_spec.rb @@ -1,13 +1,13 @@ RSpec.describe ManageIQ::Providers::PhysicalInfraManager do before :all do - @auth = { :user => 'admin', :pass => 'smartvm', :host => 'localhost', :port => '3000' } + @auth = {:user => 'admin', :pass => 'smartvm', :host => 'localhost', :port => '3000'} end it 'will count physical servers' do ps = FactoryBot.create(:physical_server) pim = FactoryBot.create(:ems_physical_infra, - :name => "LXCA", - :hostname => "0.0.0.0") + :name => "LXCA", + :hostname => 
"0.0.0.0") pim.physical_servers = [ps] expect(pim.total_physical_servers).to be(1) @@ -17,8 +17,8 @@ ps = FactoryBot.create(:physical_server) host = FactoryBot.create(:host) pim = FactoryBot.create(:ems_physical_infra, - :name => "LXCA", - :hostname => "0.0.0.0") + :name => "LXCA", + :hostname => "0.0.0.0") ps.host = host pim.physical_servers = [ps] @@ -30,8 +30,8 @@ host = FactoryBot.create(:host) vm = FactoryBot.create(:vm) pim = FactoryBot.create(:ems_physical_infra, - :name => "LXCA", - :hostname => "0.0.0.0") + :name => "LXCA", + :hostname => "0.0.0.0") host.vms = [vm] ps.host = host @@ -41,15 +41,15 @@ it 'will check supports?(:console) returns false' do ps = FactoryBot.create(:ems_physical_infra, - :name => "LXCA", - :hostname => "0.0.0.0") + :name => "LXCA", + :hostname => "0.0.0.0") expect(ps.supports?(:console)).to be(false) end it 'will check supports?(:native_console) returns false' do ps = FactoryBot.create(:ems_physical_infra, - :name => "LXCA", - :hostname => "0.0.0.0") + :name => "LXCA", + :hostname => "0.0.0.0") expect(ps.supports?(:native_console)).to be(false) end diff --git a/spec/models/metering_container_image_spec.rb b/spec/models/metering_container_image_spec.rb index f357aee25e1..a4e84cc5e33 100644 --- a/spec/models/metering_container_image_spec.rb +++ b/spec/models/metering_container_image_spec.rb @@ -1,6 +1,6 @@ RSpec.describe MeteringContainerImage do include Spec::Support::ChargebackHelper - let(:base_options) { {:interval_size => 2, :end_interval_offset => 0, :ext_options => {:tz => 'UTC'} } } + let(:base_options) { {:interval_size => 2, :end_interval_offset => 0, :ext_options => {:tz => 'UTC'}} } let(:hourly_rate) { 0.01 } let(:count_hourly_rate) { 1.00 } let(:starting_date) { Time.parse('2012-09-01 23:59:59Z').utc } diff --git a/spec/models/metering_vm_spec.rb b/spec/models/metering_vm_spec.rb index c7b0aee4f71..6c0e89452d0 100644 --- a/spec/models/metering_vm_spec.rb +++ b/spec/models/metering_vm_spec.rb @@ -14,7 +14,7 @@ let(:derived_memory_available) { 1000.0 } let(:cpu_usagemhz_rate_average) { 50.0 } let(:disk_usage_rate_average) { 100.0 } - let(:derived_memory_used) { 100.0 } + let(:derived_memory_used) { 100.0 } let(:net_usage_rate_average) { 25.0 } let(:derived_vm_used_disk_storage) { 1.0.gigabytes } let(:derived_vm_allocated_disk_storage) { 4.0.gigabytes } @@ -58,14 +58,14 @@ let(:cores) { 7 } let(:mem_mb) { 1777 } let(:disk_gb) { 7 } - let(:disk_b) { disk_gb * 1024**3 } + let(:disk_b) { disk_gb * (1024**3) } let(:metering_used_hours) { 24 } let(:hardware) do FactoryBot.create(:hardware, - :cpu_total_cores => cores, - :memory_mb => mem_mb, - :disks => [FactoryBot.create(:disk, :size => disk_b)]) + :cpu_total_cores => cores, + :memory_mb => mem_mb, + :disks => [FactoryBot.create(:disk, :size => disk_b)]) end context 'for any virtual machine' do @@ -161,7 +161,7 @@ expect(subject.beginning_of_resource_existence_in_report_interval).to eq(beginning_of_resource_existence) expect(subject.end_of_resource_existence_in_report_interval.to_s).to eq(vm.updated_on.to_s) expect(subject.end_of_resource_existence_in_report_interval.to_s).to eq("2012-09-25 19:00:00 UTC") - expect(subject.existence_hours_metric).to eq(19 * 24 + 19) # from 2012-09-06 00:00:00 UTC to 2012-09-25 19:00:00 UTC + expect(subject.existence_hours_metric).to eq((19 * 24) + 19) # from 2012-09-06 00:00:00 UTC to 2012-09-25 19:00:00 UTC end end diff --git a/spec/models/metric/common_spec.rb b/spec/models/metric/common_spec.rb index 6ab50eff2fd..04367db7f09 100644 --- 
a/spec/models/metric/common_spec.rb +++ b/spec/models/metric/common_spec.rb @@ -2,9 +2,8 @@ let(:host) { FactoryBot.create(:host) } let(:metric) do FactoryBot.create(:metric_rollup_host_hr, - :resource => host, - :timestamp => Time.now.next_week(:sunday).utc - ) + :resource => host, + :timestamp => Time.now.next_week(:sunday).utc) end describe "#v_month" do @@ -17,37 +16,35 @@ context "#apply_time_profile" do it "with all days and hours selected it should return true" do profile = FactoryBot.create(:time_profile, - :description => "foo", - :profile => {:tz => "New Delhi", - :days => TimeProfile::ALL_DAYS, - :hours => TimeProfile::ALL_HOURS} - ) + :description => "foo", + :profile => {:tz => "New Delhi", + :days => TimeProfile::ALL_DAYS, + :hours => TimeProfile::ALL_HOURS}) res = metric.apply_time_profile(profile) expect(res).to be_truthy end it "with specific days and hours selected it should return false" do profile = FactoryBot.create(:time_profile, - :description => "foo", - :profile => {:tz => "New Delhi", - :days => [1], - :hours => [1]} - ) + :description => "foo", + :profile => {:tz => "New Delhi", + :days => [1], + :hours => [1]}) res = metric.apply_time_profile(profile) expect(res).to be_falsey end it "returns true if time profile were used for aggregation (and rollup record refer to it)" do profile = FactoryBot.create(:time_profile, - :description => "foo", - :profile => {:tz => "New Delhi", - :days => [1], - :hours => [1]}) + :description => "foo", + :profile => {:tz => "New Delhi", + :days => [1], + :hours => [1]}) profile_aggr = FactoryBot.create(:time_profile, - :description => "used_for_daily_aggregation", - :profile => {:tz => "UTC", - :days => (2..4), - :hours => TimeProfile::ALL_HOURS}) + :description => "used_for_daily_aggregation", + :profile => {:tz => "UTC", + :days => (2..4), + :hours => TimeProfile::ALL_HOURS}) metric.time_profile_id = profile_aggr.id res = metric.apply_time_profile(profile) diff --git a/spec/models/metric/config_settings_spec.rb b/spec/models/metric/config_settings_spec.rb index 8026cb88978..2ca29235499 100644 --- a/spec/models/metric/config_settings_spec.rb +++ b/spec/models/metric/config_settings_spec.rb @@ -8,7 +8,6 @@ stub_settings(:performance => {:host_overhead => {:cpu => 1.23}}) expect(described_class.host_overhead_cpu).to eq(1.23) end - end describe ".host_overhead_memory" do @@ -16,6 +15,5 @@ stub_settings(:performance => {:host_overhead => {:memory => 1.23}}) expect(described_class.host_overhead_memory).to eq(1.23) end - end end diff --git a/spec/models/metric/processing_spec.rb b/spec/models/metric/processing_spec.rb index 1ab1c7c226e..ebaa43bc5d7 100644 --- a/spec/models/metric/processing_spec.rb +++ b/spec/models/metric/processing_spec.rb @@ -7,21 +7,21 @@ it "fills all hourly intervals" do perf.save && last_perf.save expect(MetricRollup.count).to eq(2) - described_class.send("extrapolate", MetricRollup, MetricRollup.all) + described_class.send(:extrapolate, MetricRollup, MetricRollup.all) expect(MetricRollup.count).to eq(3) end end context "#create_new_metric" do it "creates a filling record without ID attribute" do - new_perf = described_class.send("create_new_metric", MetricRollup, last_perf, perf, 3600) + new_perf = described_class.send(:create_new_metric, MetricRollup, last_perf, perf, 3600) expect(new_perf.id).to be_nil end it "averages the 2 metric values" do last_perf.derived_vm_numvcpus = 1000 perf.derived_vm_numvcpus = 2000 - new_perf = described_class.send("create_new_metric", MetricRollup, last_perf, perf, 3600) + new_perf 
= described_class.send(:create_new_metric, MetricRollup, last_perf, perf, 3600) expect(new_perf.derived_vm_numvcpus).to eq(1500) end end @@ -68,10 +68,9 @@ it "with all usage values" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usage_rate_average => 50.0, - :cpu_usagemhz_rate_average => 1_500.0, - ) + :resource => vm, + :cpu_usage_rate_average => 50.0, + :cpu_usagemhz_rate_average => 1_500.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -80,9 +79,8 @@ it "with only cpu_usage_rate_average usage value" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usage_rate_average => 50.0, - ) + :resource => vm, + :cpu_usage_rate_average => 50.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -91,9 +89,8 @@ it "with only cpu_usagemhz_rate_average usage value" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usagemhz_rate_average => 1_500.0, - ) + :resource => vm, + :cpu_usagemhz_rate_average => 1_500.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -111,10 +108,9 @@ it "without hardware" do vm = FactoryBot.create(:vm_vmware) m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usage_rate_average => 50.0, - :cpu_usagemhz_rate_average => 1_500.0 - ) + :resource => vm, + :cpu_usage_rate_average => 50.0, + :cpu_usagemhz_rate_average => 1_500.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -125,21 +121,18 @@ context "on :derived_cpu_available" do let(:vm) do FactoryBot.create(:vm_vmware, :hardware => - FactoryBot.create(:hardware, - :cpu_total_cores => 8, - :cpu_sockets => 4, - :cpu_cores_per_socket => 2, - :cpu_speed => 3_000, - ) - ) + FactoryBot.create(:hardware, + :cpu_total_cores => 8, + :cpu_sockets => 4, + :cpu_cores_per_socket => 2, + :cpu_speed => 3_000)) end it "with all usage values" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usage_rate_average => 50.0, - :cpu_usagemhz_rate_average => 1_500.0, - ) + :resource => vm, + :cpu_usage_rate_average => 50.0, + :cpu_usagemhz_rate_average => 1_500.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -148,9 +141,8 @@ it "with only cpu_usage_rate_average usage value" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usage_rate_average => 50.0, - ) + :resource => vm, + :cpu_usage_rate_average => 50.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -159,9 +151,8 @@ it "with only cpu_usagemhz_rate_average usage value" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usagemhz_rate_average => 1_500.0, - ) + :resource => vm, + :cpu_usagemhz_rate_average => 1_500.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -179,10 +170,9 @@ it "without hardware" do vm = FactoryBot.create(:vm_vmware) m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :cpu_usage_rate_average => 50.0, - :cpu_usagemhz_rate_average => 1_500.0 - ) + :resource => vm, + :cpu_usage_rate_average => 50.0, + :cpu_usagemhz_rate_average => 1_500.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -193,17 +183,14 @@ context "on :derived_memory_available" do let(:vm) do FactoryBot.create(:vm_vmware, :hardware => - FactoryBot.create(:hardware, - :memory_mb => 
4_096 - ) - ) + FactoryBot.create(:hardware, + :memory_mb => 4_096)) end it "with usage values" do m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :mem_usage_absolute_average => 50.0, - ) + :resource => vm, + :mem_usage_absolute_average => 50.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) @@ -221,9 +208,8 @@ it "without hardware" do vm = FactoryBot.create(:vm_vmware) m = FactoryBot.create(:metric_rollup_vm_hr, - :resource => vm, - :mem_usage_absolute_average => 50.0, - ) + :resource => vm, + :mem_usage_absolute_average => 50.0) derived_columns = described_class.process_derived_columns(vm, m.attributes.symbolize_keys) diff --git a/spec/models/metric/purging_spec.rb b/spec/models/metric/purging_spec.rb index 35984975181..d1a6fea2fba 100644 --- a/spec/models/metric/purging_spec.rb +++ b/spec/models/metric/purging_spec.rb @@ -26,11 +26,11 @@ q = MiqQueue.all.map { |item| item.attributes.slice("class_name", "method_name") } expect(q).to match_array([ - {"class_name" => described_class.name, "method_name" => "purge_daily"}, - {"class_name" => described_class.name, "method_name" => "purge_hourly"}, - {"class_name" => described_class.name, "method_name" => "purge_realtime"}, - {"class_name" => "MiqTask", "method_name"=>"destroy_older_by_condition"} - ]) + {"class_name" => described_class.name, "method_name" => "purge_daily"}, + {"class_name" => described_class.name, "method_name" => "purge_hourly"}, + {"class_name" => described_class.name, "method_name" => "purge_realtime"}, + {"class_name" => "MiqTask", "method_name" => "destroy_older_by_condition"} + ]) end end @@ -40,13 +40,13 @@ before do @metrics1 = [ - FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm1.id, :timestamp => (6.months + 1.days).ago.utc), - FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm1.id, :timestamp => (6.months - 1.days).ago.utc) + FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm1.id, :timestamp => (6.months + 1.day).ago.utc), + FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm1.id, :timestamp => (6.months - 1.day).ago.utc) ] @metrics2 = [ FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm2.id, :timestamp => (6.months + 2.days).ago.utc), - FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm2.id, :timestamp => (6.months + 1.days).ago.utc), - FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm2.id, :timestamp => (6.months - 1.days).ago.utc) + FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm2.id, :timestamp => (6.months + 1.day).ago.utc), + FactoryBot.create(:metric_rollup_vm_hr, :resource_id => vm2.id, :timestamp => (6.months - 1.day).ago.utc) ] end @@ -84,7 +84,7 @@ it "deletes mid day" do Timecop.freeze('2018-02-01T09:12:00Z') do - (0..16).each do |hours| + 17.times do |hours| FactoryBot.create(:metric_vm_rt, :resource_id => vm1.id, :timestamp => (hours.hours.ago + 1.minute)) end expect(Metric.count).to eq(17) @@ -98,7 +98,7 @@ it "deletes just after midnight" do Timecop.freeze('2018-02-01T02:12:00Z') do - (0..16).each do |hours| + 17.times do |hours| FactoryBot.create(:metric_vm_rt, :resource_id => vm1.id, :timestamp => (hours.hours.ago + 1.minute)) end expect(Metric.count).to eq(17) @@ -113,7 +113,7 @@ EvmSpecHelper.local_miq_server Timecop.freeze('2018-01-01T02:12:00Z') do - (0..16).each do |hours| + 17.times do |hours| FactoryBot.create(:metric_vm_rt, :resource_id => vm1.id, :timestamp => (hours.hours.ago + 1.minute)) end expect(Metric.count).to eq(17) @@ -128,7 +128,7 @@ 
EvmSpecHelper.local_miq_server Timecop.freeze('2017-03-12T08:12:00Z') do # 2:00am+05 EST is time of change # this is overkill. since we prune every 21 minutes, there will only be ~1 table with data - (0..16).each do |hours| + 17.times do |hours| FactoryBot.create(:metric_vm_rt, :resource_id => vm1.id, :timestamp => (hours.hours.ago + 1.minute)) end expect(Metric.count).to eq(17) @@ -143,7 +143,7 @@ EvmSpecHelper.local_miq_server Timecop.freeze('2017-11-05T08:12:00Z') do # 2:00am+05 EST is time of change # this is overkill. since we prune every 21 minutes, there will only be ~1 table with data - (0..16).each do |hours| + 17.times do |hours| FactoryBot.create(:metric_vm_rt, :resource_id => vm1.id, :timestamp => (hours.hours.ago + 1.minute)) end expect(Metric.count).to eq(17) @@ -169,7 +169,7 @@ it "deletes just after midnight" do Timecop.freeze('2018-02-01T02:12:00Z') do - (0..23).each do |hours| + 24.times do |hours| FactoryBot.create(:metric_vm_rt, :resource_id => vm1.id, :timestamp => (hours.hours.ago + 1.minute)) end expect(Metric.count).to eq(24) diff --git a/spec/models/metric_rollup/chargeback_helper_spec.rb b/spec/models/metric_rollup/chargeback_helper_spec.rb index 54ba9a347e9..0db61f45cc3 100644 --- a/spec/models/metric_rollup/chargeback_helper_spec.rb +++ b/spec/models/metric_rollup/chargeback_helper_spec.rb @@ -13,8 +13,7 @@ let(:vm) do FactoryBot.create(:vm_vmware, :name => 'test_vm', :ems_ref => 'ems_ref', :ems_cluster => ems_cluster, :storage => storage, :host => host, - :ext_management_system => ems - ) + :ext_management_system => ems) end subject { metric_rollup.parents_determining_rate } @@ -22,12 +21,11 @@ context 'metric_rollup record with parents not nil' do let(:metric_rollup) do FactoryBot.build(:metric_rollup_vm_hr, - :resource => vm, - :parent_host => host, - :parent_ems_cluster => ems_cluster, - :parent_ems => ems, - :parent_storage => storage, - ) + :resource => vm, + :parent_host => host, + :parent_ems_cluster => ems_cluster, + :parent_ems => ems, + :parent_storage => storage) end let(:parents_from_rollup) do diff --git a/spec/models/metric_spec.rb b/spec/models/metric_spec.rb index fb135045d74..377e2ac049b 100644 --- a/spec/models/metric_spec.rb +++ b/spec/models/metric_spec.rb @@ -60,17 +60,16 @@ ] cases.each_slice(2) do |t, v| @vm1.metric_rollups << FactoryBot.create(:metric_rollup_vm_hr, - :timestamp => t, - :cpu_usage_rate_average => v, - :cpu_ready_delta_summation => v * 10000, - :sys_uptime_absolute_latest => v, - :min_max => { - :abs_max_cpu_usage_rate_average_value => v, - :abs_max_cpu_usage_rate_average_timestamp => Time.parse(t) + 20.seconds, - :abs_min_cpu_usage_rate_average_value => v, - :abs_min_cpu_usage_rate_average_timestamp => Time.parse(t) + 40.seconds, - } - ) + :timestamp => t, + :cpu_usage_rate_average => v, + :cpu_ready_delta_summation => v * 10000, + :sys_uptime_absolute_latest => v, + :min_max => { + :abs_max_cpu_usage_rate_average_value => v, + :abs_max_cpu_usage_rate_average_timestamp => Time.parse(t) + 20.seconds, + :abs_min_cpu_usage_rate_average_value => v, + :abs_min_cpu_usage_rate_average_timestamp => Time.parse(t) + 40.seconds, + }) end end @@ -87,11 +86,10 @@ ] cases.each_slice(2) do |t, v| @host1.metrics << FactoryBot.create(:metric_host_rt, - :timestamp => t, - :cpu_usage_rate_average => v, - :cpu_usagemhz_rate_average => v, - :sys_uptime_absolute_latest => v - ) + :timestamp => t, + :cpu_usage_rate_average => v, + :cpu_usagemhz_rate_average => v, + :sys_uptime_absolute_latest => v) end cases = [ @@ -105,11 +103,10 @@ ] 
cases.each_slice(2) do |t, v| @host2.metrics << FactoryBot.create(:metric_host_rt, - :timestamp => t, - :cpu_usage_rate_average => v, - :cpu_usagemhz_rate_average => v, - :sys_uptime_absolute_latest => v - ) + :timestamp => t, + :cpu_usage_rate_average => v, + :cpu_usagemhz_rate_average => v, + :sys_uptime_absolute_latest => v) end end @@ -145,15 +142,14 @@ cases.each_slice(3) do |t, cpu, mem| [@vm1, @vm2].each do |vm| vm.metric_rollups << FactoryBot.create(:metric_rollup_vm_daily, - :timestamp => t, - :cpu_usage_rate_average => cpu, - :mem_usage_absolute_average => mem, - :min_max => { - :max_cpu_usage_rate_average => cpu, - :max_mem_usage_absolute_average => mem, - }, - :time_profile => @time_profile - ) + :timestamp => t, + :cpu_usage_rate_average => cpu, + :mem_usage_absolute_average => mem, + :min_max => { + :max_cpu_usage_rate_average => cpu, + :max_mem_usage_absolute_average => mem, + }, + :time_profile => @time_profile) end end end @@ -217,8 +213,7 @@ it "should handle the only event right before the starting on time (FB15770)" do @ems_cluster = FactoryBot.create(:ems_cluster, :ext_management_system => @ems_vmware) @ems_cluster.metric_rollups << FactoryBot.create(:metric_rollup_vm_hr, - :timestamp => Time.parse("2011-08-12T20:33:12Z") - ) + :timestamp => Time.parse("2011-08-12T20:33:12Z")) options = {:debug_trace => "false", :value => "50", @@ -227,8 +222,7 @@ :column => "v_pct_cpu_ready_delta_summation", :interval_name => "hourly", :starting_on => Time.parse("2011-08-12T20:33:20Z"), - :trend_direction => "none" - } + :trend_direction => "none"} expect(@ems_cluster.performances_maintains_value_for_duration?(options)).to eq(false) end end @@ -247,20 +241,20 @@ @node_a.metric_rollups << FactoryBot.create( :metric_rollup, - :timestamp => rollup_chain_timestamp, - :cpu_usage_rate_average => 50.0, - :capture_interval_name => 'hourly', - :derived_vm_numvcpus => 2, - :parent_ems_id => @ems_kubernetes.id + :timestamp => rollup_chain_timestamp, + :cpu_usage_rate_average => 50.0, + :capture_interval_name => 'hourly', + :derived_vm_numvcpus => 2, + :parent_ems_id => @ems_kubernetes.id ) @node_b.metric_rollups << FactoryBot.create( :metric_rollup, - :timestamp => rollup_chain_timestamp, - :cpu_usage_rate_average => 75.0, - :capture_interval_name => 'hourly', - :derived_vm_numvcpus => 8, - :parent_ems_id => @ems_kubernetes.id + :timestamp => rollup_chain_timestamp, + :cpu_usage_rate_average => 75.0, + :capture_interval_name => 'hourly', + :derived_vm_numvcpus => 8, + :parent_ems_id => @ems_kubernetes.id ) end diff --git a/spec/models/miq_action_spec.rb b/spec/models/miq_action_spec.rb index c9a6695c4ab..0c8d0c0b95f 100644 --- a/spec/models/miq_action_spec.rb +++ b/spec/models/miq_action_spec.rb @@ -91,7 +91,7 @@ context "#raise_automation_event" do before do - @vm = FactoryBot.create(:vm_infra) + @vm = FactoryBot.create(:vm_infra) allow(@vm).to receive(:my_zone).and_return("vm_zone") FactoryBot.create(:miq_event_definition, :name => "raise_automation_event") FactoryBot.create(:miq_event_definition, :name => "vm_start") @@ -191,17 +191,17 @@ context "#action_vm_retire" do before do - @vm = FactoryBot.create(:vm_infra) + @vm = FactoryBot.create(:vm_infra) allow(@vm).to receive(:my_zone).and_return("vm_zone") @event = FactoryBot.create(:miq_event_definition, :name => "assigned_company_tag") @action = FactoryBot.create(:miq_action, :name => "vm_retire") end it "synchronous" do - input = {:synchronous => true} + input = {:synchronous => true} Timecop.freeze do - date = Time.now.utc - 1.day + date = 
Time.now.utc - 1.day expect(VmOrTemplate).to receive(:retire) do |vms, options| expect(vms).to eq([@vm]) @@ -217,14 +217,14 @@ allow(@vm).to receive_messages(:my_zone => zone.name) Timecop.freeze do - date = Time.now.utc - 1.day + date = Time.now.utc - 1.day @action.action_vm_retire(@action, @vm, input) expect(MiqQueue.count).to eq(1) msg = MiqQueue.first expect(msg.class_name).to eq(@vm.class.name) expect(msg.method_name).to eq('retire') - expect(msg.args).to eq([[@vm], :date => date]) + expect(msg.args).to eq([[@vm], {:date => date}]) expect(msg.zone).to eq(zone.name) end end @@ -243,16 +243,16 @@ end it "avoids non container images" do - error_message = "MIQ(action_container_image_analyze): Unable to perform action [#{action.description}],"\ - " object [#{container_image_registry.inspect}] is not a Container Image" + error_message = "MIQ(action_container_image_analyze): Unable to perform action [#{action.description}], " \ + "object [#{container_image_registry.inspect}] is not a Container Image" expect(MiqPolicy.logger).to receive(:error).with(error_message) expect(action.action_container_image_analyze(action, container_image_registry, :event => event)).to be_nil end it "avoids an event loop" do - error_message = "MIQ(action_container_image_analyze): Invoking action [#{action.description}] for event"\ - " [#{event_loop.description}] would cause infinite loop, skipping" + error_message = "MIQ(action_container_image_analyze): Invoking action [#{action.description}] for event " \ + "[#{event_loop.description}] would cause infinite loop, skipping" expect(MiqPolicy.logger).to receive(:warn).with(error_message) expect(action.action_container_image_analyze(action, container_image, :event => event_loop)).to be_nil @@ -292,7 +292,7 @@ context '.create_default_actions' do context 'seeding default actions from a file with 3 csv rows and some comments' do before do - stub_csv <<-CSV.strip_heredoc + stub_csv <<~CSV name,description audit,Generate Audit Event log,Generate log message @@ -314,7 +314,7 @@ context 'when csv was changed and imported again' do before do - stub_csv <<-CSV.strip_heredoc + stub_csv <<~CSV name,description audit,UPD: Audit Event # log,Generate log message @@ -367,11 +367,11 @@ def stub_csv(data) before do @script_dir = Dir.mktmpdir stub_const('::MiqAction::SCRIPT_DIR', Pathname(@script_dir)) - FileUtils.touch %W( + FileUtils.touch %W[ #{@script_dir}/script2.rb #{@script_dir}/script.1.sh #{@script_dir}/script3 - ) + ] end after do @@ -448,7 +448,7 @@ def stub_csv(data) let(:miq_server) { EvmSpecHelper.local_miq_server } let(:action) { MiqAction.new } - let(:inputs) { { :policy => nil, :synchronous => false } } + let(:inputs) { {:policy => nil, :synchronous => false} } let(:q_options) do { @@ -550,8 +550,8 @@ def stub_csv(data) end let(:request_options) do - { :manageiq_extra_vars => { "event_target" => vm.href_slug, "event_name" => event_name }, - :initiator => 'control' } + {:manageiq_extra_vars => {"event_target" => vm.href_slug, "event_name" => event_name}, + :initiator => 'control'} end shared_examples_for "#workflow check" do @@ -567,30 +567,30 @@ def stub_csv(data) context "use event target" do let(:action_options) do - { :service_template_id => stap.id, - :use_event_target => true } + {:service_template_id => stap.id, + :use_event_target => true} end - let(:dialog_options) { {:hosts => ip1 } } + let(:dialog_options) { {:hosts => ip1} } it_behaves_like "#workflow check" end context "use localhost" do let(:action_options) do - { :service_template_id => stap.id, - 
:use_localhost => true } + {:service_template_id => stap.id, + :use_localhost => true} end - let(:dialog_options) { {:hosts => 'localhost' } } + let(:dialog_options) { {:hosts => 'localhost'} } it_behaves_like "#workflow check" end context "use hosts" do let(:action_options) do - { :service_template_id => stap.id, - :hosts => "ip1, ip2" } + {:service_template_id => stap.id, + :hosts => "ip1, ip2"} end - let(:dialog_options) { {:hosts => 'ip1, ip2' } } + let(:dialog_options) { {:hosts => 'ip1, ip2'} } it_behaves_like "#workflow check" end diff --git a/spec/models/miq_ae_class_spec.rb b/spec/models/miq_ae_class_spec.rb index e9880c0c0a9..da2bd93fb45 100644 --- a/spec/models/miq_ae_class_spec.rb +++ b/spec/models/miq_ae_class_spec.rb @@ -103,11 +103,11 @@ def set_priority(name, value) set_priority('domain1', 10) set_priority('domain2', 20) set_priority('domain3', 50) - @inst4_list = %w(/DOMAIN3/SYSTEM/PROCESS/inst4 /DOMAIN1/SYSTEM/PROCESS/inst4) - @sorted_inst_list = ['/DOMAIN3/SYSTEM/PROCESS/inst1', '/DOMAIN3/SYSTEM/PROCESS/inst2', - '/DOMAIN3/SYSTEM/PROCESS/inst32', '/DOMAIN3/SYSTEM/PROCESS/inst4', - '/DOMAIN2/SYSTEM/PROCESS/inst31', '/DOMAIN2/SYSTEM/PROCESS/inst41', - '/DOMAIN1/SYSTEM/PROCESS/inst3'] + @inst4_list = %w[/DOMAIN3/SYSTEM/PROCESS/inst4 /DOMAIN1/SYSTEM/PROCESS/inst4] + @sorted_inst_list = ['/DOMAIN3/SYSTEM/PROCESS/inst1', '/DOMAIN3/SYSTEM/PROCESS/inst2', + '/DOMAIN3/SYSTEM/PROCESS/inst32', '/DOMAIN3/SYSTEM/PROCESS/inst4', + '/DOMAIN2/SYSTEM/PROCESS/inst31', '/DOMAIN2/SYSTEM/PROCESS/inst41', + '/DOMAIN1/SYSTEM/PROCESS/inst3'] end it 'invalid path should return an empty array' do @@ -194,7 +194,7 @@ def set_priority(name, value) @cls1 = FactoryBot.create(:miq_ae_class, :name => "cls1", :namespace_id => @ns1.id) @cls2 = FactoryBot.create(:miq_ae_class, :name => "cls2", :namespace_id => @ns1.id) - @d2 = FactoryBot.create(:miq_ae_domain, :name => "domain2", :priority => 2) + @d2 = FactoryBot.create(:miq_ae_domain, :name => "domain2", :priority => 2) @ns2 = FactoryBot.create(:miq_ae_namespace, :name => "ns2", :parent => @d2) end @@ -277,8 +277,8 @@ def set_priority(name, value) end it "produces the expected xml" do - expected_xml = <<-XML - + expected_xml = <<~XML + XML expect(miq_ae_class.to_export_xml).to eq(expected_xml.chomp) @@ -289,8 +289,8 @@ def set_priority(name, value) let(:ae_fields) { [] } it "produces the expected xml" do - expected_xml = <<-XML - + expected_xml = <<~XML + XML expect(miq_ae_class.to_export_xml).to eq(expected_xml.chomp) @@ -343,12 +343,12 @@ def set_priority(name, value) context "waypoint_ids_for_state_machine" do it "check ids" do - create_state_ae_model(:name => 'FRED', :ae_class => 'CLASS1', :ae_namespace => 'A/B/C') - create_state_ae_model(:name => 'FREDDY', :ae_class => 'CLASS2', :ae_namespace => 'C/D/E') - create_ae_model(:name => 'MARIO', :ae_class => 'CLASS3', :ae_namespace => 'C/D/E') + create_state_ae_model(:name => 'FRED', :ae_class => 'CLASS1', :ae_namespace => 'A/B/C') + create_state_ae_model(:name => 'FREDDY', :ae_class => 'CLASS2', :ae_namespace => 'C/D/E') + create_ae_model(:name => 'MARIO', :ae_class => 'CLASS3', :ae_namespace => 'C/D/E') domain_fqnames = %w[FRED FREDDY] ns_fqnames = %w[FRED/A FRED/A/B FRED/A/B/C FREDDY/C FREDDY/C/D FREDDY/C/D/E] - class_fqnames = %w(/FRED/A/B/C/CLASS1 /FREDDY/C/D/E/CLASS2) + class_fqnames = %w[/FRED/A/B/C/CLASS1 /FREDDY/C/D/E/CLASS2] ids = domain_fqnames.collect { |ns| "MiqAeDomain::#{MiqAeNamespace.lookup_by_fqname(ns, false).id}" } ids += ns_fqnames.collect { |ns| 
"MiqAeNamespace::#{MiqAeNamespace.lookup_by_fqname(ns, false).id}" } ids += class_fqnames.collect { |cls| "MiqAeClass::#{MiqAeClass.lookup_by_fqname(cls).id}" } @@ -356,7 +356,7 @@ def set_priority(name, value) end it "no state machine classes" do - create_ae_model(:name => 'MARIO', :ae_class => 'CLASS3', :ae_namespace => 'C/D/E') + create_ae_model(:name => 'MARIO', :ae_class => 'CLASS3', :ae_namespace => 'C/D/E') expect(MiqAeClass.waypoint_ids_for_state_machines).to be_empty end end diff --git a/spec/models/miq_ae_field_spec.rb b/spec/models/miq_ae_field_spec.rb index f1609fb23b5..1a685e426d2 100644 --- a/spec/models/miq_ae_field_spec.rb +++ b/spec/models/miq_ae_field_spec.rb @@ -15,8 +15,8 @@ context "when default_value is blank" do let(:expected_xml) do - <<-XML - + <<~XML + XML end @@ -28,8 +28,8 @@ context "when default_value is not blank" do let(:default_value) { "default_value" } let(:expected_xml) do - <<-XML -default_value + <<~XML + default_value XML end @@ -141,7 +141,7 @@ end it "should set the updated_by field on save" do - f1 = @c1.ae_fields.create(:name => "field") + f1 = @c1.ae_fields.create(:name => "field") expect(f1.updated_by).to eq('system') end @@ -182,7 +182,7 @@ it "should validate datatypes" do MiqAeField.available_datatypes.each do |datatype| - f = @c1.ae_fields.build(:name => "fname_#{datatype.gsub(/ /,'_')}", + f = @c1.ae_fields.build(:name => "fname_#{datatype.tr(' ', '_')}", :aetype => "attribute", :datatype => datatype) expect(f).to be_valid expect(f.save!).to be_truthy @@ -192,7 +192,7 @@ @c1.ae_fields.destroy_all expect(@c1.reload.ae_fields.length).to eq(0) - %w(foo bar).each do |datatype| + %w[foo bar].each do |datatype| f = @c1.ae_fields.build(:name => "fname_#{datatype}", :aetype => "attribute", :datatype => datatype) expect(f).not_to be_valid end diff --git a/spec/models/miq_ae_instance_spec.rb b/spec/models/miq_ae_instance_spec.rb index f966576037e..82b7a8f815a 100644 --- a/spec/models/miq_ae_instance_spec.rb +++ b/spec/models/miq_ae_instance_spec.rb @@ -203,8 +203,8 @@ end it "produces the expected xml" do - expected_xml = <<-XML - + expected_xml = <<~XML + XML expect(miq_ae_instance.to_export_xml).to eq(expected_xml.chomp) diff --git a/spec/models/miq_ae_method_spec.rb b/spec/models/miq_ae_method_spec.rb index f100b763c0d..a39414c752d 100644 --- a/spec/models/miq_ae_method_spec.rb +++ b/spec/models/miq_ae_method_spec.rb @@ -4,11 +4,11 @@ n1 = FactoryBot.create(:miq_ae_system_domain, :tenant => user.current_tenant) c1 = FactoryBot.create(:miq_ae_class, :namespace_id => n1.id, :name => "foo") f1 = FactoryBot.create(:miq_ae_method, - :class_id => c1.id, - :name => "foo_method", - :scope => "instance", - :language => "ruby", - :location => "inline") + :class_id => c1.id, + :name => "foo_method", + :scope => "instance", + :language => "ruby", + :location => "inline") expect(f1.editable?(user)).to be_falsey end @@ -16,11 +16,11 @@ n1 = FactoryBot.create(:miq_ae_domain, :tenant => user.current_tenant) c1 = FactoryBot.create(:miq_ae_class, :namespace_id => n1.id, :name => "foo") f1 = FactoryBot.create(:miq_ae_method, - :class_id => c1.id, - :name => "foo_method", - :scope => "instance", - :language => "ruby", - :location => "inline") + :class_id => c1.id, + :name => "foo_method", + :scope => "instance", + :language => "ruby", + :location => "inline") expect(f1.editable?(user)).to be_truthy end @@ -153,8 +153,8 @@ end it "produces the expected xml" do - expected_xml = <<-XML - + expected_xml = <<~XML + XML expect(miq_ae_method.to_export_xml).to 
eq(expected_xml.chomp) @@ -166,11 +166,11 @@ n1 = FactoryBot.create(:miq_ae_namespace, :name => 'ns1', :parent => d1) c1 = FactoryBot.create(:miq_ae_class, :namespace_id => n1.id, :name => "foo") m1 = FactoryBot.create(:miq_ae_method, - :class_id => c1.id, - :name => "foo_method", - :scope => "instance", - :language => "ruby", - :location => "inline") + :class_id => c1.id, + :name => "foo_method", + :scope => "instance", + :language => "ruby", + :location => "inline") expect(m1.domain.name).to eql('dom1') end @@ -179,11 +179,11 @@ n1 = FactoryBot.create(:miq_ae_namespace, :name => 'ns1', :parent => d1) c1 = FactoryBot.create(:miq_ae_class, :namespace_id => n1.id, :name => "foo") m1 = FactoryBot.create(:miq_ae_method, - :class_id => c1.id, - :name => "foo_method", - :scope => "instance", - :language => "ruby", - :location => "inline") + :class_id => c1.id, + :name => "foo_method", + :scope => "instance", + :language => "ruby", + :location => "inline") result = m1.to_export_yaml expect(result['name']).to eql('foo_method') diff --git a/spec/models/miq_ae_value_spec.rb b/spec/models/miq_ae_value_spec.rb index e6126d62128..b28f961f0fe 100644 --- a/spec/models/miq_ae_value_spec.rb +++ b/spec/models/miq_ae_value_spec.rb @@ -17,8 +17,8 @@ context "when the value is blank" do let(:value) { nil } let(:expected_xml) do - <<-XML - + <<~XML + XML end @@ -30,8 +30,8 @@ context "when the value is not blank" do let(:value) { "value" } let(:expected_xml) do - <<-XML -value + <<~XML + value XML end diff --git a/spec/models/miq_alert_eval_internal_spec.rb b/spec/models/miq_alert_eval_internal_spec.rb index 40eef4cbd42..8564eab8ae7 100644 --- a/spec/models/miq_alert_eval_internal_spec.rb +++ b/spec/models/miq_alert_eval_internal_spec.rb @@ -18,7 +18,9 @@ :options => { :event_types => ["MigrateVM_Task_Complete"], :freq_threshold => 3, - :time_threshold => 3.days}} + :time_threshold => 3.days + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_vm) @alert_prof.add_member(@alert) @@ -48,7 +50,9 @@ :value_threshold => "250", :rt_time_threshold => 60, :trend_direction => 'none', - :debug_trace => 'false'}} + :debug_trace => 'false' + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_vm) @alert_prof.add_member(@alert) @@ -69,7 +73,9 @@ :mode => "internal", :options => { :operator => "Changed", - :hdw_attr => :cpu_affinity}} + :hdw_attr => :cpu_affinity + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_vm) @alert_prof.add_member(@alert) @@ -90,7 +96,9 @@ :mode => "internal", :options => { :operator => "Decreased", - :hdw_attr => "memory_mb"}} + :hdw_attr => "memory_mb" + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_vm) @alert_prof.add_member(@alert) @@ -117,7 +125,9 @@ :event_log_message_filter_value => "Error in", :event_log_event_id => "12345", :time_threshold => 86400, - :event_log_level => "fatal"}} + :event_log_level => "fatal" + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_vm) @alert_prof.add_member(@alert) @@ -139,7 +149,8 @@ :ems_id => 1, :ems_alarm_name => "GT VM CPU Usage", :ems_alarm_mor => "alarm-7" - }} + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_vm) 
@alert_prof.add_member(@alert) @@ -170,7 +181,9 @@ :event_log_message_filter_type => "INCLUDES", :event_log_message_filter_value => "exceeds soft limit", :time_threshold => 86400, - :event_log_level => "warn"}} + :event_log_level => "warn" + } + } @alert = FactoryBot.create(:miq_alert_vm, :expression => expression) @alert_prof = FactoryBot.create(:miq_alert_set_host) @alert_prof.add_member(@alert) diff --git a/spec/models/miq_alert_set_spec.rb b/spec/models/miq_alert_set_spec.rb index 405270ca689..06466c3f873 100644 --- a/spec/models/miq_alert_set_spec.rb +++ b/spec/models/miq_alert_set_spec.rb @@ -10,7 +10,7 @@ expect(provider_profile.miq_alerts.first).to have_attributes( :guid => "ea3acd49-9516-4fde-b828-bf68d254c0cf", :db => "ExtManagementSystem", - :responds_to_events => "datawarehouse_alert", + :responds_to_events => "datawarehouse_alert" ) node_profile = MiqAlertSet.find_by(:guid => "ff0fb114-be03-4685-bebb-b6ae8f13d7ad") expect(node_profile).to have_attributes(:mode => "ContainerNode") @@ -18,7 +18,7 @@ expect(node_profile.miq_alerts.first).to have_attributes( :guid => "efe9d4f0-9c6f-4c67-80b1-05cd83223349", :db => "ContainerNode", - :responds_to_events => "datawarehouse_alert", + :responds_to_events => "datawarehouse_alert" ) end @@ -29,9 +29,10 @@ end def without_file(fname) - # Note: can the file you are moving cause sporadic failures in other threads? + # NOTE: can the file you are moving cause sporadic failures in other threads? raise "no block given" unless block_given? raise "fname is blank" if fname.blank? + tempf = Tempfile.new("temporary_backup") begin FileUtils.mv(fname, tempf.path) diff --git a/spec/models/miq_alert_spec.rb b/spec/models/miq_alert_spec.rb index 3240a160a1e..e3c65da67fc 100644 --- a/spec/models/miq_alert_spec.rb +++ b/spec/models/miq_alert_spec.rb @@ -7,7 +7,7 @@ @vm = FactoryBot.create(:vm_vmware) MiqAlert.seed - @events_to_alerts = MiqAlert.all.inject([]) do |arr, a| + @events_to_alerts = MiqAlert.all.each_with_object([]) do |a, arr| next(arr) if a.responds_to_events.nil? 
next(arr) unless a.db == "Vm" @@ -19,7 +19,6 @@ event = e.strip arr << [event, a.guid] end - arr end end diff --git a/spec/models/miq_database_spec.rb b/spec/models/miq_database_spec.rb index aff5aafcba3..910e99f3562 100644 --- a/spec/models/miq_database_spec.rb +++ b/spec/models/miq_database_spec.rb @@ -25,9 +25,8 @@ context "existing record" do it "will seed nil values" do FactoryBot.build(:miq_database, - :csrf_secret_token => nil, - :session_secret_token => nil - ).save(:validate => false) + :csrf_secret_token => nil, + :session_secret_token => nil).save(:validate => false) db = MiqDatabase.seed expect(db.csrf_secret_token_encrypted).to be_encrypted @@ -36,9 +35,8 @@ it "will not change existing values" do FactoryBot.create(:miq_database, - :csrf_secret_token => "abc", - :session_secret_token => "def" - ) + :csrf_secret_token => "abc", + :session_secret_token => "def") csrf, session, update_repo = MiqDatabase.all.collect { |db| [db.csrf_secret_token, db.session_secret_token] }.first db = MiqDatabase.seed diff --git a/spec/models/miq_dialog/seeding_spec.rb b/spec/models/miq_dialog/seeding_spec.rb index 5e7b132b17a..4d5ce4e5da0 100644 --- a/spec/models/miq_dialog/seeding_spec.rb +++ b/spec/models/miq_dialog/seeding_spec.rb @@ -12,7 +12,7 @@ before do FileUtils.mkdir_p(dialog_dir) - FileUtils.cp_r(Rails.root.join("product/dialogs/miq_dialogs/miq_provision_dialogs.yaml"), dialog_dir, preserve: true) + FileUtils.cp_r(Rails.root.join("product/dialogs/miq_dialogs/miq_provision_dialogs.yaml"), dialog_dir, :preserve => true) stub_const("MiqDialog::Seeding::DIALOG_DIR", dialog_dir) expect(Vmdb::Plugins).to receive(:flat_map).at_least(:once) { [] } @@ -31,7 +31,7 @@ expect(MiqDialog.where(:name => "testing_dialog")).to_not exist # Add new records - FileUtils.cp_r(data_dir, tmpdir, preserve: true) + FileUtils.cp_r(data_dir, tmpdir, :preserve => true) described_class.seed @@ -52,7 +52,7 @@ # The mtime rounding is granular to the second, so need to be higher # than that for test purposes - FileUtils.touch(dialog_yml, mtime: 1.second.from_now.to_time) + FileUtils.touch(dialog_yml, :mtime => 1.second.from_now.to_time) described_class.seed diff --git a/spec/models/miq_enterprise_spec.rb b/spec/models/miq_enterprise_spec.rb index 3ced3620fb2..397f8cc0b83 100644 --- a/spec/models/miq_enterprise_spec.rb +++ b/spec/models/miq_enterprise_spec.rb @@ -33,7 +33,7 @@ context "with some existing records" do before do - @ems = FactoryBot.create(:ems_vmware) + @ems = FactoryBot.create(:ems_vmware) end it "#vms_and_templates" do @@ -47,8 +47,8 @@ end it "#vms" do - vm = [FactoryBot.create(:vm_vmware, :ext_management_system => @ems), - FactoryBot.create(:vm_vmware, :ext_management_system => @ems)] + vm = [FactoryBot.create(:vm_vmware, :ext_management_system => @ems), + FactoryBot.create(:vm_vmware, :ext_management_system => @ems)] FactoryBot.create(:vm_vmware) diff --git a/spec/models/miq_event_definition_spec.rb b/spec/models/miq_event_definition_spec.rb index db93dcacc20..ee85be6a597 100644 --- a/spec/models/miq_event_definition_spec.rb +++ b/spec/models/miq_event_definition_spec.rb @@ -105,8 +105,8 @@ def create_set!(name) before do com_set = MiqEventDefinitionSet.create(:name => "compliance", :description => "Compliance Events") FactoryBot.create(:miq_event_definition, - :name => "host_compliance_check", - :event_type => "Default").tap { |e| com_set.add_member(e) } + :name => "host_compliance_check", + :event_type => "Default").tap { |e| com_set.add_member(e) } end it 'has all default control policy events with 
set type' do diff --git a/spec/models/miq_event_spec.rb b/spec/models/miq_event_spec.rb index 2abab839e19..48d1fcbc63a 100644 --- a/spec/models/miq_event_spec.rb +++ b/spec/models/miq_event_spec.rb @@ -174,13 +174,15 @@ ems_event = FactoryBot.create( :ems_event, :event_type => "CloneVM_Task", - :full_data => { "info" => {"task" => "task-5324"}}) + :full_data => {"info" => {"task" => "task-5324"}} + ) FactoryBot.create(:miq_event_definition, :name => event) FactoryBot.create( :miq_event, :event_type => event, :target => vm, - :full_data => {:source_event_id => ems_event.id}) + :full_data => {:source_event_id => ems_event.id} + ) inputs = { :type => vm.class.name, :source_event => ems_event, diff --git a/spec/models/miq_group_spec.rb b/spec/models/miq_group_spec.rb index 845b48c56a2..2889f72bb76 100644 --- a/spec/models/miq_group_spec.rb +++ b/spec/models/miq_group_spec.rb @@ -69,7 +69,6 @@ end end - it "should set group type to 'system' " do expect(subject.group_type).to eq("system") end @@ -101,7 +100,7 @@ end it "should return groups by user name with external authentication" do - memberships = [%w(foo bar)] + memberships = [%w[foo bar]] allow(@ifp_interface).to receive(:GetUserGroups).with('user').and_return(memberships) @@ -109,8 +108,8 @@ end it "should remove FQDN from the groups by user name with external authentication" do - ifp_memberships = [%w(foo@fqdn bar@fqdn)] - memberships = [%w(foo bar)] + ifp_memberships = [%w[foo@fqdn bar@fqdn]] + memberships = [%w[foo bar]] allow(@ifp_interface).to receive(:GetUserGroups).with('user').and_return(ifp_memberships) @@ -127,12 +126,12 @@ :normalize => 'fred flintstone', :bind => true, :get_user_object => 'user object', - :get_memberships => %w(foo bar)) + :get_memberships => %w[foo bar]) allow(MiqLdap).to receive(:new).and_return(miq_ldap) end it "should return LDAP groups by user name" do - expect(MiqGroup.get_ldap_groups_by_user('fred', 'bind_dn', 'password')).to eq(%w(foo bar)) + expect(MiqGroup.get_ldap_groups_by_user('fred', 'bind_dn', 'password')).to eq(%w[foo bar]) end it "should issue an error message when user name could not be bound to LDAP" do @@ -171,25 +170,25 @@ @disk3 = FactoryBot.create(:disk, :device_type => "disk", :size => disk_size, :hardware_id => @hw4.id) @active_vm = FactoryBot.create(:vm_vmware, - :name => "Active VM", - :miq_group_id => miq_group.id, - :ems_id => ems.id, - :storage_id => storage.id, - :hardware => @hw1) + :name => "Active VM", + :miq_group_id => miq_group.id, + :ems_id => ems.id, + :storage_id => storage.id, + :hardware => @hw1) @archived_vm = FactoryBot.create(:vm_vmware, - :name => "Archived VM", - :miq_group_id => miq_group.id, - :hardware => @hw2) + :name => "Archived VM", + :miq_group_id => miq_group.id, + :hardware => @hw2) @orphaned_vm = FactoryBot.create(:vm_vmware, - :name => "Orphaned VM", - :miq_group_id => miq_group.id, - :storage_id => storage.id, - :hardware => @hw3) - @retired_vm = FactoryBot.create(:vm_vmware, - :name => "Retired VM", + :name => "Orphaned VM", :miq_group_id => miq_group.id, - :retired => true, - :hardware => @hw4) + :storage_id => storage.id, + :hardware => @hw3) + @retired_vm = FactoryBot.create(:vm_vmware, + :name => "Retired VM", + :miq_group_id => miq_group.id, + :retired => true, + :hardware => @hw4) end it "#active_vms" do @@ -212,7 +211,7 @@ expect(miq_group.provisioned_storage).to eq(ram_size.megabyte + disk_size) end - %w(allocated_memory allocated_vcpu allocated_storage provisioned_storage).each do |vcol| + %w[allocated_memory allocated_vcpu allocated_storage 
provisioned_storage].each do |vcol| it "should have virtual column #{vcol} " do expect(described_class).to have_virtual_column vcol.to_s, :integer end @@ -221,11 +220,11 @@ it "when the virtual column is nil" do hw = FactoryBot.create(:hardware, :cpu_sockets => num_cpu, :memory_mb => ram_size) FactoryBot.create(:vm_vmware, - :name => "VM with no disk", - :miq_group_id => miq_group.id, - :ems_id => ems.id, - :storage_id => storage.id, - :hardware => hw) + :name => "VM with no disk", + :miq_group_id => miq_group.id, + :ems_id => ems.id, + :storage_id => storage.id, + :hardware => hw) expect(miq_group.allocated_storage).to eq(disk_size) end end @@ -285,9 +284,9 @@ allow(YAML).to receive(:load_file).with(role_map_path).and_return(role_map) allow(YAML).to receive(:load_file).with(filter_map_path).and_call_original - expect { + expect do MiqGroup.seed - }.to(change { MiqGroup.count }) + end.to(change { MiqGroup.count }) expect(MiqGroup.last.name).to eql('EvmRole-test_role') expect(MiqGroup.last.sequence).to eql(1) end @@ -297,8 +296,8 @@ let!(:group_with_no_entitlement) { tenant.default_miq_group } let!(:group_with_existing_entitlement) do FactoryBot.create(:miq_group, - :tenant_type, - :entitlement => FactoryBot.create(:entitlement, :miq_user_role => nil)) + :tenant_type, + :entitlement => FactoryBot.create(:entitlement, :miq_user_role => nil)) end let(:default_tenant_role) { MiqUserRole.default_tenant_role } @@ -391,9 +390,8 @@ :settings => {:restrictions => {:vms => :user_or_group}} ) group = FactoryBot.create(:miq_group, - :description => "MiqGroup-self_service", - :miq_user_role => role - ) + :description => "MiqGroup-self_service", + :miq_user_role => role) expect(group).to be_self_service end @@ -587,13 +585,13 @@ end it "Returns the expected sui roles" do - allow(MiqProductFeature).to receive(:feature_all_children).with('sui').and_return(%w(sui_role_a sui_role_b sui_role_c)) - %w(sui sui_role_a sui_role_c).each do |ident| + allow(MiqProductFeature).to receive(:feature_all_children).with('sui').and_return(%w[sui_role_a sui_role_b sui_role_c]) + %w[sui sui_role_a sui_role_c].each do |ident| allow(role).to receive(:allows?).with(:identifier => ident).and_return(true) end allow(role).to receive(:allows?).with(:identifier => 'sui_role_b').and_return(false) - expect(subject.sui_product_features).to eq(%w(sui_role_a sui_role_c)) + expect(subject.sui_product_features).to eq(%w[sui_role_a sui_role_c]) end end @@ -626,7 +624,7 @@ describe ".with_roles_excluding" do it "handles multiple columns" do a = FactoryBot.create(:miq_group, :features => "good") - FactoryBot.create(:miq_group, :features => %w(good everything)) + FactoryBot.create(:miq_group, :features => %w[good everything]) FactoryBot.create(:miq_group, :features => "everything") expect(MiqGroup.select(:id, :description).with_roles_excluding("everything")).to match_array([a]) diff --git a/spec/models/miq_policy_spec.rb b/spec/models/miq_policy_spec.rb index 9e84381aa60..8db775146fb 100644 --- a/spec/models/miq_policy_spec.rb +++ b/spec/models/miq_policy_spec.rb @@ -274,7 +274,7 @@ describe ".built_in_policies" do it 'creates built in policies' do policy = described_class.built_in_policies[0] - %w(name description towhat active mode conditions).each do |m| + %w[name description towhat active mode conditions].each do |m| expect(policy.send(m)).not_to be_nil end expect(policy.events).not_to be_empty @@ -289,8 +289,8 @@ FactoryBot.create(:miq_action, :name => "vm_suspend", :action_type => 'default') 
MiqPolicy.class_variable_set(:@@built_in_policies, nil) @vm = FactoryBot.create(:vm_openstack, - :ext_management_system => FactoryBot.create(:ems_openstack, - :zone => FactoryBot.create(:zone))) + :ext_management_system => FactoryBot.create(:ems_openstack, + :zone => FactoryBot.create(:zone))) end subject { MiqPolicy.enforce_policy(@vm, "vm_resume", {}) } @@ -318,17 +318,17 @@ expect(described_class.create!(:description => 'x')).to have_attributes( :towhat => "Vm", :active => true, - :mode => "control", + :mode => "control" ) end it 'allows override of defaults' do expect(described_class.create!( - :towhat => "Host", :mode => "compliance", :active => false, :description => 'x', - )).to have_attributes( - :towhat => "Host", - :active => false, - :mode => "compliance", + :towhat => "Host", :mode => "compliance", :active => false, :description => 'x' + )).to have_attributes( + :towhat => "Host", + :active => false, + :mode => "compliance" ) end end @@ -340,8 +340,8 @@ it 'reports invalid towhat' do policy = FactoryBot.build(:miq_policy, :towhat => "BobsYourUncle") - towhat_error = "should be one of ContainerGroup, ContainerImage, "\ - "ContainerNode, ContainerProject, ContainerReplicator, "\ + towhat_error = "should be one of ContainerGroup, ContainerImage, " \ + "ContainerNode, ContainerProject, ContainerReplicator, " \ "ExtManagementSystem, Host, PhysicalServer, Vm" expect(policy).not_to be_valid diff --git a/spec/models/miq_product_feature_spec.rb b/spec/models/miq_product_feature_spec.rb index e9cc87797a9..c4ec075c044 100644 --- a/spec/models/miq_product_feature_spec.rb +++ b/spec/models/miq_product_feature_spec.rb @@ -24,7 +24,7 @@ def self.tenant_features_in_hash # - widget_refresh (H) let(:hierarchical_features) do EvmSpecHelper.seed_specific_product_features( - %w(miq_report_widget_editor miq_report_widget_admin widget_refresh widget_edit widget_copy container_dashboard) + %w[miq_report_widget_editor miq_report_widget_admin widget_refresh widget_edit widget_copy container_dashboard] ) end @@ -33,6 +33,7 @@ def assert_product_feature_attributes(pf) expect(pf.keys - described_class::ALLOWED_ATTRIBUTES).to be_empty pf.each do |k, v| next if k == :hidden + expect(v).not_to be_blank, "Identifier: '#{pf[:identifier]}' Key: '#{k}' is blank" end end @@ -140,7 +141,7 @@ def traverse_product_feature_children(pfs, &block) it "creates/updates/deletes records" do MiqProductFeature.seed_features - expect(MiqProductFeature.pluck(:identifier)).to match_array %w(everything dialog_new_editor dialog_edit_editor policy_edit_editor) + expect(MiqProductFeature.pluck(:identifier)).to match_array %w[everything dialog_new_editor dialog_edit_editor policy_edit_editor] end end @@ -181,7 +182,7 @@ def traverse_product_feature_children(pfs, &block) it "creates tenant features" do features = miq_product_feature_class.tenant_features_in_hash - expect(features).to match_array([{ "name" => "Edit (#{root_tenant.name})", "description" => "XXX for tenant #{root_tenant.name}", + expect(features).to match_array([{"name" => "Edit (#{root_tenant.name})", "description" => "XXX for tenant #{root_tenant.name}", "identifier" => "dialog_copy_editor_tenant_#{root_tenant.id}", "tenant_id" => root_tenant.id}, {"name" => "Edit (#{tenant.name})", "description" => "XXX for tenant #{tenant.name}", "identifier" => "dialog_copy_editor_tenant_#{tenant.id}", "tenant_id" => tenant.id}]) @@ -242,7 +243,7 @@ def id_for_model_in_region(model, region) it "add new tenant feature" do features = miq_product_feature_class.tenant_features_in_hash - 
expect(features).to match_array([{ "name" => "Edit (#{root_tenant.name})", "description" => "XXX for tenant #{root_tenant.name}", + expect(features).to match_array([{"name" => "Edit (#{root_tenant.name})", "description" => "XXX for tenant #{root_tenant.name}", "identifier" => "dialog_copy_editor_tenant_#{root_tenant.id}", "tenant_id" => root_tenant.id}, {"name" => "Edit (#{tenant.name})", "description" => "XXX for tenant #{tenant.name}", "identifier" => "dialog_copy_editor_tenant_#{tenant.id}", "tenant_id" => tenant.id}, @@ -280,7 +281,7 @@ def id_for_model_in_region(model, region) it "removes tenant features" do features = miq_product_feature_class.tenant_features_in_hash - expect(features).to match_array([{ "name" => "Edit (#{root_tenant.name})", "description" => "XXX for tenant #{root_tenant.name}", + expect(features).to match_array([{"name" => "Edit (#{root_tenant.name})", "description" => "XXX for tenant #{root_tenant.name}", "identifier" => "dialog_copy_editor_tenant_#{root_tenant.id}", "tenant_id" => root_tenant.id}, {"name" => "Edit (#{tenant.name})", "description" => "XXX for tenant #{tenant.name}", "identifier" => "dialog_copy_editor_tenant_#{tenant.id}", "tenant_id" => tenant.id}]) @@ -297,18 +298,18 @@ def id_for_model_in_region(model, region) it "returns only visible features" do hierarchical_features expect(MiqProductFeature).not_to receive(:sort_children) - expect(MiqProductFeature.feature_children("miq_report_widget_admin", false)).to match_array(%w(widget_copy widget_edit)) + expect(MiqProductFeature.feature_children("miq_report_widget_admin", false)).to match_array(%w[widget_copy widget_edit]) end it "returns direct children only" do hierarchical_features - expect(MiqProductFeature.feature_children("miq_report_widget_editor")).to match_array(%w(miq_report_widget_admin)) + expect(MiqProductFeature.feature_children("miq_report_widget_editor")).to match_array(%w[miq_report_widget_admin]) end it "sorts features" do hierarchical_features expect(MiqProductFeature).to receive(:sort_children).and_call_original - expect(MiqProductFeature.feature_children("miq_report_widget_admin")).to match_array(%w(widget_copy widget_edit)) + expect(MiqProductFeature.feature_children("miq_report_widget_admin")).to match_array(%w[widget_copy widget_edit]) end end @@ -317,14 +318,16 @@ def id_for_model_in_region(model, region) hierarchical_features expect(MiqProductFeature).not_to receive(:sort_children) expect(MiqProductFeature.feature_all_children("miq_report_widget_editor", false)).to match_array( - %w(widget_copy widget_edit miq_report_widget_admin)) + %w[widget_copy widget_edit miq_report_widget_admin] + ) end it "returns all visible children sorted" do hierarchical_features expect(MiqProductFeature).to receive(:sort_children).and_call_original expect(MiqProductFeature.feature_all_children("miq_report_widget_editor")).to eq( - %w(widget_copy widget_edit miq_report_widget_admin)) + %w[widget_copy widget_edit miq_report_widget_admin] + ) end end @@ -372,9 +375,9 @@ def id_for_model_in_region(model, region) expect { MiqProductFeature.features }.to_not make_database_queries expect(MiqProductFeature.feature_root).to eq("f1") - expect(MiqProductFeature.feature_children("f1")).to match_array(%w(f2 f3)) + expect(MiqProductFeature.feature_children("f1")).to match_array(%w[f2 f3]) expect(MiqProductFeature.feature_children("f2")).to match_array([]) - expect(MiqProductFeature.feature_children("f3")).to match_array(%w(f4 f5)) + expect(MiqProductFeature.feature_children("f3")).to match_array(%w[f4 f5]) 
expect(MiqProductFeature.feature_parent("f1")).to be_nil expect(MiqProductFeature.feature_parent("f2")).to eq("f1") diff --git a/spec/models/miq_provision/post_install_callback_spec.rb b/spec/models/miq_provision/post_install_callback_spec.rb index 1cd6dc12ce6..40e02af78c3 100644 --- a/spec/models/miq_provision/post_install_callback_spec.rb +++ b/spec/models/miq_provision/post_install_callback_spec.rb @@ -10,7 +10,8 @@ def initialize(destination, phase) @phase = phase.to_s end - def for_destination; end + def for_destination + end def _log @logger ||= Vmdb.logger diff --git a/spec/models/miq_provision/state_machine_spec.rb b/spec/models/miq_provision/state_machine_spec.rb index db107240aff..a8cdfc4cdbd 100644 --- a/spec/models/miq_provision/state_machine_spec.rb +++ b/spec/models/miq_provision/state_machine_spec.rb @@ -9,12 +9,12 @@ let(:task) do FactoryBot.create(:miq_provision_openstack, - :source => template, - :destination => vm, - :state => 'pending', - :status => 'Ok', - :userid => req_user.userid, - :options => options) + :source => template, + :destination => vm, + :state => 'pending', + :status => 'Ok', + :userid => req_user.userid, + :options => options) end context "#prepare_provision" do @@ -33,7 +33,7 @@ :flavor_ref => flavor.ems_ref, :image_ref => template.ems_ref, :name => options[:vm_target_name], - :security_groups => [], + :security_groups => [] ) end diff --git a/spec/models/miq_provision/state_machine_spec_helper.rb b/spec/models/miq_provision/state_machine_spec_helper.rb index bdd5a90ec00..f232611d9a5 100644 --- a/spec/models/miq_provision/state_machine_spec_helper.rb +++ b/spec/models/miq_provision/state_machine_spec_helper.rb @@ -7,7 +7,7 @@ def test_prepare_provision def test_poll_destination_in_vmdb @test_poll_destination_in_vmdb_setup ||= begin - expect(task).to receive(:requeue_phase).twice { requeue_phase } + expect(task).to(receive(:requeue_phase).twice { requeue_phase }) expect(task).to receive(:find_destination_in_vmdb).and_return(nil, nil, vm) end expect(task.destination).to be_nil @@ -63,9 +63,8 @@ def test_poll_destination_powered_off_in_provider_with_callback_url end def test_poll_destination_powered_off_in_provider_no_callback - @test_poll_destination_powered_off_in_provider_no_callback_setup ||= begin - expect(task).to receive(:requeue_phase).twice { requeue_phase(__method__) } - end + @test_poll_destination_powered_off_in_provider_no_callback_setup ||= expect(task).to(receive(:requeue_phase).twice { requeue_phase(__method__) }) + skip_post_install_check { call_method } end diff --git a/spec/models/miq_provision_request_spec.rb b/spec/models/miq_provision_request_spec.rb index 39ee9dfe500..ae2f7b33580 100644 --- a/spec/models/miq_provision_request_spec.rb +++ b/spec/models/miq_provision_request_spec.rb @@ -25,7 +25,7 @@ end it "fails to retrieve the provision class when the vm does not exist" do - expect { described_class.request_task_class_from('options' => {:src_vm_id => -1 }) }.to raise_error(MiqException::MiqProvisionError) + expect { described_class.request_task_class_from('options' => {:src_vm_id => -1}) }.to raise_error(MiqException::MiqProvisionError) end end @@ -147,15 +147,15 @@ context "for cloud and infra providers," do def create_request(user, vm_template, prov_options) FactoryBot.create(:miq_provision_request, :requester => user, - :description => "request", - :tenant => user.current_tenant, - :source => vm_template, - :status => 'Ok', - :process => true, - :request_state => 'active', - :approval_state => 'approved', - :src_vm_id => 
vm_template.id, - :options => prov_options.merge(:owner_email => user.email, :requester_group => user.miq_groups.first.description)) + :description => "request", + :tenant => user.current_tenant, + :source => vm_template, + :status => 'Ok', + :process => true, + :request_state => 'active', + :approval_state => 'approved', + :src_vm_id => vm_template.id, + :options => prov_options.merge(:owner_email => user.email, :requester_group => user.miq_groups.first.description)) end let(:create_requests) do @@ -166,15 +166,15 @@ def create_request(user, vm_template, prov_options) @vmware_user2 = FactoryBot.create(:user_with_email, :miq_groups => [group]) hardware = FactoryBot.create(:hardware, :cpu1x2, :memory_mb => 512) @vmware_template = FactoryBot.create(:template_vmware, - :ext_management_system => ems, - :hardware => hardware) + :ext_management_system => ems, + :hardware => hardware) prov_options = {:number_of_vms => [2, '2'], :vm_memory => [1024, '1024'], :number_of_cpus => [2, '2']} 2.times { create_request(@vmware_user1, @vmware_template, prov_options) } 2.times { create_request(@vmware_user2, @vmware_template, prov_options) } ems = FactoryBot.create(:ems_google_with_authentication, - :availability_zones => [FactoryBot.create(:availability_zone_google)]) + :availability_zones => [FactoryBot.create(:availability_zone_google)]) google_tenant = FactoryBot.create(:tenant) group = FactoryBot.create(:miq_group, :tenant => google_tenant) @google_user1 = FactoryBot.create(:user_with_email, :miq_groups => [group]) diff --git a/spec/models/miq_provision_request_template_spec.rb b/spec/models/miq_provision_request_template_spec.rb index c656544ba77..8edeeb9ec49 100644 --- a/spec/models/miq_provision_request_template_spec.rb +++ b/spec/models/miq_provision_request_template_spec.rb @@ -2,7 +2,7 @@ let(:user) { FactoryBot.create(:user) } let(:template) do FactoryBot.create(:template_vmware, - :ext_management_system => FactoryBot.create(:ems_vmware_with_authentication)) + :ext_management_system => FactoryBot.create(:ems_vmware_with_authentication)) end let(:parent_svc) { FactoryBot.create(:service, :guid => SecureRandom.uuid, :options => {:dialog => {}}) } let(:bundle_parent_svc) do @@ -10,8 +10,8 @@ end let(:service_resource) do FactoryBot.create(:service_resource, - :resource_type => 'MiqRequest', - :resource_id => service_template_request.id) + :resource_type => 'MiqRequest', + :resource_id => service_template_request.id) end let(:service_template) do FactoryBot.create(:service_template) @@ -21,44 +21,44 @@ end let(:service_template_resource) do FactoryBot.create(:service_resource, - :resource_type => 'ServiceTemplate', - :resource_id => service_template.id) + :resource_type => 'ServiceTemplate', + :resource_id => service_template.id) end let(:bundle_service_template_resource) do FactoryBot.create(:service_resource, - :resource_type => 'ServiceTemplate', - :resource_id => bundle_service_template.id) + :resource_type => 'ServiceTemplate', + :resource_id => bundle_service_template.id) end let(:service_template_request) { FactoryBot.create(:service_template_provision_request, :requester => user) } let(:service_task) do FactoryBot.create(:service_template_provision_task, - :miq_request => service_template_request, - :options => {:service_resource_id => service_resource.id}) + :miq_request => service_template_request, + :options => {:service_resource_id => service_resource.id}) end let(:parent_service_task) do FactoryBot.create(:service_template_provision_task, - :status => 'Ok', - :state => 'pending', - 
:request_type => 'clone_to_service', - :miq_request => service_template_request, - :options => {:service_resource_id => service_template_resource.id}) + :status => 'Ok', + :state => 'pending', + :request_type => 'clone_to_service', + :miq_request => service_template_request, + :options => {:service_resource_id => service_template_resource.id}) end let(:bundle_service_task) do FactoryBot.create(:service_template_provision_task, - :status => 'Ok', - :state => 'pending', - :request_type => 'clone_to_service', - :miq_request => service_template_request, - :options => {:service_resource_id => bundle_service_template_resource.id}) + :status => 'Ok', + :state => 'pending', + :request_type => 'clone_to_service', + :miq_request => service_template_request, + :options => {:service_resource_id => bundle_service_template_resource.id}) end let(:provision_request_template) do FactoryBot.create(:miq_provision_request_template, - :requester => user, - :src_vm_id => template.id, - :options => { - :src_vm_id => template.id, - :service_resource_id => service_resource.id - }) + :requester => user, + :src_vm_id => template.id, + :options => { + :src_vm_id => template.id, + :service_resource_id => service_resource.id + }) end describe '#create_tasks_for_service' do diff --git a/spec/models/miq_provision_virt_workflow_spec.rb b/spec/models/miq_provision_virt_workflow_spec.rb index fd78aef184f..1a9349d5b83 100644 --- a/spec/models/miq_provision_virt_workflow_spec.rb +++ b/spec/models/miq_provision_virt_workflow_spec.rb @@ -169,7 +169,7 @@ context "#validate email formatting" do context "with specific format regex" do - let(:regex) { {:required_regex => %r{\A[\w!#$\%&'*+/=?`\{|\}~^-]+(?:\.[\w!#$\%&'*+/=?`\{|\}~^-]+)*@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}\Z}i} } + let(:regex) { {:required_regex => %r{\A[\w!#%&'*+/=?`\{|\}~^-]+(?:\.[\w!$%&'*+/=?`\{|\}~^-]+)*@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}\Z}i} } let(:value_email) { 'n@test.com' } let(:value_no_email) { 'n' } @@ -206,7 +206,7 @@ end context "#validate_memory_reservation" do - let(:values) { {:vm_memory => %w(1024 1024)} } + let(:values) { {:vm_memory => %w[1024 1024]} } it "no size" do expect(workflow.validate_memory_reservation(nil, values, {}, {}, nil)).to be_nil diff --git a/spec/models/miq_provision_workflow_spec.rb b/spec/models/miq_provision_workflow_spec.rb index dafb3c14116..ce1df4fca39 100644 --- a/spec/models/miq_provision_workflow_spec.rb +++ b/spec/models/miq_provision_workflow_spec.rb @@ -1,4 +1,4 @@ -silence_warnings { MiqProvisionWorkflow.const_set("DIALOGS_VIA_AUTOMATE", false) } +silence_warnings { MiqProvisionWorkflow.const_set(:DIALOGS_VIA_AUTOMATE, false) } RSpec.describe MiqProvisionWorkflow do let(:admin) { FactoryBot.create(:user_admin) } @@ -14,7 +14,8 @@ it "should not create an MiqRequest when calling from_ws" do expect do ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( - "1.0", admin, "template", "target", false, "cc|001|environment|test", "") + "1.0", admin, "template", "target", false, "cc|001|environment|test", "" + ) end.to raise_error(RuntimeError) end end @@ -41,7 +42,8 @@ it "should create an MiqRequest when calling from_ws" do FactoryBot.create(:classification_cost_center_with_tags) request = ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( - "1.0", admin, "template", "target", false, "cc|001|environment|test", "") + "1.0", admin, "template", "target", false, "cc|001|environment|test", "" + ) expect(request).to be_a_kind_of(MiqRequest) expect(request.options[:vm_tags]).to 
eq([Classification.lookup_by_name("cc/001").id]) @@ -51,7 +53,8 @@ FactoryBot.create(:classification_cost_center_with_tags) request = ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( "1.1", admin, {'name' => 'template'}, {'vm_name' => 'spec_test'}, nil, - {'cc' => '001', 'environment' => 'test'}, nil, nil, nil) + {'cc' => '001', 'environment' => 'test'}, nil, nil, nil + ) expect(request).to be_a_kind_of(MiqRequest) expect(request.options[:vm_tags]).to eq([Classification.lookup_by_name("cc/001").id]) @@ -62,7 +65,8 @@ request = ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( "1.1", admin, {'name' => 'template'}, {'vm_name' => 'spec_test', 'root_password' => password_input.dup}, # dup because it's mutated {'owner_email' => 'admin'}, {'owner_first_name' => 'test'}, - {'owner_last_name' => 'test'}, nil, nil, nil, nil) + {'owner_last_name' => 'test'}, nil, nil, nil, nil + ) expect(ManageIQ::Password.encrypted?(request.options[:root_password])).to be_truthy expect(ManageIQ::Password.decrypt(request.options[:root_password])).to eq(password_input) @@ -71,7 +75,8 @@ it "should set values when extra '|' are passed in for multiple values" do request = ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( "1.1", admin, {'name' => 'template'}, {'vm_name' => 'spec_test'}, - nil, nil, {'abc' => 'tr|ue', 'blah' => 'na|h'}, nil, nil) + nil, nil, {'abc' => 'tr|ue', 'blah' => 'na|h'}, nil, nil + ) expect(request.options[:ws_values]).to include(:blah => "na|h") end @@ -80,7 +85,8 @@ Vmdb::Deprecation.silenced do request = ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( "1.1", admin, {'name' => 'template'}, {'vm_name' => 'spec_test'}, - nil, nil, "abc=true", nil, nil) + nil, nil, "abc=true", nil, nil + ) expect(request.options[:ws_values]).to include(:abc => "true") end @@ -90,7 +96,8 @@ Vmdb::Deprecation.silenced do request = ManageIQ::Providers::Vmware::InfraManager::ProvisionWorkflow.from_ws( "1.1", admin, "name=template", "vm_name=spec_test", - nil, nil, "abc=true", nil, nil) + nil, nil, "abc=true", nil, nil + ) expect(request.options[:ws_values]).to include(:abc => "true") end diff --git a/spec/models/miq_queue_spec.rb b/spec/models/miq_queue_spec.rb index e8f728d5e20..cda03fa492a 100644 --- a/spec/models/miq_queue_spec.rb +++ b/spec/models/miq_queue_spec.rb @@ -424,27 +424,27 @@ end it "should accept non-Array args (for now)" do - begin - class MiqQueueSpecNonArrayArgs - def self.some_method(single_arg) - single_arg - end + + class MiqQueueSpecNonArrayArgs + def self.some_method(single_arg) + single_arg end + end - msg = MiqQueue.put( - :class_name => "MiqQueueSpecNonArrayArgs", - :method_name => "some_method", - :args => "not_an_array" - ) + msg = MiqQueue.put( + :class_name => "MiqQueueSpecNonArrayArgs", + :method_name => "some_method", + :args => "not_an_array" + ) + + msg_from_db = MiqQueue.find(msg.id) + expect(msg_from_db.args).to eq(["not_an_array"]) - msg_from_db = MiqQueue.find(msg.id) - expect(msg_from_db.args).to eq(["not_an_array"]) + _, _, result = msg_from_db.deliver + expect(result).to eq "not_an_array" + ensure + Object.send(:remove_const, :MiqQueueSpecNonArrayArgs) - _, _, result = msg_from_db.deliver - expect(result).to eq "not_an_array" - ensure - Object.send(:remove_const, :MiqQueueSpecNonArrayArgs) - end end it "defaults :args" do @@ -1050,7 +1050,7 @@ def queue_items expect(YAML).not_to receive(:load_file).with(MiqQueue::MESSAGING_CONFIG_FILE) - expect(ENV["MESSAGING_PASSWORD"]).to be_encrypted + 
expect(ENV.fetch("MESSAGING_PASSWORD", nil)).to be_encrypted expect(MiqQueue.send(:messaging_client_options)).to eq( :encoding => "json", :host => "server.example.com", @@ -1064,8 +1064,7 @@ def queue_items it "with SSL enabled" do stub_const("ENV", env_vars.to_h.merge("MESSAGING_PASSWORD" => "password", - "MESSAGING_SSL_CA" => "/path/root.crt" - )) + "MESSAGING_SSL_CA" => "/path/root.crt")) expect(YAML).not_to receive(:load_file).with(MiqQueue::MESSAGING_CONFIG_FILE) diff --git a/spec/models/miq_remote_console_worker/runner_spec.rb b/spec/models/miq_remote_console_worker/runner_spec.rb index 9c837db395e..19f08131271 100644 --- a/spec/models/miq_remote_console_worker/runner_spec.rb +++ b/spec/models/miq_remote_console_worker/runner_spec.rb @@ -18,6 +18,7 @@ app.instance_variable_get(:@transmitter).kill loop do break if !app.instance_variable_get(:@transmitter).alive? + sleep 0.1 end subject.check_internal_thread diff --git a/spec/models/miq_report/async_spec.rb b/spec/models/miq_report/async_spec.rb index e47bc988c2a..0ebaa8e43c0 100644 --- a/spec/models/miq_report/async_spec.rb +++ b/spec/models/miq_report/async_spec.rb @@ -7,7 +7,7 @@ :title => "Custom VM report", :rpt_group => "Custom", :rpt_type => "Custom", - :db => "ManageIQ::Providers::InfraManager::Vm", + :db => "ManageIQ::Providers::InfraManager::Vm" ) end diff --git a/spec/models/miq_report/charting_spec.rb b/spec/models/miq_report/charting_spec.rb index f648ecd4422..3ac7a438f0d 100644 --- a/spec/models/miq_report/charting_spec.rb +++ b/spec/models/miq_report/charting_spec.rb @@ -7,14 +7,14 @@ 5.times do |i| vm = FactoryBot.build(:vm_vmware) - vm.evm_owner_id = @user.id if i > 2 + vm.evm_owner_id = @user.id if i > 2 vm.miq_group_id = @user.current_group.id if vm.evm_owner_id || (i > 1) vm.save end @report_theme = 'miq' @show_title = true - @options = MiqReport.graph_options({ :title => "CPU (Mhz)", :type => "Line", :columns => ["col"] }) + @options = MiqReport.graph_options({:title => "CPU (Mhz)", :type => "Line", :columns => ["col"]}) allow(ManageIQ::Reporting::Charting).to receive(:backend).and_return(:c3) allow(ManageIQ::Reporting::Charting).to receive(:format).and_return(:c3) @@ -22,8 +22,8 @@ context 'graph_options' do it 'returns a hash with options' do - expect(MiqReport.graph_options({ :title => "CPU (Mhz)", :type => "Line", :columns => ["col"] })).to include( - :type => "Line", + expect(MiqReport.graph_options({:title => "CPU (Mhz)", :type => "Line", :columns => ["col"]})).to include( + :type => "Line", :title => "CPU (Mhz)" ) end diff --git a/spec/models/miq_report/formats_spec.rb b/spec/models/miq_report/formats_spec.rb index 6ee03fae7c8..7638d0d0e03 100644 --- a/spec/models/miq_report/formats_spec.rb +++ b/spec/models/miq_report/formats_spec.rb @@ -1,11 +1,11 @@ RSpec.describe MiqReport::Formats do describe '.default_format_details_for' do let(:human_mb_details) do - { :description => 'Suffixed Megabytes (MB, GB)', - :columns => nil, - :sub_types => [:megabytes], - :function => {:name => 'mbytes_to_human_size'}, - :precision => 1 } + {:description => 'Suffixed Megabytes (MB, GB)', + :columns => nil, + :sub_types => [:megabytes], + :function => {:name => 'mbytes_to_human_size'}, + :precision => 1} end it 'returns human MB format details for memory_mb' do diff --git a/spec/models/miq_report/formatters/csv_spec.rb b/spec/models/miq_report/formatters/csv_spec.rb index 04fd208c12d..f6f9991a43a 100644 --- a/spec/models/miq_report/formatters/csv_spec.rb +++ b/spec/models/miq_report/formatters/csv_spec.rb @@ -14,7 +14,6 @@ 
FactoryBot.create(:miq_report_filesystem).tap do |report| report.table = Ruport::Data::Table.new(:data => table_data, :column_names => report.cols) report.col_options = col_options - report end end @@ -32,7 +31,7 @@ context "hidden columns" do let(:col_options) do - { 'name' => {:hidden => true}, 'file_version' => {:hidden => true} } + {'name' => {:hidden => true}, 'file_version' => {:hidden => true}} end let(:csv_output) do @@ -62,8 +61,8 @@ end context "first column starts with '#{prefix}' with '(' present" do - let(:name_1) { %Q{#{prefix}HYPERLINK("example.com/vm/B1","Link to B1")} } - let(:csv_name_1) { %Q{"'#{prefix}HYPERLINK(""example.com/vm/B1"",""Link to B1"")"} } + let(:name_1) { %{#{prefix}HYPERLINK("example.com/vm/B1","Link to B1")} } + let(:csv_name_1) { %{"'#{prefix}HYPERLINK(""example.com/vm/B1"",""Link to B1"")"} } it "escapes the column data" do expect(miq_report_filesystem.to_csv).to eq(csv_output) @@ -71,7 +70,7 @@ end context "first column starts with '#{prefix}' without '!' or '(' present" do - let(:name_1) { "#{prefix}B1" } + let(:name_1) { "#{prefix}B1" } let(:csv_name_1) { "#{prefix}B1" } it "does not escape column data" do diff --git a/spec/models/miq_report/generator_spec.rb b/spec/models/miq_report/generator_spec.rb index 04319464036..0d6ffeb4b45 100644 --- a/spec/models/miq_report/generator_spec.rb +++ b/spec/models/miq_report/generator_spec.rb @@ -28,7 +28,8 @@ :end_offset => 0, :trend_db => "HostPerformance", :interval => "daily", - :target_pcts => [70, 80, 90]}) + :target_pcts => [70, 80, 90]} + ) end it "returns one row for each host" do @@ -94,7 +95,7 @@ :title => "Custom VM report", :rpt_group => "Custom", :rpt_type => "Custom", - :db => "ManageIQ::Providers::InfraManager::Vm", + :db => "ManageIQ::Providers::InfraManager::Vm" ) end @@ -146,32 +147,31 @@ describe "#cols_for_report" do it "uses cols" do - rpt = MiqReport.new(:db => "VmOrTemplate", :cols => %w(vendor version name)) - expect(rpt.cols_for_report).to eq(%w(vendor version name)) + rpt = MiqReport.new(:db => "VmOrTemplate", :cols => %w[vendor version name]) + expect(rpt.cols_for_report).to eq(%w[vendor version name]) end it "uses include" do - rpt = MiqReport.new(:db => "VmOrTemplate", :include => {"host" => { "columns" => %w(name hostname guid)}}) - expect(rpt.cols_for_report).to eq(%w(host.name host.hostname host.guid)) + rpt = MiqReport.new(:db => "VmOrTemplate", :include => {"host" => {"columns" => %w[name hostname guid]}}) + expect(rpt.cols_for_report).to eq(%w[host.name host.hostname host.guid]) end it "uses extra_cols" do rpt = MiqReport.new(:db => "VmOrTemplate") - expect(rpt.cols_for_report(%w(vendor))).to eq(%w(vendor)) + expect(rpt.cols_for_report(%w[vendor])).to eq(%w[vendor]) end it "derives include" do - rpt = MiqReport.new(:db => "VmOrTemplate", :cols => %w(vendor), :col_order =>%w(host.name vendor)) - expect(rpt.cols_for_report).to match_array(%w(vendor host.name)) + rpt = MiqReport.new(:db => "VmOrTemplate", :cols => %w[vendor], :col_order => %w[host.name vendor]) + expect(rpt.cols_for_report).to match_array(%w[vendor host.name]) end it "works with col, col_order and include together" do rpt = MiqReport.new(:db => "VmOrTemplate", - :cols => %w(vendor), - :col_order => %w(host.name host.hostname vendor), - :include => {"host" => { "columns" => %w(name hostname)}} - ) - expect(rpt.cols_for_report).to match_array(%w(vendor host.name host.hostname)) + :cols => %w[vendor], + :col_order => %w[host.name host.hostname vendor], + :include => {"host" => {"columns" => %w[name hostname]}}) + 
expect(rpt.cols_for_report).to match_array(%w[vendor host.name host.hostname]) end end @@ -184,20 +184,20 @@ it "includes virtual_includes from virtual_attributes that are not sql friendly" do rpt = MiqReport.new(:db => "VmOrTemplate", - :cols => %w(name platform)) + :cols => %w[name platform]) expect(rpt.get_include_for_find).to eq(:platform => {}) end it "does not include sql friendly virtual_attributes" do rpt = MiqReport.new(:db => "VmOrTemplate", - :cols => %w(name v_total_snapshots)) + :cols => %w[name v_total_snapshots]) expect(rpt.get_include_for_find).to be_nil end it "uses include and include_as_hash" do rpt = MiqReport.new(:db => "VmOrTemplate", - :cols => %w(name platform), - :include => {:host => {:columns => %w(name)}, :storage => {:columns => %w(name)}}, + :cols => %w[name platform], + :include => {:host => {:columns => %w[name]}, :storage => {:columns => %w[name]}}, :include_for_find => {:snapshots => {}}) expect(rpt.get_include_for_find).to eq(:platform => {}, :host => {}, :storage => {}, :snapshots => {}) end @@ -207,7 +207,7 @@ rpt = MiqReport.new(:db => "VmOrTemplate", :include => {}, :cols => %w[name v_datastore_path], - :col_order => %w(name host.name storage.name), + :col_order => %w[name host.name storage.name], :include_for_find => {:snapshots => {}}) expect(rpt.get_include_for_find).to eq(:v_datastore_path => {}, :host => {}, :storage => {}, :snapshots => {}) end diff --git a/spec/models/miq_report/import_export_spec.rb b/spec/models/miq_report/import_export_spec.rb index 242468e16f7..20eb60e0f29 100644 --- a/spec/models/miq_report/import_export_spec.rb +++ b/spec/models/miq_report/import_export_spec.rb @@ -6,10 +6,10 @@ :name => "Test Report", :rpt_type => "Custom", :tz => "Eastern Time (US & Canada)", - :col_order => %w(name boot_time disks_aligned), - :cols => %w(name boot_time disks_aligned), + :col_order => %w[name boot_time disks_aligned], + :cols => %w[name boot_time disks_aligned], :db_options => {:rpt_type => "ChargebackContainerProject"}, - "include" => {"columns" => %w(col1 col2)}, + "include" => {"columns" => %w[col1 col2]}, :user_id => @some_user.id, :miq_group_id => @some_group.id) end diff --git a/spec/models/miq_report/search_spec.rb b/spec/models/miq_report/search_spec.rb index ad08c97db4a..48dc8a6cc8a 100644 --- a/spec/models/miq_report/search_spec.rb +++ b/spec/models/miq_report/search_spec.rb @@ -41,7 +41,7 @@ end it "detects a sortable virtual column in a list" do - @miq_report.sortby = %w(name archived id) + @miq_report.sortby = %w[name archived id] order = @miq_report.get_order_info expect(order).to be_truthy expect(stringify_arel(order).join(",")).to match(/name.*ems_id.*null.*id/i) @@ -54,7 +54,7 @@ end it "detects an unsortable virtual column in a list" do - @miq_report.sortby = %w(name is_evm_appliance id) + @miq_report.sortby = %w[name is_evm_appliance id] order = @miq_report.get_order_info expect(order).to be_falsy end @@ -64,14 +64,14 @@ @miq_report.sortby = ["name", "operating_system.product_name"] order = @miq_report.get_order_info expect(order).to be_truthy - expect(stringify_arel(order)).to eq(%w{LOWER("vms"."name") LOWER("operating_systems"."product_name")}) + expect(stringify_arel(order)).to eq(%w[LOWER("vms"."name") LOWER("operating_systems"."product_name")]) end it "works with association where table_name can not be guessed" do @miq_report.sortby = ["name", "linux_initprocesses.name", "evm_owner.name"] order = @miq_report.get_order_info expect(order).to be_truthy - expect(stringify_arel(order)).to eq(%w{LOWER("vms"."name") 
LOWER("system_services"."name") LOWER("users"."name")}) + expect(stringify_arel(order)).to eq(%w[LOWER("vms"."name") LOWER("system_services"."name") LOWER("users"."name")]) end end diff --git a/spec/models/miq_report/seeding_spec.rb b/spec/models/miq_report/seeding_spec.rb index 8f98ca22a4f..34115e31a40 100644 --- a/spec/models/miq_report/seeding_spec.rb +++ b/spec/models/miq_report/seeding_spec.rb @@ -14,8 +14,8 @@ before do FileUtils.mkdir_p(reports_dir) - FileUtils.cp_r(Rails.root.join("product/reports/520_Events - Policy"), reports_dir, preserve: true) - FileUtils.cp_r(Rails.root.join("product/compare"), tmpdir.join("product"), preserve: true) + FileUtils.cp_r(Rails.root.join("product/reports/520_Events - Policy"), reports_dir, :preserve => true) + FileUtils.cp_r(Rails.root.join("product/compare"), tmpdir.join("product"), :preserve => true) stub_const("MiqReport::Seeding::REPORT_DIR", reports_dir) stub_const("MiqReport::Seeding::COMPARE_DIR", compare_dir) @@ -38,7 +38,7 @@ expect(MiqReport.where(:name => "Testing Compare Name")).to_not exist # Add new records - FileUtils.cp_r(data_dir, tmpdir, preserve: true) + FileUtils.cp_r(data_dir, tmpdir, :preserve => true) described_class.seed @@ -56,7 +56,7 @@ :file_mtime => File.mtime(report_yml).utc.round, :db => "Vm", :cols => ["vendor_display", "name"], - :include => {"operating_system" => {"columns" => ["product_name", "name"]}}, + :include => {"operating_system" => {"columns" => ["product_name", "name"]}} ) expect(compare).to have_attributes( @@ -78,8 +78,8 @@ # The mtime rounding is granular to the second, so need to be higher # than that for test purposes - FileUtils.touch(report_yml, mtime: 1.second.from_now.to_time) - FileUtils.touch(compare_yml, mtime: 1.second.from_now.to_time) + FileUtils.touch(report_yml, :mtime => 1.second.from_now.to_time) + FileUtils.touch(compare_yml, :mtime => 1.second.from_now.to_time) described_class.seed diff --git a/spec/models/miq_report_result/purging_spec.rb b/spec/models/miq_report_result/purging_spec.rb index db7cb866ded..766f57f4ce7 100644 --- a/spec/models/miq_report_result/purging_spec.rb +++ b/spec/models/miq_report_result/purging_spec.rb @@ -15,16 +15,16 @@ stub_settings(settings) @rr1 = [ - FactoryBot.create(:miq_report_result, :miq_report_id => 1, :created_on => (6.months + 1.days).to_i.seconds.ago.utc), - FactoryBot.create(:miq_report_result, :miq_report_id => 1, :created_on => (6.months - 1.days).to_i.seconds.ago.utc) + FactoryBot.create(:miq_report_result, :miq_report_id => 1, :created_on => (6.months + 1.day).to_i.seconds.ago.utc), + FactoryBot.create(:miq_report_result, :miq_report_id => 1, :created_on => (6.months - 1.day).to_i.seconds.ago.utc) ] @rr2 = [ FactoryBot.create(:miq_report_result, :miq_report_id => 2, :created_on => (6.months + 2.days).to_i.seconds.ago.utc), - FactoryBot.create(:miq_report_result, :miq_report_id => 2, :created_on => (6.months + 1.days).to_i.seconds.ago.utc), - FactoryBot.create(:miq_report_result, :miq_report_id => 2, :created_on => (6.months - 1.days).to_i.seconds.ago.utc) + FactoryBot.create(:miq_report_result, :miq_report_id => 2, :created_on => (6.months + 1.day).to_i.seconds.ago.utc), + FactoryBot.create(:miq_report_result, :miq_report_id => 2, :created_on => (6.months - 1.day).to_i.seconds.ago.utc) ] @rr_orphaned = [ - FactoryBot.create(:miq_report_result, :miq_report_id => nil, :created_on => (6.months - 1.days).to_i.seconds.ago.utc) + FactoryBot.create(:miq_report_result, :miq_report_id => nil, :created_on => (6.months - 1.day).to_i.seconds.ago.utc) ] 
end diff --git a/spec/models/miq_report_result_spec.rb b/spec/models/miq_report_result_spec.rb index 41f506b9a1f..8e0bf163261 100644 --- a/spec/models/miq_report_result_spec.rb +++ b/spec/models/miq_report_result_spec.rb @@ -9,16 +9,16 @@ task = FactoryBot.create(:miq_task) EvmSpecHelper.local_miq_server report = MiqReport.create( - :name => "VMs based on Disk Type", - :title => "VMs using thin provisioned disks", - :rpt_group => "Custom", - :rpt_type => "Custom", - :db => "VmInfra", - :cols => ["name"], - :col_order => ["name"], - :headers => ["Name"], - :order => "Ascending", - :template_type => "report" + :name => "VMs based on Disk Type", + :title => "VMs using thin provisioned disks", + :rpt_group => "Custom", + :rpt_type => "Custom", + :db => "VmInfra", + :cols => ["name"], + :col_order => ["name"], + :headers => ["Name"], + :order => "Ascending", + :template_type => "report" ) report.generate_table(:userid => "admin") task.miq_report_result = report.build_create_results({:userid => "admin"}, task.id) @@ -103,7 +103,7 @@ @report_theme = 'miq' @show_title = true - @options = MiqReport.graph_options({ :title => "CPU (Mhz)", :type => "Line", :columns => ["col"] }) + @options = MiqReport.graph_options({:title => "CPU (Mhz)", :type => "Line", :columns => ["col"]}) allow(ManageIQ::Reporting::Charting).to receive(:detect_available_plugin).and_return(ManageIQ::Reporting::C3Charting) end @@ -131,7 +131,7 @@ report_result = rpt.build_create_results(:userid => "test") report_result.report - report_result.report.extras[:grouping] = { "extra data" => "not saved" } + report_result.report.extras[:grouping] = {"extra data" => "not saved"} report_result.save result_reload = MiqReportResult.last @@ -220,7 +220,7 @@ end it "can serialize and deserialize a CSV" do - csv = CSV.generate { |c| c << %w(foo bar) << %w(baz qux) } + csv = CSV.generate { |c| c << %w[foo bar] << %w[baz qux] } report_result = described_class.new report_result.report_results = csv @@ -229,16 +229,16 @@ end it "can serialize and deserialize a plain text report" do - txt = < u2.userid) expect(MiqReportResult.counts_by_userid).to match_array([ - {:userid => u1.userid, :count => 2}, - {:userid => u2.userid, :count => 1} - ]) + {:userid => u1.userid, :count => 2}, + {:userid => u2.userid, :count => 1} + ]) end end end diff --git a/spec/models/miq_report_spec.rb b/spec/models/miq_report_spec.rb index a92b1d8b895..aa605458cf4 100644 --- a/spec/models/miq_report_spec.rb +++ b/spec/models/miq_report_spec.rb @@ -20,8 +20,8 @@ :rpt_group => "Custom", :rpt_type => "Custom", :db => base_report == "Host" ? 
"Host" : "ManageIQ::Providers::InfraManager::Vm", - :include => {custom_attributes_field.to_s => {"columns" => %w(name value)}}, - :col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name), + :include => {custom_attributes_field.to_s => {"columns" => %w[name value]}}, + :col_order => %w[miq_custom_attributes.name miq_custom_attributes.value name], :headers => ["EVM Custom Attribute Name", "EVM Custom Attribute Value", "Name"], :order => "Ascending", :sortby => ["miq_custom_attributes.name"] @@ -100,19 +100,19 @@ @user = FactoryBot.create(:user_with_group) @registry = FactoryBot.create(:registry_item, :name => "HKLM\\SOFTWARE\\WindowsFirewall : EnableFirewall", - :data => 0) + :data => 0) @vm = FactoryBot.create(:vm_vmware, :registry_items => [@registry]) EvmSpecHelper.local_miq_server end let(:report) do MiqReport.new(:name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom", - :rpt_type => "Custom", :db => "Vm", :cols => %w(name), - :conditions => MiqExpression.new("=" => {"regkey" => "HKLM\\SOFTWARE\\WindowsFirewall", - "regval" => "EnableFirewall", "value" => "0"}), - :col_order => %w(name registry_items.data registry_items.name registry_items.value_name), - :headers => ["Name", "Registry Data", "Registry Name", "Registry Value Name"], - :order => "Ascending") + :rpt_type => "Custom", :db => "Vm", :cols => %w[name], + :conditions => MiqExpression.new("=" => {"regkey" => "HKLM\\SOFTWARE\\WindowsFirewall", + "regval" => "EnableFirewall", "value" => "0"}), + :col_order => %w[name registry_items.data registry_items.name registry_items.value_name], + :headers => ["Name", "Registry Data", "Registry Name", "Registry Value Name"], + :order => "Ascending") end it "can generate a report filtered by registry items" do @@ -147,7 +147,7 @@ :rpt_type => "Custom", :db => "ManageIQ::Providers::InfraManager::Vm", :cols => %w[name num_disks], - :include => { "miq_provision_template" => { "columns" => %w[num_hard_disks] }}, + :include => {"miq_provision_template" => {"columns" => %w[num_hard_disks]}}, :col_order => %w[name miq_provision_template.num_hard_disks num_disks], :headers => ["Name", "Provisioned From Template Number of Hard Disks", "Number of Disks"], :order => "Ascending") @@ -193,9 +193,9 @@ MiqReport.new( :name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom", :rpt_type => "Custom", :db => "ManageIQ::Providers::InfraManager::Vm", - :cols => %w(name virtual_custom_attribute_kubernetes_io_hostname virtual_custom_attribute_manageiq_org), + :cols => %w[name virtual_custom_attribute_kubernetes_io_hostname virtual_custom_attribute_manageiq_org], :include => {:custom_attributes => {}}, - :col_order => %w(name virtual_custom_attribute_kubernetes_io_hostname virtual_custom_attribute_manageiq_org), + :col_order => %w[name virtual_custom_attribute_kubernetes_io_hostname virtual_custom_attribute_manageiq_org], :headers => ["Name", custom_column_key_1, custom_column_key_1], :order => "Ascending" ) @@ -205,14 +205,14 @@ let(:report) do MiqReport.new( :name => "Custom VM report", :title => "Custom VM report", :rpt_group => "Custom", :rpt_type => "Custom", - :db => "ContainerImage", - :cols => ['name', - "virtual_custom_attribute_CATTR#{CustomAttributeMixin::SECTION_SEPARATOR}docker_labels", - "virtual_custom_attribute_CATTR#{CustomAttributeMixin::SECTION_SEPARATOR}labels"], - :include => {:custom_attributes => {}}, - :col_order => %w(name CATTR), - :headers => ["Name", custom_column_key_1, custom_column_key_1], - :order => "Ascending" + 
:db => "ContainerImage", + :cols => ['name', + "virtual_custom_attribute_CATTR#{CustomAttributeMixin::SECTION_SEPARATOR}docker_labels", + "virtual_custom_attribute_CATTR#{CustomAttributeMixin::SECTION_SEPARATOR}labels"], + :include => {:custom_attributes => {}}, + :col_order => %w[name CATTR], + :headers => ["Name", custom_column_key_1, custom_column_key_1], + :order => "Ascending" ) end @@ -288,9 +288,9 @@ end let(:exp_3) do - MiqExpression.new("and" => [{"=" => { "field" => "#{vm_2.type}-active", "value" => "true"}}, - {"or" => [{"IS NOT EMPTY" => { "field" => "#{vm_2.type}-name", "value" => ""}}, - {"IS NOT EMPTY" => { "field" => "#{vm_2.type}-#{virtual_column_key_3}"}}]}]) + MiqExpression.new("and" => [{"=" => {"field" => "#{vm_2.type}-active", "value" => "true"}}, + {"or" => [{"IS NOT EMPTY" => {"field" => "#{vm_2.type}-name", "value" => ""}}, + {"IS NOT EMPTY" => {"field" => "#{vm_2.type}-#{virtual_column_key_3}"}}]}]) end it "generates report with dynamic custom attributes with filtering with field which is not listed in cols" do @@ -354,7 +354,7 @@ host = FactoryBot.create(:host) vm1 = FactoryBot.create(:vm_vmware, :host => host) allow(vm1).to receive(:archived?).and_return(false) - vm2 = FactoryBot.create(:vm_vmware, :host => host) + vm2 = FactoryBot.create(:vm_vmware, :host => host) allow(vm2).to receive(:archived?).and_return(false) allow(Vm).to receive(:find_by).and_return(vm1) @@ -442,7 +442,7 @@ report = MiqReport.new(:db => "Vm") results, attrs = report.paged_view_search( :only => ["name"], - :userid => user.userid, + :userid => user.userid ) expect(results.length).to eq 1 expect(results.data.collect(&:name)).to eq [vm1.name] @@ -466,7 +466,7 @@ vm2.tag_with(tag, :ns => "*") allow(User).to receive_messages(:server_timezone => "UTC") - report = MiqReport.new(:db => "Vm", :sortby => %w(storage.name name), :order => "Ascending", :include => {"storage" => {"columns" => ["name"]}}) + report = MiqReport.new(:db => "Vm", :sortby => %w[storage.name name], :order => "Ascending", :include => {"storage" => {"columns" => ["name"]}}) options = { :only => ["name", "storage.name"], :userid => user.userid, @@ -489,9 +489,9 @@ FactoryBot.create(:vm_vmware, :name => "B", :host => FactoryBot.create(:host, :name => "A")) FactoryBot.create(:vm_vmware, :name => "A", :host => FactoryBot.create(:host, :name => "B")) - report = MiqReport.new(:db => "Vm", :sortby => %w(host_name name), :order => "Descending") + report = MiqReport.new(:db => "Vm", :sortby => %w[host_name name], :order => "Descending") options = { - :only => %w(name host_name), + :only => %w[name host_name], :page => 2, } @@ -513,7 +513,7 @@ value: "HA" ' - results, _attrs = report.paged_view_search(:only => %w(name host_name), :filter => filter) + results, _attrs = report.paged_view_search(:only => %w[name host_name], :filter => filter) expect(results.length).to eq 1 expect(results.data.first["name"]).to eq "VA" expect(results.data.first["host_name"]).to eq "HA" @@ -546,7 +546,7 @@ value: "HA" ' - results, attrs = report.paged_view_search(:only => %w(name host_name), :userid => user.userid, :filter => filter) + results, attrs = report.paged_view_search(:only => %w[name host_name], :userid => user.userid, :filter => filter) expect(results.length).to eq 1 expect(results.data.first["name"]).to eq "VAA" expect(results.data.first["host_name"]).to eq "HAA" @@ -569,7 +569,7 @@ value: "RPA" ' - results, _attrs = report.paged_view_search(:only => %w(name), :filter => filter) + results, _attrs = report.paged_view_search(:only => %w[name], 
:filter => filter) expect(results.length).to eq 1 expect(results.data.first["name"]).to eq "VA" end @@ -582,12 +582,12 @@ :name => "VMs", :title => "Virtual Machines", :db => "Vm", - :cols => %w(name host_name v_host_vmm_product), - :include => {"host" => {"columns" => %w(name vmm_product)}}, - :col_order => %w(name host.name host.vmm_product), + :cols => %w[name host_name v_host_vmm_product], + :include => {"host" => {"columns" => %w[name vmm_product]}}, + :col_order => %w[name host.name host.vmm_product], :headers => ["Name", "Host", "Host VMM Product"], :order => "Ascending", - :sortby => ["host_name"], + :sortby => ["host_name"] ) options = { @@ -596,8 +596,8 @@ } results, _attrs = report.paged_view_search(options) expect(results.length).to eq 2 - expect(results.data.collect { |rec| rec.data["host_name"] }).to eq(%w(HA HB)) - expect(results.data.collect { |rec| rec.data["v_host_vmm_product"] }).to eq(%w(ESX ESX)) + expect(results.data.collect { |rec| rec.data["host_name"] }).to eq(%w[HA HB]) + expect(results.data.collect { |rec| rec.data["v_host_vmm_product"] }).to eq(%w[ESX ESX]) end end @@ -703,7 +703,7 @@ before do EvmSpecHelper.local_miq_server - rollup_params = {:capture_interval_name => 'daily', :time_profile_id => time_profile.id } + rollup_params = {:capture_interval_name => 'daily', :time_profile_id => time_profile.id} add_metric_rollups_for([vm], first_rollup_timestamp...last_rollup_timestamp, 24.hours, rollup_params) end @@ -737,12 +737,14 @@ report = MiqReport.new( :title => "vim_perf_daily.yaml", :db => "VimPerformanceDaily", - :cols => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), - :include => { "metric_rollup" => { - "columns" => %w(cpu_usagemhz_rate_average_high_over_time_period + :cols => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], + :include => {"metric_rollup" => { + "columns" => %w[cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period derived_memory_used_high_over_time_period - derived_memory_used_low_over_time_period)}}) + derived_memory_used_low_over_time_period] + }} + ) report.generate_table(:userid => "admin") end @@ -892,8 +894,8 @@ def generate_html_row(is_even, tenant_name, formatted_values) it "uses sort_by if available" do report = MiqReport.new( :db => "Host", - :cols => %w(name hostname smart), - :col_order => %w(name hostname smart), + :cols => %w[name hostname smart], + :col_order => %w[name hostname smart], :sortby => ["hostname"] ) expect(report.sort_col).to eq(1) @@ -902,8 +904,8 @@ def generate_html_row(is_even, tenant_name, formatted_values) it "falls back to first column" do report = MiqReport.new( :db => "Host", - :cols => %w(name hostname smart), - :col_order => %w(name hostname smart), + :cols => %w[name hostname smart], + :col_order => %w[name hostname smart] ) expect(report.sort_col).to eq(0) end @@ -912,24 +914,24 @@ def generate_html_row(is_even, tenant_name, formatted_values) describe ".cols" do it "loads given value" do report = MiqReport.new( - :cols => %w(name) + :cols => %w[name] ) - expect(report.cols).to eq(%w(name)) + expect(report.cols).to eq(%w[name]) end it "falls back to col_order" do report = MiqReport.new( - :col_order => %w(miq_custom_attributes.name miq_custom_attributes.value name) + :col_order => %w[miq_custom_attributes.name miq_custom_attributes.value name] ) - expect(report.cols).to eq(%w(name)) + expect(report.cols).to eq(%w[name]) end it "allows manipulation" do report = MiqReport.new( - :col_order => %w(miq_custom_attributes.name 
miq_custom_attributes.value name), + :col_order => %w[miq_custom_attributes.name miq_custom_attributes.value name] ) report.cols << "name2" - expect(report.cols).to eq(%w(name name2)) + expect(report.cols).to eq(%w[name name2]) end end @@ -960,9 +962,9 @@ def generate_html_row(is_even, tenant_name, formatted_values) :name => "VMs", :title => "Virtual Machines", :db => "Vm", - :cols => %w(name guid hostname ems_ref vendor), - :col_order => %w(name hostname vendor guid emf_ref), - :headers => %w(Name Host Vendor Guid EMS), + :cols => %w[name guid hostname ems_ref vendor], + :col_order => %w[name hostname vendor guid emf_ref], + :headers => %w[Name Host Vendor Guid EMS], :col_options => {"guid" => {:hidden => true}, "ems_ref" => {:hidden => true}} ) end @@ -1058,7 +1060,7 @@ def user_super_admin? context "chargeback reports" do let(:hourly_rate) { 0.01 } let(:hourly_variable_tier_rate) { {:variable_rate => hourly_rate.to_s} } - let(:detail_params) { {:chargeback_rate_detail_fixed_compute_cost => { :tiers => [hourly_variable_tier_rate] } } } + let(:detail_params) { {:chargeback_rate_detail_fixed_compute_cost => {:tiers => [hourly_variable_tier_rate]}} } let!(:chargeback_rate) do FactoryBot.create(:chargeback_rate, :detail_params => detail_params) end @@ -1066,7 +1068,7 @@ def user_super_admin? { :rpt_group => "Custom", :rpt_type => "Custom", - :include => { :custom_attributes => {} }, + :include => {:custom_attributes => {}}, :group => "y", :template_type => "report", } @@ -1092,15 +1094,15 @@ def user_super_admin? :col_order => ["project_name", "image_name", "display_range", label_report_column], :headers => ["Project Name", "Image Name", "Date Range", nil], :sortby => ["project_name", "image_name", "start_date"], - :db_options => { :rpt_type => "ChargebackContainerImage", - :options => { :interval => "daily", - :interval_size => 28, - :end_interval_offset => 1, - :provider_id => "all", - :entity_id => "all", - :include_metrics => true, - :groupby => "date", - :groupby_tag => nil }}, + :db_options => {:rpt_type => "ChargebackContainerImage", + :options => {:interval => "daily", + :interval_size => 28, + :end_interval_offset => 1, + :provider_id => "all", + :entity_id => "all", + :include_metrics => true, + :groupby => "date", + :groupby_tag => nil}}, :col_options => ChargebackContainerImage.report_col_options ) ) @@ -1121,7 +1123,7 @@ def user_super_admin? :parent_ems_id => ems.id, :tag_names => "") - ChargebackRate.set_assignments(:compute, [{ :cb_rate => chargeback_rate, :label => [label, "container_image"] }]) + ChargebackRate.set_assignments(:compute, [{:cb_rate => chargeback_rate, :label => [label, "container_image"]}]) rpt = report.generate_table(:userid => "admin") expect(rpt.keys).to contain_exactly(project_name, :_total_) row = rpt[project_name][:row] @@ -1143,14 +1145,14 @@ def user_super_admin? :headers => ["Project Name", "Date Range", nil], :sortby => ["project_name", "start_date"], :db_options => {:rpt_type => "ChargebackContainerProject", - :options => { :interval => "daily", - :interval_size => 28, - :end_interval_offset => 1, - :provider_id => "all", - :entity_id => "all", - :include_metrics => true, - :groupby => "date", - :groupby_tag => nil }}, + :options => {:interval => "daily", + :interval_size => 28, + :end_interval_offset => 1, + :provider_id => "all", + :entity_id => "all", + :include_metrics => true, + :groupby => "date", + :groupby_tag => nil}}, :col_options => ChargebackContainerProject.report_col_options ) ) @@ -1162,13 +1164,13 @@ def user_super_admin? 
project = FactoryBot.create(:container_project, :name => project_name, :ext_management_system => ems, :created_on => 2.days.ago) project.labels << label project.metric_rollups << FactoryBot.create(:metric_rollup_vm_hr, - :with_data, - :timestamp => 1.day.ago, - :resource_id => project.id, - :resource_name => project.name, - :parent_ems_id => ems.id, - :tag_names => "") - ChargebackRate.set_assignments(:compute, [{ :cb_rate => chargeback_rate, :object => ems }]) + :with_data, + :timestamp => 1.day.ago, + :resource_id => project.id, + :resource_name => project.name, + :parent_ems_id => ems.id, + :tag_names => "") + ChargebackRate.set_assignments(:compute, [{:cb_rate => chargeback_rate, :object => ems}]) rpt = report.generate_table(:userid => "admin") row = rpt[project_name][:row] expect(row[label_report_column]).to eq(label_value) @@ -1189,15 +1191,15 @@ def user_super_admin? :headers => ["Vm Name", "Date Range", nil], :sortby => ["vm_name", "start_date"], :db_options => {:rpt_type => "ChargebackVm", - :options => { :interval => "daily", - :interval_size => 28, - :end_interval_offset => 1, - :provider_id => "all", - :entity_id => "all", - :include_metrics => true, - :groupby => "date", - :groupby_tag => nil, - :tag => '/managed/environment/prod'}}, + :options => {:interval => "daily", + :interval_size => 28, + :end_interval_offset => 1, + :provider_id => "all", + :entity_id => "all", + :include_metrics => true, + :groupby => "date", + :groupby_tag => nil, + :tag => '/managed/environment/prod'}}, :col_options => ChargebackVm.report_col_options ) ) @@ -1225,15 +1227,15 @@ def user_super_admin? ems_cluster = FactoryBot.create(:ems_cluster, :ext_management_system => ems) ems_cluster.hosts << host1 vm1.metric_rollups << FactoryBot.create(:metric_rollup_vm_hr, - :with_data, - :timestamp => 1.day.ago, - :resource_id => vm1.id, - :resource_name => vm1.name, - :tag_names => "environment/prod", - :parent_host_id => host1.id, - :parent_ems_cluster_id => ems_cluster.id, - :parent_ems_id => ems.id, - :parent_storage_id => storage.id) + :with_data, + :timestamp => 1.day.ago, + :resource_id => vm1.id, + :resource_name => vm1.name, + :tag_names => "environment/prod", + :parent_host_id => host1.id, + :parent_ems_cluster_id => ems_cluster.id, + :parent_ems_id => ems.id, + :parent_storage_id => storage.id) rpt = report.generate_table(:userid => "admin") row = rpt[vm_name][:row] expect(row[label_report_column]).to eq(label_value) @@ -1258,7 +1260,7 @@ def user_super_admin? 
let(:time_str_hst) { "02/07/19 08:55:03 HST" } let(:miq_task) { FactoryBot.create(:miq_task) } let(:user) { FactoryBot.create(:user, :settings => {:display => {}}) } - let(:report) { FactoryBot.create(:miq_report, :db => "Vm", :cols => %w(last_sync_on)) } + let(:report) { FactoryBot.create(:miq_report, :db => "Vm", :cols => %w[last_sync_on]) } before do EvmSpecHelper.local_miq_server diff --git a/spec/models/miq_request_spec.rb b/spec/models/miq_request_spec.rb index 3dccfa6dcb4..0ec08dd816e 100644 --- a/spec/models/miq_request_spec.rb +++ b/spec/models/miq_request_spec.rb @@ -5,20 +5,20 @@ context "CONSTANTS" do it "REQUEST_TYPES" do expected_request_types = { - :MiqProvisionRequest => {:template => "VM Provision", :clone_to_vm => "VM Clone", :clone_to_template => "VM Publish"}, - :MiqProvisionRequestTemplate => {:template => "VM Provision Template"}, - :MiqProvisionConfiguredSystemRequest => {:provision_via_foreman => "#{ui_lookup(:ui_title => 'foreman')} Provision"}, - :VmReconfigureRequest => {:vm_reconfigure => "VM Reconfigure"}, - :VmCloudReconfigureRequest => {:vm_cloud_reconfigure => "VM Cloud Reconfigure"}, - :VmMigrateRequest => {:vm_migrate => "VM Migrate"}, - :VmRetireRequest => {:vm_retire => "VM Retire"}, - :ServiceRetireRequest => {:service_retire => "Service Retire"}, - :OrchestrationStackRetireRequest => {:orchestration_stack_retire => "Orchestration Stack Retire"}, - :AutomationRequest => {:automation => "Automation"}, - :ServiceTemplateProvisionRequest => {:clone_to_service => "Service Provision"}, - :ServiceReconfigureRequest => {:service_reconfigure => "Service Reconfigure"}, - :PhysicalServerProvisionRequest => {:provision_physical_server => "Physical Server Provision"}, - :PhysicalServerFirmwareUpdateRequest => {:physical_server_firmware_update => "Physical Server Firmware Update"}, + :MiqProvisionRequest => {:template => "VM Provision", :clone_to_vm => "VM Clone", :clone_to_template => "VM Publish"}, + :MiqProvisionRequestTemplate => {:template => "VM Provision Template"}, + :MiqProvisionConfiguredSystemRequest => {:provision_via_foreman => "#{ui_lookup(:ui_title => 'foreman')} Provision"}, + :VmReconfigureRequest => {:vm_reconfigure => "VM Reconfigure"}, + :VmCloudReconfigureRequest => {:vm_cloud_reconfigure => "VM Cloud Reconfigure"}, + :VmMigrateRequest => {:vm_migrate => "VM Migrate"}, + :VmRetireRequest => {:vm_retire => "VM Retire"}, + :ServiceRetireRequest => {:service_retire => "Service Retire"}, + :OrchestrationStackRetireRequest => {:orchestration_stack_retire => "Orchestration Stack Retire"}, + :AutomationRequest => {:automation => "Automation"}, + :ServiceTemplateProvisionRequest => {:clone_to_service => "Service Provision"}, + :ServiceReconfigureRequest => {:service_reconfigure => "Service Reconfigure"}, + :PhysicalServerProvisionRequest => {:provision_physical_server => "Physical Server Provision"}, + :PhysicalServerFirmwareUpdateRequest => {:physical_server_firmware_update => "Physical Server Firmware Update"}, } expect(described_class::REQUEST_TYPES).to eq(expected_request_types) @@ -26,7 +26,7 @@ end context "A new request" do - let(:event_name) { "hello" } + let(:event_name) { "hello" } let(:miq_request) { FactoryBot.build(:automation_request, :options => {:src_ids => [1]}) } let(:request) { FactoryBot.create(:vm_migrate_request, :requester => fred) } let(:ems) { FactoryBot.create(:ems_vmware) } @@ -160,12 +160,12 @@ end it "approval_state denied" do - provision_request.approval_state = 'denied' + provision_request.approval_state = 'denied' 
expect(provision_request.request_status).to eq('Error') end it "approval_state pending_approval" do - provision_request.approval_state = 'pending_approval' + provision_request.approval_state = 'pending_approval' expect(provision_request.request_status).to eq('Unknown') end end @@ -263,7 +263,7 @@ end describe ".with_reason_like" do - let(:reason) { %w(abcd abcde cde) } + let(:reason) { %w[abcd abcde cde] } subject { described_class.with_reason_like(pattern).count } before { request.miq_approvals = approvals } @@ -421,9 +421,9 @@ def approvals context '#workflow' do let(:provision_request) do FactoryBot.create(:miq_provision_request, - :requester => fred, - :src_vm_id => template.id, - :options => {:src_vm_id => template.id}) + :requester => fred, + :src_vm_id => template.id, + :options => {:src_vm_id => template.id}) end let(:ems) { FactoryBot.create(:ems_vmware) } let(:template) { FactoryBot.create(:template_vmware, :ext_management_system => ems) } @@ -436,8 +436,8 @@ def approvals it "returns the allowed tags" do FactoryBot.create(:miq_dialog, - :name => "miq_provision_dialogs", - :dialog_type => MiqProvisionWorkflow) + :name => "miq_provision_dialogs", + :dialog_type => MiqProvisionWorkflow) FactoryBot.create(:classification_department_with_tags) @@ -538,8 +538,8 @@ def approvals let(:request) do FactoryBot.create(:miq_provision_request, - :requester => fred, - :options => {:a => "1"}) + :requester => fred, + :options => {:a => "1"}) end it "user_message" do diff --git a/spec/models/miq_request_task/dumping_spec.rb b/spec/models/miq_request_task/dumping_spec.rb index a095f2cd628..c2e9c6a5e08 100644 --- a/spec/models/miq_request_task/dumping_spec.rb +++ b/spec/models/miq_request_task/dumping_spec.rb @@ -10,7 +10,7 @@ it 'accepts an array' do expect(task.class).to receive(:dump_array) - task.class.dump_obj(%w(1 2 3)) + task.class.dump_obj(%w[1 2 3]) end end diff --git a/spec/models/miq_request_workflow_spec.rb b/spec/models/miq_request_workflow_spec.rb index 4ca5442d6a8..6303318cadb 100644 --- a/spec/models/miq_request_workflow_spec.rb +++ b/spec/models/miq_request_workflow_spec.rb @@ -196,7 +196,7 @@ before do dialogs[:dialogs].keys.each do |dialog_name| workflow.get_all_fields(dialog_name).each_pair do |_, field_values| - field_values[:values] = [%w(test 100), %w(test2 0)] + field_values[:values] = [%w[test 100], %w[test2 0]] end end end @@ -468,8 +468,8 @@ end it "returns a detailed formatting message when fail details are defined" do - expect(workflow.validate_regex(nil, {}, {}, regex_with_details, value_no_email)).to eq "'/' must be correctly"\ - " formatted. We are looking for a specific email here." + expect(workflow.validate_regex(nil, {}, {}, regex_with_details, value_no_email)).to eq "'/' must be correctly " \ + "formatted. We are looking for a specific email here." 
end end @@ -532,8 +532,8 @@ ems_folder.ext_management_system = ems attrs = ems_folder.attributes.merge(:object => workflow.ems_folder_to_hash_struct(ems_folder)) xml_hash = XmlHash::Element.new('EmsFolder', attrs) - hash = { ResourcePool => { resource_pool.id => xml_hash } } - workflow.instance_variable_set("@ems_xml_nodes", hash) + hash = {ResourcePool => {resource_pool.id => xml_hash}} + workflow.instance_variable_set(:@ems_xml_nodes, hash) end it "returns nil if :respool is nil" do @@ -553,8 +553,8 @@ ems_folder.ext_management_system = ems attrs = resource_pool.attributes.merge(:object => resource_pool, :ems => ems) xml_hash = XmlHash::Element.new('ResourcePool', attrs) - hash = { EmsFolder => { ems_folder.id => xml_hash } } - workflow.instance_variable_set("@ems_xml_nodes", hash) + hash = {EmsFolder => {ems_folder.id => xml_hash}} + workflow.instance_variable_set(:@ems_xml_nodes, hash) end it "returns nil if :folder is nil" do @@ -573,8 +573,8 @@ datacenter.ext_management_system = ems attrs = datacenter.attributes.merge(:object => workflow.ems_folder_to_hash_struct(datacenter), :ems => ems) xml_hash = XmlHash::Element.new('EmsFolder', attrs) - hash = { EmsFolder => { datacenter.id => xml_hash } } - workflow.instance_variable_set("@ems_xml_nodes", hash) + hash = {EmsFolder => {datacenter.id => xml_hash}} + workflow.instance_variable_set(:@ems_xml_nodes, hash) end it "returns a datacenter" do @@ -584,7 +584,7 @@ end describe '#validate_data_types?' do - %w(array_integer integer float array).each do |name| + %w[array_integer integer float array].each do |name| let("fld_#{name}".to_sym) { {:error => nil, :data_type => name.to_sym} } end diff --git a/spec/models/miq_schedule_filter_spec.rb b/spec/models/miq_schedule_filter_spec.rb index 9cb2fcd57b7..9331ab0e928 100644 --- a/spec/models/miq_schedule_filter_spec.rb +++ b/spec/models/miq_schedule_filter_spec.rb @@ -10,27 +10,23 @@ @vm4 = FactoryBot.create(:vm_vmware, :name => "Special Test VM") @vm_single_schedule = FactoryBot.create(:miq_schedule, - :resource_type => "Vm", - :sched_action => {:method => "vm_scan"}, - :filter => MiqExpression.new("=" => {"field" => "Vm-name", "value" => "Special Test VM"}) - ) + :resource_type => "Vm", + :sched_action => {:method => "vm_scan"}, + :filter => MiqExpression.new("=" => {"field" => "Vm-name", "value" => "Special Test VM"})) @vm_all_schedule = FactoryBot.create(:miq_schedule, - :resource_type => "Vm", - :sched_action => {:method => "vm_scan"}, - :filter => MiqExpression.new("IS NOT NULL" => {"field" => "Vm-name"}) - ) + :resource_type => "Vm", + :sched_action => {:method => "vm_scan"}, + :filter => MiqExpression.new("IS NOT NULL" => {"field" => "Vm-name"})) # Schedule froma saved search @search = FactoryBot.create(:miq_search, - :db => "Vm", - :filter => MiqExpression.new("=" => {"field" => "Vm-name", "value" => "Test VM 2"}) - ) + :db => "Vm", + :filter => MiqExpression.new("=" => {"field" => "Vm-name", "value" => "Test VM 2"})) @vm_search_schedule = FactoryBot.create(:miq_schedule, - :resource_type => "Vm", - :sched_action => {:method => "vm_scan"}, - :miq_search_id => @search.id - ) + :resource_type => "Vm", + :sched_action => {:method => "vm_scan"}, + :miq_search_id => @search.id) end context "for a scheduled report" do @@ -38,10 +34,9 @@ MiqReport.seed_report("Vendor and Guest OS") @report = MiqReport.first @report_schedule = FactoryBot.create(:miq_schedule, - :resource_type => "MiqReport", - :sched_action => {:method => "run_report"}, - :filter => MiqExpression.new("=" => {"field" => 
"MiqReport-id", "value" => @report.id}) - ) + :resource_type => "MiqReport", + :sched_action => {:method => "run_report"}, + :filter => MiqExpression.new("=" => {"field" => "MiqReport-id", "value" => @report.id})) end it "should get the correct report" do diff --git a/spec/models/miq_schedule_spec.rb b/spec/models/miq_schedule_spec.rb index 0a6e7ee5914..7b95ffc2e23 100644 --- a/spec/models/miq_schedule_spec.rb +++ b/spec/models/miq_schedule_spec.rb @@ -19,7 +19,7 @@ } end - let(:sched_action) { { :method => "run_report", :options => options } } + let(:sched_action) { {:method => "run_report", :options => options} } let(:miq_report) { FactoryBot.create(:miq_report) } let(:miq_expression) { MiqExpression.new("=" => {"field" => "MiqReport-id", "value" => miq_report.id}) } @@ -71,7 +71,7 @@ end context "SmartState" do - let(:sched_action) { { :method => "vm_scan", :options => options } } + let(:sched_action) { {:method => "vm_scan", :options => options} } it "exports to array" do miq_schedule_array = MiqSchedule.export_to_array([miq_schedule.id], MiqSchedule).first["MiqSchedule"] @@ -144,7 +144,7 @@ end context "with resource (ServiceTemplate)" do - let(:sched_action) { { :method => "vm_scan", :options => options } } + let(:sched_action) { {:method => "vm_scan", :options => options} } let(:template) { FactoryBot.create(:service_template) } let(:schedule_with_template) do FactoryBot.create(:miq_schedule, diff --git a/spec/models/miq_schedule_worker/runner_spec.rb b/spec/models/miq_schedule_worker/runner_spec.rb index f5d72c01097..da82613a4da 100644 --- a/spec/models/miq_schedule_worker/runner_spec.rb +++ b/spec/models/miq_schedule_worker/runner_spec.rb @@ -167,7 +167,7 @@ @schedule_worker.rufus_add_normal_schedule(options) jobs = @schedule_worker.instance_variable_get(:@schedules)[:scheduler] - expect(jobs).to be_all { |job| job.kind_of?(Rufus::Scheduler::Job) } + expect(jobs).to(be_all { |job| job.kind_of?(Rufus::Scheduler::Job) }) end end end @@ -230,15 +230,15 @@ @database_maintenance = { :reindex_schedule => "1 * * * *", - :reindex_tables => %w(Metric MiqQueue MiqWorker), + :reindex_tables => %w[Metric MiqQueue MiqWorker], :vacuum_schedule => "0 2 * * 6", - :vacuum_tables => %w(Vm BinaryBlobPart BinaryBlob CustomizationSpec FirewallRule Host Storage + :vacuum_tables => %w[Vm BinaryBlobPart BinaryBlob CustomizationSpec FirewallRule Host Storage MiqSchedule EventLog PolicyEvent Snapshot Job Network MiqQueue MiqRequestTask MiqWorker MiqServer MiqSearch MiqScsiLun MiqScsiTarget StorageFile - Tagging VimPerformanceState) + Tagging VimPerformanceState] } database_config = { - :maintenance => @database_maintenance, + :maintenance => @database_maintenance, } stub_settings(:database => database_config) purging_intervals = { @@ -262,14 +262,14 @@ scheduled_jobs.each do |job| while_calling_job(job) do case job.tags - when %w(database_operations database_maintenance_reindex_schedule) + when %w[database_operations database_maintenance_reindex_schedule] expect(job.original).to eq(@database_maintenance[:reindex_schedule]) expect(MiqQueue.count).to eq(3) @database_maintenance[:reindex_tables].each do |class_name| message = MiqQueue.where(:class_name => class_name, :method_name => "reindex").first expect(message).to have_attributes(:role => "database_operations", :zone => nil) end - when %w(database_operations database_maintenance_vacuum_schedule) + when %w[database_operations database_maintenance_vacuum_schedule] expect(job.original).to eq(@database_maintenance[:vacuum_schedule]) expect(MiqQueue.count).to 
eq(@database_maintenance[:vacuum_tables].size) @database_maintenance[:vacuum_tables].each do |class_name| @@ -308,17 +308,17 @@ scheduled_jobs.each do |job| while_calling_job(job) do case job.tags - when %w(database_operations) + when %w[database_operations] expect(job.original).to eq(@metrics_history[:purge_schedule]) expect(MiqQueue.count).to eq(2) - when %w(database_operations database_maintenance_reindex_schedule) + when %w[database_operations database_maintenance_reindex_schedule] expect(job.original).to eq(@database_maintenance[:reindex_schedule]) expect(MiqQueue.count).to eq(3) @database_maintenance[:reindex_tables].each do |class_name| message = MiqQueue.where(:class_name => class_name, :method_name => "reindex").first expect(message).to have_attributes(:role => "database_operations", :zone => nil) end - when %w(database_operations database_maintenance_vacuum_schedule) + when %w[database_operations database_maintenance_vacuum_schedule] expect(job.original).to eq(@database_maintenance[:vacuum_schedule]) expect(MiqQueue.count).to eq(@database_maintenance[:vacuum_tables].size) @database_maintenance[:vacuum_tables].each do |class_name| @@ -356,7 +356,7 @@ @schedule_worker.instance_variable_set(:@active_roles, []) end - context "#schedules_for_all_roles" do + context "#schedules_for_all_roles" do before do @schedule_worker.instance_variable_set(:@active_roles, []) @start_time = Time.utc(2011, 1, 31, 8, 30, 0) @@ -367,10 +367,10 @@ @schedule_worker.schedules_for_all_roles first_in_expectations = { - :vmdb_appliance_log_config => 5, - :status_update => 5, - :log_status => 5, - :log_statistics => 1 + :vmdb_appliance_log_config => 5, + :status_update => 5, + :log_status => 5, + :log_statistics => 1 } first_in_expectations.each do |tag, expected_minutes| @@ -403,10 +403,10 @@ while_calling_job(job) do case job.tags - when %w(event_stream purge_schedule) + when %w[event_stream purge_schedule] messages = MiqQueue.where(:class_name => "EventStream", :method_name => "purge_timer") expect(messages.count).to eq(1) - when %w(policy_event purge_schedule) + when %w[policy_event purge_schedule] messages = MiqQueue.where(:class_name => "PolicyEvent", :method_name => "purge_timer") expect(messages.count).to eq(1) else @@ -530,8 +530,8 @@ def while_calling_job(job) end def raise_unexpected_job_error(job) - raise "Unexpected Job: tags=#{job.tags.inspect}, original=#{job.original.inspect}, "\ - "last_time=#{job.last_time.inspect}, id=#{job.job_id.inspect}, next=#{job.next_time.inspect}, "\ + raise "Unexpected Job: tags=#{job.tags.inspect}, original=#{job.original.inspect}, " \ + "last_time=#{job.last_time.inspect}, id=#{job.job_id.inspect}, next=#{job.next_time.inspect}, " \ "handler=#{job.handler.inspect}" end end diff --git a/spec/models/miq_schedule_worker/scheduler_spec.rb b/spec/models/miq_schedule_worker/scheduler_spec.rb index 6c6c006f7bb..1889f7dd684 100644 --- a/spec/models/miq_schedule_worker/scheduler_spec.rb +++ b/spec/models/miq_schedule_worker/scheduler_spec.rb @@ -20,11 +20,11 @@ it "accepts ruby options" do Timecop.freeze do - work = lambda {} + work = -> {} scheduler.schedule_every(schedule_name, 3.hours, :first_in => 1.hour, :tags => [:first, :tag], &work) job = rufus_scheduler.jobs.first - expect(job.next_time).to eq(1.hours.from_now) - expect(job.tags).to match_array(%w(first tag)) + expect(job.next_time).to eq(1.hour.from_now) + expect(job.tags).to match_array(%w[first tag]) expect(job.callable).to eq(work) end end @@ -33,7 +33,7 @@ Timecop.freeze do 
scheduler.schedule_every(schedule_name, "3h", :first_in => "1h") {} job = rufus_scheduler.jobs.first - expect(job.next_time).to eq(1.hours.from_now) + expect(job.next_time).to eq(1.hour.from_now) end end @@ -71,12 +71,12 @@ let(:schedule_name) { "schedule_cron" } it "returns the job" do - work = lambda {} + work = -> {} scheduler.schedule_cron(schedule_name, "0 0 * * *", :tags => [:a, :b], &work) job = rufus_scheduler.jobs.first expect(job.rough_frequency).to eq(1.day.to_i) - expect(job.tags).to match_array(%w(a b)) + expect(job.tags).to match_array(%w[a b]) expect(job.callable).to eq(work) end diff --git a/spec/models/miq_search_spec.rb b/spec/models/miq_search_spec.rb index 53941a4afc7..95be77f29f6 100644 --- a/spec/models/miq_search_spec.rb +++ b/spec/models/miq_search_spec.rb @@ -40,7 +40,8 @@ expect(MiqSearch.descriptions).to eq( srchs[0].id.to_s => srchs[0].description, srchs[1].id.to_s => srchs[1].description, - srchs[2].id.to_s => srchs[2].description) + srchs[2].id.to_s => srchs[2].description + ) end it "supports scopes" do @@ -52,15 +53,15 @@ expect(MiqSearch.where(:db => 'Vm').descriptions).to eq( srchs[0].id.to_s => srchs[0].description, - srchs[1].id.to_s => srchs[1].description) + srchs[1].id.to_s => srchs[1].description + ) end end let(:vm_location_search) do FactoryBot.create(:miq_search, - :db => "Vm", - :filter => MiqExpression.new("=" => {"field" => "Vm-location", "value" => "good"}) - ) + :db => "Vm", + :filter => MiqExpression.new("=" => {"field" => "Vm-location", "value" => "good"})) end let(:matched_vms) { FactoryBot.create_list(:vm_vmware, 2, :location => "good") } @@ -158,7 +159,7 @@ before do FileUtils.mkdir_p(fixture_dir) - FileUtils.cp_r(Rails.root.join('db', 'fixtures', 'miq_searches.yml'), search_yml) + FileUtils.cp_r(Rails.root.join("db/fixtures/miq_searches.yml"), search_yml) stub_const("MiqSearch::FIXTURE_DIR", fixture_dir) described_class.seed end diff --git a/spec/models/miq_server/log_management_spec.rb b/spec/models/miq_server/log_management_spec.rb index ce07c6b9ab3..092aa1bce5f 100644 --- a/spec/models/miq_server/log_management_spec.rb +++ b/spec/models/miq_server/log_management_spec.rb @@ -96,32 +96,33 @@ def stub_vmdb_util_methods_for_collection_log it "pg_data_dir set" do allow(@miq_server).to receive_messages(:pg_data_dir => '/var/lib/pgsql/data') - expected = %w(/var/lib/pgsql/data/*.conf /var/lib/pgsql/data/log/* /etc/manageiq/postgresql.conf.d/*) + expected = %w[/var/lib/pgsql/data/*.conf /var/lib/pgsql/data/log/* /etc/manageiq/postgresql.conf.d/*] expect(@miq_server.pg_log_patterns.collect(&:to_s)).to match_array expected end end it "#current_log_patterns" do - stub_settings(:log => {:collection => {:current => {:pattern => %w(/var/log/syslog*)}}}) - allow(@miq_server).to receive_messages(:pg_log_patterns => %w(/var/lib/pgsql/data/*.conf)) - expect(@miq_server.current_log_patterns).to match_array %w(/var/log/syslog* /var/lib/pgsql/data/*.conf) + stub_settings(:log => {:collection => {:current => {:pattern => %w[/var/log/syslog*]}}}) + allow(@miq_server).to receive_messages(:pg_log_patterns => %w[/var/lib/pgsql/data/*.conf]) + expect(@miq_server.current_log_patterns).to match_array %w[/var/log/syslog* /var/lib/pgsql/data/*.conf] end it "#current_log_patterns with pg_logs duplicated in current_log_pattern_configuration" do stub_settings( - :log => {:collection => {:current => {:pattern => %w(/var/log/syslog* /var/lib/pgsql/data/*.conf)}}}) - allow(@miq_server).to receive_messages(:pg_log_patterns => %w(/var/lib/pgsql/data/*.conf)) - 
expect(@miq_server.current_log_patterns).to match_array %w(/var/log/syslog* /var/lib/pgsql/data/*.conf) + :log => {:collection => {:current => {:pattern => %w[/var/log/syslog* /var/lib/pgsql/data/*.conf]}}} + ) + allow(@miq_server).to receive_messages(:pg_log_patterns => %w[/var/lib/pgsql/data/*.conf]) + expect(@miq_server.current_log_patterns).to match_array %w[/var/log/syslog* /var/lib/pgsql/data/*.conf] end context "post current/historical/models/dialogs" do let(:task) { FactoryBot.create(:miq_task) } - let(:compressed_log_patterns) { [Rails.root.join("log", "evm*.log.gz").to_s] } - let(:current_log_patterns) { [Rails.root.join("log", "evm.log").to_s] } + let(:compressed_log_patterns) { [Rails.root.join("log/evm*.log.gz").to_s] } + let(:current_log_patterns) { [Rails.root.join("log/evm.log").to_s] } let(:compressed_evm_log) { Rails.root.join("evm.log-20180319.gz").to_s } let(:log_start) { Time.zone.parse("2018-05-11 11:33:12 UTC") } let(:log_end) { Time.zone.parse("2018-05-11 15:34:16 UTC") } - let(:daily_log) { Rails.root.join("data", "user", "system", "evm_server_daily.zip").to_s } + let(:daily_log) { Rails.root.join("data/user/system/evm_server_daily.zip").to_s } let(:log_depot) { FactoryBot.create(:file_depot) } let(:region) { MiqRegion.my_region } let(:zone) { @miq_server.zone } @@ -134,19 +135,19 @@ def stub_vmdb_util_methods_for_collection_log allow(@miq_server).to receive(:current_log_patterns).and_return(current_log_patterns) allow(@miq_server).to receive(:backup_automate_dialogs) allow(@miq_server).to receive(:backup_automate_models) - %w(historical_logfile current_logfile).each do |kind| + %w[historical_logfile current_logfile].each do |kind| logfile = FactoryBot.create(:log_file, :historical => kind == "historical_logfile") allow(logfile).to receive(:upload) allow(LogFile).to receive(kind).and_return(logfile) end end - %w( + %w[ Archive post_historical_logs Current post_current_logs Models post_automate_models Dialogs post_automate_dialogs - ).each_slice(2) do |name, method| + ].each_slice(2) do |name, method| it "##{method}" do logfile = nil @@ -186,7 +187,7 @@ def stub_vmdb_util_methods_for_collection_log expect(task.reload).to have_attributes( :message => "#{name} log files from #{@miq_server.name} #{@miq_server.zone.name} MiqServer #{@miq_server.id} are posted", :state => "Active", - :status => "Ok", + :status => "Ok" ) end end diff --git a/spec/models/miq_server/server_monitor_spec.rb b/spec/models/miq_server/server_monitor_spec.rb index 7606b0d1f7b..d082f15c81e 100644 --- a/spec/models/miq_server/server_monitor_spec.rb +++ b/spec/models/miq_server/server_monitor_spec.rb @@ -35,6 +35,7 @@ rolename = "ems_operations" @miq_server.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.activate_in_zone end @miq_server.reload @@ -48,6 +49,7 @@ rolename = "event" @miq_server.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.activate_in_zone end @miq_server.reload @@ -61,6 +63,7 @@ rolename = "reporting" @miq_server.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.activate_in_region end @miq_server.reload @@ -74,6 +77,7 @@ rolename = "scheduler" @miq_server.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.activate_in_region end @miq_server.reload @@ -100,6 +104,7 @@ rolename = "ems_operations" @miq_server.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.deactivate_in_zone end @miq_server.reload @@ 
-113,6 +118,7 @@ rolename = "event" @miq_server.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.deactivate_in_zone end @miq_server.reload @@ -230,6 +236,7 @@ [@miq_server1, @miq_server2].each do |svr| svr.assigned_server_roles.each do |asr| next unless asr.server_role.name == rolename + asr.activate_in_zone end end diff --git a/spec/models/miq_server/worker_management/kubernetes_spec.rb b/spec/models/miq_server/worker_management/kubernetes_spec.rb index 02f450e1406..58dec242a3f 100644 --- a/spec/models/miq_server/worker_management/kubernetes_spec.rb +++ b/spec/models/miq_server/worker_management/kubernetes_spec.rb @@ -166,7 +166,7 @@ :metadata => { :name => pod_name }, - :spec => { + :spec => { :replicas => 2, :template => { :spec => { @@ -174,7 +174,7 @@ } } }, - :status => { + :status => { :readyReplicas => 2 } ) diff --git a/spec/models/miq_server/worker_management/monitor/system_limits_spec.rb b/spec/models/miq_server/worker_management/monitor/system_limits_spec.rb index 2dff7101d21..449959953ae 100644 --- a/spec/models/miq_server/worker_management/monitor/system_limits_spec.rb +++ b/spec/models/miq_server/worker_management/monitor/system_limits_spec.rb @@ -9,7 +9,7 @@ :start_algorithm: :name: used_swap_percent_lt_value :value: 60 - EOS + EOS ) allow(@server.worker_manager).to receive_messages(:worker_monitor_settings => @monitor_settings) @@ -35,7 +35,7 @@ :start_algorithm: :name: :used_swap_percent_lt_value :value: 20 - EOS + EOS ) allow(@server.worker_manager).to receive_messages(:child_worker_settings => child) diff --git a/spec/models/miq_server/worker_management/monitor_spec.rb b/spec/models/miq_server/worker_management/monitor_spec.rb index 749ba9cfe80..af932d751d3 100644 --- a/spec/models/miq_server/worker_management/monitor_spec.rb +++ b/spec/models/miq_server/worker_management/monitor_spec.rb @@ -3,10 +3,10 @@ let(:server) { EvmSpecHelper.local_miq_server } let(:worker) do FactoryBot.create(:miq_worker, - :type => "MiqGenericWorker", - :miq_server => server, - :pid => 12345, - :last_heartbeat => 5.minutes.ago) + :type => "MiqGenericWorker", + :miq_server => server, + :pid => 12345, + :last_heartbeat => 5.minutes.ago) end before do @@ -37,7 +37,7 @@ let(:server) { EvmSpecHelper.local_miq_server } it "rescues exceptions and moves on" do - allow(MiqWorkerType).to receive(:worker_class_names).and_return(%w(MiqGenericWorker MiqPriorityWorker)) + allow(MiqWorkerType).to receive(:worker_class_names).and_return(%w[MiqGenericWorker MiqPriorityWorker]) allow(MiqGenericWorker).to receive(:sync_workers).and_raise expect(MiqPriorityWorker).to receive(:sync_workers).and_return(:adds => [123]) expect(server.worker_manager.sync_workers).to eq("MiqPriorityWorker"=>{:adds=>[123]}) diff --git a/spec/models/miq_server/worker_monitor_spec.rb b/spec/models/miq_server/worker_monitor_spec.rb index e96b17c4501..9b93e2495b4 100644 --- a/spec/models/miq_server/worker_monitor_spec.rb +++ b/spec/models/miq_server/worker_monitor_spec.rb @@ -42,13 +42,11 @@ describe "#do_system_limit_exceeded" do before do @worker_to_keep = FactoryBot.create(:miq_ems_metrics_processor_worker, - :miq_server => @miq_server, - :memory_usage => 1.gigabytes - ) + :miq_server => @miq_server, + :memory_usage => 1.gigabytes) @worker_to_kill = FactoryBot.create(:miq_ems_metrics_processor_worker, - :miq_server => @miq_server, - :memory_usage => 2.gigabytes - ) + :miq_server => @miq_server, + :memory_usage => 2.gigabytes) end it "will kill the worker with the highest memory" do @@ -86,7 +84,7 @@ 
@messages = [] @actives = [] - m = FactoryBot.create(:miq_queue, :state => 'ready', :handler => @worker, :msg_timeout => 4.minutes) + m = FactoryBot.create(:miq_queue, :state => 'ready', :handler => @worker, :msg_timeout => 4.minutes) @messages << m @actives << m if m.state == 'dequeue' @@ -107,7 +105,7 @@ it "on worker destroy, will destroy its processed messages" do @worker.destroy - expect(@worker.messages.where("state != ?", "ready").count).to eq(0) + expect(@worker.messages.where.not(:state => "ready").count).to eq(0) expect(@worker.active_messages.size).to eq(0) end diff --git a/spec/models/miq_server_spec.rb b/spec/models/miq_server_spec.rb index 5e4e90af326..a94dc7e5e80 100644 --- a/spec/models/miq_server_spec.rb +++ b/spec/models/miq_server_spec.rb @@ -247,9 +247,9 @@ before do @server_roles = [] [ - ['event', 1], + ['event', 1], ['ems_metrics_coordinator', 1], - ['ems_operations', 0] + ['ems_operations', 0] ].each { |r, max| @server_roles << FactoryBot.create(:server_role, :name => r, :max_concurrent => max) } @miq_server.role = @server_roles.collect(&:name).join(',') diff --git a/spec/models/miq_snmp_spec.rb b/spec/models/miq_snmp_spec.rb index 7913df6fadc..199f09d299f 100644 --- a/spec/models/miq_snmp_spec.rb +++ b/spec/models/miq_snmp_spec.rb @@ -17,7 +17,7 @@ MiqSnmp.trap_v1(:host => ["localhost"], :sysuptime => 1, :specific_trap => "56", - :object_list => [{:oid=>"1.2.3", :var_type=>"Integer", :value=>"1"}]) + :object_list => [{:oid => "1.2.3", :var_type => "Integer", :value => "1"}]) end end end diff --git a/spec/models/miq_task_spec.rb b/spec/models/miq_task_spec.rb index 0c2ceb7c08e..ed0cf74ab83 100644 --- a/spec/models/miq_task_spec.rb +++ b/spec/models/miq_task_spec.rb @@ -54,14 +54,14 @@ end it "should respond to warn instance method properly" do - message = "There may be a fire on your floor" + message = "There may be a fire on your floor" miq_task.warn(message) expect(miq_task.message).to eq(message) expect(miq_task.status).to eq(MiqTask::STATUS_WARNING) end it "should respond to warn class method properly" do - message = "There may be a fire on your floor (class)" + message = "There may be a fire on your floor (class)" MiqTask.warn(miq_task.id, message) miq_task.reload expect(miq_task.message).to eq(message) @@ -69,14 +69,14 @@ end it "should respond to error instance method properly" do - message = "Red Alert" + message = "Red Alert" miq_task.error(message) expect(miq_task.message).to eq(message) expect(miq_task.status).to eq(MiqTask::STATUS_ERROR) end it "should respond to error class method properly" do - message = "Red Alert (class)" + message = "Red Alert (class)" MiqTask.error(miq_task.id, message) miq_task.reload expect(miq_task.message).to eq(message) @@ -151,14 +151,14 @@ expect(miq_task.message).to eq(MiqTask::MESSAGE_TASK_COMPLETED_SUCCESSFULLY) expect(miq_task.task_results).to eq(result) - status = MiqTask::STATUS_ERROR + status = MiqTask::STATUS_ERROR miq_task.queue_callback(state, status, "", result) expect(miq_task.state).to eq(state) expect(miq_task.status).to eq(status) expect(miq_task.message).to eq(MiqTask::MESSAGE_TASK_COMPLETED_UNSUCCESSFULLY) expect(miq_task.task_results).to eq(result) - result = {:c => 1, :d => 2} + result = {:c => 1, :d => 2} miq_task.queue_callback(state, status, message, result) expect(miq_task.state).to eq(state) expect(miq_task.status).to eq(status) @@ -461,7 +461,7 @@ context "task is not active" do it "does not update status to 'Error' if task state is 'Finished'" do miq_task.update(:state => MiqTask::STATE_FINISHED, - 
:updated_on => miq_task.updated_on - timeout.to_i_with_method) + :updated_on => miq_task.updated_on - timeout.to_i_with_method) MiqTask.update_status_for_timed_out_active_tasks miq_task.reload expect(miq_task.status).not_to eq MiqTask::STATUS_ERROR @@ -469,7 +469,7 @@ it "does not update status to 'Error' if task state is 'Queued'" do miq_task.update(:state => MiqTask::STATE_QUEUED, - :updated_on => miq_task.updated_on - timeout.to_i_with_method) + :updated_on => miq_task.updated_on - timeout.to_i_with_method) MiqTask.update_status_for_timed_out_active_tasks miq_task.reload expect(miq_task.status).not_to eq MiqTask::STATUS_ERROR @@ -485,7 +485,7 @@ it "does not update status to 'Error'" do miq_task.update(:state => MiqTask::STATE_ACTIVE, - :updated_on => miq_task.updated_on - timeout.to_i_with_method) + :updated_on => miq_task.updated_on - timeout.to_i_with_method) MiqTask.update_status_for_timed_out_active_tasks miq_task.reload expect(miq_task.status).not_to eq MiqTask::STATUS_ERROR @@ -540,8 +540,8 @@ def create_test_task(name, status, updated) Timecop.travel(updated) do FactoryBot.create(:miq_task_plain).update(:state => MiqTask::STATE_FINISHED, - :status => status, - :name => name) + :status => status, + :name => name) end end end diff --git a/spec/models/miq_user_role_spec.rb b/spec/models/miq_user_role_spec.rb index fb7e8abd2e2..536687796e3 100644 --- a/spec/models/miq_user_role_spec.rb +++ b/spec/models/miq_user_role_spec.rb @@ -48,18 +48,18 @@ context "testing allows methods" do before do - EvmSpecHelper.seed_specific_product_features(%w( - dashboard_add - dashboard_view - host_compare - host_edit - host_scan - host_show_list - miq_policy - vm - dialog_edit_editor - rbac_tenant_manage_quotas - )) + EvmSpecHelper.seed_specific_product_features(%w[ + dashboard_add + dashboard_view + host_compare + host_edit + host_scan + host_show_list + miq_policy + vm + dialog_edit_editor + rbac_tenant_manage_quotas + ]) feature1 = MiqProductFeature.find_all_by_identifier("dashboard_admin") @role1 = FactoryBot.create(:miq_user_role, :name => "Role1", :miq_product_features => feature1) @@ -71,7 +71,7 @@ @group2 = FactoryBot.create(:miq_group, :description => "Group2", :miq_user_role => @role2) @user2 = FactoryBot.create(:user, :userid => "user2", :miq_groups => [@group2]) - feature3 = MiqProductFeature.find_all_by_identifier(%w(host_show_list host_scan host_edit)) + feature3 = MiqProductFeature.find_all_by_identifier(%w[host_show_list host_scan host_edit]) @role3 = FactoryBot.create(:miq_user_role, :name => "Role3", :miq_product_features => feature3) @group3 = FactoryBot.create(:miq_group, :description => "Group3", :miq_user_role => @role3) @user3 = FactoryBot.create(:user, :userid => "user3", :miq_groups => [@group3]) @@ -135,8 +135,8 @@ end it "should return the correct answer calling allows_any? 
with default scope => :sub" do - expect(@role1.allows_any?(:identifiers => %w(dashboard_admin dashboard_add dashboard_view miq_policy))).to eq(true) - expect(@role2.allows_any?(:identifiers => %w(dashboard_admin dashboard_add dashboard_view miq_policy))).to eq(true) + expect(@role1.allows_any?(:identifiers => %w[dashboard_admin dashboard_add dashboard_view miq_policy])).to eq(true) + expect(@role2.allows_any?(:identifiers => %w[dashboard_admin dashboard_add dashboard_view miq_policy])).to eq(true) expect(@role3.allows_any?(:identifiers => ["host_view"])).to eq(true) expect(@role3.allows_any?(:identifiers => ["vm"])).to eq(false) expect(@role3.allows_any?(:identifiers => ["everything"])).to eq(true) @@ -145,19 +145,19 @@ describe "#allow?" do it "allows everything" do - EvmSpecHelper.seed_specific_product_features(%w(everything miq_report)) + EvmSpecHelper.seed_specific_product_features(%w[everything miq_report]) user = FactoryBot.create(:user, :features => "everything") expect(user.role_allows?(:identifier => "miq_report")).to be_truthy end it "disallows unentitled" do - EvmSpecHelper.seed_specific_product_features(%w(miq_report container_dashboard)) + EvmSpecHelper.seed_specific_product_features(%w[miq_report container_dashboard]) user = FactoryBot.create(:user, :features => "container_dashboard") expect(user.role_allows?(:identifier => "miq_report")).to be_falsey end it "allows entitled" do - EvmSpecHelper.seed_specific_product_features(%w(miq_report)) + EvmSpecHelper.seed_specific_product_features(%w[miq_report]) user = FactoryBot.create(:user, :features => "miq_report") expect(user.role_allows?(:identifier => "miq_report")).to be_truthy end @@ -167,13 +167,13 @@ # - render_report_csv (H) it "disallows hidden child with not-entitled parent" do - EvmSpecHelper.seed_specific_product_features(%w(miq_report_view render_report_csv container_dashboard)) + EvmSpecHelper.seed_specific_product_features(%w[miq_report_view render_report_csv container_dashboard]) user = FactoryBot.create(:user, :features => "container_dashboard") expect(user.role_allows?(:identifier => "render_report_csv")).to be_falsey end it "allows hidden child with entitled parent" do - EvmSpecHelper.seed_specific_product_features(%w(miq_report_view render_report_csv)) + EvmSpecHelper.seed_specific_product_features(%w[miq_report_view render_report_csv]) user = FactoryBot.create(:user, :features => "miq_report_view") expect(user.role_allows?(:identifier => "render_report_csv")).to be_truthy end @@ -186,7 +186,7 @@ it "allows hidden child of not entitled, if a sibling is entitled" do EvmSpecHelper.seed_specific_product_features( - %w(miq_report_widget_admin widget_refresh widget_edit widget_copy container_dashboard) + %w[miq_report_widget_admin widget_refresh widget_edit widget_copy container_dashboard] ) user = FactoryBot.create(:user, :features => "widget_edit") expect(user.role_allows?(:identifier => "widget_refresh")).to be_truthy @@ -194,7 +194,7 @@ it "disallows hidden child of not entitled, if no sibling is entitled" do EvmSpecHelper.seed_specific_product_features( - %w(miq_report_widget_admin widget_refresh widget_edit widget_copy container_dashboard) + %w[miq_report_widget_admin widget_refresh widget_edit widget_copy container_dashboard] ) user = FactoryBot.create(:user, :features => "container_dashboard") expect(user.role_allows?(:identifier => "widget_refresh")).to be_falsey @@ -205,7 +205,7 @@ # - profile_new (H) it "allows hidden child of hidden parent" do EvmSpecHelper.seed_specific_product_features( -
%w(miq_ae_class_explorer miq_ae_namespace_new container_dashboard) + %w[miq_ae_class_explorer miq_ae_namespace_new container_dashboard] ) user = FactoryBot.create(:user, :features => "container_dashboard") expect(user.role_allows?(:identifier => "miq_ae_namespace_new")).to be_truthy @@ -340,7 +340,7 @@ describe ".with_roles_excluding" do it "handles multiple columns" do a = FactoryBot.create(:miq_user_role, :features => "good") - FactoryBot.create(:miq_user_role, :features => %w(good everything)) + FactoryBot.create(:miq_user_role, :features => %w[good everything]) FactoryBot.create(:miq_user_role, :features => "everything") expect(MiqUserRole.select(:id, :name).with_roles_excluding("everything")).to match_array([a]) diff --git a/spec/models/miq_user_scope_spec.rb b/spec/models/miq_user_scope_spec.rb index 7d559bdede1..14f2441983f 100644 --- a/spec/models/miq_user_scope_spec.rb +++ b/spec/models/miq_user_scope_spec.rb @@ -15,13 +15,13 @@ context "testing get_filters method" do before do @scope1 = MiqUserScope.new( - :view => {:belongsto => - {:_all_ => ["/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter1/EmsFolder|host/EmsCluster|Cluster1", - "/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter2/EmsFolder|host/EmsCluster|Cluster3"]}, - :managed => - {:_all_ => [["/managed/department/accounting", "/managed/department/automotive"], - ["/managed/location/london", "/managed/location/ny"], - ["/managed/service_level/gold", "/managed/service_level/platinum"]]}} + :view => {:belongsto => + {:_all_ => ["/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter1/EmsFolder|host/EmsCluster|Cluster1", + "/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter2/EmsFolder|host/EmsCluster|Cluster3"]}, + :managed => + {:_all_ => [["/managed/department/accounting", "/managed/department/automotive"], + ["/managed/location/london", "/managed/location/ny"], + ["/managed/service_level/gold", "/managed/service_level/platinum"]]}} ) @scope2_exp = MiqExpression.new("=" => {}) @@ -56,25 +56,25 @@ it "should return the correct filters for search" do expect(@scope1.get_filters(:class => Vm, :feature_type => :view)).to eq({ - :belongsto => - ["/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter1/EmsFolder|host/EmsCluster|Cluster1", - "/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter2/EmsFolder|host/EmsCluster|Cluster3"], - :managed => - [["/managed/department/accounting", "/managed/department/automotive"], - ["/managed/location/london", "/managed/location/ny"], - ["/managed/service_level/gold", "/managed/service_level/platinum"]], - :expression => nil - }) + :belongsto => + ["/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter1/EmsFolder|host/EmsCluster|Cluster1", + "/belongsto/ExtManagementSystem|VC1/EmsFolder|Datacenters/EmsFolder|DataCenter2/EmsFolder|host/EmsCluster|Cluster3"], + :managed => + [["/managed/department/accounting", "/managed/department/automotive"], + ["/managed/location/london", "/managed/location/ny"], + ["/managed/service_level/gold", "/managed/service_level/platinum"]], + :expression => nil + }) expect(@scope1.get_filters(:class => Vm, :feature_type => :admin)).to eq({:expression => nil, :belongsto => nil, :managed => nil}) expect(@scope1.get_filters(:class => Vm, :feature_type => :control)).to eq({:expression => nil, :belongsto => nil, :managed => nil}) expect(@scope2.get_filters(:class => Vm, :feature_type => :view)).to 
eq({ - :belongsto => - ["/belongsto/ExtManagementSystem|VC4 IP 14/EmsFolder|Datacenters/EmsFolder|Prod/EmsFolder|vm/EmsFolder|Discovered virtual machine"], - :managed => - [["/managed/location/chicago", "/managed/location/ny"], ["/managed/environment/dev"]], - :expression => nil - }) + :belongsto => + ["/belongsto/ExtManagementSystem|VC4 IP 14/EmsFolder|Datacenters/EmsFolder|Prod/EmsFolder|vm/EmsFolder|Discovered virtual machine"], + :managed => + [["/managed/location/chicago", "/managed/location/ny"], ["/managed/environment/dev"]], + :expression => nil + }) filters = @scope2.get_filters(:class => Storage, :feature_type => :view) expect(filters[:belongsto]).to be_nil diff --git a/spec/models/miq_widget/chart_content_spec.rb b/spec/models/miq_widget/chart_content_spec.rb index 5effa8ac334..170e7f21a0f 100644 --- a/spec/models/miq_widget/chart_content_spec.rb +++ b/spec/models/miq_widget/chart_content_spec.rb @@ -14,7 +14,7 @@ 5.times do |i| vm = FactoryBot.build(:vm_vmware) - vm.evm_owner_id = @user.id if i > 2 + vm.evm_owner_id = @user.id if i > 2 vm.miq_group_id = @user.current_group.id if vm.evm_owner_id || (i > 1) vm.save end @@ -62,6 +62,6 @@ it '#generate returns valid data' do content = widget.generate_one_content_for_user(@group, @user) - expect(ManageIQ::Reporting::Charting.data_ok? content.contents).to eq(true) + expect(ManageIQ::Reporting::Charting.data_ok?(content.contents)).to eq(true) end end diff --git a/spec/models/miq_widget/import_from_hash_spec.rb b/spec/models/miq_widget/import_from_hash_spec.rb index 9f7ac16da12..268ff067320 100644 --- a/spec/models/miq_widget/import_from_hash_spec.rb +++ b/spec/models/miq_widget/import_from_hash_spec.rb @@ -3,17 +3,15 @@ before do @user = FactoryBot.create(:user_admin) @old_report = FactoryBot.create(:miq_report, - :name => "Test Report", - :rpt_type => "Custom", - :tz => "Eastern Time (US & Canada)", - :col_order => ["name", "boot_time", "disks_aligned"], - :cols => ["name", "boot_time", "disks_aligned"] - ) + :name => "Test Report", + :rpt_type => "Custom", + :tz => "Eastern Time (US & Canada)", + :col_order => ["name", "boot_time", "disks_aligned"], + :cols => ["name", "boot_time", "disks_aligned"]) @old_widget = FactoryBot.create(:miq_widget, - :title => "Test Widget", - :visibility => {:roles => ["_ALL_"]}, - :resource => @old_report - ) + :title => "Test Widget", + :visibility => {:roles => ["_ALL_"]}, + :resource => @old_report) widget_string = MiqWidget.export_to_yaml([@old_widget.id], MiqWidget) @new_widget = YAML.load(widget_string).first["MiqWidget"] diff --git a/spec/models/miq_widget_set_spec.rb b/spec/models/miq_widget_set_spec.rb index 7bb7516c9ad..573818acab7 100644 --- a/spec/models/miq_widget_set_spec.rb +++ b/spec/models/miq_widget_set_spec.rb @@ -97,7 +97,7 @@ end it "works with HashWithIndifferentAccess set_data" do - widget_set = MiqWidgetSet.create(:set_data => HashWithIndifferentAccess.new({:col1 => []})) + widget_set = MiqWidgetSet.create(:set_data => ActiveSupport::HashWithIndifferentAccess.new({:col1 => []})) expect(widget_set.errors[:set_data]).to include("One widget must be selected(set_data)") end diff --git a/spec/models/miq_widget_spec.rb b/spec/models/miq_widget_spec.rb index bebecdd4680..3f436eea91a 100644 --- a/spec/models/miq_widget_spec.rb +++ b/spec/models/miq_widget_spec.rb @@ -203,11 +203,10 @@ def add_dashboard_for_user(db_name, userid, group) context "#contents_for_user" do it "returns user owned widget contents in UTC timezone if user's timezone not specified" do content = 
FactoryBot.create(:miq_widget_content, - :miq_widget => @widget_report_vendor_and_guest_os, - :user_id => @user1.id, - :miq_group_id => @user1.current_group_id, - :timezone => "UTC", - ) + :miq_widget => @widget_report_vendor_and_guest_os, + :user_id => @user1.id, + :miq_group_id => @user1.current_group_id, + :timezone => "UTC") expect(@widget_report_vendor_and_guest_os.contents_for_user(@user1)).to eq(content) end @@ -235,16 +234,14 @@ def add_dashboard_for_user(db_name, userid, group) it "both user and miq_group owned" do FactoryBot.create(:miq_widget_content, - :miq_widget => @widget_report_vendor_and_guest_os, - :miq_group_id => @group1.id, - :timezone => "Eastern Time (US & Canada)" - ) + :miq_widget => @widget_report_vendor_and_guest_os, + :miq_group_id => @group1.id, + :timezone => "Eastern Time (US & Canada)") content2 = FactoryBot.create(:miq_widget_content, - :miq_widget => @widget_report_vendor_and_guest_os, - :miq_group_id => @group1.id, - :user_id => @user1.id, - :timezone => "UTC" - ) + :miq_widget => @widget_report_vendor_and_guest_os, + :miq_group_id => @group1.id, + :user_id => @user1.id, + :timezone => "UTC") expect(@widget_report_vendor_and_guest_os.contents_for_user(@user1)).to eq(content2) end end @@ -344,8 +341,7 @@ def add_dashboard_for_user(db_name, userid, group) :zone => nil, :class_name => @widget.class.name, :instance_id => @widget.id, - :msg_timeout => 3600 - } + :msg_timeout => 3600} end it "skips task creation and records warn message if MiqTask for generating widget content exists and not finished" do @@ -514,7 +510,7 @@ def add_dashboard_for_user(db_name, userid, group) expect(task.pct_complete).to eq(100) @widget.visibility[:roles] = "_ALL_" - new_user = FactoryBot.create(:user, :userid => "test task", :role => "random") + new_user = FactoryBot.create(:user, :userid => "test task", :role => "random") @widget.create_initial_content_for_user(new_user) q = MiqQueue.first @@ -537,7 +533,7 @@ def add_dashboard_for_user(db_name, userid, group) end it "with multiple timezones in one group" do - user_est = FactoryBot.create(:user, :userid => 'user_est', :miq_groups => [@group2], :settings => {:display => {:timezone => "Eastern Time (US & Canada)"}}) + user_est = FactoryBot.create(:user, :userid => 'user_est', :miq_groups => [@group2], :settings => {:display => {:timezone => "Eastern Time (US & Canada)"}}) expect(user_est.get_timezone).to eq("Eastern Time (US & Canada)") ws = FactoryBot.create(:miq_widget_set, :name => "default", :userid => "user_est", :owner => @group2, :widget_id => @widget.id) @@ -551,7 +547,7 @@ def add_dashboard_for_user(db_name, userid, group) end it "with report_sync" do - user_est = FactoryBot.create(:user, :userid => 'user_est', :miq_groups => [@group2], :settings => {:display => {:timezone => "Eastern Time (US & Canada)"}}) + user_est = FactoryBot.create(:user, :userid => 'user_est', :miq_groups => [@group2], :settings => {:display => {:timezone => "Eastern Time (US & Canada)"}}) expect(user_est.get_timezone).to eq("Eastern Time (US & Canada)") ws = FactoryBot.create(:miq_widget_set, :name => "default", :userid => "user_est", :owner => @group2, :widget_id => @widget.id) @@ -721,11 +717,11 @@ def add_dashboard_for_user(db_name, userid, group) @role = FactoryBot.create(:miq_user_role) @group = FactoryBot.create(:miq_group, :miq_user_role => @role) @user1 = FactoryBot.create(:user, - :settings => {:display => {:timezone => "Eastern Time (US & Canada)"}}, - :miq_groups => [@group]) + :settings => {:display => {:timezone => "Eastern Time (US & 
Canada)"}}, + :miq_groups => [@group]) @user2 = FactoryBot.create(:user, - :settings => {:display => {:timezone => "Pacific Time (US & Canada)"}}, - :miq_groups => [@group]) + :settings => {:display => {:timezone => "Pacific Time (US & Canada)"}}, + :miq_groups => [@group]) @ws1 = FactoryBot.create(:miq_widget_set, :name => "HOME", :userid => @user1.userid, @@ -758,7 +754,7 @@ def add_dashboard_for_user(db_name, userid, group) end it "contents created for one timezone per group with timezone_matters = false" do - widget.options = {:timezone_matters => false } + widget.options = {:timezone_matters => false} widget.queue_generate_content MiqQueue.first.deliver expect(MiqWidgetContent.count).to eq(1) @@ -826,12 +822,11 @@ def add_dashboard_for_user(db_name, userid, group) @winos_pruduct_name = 'Windows 7 Enterprise' 7.times do |i| vm = FactoryBot.build(:vm_vmware, - :name => "vm_win_#{i}", - :vendor => "vmware", - :operating_system => FactoryBot.create(:operating_system, - :product_name => @winos_pruduct_name, - :name => 'my_pc'), - ) + :name => "vm_win_#{i}", + :vendor => "vmware", + :operating_system => FactoryBot.create(:operating_system, + :product_name => @winos_pruduct_name, + :name => 'my_pc')) vm.miq_group_id = @group2.id vm.save end @@ -839,12 +834,11 @@ def add_dashboard_for_user(db_name, userid, group) @rhos_product_name = 'Red Hat Enterprise Linux 6 (64-bit)' 3.times do |i| vm = FactoryBot.build(:vm_redhat, - :name => "vm_rh_#{i}", - :vendor => "redhat", - :operating_system => FactoryBot.create(:operating_system, - :product_name => @rhos_product_name, - :name => 'my_linux'), - ) + :name => "vm_rh_#{i}", + :vendor => "redhat", + :operating_system => FactoryBot.create(:operating_system, + :product_name => @rhos_product_name, + :name => 'my_linux')) vm.miq_group_id = @group.id vm.save end diff --git a/spec/models/miq_worker/container_common_spec.rb b/spec/models/miq_worker/container_common_spec.rb index de4e4cba6f5..b0f91691270 100644 --- a/spec/models/miq_worker/container_common_spec.rb +++ b/spec/models/miq_worker/container_common_spec.rb @@ -19,7 +19,7 @@ def deployment_name_for(name) :template => { :metadata => {:name => "test", :labels => {:name => "test", :app => "manageiq"}}, :spec => { - :containers => [{ + :containers => [{ :name => "test", :env => [] }] @@ -33,7 +33,7 @@ def deployment_name_for(name) worker = FactoryBot.create(:miq_generic_worker) worker.configure_worker_deployment(test_deployment) - expect(test_deployment.dig(:spec, :template, :spec, :nodeSelector)).to eq("manageiq/zone-#{MiqServer.my_zone}".gsub(" ", "-") => "true") + expect(test_deployment.dig(:spec, :template, :spec, :nodeSelector)).to eq("manageiq/zone-#{MiqServer.my_zone}".tr(" ", "-") => "true") end it "doesn't add a node selector for the default zone" do @@ -277,7 +277,7 @@ def deployment_name_for(name) :requests => { :memory => "250Mi", }, - :limits => { + :limits => { :memory => "600Mi", } } diff --git a/spec/models/miq_worker_spec.rb b/spec/models/miq_worker_spec.rb index 84eb242ab59..2c306fb7071 100644 --- a/spec/models/miq_worker_spec.rb +++ b/spec/models/miq_worker_spec.rb @@ -29,7 +29,7 @@ def check_has_required_role(worker_role_names, expected_result) end before do - active_roles = %w(foo bar).map { |rn| FactoryBot.create(:server_role, :name => rn) } + active_roles = %w[foo bar].map { |rn| FactoryBot.create(:server_role, :name => rn) } @server = EvmSpecHelper.local_miq_server(:active_roles => active_roles) end @@ -77,7 +77,7 @@ def check_has_required_role(worker_role_names, expected_result) 
it "that is a subset of server roles" do check_has_required_role(["foo"], true) - check_has_required_role(%w(bah foo), true) + check_has_required_role(%w[bah foo], true) end it "that is not a subset of server roles" do @@ -386,13 +386,13 @@ def check_has_required_role(worker_role_names, expected_result) it "include parent entries" do expect(capu_worker.config_settings_path).to eq( - %i(workers worker_base queue_worker_base ems_metrics_collector_worker ems_metrics_collector_worker_amazon) + %i[workers worker_base queue_worker_base ems_metrics_collector_worker ems_metrics_collector_worker_amazon] ) end it "works for high level entries" do expect(MiqEmsMetricsCollectorWorker.config_settings_path).to eq( - %i(workers worker_base queue_worker_base ems_metrics_collector_worker) + %i[workers worker_base queue_worker_base ems_metrics_collector_worker] ) end end @@ -452,21 +452,21 @@ def check_has_required_role(worker_role_names, expected_result) end it "with ENV['APPLIANCE']" do - begin - allow(MiqWorker).to receive(:nice_increment).and_return("10") - allow(@worker).to receive(:worker_options).and_return(:ems_id => 1234, :guid => @worker.guid) - old_env = ENV.delete('APPLIANCE') - ENV['APPLIANCE'] = 'true' - cmd = @worker.command_line - expect(cmd).to start_with("nice -n 10") - expect(cmd).to include("--ems-id 1234") - expect(cmd).to include("--guid #{@worker.guid}") - expect(cmd).to include("--heartbeat") - expect(cmd).to end_with("MiqWorker") - ensure - # ENV['x'] = nil deletes the key because ENV accepts only string values - ENV['APPLIANCE'] = old_env - end + + allow(MiqWorker).to receive(:nice_increment).and_return("10") + allow(@worker).to receive(:worker_options).and_return(:ems_id => 1234, :guid => @worker.guid) + old_env = ENV.delete('APPLIANCE') + ENV['APPLIANCE'] = 'true' + cmd = @worker.command_line + expect(cmd).to start_with("nice -n 10") + expect(cmd).to include("--ems-id 1234") + expect(cmd).to include("--guid #{@worker.guid}") + expect(cmd).to include("--heartbeat") + expect(cmd).to end_with("MiqWorker") + ensure + # ENV['x'] = nil deletes the key because ENV accepts only string values + ENV['APPLIANCE'] = old_env + end end diff --git a/spec/models/mixins/ansible_playbook_mixin_spec.rb b/spec/models/mixins/ansible_playbook_mixin_spec.rb index b34ee057d08..28a6398a5ab 100644 --- a/spec/models/mixins/ansible_playbook_mixin_spec.rb +++ b/spec/models/mixins/ansible_playbook_mixin_spec.rb @@ -1,7 +1,7 @@ RSpec.describe AnsiblePlaybookMixin do let(:test_instance) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" self.table_name = "services" include AnsiblePlaybookMixin end.new diff --git a/spec/models/mixins/assignment_mixin_spec.rb b/spec/models/mixins/assignment_mixin_spec.rb index 231380903c5..31aeff7f903 100644 --- a/spec/models/mixins/assignment_mixin_spec.rb +++ b/spec/models/mixins/assignment_mixin_spec.rb @@ -61,7 +61,7 @@ expect(test_class.assignments).to eq( "vm/tag/managed/environment/test" => [alert_set], - "vm/tag/managed/environment/staging" => [alert_set2], + "vm/tag/managed/environment/staging" => [alert_set2] ) end diff --git a/spec/models/mixins/authentication_mixin_spec.rb b/spec/models/mixins/authentication_mixin_spec.rb index b067a343ee5..71578d2643c 100644 --- a/spec/models/mixins/authentication_mixin_spec.rb +++ b/spec/models/mixins/authentication_mixin_spec.rb @@ -9,7 +9,7 @@ let(:test_class_instance) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" self.table_name 
= "vms" include AuthenticationMixin end.new @@ -427,7 +427,7 @@ def self.name; "TestClass"; end end it "Host#authentication_check_types_queue with [:ssh, :default], :remember_host => true is passed down to verify_credentials" do - types = %i(ssh default) + types = %i[ssh default] options = {:remember_host => true} @host.authentication_check_types_queue(types, options) conditions = {:class_name => @host.class.base_class.name, :instance_id => @host.id, :method_name => 'authentication_check_types', :role => @host.authentication_check_role} diff --git a/spec/models/mixins/custom_actions_mixin_spec.rb b/spec/models/mixins/custom_actions_mixin_spec.rb index 839e8e9f1cd..ab9350da357 100644 --- a/spec/models/mixins/custom_actions_mixin_spec.rb +++ b/spec/models/mixins/custom_actions_mixin_spec.rb @@ -1,7 +1,7 @@ RSpec.describe CustomActionsMixin do let(:test_class) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" self.table_name = "vms" include CustomActionsMixin end diff --git a/spec/models/mixins/custom_attribute_mixin_spec.rb b/spec/models/mixins/custom_attribute_mixin_spec.rb index 51b4c90726c..ee7cfe627cf 100644 --- a/spec/models/mixins/custom_attribute_mixin_spec.rb +++ b/spec/models/mixins/custom_attribute_mixin_spec.rb @@ -2,7 +2,7 @@ let(:supported_factories) { [:vm_redhat, :host] } let(:test_class) do Class.new(ActiveRecord::Base) do - def self.name; "TestClass"; end + def self.name = "TestClass" self.table_name = "vms" include CustomAttributeMixin end @@ -74,21 +74,21 @@ def self.name; "TestClass"; end expect(object.miq_custom_keys).to eq([]) - key = "foo" + key = "foo" FactoryBot.create(:miq_custom_attribute, - :resource_type => object.class.base_class.name, - :resource_id => object.id, - :name => key, - :value => "bar") + :resource_type => object.class.base_class.name, + :resource_id => object.id, + :name => key, + :value => "bar") expect(object.reload.miq_custom_keys).to eq([key]) key2 = "foobar" FactoryBot.create(:miq_custom_attribute, - :resource_type => object.class.base_class.name, - :resource_id => object.id, - :name => key2, - :value => "bar") + :resource_type => object.class.base_class.name, + :resource_id => object.id, + :name => key2, + :value => "bar") expect(object.reload.miq_custom_keys).to match_array([key, key2]) end end @@ -102,7 +102,7 @@ def self.name; "TestClass"; end it "key with a letter followed by a number" do test_class.add_custom_attribute("fun4all") - expect(test_class.new).to respond_to(:"fun4all") + expect(test_class.new).to respond_to(:fun4all) expect(test_class.new).to respond_to(:"fun4all=") end @@ -139,14 +139,16 @@ def self.name; "TestClass"; end :resource_type => object.class.base_class.name, :resource_id => object.id, :source => source, - :name => key).first).to be_nil + :name => key + ).first).to be_nil object.miq_custom_set(key, "") expect(CustomAttribute.where( :resource_type => object.class.base_class.name, :resource_id => object.id, :source => source, - :name => key).first).to be_nil + :name => key + ).first).to be_nil object.miq_custom_set(key, value) expect(CustomAttribute.where( @@ -154,14 +156,16 @@ def self.name; "TestClass"; end :resource_id => object.id, :source => source, :name => key, - :value => value).first).not_to be_nil + :value => value + ).first).not_to be_nil object.miq_custom_set(key, "") expect(CustomAttribute.where( :resource_type => object.class.base_class.name, :resource_id => object.id, :source => source, - :name => key).first).to be_nil + :name => key + ).first).to be_nil 
end end @@ -175,10 +179,10 @@ def self.name; "TestClass"; end expect(object.miq_custom_get(key)).to be_nil FactoryBot.create(:miq_custom_attribute, - :resource_type => object.class.base_class.name, - :resource_id => object.id, - :name => key, - :value => value) + :resource_type => object.class.base_class.name, + :resource_id => object.id, + :name => key, + :value => value) expect(object.reload.miq_custom_get(key)).to eq(value) end diff --git a/spec/models/mixins/event_mixin_spec.rb b/spec/models/mixins/event_mixin_spec.rb index 95ac5b2bf50..9b29584877f 100644 --- a/spec/models/mixins/event_mixin_spec.rb +++ b/spec/models/mixins/event_mixin_spec.rb @@ -47,16 +47,16 @@ def event_where_clause(assoc) # 3) Host#event_where_clause does an OR: host_id OR dest_host_id... right now we only do host_id in ems_event_filter # 4) VmOrTemplate#event_where_clause does an OR: vm_or_template_id OR dest_vm_or_template_id, we only do vm_or_template_id in ems_event_filter %w[ - AvailabilityZone availability_zone_id - EmsCluster ems_cluster_id + AvailabilityZone availability_zone_id + EmsCluster ems_cluster_id ExtManagementSystem ems_id - Host host_id - PhysicalChassis physical_chassis_id - PhysicalServer physical_server_id - PhysicalStorage physical_storage_id - PhysicalSwitch physical_switch_id - VmOrTemplate vm_or_template_id - Vm vm_or_template_id + Host host_id + PhysicalChassis physical_chassis_id + PhysicalServer physical_server_id + PhysicalStorage physical_storage_id + PhysicalSwitch physical_switch_id + VmOrTemplate vm_or_template_id + Vm vm_or_template_id ].each_slice(2) do |klass, column| it "#{klass} uses #{column} and target_id and target_type" do obj = FactoryBot.create(klass.tableize.singularize) diff --git a/spec/models/mixins/external_url_spec.rb b/spec/models/mixins/external_url_spec.rb index c6eb9ee36a0..479bd13ff77 100644 --- a/spec/models/mixins/external_url_spec.rb +++ b/spec/models/mixins/external_url_spec.rb @@ -1,7 +1,7 @@ RSpec.describe ExternalUrlMixin do let(:test_class) do Class.new(ActiveRecord::Base) do - def self.name; 'TestClass'; end + def self.name = 'TestClass' self.table_name = 'vms' include ExternalUrlMixin end @@ -22,7 +22,7 @@ def self.name; 'TestClass'; end :resource_type => 'TestClass', :resource_id => test_instance.id ).first.attributes).to include( - 'url' => 'https://www.other.example.com', + 'url' => 'https://www.other.example.com' ) end @@ -36,7 +36,7 @@ def self.name; 'TestClass'; end :resource_type => 'TestClass', :resource_id => test_instance.id ).first.attributes).to include( - 'url' => 'https://www.example.com', + 'url' => 'https://www.example.com' ) end end diff --git a/spec/models/mixins/inter_region_api_method_relay_spec.rb b/spec/models/mixins/inter_region_api_method_relay_spec.rb index 34843b2185a..290c32c69e6 100644 --- a/spec/models/mixins/inter_region_api_method_relay_spec.rb +++ b/spec/models/mixins/inter_region_api_method_relay_spec.rb @@ -200,9 +200,9 @@ def expect_api_call(expected_action, expected_args = nil) end it "raises if the server doesn't have an ip address" do - expect { + expect do described_class.api_client_connection_for_region(region_number) - }.to raise_error("Failed to establish API connection to region #{region_number}") + end.to raise_error("Failed to establish API connection to region #{region_number}") end end @@ -231,9 +231,9 @@ def expect_api_call(expected_action, expected_args = nil) it "raises when the api result is a failure" do expect(api_collection).to receive(action).and_return(api_failure_result) - expect { + expect do 
described_class.exec_api_call(region, collection_name, action) - }.to raise_error(described_class::InterRegionApiMethodRelayError) + end.to raise_error(described_class::InterRegionApiMethodRelayError) end it "accepts Hash object as api result" do @@ -296,16 +296,16 @@ def expect_api_call(expected_action, expected_args = nil) expect(api_config).to receive(:klass).with(collection_name).and_return(klass) expect(klass).to receive(:find_by).twice.with(:id => 10).and_return(nil, nil) - expect { + expect do described_class.instance_for_resource(resource) - }.to raise_error(InterRegionApiMethodRelay::InterRegionApiMethodRelayError) + end.to raise_error(InterRegionApiMethodRelay::InterRegionApiMethodRelayError) end end context "with an invalid class definition" do describe "#api_relay_method" do it "raises a NotImplementedError if the class does not have an api collection" do - expect { + expect do Class.new do extend InterRegionApiMethodRelay @@ -318,13 +318,13 @@ def test_instance_method end api_relay_method :test_instance_method end - }.to raise_error(NotImplementedError) + end.to raise_error(NotImplementedError) end end describe "#api_relay_class_method" do it "raises a NotImplementedError if the class does not have an api collection" do - expect { + expect do Class.new do extend InterRegionApiMethodRelay @@ -338,13 +338,13 @@ def self.test_instance_method api_relay_class_method :test_instance_method do end end - }.to raise_error(NotImplementedError) + end.to raise_error(NotImplementedError) end it "raises a ArgumentError if no block is defined" do allow(Api::CollectionConfig).to receive(:new).and_return(api_config) allow(api_config).to receive(:name_for_klass).and_return(collection_name) - expect { + expect do Class.new do extend InterRegionApiMethodRelay @@ -357,7 +357,7 @@ def self.test_instance_method end api_relay_class_method :test_instance_method end - }.to raise_error(ArgumentError) + end.to raise_error(ArgumentError) end end end diff --git a/spec/models/mixins/miq_provision_mixin_spec.rb b/spec/models/mixins/miq_provision_mixin_spec.rb index 316c1645b07..894c8e666ad 100644 --- a/spec/models/mixins/miq_provision_mixin_spec.rb +++ b/spec/models/mixins/miq_provision_mixin_spec.rb @@ -51,7 +51,7 @@ def get_option(name) describe ".current_group" do before do owner.update(:current_group => my_group, - :miq_groups => [my_group, my_alt_group]) + :miq_groups => [my_group, my_alt_group]) end let(:my_group) { FactoryBot.create(:miq_group) } let(:my_alt_group) { FactoryBot.create(:miq_group, :description => 'yay') } diff --git a/spec/models/mixins/miq_request_mixin_spec.rb b/spec/models/mixins/miq_request_mixin_spec.rb index 35e894f49de..a3b81d4de78 100644 --- a/spec/models/mixins/miq_request_mixin_spec.rb +++ b/spec/models/mixins/miq_request_mixin_spec.rb @@ -2,6 +2,7 @@ let(:test_class) do Class.new do attr_accessor :options, :userid + include MiqRequestMixin def initialize diff --git a/spec/models/mixins/new_with_type_sti_mixin_spec.rb b/spec/models/mixins/new_with_type_sti_mixin_spec.rb index 530021f2ddc..2b8efb91b0d 100644 --- a/spec/models/mixins/new_with_type_sti_mixin_spec.rb +++ b/spec/models/mixins/new_with_type_sti_mixin_spec.rb @@ -12,7 +12,7 @@ expect(Host.new(:type => "ManageIQ::Providers::Redhat::InfraManager::Host").class).to eq(ManageIQ::Providers::Redhat::InfraManager::Host) expect(Host.new(:type => "ManageIQ::Providers::Vmware::InfraManager::Host").class).to eq(ManageIQ::Providers::Vmware::InfraManager::Host) expect(Host.new(:type => 
"ManageIQ::Providers::Vmware::InfraManager::HostEsx").class).to eq(ManageIQ::Providers::Vmware::InfraManager::HostEsx) - expect(ManageIQ::Providers::Vmware::InfraManager::Host.new(:type => "ManageIQ::Providers::Vmware::InfraManager::HostEsx").class).to eq(ManageIQ::Providers::Vmware::InfraManager::HostEsx) + expect(ManageIQ::Providers::Vmware::InfraManager::Host.new(:type => "ManageIQ::Providers::Vmware::InfraManager::HostEsx").class).to eq(ManageIQ::Providers::Vmware::InfraManager::HostEsx) expect(Host.new("type" => "Host").class).to eq(Host) expect(Host.new("type" => "ManageIQ::Providers::Redhat::InfraManager::Host").class).to eq(ManageIQ::Providers::Redhat::InfraManager::Host) diff --git a/spec/models/mixins/process_tasks_mixin_spec.rb b/spec/models/mixins/process_tasks_mixin_spec.rb index 4b6e9c0ff33..a8b3d16fe22 100644 --- a/spec/models/mixins/process_tasks_mixin_spec.rb +++ b/spec/models/mixins/process_tasks_mixin_spec.rb @@ -94,7 +94,7 @@ def test_method end context "when the server has an ip address" do - let(:api_connection) { double("ManageIQ::API::Client connection") } + let(:api_connection) { double("ManageIQ::API::Client connection") } before do server.ipaddress = "192.0.2.1" diff --git a/spec/models/mixins/relationship_mixin_spec.rb b/spec/models/mixins/relationship_mixin_spec.rb index 4b4fb057776..ef333cc0ab1 100644 --- a/spec/models/mixins/relationship_mixin_spec.rb +++ b/spec/models/mixins/relationship_mixin_spec.rb @@ -51,7 +51,7 @@ end end - # NOTE for understanding the next 4 contexts: + # NOTE: for understanding the next 4 contexts: # Objects (VMs, Hosts, etc) have associated tree nodes entries in the # relationships table which are linked. If an object must reside in # multiple parts of the tree via having multiple parents, it will need more @@ -66,8 +66,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 1, [@parent], [] - ) + child, 1, [@parent], []) end it "with a root object will link a new tree node for the parent to the existing tree node for the child" do @@ -76,8 +75,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 1, [@parent], [vms[1], vms[2]] - ) + child, 1, [@parent], [vms[1], vms[2]]) end it "with an inner object will link a new tree node for the parent to a second new tree node for the child" do @@ -86,8 +84,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 2, [vms[0], @parent], [vms[3], vms[4]] - ) + child, 2, [vms[0], @parent], [vms[3], vms[4]]) end it "with a leaf object will link a new tree node for the parent to a second new tree node for the child" do @@ -96,8 +93,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 2, [vms[1], @parent], [] - ) + child, 2, [vms[1], @parent], []) end end @@ -110,8 +106,7 @@ assert_parent_child_structure(test_rel_type, parent, 1, [], [@child], - @child, 1, [parent], [] - ) + @child, 1, [parent], []) end it "with a root object will link the existing tree node for the parent to a new tree node for the child" do @@ -120,8 +115,7 @@ assert_parent_child_structure(test_rel_type, parent, 1, [], [vms[1], vms[2], @child], - @child, 1, [parent], [] - ) + @child, 1, [parent], []) end it "with an inner object will link the existing tree node for the parent to a new tree node for the child" do @@ -130,8 +124,7 @@ assert_parent_child_structure(test_rel_type, parent, 1, [vms[0]], [vms[3], vms[4], @child], - @child, 1, [parent], [] - ) + @child, 1, [parent], []) end it "with a leaf object will link the 
existing tree node for the parent to a new tree node for the child" do @@ -140,8 +133,7 @@ assert_parent_child_structure(test_rel_type, parent, 1, [vms[1]], [@child], - @child, 1, [parent], [] - ) + @child, 1, [parent], []) end end @@ -154,8 +146,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 1, [@parent], [] - ) + child, 1, [@parent], []) end it "on a root object will link a new tree node for the parent to the existing tree node for the child and be the only parent for the child" do @@ -164,8 +155,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 1, [@parent], [vms[1], vms[2]] - ) + child, 1, [@parent], [vms[1], vms[2]]) end it "on an inner object will link a new tree node for the parent to the existing tree node for the child and be the only parent for the child" do @@ -174,8 +164,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 1, [@parent], [vms[3], vms[4]] - ) + child, 1, [@parent], [vms[3], vms[4]]) end it "on a leaf object will link a new tree node for the parent to the existing tree node for the child and be the only parent for the child" do @@ -184,8 +173,7 @@ assert_parent_child_structure(test_rel_type, @parent, 1, [], [child], - child, 1, [@parent], [] - ) + child, 1, [@parent], []) end end @@ -197,8 +185,7 @@ assert_parent_child_structure(test_rel_type, parent, 1, [vms[0]], [child, vms[3], vms[4]], - child, 1, [parent], [vms[5], vms[6], vms[7]] - ) + child, 1, [parent], [vms[5], vms[6], vms[7]]) end end @@ -363,7 +350,7 @@ def self.name it "#parent=" do service6 = ancestry_class.create service6.update(:parent => service1) - expect(service2.with_relationship_type("custom") { service2.parent=(service6) }).to eq(service6) + expect(service2.with_relationship_type("custom") { service2.parent = (service6) }).to eq(service6) service2.save! 
expect(service6.with_relationship_type("custom") { service6.child_ids }).to eq([service2.id]) end @@ -1052,8 +1039,8 @@ def build_relationship_tree(tree, rel_type = test_rel_type, base_factory = :vm_v # can map to the resource to return all the resources created rels = Hash.new do |hash, key| hash[key] = FactoryBot.create(:relationship, - :resource => FactoryBot.create(base_factory), - :relationship => rel_type) + :resource => FactoryBot.create(base_factory), + :relationship => rel_type) end recurse_relationship_tree(tree) do |parent, child| @@ -1076,17 +1063,17 @@ def assert_parent_child_structure(rel_type, parent, p_rels_count, p_parents, p_c parent.with_relationship_type(rel_type) do expect(parent.relationships.length).to eq(p_rels_count) expect(parent.parents.length).to eq(p_parents.length) - expect(parent.parents).to match_array(p_parents) + expect(parent.parents).to match_array(p_parents) expect(parent.children.length).to eq(p_children.length) - expect(parent.children).to match_array(p_children) + expect(parent.children).to match_array(p_children) end child.with_relationship_type(rel_type) do expect(child.relationships.length).to eq(c_rels_count) expect(child.parents.length).to eq(c_parents.length) - expect(child.parents).to match_array(c_parents) + expect(child.parents).to match_array(c_parents) expect(child.children.length).to eq(c_children.length) - expect(child.children).to match_array(c_children) + expect(child.children).to match_array(c_children) end end end diff --git a/spec/models/mixins/supports_feature_mixin_spec.rb b/spec/models/mixins/supports_feature_mixin_spec.rb index 723a8a41af8..3c86f182342 100644 --- a/spec/models/mixins/supports_feature_mixin_spec.rb +++ b/spec/models/mixins/supports_feature_mixin_spec.rb @@ -231,7 +231,7 @@ def initialize(values = {}) end it "gives reason when implicit dynamic attrs" do - test_class.supports(:implicit_feature) { "dynamically unsupported" unless attr1 } + test_class.supports(:implicit_feature) { "dynamically unsupported" unless attr1 } test_inst = test_class.new(:attr1 => false) expect(test_inst.unsupported_reason(:implicit_feature)).to eq("dynamically unsupported") diff --git a/spec/models/notification_spec.rb b/spec/models/notification_spec.rb index 0560bca4985..31dae4587b1 100644 --- a/spec/models/notification_spec.rb +++ b/spec/models/notification_spec.rb @@ -111,15 +111,14 @@ :created_at => notification.created_at, :text => notification.notification_type.message, :bindings => a_hash_including(:initiator => a_hash_including(:text => user.name), - :extra => a_hash_including(:text => 'information') - ) + :extra => a_hash_including(:text => 'information')) ) end context 'link_to is set' do let(:notification) do FactoryBot.create(:notification, :initiator => user, - :notification_type => FactoryBot.create(:notification_type, :link_to => 'initiator')) + :notification_type => FactoryBot.create(:notification_type, :link_to => 'initiator')) end it 'contains the link to the initiator' do diff --git a/spec/models/openscap_result_spec.rb b/spec/models/openscap_result_spec.rb index 59c2a408579..af487d1bb64 100644 --- a/spec/models/openscap_result_spec.rb +++ b/spec/models/openscap_result_spec.rb @@ -18,7 +18,8 @@ it "parses results" do rule_results = [ [1, double(:result => 'result_1')], - [2, double(:result => 'result_2')]] + [2, double(:result => 'result_2')] + ] benchmark_items = {1 => double(:severity => 'Bad', :idents => [], :title => "Bad"), 2 => double(:severity => 'Not That Bad', :idents => [], :title => "Not That Bad")} @@ -29,13 
+30,15 @@ :name => '1', :result => "result_1", :title => "Bad", - :severity => "Bad") + :severity => "Bad" + ) expect(openscap_result.openscap_rule_results[1]).to have_attributes( :openscap_result_id => 17, :name => '2', :result => "result_2", :title => "Not That Bad", - :severity => "Not That Bad") + :severity => "Not That Bad" + ) end end end diff --git a/spec/models/orchestration_stack/retirement_management_spec.rb b/spec/models/orchestration_stack/retirement_management_spec.rb index 47641e5d064..7b328829afd 100644 --- a/spec/models/orchestration_stack/retirement_management_spec.rb +++ b/spec/models/orchestration_stack/retirement_management_spec.rb @@ -132,7 +132,7 @@ @stack.mark_retired @stack.reload expect(@stack.retired).to be_truthy - expect(@stack.retires_on).to be_between(Time.zone.now - 1.hour, Time.zone.now + 1.second) + expect(@stack.retires_on).to be_between(1.hour.ago, 1.second.from_now) expect(@stack.retirement_state).to eq("retired") end diff --git a/spec/models/orchestration_stack_retire_task_spec.rb b/spec/models/orchestration_stack_retire_task_spec.rb index e70a066809b..751b18556d1 100644 --- a/spec/models/orchestration_stack_retire_task_spec.rb +++ b/spec/models/orchestration_stack_retire_task_spec.rb @@ -2,7 +2,7 @@ let(:user) { FactoryBot.create(:user_with_group) } let(:orchestration_stack) { FactoryBot.create(:orchestration_stack) } let(:miq_request) { FactoryBot.create(:orchestration_stack_retire_request, :requester => user, :source => orchestration_stack) } - let(:orchestration_stack_retire_task) { FactoryBot.create(:orchestration_stack_retire_task, :source => orchestration_stack, :miq_request => miq_request, :options => {:src_ids => [orchestration_stack.id] }) } + let(:orchestration_stack_retire_task) { FactoryBot.create(:orchestration_stack_retire_task, :source => orchestration_stack, :miq_request => miq_request, :options => {:src_ids => [orchestration_stack.id]}) } let(:approver) { FactoryBot.create(:user_miq_request_approver) } it "should initialize properly" do diff --git a/spec/models/orchestration_template_spec.rb b/spec/models/orchestration_template_spec.rb index 3d78434c16b..f9b0aa09f89 100644 --- a/spec/models/orchestration_template_spec.rb +++ b/spec/models/orchestration_template_spec.rb @@ -116,8 +116,8 @@ before do allow(OrchestrationTemplate).to receive_messages(:eligible_manager_types => - [ManageIQ::Providers::Amazon::CloudManager, - ManageIQ::Providers::Openstack::CloudManager]) + [ManageIQ::Providers::Amazon::CloudManager, + ManageIQ::Providers::Openstack::CloudManager]) @template = FactoryBot.create(:orchestration_template) @aws = FactoryBot.create(:ems_amazon, :tenant => other_tenant) @openstack = FactoryBot.create(:ems_openstack, :tenant => tenant) diff --git a/spec/models/partition_alignment_spec.rb b/spec/models/partition_alignment_spec.rb index ff6b04716f8..56a03870261 100644 --- a/spec/models/partition_alignment_spec.rb +++ b/spec/models/partition_alignment_spec.rb @@ -4,85 +4,73 @@ aligned = 64.kilobytes not_aligned = 1 - @vm1 = FactoryBot.create(:vm_vmware, :name => "VM 1 Aligned", :hardware => FactoryBot.create(:hardware)) + @vm1 = FactoryBot.create(:vm_vmware, :name => "VM 1 Aligned", :hardware => FactoryBot.create(:hardware)) FactoryBot.create(:disk, - :device_type => "floppy", - :hardware_id => @vm1.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "floppy", + :hardware_id => @vm1.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => 
not_aligned)]) FactoryBot.create(:disk, - :device_type => "cdrom-raw", - :hardware_id => @vm1.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "cdrom-raw", + :hardware_id => @vm1.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)]) FactoryBot.create(:disk, - :device_type => "disk", - :disk_type => "rdm-raw", - :hardware_id => @vm1.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "disk", + :disk_type => "rdm-raw", + :hardware_id => @vm1.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)]) FactoryBot.create(:disk, - :device_type => "disk", - :hardware_id => @vm1.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => aligned)] - ) + :device_type => "disk", + :hardware_id => @vm1.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => aligned)]) FactoryBot.create(:disk, - :device_type => "disk", - :hardware_id => @vm1.hardware.id, - :partitions => [ - FactoryBot.create(:partition, :start_address => aligned), - FactoryBot.create(:partition, :start_address => aligned) - ] - ) + :device_type => "disk", + :hardware_id => @vm1.hardware.id, + :partitions => [ + FactoryBot.create(:partition, :start_address => aligned), + FactoryBot.create(:partition, :start_address => aligned) + ]) @vm2 = FactoryBot.create(:vm_vmware, :name => "VM 2 Not Aligned", :hardware => FactoryBot.create(:hardware)) FactoryBot.create(:disk, - :device_type => "floppy", - :hardware_id => @vm2.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "floppy", + :hardware_id => @vm2.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)]) FactoryBot.create(:disk, - :device_type => "cdrom-raw", - :hardware_id => @vm2.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "cdrom-raw", + :hardware_id => @vm2.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)]) FactoryBot.create(:disk, - :device_type => "disk", - :disk_type => "rdm-raw", - :hardware_id => @vm2.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "disk", + :disk_type => "rdm-raw", + :hardware_id => @vm2.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)]) FactoryBot.create(:disk, - :device_type => "disk", - :hardware_id => @vm2.hardware.id, - :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)] - ) + :device_type => "disk", + :hardware_id => @vm2.hardware.id, + :partitions => [FactoryBot.create(:partition, :start_address => not_aligned)]) FactoryBot.create(:disk, - :device_type => "disk", - :hardware_id => @vm2.hardware.id, - :partitions => [ - FactoryBot.create(:partition, :start_address => aligned), - FactoryBot.create(:partition, :start_address => not_aligned) - ] - ) + :device_type => "disk", + :hardware_id => @vm2.hardware.id, + :partitions => [ + FactoryBot.create(:partition, :start_address => aligned), + FactoryBot.create(:partition, :start_address => not_aligned) + ]) @vm3 = FactoryBot.create(:vm_vmware, :name => "VM 3 Unknown", :hardware => FactoryBot.create(:hardware)) @vm4 = FactoryBot.create(:vm_vmware, :name => "VM 4 Unknown", :hardware => FactoryBot.create(:hardware)) 
FactoryBot.create(:disk, - :device_type => "disk", - :hardware_id => @vm4.hardware.id - ) + :device_type => "disk", + :hardware_id => @vm4.hardware.id) @vm5 = FactoryBot.create(:vm_vmware, :name => "VM 4 Unknown", :hardware => FactoryBot.create(:hardware)) FactoryBot.create(:disk, - :device_type => "disk", - :hardware_id => @vm5.hardware.id, - :partitions => [ - FactoryBot.create(:partition, :start_address => aligned), - FactoryBot.create(:partition) - ] - ) + :device_type => "disk", + :hardware_id => @vm5.hardware.id, + :partitions => [ + FactoryBot.create(:partition, :start_address => aligned), + FactoryBot.create(:partition) + ]) end it "should return True for Vm alignment method" do diff --git a/spec/models/pglogical_subscription_spec.rb b/spec/models/pglogical_subscription_spec.rb index 6ac04cd6b1c..ac62922f06d 100644 --- a/spec/models/pglogical_subscription_spec.rb +++ b/spec/models/pglogical_subscription_spec.rb @@ -247,7 +247,7 @@ :user => "root", :password => "1234" } - expect(pglogical).to receive(:create_subscription).with("region_2_subscription", dsn, ['miq'], create_slot: false).and_return(double(:check => nil)) + expect(pglogical).to receive(:create_subscription).with("region_2_subscription", dsn, ['miq'], :create_slot => false).and_return(double(:check => nil)) sub = described_class.new(:host => "test-2.example.com", :user => "root", :password => "1234") @@ -272,7 +272,7 @@ :host => "other-host.example.com", :dbname => sub.dbname, :user => sub.user, - :password => "p=as\' s\'" + :password => "p=as' s'" } expect(pglogical).to receive(:set_subscription_conninfo).with(sub.id, new_dsn) @@ -318,14 +318,14 @@ :user => "root", :password => "1234" } - expect(pglogical).to receive(:create_subscription).with("region_2_subscription", dsn2, ['miq'], create_slot: false).and_return(double(:check => nil)) + expect(pglogical).to receive(:create_subscription).with("region_2_subscription", dsn2, ['miq'], :create_slot => false).and_return(double(:check => nil)) dsn3 = { :host => "test-3.example.com", :user => "miq", :password => "1234" } - expect(pglogical).to receive(:create_subscription).with("region_3_subscription", dsn3, ['miq'], create_slot: false).and_return(double(:check => nil)) + expect(pglogical).to receive(:create_subscription).with("region_3_subscription", dsn3, ['miq'], :create_slot => false).and_return(double(:check => nil)) to_save = [] to_save << described_class.new(dsn2) @@ -350,7 +350,7 @@ :user => "miq", :password => "1234" } - expect(pglogical).to receive(:create_subscription).ordered.with("region_3_subscription", dsn3, ['miq'], create_slot: false).and_return(double(:check => nil)) + expect(pglogical).to receive(:create_subscription).ordered.with("region_3_subscription", dsn3, ['miq'], :create_slot => false).and_return(double(:check => nil)) expect(pglogical).to receive(:create_subscription).ordered.and_raise("Error two") to_save = [] @@ -363,7 +363,7 @@ end expect { described_class.save_all!(to_save) }.to raise_error("Failed to save subscription " \ - "to test-2.example.com: Error one\nFailed to save subscription to test-4.example.com: Error two") + "to test-2.example.com: Error one\nFailed to save subscription to test-4.example.com: Error two") end end diff --git a/spec/models/physical_server_firmware_update_request_spec.rb b/spec/models/physical_server_firmware_update_request_spec.rb index 20e756fecf5..68c39ce9bca 100644 --- a/spec/models/physical_server_firmware_update_request_spec.rb +++ b/spec/models/physical_server_firmware_update_request_spec.rb @@ -46,7 +46,7 @@ 
end describe '.affected_physical_servers' do - let(:attrs) { { 'options' => {:src_ids => src_ids} } } + let(:attrs) { {'options' => {:src_ids => src_ids}} } let(:server1) { FactoryBot.create(:physical_server, :ems_id => 1) } let(:server2) { FactoryBot.create(:physical_server, :ems_id => 2) } let(:server3) { FactoryBot.create(:physical_server, :ems_id => 2) } diff --git a/spec/models/physical_server_firmware_update_task/state_machine_spec.rb b/spec/models/physical_server_firmware_update_task/state_machine_spec.rb index c0e942d01e9..af97b5a6f61 100644 --- a/spec/models/physical_server_firmware_update_task/state_machine_spec.rb +++ b/spec/models/physical_server_firmware_update_task/state_machine_spec.rb @@ -2,7 +2,7 @@ let(:server) { FactoryBot.create(:physical_server) } let(:src_ids) { [server.id] } - subject { described_class.new(:options => { :src_ids => src_ids }) } + subject { described_class.new(:options => {:src_ids => src_ids}) } describe '#run_firmware_update' do context 'when ok' do diff --git a/spec/models/physical_server_spec.rb b/spec/models/physical_server_spec.rb index 3bfa8054999..413294ffd80 100644 --- a/spec/models/physical_server_spec.rb +++ b/spec/models/physical_server_spec.rb @@ -1,5 +1,5 @@ RSpec.describe PhysicalServer do - let(:attrs) { { :manufacturer => 'manu', :model => 'model' } } + let(:attrs) { {:manufacturer => 'manu', :model => 'model'} } let!(:binary1) { FactoryBot.create(:firmware_binary) } let!(:binary2) { FactoryBot.create(:firmware_binary) } let!(:target) { FactoryBot.create(:firmware_target, **attrs, :firmware_binaries => [binary1]) } diff --git a/spec/models/provider_spec.rb b/spec/models/provider_spec.rb index d751f45c4f2..6838b9b64bd 100644 --- a/spec/models/provider_spec.rb +++ b/spec/models/provider_spec.rb @@ -70,7 +70,7 @@ provider.destroy_queue expect(MiqQueue.find_by(:instance_id => provider.id)).to have_attributes( 'method_name' => 'destroy', - 'class_name' => provider.class.name, + 'class_name' => provider.class.name ) end end @@ -82,7 +82,7 @@ task = MiqTask.create( :name => "Destroying #{self.class.name} with id: #{provider.id}", :state => MiqTask::STATE_QUEUED, - :status => MiqTask::STATUS_OK, + :status => MiqTask::STATUS_OK ) provider.destroy(task.id) task.reload diff --git a/spec/models/provider_tag_mapping_spec.rb b/spec/models/provider_tag_mapping_spec.rb index 97de97d3daa..6f34382a5d8 100644 --- a/spec/models/provider_tag_mapping_spec.rb +++ b/spec/models/provider_tag_mapping_spec.rb @@ -113,7 +113,7 @@ def map_to_tags(mapper, model_name, labels_kv) expect(tags).to be_empty expect(tags2).to contain_exactly(tag2) - # Note: this test doesn't cover creation of the category, eg. you can't have + # NOTE: this test doesn't cover creation of the category, eg. you can't have # /managed/kubernetes:name vs /managed/kubernetes:naME. end @@ -129,8 +129,8 @@ def map_to_tags(mapper, model_name, labels_kv) it "handles values that differ only past 50th character" do tags = map_to_tags(new_mapper, 'ContainerNode', 'name' => 'x' * 50) - tags2 = map_to_tags(new_mapper, 'ContainerNode', 'name' => 'x' * 50 + 'y') - tags3 = map_to_tags(new_mapper, 'ContainerNode', 'name' => 'x' * 50 + 'z') + tags2 = map_to_tags(new_mapper, 'ContainerNode', 'name' => ('x' * 50) + 'y') + tags3 = map_to_tags(new_mapper, 'ContainerNode', 'name' => ('x' * 50) + 'z') # TODO: They get mapped to the same tag, is this desired? # TODO: What do we want the description to be? 
expect(tags2).to eq(tags) diff --git a/spec/models/pxe_menu_ipxe_spec.rb b/spec/models/pxe_menu_ipxe_spec.rb index c4914c68874..b7fb035cc34 100644 --- a/spec/models/pxe_menu_ipxe_spec.rb +++ b/spec/models/pxe_menu_ipxe_spec.rb @@ -1,79 +1,79 @@ RSpec.describe PxeMenuIpxe do before do - @contents = <<-PXEMENU -#!ipxe -menu ManageIQ iPXE Boot Menu -item --gap -- -----Live Images: -item ud1204 Ubuntu 12.04 Desktop x64 -#item ud1204_commented_in_menu Ubuntu 12.04 Desktop x64 -item ud1204_commented_in_image Ubuntu 12.04 Desktop x64 -item --gap -item --gap -- -----MIQ Desktop Auto-Install: -item rhel62dsk RHEL6.2 Desktop AutoInstall - Be Careful -item --gap -item --gap -- -----MIQ Server Auto-Install: -item rhel62host RHEL6.2 Host -item esxi5 VMware ESXi 5.0.0----- -item --gap -item --gap -- -----Other Stuff: -item reboot Reboot the Machine -item ipxedemo iPXE Demo -choose os && goto ${os} + @contents = <<~PXEMENU + #!ipxe + menu ManageIQ iPXE Boot Menu + item --gap -- -----Live Images: + item ud1204 Ubuntu 12.04 Desktop x64 + #item ud1204_commented_in_menu Ubuntu 12.04 Desktop x64 + item ud1204_commented_in_image Ubuntu 12.04 Desktop x64 + item --gap + item --gap -- -----MIQ Desktop Auto-Install: + item rhel62dsk RHEL6.2 Desktop AutoInstall - Be Careful + item --gap + item --gap -- -----MIQ Server Auto-Install: + item rhel62host RHEL6.2 Host + item esxi5 VMware ESXi 5.0.0----- + item --gap + item --gap -- -----Other Stuff: + item reboot Reboot the Machine + item ipxedemo iPXE Demo + choose os && goto ${os} -########## MIQ Live Images ########## -:ud1204 -kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -boot + ########## MIQ Live Images ########## + :ud1204 + kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + boot -:ud1204_not_in_menu -kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -boot + :ud1204_not_in_menu + kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + boot -:ud1204_commented_in_menu -kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -boot + :ud1204_commented_in_menu + kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + boot -:ud1204_commented_in_image -#kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -#initrd 
http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -#boot + :ud1204_commented_in_image + #kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + #initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + #boot -########## MIQ Desktop Images ########## -:rhel62dsk -kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=00:50:56:91:79:d5 -initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img -boot + ########## MIQ Desktop Images ########## + :rhel62dsk + kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=00:50:56:91:79:d5 + initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img + boot -########## MIQ Server Images ########## -:rhel62host -kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-host.ks.cfg -initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img -boot + ########## MIQ Server Images ########## + :rhel62host + kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-host.ks.cfg + initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img + boot -########## Other Stuff ########## -:reboot -reboot + ########## Other Stuff ########## + :reboot + reboot -:ipxedemo -chain http://boot.ipxe.org/demo/boot.php -PXEMENU + :ipxedemo + chain http://boot.ipxe.org/demo/boot.php + PXEMENU - @contents2 = <<-PXEMENU -#!ipxe -menu ManageIQ iPXE Boot Menu -item --gap -- -----Live Images: -item rhel62host RHEL6.2 Host -choose os && goto ${os} + @contents2 = <<~PXEMENU + #!ipxe + menu ManageIQ iPXE Boot Menu + item --gap -- -----Live Images: + item rhel62host RHEL6.2 Host + choose os && goto ${os} -########## MIQ Server Images ########## -:rhel62host -kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-host.ks.cfg -initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img -boot -PXEMENU + ########## MIQ Server Images ########## + :rhel62host + kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-host.ks.cfg + initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img + boot + PXEMENU end it ".parse_contents" do diff --git a/spec/models/pxe_menu_pxelinux_spec.rb b/spec/models/pxe_menu_pxelinux_spec.rb index 596be6c4342..9d6d795b7ee 100644 --- a/spec/models/pxe_menu_pxelinux_spec.rb +++ b/spec/models/pxe_menu_pxelinux_spec.rb @@ -1,79 +1,79 @@ RSpec.describe PxeMenuPxelinux do before do - @contents = <<-PXEMENU -default vesamenu.c32 -Menu Title ManageIQ TFTP Boot Menu - -label iPXE - menu default - menu label iPXE Boot - kernel ipxe/undionly.0 - -label VMware ESXi 4.1-260247 - menu label VMware ESXi 4.1-260247 - kernel VMware-VMvisor-Installer-4.1.0-260247/mboot.c32 - append VMware-VMvisor-Installer-4.1.0-260247/vmkboot.gz ks=http://192.168.252.60/ks.cfg --- VMware-VMvisor-Installer-4.1.0-260247/vmkernel.gz --- VMware-VMvisor-Installer-4.1.0-260247/sys.vgz --- VMware-VMvisor-Installer-4.1.0-260247/cim.vgz --- VMware-VMvisor-Installer-4.1.0-260247/ienviron.vgz --- VMware-VMvisor-Installer-4.1.0-260247/install.vgz - -label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - menu 
label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - kernel ubuntu-10.10-desktop-i386/vmlinuz - append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 initrd=ubuntu-10.10-desktop-i386/initrd.lz -- quiet - -label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT - menu label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT - kernel ubuntu-10.10-desktop-amd64/vmlinuz - append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-amd64 initrd=ubuntu-10.10-desktop-amd64/initrd.lz -- quiet - -label Ubuntu-11.04-Server-amd64 - menu label Ubuntu-11.04-Server-amd64 - kernel ubuntu-11.04-server-amd64/linux - append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-11.04-server-amd64 initrd=ubuntu-11.04-server-amd64/initrd.gz -- quiet - -label RHEL6 - menu label RHEL6 - kernel rhel6/vmlinuz - append initrd=rhel6/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/01-78-2b-cb-00-f6-6c.ks.cfg ksdevice=78:2b:cb:00:f6:6c - -label RHEL6.2-Desktop - menu label RHEL6.2-Desktop - kernel rhel6.2-desktop/vmlinuz - append initrd=rhel6.2-desktop/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=78:2b:cb:00:f6:6c - -label VMware ESXi 5.0.0-381646 - menu label VMware ESXi 5.0.0-381646 - kernel VMware-VMvisor-Installer-5.0.0-381646.x86_64/mboot.c32 - append -c VMware-VMvisor-Installer-5.0.0-381646.x86_64/boot.cfg - -label VMware ESXi 5.0.0-469512 - menu label VMware ESXi 5.0.0-469512 - kernel VMware-VMvisor-Installer-5.0.0-469512.x86_64/mboot.c32 - append -c VMware-VMvisor-Installer-5.0.0-469512.x86_64/boot.cfg - -label VMware ESXi 5.0.0-504890 - menu label VMware ESXi 5.0.0-504890 - kernel VMware-VMvisor-Installer-5.0.0-504890.x86_64/mboot.c32 - append -c VMware-VMvisor-Installer-5.0.0-504890.x86_64/boot.cfg - -label Local_drive - localboot 0 - menu label Local Drive - -prompt 0 -timeout 600 -PXEMENU - - @contents2 = <<-PXEMENU -default vesamenu.c32 -Menu Title ManageIQ TFTP Boot Menu - -label RHEL6 - menu label RHEL6 - kernel rhel6/vmlinuz - append initrd=rhel6/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/01-78-2b-cb-00-f6-6c.ks.cfg ksdevice=78:2b:cb:00:f6:6c - -prompt 0 -timeout 600 -PXEMENU + @contents = <<~PXEMENU + default vesamenu.c32 + Menu Title ManageIQ TFTP Boot Menu + + label iPXE + menu default + menu label iPXE Boot + kernel ipxe/undionly.0 + + label VMware ESXi 4.1-260247 + menu label VMware ESXi 4.1-260247 + kernel VMware-VMvisor-Installer-4.1.0-260247/mboot.c32 + append VMware-VMvisor-Installer-4.1.0-260247/vmkboot.gz ks=http://192.168.252.60/ks.cfg --- VMware-VMvisor-Installer-4.1.0-260247/vmkernel.gz --- VMware-VMvisor-Installer-4.1.0-260247/sys.vgz --- VMware-VMvisor-Installer-4.1.0-260247/cim.vgz --- VMware-VMvisor-Installer-4.1.0-260247/ienviron.vgz --- VMware-VMvisor-Installer-4.1.0-260247/install.vgz + + label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + kernel ubuntu-10.10-desktop-i386/vmlinuz + append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 initrd=ubuntu-10.10-desktop-i386/initrd.lz -- quiet + + label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT + menu label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT + kernel ubuntu-10.10-desktop-amd64/vmlinuz + append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-amd64 initrd=ubuntu-10.10-desktop-amd64/initrd.lz -- quiet + + label Ubuntu-11.04-Server-amd64 + 
menu label Ubuntu-11.04-Server-amd64 + kernel ubuntu-11.04-server-amd64/linux + append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-11.04-server-amd64 initrd=ubuntu-11.04-server-amd64/initrd.gz -- quiet + + label RHEL6 + menu label RHEL6 + kernel rhel6/vmlinuz + append initrd=rhel6/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/01-78-2b-cb-00-f6-6c.ks.cfg ksdevice=78:2b:cb:00:f6:6c + + label RHEL6.2-Desktop + menu label RHEL6.2-Desktop + kernel rhel6.2-desktop/vmlinuz + append initrd=rhel6.2-desktop/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=78:2b:cb:00:f6:6c + + label VMware ESXi 5.0.0-381646 + menu label VMware ESXi 5.0.0-381646 + kernel VMware-VMvisor-Installer-5.0.0-381646.x86_64/mboot.c32 + append -c VMware-VMvisor-Installer-5.0.0-381646.x86_64/boot.cfg + + label VMware ESXi 5.0.0-469512 + menu label VMware ESXi 5.0.0-469512 + kernel VMware-VMvisor-Installer-5.0.0-469512.x86_64/mboot.c32 + append -c VMware-VMvisor-Installer-5.0.0-469512.x86_64/boot.cfg + + label VMware ESXi 5.0.0-504890 + menu label VMware ESXi 5.0.0-504890 + kernel VMware-VMvisor-Installer-5.0.0-504890.x86_64/mboot.c32 + append -c VMware-VMvisor-Installer-5.0.0-504890.x86_64/boot.cfg + + label Local_drive + localboot 0 + menu label Local Drive + + prompt 0 + timeout 600 + PXEMENU + + @contents2 = <<~PXEMENU + default vesamenu.c32 + Menu Title ManageIQ TFTP Boot Menu + + label RHEL6 + menu label RHEL6 + kernel rhel6/vmlinuz + append initrd=rhel6/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/01-78-2b-cb-00-f6-6c.ks.cfg ksdevice=78:2b:cb:00:f6:6c + + prompt 0 + timeout 600 + PXEMENU end it ".parse_contents" do diff --git a/spec/models/pxe_menu_spec.rb b/spec/models/pxe_menu_spec.rb index 82be17eb90f..b6830c9775b 100644 --- a/spec/models/pxe_menu_spec.rb +++ b/spec/models/pxe_menu_spec.rb @@ -1,127 +1,127 @@ RSpec.describe PxeMenu do before do - @contents_pxelinux = <<-PXEMENU -default vesamenu.c32 -Menu Title ManageIQ TFTP Boot Menu - -label iPXE - menu default - menu label iPXE Boot - kernel ipxe/undionly.0 - -label VMware ESXi 4.1-260247 - menu label VMware ESXi 4.1-260247 - kernel VMware-VMvisor-Installer-4.1.0-260247/mboot.c32 - append VMware-VMvisor-Installer-4.1.0-260247/vmkboot.gz ks=http://192.168.252.60/ks.cfg --- VMware-VMvisor-Installer-4.1.0-260247/vmkernel.gz --- VMware-VMvisor-Installer-4.1.0-260247/sys.vgz --- VMware-VMvisor-Installer-4.1.0-260247/cim.vgz --- VMware-VMvisor-Installer-4.1.0-260247/ienviron.vgz --- VMware-VMvisor-Installer-4.1.0-260247/install.vgz - -label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - kernel ubuntu-10.10-desktop-i386/vmlinuz - append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 initrd=ubuntu-10.10-desktop-i386/initrd.lz -- quiet - -label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT - menu label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT - kernel ubuntu-10.10-desktop-amd64/vmlinuz - append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-amd64 initrd=ubuntu-10.10-desktop-amd64/initrd.lz -- quiet - -label Ubuntu-11.04-Server-amd64 - menu label Ubuntu-11.04-Server-amd64 - kernel ubuntu-11.04-server-amd64/linux - append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-11.04-server-amd64 initrd=ubuntu-11.04-server-amd64/initrd.gz -- quiet - -label RHEL6 - menu label RHEL6 - kernel rhel6/vmlinuz - 
append initrd=rhel6/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/01-78-2b-cb-00-f6-6c.ks.cfg ksdevice=78:2b:cb:00:f6:6c - -label RHEL6.2-Desktop - menu label RHEL6.2-Desktop - kernel rhel6.2-desktop/vmlinuz - append initrd=rhel6.2-desktop/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=78:2b:cb:00:f6:6c - -label VMware ESXi 5.0.0-381646 - menu label VMware ESXi 5.0.0-381646 - kernel VMware-VMvisor-Installer-5.0.0-381646.x86_64/mboot.c32 - append -c VMware-VMvisor-Installer-5.0.0-381646.x86_64/boot.cfg - -label VMware ESXi 5.0.0-469512 - menu label VMware ESXi 5.0.0-469512 - kernel VMware-VMvisor-Installer-5.0.0-469512.x86_64/mboot.c32 - append -c VMware-VMvisor-Installer-5.0.0-469512.x86_64/boot.cfg - -label VMware ESXi 5.0.0-504890 - menu label VMware ESXi 5.0.0-504890 - kernel VMware-VMvisor-Installer-5.0.0-504890.x86_64/mboot.c32 - append -c VMware-VMvisor-Installer-5.0.0-504890.x86_64/boot.cfg - -label Local_drive - localboot 0 - menu label Local Drive - -prompt 0 -timeout 600 -PXEMENU - - @contents_ipxe = <<-PXEMENU -#!ipxe -menu ManageIQ iPXE Boot Menu -item --gap -- -----Live Images: -item ud1204 Ubuntu 12.04 Desktop x64 -#item ud1204_commented_in_menu Ubuntu 12.04 Desktop x64 -item ud1204_commented_in_image Ubuntu 12.04 Desktop x64 -item --gap -item --gap -- -----MIQ Desktop Auto-Install: -item rhel62dsk RHEL6.2 Desktop AutoInstall - Be Careful -item --gap -item --gap -- -----MIQ Server Auto-Install: -item rhel62host RHEL6.2 Host -item esxi5 VMware ESXi 5.0.0----- -item --gap -item --gap -- -----Other Stuff: -item reboot Reboot the Machine -item ipxedemo iPXE Demo -choose os && goto ${os} - -########## MIQ Live Images ########## -:ud1204 -kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -boot - -:ud1204_not_in_menu -kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -boot - -:ud1204_commented_in_menu -kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -boot - -:ud1204_commented_in_image -#kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet -#initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz -#boot - -########## MIQ Desktop Images ########## -:rhel62dsk -kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=00:50:56:91:79:d5 -initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img -boot - -########## MIQ Server Images ########## -:rhel62host -kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-host.ks.cfg -initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img -boot - -########## Other Stuff ########## -:reboot -reboot - -:ipxedemo -chain http://boot.ipxe.org/demo/boot.php 
-PXEMENU + @contents_pxelinux = <<~PXEMENU + default vesamenu.c32 + Menu Title ManageIQ TFTP Boot Menu + + label iPXE + menu default + menu label iPXE Boot + kernel ipxe/undionly.0 + + label VMware ESXi 4.1-260247 + menu label VMware ESXi 4.1-260247 + kernel VMware-VMvisor-Installer-4.1.0-260247/mboot.c32 + append VMware-VMvisor-Installer-4.1.0-260247/vmkboot.gz ks=http://192.168.252.60/ks.cfg --- VMware-VMvisor-Installer-4.1.0-260247/vmkernel.gz --- VMware-VMvisor-Installer-4.1.0-260247/sys.vgz --- VMware-VMvisor-Installer-4.1.0-260247/cim.vgz --- VMware-VMvisor-Installer-4.1.0-260247/ienviron.vgz --- VMware-VMvisor-Installer-4.1.0-260247/install.vgz + + label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + kernel ubuntu-10.10-desktop-i386/vmlinuz + append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 initrd=ubuntu-10.10-desktop-i386/initrd.lz -- quiet + + label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT + menu label Ubuntu-10.10-Desktop-amd64-LIVE_BOOT + kernel ubuntu-10.10-desktop-amd64/vmlinuz + append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-amd64 initrd=ubuntu-10.10-desktop-amd64/initrd.lz -- quiet + + label Ubuntu-11.04-Server-amd64 + menu label Ubuntu-11.04-Server-amd64 + kernel ubuntu-11.04-server-amd64/linux + append vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-11.04-server-amd64 initrd=ubuntu-11.04-server-amd64/initrd.gz -- quiet + + label RHEL6 + menu label RHEL6 + kernel rhel6/vmlinuz + append initrd=rhel6/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/01-78-2b-cb-00-f6-6c.ks.cfg ksdevice=78:2b:cb:00:f6:6c + + label RHEL6.2-Desktop + menu label RHEL6.2-Desktop + kernel rhel6.2-desktop/vmlinuz + append initrd=rhel6.2-desktop/initrd.img ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=78:2b:cb:00:f6:6c + + label VMware ESXi 5.0.0-381646 + menu label VMware ESXi 5.0.0-381646 + kernel VMware-VMvisor-Installer-5.0.0-381646.x86_64/mboot.c32 + append -c VMware-VMvisor-Installer-5.0.0-381646.x86_64/boot.cfg + + label VMware ESXi 5.0.0-469512 + menu label VMware ESXi 5.0.0-469512 + kernel VMware-VMvisor-Installer-5.0.0-469512.x86_64/mboot.c32 + append -c VMware-VMvisor-Installer-5.0.0-469512.x86_64/boot.cfg + + label VMware ESXi 5.0.0-504890 + menu label VMware ESXi 5.0.0-504890 + kernel VMware-VMvisor-Installer-5.0.0-504890.x86_64/mboot.c32 + append -c VMware-VMvisor-Installer-5.0.0-504890.x86_64/boot.cfg + + label Local_drive + localboot 0 + menu label Local Drive + + prompt 0 + timeout 600 + PXEMENU + + @contents_ipxe = <<~PXEMENU + #!ipxe + menu ManageIQ iPXE Boot Menu + item --gap -- -----Live Images: + item ud1204 Ubuntu 12.04 Desktop x64 + #item ud1204_commented_in_menu Ubuntu 12.04 Desktop x64 + item ud1204_commented_in_image Ubuntu 12.04 Desktop x64 + item --gap + item --gap -- -----MIQ Desktop Auto-Install: + item rhel62dsk RHEL6.2 Desktop AutoInstall - Be Careful + item --gap + item --gap -- -----MIQ Server Auto-Install: + item rhel62host RHEL6.2 Host + item esxi5 VMware ESXi 5.0.0----- + item --gap + item --gap -- -----Other Stuff: + item reboot Reboot the Machine + item ipxedemo iPXE Demo + choose os && goto ${os} + + ########## MIQ Live Images ########## + :ud1204 + kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet 
+ initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + boot + + :ud1204_not_in_menu + kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + boot + + :ud1204_commented_in_menu + kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + boot + + :ud1204_commented_in_image + #kernel http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/vmlinuz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/boot/ubuntu-12.04-desktop ro root=/dev/nfs -- quiet + #initrd http://192.168.252.60/boot/ubuntu-12.04-desktop/casper/initrd.lz + #boot + + ########## MIQ Desktop Images ########## + :rhel62dsk + kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-desktop.ks.cfg ksdevice=00:50:56:91:79:d5 + initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img + boot + + ########## MIQ Server Images ########## + :rhel62host + kernel http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz ramdisk_size=10000 ks=http://192.168.252.60/pxelinux.cfg/rhel6.2-host.ks.cfg + initrd http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img + boot + + ########## Other Stuff ########## + :reboot + reboot + + :ipxedemo + chain http://boot.ipxe.org/demo/boot.php + PXEMENU end it ".class_from_contents" do diff --git a/spec/models/pxe_server_spec.rb b/spec/models/pxe_server_spec.rb index 639156a2028..5a0f347ba7a 100644 --- a/spec/models/pxe_server_spec.rb +++ b/spec/models/pxe_server_spec.rb @@ -68,7 +68,7 @@ def file_open(*args, &block) def file_write(file, contents) fname = test_full_path_to(file) FileUtils.mkdir_p(File.dirname(fname)) - File.open(fname, "w") { |fd| fd.write(contents) } + File.write(fname, contents) end end end @@ -107,16 +107,16 @@ def file_write(file, contents) it "without kickstart" do @pxe_server.sync_images expected_name = @pxe_server.test_full_path_to("#{@pxe_server.pxe_directory}/01-00-19-e3-d7-5b-0e") - expected_contents = <<-PXE -timeout 0 -default Ubuntu-10.10-Desktop-i386-LIVE_BOOT + expected_contents = <<~PXE + timeout 0 + default Ubuntu-10.10-Desktop-i386-LIVE_BOOT -label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - kernel ubuntu-10.10-desktop-i386/vmlinuz - append initrd=ubuntu-10.10-desktop-i386/initrd.lz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 -- quiet + label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + kernel ubuntu-10.10-desktop-i386/vmlinuz + append initrd=ubuntu-10.10-desktop-i386/initrd.lz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 -- quiet -PXE + PXE image = @pxe_server.pxe_images.find_by(:name => "Ubuntu-10.10-Desktop-i386-LIVE_BOOT") begin @pxe_server.create_provisioning_files(image, "00:19:e3:d7:5b:0e") @@ -141,16 +141,16 @@ def file_write(file, contents) ks_contents = "FOO" kickstart = FactoryBot.create(:customization_template_kickstart, :script => ks_contents) - expected_contents = <<-PXE -timeout 0 -default Ubuntu-10.10-Desktop-i386-LIVE_BOOT + expected_contents = <<~PXE + timeout 0 + default 
Ubuntu-10.10-Desktop-i386-LIVE_BOOT -label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT - kernel ubuntu-10.10-desktop-i386/vmlinuz - append initrd=ubuntu-10.10-desktop-i386/initrd.lz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 -- quiet ks=#{@pxe_server.access_url}/#{@pxe_server.customization_directory}/#{dashed_mac_address}.ks.cfg ksdevice=00:19:e3:d7:5b:0e + label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + menu label Ubuntu-10.10-Desktop-i386-LIVE_BOOT + kernel ubuntu-10.10-desktop-i386/vmlinuz + append initrd=ubuntu-10.10-desktop-i386/initrd.lz vga=normal boot=casper netboot=nfs nfsroot=192.168.252.60:/srv/nfsboot/ubuntu-10.10-desktop-i386 -- quiet ks=#{@pxe_server.access_url}/#{@pxe_server.customization_directory}/#{dashed_mac_address}.ks.cfg ksdevice=00:19:e3:d7:5b:0e -PXE + PXE begin @pxe_server.create_provisioning_files(image, "00:19:e3:d7:5b:0e", nil, kickstart) @@ -230,7 +230,7 @@ def file_open(*args, &block) def file_write(file, contents) fname = test_full_path_to(file) FileUtils.mkdir_p(File.dirname(fname)) - File.open(fname, "w") { |fd| fd.write(contents) } + File.write(fname, contents) end end end @@ -262,18 +262,17 @@ def file_write(file, contents) context "#create_provisioning_files" do it "without kickstart" do image = FactoryBot.create(:pxe_image_ipxe, - :pxe_server => @pxe_server, - :kernel => "http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz", - :kernel_options => "ramdisk_size=10000 ksdevice=00:50:56:91:79:d5", - :initrd => "http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img" - ) + :pxe_server => @pxe_server, + :kernel => "http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz", + :kernel_options => "ramdisk_size=10000 ksdevice=00:50:56:91:79:d5", + :initrd => "http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img") expected_name = @pxe_server.test_full_path_to("#{@pxe_server.pxe_directory}/00-19-e3-d7-5b-0e") - expected_contents = <<-PXE -#!ipxe -kernel #{image.kernel} ramdisk_size=10000 -initrd #{image.initrd} -boot -PXE + expected_contents = <<~PXE + #!ipxe + kernel #{image.kernel} ramdisk_size=10000 + initrd #{image.initrd} + boot + PXE begin @pxe_server.create_provisioning_files(image, "00:19:e3:d7:5b:0e") expect(File.exist?(expected_name)).to be_truthy @@ -292,21 +291,20 @@ def file_write(file, contents) expected_ks_name = "#{expected_name}.ks.cfg" image = FactoryBot.create(:pxe_image_ipxe, - :pxe_server => @pxe_server, - :kernel => "http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz", - :kernel_options => "ramdisk_size=10000 ksdevice=00:50:56:91:79:d5", - :initrd => "http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img" - ) + :pxe_server => @pxe_server, + :kernel => "http://192.168.252.60/ipxe/rhel6.2-desktop/vmlinuz", + :kernel_options => "ramdisk_size=10000 ksdevice=00:50:56:91:79:d5", + :initrd => "http://192.168.252.60/ipxe/rhel6.2-desktop/initrd.img") ks_contents = "FOO" kickstart = FactoryBot.create(:customization_template_kickstart, :script => ks_contents) - expected_contents = <<-PXE -#!ipxe -kernel #{image.kernel} ramdisk_size=10000 ksdevice=00:19:e3:d7:5b:0e ks=#{@pxe_server.access_url}/#{@pxe_server.customization_directory}/#{dashed_mac_address}.ks.cfg -initrd #{image.initrd} -boot -PXE + expected_contents = <<~PXE + #!ipxe + kernel #{image.kernel} ramdisk_size=10000 ksdevice=00:19:e3:d7:5b:0e ks=#{@pxe_server.access_url}/#{@pxe_server.customization_directory}/#{dashed_mac_address}.ks.cfg + initrd #{image.initrd} + boot + PXE begin 
@pxe_server.create_provisioning_files(image, "00:19:e3:d7:5b:0e", nil, kickstart) expect(File.exist?(expected_name)).to be_truthy @@ -330,7 +328,7 @@ def file_write(file, contents) end it "#pxe_images" do - expect(@pxe_server.pxe_images).to match_array([@advertised_image, @discovered_image]) + expect(@pxe_server.pxe_images).to match_array([@advertised_image, @discovered_image]) end it "#advertised_pxe_images" do diff --git a/spec/models/relationship_spec.rb b/spec/models/relationship_spec.rb index 421700db319..790b72df849 100644 --- a/spec/models/relationship_spec.rb +++ b/spec/models/relationship_spec.rb @@ -40,12 +40,12 @@ end it "includes multi" do - expect(Relationship.filter_by_resource_type(vms + hosts + storages, :of_type => %w(Host VmOrTemplate))) + expect(Relationship.filter_by_resource_type(vms + hosts + storages, :of_type => %w[Host VmOrTemplate])) .to match_array(vms + hosts) end it "includes everything" do - expect(Relationship.filter_by_resource_type(vms + hosts, :of_type => %w(Host VmOrTemplate))).to eq(vms + hosts) + expect(Relationship.filter_by_resource_type(vms + hosts, :of_type => %w[Host VmOrTemplate])).to eq(vms + hosts) end it "includes nothing" do @@ -59,16 +59,16 @@ end it "excludes multi" do - expect(Relationship.filter_by_resource_type(vms + hosts + storages, :except_type => %w(Host VmOrTemplate))) + expect(Relationship.filter_by_resource_type(vms + hosts + storages, :except_type => %w[Host VmOrTemplate])) .to eq(storages) end it "excludes everything" do - expect(Relationship.filter_by_resource_type(vms + hosts, :except_type => %w(Host VmOrTemplate))).to be_empty + expect(Relationship.filter_by_resource_type(vms + hosts, :except_type => %w[Host VmOrTemplate])).to be_empty end it "excludes nothing" do - expect(Relationship.filter_by_resource_type(vms, :except_type => %w(Host))).to eq(vms) + expect(Relationship.filter_by_resource_type(vms, :except_type => %w[Host])).to eq(vms) end it "includes and excludes" do @@ -85,8 +85,8 @@ hosts.map(&:save!) storages.map(&:save!) filtered_results = Relationship.filter_by_resource_type(Relationship.all, - :of_type => %w(Host VmOrTemplate), - :except_type => %w(Storage)) + :of_type => %w[Host VmOrTemplate], + :except_type => %w[Storage]) expect(filtered_results).not_to be_kind_of(Array) expect(filtered_results).to match_array(vms + hosts) end @@ -97,8 +97,8 @@ storages.map(&:save!) rels = Relationship.all.load filtered_results = Relationship.filter_by_resource_type(rels, - :of_type => %w(Host VmOrTemplate), - :except_type => %w(Storage)) + :of_type => %w[Host VmOrTemplate], + :except_type => %w[Storage]) expect(filtered_results).to be_kind_of(Array) end end @@ -112,7 +112,7 @@ vms.map(&:save!) hosts.map(&:save!) storages.map(&:save!) 
- filtered_results = Relationship.filtered(%w(Host VmOrTemplate), %w(Storage)) + filtered_results = Relationship.filtered(%w[Host VmOrTemplate], %w[Storage]) expect(filtered_results).to match_array(vms + hosts) end end diff --git a/spec/models/resource_action_spec.rb b/spec/models/resource_action_spec.rb index 3774cd6ab18..67b111f5bb7 100644 --- a/spec/models/resource_action_spec.rb +++ b/spec/models/resource_action_spec.rb @@ -143,9 +143,9 @@ context "uri validation" do let(:ra) do FactoryBot.build(:resource_action, - :ae_namespace => "NAMESPACE", - :ae_class => "CLASS", - :ae_instance => "INSTANCE") + :ae_namespace => "NAMESPACE", + :ae_class => "CLASS", + :ae_instance => "INSTANCE") end it "#ae_path" do diff --git a/spec/models/scan_item/seeding_spec.rb b/spec/models/scan_item/seeding_spec.rb index e330a857e26..0b2e79cd33c 100644 --- a/spec/models/scan_item/seeding_spec.rb +++ b/spec/models/scan_item/seeding_spec.rb @@ -12,7 +12,7 @@ before do FileUtils.mkdir_p(scan_item_dir) - FileUtils.cp_r(Rails.root.join("product", "scan_items", "scan_item_cat.yaml"), scan_item_dir, :preserve => true) + FileUtils.cp_r(Rails.root.join("product/scan_items/scan_item_cat.yaml"), scan_item_dir, :preserve => true) stub_const("ScanItem::Seeding::SCAN_ITEMS_DIR", scan_item_dir) expect(Vmdb::Plugins).to receive(:flat_map).at_least(:once) { [] } @@ -73,7 +73,7 @@ describe ".seed_files (private)" do it "will include files from core" do expect(described_class.send(:seed_files)).to include( - a_string_starting_with(Rails.root.join("product", "scan_items").to_s) + a_string_starting_with(Rails.root.join("product/scan_items").to_s) ) end diff --git a/spec/models/server_role_spec.rb b/spec/models/server_role_spec.rb index 6c3d75c99ea..5b4ee319275 100644 --- a/spec/models/server_role_spec.rb +++ b/spec/models/server_role_spec.rb @@ -67,7 +67,8 @@ roles = @csv.split("\n") roles.shift roles.each do |role| - next if role =~ /^#.*$/ # skip commented lines + next if /^#.*$/.match?(role) # skip commented lines + name, description, max_concurrent, external_failover, role_scope = role.split(',') max_concurrent = max_concurrent.to_i external_failover = true if external_failover == 'true' diff --git a/spec/models/service/linking_workflow_spec.rb b/spec/models/service/linking_workflow_spec.rb index 39c5fbe7b66..6dee53caf63 100644 --- a/spec/models/service/linking_workflow_spec.rb +++ b/spec/models/service/linking_workflow_spec.rb @@ -51,7 +51,7 @@ end context 'state transitions' do - %w(start refresh poll_refresh post_refresh finish abort_job cancel error).each do |signal| + %w[start refresh poll_refresh post_refresh finish abort_job cancel error].each do |signal| shared_examples_for "allows #{signal} signal" do it signal.to_s do expect(job).to receive(signal.to_sym) @@ -60,7 +60,7 @@ end end - %w(start refresh poll_refresh post_refresh).each do |signal| + %w[start refresh poll_refresh post_refresh].each do |signal| shared_examples_for "doesn't allow #{signal} signal" do it signal.to_s do expect { job.signal(signal.to_sym) }.to raise_error(RuntimeError, /#{signal} is not permitted at state #{job.state}/) diff --git a/spec/models/service/retirement_management_spec.rb b/spec/models/service/retirement_management_spec.rb index 8135715f50d..65af85e3bea 100644 --- a/spec/models/service/retirement_management_spec.rb +++ b/spec/models/service/retirement_management_spec.rb @@ -199,7 +199,7 @@ @service.mark_retired @service.reload expect(@service.retired).to be_truthy - expect(@service.retires_on).to be_between(Time.zone.now - 1.hour, 
Time.zone.now + 1.second) + expect(@service.retires_on).to be_between(1.hour.ago, 1.second.from_now) expect(@service.retirement_state).to eq("retired") end diff --git a/spec/models/service_ansible_playbook_spec.rb b/spec/models/service_ansible_playbook_spec.rb index c09143ca870..82eba36ba79 100644 --- a/spec/models/service_ansible_playbook_spec.rb +++ b/spec/models/service_ansible_playbook_spec.rb @@ -21,7 +21,7 @@ let(:executed_service) do FactoryBot.create(:service_ansible_playbook, :options => provision_options).tap do |service| - regex = /(#{ResourceAction::PROVISION})|(#{ResourceAction::RETIREMENT})/ + regex = /(#{ResourceAction::PROVISION})|(#{ResourceAction::RETIREMENT})/o allow(service).to receive(:job).with(regex).and_return(runner_job) end end @@ -63,18 +63,18 @@ { :credential_id => credential_2.id, :hosts => 'host3', - 'extra_vars' => { :var1 => 'new_val1', 'pswd' => encrypted_val2 } + 'extra_vars' => {:var1 => 'new_val1', 'pswd' => encrypted_val2} } end let(:provision_options) do { :provision_job_options => { - :credential => 1, + :credential => 1, :vault_credential => 2, - :inventory => 2, - :hosts => "default_host1,default_host2", - :extra_vars => {'var1' => 'value1', 'var2' => 'value2', 'pswd' => encrypted_val} + :inventory => 2, + :hosts => "default_host1,default_host2", + :extra_vars => {'var1' => 'value1', 'var2' => 'value2', 'pswd' => encrypted_val} } } end @@ -200,15 +200,15 @@ miq_request_task = FactoryBot.create(:miq_request_task, :miq_request => FactoryBot.create(:service_template_provision_request)) miq_request_task.update(:options => {:request_options => {:manageiq_extra_vars => control_extras}}) loaded_service.update(:evm_owner => FactoryBot.create(:user_with_group), - :miq_group => FactoryBot.create(:miq_group), - :miq_request_task => miq_request_task) + :miq_group => FactoryBot.create(:miq_group), + :miq_request_task => miq_request_task) end it 'creates an Ansible Runner job' do expect(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Job).to receive(:create_job) do |jobtemp, opts| expect(jobtemp).to eq(playbook) - exposed_miq = %w(api_url api_token service user group X_MIQ_Group request_task request) + control_extras.keys - exposed_connection = %w(url token X_MIQ_Group) + exposed_miq = %w[api_url api_token service user group X_MIQ_Group request_task request] + control_extras.keys + exposed_connection = %w[url token X_MIQ_Group] expect(opts[:extra_vars].delete('manageiq').keys).to include(*exposed_miq) expect(opts[:extra_vars].delete('manageiq_connection').keys).to include(*exposed_connection) diff --git a/spec/models/service_ansible_tower_spec.rb b/spec/models/service_ansible_tower_spec.rb index e487868d1ac..7185e297d3b 100644 --- a/spec/models/service_ansible_tower_spec.rb +++ b/spec/models/service_ansible_tower_spec.rb @@ -21,8 +21,8 @@ let(:service) do FactoryBot.create(:service_ansible_tower, - :evm_owner => FactoryBot.create(:user), - :miq_group => FactoryBot.create(:miq_group)) + :evm_owner => FactoryBot.create(:user), + :miq_group => FactoryBot.create(:miq_group)) end let(:service_with_dialog_options) do @@ -67,8 +67,8 @@ miq_request_task = FactoryBot.create(:miq_request_task, :miq_request => FactoryBot.create(:service_template_provision_request)) miq_request_task.update(:options => {:request_options => {:manageiq_extra_vars => control_extras}}) service.update(:evm_owner => FactoryBot.create(:user_with_group), - :miq_group => FactoryBot.create(:miq_group), - :miq_request_task => miq_request_task) + :miq_group => FactoryBot.create(:miq_group), + 
:miq_request_task => miq_request_task) end it 'launches a job through ansible tower provider' do @@ -77,8 +77,8 @@ expect(opts).to have_key(:limit) expect(opts).to have_key(:extra_vars) - exposed_miq = %w(api_url api_token service user group X_MIQ_Group request_task request) + control_extras.keys - exposed_connection = %w(url token X_MIQ_Group) + exposed_miq = %w[api_url api_token service user group X_MIQ_Group request_task request] + control_extras.keys + exposed_connection = %w[url token X_MIQ_Group] expect(opts[:extra_vars].delete('manageiq').keys).to include(*exposed_miq) expect(opts[:extra_vars].delete('manageiq_connection').keys).to include(*exposed_connection) end.and_return(double(:raw_job, diff --git a/spec/models/service_container_template_spec.rb b/spec/models/service_container_template_spec.rb index f12e59159a7..91fa3d432ae 100644 --- a/spec/models/service_container_template_spec.rb +++ b/spec/models/service_container_template_spec.rb @@ -29,8 +29,8 @@ end FactoryBot.create(:service_container_template, - :options => provision_options.merge(config_info_options), - :service_template => service_template).tap do |svc| + :options => provision_options.merge(config_info_options), + :service_template => service_template).tap do |svc| allow(svc).to receive(:container_template).and_return(container_template) allow(svc).to receive(:stack).and_return(stack) end @@ -136,7 +136,7 @@ describe '#check_completed' do it 'created container object ends in VMDB' do - allow(stack_status).to receive(:normalized_status).and_return(%w(create_complete completed)) + allow(stack_status).to receive(:normalized_status).and_return(%w[create_complete completed]) expect(loaded_service.check_completed(action)).to eq([true, nil]) end diff --git a/spec/models/service_orchestration/provision_tagging_spec.rb b/spec/models/service_orchestration/provision_tagging_spec.rb index 389dfcd224d..b5d08356cc4 100644 --- a/spec/models/service_orchestration/provision_tagging_spec.rb +++ b/spec/models/service_orchestration/provision_tagging_spec.rb @@ -19,7 +19,7 @@ let(:vm) { FactoryBot.create(:vm) } let(:service) { FactoryBot.build(:service_orchestration, :miq_request_task => miq_request_task) } let(:dialog_tag_options) do - { :dialog => { + {:dialog => { 'Array::dialog_tag_0_env' => 'Classification::1', 'Array::dialog_tag_1_network' => 'Classification::11', 'Array::dialog_tag_2_dept' => 'Classification::21,Classification::22,Classification::23' @@ -42,7 +42,7 @@ context 'Calls Classification.bulk_reassignment with VM and tag IDs for provision_priority 0' do let(:provision_priority) { 0 } - let(:tag_ids) { %w(1 11) } + let(:tag_ids) { %w[1 11] } it_behaves_like 'service_orchestration VM tagging' end @@ -59,21 +59,21 @@ context 'Calls Classification.bulk_reassignment with VM and tag IDs for provision_priority 0' do let(:provision_priority) { 0 } - let(:tag_ids) { %w(1 11) } + let(:tag_ids) { %w[1 11] } it_behaves_like 'service_orchestration VM tagging' end context 'Call Classification.bulk_reassignment with VM and tag IDs for provision_priority 1' do let(:provision_priority) { 1 } - let(:tag_ids) { %w(1 21 22 23) } + let(:tag_ids) { %w[1 21 22 23] } it_behaves_like 'service_orchestration VM tagging' end context 'Call Classification.bulk_reassignment with VM and tag IDs for provision_priority 2' do let(:provision_priority) { 2 } - let(:tag_ids) { %w(1) } + let(:tag_ids) { %w[1] } it_behaves_like 'service_orchestration VM tagging' end diff --git a/spec/models/service_order_spec.rb b/spec/models/service_order_spec.rb index 
b2e76659cdc..005c05d604c 100644 --- a/spec/models/service_order_spec.rb +++ b/spec/models/service_order_spec.rb @@ -1,8 +1,8 @@ RSpec.describe ServiceOrder do def create_request FactoryBot.create(:service_template_provision_request, - :process => false, - :requester => admin) + :process => false, + :requester => admin) end let(:admin) { FactoryBot.create(:user_with_group, :userid => "admin") } diff --git a/spec/models/service_reconfigure_task_spec.rb b/spec/models/service_reconfigure_task_spec.rb index f7fa6558ed2..7b8adbe6d08 100644 --- a/spec/models/service_reconfigure_task_spec.rb +++ b/spec/models/service_reconfigure_task_spec.rb @@ -33,7 +33,8 @@ expect(task).to receive(:update_and_notify_parent).with( :state => 'finished', :status => 'Ok', - :message => 'Service Reconfigure completed') + :message => 'Service Reconfigure completed' + ) task.after_ae_delivery('ok') end @@ -41,7 +42,8 @@ expect(task).to receive(:update_and_notify_parent).with( :state => 'finished', :status => 'Error', - :message => 'Service Reconfigure failed') + :message => 'Service Reconfigure failed' + ) task.after_ae_delivery('error') end @@ -75,10 +77,10 @@ context "automation entry point available" do before do FactoryBot.create(:resource_action, :action => 'Reconfigure', - :resource => template, - :ae_namespace => 'namespace', - :ae_class => 'class', - :ae_instance => 'instance') + :resource => template, + :ae_namespace => 'namespace', + :ae_class => 'class', + :ae_instance => 'instance') end it "queues the reconfigure automate entry point" do @@ -112,7 +114,8 @@ expect(task).to receive(:update_and_notify_parent).with( :state => 'pending', :status => 'Ok', - :message => 'Automation Starting') + :message => 'Automation Starting' + ) task.deliver_to_automate end end @@ -122,7 +125,8 @@ expect(task).to receive(:update_and_notify_parent).with( :state => 'finished', :status => 'Ok', - :message => 'Service Reconfigure completed') + :message => 'Service Reconfigure completed' + ) task.deliver_to_automate end end diff --git a/spec/models/service_retire_task_spec.rb b/spec/models/service_retire_task_spec.rb index 5570ca4fc60..7c2da9cd265 100644 --- a/spec/models/service_retire_task_spec.rb +++ b/spec/models/service_retire_task_spec.rb @@ -3,7 +3,7 @@ let(:vm) { FactoryBot.create(:vm) } let(:service) { FactoryBot.create(:service, :lifecycle_state => 'provisioned') } let(:miq_request) { FactoryBot.create(:service_retire_request, :requester => user, :source => service) } - let(:service_retire_task) { FactoryBot.create(:service_retire_task, :source => service, :miq_request => miq_request, :options => {:src_ids => [service.id] }) } + let(:service_retire_task) { FactoryBot.create(:service_retire_task, :source => service, :miq_request => miq_request, :options => {:src_ids => [service.id]}) } let(:reason) { "Why Not?" 
} let(:approver) { FactoryBot.create(:user_miq_request_approver) } let(:zone) { FactoryBot.create(:zone, :name => "fred") } @@ -87,29 +87,29 @@ context "ansible playbook service" do context "no_with_playbook" do - let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "no_with_playbook"} }}) } - let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id] }) } + let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "no_with_playbook"}}}) } + let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id]}) } it_behaves_like "no_remove_resource" end context "no_without_playbook" do - let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "no_without_playbook"} }}) } - let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id] }) } + let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "no_without_playbook"}}}) } + let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id]}) } it_behaves_like "no_remove_resource" end context "yes_with_playbook" do - let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "yes_with_playbook"} }}) } - let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id] }) } + let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "yes_with_playbook"}}}) } + let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id]}) } it_behaves_like "yes_remove_resource" end context "yes_without_playbook" do - let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "yes_without_playbook"} }}) } - let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id] }) } + let(:ap_service) { FactoryBot.create(:service_ansible_playbook, :options => {:config_info => {:retirement => {:remove_resources => "yes_without_playbook"}}}) } + let(:ap_service_retire_task) { FactoryBot.create(:service_retire_task, :source => ap_service, :miq_request => miq_request, :options => {:src_ids => [ap_service.id]}) } it_behaves_like "yes_remove_resource" end @@ -176,7 +176,7 @@ s1.add_resource!(FactoryBot.create(:vm_openstack)) s2.add_resource!(FactoryBot.create(:vm_openstack)) - service_retire_task1 = FactoryBot.create(:service_retire_task, :source => s1, :miq_request => miq_request, :options => {:src_ids => [s1.id, s2.id] }) + service_retire_task1 = FactoryBot.create(:service_retire_task, :source => s1, :miq_request => miq_request, :options => {:src_ids => [s1.id, s2.id]}) 
service_retire_task1.after_request_task_create expect(VmRetireTask.count).to eq(2) @@ -200,7 +200,7 @@ service.add_resource!(FactoryBot.create(:service_template)) @miq_request = FactoryBot.create(:service_retire_request, :requester => user) @miq_request.approve(approver, reason) - @service_retire_task = FactoryBot.create(:service_retire_task, :source => service, :miq_request => @miq_request, :options => {:src_ids => [service.id] }) + @service_retire_task = FactoryBot.create(:service_retire_task, :source => service, :miq_request => @miq_request, :options => {:src_ids => [service.id]}) end it "creates subtask for provisioned services but not templates" do diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb index 0d8bf3121a7..6543d294135 100644 --- a/spec/models/service_spec.rb +++ b/spec/models/service_spec.rb @@ -87,7 +87,7 @@ end it "#power_states" do - expect(@service.power_states).to eq %w(on on on on) + expect(@service.power_states).to eq %w[on on on on] end it "#update_progress" do @@ -933,7 +933,7 @@ def create_deep_tree end it "serializes the dialog returned by the workflow with all attributes" do - expect(dialog_serializer).to receive(:serialize).with(Array["workflow_dialog"], true) + expect(dialog_serializer).to receive(:serialize).with(["workflow_dialog"], true) service.reconfigure_dialog end diff --git a/spec/models/service_template/copy_spec.rb b/spec/models/service_template/copy_spec.rb index 9c07cd50f3b..e155f7774b6 100644 --- a/spec/models/service_template/copy_spec.rb +++ b/spec/models/service_template/copy_spec.rb @@ -177,7 +177,7 @@ def copy_template(template, name = nil) context "picture" do it "creates a duplicate picture" do - service_template.picture = { :content => 'foobar', :extension => 'jpg' } + service_template.picture = {:content => 'foobar', :extension => 'jpg'} new_template = service_template.template_copy expect(service_template.picture.id).to_not eq(new_template.picture.id) diff --git a/spec/models/service_template_ansible_playbook_spec.rb b/spec/models/service_template_ansible_playbook_spec.rb index 65f4a3d750f..74da56551c0 100644 --- a/spec/models/service_template_ansible_playbook_spec.rb +++ b/spec/models/service_template_ansible_playbook_spec.rb @@ -12,14 +12,14 @@ let(:playbook) do FactoryBot.create(:embedded_playbook, - :configuration_script_source => script_source, - :manager => ems) + :configuration_script_source => script_source, + :manager => ems) end let(:job_template) do FactoryBot.create(:embedded_ansible_configuration_script, - :variables => catalog_item_options.fetch_path(:config_info, :provision, :extra_vars), - :manager => ems) + :variables => catalog_item_options.fetch_path(:config_info, :provision, :extra_vars), + :manager => ems) end let(:catalog_item_options) do @@ -61,9 +61,9 @@ end let(:catalog_item_options_three) do - changed_items = { :name => 'test_update_ansible_item', - :description => 'test updated ansible item', - :config_info => { + changed_items = {:name => 'test_update_ansible_item', + :description => 'test updated ansible item', + :config_info => { :provision => { :new_dialog_name => 'test_dialog_updated', :become_enabled => false, @@ -126,7 +126,7 @@ context 'with remove_resources in retirement option' do it 'sets the corresponding entry point' do - %w(yes_without_playbook no_without_playbook no_with_playbook pre_with_playbook post_with_playbook).each do |opt| + %w[yes_without_playbook no_without_playbook no_with_playbook pre_with_playbook post_with_playbook].each do |opt| opts = 
described_class.send(:validate_config_info, :retirement => {:remove_resources => opt}) expect(opts[:retirement][:fqname]).to eq(described_class.const_get(:RETIREMENT_ENTRY_POINTS)[opt]) end diff --git a/spec/models/service_template_ansible_tower_spec.rb b/spec/models/service_template_ansible_tower_spec.rb index 095e170b1f1..19a42914e5c 100644 --- a/spec/models/service_template_ansible_tower_spec.rb +++ b/spec/models/service_template_ansible_tower_spec.rb @@ -32,7 +32,7 @@ expect(service_template.name).to eq('Ansible Tower') expect(service_template.service_resources.count).to eq(1) expect(service_template.dialogs.first).to eq(service_dialog) - expect(service_template.resource_actions.pluck(:action)).to match_array(%w(Provision Retirement)) + expect(service_template.resource_actions.pluck(:action)).to match_array(%w[Provision Retirement]) expect(service_template.job_template).to eq(configuration_script) expect(service_template.config_info).to eq(catalog_item_options[:config_info]) end @@ -46,7 +46,7 @@ end it 'accepts a configuration' do - catalog_item_options[:config_info] = { :configuration => configuration_script } + catalog_item_options[:config_info] = {:configuration => configuration_script} service_template = ServiceTemplateAnsibleTower.create_catalog_item(catalog_item_options) expect(service_template.job_template).to eq(configuration_script) @@ -84,7 +84,7 @@ expect(updated.name).to eq('Updated Ansible Tower') expect(updated.config_info).to eq(updated_catalog_item_options[:config_info]) expect(updated.job_template).to eq(new_configuration_script) - expect(updated.resource_actions.pluck(:action)).to match_array(%w(Provision Reconfigure)) + expect(updated.resource_actions.pluck(:action)).to match_array(%w[Provision Reconfigure]) end it 'requires a configuration or configuration_script_id' do @@ -115,7 +115,7 @@ job_template = FactoryBot.create(:configuration_script) service_template = FactoryBot.create(:service_template_ansible_tower, :job_template => job_template) ra = FactoryBot.create(:resource_action, :action => 'Provision', :fqname => '/a/b/c') - service_template.create_resource_actions(:provision => { :fqname => ra.fqname }) + service_template.create_resource_actions(:provision => {:fqname => ra.fqname}) expected_config_info = { :configuration_script_id => job_template.id, diff --git a/spec/models/service_template_filter_spec.rb b/spec/models/service_template_filter_spec.rb index 935372663f6..64879888275 100644 --- a/spec/models/service_template_filter_spec.rb +++ b/spec/models/service_template_filter_spec.rb @@ -16,21 +16,19 @@ def build_model "middle" => {:type => 'composite', :children => ['vm_service']}, "vm_service" => {:type => 'atomic', :request => {:target_name => "fred", :src_vm_id => @src_vm.id, - :number_of_vms => 1, :requester => @user} - } - } + :number_of_vms => 1, :requester => @user}}} build_service_template_tree(model) end context "#include_service" do it "all service templates" do - @allowed_service_templates = %w(top middle vm_service) + @allowed_service_templates = %w[top middle vm_service] @request.create_request_tasks expect(@request.miq_request_tasks.count).to eql(5) end it "filter out the atomic service" do - @allowed_service_templates = %w(top middle) + @allowed_service_templates = %w[top middle] @request.create_request_tasks expect(@request.miq_request_tasks.count).to eql(2) end @@ -42,7 +40,7 @@ def build_model end it "filter out middle service" do - @allowed_service_templates = %w(top) + @allowed_service_templates = %w[top] @request.create_request_tasks 
expect(@request.miq_request_tasks.count).to eql(1) end diff --git a/spec/models/service_template_orchestration_spec.rb b/spec/models/service_template_orchestration_spec.rb index 797b3ee4edb..ee8a0536373 100644 --- a/spec/models/service_template_orchestration_spec.rb +++ b/spec/models/service_template_orchestration_spec.rb @@ -150,7 +150,7 @@ end it 'requires both a template and a manager' do - catalog_item_options[:config_info] = { :manager => manager } + catalog_item_options[:config_info] = {:manager => manager} expect do ServiceTemplateOrchestration.create_catalog_item(catalog_item_options) @@ -158,7 +158,7 @@ end it 'accepts a manager and a template' do - catalog_item_options[:config_info] = { :manager => manager, :template => template } + catalog_item_options[:config_info] = {:manager => manager, :template => template} service_template = ServiceTemplateOrchestration.create_catalog_item(catalog_item_options) expect(service_template.orchestration_template).to eq(template) @@ -200,7 +200,7 @@ expect(updated.config_info).to eq(updated_catalog_item_options[:config_info]) expect(updated.orchestration_template).to eq(new_template) expect(updated.orchestration_manager).to eq(new_manager) - expect(updated.resource_actions.pluck(:action)).to match_array(%w(Provision Reconfigure)) + expect(updated.resource_actions.pluck(:action)).to match_array(%w[Provision Reconfigure]) end it 'requires both template and manager id' do @@ -233,10 +233,10 @@ template = FactoryBot.create(:orchestration_template) manager = FactoryBot.create(:ext_management_system) service_template = FactoryBot.create(:service_template_orchestration, - :orchestration_template => template, - :orchestration_manager => manager) + :orchestration_template => template, + :orchestration_manager => manager) ra = FactoryBot.create(:resource_action, :action => 'Provision', :fqname => '/a/b/c') - service_template.create_resource_actions(:provision => { :fqname => ra.fqname }) + service_template.create_resource_actions(:provision => {:fqname => ra.fqname}) expected_config_info = { :template_id => template.id, diff --git a/spec/models/service_template_provision_request_quota_spec.rb b/spec/models/service_template_provision_request_quota_spec.rb index 1cbc582caa9..4e394950553 100644 --- a/spec/models/service_template_provision_request_quota_spec.rb +++ b/spec/models/service_template_provision_request_quota_spec.rb @@ -7,12 +7,12 @@ context "for cloud and infra providers," do def create_request(user, template, prov_options = {}) FactoryBot.create(:service_template_provision_request, :requester => user, - :description => "request", - :tenant_id => user.current_tenant.id, - :source_type => "ServiceTemplate", - :source_id => template.id, - :process => true, - :options => prov_options.merge(:owner_email => user.email)) + :description => "request", + :tenant_id => user.current_tenant.id, + :source_type => "ServiceTemplate", + :source_id => template.id, + :process => true, + :options => prov_options.merge(:owner_email => user.email)) end def create_test_request(user, service_template) @@ -43,8 +43,8 @@ def create_service_bundle(user, items, options = {}) @vmware_user1 = FactoryBot.create(:user_with_email, :miq_groups => [group]) @vmware_user2 = FactoryBot.create(:user_with_email, :miq_groups => [group]) @vmware_template = FactoryBot.create(:template_vmware, - :ext_management_system => ems, - :hardware => FactoryBot.create(:hardware, :cpu1x2, :memory_mb => 512)) + :ext_management_system => ems, + :hardware => FactoryBot.create(:hardware, :cpu1x2, 
:memory_mb => 512)) @vmware_prov_options = {:number_of_vms => [2, '2'], :vm_memory => [1024, '1024'], :number_of_cpus => [2, '2']} requests = [] @@ -113,7 +113,7 @@ def build_google_service_item let(:google_requests) do ems = FactoryBot.create(:ems_google_with_authentication, - :availability_zones => [FactoryBot.create(:availability_zone_google)]) + :availability_zones => [FactoryBot.create(:availability_zone_google)]) group = FactoryBot.create(:miq_group, :tenant => FactoryBot.create(:tenant)) @google_user1 = FactoryBot.create(:user_with_email, :miq_groups => [group]) @google_user2 = FactoryBot.create(:user_with_email, :miq_groups => [group]) diff --git a/spec/models/service_template_provision_task_spec.rb b/spec/models/service_template_provision_task_spec.rb index e6d2225b6ae..20d33969cfe 100644 --- a/spec/models/service_template_provision_task_spec.rb +++ b/spec/models/service_template_provision_task_spec.rb @@ -4,8 +4,8 @@ @admin = FactoryBot.create(:user_with_group) @request = FactoryBot.create(:service_template_provision_request, - :description => 'Service Request', - :requester => @admin) + :description => 'Service Request', + :requester => @admin) @task_0 = create_stp('Task 0 (Top)') @task_1 = create_stp('Task 1', 'pending', 7, 1) @task_1_1 = create_stp('Task 1 - 1', 'pending', 1, 3) @@ -16,37 +16,37 @@ @request.miq_request_tasks = [@task_0, @task_1, @task_1_1, @task_1_2, @task_2, @task_2_1, @task_3] @task_0.miq_request_tasks = [@task_1, @task_2, @task_3] - @task_1.miq_request_task = @task_0 + @task_1.miq_request_task = @task_0 @task_1.miq_request_tasks = [@task_1_1, @task_1_2] @task_1_1.miq_request_task = @task_1 @task_1_2.miq_request_task = @task_1 @task_2.miq_request_task = @task_0 @task_2.miq_request_tasks = [@task_2_1] - @task_3.miq_request_task = @task_0 + @task_3.miq_request_task = @task_0 end let(:tracking_label) { "r#{@request.id}_service_template_provision_task_#{@task_0.id}" } def create_stp(description, state = 'pending', prov_index = nil, scaling_max = nil) - if prov_index && scaling_max - options = {:service_resource_id => service_resource_id(prov_index, scaling_max)} - else - options = {} - end + options = if prov_index && scaling_max + {:service_resource_id => service_resource_id(prov_index, scaling_max)} + else + {} + end FactoryBot.create(:service_template_provision_task, - :description => description, - :userid => @admin.userid, - :state => state, - :miq_request_id => @request.id, - :options => options) + :description => description, + :userid => @admin.userid, + :state => state, + :miq_request_id => @request.id, + :options => options) end def service_resource_id(index, scaling_max) FactoryBot.create(:service_resource, - :provision_index => index, - :scaling_min => 1, - :scaling_max => scaling_max, - :resource_type => 'ServiceTemplate').id + :provision_index => index, + :scaling_min => 1, + :scaling_max => scaling_max, + :resource_type => 'ServiceTemplate').id end describe "#before_ae_starts" do diff --git a/spec/models/service_template_spec.rb b/spec/models/service_template_spec.rb index 06937fe8f14..1d4f540ab69 100644 --- a/spec/models/service_template_spec.rb +++ b/spec/models/service_template_spec.rb @@ -152,22 +152,22 @@ true_expression = MiqExpression.new("=" => {"field" => "Service-name", "value" => "bar"}) false_expression = MiqExpression.new("=" => {"field" => "Service-name", "value" => "foo"}) FactoryBot.create(:custom_button, - :name => "enabled button", - :applies_to_class => "Service", - :enablement_expression => true_expression) + :name => "enabled 
button", + :applies_to_class => "Service", + :enablement_expression => true_expression) FactoryBot.create(:custom_button, - :name => "disabled button", - :applies_to_class => "Service", - :enablement_expression => false_expression) + :name => "disabled button", + :applies_to_class => "Service", + :enablement_expression => false_expression) FactoryBot.create(:custom_button_set).tap do |group| group.add_member(FactoryBot.create(:custom_button, - :name => "enabled button in group", - :applies_to_class => "Service", - :enablement_expression => true_expression)) + :name => "enabled button in group", + :applies_to_class => "Service", + :enablement_expression => true_expression)) group.add_member(FactoryBot.create(:custom_button, - :name => "disabled button in group", - :applies_to_class => "Service", - :enablement_expression => false_expression)) + :name => "disabled button in group", + :applies_to_class => "Service", + :enablement_expression => false_expression)) end expected = { @@ -195,21 +195,21 @@ true_expression = MiqExpression.new("=" => {"field" => "ServiceTemplate-name", "value" => service_template.name}) false_expression = MiqExpression.new("=" => {"field" => "ServiceTemplate-name", "value" => "bar"}) visible_button = FactoryBot.create(:custom_button, - :applies_to_class => "ServiceTemplate", - :applies_to_id => service_template.id, - :visibility_expression => true_expression) + :applies_to_class => "ServiceTemplate", + :applies_to_id => service_template.id, + :visibility_expression => true_expression) _hidden_button = FactoryBot.create(:custom_button, - :applies_to_class => "ServiceTemplate", - :applies_to_id => service_template.id, - :visibility_expression => false_expression) + :applies_to_class => "ServiceTemplate", + :applies_to_id => service_template.id, + :visibility_expression => false_expression) visible_button_in_group = FactoryBot.create(:custom_button, - :applies_to_class => "ServiceTemplate", - :applies_to_id => service_template.id, - :visibility_expression => true_expression) - hidden_button_in_group = FactoryBot.create(:custom_button, :applies_to_class => "ServiceTemplate", :applies_to_id => service_template.id, - :visibility_expression => false_expression) + :visibility_expression => true_expression) + hidden_button_in_group = FactoryBot.create(:custom_button, + :applies_to_class => "ServiceTemplate", + :applies_to_id => service_template.id, + :visibility_expression => false_expression) service_template.custom_button_sets << FactoryBot.create(:custom_button_set).tap do |group| group.add_member(visible_button_in_group) group.add_member(hidden_button_in_group) @@ -361,14 +361,14 @@ describe "#create_service" do let(:service_task) do FactoryBot.create(:service_template_provision_task, - :miq_request => service_template_request, - :options => {:service_resource_id => service_resource.id}) + :miq_request => service_template_request, + :options => {:service_resource_id => service_resource.id}) end let(:service_template_request) { FactoryBot.create(:service_template_provision_request, :requester => user) } let(:service_resource) do FactoryBot.create(:service_resource, - :resource_type => 'MiqRequest', - :resource_id => service_template_request.id) + :resource_type => 'MiqRequest', + :resource_id => service_template_request.id) end let(:user) { FactoryBot.create(:user) } let(:parent_service) { FactoryBot.create(:service) } @@ -688,9 +688,9 @@ dialog = FactoryBot.create(:dialog) template = FactoryBot.create(:service_template) request = 
FactoryBot.create(:service_template_provision_request, - :requester => @user, - :options => {:foo => 'bar', :baz => nil }) - template.create_resource_actions(:provision => { :fqname => @ra.fqname, :dialog_id => dialog.id }) + :requester => @user, + :options => {:foo => 'bar', :baz => nil}) + template.create_resource_actions(:provision => {:fqname => @ra.fqname, :dialog_id => dialog.id}) add_and_save_service(template, request) template.reload @@ -724,17 +724,17 @@ let(:ra2) { FactoryBot.create(:resource_action, :action => 'Retirement') } let(:ems) { FactoryBot.create(:ems_amazon) } let(:content) do - "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAABGdBTUEAALGP"\ - "C/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3Cc"\ - "ulE8AAAACXBIWXMAAAsTAAALEwEAmpwYAAABWWlUWHRYTUw6Y29tLmFkb2Jl"\ - "LnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIg"\ - "eDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpy"\ - "ZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1u"\ - "cyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAg"\ - "ICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYv"\ - "MS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3Jp"\ - "ZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpS"\ - "REY+CjwveDp4bXBtZXRhPgpMwidZAAAADUlEQVQIHWNgYGCwBQAAQgA+3N0+"\ + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAABGdBTUEAALGP" \ + "C/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3Cc" \ + "ulE8AAAACXBIWXMAAAsTAAALEwEAmpwYAAABWWlUWHRYTUw6Y29tLmFkb2Jl" \ + "LnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIg" \ + "eDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpy" \ + "ZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1u" \ + "cyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAg" \ + "ICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYv" \ + "MS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3Jp" \ + "ZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpS" \ + "REY+CjwveDp4bXBtZXRhPgpMwidZAAAADUlEQVQIHWNgYGCwBQAAQgA+3N0+" \ "xQAAAABJRU5ErkJggg==" end let(:vm) { FactoryBot.create(:vm_amazon, :ext_management_system => ems) } @@ -747,7 +747,7 @@ :service_type => 'atomic', :prov_type => 'amazon', :display => 'false', - :picture => { :content => content, :extension => 'jpg' }, + :picture => {:content => content, :extension => 'jpg'}, :description => 'a description', :config_info => { :miq_request_dialog_name => request_dialog.name, @@ -790,10 +790,10 @@ let(:picture) { Picture.create(catalog_item_options.delete(:picture)) } it "creates the picture without error" do - expect { + expect do service_template = ServiceTemplate.create_catalog_item(catalog_item_options, user) service_template.picture = picture - }.not_to raise_error + end.not_to raise_error end it "has the picture assigned properly" do @@ -815,7 +815,7 @@ { :name => 'Updated Template Name', :display => 'false', - :picture => { :content => new_picture_content, :extension => 'jpg' }, + :picture => {:content => new_picture_content, :extension => 'jpg'}, :description => 'a description', :config_info => { :miq_request_dialog_name => request_dialog.name, @@ -958,7 +958,7 @@ :args => [user.id, {}, {}], :class_name => "ServiceTemplate", :instance_id => service_template.id, - :method_name => "order", + :method_name => "order" ) end @@ -967,7 +967,7 @@ expect(resource_action_workflow).to receive(:validate_dialog).twice.and_return([]) service_template.order(user, {}, {}, Time.zone.now.utc.to_s) - service_template.order(user, {}, {}, 
(Time.zone.now + 1.hour).utc.to_s) + service_template.order(user, {}, {}, 1.hour.from_now.utc.to_s) expect(service_template.miq_schedules.length).to eq(2) end @@ -989,7 +989,7 @@ end it "provisions a service template with errors" do - expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w(Error1 Error2)) + expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w[Error1 Error2]) expect(resource_action_workflow).to receive(:request_options=).with({ :init_defaults => true, :provision_workflow => true }) @@ -1012,7 +1012,7 @@ end it "provisions a service template with errors" do - expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w(Error1 Error2)) + expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w[Error1 Error2]) expect(resource_action_workflow).to receive(:request_options=).with({ :initiator => 'control', :submit_workflow => true }) @@ -1059,7 +1059,7 @@ end it "provisions a service template with errors" do - expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w(Error1 Error2)) + expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w[Error1 Error2]) expect(resource_action_workflow).to receive(:request_options=).with({ :initiator => 'control', :provision_workflow => true }) @@ -1078,7 +1078,7 @@ end it "provisions a service template with errors" do - expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w(Error1 Error2)) + expect(resource_action_workflow).to receive(:validate_dialog).and_return(%w[Error1 Error2]) expect(resource_action_workflow).to receive(:request_options=).with({:provision_workflow => true}) expect { service_template.provision_request(user, arg1) }.to raise_error(RuntimeError) @@ -1091,19 +1091,19 @@ it "only returns generic with no providers" do expect(ServiceTemplate.catalog_item_types).to match( hash_including('amazon' => {:description => 'Amazon', :display => false}, - 'generic' => {:description => 'Generic', :display => true }) + 'generic' => {:description => 'Generic', :display => true}) ) end it "returns orchestration template and generic" do FactoryBot.create(:orchestration_template) expect(ServiceTemplate.catalog_item_types).to match( - hash_including('amazon' => { :description => 'Amazon', - :display => false }, - 'generic' => { :description => 'Generic', - :display => true }, - 'generic_orchestration' => { :description => 'Orchestration', - :display => true}) + hash_including('amazon' => {:description => 'Amazon', + :display => false}, + 'generic' => {:description => 'Generic', + :display => true}, + 'generic_orchestration' => {:description => 'Orchestration', + :display => true}) ) end end @@ -1169,6 +1169,7 @@ def add_and_save_service(p, c) def print_svc(svc, indent = "") return if indent.length > 10 + svc.service_resources.each do |s| puts indent + s.resource.name print_svc(s.resource, indent + " ") diff --git a/spec/models/settings_change_spec.rb b/spec/models/settings_change_spec.rb index 38e8f523fba..991d39d9b3b 100644 --- a/spec/models/settings_change_spec.rb +++ b/spec/models/settings_change_spec.rb @@ -11,12 +11,12 @@ describe "#key_path" do it "with multiple parts in the key" do change = described_class.new(:key => "/api/token_ttl") - expect(change.key_path).to eq %i(api token_ttl) + expect(change.key_path).to eq %i[api token_ttl] end it "with one part in the key" do change = described_class.new(:key => "/api") - expect(change.key_path).to eq %i(api) + expect(change.key_path).to eq %i[api] end it "with 
key of /" do diff --git a/spec/models/storage_file_spec.rb b/spec/models/storage_file_spec.rb index 7231e9a83d2..8a3189cbfab 100644 --- a/spec/models/storage_file_spec.rb +++ b/spec/models/storage_file_spec.rb @@ -67,21 +67,21 @@ it "marks as a snapshot file for files with extenstion .vmsd and .vmsn" do files = [] - %w(vmsd vmsn).each { |f| files << double(:ext_name => f) } + %w[vmsd vmsn].each { |f| files << double(:ext_name => f) } result = described_class.split_file_types(files) expect(result[:snapshot].size).to eq(2) end it "marks as vm_ram for files with extension .nvram and .vswp" do files = [] - %w(nvram vswp).each { |f| files << double(:ext_name => f) } + %w[nvram vswp].each { |f| files << double(:ext_name => f) } result = described_class.split_file_types(files) expect(result[:vm_ram].size).to eq(2) end it "marks as vm_misc for files with extension .vmx, .vmtx, .vmxf, .log and .hlog" do files = [] - %w(vmx vmtx vmxf log hlog).each { |f| files << double(:ext_name => f) } + %w[vmx vmtx vmxf log hlog].each { |f| files << double(:ext_name => f) } result = described_class.split_file_types(files) expect(result[:vm_misc].size).to eq(5) end diff --git a/spec/models/storage_spec.rb b/spec/models/storage_spec.rb index 06b7ef8d959..c0bd720dbc8 100644 --- a/spec/models/storage_spec.rb +++ b/spec/models/storage_spec.rb @@ -39,25 +39,25 @@ it "#scan_complete?" do miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} miq_task.context_data[:targets] = [123, 456, 789] miq_task.context_data[:complete] = [] expect(Storage.scan_complete?(miq_task)).to be_falsey - miq_task.context_data[:complete] = [123, 456, 789] + miq_task.context_data[:complete] = [123, 456, 789] expect(Storage.scan_complete?(miq_task)).to be_truthy end it "#scan_complete_message" do miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} - miq_task.context_data[:targets] = [123, 456, 789] + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data[:targets] = [123, 456, 789] expect(Storage.scan_complete_message(miq_task)).to eq("SmartState Analysis for 3 storages complete") end it "#scan_update_message" do miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} miq_task.context_data[:targets] = [123, 456, 789] miq_task.context_data[:complete] = [123] miq_task.context_data[:pending][789] = 98765 @@ -119,7 +119,7 @@ it "#scan_storages_unprocessed" do miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} miq_task.context_data[:targets] = [@storage1.id, @storage2.id, @storage3.id] miq_task.context_data[:complete] = [] miq_task.context_data[:pending] = {} @@ -134,7 +134,7 @@ miq_task.context_data[:pending].delete(@storage2.id) expect(Storage.scan_storages_unprocessed(miq_task)).to match_array [@storage1.id, @storage2.id] - miq_task.context_data[:complete] = [@storage1.id, @storage2.id, @storage3.id] + miq_task.context_data[:complete] = [@storage1.id, @storage2.id, @storage3.id] expect(Storage.scan_storages_unprocessed(miq_task)).to eq([]) end @@ -216,7 +216,7 @@ it "#scan_timer" do expect(Storage).to 
receive(:scan_queue_watchdog).never - miq_task = Storage.scan_timer(nil) + miq_task = Storage.scan_timer(nil) expect(miq_task).to be_nil expect(MiqTask.count).to eq(0) expect(MiqQueue.count).to eq(0) @@ -239,13 +239,13 @@ it "#scan_queue" do bogus_id = @storage1.id - 1 miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} miq_task.context_data[:targets] = [bogus_id, @storage1.id, @storage2.id, @storage3.id] miq_task.context_data[:complete] = [] miq_task.context_data[:pending] = {} miq_task.save! - qitem1 = FactoryBot.create(:miq_queue) + qitem1 = FactoryBot.create(:miq_queue) allow_any_instance_of(Storage).to receive_messages(:scan_queue_item => qitem1) Storage.scan_queue(miq_task) miq_task.reload @@ -257,7 +257,7 @@ miq_task.context_data[:complete] << @storage1.id miq_task.context_data[:pending].delete(@storage1.id) miq_task.save! - qitem2 = FactoryBot.create(:miq_queue) + qitem2 = FactoryBot.create(:miq_queue) allow_any_instance_of(Storage).to receive_messages(:scan_queue_item => qitem2) Storage.scan_queue(miq_task) miq_task.reload @@ -281,7 +281,7 @@ max_qitems_per_scan_request = 1 allow(Storage).to receive_messages(:max_qitems_per_scan_request => max_qitems_per_scan_request) miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} miq_task.context_data[:targets] = [@storage1.id, @storage2.id, @storage3.id] miq_task.context_data[:complete] = [] miq_task.context_data[:pending] = {} @@ -294,7 +294,7 @@ expect(miq_task.context_data[:complete]).to eq([]) expect(miq_task.context_data[:pending].length).to eq(0) - qitem1 = FactoryBot.create(:miq_queue) + qitem1 = FactoryBot.create(:miq_queue) miq_task.context_data[:pending][@storage1.id] = qitem1.id miq_task.save! 
expect(Storage).to receive(:scan_queue_watchdog).with(miq_task.id).once @@ -327,7 +327,7 @@ it "#scan_complete_callback" do miq_task = FactoryBot.create(:miq_task) - miq_task.context_data = {:targets => [], :complete => [], :pending => {}} + miq_task.context_data = {:targets => [], :complete => [], :pending => {}} miq_task.context_data[:targets] = [@storage1.id, @storage2.id, @storage3.id] miq_task.context_data[:complete] = [] miq_task.context_data[:pending][@storage1.id] = 123 diff --git a/spec/models/sysprep_file_spec.rb b/spec/models/sysprep_file_spec.rb index 25e04af3108..63c64dd8bf3 100644 --- a/spec/models/sysprep_file_spec.rb +++ b/spec/models/sysprep_file_spec.rb @@ -9,8 +9,8 @@ context "valid inputs" do ["INI", "XML"].each do |type| context type.to_s do - it("allows string") { expect(described_class.new(send("good_#{type.downcase}"))).to be_kind_of(SysprepFile) } - it("allows IO stream") { expect(described_class.new(StringIO.new(send("good_#{type.downcase}")))).to be_kind_of(SysprepFile) } + it("allows string") { expect(described_class.new(send(:"good_#{type.downcase}"))).to be_kind_of(SysprepFile) } + it("allows IO stream") { expect(described_class.new(StringIO.new(send(:"good_#{type.downcase}")))).to be_kind_of(SysprepFile) } end end end diff --git a/spec/models/tenant_quota_spec.rb b/spec/models/tenant_quota_spec.rb index d3158a73c2f..d0d2386cb2d 100644 --- a/spec/models/tenant_quota_spec.rb +++ b/spec/models/tenant_quota_spec.rb @@ -236,7 +236,7 @@ end it "displays available resources for 'Allocated Storage in GB' quota" do - expect(tenant_quota_storage.available).to eq(2.0 * 1.gigabytes - 1_000_000.0) + expect(tenant_quota_storage.available).to eq((2.0 * 1.gigabytes) - 1_000_000.0) end it "displays available resources for 'Allocated Number of Virtual Machines' quota" do diff --git a/spec/models/tenant_spec.rb b/spec/models/tenant_spec.rb index 569c9c619f7..bcadc9a9192 100644 --- a/spec/models/tenant_spec.rb +++ b/spec/models/tenant_spec.rb @@ -2,11 +2,11 @@ include_examples ".seed called multiple times", 1 let(:tenant) { described_class.new(:domain => 'x.com', :parent => default_tenant) } - let(:user_admin) { + let(:user_admin) do user = FactoryBot.create(:user_admin) allow(user).to receive(:get_timezone).and_return("UTC") user - } + end let(:default_tenant) do root_tenant @@ -91,7 +91,7 @@ it "detects Tenant" do expect(tenant.display_type).to eql 'Tenant' - expect(project.display_type).not_to eql 'Tenant' + expect(project.display_type).not_to eql 'Tenant' end it "detects Project" do @@ -325,14 +325,13 @@ let(:tenant1) { FactoryBot.create(:tenant) } let(:tenant1_admins) do FactoryBot.create(:miq_group, - :miq_user_role => admin_with_brand, - :tenant => tenant1 - ) + :miq_user_role => admin_with_brand, + :tenant => tenant1) end let(:tenant1_users) do FactoryBot.create(:miq_group, - :tenant => tenant1, - :miq_user_role => self_service_role) + :tenant => tenant1, + :miq_user_role => self_service_role) end let(:admin) { FactoryBot.create(:user, :miq_groups => [tenant1_users, tenant1_admins]) } let(:user1) { FactoryBot.create(:user, :miq_groups => [tenant1_users]) } @@ -365,10 +364,10 @@ dom3 = FactoryBot.create(:miq_ae_domain, :name => 'A', :tenant_id => root_tenant.id) dom4 = FactoryBot.create(:miq_ae_domain, :name => 'B', :tenant_id => root_tenant.id) - expect(root_tenant.visible_domains.collect(&:name)).to eq(%w(B A Redhat ManageIQ)) + expect(root_tenant.visible_domains.collect(&:name)).to eq(%w[B A Redhat ManageIQ]) ids = [dom4.id, dom3.id] 
root_tenant.reset_domain_priority_by_ordered_ids(ids) - expect(root_tenant.visible_domains.collect(&:name)).to eq(%w(A B Redhat ManageIQ)) + expect(root_tenant.visible_domains.collect(&:name)).to eq(%w[A B Redhat ManageIQ]) dom4.reload expect(dom4.priority).to eq(2) end @@ -382,10 +381,10 @@ FactoryBot.create(:miq_ae_domain, :name => 'T1_B', :tenant_id => t1.id) dom5 = FactoryBot.create(:miq_ae_domain, :name => 'T1_1_A', :tenant_id => t1_1.id) dom6 = FactoryBot.create(:miq_ae_domain, :name => 'T1_1_B', :tenant_id => t1_1.id) - expect(t1_1.visible_domains.collect(&:name)).to eq(%w(T1_1_B T1_1_A T1_B T1_A Redhat ManageIQ)) + expect(t1_1.visible_domains.collect(&:name)).to eq(%w[T1_1_B T1_1_A T1_B T1_A Redhat ManageIQ]) ids = [dom6.id, dom5.id] t1_1.reset_domain_priority_by_ordered_ids(ids) - expect(t1_1.visible_domains.collect(&:name)).to eq(%w(T1_1_A T1_1_B T1_B T1_A Redhat ManageIQ)) + expect(t1_1.visible_domains.collect(&:name)).to eq(%w[T1_1_A T1_1_B T1_B T1_A Redhat ManageIQ]) end end @@ -396,7 +395,7 @@ FactoryBot.create(:miq_ae_system_domain, :name => 'DOM10', :priority => 10, :tenant_id => root_tenant.id, :enabled => false) - expect(t1_1.sequenceable_domains.collect(&:name)).to eq(%w(DOM15)) + expect(t1_1.sequenceable_domains.collect(&:name)).to eq(%w[DOM15]) end context "visibility" do @@ -418,29 +417,29 @@ # This spec is here to confirm that we don't mutate the memoized # ancestor_ids value when calling `Tenant#visible_domains`. it "does not affect the memoized ancestor_ids variable" do - expected_ancestor_ids = t1_1.ancestor_ids.dup # dup required, don't remove + expected_ancestor_ids = t1_1.ancestor_ids.dup # dup required, don't remove t1_1.visible_domains expect(t1_1.ancestor_ids).to eq(expected_ancestor_ids) end it "#visibile_domains sub_tenant" do t1_1 - expect(t1_1.visible_domains.collect(&:name)).to eq(%w(DOM5 DOM3 DOM1 DOM15 DOM10)) + expect(t1_1.visible_domains.collect(&:name)).to eq(%w[DOM5 DOM3 DOM1 DOM15 DOM10]) end it "#enabled_domains sub_tenant" do t1_1 - expect(t1_1.enabled_domains.collect(&:name)).to eq(%w(DOM5 DOM3 DOM1 DOM15)) + expect(t1_1.enabled_domains.collect(&:name)).to eq(%w[DOM5 DOM3 DOM1 DOM15]) end it "#editable domains sub_tenant" do t1_1 - expect(t1_1.editable_domains.collect(&:name)).to eq(%w(DOM5 DOM3)) + expect(t1_1.editable_domains.collect(&:name)).to eq(%w[DOM5 DOM3]) end it "#visible_domains tenant" do t2 - expect(t2.visible_domains.collect(&:name)).to eq(%w(DOM2 DOM15 DOM10)) + expect(t2.visible_domains.collect(&:name)).to eq(%w[DOM2 DOM15 DOM10]) end end @@ -465,24 +464,24 @@ it "no editable domains available for current tenant" do t1_1 FactoryBot.create(:miq_ae_system_domain, - :name => 'non_editable', - :priority => 3, - :tenant_id => t1_1.id) + :name => 'non_editable', + :priority => 3, + :tenant_id => t1_1.id) expect(t1_1.any_editable_domains?).to eq(false) end it "editable domains available for current_tenant" do t1_1 FactoryBot.create(:miq_ae_domain, - :name => 'editable', - :priority => 3, - :tenant_id => t1_1.id) + :name => 'editable', + :priority => 3, + :tenant_id => t1_1.id) expect(t1_1.any_editable_domains?).to eq(true) end end describe ".set_quotas" do - let(:tenant) { FactoryBot.build(:tenant, :parent => default_tenant) } + let(:tenant) { FactoryBot.build(:tenant, :parent => default_tenant) } it "can set quotas" do tenant.set_quotas(:vms_allocated => {:value => 20}) @@ -537,7 +536,7 @@ tq = default_tenant.tenant_quotas expect(tq.length).to eql 2 - expect(tq.map(&:name).sort).to eql %w(mem_allocated vms_allocated) + 
expect(tq.map(&:name).sort).to eql %w[mem_allocated vms_allocated] end it "deletes existing quotas when nil value is passed" do @@ -554,7 +553,7 @@ end describe ".get_quotas" do - let(:tenant) { FactoryBot.build(:tenant, :parent => default_tenant) } + let(:tenant) { FactoryBot.build(:tenant, :parent => default_tenant) } it "can get quotas" do expect(tenant.get_quotas).not_to be_empty @@ -841,10 +840,10 @@ User.with_user(user_admin) do tenants, projects = Tenant.tenant_and_project_names - expect(tenants.map(&:first)).to eq(%w(root root/ten1 root/ten2 root/ten3)) + expect(tenants.map(&:first)).to eq(%w[root root/ten1 root/ten2 root/ten3]) expect(tenants.first.last).to eq(root_tenant.id) - expect(projects.map(&:first)).to eq(%w(root/proj3 root/ten1/proj1 root/ten2/proj2)) + expect(projects.map(&:first)).to eq(%w[root/proj3 root/ten1/proj1 root/ten2/proj2]) end end end @@ -958,7 +957,7 @@ end it "raises error when region is not passed" do - exception_message = "You need to pass specific region with :other_region: \n"\ + exception_message = "You need to pass specific region with :other_region: \n" \ "FactoryBot.create(:tenant, :in_other_region, :other_region => ) " expect { FactoryBot.create(:tenant, :in_other_region) }.to raise_error(exception_message) end diff --git a/spec/models/time_profile_spec.rb b/spec/models/time_profile_spec.rb index 5a7a2c4148b..e2ae28455fe 100644 --- a/spec/models/time_profile_spec.rb +++ b/spec/models/time_profile_spec.rb @@ -217,16 +217,16 @@ tp.profile_type = "global" tp.save FactoryBot.create(:time_profile, - :description => "test1", - :profile_type => "user", - :profile_key => "some_user", - :rollup_daily_metrics => true) + :description => "test1", + :profile_type => "user", + :profile_key => "some_user", + :rollup_daily_metrics => true) FactoryBot.create(:time_profile, - :description => "test2", - :profile_type => "user", - :profile_key => "foo", - :rollup_daily_metrics => true) + :description => "test2", + :profile_type => "user", + :profile_key => "foo", + :rollup_daily_metrics => true) tp = TimeProfile.profiles_for_user("foo", MiqRegion.my_region_number) expect(tp.count).to eq(2) end @@ -239,18 +239,18 @@ it "gets time profiles that matches user's tz and marked for daily Rollup" do FactoryBot.create(:time_profile, - :description => "test1", - :profile_type => "user", - :profile_key => "some_user", - :tz => "other_tz", - :rollup_daily_metrics => true) + :description => "test1", + :profile_type => "user", + :profile_key => "some_user", + :tz => "other_tz", + :rollup_daily_metrics => true) FactoryBot.create(:time_profile, - :description => "test2", - :profile_type => "user", - :profile_key => "foo", - :tz => "foo_tz", - :rollup_daily_metrics => true) + :description => "test2", + :profile_type => "user", + :profile_key => "foo", + :tz => "foo_tz", + :rollup_daily_metrics => true) tp = TimeProfile.profile_for_user_tz("foo", "foo_tz") expect(tp.description).to eq("test2") end @@ -288,7 +288,7 @@ FactoryBot.create(:time_profile, :tz => "tz") FactoryBot.create(:time_profile, :tz => "other_tz") - expect(TimeProfile.all_timezones).to match_array(%w(tz other_tz)) + expect(TimeProfile.all_timezones).to match_array(%w[tz other_tz]) end end diff --git a/spec/models/user_password_spec.rb b/spec/models/user_password_spec.rb index 6a71387fe74..3e77e7630ca 100644 --- a/spec/models/user_password_spec.rb +++ b/spec/models/user_password_spec.rb @@ -7,7 +7,7 @@ @old = 'smartvm' @admin = FactoryBot.create(:user, :userid => 'admin', - :password_digest => 
BCrypt::Password.create(@old)) + :password_digest => BCrypt::Password.create(@old)) end it "should have set password" do diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index e497fa74718..ce45180ef93 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -134,8 +134,8 @@ describe "role methods" do let(:user) do FactoryBot.create(:user, - :settings => {"Setting1" => 1, "Setting2" => 2, "Setting3" => 3}, - :role => role_name) + :settings => {"Setting1" => 1, "Setting2" => 2, "Setting3" => 3}, + :role => role_name) end describe "#self_service?" do @@ -231,8 +231,7 @@ :ldap_role => true, :user_suffix => "manageiq.com", :group_memberships_max_depth => 2, - :ldaphost => ["192.168.254.15"]} - } + :ldaphost => ["192.168.254.15"]}} stub_settings(@auth_config) @miq_ldap = double('miq_ldap') allow(@miq_ldap).to receive_messages(:bind => false) @@ -355,7 +354,7 @@ group = FactoryBot.create(:miq_group) @user = FactoryBot.create(:user, :miq_groups => [group]) @ems = FactoryBot.create(:ems_vmware, :name => "test_vcenter") - @storage = FactoryBot.create(:storage, :name => "test_storage_nfs", :store_type => "NFS") + @storage = FactoryBot.create(:storage, :name => "test_storage_nfs", :store_type => "NFS") @hw1 = FactoryBot.create(:hardware, :cpu_total_cores => @num_cpu, :memory_mb => @ram_size) @hw2 = FactoryBot.create(:hardware, :cpu_total_cores => @num_cpu, :memory_mb => @ram_size) @@ -367,25 +366,25 @@ @disk4 = FactoryBot.create(:disk, :device_type => "disk", :size => @disk_size, :hardware_id => @hw4.id) @active_vm = FactoryBot.create(:vm_vmware, - :name => "Active VM", - :evm_owner_id => @user.id, - :ems_id => @ems.id, - :storage_id => @storage.id, - :hardware => @hw1) + :name => "Active VM", + :evm_owner_id => @user.id, + :ems_id => @ems.id, + :storage_id => @storage.id, + :hardware => @hw1) @archived_vm = FactoryBot.create(:vm_vmware, - :name => "Archived VM", - :evm_owner_id => @user.id, - :hardware => @hw2) + :name => "Archived VM", + :evm_owner_id => @user.id, + :hardware => @hw2) @orphaned_vm = FactoryBot.create(:vm_vmware, - :name => "Orphaned VM", - :evm_owner_id => @user.id, - :storage_id => @storage.id, - :hardware => @hw3) - @retired_vm = FactoryBot.create(:vm_vmware, - :name => "Retired VM", + :name => "Orphaned VM", :evm_owner_id => @user.id, - :retired => true, - :hardware => @hw4) + :storage_id => @storage.id, + :hardware => @hw3) + @retired_vm = FactoryBot.create(:vm_vmware, + :name => "Retired VM", + :evm_owner_id => @user.id, + :retired => true, + :hardware => @hw4) end it "#active_vms" do @@ -408,7 +407,7 @@ expect(@user.provisioned_storage).to eq(@ram_size.megabyte + @disk_size) end - %w(allocated_memory allocated_vcpu allocated_storage provisioned_storage).each do |vcol| + %w[allocated_memory allocated_vcpu allocated_storage provisioned_storage].each do |vcol| it "should have virtual column #{vcol} " do expect(described_class).to have_virtual_column vcol.to_s, :integer end @@ -787,7 +786,7 @@ it "handles multiple columns" do a1 = FactoryBot.create(:miq_group, :features => "good") a2 = FactoryBot.create(:miq_group, :features => "something") - b = FactoryBot.create(:miq_group, :features => %w(good everything)) + b = FactoryBot.create(:miq_group, :features => %w[good everything]) c = FactoryBot.create(:miq_group, :features => "everything") u1 = FactoryBot.create(:user, :miq_groups => [a1]) diff --git a/spec/models/vim_performance_analysis_spec.rb b/spec/models/vim_performance_analysis_spec.rb index 9c446074675..a2b948a4552 100644 --- 
a/spec/models/vim_performance_analysis_spec.rb +++ b/spec/models/vim_performance_analysis_spec.rb @@ -5,7 +5,7 @@ let(:time_profile) { FactoryBot.create(:time_profile_with_rollup, :profile => {:tz => "UTC"}) } let(:ems) { FactoryBot.create(:ems_vmware) } - let(:good_day) { DateTime.current - 2.day } + let(:good_day) { DateTime.current - 2.days } let(:bad_day) { DateTime.current - 4.months } let(:vm1) do FactoryBot.create(:vm_vmware, :name => "test_vm", :tags => [tag_good], :ext_management_system => ems).tap do |vm| @@ -20,12 +20,12 @@ let(:storage) { FactoryBot.create(:storage_vmware) } let(:host1) do FactoryBot.create(:host, - :hardware => FactoryBot.create(:hardware, - :memory_mb => 8124, - :cpu_total_cores => 1, - :cpu_speed => 9576), - :vms => [vm1], - :storages => [storage]) + :hardware => FactoryBot.create(:hardware, + :memory_mb => 8124, + :cpu_total_cores => 1, + :cpu_speed => 9576), + :vms => [vm1], + :storages => [storage]) end let(:ems_cluster) do @@ -41,7 +41,7 @@ ems_cluster expect(MetricRollup.count).to be > 0 - cols = %i(id name project provider_id) + cols = %i[id name project provider_id] # :conditions => ["resource_type = ? and tag_names like ?", tag_klass, "%#{cat}/#{tag}%"] options = {:end_date => DateTime.current, :days => 30, :ext_options => {:time_profile => time_profile}} @@ -56,8 +56,8 @@ describe '.group_perf_by_timestamp' do let(:storage_metric) do FactoryBot.create(:metric_rollup, - :derived_storage_total => '42', - :derived_storage_free => '13') + :derived_storage_total => '42', + :derived_storage_free => '13') end let(:cols) { [:derived_storage_total, :derived_storage_free, :v_derived_storage_used] } @@ -85,14 +85,13 @@ def add_rollup(vm, timestamp, tag = tag_text) vm.metric_rollups << FactoryBot.create(:metric_rollup_vm_daily, :with_data, - :timestamp => timestamp, - :tag_names => tag, - :parent_host => vm.host, - :parent_ems_cluster => vm.ems_cluster, - :parent_ems => vm.ext_management_system, - :parent_storage => vm.storage, - :resource_name => vm.name, - :time_profile => time_profile, - ) + :timestamp => timestamp, + :tag_names => tag, + :parent_host => vm.host, + :parent_ems_cluster => vm.ems_cluster, + :parent_ems => vm.ext_management_system, + :parent_storage => vm.storage, + :resource_name => vm.name, + :time_profile => time_profile) end end diff --git a/spec/models/vim_performance_tag_spec.rb b/spec/models/vim_performance_tag_spec.rb index 95c02331ea7..0e35490ba8f 100644 --- a/spec/models/vim_performance_tag_spec.rb +++ b/spec/models/vim_performance_tag_spec.rb @@ -93,16 +93,14 @@ case_sets[vm.name.to_sym].each do |timestamp, value| if vm.name == "none" perf = FactoryBot.create(:metric_rollup_vm_hr, - :timestamp => timestamp, - :cpu_usagemhz_rate_average => value - ) + :timestamp => timestamp, + :cpu_usagemhz_rate_average => value) else tag = "environment/#{vm.name}" perf = FactoryBot.create(:metric_rollup_vm_hr, - :timestamp => timestamp, - :cpu_usagemhz_rate_average => value, - :tag_names => tag - ) + :timestamp => timestamp, + :cpu_usagemhz_rate_average => value, + :tag_names => tag) end vm.metric_rollups << perf end @@ -111,9 +109,8 @@ case_sets[:host].each do |timestamp, value| perf = FactoryBot.create(:metric_rollup_host_hr, - :timestamp => timestamp, - :cpu_usagemhz_rate_average => value - ) + :timestamp => timestamp, + :cpu_usagemhz_rate_average => value) @host.metric_rollups << perf end @host.save! 
@@ -140,9 +137,9 @@ results.each do |t| ts = t.timestamp.iso8601.to_s @classification_entries.each do |entry| - expect(@precomputed[ts][entry.to_sym]).to eq(t.send("cpu_usagemhz_rate_average_#{entry}")) + expect(@precomputed[ts][entry.to_sym]).to eq(t.send(:"cpu_usagemhz_rate_average_#{entry}")) end - expect(@precomputed[ts][:none]).to eq(t.send("cpu_usagemhz_rate_average__none_")) + expect(@precomputed[ts][:none]).to eq(t.send(:cpu_usagemhz_rate_average__none_)) end end end diff --git a/spec/models/vim_performance_tag_value_spec.rb b/spec/models/vim_performance_tag_value_spec.rb index fe1a14e6dbd..6a67f7a95c2 100644 --- a/spec/models/vim_performance_tag_value_spec.rb +++ b/spec/models/vim_performance_tag_value_spec.rb @@ -9,20 +9,20 @@ it "handles 'realtime' interval" do interval = 'realtime' - metrics = VimPerformanceTagValue.send('get_metrics', resources, ts, interval, vim_performance_daily, category) + metrics = VimPerformanceTagValue.send(:get_metrics, resources, ts, interval, vim_performance_daily, category) expect(metrics).to be_empty end it "handles 'hourly' interval" do interval = 'hourly' - metrics = VimPerformanceTagValue.send('get_metrics', resources, ts, interval, vim_performance_daily, category) + metrics = VimPerformanceTagValue.send(:get_metrics, resources, ts, interval, vim_performance_daily, category) expect(metrics).to be_empty end it "handles VimPerformanceDaily type" do interval = nil vim_performance_daily = true - metrics = VimPerformanceTagValue.send('get_metrics', resources, ts, interval, vim_performance_daily, category) + metrics = VimPerformanceTagValue.send(:get_metrics, resources, ts, interval, vim_performance_daily, category) expect(metrics).to be_empty end @@ -37,14 +37,14 @@ let(:finish_time) { report_run_time - 14.hours } before do - metric_rollup_params = { :tag_names => "environment/dev" } + metric_rollup_params = {:tag_names => "environment/dev"} add_metric_rollups_for(development_vm, start_time...finish_time, 1.hour, metric_rollup_params) end it "finds metrics" do interval = nil vim_performance_daily = true - metrics = VimPerformanceTagValue.send('get_metrics', [development_vm], start_time, interval, vim_performance_daily, "environment") + metrics = VimPerformanceTagValue.send(:get_metrics, [development_vm], start_time, interval, vim_performance_daily, "environment") expect(metrics).not_to be_empty end end diff --git a/spec/models/vm/operations_spec.rb b/spec/models/vm/operations_spec.rb index cc9e9561ce9..89dd4cef25a 100644 --- a/spec/models/vm/operations_spec.rb +++ b/spec/models/vm/operations_spec.rb @@ -3,7 +3,7 @@ @miq_server = EvmSpecHelper.local_miq_server @ems = FactoryBot.create(:ems_vmware, :zone => @miq_server.zone) @vm = FactoryBot.create(:vm_vmware, :ems_id => @ems.id) - ipaddresses = %w(fe80::21a:4aff:fe22:dde5 127.0.0.1) + ipaddresses = %w[fe80::21a:4aff:fe22:dde5 127.0.0.1] allow(@vm).to receive(:ipaddresses).and_return(ipaddresses) @hardware = FactoryBot.create(:hardware) @@ -18,15 +18,15 @@ end context 'cloud providers' do - before { @ipaddresses = %w(10.10.1.121 35.190.140.48) } + before { @ipaddresses = %w[10.10.1.121 35.190.140.48] } it 'returns the public ipv4 address for AWS' do ems = FactoryBot.create(:ems_google, :project => 'manageiq-dev') az = FactoryBot.create(:availability_zone_google) vm = FactoryBot.create(:vm_google, - :ext_management_system => ems, - :ems_ref => 123, - :availability_zone => az, - :hardware => @hardware) + :ext_management_system => ems, + :ems_ref => 123, + :availability_zone => az, + :hardware => 
@hardware) allow(vm).to receive(:ipaddresses).and_return(@ipaddresses) url = vm.send(:ipv4_address) expect(url).to eq('35.190.140.48') @@ -44,7 +44,7 @@ context '#public_address' do it 'returns a public ipv4 address' do - ipaddresses = %w(10.10.1.121 35.190.140.48) + ipaddresses = %w[10.10.1.121 35.190.140.48] ems = FactoryBot.create(:ems_amazon) vm = FactoryBot.create(:vm_amazon, :ext_management_system => ems, :hardware => @hardware) allow(vm).to receive(:ipaddresses).and_return(ipaddresses) diff --git a/spec/models/vm/retirement_management_spec.rb b/spec/models/vm/retirement_management_spec.rb index cc3ff2b52cb..e81da930ee2 100644 --- a/spec/models/vm/retirement_management_spec.rb +++ b/spec/models/vm/retirement_management_spec.rb @@ -260,7 +260,7 @@ @vm.reload expect(@vm.retired).to eq(true) - expect(@vm.retires_on).to be_between(Time.zone.now - 1.hour, Time.zone.now + 1.second) + expect(@vm.retires_on).to be_between(1.hour.ago, 1.second.from_now) expect(@vm.retirement_state).to eq("retired") end @@ -327,7 +327,7 @@ it "#raise_retirement_event with user" do event_name = 'foo' event_hash = {:userid => user, :vm => @vm, :host => @vm.host, :type => "ManageIQ::Providers::Vmware::InfraManager::Vm"} - options = {:zone => @zone.name, :user_id => user.id, :group_id => user.current_group_id, :tenant_id => user.current_tenant.id } + options = {:zone => @zone.name, :user_id => user.id, :group_id => user.current_group_id, :tenant_id => user.current_tenant.id} expect(MiqEvent).to receive(:raise_evm_event).with(@vm, event_name, event_hash, options).once @vm.raise_retirement_event(event_name, user) diff --git a/spec/models/vm/snapshotting_spec.rb b/spec/models/vm/snapshotting_spec.rb index 00d0459b483..96d9aff91fe 100644 --- a/spec/models/vm/snapshotting_spec.rb +++ b/spec/models/vm/snapshotting_spec.rb @@ -46,7 +46,6 @@ end end - describe ".v_snapshot_newest_name" do it "returns value" do FactoryBot.create(:snapshot, :create_time => 1.minute.ago, :vm_or_template => vm, :name => "the name") diff --git a/spec/models/vm_or_template_spec.rb b/spec/models/vm_or_template_spec.rb index a07823168fc..c3ed5c791f0 100644 --- a/spec/models/vm_or_template_spec.rb +++ b/spec/models/vm_or_template_spec.rb @@ -22,7 +22,7 @@ before { subject } context "with attrs of template => false, ems_id => nil, host_id => nil" do - let(:attrs) { { :template => false, :ems_id => nil, :host_id => nil } } + let(:attrs) { {:template => false, :ems_id => nil, :host_id => nil} } it("is not #registered?") { expect(subject.registered?).to be false } it("is not in registered_vms") { expect(registered_vms).to_not include subject } @@ -30,7 +30,7 @@ end context "with attrs template => false, ems_id => nil, host_id => [ID]" do - let(:attrs) { { :template => false, :ems_id => nil, :host_id => host.id } } + let(:attrs) { {:template => false, :ems_id => nil, :host_id => host.id} } it("is #registered?") { expect(subject.registered?).to be true } it("is in registered_vms") { expect(registered_vms).to include subject } @@ -38,7 +38,7 @@ end context "with attrs template => false, ems_id => [ID], host_id => nil" do - let(:attrs) { { :template => false, :ems_id => ems.id, :host_id => nil } } + let(:attrs) { {:template => false, :ems_id => ems.id, :host_id => nil} } it("is not #registered?") { expect(subject.registered?).to be false } it("is not in registered_vms") { expect(registered_vms).to_not include subject } @@ -46,7 +46,7 @@ end context "with attrs template => false, ems_id => [ID], host_id => [ID]" do - let(:attrs) { { :template => false, 
:ems_id => ems.id, :host_id => host.id } } + let(:attrs) { {:template => false, :ems_id => ems.id, :host_id => host.id} } it("is #registered?") { expect(subject.registered?).to be true } it("is in registered_vms") { expect(registered_vms).to include subject } @@ -54,7 +54,7 @@ end context "with attrs template => true, ems_id => nil, host_id => nil" do - let(:attrs) { { :template => true, :ems_id => nil, :host_id => nil } } + let(:attrs) { {:template => true, :ems_id => nil, :host_id => nil} } it("is not #registered?") { expect(subject.registered?).to be false } it("is not in registered_vms") { expect(registered_vms).to_not include subject } @@ -62,7 +62,7 @@ end context "with attrs if template => true, ems_id => nil, host_id => [ID]" do - let(:attrs) { { :template => true, :ems_id => nil, :host_id => host.id } } + let(:attrs) { {:template => true, :ems_id => nil, :host_id => host.id} } it("is not #registered?") { expect(subject.registered?).to be false } it("is not in registered_vms") { expect(registered_vms).to_not include subject } @@ -70,7 +70,7 @@ end context "with attrs if template => true, ems_id => [ID], host_id => nil" do - let(:attrs) { { :template => true, :ems_id => ems.id, :host_id => nil } } + let(:attrs) { {:template => true, :ems_id => ems.id, :host_id => nil} } it("is not #registered?") { expect(subject.registered?).to be false } it("is not in registered_vms") { expect(registered_vms).to_not include subject } @@ -78,7 +78,7 @@ end context "with attrs if template => true, ems_id => [ID], host_id => [ID]" do - let(:attrs) { { :template => true, :ems_id => ems.id, :host_id => host.id } } + let(:attrs) { {:template => true, :ems_id => ems.id, :host_id => host.id} } it("is #registered?") { expect(subject.registered?).to be true } it("is in registered_vms") { expect(registered_vms).to include subject } @@ -232,9 +232,9 @@ it "by IP Address" do ipaddress = "192.268.20.1" - hardware = FactoryBot.create(:hardware, :vm_or_template_id => @vm.id, :host => @host) - FactoryBot.create(:network, :hardware_id => hardware.id, :ipaddress => ipaddress) - event_msg = "Add EMS Event by IP address" + hardware = FactoryBot.create(:hardware, :vm_or_template_id => @vm.id, :host => @host) + FactoryBot.create(:network, :hardware_id => hardware.id, :ipaddress => ipaddress) + event_msg = "Add EMS Event by IP address" expect_any_instance_of(VmOrTemplate).to receive(:add_ems_event).with(@event_type, event_msg, @event_timestamp) VmOrTemplate.event_by_property("ipaddress", ipaddress, @event_type, event_msg) @@ -294,7 +294,7 @@ vm_no_host = FactoryBot.create(:vm_vmware, :name => "vm 2", :location => "/local/path", :uid_ems => "2", :ems_id => 102) ipaddress = "192.268.20.2" hardware_no_host = FactoryBot.create(:hardware, :vm_or_template_id => vm_no_host.id) - FactoryBot.create(:network, :hardware_id => hardware_no_host.id, :ipaddress => ipaddress) + FactoryBot.create(:network, :hardware_id => hardware_no_host.id, :ipaddress => ipaddress) event_msg = "Add EMS Event by IP address with no host" @event_hash[:message] = event_msg @@ -311,7 +311,7 @@ vm_no_ems = FactoryBot.create(:vm_vmware, :name => "vm 3", :location => "/local/path", :host => @host) ipaddress = "192.268.20.3" hardware_no_ems = FactoryBot.create(:hardware, :vm_or_template_id => vm_no_ems.id) - FactoryBot.create(:network, :hardware_id => hardware_no_ems.id, :ipaddress => ipaddress) + FactoryBot.create(:network, :hardware_id => hardware_no_ems.id, :ipaddress => ipaddress) event_msg = "Add EMS Event by IP address with no ems id" @event_hash[:message] 
= event_msg @@ -329,7 +329,7 @@ vm_no_host_no_ems = FactoryBot.create(:vm_vmware, :name => "vm 4", :location => "/local/path") ipaddress = "192.268.20.4" hardware_no_host_no_ems = FactoryBot.create(:hardware, :vm_or_template_id => vm_no_host_no_ems.id) - FactoryBot.create(:network, :hardware_id => hardware_no_host_no_ems.id, :ipaddress => ipaddress) + FactoryBot.create(:network, :hardware_id => hardware_no_host_no_ems.id, :ipaddress => ipaddress) event_msg = "Add EMS Event by IP address with no host and no ems id" @event_hash[:message] = event_msg @@ -345,7 +345,7 @@ context "#reconfigured_hardware_value?" do before do - @vm = FactoryBot.create(:vm_vmware) + @vm = FactoryBot.create(:vm_vmware) FactoryBot.create(:hardware, :vm_or_template_id => @vm.id, :memory_mb => 1024) @options = {:hdw_attr => :memory_mb} end @@ -444,11 +444,11 @@ @host2 = FactoryBot.create(:host, :name => 'host2', :storages => [@storage2]) @host3 = FactoryBot.create(:host, :name => 'host3', :storages => [@storage1, @storage2]) @vm = FactoryBot.create(:vm_vmware, - :host => @host1, - :name => 'vm', - :vendor => 'vmware', - :storage => @storage1, - :storages => [@storage1, @storage2]) + :host => @host1, + :name => 'vm', + :vendor => 'vmware', + :storage => @storage1, + :storages => [@storage1, @storage2]) @zone = FactoryBot.create(:zone, :name => 'zone') allow_any_instance_of(MiqServer).to receive_messages(:is_vix_disk? => true) @@ -508,11 +508,11 @@ @host2 = FactoryBot.create(:host, :name => 'host2', :storages => [@storage2]) @vm = FactoryBot.create(:vm_redhat, - :host => @host1, - :name => 'vm', - :vendor => 'redhat', - :storage => @storage1, - :storages => [@storage1]) + :host => @host1, + :name => 'vm', + :vendor => 'redhat', + :storage => @storage1, + :storages => [@storage1]) @svr1 = EvmSpecHelper.local_miq_server(:name => 'svr1') @svr2 = FactoryBot.create(:miq_server, :name => 'svr2', :zone => @svr1.zone) @@ -656,7 +656,7 @@ context "#supports?(:evacuate)" do it "returns false for querying vmware VM if it supports evacuate operation" do - vm = FactoryBot.create(:vm_vmware) + vm = FactoryBot.create(:vm_vmware) expect(vm.supports?(:evacuate)).to eq(false) end end @@ -672,7 +672,7 @@ end it "returns false for Amazon VM" do - vm = FactoryBot.create(:vm_amazon) + vm = FactoryBot.create(:vm_amazon) expect(vm.supports?(:smartstate_analysis)).to_not eq(true) end end @@ -857,7 +857,7 @@ hardware = FactoryBot.create(:hardware, :memory_mb => 10) vm = FactoryBot.create(:vm_vmware, :hardware => hardware) FactoryBot.create(:disk, :size_on_disk => 1024, :size => 10_240, :hardware => hardware) - expect(vm.used_storage).to eq(10 * 1024 * 1024 + 1024) # memory_mb + size on disk + expect(vm.used_storage).to eq((10 * 1024 * 1024) + 1024) # memory_mb + size on disk end end @@ -1009,32 +1009,32 @@ EvmSpecHelper.local_miq_server tp_id = TimeProfile.seed.id FactoryBot.create :metric_rollup_vm_daily, - :with_data, - :timestamp => 1.day.ago, - :time_profile_id => tp_id, - :resource_id => vm.id, - :min_max => { - :abs_max_cpu_usagemhz_rate_average_value => 100.00 - } + :with_data, + :timestamp => 1.day.ago, + :time_profile_id => tp_id, + :resource_id => vm.id, + :min_max => { + :abs_max_cpu_usagemhz_rate_average_value => 100.00 + } FactoryBot.create :metric_rollup_vm_daily, - :with_data, - :cpu_usagemhz_rate_average => 10.0, - :timestamp => 1.day.ago, - :time_profile_id => tp_id, - :resource_id => vm.id, - :min_max => { - :abs_max_cpu_usagemhz_rate_average_value => 900.00 - } + :with_data, + :cpu_usagemhz_rate_average => 10.0, + :timestamp 
=> 1.day.ago, + :time_profile_id => tp_id, + :resource_id => vm.id, + :min_max => { + :abs_max_cpu_usagemhz_rate_average_value => 900.00 + } FactoryBot.create :metric_rollup_vm_daily, - :with_data, - :cpu_usagemhz_rate_average => 100.0, - :timestamp => 1.day.ago, - :time_profile_id => tp_id, - :resource_id => vm.id, - :min_max => { - :abs_max_cpu_usagemhz_rate_average_value => 500.00 - } + :with_data, + :cpu_usagemhz_rate_average => 100.0, + :timestamp => 1.day.ago, + :time_profile_id => tp_id, + :resource_id => vm.id, + :min_max => { + :abs_max_cpu_usagemhz_rate_average_value => 500.00 + } end it "calculates in ruby" do @@ -1058,31 +1058,31 @@ EvmSpecHelper.local_miq_server tp_id = TimeProfile.seed.id FactoryBot.create :metric_rollup_vm_daily, - :with_data, - :time_profile_id => tp_id, - :timestamp => 1.day.ago, - :resource_id => vm.id, - :min_max => { - :abs_max_derived_memory_used_value => 100.00 - } + :with_data, + :time_profile_id => tp_id, + :timestamp => 1.day.ago, + :resource_id => vm.id, + :min_max => { + :abs_max_derived_memory_used_value => 100.00 + } FactoryBot.create :metric_rollup_vm_daily, - :with_data, - :derived_memory_used => 10.0, - :timestamp => 1.day.ago, - :time_profile_id => tp_id, - :resource_id => vm.id, - :min_max => { - :abs_max_derived_memory_used_value => 500.00 - } + :with_data, + :derived_memory_used => 10.0, + :timestamp => 1.day.ago, + :time_profile_id => tp_id, + :resource_id => vm.id, + :min_max => { + :abs_max_derived_memory_used_value => 500.00 + } FactoryBot.create :metric_rollup_vm_daily, - :with_data, - :derived_memory_used => 1000.0, - :timestamp => 1.day.ago, - :time_profile_id => tp_id, - :resource_id => vm.id, - :min_max => { - :abs_max_derived_memory_used_value => 200.00 - } + :with_data, + :derived_memory_used => 1000.0, + :timestamp => 1.day.ago, + :time_profile_id => tp_id, + :resource_id => vm.id, + :min_max => { + :abs_max_derived_memory_used_value => 200.00 + } end it "calculates in ruby" do @@ -1298,8 +1298,8 @@ let(:ems) { FactoryBot.build(:ext_management_system) } let(:vm) do FactoryBot.build(:vm_or_template, - :ext_management_system => ems, - :ems_cluster => FactoryBot.build(:ems_cluster)) + :ext_management_system => ems, + :ems_cluster => FactoryBot.build(:ems_cluster)) end it "clears ems and cluster" do @@ -1550,9 +1550,9 @@ let!(:vm) do FactoryBot.create(klass, :storage => storage, - :ems_id => ems.try(:id), - :connection_state => connection_state, - :retired => retired) + :ems_id => ems.try(:id), + :connection_state => connection_state, + :retired => retired) end subject { vm.normalized_state } diff --git a/spec/models/vm_reconfigure_task_spec.rb b/spec/models/vm_reconfigure_task_spec.rb index d44d09526f3..03fc6532878 100644 --- a/spec/models/vm_reconfigure_task_spec.rb +++ b/spec/models/vm_reconfigure_task_spec.rb @@ -42,8 +42,8 @@ context "Single Disk add " do let(:request_options) { {:disk_add => [{"disk_size_in_mb" => "33", "persistent" => "true", "type" => "thin"}.with_indifferent_access]} } let(:description_partial) do - "Add Disks: 1 : #{request.options[:disk_add][0]["disk_size_in_mb"].to_i.megabytes.to_s(:human_size)}, Type: "\ - "#{request.options[:disk_add][0]["type"]} " + "Add Disks: 1 : #{request.options[:disk_add][0]["disk_size_in_mb"].to_i.megabytes.to_s(:human_size)}, Type: " \ + "#{request.options[:disk_add][0]["type"]} " end it_behaves_like ".get_description" @@ -55,9 +55,9 @@ {"disk_size_in_mb" => "44", "persistent" => "true", "type" => "thick"}.with_indifferent_access]} end let(:description_partial) do - "Add 
Disks: 2 : #{request.options[:disk_add][0]["disk_size_in_mb"].to_i.megabytes.to_s(:human_size)}, Type: "\ - "#{request.options[:disk_add][0]["type"]}, #{request.options[:disk_add][1]["disk_size_in_mb"].to_i.megabytes.to_s(:human_size)}, Type: "\ - "#{request.options[:disk_add][1]["type"]} " + "Add Disks: 2 : #{request.options[:disk_add][0]["disk_size_in_mb"].to_i.megabytes.to_s(:human_size)}, Type: " \ + "#{request.options[:disk_add][0]["type"]}, #{request.options[:disk_add][1]["disk_size_in_mb"].to_i.megabytes.to_s(:human_size)}, Type: " \ + "#{request.options[:disk_add][1]["type"]} " end it_behaves_like ".get_description" @@ -88,9 +88,9 @@ context "Network" do let(:request_options) do {:network_adapter_add => [ - {:cloud_network => 'vApp Network Name', :name => 'VM Name#NIC#2'}.with_indifferent_access, - {:cloud_network => nil, :name => 'VM Name#NIC#3'}.with_indifferent_access - ], + {:cloud_network => 'vApp Network Name', :name => 'VM Name#NIC#2'}.with_indifferent_access, + {:cloud_network => nil, :name => 'VM Name#NIC#3'}.with_indifferent_access + ], :network_adapter_remove => [{:network => {:name => 'VM Name#NIC#0'}.with_indifferent_access}], :network_adapter_edit => [{:network => "NFS Network", :name => "Network adapter 1"}.with_indifferent_access]} end @@ -102,10 +102,10 @@ context "CDROM" do let(:request_options) do {:cdrom_connect => [ - {:device_name => "CD/DVD drive 1", - :filename => "[NFS Share] ISO/centos.iso", - :storage_id => 1234}.with_indifferent_access - ], + {:device_name => "CD/DVD drive 1", + :filename => "[NFS Share] ISO/centos.iso", + :storage_id => 1234}.with_indifferent_access + ], :cdrom_disconnect => [{:device_name => "CD/DVD drive 2"}.with_indifferent_access]} end let(:description_partial) { "Attach CD/DVDs: 1, Detach CD/DVDs: 1" } diff --git a/spec/models/vm_retire_task_spec.rb b/spec/models/vm_retire_task_spec.rb index e3a425de9e3..2c2882d88e4 100644 --- a/spec/models/vm_retire_task_spec.rb +++ b/spec/models/vm_retire_task_spec.rb @@ -2,7 +2,7 @@ let(:user) { FactoryBot.create(:user_with_group) } let(:vm) { FactoryBot.create(:vm) } let(:miq_request) { FactoryBot.create(:vm_retire_request, :requester => user) } - let(:vm_retire_task) { FactoryBot.create(:vm_retire_task, :source => vm, :miq_request => miq_request, :options => {:src_ids => [vm.id] }) } + let(:vm_retire_task) { FactoryBot.create(:vm_retire_task, :source => vm, :miq_request => miq_request, :options => {:src_ids => [vm.id]}) } let(:approver) { FactoryBot.create(:user_miq_request_approver) } it "should initialize properly" do diff --git a/spec/models/vm_scan/dispatcher_embedded_scan_spec.rb b/spec/models/vm_scan/dispatcher_embedded_scan_spec.rb index d71987e2c0d..1b78a4f5571 100644 --- a/spec/models/vm_scan/dispatcher_embedded_scan_spec.rb +++ b/spec/models/vm_scan/dispatcher_embedded_scan_spec.rb @@ -20,17 +20,15 @@ def assert_at_most_x_scan_jobs_per_y_resource(x_scans, y_resource) end if y_resource == :miq_server - resource_hsh = vms_in_embedded_scanning.inject({}) do |hsh, target_id| + resource_hsh = vms_in_embedded_scanning.each_with_object({}) do |target_id, hsh| hsh[target_id] ||= 0 hsh[target_id] += 1 - hsh end else vms = VmOrTemplate.where(:id => vms_in_embedded_scanning) - resource_hsh = vms.inject({}) do |hsh, v| + resource_hsh = vms.each_with_object({}) do |v, hsh| hsh[v.send(method)] ||= 0 hsh[v.send(method)] += 1 - hsh end end @@ -54,7 +52,7 @@ def assert_at_most_x_scan_jobs_per_y_resource(x_scans, y_resource) :storages => storage_count, :vms => vm_count, :repo_vms => repo_vm_count, - 
:zone => zone, + :zone => zone ) end @@ -65,7 +63,7 @@ def assert_at_most_x_scan_jobs_per_y_resource(x_scans, y_resource) context "and embedded scans on ems" do context "and scans against ems limited to 2 and up to 10 scans per miqserver" do - it "should dispatch only 2 scan jobs per ems" do + it "should dispatch only 2 scan jobs per ems" do allow(ManageIQ::Providers::Vmware::InfraManager::Vm).to receive(:scan_via_ems?).and_return(true) VmScan::Dispatcher.dispatch assert_at_most_x_scan_jobs_per_y_resource(2, :ems) @@ -81,14 +79,14 @@ def assert_at_most_x_scan_jobs_per_y_resource(x_scans, y_resource) end context "and scans against ems limited to 4 and up to 10 scans per miqserver" do - it "should dispatch only 4 scan jobs per ems" do + it "should dispatch only 4 scan jobs per ems" do VmScan::Dispatcher.dispatch assert_at_most_x_scan_jobs_per_y_resource(4, :ems) end end context "and scans against ems limited to 4 and up to 2 scans per miqserver" do - it "should dispatch up to 4 per ems and 2 per miqserver" do + it "should dispatch up to 4 per ems and 2 per miqserver" do VmScan::Dispatcher.dispatch assert_at_most_x_scan_jobs_per_y_resource(4, :ems) assert_at_most_x_scan_jobs_per_y_resource(2, :miq_server) @@ -102,21 +100,21 @@ def assert_at_most_x_scan_jobs_per_y_resource(x_scans, y_resource) end context "and scans against host limited to 2 and up to 10 scans per miqserver" do - it "should dispatch only 2 scan jobs per host" do + it "should dispatch only 2 scan jobs per host" do VmScan::Dispatcher.dispatch assert_at_most_x_scan_jobs_per_y_resource(2, :host) end end context "and scans against host limited to 4 and up to 10 scans per miqserver" do - it "should dispatch only 4 scan jobs per host" do + it "should dispatch only 4 scan jobs per host" do VmScan::Dispatcher.dispatch assert_at_most_x_scan_jobs_per_y_resource(4, :host) end end context "and scans against host limited to 4 and up to 2 scans per miqserver" do - it "should dispatch up to 4 per host and 2 per miqserver" do + it "should dispatch up to 4 per host and 2 per miqserver" do VmScan::Dispatcher.dispatch assert_at_most_x_scan_jobs_per_y_resource(4, :host) assert_at_most_x_scan_jobs_per_y_resource(2, :miq_server) diff --git a/spec/models/vm_scan_spec.rb b/spec/models/vm_scan_spec.rb index 0e8550914ff..fef4b48ba16 100644 --- a/spec/models/vm_scan_spec.rb +++ b/spec/models/vm_scan_spec.rb @@ -108,11 +108,11 @@ # local zone @server1 = EvmSpecHelper.local_miq_server(:has_vix_disk_lib => true) @user = FactoryBot.create(:user_with_group, :userid => "tester") - @ems = FactoryBot.create(:ems_vmware_with_authentication, :name => "Test EMS", :zone => @server1.zone, - :tenant => FactoryBot.create(:tenant)) + @ems = FactoryBot.create(:ems_vmware_with_authentication, :name => "Test EMS", :zone => @server1.zone, + :tenant => FactoryBot.create(:tenant)) @storage = FactoryBot.create(:storage, :name => "test_storage", :store_type => "VMFS") @host = FactoryBot.create(:host, :name => "test_host", :hostname => "test_host", - :state => 'on', :ext_management_system => @ems) + :state => 'on', :ext_management_system => @ems) @vm = FactoryBot.create(:vm_vmware, :name => "test_vm", :location => "abc/abc.vmx", :raw_power_state => 'poweredOn', :host => @host, @@ -126,7 +126,7 @@ @user2 = FactoryBot.create(:user_with_group, :userid => "tester2") @storage2 = FactoryBot.create(:storage, :name => "test_storage2", :store_type => "VMFS") @host2 = FactoryBot.create(:host, :name => "test_host2", :hostname => "test_host2", - :state => 'on', :ext_management_system => 
@ems) + :state => 'on', :ext_management_system => @ems) @vm2 = FactoryBot.create(:vm_vmware, :name => "test_vm2", :location => "abc2/abc2.vmx", :raw_power_state => 'poweredOn', :host => @host2, diff --git a/spec/models/vm_spec.rb b/spec/models/vm_spec.rb index d6ce7f78ac8..bc8d2b70987 100644 --- a/spec/models/vm_spec.rb +++ b/spec/models/vm_spec.rb @@ -172,9 +172,8 @@ EvmSpecHelper.local_miq_server @host = FactoryBot.create(:host_vmware) @vm = FactoryBot.create(:vm_vmware, - :host => @host, - :miq_group => FactoryBot.create(:miq_group) - ) + :host => @host, + :miq_group => FactoryBot.create(:miq_group)) FactoryBot.create(:miq_event_definition, :name => :request_vm_start) # admin user is needed to process Events User.super_admin || FactoryBot.create(:user_with_group, :userid => "admin") @@ -243,24 +242,24 @@ expect(DriftState.count).to eq(1) expect(vm.drift_states.first.data).to eq({ - :class => "ManageIQ::Providers::Vmware::InfraManager::Vm", - :id => vm.id, - :location => vm.location, - :name => vm.name, - :vendor => "vmware", - - :files => [], - :filesystem_drivers => [], - :groups => [], - :guest_applications => [], - :kernel_drivers => [], - :linux_initprocesses => [], - :patches => [], - :registry_items => [], - :tags => [], - :users => [], - :win32_services => [], - }) + :class => "ManageIQ::Providers::Vmware::InfraManager::Vm", + :id => vm.id, + :location => vm.location, + :name => vm.name, + :vendor => "vmware", + + :files => [], + :filesystem_drivers => [], + :groups => [], + :guest_applications => [], + :kernel_drivers => [], + :linux_initprocesses => [], + :patches => [], + :registry_items => [], + :tags => [], + :users => [], + :win32_services => [], + }) end it '#set_remote_console_url' do diff --git a/spec/models/vmdb_database_connection_spec.rb b/spec/models/vmdb_database_connection_spec.rb index 7e58d830c10..83b891db84c 100644 --- a/spec/models/vmdb_database_connection_spec.rb +++ b/spec/models/vmdb_database_connection_spec.rb @@ -129,7 +129,7 @@ class << @buffer expect(lines.pop).to eq "ACTIVITY_STATS_CSV" header, *rows = CSV.parse lines.join - expect(header).to eq %w( + expect(header).to eq %w[ session_id xact_start last_request_start_time @@ -144,7 +144,7 @@ class << @buffer wait_event wait_time_ms blocked_by - ) + ] expect(rows.length).to be > 0 rows.each do |row| diff --git a/spec/models/zone_spec.rb b/spec/models/zone_spec.rb index 2ab2e31e169..bea46f1dace 100644 --- a/spec/models/zone_spec.rb +++ b/spec/models/zone_spec.rb @@ -67,7 +67,7 @@ describe "#clustered_hosts" do let(:zone) { FactoryBot.create(:zone) } let(:ems) { FactoryBot.create(:ems_vmware, :zone => zone) } - let(:cluster) { FactoryBot.create(:ems_cluster, :ext_management_system => ems)} + let(:cluster) { FactoryBot.create(:ems_cluster, :ext_management_system => ems) } let(:host_with_cluster) { FactoryBot.create(:host, :ext_management_system => ems, :ems_cluster => cluster) } let(:host) { FactoryBot.create(:host, :ext_management_system => ems) } @@ -82,7 +82,7 @@ describe "#non_clustered_hosts" do let(:zone) { FactoryBot.create(:zone) } let(:ems) { FactoryBot.create(:ems_vmware, :zone => zone) } - let(:cluster) { FactoryBot.create(:ems_cluster, :ext_management_system => ems)} + let(:cluster) { FactoryBot.create(:ems_cluster, :ext_management_system => ems) } let(:host_with_cluster) { FactoryBot.create(:host, :ext_management_system => ems, :ems_cluster => cluster) } let(:host) { FactoryBot.create(:host, :ext_management_system => ems) } @@ -111,7 +111,7 @@ end context ".determine_queue_zone" do - subject { 
described_class } + subject { described_class } before do allow(ServerRole).to receive_messages(:region_scoped_roles => [ServerRole.new(:name => "inregion")]) diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 52795b201a0..9558b150647 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -5,7 +5,7 @@ end ENV["RAILS_ENV"] ||= 'test' -require File.expand_path("../../config/environment", __FILE__) +require File.expand_path('../config/environment', __dir__) require 'rspec/rails' require 'rspec/mocks' require 'vcr' @@ -42,7 +42,7 @@ YamlPermittedClasses.app_yaml_permitted_classes |= [Ruport::Data::Record, Ruport::Data::Table] end - config.file_fixture_path = "#{::Rails.root}/spec/fixtures" + config.file_fixture_path = "#{Rails.root.join("spec/fixtures")}" config.use_transactional_fixtures = true config.use_instantiated_fixtures = false diff --git a/spec/support/auth_helper.rb b/spec/support/auth_helper.rb index fb1206622f5..5fa766af250 100644 --- a/spec/support/auth_helper.rb +++ b/spec/support/auth_helper.rb @@ -7,7 +7,7 @@ def http_login(username = 'username', password = 'password') def login_as(user, stub_controller: false) User.current_user = user - STDERR.puts "WARNING: double stubbing user - only use login_as or stub_user once" if user != User.current_user + warn "WARNING: double stubbing user - only use login_as or stub_user once" if user != User.current_user session[:userid] = user.userid session[:group] = user.current_group_id allow(controller).to receive(:current_user).and_return(user) if stub_controller diff --git a/spec/support/automation_helper.rb b/spec/support/automation_helper.rb index a02c54ba3e1..522e22e81d5 100644 --- a/spec/support/automation_helper.rb +++ b/spec/support/automation_helper.rb @@ -18,7 +18,7 @@ def create_ae_model(attrs = {}) ae_instances ||= {instance_name => {'field1' => {:value => 'hello world'}}} FactoryBot.create(:miq_ae_domain, :with_small_model, :with_instances, - attrs.merge('ae_fields' => ae_fields, 'ae_instances' => ae_instances)) + attrs.merge('ae_fields' => ae_fields, 'ae_instances' => ae_instances)) end def create_state_ae_model(attrs = {}) @@ -28,7 +28,7 @@ def create_state_ae_model(attrs = {}) ae_instances = {instance_name => {'field1' => {:value => 'phases of matter'}}} FactoryBot.create(:miq_ae_domain, :with_small_model, :with_instances, - attrs.merge('ae_fields' => ae_fields, 'ae_instances' => ae_instances)) + attrs.merge('ae_fields' => ae_fields, 'ae_instances' => ae_instances)) end def create_ae_model_with_method(attrs = {}) @@ -45,9 +45,9 @@ def create_ae_model_with_method(attrs = {}) :language => 'ruby', 'params' => method_params}} FactoryBot.create(:miq_ae_domain, :with_small_model, :with_instances, :with_methods, - attrs.merge('ae_fields' => ae_fields, - 'ae_instances' => ae_instances, - 'ae_methods' => ae_methods)) + attrs.merge('ae_fields' => ae_fields, + 'ae_instances' => ae_instances, + 'ae_methods' => ae_methods)) end def default_ae_model_attributes(attrs = {}) @@ -55,7 +55,8 @@ def default_ae_model_attributes(attrs = {}) :ae_class => 'CLASS1', :ae_namespace => 'A/B/C', :enabled => true, - :instance_name => 'instance1') + :instance_name => 'instance1' + ) end def send_ae_request_via_queue(args, timeout = nil) @@ -70,6 +71,7 @@ def send_ae_request_via_queue(args, timeout = nil) def deliver_ae_request_from_queue q = MiqQueue.all.detect { |item| item.state == 'ready' && item.class_name == "MiqAeEngine" } return nil unless q + q.state = 'dequeue' q.save q.deliver @@ -79,9 +81,10 @@ def add_call_method aec = 
MiqAeClass.lookup_by_fqname('/ManageIQ/System/Request') aei = aec.ae_instances.detect { |ins| ins.name == 'Call_Method' } if aec return if aei + aef = aec.ae_fields.detect { |fld| fld.name == 'meth1' } aei = MiqAeInstance.new('name' => 'Call_Method') - aev = MiqAeValue.new(:ae_field => aef, :value => "${/#namespace}/${/#class}.${/#method}") + aev = MiqAeValue.new(:ae_field => aef, :value => "${/#namespace}/${/#class}.${/#method}") aei.ae_values << aev aec.ae_instances << aei aec.save diff --git a/spec/support/chargeback_helper.rb b/spec/support/chargeback_helper.rb index c3947f6e0aa..69f53595301 100644 --- a/spec/support/chargeback_helper.rb +++ b/spec/support/chargeback_helper.rb @@ -2,9 +2,9 @@ module Spec module Support module ChargebackHelper def set_tier_param_for(metric, param, value, num_of_tier = 0) - detail = chargeback_rate.chargeback_rate_details.joins(:chargeable_field).where(:chargeable_fields => { :metric => metric }).first + detail = chargeback_rate.chargeback_rate_details.joins(:chargeable_field).where(:chargeable_fields => {:metric => metric}).first tier = detail.chargeback_tiers[num_of_tier] - tier.send("#{param}=", value) + tier.send(:"#{param}=", value) tier.save end @@ -55,10 +55,10 @@ def add_vim_performance_state_for(resources, range, step, state_data) range.step_value(step).each do |time| Array(resources).each do |resource| FactoryBot.create(:vim_performance_state, - :timestamp => time, - :resource => resource, - :state_data => state_data, - :capture_interval => 1.hour) + :timestamp => time, + :resource => resource, + :state_data => state_data, + :capture_interval => 1.hour) end end end diff --git a/spec/support/custom_matchers/be_same_time_as.rb b/spec/support/custom_matchers/be_same_time_as.rb index 1db10386de2..1689758c8af 100644 --- a/spec/support/custom_matchers/be_same_time_as.rb +++ b/spec/support/custom_matchers/be_same_time_as.rb @@ -2,10 +2,10 @@ match do |actual| regexp = /(.*_spec\.rb:\d+)/ called_from = caller.detect { |line| line =~ regexp } - puts <<-MESSAGE -\nWARNING: The `be_same_time_as` matcher is deprecated and will be removed shortly. -Use the `be_within` matcher instead: `be_same_time_as(expected_time).precision(1) == be_within(0.1).of(expected_time)` -#{"Called from " + called_from if called_from} + puts <<~MESSAGE + \nWARNING: The `be_same_time_as` matcher is deprecated and will be removed shortly. 
+ Use the `be_within` matcher instead: `be_same_time_as(expected_time).precision(1) == be_within(0.1).of(expected_time)` + #{"Called from " + called_from if called_from} MESSAGE actual.round(precision) == expected.round(precision) diff --git a/spec/support/custom_matchers/have_attr_accessor.rb b/spec/support/custom_matchers/have_attr_accessor.rb index 705b8f0d19c..32bd20f716b 100644 --- a/spec/support/custom_matchers/have_attr_accessor.rb +++ b/spec/support/custom_matchers/have_attr_accessor.rb @@ -1,6 +1,6 @@ RSpec::Matchers.define :have_attr_accessor do |field_name| match do |object_instance| - object_instance.respond_to?(field_name) && object_instance.respond_to?("#{field_name}=") + object_instance.respond_to?(field_name) && object_instance.respond_to?(:"#{field_name}=") end failure_message do |object_instance| diff --git a/spec/support/evm_spec_helper.rb b/spec/support/evm_spec_helper.rb index 1ee6023258d..9e035e9131c 100644 --- a/spec/support/evm_spec_helper.rb +++ b/spec/support/evm_spec_helper.rb @@ -81,13 +81,14 @@ def self.clear_caches # Marshal dump of Settings loses the config_sources, so we need to compare manually current != @settings_backup || - current.instance_variable_get(:@config_sources) != @settings_backup.instance_variable_get(:@config_sources) + current.instance_variable_get(:@config_sources) != @settings_backup.instance_variable_get(:@config_sources) end def self.clear_instance_variables(instance) if instance.kind_of?(ActiveRecord::Base) || (instance.kind_of?(Class) && instance < ActiveRecord::Base) raise "instances variables should not be cleared from ActiveRecord objects" end + # Don't clear the rspec-mocks instance variables ivars = instance.instance_variables - [:@mock_proxy, :@__recorder] ivars.each { |ivar| clear_instance_variable(instance, ivar) } @@ -112,7 +113,7 @@ def self.local_guid_miq_server_zone end class << self - alias_method :create_guid_miq_server_zone, :local_guid_miq_server_zone + alias create_guid_miq_server_zone local_guid_miq_server_zone end def self.remote_miq_server(attrs = {}) diff --git a/spec/support/examples_group/shared_examples_for_log_collection.rb b/spec/support/examples_group/shared_examples_for_log_collection.rb index a87827b7878..bd41fd2f0be 100644 --- a/spec/support/examples_group/shared_examples_for_log_collection.rb +++ b/spec/support/examples_group/shared_examples_for_log_collection.rb @@ -1,5 +1,5 @@ shared_examples_for "Log Collection #synchronize_logs" do |type| - let(:instance) { instance_variable_get("@#{type}") } + let(:instance) { instance_variable_get(:"@#{type}") } it "#{type.camelize} no args" do expect(LogFile).to receive(:logs_from_server).with(MiqServer.my_server, hash_excluding(:only_current)) @@ -36,11 +36,11 @@ shared_examples_for "Log Collection should create 0 tasks and 0 queue items" do it "should create 0 unfinished tasks" do - expect(MiqTask.where("state != ?", "Finished").count).to eq(0) + expect(MiqTask.where.not(:state => "Finished").count).to eq(0) end it "should create 0 queue messages" do - expect(MiqQueue.where("state not in (?)", %w(ok ready error)).count).to eq(0) + expect(MiqQueue.where.not(:state => %w[ok ready error]).count).to eq(0) end end diff --git a/spec/support/examples_group/shared_examples_for_miq_policy_mixin.rb b/spec/support/examples_group/shared_examples_for_miq_policy_mixin.rb index 0a64a5993f9..fd2db3352ed 100644 --- a/spec/support/examples_group/shared_examples_for_miq_policy_mixin.rb +++ b/spec/support/examples_group/shared_examples_for_miq_policy_mixin.rb @@ -1,4 +1,4 @@ -# 
Note: This example group uses the `subject` defined by the calling spec +# NOTE: This example group uses the `subject` defined by the calling spec shared_examples_for "MiqPolicyMixin" do context "MiqPolicyMixin methods" do let(:policy) { FactoryBot.create(:miq_policy) } diff --git a/spec/support/examples_group/shared_examples_for_ownership_mixin.rb b/spec/support/examples_group/shared_examples_for_ownership_mixin.rb index cbf548acf36..2f4c380a2aa 100644 --- a/spec/support/examples_group/shared_examples_for_ownership_mixin.rb +++ b/spec/support/examples_group/shared_examples_for_ownership_mixin.rb @@ -4,8 +4,8 @@ let(:user) do FactoryBot.create(:user, - :userid => "ownership_user", - :miq_groups => FactoryBot.create_list(:miq_group, 1)) + :userid => "ownership_user", + :miq_groups => FactoryBot.create_list(:miq_group, 1)) end let(:user2) { FactoryBot.create(:user) } @@ -104,12 +104,12 @@ it "usable as arel" do group_name = user.current_group.description.downcase - sql = <<-SQL.strip_heredoc.split("\n").join(' ') - LOWER(((SELECT "miq_groups"."description" - FROM "miq_groups" - WHERE "miq_groups"."id" = "#{described_class.table_name}"."miq_group_id"))) = '#{group_name}' - SQL - attribute = described_class.arel_table[:owned_by_current_ldap_group] + sql = <<~SQL.split("\n").join(' ') + LOWER(((SELECT "miq_groups"."description" + FROM "miq_groups" + WHERE "miq_groups"."id" = "#{described_class.table_name}"."miq_group_id"))) = '#{group_name}' + SQL + attribute = described_class.arel_table[:owned_by_current_ldap_group] expect(stringify_arel(attribute)).to eq ["(#{sql})"] end @@ -209,12 +209,12 @@ it "usable as arel" do userid = user.userid.downcase - sql = <<-SQL.strip_heredoc.split("\n").join(' ') - LOWER(((SELECT "users"."userid" - FROM "users" - WHERE "users"."id" = "#{described_class.table_name}"."evm_owner_id"))) = '#{userid}' - SQL - attribute = described_class.arel_table[:owned_by_current_user] + sql = <<~SQL.split("\n").join(' ') + LOWER(((SELECT "users"."userid" + FROM "users" + WHERE "users"."id" = "#{described_class.table_name}"."evm_owner_id"))) = '#{userid}' + SQL + attribute = described_class.arel_table[:owned_by_current_user] expect(stringify_arel(attribute)).to eq ["(#{sql})"] end @@ -247,9 +247,9 @@ describe "reporting on ownership" do let(:exp_value) { "true" } - let(:exp) { { "="=> { "field" => "#{described_class}-owned_by_current_ldap_group", "value" => exp_value } } } + let(:exp) { {"="=> {"field" => "#{described_class}-owned_by_current_ldap_group", "value" => exp_value}} } let(:report) { MiqReport.new.tap { |r| r.db = described_class.to_s } } - let(:search_opts) { { :filter => MiqExpression.new(exp), :per_page => 20 } } + let(:search_opts) { {:filter => MiqExpression.new(exp), :per_page => 20} } before do expect(User).to receive(:server_timezone).and_return("UTC") @@ -276,8 +276,8 @@ end context "searching on owned by the current user" do - let(:search_opts) { { :filter => MiqExpression.new(exp), :per_page => 20 } } - let(:exp) { { "=" => { "field" => "#{described_class}-owned_by_current_user", "value" => "true" } } } + let(:search_opts) { {:filter => MiqExpression.new(exp), :per_page => 20} } + let(:exp) { {"=" => {"field" => "#{described_class}-owned_by_current_user", "value" => "true"}} } it "returns results owned by the user" do owned_ids = report.paged_view_search(search_opts).first.map(&:id) @@ -286,8 +286,8 @@ end context "searching on not owned by the current user" do - let(:search_opts) { { :filter => MiqExpression.new(exp), :per_page => 20 } } - let(:exp) { { "=" 
=> { "field" => "#{described_class}-owned_by_current_user", "value" => "false" } } } + let(:search_opts) { {:filter => MiqExpression.new(exp), :per_page => 20} } + let(:exp) { {"=" => {"field" => "#{described_class}-owned_by_current_user", "value" => "false"}} } it "returns results not owned by the user, but have an owner" do owned_ids = report.paged_view_search(search_opts).first.map(&:id) diff --git a/spec/support/examples_group/shared_examples_for_seeding.rb b/spec/support/examples_group/shared_examples_for_seeding.rb index 5ee8183674d..8df3ff9a166 100644 --- a/spec/support/examples_group/shared_examples_for_seeding.rb +++ b/spec/support/examples_group/shared_examples_for_seeding.rb @@ -20,7 +20,7 @@ end shared_examples_for "seeding users with" do |klasses| - let(:users) { {'admin' => 'super_administrator' } } + let(:users) { {'admin' => 'super_administrator'} } it "seeds users #{klasses.present? ? 'with' : ''} #{klasses.collect(&:to_s).join(', ')}" do klasses.push(User) diff --git a/spec/support/factory_bot_helper.rb b/spec/support/factory_bot_helper.rb index 1cc378e2cdd..f5bc1676850 100644 --- a/spec/support/factory_bot_helper.rb +++ b/spec/support/factory_bot_helper.rb @@ -4,7 +4,7 @@ def toggle_on_name_seq(seq) def ip_from_seq(seq) ip = 0xFFFFFFFF - seq - "#{ip >> 24}.#{ip >> 16 & 0xFF}.#{ip >> 8 & 0xFF}.#{ip & 0xFF}" + "#{ip >> 24}.#{(ip >> 16) & 0xFF}.#{(ip >> 8) & 0xFF}.#{ip & 0xFF}" end def mac_from_seq(seq) @@ -41,7 +41,7 @@ def seq_padded_for_sorting(n) end unless evaluator.other_region - raise "You need to pass specific region with :other_region: \n"\ + raise "You need to pass specific region with :other_region: \n" \ "FactoryBot.create(:#{instance.class.to_s.tableize.singularize.to_sym}, :in_other_region, :other_region => ) " end @@ -52,4 +52,3 @@ def seq_padded_for_sorting(n) end end end - diff --git a/spec/support/job_proxy_dispatcher_helper.rb b/spec/support/job_proxy_dispatcher_helper.rb index e416f3fbe61..890878c34e5 100644 --- a/spec/support/job_proxy_dispatcher_helper.rb +++ b/spec/support/job_proxy_dispatcher_helper.rb @@ -40,9 +40,9 @@ def build_entities(options = {}) images_count.times do |idx| container_image_classes.each do |cic| FactoryBot.create(:container_image, - :name => "test_container_images_#{idx}", - :ext_management_system => ems_openshift, - :type => cic.name) + :name => "test_container_images_#{idx}", + :ext_management_system => ems_openshift, + :type => cic.name) end end end diff --git a/spec/support/miq_request_task_helper.rb b/spec/support/miq_request_task_helper.rb index 5ca37b9ca4e..31a2640b697 100644 --- a/spec/support/miq_request_task_helper.rb +++ b/spec/support/miq_request_task_helper.rb @@ -10,17 +10,19 @@ def call_method def check_post_install_callback return if @skip_post_install_check + allow(task).to receive(:for_destination) task.post_install_callback end def dequeue_method return unless (method = @queue.shift) + if method.to_s.start_with?("test_") send(method) else @current_state = method - send("test_#{@current_state}") + send(:"test_#{@current_state}") end true end @@ -39,4 +41,4 @@ def skip_post_install_check end end -Dir.glob(Rails.root.join("spec", "models", "**", "state_machine_spec_helper.rb")).each { |file| require file } +Dir.glob(Rails.root.join("spec/models/**/state_machine_spec_helper.rb")).each { |file| require file } diff --git a/spec/support/query_counter.rb b/spec/support/query_counter.rb index f429c88450e..f00fae337b4 100644 --- a/spec/support/query_counter.rb +++ b/spec/support/query_counter.rb @@ -6,7 +6,7 @@ def 
self.count(&block) new.count(&block) end - IGNORED_STATEMENTS = %w(CACHE SCHEMA) + IGNORED_STATEMENTS = %w[CACHE SCHEMA] IGNORED_QUERIES = /^(?:ROLLBACK|BEGIN|COMMIT|SAVEPOINT|RELEASE)/ def callback(_name, _start, _finish, _id, payload) @@ -14,7 +14,7 @@ def callback(_name, _start, _finish, _id, payload) end def callback_proc - lambda(&method(:callback)) + method(:callback) end def count(&block) diff --git a/spec/support/quota_helper.rb b/spec/support/quota_helper.rb index 8607592efc2..af3199ff32e 100644 --- a/spec/support/quota_helper.rb +++ b/spec/support/quota_helper.rb @@ -3,16 +3,18 @@ module Support module QuotaHelper def create_category_and_tag(category, tag) cat = Classification.lookup_by_name(category) - cat = Classification.create_category!(:name => category, - :single_value => false, - :description => category) unless cat - cat.add_entry(:description => tag, - :read_only => "0", - :syntax => "string", - :name => tag, - :example_text => nil, - :default => true, - :single_value => "0") if cat + cat ||= Classification.create_category!(:name => category, + :single_value => false, + :description => category) + if cat + cat.add_entry(:description => tag, + :read_only => "0", + :syntax => "string", + :name => tag, + :example_text => nil, + :default => true, + :single_value => "0") + end end def setup_tags @@ -57,54 +59,54 @@ def create_tenant_quota def create_vmware_vms @active_vm = FactoryBot.create(:vm_vmware, - :miq_group_id => @miq_group.id, - :evm_owner_id => @user.id, - :ems_id => @ems.id, - :storage_id => @storage.id, - :hardware => @hw1, - :tenant => @tenant) + :miq_group_id => @miq_group.id, + :evm_owner_id => @user.id, + :ems_id => @ems.id, + :storage_id => @storage.id, + :hardware => @hw1, + :tenant => @tenant) @archived_vm = FactoryBot.create(:vm_vmware, - :miq_group_id => @miq_group.id, - :evm_owner_id => @user.id, - :hardware => @hw2) + :miq_group_id => @miq_group.id, + :evm_owner_id => @user.id, + :hardware => @hw2) @orphaned_vm = FactoryBot.create(:vm_vmware, - :miq_group_id => @miq_group.id, - :evm_owner_id => @user.id, - :storage_id => @storage.id, - :hardware => @hw3) - @retired_vm = FactoryBot.create(:vm_vmware, :miq_group_id => @miq_group.id, :evm_owner_id => @user.id, - :retired => true, - :hardware => @hw4) + :storage_id => @storage.id, + :hardware => @hw3) + @retired_vm = FactoryBot.create(:vm_vmware, + :miq_group_id => @miq_group.id, + :evm_owner_id => @user.id, + :retired => true, + :hardware => @hw4) end def create_google_vms @active_vm = FactoryBot.create(:vm_google, - :miq_group_id => @miq_group.id, - :evm_owner_id => @user.id, - :ext_management_system => @ems, - :tenant => @tenant) + :miq_group_id => @miq_group.id, + :evm_owner_id => @user.id, + :ext_management_system => @ems, + :tenant => @tenant) @archived_vm = FactoryBot.create(:vm_google, - :miq_group_id => @miq_group.id, - :evm_owner_id => @user.id, - :tenant => @tenant) + :miq_group_id => @miq_group.id, + :evm_owner_id => @user.id, + :tenant => @tenant) @orphaned_vm = FactoryBot.create(:vm_google, - :miq_group_id => @miq_group.id, - :evm_owner_id => @user.id, - :tenant => @tenant) - @retired_vm = FactoryBot.create(:vm_google, :miq_group_id => @miq_group.id, :evm_owner_id => @user.id, - :retired => true, :tenant => @tenant) + @retired_vm = FactoryBot.create(:vm_google, + :miq_group_id => @miq_group.id, + :evm_owner_id => @user.id, + :retired => true, + :tenant => @tenant) end def create_request(prov_options) @miq_provision_request = FactoryBot.create(:miq_provision_request, - :requester => @user, - 
:source => @vm_template, - :options => prov_options) + :requester => @user, + :source => @vm_template, + :options => prov_options) @miq_request = @miq_provision_request end @@ -119,7 +121,7 @@ def vmware_requested_quota_values def vmware_template @ems = FactoryBot.create(:ems_vmware) FactoryBot.create(:template_vmware, - :hardware => FactoryBot.create(:hardware, :cpu1x2, :memory_mb => 512)) + :hardware => FactoryBot.create(:hardware, :cpu1x2, :memory_mb => 512)) end def vmware_model @@ -136,7 +138,7 @@ def vmware_reconfigure_model create_hardware create_vmware_vms @reconfigure_request = FactoryBot.create(:vm_reconfigure_request, :requester => @user) - @vm_hardware = FactoryBot.build(:hardware, :virtual_hw_version => "07", :cpu_total_cores => 2,\ + @vm_hardware = FactoryBot.build(:hardware, :virtual_hw_version => "07", :cpu_total_cores => 2, :memory_mb => 4096, :cpu_sockets => 2, :cpu_cores_per_socket => 1) @vm_vmware = FactoryBot.create(:vm_vmware, :hardware => @vm_hardware) @vm_vmware.update(:ems_id => @ems.id) @@ -156,7 +158,7 @@ def google_requested_quota_values def google_template @ems = FactoryBot.create(:ems_google_with_authentication, - :availability_zones => [FactoryBot.create(:availability_zone_google)]) + :availability_zones => [FactoryBot.create(:availability_zone_google)]) FactoryBot.create(:template_google, :ext_management_system => @ems) end @@ -164,19 +166,19 @@ def google_model @vm_template = google_template m2_small_flavor = FactoryBot.create(:flavor_google, :ems_id => @ems.id, :cloud_subnet_required => false, :cpus => 4, :cpu_cores => 1, :memory => 1024) - create_request(:number_of_vms => 1, :owner_email => 'user@example.com', - :src_vm_id => @vm_template.id, - :boot_disk_size => ["10.GB", "10 GB"], - :placement_auto => [true, 1], - :instance_type => [m2_small_flavor.id, m2_small_flavor.name]) + create_request(:number_of_vms => 1, :owner_email => 'user@example.com', + :src_vm_id => @vm_template.id, + :boot_disk_size => ["10.GB", "10 GB"], + :placement_auto => [true, 1], + :instance_type => [m2_small_flavor.id, m2_small_flavor.name]) create_google_vms end def generic_template FactoryBot.create(:service_template, - :name => 'generic', - :service_type => 'atomic', - :prov_type => 'generic') + :name => 'generic', + :service_type => 'atomic', + :prov_type => 'generic') end def build_generic_service_item @@ -186,9 +188,9 @@ def build_generic_service_item def build_generic_ansible_tower_service_item @service_template = FactoryBot.create(:service_template, - :name => 'generic_ansible_tower', - :service_type => 'atomic', - :prov_type => 'generic_ansible_tower') + :name => 'generic_ansible_tower', + :service_type => 'atomic', + :prov_type => 'generic_ansible_tower') @service_request = build_service_template_request("generic_ansible_tower", @user, :dialog => {"test" => "dialog"}) end @@ -230,7 +232,7 @@ def user_setup def setup_model(vendor = "vmware") user_setup create_tenant_quota - send("#{vendor}_model") unless vendor == 'generic' + send(:"#{vendor}_model") unless vendor == 'generic' end end end diff --git a/spec/support/report_helper.rb b/spec/support/report_helper.rb index cf9c03b262d..3782b38c645 100644 --- a/spec/support/report_helper.rb +++ b/spec/support/report_helper.rb @@ -16,21 +16,21 @@ def numeric_charts_2d_from_summaries(other) :db => "Vm", :sortby => ["ext_management_system.name"], :order => "Descending", - :cols => %w(name num_cpu), + :cols => %w[name num_cpu], :include => {"ext_management_system" => {"columns" => ["name"]}}, - :col_order => %w(name num_cpu 
ext_management_system.name), + :col_order => %w[name num_cpu ext_management_system.name], :headers => ["Name", "Number of CPUs", "Cloud/Infrastructure Provider Name"], :dims => 1, :group => "y", :rpt_options => {:summary => {:hide_detail_rows => false}}, - :col_options => {"num_cpu" => {:grouping => %i(avg max min total)}, + :col_options => {"num_cpu" => {:grouping => %i[avg max min total]}, "name" => {:break_label => "Cloud/Infrastructure Provider : Name: "}}, :graph => {:type => "Column", :mode => "values", :column => "Vm-num_cpu:total", :count => 2, :other => other}, - :extras => {}, + :extras => {} ) report.table = Ruport::Data::Table.new( - :column_names => %w(name num_cpu ext_management_system.name id), + :column_names => %w[name num_cpu ext_management_system.name id], :data => [ ["bar", 1, system_name_1 = 'blah', 352], ["foo", 3, system_name_1, 353], @@ -40,7 +40,7 @@ def numeric_charts_2d_from_summaries(other) ["gek", 2, system_name_3, 357], ["tik", 1, system_name_4 = 'blud', 358], ["tak", 1, system_name_4, 359], - ], + ] ) report end @@ -48,20 +48,20 @@ def numeric_charts_2d_from_summaries(other) def numeric_chart_simple report = MiqReport.new( :db => "Host", - :cols => %w(name ram_size), - :col_order => %w(name ram_size), + :cols => %w[name ram_size], + :col_order => %w[name ram_size], :headers => ["Name", "RAM Size (MB)"], :order => "Ascending", - :sortby => %w(name), + :sortby => %w[name], :group => nil, :graph => {:type => "Bar", :mode => "values", :column => "Host-ram_size", :count => 10, :other => false}, :dims => 1, :col_options => {}, - :extras => {}, + :extras => {} ) report.table = Ruport::Data::Table.new( - :column_names => %w(name ram_size id), + :column_names => %w[name ram_size id], :data => [ ['jenda', 512, 1], ['ladas', 1024, 2], @@ -101,20 +101,20 @@ def null_data_chart_with_complex_condition def numeric_chart_simple_with_long_strings report = MiqReport.new( :db => "Host", - :cols => %w(name ram_size), - :col_order => %w(name ram_size), + :cols => %w[name ram_size], + :col_order => %w[name ram_size], :headers => [long_header, "RAM Size (MB)"], :order => "Ascending", - :sortby => %w(name), + :sortby => %w[name], :group => nil, :graph => {:type => "Bar", :mode => "values", :column => "Host-ram_size", :count => 10, :other => false}, :dims => 1, :col_options => {}, - :extras => {}, + :extras => {} ) report.table = Ruport::Data::Table.new( - :column_names => %w(name ram_size id), + :column_names => %w[name ram_size id], :data => [ [long_category, 512, 1], ['ladas', 1024, 2], @@ -127,25 +127,25 @@ def numeric_chart_simple_with_long_strings def null_data_chart report = MiqReport.new( :db => "Vm", - :cols => %w(name), - :include => {"hardware" => {"columns" => %w(cpu_speed cpu_sockets memory_mb)}}, - :col_order => %w(name hardware.cpu_speed hardware.cpu_sockets hardware.memory_mb), + :cols => %w[name], + :include => {"hardware" => {"columns" => %w[cpu_speed cpu_sockets memory_mb]}}, + :col_order => %w[name hardware.cpu_speed hardware.cpu_sockets hardware.memory_mb], :headers => ["Name", "Hardware CPU Speed", "Hardware Number of CPUs", "Hardware RAM"], :order => "Ascending", - :sortby => %w(name), + :sortby => %w[name], :graph => {:type => "Bar", :mode => "values", :column => "Vm.hardware-cpu_sockets", :count => 10, :other => true}, :dims => 1, :col_options => {}, :rpt_options => {}, - :extras => {}, + :extras => {} ) report.table = Ruport::Data::Table.new( - :column_names => %w(name hardware.cpu_speed hardware.cpu_sockets hardware.memory_mb id), + :column_names => %w[name 
hardware.cpu_speed hardware.cpu_sockets hardware.memory_mb id], :data => [ ["Чук", nil, 4, 6_144, 42], ["Гек", nil, nil, 1_024, 49], - ], + ] ) report end @@ -153,22 +153,22 @@ def null_data_chart def numeric_chart_simple2(other) report = MiqReport.new( :db => "Host", - :sortby => %w(name), + :sortby => %w[name], :order => "Descending", - :cols => %w(name v_total_vms), + :cols => %w[name v_total_vms], :include => {}, - :col_order => %w(name v_total_vms), + :col_order => %w[name v_total_vms], :headers => ["Name", "Total VMs"], :dims => 1, :group => nil, :rpt_options => {}, :col_options => {}, :graph => {:type => "Bar", :mode => "values", :column => "Host-v_total_vms", :count => 4, :other => other}, - :extras => {}, + :extras => {} ) report.table = Ruport::Data::Table.new( - :column_names => %w(name v_total_vms id), + :column_names => %w[name v_total_vms id], :data => [ ["bar", 15, 1352], ["foo", 14, 1353], @@ -176,7 +176,7 @@ def numeric_chart_simple2(other) ["foo", 2, 1355], ["foo", 1, 1356], ["foo", 0, 1357], - ], + ] ) report end @@ -184,22 +184,22 @@ def numeric_chart_simple2(other) def numeric_chart_3d(other) report = MiqReport.new( :db => "Vm", - :cols => %w(os_image_name mem_cpu), + :cols => %w[os_image_name mem_cpu], :include => {"ext_management_system" => {"columns" => ["name"]}}, - :col_order => %w(ext_management_system.name os_image_name mem_cpu), + :col_order => %w[ext_management_system.name os_image_name mem_cpu], :headers => ["Cloud/Infrastructure Provider Name", "OS Name", "Memory"], :order => "Ascending", - :sortby => %w(ext_management_system.name os_image_name), + :sortby => %w[ext_management_system.name os_image_name], :group => "y", :graph => {:type => "StackedBar", :mode => "values", :column => "Vm-mem_cpu:total", :count => 2, :other => other}, :dims => 2, :col_options => {"name" => {:break_label => "Cloud/Infrastructure Provider : Name: "}, "mem_cpu" => {:grouping => [:total]}}, :rpt_options => {:summary => {:hide_detail_rows => false}}, - :extras => {}, + :extras => {} ) report.table = Ruport::Data::Table.new( - :column_names => %w(os_image_name mem_cpu ext_management_system.name id), + :column_names => %w[os_image_name mem_cpu ext_management_system.name id], :data => [ ["linux_centos", 6_144, "MTC-RHEVM-3.0", 67], ["linux_centos", 512, "MTC-RHEVM-3.0", 167], @@ -209,7 +209,7 @@ def numeric_chart_3d(other) ["windows", 1_024, "openstack", 71], ["linux_centos", 1_024, "ec2", 72], ["", 0, "", 79], - ], + ] ) report end @@ -217,12 +217,12 @@ def numeric_chart_3d(other) def chart_with_namespace_prefix report = MiqReport.new( :db => "ManageIQ::Providers::InfraManager::Vm", - :cols => %w(os_image_name cpu_total_cores num_cpu), - :include => {"host" => {"columns" => %w(name)}}, - :col_order => %w(os_image_name host.name cpu_total_cores num_cpu), + :cols => %w[os_image_name cpu_total_cores num_cpu], + :include => {"host" => {"columns" => %w[name]}}, + :col_order => %w[os_image_name host.name cpu_total_cores num_cpu], :headers => ["OS Name", "Host / Node Name", "Number of CPU Cores", "Number of CPUs"], :order => "Ascending", - :sortby => %w(host.name os_image_name), + :sortby => %w[host.name os_image_name], :group => "y", :graph => {:type => "Bar", :mode => "values", :column => "ManageIQ::Providers::InfraManager::Vm-num_cpu:total", :count => 10, :other => true}, :dims => 2, @@ -232,7 +232,7 @@ def chart_with_namespace_prefix ) report.table = Ruport::Data::Table.new( - :column_names => %w(os_image_name cpu_total_cores num_cpu host.name id), + :column_names => %w[os_image_name 
cpu_total_cores num_cpu host.name id], :data => [ ["linux_centos", 8, 2, "MTC-RHEVM-3.0", 67], ] @@ -243,19 +243,19 @@ def chart_with_namespace_prefix def cu_chart_without_grouping report = MiqReport.new( :db => "VimPerformanceDaily", - :cols => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), - :include => {"resource" => {"columns" => %w(cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period)}}, - :col_order => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), + :cols => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], + :include => {"resource" => {"columns" => %w[cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period]}}, + :col_order => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], :headers => ["Date/Time", "Avg Used", "Max Available"], :order => "Ascending", - :sortby => %w(timestamp), + :sortby => %w[timestamp], :group => "n", - :graph => {:type => "Line", :columns => %w(cpu_usagemhz_rate_average max_derived_cpu_available)}, + :graph => {:type => "Line", :columns => %w[cpu_usagemhz_rate_average max_derived_cpu_available]}, :extras => {:trend => {"trend_max_cpu_usagemhz_rate_average|max_derived_cpu_available"=>"Trending Down"}} ) report.table = Ruport::Data::Table.new( - :column_names => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), + :column_names => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], :data => [ [Time.zone.local(2017, 8, 19, 0, 0, 0), 19_986.0, 41_584.0], [Time.zone.local(2017, 8, 20, 0, 0, 0), 205_632.0, 41_584.0] @@ -267,20 +267,20 @@ def cu_chart_without_grouping def cu_chart_with_grouping report = MiqReport.new( :db => "VimPerformanceDaily", - :cols => %w(timestamp cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa), - :include => {"resource" => {"columns" => %w(cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period)}}, - :col_order => %w(timestamp cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa), + :cols => %w[timestamp cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa], + :include => {"resource" => {"columns" => %w[cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period]}}, + :col_order => %w[timestamp cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa], :headers => ["Date/Time", "Avg Used", "Max Available"], :order => "Ascending", - :sortby => %w(timestamp), + :sortby => %w[timestamp], :group => "n", - :graph => {:type => "Line", :columns => %w(cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa)}, + :graph => {:type => "Line", :columns => %w[cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa]}, :extras => {:trend => {"trend_max_cpu_usagemhz_rate_average|max_derived_cpu_available"=>"Trending Down"}}, :performance => {:group_by_category=>"environment"} ) report.table = Ruport::Data::Table.new( - :column_names => %w(timestamp cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa), + :column_names => %w[timestamp cpu_usagemhz_rate_average__none_ max_derived_cpu_available_xa], :data => [ [Time.zone.local(2017, 8, 19, 0, 0, 0), 19_986.0, 41_584.0], [Time.zone.local(2017, 8, 20, 0, 0, 0), 205_632.0, 41_584.0] @@ -292,19 +292,19 @@ def cu_chart_with_grouping def cu_chart_with_no_data report = MiqReport.new( :db => "VimPerformanceDaily", - :cols => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), - :include => 
{"resource" => {"columns" => %w(cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period)}}, - :col_order => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), + :cols => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], + :include => {"resource" => {"columns" => %w[cpu_usagemhz_rate_average_high_over_time_period cpu_usagemhz_rate_average_low_over_time_period]}}, + :col_order => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], :headers => ["Date/Time", "Avg Used", "Max Available"], :order => "Ascending", - :sortby => %w(timestamp), + :sortby => %w[timestamp], :group => "n", - :graph => {:type => "Line", :columns => %w(cpu_usagemhz_rate_average max_derived_cpu_available)}, + :graph => {:type => "Line", :columns => %w[cpu_usagemhz_rate_average max_derived_cpu_available]}, :extras => {:trend => {"trend_max_cpu_usagemhz_rate_average|max_derived_cpu_available"=>"Trending Down"}} ) report.table = Ruport::Data::Table.new( - :column_names => %w(timestamp cpu_usagemhz_rate_average max_derived_cpu_available), + :column_names => %w[timestamp cpu_usagemhz_rate_average max_derived_cpu_available], :data => [] ) report diff --git a/spec/support/service_template_helper.rb b/spec/support/service_template_helper.rb index 0f5994a9831..3ccb5bb7887 100644 --- a/spec/support/service_template_helper.rb +++ b/spec/support/service_template_helper.rb @@ -15,17 +15,18 @@ def build_all_atomics(hash) next unless value[:type] == "atomic" item = FactoryBot.create(:service_template, :name => name, - :options => {:dialog => {}}, - :service_type => 'atomic') + :options => {:dialog => {}}, + :service_type => 'atomic') item.update(:prov_type => value[:prov_type]) if value[:prov_type].present? next if value[:prov_type] && value[:prov_type].starts_with?("generic") + options = value[:request] options ||= {} options[:dialog] = {} mprt = FactoryBot.create(:miq_provision_request_template, - :requester => get_user(options), - :src_vm_id => options[:src_vm_id], - :options => options) + :requester => get_user(options), + :src_vm_id => options[:src_vm_id], + :options => options) add_st_resource(item, mprt) end end @@ -34,6 +35,7 @@ def build_all_composites(hash) hash.each do |name, value| next unless value[:type] == "composite" next if ServiceTemplate.find_by(:name => name) + build_a_composite(name, hash) end end @@ -49,9 +51,9 @@ def build_model_from_vms(items) child_options[key] = {:provision_index => index} end - model['top'] = { :type => 'composite', - :children => children, - :child_options => child_options } + model['top'] = {:type => 'composite', + :children => children, + :child_options => child_options} build_service_template_tree(model) end @@ -62,15 +64,14 @@ def add_item(item) else {:type => 'atomic', :prov_type => item.vendor, - :request => {:src_vm_id => item.id, :number_of_vms => 1, :requester => @user} - } + :request => {:src_vm_id => item.id, :number_of_vms => 1, :requester => @user}} end end def build_a_composite(name, hash) item = FactoryBot.create(:service_template, :name => name, - :options => {:dialog => {}}, - :service_type => 'composite') + :options => {:dialog => {}}, + :service_type => 'composite') properties = hash[name] link_all_children(item, properties, hash) unless properties[:children].empty? 
item @@ -93,19 +94,20 @@ def add_st_resource(svc, resource, options = {}) def build_service_template_request(root_st_name, user, dialog_options = {}) root = ServiceTemplate.find_by(:name => root_st_name) return nil unless root + options = {:src_id => root.id, :target_name => "barney"}.merge(dialog_options) FactoryBot.create(:service_template_provision_request, - :description => 'Service Request', - :source_type => 'ServiceTemplate', - :type => 'ServiceTemplateProvisionRequest', - :request_type => 'clone_to_service', - :approval_state => 'approved', - :status => 'Ok', - :process => true, - :request_state => 'active', - :source_id => root.id, - :requester => user, - :options => options) + :description => 'Service Request', + :source_type => 'ServiceTemplate', + :type => 'ServiceTemplateProvisionRequest', + :request_type => 'clone_to_service', + :approval_state => 'approved', + :status => 'Ok', + :process => true, + :request_state => 'active', + :source_id => root.id, + :requester => user, + :options => options) end def request_stubs @@ -120,8 +122,8 @@ def build_small_environment @ems = FactoryBot.create(:ems_vmware_with_authentication) @host1 = FactoryBot.create(:host_vmware, :ems_id => @ems.id) @src_vm = FactoryBot.create(:vm_vmware, :host => @host1, - :ems_id => @ems.id, - :name => "barney") + :ems_id => @ems.id, + :name => "barney") end def service_template_stubs diff --git a/spec/support/settings_helper.rb b/spec/support/settings_helper.rb index 3217674723d..e022b414077 100644 --- a/spec/support/settings_helper.rb +++ b/spec/support/settings_helper.rb @@ -31,5 +31,5 @@ def stub_local_settings_file(path, content) p == path ? content : orig_io_call.call(p) end - ::Settings.reload! + Settings.reload! end diff --git a/spec/support/test_contamination.rb b/spec/support/test_contamination.rb index 86212a49a88..72eef6f480f 100644 --- a/spec/support/test_contamination.rb +++ b/spec/support/test_contamination.rb @@ -10,6 +10,7 @@ def self.patched_include def include(included) exceptions = ["ActionView::Helpers::NumberHelper", "SNMP::BER"] return super if included.to_s.in?(exceptions) + raise RuntimeError, "Unexpected module '#{included}' included globally, did you mean to include it in a class?", caller end end diff --git a/spec/support/workflow_spec_helper.rb b/spec/support/workflow_spec_helper.rb index 3f2b0eefc38..4fa8fb0c531 100644 --- a/spec/support/workflow_spec_helper.rb +++ b/spec/support/workflow_spec_helper.rb @@ -21,9 +21,10 @@ def assert_automate_dialog_lookup(user, category, platform, method = 'get_pre_di 'dialog_input_request_type' => 'template', 'dialog_input_target_type' => 'vm', 'platform_category' => category, - 'platform' => platform, + 'platform' => platform ), - anything).and_return(dialog_name) + anything + ).and_return(dialog_name) end def assert_automate_vm_name_lookup(user, vm_name = 'vm_name') @@ -36,7 +37,8 @@ def assert_automate_vm_name_lookup(user, vm_name = 'vm_name') 'message' => 'get_vmname', 'User::user' => user.id ), - anything).and_return("get_vmname_url") + anything + ).and_return("get_vmname_url") end def stub_automate_workspace(url, user, *result) diff --git a/spec/tools/copy_reports_structure_spec.rb b/spec/tools/copy_reports_structure_spec.rb index fb16cefe9ae..2de4d518fc8 100644 --- a/spec/tools/copy_reports_structure_spec.rb +++ b/spec/tools/copy_reports_structure_spec.rb @@ -5,8 +5,8 @@ RSpec.describe ReportStructure do let(:group_name) { "SourceGroup" } let(:settings) { {"reports_menus" => [["Configuration Management", ["Virtual Machines", ["Vendor and 
Type"]]]]} } - let(:role) { FactoryBot.create(:miq_user_role) } - let(:source_group) { FactoryBot.create(:miq_group, :settings => settings) } + let(:role) { FactoryBot.create(:miq_user_role) } + let(:source_group) { FactoryBot.create(:miq_group, :settings => settings) } let(:destination_group) { FactoryBot.create(:miq_group, :miq_user_role => role) } before do diff --git a/spec/tools/fix_auth/auth_config_model_spec.rb b/spec/tools/fix_auth/auth_config_model_spec.rb index 755560c4e04..e294a347fe3 100644 --- a/spec/tools/fix_auth/auth_config_model_spec.rb +++ b/spec/tools/fix_auth/auth_config_model_spec.rb @@ -64,7 +64,7 @@ subject.fix_passwords(request, options) expect(request).to be_changed new_options = YAML.load(request.options) - expect(new_options[:dialog]['password::special'.to_sym]).to be_encrypted(pass) + expect(new_options[:dialog][:"password::special"]).to be_encrypted(pass) expect(new_options[:root_password]).to be_encrypted(pass) expect(new_options[:sysprep_password]).to be_encrypted(pass) @@ -92,7 +92,7 @@ subject.fix_passwords(request, options) expect(request).to be_changed new_options = YAML.load(request.options) - expect(new_options[:dialog]['password::special'.to_sym]).to be_encrypted(pass) + expect(new_options[:dialog][:"password::special"]).to be_encrypted(pass) expect(new_options[:root_password]).to be_encrypted(pass) expect(new_options[:sysprep_password]).to be_encrypted(pass) diff --git a/spec/tools/fix_auth/auth_model_spec.rb b/spec/tools/fix_auth/auth_model_spec.rb index 59821ff5972..eb1ed6524fd 100644 --- a/spec/tools/fix_auth/auth_model_spec.rb +++ b/spec/tools/fix_auth/auth_model_spec.rb @@ -29,12 +29,12 @@ end it "should determine available_columns" do - expect(subject.available_columns).to eq(%w(password auth_key)) + expect(subject.available_columns).to eq(%w[password auth_key]) end it "should limit available_columns when not all columns are available" do - allow(subject).to receive_messages(:column_names => %w(password id)) - expect(subject.available_columns).to eq(%w(password)) + allow(subject).to receive_messages(:column_names => %w[password id]) + expect(subject.available_columns).to eq(%w[password]) end it "should build selection criteria (non selects)" do diff --git a/spec/tools/fix_auth/cli_spec.rb b/spec/tools/fix_auth/cli_spec.rb index 95047f437da..72b3cb6d50b 100644 --- a/spec/tools/fix_auth/cli_spec.rb +++ b/spec/tools/fix_auth/cli_spec.rb @@ -6,79 +6,83 @@ describe "#parse" do it "should assign defaults" do opts = described_class.new.parse([], {}) - .options.slice(:hostname, :port, :username, :password, :hardcode, :database) + .options.slice(:hostname, :port, :username, :password, :hardcode, :database) expect(opts).to eq( :username => "root", :database => "vmdb_production", - :port => 5432) + :port => 5432 + ) end it "should pickup env variables" do opts = described_class.new.parse([], "PGUSER" => "envuser", "PGPASSWORD" => "envpass", "PGHOST" => "envhost") - .options.slice(:hostname, :username, :password, :hardcode, :database) + .options.slice(:hostname, :username, :password, :hardcode, :database) expect(opts).to eq( :username => "envuser", :database => "vmdb_production", :password => "envpass", - :hostname => "envhost") + :hostname => "envhost" + ) end it "should parse database names" do - opts = described_class.new.parse(%w(DB1)) - .options.slice(:hostname, :username, :password, :hardcode, :database) + opts = described_class.new.parse(%w[DB1]) + .options.slice(:hostname, :username, :password, :hardcode, :database) expect(opts).to eq( :username => 
"root", - :database => "DB1") + :database => "DB1" + ) end it "should parse hardcoded password" do - opts = described_class.new.parse(%w(-P hardcoded)) - .options.slice(:hostname, :username, :password, :hardcode, :database) + opts = described_class.new.parse(%w[-P hardcoded]) + .options.slice(:hostname, :username, :password, :hardcode, :database) expect(opts).to eq( :username => "root", :database => "vmdb_production", - :hardcode => "hardcoded") + :hardcode => "hardcoded" + ) end it "defaults to updating the database" do - opts = described_class.new.parse(%w()) - .options.slice(:db, :databaseyml, :key) + opts = described_class.new.parse(%w[]) + .options.slice(:db, :databaseyml, :key) expect(opts).to eq(:db => true) end it "doesnt default to database if running another task" do - opts = described_class.new.parse(%w(--databaseyml)) - .options.slice(:db, :databaseyml, :key) + opts = described_class.new.parse(%w[--databaseyml]) + .options.slice(:db, :databaseyml, :key) expect(opts).to eq(:databaseyml => true) end it "doesnt default to database if running another task 2" do - opts = described_class.new.parse(%w(--key)) - .options.slice(:db, :databaseyml, :key) + opts = described_class.new.parse(%w[--key]) + .options.slice(:db, :databaseyml, :key) expect(opts).to eq(:key => true) end it "can run all 3 tasks" do - opts = described_class.new.parse(%w(--key --db --databaseyml)) - .options.slice(:db, :databaseyml, :key) + opts = described_class.new.parse(%w[--key --db --databaseyml]) + .options.slice(:db, :databaseyml, :key) expect(opts).to eq(:db => true, :databaseyml => true, :key => true) end it "parses legacy_keys" do - opts = described_class.new.parse(%w(--legacy-key v2.bak)) - .options.slice(:legacy_key) + opts = described_class.new.parse(%w[--legacy-key v2.bak]) + .options.slice(:legacy_key) expect(opts).to eq(:legacy_key => "v2.bak") end it "parses without legacy_keys specified" do - opts = described_class.new.parse(%w()) - .options.slice(:legacy_key) + opts = described_class.new.parse(%w[]) + .options.slice(:legacy_key) expect(opts[:legacy_key]).not_to be end describe "v2" do it "exists" do - expect { described_class.new.parse(%w(--v2)) }.not_to raise_error + expect { described_class.new.parse(%w[--v2]) }.not_to raise_error end end end diff --git a/spec/tools/miq_config_sssd_ldap/auth_establish_spec.rb b/spec/tools/miq_config_sssd_ldap/auth_establish_spec.rb index a3b030bca28..b9b4c5d9591 100644 --- a/spec/tools/miq_config_sssd_ldap/auth_establish_spec.rb +++ b/spec/tools/miq_config_sssd_ldap/auth_establish_spec.rb @@ -37,17 +37,17 @@ it 'invokes authconfig with valid parameters' do expect(AwesomeSpawn).to receive(:run) .with("authconfig", - :params => { :ldapserver= => "bob://hostname:22", - :ldapbasedn= => nil, - :enablesssd => nil, - :enablesssdauth => nil, - :enablelocauthorize => nil, - :enableldap => nil, - :enableldapauth => nil, - :disableldaptls => nil, - :enablerfc2307bis => nil, - :enablecachecreds => nil, - :update => nil}) + :params => {:ldapserver= => "bob://hostname:22", + :ldapbasedn= => nil, + :enablesssd => nil, + :enablesssdauth => nil, + :enablelocauthorize => nil, + :enableldap => nil, + :enableldapauth => nil, + :disableldaptls => nil, + :enablerfc2307bis => nil, + :enablecachecreds => nil, + :update => nil}) .and_return(double(:command_line => "authconfig", :failure? 
=> false)) @auth_establish.run_auth_establish end diff --git a/spec/tools/miq_config_sssd_ldap/configure_apache_spec.rb b/spec/tools/miq_config_sssd_ldap/configure_apache_spec.rb index 987b969ccd3..8f873542797 100644 --- a/spec/tools/miq_config_sssd_ldap/configure_apache_spec.rb +++ b/spec/tools/miq_config_sssd_ldap/configure_apache_spec.rb @@ -12,19 +12,19 @@ describe '#onfigure' do let(:manageiq_pam_conf) do - <<-PAM_CONF.strip_heredoc + <<~PAM_CONF manageiq pam conf data PAM_CONF end let(:manageiq_remote_user_conf) do - <<-REMOTE_USER_CONF.strip_heredoc + <<~REMOTE_USER_CONF manageiq remote user conf data REMOTE_USER_CONF end let(:manageiq_external_auth_conf) do - <<-EXTERNAL_AUTH_KERB_CONF.strip_heredoc + <<~EXTERNAL_AUTH_KERB_CONF KrbMethodK5Passwd Off KrbAuthRealms <%= realm %> Krb5KeyTab /etc/http.keytab @@ -32,7 +32,7 @@ end let(:expected_manageiq_external_auth_conf) do - <<-EXPECTED_EXTERNAL_AUTH_KERB_CONF.strip_heredoc + <<~EXPECTED_EXTERNAL_AUTH_KERB_CONF KrbMethodK5Passwd Off KrbAuthRealms bob.your.uncle.com Krb5KeyTab /etc/http.keytab @@ -40,7 +40,7 @@ end let(:manageiq_external_auth_gssapi_conf) do - <<-EXTERNAL_AUTH_GSSAPI_CONF.strip_heredoc + <<~EXTERNAL_AUTH_GSSAPI_CONF AuthType GSSAPI AuthName "GSSAPI Single Sign On Login" GssapiCredStore keytab:/etc/http.keytab @@ -65,11 +65,9 @@ @pam_template_dir = FileUtils.mkdir_p("#{@template_dir}/#{@pam_conf_dir}")[0] stub_const("MiqConfigSssdLdap::AuthTemplateFiles::PAM_CONF_DIR", @pam_conf_dir) - File.open("#{@pam_template_dir}/httpd-auth", "w") { |f| f.write(manageiq_pam_conf) } - File.open("#{@httpd_template_dir}/manageiq-remote-user.conf", "w") { |f| f.write(manageiq_remote_user_conf) } - File.open("#{@httpd_template_dir}/manageiq-external-auth.conf.erb", "w") do |f| - f.write(manageiq_external_auth_conf) - end + File.write("#{@pam_template_dir}/httpd-auth", manageiq_pam_conf) + File.write("#{@httpd_template_dir}/manageiq-remote-user.conf", manageiq_remote_user_conf) + File.write("#{@httpd_template_dir}/manageiq-external-auth.conf.erb", manageiq_external_auth_conf) end after do @@ -84,9 +82,7 @@ end it 'silently ignores missing KrbAuthRealms when creating the gssapi httpd config file' do - File.open("#{@httpd_template_dir}/manageiq-external-auth.conf.erb", "w") do |f| - f.write(manageiq_external_auth_gssapi_conf) - end + File.write("#{@httpd_template_dir}/manageiq-external-auth.conf.erb", manageiq_external_auth_gssapi_conf) described_class.new(@initial_settings).configure expect(File.read("#{@httpd_conf_dir}/manageiq-external-auth.conf")).to eq(manageiq_external_auth_gssapi_conf) diff --git a/spec/tools/miq_config_sssd_ldap/configure_appliance_settings_spec.rb b/spec/tools/miq_config_sssd_ldap/configure_appliance_settings_spec.rb index ef241ec76bf..136c34d6778 100644 --- a/spec/tools/miq_config_sssd_ldap/configure_appliance_settings_spec.rb +++ b/spec/tools/miq_config_sssd_ldap/configure_appliance_settings_spec.rb @@ -1,4 +1,4 @@ -$LOAD_PATH << Rails.root.join("tools", "miq_config_sssd_ldap").to_s +$LOAD_PATH << Rails.root.join("tools/miq_config_sssd_ldap").to_s require "configure_appliance_settings" diff --git a/spec/tools/miq_config_sssd_ldap/configure_sssd_rules_spec.rb b/spec/tools/miq_config_sssd_ldap/configure_sssd_rules_spec.rb index fdfa758628e..88e49423013 100644 --- a/spec/tools/miq_config_sssd_ldap/configure_sssd_rules_spec.rb +++ b/spec/tools/miq_config_sssd_ldap/configure_sssd_rules_spec.rb @@ -12,7 +12,7 @@ describe '#disable_tls' do let(:disable_tls_conf) do - <<-CFG_RULES_CONF.strip_heredoc + <<~CFG_RULES_CONF 
option = ldap_auth_disable_tls_never_use_in_production CFG_RULES_CONF end diff --git a/spec/tools/miq_config_sssd_ldap/sssd_conf_spec.rb b/spec/tools/miq_config_sssd_ldap/sssd_conf_spec.rb index 4caae6fd283..d850d60f2df 100644 --- a/spec/tools/miq_config_sssd_ldap/sssd_conf_spec.rb +++ b/spec/tools/miq_config_sssd_ldap/sssd_conf_spec.rb @@ -12,7 +12,7 @@ describe '#configure' do let(:sssd_conf_erb) do - <<-SSSD_CONF_ERB.strip_heredoc + <<~SSSD_CONF_ERB [domain/default] autofs_provider = ldap ldap_schema = rfc2307bis @@ -33,7 +33,7 @@ end let(:sssd_conf_shell) do - <<-SSSD_CONF_INITIAL.strip_heredoc + <<~SSSD_CONF_INITIAL [domain/default] autofs_provider = ldap ldap_schema = rfc2307bis @@ -54,7 +54,7 @@ end let(:sssd_conf_updated) do - <<-SSSD_CONF_UPDATED.strip_heredoc + <<~SSSD_CONF_UPDATED [domain/] @@ -71,27 +71,27 @@ ldap_tls_cacertdir = /etc/openldap/cacerts/ entry_cache_timeout = 600 ldap_auth_disable_tls_never_use_in_production = true - ldap_default_bind_dn = - ldap_default_authtok = + ldap_default_bind_dn =#{' '} + ldap_default_authtok =#{' '} ldap_group_member = member ldap_group_name = cn ldap_group_object_class = groupOfNames ldap_group_search_base = my_basedn ldap_network_timeout = 3 ldap_pwd_policy = none - ldap_tls_cacert = + ldap_tls_cacert =#{' '} ldap_user_extra_attrs = mail, givenname, sn, displayname, domainname ldap_user_gid_number = gidNumber ldap_user_name = cn ldap_user_object_class = person - ldap_user_search_base = + ldap_user_search_base =#{' '} ldap_user_uid_number = uidNumber [sssd] services = nss, pam, ifp - domains = + domains =#{' '} config_file_version = 2 - default_domain_suffix = + default_domain_suffix =#{' '} sbus_timeout = 30 [pam] @@ -124,7 +124,7 @@ end it 'will create the sssd config file if needed' do - File.open("#{@sssd_template_dir}/sssd.conf.erb", "w") { |f| f.write(sssd_conf_erb) } + File.write("#{@sssd_template_dir}/sssd.conf.erb", sssd_conf_erb) described_class.new(@initial_settings) @@ -132,7 +132,7 @@ end it 'will populate the PAM section' do - File.open("#{@sssd_template_dir}/sssd.conf.erb", "w") { |f| f.write(sssd_conf_erb) } + File.write("#{@sssd_template_dir}/sssd.conf.erb", sssd_conf_erb) described_class.new(@initial_settings).update diff --git a/spec/tools/perf_generate.rb b/spec/tools/perf_generate.rb index 9b7a49d45cc..2e085956c95 100644 --- a/spec/tools/perf_generate.rb +++ b/spec/tools/perf_generate.rb @@ -44,19 +44,19 @@ IMPORT_HOURLY_FNAME = File.expand_path(File.join(File.dirname(__FILE__), "import_hourly.csv")) METRICS_COLS = [:capture_interval_name, :resource_type, :resource_id, :timestamp] -puts <<-EOL -Importing metrics for: - EMS: #{NUM_EMS} - Storages: #{NUM_STORAGES} - Clusters: #{NUM_CLUSTERS} - Hosts: #{NUM_HOSTS} - VMs: #{NUM_VMS} +puts <<~EOL + Importing metrics for: + EMS: #{NUM_EMS} + Storages: #{NUM_STORAGES} + Clusters: #{NUM_CLUSTERS} + Hosts: #{NUM_HOSTS} + VMs: #{NUM_VMS} - Realtime from: #{REALTIME_START.iso8601} - Hourly from: #{HOURLY_START.iso8601} + Realtime from: #{REALTIME_START.iso8601} + Hourly from: #{HOURLY_START.iso8601} - Number of realtime rows: #{Class.new.extend(ActionView::Helpers::NumberHelper).number_with_delimiter(realtime_count)} - Number of hourly rows: #{Class.new.extend(ActionView::Helpers::NumberHelper).number_with_delimiter(hourly_count)} + Number of realtime rows: #{Class.new.extend(ActionView::Helpers::NumberHelper).number_with_delimiter(realtime_count)} + Number of hourly rows: #{Class.new.extend(ActionView::Helpers::NumberHelper).number_with_delimiter(hourly_count)} EOL @@ -69,7 
+69,7 @@ def insert_realtime(klass, id, timestamp) 180.times do |rt_count| - $out_csv_realtime << ["realtime", klass, id, (timestamp + 20 * rt_count).iso8601] + $out_csv_realtime << ["realtime", klass, id, (timestamp + (20 * rt_count)).iso8601] $pbar.increment end end @@ -84,7 +84,7 @@ def insert_hourly(klass, id, timestamp) def with_vms_and_hosts NUM_HOSTS.times do |h_id| VMS_PER_HOST.times do |v_count| - v_id = h_id * VMS_PER_HOST + v_count + v_id = (h_id * VMS_PER_HOST) + v_count yield "Vm", v_id + 1 end yield "Host", h_id + 1 diff --git a/tools/change_server_zone.rb b/tools/change_server_zone.rb index 38f2865ec45..cb5d87f157e 100755 --- a/tools/change_server_zone.rb +++ b/tools/change_server_zone.rb @@ -8,13 +8,13 @@ server_id, zone_name = ARGV -server = MiqServer.where(:id => server_id).take +server = MiqServer.find_by(:id => server_id) unless server puts "Unable to find server with id [#{server_id}]" exit 1 end -zone = Zone.where(:name => zone_name).take +zone = Zone.find_by(:name => zone_name) unless zone puts "Unable to find zone with name [#{zone_name}]" exit 1 diff --git a/tools/cleanup_duplicate_host_guest_devices.rb b/tools/cleanup_duplicate_host_guest_devices.rb index e4b769ebf23..5bd4374476b 100755 --- a/tools/cleanup_duplicate_host_guest_devices.rb +++ b/tools/cleanup_duplicate_host_guest_devices.rb @@ -59,7 +59,7 @@ foreign_key = assoc.join_primary_key if %i[delete destroy].include?(delete_meth) - assoc.klass.where(foreign_key => slice).send("#{delete_meth}_all") + assoc.klass.where(foreign_key => slice).send(:"#{delete_meth}_all") elsif delete_meth == :nullify assoc.klass.where(foreign_key => slice).update_all(foreign_key => nil) else diff --git a/tools/configure_server_settings.rb b/tools/configure_server_settings.rb index b417bf2d91b..3b54507fd75 100755 --- a/tools/configure_server_settings.rb +++ b/tools/configure_server_settings.rb @@ -2,7 +2,7 @@ require 'bundler/setup' require 'optimist' -TYPES = %w(string integer boolean symbol float array).freeze +TYPES = %w[string integer boolean symbol float array].freeze opts = Optimist.options(ARGV) do banner "USAGE: #{__FILE__} -s -p -v \n" \ @@ -83,7 +83,7 @@ def types_valid?(old_val, new_val) if opts[:force] puts "Change [#{opts[:path]}], old class: [#{path[key].class}], new class: [#{newval.class}]" elsif path[key] && !types_valid?(path[key], newval) - STDERR.puts "The new value's class #{newval.class} does not match the prior one's #{path[key].class}. Use -t to specify the type for the provided value. Use -f to force changing this value. Note, -f may break things! See -h for examples." + warn "The new value's class #{newval.class} does not match the prior one's #{path[key].class}. Use -t to specify the type for the provided value. Use -f to force changing this value. Note, -f may break things! See -h for examples." 
exit 1 end diff --git a/tools/copy_reports_structure.rb b/tools/copy_reports_structure.rb index c31041dc49b..e86882a8742 100644 --- a/tools/copy_reports_structure.rb +++ b/tools/copy_reports_structure.rb @@ -9,15 +9,15 @@ require 'copy_reports_structure/report_structure' opts = Optimist.options(ARGV) do - banner "Utility to: \n" \ - " - make report structure configured for a group available to another group\n" \ - " - make report structure configured for a group available to role\n" \ - " - reset report access to default for group or role\n" \ + banner "Utility to: \n " \ + "- make report structure configured for a group available to another group\n " \ + "- make report structure configured for a group available to role\n " \ + "- reset report access to default for group or role\n" \ "Example (Duplicate for Group): bundle exec ruby #{__FILE__} --source-group=EvmGroup --target-group=SomeGroup\n" \ "Example (Duplicate for Role): bundle exec ruby #{__FILE__} --source-group=EvmGroup --target-role=SomeRole\n" \ "Example (Reset to Default for Group): bundle exec ruby #{__FILE__} --reset-group=SomeGroup\n" \ "Example (Reset to Default for Role): bundle exec ruby #{__FILE__} --reset-role=SomeRole\n" - opt :dry_run, "Dry Run", :short => "d" + opt :dry_run, "Dry Run", :short => "d" opt :source_group, "Source group to take report structure from", :short => :none, :type => :string opt :target_group, "Target group to get report menue from source group", :short => :none, :type => :string opt :target_role, "Target role to get report menue from source group", :short => :none, :type => :string diff --git a/tools/copy_reports_structure/report_structure.rb b/tools/copy_reports_structure/report_structure.rb index 44746fda03d..a9d26760cc3 100644 --- a/tools/copy_reports_structure/report_structure.rb +++ b/tools/copy_reports_structure/report_structure.rb @@ -1,24 +1,23 @@ class ReportStructure - def self.duplicate_for_group(source_group_name, destination_group_name, dry_run = false) puts "Copying report structure from group '#{source_group_name}' to group ' #{destination_group_name}' ..." destination_group = find_group(destination_group_name) destination_group.update!(:settings => find_group(source_group_name).settings) unless dry_run puts "Reports structure was successfully copied from '#{source_group_name}' to '#{destination_group_name}'" - rescue StandardError => e - $stderr.puts "Copying failed: #{e.message}" + rescue => e + warn "Copying failed: #{e.message}" end def self.duplicate_for_role(source_group_name, destination_role_name, dry_run = false) puts "Copying report structure from group '#{source_group_name}' to role ' #{destination_role_name}' ..." 
source_group = find_group(source_group_name) find_role(destination_role_name).miq_groups.each do |destination_group| - begin - destination_group.update!(:settings => source_group.settings) unless dry_run - puts " Reports structure was successfully copied from '#{source_group_name}' to '#{destination_group.description}'" - rescue StandardError => e - $stderr.puts "Copying failed: #{e.message}" - end + + destination_group.update!(:settings => source_group.settings) unless dry_run + puts " Reports structure was successfully copied from '#{source_group_name}' to '#{destination_group.description}'" + rescue => e + warn "Copying failed: #{e.message}" + end end @@ -28,20 +27,20 @@ def self.reset_for_group(group_name, dry_run = false) begin group.update!(:settings => nil) unless dry_run puts "Successfully removed custom report structure for group '#{group_name}'" - rescue StandardError => e - $stderr.puts "Removing failed: #{e.message}" + rescue => e + warn "Removing failed: #{e.message}" end end def self.reset_for_role(role_name, dry_run = false) puts "Removing custom report structure for role '#{role_name}'..." find_role(role_name).miq_groups.each do |group| - begin - group.update!(:settings => nil) unless dry_run - puts "Successfully removed custom report structure for group '#{group.description}'" - rescue StandardError => e - $stderr.puts "Removing failed: #{e.message}" - end + + group.update!(:settings => nil) unless dry_run + puts "Successfully removed custom report structure for group '#{group.description}'" + rescue => e + warn "Removing failed: #{e.message}" + end end @@ -57,4 +56,3 @@ def self.find_role(role_name) role end end - diff --git a/tools/db_printers/print_network.rb b/tools/db_printers/print_network.rb index 1d8415cbc3c..30a43a72b83 100755 --- a/tools/db_printers/print_network.rb +++ b/tools/db_printers/print_network.rb @@ -42,6 +42,7 @@ def print_switch(indent, switch) puts " pNIC: (None)" host.switches.order(Arel.sql("lower(name)")).each do |switch| next if found_switches.include?(switch.name) + print_switch(" ", switch) end end diff --git a/tools/db_printers/print_scsi.rb b/tools/db_printers/print_scsi.rb index ebc20ad06e1..acff1cdf3f6 100755 --- a/tools/db_printers/print_scsi.rb +++ b/tools/db_printers/print_scsi.rb @@ -4,16 +4,18 @@ Host.all.each do |host| puts "Host: #{host.name} (id: #{host.id})" - host.hardware.guest_devices.where(:device_type => 'storage').order(Arel.sql("lower(device_name)")).each do |adapter| - sub_name = adapter.iscsi_name.nil? ? "" : " (#{adapter.iscsi_name})" - puts " SCSI Adapter: #{adapter.device_name}#{sub_name}" - adapter.miq_scsi_targets.order(Arel.sql("lower(target)")).each do |target| - puts " Target: #{target.iscsi_name} (#{target.target})" - target.miq_scsi_luns.order(Arel.sql("lower(lun)")).each do |lun| - puts " Lun: #{lun.canonical_name} (#{lun.lun})" + unless host.hardware.nil? + host.hardware.guest_devices.where(:device_type => 'storage').order(Arel.sql("lower(device_name)")).each do |adapter| + sub_name = adapter.iscsi_name.nil? ? "" : " (#{adapter.iscsi_name})" + puts " SCSI Adapter: #{adapter.device_name}#{sub_name}" + adapter.miq_scsi_targets.order(Arel.sql("lower(target)")).each do |target| + puts " Target: #{target.iscsi_name} (#{target.target})" + target.miq_scsi_luns.order(Arel.sql("lower(lun)")).each do |lun| + puts " Lun: #{lun.canonical_name} (#{lun.lun})" + end end end - end unless host.hardware.nil? 
+ end puts("\n") end diff --git a/tools/doc/reportable_fields_to_csv.rb b/tools/doc/reportable_fields_to_csv.rb index 5696a2d7c2a..cd0ab7b7903 100755 --- a/tools/doc/reportable_fields_to_csv.rb +++ b/tools/doc/reportable_fields_to_csv.rb @@ -7,7 +7,7 @@ require 'csv' CSV.open("reportable_fields.csv", "w") do |csv| - csv << %w(model_display_name model field_display_name field) + csv << %w[model_display_name model field_display_name field] models.each do |model_display_name, model| puts "Generating list for #{model}" diff --git a/tools/doc/reports_to_csv.rb b/tools/doc/reports_to_csv.rb index 21fed77c076..29dd6d5610e 100755 --- a/tools/doc/reports_to_csv.rb +++ b/tools/doc/reports_to_csv.rb @@ -3,7 +3,7 @@ require 'csv' CSV.open("reports.csv", "w") do |csv| - csv << %w(Name Title Group Sorting Graph Filter) + csv << %w[Name Title Group Sorting Graph Filter] MiqReport.order(:name).each do |rpt| next if rpt.rpt_group == "Custom" || rpt.rpt_group == "Compare" diff --git a/tools/evm_dump.rb b/tools/evm_dump.rb index d76e3a51751..3308e9229d0 100755 --- a/tools/evm_dump.rb +++ b/tools/evm_dump.rb @@ -15,7 +15,7 @@ def yml_fname(klass) def yml_dump(yml_fname, items) File.delete(yml_fname) if File.exist?(yml_fname) - File.open(yml_fname, "w") { |fd| fd.write(YAML.dump(items)) } + File.write(yml_fname, YAML.dump(items)) end ### Main diff --git a/tools/export_tags.rb b/tools/export_tags.rb index 8770563738d..adaa89759d3 100755 --- a/tools/export_tags.rb +++ b/tools/export_tags.rb @@ -5,5 +5,5 @@ raise "No output file provided" if output.nil? puts "Exporting classification tags..." -File.open(output, "w") { |f| f.write(Classification.export_to_yaml) } +File.write(output, Classification.export_to_yaml) puts "Exporting classification tags... Complete" diff --git a/tools/feature_support_matrix.rb b/tools/feature_support_matrix.rb index dc967d313f9..09d482374c4 100755 --- a/tools/feature_support_matrix.rb +++ b/tools/feature_support_matrix.rb @@ -27,8 +27,8 @@ def matrix_for(model) matrix.model = model if model.included_modules.include?(SupportsFeatureMixin) - matrix.features = SupportsFeatureMixin::QUERYABLE_FEATURES.keys.each_with_object({}) do |feature, features| - features[feature] = model.supports?(feature) + matrix.features = SupportsFeatureMixin::QUERYABLE_FEATURES.keys.index_with do |feature| + model.supports?(feature) end end @@ -56,7 +56,7 @@ def visit(subject) def to_s headers = @rows.first.headers CSV.generate('', :headers => headers) do |csv| - header_row = CSV::Row.new(headers, %w(Model) + SupportsFeatureMixin::QUERYABLE_FEATURES.values) + header_row = CSV::Row.new(headers, %w[Model] + SupportsFeatureMixin::QUERYABLE_FEATURES.values) csv << header_row @rows.each { |row| csv << row } end @@ -66,4 +66,4 @@ def to_s matrix = matrix_for(ApplicationRecord) csv = CsvVisitor.new matrix.accept(csv) -puts csv.to_s +puts csv diff --git a/tools/fix_auth.rb b/tools/fix_auth.rb index d4bc0eb082c..a81e7a5326b 100755 --- a/tools/fix_auth.rb +++ b/tools/fix_auth.rb @@ -1,5 +1,5 @@ #!/usr/bin/env ruby -ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) +ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__) require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) # usage: ruby fix_auth -h @@ -9,7 +9,7 @@ if __FILE__ == $PROGRAM_NAME $LOAD_PATH.push(File.expand_path(__dir__)) - $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w(.. lib)))) + $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w[.. 
lib]))) end require 'active_support/all' diff --git a/tools/fix_auth/auth_config_model.rb b/tools/fix_auth/auth_config_model.rb index bc4515b153a..0b0de750762 100644 --- a/tools/fix_auth/auth_config_model.rb +++ b/tools/fix_auth/auth_config_model.rb @@ -4,8 +4,7 @@ module AuthConfigModel include FixAuth::AuthModel module ClassMethods - attr_accessor :password_fields - attr_accessor :password_prefix + attr_accessor :password_fields, :password_prefix # true if we want to output the keys as symbols (default: false - output as string keys) attr_accessor :symbol_keys @@ -45,8 +44,8 @@ def recrypt(old_value, options = {}) old_value.kind_of?(Hash) ? hash : hash.to_yaml rescue ArgumentError # undefined class/module unless options[:allow_failures] - STDERR.puts "potentially bad yaml:" - STDERR.puts old_value + warn "potentially bad yaml:" + warn old_value end raise end diff --git a/tools/fix_auth/auth_model.rb b/tools/fix_auth/auth_model.rb index 88a6273e273..2591f6c6981 100644 --- a/tools/fix_auth/auth_model.rb +++ b/tools/fix_auth/auth_model.rb @@ -62,7 +62,7 @@ def fix_passwords(obj, options) available_columns.each do |column| if (old_value = obj.send(column)).present? new_value = recrypt(old_value, options) - obj.send("#{column}=", new_value) if new_value != old_value + obj.send(:"#{column}=", new_value) if new_value != old_value end end obj @@ -70,6 +70,7 @@ def fix_passwords(obj, options) def highlight_password(value, options) return if value.blank? + if options[:hardcode] && (value == ManageIQ::Password.encrypt(options[:hardcode])) "#{value} HARDCODED" elsif options[:invalid] && (value == ManageIQ::Password.encrypt(options[:invalid])) @@ -89,8 +90,8 @@ def display_record(r) def display_column(r, column, options) v = r.send(column) - if r.send("#{column}_changed?") - puts " #{column}: #{r.send("#{column}_was").inspect} => #{highlight_password(v, options)}" + if r.send(:"#{column}_changed?") + puts " #{column}: #{r.send(:"#{column}_was").inspect} => #{highlight_password(v, options)}" elsif r.send(column).present? puts " #{column}: #{v.inspect} (not changed)" end @@ -98,6 +99,7 @@ def display_column(r, column, options) def run(options = {}) return if available_columns.empty? + puts "fixing #{table_name}.#{available_columns.join(", ")}" unless options[:silent] processed = 0 errors = 0 @@ -117,7 +119,7 @@ def run(options = {}) rescue ArgumentError # undefined class/module errors += 1 unless options[:allow_failures] - STDERR.puts "unable to fix #{r.class.table_name}:#{r.id}" unless options[:silent] + warn "unable to fix #{r.class.table_name}:#{r.id}" unless options[:silent] raise end end diff --git a/tools/fix_auth/cli.rb b/tools/fix_auth/cli.rb index 9cd27b12868..41eedd673e9 100644 --- a/tools/fix_auth/cli.rb +++ b/tools/fix_auth/cli.rb @@ -7,8 +7,8 @@ class Cli def parse(args, env = {}) args.shift if args.first == "--" # Handle when called through script/runner self.options = Optimist.options(args) do - banner "Usage: ruby #{$PROGRAM_NAME} [options] database [...]\n" \ - " ruby #{$PROGRAM_NAME} [options] -P new_password database [...] to replace all passwords" + banner "Usage: ruby #{$PROGRAM_NAME} [options] database [...]\n " \ + "ruby #{$PROGRAM_NAME} [options] -P new_password database [...] 
to replace all passwords" opt :verbose, "Verbose", :short => "v" opt :dry_run, "Dry Run", :short => "d" @@ -20,11 +20,11 @@ def parse(args, env = {}) opt :invalid, "Password to use for invalid passwords", :type => :string, :short => "i" opt :key, "Generate key", :type => :boolean, :short => "k" opt :v2, "ignored, available for backwards compatibility", :type => :boolean, :short => "f" - opt :root, "Rails Root", :type => :string, :short => "r", - :default => (env['RAILS_ROOT'] || File.expand_path(File.join(File.dirname(__FILE__), %w(.. ..)))) + opt :root, "Rails Root", :type => :string, :short => "r", + :default => (env['RAILS_ROOT'] || File.expand_path(File.join(File.dirname(__FILE__), %w[.. ..]))) opt :databaseyml, "Rewrite database.yml", :type => :boolean, :short => "y", :default => false - opt :db, "Upgrade database", :type => :boolean, :short => 'x', :default => false - opt :legacy_key, "Legacy Key", :type => :string, :short => "K" + opt :db, "Upgrade database", :type => :boolean, :short => 'x', :default => false + opt :legacy_key, "Legacy Key", :type => :string, :short => "K" opt :allow_failures, "Run through all records, even with errors", :type => :boolean, :short => nil, :default => false end diff --git a/tools/fix_auth/fix_auth.rb b/tools/fix_auth/fix_auth.rb index 2b55e2853c8..eef981c4b26 100644 --- a/tools/fix_auth/fix_auth.rb +++ b/tools/fix_auth/fix_auth.rb @@ -42,16 +42,16 @@ def generate_password ManageIQ::Password.generate_symmetric("#{cert_dir}/v2_key") rescue Errno::EEXIST => e $stderr.puts - $stderr.puts "Only generate one encryption_key (v2_key) per installation." - $stderr.puts "Chances are you did not want to overwrite this file." - $stderr.puts "If you do this all encrypted secrets in the database will not be readable." - $stderr.puts "Please backup your key and run again." + warn "Only generate one encryption_key (v2_key) per installation." + warn "Chances are you did not want to overwrite this file." + warn "If you do this all encrypted secrets in the database will not be readable." + warn "Please backup your key and run again." 
$stderr.puts raise Errno::EEXIST, e.message end def print_dry_run_warning - method = caller_locations.first.label + method = caller_locations(1..1).first.label # Move this message up to `run` if the other methods add dry-run support puts "** #{method} is executing in dry-run mode, and no actual changes will be made **" if options[:dry_run] end @@ -79,7 +79,7 @@ def fix_database_yml end def load_rails - require File.expand_path("../../../config/application.rb", __FILE__) + require File.expand_path('../../config/application.rb', __dir__) end def set_passwords diff --git a/tools/fix_auth/models.rb b/tools/fix_auth/models.rb index 76a6f378465..519f917a5b7 100644 --- a/tools/fix_auth/models.rb +++ b/tools/fix_auth/models.rb @@ -6,7 +6,7 @@ module FixAuth class FixAuthentication < ActiveRecord::Base include FixAuth::AuthModel self.table_name = "authentications" - self.password_columns = %w(password auth_key) + self.password_columns = %w[password auth_key] self.inheritance_column = :_type_disabled end @@ -24,8 +24,8 @@ class FixConfigurationScript < ActiveRecord::Base class FixMiqDatabase < ActiveRecord::Base include FixAuth::AuthModel self.table_name = "miq_databases" - self.password_columns = %w(registration_http_proxy_server registration_http_proxy_password - session_secret_token csrf_secret_token) + self.password_columns = %w[registration_http_proxy_server registration_http_proxy_password + session_secret_token csrf_secret_token] def self.hardcode(old_value, _new_value) super(old_value, SecureRandom.hex(64)) @@ -35,9 +35,9 @@ def self.hardcode(old_value, _new_value) class FixMiqAeValue < ActiveRecord::Base include FixAuth::AuthModel self.table_name = "miq_ae_values" - self.password_columns = %w(value) + self.password_columns = %w[value] - belongs_to :field, :class_name => "FixMiqAeField", :foreign_key => :field_id + belongs_to :field, :class_name => "FixMiqAeField" # add foreign keys so includes will work def self.select_columns @@ -54,7 +54,7 @@ def self.contenders class FixMiqAeField < ActiveRecord::Base include FixAuth::AuthModel self.table_name = "miq_ae_fields" - self.password_columns = %w(default_value) + self.password_columns = %w[default_value] # only fix columns with password values def self.contenders @@ -66,8 +66,8 @@ class FixMiqRequest < ActiveRecord::Base include FixAuth::AuthConfigModel # don't want to leverage STI self.inheritance_column = :_type_disabled - self.password_columns = %w(options) - self.password_fields = %w(root_password sysprep_password sysprep_domain_password) + self.password_columns = %w[options] + self.password_fields = %w[root_password sysprep_password sysprep_domain_password] self.password_prefix = "password::" self.symbol_keys = true self.table_name = "miq_requests" @@ -77,8 +77,8 @@ class FixMiqRequestTask < ActiveRecord::Base include FixAuth::AuthConfigModel # don't want to leverage STI self.inheritance_column = :_type_disabled - self.password_columns = %w(options) - self.password_fields = %w(root_password sysprep_password sysprep_domain_password) + self.password_columns = %w[options] + self.password_fields = %w[root_password sysprep_password sysprep_domain_password] self.password_prefix = "password::" self.symbol_keys = true self.table_name = "miq_request_tasks" @@ -87,7 +87,7 @@ class FixMiqRequestTask < ActiveRecord::Base class FixSettingsChange < ActiveRecord::Base include FixAuth::AuthModel self.table_name = "settings_changes" - self.password_columns = %w(value) + self.password_columns = %w[value] serialize :value @@ -102,18 +102,17 @@ def 
self.contenders # keys that contain protected fields in the settings def self.password_fields Vmdb::SettingsWalker::PASSWORD_FIELDS + - %w(openssl_verify_mode) + %w[openssl_verify_mode] end end class FixDatabaseYml - attr_accessor :id - attr_accessor :yml + attr_accessor :id, :yml + include FixAuth::AuthConfigModel class << self - attr_accessor :available_columns - attr_accessor :file_name + attr_accessor :available_columns, :file_name def table_name file_name.gsub(".yml", "") @@ -121,7 +120,7 @@ def table_name end def initialize(options = {}) - options.each { |n, v| public_send("#{n}=", v) } + options.each { |n, v| public_send(:"#{n}=", v) } end def load @@ -137,8 +136,8 @@ def save! File.write(id, @yml) end - self.password_fields = %w(password) - self.available_columns = %w(yml) + self.password_fields = %w[password] + self.available_columns = %w[yml] def self.contenders [new(:id => file_name).load] diff --git a/tools/fix_disk_sizes.rb b/tools/fix_disk_sizes.rb index caa00df2ca1..d865a655f27 100755 --- a/tools/fix_disk_sizes.rb +++ b/tools/fix_disk_sizes.rb @@ -21,9 +21,8 @@ def getDinfo(vim) log_header = "MIQ(#{__FILE__})" $log.info("#{log_header} Correcting Disk Sizes...") -disks_by_filename = Disk.all.inject({}) do |h, d| - h[d.filename] = d - h +disks_by_filename = Disk.all.index_by do |d| + d.filename end changed_disks = {} @@ -47,15 +46,15 @@ def getDinfo(vim) next if d.nil? data = { - :new => {:size => di[:capacityInKB].kilobytes, :disk_type => (di[:thinProvisioned] == 'true') ? 'thin' : 'thick', :mode => di[:diskMode]}, + :new => {:size => di[:capacityInKB].kilobytes, :disk_type => di[:thinProvisioned] == 'true' ? 'thin' : 'thick', :mode => di[:diskMode]}, :old => {:size => d.size, :disk_type => d.disk_type, :mode => d.mode} } - if data[:new] != data[:old] - # Only nil out 'size_on_disk' if the provision size does not match - data[:new][:size_on_disk] = nil if data[:new][:size] != data[:old][:size] - changed_disks[d.id] = data - d.update(data[:new]) - end + next unless data[:new] != data[:old] + + # Only nil out 'size_on_disk' if the provision size does not match + data[:new][:size_on_disk] = nil if data[:new][:size] != data[:old][:size] + changed_disks[d.id] = data + d.update(data[:new]) end $log.info("#{log_header} Collecting Disk Sizes for disks under ExtManagementSystem name: [#{e.name}], id: [#{e.id}]...Complete") diff --git a/tools/fix_vm_relationships.rb b/tools/fix_vm_relationships.rb index 8a81f041aeb..67ada215ef0 100755 --- a/tools/fix_vm_relationships.rb +++ b/tools/fix_vm_relationships.rb @@ -9,15 +9,15 @@ rels_to_delete = [] Vm.includes(:all_relationships).each do |v| - begin - v.parent_resource_pool - rescue ActiveRecord::RecordNotFound => err - puts "FIXING - #{v.name} - #{err}" - rels_to_delete += v.all_relationships.to_a.select { |r| r.relationship == "ems_metadata" } - fixed_vms << v.reload - else - puts "OK - #{v.name}" - end + + v.parent_resource_pool +rescue ActiveRecord::RecordNotFound => err + puts "FIXING - #{v.name} - #{err}" + rels_to_delete += v.all_relationships.to_a.select { |r| r.relationship == "ems_metadata" } + fixed_vms << v.reload +else + puts "OK - #{v.name}" + end Relationship.delete(rels_to_delete) diff --git a/tools/import_v4_provision_dialogs.rb b/tools/import_v4_provision_dialogs.rb index 2812a76226b..3777f437444 100755 --- a/tools/import_v4_provision_dialogs.rb +++ b/tools/import_v4_provision_dialogs.rb @@ -4,21 +4,21 @@ # Convert v4-style provisioning dialogs from Ruby files into YAML format # and store in the miq_dialogs table. 
Dir.glob(Rails.root.join("db/fixtures/*.rb")) do |dialog_file| - begin - dialog_text = File.read(dialog_file) - next unless dialog_text.include?('module MiqProvisionDialogs') && dialog_text.include?('def self.dialogs') - dialog_name = File.basename(dialog_file, '.*') + dialog_text = File.read(dialog_file) + next unless dialog_text.include?('module MiqProvisionDialogs') && dialog_text.include?('def self.dialogs') - if MiqDialog.exists?(:name => dialog_name) - puts "Dialog record already exists for name:<#{dialog_name}>" - else - puts "Importing dialog name:<#{dialog_name}>" - load(dialog_file) - MiqDialog.create(:name => dialog_name, :description => dialog_name, :dialog_type => 'MiqProvisionWorkflow', :content => MiqProvisionDialogs.dialogs) - Object.send(:remove_const, :MiqProvisionDialogs) - end - rescue ScriptError, StandardError => err - puts "Failed to import dialog from file <#{dialog_file}>. Error: <#{err}>" + dialog_name = File.basename(dialog_file, '.*') + + if MiqDialog.exists?(:name => dialog_name) + puts "Dialog record already exists for name:<#{dialog_name}>" + else + puts "Importing dialog name:<#{dialog_name}>" + load(dialog_file) + MiqDialog.create(:name => dialog_name, :description => dialog_name, :dialog_type => 'MiqProvisionWorkflow', :content => MiqProvisionDialogs.dialogs) + Object.send(:remove_const, :MiqProvisionDialogs) end +rescue ScriptError, StandardError => err + puts "Failed to import dialog from file <#{dialog_file}>. Error: <#{err}>" + end diff --git a/tools/ldap_ping.rb b/tools/ldap_ping.rb index 471d0ca25db..50763455084 100755 --- a/tools/ldap_ping.rb +++ b/tools/ldap_ping.rb @@ -19,10 +19,10 @@ def self.resolve_ldap_host? end end -ldap_hosts = ::Settings.authentication.ldaphost -username = ::Settings.authentication.bind_dn -password = ::Settings.authentication.bind_pwd -bind_timeout = ::Settings.authentication.bind_timeout.to_i_with_method +ldap_hosts = Settings.authentication.ldaphost +username = Settings.authentication.bind_dn +password = Settings.authentication.bind_pwd +bind_timeout = Settings.authentication.bind_timeout.to_i_with_method if ldap_hosts.to_s.strip.empty? $log.info("LDAP Host cannot be blank") exit @@ -36,22 +36,22 @@ def self.resolve_ldap_host? end ldap_addresses.each do |address| - begin - $log.info("----------------------------------") - $log.info("Binding to LDAP: Host: <#{address}>, User: <#{username}>...") - ldap = MiqLdap.new(:host => address) - raw_ldap = ldap.ldap - raw_ldap.authenticate(username, password) - Timeout.timeout(bind_timeout) do - if raw_ldap.bind - $log.info("Binding to LDAP: Host: <#{address}>, User: <#{username}>... successful") - else - $log.warn("Binding to LDAP: Host: <#{address}>, User: <#{username}>... unsuccessful because <#{raw_ldap.get_operation_result.message}>") - end + + $log.info("----------------------------------") + $log.info("Binding to LDAP: Host: <#{address}>, User: <#{username}>...") + ldap = MiqLdap.new(:host => address) + raw_ldap = ldap.ldap + raw_ldap.authenticate(username, password) + Timeout.timeout(bind_timeout) do + if raw_ldap.bind + $log.info("Binding to LDAP: Host: <#{address}>, User: <#{username}>... successful") + else + $log.warn("Binding to LDAP: Host: <#{address}>, User: <#{username}>... unsuccessful because <#{raw_ldap.get_operation_result.message}>") end - rescue Exception => err - $log.warn("Binding to LDAP: Host: <#{address}>, User: <#{username}>... 
failed because <#{err.message}>") end +rescue Exception => err + $log.warn("Binding to LDAP: Host: <#{address}>, User: <#{username}>... failed because <#{err.message}>") + end $log.info("Done") diff --git a/tools/list_evm_snapshots.rb b/tools/list_evm_snapshots.rb index 876caf60907..7cf80c9dcea 100755 --- a/tools/list_evm_snapshots.rb +++ b/tools/list_evm_snapshots.rb @@ -4,11 +4,11 @@ require 'VMwareWebService/MiqVim' if ARGV.length != 1 - $stderr.puts "Usage: #{$0} ems_name" + warn "Usage: #{$0} ems_name" exit 1 end -ems_name = ARGV[0] +ems_name = ARGV[0] # server = ARGV[0] # username = ARGV[1] # password = ARGV[2] @@ -21,7 +21,7 @@ puts "Done." puts "vim.class: #{vim.class}" - puts "#{vim.server} is #{(vim.isVirtualCenter? ? 'VC' : 'ESX')}" + puts "#{vim.server} is #{vim.isVirtualCenter? ? 'VC' : 'ESX'}" puts "API version: #{vim.apiVersion}" puts @@ -33,7 +33,7 @@ end end rescue => err - puts err.to_s + puts err puts err.backtrace.join("\n") ensure vim.disconnect diff --git a/tools/log_processing/broker_registry_counts.rb b/tools/log_processing/broker_registry_counts.rb index 653af3e312c..53c7d45d20f 100755 --- a/tools/log_processing/broker_registry_counts.rb +++ b/tools/log_processing/broker_registry_counts.rb @@ -1,5 +1,5 @@ #!/usr/bin/env ruby -RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..))) +RAILS_ROOT = File.expand_path(File.join(__dir__, %w[.. ..])) require 'manageiq/gems/pending' require 'miq_logger_processor' @@ -12,6 +12,7 @@ puts "Processing file..." MiqLoggerProcessor.new(logfile).each do |line| next unless line =~ /MiqBrokerObjRegistry\.([^:]+): ([^ ]+) object_id: (\d+)/ + mode, type, object_id = $1, $2, $3 counts[type][object_id] << mode end @@ -22,7 +23,7 @@ counts.keys.sort.each do |type| object_ids = counts[type] - incorrect = object_ids.reject { |_object_id, modes| modes.length == 3 && modes.uniq.sort == %w(registerBrokerObj release unregisterBrokerObj) } + incorrect = object_ids.reject { |_object_id, modes| modes.length == 3 && modes.uniq.sort == %w[registerBrokerObj release unregisterBrokerObj] } incorrect = incorrect.reject { |_object_id, modes| (c = modes.count('registerBrokerObj')) == modes.count('release') && c == modes.count('unregisterBrokerObj') } unreleased, overreleased = incorrect.partition { |_object_id, modes| modes.count('registerBrokerObj') > modes.count('unregisterBrokerObj') } diff --git a/tools/log_processing/ems_refresh_timings.rb b/tools/log_processing/ems_refresh_timings.rb index 70f2af58f4d..ffb31d350fe 100755 --- a/tools/log_processing/ems_refresh_timings.rb +++ b/tools/log_processing/ems_refresh_timings.rb @@ -1,5 +1,5 @@ #!/usr/bin/env ruby -RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..))) +RAILS_ROOT = File.expand_path(File.join(__dir__, %w[.. ..])) require 'manageiq/gems/pending' require 'miq_logger_processor' @@ -12,16 +12,16 @@ def parse_args(argv) logfiles = [] opts = Optimist.options do - banner <<-EOS -Parse EMS Refreshes from a set of evm.log files and filter on a set of -provided conditions. + banner <<~EOS + Parse EMS Refreshes from a set of evm.log files and filter on a set of + provided conditions. -Usage: - ruby ems_refresh_timings.rb [OPTION]... ... + Usage: + ruby ems_refresh_timings.rb [OPTION]... ... 
-Options: -EOS - opt :sort_by, 'Column to sort by, options are start_time, end_time, '\ + Options: + EOS + opt :sort_by, 'Column to sort by, options are start_time, end_time, ' \ 'duration, ems, target_type, and target', :type => :string, :default => 'start_time' opt :target, 'Filter by specific targets, comma separated', :type => :strings @@ -43,6 +43,7 @@ def filter(hash, opts = {}) return false unless opts[:target_type].nil? || opts[:target_type].include?(hash[:target_type]) return false unless opts[:target].nil? || opts[:target].include?(hash[:target]) return false unless opts[:ems].nil? || opts[:ems].include?(hash[:ems]) + true end @@ -66,11 +67,13 @@ def parse_refresh_timings(line, targets) # Find the most recent refresh target for our ems, since there is # only one refresh worker this "has to be" the right one # If this changes in the future we'll have to add a PID lookup here - refresh_target = targets[ems].last - refresh_target = { - :target => "unknown", - :target_type => "unknown" - } if refresh_target.nil? + refresh_target = targets[ems].last + if refresh_target.nil? + refresh_target = { + :target => "unknown", + :target_type => "unknown" + } + end # Add other useful information to the refresh timings refresh_timings[:ems] = ems diff --git a/tools/log_processing/method_call_processor.rb b/tools/log_processing/method_call_processor.rb index e25b3236d5e..8add3657cbc 100755 --- a/tools/log_processing/method_call_processor.rb +++ b/tools/log_processing/method_call_processor.rb @@ -1,6 +1,6 @@ #!/usr/bin/env ruby -RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..))) +RAILS_ROOT = File.expand_path(File.join(__dir__, %w[.. ..])) require 'manageiq/gems/pending' require 'miq_logger_processor' @@ -12,6 +12,7 @@ method_call_hash = MiqLoggerProcessor.new(logfile).each_with_object({}) do |line, hash| next unless line.fq_method + hash[line.fq_method] ||= 0 hash[line.fq_method] += 1 end diff --git a/tools/log_processing/perf_timings.rb b/tools/log_processing/perf_timings.rb index 29760a7fcf7..7e9d0d62b8d 100755 --- a/tools/log_processing/perf_timings.rb +++ b/tools/log_processing/perf_timings.rb @@ -1,6 +1,6 @@ #!/usr/bin/env ruby -RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..))) +RAILS_ROOT = File.expand_path(File.join(__dir__, %w[.. ..])) require 'manageiq/gems/pending' require 'miq_logger_processor' @@ -57,6 +57,7 @@ def dump_csv(type, hashes) MiqLoggerProcessor.new(logfile).each do |line| next unless line =~ /MIQ\((Vm|Host|Storage|EmsCluster|ExtManagementSystem|MiqEnterprise)\.(vim_collect_perf_data|perf_capture_?[a-z]*|perf_process|perf_rollup)\).+Timings:? (\{.+)$/ + target, method, timings = $1, $2, $3 target.downcase! @@ -73,6 +74,7 @@ def dump_csv(type, hashes) unless target == "storage" prev_timings = vim_collect_timings.delete(line.pid) next if prev_timings.nil? + timings = prev_timings.merge(timings) end diff --git a/tools/log_processing/split_log_by_pid_tid.rb b/tools/log_processing/split_log_by_pid_tid.rb index 9892c9b2914..5614ad0dbd0 100755 --- a/tools/log_processing/split_log_by_pid_tid.rb +++ b/tools/log_processing/split_log_by_pid_tid.rb @@ -1,6 +1,6 @@ #!/usr/bin/env ruby -RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..))) +RAILS_ROOT = File.expand_path(File.join(__dir__, %w[.. 
..])) require 'manageiq/gems/pending' require 'miq_logger_processor' diff --git a/tools/log_processing/ui_request_parser.rb b/tools/log_processing/ui_request_parser.rb index 2878f2e464c..c8133075d48 100755 --- a/tools/log_processing/ui_request_parser.rb +++ b/tools/log_processing/ui_request_parser.rb @@ -1,6 +1,6 @@ #!/usr/bin/env ruby -RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..))) +RAILS_ROOT = File.expand_path(File.join(__dir__, %w[.. ..])) require 'manageiq/gems/pending' require 'miq_logger_processor' @@ -11,12 +11,12 @@ puts "Gathering requests..." request_hash = MiqLoggerProcessor.new(logfile).each_with_object({}) do |line, hash| - if match = line.match(/Started\s(\w*)\s\"([\/\w\d\.\-]*)\"/) + if match = line.match(/Started\s(\w*)\s"([\/\w\d.-]*)"/) action, path = match.captures hash[action] ||= {} hash[action][path] ||= 0 hash[action][path] += 1 - elsif match = line[/Processing\sby\s([\d\w\:\.\#\_\-]*)/, 1] + elsif match = line[/Processing\sby\s([\d\w:.\#_-]*)/, 1] hash["Method"] ||= {} hash["Method"][match] ||= 0 hash["Method"][match] += 1 diff --git a/tools/metrics_populate_retro_tags.rb b/tools/metrics_populate_retro_tags.rb index 80fdb15867c..f8538b13cb2 100755 --- a/tools/metrics_populate_retro_tags.rb +++ b/tools/metrics_populate_retro_tags.rb @@ -23,13 +23,13 @@ puts "Processing VM IDs: #{vm_ids.sort.inspect} for time range: #{time_cond.inspect}" vm_perf_recs = MetricRollup.where(time_cond).where(:capture_interval_name => 'hourly', :resource_id => vm_ids) - .includes(:vm => {:taggings => :tag}) - .select(:id, :resource_type, :resource_id, :resource_name, :parent_host_id) + .includes(:vm => {:taggings => :tag}) + .select(:id, :resource_type, :resource_id, :resource_name, :parent_host_id) vm_perf_recs.group_by(&:resource_id).sort.each do |resource_id, perfs| puts "Updating tags in performance data for VM: ID: #{resource_id} => #{perfs.first.resource_name}" MetricRollup.update_all( {:tag_names => perfs.first.vm.perf_tags}, - :id => perfs.collect(&:id) + :id => perfs.collect(&:id) ) end diff --git a/tools/miq_config_sssd_ldap/auth_establish.rb b/tools/miq_config_sssd_ldap/auth_establish.rb index c08740c7503..fa990a79da3 100644 --- a/tools/miq_config_sssd_ldap/auth_establish.rb +++ b/tools/miq_config_sssd_ldap/auth_establish.rb @@ -22,7 +22,7 @@ def authselect_found? 
end def run_auth_select - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") result = AwesomeSpawn.run("authselect select sssd --force") LOGGER.debug("Ran command: #{result.command_line}") @@ -35,7 +35,7 @@ def run_auth_select end def run_auth_config - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") ldapserver = "#{initial_settings[:mode]}://#{initial_settings[:ldaphost][0]}:#{initial_settings[:ldapport]}" params = { diff --git a/tools/miq_config_sssd_ldap/auth_template_files.rb b/tools/miq_config_sssd_ldap/auth_template_files.rb index 551a375bb25..2e16664f315 100644 --- a/tools/miq_config_sssd_ldap/auth_template_files.rb +++ b/tools/miq_config_sssd_ldap/auth_template_files.rb @@ -13,7 +13,7 @@ class AuthTemplateFiles attr_reader :initial_settings, :template_dir def initialize(initial_settings) - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") @initial_settings = initial_settings diff --git a/tools/miq_config_sssd_ldap/cli_config.rb b/tools/miq_config_sssd_ldap/cli_config.rb index d5018153253..893558ed3d0 100644 --- a/tools/miq_config_sssd_ldap/cli_config.rb +++ b/tools/miq_config_sssd_ldap/cli_config.rb @@ -8,7 +8,7 @@ class CliConfig < Cli def parse(args) args.shift if args.first == "--" # Handle when called through script/runner - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") self.opts = Optimist.options(args) do banner "Usage: ruby #{$PROGRAM_NAME} [opts]\n" @@ -125,12 +125,12 @@ def default_port_from_mode def ldaphost_and_ldapport_valid? begin Timeout.timeout(1) do - begin - TCPSocket.new(opts[:ldaphost], opts[:ldapport]).close - return true - rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH - return false - end + + TCPSocket.new(opts[:ldaphost], opts[:ldapport]).close + return true + rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH + return false + end rescue Timeout::Error return false @@ -140,9 +140,10 @@ def ldaphost_and_ldapport_valid? end def bind_dn_and_bind_pwd_valid? - if opts[:mode] == "ldap" || opts[:ldap_role] == true - return false if opts[:bind_dn].nil? || opts[:bind_pwd].nil? + if (opts[:mode] == "ldap" || opts[:ldap_role] == true) && (opts[:bind_dn].nil? || opts[:bind_pwd].nil?) 
+ return false end + true end diff --git a/tools/miq_config_sssd_ldap/cli_convert.rb b/tools/miq_config_sssd_ldap/cli_convert.rb index c065da1b983..da1de6b5145 100644 --- a/tools/miq_config_sssd_ldap/cli_convert.rb +++ b/tools/miq_config_sssd_ldap/cli_convert.rb @@ -6,7 +6,7 @@ class CliConvert < Cli def parse(args) args.shift if args.first == "--" # Handle when called through script/runner - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") self.opts = Optimist.options(args) do banner "Usage: ruby #{$PROGRAM_NAME} [opts]\n" diff --git a/tools/miq_config_sssd_ldap/configure_apache.rb b/tools/miq_config_sssd_ldap/configure_apache.rb index b28c728d42e..64be24a4b78 100644 --- a/tools/miq_config_sssd_ldap/configure_apache.rb +++ b/tools/miq_config_sssd_ldap/configure_apache.rb @@ -6,7 +6,7 @@ class ConfigureApacheError < StandardError; end class ConfigureApache < AuthTemplateFiles def configure - LOGGER.debug("Invoked #{self.class}\##{__method__} template_dir #{template_dir}") + LOGGER.debug("Invoked #{self.class}##{__method__} template_dir #{template_dir}") create_files update_realm end @@ -14,7 +14,7 @@ def configure private def create_files - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") begin FileUtils.cp("#{template_dir}#{PAM_CONF_DIR}/httpd-auth", "#{PAM_CONF_DIR}/httpd-auth") @@ -28,7 +28,7 @@ def create_files end def update_realm - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") begin miq_ext_auth = File.read("#{HTTPD_CONF_DIR}/manageiq-external-auth.conf") diff --git a/tools/miq_config_sssd_ldap/configure_appliance_settings.rb b/tools/miq_config_sssd_ldap/configure_appliance_settings.rb index 422bb4ec32c..e1e2fc3c246 100644 --- a/tools/miq_config_sssd_ldap/configure_appliance_settings.rb +++ b/tools/miq_config_sssd_ldap/configure_appliance_settings.rb @@ -11,7 +11,7 @@ def initialize(initial_settings) end def configure - LOGGER.debug("Invoked #{self.class}\##{__method__} initial_settings #{initial_settings} ") + LOGGER.debug("Invoked #{self.class}##{__method__} initial_settings #{initial_settings} ") ldap_role = initial_settings[:ldap_role].nil? ? Settings.authentication.ldap_role : initial_settings[:ldap_role] diff --git a/tools/miq_config_sssd_ldap/configure_database.rb b/tools/miq_config_sssd_ldap/configure_database.rb index 8dd0880df57..0e985db1a8a 100644 --- a/tools/miq_config_sssd_ldap/configure_database.rb +++ b/tools/miq_config_sssd_ldap/configure_database.rb @@ -10,13 +10,13 @@ class ConfigureDatabase attr_reader :sssd_domain def initialize - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") @sssd_domain = domain_from_sssd LOGGER.debug("#{__method__} sssd_domain #{sssd_domain}") end def change_userids_to_upn - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") LOGGER.debug("Normalizing userids to User Principal Name (UPN)") return unless CHANGE_MODES.include?(Settings.authentication.to_hash[:mode]) @@ -47,7 +47,7 @@ def update_the_userid(user) end def save_new_or_delete_duplicate_userid(user) - LOGGER.debug("Invoked #{self.class}\##{__method__} userid #{user.userid}") + LOGGER.debug("Invoked #{self.class}##{__method__} userid #{user.userid}") check_duplicate_u = find_user(user.userid) if check_duplicate_u.nil? 
|| check_duplicate_u.id == user.id LOGGER.debug("Saving userid #{user.userid}") diff --git a/tools/miq_config_sssd_ldap/configure_selinux.rb b/tools/miq_config_sssd_ldap/configure_selinux.rb index 563bae5f40d..b14d8190f0c 100644 --- a/tools/miq_config_sssd_ldap/configure_selinux.rb +++ b/tools/miq_config_sssd_ldap/configure_selinux.rb @@ -11,7 +11,7 @@ def initialize(initial_settings) end def configure - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") enable_non_standard_ldap_port(initial_settings[:ldapport]) establish_permission("allow_httpd_mod_auth_pam") establish_permission("httpd_dbus_sssd") @@ -20,7 +20,7 @@ def configure private def enable_non_standard_ldap_port(port_number) - LOGGER.debug("Invoked #{self.class}\##{__method__}(#{port_number})") + LOGGER.debug("Invoked #{self.class}##{__method__}(#{port_number})") return if %w[389 636].include?(port_number) params = { @@ -43,8 +43,8 @@ def enable_non_standard_ldap_port(port_number) end def establish_permission(permission_name) - LOGGER.debug("Invoked #{self.class}\##{__method__}(#{permission_name})") - params = {:P => [permission_name, "on"] } + LOGGER.debug("Invoked #{self.class}##{__method__}(#{permission_name})") + params = {:P => [permission_name, "on"]} result = AwesomeSpawn.run("setsebool", :params => params) LOGGER.debug("Ran command: #{result.command_line}") diff --git a/tools/miq_config_sssd_ldap/configure_sssd_rules.rb b/tools/miq_config_sssd_ldap/configure_sssd_rules.rb index fbf66b7ca04..182a971c965 100644 --- a/tools/miq_config_sssd_ldap/configure_sssd_rules.rb +++ b/tools/miq_config_sssd_ldap/configure_sssd_rules.rb @@ -7,7 +7,7 @@ class ConfigureSssdRules CFG_RULES_FILE = "/usr/share/sssd/cfg_rules.ini".freeze def self.disable_tls - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") message = "Converting from unsecured LDAP authentication to SSSD. This is dangerous. 
Passwords are not encrypted" puts(message) diff --git a/tools/miq_config_sssd_ldap/miqldap_configuration.rb b/tools/miq_config_sssd_ldap/miqldap_configuration.rb index f105489a222..229eff775f1 100644 --- a/tools/miq_config_sssd_ldap/miqldap_configuration.rb +++ b/tools/miq_config_sssd_ldap/miqldap_configuration.rb @@ -56,7 +56,7 @@ def check_for_tls_certs end def current_authentication_settings - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") settings = Settings.authentication.to_hash LOGGER.debug("Current authentication settings: #{settings}") diff --git a/tools/miq_config_sssd_ldap/services.rb b/tools/miq_config_sssd_ldap/services.rb index da64689dcda..ab371e44d9a 100644 --- a/tools/miq_config_sssd_ldap/services.rb +++ b/tools/miq_config_sssd_ldap/services.rb @@ -3,7 +3,7 @@ module MiqConfigSssdLdap class Services def self.restart - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") LOGGER.debug("\nRestarting httpd, if running ...") httpd_service = LinuxAdmin::Service.new("httpd") diff --git a/tools/miq_config_sssd_ldap/sssd_conf.rb b/tools/miq_config_sssd_ldap/sssd_conf.rb index 67809652bd2..79add1d86dd 100644 --- a/tools/miq_config_sssd_ldap/sssd_conf.rb +++ b/tools/miq_config_sssd_ldap/sssd_conf.rb @@ -14,7 +14,7 @@ class SssdConf < AuthTemplateFiles attr_reader :src_path, :dest_path, :ldap_uri, :ldap_search_base, :initial_settings, :sssd_conf_contents def initialize(initial_settings) - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") super @@ -26,7 +26,7 @@ def initialize(initial_settings) end def update - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") [Domain, Sssd, Pam, Ifp].each do |section_class| section = section_class.new(initial_settings) @@ -39,10 +39,10 @@ def update private def create - LOGGER.debug("Invoked #{self.class}\##{__method__}") + LOGGER.debug("Invoked #{self.class}##{__method__}") begin - File.write(dest_path, ERB.new(File.read(src_path), nil, '-').result(binding)) + File.write(dest_path, ERB.new(File.read(src_path), :trim_mode => '-').result(binding)) FileUtils.chmod(0o600, dest_path) rescue Errno::ENOENT, IndexError => e LOGGER.fatal(e.message) diff --git a/tools/miq_config_sssd_ldap/sssd_conf/pam.rb b/tools/miq_config_sssd_ldap/sssd_conf/pam.rb index 96db797cc11..320cc4d5fe2 100644 --- a/tools/miq_config_sssd_ldap/sssd_conf/pam.rb +++ b/tools/miq_config_sssd_ldap/sssd_conf/pam.rb @@ -3,7 +3,7 @@ module MiqConfigSssdLdap class Pam < Common def initialize(initial_settings) - super(%w(pam_app_services pam_initgroups_scheme), initial_settings) + super(%w[pam_app_services pam_initgroups_scheme], initial_settings) end def pam_app_services diff --git a/tools/pg_inspector.rb b/tools/pg_inspector.rb index f14d5d92cea..36fa7e55b0e 100755 --- a/tools/pg_inspector.rb +++ b/tools/pg_inspector.rb @@ -5,7 +5,7 @@ if __FILE__ == $PROGRAM_NAME $LOAD_PATH.push(File.expand_path(__dir__)) - $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w(.. lib)))) + $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w[.. 
lib]))) end require 'pg_inspector/util' diff --git a/tools/pg_inspector/active_connections_to_human.rb b/tools/pg_inspector/active_connections_to_human.rb index a4e5b66a816..5ea7a81452f 100644 --- a/tools/pg_inspector/active_connections_to_human.rb +++ b/tools/pg_inspector/active_connections_to_human.rb @@ -77,11 +77,11 @@ def push_new_server(result, activity) def filter_activity_for_server(activity) activity.select do |k, _v| - %w(server_id + %w[server_id name pid zone_id - zone_name) + zone_name] .include?(k) end end @@ -98,10 +98,10 @@ def push_new_worker(result, activity) def filter_activity_for_worker(activity) activity.select do |k, _v| - %w(worker_id + %w[worker_id server_id class_name - pid) + pid] .include?(k) end end @@ -113,7 +113,7 @@ def push_connection(result, activity) def filter_activity_for_connection(activity) activity.select do |k, _v| - %w(worker_id + %w[worker_id server_id datid datname @@ -129,7 +129,7 @@ def filter_activity_for_connection(activity) state backend_xid backend_xmin - query) + query] .include?(k) end end @@ -157,6 +157,7 @@ def server_activity?(activity) elsif old_application_name?(activity) && activity["application_name"].include?(" Server") return true end + false end @@ -166,6 +167,7 @@ def worker_activity?(activity) elsif old_application_name?(activity) && !activity["application_name"].include?(" Server") return true end + false end @@ -190,7 +192,7 @@ def process_miq_activity(activity) end def to_utc(time_str) - return Time.parse(time_str).utc.to_s if time_str + Time.parse(time_str).utc.to_s if time_str end def process_miq_activity_application_name(activity) @@ -203,7 +205,7 @@ def process_miq_activity_application_name(activity) # MIQ|||||| # Both previous and current is truncated up to 64 characters if activity["application_name"].end_with?("...") - $stderr.puts("Warning: the application_name #{activity["application_name"]} is incomplete.") + warn("Warning: the application_name #{activity["application_name"]} is incomplete.") end if new_application_name?(activity) _, pid, server_id, worker_id, zone_id, class_name, zone_name = activity["application_name"].split("|") @@ -236,9 +238,9 @@ def process_miq_info_file(file_name) def process_miq_server(server) server["server_id"] = server["id"].to_i server.select do |k, _v| - %w(server_id + %w[server_id hostname - ipaddress) + ipaddress] .include?(k) end end @@ -263,6 +265,7 @@ def split_id(id) def compress_id(id) if $old_version return nil if id.nil? + region_number, short_id = split_id(id) region_number.zero? ? short_id.to_s : "#{region_number}#{COMPRESSED_ID_SEPARATOR}#{short_id}" else @@ -273,7 +276,8 @@ def compress_id(id) def uncompress_id(id) if $old_version return nil if id.nil? - id.to_s =~ RE_COMPRESSED_ID ? ($1.to_i * rails_sequence_factor + $2.to_i) : id.to_i + + id.to_s =~ RE_COMPRESSED_ID ? 
(($1.to_i * rails_sequence_factor) + $2.to_i) : id.to_i else Class.new.include(ActiveRecord::IdRegions).uncompress_id(id) end
diff --git a/tools/pg_inspector/active_connections_to_yaml.rb b/tools/pg_inspector/active_connections_to_yaml.rb
index a4b3fb8f72c..f85bc1fe0aa 100644
--- a/tools/pg_inspector/active_connections_to_yaml.rb
+++ b/tools/pg_inspector/active_connections_to_yaml.rb
@@ -9,14 +9,14 @@ class ActiveConnectionsYAML < PgInspectorOperation
     HELP_MSG_SHORT = "Dump active connections to YAML file".freeze
     def parse_options(args)
       self.options = Optimist.options(args) do
-        banner <<-BANNER
+        banner <<~BANNER
 
-#{HELP_MSG_SHORT}
+          #{HELP_MSG_SHORT}
 
-Use password in PGPASSWORD environment variable if no password file given.
+          Use password in PGPASSWORD environment variable if no password file given.
 
-Options:
-BANNER
+          Options:
+        BANNER
         opt(:pg_host, "PostgreSQL host name or address",
             :type => :string, :short => "s", :default => "127.0.0.1")
         opt(:port, "PostgreSQL server port",
@@ -84,11 +84,11 @@ def connect_pg_server
     end
 
     def rows_in_table(conn, table_name, order_by = nil)
-      query = <<-SQL
-SELECT *
-FROM #{table_name}
-#{"ORDER BY #{order_by}" if order_by}
-SQL
+      query = <<~SQL
+        SELECT *
+        FROM #{table_name}
+        #{"ORDER BY #{order_by}" if order_by}
+      SQL
       conn.exec_params(query)
     rescue PG::Error => e
       Util.error_exit(e)
@@ -107,9 +107,10 @@ def filter_by_application_name(rows_array)
       rows_array.each do |row|
         next unless row["application_name"].end_with?('..')
+
         error_msg = "The application name for MIQ server/worker: #{row["application_name"]} is truncated"
         if options[:ignore_error]
-          $stderr.puts error_msg
+          warn error_msg
         else
           raise error_msg
         end
diff --git a/tools/pg_inspector/cli.rb b/tools/pg_inspector/cli.rb
index 340ae23b0d7..00435cd422e 100644
--- a/tools/pg_inspector/cli.rb
+++ b/tools/pg_inspector/cli.rb
@@ -8,6 +8,7 @@ module PgInspector
   class Cli
     attr_accessor :cmd
+
     SUB_COMMANDS = {
       :connections => ActiveConnectionsYAML,
       :servers     => ServersYAML,
@@ -23,19 +24,19 @@ def parse(args)
       args.shift if args.first == "--" # Handle when called through script/runner
       op_help = operation_help
       Optimist.options(args) do
-        banner <<-BANNER
-pg_inspector is a tool to inspect ManageIQ process caused deadlock in PostgreSQL.
+        banner <<~BANNER
+          pg_inspector is a tool to inspect ManageIQ process caused deadlock in PostgreSQL.
 
-  Usage:
-    #{$PROGRAM_NAME} operation options_for_operation
-    #{$PROGRAM_NAME} options
+          Usage:
+            #{$PROGRAM_NAME} operation options_for_operation
+            #{$PROGRAM_NAME} options
 
-  Operations:
-#{op_help}
-  Use `pg_inspector.rb operation -h' to see help for each operation
+          Operations:
+          #{op_help}
+          Use `pg_inspector.rb operation -h' to see help for each operation
 
-  Options:
-BANNER
+          Options:
+        BANNER
         stop_on(SUB_COMMANDS.keys.map(&:to_s))
       end
       current_operation = args.shift
diff --git a/tools/pg_inspector/connection_locks.rb b/tools/pg_inspector/connection_locks.rb
index b41b5516976..1d9c9ea6fa8 100644
--- a/tools/pg_inspector/connection_locks.rb
+++ b/tools/pg_inspector/connection_locks.rb
@@ -47,11 +47,11 @@ def merge_lock_and_connection
       self.blocked_connections = []
       connections["connections"].each do |conn|
         conn["blocked_by"] = find_lock_blocking_spid(conn["spid"])
-        unless conn["blocked_by"].empty?
-          some_connection_blocked = true
-          puts "Connection #{conn["spid"]} is blocked by #{conn["blocked_by"]}."
-          blocked_connections << {conn["spid"] => conn["blocked_by"]}
-        end
+        next if conn["blocked_by"].empty?
+
+        some_connection_blocked = true
+        puts "Connection #{conn["spid"]} is blocked by #{conn["blocked_by"]}."
+        blocked_connections << {conn["spid"] => conn["blocked_by"]}
       end
       unless some_connection_blocked
         puts "Every connection is OK and not blocked. No need to generate lock graph."
@@ -83,6 +83,7 @@ def process_lock_file
 
     def connection_spids_blocking_lock(lock)
       return if lock["granted"] == "t"
+
       blocking_locks = blocking_lock_relation(lock).select do |l|
         lock["spid"] != l["spid"] && l["granted"] == "t"
@@ -101,7 +102,7 @@ def blocking_lock_relation(lock)
       when "tuple"
         select_lock(lock, "database", "relation", "page", "tuple")
       else
-        $stderr.puts("Warning: unexpected lock type #{lock["locktype"]} encountered.")
+        warn("Warning: unexpected lock type #{lock["locktype"]} encountered.")
       end
     end
diff --git a/tools/pg_inspector/inspect_pg.rb b/tools/pg_inspector/inspect_pg.rb
index c76c5f62726..8381c2a689a 100755
--- a/tools/pg_inspector/inspect_pg.rb
+++ b/tools/pg_inspector/inspect_pg.rb
@@ -3,7 +3,7 @@
 ## Run pg_inspector using parameters given in local database.yml and v2 key.
 
 if __FILE__ == $PROGRAM_NAME
-  $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w(.. .. lib))))
+  $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w[.. .. lib])))
 end
 
 require 'yaml'
diff --git a/tools/pg_inspector/inspect_pg_server.rb b/tools/pg_inspector/inspect_pg_server.rb
index f80c57be988..711431e7bc1 100755
--- a/tools/pg_inspector/inspect_pg_server.rb
+++ b/tools/pg_inspector/inspect_pg_server.rb
@@ -4,7 +4,7 @@
 ## Run pg_inspector servers using parameters given in local database.yml and v2 key.
 
 if __FILE__ == $PROGRAM_NAME
-  $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w(.. .. lib))))
+  $LOAD_PATH.push(File.expand_path(File.join(__dir__, %w[.. .. lib])))
 end
 
 require 'yaml'
diff --git a/tools/pg_inspector/servers_to_yaml.rb b/tools/pg_inspector/servers_to_yaml.rb
index e7ca9f8a261..0285684bede 100644
--- a/tools/pg_inspector/servers_to_yaml.rb
+++ b/tools/pg_inspector/servers_to_yaml.rb
@@ -11,16 +11,16 @@ class ServersYAML < PgInspectorOperation
 
     def parse_options(args)
       self.options = Optimist.options(args) do
-        banner <<-BANNER
+        banner <<~BANNER
 
-#{HELP_MSG_SHORT}
+          #{HELP_MSG_SHORT}
 
-Use password in PGPASSWORD environment if no password file given.
-The output file will overwrite the previous one with same name after
-successfully dumped.
+          Use password in PGPASSWORD environment if no password file given.
+          The output file will overwrite the previous one with same name after
+          successfully dumped.
 
-Options:
-BANNER
+          Options:
+        BANNER
         opt(:pg_host, "PostgreSQL host name or address",
             :type => :string, :short => "s", :default => "127.0.0.1")
         opt(:port, "PostgreSQL server port",
@@ -67,10 +67,10 @@ def connect_pg_server
     end
 
     def table_from_db_conn(conn, table_name)
-      query = <<-SQL
-SELECT *
-FROM #{table_name};
-SQL
+      query = <<~SQL
+        SELECT *
+        FROM #{table_name};
+      SQL
       result = []
       res = conn.exec_params(query)
       res.each do |row|
diff --git a/tools/pg_inspector/util.rb b/tools/pg_inspector/util.rb
index 9e02eed72f5..c4a15914617 100644
--- a/tools/pg_inspector/util.rb
+++ b/tools/pg_inspector/util.rb
@@ -11,12 +11,12 @@ def self.dump_to_yml_file(obj, name, output)
     end
 
     def self.error_exit(e, exit_code = 1)
-      $stderr.puts e.message
+      warn e.message
       exit(exit_code)
     end
 
     def self.error_msg_exit(e_msg, exit_code = 1)
-      $stderr.puts e_msg
+      warn e_msg
       exit(exit_code)
     end
   end
diff --git a/tools/purge_archived_vms.rb b/tools/purge_archived_vms.rb
index e43c79b10f3..e51a3d0e552 100755
--- a/tools/purge_archived_vms.rb
+++ b/tools/purge_archived_vms.rb
@@ -22,15 +22,15 @@
 
 query.archived.find_in_batches do |vms|
   vms.each do |vm|
-    begin
-      archived += 1
-      unless REPORT_ONLY
-        $log.info("Deleting archived VM '#{vm.name}' (id #{vm.id})")
-        vm.destroy
-      end
-    rescue => err
-      $log.log_backtrace(err)
+
+    archived += 1
+    unless REPORT_ONLY
+      $log.info("Deleting archived VM '#{vm.name}' (id #{vm.id})")
+      vm.destroy
     end
+  rescue => err
+    $log.log_backtrace(err)
+  end
   end
diff --git a/tools/purge_metrics.rb b/tools/purge_metrics.rb
index d7976b93733..200716b2e4a 100755
--- a/tools/purge_metrics.rb
+++ b/tools/purge_metrics.rb
@@ -2,7 +2,7 @@
 require File.expand_path('../config/environment', __dir__)
 require 'optimist'
 
-MODES = %w(count purge)
+MODES = %w[count purge]
 
 ARGV.shift if ARGV[0] == '--' # if invoked with rails runner
 opts = Optimist.options do
@@ -30,7 +30,7 @@ def log(msg)
 log("Purge Counts")
 dates = {}
 counts = {}
-%w(realtime hourly daily).each do |interval|
+%w[realtime hourly daily].each do |interval|
   dates[interval] = opts[interval.to_sym].to_i_with_method.seconds.ago.utc
   counts[interval] = Metric::Purging.purge_count(dates[interval], interval)
   log("  #{"#{interval.titleize}:".ljust(9)} #{formatter.number_with_delimiter(counts[interval])}")
@@ -41,7 +41,7 @@ def log(msg)
 log("Purging...")
 require 'ruby-progressbar'
-%w(realtime hourly daily).each do |interval|
+%w[realtime hourly daily].each do |interval|
   pbar = ProgressBar.create(:title => interval.titleize, :total => counts[interval], :autofinish => false)
 
   if counts[interval] > 0
     Metric::Purging.purge(dates[interval], interval, opts[:window], opts[:limit]) do |count, _|
diff --git a/tools/purge_miq_report_results.rb b/tools/purge_miq_report_results.rb
index ea8d1aa2654..c6bf1683c20 100755
--- a/tools/purge_miq_report_results.rb
+++ b/tools/purge_miq_report_results.rb
@@ -2,7 +2,7 @@
 require File.expand_path('../config/environment', __dir__)
 require 'optimist'
 
-MODES = %w(count purge)
+MODES = %w[count purge]
 
 ARGV.shift if ARGV[0] == '--' # if invoked with rails runner
 opts = Optimist.options do
diff --git a/tools/rebuild_provision_request.rb b/tools/rebuild_provision_request.rb
index 5d9f459e4ae..b2a9e1856f6 100755
--- a/tools/rebuild_provision_request.rb
+++ b/tools/rebuild_provision_request.rb
@@ -21,30 +21,30 @@
 ARGV.shift if ARGV[0] == '--'
 
 opts = Optimist.options do
-  banner <<-EOS
+  banner <<~EOS
 
-Reconstruct provision request parameters based on an existing request
+    Reconstruct provision request parameters based on an existing request
 
-Common Examples\n\t
-Output the hash parameters of a request
+    Common Examples\n\t
+    Output the hash parameters of a request
 
-    #{PROGRAM_STRING} --request-id=3
+      #{PROGRAM_STRING} --request-id=3
 
-Show the hash parameters and the console command needed to rerun a request
+    Show the hash parameters and the console command needed to rerun a request
 
-    #{PROGRAM_STRING} --request-id=3 --console
+      #{PROGRAM_STRING} --request-id=3 --console
 
-Show the hash parameters and rerun the request
+    Show the hash parameters and rerun the request
 
-    #{PROGRAM_STRING} --request-id=3 --run-it
+      #{PROGRAM_STRING} --request-id=3 --run-it
 
-Show a list of 5 recent requests
+    Show a list of 5 recent requests
 
-    #{PROGRAM_STRING} --last-requests
+      #{PROGRAM_STRING} --last-requests
 
-Help! #{PROGRAM_STRING} --help
+    Help! #{PROGRAM_STRING} --help
 
-Usage: #{PROGRAM_STRING} [--options]\n\nOptions:\n\t
+    Usage: #{PROGRAM_STRING} [--options]\n\nOptions:\n\t
 EOS
 
   opt :api_host, "The hostname to run the api call against", :default => 'localhost', :type => :string
@@ -67,7 +67,7 @@
   Optimist.die :count, "must be greater than 0" if opts[:count] <= 0
 else
   Optimist.die :request_id, "must be a number greater than 0" if opts[:request_id] <= 0
-  Optimist.die :output, "must be either hash or json" unless %w(hash json).include?(opts[:output])
+  Optimist.die :output, "must be either hash or json" unless %w[hash json].include?(opts[:output])
 end
 
 class Tab
@@ -318,6 +318,7 @@ def build_url
 def find_request_options
   @provision_options = MiqProvisionRequest.find_by(:id => @request_id)
   return no_provision_found if @provision_options.nil?
+
   @provision_options
 end
diff --git a/tools/remove_grouping_from_report_results.rb b/tools/remove_grouping_from_report_results.rb
index 0c425446b5b..16a5adebdeb 100755
--- a/tools/remove_grouping_from_report_results.rb
+++ b/tools/remove_grouping_from_report_results.rb
@@ -34,29 +34,30 @@
 fixed = 0
 
 MiqReportResult.find_each(:batch_size => opts[:batch_size]).with_index do |rr, i|
-  begin
-    break if opts[:count].positive? && i == opts[:count]
-    total += 1
-
-    next if rr.report.nil? || rr.report.extras.nil?
-
-    if rr.report.extras.key?(:grouping)
-      rr.report.extras.except!(:grouping)
-      rr.save!
-      if rr.reload.report.extras.key?(:grouping)
-        puts "MiqReportResult: #{rr.id} could NOT be fixed"
-      else
-        puts "MiqReportResult: #{rr.id} fixed"
-        fixed += 1
-      end
+
+  break if opts[:count].positive? && i == opts[:count]
+
+  total += 1
+
+  next if rr.report.nil? || rr.report.extras.nil?
+
+  if rr.report.extras.key?(:grouping)
+    rr.report.extras.except!(:grouping)
+    rr.save!
+    if rr.reload.report.extras.key?(:grouping)
+      puts "MiqReportResult: #{rr.id} could NOT be fixed"
     else
-      puts "MiqReportResult: #{rr.id} doesn't need fixing"
+      puts "MiqReportResult: #{rr.id} fixed"
+      fixed += 1
     end
-  rescue => err
-    puts "\nWarning: Rolling back all changes since an error occurred on MiqReportResult with id: #{rr.try(:id)}: #{err.message}"
-    ActiveRecord::Base.connection.rollback_transaction unless opts[:dry_run]
-    exit 1
+  else
+    puts "MiqReportResult: #{rr.id} doesn't need fixing"
   end
+rescue => err
+  puts "\nWarning: Rolling back all changes since an error occurred on MiqReportResult with id: #{rr.try(:id)}: #{err.message}"
+  ActiveRecord::Base.connection.rollback_transaction unless opts[:dry_run]
+  exit 1
+
 end
 
 ActiveRecord::Base.connection.commit_transaction unless opts[:dry_run]
diff --git a/tools/rm_evm_snapshots.rb b/tools/rm_evm_snapshots.rb
index bb2106979bf..93340ecf92a 100755
--- a/tools/rm_evm_snapshots.rb
+++ b/tools/rm_evm_snapshots.rb
@@ -4,11 +4,11 @@
 require 'VMwareWebService/MiqVim'
 
 if ARGV.length != 1
-  $stderr.puts "Usage: #{$0} ems_name"
+  warn "Usage: #{$0} ems_name"
   exit 1
 end
-ems_name   = ARGV[0]
+ems_name = ARGV[0]
 
 # server   = ARGV[0]
 # username = ARGV[1]
 # password = ARGV[2]
@@ -21,26 +21,26 @@
   puts "Done."
 
   puts "vim.class: #{vim.class}"
-  puts "#{vim.server} is #{(vim.isVirtualCenter? ? 'VC' : 'ESX')}"
+  puts "#{vim.server} is #{vim.isVirtualCenter? ? 'VC' : 'ESX'}"
   puts "API version: #{vim.apiVersion}"
   puts
 
   vim.virtualMachinesByMor.each_value do |vm|
     miqVm = vim.getVimVmByMor(vm['MOR'])
-    if miqVm.hasSnapshot?(MiqVimVm::EVM_SNAPSHOT_NAME)
-      sso = miqVm.searchSsTree(miqVm.snapshotInfo['rootSnapshotList'], 'name', MiqVimVm::EVM_SNAPSHOT_NAME)
-      unless sso
-        $stderr.puts "#{miqVm.name}: could not determine the MOR of the EVM snapshot. Skipping."
-        next
-      end
-      puts "Deleting EVM snapshot for #{miqVm.name}..."
-      miqVm.removeSnapshot(sso['snapshot'])
-      puts "done."
-      puts
+    next unless miqVm.hasSnapshot?(MiqVimVm::EVM_SNAPSHOT_NAME)
+
+    sso = miqVm.searchSsTree(miqVm.snapshotInfo['rootSnapshotList'], 'name', MiqVimVm::EVM_SNAPSHOT_NAME)
+    unless sso
+      warn "#{miqVm.name}: could not determine the MOR of the EVM snapshot. Skipping."
+      next
     end
+    puts "Deleting EVM snapshot for #{miqVm.name}..."
+    miqVm.removeSnapshot(sso['snapshot'])
+    puts "done."
+    puts
   end
 rescue => err
-  puts err.to_s
+  puts err
   puts err.backtrace.join("\n")
 ensure
   vim.disconnect
diff --git a/tools/show_host_file_entries_for_vnc.rb b/tools/show_host_file_entries_for_vnc.rb
index e7a92376fe1..3a8910fc193 100755
--- a/tools/show_host_file_entries_for_vnc.rb
+++ b/tools/show_host_file_entries_for_vnc.rb
@@ -3,12 +3,12 @@
 
 ManageIQ::Providers::Vmware::Host.all.each do |host|
   if host.ipaddress.blank?
-    STDERR.puts "Host ID=#{host.id.inspect}, Name=#{host.name.inspect} has no IP Address"
+    warn "Host ID=#{host.id.inspect}, Name=#{host.name.inspect} has no IP Address"
     next
   end
 
   if host.guid.blank?
-    STDERR.puts "Host ID=#{host.id.inspect}, Name=#{host.name.inspect} has no GUID"
+    warn "Host ID=#{host.id.inspect}, Name=#{host.name.inspect} has no GUID"
     next
   end
 
@@ -19,7 +19,7 @@
   begin
     ipaddress = TCPSocket.gethostbyname(host.ipaddress.split(',').first).last
   rescue SocketError => err
-    STDERR.puts "Cannot resolve hostname(#{host.ipaddress}) for Host ID=#{host.id.inspect}, Name=#{host.name.inspect} because #{err.message}"
+    warn "Cannot resolve hostname(#{host.ipaddress}) for Host ID=#{host.id.inspect}, Name=#{host.name.inspect} because #{err.message}"
     next
   end
 end
diff --git a/tools/vim_collect_inventory.rb b/tools/vim_collect_inventory.rb
index 0b2f02f79bc..121c5ccc4c1 100755
--- a/tools/vim_collect_inventory.rb
+++ b/tools/vim_collect_inventory.rb
@@ -11,13 +11,13 @@
   opt :bypass, "Bypass broker usage", :type => :boolean
   opt :dir,    "Output directory",    :default => "."
 end
-Optimist.die :ip, "is an invalid format" unless opts[:ip] =~ /^\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}$/
+Optimist.die :ip, "is an invalid format" unless /^\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}$/.match?(opts[:ip])
 
 def process(accessor, dir)
   puts "Reading #{accessor}..."
   data = yield
   puts "Writing #{accessor}..."
-  File.open(File.join(dir, "#{accessor}.yml"), "w") { |f| f.write(data.to_yaml(:SortKeys => true)) }
+  File.write(File.join(dir, "#{accessor}.yml"), data.to_yaml(:SortKeys => true))
   data
 end
 
@@ -40,35 +40,35 @@ def process(accessor, dir)
 
   require 'VMwareWebService/MiqVim'
   vim = MiqVim.new(:server => opts[:ip], :username => opts[:user], :password => opts[:pass])
 
-  VC_ACCESSORS.each do |accessor, type|
+  VC_ACCESSORS.each do |accessor, _type|
     process(accessor, dir) { vim.send(accessor) }
   end
 
   process(:storageDevice, dir) do
     data = {}
     vim.hostSystemsByMor.keys.each do |host_mor|
-      begin
-        vim_host = vim.getVimHostByMor(host_mor)
-        data[host_mor] = vim_host.storageDevice
-      ensure
-        vim_host.release if vim_host rescue nil
-      end
+
+      vim_host = vim.getVimHostByMor(host_mor)
+      data[host_mor] = vim_host.storageDevice
+    ensure
+      vim_host.release if vim_host rescue nil
+
     end
     data
   end
 
   process(:getAllCustomizationSpecs, dir) do
-    begin
-      vim_csm = vim.getVimCustomizationSpecManager
-      vim_csm.getAllCustomizationSpecs
-    rescue RuntimeError => err
-      raise unless err.message.include?("not supported on this system")
-      []
-    ensure
-      vim_csm.release if vim_csm rescue nil
-    end
-  end
+    vim_csm = vim.getVimCustomizationSpecManager
+    vim_csm.getAllCustomizationSpecs
+  rescue RuntimeError => err
+    raise unless err.message.include?("not supported on this system")
+
+    []
+  ensure
+    vim_csm.release if vim_csm rescue nil
+
+  end
 ensure
   vim.release unless vim.nil? rescue nil
 end
diff --git a/tools/vim_collect_perf_history.rb b/tools/vim_collect_perf_history.rb
index ffcb080a1de..92625c847ae 100755
--- a/tools/vim_collect_perf_history.rb
+++ b/tools/vim_collect_perf_history.rb
@@ -11,7 +11,7 @@
   opt :bypass, "Bypass broker usage", :type => :boolean
   opt :dir,    "Output directory",    :default => "."
 end
-Optimist.die :ip, "is an invalid format" unless opts[:ip] =~ /^\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}$/
+Optimist.die :ip, "is an invalid format" unless /^\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}$/.match?(opts[:ip])
 
 targets = eval(ARGV.first) rescue nil
 if targets.nil? || !targets.kind_of?(Array) || targets.empty?
@@ -60,7 +60,7 @@ def process(accessor, dir)
   puts "Reading #{accessor}..."
   data = yield
   puts "Writing #{accessor}..."
-  File.open(File.join(dir, "#{accessor}.yml"), "w") { |f| f.write(data.to_yaml(:SortKeys => true)) }
+  File.write(File.join(dir, "#{accessor}.yml"), data.to_yaml(:SortKeys => true))
   data
 end
diff --git a/tools/vm_retirement.rb b/tools/vm_retirement.rb
index 87cfeb83e3a..640d0e10d77 100755
--- a/tools/vm_retirement.rb
+++ b/tools/vm_retirement.rb
@@ -57,7 +57,8 @@ def parse_command_line_option(arg)
     opt, value = arg.split('=')
   else
     raise "No Value Provided for Command Line Option: #{arg.inspect}" unless $ARGV.length > 0
-    opt = arg
+
+    opt = arg
     value = $ARGV.shift
   end
   return opt, value
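Not part of the diff above: a minimal standalone sketch of the recurring refactor these tool scripts apply (dropping the redundant `begin` so the method or `do...end` block owns its own `rescue`/`ensure`, and preferring `warn` over `$stderr.puts`). It assumes Ruby 2.5 or newer, where `rescue`/`ensure` are allowed directly inside `do...end` blocks; the item list and messages are made up for illustration.

```ruby
# Illustrative only; not from the ManageIQ tree. Requires Ruby >= 2.5.
items = ["1", "2", "oops", "4"]

# Before: an explicit begin/end wrapper inside the block.
items.each do |item|
  begin
    puts Integer(item)
  rescue ArgumentError => err
    $stderr.puts "skipping #{item.inspect}: #{err.message}"
  end
end

# After: the block body takes rescue directly, and warn writes to stderr.
items.each do |item|
  puts Integer(item)
rescue ArgumentError => err
  warn "skipping #{item.inspect}: #{err.message}"
end
```

Either form skips the unparsable entry and keeps iterating; the second simply removes one level of nesting, which is what the `purge_archived_vms.rb`, `remove_grouping_from_report_results.rb`, and `vim_collect_inventory.rb` hunks do.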