
Commit

Merge pull request ManageIQ#17445 from jrafanie/upload_automate_models_dialogs_during_log_collection

Upload automate models dialogs during log collection
bdunne authored May 25, 2018
2 parents 1a2a40f + 5dab507 commit d868f6f
Showing 5 changed files with 168 additions and 127 deletions.
134 changes: 91 additions & 43 deletions app/models/miq_server/log_management.rb
@@ -8,46 +8,6 @@ module MiqServer::LogManagement
has_many :log_files, :dependent => :destroy, :as => :resource
end

def format_log_time(time)
time.respond_to?(:strftime) ? time.strftime("%Y%m%d_%H%M%S") : "unknown"
end

def post_historical_logs(taskid, log_depot)
task = MiqTask.find(taskid)
log_prefix = "Task: [#{task.id}]"
log_type = "Archive"

# Post all compressed logs for a specific date + configs, creating a new row per day
VMDB::Util.compressed_log_patterns.each do |pattern|
log_start, log_end = log_start_and_end_for_pattern(pattern)
date_string = "#{format_log_time(log_start)}_#{format_log_time(log_end)}" unless log_start.nil? && log_end.nil?

cond = {:historical => true, :name => LogFile.logfile_name(self, log_type, date_string), :state => 'available'}
cond[:logging_started_on] = log_start unless log_start.nil?
cond[:logging_ended_on] = log_end unless log_end.nil?
logfile = log_files.find_by(cond)

if logfile && logfile.log_uri.nil?
_log.info("#{log_prefix} #{log_type} logfile already exists with id: [#{logfile.id}] for [#{who_am_i}] with contents from: [#{log_start}] to: [#{log_end}]")
next
else
logfile = LogFile.historical_logfile
end

logfile.update(:file_depot => log_depot, :miq_task => task)
post_one_log_pattern(pattern, logfile, log_type)
end
end

def log_patterns(log_type, base_pattern = nil)
case log_type.to_s.downcase
when "archive"
Array(::Settings.log.collection.archive.pattern).unshift(base_pattern)
when "current"
current_log_patterns
end
end

def _post_my_logs(options)
# Make the request to the MiqServer whose logs are needed
MiqQueue.create_with(
@@ -82,6 +42,11 @@ def last_log_sync_message
last_log.try(:miq_task).try!(:message)
end

def include_automate_models_and_dialogs?(value)
return value unless value.nil?
Settings.log.collection.include_automate_models_and_dialogs
end

def post_logs(options)
taskid = options[:taskid]
task = MiqTask.find(taskid)
@@ -91,6 +56,11 @@ def post_logs(options)
raise _("Log depot settings not configured") unless context_log_depot
context_log_depot.update_attributes(:support_case => options[:support_case].presence)

if include_automate_models_and_dialogs?(options[:include_automate_models_and_dialogs])
post_automate_models(taskid, context_log_depot)
post_automate_dialogs(taskid, context_log_depot)
end

post_historical_logs(taskid, context_log_depot) unless options[:only_current]
post_current_logs(taskid, context_log_depot)
task.update_status("Finished", "Ok", "Log files were successfully collected")
Expand Down Expand Up @@ -120,6 +90,21 @@ def log_start_and_end_for_pattern(pattern)
VMDB::Util.get_log_start_end_times(evm)
end

def format_log_time(time)
time.respond_to?(:strftime) ? time.strftime("%Y%m%d_%H%M%S") : "unknown"
end

def log_patterns(log_type, base_pattern = nil)
case log_type.to_s.downcase
when "archive"
Array(::Settings.log.collection.archive.pattern).unshift(base_pattern)
when "current"
current_log_patterns
else
[base_pattern]
end
end

def post_one_log_pattern(pattern, logfile, log_type)
task = logfile.miq_task
log_prefix = "Task: [#{task.id}]"
@@ -132,7 +117,7 @@ def post_one_log_pattern(pattern, logfile, log_type)
task.update_status("Active", "Ok", msg)

begin
local_file = VMDB::Util.zip_logs("evm.zip", log_patterns(log_type, pattern), "system")
local_file = VMDB::Util.zip_logs(log_type.to_s.downcase.concat(".zip"), log_patterns(log_type, pattern), "system")
self.log_files << logfile

logfile.update_attributes(
@@ -144,22 +129,85 @@
)

logfile.upload

rescue StandardError, Timeout::Error => err
_log.error("#{log_prefix} Posting of #{log_type.downcase} logs failed for #{who_am_i} due to error: [#{err.class.name}] [#{err}]")
logfile.update_attributes(:state => "error")
raise
ensure
FileUtils.rm_f(local_file) if local_file && File.exist?(local_file)
end
msg = "#{log_type} log files from #{who_am_i} are posted"
_log.info("#{log_prefix} #{msg}")
task.update_status("Active", "Ok", msg)
end

def post_automate_models(taskid, log_depot)
domain_zip = Rails.root.join("log", "domain.zip")
backup_automate_models(domain_zip)

logfile = LogFile.historical_logfile
logfile.update(:file_depot => log_depot, :miq_task => MiqTask.find(taskid))
post_one_log_pattern(domain_zip, logfile, "Models")
ensure
FileUtils.rm_rf(domain_zip)
end

def backup_automate_models(domain_zip)
Dir.chdir(Rails.root) do
MiqAeDatastore.backup('zip_file' => domain_zip, 'overwrite' => false)
end
end

def post_automate_dialogs(taskid, log_depot)
dialog_directory = Rails.root.join("log", "service_dialogs")
FileUtils.mkdir_p(dialog_directory)
backup_automate_dialogs(dialog_directory)

logfile = LogFile.historical_logfile
logfile.update(:file_depot => log_depot, :miq_task => MiqTask.find(taskid))
post_one_log_pattern(dialog_directory.join("*"), logfile, "Dialogs")
ensure
FileUtils.rm_rf(dialog_directory)
end

def backup_automate_dialogs(dialog_directory)
Dir.chdir(Rails.root) do
TaskHelpers::Exports::ServiceDialogs.new.export(:keep_spaces => false, :directory => dialog_directory)
end
end

def post_historical_logs(taskid, log_depot)
task = MiqTask.find(taskid)
log_prefix = "Task: [#{task.id}]"
log_type = "Archive"

# Post all compressed logs for a specific date + configs, creating a new row per day
VMDB::Util.compressed_log_patterns.each do |pattern|
log_start, log_end = log_start_and_end_for_pattern(pattern)
date_string = "#{format_log_time(log_start)}_#{format_log_time(log_end)}" unless log_start.nil? && log_end.nil?

cond = {:historical => true, :name => LogFile.logfile_name(self, log_type, date_string), :state => 'available'}
cond[:logging_started_on] = log_start unless log_start.nil?
cond[:logging_ended_on] = log_end unless log_end.nil?
logfile = log_files.find_by(cond)

if logfile && logfile.log_uri.nil?
_log.info("#{log_prefix} #{log_type} logfile already exists with id: [#{logfile.id}] for [#{who_am_i}] with contents from: [#{log_start}] to: [#{log_end}]")
next
else
logfile = LogFile.historical_logfile
end

logfile.update(:file_depot => log_depot, :miq_task => task)
post_one_log_pattern(pattern, logfile, log_type)
end
end

def post_current_logs(taskid, log_depot)
delete_old_requested_logs

logfile = LogFile.current_logfile
logfile.update(:file_depot => log_depot, :miq_task => MiqTask.find(taskid))
post_one_log_pattern("log/*.log", logfile, "Current")
end

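Taken together, the log_management.rb changes treat the request option as an override and the Settings key as the default: include_automate_models_and_dialogs?(nil) falls back to Settings.log.collection.include_automate_models_and_dialogs, while an explicit true or false from the caller wins. A minimal standalone sketch of that resolution rule (resolve_include_flag and settings_default are illustrative names, not ManageIQ API):

# Mirrors the rule in MiqServer::LogManagement#include_automate_models_and_dialogs?:
# an explicit true/false from the caller wins; nil defers to the configured setting.
def resolve_include_flag(value, settings_default)
  return value unless value.nil?
  settings_default
end

resolve_include_flag(true,  false) # => true  (request forces the automate export)
resolve_include_flag(false, true)  # => false (request suppresses it)
resolve_include_flag(nil,   false) # => false (falls back to the configured default)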
9 changes: 8 additions & 1 deletion app/models/zone.rb
@@ -94,7 +94,14 @@ def self.determine_queue_zone(options)
end

def synchronize_logs(*args)
active_miq_servers.each { |s| s.synchronize_logs(*args) }
options = args.extract_options!
enabled = Settings.log.collection.include_automate_models_and_dialogs

active_miq_servers.each_with_index do |s, index|
# If enabled, export the automate domains and dialogs on the first active server
include_models_and_dialogs = enabled ? index.zero? : false
s.synchronize_logs(*args, options.merge(:include_automate_models_and_dialogs => include_models_and_dialogs))
end
end

def last_log_sync_on
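The Zone#synchronize_logs change above fans the collection out to every active server in the zone but asks only the first one to export the automate domains and service dialogs, so the export is not repeated once per server. A small standalone illustration of that fan-out rule (servers and setting_enabled are placeholder values, not ManageIQ API):

# Placeholders for the zone's active servers and for
# Settings.log.collection.include_automate_models_and_dialogs.
servers = %w(server_1 server_2 server_3)
setting_enabled = true

servers.each_with_index do |server, index|
  # Only the first server gets the automate export when the setting is on.
  include_models_and_dialogs = setting_enabled ? index.zero? : false
  puts "#{server}: include_automate_models_and_dialogs => #{include_models_and_dialogs}"
end
# server_1: include_automate_models_and_dialogs => true
# server_2: include_automate_models_and_dialogs => false
# server_3: include_automate_models_and_dialogs => false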
9 changes: 6 additions & 3 deletions spec/models/log_file_spec.rb
@@ -99,25 +99,28 @@

context "with post_logs message" do
it "#post_logs will only post current logs if flag enabled" do
message.deliver
message.args.first[:only_current] = true
expect_any_instance_of(MiqServer).to receive(:post_historical_logs).never
expect_any_instance_of(MiqServer).to receive(:post_current_logs).once
expect_any_instance_of(MiqServer).to receive(:post_automate_dialogs).once
expect_any_instance_of(MiqServer).to receive(:post_automate_models).once
message.delivered(*message.deliver)
end

it "#post_logs will post both historical and current logs if flag nil" do
message.deliver
expect_any_instance_of(MiqServer).to receive(:post_historical_logs).once
expect_any_instance_of(MiqServer).to receive(:post_current_logs).once
expect_any_instance_of(MiqServer).to receive(:post_automate_dialogs).once
expect_any_instance_of(MiqServer).to receive(:post_automate_models).once
message.delivered(*message.deliver)
end

it "#post_logs will post both historical and current logs if flag false" do
message.deliver
message.args.first[:only_current] = false
expect_any_instance_of(MiqServer).to receive(:post_historical_logs).once
expect_any_instance_of(MiqServer).to receive(:post_current_logs).once
expect_any_instance_of(MiqServer).to receive(:post_automate_dialogs).once
expect_any_instance_of(MiqServer).to receive(:post_automate_models).once
message.delivered(*message.deliver)
end
end
137 changes: 60 additions & 77 deletions spec/models/miq_server/log_management_spec.rb
@@ -113,6 +113,66 @@ def stub_vmdb_util_methods_for_collection_log
expect(@miq_server.current_log_patterns).to match_array %w(/var/log/syslog* /var/lib/pgsql/data/*.conf)
end

context "post current/historical/models/dialogs" do
let(:task) { FactoryGirl.create(:miq_task) }
let(:compressed_log_patterns) { [Rails.root.join("log", "evm*.log.gz").to_s] }
let(:current_log_patterns) { [Rails.root.join("log", "evm.log").to_s] }
let(:compressed_evm_log) { Rails.root.join("evm.log-20180319.gz").to_s }
let(:log_start) { Time.zone.parse("2018-05-11 11:33:12 UTC") }
let(:log_end) { Time.zone.parse("2018-05-11 15:34:16 UTC") }
let(:daily_log) { Rails.root.join("data", "user", "system", "evm_server_daily.zip").to_s }
let(:log_depot) { FactoryGirl.create(:file_depot) }
let!(:region) { MiqRegion.seed }
let(:zone) { @miq_server.zone }
before do
require 'vmdb/util'
allow(VMDB::Util).to receive(:compressed_log_patterns).and_return(compressed_log_patterns)
allow(VMDB::Util).to receive(:get_evm_log_for_date).and_return(compressed_evm_log)
allow(VMDB::Util).to receive(:get_log_start_end_times).and_return([log_start, log_end])
allow(VMDB::Util).to receive(:zip_logs).and_return(daily_log)
allow(@miq_server).to receive(:current_log_patterns).and_return(current_log_patterns)
allow(@miq_server).to receive(:backup_automate_dialogs)
allow(@miq_server).to receive(:backup_automate_models)
%w(historical_logfile current_logfile).each do |kind|
logfile = FactoryGirl.create(:log_file, :historical => kind == "historical_logfile")
allow(logfile).to receive(:upload)
allow(LogFile).to receive(kind).and_return(logfile)
end
end

%w(
Archive post_historical_logs
Current post_current_logs
Models post_automate_models
Dialogs post_automate_dialogs
).each_slice(2) do |name, method|
it "##{method}" do
@miq_server.send(method, task.id, log_depot)
logfile = @miq_server.reload.log_files.first
expected_name = [name, "region", region.region, zone.name, zone.id, @miq_server.name, @miq_server.id, "20180511_113312 20180511_153416"].join(" ")
expect(logfile).to have_attributes(
:file_depot => log_depot,
:local_file => daily_log,
:logging_started_on => log_start,
:logging_ended_on => log_end,
:name => expected_name,
:description => "Logs for Zone #{@miq_server.zone.name} Server #{@miq_server.name} 20180511_113312 20180511_153416",
:miq_task_id => task.id
)

expected_filename = "#{name}_region_#{region.region}_#{zone.name}_#{zone.id}_#{@miq_server.name}_#{@miq_server.id}_20180511_113312_20180511_153416.zip"
expected_filename.gsub!(/\s+/, "_")
expect(logfile.destination_file_name).to eq(expected_filename)

expect(task.reload).to have_attributes(
:message => "#{name} log files from #{@miq_server.name} #{@miq_server.zone.name} MiqServer #{@miq_server.id} are posted",
:state => "Active",
:status => "Ok",
)
end
end
end

context "#synchronize_logs" do
it "passes along server override" do
@miq_server.synchronize_logs("system", @miq_server2)
@@ -149,83 +209,6 @@ def stub_vmdb_util_methods_for_collection_log
describe "#post_historical_logs" do
context "Server" do
include_examples "post_[type_of_log]_logs", "MiqServer", :historical

context "new tests" do
let(:task) { FactoryGirl.create(:miq_task) }
let(:compressed_log_patterns) { [Rails.root.join("log/evm*.log.gz").to_s] }
let(:current_log_patterns) { [Rails.root.join("log/evm.log").to_s] }
let(:compressed_evm_log) { Rails.root.join("evm.log-20180319.gz").to_s}
let(:log_start) { Time.parse("2018-05-11 11:33:12 UTC") }
let(:log_end) { Time.parse("2018-05-11 15:34:16 UTC") }
let(:daily_log) { Rails.root.join("data", "user", "system", "evm_server_daily.zip").to_s}
let(:log_depot) { FactoryGirl.create(:file_depot) }
let!(:region) { MiqRegion.seed }
let(:zone) { @miq_server.zone }
before do
require 'vmdb/util'
allow(VMDB::Util).to receive(:compressed_log_patterns).and_return(compressed_log_patterns)
allow(VMDB::Util).to receive(:get_evm_log_for_date).and_return(compressed_evm_log)
allow(VMDB::Util).to receive(:get_log_start_end_times).and_return([log_start, log_end])
allow(VMDB::Util).to receive(:zip_logs).and_return(daily_log)
%w{historical_logfile current_logfile}.each do |kind|
logfile = FactoryGirl.create(:log_file, :historical => kind == "historical_logfile")
allow(logfile).to receive(:upload)
allow(LogFile).to receive(kind).and_return(logfile)
end
end

it "no prior historical logfile" do
@miq_server.post_historical_logs(task.id, log_depot)
logfile = @miq_server.reload.log_files.first
expected_name = ["Archive", "region", region.region, zone.name, zone.id, @miq_server.name, @miq_server.id, "20180511_113312 20180511_153416"].join(" ")
expect(logfile).to have_attributes(
:file_depot => log_depot,
:local_file => daily_log,
:logging_started_on => log_start,
:logging_ended_on => log_end,
:name => expected_name,
:description => "Logs for Zone #{@miq_server.zone.name} Server #{@miq_server.name} 20180511_113312 20180511_153416",
:miq_task_id => task.id
)

expected_filename = "Archive_region_#{region.region}_#{zone.name}_#{zone.id}_#{@miq_server.name}_#{@miq_server.id}_20180511_113312_20180511_153416.zip"
expected_filename.gsub!(/\s+/, "_")
expect(logfile.destination_file_name).to eq(expected_filename)

expect(task.reload).to have_attributes(
:message => "Archive log files from #{@miq_server.name} #{@miq_server.zone.name} MiqServer #{@miq_server.id} are posted",
:state => "Active",
:status => "Ok",
)

end

it "no prior current logfile" do
allow(@miq_server).to receive(:current_log_patterns).and_return(current_log_patterns)
@miq_server.post_current_logs(task.id, log_depot)
logfile = @miq_server.reload.log_files.first
expected_name = ["Current", "region", region.region, zone.name, zone.id, @miq_server.name, @miq_server.id, "20180511_113312 20180511_153416"].join(" ")
expect(logfile).to have_attributes(
:file_depot => log_depot,
:local_file => daily_log,
:logging_started_on => log_start,
:logging_ended_on => log_end,
:name => expected_name,
:description => "Logs for Zone #{@miq_server.zone.name} Server #{@miq_server.name} 20180511_113312 20180511_153416",
:miq_task_id => task.id
)

expected_filename = "Current_region_#{region.region}_#{zone.name}_#{zone.id}_#{@miq_server.name}_#{@miq_server.id}_20180511_113312_20180511_153416.zip"
expected_filename.gsub!(/\s+/, "_")
expect(logfile.destination_file_name).to eq(expected_filename)

expect(task.reload).to have_attributes(
:message => "Current log files from #{@miq_server.name} #{@miq_server.zone.name} MiqServer #{@miq_server.id} are posted",
:state => "Active",
:status => "Ok",
)
end
end
end

context "Zone" do
