Fix merging race conditions
Ensure we cache and synchronize appropriately so we don't merge more than
once or do unsafe reads of the resultset. This has the added benefit of
reducing the runtime of at_exit hooks by over 66%, since we now do less
than 1/3 the work.

See https://gist.github.com/jenseng/62465f674f8c02de09ef776f23d4dca4
for a simple repro script.

There are three main problems when using merging:

1. `SimpleCov::ResultMerger.stored_data` doesn't synchronize its read,
   so it can see an empty file if another process is writing it.
2. `SimpleCov::ResultMerger.resultset` calls `.stored_data` twice and
   never caches the result.
3. `SimpleCov.result` doesn't cache `@result`, so the default `at_exit`
   behavior causes it to store and merge three times. Because of
   problems 1 and 2, this is especially bad.

This can cause the formatter to miss out on coverage data and/or get the
wrong values for covered percentages.
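To make the race concrete, here is a minimal, hypothetical sketch (not SimpleCov's actual code; the file constants and method names are invented) of the unsynchronized read described in problem 1, alongside the flock-based guard this commit adopts for both reads and writes:

```ruby
require "json"

RESULTSET = ".resultset.json".freeze
LOCKFILE  = ".resultset.json.lock".freeze

# Unsafe: another test process may be mid-write, so this read can see
# an empty or partially written file and silently drop coverage data.
def read_resultset_unsafe
  return {} unless File.exist?(RESULTSET)
  JSON.parse(File.read(RESULTSET)) || {}
rescue JSON::ParserError
  {}
end

# Safer: take an exclusive lock on a companion lock file around every
# read and write, so no reader ever observes a half-written resultset.
def with_resultset_lock
  File.open(LOCKFILE, "w+") do |f|
    f.flock(File::LOCK_EX)
    yield
  end
end

def merge_into_resultset(command_name, coverage_data)
  with_resultset_lock do
    set = read_resultset_unsafe # safe here because we hold the lock
    set[command_name] = coverage_data
    File.open(RESULTSET, "w+") { |f| f.puts JSON.pretty_generate(set) }
  end
end
```

The key point is that reads need the same lock as writes; locking only the writer still lets an unlocked reader see a truncated file.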

Furthermore, if you use Oj, `JSON.parse("")` returns `nil`, which means
`.resultset` can be nil, so this race condition causes exceptions as
seen in #406. In addition to fixing the race condition, also add ` || {}`
around the parse of `.stored_data`, to be a bit more robust against an
empty .resultset.json.
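As a small illustration of that guard (a sketch with an invented helper name): whether `JSON.parse("")` raises or returns `nil` depends on the JSON backend in use, so combining `|| {}` with a rescue covers both behaviors:

```ruby
require "json"

# Parse a cached resultset defensively: treat missing data, an
# unparsable blob, or a parse that yields nil as "no previous results".
def parse_resultset(data)
  return {} if data.nil?
  JSON.parse(data) || {} # some backends return nil instead of raising
rescue JSON::ParserError
  {}
end

parse_resultset(nil)            # => {}
parse_resultset("")             # => {} whether parse raises or returns nil
parse_resultset("not json")     # => {}
parse_resultset('{"rspec":{}}') # => {"rspec"=>{}}
```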
jenseng committed Jul 12, 2017
1 parent 607986f commit 0e1d6e8
Showing 4 changed files with 176 additions and 28 deletions.
21 changes: 13 additions & 8 deletions lib/simplecov.rb
@@ -75,23 +75,21 @@ def add_not_loaded_files(result)
# from cache using SimpleCov::ResultMerger if use_merging is activated (default)
#
def result
# Ensure the variable is defined to avoid ruby warnings
@result = nil unless defined?(@result)
return @result if result?

# Collect our coverage result
if running && !result?
if running
@result = SimpleCov::Result.new add_not_loaded_files(Coverage.result)
end

# If we're using merging of results, store the current result
# first, then merge the results and return those
# first (if there is one), then merge the results and return those
if use_merging
SimpleCov::ResultMerger.store_result(@result) if result?

SimpleCov::ResultMerger.merged_result
else
@result
@result = SimpleCov::ResultMerger.merged_result
end

@result
ensure
self.running = false
end
@@ -158,6 +156,13 @@ def usable?
false
end
end

#
# Clear out the previously cached .result. Primarily useful in testing
#
def clear_result
@result = nil
end
end
end

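With the memoization added above, repeated `SimpleCov.result` calls inside the `at_exit` hook return the same object instead of storing and merging again, and `clear_result` exists so tests can reset that memo. A hypothetical usage sketch (assumes SimpleCov is loaded and a coverage run is active):

```ruby
first  = SimpleCov.result  # collects coverage, stores and merges once
second = SimpleCov.result  # returns the memoized result; no extra merge
first.equal?(second)       # => true

SimpleCov.clear_result     # drop the memo, e.g. between specs
```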
52 changes: 39 additions & 13 deletions lib/simplecov/result_merger.rb
@@ -17,25 +17,31 @@ def resultset_writelock
File.join(SimpleCov.coverage_path, ".resultset.json.lock")
end

# Loads the cached resultset from JSON and returns it as a Hash
# Loads the cached resultset from JSON and returns it as a Hash,
# caching it for subsequent accesses.
def resultset
if stored_data
begin
JSON.parse(stored_data)
rescue
@resultset ||= begin
data = stored_data
if data
begin
JSON.parse(data) || {}
rescue
{}
end
else
{}
end
else
{}
end
end

# Returns the contents of the resultset cache as a string or if the file is missing or empty nil
def stored_data
return unless File.exist?(resultset_path)
data = File.read(resultset_path)
return if data.nil? || data.length < 2
data
synchronize_resultset do
return unless File.exist?(resultset_path)
data = File.read(resultset_path)
return if data.nil? || data.length < 2
data
end
end

# Gets the resultset hash and re-creates all included instances
@@ -76,8 +82,9 @@ def merged_result

# Saves the given SimpleCov::Result in the resultset cache
def store_result(result)
File.open(resultset_writelock, "w+") do |f|
f.flock(File::LOCK_EX)
synchronize_resultset do
# Ensure we have the latest, in case it was already cached
@resultset = nil
new_set = resultset
command_name, data = result.to_hash.first
new_set[command_name] = data
@@ -87,6 +94,25 @@ def store_result(result)
end
true
end

private

# Ensure only one process is reading or writing the resultset at any
# given time
def synchronize_resultset
# make it reentrant
return yield if defined?(@resultset_locked) && @resultset_locked

begin
@resultset_locked = true
File.open(resultset_writelock, "w+") do |f|
f.flock(File::LOCK_EX)
yield
end
ensure
@resultset_locked = false
end
end
end
end
end
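A non-obvious detail in `synchronize_resultset` above is the `@resultset_locked` flag: `store_result` takes the lock and then calls `resultset`, which calls `stored_data`, which also synchronizes, and a second `flock` through a fresh file descriptor can block on a lock the same process already holds. The flag makes the section reentrant. A stripped-down sketch of the pattern (names here are illustrative, not SimpleCov's API):

```ruby
# Reentrant, flock-based critical section: a nested call in the same
# process runs its block directly instead of blocking on the lock it
# already holds through the outer call.
def synchronize(lockfile)
  return yield if @locked

  begin
    @locked = true
    File.open(lockfile, "w+") do |f|
      f.flock(File::LOCK_EX)
      yield
    end
  ensure
    @locked = false
  end
end

@locked = false
synchronize(".demo.lock") do
  synchronize(".demo.lock") { puts "nested critical section ran" }
end
```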
22 changes: 15 additions & 7 deletions spec/result_merger_spec.rb
@@ -4,7 +4,6 @@
describe SimpleCov::ResultMerger do
describe "with two faked coverage resultsets" do
before do
SimpleCov.use_merging true
@resultset1 = {
source_fixture("sample.rb") => [nil, 1, 1, 1, nil, nil, 1, 1, nil, nil],
source_fixture("app/models/user.rb") => [nil, 1, 1, 1, nil, nil, 1, 0, nil, nil],
@@ -73,14 +72,23 @@
expect(SimpleCov::ResultMerger.results.length).to eq(1)
end
end
end
end

context "with merging disabled" do
before { SimpleCov.use_merging false }
describe ".store_result" do
it "refreshes the resultset" do
set = SimpleCov::ResultMerger.resultset
SimpleCov::ResultMerger.store_result({})
new_set = SimpleCov::ResultMerger.resultset
expect(new_set).not_to be(set)
end
end

it "returns nil for SimpleCov.result" do
expect(SimpleCov.result).to be_nil
end
end
describe ".resultset" do
it "caches by default" do
set = SimpleCov::ResultMerger.resultset
new_set = SimpleCov::ResultMerger.resultset
expect(new_set).to be(set)
end
end
end
109 changes: 109 additions & 0 deletions spec/simplecov_spec.rb
@@ -0,0 +1,109 @@
require "helper"

if SimpleCov.usable?
describe SimpleCov do
describe ".result" do
before do
SimpleCov.clear_result
allow(Coverage).to receive(:result).once.and_return({})
end

context "with merging disabled" do
before do
allow(SimpleCov).to receive(:use_merging).once.and_return(false)
end

context "when not running" do
before do
allow(SimpleCov).to receive(:running).and_return(false)
end

it "returns nil" do
expect(SimpleCov.result).to be_nil
end
end

context "when running" do
before do
allow(SimpleCov).to receive(:running).and_return(true, false)
end

it "uses the result from Coverage" do
expect(Coverage).to receive(:result).once.and_return(__FILE__ => [0, 1])
expect(SimpleCov.result.filenames).to eq [__FILE__]
end

it "adds not-loaded-files" do
expect(SimpleCov).to receive(:add_not_loaded_files).once.and_return({})
SimpleCov.result
end

it "doesn't store the current coverage" do
expect(SimpleCov::ResultMerger).not_to receive(:store_result)
SimpleCov.result
end

it "doesn't merge the result" do
expect(SimpleCov::ResultMerger).not_to receive(:merged_result)
SimpleCov.result
end

it "caches its result" do
result = SimpleCov.result
expect(SimpleCov.result).to be(result)
end
end
end

context "with merging enabled" do
let(:the_merged_result) { double }

before do
allow(SimpleCov).to receive(:use_merging).once.and_return(true)
allow(SimpleCov::ResultMerger).to receive(:store_result).once
allow(SimpleCov::ResultMerger).to receive(:merged_result).once.and_return(the_merged_result)
end

context "when not running" do
before do
allow(SimpleCov).to receive(:running).and_return(false)
end

it "merges the result" do
expect(SimpleCov.result).to be(the_merged_result)
end
end

context "when running" do
before do
allow(SimpleCov).to receive(:running).and_return(true, false)
end

it "uses the result from Coverage" do
expect(Coverage).to receive(:result).once.and_return({})
SimpleCov.result
end

it "adds not-loaded-files" do
expect(SimpleCov).to receive(:add_not_loaded_files).once.and_return({})
SimpleCov.result
end

it "stores the current coverage" do
expect(SimpleCov::ResultMerger).to receive(:store_result).once
SimpleCov.result
end

it "merges the result" do
expect(SimpleCov.result).to be(the_merged_result)
end

it "caches its result" do
result = SimpleCov.result
expect(SimpleCov.result).to be(result)
end
end
end
end
end
end
