# frozen_string_literal: true

module TurboTests
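  # Coordinates a parallel RSpec run: splits the requested spec files into
  # groups, launches one `rspec` subprocess per group (plus one for
  # multisite-tagged specs), and streams each subprocess's JSON-formatted
  # results back through a named pipe into a single reporter.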
  class Runner
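    # Convenience entry point: pulls options out of +opts+, builds the
    # reporter from the requested formatters, and runs a new Runner.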
    def self.run(opts = {})
      files = opts[:files]
      formatters = opts[:formatters]
      start_time = opts.fetch(:start_time) { Time.now }
      verbose = opts.fetch(:verbose, false)
      fail_fast = opts.fetch(:fail_fast, nil)

      if verbose
        STDERR.puts "VERBOSE"
      end

      reporter = Reporter.from_config(formatters, start_time)

      new(
        reporter: reporter,
        files: files,
        verbose: verbose,
        fail_fast: fail_fast
      ).run
    end

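    # Stores the run options and sets up the message queue and thread list
    # that the subprocess reader threads feed into.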
    def initialize(opts)
      @reporter = opts[:reporter]
      @files = opts[:files]
      @verbose = opts[:verbose]
      @fail_fast = opts[:fail_fast]
      @failure_count = 0

      @messages = Queue.new
      @threads = []
    end

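    # Orchestrates the whole run: checks for pending migrations, groups the
    # spec files, spawns the subprocesses, then handles their messages until
    # every subprocess has reported an exit. Returns true when no examples
    # failed.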
    def run
      check_for_migrations

      @num_processes = ParallelTests.determine_number_of_processes(nil)

      use_runtime_info = @files == ['spec']

      group_opts = {}

      if use_runtime_info
        group_opts[:runtime_log] = "tmp/turbo_rspec_runtime.log"
      else
        group_opts[:group_by] = :filesize
      end

      tests_in_groups =
        ParallelTests::RSpec::Runner.tests_in_groups(
          @files,
          @num_processes,
          **group_opts,
        )

      setup_tmp_dir

      subprocess_opts = {
        record_runtime: use_runtime_info
      }

      start_multisite_subprocess(@files, **subprocess_opts)

      tests_in_groups.each_with_index do |tests, process_id|
        start_regular_subprocess(tests, process_id + 1, **subprocess_opts)
      end

      handle_messages

      @reporter.finish

      @threads.each(&:join)

      @reporter.failed_examples.empty?
    end

    protected

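    # Connects to the discourse_test_1 database before any subprocess is
    # spawned and aborts the run with a hint to run `rake parallel:migrate`
    # if migrations are pending, so the problem is reported once, up front.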
    def check_for_migrations
      config =
        ActiveRecord::Base
          .configurations["test"]
          .merge("database" => "discourse_test_1")

      ActiveRecord::Migrator.migrations_paths = ['db/migrate', 'db/post_migrate']

      conn = ActiveRecord::Base.establish_connection(config).connection
      begin
        ActiveRecord::Migration.check_pending!(conn)
      rescue ActiveRecord::PendingMigrationError
        puts "There are pending migrations, run rake parallel:migrate"
        exit 1
      end
    end

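    # Recreates tmp/test-pipes/, the directory holding one fifo per
    # subprocess; removing it first clears stale pipes from earlier runs.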
    def setup_tmp_dir
      begin
        FileUtils.rm_r('tmp/test-pipes')
      rescue Errno::ENOENT
      end

      FileUtils.mkdir_p('tmp/test-pipes/')
    end

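    # Runs the multisite-tagged specs in their own subprocess, identified by
    # the string "multisite" instead of a numeric process id.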
    def start_multisite_subprocess(tests, **opts)
      start_subprocess(
        {},
        ["--tag", "type:multisite"],
        tests,
        "multisite",
        **opts
      )
    end

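    # Runs one numbered group of non-multisite specs, exporting
    # TEST_ENV_NUMBER so the subprocess picks up its own test environment.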
    def start_regular_subprocess(tests, process_id, **opts)
      start_subprocess(
        { 'TEST_ENV_NUMBER' => process_id.to_s },
        ["--tag", "~type:multisite"],
        tests,
        process_id,
        **opts
      )
    end

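    # Spawns a single `bundle exec rspec` subprocess. Results are written as
    # JSON rows to a fifo under tmp/test-pipes/ and read back on a dedicated
    # thread, which tags each message with the process id and pushes it onto
    # @messages; stdout and stderr are copied through to the parent's streams.
    # When the group is empty, an 'exit' message is queued immediately so
    # handle_messages still sees one exit per expected subprocess.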
    def start_subprocess(env, extra_args, tests, process_id, record_runtime:)
      if tests.empty?
        @messages << {
          type: 'exit',
          process_id: process_id
        }
      else
        tmp_filename = "tmp/test-pipes/subprocess-#{process_id}"

        begin
          File.mkfifo(tmp_filename)
        rescue Errno::EEXIST
        end

        env['RSPEC_SILENCE_FILTER_ANNOUNCEMENTS'] = '1'

        record_runtime_options =
          if record_runtime
            [
              "--format", "ParallelTests::RSpec::RuntimeLogger",
              "--out", "tmp/turbo_rspec_runtime.log",
            ]
          else
            []
          end

        command = [
          "bundle", "exec", "rspec",
          *extra_args,
          "--format", "TurboTests::JsonRowsFormatter",
          "--out", tmp_filename,
          *record_runtime_options,
          *tests
        ]

        if @verbose
          command_str = [
            env.map { |k, v| "#{k}=#{v}" }.join(' '),
            command.join(' ')
          ].select { |x| x.size > 0 }.join(' ')

          STDERR.puts "Process #{process_id}: #{command_str}"
        end

        _stdin, stdout, stderr, _wait_thr = Open3.popen3(env, *command)

        @threads <<
          Thread.new do
            File.open(tmp_filename) do |fd|
              fd.each_line do |line|
                message = JSON.parse(line)
                message = message.symbolize_keys
                message[:process_id] = process_id
                @messages << message
              end
            end

            @messages << { type: 'exit', process_id: process_id }
          end

        @threads << start_copy_thread(stdout, STDOUT)
        @threads << start_copy_thread(stderr, STDERR)
      end
    end

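    # Copies everything from +src+ to +dst+ on a background thread until the
    # source hits EOF.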
    def start_copy_thread(src, dst)
      Thread.new do
        while true
          begin
            msg = src.readpartial(4096)
          rescue EOFError
            break
          else
            dst.write(msg)
          end
        end
      end
    end

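    # Drains @messages, forwarding example results to the reporter, counting
    # failures for fail-fast, and stopping once all subprocesses (the numbered
    # groups plus the multisite one, hence @num_processes + 1) have sent their
    # 'exit' message.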
    def handle_messages
      exited = 0

      begin
        while true
          message = @messages.pop
          case message[:type]
          when 'example_passed'
            example = FakeExample.from_obj(message[:example])
            @reporter.example_passed(example)
          when 'example_pending'
            example = FakeExample.from_obj(message[:example])
            @reporter.example_pending(example)
          when 'example_failed'
            example = FakeExample.from_obj(message[:example])
            @reporter.example_failed(example)
            @failure_count += 1
            if fail_fast_met
              @threads.each(&:kill)
              break
            end
          when 'seed'
          when 'close'
          when 'exit'
            exited += 1
            if exited == @num_processes + 1
              break
            end
          else
            STDERR.puts("Unhandled message in main process: #{message}")
          end

          STDOUT.flush
        end
      rescue Interrupt
      end
    end

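    # True once a fail-fast limit was given and the number of failed examples
    # has reached it.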
    def fail_fast_met
      !@fail_fast.nil? && @failure_count >= @fail_fast
    end
  end
end