Commit f6d5d730 by clytwynec

Merge pull request #4117 from edx/clytwynec/rake2paver-bokchoy

Deprecate the bok-choy rake tasks in favor of paver
parents d18d078e 49a2369e
......@@ -243,29 +243,29 @@ the supported development environment for the edX Platform.
To run all the bok choy acceptance tests:
rake test:bok_choy
paver test_bokchoy
Once the database has been set up and the static files collected, you can use the 'fast'
option to skip those tasks. This option can also be used with any of the test specs below:
rake test:bok_choy:fast
paver test_bokchoy --fasttest
To run a single test, specify the name of the test file. For example:
rake test:bok_choy[test_lms.py]
paver test_bokchoy -t test_lms.py
To run a single test faster by skipping the setup tasks:
rake test:bok_choy:fast[test_lms.py]
paver test_bokchoy -t test_lms.py --fasttest
To test only a certain feature, specify the file and the test case class:
rake test:bok_choy:fast[test_lms.py:RegistrationTest]
paver test_bokchoy -t test_lms.py:RegistrationTest
To execute only a certain test case, specify the file name, class, and
test case method:
rake test:bok_choy:fast[test_lms.py:RegistrationTest.test_register]
paver test_bokchoy -t test_lms.py:RegistrationTest.test_register
During acceptance test execution, log files and screenshots of failed tests
are captured in test_root/log.
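After a test run, coverage data written by the test servers can be combined into HTML, XML, and console reports with the new coverage task:
paver bokchoy_coverage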
......
"""
paver commands
"""
from . import assets, servers, docs, prereqs, quality, tests, js_test, i18n
from . import assets, servers, docs, prereqs, quality, tests, js_test, i18n, bok_choy
"""
Run acceptance tests that use the bok-choy framework
http://bok-choy.readthedocs.org/en/latest/
"""
from paver.easy import task, needs, cmdopts, sh
from pavelib.utils.test.suites.bokchoy_suite import BokChoyTestSuite
from pavelib.utils.envs import Env
from optparse import make_option
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text # pylint: disable-msg=invalid-name
__test__ = False # do not collect
@task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([
('test_spec=', 't', 'Specific test to run'),
('fasttest', 'a', 'Skip some setup'),
make_option("--verbose", action="store_const", const=2, dest="verbosity"),
make_option("-q", "--quiet", action="store_const", const=0, dest="verbosity"),
make_option("-v", "--verbosity", action="count", dest="verbosity"),
])
def test_bokchoy(options):
"""
Run acceptance tests that use the bok-choy framework.
Skips some setup if `fasttest` is True.
`test_spec` is a nose-style test specifier relative to the test directory
Examples:
- path/to/test.py
- path/to/test.py:TestFoo
- path/to/test.py:TestFoo.test_bar
It can also be left blank to run all tests in the suite.
"""
opts = {
'test_spec': getattr(options, 'test_spec', None),
'fasttest': getattr(options, 'fasttest', False),
'verbosity': getattr(options, 'verbosity', 2)
}
test_suite = BokChoyTestSuite('bok-choy', **opts)
test_suite.run()
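# For illustration only (not part of this change): a command line such as
#   paver test_bokchoy -t test_lms.py:RegistrationTest --fasttest
# ends up constructing and running the suite roughly as
#   BokChoyTestSuite('bok-choy',
#                    test_spec='test_lms.py:RegistrationTest',
#                    fasttest=True,
#                    verbosity=2).run()
# where verbosity defaults to 2 when no -v/-q/--verbose flag is supplied.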
@task
def bokchoy_coverage():
"""
Generate coverage reports for bok-choy tests
"""
Env.BOK_CHOY_REPORT_DIR.makedirs_p()
coveragerc = Env.BOK_CHOY_COVERAGERC
msg = colorize('green', "Combining coverage reports")
print(msg)
sh("coverage combine --rcfile={}".format(coveragerc))
msg = colorize('green', "Generating coverage reports")
print(msg)
sh("coverage html --rcfile={}".format(coveragerc))
sh("coverage xml --rcfile={}".format(coveragerc))
sh("coverage report --rcfile={}".format(coveragerc))
......@@ -7,7 +7,7 @@ import sys
import json
from lazy import lazy
from path import path
import memcache
class Env(object):
"""
......@@ -20,6 +20,66 @@ class Env(object):
# Reports Directory
REPORT_DIR = REPO_ROOT / 'reports'
# Bok_choy dirs
BOK_CHOY_DIR = REPO_ROOT / "common" / "test" / "acceptance"
BOK_CHOY_LOG_DIR = REPO_ROOT / "test_root" / "log"
BOK_CHOY_REPORT_DIR = REPORT_DIR / "bok_choy"
BOK_CHOY_COVERAGERC = BOK_CHOY_DIR / ".coveragerc"
# For the time being, stubs are used by both the bok-choy and lettuce acceptance tests
# For this reason, the stubs package is currently located in the Django app called "terrain"
# where other lettuce configuration is stored.
BOK_CHOY_STUB_DIR = REPO_ROOT / "common" / "djangoapps" / "terrain"
# Directory that videos are served from
VIDEO_SOURCE_DIR = REPO_ROOT / "test_root" / "data" / "video"
BOK_CHOY_SERVERS = {
'lms': {
'port': 8003,
'log': BOK_CHOY_LOG_DIR / "bok_choy_lms.log"
},
'cms': {
'port': 8031,
'log': BOK_CHOY_LOG_DIR / "bok_choy_studio.log"
}
}
BOK_CHOY_STUBS = {
'xqueue': {
'port': 8040,
'log': BOK_CHOY_LOG_DIR / "bok_choy_xqueue.log",
'config': 'register_submission_url=http://0.0.0.0:8041/test/register_submission',
},
'ora': {
'port': 8041,
'log': BOK_CHOY_LOG_DIR / "bok_choy_ora.log",
'config': '',
},
'comments': {
'port': 4567,
'log': BOK_CHOY_LOG_DIR / "bok_choy_comments.log",
},
'video': {
'port': 8777,
'log': BOK_CHOY_LOG_DIR / "bok_choy_video_sources.log",
'config': "root_dir={}".format(VIDEO_SOURCE_DIR),
},
'youtube': {
'port': 9080,
'log': BOK_CHOY_LOG_DIR / "bok_choy_youtube.log",
}
}
# Mongo databases that will be dropped before/after the tests run
BOK_CHOY_MONGO_DATABASE = "test"
BOK_CHOY_CACHE = memcache.Client(['0.0.0.0:11211'], debug=0)
# Test Ids Directory
TEST_DIR = REPO_ROOT / ".testids"
......
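As a quick orientation, other pavelib modules consume the constants added above along these lines (an illustrative sketch, not part of the change itself):
from pavelib.utils.envs import Env
Env.BOK_CHOY_REPORT_DIR.makedirs_p()            # path objects expose makedirs_p()
lms_port = Env.BOK_CHOY_SERVERS['lms']['port']  # 8003
Env.BOK_CHOY_CACHE.flush_all()                  # shared memcache client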
......@@ -7,6 +7,7 @@ import os
import subprocess
import signal
import psutil
import atexit
def kill_process(proc):
......@@ -45,6 +46,9 @@ def run_multi_processes(cmd_list, out_log=None, err_log=None):
pids.extend([subprocess.Popen(cmd, **kwargs)])
def _signal_handler(*args):
"""
What to do when the process is interrupted (SIGINT)
"""
print("\nEnding...")
signal.signal(signal.SIGINT, _signal_handler)
......@@ -68,3 +72,36 @@ def run_process(cmd, out_log=None, err_log=None):
Terminates the process on CTRL-C or if an error occurs.
"""
return run_multi_processes([cmd], out_log=out_log, err_log=err_log)
def run_background_process(cmd, out_log=None, err_log=None, cwd=None):
"""
Runs a command as a background process. Sends SIGINT at exit.
"""
kwargs = {'shell': True, 'cwd': cwd}
if out_log:
out_log_file = open(out_log, 'w')
kwargs['stdout'] = out_log_file
if err_log:
err_log_file = open(err_log, 'w')
kwargs['stderr'] = err_log_file
proc = subprocess.Popen(cmd, **kwargs)
def exit_handler():
"""
Send SIGINT to the process's children. This is important
for running commands under coverage, as coverage will not
produce the correct artifacts if the child process isn't
killed properly.
"""
p1_group = psutil.Process(proc.pid)
child_pids = p1_group.get_children(recursive=True)
for child_pid in child_pids:
os.kill(child_pid.pid, signal.SIGINT)
atexit.register(exit_handler)
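# Illustrative usage (hypothetical, simplified from start_servers() in
# pavelib/utils/test/bokchoy_utils.py below): launch the LMS under coverage
# in the background, logging stdout and stderr to the same file. The
# registered exit handler then SIGINTs the children at interpreter exit so
# coverage can write its data files.
#
#   run_background_process(
#       "coverage run -m manage lms --settings bok_choy runserver 0.0.0.0:8003 --noreload",
#       out_log="test_root/log/bok_choy_lms.log",
#       err_log="test_root/log/bok_choy_lms.log",
#   )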
"""
Helper functions for bok_choy test tasks
"""
import sys
import os
import time
import httplib
from paver.easy import sh
from pavelib.utils.envs import Env
from pavelib.utils.process import run_background_process
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text # pylint: disable-msg=invalid-name
__test__ = False # do not collect
def start_servers():
"""
Start the servers that the tests will run against.
"""
def start_server(cmd, logfile, cwd=None):
"""
Starts a single server.
"""
print cmd, logfile
run_background_process(cmd, out_log=logfile, err_log=logfile, cwd=cwd)
for service, info in Env.BOK_CHOY_SERVERS.iteritems():
address = "0.0.0.0:{}".format(info['port'])
cmd = (
"coverage run --rcfile={coveragerc} -m "
"manage {service} --settings bok_choy runserver "
"{address} --traceback --noreload".format(
coveragerc=Env.BOK_CHOY_COVERAGERC,
service=service,
address=address,
)
)
start_server(cmd, info['log'])
for service, info in Env.BOK_CHOY_STUBS.iteritems():
cmd = (
"python -m stubs.start {service} {port} "
"{config}".format(
service=service,
port=info['port'],
config=info.get('config', ''),
)
)
start_server(cmd, info['log'], cwd=Env.BOK_CHOY_STUB_DIR)
def wait_for_server(server, port):
"""
Wait for a server to respond with status 200
"""
print(
"Checking server {server} on port {port}".format(
server=server,
port=port,
)
)
attempts = 0
server_ok = False
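# Poll up to 20 times, sleeping one second between attempts; each attempt
# also allows a 10 second connection timeout.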
while attempts < 20:
try:
connection = httplib.HTTPConnection(server, port, timeout=10)
connection.request('GET', '/')
response = connection.getresponse()
if int(response.status) == 200:
server_ok = True
break
except: # pylint: disable-msg=bare-except
pass
attempts += 1
time.sleep(1)
return server_ok
def wait_for_test_servers():
"""
Wait until we get a successful response from the servers or time out
"""
for service, info in Env.BOK_CHOY_SERVERS.iteritems():
ready = wait_for_server("0.0.0.0", info['port'])
if not ready:
msg = colorize(
"red",
"Could not contact {} test server".format(service)
)
print(msg)
sys.exit(1)
def is_mongo_running():
"""
Returns True if mongo is running, False otherwise.
"""
# The mongo command will connect to the service,
# failing with a non-zero exit code if it cannot connect.
output = os.popen('mongo --eval "print(\'running\')"').read()
return (output and "running" in output)
def is_memcache_running():
"""
Returns True if memcache is running, False otherwise.
"""
# Attempt to set a key in memcache. If we cannot do so because the
# service is not available, then this will return False.
return Env.BOK_CHOY_CACHE.set('test', 'test')
def is_mysql_running():
"""
Returns True if mysql is running, False otherwise.
"""
# We use the MySQL CLI client and capture its stderr
# If the client cannot connect successfully, stderr will be non-empty
output = os.popen('mysql -e "" 2>&1').read()
return output == ""
def clear_mongo():
"""
Clears mongo database.
"""
sh(
"mongo {} --eval 'db.dropDatabase()' > /dev/null".format(
Env.BOK_CHOY_MONGO_DATABASE,
)
)
def check_mongo():
"""
Check that mongo is running
"""
if not is_mongo_running():
msg = colorize('red', "Mongo is not running locally.")
print(msg)
sys.exit(1)
def check_memcache():
"""
Check that memcache is running
"""
if not is_memcache_running():
msg = colorize('red', "Memcache is not running locally.")
print(msg)
sys.exit(1)
def check_mysql():
"""
Check that mysql is running
"""
if not is_mysql_running():
msg = colorize('red', "MySQL is not running locally.")
print(msg)
sys.exit(1)
def check_services():
"""
Check that all required services are running
"""
check_mongo()
check_memcache()
check_mysql()
"""
Class used for defining and running Bok Choy acceptance test suite
"""
from paver.easy import sh
from pavelib.utils.test.suites import TestSuite
from pavelib.utils.envs import Env
from pavelib.utils.test import bokchoy_utils
from pavelib.utils.test import utils as test_utils
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text # pylint: disable-msg=invalid-name
__test__ = False # do not collect
class BokChoyTestSuite(TestSuite):
"""
TestSuite for running Bok Choy tests
"""
def __init__(self, *args, **kwargs):
super(BokChoyTestSuite, self).__init__(*args, **kwargs)
self.test_dir = Env.BOK_CHOY_DIR / "tests"
self.log_dir = Env.BOK_CHOY_LOG_DIR
self.report_dir = Env.BOK_CHOY_REPORT_DIR
self.xunit_report = self.report_dir / "xunit.xml"
self.cache = Env.BOK_CHOY_CACHE
self.fasttest = kwargs.get('fasttest', False)
self.test_spec = kwargs.get('test_spec', None)
self.verbosity = kwargs.get('verbosity', 2)
def __enter__(self):
super(BokChoyTestSuite, self).__enter__()
# Ensure that we have a directory to put logs and reports
self.log_dir.makedirs_p()
self.report_dir.makedirs_p()
test_utils.clean_reports_dir()
msg = colorize('green', "Checking for mongo, memcache, and mysql...")
print(msg)
bokchoy_utils.check_services()
if not self.fasttest:
# Process assets and set up database for bok-choy tests
# Reset the database
sh("{}/scripts/reset-test-db.sh".format(Env.REPO_ROOT))
# Collect static assets
sh("paver update_assets --settings=bok_choy")
# Clear any test data already in Mongo or MySQL and invalidate
# the cache
bokchoy_utils.clear_mongo()
self.cache.flush_all()
sh(
"./manage.py lms --settings bok_choy loaddata --traceback"
" common/test/db_fixtures/*.json"
)
# Ensure the test servers are available
msg = colorize('green', "Starting test servers...")
print(msg)
bokchoy_utils.start_servers()
msg = colorize('green', "Waiting for servers to start...")
print(msg)
bokchoy_utils.wait_for_test_servers()
def __exit__(self, exc_type, exc_value, traceback):
super(BokChoyTestSuite, self).__exit__(exc_type, exc_value, traceback)
msg = colorize('green', "Cleaning up databases...")
print(msg)
# Clean up data we created in the databases
sh("./manage.py lms --settings bok_choy flush --traceback --noinput")
bokchoy_utils.clear_mongo()
@property
def cmd(self):
# Default to running all tests if no specific test is specified
if not self.test_spec:
test_spec = self.test_dir
else:
test_spec = self.test_dir / self.test_spec
# Construct the nosetests command, specifying where to save
# screenshots and XUnit XML reports
cmd = [
"SCREENSHOT_DIR='{}'".format(self.log_dir),
"nosetests",
test_spec,
"--with-xunit",
"--with-flaky",
"--xunit-file={}".format(self.xunit_report),
"--verbosity={}".format(self.verbosity),
]
cmd = (" ").join(cmd)
return cmd
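# For illustration (hypothetical values, REPO_ROOT abbreviated): with
# test_spec="test_lms.py" and the default verbosity of 2, cmd evaluates to
# roughly:
#   SCREENSHOT_DIR='<REPO_ROOT>/test_root/log' nosetests
#   <REPO_ROOT>/common/test/acceptance/tests/test_lms.py --with-xunit
#   --with-flaky --xunit-file=<REPO_ROOT>/reports/bok_choy/xunit.xml --verbosity=2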
# Run acceptance tests that use the bok-choy framework
# http://bok-choy.readthedocs.org/en/latest/
require 'dalli'
# Mongo databases that will be dropped before/after the tests run
BOK_CHOY_MONGO_DATABASE = "test"
# Control parallel test execution with environment variables
# Process timeout is the maximum amount of time to wait for results from a particular test case
BOK_CHOY_NUM_PARALLEL = ENV.fetch('NUM_PARALLEL', 1).to_i
BOK_CHOY_TEST_TIMEOUT = ENV.fetch("TEST_TIMEOUT", 300).to_f
# Ensure that we have a directory to put logs and reports
BOK_CHOY_DIR = File.join(REPO_ROOT, "common", "test", "acceptance")
BOK_CHOY_TEST_DIR = File.join(BOK_CHOY_DIR, "tests")
BOK_CHOY_LOG_DIR = File.join(REPO_ROOT, "test_root", "log")
directory BOK_CHOY_LOG_DIR
# Reports
BOK_CHOY_REPORT_DIR = report_dir_path("bok_choy")
BOK_CHOY_XUNIT_REPORT = File.join(BOK_CHOY_REPORT_DIR, "xunit.xml")
BOK_CHOY_COVERAGE_RC = File.join(BOK_CHOY_DIR, ".coveragerc")
directory BOK_CHOY_REPORT_DIR
# Directory that videos are served from
VIDEO_SOURCE_DIR = File.join(REPO_ROOT, "test_root", "data", "video")
BOK_CHOY_SERVERS = {
:lms => { :port => 8003, :log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_lms.log") },
:cms => { :port => 8031, :log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_studio.log") }
}
BOK_CHOY_STUBS = {
:xqueue => {
:port => 8040,
:log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_xqueue.log"),
:config => 'register_submission_url=http://0.0.0.0:8041/test/register_submission'
},
:ora => {
:port => 8041,
:log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_ora.log"),
:config => ''
},
:comments => {
:port => 4567,
:log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_comments.log")
},
:video => {
:port => 8777,
:log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_video_sources.log"),
:config => "root_dir=#{VIDEO_SOURCE_DIR}"
},
:youtube => {
:port => 9080,
:log => File.join(BOK_CHOY_LOG_DIR, "bok_choy_youtube.log")
}
}
# For the time being, stubs are used by both the bok-choy and lettuce acceptance tests
# For this reason, the stubs package is currently located in the Django app called "terrain"
# where other lettuce configuration is stored.
BOK_CHOY_STUB_DIR = File.join(REPO_ROOT, "common", "djangoapps", "terrain")
BOK_CHOY_CACHE = Dalli::Client.new('localhost:11211')
# Start the servers we will run tests on
def start_servers()
BOK_CHOY_SERVERS.each do | service, info |
address = "0.0.0.0:#{info[:port]}"
cmd = "coverage run --rcfile=#{BOK_CHOY_COVERAGE_RC} -m manage #{service} --settings bok_choy runserver #{address} --traceback --noreload"
singleton_process(cmd, logfile=info[:log])
end
BOK_CHOY_STUBS.each do | service, info |
Dir.chdir(BOK_CHOY_STUB_DIR) do
singleton_process(
"python -m stubs.start #{service} #{info[:port]} #{info[:config]}",
logfile=info[:log]
)
end
end
end
# Wait until we get a successful response from the servers or time out
def wait_for_test_servers()
BOK_CHOY_SERVERS.merge(BOK_CHOY_STUBS).each do | service, info |
ready = wait_for_server("0.0.0.0", info[:port])
if not ready
fail("Could not contact #{service} test server")
end
end
end
def is_mongo_running()
# The mongo command will connect to the service,
# failing with a non-zero exit code if it cannot connect.
output = `mongo --eval "print('running')"`
return (output and output.include? "running")
end
def is_memcache_running()
# We use a Ruby memcache client to attempt to set a key
# in memcache. If we cannot do so because the service is not
# available, then this will raise an exception.
BOK_CHOY_CACHE.set('test', 'test')
return true
rescue Dalli::DalliError
return false
end
def is_mysql_running()
# We use the MySQL CLI client and capture its stderr
# If the client cannot connect successfully, stderr will be non-empty
output = `mysql -e "" 2>&1`
return output == ""
end
# Run the bok choy tests
# `test_spec` is a nose-style test specifier relative to the test directory
# Examples:
# - path/to/test.py
# - path/to/test.py:TestFoo
# - path/to/test.py:TestFoo.test_bar
# It can also be left blank to run all tests in the suite.
def run_bok_choy(test_spec)
# Default to running all tests if no specific test is specified
if test_spec.nil?
test_spec = BOK_CHOY_TEST_DIR
else
test_spec = File.join(BOK_CHOY_TEST_DIR, test_spec)
end
# Construct the nosetests command, specifying where to save screenshots and XUnit XML reports
cmd = [
"SCREENSHOT_DIR='#{BOK_CHOY_LOG_DIR}'", "nosetests", test_spec,
"--with-xunit", "--with-flaky", "--xunit-file=#{BOK_CHOY_XUNIT_REPORT}", "--verbosity=2"
]
# Configure parallel test execution, if specified
if BOK_CHOY_NUM_PARALLEL > 1
cmd += ["--processes=#{BOK_CHOY_NUM_PARALLEL}", "--process-timeout=#{BOK_CHOY_TEST_TIMEOUT}"]
end
# Run the nosetests command
sh(cmd.join(" "))
end
def clear_mongo()
sh("mongo #{BOK_CHOY_MONGO_DATABASE} --eval 'db.dropDatabase()' > /dev/null")
end
# Clean up data we created in the databases
def cleanup()
sh(django_admin('lms', 'bok_choy', 'flush', '--noinput'))
clear_mongo()
end
namespace :'test:bok_choy' do
# Check that mongo is running
task :check_mongo do
if not is_mongo_running()
fail("Mongo is not running locally.")
end
end
# Check that memcache is running
task :check_memcache do
if not is_memcache_running()
fail("Memcache is not running locally.")
end
end
# Check that mysql is running
task :check_mysql do
if not is_mysql_running()
fail("MySQL is not running locally.")
end
end
# Check that all required services are running
task :check_services => [:check_mongo, :check_memcache, :check_mysql]
desc "Process assets and set up database for bok-choy tests"
task :setup => [:check_mysql, :install_prereqs, BOK_CHOY_LOG_DIR] do
# Reset the database
sh("#{REPO_ROOT}/scripts/reset-test-db.sh")
# Collect static assets
sh("paver update_assets --settings=bok_choy")
end
desc "Run acceptance tests that use the bok-choy framework but skip setup"
task :fast, [:test_spec] => [
:check_services, BOK_CHOY_LOG_DIR, BOK_CHOY_REPORT_DIR, :clean_reports_dir
] do |t, args|
# Clear any test data already in Mongo or MySQL and invalidate the cache
clear_mongo()
BOK_CHOY_CACHE.flush()
sh(django_admin('lms', 'bok_choy', 'loaddata', 'common/test/db_fixtures/*.json'))
# Ensure the test servers are available
puts "Starting test servers...".green
start_servers()
puts "Waiting for servers to start...".green
wait_for_test_servers()
begin
puts "Running test suite...".green
run_bok_choy(args.test_spec)
rescue
puts "Tests failed!".red
exit 1
ensure
puts "Cleaning up databases...".green
cleanup()
end
end
desc "Generate coverage reports for bok-choy tests"
task :coverage => BOK_CHOY_REPORT_DIR do | t, args |
puts "Combining coverage reports".red
sh("coverage combine --rcfile=#{BOK_CHOY_COVERAGE_RC}")
puts "Generating coverage reports".red
sh("coverage html --rcfile=#{BOK_CHOY_COVERAGE_RC}")
sh("coverage xml --rcfile=#{BOK_CHOY_COVERAGE_RC}")
sh("coverage report --rcfile=#{BOK_CHOY_COVERAGE_RC}")
end
end
# Default: set up and run the tests
desc "Run acceptance tests that use the bok-choy framework"
task :'test:bok_choy', [:test_spec] => [:'test:bok_choy:setup'] do |t, args|
Rake::Task["test:bok_choy:fast"].invoke(args.test_spec)
end
# test tasks deprecated in favor of paver
require 'colorize'
def deprecated(deprecated, deprecated_by, *args)
task deprecated, [:test_spec] do |t,args|
args.with_defaults(:test_spec => nil)
new_cmd = "#{deprecated_by}"
if !args.test_spec.nil?
new_cmd = "#{new_cmd} -t #{args.test_spec}"
end
puts("Task #{deprecated} has been deprecated. Using #{new_cmd} instead.".red)
sh(new_cmd)
end
end
deprecated('test:bok_choy', 'paver test_bokchoy')
deprecated('test:bok_choy:coverage', 'paver bokchoy_coverage')
deprecated('test:bok_choy:fast', 'paver test_bokchoy --fasttest')
deprecated('test:bok_choy:setup', 'paver test_bokchoy')
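For example, running rake test:bok_choy:fast[test_lms.py] now prints the deprecation notice and shells out to "paver test_bokchoy --fasttest -t test_lms.py", while rake test:bok_choy with no test spec simply runs "paver test_bokchoy".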