# -*- coding: utf-8 -*-
"""IPython Test Process Controller
This module runs one or more subprocesses which will actually run the IPython
test suite.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import argparse
import json
import multiprocessing.pool
import os
import shutil
import signal
import sys
import subprocess
import time
import re
from .iptest import have, test_group_names as py_test_group_names, test_sections, StreamCapturer
from IPython.utils.path import compress_user
from IPython.utils.py3compat import bytes_to_str
from IPython.utils.sysinfo import get_sys_info
from IPython.utils.tempdir import TemporaryDirectory
from IPython.utils.text import strip_ansi
try:
# Python >= 3.3
from subprocess import TimeoutExpired
def popen_wait(p, timeout):
return p.wait(timeout)
except ImportError:
class TimeoutExpired(Exception):
pass
def popen_wait(p, timeout):
"""backport of Popen.wait from Python 3"""
for i in range(int(10 * timeout)):
if p.poll() is not None:
return
time.sleep(0.1)
if p.poll() is None:
raise TimeoutExpired
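# Seconds to wait for the notebook server to exit after terminate()/kill()
# in JSController.cleanup().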
NOTEBOOK_SHUTDOWN_TIMEOUT = 10
class TestController(object):
"""Run tests in a subprocess
"""
#: str, IPython test suite to be executed.
section = None
#: list, command line arguments to be executed
cmd = None
#: dict, extra environment variables to set for the subprocess
env = None
#: list, TemporaryDirectory instances to clear up when the process finishes
dirs = None
#: subprocess.Popen instance
process = None
#: str, process stdout+stderr
stdout = None
def __init__(self):
self.cmd = []
self.env = {}
self.dirs = []
def setup(self):
"""Create temporary directories etc.
This is only called when we know the test group will be run. Things
created here may be cleaned up by self.cleanup().
"""
pass
def launch(self, buffer_output=False):
# print('*** ENV:', self.env) # dbg
# print('*** CMD:', self.cmd) # dbg
env = os.environ.copy()
env.update(self.env)
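        # When buffering, merge stderr into a single stdout pipe so that
        # communicate() captures everything in self.stdout.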
output = subprocess.PIPE if buffer_output else None
stdout = subprocess.STDOUT if buffer_output else None
self.process = subprocess.Popen(self.cmd, stdout=output,
stderr=stdout, env=env)
def wait(self):
self.stdout, _ = self.process.communicate()
return self.process.returncode
def print_extra_info(self):
"""Print extra information about this test run.
If we're running in parallel and showing the concise view, this is only
called if the test group fails. Otherwise, it's called before the test
group is started.
The base implementation does nothing, but it can be overridden by
subclasses.
"""
return
def cleanup_process(self):
"""Cleanup on exit by killing any leftover processes."""
subp = self.process
if subp is None or (subp.poll() is not None):
return # Process doesn't exist, or is already dead.
try:
print('Cleaning up stale PID: %d' % subp.pid)
subp.kill()
except: # (OSError, WindowsError) ?
# This is just a best effort, if we fail or the process was
# really gone, ignore it.
pass
else:
for i in range(10):
if subp.poll() is None:
time.sleep(0.1)
else:
break
if subp.poll() is None:
# The process did not die...
print('... failed. Manual cleanup may be required.')
def cleanup(self):
"Kill process if it's still alive, and clean up temporary directories"
self.cleanup_process()
for td in self.dirs:
td.cleanup()
__del__ = cleanup
class PyTestController(TestController):
"""Run Python tests using IPython.testing.iptest"""
#: str, Python command to execute in subprocess
pycmd = None
def __init__(self, section, options):
"""Create new test runner."""
TestController.__init__(self)
self.section = section
# pycmd is put into cmd[2] in PyTestController.launch()
self.cmd = [sys.executable, '-c', None, section]
self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()"
self.options = options
def setup(self):
ipydir = TemporaryDirectory()
self.dirs.append(ipydir)
self.env['IPYTHONDIR'] = ipydir.name
self.workingdir = workingdir = TemporaryDirectory()
self.dirs.append(workingdir)
self.env['IPTEST_WORKING_DIR'] = workingdir.name
# This means we won't get odd effects from our own matplotlib config
self.env['MPLCONFIGDIR'] = workingdir.name
# From options:
if self.options.xunit:
self.add_xunit()
if self.options.coverage:
self.add_coverage()
self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams
self.cmd.extend(self.options.extra_args)
@property
def will_run(self):
try:
return test_sections[self.section].will_run
except KeyError:
return True
def add_xunit(self):
xunit_file = os.path.abspath(self.section + '.xunit.xml')
self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file])
def add_coverage(self):
try:
sources = test_sections[self.section].includes
except KeyError:
sources = ['IPython']
coverage_rc = ("[run]\n"
"data_file = {data_file}\n"
"source =\n"
" {source}\n"
).format(data_file=os.path.abspath('.coverage.'+self.section),
source="\n ".join(sources))
config_file = os.path.join(self.workingdir.name, '.coveragerc')
with open(config_file, 'w') as f:
f.write(coverage_rc)
self.env['COVERAGE_PROCESS_START'] = config_file
self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd
def launch(self, buffer_output=False):
self.cmd[2] = self.pycmd
super(PyTestController, self).launch(buffer_output=buffer_output)
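# JavaScript test group names are prefixed with 'js/' so they can be told
# apart from the Python test sections on the command line.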
js_prefix = 'js/'
def get_js_test_dir():
import IPython.html.tests as t
return os.path.join(os.path.dirname(t.__file__), '')
def all_js_groups():
import glob
test_dir = get_js_test_dir()
all_subdirs = glob.glob(test_dir + '[!_]*/')
return [js_prefix+os.path.relpath(x, test_dir) for x in all_subdirs]
class JSController(TestController):
"""Run CasperJS tests """
requirements = ['zmq', 'tornado', 'jinja2', 'casperjs', 'sqlite3']
display_slimer_output = False
def __init__(self, section, enabled=True, engine='phantomjs'):
"""Create new test runner."""
TestController.__init__(self)
self.engine = engine
self.section = section
self.enabled = enabled
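        # SlimerJS cannot report failures through its exit code, so failures are
        # detected by scanning the captured output for lines starting with 'FAIL'.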
self.slimer_failure = re.compile('^FAIL.*', flags=re.MULTILINE)
js_test_dir = get_js_test_dir()
includes = '--includes=' + os.path.join(js_test_dir,'util.js')
test_cases = os.path.join(js_test_dir, self.section[len(js_prefix):])
self.cmd = ['casperjs', 'test', includes, test_cases, '--engine=%s' % self.engine]
def setup(self):
self.ipydir = TemporaryDirectory()
self.nbdir = TemporaryDirectory()
self.dirs.append(self.ipydir)
self.dirs.append(self.nbdir)
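        # Subdirectories with non-ASCII names, so the notebook tests also run
        # against unicode paths.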
os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir1', u'sub ∂ir 1a')))
os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir2', u'sub ∂ir 1b')))
# start the ipython notebook, so we get the port number
self.server_port = 0
self._init_server()
if self.server_port:
self.cmd.append("--port=%i" % self.server_port)
else:
# don't launch tests if the server didn't start
self.cmd = [sys.executable, '-c', 'raise SystemExit(1)']
def launch(self, buffer_output):
# If the engine is SlimerJS, we need to buffer the output because
# SlimerJS does not support exit codes, so CasperJS always returns 0.
if self.engine == 'slimerjs' and not buffer_output:
self.display_slimer_output = True
return super(JSController, self).launch(buffer_output=True)
else:
return super(JSController, self).launch(buffer_output=buffer_output)
def wait(self, *pargs, **kwargs):
"""Wait for the JSController to finish"""
ret = super(JSController, self).wait(*pargs, **kwargs)
# If this is a SlimerJS controller, check the captured stdout for
# errors. Otherwise, just return the return code.
if self.engine == 'slimerjs':
stdout = bytes_to_str(self.stdout)
if self.display_slimer_output:
print(stdout)
if ret != 0:
# This could still happen e.g. if it's stopped by SIGINT
return ret
return bool(self.slimer_failure.search(strip_ansi(stdout)))
else:
return ret
def print_extra_info(self):
print("Running tests with notebook directory %r" % self.nbdir.name)
@property
def will_run(self):
return self.enabled and all(have[a] for a in self.requirements + [self.engine])
def _init_server(self):
"Start the notebook server in a separate process"
self.server_command = command = [sys.executable,
'-m', 'IPython.html',
'--no-browser',
'--ipython-dir', self.ipydir.name,
'--notebook-dir', self.nbdir.name,
]
# ipc doesn't work on Windows, and darwin has crazy-long temp paths,
# which run afoul of ipc's maximum path length.
if sys.platform.startswith('linux'):
command.append('--KernelManager.transport=ipc')
self.stream_capturer = c = StreamCapturer()
c.start()
self.server = subprocess.Popen(command, stdout=c.writefd, stderr=subprocess.STDOUT)
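        # The notebook server writes its connection info, including the port it
        # bound to, into this JSON file once it has started up.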
self.server_info_file = os.path.join(self.ipydir.name,
'profile_default', 'security', 'nbserver-%i.json' % self.server.pid
)
self._wait_for_server()
def _wait_for_server(self):
"""Wait 30 seconds for the notebook server to start"""
for i in range(300):
if self.server.poll() is not None:
return self._failed_to_start()
if os.path.exists(self.server_info_file):
try:
self._load_server_info()
except ValueError:
# If the server is halfway through writing the file, we may
# get invalid JSON; it should be ready next iteration.
pass
else:
return
time.sleep(0.1)
print("Notebook server-info file never arrived: %s" % self.server_info_file,
file=sys.stderr
)
def _failed_to_start(self):
"""Notebook server exited prematurely"""
captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
print("Notebook failed to start: ", file=sys.stderr)
        print(self.server_command, file=sys.stderr)
print(captured, file=sys.stderr)
def _load_server_info(self):
"""Notebook server started, load connection info from JSON"""
with open(self.server_info_file) as f:
info = json.load(f)
self.server_port = info['port']
def cleanup(self):
try:
self.server.terminate()
except OSError:
# already dead
pass
# wait 10s for the server to shutdown
try:
popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
except TimeoutExpired:
# server didn't terminate, kill it
try:
print("Failed to terminate notebook server, killing it.",
file=sys.stderr
)
self.server.kill()
except OSError:
# already dead
pass
# wait another 10s
try:
popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT)
except TimeoutExpired:
print("Notebook server still running (%s)" % self.server_info_file,
file=sys.stderr
)
self.stream_capturer.halt()
TestController.cleanup(self)
def prepare_controllers(options):
"""Returns two lists of TestController instances, those to run, and those
not to run."""
testgroups = options.testgroups
if testgroups:
if 'js' in testgroups:
js_testgroups = all_js_groups()
else:
js_testgroups = [g for g in testgroups if g.startswith(js_prefix)]
py_testgroups = [g for g in testgroups if g not in ['js'] + js_testgroups]
else:
py_testgroups = py_test_group_names
if not options.all:
js_testgroups = []
test_sections['parallel'].enabled = False
else:
js_testgroups = all_js_groups()
engine = 'slimerjs' if options.slimerjs else 'phantomjs'
c_js = [JSController(name, engine=engine) for name in js_testgroups]
c_py = [PyTestController(name, options) for name in py_testgroups]
controllers = c_py + c_js
to_run = [c for c in controllers if c.will_run]
not_run = [c for c in controllers if not c.will_run]
return to_run, not_run
def do_run(controller, buffer_output=True):
"""Setup and run a test controller.
If buffer_output is True, no output is displayed, to avoid it appearing
interleaved. In this case, the caller is responsible for displaying test
output on failure.
Returns
-------
controller : TestController
The same controller as passed in, as a convenience for using map() type
APIs.
exitcode : int
The exit code of the test subprocess. Non-zero indicates failure.
"""
try:
try:
controller.setup()
if not buffer_output:
controller.print_extra_info()
controller.launch(buffer_output=buffer_output)
except Exception:
import traceback
traceback.print_exc()
return controller, 1 # signal failure
exitcode = controller.wait()
return controller, exitcode
except KeyboardInterrupt:
return controller, -signal.SIGINT
finally:
controller.cleanup()
def report():
"""Return a string with a summary report of test-related variables."""
inf = get_sys_info()
out = []
def _add(name, value):
out.append((name, value))
_add('IPython version', inf['ipython_version'])
_add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source']))
_add('IPython package', compress_user(inf['ipython_path']))
_add('Python version', inf['sys_version'].replace('\n',''))
_add('sys.executable', compress_user(inf['sys_executable']))
_add('Platform', inf['platform'])
width = max(len(n) for (n,v) in out)
out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out]
avail = []
not_avail = []
for k, is_avail in have.items():
if is_avail:
avail.append(k)
else:
not_avail.append(k)
if avail:
out.append('\nTools and libraries available at test time:\n')
avail.sort()
out.append(' ' + ' '.join(avail)+'\n')
if not_avail:
out.append('\nTools and libraries NOT available at test time:\n')
not_avail.sort()
out.append(' ' + ' '.join(not_avail)+'\n')
return ''.join(out)
def run_iptestall(options):
"""Run the entire IPython test suite by calling nose and trial.
This function constructs :class:`IPTester` instances for all IPython
modules and package and then runs each of them. This causes the modules
and packages of IPython to be tested each in their own subprocess using
nose.
Parameters
----------
All parameters are passed as attributes of the options object.
testgroups : list of str
Run only these sections of the test suite. If empty, run all the available
sections.
fast : int or None
Run the test suite in parallel, using n simultaneous processes. If None
is passed, one process is used per CPU core. Default 1 (i.e. sequential)
inc_slow : bool
Include slow tests, like IPython.parallel. By default, these tests aren't
run.
slimerjs : bool
Use slimerjs if it's installed instead of phantomjs for casperjs tests.
xunit : bool
Produce Xunit XML output. This is written to multiple foo.xunit.xml files.
coverage : bool or str
Measure code coverage from tests. True will store the raw coverage data,
or pass 'html' or 'xml' to get reports.
extra_args : list
Extra arguments to pass to the test subprocesses, e.g. '-v'
"""
to_run, not_run = prepare_controllers(options)
def justify(ltext, rtext, width=70, fill='-'):
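        # Pads ltext with `fill` characters so rtext ends up right-aligned in a
        # line `width` columns wide, producing the 'Test group: foo ---- OK' rows.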
ltext += ' '
rtext = (' ' + rtext).rjust(width - len(ltext), fill)
return ltext + rtext
# Run all test runners, tracking execution time
failed = []
t_start = time.time()
print()
if options.fast == 1:
# This actually means sequential, i.e. with 1 job
for controller in to_run:
print('Test group:', controller.section)
sys.stdout.flush() # Show in correct order when output is piped
controller, res = do_run(controller, buffer_output=False)
if res:
failed.append(controller)
if res == -signal.SIGINT:
print("Interrupted")
break
print()
else:
# Run tests concurrently
try:
pool = multiprocessing.pool.ThreadPool(options.fast)
for (controller, res) in pool.imap_unordered(do_run, to_run):
res_string = 'OK' if res == 0 else 'FAILED'
print(justify('Test group: ' + controller.section, res_string))
if res:
controller.print_extra_info()
print(bytes_to_str(controller.stdout))
failed.append(controller)
if res == -signal.SIGINT:
print("Interrupted")
break
except KeyboardInterrupt:
return
for controller in not_run:
print(justify('Test group: ' + controller.section, 'NOT RUN'))
t_end = time.time()
t_tests = t_end - t_start
nrunners = len(to_run)
nfail = len(failed)
# summarize results
print('_'*70)
print('Test suite completed for system with the following information:')
print(report())
took = "Took %.3fs." % t_tests
print('Status: ', end='')
if not failed:
print('OK (%d test groups).' % nrunners, took)
else:
# If anything went wrong, point out what command to rerun manually to
# see the actual errors and individual summary
failed_sections = [c.section for c in failed]
print('ERROR - {} out of {} test groups failed ({}).'.format(nfail,
nrunners, ', '.join(failed_sections)), took)
print()
print('You may wish to rerun these, with:')
print(' iptest', *failed_sections)
print()
if options.coverage:
from coverage import coverage
cov = coverage(data_file='.coverage')
cov.combine()
cov.save()
# Coverage HTML report
if options.coverage == 'html':
html_dir = 'ipy_htmlcov'
shutil.rmtree(html_dir, ignore_errors=True)
print("Writing HTML coverage report to %s/ ... " % html_dir, end="")
sys.stdout.flush()
# Custom HTML reporter to clean up module names.
from coverage.html import HtmlReporter
class CustomHtmlReporter(HtmlReporter):
def find_code_units(self, morfs):
super(CustomHtmlReporter, self).find_code_units(morfs)
for cu in self.code_units:
nameparts = cu.name.split(os.sep)
if 'IPython' not in nameparts:
continue
ix = nameparts.index('IPython')
cu.name = '.'.join(nameparts[ix:])
# Reimplement the html_report method with our custom reporter
cov._harvest_data()
cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir,
html_title='IPython test coverage',
)
reporter = CustomHtmlReporter(cov, cov.config)
reporter.report(None)
print('done.')
# Coverage XML report
elif options.coverage == 'xml':
cov.xml_report(outfile='ipy_coverage.xml')
if failed:
# Ensure that our exit code indicates failure
sys.exit(1)
argparser = argparse.ArgumentParser(description='Run IPython test suite')
argparser.add_argument('testgroups', nargs='*',
help='Run specified groups of tests. If omitted, run '
'all tests.')
argparser.add_argument('--all', action='store_true',
help='Include slow tests not run by default.')
argparser.add_argument('--slimerjs', action='store_true',
help="Use slimerjs if it's installed instead of phantomjs for casperjs tests.")
argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int,
help='Run test sections in parallel. This starts as many '
'processes as you have cores, or you can specify a number.')
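# Passing -j with no number yields fast=None; ThreadPool(None) then starts one
# worker per CPU core, which is the "as many processes as you have cores" case.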
argparser.add_argument('--xunit', action='store_true',
help='Produce Xunit XML results')
argparser.add_argument('--coverage', nargs='?', const=True, default=False,
help="Measure test coverage. Specify 'html' or "
"'xml' to get reports.")
argparser.add_argument('--subproc-streams', default='capture',
help="What to do with stdout/stderr from subprocesses. "
"'capture' (default), 'show' and 'discard' are the options.")
def default_options():
"""Get an argparse Namespace object with the default arguments, to pass to
:func:`run_iptestall`.
"""
options = argparser.parse_args([])
options.extra_args = []
return options
def main():
# iptest doesn't work correctly if the working directory is the
# root of the IPython source tree. Tell the user to avoid
# frustration.
if os.path.exists(os.path.join(os.getcwd(),
'IPython', 'testing', '__main__.py')):
print("Don't run iptest from the IPython source directory",
file=sys.stderr)
sys.exit(1)
# Arguments after -- should be passed through to nose. Argparse treats
# everything after -- as regular positional arguments, so we separate them
# first.
try:
ix = sys.argv.index('--')
except ValueError:
to_parse = sys.argv[1:]
extra_args = []
else:
to_parse = sys.argv[1:ix]
extra_args = sys.argv[ix+1:]
options = argparser.parse_args(to_parse)
options.extra_args = extra_args
run_iptestall(options)
if __name__ == '__main__':
main()