2008-06-07 06:16:36 +08:00
|
|
|
# encoding: utf-8
|
2008-06-07 06:24:37 +08:00
|
|
|
"""
|
|
|
|
This module defines the things that are used in setup.py for building IPython
|
|
|
|
|
|
|
|
This includes:
|
|
|
|
|
|
|
|
* The basic arguments to setup
|
|
|
|
* Functions for finding things like packages, package data, etc.
|
|
|
|
* A function for checking dependencies.
|
|
|
|
"""
|
2008-06-07 06:16:36 +08:00
|
|
|
|
2014-04-10 06:52:59 +08:00
|
|
|
# Copyright (c) IPython Development Team.
|
|
|
|
# Distributed under the terms of the Modified BSD License.
|
|
|
|
|
|
|
|
from __future__ import print_function
|
2008-06-07 06:16:36 +08:00
|
|
|
|
2013-11-01 08:38:34 +08:00
|
|
|
import errno
|
2010-10-27 08:57:12 +08:00
|
|
|
import os
|
|
|
|
import sys
|
2008-06-07 06:16:36 +08:00
|
|
|
|
2014-07-24 06:33:08 +08:00
|
|
|
from distutils import log
|
2010-10-27 08:57:12 +08:00
|
|
|
from distutils.command.build_py import build_py
|
2013-08-22 02:29:24 +08:00
|
|
|
from distutils.command.build_scripts import build_scripts
|
2013-11-01 08:38:34 +08:00
|
|
|
from distutils.command.install import install
|
|
|
|
from distutils.command.install_scripts import install_scripts
|
2013-05-01 07:18:52 +08:00
|
|
|
from distutils.cmd import Command
|
2014-12-27 16:16:28 +08:00
|
|
|
from distutils.errors import DistutilsExecError
|
2014-03-15 02:21:38 +08:00
|
|
|
from fnmatch import fnmatch
|
2008-06-07 06:16:36 +08:00
|
|
|
from glob import glob
|
2014-12-27 16:16:28 +08:00
|
|
|
from subprocess import Popen, PIPE
|
2008-06-07 06:16:36 +08:00
|
|
|
|
|
|
|
from setupext import install_data_ext
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------------
|
|
|
|
# Useful globals and utility functions
|
|
|
|
#-------------------------------------------------------------------------------
|
|
|
|
|
|
|
|
# A few handy globals

# Shorthand aliases used throughout this module.
isfile = os.path.isfile
pjoin = os.path.join

# Absolute path of the directory containing this file (the repository root).
repo_root = os.path.dirname(os.path.abspath(__file__))
|
2008-06-07 06:16:36 +08:00
|
|
|
|
|
|
|
def oscmd(s):
    """Echo a shell command, run it, and return its exit status.

    Parameters
    ----------
    s : str
        Shell command to execute via ``os.system``.

    Returns
    -------
    int
        The raw ``os.system`` status (0 on success).  Previously the status
        was discarded; returning it lets callers detect failures while
        remaining backward compatible with callers that ignore it.
    """
    print(">", s)
    return os.system(s)
|
2011-10-04 22:14:41 +08:00
|
|
|
|
2012-04-14 16:26:34 +08:00
|
|
|
# Py3 compatibility hacks, without assuming IPython itself is installed with
# the full py3compat machinery.

# Python 3 removed the execfile builtin; define an equivalent shim so that
# release.py and submodule.py can be exec'd into our globals on either version.
try:
    execfile
except NameError:
    def execfile(fname, globs, locs=None):
        # Default the local namespace to the global one, matching the
        # Python 2 builtin's behavior.
        locs = locs or globs
        exec(compile(open(fname).read(), fname, "exec"), globs, locs)
|
2008-06-07 06:16:36 +08:00
|
|
|
|
|
|
|
# A little utility we'll need below, since glob() does NOT allow you to do
# exclusion on multiple endings!
def file_doesnt_endwith(test, endings):
    """Return True if `test` is a file whose name ends with none of `endings`.

    Parameters
    ----------
    test : str
        Path to check.
    endings : sequence of str
        Filename suffixes to exclude.

    Returns
    -------
    bool
        False for non-files and for files matching any suffix in `endings`.
    """
    if not os.path.isfile(test):
        return False
    # str.endswith accepts a tuple of suffixes, replacing the manual loop
    # with a single C-level check.
    return not test.endswith(tuple(endings))
|
|
|
|
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Basic project information
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
2009-08-04 23:22:09 +08:00
|
|
|
# release.py contains version, authors, license, url, keywords, etc.
# It is exec'd (not imported) so this works before IPython is importable;
# its names (name, version, author, ...) land directly in our globals.
execfile(pjoin(repo_root, 'IPython','core','release.py'), globals())
|
2008-06-07 06:16:36 +08:00
|
|
|
|
|
|
|
# Create a dict with the basic information
# This dict is eventually passed to setup after additional keys are added.
# All of the metadata values below come from release.py, exec'd above.
setup_args = dict(
    name             = name,
    version          = version,
    description      = description,
    long_description = long_description,
    author           = author,
    author_email     = author_email,
    url              = url,
    download_url     = download_url,
    license          = license,
    platforms        = platforms,
    keywords         = keywords,
    classifiers      = classifiers,
    # install_data_ext extends install_data; more cmdclass entries are added
    # later by setup.py.
    cmdclass         = {'install_data': install_data_ext},
)
|
|
|
|
|
|
|
|
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Find packages
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def find_packages():
    """Find all of IPython's packages.

    Walks the IPython source tree and returns the dotted name of every
    directory that contains an ``__init__.py``, skipping abandoned
    subtrees (deathrow, quarantine).
    """
    excludes = ['deathrow', 'quarantine']
    packages = []
    for dirpath, _, filenames in os.walk('IPython'):
        pkg_name = dirpath.replace(os.path.sep, '.')
        excluded = any(pkg_name.startswith('IPython.' + exc)
                       for exc in excludes)
        if excluded or '__init__.py' not in filenames:
            # Either an abandoned subtree or a plain data directory.
            continue
        packages.append(pkg_name)
    return packages
|
|
|
|
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Find package data
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def find_package_data():
    """
    Find IPython's package_data.
    """
    # This is not enough for these things to appear in an sdist.
    # We need to muck with the MANIFEST to get this to work

    # exclude components and less from the walk;
    # we will build the components separately
    excludes = [
        pjoin('static', 'components'),
        pjoin('static', '*', 'less'),
    ]

    # walk notebook resources:
    # NOTE: package_data paths must be relative to the package directory,
    # hence the chdir into IPython/html (restored via `cwd` at the end).
    cwd = os.getcwd()
    os.chdir(os.path.join('IPython', 'html'))
    static_data = []
    for parent, dirs, files in os.walk('static'):
        if any(fnmatch(parent, pat) for pat in excludes):
            # prevent descending into subdirs
            dirs[:] = []
            continue
        for f in files:
            static_data.append(pjoin(parent, f))

    components = pjoin("static", "components")
    # select the components we actually need to install
    # (there are lots of resources we bundle for sdist-reasons that we don't actually use)
    static_data.extend([
        pjoin(components, "backbone", "backbone-min.js"),
        pjoin(components, "bootstrap", "js", "bootstrap.min.js"),
        pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
        pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
        pjoin(components, "es6-promise", "*.js"),
        pjoin(components, "font-awesome", "fonts", "*.*"),
        pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
        pjoin(components, "jquery", "jquery.min.js"),
        pjoin(components, "jquery-ui", "ui", "minified", "jquery-ui.min.js"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
        pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
        pjoin(components, "marked", "lib", "marked.js"),
        pjoin(components, "requirejs", "require.js"),
        pjoin(components, "underscore", "underscore-min.js"),
        pjoin(components, "moment", "moment.js"),
        pjoin(components, "moment", "min", "moment.min.js"),
        pjoin(components, "term.js", "src", "term.js"),
        pjoin(components, "text-encoding", "lib", "encoding.js"),
    ])

    # Ship all of Codemirror's CSS and JS
    for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
        for f in files:
            if f.endswith(('.js', '.css')):
                static_data.append(pjoin(parent, f))

    # collect the notebook's javascript test files (relative to html/tests)
    os.chdir(os.path.join('tests',))
    js_tests = glob('*.js') + glob('*/*.js')

    # nbconvert templates, relative to IPython/nbconvert
    os.chdir(os.path.join(cwd, 'IPython', 'nbconvert'))
    nbconvert_templates = [os.path.join(dirpath, '*.*')
                           for dirpath, _, _ in os.walk('templates')]

    # restore the original working directory before returning
    os.chdir(cwd)

    package_data = {
        'IPython.config.profile' : ['README*', '*/*.py'],
        'IPython.core.tests' : ['*.png', '*.jpg'],
        'IPython.lib.tests' : ['*.wav'],
        'IPython.testing.plugin' : ['*.txt'],
        'IPython.html' : ['templates/*'] + static_data,
        'IPython.html.tests' : js_tests,
        'IPython.qt.console' : ['resources/icon/*.svg'],
        'IPython.nbconvert' : nbconvert_templates +
            [
                'tests/files/*.*',
                'exporters/tests/files/*.*',
                'preprocessors/tests/files/*.*',
            ],
        'IPython.nbconvert.filters' : ['marked.js'],
        'IPython.nbformat' : [
            'tests/*.ipynb',
            'v3/nbformat.v3.schema.json',
            'v4/nbformat.v4.schema.json',
        ],
        'IPython.kernel': ['resources/*.*'],
    }

    return package_data
|
|
|
|
|
2014-02-08 06:12:53 +08:00
|
|
|
|
2014-02-06 13:36:49 +08:00
|
|
|
def check_package_data(package_data):
    """verify that package_data globs make sense"""
    print("checking package data")
    for pkg, patterns in package_data.items():
        pkg_base = pjoin(*pkg.split('.'))
        for pattern in patterns:
            full = pjoin(pkg_base, pattern)
            if '*' not in full:
                # A literal path must exist on disk.
                assert os.path.exists(full), "Missing package data: %s" % full
            else:
                # A glob must match at least one file.
                assert len(glob(full)) > 0, "No files match pattern %s" % full
|
|
|
|
|
2008-06-07 06:16:36 +08:00
|
|
|
|
2014-02-08 06:12:53 +08:00
|
|
|
def check_package_data_first(command):
    """decorator for checking package_data before running a given command

    Probably only needs to wrap build_py
    """
    class DecoratedCommand(command):
        def run(self):
            # Fail fast on broken globs before doing any real build work.
            check_package_data(self.package_data)
            command.run(self)
    return DecoratedCommand
|
2008-06-07 06:16:36 +08:00
|
|
|
|
|
|
|
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Find data files
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
2008-08-15 18:21:15 +08:00
|
|
|
def make_dir_struct(tag, base, out_base):
    """Make the directory structure of all files below a starting dir.

    Mirrors every directory below `base` onto `out_base`, pairing each
    output directory with the full paths of the files it should receive.
    This is just a convenience routine to help build a nested directory
    hierarchy because distutils is too stupid to do this by itself.

    Returns a list of ``(output_dir, [source_files])`` tuples suitable for
    use as a data_files parameter in install_data.  (`tag` is unused.)
    """
    sep = os.path.sep
    entries = []
    for dirpath, dirnames, filenames in os.walk(base):
        # Map dirpath under `base` onto the same relative path under
        # `out_base`.  Strip any leading separator so pjoin does not treat
        # the fragment as absolute (pjoin('foo/', '/bar') returns '/bar').
        relative = dirpath[len(base):]
        if relative.startswith(sep):
            relative = relative[len(sep):]
        dest_dir = pjoin(out_base, relative)
        # os.walk yields bare filenames; rejoin with dirpath for full paths.
        sources = [pjoin(dirpath, name) for name in filenames]
        entries.append((dest_dir, sources))
    return entries
|
2011-10-04 22:14:41 +08:00
|
|
|
|
2008-08-15 18:21:15 +08:00
|
|
|
|
2008-06-07 06:16:36 +08:00
|
|
|
def find_data_files():
    """
    Find IPython's data_files.

    Just man pages at this point.
    """
    manpagebase = pjoin('share', 'man', 'man1')

    # Prefer the gzipped manpages produced by the docs build.
    manpages = [f for f in glob(pjoin('docs', 'man', '*.1.gz')) if isfile(f)]
    if not manpages:
        # When running from a source tree, the manpages aren't gzipped
        manpages = [f for f in glob(pjoin('docs', 'man', '*.1')) if isfile(f)]

    # And assemble the entire output list
    return [(manpagebase, manpages)]
|
2008-06-07 06:16:36 +08:00
|
|
|
|
2009-07-31 09:16:44 +08:00
|
|
|
|
|
|
|
def make_man_update_target(manpage):
    """Return a target_update-compliant tuple for the given manpage.

    Parameters
    ----------
    manpage : string
        Name of the manpage, must include the section number (trailing number).

    Example
    -------

    >>> make_man_update_target('ipython.1') #doctest: +NORMALIZE_WHITESPACE
    ('docs/man/ipython.1.gz',
     ['docs/man/ipython.1'],
     'cd docs/man && gzip -9c ipython.1 > ipython.1.gz')
    """
    man_dir = pjoin('docs', 'man')
    compressed = manpage + '.gz'
    source = pjoin(man_dir, manpage)
    target = pjoin(man_dir, compressed)
    # Shell command that regenerates the gzipped page from the source page.
    gz_cmd = "cd %s && gzip -9c %s > %s" % (man_dir, manpage, compressed)
    return (target, [source], gz_cmd)
|
2011-10-04 22:14:41 +08:00
|
|
|
|
2011-12-09 06:31:57 +08:00
|
|
|
# The two functions below are copied from IPython.utils.path, so we don't need
# to import IPython during setup, which fails on Python 3.

def target_outdated(target, deps):
    """Determine whether a target is out of date.

    target_outdated(target,deps) -> 1/0

    deps: list of filenames which MUST exist.
    target: single filename which may or may not exist.

    If target doesn't exist or is older than any file listed in deps, return
    true, otherwise return false.
    """
    try:
        target_mtime = os.path.getmtime(target)
    except os.error:
        # A missing target is by definition outdated.
        return 1
    # Outdated as soon as any single dependency is newer than the target.
    return 1 if any(os.path.getmtime(dep) > target_mtime for dep in deps) else 0
|
|
|
|
|
|
|
|
|
|
|
|
def target_update(target, deps, cmd):
    """Update a target with a given command given a list of dependencies.

    target_update(target,deps,cmd) -> runs cmd if target is outdated.

    This is just a wrapper around target_outdated() which calls the given
    command if target is outdated."""
    # Guard clause: nothing to do when the target is already up to date.
    if not target_outdated(target, deps):
        return
    os.system(cmd)
|
2011-12-09 06:31:57 +08:00
|
|
|
|
2008-06-07 06:16:36 +08:00
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Find scripts
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
2013-11-01 08:38:34 +08:00
|
|
|
def find_entry_points():
    """Defines the command line entry points for IPython

    This always uses setuptools-style entry points. When setuptools is not in
    use, our own build_scripts_entrypt class below parses these and builds
    command line scripts.

    Each of our entry points gets both a plain name, e.g. ipython, and one
    suffixed with the Python major version number, e.g. ipython3.
    """
    templates = [
        'ipython%s = IPython:start_ipython',
        'ipcontroller%s = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
        'ipengine%s = IPython.parallel.apps.ipengineapp:launch_new_instance',
        'ipcluster%s = IPython.parallel.apps.ipclusterapp:launch_new_instance',
        'iptest%s = IPython.testing.iptestcontroller:main',
    ]
    major = str(sys.version_info[0])
    # Plain names first, then the version-suffixed variants.
    entry_points = []
    for suffix in ('', major):
        entry_points.extend(template % suffix for template in templates)
    return entry_points
|
|
|
|
|
|
|
|
script_src = """#!{executable}
|
2013-11-06 00:51:44 +08:00
|
|
|
# This script was automatically generated by setup.py
|
2013-12-16 02:33:13 +08:00
|
|
|
if __name__ == '__main__':
|
2013-12-17 07:02:39 +08:00
|
|
|
from {mod} import {func}
|
2013-12-16 02:33:13 +08:00
|
|
|
{func}()
|
2013-11-01 08:38:34 +08:00
|
|
|
"""
|
|
|
|
|
|
|
|
class build_scripts_entrypt(build_scripts):
    """Build the command line scripts

    Parse setuptools style entry points and write simple scripts to run the
    target functions.

    On Windows, this also creates .cmd wrappers for the scripts so that you can
    easily launch them from a command line.
    """
    def run(self):
        self.mkpath(self.build_dir)
        outfiles = []
        for script in find_entry_points():
            # Entry points look like 'name = module:function'.
            name, entrypt = script.split('=')
            name = name.strip()
            entrypt = entrypt.strip()
            outfile = os.path.join(self.build_dir, name)
            outfiles.append(outfile)
            print('Writing script to', outfile)

            # Render the script_src template into a launcher script.
            mod, func = entrypt.split(':')
            with open(outfile, 'w') as f:
                f.write(script_src.format(executable=sys.executable,
                                          mod=mod, func=func))

            if sys.platform == 'win32':
                # Write .cmd wrappers for Windows so 'ipython' etc. work at the
                # command line
                cmd_file = os.path.join(self.build_dir, name + '.cmd')
                cmd = '@"{python}" "%~dp0\{script}" %*\r\n'.format(
                    python=sys.executable, script=name)
                log.info("Writing %s wrapper script" % cmd_file)
                with open(cmd_file, 'w') as f:
                    f.write(cmd)

        return outfiles, outfiles
|
|
|
|
|
|
|
|
class install_lib_symlink(Command):
    # Symlink the in-tree IPython package into site-packages, for
    # development installs (the 'symlink' command's library step).
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        # Inherit the destination from the parent 'symlink' command.
        self.set_undefined_options('symlink',
                                   ('install_lib', 'install_dir'),
                                   )

    def run(self):
        if sys.platform == 'win32':
            raise Exception("This doesn't work on Windows.")
        pkg = os.path.join(os.getcwd(), 'IPython')
        dest = os.path.join(self.install_dir, 'IPython')
        # Replace a stale symlink from a previous run, if any.
        if os.path.islink(dest):
            print('removing existing symlink at %s' % dest)
            os.unlink(dest)
        print('symlinking %s -> %s' % (pkg, dest))
        os.symlink(pkg, dest)
|
2013-11-01 08:38:34 +08:00
|
|
|
|
2013-12-17 07:12:39 +08:00
|
|
|
class unsymlink(install):
    # Undo a previous 'symlink' development install by removing the
    # IPython symlink from the install location.
    def run(self):
        dest = os.path.join(self.install_lib, 'IPython')
        if os.path.islink(dest):
            print('removing symlink at %s' % dest)
            os.unlink(dest)
        else:
            print('No symlink exists at %s' % dest)
|
2013-11-01 08:38:34 +08:00
|
|
|
|
|
|
|
class install_symlinked(install):
    # Development install: symlink the package and scripts instead of
    # copying them, so in-tree edits take effect immediately.
    def run(self):
        if sys.platform == 'win32':
            raise Exception("This doesn't work on Windows.")

        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # 'sub_commands': a list of commands this command might have to run to
    # get its work done.  See cmd.py for more info.
    sub_commands = [('install_lib_symlink', lambda self:True),
                    ('install_scripts_sym', lambda self:True),
                   ]
|
|
|
|
|
|
|
|
class install_scripts_for_symlink(install_scripts):
    """Redefined to get options from 'symlink' instead of 'install'.

    I love distutils almost as much as I love setuptools.
    """
    def finalize_options(self):
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        # Pull destination/flags from the 'symlink' command rather than
        # the regular 'install' command.
        self.set_undefined_options('symlink',
                                   ('install_scripts', 'install_dir'),
                                   ('force', 'force'),
                                   ('skip_build', 'skip_build'),
                                   )
|
2013-08-22 02:29:24 +08:00
|
|
|
|
2008-06-07 06:16:36 +08:00
|
|
|
#---------------------------------------------------------------------------
|
2008-08-15 18:21:15 +08:00
|
|
|
# Verify all dependencies
|
2008-06-07 06:16:36 +08:00
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def check_for_dependencies():
    """Check for IPython's dependencies.

    This function should NOT be called if running under setuptools!

    Prints a build banner plus one status line per optional dependency,
    using the helpers in setupext.setupext.
    """
    from setupext.setupext import (
        print_line, print_raw, print_status,
        check_for_sphinx, check_for_pygments,
        check_for_nose, check_for_pexpect,
        check_for_pyzmq, check_for_readline,
        check_for_jinja2, check_for_tornado
    )
    print_line()
    print_raw("BUILDING IPYTHON")
    print_status('python', sys.version)
    print_status('platform', sys.platform)
    if sys.platform == 'win32':
        print_status('Windows version', sys.getwindowsversion())

    print_raw("")
    print_raw("OPTIONAL DEPENDENCIES")

    check_for_sphinx()
    check_for_pygments()
    check_for_nose()
    # pexpect is only usable on posix systems.
    if os.name == 'posix':
        check_for_pexpect()
    check_for_pyzmq()
    check_for_tornado()
    check_for_readline()
    check_for_jinja2()
|
2010-10-27 08:57:12 +08:00
|
|
|
|
2013-05-01 07:18:52 +08:00
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# VCS related
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
2013-05-04 06:42:10 +08:00
|
|
|
# utils.submodule has checks for submodule status
# (exec'd rather than imported so setup works before IPython is importable;
# this brings check_submodule_status into our globals)
execfile(pjoin('IPython','utils','submodule.py'), globals())
|
2013-05-01 07:18:52 +08:00
|
|
|
|
|
|
|
class UpdateSubmodules(Command):
    """Update git submodules

    IPython's external javascript dependencies live in a separate repo.
    """
    description = "Update git submodules"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        failure = False
        try:
            self.spawn('git submodule init'.split())
            self.spawn('git submodule update --recursive'.split())
        except Exception as e:
            # Remember and report the failure; the status check below
            # decides whether to abort.
            failure = e
            print(e)

        # check_submodule_status comes from the exec'd submodule.py above.
        if not check_submodule_status(repo_root) == 'clean':
            print("submodules could not be checked out")
            sys.exit(1)
|
2013-05-04 06:42:10 +08:00
|
|
|
|
2013-05-01 07:18:52 +08:00
|
|
|
|
|
|
|
def git_prebuild(pkg_dir, build_cmd=build_py):
    """Return extended build or sdist command class for recording commit

    records git commit in IPython.utils._sysinfo.commit

    for use in IPython.utils.sysinfo.sys_info() calls after installation.

    Also ensures that submodules exist prior to running
    """

    class MyBuildPy(build_cmd):
        ''' Subclass to write commit data into installation tree '''
        def run(self):
            build_cmd.run(self)
            # this one will only fire for build commands
            if hasattr(self, 'build_lib'):
                self._record_commit(self.build_lib)

        def make_release_tree(self, base_dir, files):
            # this one will fire for sdist
            build_cmd.make_release_tree(self, base_dir, files)
            self._record_commit(base_dir)

        def _record_commit(self, base_dir):
            # Ask git for the abbreviated HEAD commit; empty string when
            # not running from a git checkout (e.g. building from sdist).
            import subprocess
            proc = subprocess.Popen('git rev-parse --short HEAD',
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True)
            repo_commit, _ = proc.communicate()
            repo_commit = repo_commit.strip().decode("ascii")

            out_pth = pjoin(base_dir, pkg_dir, 'utils', '_sysinfo.py')
            if os.path.isfile(out_pth) and not repo_commit:
                # nothing to write, don't clobber
                return

            print("writing git commit '%s' to %s" % (repo_commit, out_pth))

            # remove to avoid overwriting original via hard link
            try:
                os.remove(out_pth)
            except (IOError, OSError):
                pass
            with open(out_pth, 'w') as out_file:
                out_file.writelines([
                    '# GENERATED BY setup.py\n',
                    'commit = u"%s"\n' % repo_commit,
                ])
    # Wrap with the submodule check so builds fail early on missing JS deps.
    return require_submodules(MyBuildPy)
|
2013-05-01 07:18:52 +08:00
|
|
|
|
|
|
|
|
|
|
|
def require_submodules(command):
    """decorator for instructing a command to check for submodules before running"""
    class DecoratedCommand(command):
        def run(self):
            # check_submodule_status comes from the exec'd submodule.py above.
            if not check_submodule_status(repo_root) == 'clean':
                print("submodules missing! Run `setup.py submodule` and try again")
                sys.exit(1)
            command.run(self)
    return DecoratedCommand
|
2013-09-15 03:21:05 +08:00
|
|
|
|
2014-01-31 07:03:12 +08:00
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# bdist related
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def get_bdist_wheel():
    """Construct bdist_wheel command for building wheels

    Constructs py2-none-any tag, instead of py2.7-none-any
    """
    class RequiresWheel(Command):
        # Placeholder used when the 'wheel' package is unavailable;
        # fails with a helpful message instead of an import error.
        description = "Dummy command for missing bdist_wheel"
        user_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            print("bdist_wheel requires the wheel package")
            sys.exit(1)

    if 'setuptools' not in sys.modules:
        return RequiresWheel
    else:
        try:
            from wheel.bdist_wheel import bdist_wheel, read_pkg_info, write_pkg_info
        except ImportError:
            return RequiresWheel

        class bdist_wheel_tag(bdist_wheel):

            def add_requirements(self, metadata_path):
                """transform platform-dependent requirements"""
                pkg_info = read_pkg_info(metadata_path)
                # pkg_info is an email.Message object (?!)
                # we have to remove the unconditional 'readline' and/or 'pyreadline' entries
                # and transform them to conditionals
                requires = pkg_info.get_all('Requires-Dist')
                del pkg_info['Requires-Dist']
                def _remove_startswith(lis, prefix):
                    """like list.remove, but with startswith instead of =="""
                    # NOTE: only removes the first matching entry.
                    found = False
                    for idx, item in enumerate(lis):
                        if item.startswith(prefix):
                            found = True
                            break
                    if found:
                        lis.pop(idx)

                # Drop the unconditional entries, then re-add them with
                # environment markers so they only apply where needed.
                for pkg in ("gnureadline", "pyreadline", "mock"):
                    _remove_startswith(requires, pkg)
                requires.append("gnureadline; sys.platform == 'darwin' and platform.python_implementation == 'CPython'")
                requires.append("pyreadline (>=2.0); extra == 'terminal' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
                requires.append("pyreadline (>=2.0); extra == 'all' and sys.platform == 'win32' and platform.python_implementation == 'CPython'")
                requires.append("mock; extra == 'test' and python_version < '3.3'")
                for r in requires:
                    pkg_info['Requires-Dist'] = r
                write_pkg_info(metadata_path, pkg_info)

        return bdist_wheel_tag
|
|
|
|
|
2013-09-15 03:21:05 +08:00
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
# Notebook related
|
|
|
|
#---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
class CompileCSS(Command):
    """Recompile Notebook CSS

    Regenerate the compiled CSS from LESS sources.

    Requires various dev dependencies, such as invoke and lessc.
    """
    description = "Recompile Notebook CSS"
    user_options = [
        ('minify', 'x', "minify CSS"),
        ('force', 'f', "force recompilation of CSS"),
    ]

    def initialize_options(self):
        self.minify = False
        self.force = False

    def finalize_options(self):
        # distutils passes option flags as ints/strings; normalize to bool.
        self.minify = bool(self.minify)
        self.force = bool(self.force)

    def run(self):
        cmd = ['invoke', 'css']
        if self.minify:
            cmd.append('--minify')
        if self.force:
            cmd.append('--force')
        try:
            # Run invoke from IPython/html, where its tasks file lives.
            p = Popen(cmd, cwd=pjoin(repo_root, "IPython", "html"), stderr=PIPE)
        except OSError:
            # invoke binary not found on PATH
            raise DistutilsExecError("invoke is required to rebuild css (pip install invoke)")
        out, err = p.communicate()
        if p.returncode:
            # On Python 3 stderr is bytes; decode before raising.
            if sys.version_info[0] >= 3:
                err = err.decode('utf8', 'replace')
            raise DistutilsExecError(err.strip())
|
2014-07-24 05:19:27 +08:00
|
|
|
|
2013-10-26 05:01:31 +08:00
|
|
|
|
|
|
|
class JavascriptVersion(Command):
    """write the javascript version to notebook javascript"""
    description = "Write IPython version to javascript"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        nsfile = pjoin(repo_root, "IPython", "html", "static", "base", "js", "namespace.js")
        with open(nsfile) as f:
            lines = f.readlines()
        # Rewrite the file in place, replacing the IPython.version line
        # with the version exec'd from release.py.
        with open(nsfile, 'w') as f:
            for line in lines:
                if line.startswith("IPython.version"):
                    line = 'IPython.version = "{0}";\n'.format(version)
                f.write(line)
|
2014-07-24 05:19:27 +08:00
|
|
|
|
|
|
|
|
2014-07-24 06:33:08 +08:00
|
|
|
def css_js_prerelease(command, strict=True):
    """decorator for building js/minified css prior to a release

    When `strict` is False, css build failures are logged and ignored
    instead of aborting the wrapped command.
    """
    class DecoratedCommand(command):
        def run(self):
            self.distribution.run_command('jsversion')
            # Force minified output for release artifacts.
            css = self.distribution.get_command_obj('css')
            css.minify = True
            try:
                self.distribution.run_command('css')
            except Exception as e:
                if strict:
                    raise
                else:
                    log.warn("rebuilding css and sourcemaps failed (not a problem)")
                    log.warn(str(e))
            command.run(self)
    return DecoratedCommand
|